Mirror of https://github.com/mudler/LocalAI.git (synced 2025-05-20 10:35:01 +00:00)
feat: create bash library to handle install/run/test of python backends (#2286)
* feat: create bash library to handle install/run/test of python backends
* chore: minor cleanup
* fix: remove incorrect LIMIT_TARGETS from parler-tts
* fix: update runUnittests to handle running tests from a custom test file
* chore: document runUnittests

Signed-off-by: Chris Jowett <421501+cryptk@users.noreply.github.com>
This commit is contained in: parent 7f4febd6c2, commit e2de8a88f7
106 changed files with 425 additions and 1606 deletions
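The commit replaces the per-backend conda environments and hand-rolled install/run/test scripts with one shared bash library. As a rough sketch of the resulting pattern (the function names are the ones defined in backend/python/common/libbackend.sh later in this diff; the per-backend wrappers vary slightly):

# install.sh - create the backend venv and install its requirements*.txt files
source $(dirname $0)/../common/libbackend.sh
installRequirements

# run.sh - autodiscover and start the gRPC server (server.py, backend.py or <backend>.py)
source $(dirname $0)/../common/libbackend.sh
startBackend $@

# test.sh - run test.py with unittest inside the venv
source $(dirname $0)/../common/libbackend.sh
runUnittests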
@@ -14,4 +14,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf venv
+	rm -rf venv __pycache__
@@ -1,93 +0,0 @@ (deleted: abandoned autogptq conda environment file)
|
||||||
####
|
|
||||||
# Attention! This file is abandoned.
|
|
||||||
# Please use the ../common-env/transformers/transformers.yml file to manage dependencies.
|
|
||||||
###
|
|
||||||
name: autogptq
|
|
||||||
channels:
|
|
||||||
- defaults
|
|
||||||
dependencies:
|
|
||||||
- _libgcc_mutex=0.1=main
|
|
||||||
- _openmp_mutex=5.1=1_gnu
|
|
||||||
- bzip2=1.0.8=h7b6447c_0
|
|
||||||
- ca-certificates=2023.08.22=h06a4308_0
|
|
||||||
- ld_impl_linux-64=2.38=h1181459_1
|
|
||||||
- libffi=3.4.4=h6a678d5_0
|
|
||||||
- libgcc-ng=11.2.0=h1234567_1
|
|
||||||
- libgomp=11.2.0=h1234567_1
|
|
||||||
- libstdcxx-ng=11.2.0=h1234567_1
|
|
||||||
- libuuid=1.41.5=h5eee18b_0
|
|
||||||
- ncurses=6.4=h6a678d5_0
|
|
||||||
- openssl=3.0.11=h7f8727e_2
|
|
||||||
- pip=23.2.1=py311h06a4308_0
|
|
||||||
- python=3.11.5=h955ad1f_0
|
|
||||||
- readline=8.2=h5eee18b_0
|
|
||||||
- setuptools=68.0.0=py311h06a4308_0
|
|
||||||
- sqlite=3.41.2=h5eee18b_0
|
|
||||||
- tk=8.6.12=h1ccaba5_0
|
|
||||||
- wheel=0.41.2=py311h06a4308_0
|
|
||||||
- xz=5.4.2=h5eee18b_0
|
|
||||||
- zlib=1.2.13=h5eee18b_0
|
|
||||||
- pip:
|
|
||||||
- accelerate==0.27.0
|
|
||||||
- aiohttp==3.8.5
|
|
||||||
- aiosignal==1.3.1
|
|
||||||
- async-timeout==4.0.3
|
|
||||||
- attrs==23.1.0
|
|
||||||
- auto-gptq==0.7.1
|
|
||||||
- certifi==2023.7.22
|
|
||||||
- charset-normalizer==3.3.0
|
|
||||||
- datasets==2.14.5
|
|
||||||
- dill==0.3.7
|
|
||||||
- filelock==3.12.4
|
|
||||||
- frozenlist==1.4.0
|
|
||||||
- fsspec==2023.6.0
|
|
||||||
- grpcio==1.63.0
|
|
||||||
- huggingface-hub==0.16.4
|
|
||||||
- idna==3.4
|
|
||||||
- jinja2==3.1.2
|
|
||||||
- markupsafe==2.1.3
|
|
||||||
- mpmath==1.3.0
|
|
||||||
- multidict==6.0.4
|
|
||||||
- multiprocess==0.70.15
|
|
||||||
- networkx==3.1
|
|
||||||
- numpy==1.26.0
|
|
||||||
- nvidia-cublas-cu12==12.1.3.1
|
|
||||||
- nvidia-cuda-cupti-cu12==12.1.105
|
|
||||||
- nvidia-cuda-nvrtc-cu12==12.1.105
|
|
||||||
- nvidia-cuda-runtime-cu12==12.1.105
|
|
||||||
- nvidia-cudnn-cu12==8.9.2.26
|
|
||||||
- nvidia-cufft-cu12==11.0.2.54
|
|
||||||
- nvidia-curand-cu12==10.3.2.106
|
|
||||||
- nvidia-cusolver-cu12==11.4.5.107
|
|
||||||
- nvidia-cusparse-cu12==12.1.0.106
|
|
||||||
- nvidia-nccl-cu12==2.18.1
|
|
||||||
- nvidia-nvjitlink-cu12==12.2.140
|
|
||||||
- nvidia-nvtx-cu12==12.1.105
|
|
||||||
- optimum==1.17.1
|
|
||||||
- packaging==23.2
|
|
||||||
- pandas==2.1.1
|
|
||||||
- peft==0.5.0
|
|
||||||
- protobuf==4.24.4
|
|
||||||
- psutil==5.9.5
|
|
||||||
- pyarrow==13.0.0
|
|
||||||
- python-dateutil==2.8.2
|
|
||||||
- pytz==2023.3.post1
|
|
||||||
- pyyaml==6.0.1
|
|
||||||
- regex==2023.10.3
|
|
||||||
- requests==2.31.0
|
|
||||||
- rouge==1.0.1
|
|
||||||
- safetensors>=0.3.3
|
|
||||||
- six==1.16.0
|
|
||||||
- sympy==1.12
|
|
||||||
- tokenizers==0.14.0
|
|
||||||
- tqdm==4.66.1
|
|
||||||
- torch==2.2.1
|
|
||||||
- torchvision==0.17.1
|
|
||||||
- transformers==4.34.0
|
|
||||||
- transformers_stream_generator==0.0.5
|
|
||||||
- triton==2.1.0
|
|
||||||
- typing-extensions==4.8.0
|
|
||||||
- tzdata==2023.3
|
|
||||||
- urllib3==2.0.6
|
|
||||||
- xxhash==3.4.1
|
|
||||||
- yarl==1.9.2
|
|
|
@@ -1,34 +1,6 @@
 #!/bin/bash
-set -ex
-
-BUILD_ISOLATION_FLAG=""
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+installRequirements
@@ -1,10 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the autogptq server
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/autogptq.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -26,4 +26,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf venv
+	rm -rf venv __pycache__
@@ -1,34 +1,6 @@
 #!/bin/bash
-set -ex
-
-BUILD_ISOLATION_FLAG=""
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+installRequirements
@@ -1,10 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the ttsbark server
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/ttsbark.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -18,7 +18,7 @@ class TestBackendServicer(unittest.TestCase):
         """
         This method sets up the gRPC service by starting the server
         """
-        self.service = subprocess.Popen(["python3", "ttsbark.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python3", "backend.py", "--addr", "localhost:50051"])
         time.sleep(10)
 
     def tearDown(self) -> None:
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -1,21 +0,0 @@ (deleted file)
CONDA_ENV_PATH = "transformers.yml"

ifeq ($(BUILD_TYPE), cublas)
	CONDA_ENV_PATH = "transformers-nvidia.yml"
endif

ifeq ($(BUILD_TYPE), hipblas)
	CONDA_ENV_PATH = "transformers-rocm.yml"
endif

# Intel GPU are supposed to have dependencies installed in the main python
# environment, so we skip conda installation for SYCL builds.
# https://github.com/intel/intel-extension-for-pytorch/issues/538
ifneq (,$(findstring sycl,$(BUILD_TYPE)))
export SKIP_CONDA=1
endif

.PHONY: transformers
transformers:
	@echo "Installing $(CONDA_ENV_PATH)..."
	bash install.sh $(CONDA_ENV_PATH)
@@ -1,44 +0,0 @@ (deleted file)
#!/bin/bash
set -ex

SKIP_CONDA=${SKIP_CONDA:-0}
REQUIREMENTS_FILE=$1

# Check if environment exist
conda_env_exists(){
    ! conda list --name "${@}" >/dev/null 2>/dev/null
}

if [ $SKIP_CONDA -eq 1 ]; then
    echo "Skipping conda environment installation"
else
    export PATH=$PATH:/opt/conda/bin
    if conda_env_exists "transformers" ; then
        echo "Creating virtual environment..."
        conda env create --name transformers --file $REQUIREMENTS_FILE
        echo "Virtual environment created."
    else
        echo "Virtual environment already exists."
    fi
fi

if [ -d "/opt/intel" ]; then
    # Intel GPU: If the directory exists, we assume we are using the intel image
    # (no conda env)
    # https://github.com/intel/intel-extension-for-pytorch/issues/538
    pip install torch==2.1.0.post0 torchvision==0.16.0.post0 torchaudio==2.1.0.post0 intel-extension-for-pytorch==2.1.20+xpu oneccl_bind_pt==2.1.200+xpu intel-extension-for-transformers datasets sentencepiece tiktoken neural_speed optimum[openvino] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
fi

# If we didn't skip conda, activate the environment
# to install FlashAttention
if [ $SKIP_CONDA -eq 0 ]; then
    source activate transformers
fi
if [[ $REQUIREMENTS_FILE =~ -nvidia.yml$ ]]; then
    #TODO: FlashAttention is supported on nvidia and ROCm, but ROCm install can't be done this easily
    pip install flash-attn --no-build-isolation
fi

if [ "$PIP_CACHE_PURGE" = true ] ; then
    pip cache purge
fi
@@ -1,125 +0,0 @@ (deleted: transformers conda environment file, nvidia variant)
|
||||||
name: transformers
|
|
||||||
channels:
|
|
||||||
- defaults
|
|
||||||
dependencies:
|
|
||||||
- _libgcc_mutex=0.1=main
|
|
||||||
- _openmp_mutex=5.1=1_gnu
|
|
||||||
- bzip2=1.0.8=h7b6447c_0
|
|
||||||
- ca-certificates=2023.08.22=h06a4308_0
|
|
||||||
- ld_impl_linux-64=2.38=h1181459_1
|
|
||||||
- libffi=3.4.4=h6a678d5_0
|
|
||||||
- libgcc-ng=11.2.0=h1234567_1
|
|
||||||
- libgomp=11.2.0=h1234567_1
|
|
||||||
- libstdcxx-ng=11.2.0=h1234567_1
|
|
||||||
- libuuid=1.41.5=h5eee18b_0
|
|
||||||
- ncurses=6.4=h6a678d5_0
|
|
||||||
- openssl=3.0.11=h7f8727e_2
|
|
||||||
- pip=23.2.1=py311h06a4308_0
|
|
||||||
- python=3.11.5=h955ad1f_0
|
|
||||||
- readline=8.2=h5eee18b_0
|
|
||||||
- setuptools=68.0.0=py311h06a4308_0
|
|
||||||
- sqlite=3.41.2=h5eee18b_0
|
|
||||||
- tk=8.6.12=h1ccaba5_0
|
|
||||||
- wheel=0.41.2=py311h06a4308_0
|
|
||||||
- xz=5.4.2=h5eee18b_0
|
|
||||||
- zlib=1.2.13=h5eee18b_0
|
|
||||||
- pip:
|
|
||||||
- accelerate==0.27.0
|
|
||||||
- aiohttp==3.8.5
|
|
||||||
- aiosignal==1.3.1
|
|
||||||
- async-timeout==4.0.3
|
|
||||||
- auto-gptq==0.7.1
|
|
||||||
- attrs==23.1.0
|
|
||||||
- bark==0.1.5
|
|
||||||
- bitsandbytes==0.43.0
|
|
||||||
- boto3==1.28.61
|
|
||||||
- botocore==1.31.61
|
|
||||||
- certifi==2023.7.22
|
|
||||||
- TTS==0.22.0
|
|
||||||
- charset-normalizer==3.3.0
|
|
||||||
- datasets==2.14.5
|
|
||||||
- sentence-transformers==2.5.1 # Updated Version
|
|
||||||
- sentencepiece==0.1.99
|
|
||||||
- dill==0.3.7
|
|
||||||
- einops==0.7.0
|
|
||||||
- encodec==0.1.1
|
|
||||||
- filelock==3.12.4
|
|
||||||
- frozenlist==1.4.0
|
|
||||||
- fsspec==2023.6.0
|
|
||||||
- funcy==2.0
|
|
||||||
- grpcio==1.63.0
|
|
||||||
- huggingface-hub
|
|
||||||
- idna==3.4
|
|
||||||
- jinja2==3.1.2
|
|
||||||
- jmespath==1.0.1
|
|
||||||
- markupsafe==2.1.3
|
|
||||||
- mpmath==1.3.0
|
|
||||||
- multidict==6.0.4
|
|
||||||
- multiprocess==0.70.15
|
|
||||||
- networkx
|
|
||||||
- numpy==1.26.0
|
|
||||||
- nvidia-cublas-cu12==12.1.3.1
|
|
||||||
- nvidia-cuda-cupti-cu12==12.1.105
|
|
||||||
- nvidia-cuda-nvrtc-cu12==12.1.105
|
|
||||||
- nvidia-cuda-runtime-cu12==12.1.105
|
|
||||||
- nvidia-cudnn-cu12==8.9.2.26
|
|
||||||
- nvidia-cufft-cu12==11.0.2.54
|
|
||||||
- nvidia-curand-cu12==10.3.2.106
|
|
||||||
- nvidia-cusolver-cu12==11.4.5.107
|
|
||||||
- nvidia-cusparse-cu12==12.1.0.106
|
|
||||||
- nvidia-nccl-cu12==2.18.1
|
|
||||||
- nvidia-nvjitlink-cu12==12.2.140
|
|
||||||
- nvidia-nvtx-cu12==12.1.105
|
|
||||||
- optimum==1.17.1
|
|
||||||
- packaging==23.2
|
|
||||||
- pandas
|
|
||||||
- peft==0.5.0
|
|
||||||
- protobuf==4.24.4
|
|
||||||
- psutil==5.9.5
|
|
||||||
- pyarrow==13.0.0
|
|
||||||
- python-dateutil==2.8.2
|
|
||||||
- pytz==2023.3.post1
|
|
||||||
- pyyaml==6.0.1
|
|
||||||
- regex==2023.10.3
|
|
||||||
- requests==2.31.0
|
|
||||||
- rouge==1.0.1
|
|
||||||
- s3transfer==0.7.0
|
|
||||||
- safetensors>=0.4.1
|
|
||||||
- scipy==1.12.0 # Updated Version
|
|
||||||
- six==1.16.0
|
|
||||||
- sympy==1.12
|
|
||||||
- tokenizers
|
|
||||||
- torch==2.1.2
|
|
||||||
- torchvision==0.16.2
|
|
||||||
- torchaudio==2.1.2
|
|
||||||
- tqdm==4.66.1
|
|
||||||
- triton==2.1.0
|
|
||||||
- typing-extensions==4.8.0
|
|
||||||
- tzdata==2023.3
|
|
||||||
- urllib3==1.26.17
|
|
||||||
- xxhash==3.4.1
|
|
||||||
- yarl==1.9.2
|
|
||||||
- soundfile
|
|
||||||
- langid
|
|
||||||
- wget
|
|
||||||
- unidecode
|
|
||||||
- pyopenjtalk-prebuilt
|
|
||||||
- pypinyin
|
|
||||||
- inflect
|
|
||||||
- cn2an
|
|
||||||
- jieba
|
|
||||||
- eng_to_ipa
|
|
||||||
- openai-whisper
|
|
||||||
- matplotlib
|
|
||||||
- gradio==3.41.2
|
|
||||||
- nltk
|
|
||||||
- sudachipy
|
|
||||||
- sudachidict_core
|
|
||||||
- vocos
|
|
||||||
- vllm>=0.4.0
|
|
||||||
- transformers>=4.38.2 # Updated Version
|
|
||||||
- transformers_stream_generator==0.0.5
|
|
||||||
- xformers==0.0.23.post1
|
|
||||||
- rerankers[transformers]
|
|
||||||
- pydantic
|
|
||||||
prefix: /opt/conda/envs/transformers
|
|
|
@@ -1,113 +0,0 @@ (deleted: transformers conda environment file)
|
||||||
name: transformers
|
|
||||||
channels:
|
|
||||||
- defaults
|
|
||||||
dependencies:
|
|
||||||
- _libgcc_mutex=0.1=main
|
|
||||||
- _openmp_mutex=5.1=1_gnu
|
|
||||||
- bzip2=1.0.8=h7b6447c_0
|
|
||||||
- ca-certificates=2023.08.22=h06a4308_0
|
|
||||||
- ld_impl_linux-64=2.38=h1181459_1
|
|
||||||
- libffi=3.4.4=h6a678d5_0
|
|
||||||
- libgcc-ng=11.2.0=h1234567_1
|
|
||||||
- libgomp=11.2.0=h1234567_1
|
|
||||||
- libstdcxx-ng=11.2.0=h1234567_1
|
|
||||||
- libuuid=1.41.5=h5eee18b_0
|
|
||||||
- ncurses=6.4=h6a678d5_0
|
|
||||||
- openssl=3.0.11=h7f8727e_2
|
|
||||||
- pip=23.2.1=py311h06a4308_0
|
|
||||||
- python=3.11.5=h955ad1f_0
|
|
||||||
- readline=8.2=h5eee18b_0
|
|
||||||
- setuptools=68.0.0=py311h06a4308_0
|
|
||||||
- sqlite=3.41.2=h5eee18b_0
|
|
||||||
- tk=8.6.12=h1ccaba5_0
|
|
||||||
- wheel=0.41.2=py311h06a4308_0
|
|
||||||
- xz=5.4.2=h5eee18b_0
|
|
||||||
- zlib=1.2.13=h5eee18b_0
|
|
||||||
- pip:
|
|
||||||
- --pre
|
|
||||||
- --extra-index-url https://download.pytorch.org/whl/nightly/
|
|
||||||
- accelerate==0.27.0
|
|
||||||
- auto-gptq==0.7.1
|
|
||||||
- aiohttp==3.8.5
|
|
||||||
- aiosignal==1.3.1
|
|
||||||
- async-timeout==4.0.3
|
|
||||||
- attrs==23.1.0
|
|
||||||
- bark==0.1.5
|
|
||||||
- boto3==1.28.61
|
|
||||||
- botocore==1.31.61
|
|
||||||
- certifi==2023.7.22
|
|
||||||
- TTS==0.22.0
|
|
||||||
- charset-normalizer==3.3.0
|
|
||||||
- datasets==2.14.5
|
|
||||||
- sentence-transformers==2.5.1 # Updated Version
|
|
||||||
- sentencepiece==0.1.99
|
|
||||||
- dill==0.3.7
|
|
||||||
- einops==0.7.0
|
|
||||||
- encodec==0.1.1
|
|
||||||
- filelock==3.12.4
|
|
||||||
- frozenlist==1.4.0
|
|
||||||
- fsspec==2023.6.0
|
|
||||||
- funcy==2.0
|
|
||||||
- grpcio==1.63.0
|
|
||||||
- huggingface-hub
|
|
||||||
- idna==3.4
|
|
||||||
- jinja2==3.1.2
|
|
||||||
- jmespath==1.0.1
|
|
||||||
- markupsafe==2.1.3
|
|
||||||
- mpmath==1.3.0
|
|
||||||
- multidict==6.0.4
|
|
||||||
- multiprocess==0.70.15
|
|
||||||
- networkx
|
|
||||||
- numpy==1.26.0
|
|
||||||
- packaging==23.2
|
|
||||||
- pandas
|
|
||||||
- peft==0.5.0
|
|
||||||
- protobuf==4.24.4
|
|
||||||
- psutil==5.9.5
|
|
||||||
- pyarrow==13.0.0
|
|
||||||
- python-dateutil==2.8.2
|
|
||||||
- pytz==2023.3.post1
|
|
||||||
- pyyaml==6.0.1
|
|
||||||
- regex==2023.10.3
|
|
||||||
- requests==2.31.0
|
|
||||||
- rouge==1.0.1
|
|
||||||
- s3transfer==0.7.0
|
|
||||||
- safetensors>=0.4.1
|
|
||||||
- scipy==1.12.0 # Updated Version
|
|
||||||
- six==1.16.0
|
|
||||||
- sympy==1.12
|
|
||||||
- tokenizers
|
|
||||||
- torch
|
|
||||||
- torchaudio
|
|
||||||
- tqdm==4.66.1
|
|
||||||
- triton==2.1.0
|
|
||||||
- typing-extensions==4.8.0
|
|
||||||
- tzdata==2023.3
|
|
||||||
- urllib3==1.26.17
|
|
||||||
- xxhash==3.4.1
|
|
||||||
- yarl==1.9.2
|
|
||||||
- soundfile
|
|
||||||
- langid
|
|
||||||
- wget
|
|
||||||
- unidecode
|
|
||||||
- optimum==1.17.1
|
|
||||||
- pyopenjtalk-prebuilt
|
|
||||||
- pypinyin
|
|
||||||
- inflect
|
|
||||||
- cn2an
|
|
||||||
- jieba
|
|
||||||
- eng_to_ipa
|
|
||||||
- openai-whisper
|
|
||||||
- matplotlib
|
|
||||||
- gradio==3.41.2
|
|
||||||
- nltk
|
|
||||||
- sudachipy
|
|
||||||
- sudachidict_core
|
|
||||||
- vocos
|
|
||||||
- vllm>=0.4.0
|
|
||||||
- transformers>=4.38.2 # Updated Version
|
|
||||||
- transformers_stream_generator==0.0.5
|
|
||||||
- xformers==0.0.23.post1
|
|
||||||
- rerankers[transformers]
|
|
||||||
- pydantic
|
|
||||||
prefix: /opt/conda/envs/transformers
|
|
|
@@ -1,118 +0,0 @@ (deleted: transformers conda environment file, intel variant)
|
||||||
name: transformers
|
|
||||||
channels:
|
|
||||||
- defaults
|
|
||||||
dependencies:
|
|
||||||
- _libgcc_mutex=0.1=main
|
|
||||||
- _openmp_mutex=5.1=1_gnu
|
|
||||||
- bzip2=1.0.8=h7b6447c_0
|
|
||||||
- ca-certificates=2023.08.22=h06a4308_0
|
|
||||||
- ld_impl_linux-64=2.38=h1181459_1
|
|
||||||
- libffi=3.4.4=h6a678d5_0
|
|
||||||
- libgcc-ng=11.2.0=h1234567_1
|
|
||||||
- libgomp=11.2.0=h1234567_1
|
|
||||||
- libstdcxx-ng=11.2.0=h1234567_1
|
|
||||||
- libuuid=1.41.5=h5eee18b_0
|
|
||||||
- ncurses=6.4=h6a678d5_0
|
|
||||||
- openssl=3.0.11=h7f8727e_2
|
|
||||||
- pip=23.2.1=py311h06a4308_0
|
|
||||||
- python=3.11.5=h955ad1f_0
|
|
||||||
- readline=8.2=h5eee18b_0
|
|
||||||
- setuptools=68.0.0=py311h06a4308_0
|
|
||||||
- sqlite=3.41.2=h5eee18b_0
|
|
||||||
- tk=8.6.12=h1ccaba5_0
|
|
||||||
- wheel=0.41.2=py311h06a4308_0
|
|
||||||
- xz=5.4.2=h5eee18b_0
|
|
||||||
- zlib=1.2.13=h5eee18b_0
|
|
||||||
- pip:
|
|
||||||
- accelerate==0.27.0
|
|
||||||
- aiohttp==3.8.5
|
|
||||||
- aiosignal==1.3.1
|
|
||||||
- auto-gptq==0.7.1
|
|
||||||
- async-timeout==4.0.3
|
|
||||||
- attrs==23.1.0
|
|
||||||
- bark==0.1.5
|
|
||||||
- boto3==1.28.61
|
|
||||||
- botocore==1.31.61
|
|
||||||
- certifi==2023.7.22
|
|
||||||
- coloredlogs==15.0.1
|
|
||||||
- TTS==0.22.0
|
|
||||||
- charset-normalizer==3.3.0
|
|
||||||
- datasets==2.14.5
|
|
||||||
- sentence-transformers==2.5.1 # Updated Version
|
|
||||||
- sentencepiece==0.1.99
|
|
||||||
- dill==0.3.7
|
|
||||||
- einops==0.7.0
|
|
||||||
- encodec==0.1.1
|
|
||||||
- filelock==3.12.4
|
|
||||||
- frozenlist==1.4.0
|
|
||||||
- fsspec==2023.6.0
|
|
||||||
- funcy==2.0
|
|
||||||
- grpcio==1.63.0
|
|
||||||
- huggingface-hub
|
|
||||||
- humanfriendly==10.0
|
|
||||||
- idna==3.4
|
|
||||||
- jinja2==3.1.2
|
|
||||||
- jmespath==1.0.1
|
|
||||||
- markupsafe==2.1.3
|
|
||||||
- mpmath==1.3.0
|
|
||||||
- multidict==6.0.4
|
|
||||||
- multiprocess==0.70.15
|
|
||||||
- networkx
|
|
||||||
- numpy==1.26.0
|
|
||||||
- onnx==1.15.0
|
|
||||||
- openvino==2024.1.0
|
|
||||||
- openvino-telemetry==2024.1.0
|
|
||||||
- optimum[openvino]==1.19.1
|
|
||||||
- optimum-intel==1.16.1
|
|
||||||
- packaging==23.2
|
|
||||||
- pandas
|
|
||||||
- peft==0.5.0
|
|
||||||
- protobuf==4.24.4
|
|
||||||
- psutil==5.9.5
|
|
||||||
- pyarrow==13.0.0
|
|
||||||
- python-dateutil==2.8.2
|
|
||||||
- pytz==2023.3.post1
|
|
||||||
- pyyaml==6.0.1
|
|
||||||
- regex==2023.10.3
|
|
||||||
- requests==2.31.0
|
|
||||||
- rouge==1.0.1
|
|
||||||
- s3transfer==0.7.0
|
|
||||||
- safetensors>=0.4.1
|
|
||||||
- scipy==1.12.0 # Updated Version
|
|
||||||
- six==1.16.0
|
|
||||||
- sympy==1.12
|
|
||||||
- tokenizers
|
|
||||||
- torch==2.1.2
|
|
||||||
- torchvision==0.16.2
|
|
||||||
- torchaudio==2.1.2
|
|
||||||
- tqdm==4.66.1
|
|
||||||
- triton==2.1.0
|
|
||||||
- typing-extensions==4.8.0
|
|
||||||
- tzdata==2023.3
|
|
||||||
- urllib3==1.26.17
|
|
||||||
- xxhash==3.4.1
|
|
||||||
- yarl==1.9.2
|
|
||||||
- soundfile
|
|
||||||
- langid
|
|
||||||
- wget
|
|
||||||
- unidecode
|
|
||||||
- pyopenjtalk-prebuilt
|
|
||||||
- pypinyin
|
|
||||||
- inflect
|
|
||||||
- cn2an
|
|
||||||
- jieba
|
|
||||||
- eng_to_ipa
|
|
||||||
- openai-whisper
|
|
||||||
- matplotlib
|
|
||||||
- gradio==3.41.2
|
|
||||||
- nltk
|
|
||||||
- sudachipy
|
|
||||||
- sudachidict_core
|
|
||||||
- vocos
|
|
||||||
- vllm>=0.4.0
|
|
||||||
- transformers>=4.38.2 # Updated Version
|
|
||||||
- transformers_stream_generator==0.0.5
|
|
||||||
- xformers==0.0.23.post1
|
|
||||||
- rerankers[transformers]
|
|
||||||
- pydantic
|
|
||||||
prefix: /opt/conda/envs/transformers
|
|
backend/python/common/libbackend.sh (new file, 213 lines)
@@ -0,0 +1,213 @@
# init handles the setup of the library
#
# use the library by adding the following line to a script:
#   source $(dirname $0)/../common/libbackend.sh
#
# If you want to limit what targets a backend can be used on, set the variable LIMIT_TARGETS to a
# space separated list of valid targets BEFORE sourcing the library, for example to only allow a backend
# to be used on CUDA and CPU backends:
#
#   LIMIT_TARGETS="cublas cpu"
#   source $(dirname $0)/../common/libbackend.sh
#
# You can use any valid BUILD_TYPE or BUILD_PROFILE, if you need to limit a backend to CUDA 12 only:
#
#   LIMIT_TARGETS="cublas12"
#   source $(dirname $0)/../common/libbackend.sh
#
function init() {
    BACKEND_NAME=${PWD##*/}
    MY_DIR=$(realpath `dirname $0`)
    BUILD_PROFILE=$(getBuildProfile)

    # If a backend has defined a list of valid build profiles...
    if [ ! -z "${LIMIT_TARGETS}" ]; then
        isValidTarget=$(checkTargets ${LIMIT_TARGETS})
        if [ ${isValidTarget} != true ]; then
            echo "${BACKEND_NAME} can only be used on the following targets: ${LIMIT_TARGETS}"
            exit 0
        fi
    fi

    echo "Initializing libbackend for ${BACKEND_NAME}"
}

# getBuildProfile will inspect the system to determine which build profile is appropriate:
# returns one of the following:
# - cublas11
# - cublas12
# - hipblas
# - intel
function getBuildProfile() {
    # First check if we are a cublas build, and if so report the correct build profile
    if [ x"${BUILD_TYPE}" == "xcublas" ]; then
        if [ ! -z ${CUDA_MAJOR_VERSION} ]; then
            # If we have been given a CUDA version, we trust it
            echo ${BUILD_TYPE}${CUDA_MAJOR_VERSION}
        else
            # We don't know what version of cuda we are, so we report ourselves as a generic cublas
            echo ${BUILD_TYPE}
        fi
        return 0
    fi

    # If /opt/intel exists, then we are doing an intel/ARC build
    if [ -d "/opt/intel" ]; then
        echo "intel"
        return 0
    fi

    # For any other values of BUILD_TYPE, we don't need any special handling/discovery
    if [ ! -z ${BUILD_TYPE} ]; then
        echo ${BUILD_TYPE}
        return 0
    fi

    # If there is no BUILD_TYPE set at all, set a build-profile value of CPU, we aren't building for any GPU targets
    echo "cpu"
}

# ensureVenv makes sure that the venv for the backend both exists, and is activated.
#
# This function is idempotent, so you can call it as many times as you want and it will
# always result in an activated virtual environment
function ensureVenv() {
    if [ ! -d "${MY_DIR}/venv" ]; then
        uv venv ${MY_DIR}/venv
        echo "virtualenv created"
    fi

    if [ "x${VIRTUAL_ENV}" != "x${MY_DIR}/venv" ]; then
        source ${MY_DIR}/venv/bin/activate
        echo "virtualenv activated"
    fi

    echo "activated virtualenv has been ensured"
}

# installRequirements looks for several requirements files and if they exist runs the install for them in order
#
# - requirements-install.txt
# - requirements.txt
# - requirements-${BUILD_TYPE}.txt
# - requirements-${BUILD_PROFILE}.txt
#
# BUILD_PROFILE is a more specific version of BUILD_TYPE, ex: cuda11 or cuda12
# it can also include some options that we do not have BUILD_TYPES for, ex: intel
#
# NOTE: for BUILD_PROFILE==intel, this function does NOT automatically use the Intel python package index.
# you may want to add the following line to a requirements-intel.txt if you use one:
#
#   --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
#
# If you need to add extra flags into the pip install command you can do so by setting the variable
# EXTRA_PIP_INSTALL_FLAGS before calling installRequirements. For example:
#
#   source $(dirname $0)/../common/libbackend.sh
#   EXTRA_PIP_INSTALL_FLAGS="--no-build-isolation"
#   installRequirements
function installRequirements() {
    ensureVenv

    # These are the requirements files we will attempt to install, in order
    declare -a requirementFiles=(
        "${MY_DIR}/requirements-install.txt"
        "${MY_DIR}/requirements.txt"
        "${MY_DIR}/requirements-${BUILD_TYPE}.txt"
    )

    if [ "x${BUILD_TYPE}" != "x${BUILD_PROFILE}" ]; then
        requirementFiles+=("${MY_DIR}/requirements-${BUILD_PROFILE}.txt")
    fi

    for reqFile in ${requirementFiles[@]}; do
        if [ -f ${reqFile} ]; then
            echo "starting requirements install for ${reqFile}"
            uv pip install ${EXTRA_PIP_INSTALL_FLAGS} --requirement ${reqFile}
            echo "finished requirements install for ${reqFile}"
        fi
    done
}

# startBackend discovers and runs the backend GRPC server
#
# You can specify a specific backend file to execute by setting BACKEND_FILE before calling startBackend.
# example:
#
#   source ../common/libbackend.sh
#   BACKEND_FILE="${MY_DIR}/source/backend.py"
#   startBackend $@
#
# valid filenames for autodiscovered backend servers are:
# - server.py
# - backend.py
# - ${BACKEND_NAME}.py
function startBackend() {
    ensureVenv

    if [ ! -z ${BACKEND_FILE} ]; then
        python ${BACKEND_FILE} $@
    elif [ -e "${MY_DIR}/server.py" ]; then
        python ${MY_DIR}/server.py $@
    elif [ -e "${MY_DIR}/backend.py" ]; then
        python ${MY_DIR}/backend.py $@
    elif [ -e "${MY_DIR}/${BACKEND_NAME}.py" ]; then
        python ${MY_DIR}/${BACKEND_NAME}.py $@
    fi
}

# runUnittests discovers and runs python unittests
#
# You can specify a specific test file to use by setting TEST_FILE before calling runUnittests.
# example:
#
#   source ../common/libbackend.sh
#   TEST_FILE="${MY_DIR}/source/test.py"
#   runUnittests $@
#
# by default a file named test.py in the backend's directory will be used
function runUnittests() {
    ensureVenv

    if [ ! -z ${TEST_FILE} ]; then
        testDir=$(dirname `realpath ${TEST_FILE}`)
        testFile=$(basename ${TEST_FILE})
        pushd ${testDir}
        python -m unittest ${testFile}
        popd
    elif [ -f "${MY_DIR}/test.py" ]; then
        pushd ${MY_DIR}
        python -m unittest test.py
        popd
    else
        echo "no tests defined for ${BACKEND_NAME}"
    fi
}

##################################################################################
# Below here are helper functions not intended to be used outside of the library #
##################################################################################

# checkTargets determines if the current BUILD_TYPE or BUILD_PROFILE is in a list of valid targets
function checkTargets() {
    # Collect all provided targets into a variable and...
    targets=$@
    # ...convert it into an array
    declare -a targets=($targets)

    for target in ${targets[@]}; do
        if [ "x${BUILD_TYPE}" == "x${target}" ]; then
            echo true
            return 0
        fi
        if [ "x${BUILD_PROFILE}" == "x${target}" ]; then
            echo true
            return 0
        fi
    done
    echo false
}

init
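To make the comments above concrete, here is a minimal sketch of a hypothetical CUDA-only backend whose server lives under source/, using only the variables and functions defined in this library (LIMIT_TARGETS must be set before sourcing so init() can enforce it; BACKEND_FILE is read by startBackend at call time; "mybackend" is a made-up name, not a file in this commit):

#!/bin/bash
# hypothetical run.sh for a backend named "mybackend"
LIMIT_TARGETS="cublas"                        # init() exits early on any other target

source $(dirname $0)/../common/libbackend.sh  # sets MY_DIR, BACKEND_NAME, BUILD_PROFILE

BACKEND_FILE="${MY_DIR}/source/backend.py"    # override autodiscovery of server.py/backend.py/mybackend.py
startBackend $@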
backend/python/common/template/Makefile (new file, 19 lines)
@@ -0,0 +1,19 @@
.DEFAULT_GOAL := install

.PHONY: install
install: protogen
	bash install.sh

.PHONY: protogen
protogen: backend_pb2_grpc.py backend_pb2.py

.PHONY: protogen-clean
protogen-clean:
	$(RM) backend_pb2_grpc.py backend_pb2.py

backend_pb2_grpc.py backend_pb2.py:
	python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto

.PHONY: clean
clean: protogen-clean
	rm -rf venv __pycache__
backend/python/common/template/backend.py (new executable file, 4 lines)
@@ -0,0 +1,4 @@
#!/usr/bin/env python3
import grpc
import backend_pb2
import backend_pb2_grpc
backend/python/common/template/install.sh (new executable file, 6 lines)
@@ -0,0 +1,6 @@
#!/bin/bash
set -e

source $(dirname $0)/../common/libbackend.sh

installRequirements
backend/python/common/template/requirements.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
grpcio==1.63.0
protobuf
backend/python/common/template/run.sh (new executable file, 4 lines)
@@ -0,0 +1,4 @@
#!/bin/bash
source $(dirname $0)/../common/libbackend.sh

startBackend $@
backend/python/common/template/test.sh (new executable file, 6 lines)
@@ -0,0 +1,6 @@
#!/bin/bash
set -e

source $(dirname $0)/../common/libbackend.sh

runUnittests
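The template above is the intended starting point for a new backend. A plausible workflow, assuming the template is copied next to the other backends under backend/python/ (the backend name here is hypothetical):

cp -r backend/python/common/template backend/python/mybackend
cd backend/python/mybackend
make install   # protogen via grpc_tools.protoc, then install.sh -> installRequirements
bash run.sh    # startBackend autodiscovers server.py, backend.py or mybackend.py
make clean     # removes venv, __pycache__ and the generated protobuf stubs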
@@ -26,4 +26,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf venv
+	rm -rf venv __pycache__
@@ -1,34 +1,6 @@
 #!/bin/bash
-set -ex
-
-BUILD_ISOLATION_FLAG=""
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+installRequirements
@@ -1,10 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the coqui server
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/coqui_server.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -18,7 +18,7 @@ class TestBackendServicer(unittest.TestCase):
         """
         This method sets up the gRPC service by starting the server
         """
-        self.service = subprocess.Popen(["python3", "coqui_server.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python3", "backend.py", "--addr", "localhost:50051"])
         time.sleep(10)
 
     def tearDown(self) -> None:
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -36,4 +36,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf venv
+	rm -rf venv __pycache__
@@ -1,34 +1,6 @@
 #!/bin/bash
-set -ex
-
-BUILD_ISOLATION_FLAG=""
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+installRequirements
@@ -1,3 +1,4 @@
+--index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 intel-extension-for-pytorch
 torchaudio
 torchvision
@@ -1,10 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the GRPC backend
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/backend_diffusers.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -18,7 +18,7 @@ class TestBackendServicer(unittest.TestCase):
         """
         This method sets up the gRPC service by starting the server
         """
-        self.service = subprocess.Popen(["python3", "backend_diffusers.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python3", "backend.py", "--addr", "localhost:50051"])
 
     def tearDown(self) -> None:
         """
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -22,4 +22,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	$(RM) -r venv source
+	$(RM) -r venv source __pycache__
@@ -14,9 +14,9 @@ import torch
 import torch.nn.functional as F
 from torch import version as torch_version
 
-from tokenizer import ExLlamaTokenizer
-from generator import ExLlamaGenerator
-from model import ExLlama, ExLlamaCache, ExLlamaConfig
+from source.tokenizer import ExLlamaTokenizer
+from source.generator import ExLlamaGenerator
+from source.model import ExLlama, ExLlamaCache, ExLlamaConfig
 
 _ONE_DAY_IN_SECONDS = 60 * 60 * 24
 
@@ -1,56 +0,0 @@ (deleted: exllama conda environment file)
|
||||||
name: exllama
|
|
||||||
channels:
|
|
||||||
- defaults
|
|
||||||
dependencies:
|
|
||||||
- _libgcc_mutex=0.1=main
|
|
||||||
- _openmp_mutex=5.1=1_gnu
|
|
||||||
- bzip2=1.0.8=h7b6447c_0
|
|
||||||
- ca-certificates=2023.08.22=h06a4308_0
|
|
||||||
- ld_impl_linux-64=2.38=h1181459_1
|
|
||||||
- libffi=3.4.4=h6a678d5_0
|
|
||||||
- libgcc-ng=11.2.0=h1234567_1
|
|
||||||
- libgomp=11.2.0=h1234567_1
|
|
||||||
- libstdcxx-ng=11.2.0=h1234567_1
|
|
||||||
- libuuid=1.41.5=h5eee18b_0
|
|
||||||
- ncurses=6.4=h6a678d5_0
|
|
||||||
- openssl=3.0.11=h7f8727e_2
|
|
||||||
- pip=23.2.1=py311h06a4308_0
|
|
||||||
- python=3.11.5=h955ad1f_0
|
|
||||||
- readline=8.2=h5eee18b_0
|
|
||||||
- setuptools=68.0.0=py311h06a4308_0
|
|
||||||
- sqlite=3.41.2=h5eee18b_0
|
|
||||||
- tk=8.6.12=h1ccaba5_0
|
|
||||||
- tzdata=2023c=h04d1e81_0
|
|
||||||
- wheel=0.41.2=py311h06a4308_0
|
|
||||||
- xz=5.4.2=h5eee18b_0
|
|
||||||
- zlib=1.2.13=h5eee18b_0
|
|
||||||
- pip:
|
|
||||||
- filelock==3.12.4
|
|
||||||
- fsspec==2023.9.2
|
|
||||||
- grpcio==1.63.0
|
|
||||||
- jinja2==3.1.2
|
|
||||||
- markupsafe==2.1.3
|
|
||||||
- mpmath==1.3.0
|
|
||||||
- networkx==3.1
|
|
||||||
- ninja==1.11.1
|
|
||||||
- protobuf==4.24.4
|
|
||||||
- nvidia-cublas-cu12==12.1.3.1
|
|
||||||
- nvidia-cuda-cupti-cu12==12.1.105
|
|
||||||
- nvidia-cuda-nvrtc-cu12==12.1.105
|
|
||||||
- nvidia-cuda-runtime-cu12==12.1.105
|
|
||||||
- nvidia-cudnn-cu12==8.9.2.26
|
|
||||||
- nvidia-cufft-cu12==11.0.2.54
|
|
||||||
- nvidia-curand-cu12==10.3.2.106
|
|
||||||
- nvidia-cusolver-cu12==11.4.5.107
|
|
||||||
- nvidia-cusparse-cu12==12.1.0.106
|
|
||||||
- nvidia-nccl-cu12==2.18.1
|
|
||||||
- nvidia-nvjitlink-cu12==12.2.140
|
|
||||||
- nvidia-nvtx-cu12==12.1.105
|
|
||||||
- safetensors==0.3.2
|
|
||||||
- sentencepiece==0.1.99
|
|
||||||
- sympy==1.12
|
|
||||||
- torch==2.1.0
|
|
||||||
- triton==2.1.0
|
|
||||||
- typing-extensions==4.8.0
|
|
||||||
- numpy
|
|
||||||
prefix: /opt/conda/envs/exllama
|
|
|
@@ -1,37 +1,13 @@
 #!/bin/bash
-set -ex
+set -e
 
-BUILD_ISOLATION_FLAG=""
+LIMIT_TARGETS="cublas"
 
-if [ "$BUILD_TYPE" != "cublas" ]; then
-    echo "[exllama] Attention!!! Nvidia GPU is required - skipping installation"
-    exit 0
-fi
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
+source $(dirname $0)/../common/libbackend.sh
+
+installRequirements
 
 git clone https://github.com/turboderp/exllama $MY_DIR/source
 uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/source/requirements.txt
 
-cp -rfv ./*py $MY_DIR/source/
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
+cp -v ./*py $MY_DIR/source/
@@ -1,10 +1,7 @@
 #!/bin/bash
+LIMIT_TARGETS="cublas"
+BACKEND_FILE="${MY_DIR}/source/backend.py"
 
-##
-## A bash script wrapper that runs the exllama server with uv
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/source/exllama.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -20,4 +20,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	$(RM) -r venv source
+	$(RM) -r venv source __pycache__
@@ -1,44 +1,16 @@
 #!/bin/bash
 set -e
-##
-## A bash script installs the required dependencies of VALL-E-X and prepares the environment
+
+LIMIT_TARGETS="cublas"
+EXTRA_PIP_INSTALL_FLAGS="--no-build-isolation"
 EXLLAMA2_VERSION=c0ddebaaaf8ffd1b3529c2bb654e650bce2f790f
 
-BUILD_ISOLATION_FLAG=""
-
-if [ "$BUILD_TYPE" != "cublas" ]; then
-    echo "[exllama] Attention!!! Nvidia GPU is required - skipping installation"
-    exit 0
-fi
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
+source $(dirname $0)/../common/libbackend.sh
+
+installRequirements
 
 git clone https://github.com/turboderp/exllamav2 $MY_DIR/source
 pushd ${MY_DIR}/source && git checkout -b build ${EXLLAMA2_VERSION} && popd
 
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/source/requirements.txt
 # This installs exllamav2 in JIT mode so it will compile the appropriate torch extension at runtime
-EXLLAMA_NOCOMPILE= uv pip install ${BUILD_ISOLATION_FLAG} ${MY_DIR}/source/
-
-cp -rfv ./*py $MY_DIR/source/
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
+EXLLAMA_NOCOMPILE= uv pip install ${EXTRA_PIP_INSTALL_FLAGS} ${MY_DIR}/source/
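Note how this install script leans on two of the library hooks rather than inline logic: the CUDA-only restriction moves from a hand-written BUILD_TYPE check to LIMIT_TARGETS, and EXTRA_PIP_INSTALL_FLAGS carries --no-build-isolation into every uv pip install that installRequirements runs (and into the manual install of the checked-out source). A condensed sketch of that shape, using only names from the diff above:

#!/bin/bash
LIMIT_TARGETS="cublas"                          # library exits early on non-CUDA targets
EXTRA_PIP_INSTALL_FLAGS="--no-build-isolation"  # forwarded to every uv pip install
source $(dirname $0)/../common/libbackend.sh
installRequirements                             # venv + requirements*.txt
# backend-specific steps (git clone, checkout, JIT install) follow as in the hunk above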
@@ -1,10 +1,6 @@
 #!/bin/bash
+LIMIT_TARGETS="cublas"
 
-##
-## A bash script wrapper that runs the exllama2 server
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/source/exllama2_backend.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -26,4 +26,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	$(RM) -r venv
+	$(RM) -r venv __pycache__
@@ -1,39 +1,9 @@
 #!/bin/bash
-set -ex
+set -e
 
-if [ "$BUILD_TYPE" != "cublas" ]; then
-    echo "[mamba] Attention!!! nvcc is required - skipping installation"
-    exit 0
-fi
-
-BUILD_ISOLATION_FLAG=""
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
+LIMIT_TARGETS="cublas"
+EXTRA_PIP_INSTALL_FLAGS="--no-build-isolation"
+
+source $(dirname $0)/../common/libbackend.sh
+
+installRequirements
@@ -1,10 +1,6 @@
 #!/bin/bash
+LIMIT_TARGETS="cublas"
 
-##
-## A bash script wrapper that runs the GRPC server
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/backend_mamba.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -20,7 +20,7 @@ class TestBackendServicer(unittest.TestCase):
     This class contains methods to test the startup and shutdown of the gRPC service.
     """
     def setUp(self):
-        self.service = subprocess.Popen(["python", "backend_mamba.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python", "backend.py", "--addr", "localhost:50051"])
         time.sleep(10)
 
     def tearDown(self) -> None:
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -40,4 +40,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	$(RM) -r venv
+	$(RM) -r venv __pycache__
@@ -1,39 +1,11 @@
 #!/bin/bash
-set -ex
+set -e
 
-BUILD_ISOLATION_FLAG=""
+source $(dirname $0)/../common/libbackend.sh
 
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
+installRequirements
 
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
 # https://github.com/descriptinc/audiotools/issues/101
 # incompatible protobuf versions.
-PYDIR=$(ls $MY_DIR/venv/lib)
-curl -L https://raw.githubusercontent.com/protocolbuffers/protobuf/main/python/google/protobuf/internal/builder.py -o $MY_DIR/venv/lib/$PYDIR/site-packages/google/protobuf/internal/builder.py
+PYDIR=$(ls ${MY_DIR}/venv/lib)
+curl -L https://raw.githubusercontent.com/protocolbuffers/protobuf/main/python/google/protobuf/internal/builder.py -o ${MY_DIR}/venv/lib/${PYDIR}/site-packages/google/protobuf/internal/builder.py
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
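install.sh follows the same pattern: everything except the backend-specific protobuf workaround collapses into installRequirements. A sketch of what that function presumably covers, reconstructed from the exact steps it replaces in this hunk (uv venv, an optional requirements-install.txt installed without build isolation, requirements.txt, requirements-${BUILD_TYPE}.txt, requirements-intel.txt on the Intel image, and the PIP_CACHE_PURGE knob); the real body in ../common/libbackend.sh may differ.

# Sketch only: assumed shape of installRequirements
installRequirements() {
    local buildIsolationFlag=""

    uv venv "${MY_DIR}/venv"
    source "${MY_DIR}/venv/bin/activate"

    # packages that do not declare their PEP-517 build dependencies are installed
    # first, and everything else is then installed without build isolation
    if [ -f "${MY_DIR}/requirements-install.txt" ]; then
        buildIsolationFlag="--no-build-isolation"
        uv pip install --requirement "${MY_DIR}/requirements-install.txt"
    fi

    uv pip install ${buildIsolationFlag} --requirement "${MY_DIR}/requirements.txt"

    if [ -f "${MY_DIR}/requirements-${BUILD_TYPE}.txt" ]; then
        uv pip install ${buildIsolationFlag} --requirement "${MY_DIR}/requirements-${BUILD_TYPE}.txt"
    fi

    # the Intel image pulls its extra wheels from the XPU index
    if [ -d "/opt/intel" ] && [ -f "${MY_DIR}/requirements-intel.txt" ]; then
        uv pip install ${buildIsolationFlag} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement "${MY_DIR}/requirements-intel.txt"
    fi

    if [ "$PIP_CACHE_PURGE" = true ]; then
        pip cache purge
    fi
}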
@@ -1,48 +0,0 @@
-name: parler
-channels:
-  - defaults
-dependencies:
-  - _libgcc_mutex=0.1=main
-  - _openmp_mutex=5.1=1_gnu
-  - bzip2=1.0.8=h7b6447c_0
-  - ca-certificates=2023.08.22=h06a4308_0
-  - ld_impl_linux-64=2.38=h1181459_1
-  - libffi=3.4.4=h6a678d5_0
-  - libgcc-ng=11.2.0=h1234567_1
-  - libgomp=11.2.0=h1234567_1
-  - libstdcxx-ng=11.2.0=h1234567_1
-  - libuuid=1.41.5=h5eee18b_0
-  - ncurses=6.4=h6a678d5_0
-  - openssl=3.0.11=h7f8727e_2
-  - pip=23.2.1=py311h06a4308_0
-  - python=3.11.5=h955ad1f_0
-  - readline=8.2=h5eee18b_0
-  - setuptools=68.0.0=py311h06a4308_0
-  - sqlite=3.41.2=h5eee18b_0
-  - tk=8.6.12=h1ccaba5_0
-  - tzdata=2023c=h04d1e81_0
-  - wheel=0.41.2=py311h06a4308_0
-  - xz=5.4.2=h5eee18b_0
-  - zlib=1.2.13=h5eee18b_0
-  - pip:
-    - accelerate>=0.11.0
-    - grpcio==1.63.0
-    - numpy==1.26.0
-    - nvidia-cublas-cu12==12.1.3.1
-    - nvidia-cuda-cupti-cu12==12.1.105
-    - nvidia-cuda-nvrtc-cu12==12.1.105
-    - nvidia-cuda-runtime-cu12==12.1.105
-    - nvidia-cudnn-cu12==8.9.2.26
-    - nvidia-cufft-cu12==11.0.2.54
-    - nvidia-curand-cu12==10.3.2.106
-    - nvidia-cusolver-cu12==11.4.5.107
-    - nvidia-cusparse-cu12==12.1.0.106
-    - nvidia-nccl-cu12==2.18.1
-    - nvidia-nvjitlink-cu12==12.2.140
-    - nvidia-nvtx-cu12==12.1.105
-    - torch==2.1.0
-    - transformers>=4.34.0
-    - descript-audio-codec
-    - sentencepiece
-    - git+https://github.com/huggingface/parler-tts.git@10016fb0300c0dc31a0fb70e26f3affee7b62f16
-prefix: /opt/conda/envs/diffusers
@@ -1,36 +0,0 @@
-name: parler
-channels:
-  - defaults
-dependencies:
-  - _libgcc_mutex=0.1=main
-  - _openmp_mutex=5.1=1_gnu
-  - bzip2=1.0.8=h7b6447c_0
-  - ca-certificates=2023.08.22=h06a4308_0
-  - ld_impl_linux-64=2.38=h1181459_1
-  - libffi=3.4.4=h6a678d5_0
-  - libgcc-ng=11.2.0=h1234567_1
-  - libgomp=11.2.0=h1234567_1
-  - libstdcxx-ng=11.2.0=h1234567_1
-  - libuuid=1.41.5=h5eee18b_0
-  - ncurses=6.4=h6a678d5_0
-  - openssl=3.0.11=h7f8727e_2
-  - pip=23.2.1=py311h06a4308_0
-  - python=3.11.5=h955ad1f_0
-  - readline=8.2=h5eee18b_0
-  - setuptools=68.0.0=py311h06a4308_0
-  - sqlite=3.41.2=h5eee18b_0
-  - tk=8.6.12=h1ccaba5_0
-  - tzdata=2023c=h04d1e81_0
-  - wheel=0.41.2=py311h06a4308_0
-  - xz=5.4.2=h5eee18b_0
-  - zlib=1.2.13=h5eee18b_0
-  - pip:
-    - accelerate>=0.11.0
-    - numpy==1.26.0
-    - grpcio==1.63.0
-    - torch==2.1.0
-    - transformers>=4.34.0
-    - descript-audio-codec
-    - sentencepiece
-    - git+https://github.com/huggingface/parler-tts.git@10016fb0300c0dc31a0fb70e26f3affee7b62f16
-prefix: /opt/conda/envs/parler
@@ -1,10 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the GRPC backend
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/parler_tts_server.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -18,7 +18,7 @@ class TestBackendServicer(unittest.TestCase):
         """
         This method sets up the gRPC service by starting the server
         """
-        self.service = subprocess.Popen(["python3", "parler_tts_server.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python3", "backend.py", "--addr", "localhost:50051"])
         time.sleep(10)
 
     def tearDown(self) -> None:
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -28,4 +28,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf venv
+	rm -rf venv __pycache__
@@ -1,34 +1,6 @@
 #!/bin/bash
-set -ex
+set -e
 
-BUILD_ISOLATION_FLAG=""
+source $(dirname $0)/../common/libbackend.sh
 
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
+installRequirements
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
@@ -1,30 +0,0 @@
-name: petals
-channels:
-  - defaults
-dependencies:
-  # - _libgcc_mutex=0.1=main
-  # - _openmp_mutex=5.1=1_gnu
-  # - bzip2=1.0.8=h7b6447c_0
-  # - ca-certificates=2023.08.22=h06a4308_0
-  # - ld_impl_linux-64=2.38=h1181459_1
-  # - libffi=3.4.4=h6a678d5_0
-  # - libgcc-ng=11.2.0=h1234567_1
-  # - libgomp=11.2.0=h1234567_1
-  # - libstdcxx-ng=11.2.0=h1234567_1
-  # - libuuid=1.41.5=h5eee18b_0
-  # - ncurses=6.4=h6a678d5_0
-  # - openssl=3.0.11=h7f8727e_2
-  # - pip=23.2.1=py311h06a4308_0
-  - python=3.11.5=h955ad1f_0
-  # - readline=8.2=h5eee18b_0
-  # - setuptools=68.0.0=py311h06a4308_0
-  # - sqlite=3.41.2=h5eee18b_0
-  # - tk=8.6.12=h1ccaba5_0
-  # - tzdata=2023c=h04d1e81_0
-  # - wheel=0.41.2=py311h06a4308_0
-  # - xz=5.4.2=h5eee18b_0
-  # - zlib=1.2.13=h5eee18b_0
-  - pip:
-    - torch==2.1.0
-    - git+https://github.com/bigscience-workshop/petals
-prefix: /opt/conda/envs/petals
@@ -1,10 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the GRPC backend
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/backend_petals.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -20,7 +20,7 @@ class TestBackendServicer(unittest.TestCase):
     This class contains methods to test the startup and shutdown of the gRPC service.
     """
     def setUp(self):
-        self.service = subprocess.Popen(["python", "backend_petals.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python", "backend.py", "--addr", "localhost:50051"])
         time.sleep(10)
 
     def tearDown(self) -> None:
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -27,4 +27,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf venv
+	rm -rf venv __pycache__
@@ -1,34 +1,6 @@
 #!/bin/bash
-set -ex
+set -e
 
-BUILD_ISOLATION_FLAG=""
+source $(dirname $0)/../common/libbackend.sh
 
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
+installRequirements
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
@@ -1,10 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the GRPC backend
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/reranker.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -18,7 +18,7 @@ class TestBackendServicer(unittest.TestCase):
         """
         This method sets up the gRPC service by starting the server
         """
-        self.service = subprocess.Popen(["python3", "reranker.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python3", "backend.py", "--addr", "localhost:50051"])
         time.sleep(10)
 
     def tearDown(self) -> None:
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -28,4 +28,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf venv
+	rm -rf venv __pycache__
@@ -1,34 +1,6 @@
 #!/bin/bash
-set -ex
+set -e
 
-BUILD_ISOLATION_FLAG=""
+source $(dirname $0)/../common/libbackend.sh
 
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
+installRequirements
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
@@ -1,10 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the GRPC backend
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/sentencetransformers.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -18,7 +18,7 @@ class TestBackendServicer(unittest.TestCase):
         """
         This method sets up the gRPC service by starting the server
        """
-        self.service = subprocess.Popen(["python3", "sentencetransformers.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python3", "backend.py", "--addr", "localhost:50051"])
         time.sleep(10)
 
     def tearDown(self) -> None:
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -26,4 +26,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf venv
+	rm -rf venv __pycache__
@@ -1,34 +1,6 @@
 #!/bin/bash
-set -ex
+set -e
 
-BUILD_ISOLATION_FLAG=""
+source $(dirname $0)/../common/libbackend.sh
 
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
+installRequirements
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
@@ -1,10 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the GRPC backend
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/transformers_server.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -18,7 +18,7 @@ class TestBackendServicer(unittest.TestCase):
         """
         This method sets up the gRPC service by starting the server
         """
-        self.service = subprocess.Popen(["python3", "transformers_server.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python3", "backend.py", "--addr", "localhost:50051"])
         time.sleep(10)
 
     def tearDown(self) -> None:
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -27,4 +27,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf venv
+	rm -rf venv __pycache__
@@ -1,34 +1,6 @@
 #!/bin/bash
-set -ex
+set -e
 
-BUILD_ISOLATION_FLAG=""
+source $(dirname $0)/../common/libbackend.sh
 
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
+installRequirements
-
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
@@ -1,17 +1,4 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the GRPC backend
-
-
-if [ -d "/opt/intel" ]; then
-    # Assumes we are using the Intel oneAPI container image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    export XPU=1
-fi
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-python $MY_DIR/transformers_server.py $@
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
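Note that the per-backend export XPU=1 for the Intel oneAPI image disappears here without a replacement in run.sh; presumably the shared library performs the same /opt/intel check once for every backend, along these lines (an assumption, this code is not shown in the diff):

# Assumed: centralised version of the oneAPI detection each run.sh used to carry
if [ -d "/opt/intel" ]; then
    # https://github.com/intel/intel-extension-for-pytorch/issues/538
    export XPU=1
fi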
@@ -18,7 +18,7 @@ class TestBackendServicer(unittest.TestCase):
         """
         This method sets up the gRPC service by starting the server
         """
-        self.service = subprocess.Popen(["python3", "transformers_server.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python3", "backend.py", "--addr", "localhost:50051"])
 
     def tearDown(self) -> None:
         """
@@ -1,16 +1,6 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -30,4 +30,4 @@ backend_pb2_grpc.py backend_pb2.py:
 
 .PHONY: clean
 clean: protogen-clean
-	rm -rf source venv
+	rm -rf source venv __pycache__
@@ -1,40 +1,14 @@
 #!/bin/bash
-set -ex
+set -e
 
-BUILD_ISOLATION_FLAG=""
+VALL_E_X_VERSION=3faaf8ccadb154d63b38070caf518ce9309ea0f4
 
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
+source $(dirname $0)/../common/libbackend.sh
 
-uv venv ${MY_DIR}/venv
-source ${MY_DIR}/venv/bin/activate
-
-if [ -f "requirements-install.txt" ]; then
-    # If we have a requirements-install.txt, it means that a package does not properly declare it's build time
-    # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install
-    # the package without build isolation
-    BUILD_ISOLATION_FLAG="--no-build-isolation"
-    uv pip install --requirement ${MY_DIR}/requirements-install.txt
-fi
-uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt
-
-if [ -f "requirements-${BUILD_TYPE}.txt" ]; then
-    uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt
-fi
-
-if [ -d "/opt/intel" ]; then
-    # Intel GPU: If the directory exists, we assume we are using the Intel image
-    # https://github.com/intel/intel-extension-for-pytorch/issues/538
-    if [ -f "requirements-intel.txt" ]; then
-        uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt
-    fi
-fi
+installRequirements
 
-git clone https://github.com/Plachtaa/VALL-E-X.git $MY_DIR/source
-pushd $MY_DIR/source && git checkout -b build $VALL_E_X_VERSION && popd
+git clone https://github.com/Plachtaa/VALL-E-X.git ${MY_DIR}/source
+pushd ${MY_DIR}/source && git checkout -b build ${VALL_E_X_VERSION} && popd
 uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/source/requirements.txt
 
-cp -rfv ./*py $MY_DIR/source/
-
-if [ "$PIP_CACHE_PURGE" = true ] ; then
-    pip cache purge
-fi
+cp -v ./*py $MY_DIR/source/
@@ -1,10 +1,6 @@
 #!/bin/bash
-
-##
-## A bash script wrapper that runs the GRPC backend
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-pushd $MY_DIR/source && python ttsvalle.py $@
+BACKEND_FILE="${MY_DIR}/source/backend.py"
+
+source $(dirname $0)/../common/libbackend.sh
+
+startBackend $@
@@ -18,7 +18,7 @@ class TestBackendServicer(unittest.TestCase):
         """
         This method sets up the gRPC service by starting the server
         """
-        self.service = subprocess.Popen(["python3", "ttsvalle.py", "--addr", "localhost:50051"])
+        self.service = subprocess.Popen(["python3", "backend.py", "--addr", "localhost:50051"])
         time.sleep(10)
 
     def tearDown(self) -> None:
@@ -1,16 +1,7 @@
 #!/bin/bash
-##
-## A bash script wrapper that runs python unittests
-
-MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")"
-
-source $MY_DIR/venv/bin/activate
-
-if [ -f "${MY_DIR}/test.py" ]; then
-    pushd ${MY_DIR}/source
-    python -m unittest test.py
-    popd
-else
-    echo "ERROR: No tests defined for backend!"
-    exit 1
-fi
+set -e
+TEST_FILE="./source/test.py"
+
+source $(dirname $0)/../common/libbackend.sh
+
+runUnittests
@@ -1,101 +0,0 @@
-name: ttsvalle
-channels:
-  - defaults
-dependencies:
-  - _libgcc_mutex=0.1=main
-  - _openmp_mutex=5.1=1_gnu
-  - bzip2=1.0.8=h7b6447c_0
-  - ca-certificates=2023.08.22=h06a4308_0
-  - ld_impl_linux-64=2.38=h1181459_1
-  - libffi=3.4.4=h6a678d5_0
-  - libgcc-ng=11.2.0=h1234567_1
-  - libgomp=11.2.0=h1234567_1
-  - libstdcxx-ng=11.2.0=h1234567_1
-  - libuuid=1.41.5=h5eee18b_0
-  - ncurses=6.4=h6a678d5_0
-  - openssl=3.0.11=h7f8727e_2
-  - pip=23.2.1=py310h06a4308_0
-  - python=3.10.13=h955ad1f_0
-  - readline=8.2=h5eee18b_0
-  - setuptools=68.0.0=py310h06a4308_0
-  - sqlite=3.41.2=h5eee18b_0
-  - tk=8.6.12=h1ccaba5_0
-  - tzdata=2023c=h04d1e81_0
-  - wheel=0.41.2=py310h06a4308_0
-  - xz=5.4.2=h5eee18b_0
-  - zlib=1.2.13=h5eee18b_0
-  - pip:
-    - aiofiles==23.2.1
-    - altair==5.1.2
-    - annotated-types==0.6.0
-    - anyio==3.7.1
-    - click==8.1.7
-    - cn2an==0.5.22
-    - cython==3.0.3
-    - einops==0.7.0
-    - encodec==0.1.1
-    - eng-to-ipa==0.0.2
-    - fastapi==0.103.2
-    - ffmpeg-python==0.2.0
-    - ffmpy==0.3.1
-    - fsspec==2023.9.2
-    - future==0.18.3
-    - gradio==3.47.1
-    - gradio-client==0.6.0
-    - grpcio==1.63.0
-    - h11==0.14.0
-    - httpcore==0.18.0
-    - httpx==0.25.0
-    - huggingface-hub==0.17.3
-    - importlib-resources==6.1.0
-    - inflect==7.0.0
-    - jieba==0.42.1
-    - langid==1.1.6
-    - llvmlite==0.41.0
-    - more-itertools==10.1.0
-    - nltk==3.8.1
-    - numba==0.58.0
-    - numpy==1.25.2
-    - nvidia-cublas-cu12==12.1.3.1
-    - nvidia-cuda-cupti-cu12==12.1.105
-    - nvidia-cuda-nvrtc-cu12==12.1.105
-    - nvidia-cuda-runtime-cu12==12.1.105
-    - nvidia-cudnn-cu12==8.9.2.26
-    - nvidia-cufft-cu12==11.0.2.54
-    - nvidia-curand-cu12==10.3.2.106
-    - nvidia-cusolver-cu12==11.4.5.107
-    - nvidia-cusparse-cu12==12.1.0.106
-    - nvidia-nccl-cu12==2.18.1
-    - nvidia-nvjitlink-cu12==12.2.140
-    - nvidia-nvtx-cu12==12.1.105
-    - openai-whisper==20230306
-    - orjson==3.9.7
-    - proces==0.1.7
-    - protobuf==4.24.4
-    - pydantic==2.4.2
-    - pydantic-core==2.10.1
-    - pydub==0.25.1
-    - pyopenjtalk-prebuilt==0.3.0
-    - pypinyin==0.49.0
-    - python-multipart==0.0.6
-    - regex==2023.10.3
-    - safetensors>=0.4.0
-    - semantic-version==2.10.0
-    - soundfile==0.12.1
-    - starlette==0.27.0
-    - sudachidict-core==20230927
-    - sudachipy==0.6.7
-    - tokenizers==0.14.1
-    - toolz==0.12.0
-    - torch==2.1.0
-    - torchaudio==2.1.0
-    - torchvision==0.16.0
-    - tqdm==4.66.1
-    - transformers==4.34.0
-    - triton==2.1.0
-    - unidecode==1.3.7
-    - uvicorn==0.23.2
-    - vocos==0.0.3
-    - websockets==11.0.3
-    - wget==3.2
-prefix: /opt/conda/envs/ttsvalle
Some files were not shown because too many files have changed in this diff.