diff --git a/backend/python/common-env/transformers/transformers.yml b/backend/python/common-env/transformers/transformers.yml index c4635d81..4ffb13db 100644 --- a/backend/python/common-env/transformers/transformers.yml +++ b/backend/python/common-env/transformers/transformers.yml @@ -1,4 +1,4 @@ -name: bark +name: transformers channels: - defaults dependencies: @@ -34,10 +34,8 @@ dependencies: - botocore==1.31.61 - certifi==2023.7.22 - charset-normalizer==3.3.0 - - compel==2.0.2 - datasets==2.14.5 - dill==0.3.7 - - diffusers==0.24.0 - einops==0.7.0 - encodec==0.1.1 - filelock==3.12.4 @@ -45,7 +43,7 @@ dependencies: - fsspec==2023.6.0 - funcy==2.0 - grpcio==1.59.0 - - huggingface-hub>=0.19.4 + - huggingface-hub==0.16.4 - idna==3.4 - jinja2==3.1.2 - jmespath==1.0.1 @@ -70,6 +68,7 @@ dependencies: - packaging==23.2 - pandas==2.1.1 - peft==0.5.0 + - git+https://github.com/bigscience-workshop/petals - protobuf==4.24.4 - psutil==5.9.5 - pyarrow==13.0.0 @@ -80,7 +79,7 @@ dependencies: - requests==2.31.0 - rouge==1.0.1 - s3transfer==0.7.0 - - safetensors==0.4.0 + - safetensors==0.3.3 - scipy==1.11.3 - six==1.16.0 - sympy==1.12 @@ -95,4 +94,4 @@ dependencies: - urllib3==1.26.17 - xxhash==3.4.1 - yarl==1.9.2 -prefix: /opt/conda/envs/bark +prefix: /opt/conda/envs/transformers diff --git a/backend/python/diffusers/Makefile b/backend/python/diffusers/Makefile index 8e7aabba..f3f9d4e2 100644 --- a/backend/python/diffusers/Makefile +++ b/backend/python/diffusers/Makefile @@ -1,6 +1,8 @@ .PHONY: diffusers diffusers: - $(MAKE) -C ../common-env/transformers + @echo "Creating virtual environment..." + @conda env create --name diffusers --file diffusers.yml + @echo "Virtual environment created." 
.PHONY: run run: diff --git a/backend/python/diffusers/diffusers.yml b/backend/python/diffusers/diffusers.yml new file mode 100644 index 00000000..0708dbda --- /dev/null +++ b/backend/python/diffusers/diffusers.yml @@ -0,0 +1,73 @@ +name: diffusers +channels: + - defaults +dependencies: + - _libgcc_mutex=0.1=main + - _openmp_mutex=5.1=1_gnu + - bzip2=1.0.8=h7b6447c_0 + - ca-certificates=2023.08.22=h06a4308_0 + - ld_impl_linux-64=2.38=h1181459_1 + - libffi=3.4.4=h6a678d5_0 + - libgcc-ng=11.2.0=h1234567_1 + - libgomp=11.2.0=h1234567_1 + - libstdcxx-ng=11.2.0=h1234567_1 + - libuuid=1.41.5=h5eee18b_0 + - ncurses=6.4=h6a678d5_0 + - openssl=3.0.11=h7f8727e_2 + - pip=23.2.1=py311h06a4308_0 + - python=3.11.5=h955ad1f_0 + - readline=8.2=h5eee18b_0 + - setuptools=68.0.0=py311h06a4308_0 + - sqlite=3.41.2=h5eee18b_0 + - tk=8.6.12=h1ccaba5_0 + - tzdata=2023c=h04d1e81_0 + - wheel=0.41.2=py311h06a4308_0 + - xz=5.4.2=h5eee18b_0 + - zlib=1.2.13=h5eee18b_0 + - pip: + - accelerate>=0.11.0 + - certifi==2023.7.22 + - charset-normalizer==3.3.0 + - compel==2.0.2 + - diffusers==0.24.0 + - filelock==3.12.4 + - fsspec==2023.9.2 + - grpcio==1.59.0 + - huggingface-hub>=0.19.4 + - idna==3.4 + - importlib-metadata==6.8.0 + - jinja2==3.1.2 + - markupsafe==2.1.3 + - mpmath==1.3.0 + - networkx==3.1 + - numpy==1.26.0 + - nvidia-cublas-cu12==12.1.3.1 + - nvidia-cuda-cupti-cu12==12.1.105 + - nvidia-cuda-nvrtc-cu12==12.1.105 + - nvidia-cuda-runtime-cu12==12.1.105 + - nvidia-cudnn-cu12==8.9.2.26 + - nvidia-cufft-cu12==11.0.2.54 + - nvidia-curand-cu12==10.3.2.106 + - nvidia-cusolver-cu12==11.4.5.107 + - nvidia-cusparse-cu12==12.1.0.106 + - nvidia-nccl-cu12==2.18.1 + - nvidia-nvjitlink-cu12==12.2.140 + - nvidia-nvtx-cu12==12.1.105 + - packaging==23.2 + - pillow==10.0.1 + - protobuf==4.24.4 + - psutil==5.9.5 + - pyparsing==3.1.1 + - pyyaml==6.0.1 + - regex==2023.10.3 + - requests==2.31.0 + - safetensors==0.4.0 + - sympy==1.12 + - torch==2.1.0 + - tqdm==4.66.1 + - transformers>=4.25.1 + - triton==2.1.0 + - 
typing-extensions==4.8.0 + - urllib3==2.0.6 + - zipp==3.17.0 +prefix: /opt/conda/envs/diffusers \ No newline at end of file diff --git a/backend/python/diffusers/run.sh b/backend/python/diffusers/run.sh index 0ed1fb86..8e3e1bbf 100755 --- a/backend/python/diffusers/run.sh +++ b/backend/python/diffusers/run.sh @@ -6,7 +6,7 @@ export PATH=$PATH:/opt/conda/bin # Activate conda environment -source activate transformers +source activate diffusers # get the directory where the bash script is located DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" diff --git a/backend/python/diffusers/test.sh b/backend/python/diffusers/test.sh index 072fb85b..421a1921 100644 --- a/backend/python/diffusers/test.sh +++ b/backend/python/diffusers/test.sh @@ -6,7 +6,7 @@ export PATH=$PATH:/opt/conda/bin # Activate conda environment -source activate transformers +source activate diffusers # get the directory where the bash script is located DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" diff --git a/backend/python/petals/Makefile b/backend/python/petals/Makefile index 52ce15d2..bce64a90 100644 --- a/backend/python/petals/Makefile +++ b/backend/python/petals/Makefile @@ -1,11 +1,15 @@ .PHONY: petals petals: - @echo "Creating virtual environment..." - @conda env create --name petals --file petals.yml - @echo "Virtual environment created." + $(MAKE) -C ../common-env/transformers .PHONY: run run: @echo "Running petals..." bash run.sh @echo "petals run." + +.PHONY: test +test: + @echo "Testing petals..." + bash test.sh + @echo "petals tested." 
diff --git a/backend/python/petals/run.sh b/backend/python/petals/run.sh index 64a1a66f..b436ed1c 100755 --- a/backend/python/petals/run.sh +++ b/backend/python/petals/run.sh @@ -9,10 +9,10 @@ export PATH=$PATH:/opt/conda/bin # if source is available use it, or use conda # if [ -f /opt/conda/bin/activate ]; then - source activate petals + source activate transformers else eval "$(conda shell.bash hook)" - conda activate petals + conda activate transformers fi # get the directory where the bash script is located diff --git a/backend/python/petals/test.sh b/backend/python/petals/test.sh new file mode 100644 index 00000000..42d1a045 --- /dev/null +++ b/backend/python/petals/test.sh @@ -0,0 +1,11 @@ +#!/bin/bash +## +## A bash script wrapper that runs the petals tests with conda + +# Activate conda environment +source activate transformers + +# get the directory where the bash script is located +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" + +python -m unittest $DIR/test_petals.py \ No newline at end of file diff --git a/backend/python/petals/test_petals.py b/backend/python/petals/test_petals.py new file mode 100644 index 00000000..a0a800e1 --- /dev/null +++ b/backend/python/petals/test_petals.py @@ -0,0 +1,76 @@ +import unittest +import subprocess +import time +import backend_pb2 +import backend_pb2_grpc + +import grpc + +import unittest +import subprocess +import time +import grpc +import backend_pb2_grpc +import backend_pb2 + +class TestBackendServicer(unittest.TestCase): + """ + TestBackendServicer is the class that tests the gRPC service. + + This class contains methods to test the startup and shutdown of the gRPC service. 
+ """ + def setUp(self): + self.service = subprocess.Popen(["python", "backend_petals.py", "--addr", "localhost:50051"]) + time.sleep(10) + + def tearDown(self) -> None: + self.service.terminate() + self.service.wait() + + def test_server_startup(self): + try: + self.setUp() + with grpc.insecure_channel("localhost:50051") as channel: + stub = backend_pb2_grpc.BackendStub(channel) + response = stub.Health(backend_pb2.HealthMessage()) + self.assertEqual(response.message, b'OK') + except Exception as err: + print(err) + self.fail("Server failed to start") + finally: + self.tearDown() + def test_load_model(self): + """ + This method tests if the model is loaded successfully + """ + try: + self.setUp() + with grpc.insecure_channel("localhost:50051") as channel: + stub = backend_pb2_grpc.BackendStub(channel) + response = stub.LoadModel(backend_pb2.ModelOptions(Model="petals-team/StableBeluga")) + self.assertTrue(response.success) + self.assertEqual(response.message, "Model loaded successfully") + except Exception as err: + print(err) + self.fail("LoadModel service failed") + finally: + self.tearDown() + + def test_text(self): + """ + This method tests if text is generated successfully + """ + try: + self.setUp() + with grpc.insecure_channel("localhost:50051") as channel: + stub = backend_pb2_grpc.BackendStub(channel) + response = stub.LoadModel(backend_pb2.ModelOptions(Model="petals-team/StableBeluga")) + self.assertTrue(response.success) + req = backend_pb2.PredictOptions(prompt="The capital of France is") + resp = stub.Predict(req) + self.assertIsNotNone(resp.message) + except Exception as err: + print(err) + self.fail("text service failed") + finally: + self.tearDown() \ No newline at end of file