From 219751bb21f63fa56c05fad5856af69a9c8f30b5 Mon Sep 17 00:00:00 2001
From: Ettore Di Giacinto
Date: Tue, 8 Aug 2023 01:27:38 +0200
Subject: [PATCH] fix: cut prompt from AutoGPTQ answers

Signed-off-by: Ettore Di Giacinto
---
 extra/grpc/autogptq/autogptq.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/extra/grpc/autogptq/autogptq.py b/extra/grpc/autogptq/autogptq.py
index bcc4e14d..7d8a45fc 100755
--- a/extra/grpc/autogptq/autogptq.py
+++ b/extra/grpc/autogptq/autogptq.py
@@ -61,7 +61,12 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
             top_p=top_p,
             repetition_penalty=penalty,
         )
-        return backend_pb2.Result(message=bytes(pipeline(request.Prompt)[0]["generated_text"], encoding='utf-8'))
+        t = pipeline(request.Prompt)[0]["generated_text"]
+        # Remove prompt from response if present
+        if request.Prompt in t:
+            t = t.replace(request.Prompt, "")
+
+        return backend_pb2.Result(message=bytes(t, encoding='utf-8'))
 
     def PredictStream(self, request, context):
         # Implement PredictStream RPC