From e7981152b250d8cf863106cdae8cb2c2d0e062ea Mon Sep 17 00:00:00 2001 From: quoing Date: Mon, 4 Sep 2023 19:12:53 +0200 Subject: [PATCH] [query_data example] max_chunk_overlap in PromptHelper must be in 0..1 range (#1000) **Description** Simple fix: the percentage value is expected to be a float in the range 0..1. **Notes for Reviewers** **[Signed commits](../CONTRIBUTING.md#signing-off-on-commits-developer-certificate-of-origin)** - [x] Yes, I signed my commits. --- examples/query_data/query.py | 2 +- examples/query_data/store.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/query_data/query.py b/examples/query_data/query.py index e3dcde2d..40375960 100644 --- a/examples/query_data/query.py +++ b/examples/query_data/query.py @@ -15,7 +15,7 @@ llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="gpt-3.5-turbo # Configure prompt parameters and initialise helper max_input_size = 500 num_output = 256 -max_chunk_overlap = 20 +max_chunk_overlap = 0.2 prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap) diff --git a/examples/query_data/store.py b/examples/query_data/store.py index 0d628c81..9aec6217 100644 --- a/examples/query_data/store.py +++ b/examples/query_data/store.py @@ -15,7 +15,7 @@ llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="gpt-3.5-turbo # Configure prompt parameters and initialise helper max_input_size = 400 num_output = 400 -max_chunk_overlap = 30 +max_chunk_overlap = 0.3 prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap)