diff --git a/examples/query_data/query.py b/examples/query_data/query.py
index e3dcde2d..40375960 100644
--- a/examples/query_data/query.py
+++ b/examples/query_data/query.py
@@ -15,7 +15,7 @@ llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="gpt-3.5-turbo
 # Configure prompt parameters and initialise helper
 max_input_size = 500
 num_output = 256
-max_chunk_overlap = 20
+max_chunk_overlap = 0.2
 prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap)
 
 
diff --git a/examples/query_data/store.py b/examples/query_data/store.py
index 0d628c81..9aec6217 100644
--- a/examples/query_data/store.py
+++ b/examples/query_data/store.py
@@ -15,7 +15,7 @@ llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="gpt-3.5-turbo
 # Configure prompt parameters and initialise helper
 max_input_size = 400
 num_output = 400
-max_chunk_overlap = 30
+max_chunk_overlap = 0.3
 prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap)
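
Both hunks switch the chunk overlap from an absolute token count to a fraction of the chunk size. As a minimal sketch of the resulting `query.py` parameter block, assuming a llama_index release whose `PromptHelper` interprets its third positional argument as a chunk-overlap ratio in [0, 1]; the import path and the renamed `chunk_overlap_ratio` variable are illustrative, not taken from the diff:

```python
from llama_index import PromptHelper  # assumed import path for this release

# Assumed API: the third PromptHelper argument is a chunk-overlap *ratio*,
# not a token count, so the old value of 20 tokens becomes 0.2 here.
max_input_size = 500        # maximum tokens the model accepts per request
num_output = 256            # tokens reserved for the model's response
chunk_overlap_ratio = 0.2   # overlap between chunks as a fraction of chunk size

prompt_helper = PromptHelper(max_input_size, num_output, chunk_overlap_ratio)
```

The same reasoning applies to `store.py`, where the 30-token overlap becomes a ratio of 0.3.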