Update kb2.py
kb2.py CHANGED
@@ -18,15 +18,22 @@ RATE_LIMIT = 3
 #@limits(calls=RATE_LIMIT, period=1)
 def create_service_context():
 
-    # Constraint parameters
+    # Constraint parameters ORIGINAL
+    # max_input_size = 4096
+    # num_outputs = 512
+    # max_chunk_overlap = 20
+    # chunk_size_limit = 600
+
+    # Allows the user to explicitly set certain constraint parameters
+    # prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
     max_input_size = 4096
     num_outputs = 512
     max_chunk_overlap = 20
-    chunk_size_limit = 600
-
-
-    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
+    chunk_size_limit = 600
+    prompt_helper = PromptHelper(max_input_size, num_outputs, chunk_overlap_ratio=0.1, chunk_size_limit=chunk_size_limit)
+    # llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-4", max_tokens=num_outputs))
 
+
     # LLMPredictor is a wrapper class around LangChain's LLMChain that allows easy integration into LlamaIndex
     llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo", max_tokens=num_outputs))
 
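For context, a minimal sketch of how the updated create_service_context() plausibly reads after this commit. The imports and the closing ServiceContext.from_defaults(...) call are assumptions about the parts of kb2.py outside this hunk (they follow the usual llama_index / LangChain pattern) and are not shown in the diff itself:

# Sketch only: reconstructed from this hunk. The imports and the final
# ServiceContext assembly are assumptions about the rest of kb2.py.
from llama_index import LLMPredictor, PromptHelper, ServiceContext
from langchain.chat_models import ChatOpenAI

def create_service_context():
    # Constraint parameters
    max_input_size = 4096    # context window handed to PromptHelper
    num_outputs = 512        # max tokens the LLM may generate
    max_chunk_overlap = 20   # kept from the old code but no longer passed anywhere
    chunk_size_limit = 600   # cap on chunk size when splitting documents

    # Newer PromptHelper versions take a chunk_overlap_ratio keyword instead of
    # the old max_chunk_overlap argument, which is what this commit switches to.
    prompt_helper = PromptHelper(max_input_size, num_outputs,
                                 chunk_overlap_ratio=0.1,
                                 chunk_size_limit=chunk_size_limit)

    # LLMPredictor wraps LangChain's ChatOpenAI so it can be used by LlamaIndex.
    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.5,
                                                model_name="gpt-3.5-turbo",
                                                max_tokens=num_outputs))

    # Assumption: the remainder of the function bundles these into a ServiceContext.
    return ServiceContext.from_defaults(llm_predictor=llm_predictor,
                                        prompt_helper=prompt_helper)

The switch from the positional max_chunk_overlap argument to the chunk_overlap_ratio keyword matches the newer PromptHelper signature and is the likely reason for the edit; note that the max_chunk_overlap = 20 assignment survives in the committed code but is no longer used.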