Update the /chat route in app.py to accept a template_id parameter in the request JSON:
@app.route('/chat', methods=['POST'])
def chat():
    """Handle a chat message POSTed as JSON.

    Expects a body of the form {"input": str, "template_id": str (optional)}
    and returns {"response": str}. Responds with HTTP 400 when the required
    "input" field is absent or the body is not valid JSON.
    """
    # silent=True: return None instead of raising on a non-JSON body,
    # so we can answer 400 rather than crashing with a 500.
    payload = request.get_json(silent=True) or {}
    user_input = payload.get('input')
    if user_input is None:
        # Previously request.json['input'] raised KeyError (-> 500) here.
        return jsonify({"error": "Missing required field 'input'."}), 400
    template_id = payload.get('template_id')
    if template_id:
        response = conversation.predict(input=user_input, template_id=template_id)
    else:
        response = conversation.predict(input=user_input)
    return jsonify({"response": response})
This modification checks if template_id is present in the request JSON and passes it to the conversation.predict method if it exists.
Rather than editing the installed langchain package in place (upgrades would silently overwrite the change), subclass ConversationChain in your own module and override the predict method to accept a template_id parameter. Use this parameter to apply the selected template's context before sending the input to the LLM.
def predict(self, input: str, template_id: Optional[str] = None) -> str:
    """Generate an LLM response for *input*.

    When a truthy ``template_id`` is supplied, the matching template's
    context string is prepended to the input before generation
    (assumes each template dict carries a 'context' key — TODO confirm
    against the template file's schema).
    """
    prompt = input
    if template_id:
        # Prefix the template's context so the LLM sees it before the user text.
        template = self.get_template_by_id(template_id)
        prompt = f"{template['context']} {input}"
    return self.llm.generate(prompt)
def get_template_by_id(self, template_id: str) -> Dict[str, str]:
    """Return the template dict whose 'id' equals *template_id*.

    Raises ValueError when no loaded template matches.
    """
    # First match wins, same as the original linear scan.
    found = next(
        (tpl for tpl in self.load_templates() if tpl['id'] == template_id),
        None,
    )
    if found is None:
        raise ValueError(f"Template with ID '{template_id}' not found.")
    return found
def load_templates(self) -> List[Dict[str, str]]:
    """Load the prompt templates from ``prompt_templates.json``.

    Returns the parsed list of template dicts (each presumably carrying
    at least 'id' and 'context' keys — verify against the JSON file).
    Raises FileNotFoundError / json.JSONDecodeError on a missing or
    malformed file, which callers should treat as a configuration error.
    """
    # Explicit encoding: JSON is UTF-8 by spec; the platform-default
    # locale encoding can mis-decode non-ASCII template text.
    with open('prompt_templates.json', 'r', encoding='utf-8') as f:
        return json.load(f)
Update the JavaScript code in script.js to include the template_id in the POST request to the /chat endpoint:
form.addEventListener("submit", async (e) => {
  // Send the user's message plus the selected prompt template to /chat
  // and render the reply (or a readable error) into responseContainer.
  e.preventDefault();
  const input = userInput.value;
  const selectedTemplateId = templateSelector.value;
  try {
    const response = await fetch("/chat", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify({ input, template_id: selectedTemplateId }),
    });
    if (!response.ok) {
      // Without this check a server error rendered "undefined".
      responseContainer.textContent = `Request failed (${response.status}).`;
      return;
    }
    const data = await response.json();
    responseContainer.textContent = data.response;
  } catch (err) {
    // fetch rejects only on network-level failures (server unreachable, CORS).
    responseContainer.textContent = "Network error — please try again.";
  }
});
With these changes, the selected template's context will be applied to the user input before it's sent to the LLM. Make sure to adjust the file paths and template loading process according to your project's structure.