From be2c064d4032cb0d6afe9d2b7fe4965f34e8260e Mon Sep 17 00:00:00 2001
From: Romain Quinet
Date: Fri, 6 Oct 2023 23:45:47 +0200
Subject: [PATCH] Better prompts

Switch from CondenseQuestionChatEngine + PromptTemplate to
index.as_chat_engine() with explicit ChatPromptTemplate QA/refine prompts.

NOTE(review): the original revision of this patch dropped the
`PromptTemplate` import but left `custom_prompt = PromptTemplate(...)`
behind as context, so the patched main.py raised NameError at import
time. Hunk 2 now deletes the whole (dead) custom_prompt statement and
the diffstat/hunk counts are updated to match.
---
 main.py | 67 ++++++++++++++++++++++++++++++++++++++++++++-----------------
 1 file changed, 49 insertions(+), 18 deletions(-)

diff --git a/main.py b/main.py
index dfc8c39..d77bea7 100644
--- a/main.py
+++ b/main.py
@@ -3,8 +3,7 @@ from llama_index import (SimpleDirectoryReader, ServiceContext, StorageContext,
 from llama_index.node_parser import SimpleNodeParser
 from llama_index import VectorStoreIndex
 from llama_index.llms import OpenAI, ChatMessage, MessageRole
-from llama_index.prompts import PromptTemplate
-from llama_index.chat_engine.condense_question import CondenseQuestionChatEngine
+from llama_index.prompts import ChatPromptTemplate
 import os
 import re
 
@@ -39,21 +38,53 @@ else:
     index = load_index_from_storage(storage_context)
 
-custom_prompt = PromptTemplate(
-    "You have been trained on the Darknet Diaries podcast transcripts with data from october 6 2023."
-    "You are now an expert about it and will answer as such. You know about every episode up to number 138. \n"
-    "----------------\n"
-    "Chat history: {chat_history}\n"
-    "----------------\n"
-    "Please answer this question by referring to the podcast: {question}"
-)
-custom_chat_history = []
-query_engine = index.as_query_engine()
-chat_engine = CondenseQuestionChatEngine.from_defaults(
-    query_engine=query_engine,
-    condense_question_prompt=custom_prompt,
-    chat_history=custom_chat_history,
-    verbose=True
-)
+chat_text_qa_msgs = [
+    ChatMessage(
+        role=MessageRole.SYSTEM,
+        content=(
+            "You have been trained on the Darknet Diaries podcast transcripts with data from october 6 2023."
+            "You are an expert about it and will answer as such. You know about every episode up to number 138."
+            "Always answer the question, even if the context isn't helpful."
+        )
+    ),
+    ChatMessage(
+        role=MessageRole.USER,
+        content=(
+            "Context information is below.\n"
+            "---------------------\n"
+            "{context_str}\n"
+            "---------------------\n"
+            "Given the context information and not prior knowledge,"
+            "answer the question: {query_str}\n"
+        )
+    )
+]
+text_qa_template = ChatPromptTemplate(chat_text_qa_msgs)
+chat_refine_msgs = [
+    ChatMessage(
+        role=MessageRole.SYSTEM,
+        content="Always answer the question, even if the context isn't helpful.",
+    ),
+    ChatMessage(
+        role=MessageRole.USER,
+        content=(
+            "We have the opportunity to refine the original answer "
+            "(only if needed) with some more context below.\n"
+            "------------\n"
+            "{context_msg}\n"
+            "------------\n"
+            "Given the new context, refine the original answer to better "
+            "answer the question: {query_str}. "
+            "If the context isn't useful, output the original answer again.\n"
+            "Original Answer: {existing_answer}"
+        ),
+    ),
+]
+refine_template = ChatPromptTemplate(chat_refine_msgs)
+
+chat_engine = index.as_chat_engine(
+    text_qa_template=text_qa_template,
+    refine_template=refine_template
+)
 
 while True:
     try: