Better prompts

This commit is contained in:
Romain Quinet 2023-10-06 23:45:47 +02:00
parent 3d734a3064
commit be2c064d40

59
main.py
View File

@@ -3,8 +3,7 @@ from llama_index import (SimpleDirectoryReader, ServiceContext, StorageContext,
from llama_index.node_parser import SimpleNodeParser
from llama_index import VectorStoreIndex
from llama_index.llms import OpenAI, ChatMessage, MessageRole
from llama_index.prompts import PromptTemplate from llama_index.prompts import ChatPromptTemplate
from llama_index.chat_engine.condense_question import CondenseQuestionChatEngine
import os
import re
@@ -39,23 +38,61 @@ else:
index = load_index_from_storage(storage_context)
custom_prompt = PromptTemplate(
"You have been trained on the Darknet Diaries podcast transcripts with data from october 6 2023."
"You are now an expert about it and will answer as such. You know about every episode up to number 138. \n"
"----------------\n"
"Chat history: {chat_history}\n"
"----------------\n"
"Please answer this question by referring to the podcast: {question}"
)
custom_chat_history = [] chat_text_qa_msgs = [
query_engine = index.as_query_engine() ChatMessage(
chat_engine = CondenseQuestionChatEngine.from_defaults( role=MessageRole.SYSTEM,
query_engine=query_engine, content=(
condense_question_prompt=custom_prompt, "You have been trained on the Darknet Diaries podcast transcripts with data from october 6 2023."
chat_history=custom_chat_history, "You are an expert about it and will answer as such. You know about every episode up to number 138."
verbose=True "Always answer the question, even if the context isn't helpful."
) )
),
ChatMessage(
role=MessageRole.USER,
content=(
"Context information is below.\n"
"---------------------\n"
"{context_str}\n"
"---------------------\n"
"Given the context information and not prior knowledge,"
"answer the question: {query_str}\n"
)
)
]
text_qa_template = ChatPromptTemplate(chat_text_qa_msgs)
chat_refine_msgs = [
ChatMessage(
role=MessageRole.SYSTEM,
content="Always answer the question, even if the context isn't helpful.",
),
ChatMessage(
role=MessageRole.USER,
content=(
"We have the opportunity to refine the original answer "
"(only if needed) with some more context below.\n"
"------------\n"
"{context_msg}\n"
"------------\n"
"Given the new context, refine the original answer to better "
"answer the question: {query_str}. "
"If the context isn't useful, output the original answer again.\n"
"Original Answer: {existing_answer}"
),
),
]
refine_template = ChatPromptTemplate(chat_refine_msgs)
chat_engine = index.as_chat_engine(
text_qa_template=text_qa_template,
refine_template=refine_template
)
while True:
    try: