From 7b1cab98df540e0d78f8125a0a3e556f0505c5b6 Mon Sep 17 00:00:00 2001
From: Shaun
Date: Mon, 24 Jul 2023 21:37:29 -0400
Subject: [PATCH 1/3] Update README.md (fix repo url)

---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 4d70d58..9fd6ad4 100644
--- a/README.md
+++ b/README.md
@@ -21,8 +21,8 @@ To set up and run this project, follow these steps:
 1. Clone the repository and navigate to the project directory:
 
 ```bash
-git clone https://github.com/peterw/Chat-with-Git-Repo.git
-cd Chat-with-Git-Repo
+git clone https://github.com/peterw/Chat-with-Github-Repo.git
+cd Chat-with-Github-Repo
 ```
 
 2. Install the required packages with `pip`:

From 4e595c6ef8287e32fbdd63e5de88611490220feb Mon Sep 17 00:00:00 2001
From: Shaun Livingston
Date: Sat, 29 Jul 2023 07:49:25 -0600
Subject: [PATCH 2/3] add custom prompt

---
 src/utils/chat.py | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/src/utils/chat.py b/src/utils/chat.py
index 9e22068..77f252c 100644
--- a/src/utils/chat.py
+++ b/src/utils/chat.py
@@ -67,6 +67,16 @@ def get_text():
     input_text = st.text_input("", key="input")
     return input_text
 
+def get_prompt():
+    prompt_template = """Use the following pieces of context to answer the question at the end.
+    If you don't know the answer, just say that you don't know, don't try to make up an answer.
+    If the question isn't about the context, just say that your purpose is to provide information about the Repo.
+
+    {context}
+
+    Question: {question}
+    """
+    return PromptTemplate(template=prompt_template, input_variables=["context", "question"])
 
 def search_db(db, query):
     """Search for a response to the query in the DeepLake database."""
@@ -80,7 +90,8 @@ def search_db(db, query):
     # Create a ChatOpenAI model instance
     model = ChatOpenAI(model="gpt-3.5-turbo")
     # Create a RetrievalQA instance from the model and retriever
-    qa = RetrievalQA.from_llm(model, retriever=retriever)
+    chain_type_kwargs = {"prompt": get_prompt()}
+    qa = RetrievalQA.from_chain_type(model, retriever=retriever, chain_type_kwargs=chain_type_kwargs)
 
     # Return the result of the query
     return qa.run(query)

From 26d82dfa583ed4d787d6be8e3cc08e2794438676 Mon Sep 17 00:00:00 2001
From: Shaun Livingston
Date: Sat, 29 Jul 2023 07:50:38 -0600
Subject: [PATCH 3/3] fix typo

---
 src/utils/chat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/utils/chat.py b/src/utils/chat.py
index 77f252c..303e957 100644
--- a/src/utils/chat.py
+++ b/src/utils/chat.py
@@ -29,7 +29,7 @@ def run_chat_app(activeloop_dataset_path):
 
     # Initialize the session state for generated responses and past inputs
     if "generated" not in st.session_state:
-        st.session_state["generated"] = ["i am ready to help you ser"]
+        st.session_state["generated"] = ["i am ready to help you sir"]
 
     if "past" not in st.session_state:
         st.session_state["past"] = ["hello"]
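
For readers who want to exercise the change from [PATCH 2/3] outside the Streamlit app, the sketch below shows the same pattern standalone: a custom `PromptTemplate` routed into `RetrievalQA` through `chain_type_kwargs`. This is a minimal sketch, not part of the patch series: the `build_qa` helper, the placeholder dataset path, and the LangChain 0.0.x-era import paths (including the `PromptTemplate` import, which the patch assumes is already available in `src/utils/chat.py`) are assumptions for illustration.

```python
# Standalone sketch of the pattern added in [PATCH 2/3]: a custom PromptTemplate
# passed to RetrievalQA via chain_type_kwargs. Import paths assume the
# LangChain 0.0.x line this repo targeted in mid-2023.
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain.vectorstores import DeepLake

PROMPT = PromptTemplate(
    template=(
        "Use the following pieces of context to answer the question at the end.\n"
        "If you don't know the answer, just say that you don't know, "
        "don't try to make up an answer.\n\n"
        "{context}\n\n"
        "Question: {question}\n"
    ),
    input_variables=["context", "question"],
)


def build_qa(dataset_path: str) -> RetrievalQA:
    # Open an existing DeepLake dataset read-only and expose it as a retriever.
    db = DeepLake(
        dataset_path=dataset_path,
        embedding_function=OpenAIEmbeddings(),
        read_only=True,
    )
    retriever = db.as_retriever()

    # The "stuff" chain's default prompt expects {context} and {question},
    # so a drop-in replacement prompt must keep those two input variables.
    return RetrievalQA.from_chain_type(
        ChatOpenAI(model="gpt-3.5-turbo"),
        chain_type="stuff",
        retriever=retriever,
        chain_type_kwargs={"prompt": PROMPT},
    )


if __name__ == "__main__":
    # Hypothetical dataset path; the app reads the real one from its configuration.
    qa = build_qa("hub://<your-org>/<your-dataset>")
    print(qa.run("What does this repository do?"))
```

The `chain_type_kwargs={"prompt": ...}` route forwards the template to the underlying "stuff" documents chain, which is why the template has to keep `{context}` and `{question}` as its input variables: those are the slots the chain fills with the retrieved documents and the user's question.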