Commit 30f7c43 · 1 Parent(s): d969120

qa chain modifications
StudybotAPI/backend/utils/chain_loader.py CHANGED

@@ -21,7 +21,7 @@ async def llm_chain_loader(DATA_PATH: str):
         prompt = f.read()
 
     prompt = PromptTemplate(
-        template=prompt, input_variables=["context", "
+        template=prompt, input_variables=["context", "question"]
     )
 
     llm = Clarifai(
@@ -32,24 +32,26 @@ async def llm_chain_loader(DATA_PATH: str):
         model_version_id=config.MODEL_VERSION_ID,
     )
 
-
-    # llm=llm,
-    # chain_type="stuff",
-    # retriever=db.as_retriever(search_type="similarity",search_kwargs={"k": 2}),
-    # return_source_documents=True,
-    # chain_type_kwargs={"prompt": prompt},
-    # )
-
-    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
-    qa_chain = ConversationalRetrievalChain.from_llm(
+    qa_chain = RetrievalQA.from_chain_type(
         llm=llm,
         chain_type="stuff",
         retriever=db.as_retriever(
-            search_type="mmr", search_kwargs={"k": 2, "fetch_k": 4}
-
-
-
-        memory=memory,
+            search_type="mmr", search_kwargs={"k": 2, "fetch_k": 4}
+        ),
+        return_source_documents=True,
+        chain_type_kwargs={"prompt": prompt},
     )
 
+    # memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
+    # qa_chain = ConversationalRetrievalChain.from_llm(
+    #     llm=llm,
+    #     chain_type="stuff",
+    #     retriever=db.as_retriever(
+    #         search_type="mmr", search_kwargs={"k": 2, "fetch_k": 4}),
+    #     # return_source_documents=True,
+    #     # chain_type_kwargs={"prompt": prompt},
+    #     condense_question_prompt=prompt,
+    #     memory=memory,
+    # )
+
     app.state.qa_chain = qa_chain
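For context on how a chain built this way is consumed: a legacy LangChain RetrievalQA chain is called with a dict carrying a "query" key, and with return_source_documents=True the output carries the retrieved documents alongside the answer. A minimal, self-contained usage sketch follows; the /ask endpoint, its parameters, and the standalone FastAPI app are illustrative assumptions, not part of this repository.

# Usage sketch only: assumes app.state.qa_chain was populated by
# llm_chain_loader() as in this commit. The /ask route and its shape
# are hypothetical, not taken from StudybotAPI.
from fastapi import FastAPI, Request

app = FastAPI()


@app.post("/ask")
async def ask(request: Request, question: str):
    qa_chain = request.app.state.qa_chain
    # RetrievalQA chains accept {"query": ...}; the result dict holds
    # "result" (the answer) and, because return_source_documents=True,
    # "source_documents" with the retrieved chunks.
    result = qa_chain({"query": question})
    return {
        "answer": result["result"],
        "sources": [doc.metadata for doc in result["source_documents"]],
    }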
StudybotAPI/backend/utils/prompt.txt CHANGED

@@ -9,10 +9,7 @@ The "SOURCES" part should be a reference to the source of the document from whic
 Consider a student engaged in the study of any theoretical subject, where the abundance of concepts and events poses a challenge to memorization. The aim is to overcome this hurdle and be capable of providing brief answers to specific queries. For example, if a student forgets a key concept, date, or event, they can ask the bot a question like "What is [specific query]?" for a concise answer.
 Note that students can also ask multiple questions in a single query. For example, "What is [specific query 1]?, What is [specific query 2]?, What is [specific query 3]?".
 
-Chat History:
-{chat_history}
 
-
-Standalone question:
+{question}
 
 [/INST]
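With the Chat History / {chat_history} block removed, the template needs only the two variables wired up in chain_loader.py, "context" and "question", which the "stuff" chain fills from the retrieved documents and the user query. Below is a small sketch of how such a two-variable template renders; the template string is a shortened stand-in for prompt.txt, not its actual contents.

# Sketch of the two-variable prompt; the template text is a stand-in,
# not the real prompt.txt.
from langchain.prompts import PromptTemplate

template = (
    "[INST] Answer the question using only the context below.\n\n"
    "Context:\n{context}\n\n"
    "{question}\n"
    "[/INST]"
)

prompt = PromptTemplate(template=template, input_variables=["context", "question"])

print(
    prompt.format(
        context="The Treaty of Versailles was signed on 28 June 1919.",
        question="When was the Treaty of Versailles signed?",
    )
)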