提交 ec677bee 作者: imClumsyPanda

Update local_doc_qa.py

上级 21362cd9
...@@ -117,26 +117,23 @@ class LocalDocQA: ...@@ -117,26 +117,23 @@ class LocalDocQA:
问题: 问题:
{question}""" {question}"""
if vs_path is None or vs_path =="":# or (not os.path.exists(vs_path)) prompt = PromptTemplate(
result = self.llm.chat(query) template=prompt_template,
else: input_variables=["context", "question"]
prompt = PromptTemplate( )
template=prompt_template, self.llm.history = chat_history
input_variables=["context", "question"] vector_store = FAISS.load_local(vs_path, self.embeddings)
) knowledge_chain = RetrievalQA.from_llm(
self.llm.history = chat_history llm=self.llm,
vector_store = FAISS.load_local(vs_path, self.embeddings) retriever=vector_store.as_retriever(search_kwargs={"k": self.top_k}),
knowledge_chain = RetrievalQA.from_llm( prompt=prompt
llm=self.llm, )
retriever=vector_store.as_retriever(search_kwargs={"k": self.top_k}), knowledge_chain.combine_documents_chain.document_prompt = PromptTemplate(
prompt=prompt input_variables=["page_content"], template="{page_content}"
) )
knowledge_chain.combine_documents_chain.document_prompt = PromptTemplate(
input_variables=["page_content"], template="{page_content}"
)
knowledge_chain.return_source_documents = True
knowledge_chain.return_source_documents = True
result = knowledge_chain({"query": query}) result = knowledge_chain({"query": query})
self.llm.history[-1][0] = query self.llm.history[-1][0] = query
return result, self.llm.history return result, self.llm.history
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论