Merge pull request #434 from jacoblee93/jacob/update_versions
Update LangChain and Pinecone client, use expression language for chain
Showing 8 changed files with 345 additions and 116 deletions.
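The PR title mentions updating the Pinecone client alongside LangChain, but the setup code for that client lives in the other changed files, which did not render here. As context only, here is a minimal sketch of how a retriever might be built with the v1 `@pinecone-database/pinecone` client and handed to the refactored `makeChain`; the import path, environment variable names, and `textKey` value are assumptions, not something this diff shows.

```typescript
// Hypothetical wiring (not part of this diff): build a retriever with the v1
// Pinecone client and pass it to the refactored makeChain.
import { Pinecone } from '@pinecone-database/pinecone';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { PineconeStore } from 'langchain/vectorstores/pinecone';
import { makeChain } from './makechain'; // assumed path to the file in this diff

async function buildChain() {
  // Assumed environment variable names; the v1 client takes apiKey and environment.
  const pinecone = new Pinecone({
    apiKey: process.env.PINECONE_API_KEY!,
    environment: process.env.PINECONE_ENVIRONMENT!,
  });
  const pineconeIndex = pinecone.Index(process.env.PINECONE_INDEX_NAME!);

  // Wrap the existing index in a LangChain vector store, then hand makeChain
  // a retriever instead of the store itself (the new function signature).
  const vectorStore = await PineconeStore.fromExistingIndex(
    new OpenAIEmbeddings(),
    { pineconeIndex, textKey: 'text' },
  );
  return makeChain(vectorStore.asRetriever());
}
```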
@@ -1,37 +1,85 @@
 import { ChatOpenAI } from 'langchain/chat_models/openai';
-import { PineconeStore } from 'langchain/vectorstores/pinecone';
-import { ConversationalRetrievalQAChain } from 'langchain/chains';
+import { ChatPromptTemplate } from 'langchain/prompts';
+import { RunnableSequence } from 'langchain/schema/runnable';
+import { StringOutputParser } from 'langchain/schema/output_parser';
+import type { Document } from 'langchain/document';
+import type { VectorStoreRetriever } from 'langchain/vectorstores/base';
 
 const CONDENSE_TEMPLATE = `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
 
-Chat History:
-{chat_history}
+<chat_history>
+{chat_history}
+</chat_history>
+
 Follow Up Input: {question}
 Standalone question:`;
 
-const QA_TEMPLATE = `You are a helpful AI assistant. Use the following pieces of context to answer the question at the end.
+const QA_TEMPLATE = `You are an expert researcher. Use the following pieces of context to answer the question at the end.
 If you don't know the answer, just say you don't know. DO NOT try to make up an answer.
-If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context.
+If the question is not related to the context or chat history, politely respond that you are tuned to only answer questions that are related to the context.
 
-{context}
+<context>
+{context}
+</context>
+
+<chat_history>
+{chat_history}
+</chat_history>
 
 Question: {question}
 Helpful answer in markdown:`;
 
-export const makeChain = (vectorstore: PineconeStore) => {
+const combineDocumentsFn = (docs: Document[], separator = '\n\n') => {
+  const serializedDocs = docs.map((doc) => doc.pageContent);
+  return serializedDocs.join(separator);
+};
+
+export const makeChain = (retriever: VectorStoreRetriever) => {
+  const condenseQuestionPrompt =
+    ChatPromptTemplate.fromTemplate(CONDENSE_TEMPLATE);
+  const answerPrompt = ChatPromptTemplate.fromTemplate(QA_TEMPLATE);
+
   const model = new ChatOpenAI({
-    temperature: 0, // increase temepreature to get more creative answers
+    temperature: 0, // increase temperature to get more creative answers
     modelName: 'gpt-3.5-turbo', //change this to gpt-4 if you have access
   });
 
-  const chain = ConversationalRetrievalQAChain.fromLLM(
+  // Rephrase the initial question into a dereferenced standalone question based on
+  // the chat history to allow effective vectorstore querying.
+  const standaloneQuestionChain = RunnableSequence.from([
+    condenseQuestionPrompt,
+    model,
+    new StringOutputParser(),
+  ]);
+
+  // Retrieve documents based on a query, then format them.
+  const retrievalChain = retriever.pipe(combineDocumentsFn);
+
+  // Generate an answer to the standalone question based on the chat history
+  // and retrieved documents. Additionally, we return the source documents directly.
+  const answerChain = RunnableSequence.from([
+    {
+      context: RunnableSequence.from([
+        (input) => input.question,
+        retrievalChain,
+      ]),
+      chat_history: (input) => input.chat_history,
+      question: (input) => input.question,
+    },
+    answerPrompt,
     model,
-    vectorstore.asRetriever(),
+    new StringOutputParser(),
+  ]);
+
+  // First generate a standalone question, then answer it based on
+  // chat history and retrieved context documents.
+  const conversationalRetrievalQAChain = RunnableSequence.from([
     {
-      qaTemplate: QA_TEMPLATE,
-      questionGeneratorTemplate: CONDENSE_TEMPLATE,
-      returnSourceDocuments: true, //The number of source documents returned is 4 by default
+      question: standaloneQuestionChain,
+      chat_history: (input) => input.chat_history,
     },
-  );
-  return chain;
+    answerChain,
+  ]);
+
+  return conversationalRetrievalQAChain;
 };
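For reference, a hedged sketch of how the chain returned by the new `makeChain` might be called. The input keys match the `{question}` and `{chat_history}` prompt variables in the diff above, and `RunnableSequence` exposes `invoke()`; how chat history is serialized into a string is an assumption here, since this commit only shows the prompt side.

```typescript
// Hypothetical usage of the chain returned by makeChain. The final
// StringOutputParser means invoke() resolves to a plain string.
import type { VectorStoreRetriever } from 'langchain/vectorstores/base';
import { makeChain } from './makechain'; // assumed path to the file in this diff

async function askQuestion(retriever: VectorStoreRetriever) {
  const chain = makeChain(retriever);

  // How chat history is flattened to a string is an assumption; the diff only
  // shows that the prompts expect a {chat_history} variable.
  const chatHistory = [
    'Human: What is this document about?',
    'Assistant: It summarizes the quarterly financial report.',
  ].join('\n');

  return chain.invoke({
    question: 'How did revenue change compared to the previous quarter?',
    chat_history: chatHistory,
  });
}
```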