|
| 1 | +import CodeBlock from "@theme/CodeBlock"; |
| 2 | + |
| 3 | +# Conversational Retrieval Agents |
| 4 | + |
| 5 | +This is an agent specifically optimized for doing retrieval when necessary while holding a conversation and being able |
| 6 | +to answer questions based on previous dialogue in the conversation. |
| 7 | + |
| 8 | +To start, we will set up the retriever we want to use, then turn it into a retriever tool. Next, we will use the high-level constructor for this type of agent. |
| 9 | +Finally, we will walk through how to construct a conversational retrieval agent from components. |
| 10 | + |
| 11 | +## The Retriever |
| 12 | + |
| 13 | +To start, we need a retriever to use! The code here is mostly just example code. Feel free to use your own retriever and skip to the next section on creating a retriever tool. |
| 14 | + |
| 15 | +```typescript |
| 16 | +import { FaissStore } from "langchain/vectorstores/faiss"; |
| 17 | +import { OpenAIEmbeddings } from "langchain/embeddings/openai"; |
| 18 | +import { TextLoader } from "langchain/document_loaders/fs/text"; |
| 19 | +import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"; |
| 20 | + |
| 21 | +const loader = new TextLoader("state_of_the_union.txt"); |
| 22 | +const docs = await loader.load(); |
| 23 | +const splitter = new RecursiveCharacterTextSplitter({ |
| 24 | + chunkSize: 1000, |
| 25 | + chunkOverlap: 0 |
| 26 | +}); |
| 27 | + |
| 28 | +const texts = await splitter.splitDocuments(docs); |
| 29 | + |
| 30 | +const vectorStore = await FaissStore.fromDocuments(texts, new OpenAIEmbeddings()); |
| 31 | + |
| 32 | +const retriever = vectorStore.asRetriever(); |
| 33 | +``` |
| 34 | + |
| 35 | +## Retriever Tool |
| 36 | + |
| 37 | +Now we need to create a tool for our retriever. The main things we need to pass in are a `name` for the retriever as well as a `description`. These will both be used by the language model, so they should be informative. |
| 38 | + |
| 39 | +```typescript |
| 40 | +import { createRetrieverTool } from "langchain/agents/toolkits"; |
| 41 | + |
| 42 | +const tool = createRetrieverTool(retriever, { |
| 43 | + name: "search_state_of_union", |
| 44 | + description: "Searches and returns documents regarding the state-of-the-union.", |
| 45 | +}); |
| 46 | +``` |
| 47 | + |
| 48 | +## Agent Constructor |
| 49 | + |
| 50 | +Here, we will use the high-level `createConversationalRetrievalAgent` API to construct the agent. |
| 51 | +Notice that besides the list of tools, the only thing we need to pass in is a language model to use. |
| 52 | + |
| 53 | +Under the hood, this agent is using the OpenAIFunctionsAgent, so we need to use a ChatOpenAI model. |
| 54 | + |
| 55 | +```typescript |
| 56 | +import { createConversationalRetrievalAgent } from "langchain/agents/toolkits"; |
| 57 | +import { ChatOpenAI } from "langchain/chat_models/openai"; |
| 58 | + |
| 59 | +const model = new ChatOpenAI({ |
| 60 | + temperature: 0, |
| 61 | +}); |
| 62 | + |
| 63 | +const executor = await createConversationalRetrievalAgent(model, [tool], { |
| 64 | + verbose: true, |
| 65 | +}); |
| 66 | +``` |
| 67 | + |
| 68 | +We can now try it out! |
| 69 | + |
| 70 | +```typescript |
| 71 | +const result = await executor.call({ |
| 72 | + input: "Hi, I'm Bob!" |
| 73 | +}); |
| 74 | + |
| 75 | +console.log(result); |
| 76 | + |
| 77 | +/* |
| 78 | + { |
| 79 | + output: 'Hello Bob! How can I assist you today?', |
| 80 | + intermediateSteps: [] |
| 81 | + } |
| 82 | +*/ |
| 83 | + |
| 84 | +const result2 = await executor.call({ |
| 85 | + input: "What's my name?" |
| 86 | +}); |
| 87 | + |
| 88 | +console.log(result2); |
| 89 | + |
| 90 | +/* |
| 91 | + { output: 'Your name is Bob.', intermediateSteps: [] } |
| 92 | +*/ |
| 93 | + |
| 94 | +const result3 = await executor.call({ |
| 95 | + input: "What did the president say about Ketanji Brown Jackson in the most recent state of the union?" |
| 96 | +}); |
| 97 | + |
| 98 | +console.log(result3); |
| 99 | + |
| 100 | +/* |
| 101 | + { |
| 102 | + output: "In the most recent state of the union, President Biden mentioned Ketanji Brown Jackson. He nominated her as a Circuit Court of Appeals judge and described her as one of the nation's top legal minds who will continue Justice Breyer's legacy of excellence. He mentioned that she has received a broad range of support, including from the Fraternal Order of Police and former judges appointed by Democrats and Republicans.", |
| 103 | + intermediateSteps: [ |
| 104 | + {...} |
| 105 | + ] |
| 106 | + } |
| 107 | +*/ |
| 108 | + |
| 109 | +const result4 = await executor.call({ |
| 110 | + input: "How long ago did he nominate her?" |
| 111 | +}); |
| 112 | + |
| 113 | +console.log(result4); |
| 114 | + |
| 115 | +/* |
| 116 | + { |
| 117 | + output: 'President Biden nominated Ketanji Brown Jackson four days before the most recent state of the union address.', |
| 118 | + intermediateSteps: [] |
| 119 | + } |
| 120 | +*/ |
| 121 | +``` |
| 122 | + |
| 123 | +Note that for the final call, the agent used previously retrieved information to answer the query and did not need to call the tool again! |
| 124 | + |
| 125 | +Here's a trace showing how the agent fetches documents to answer the question with the retrieval tool: |
| 126 | + |
| 127 | +https://smith.langchain.com/public/1e2b1887-ca44-4210-913b-a69c1b8a8e7e/r |
| 128 | + |
| 129 | +## Creating from components |
| 130 | + |
| 131 | +What actually is going on underneath the hood? Let's take a look so we can understand how to modify things going forward. |
| 132 | + |
| 133 | +### Memory |
| 134 | + |
| 135 | +In this example, we want the agent to remember not only previous conversations, but also previous intermediate steps. |
| 136 | +For that, we can use `OpenAIAgentTokenBufferMemory`. Note that if you want to change whether the agent remembers intermediate steps, |
| 137 | +how long the retained buffer is, or anything like that, you should change this part. |
| 138 | + |
| 139 | +```typescript |
| 140 | +import { OpenAIAgentTokenBufferMemory } from "langchain/agents/toolkits"; |
| 141 | + |
| 142 | +const memory = new OpenAIAgentTokenBufferMemory({ llm: model, memoryKey: "chat_history" }); |
| 143 | +``` |
| 144 | + |
| 145 | +You should make sure `memoryKey` is set to `chat_history` for the OpenAI functions agent. This memory also has `returnMessages` set to `true` by default. |
| 146 | + |
| 147 | +### Agent executor |
| 148 | + |
| 149 | +We can recreate the agent executor directly with the `initializeAgentExecutorWithOptions` method. |
| 150 | +This allows us to customize the agent's system message by passing in a `prefix` into `agentArgs`. |
| 151 | +Importantly, we must pass in `returnIntermediateSteps: true` since we are recording that with our memory object. |
| 152 | + |
| 153 | +```typescript |
| 154 | +import { initializeAgentExecutorWithOptions } from "langchain/agents"; |
| 155 | + |
| 156 | +const executor = await initializeAgentExecutorWithOptions([tool], model, { |
| 157 | + agentType: "openai-functions", |
| 158 | + memory, |
| 159 | + returnIntermediateSteps: true, |
| 160 | + agentArgs: { |
| 161 | +    prefix: |
| 162 | +      `Do your best to answer the questions. Feel free to use any tools available to look up relevant information, only if necessary.`, |
| 164 | + }, |
| 165 | +}); |
| 166 | +``` |
0 commit comments