Skip to content

Commit

Permalink
langchain[minor]: Deprecate LLMChain, ConversationalRetrievalQAChain,…
Browse files Browse the repository at this point in the history
… RetrievalQAChain (langchain-ai#5225)

* langchain[minor]: Deprecate LLMChain, ConversationalRetrievalQAChain, RetrievalQAChain

* chore: lint files

* slightly betta docstrings

* chore: lint files

* update 0.2 to 0.3
  • Loading branch information
bracesproul authored Apr 26, 2024
1 parent 5c51f0e commit ff25510
Show file tree
Hide file tree
Showing 7 changed files with 101 additions and 49 deletions.
2 changes: 0 additions & 2 deletions langchain-core/src/language_models/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -446,8 +446,6 @@ export abstract class BaseChatModel<
}

/**
* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
*
* Generates chat based on the input messages.
* @param messages An array of arrays of BaseMessage instances.
* @param options The call options or an array of stop sequences.
Expand Down
2 changes: 0 additions & 2 deletions langchain-core/src/language_models/llms.ts
Original file line number Diff line number Diff line change
Expand Up @@ -402,8 +402,6 @@ export abstract class BaseLLM<
}

/**
* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
*
* Run the LLM on the given prompts and input, handling caching.
*/
async generate(
Expand Down
2 changes: 1 addition & 1 deletion langchain-core/src/retrievers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ export abstract class BaseRetriever<
}

/**
* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
* @deprecated Use .invoke() instead. Will be removed in 0.3.0.
*
* Main method used to retrieve relevant documents. It takes a query
* string and an optional configuration object, and returns a promise that
Expand Down
12 changes: 6 additions & 6 deletions langchain-core/src/tools.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ export interface StructuredToolInterface<
schema: T | z.ZodEffects<T>;

/**
* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
* @deprecated Use .invoke() instead. Will be removed in 0.3.0.
*
* Calls the tool with the provided argument, configuration, and tags. It
* parses the input according to the schema, handles any errors, and
Expand Down Expand Up @@ -107,7 +107,7 @@ export abstract class StructuredTool<
}

/**
* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
* @deprecated Use .invoke() instead. Will be removed in 0.3.0.
*
* Calls the tool with the provided argument, configuration, and tags. It
* parses the input according to the schema, handles any errors, and
Expand Down Expand Up @@ -172,7 +172,7 @@ export abstract class StructuredTool<

export interface ToolInterface extends StructuredToolInterface {
/**
* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
* @deprecated Use .invoke() instead. Will be removed in 0.3.0.
*
* Calls the tool with the provided argument and callbacks. It handles
* string inputs specifically.
Expand All @@ -199,7 +199,7 @@ export abstract class Tool extends StructuredTool {
}

/**
* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
* @deprecated Use .invoke() instead. Will be removed in 0.3.0.
*
* Calls the tool with the provided argument and callbacks. It handles
* string inputs specifically.
Expand Down Expand Up @@ -273,7 +273,7 @@ export class DynamicTool extends Tool {
}

/**
* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
* @deprecated Use .invoke() instead. Will be removed in 0.3.0.
*/
async call(
arg: string | undefined | z.input<this["schema"]>,
Expand Down Expand Up @@ -328,7 +328,7 @@ export class DynamicStructuredTool<
}

/**
* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
* @deprecated Use .invoke() instead. Will be removed in 0.3.0.
*/
async call(
arg: z.output<T>,
Expand Down
80 changes: 61 additions & 19 deletions langchain/src/chains/conversational_retrieval_chain.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,37 +33,79 @@ export interface ConversationalRetrievalQAChainInput extends ChainInputs {
}

/**
* @deprecated This class will be removed in 0.3.0. See below for an example implementation using
* `createRetrievalChain`.
*
* Class for conducting conversational question-answering tasks with a
* retrieval component. Extends the BaseChain class and implements the
* ConversationalRetrievalQAChainInput interface.
* @example
* ```typescript
* const model = new ChatAnthropic({});
*
* const text = fs.readFileSync("state_of_the_union.txt", "utf8");
*
* const textSplitter = new RecursiveCharacterTextSplitter({ chunkSize: 1000 });
* const docs = await textSplitter.createDocuments([text]);
* import { ChatAnthropic } from "@langchain/anthropic";
* import {
* ChatPromptTemplate,
* MessagesPlaceholder,
* } from "@langchain/core/prompts";
* import { BaseMessage } from "@langchain/core/messages";
* import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
* import { createHistoryAwareRetriever } from "langchain/chains/history_aware_retriever";
* import { createRetrievalChain } from "langchain/chains/retrieval";
*
* const vectorStore = await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings());
* const retriever = ...your retriever;
* const llm = new ChatAnthropic();
*
* const chain = ConversationalRetrievalQAChain.fromLLM(
* model,
* vectorStore.asRetriever(),
* );
* // Contextualize question
* const contextualizeQSystemPrompt = `
* Given a chat history and the latest user question
* which might reference context in the chat history,
* formulate a standalone question which can be understood
* without the chat history. Do NOT answer the question, just
* reformulate it if needed and otherwise return it as is.`;
* const contextualizeQPrompt = ChatPromptTemplate.fromMessages([
* ["system", contextualizeQSystemPrompt],
* new MessagesPlaceholder("chat_history"),
* ["human", "{input}"],
* ]);
* const historyAwareRetriever = await createHistoryAwareRetriever({
* llm,
* retriever,
* rephrasePrompt: contextualizeQPrompt,
* });
*
* const question = "What did the president say about Justice Breyer?";
* // Answer question
* const qaSystemPrompt = `
* You are an assistant for question-answering tasks. Use
* the following pieces of retrieved context to answer the
* question. If you don't know the answer, just say that you
* don't know. Use three sentences maximum and keep the answer
* concise.
* \n\n
* {context}`;
* const qaPrompt = ChatPromptTemplate.fromMessages([
* ["system", qaSystemPrompt],
* new MessagesPlaceholder("chat_history"),
* ["human", "{input}"],
* ]);
*
* const res = await chain.call({ question, chat_history: "" });
* console.log(res);
 * // Below we use createStuffDocumentsChain to feed all retrieved context
* // into the LLM. Note that we can also use StuffDocumentsChain and other
* // instances of BaseCombineDocumentsChain.
* const questionAnswerChain = await createStuffDocumentsChain({
* llm,
* prompt: qaPrompt,
* });
*
* const chatHistory = `${question}\n${res.text}`;
* const followUpRes = await chain.call({
* question: "Was that nice?",
* chat_history: chatHistory,
* const ragChain = await createRetrievalChain({
* retriever: historyAwareRetriever,
* combineDocsChain: questionAnswerChain,
* });
* console.log(followUpRes);
*
* // Usage:
* const chat_history: BaseMessage[] = [];
* const response = await ragChain.invoke({
* chat_history,
* input: "...",
* });
* ```
*/
export class ConversationalRetrievalQAChain
Expand Down
15 changes: 10 additions & 5 deletions langchain/src/chains/llm_chain.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,16 +72,21 @@ function _getLanguageModel(llmLike: RunnableInterface): BaseLanguageModel {
}

/**
* @deprecated This class will be removed in 0.3.0. Use the LangChain Expression Language (LCEL) instead.
* See the example below for how to use LCEL with the LLMChain class:
*
* Chain to run queries against LLMs.
*
* @example
* ```ts
* import { LLMChain } from "langchain/chains";
* import { OpenAI } from "langchain/llms/openai";
* import { PromptTemplate } from "langchain/prompts";
* import { ChatPromptTemplate } from "@langchain/core/prompts";
* import { ChatOpenAI } from "@langchain/openai";
*
* const prompt = ChatPromptTemplate.fromTemplate("Tell me a {adjective} joke");
* const llm = new ChatOpenAI();
* const chain = prompt.pipe(llm);
*
* const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke");
* const llm = new LLMChain({ llm: new OpenAI(), prompt });
* const response = await chain.invoke({ adjective: "funny" });
* ```
*/
export class LLMChain<
Expand Down
37 changes: 23 additions & 14 deletions langchain/src/chains/retrieval_qa.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,28 +23,37 @@ export interface RetrievalQAChainInput extends Omit<ChainInputs, "memory"> {
}

/**
* @deprecated This class will be removed in 0.3.0. See below for an example implementation using
* `createRetrievalChain`:
* Class representing a chain for performing question-answering tasks with
* a retrieval component.
* @example
* ```typescript
* // Initialize the OpenAI model and the remote retriever with the specified configuration
* const model = new ChatOpenAI({});
* const retriever = new RemoteLangChainRetriever({
* url: "http://example.com/api",
* auth: { bearer: "foo" },
* inputKey: "message",
* responseKey: "response",
* });
* import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
* import { ChatPromptTemplate } from "@langchain/core/prompts";
* import { createRetrievalChain } from "langchain/chains/retrieval";
* import { MemoryVectorStore } from "langchain/vectorstores/memory";
*
* const documents = [...your documents here];
* const embeddings = ...your embeddings model;
* const llm = ...your LLM model;
*
* // Create a RetrievalQAChain using the model and retriever
* const chain = RetrievalQAChain.fromLLM(model, retriever);
* const vectorstore = await MemoryVectorStore.fromDocuments(
* documents,
* embeddings
* );
 * const prompt = ChatPromptTemplate.fromTemplate(
 *   `Answer the user's question: {input} based on the following context:\n\n{context}`
 * );
*
* // Execute the chain with a query and log the result
* const res = await chain.call({
* query: "What did the president say about Justice Breyer?",
* const combineDocsChain = await createStuffDocumentsChain({
* llm,
* prompt,
* });
* console.log({ res });
* const retriever = vectorstore.asRetriever();
*
* const retrievalChain = await createRetrievalChain({
* combineDocsChain,
* retriever,
* });
* ```
*/
export class RetrievalQAChain
Expand Down

0 comments on commit ff25510

Please sign in to comment.