diff --git a/langchain/src/chat_models/llama_cpp.ts b/langchain/src/chat_models/llama_cpp.ts
index 76ad790708c8..828b415614a9 100644
--- a/langchain/src/chat_models/llama_cpp.ts
+++ b/langchain/src/chat_models/llama_cpp.ts
@@ -136,6 +136,7 @@ export class ChatLlamaCpp extends SimpleChatModel {
     try {
       const promptOptions = {
+        onToken: _options.onToken,
        maxTokens: this?.maxTokens,
        temperature: this?.temperature,
        topK: this?.topK,
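
A minimal usage sketch (not part of the diff) of the `onToken` call option this hunk forwards into node-llama-cpp's prompt options. It assumes the PR also declares `onToken` on the model's call-options interface, that the callback receives raw token ids as generation proceeds, and that `modelPath` points at a local model file; the path and callback body here are hypothetical.

```typescript
import { ChatLlamaCpp } from "langchain/chat_models/llama_cpp";
import { HumanMessage } from "langchain/schema";

const model = new ChatLlamaCpp({
  // Placeholder path; point this at a local GGUF model file.
  modelPath: "/path/to/model.gguf",
  temperature: 0.5,
});

const response = await model.call(
  [new HumanMessage("Tell me a short joke about llamas.")],
  {
    // Forwarded by the change above into node-llama-cpp's promptOptions.
    // The tokens arrive as raw ids; decoding them needs the underlying
    // llama context, so this sketch only reports progress.
    onToken(tokens: number[]) {
      process.stdout.write(`[${tokens.length} token(s)] `);
    },
  }
);

console.log("\n", response.content);
```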