Skip to content

Commit

Permalink
✨ feat: add support for InternLM (书生浦语) provider (#4711)
Browse files Browse the repository at this point in the history
* ✨ feat: add support for InternLM (书生浦语) provider

* 🐛 fix: fix tool calling by disabling streaming, which is not supported

* 💄 style: disable Client Fetch mode, which is not supported

* 🔨 chore: cleanup code
  • Loading branch information
hezhijie0327 authored Nov 19, 2024
1 parent c59f338 commit aaae059
Show file tree
Hide file tree
Showing 12 changed files with 347 additions and 0 deletions.
2 changes: 2 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,8 @@ ENV \
HUGGINGFACE_API_KEY="" HUGGINGFACE_MODEL_LIST="" HUGGINGFACE_PROXY_URL="" \
# Hunyuan
HUNYUAN_API_KEY="" HUNYUAN_MODEL_LIST="" \
# InternLM
INTERNLM_API_KEY="" INTERNLM_MODEL_LIST="" \
# Minimax
MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
# Mistral
Expand Down
2 changes: 2 additions & 0 deletions Dockerfile.database
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,8 @@ ENV \
HUGGINGFACE_API_KEY="" HUGGINGFACE_MODEL_LIST="" HUGGINGFACE_PROXY_URL="" \
# Hunyuan
HUNYUAN_API_KEY="" HUNYUAN_MODEL_LIST="" \
# InternLM
INTERNLM_API_KEY="" INTERNLM_MODEL_LIST="" \
# Minimax
MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
# Mistral
Expand Down
2 changes: 2 additions & 0 deletions src/app/(main)/settings/llm/ProviderList/providers.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import {
GoogleProviderCard,
GroqProviderCard,
HunyuanProviderCard,
InternLMProviderCard,
MinimaxProviderCard,
MistralProviderCard,
MoonshotProviderCard,
Expand Down Expand Up @@ -85,6 +86,7 @@ export const useProviderList = (): ProviderItem[] => {
MinimaxProviderCard,
Ai360ProviderCard,
TaichuProviderCard,
InternLMProviderCard,
SiliconCloudProviderCard,
],
[
Expand Down
6 changes: 6 additions & 0 deletions src/config/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,9 @@ export const getLLMConfig = () => {

ENABLED_XAI: z.boolean(),
XAI_API_KEY: z.string().optional(),

ENABLED_INTERNLM: z.boolean(),
INTERNLM_API_KEY: z.string().optional(),
},
runtimeEnv: {
API_KEY_SELECT_MODE: process.env.API_KEY_SELECT_MODE,
Expand Down Expand Up @@ -246,6 +249,9 @@ export const getLLMConfig = () => {

ENABLED_XAI: !!process.env.XAI_API_KEY,
XAI_API_KEY: process.env.XAI_API_KEY,

ENABLED_INTERNLM: !!process.env.INTERNLM_API_KEY,
INTERNLM_API_KEY: process.env.INTERNLM_API_KEY,
},
});
};
Expand Down
4 changes: 4 additions & 0 deletions src/config/modelProviders/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import GoogleProvider from './google';
import GroqProvider from './groq';
import HuggingFaceProvider from './huggingface';
import HunyuanProvider from './hunyuan';
import InternLMProvider from './internlm';
import MinimaxProvider from './minimax';
import MistralProvider from './mistral';
import MoonshotProvider from './moonshot';
Expand Down Expand Up @@ -69,6 +70,7 @@ export const LOBE_DEFAULT_MODEL_LIST: ChatModelCard[] = [
HunyuanProvider.chatModels,
WenxinProvider.chatModels,
SenseNovaProvider.chatModels,
InternLMProvider.chatModels,
].flat();

export const DEFAULT_MODEL_PROVIDER_LIST = [
Expand Down Expand Up @@ -105,6 +107,7 @@ export const DEFAULT_MODEL_PROVIDER_LIST = [
MinimaxProvider,
Ai360Provider,
TaichuProvider,
InternLMProvider,
SiliconCloudProvider,
];

Expand All @@ -131,6 +134,7 @@ export { default as GoogleProviderCard } from './google';
export { default as GroqProviderCard } from './groq';
export { default as HuggingFaceProviderCard } from './huggingface';
export { default as HunyuanProviderCard } from './hunyuan';
export { default as InternLMProviderCard } from './internlm';
export { default as MinimaxProviderCard } from './minimax';
export { default as MistralProviderCard } from './mistral';
export { default as MoonshotProviderCard } from './moonshot';
Expand Down
42 changes: 42 additions & 0 deletions src/config/modelProviders/internlm.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import { ModelProviderCard } from '@/types/llm';

// Chat models offered by the InternLM (书生浦语) hosted API.
// NOTE(review): pricing of 0/0 mirrors the upstream free tier as committed —
// confirm against the provider's current price list before relying on it.
const internLMChatModels: ModelProviderCard['chatModels'] = [
  {
    description:
      '我们最新的模型系列,有着卓越的推理性能,支持 1M 的上下文长度以及更强的指令跟随和工具调用能力。',
    displayName: 'InternLM2.5',
    enabled: true,
    functionCall: true,
    id: 'internlm2.5-latest',
    maxOutput: 4096,
    pricing: {
      input: 0,
      output: 0,
    },
    tokens: 32_768,
  },
  {
    description: '我们仍在维护的老版本模型,有 7B、20B 多种模型参数量可选。',
    displayName: 'InternLM2 Pro Chat',
    functionCall: true,
    id: 'internlm2-pro-chat',
    maxOutput: 4096,
    pricing: {
      input: 0,
      output: 0,
    },
    tokens: 32_768,
  },
];

// Provider card for InternLM: model catalog plus service metadata.
// disableBrowserRequest: the InternLM endpoint does not support client-side
// (browser) fetch, so requests must be proxied through the server.
const InternLM: ModelProviderCard = {
  chatModels: internLMChatModels,
  checkModel: 'internlm2.5-latest',
  description:
    '致力于大模型研究与开发工具链的开源组织。为所有 AI 开发者提供高效、易用的开源平台,让最前沿的大模型与算法技术触手可及',
  disableBrowserRequest: true,
  id: 'internlm',
  modelList: { showModelFetcher: true },
  modelsUrl: 'https://internlm.intern-ai.org.cn/doc/docs/Models#%E8%8E%B7%E5%8F%96%E6%A8%A1%E5%9E%8B%E5%88%97%E8%A1%A8',
  name: 'InternLM',
  url: 'https://internlm.intern-ai.org.cn',
};

export default InternLM;
7 changes: 7 additions & 0 deletions src/libs/agent-runtime/AgentRuntime.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ import { LobeGoogleAI } from './google';
import { LobeGroq } from './groq';
import { LobeHuggingFaceAI } from './huggingface';
import { LobeHunyuanAI } from './hunyuan';
import { LobeInternLMAI } from './internlm';
import { LobeMinimaxAI } from './minimax';
import { LobeMistralAI } from './mistral';
import { LobeMoonshotAI } from './moonshot';
Expand Down Expand Up @@ -141,6 +142,7 @@ class AgentRuntime {
groq: Partial<ClientOptions>;
huggingface: { apiKey?: string; baseURL?: string };
hunyuan: Partial<ClientOptions>;
internlm: Partial<ClientOptions>;
minimax: Partial<ClientOptions>;
mistral: Partial<ClientOptions>;
moonshot: Partial<ClientOptions>;
Expand Down Expand Up @@ -335,6 +337,11 @@ class AgentRuntime {
runtimeModel = new LobeCloudflareAI(params.cloudflare ?? {});
break;
}

case ModelProvider.InternLM: {
runtimeModel = new LobeInternLMAI(params.internlm);
break;
}
}
return new AgentRuntime(runtimeModel);
}
Expand Down
Loading

0 comments on commit aaae059

Please sign in to comment.