♻️ refactor: refactor and clean some code (#4629)
* ♻️ refactor: refactor a few pieces of code

* Update .gitignore
arvinxx authored Nov 6, 2024
1 parent bb25f31 commit ad3a154
Showing 7 changed files with 22 additions and 34 deletions.
12 changes: 0 additions & 12 deletions src/app/(backend)/webapi/chat/anthropic/route.ts
@@ -1,17 +1,5 @@
 import { POST as UniverseRoute } from '../[provider]/route';
 
-// due to the Chinese region does not support accessing Google
-// we need to use proxy to access it
-// refs: https://github.com/google/generative-ai-js/issues/29#issuecomment-1866246513
-// if (process.env.HTTP_PROXY_URL) {
-//   const { setGlobalDispatcher, ProxyAgent } = require('undici');
-//
-//   console.log(process.env.HTTP_PROXY_URL)
-//   setGlobalDispatcher(new ProxyAgent({ uri: process.env.HTTP_PROXY_URL }));
-// }
-
-// but undici only can be used in NodeJS
-// so if you want to use with proxy, you need comment the code below
 export const runtime = 'edge';
 
 export const preferredRegion = [
11 changes: 0 additions & 11 deletions src/app/(backend)/webapi/chat/google/route.ts
@@ -1,16 +1,5 @@
 import { POST as UniverseRoute } from '../[provider]/route';
 
-// due to the Chinese region does not support accessing Google
-// we need to use proxy to access it
-// refs: https://github.com/google/generative-ai-js/issues/29#issuecomment-1866246513
-// if (process.env.HTTP_PROXY_URL) {
-//   const { setGlobalDispatcher, ProxyAgent } = require('undici');
-//
-//   setGlobalDispatcher(new ProxyAgent({ uri: process.env.HTTP_PROXY_URL }));
-// }
-
-// but undici only can be used in NodeJS
-// so if you want to use with proxy, you need comment the code below
 export const runtime = 'edge';
 
 // due to Gemini-1.5-pro is not available in Hong Kong, we need to set the preferred region to exclude "Hong Kong (hkg1)".
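Both routes above shed the same stale comment block. For reference, the workaround those comments sketched (routing traffic through a proxy with undici) only works on the Node.js runtime and can never coexist with export const runtime = 'edge', which is why the dead block could safely go. A minimal sketch of that Node-only setup, assuming HTTP_PROXY_URL points at a reachable forward proxy:

// Node.js runtime only: undici is not available on the edge runtime,
// which is exactly why the commented-out block above was dead weight.
import { ProxyAgent, setGlobalDispatcher } from 'undici';

if (process.env.HTTP_PROXY_URL) {
  // Route all undici-backed fetch() traffic through the proxy.
  setGlobalDispatcher(new ProxyAgent({ uri: process.env.HTTP_PROXY_URL }));
}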
4 changes: 2 additions & 2 deletions src/app/(backend)/webapi/chat/wenxin/route.ts
@@ -1,5 +1,5 @@
 import { getLLMConfig } from '@/config/llm';
-import { AgentRuntime } from '@/libs/agent-runtime';
+import { AgentRuntime, ModelProvider } from '@/libs/agent-runtime';
 import LobeWenxinAI from '@/libs/agent-runtime/wenxin';
 
 import { POST as UniverseRoute } from '../[provider]/route';
@@ -26,5 +26,5 @@ export const POST = async (req: Request) =>
 
       return new AgentRuntime(instance);
     },
-    params: { provider: 'wenxin' },
+    params: { provider: ModelProvider.Wenxin },
   });
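Swapping the 'wenxin' string literal for ModelProvider.Wenxin trades a magic string for an enum member the compiler can check and refactoring tools can rename. A minimal sketch of why the two are interchangeable at runtime (the member values here are assumptions for illustration; the real ModelProvider enum in '@/libs/agent-runtime' covers every supported provider):

// Hypothetical excerpt of the enum, for illustration only.
enum ModelProvider {
  Anthropic = 'anthropic',
  Wenxin = 'wenxin',
}

// Same value as the old literal at runtime, but a typo such as
// ModelProvider.Wenxni is now a compile error instead of a silent bug.
const params = { provider: ModelProvider.Wenxin }; // { provider: 'wenxin' }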
15 changes: 9 additions & 6 deletions src/app/(main)/settings/llm/components/ProviderConfig/index.tsx
@@ -116,6 +116,7 @@ const ProviderConfig = memo<ProviderConfigProps>(
     className,
     name,
     showAceGcm = true,
+    showChecker = true,
     extra,
   }) => {
     const { t } = useTranslation('setting');
@@ -219,12 +220,14 @@
         label: t('llm.modelList.title'),
         name: [LLMProviderConfigKey, id, LLMProviderModelListKey],
       },
-      checkerItem ?? {
-        children: <Checker model={checkModel!} provider={id} />,
-        desc: t('llm.checker.desc'),
-        label: t('llm.checker.title'),
-        minWidth: undefined,
-      },
+      showChecker
+        ? (checkerItem ?? {
+            children: <Checker model={checkModel!} provider={id} />,
+            desc: t('llm.checker.desc'),
+            label: t('llm.checker.title'),
+            minWidth: undefined,
+          })
+        : undefined,
       showAceGcm && isServerMode && aceGcmItem,
     ].filter(Boolean) as FormItemProps[];
 
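The new showChecker flag slots into the pattern this component already uses: optional entries evaluate to undefined or false, and a single filter(Boolean) pass strips them before rendering. A self-contained sketch of that pattern, with the form-item type reduced to a stub:

// Stub standing in for the real FormItemProps, for illustration.
interface FormItemProps {
  label: string;
}

const showChecker = false;
const isServerMode = true;
const aceGcmItem: FormItemProps = { label: 'Ace GCM' };

const items = [
  { label: 'Model List' },
  showChecker ? { label: 'Checker' } : undefined, // dropped when hidden
  isServerMode && aceGcmItem, // a false entry is filtered out too
].filter(Boolean) as FormItemProps[]; // filter(Boolean) does not narrow the type

The cast at the end is needed because Array.prototype.filter(Boolean) does not narrow undefined or false out of the element type on its own.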
2 changes: 1 addition & 1 deletion src/server/globalConfig/index.ts
@@ -103,7 +103,7 @@ export const getServerGlobalConfig = () => {
 
     ENABLED_AI21,
     AI21_MODEL_LIST,
-
+
     ENABLED_AI360,
     AI360_MODEL_LIST,
 
8 changes: 6 additions & 2 deletions src/server/modules/AgentRuntime/index.ts
@@ -266,8 +266,12 @@ const getLlmOptionsFromPayload = (provider: string, payload: JWTPayload) => {
     case ModelProvider.SenseNova: {
       const { SENSENOVA_ACCESS_KEY_ID, SENSENOVA_ACCESS_KEY_SECRET } = getLLMConfig();
 
-      const sensenovaAccessKeyID = apiKeyManager.pick(payload?.sensenovaAccessKeyID || SENSENOVA_ACCESS_KEY_ID);
-      const sensenovaAccessKeySecret = apiKeyManager.pick(payload?.sensenovaAccessKeySecret || SENSENOVA_ACCESS_KEY_SECRET);
+      const sensenovaAccessKeyID = apiKeyManager.pick(
+        payload?.sensenovaAccessKeyID || SENSENOVA_ACCESS_KEY_ID,
+      );
+      const sensenovaAccessKeySecret = apiKeyManager.pick(
+        payload?.sensenovaAccessKeySecret || SENSENOVA_ACCESS_KEY_SECRET,
+      );
 
       const apiKey = sensenovaAccessKeyID + ':' + sensenovaAccessKeySecret;
 
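The change above is line wrapping only, with no behavior change: pick an access-key ID and secret (client payload first, server env as fallback) and join them into the single id:secret token the SenseNova runtime consumes. A rough sketch of that flow, assuming apiKeyManager.pick chooses one entry from a comma-separated key pool; pickKey below is a hypothetical stand-in, not the project's actual implementation:

// Hypothetical stand-in for apiKeyManager.pick: choose one key at random
// from a comma-separated pool, so several keys can share the load.
const pickKey = (keys = ''): string => {
  const pool = keys.split(',').map((k) => k.trim()).filter(Boolean);
  return pool[Math.floor(Math.random() * pool.length)] ?? '';
};

const id = pickKey(process.env.SENSENOVA_ACCESS_KEY_ID);
const secret = pickKey(process.env.SENSENOVA_ACCESS_KEY_SECRET);

// SenseNova credentials travel as a single colon-joined token.
const apiKey = id + ':' + secret;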
4 changes: 4 additions & 0 deletions src/types/llm.ts
@@ -122,6 +122,10 @@ export interface ModelProviderCard {
    * so provider like ollama don't need api key field
    */
   showApiKey?: boolean;
+  /**
+   * whether show checker in the provider config
+   */
+  showChecker?: boolean;
   /**
    * whether to smoothing the output
    */
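With the flag declared on ModelProviderCard, a provider definition can opt out of the connectivity checker the same way it already opts out of the API-key field via showApiKey. A hypothetical card fragment showing the intended use (Partial keeps the sketch free of the card's required fields):

import type { ModelProviderCard } from '@/types/llm';

// Hypothetical local-only provider: nothing to check against a hosted
// endpoint, so both the key field and the checker row are hidden.
const localProviderCard: Partial<ModelProviderCard> = {
  showApiKey: false,
  showChecker: false,
};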
