From f77bf37be88a07bf81b4a07ad52aa93f173cbff1 Mon Sep 17 00:00:00 2001
From: jialin
Date: Sat, 12 Apr 2025 11:46:32 +0800
Subject: [PATCH] chore: init max tokens for mindie models

---
 src/components/logs-viewer/styles/index.less |  2 +-
 src/pages/playground/hooks/config.ts         |  3 +-
 src/pages/playground/hooks/use-init-meta.ts  | 41 ++++++++++++++------
 3 files changed, 33 insertions(+), 13 deletions(-)

diff --git a/src/components/logs-viewer/styles/index.less b/src/components/logs-viewer/styles/index.less
index 8a3c51a0..5f0f3b4c 100644
--- a/src/components/logs-viewer/styles/index.less
+++ b/src/components/logs-viewer/styles/index.less
@@ -96,7 +96,7 @@
     }
 
     color: var(--color-logs-text);
-    font-size: var(--font-size-base);
+    font-size: var(--font-size-small);
     line-height: 22px;
     white-space: pre-wrap;
     background-color: var(--color-logs-bg);
diff --git a/src/pages/playground/hooks/config.ts b/src/pages/playground/hooks/config.ts
index 77b22e2e..ad002d19 100644
--- a/src/pages/playground/hooks/config.ts
+++ b/src/pages/playground/hooks/config.ts
@@ -7,7 +7,8 @@ export const LLM_METAKEYS: Record = {
   n_slot: 'n_slot',
   max_model_len: 'max_model_len',
   frequency_penalty: 'frequency_penalty',
-  presence_penalty: 'presence_penalty'
+  presence_penalty: 'presence_penalty',
+  max_total_tokens: 'max_total_tokens'
 };
 
 export const precisionTwoKeys = [
diff --git a/src/pages/playground/hooks/use-init-meta.ts b/src/pages/playground/hooks/use-init-meta.ts
index 7c4dea11..a8844979 100644
--- a/src/pages/playground/hooks/use-init-meta.ts
+++ b/src/pages/playground/hooks/use-init-meta.ts
@@ -75,6 +75,26 @@ export const useInitLLmMeta = (
     // use for multiple chat
     return model;
   }, [model, modelList, isChat]);
+
+  const getMaxTokens = (meta: any) => {
+    const { max_model_len, n_ctx, n_slot, max_total_tokens } = meta || {};
+
+    let max_tokens: number = 0;
+
+    if (n_ctx && n_slot) {
+      max_tokens = _.divide(n_ctx, n_slot);
+    } else if (max_model_len) {
+      max_tokens = max_model_len;
+    } else if (max_total_tokens) {
+      max_tokens = max_total_tokens;
+    }
+
+    return {
+      max_tokens: max_tokens || 16 * 1024,
+      defaultFormValue: max_tokens ? _.divide(max_tokens, 2) : 1024
+    };
+  };
+
   const extractLLMMeta = (meta: any) => {
     const towKeys = new Set(precisionTwoKeys);
     const modelMeta = meta || {};
@@ -87,23 +107,22 @@ export const useInitLLmMeta = (
       return acc;
     }, {});
 
-    let defaultMaxTokens = 1024;
-
-    if (obj.n_ctx && obj.n_slot) {
-      defaultMaxTokens = _.divide(obj.n_ctx / 2, obj.n_slot);
-    } else if (obj.max_model_len) {
-      defaultMaxTokens = obj.max_model_len / 2;
-    }
+    const tokensRes = getMaxTokens(obj);
 
     return {
       form: _.merge({}, defaultValues, {
-        ..._.omit(obj, ['n_ctx', 'n_slot', 'max_model_len']),
+        ..._.omit(obj, [
+          'n_ctx',
+          'n_slot',
+          'max_model_len',
+          'max_total_tokens'
+        ]),
         seed: obj.seed === -1 ? null : obj.seed,
-        max_tokens: defaultMaxTokens
+        max_tokens: tokensRes.defaultFormValue
       }),
       meta: {
         ...obj,
-        max_tokens: obj.max_model_len || _.divide(obj.n_ctx, obj.n_slot)
+        max_tokens: tokensRes.max_tokens
       }
     };
   };
@@ -132,7 +151,7 @@ export const useInitLLmMeta = (
           ...item,
           attrs:
             item.name === 'max_tokens'
-              ? { ...item.attrs, max: meta.max_tokens || 16 * 1024 }
+              ? { ...item.attrs, max: meta.max_tokens }
               : { ...item.attrs }