chore: init chat params

Branch: main
Author: jialin, 11 months ago
parent 35fe459420
commit 1bb6c5fbfc

@@ -18,7 +18,7 @@ type ParamsSettingsProps = {
ref?: any;
parametersTitle?: React.ReactNode;
showModelSelector?: boolean;
modelList: Global.BaseOption<string>[];
modelList?: Global.BaseOption<string>[];
onValuesChange?: (changeValues: any, value: Record<string, any>) => void;
onModelChange?: (model: string) => void;
paramsConfig?: ParamsSchema[];
@@ -143,27 +143,29 @@ const ParamsSettings: React.FC<ParamsSettingsProps> = forwardRef(
</span>
)}
</h3>
<Form.Item
name="model"
rules={[
{
required: true,
message: intl.formatMessage(
{
id: 'common.form.rule.select'
},
{ name: intl.formatMessage({ id: 'playground.model' }) }
)
}
]}
>
<SealSelect
onChange={handleOnModelChange}
showSearch={true}
options={modelList}
label={intl.formatMessage({ id: 'playground.model' })}
></SealSelect>
</Form.Item>
{showModelSelector && (
<Form.Item
name="model"
rules={[
{
required: true,
message: intl.formatMessage(
{
id: 'common.form.rule.select'
},
{ name: intl.formatMessage({ id: 'playground.model' }) }
)
}
]}
>
<SealSelect
onChange={handleOnModelChange}
showSearch={true}
options={modelList}
label={intl.formatMessage({ id: 'playground.model' })}
></SealSelect>
</Form.Item>
)}
</>
}
{renderFields}

@@ -1,25 +1,26 @@
import useOverlayScroller from '@/hooks/use-overlay-scroller';
import { useIntl, useSearchParams } from '@umijs/max';
import { useIntl } from '@umijs/max';
import { Spin } from 'antd';
import classNames from 'classnames';
import 'overlayscrollbars/overlayscrollbars.css';
import {
forwardRef,
useEffect,
useImperativeHandle,
useMemo,
useRef,
useState
} from 'react';
import { OpenAIViewCode, Roles, generateMessages } from '../config';
import { ChatParamsConfig } from '../config/params-config';
import { MessageItem, MessageItemAction } from '../config/types';
import { LLM_METAKEYS, llmInitialValues } from '../hooks/config';
import useChatCompletion from '../hooks/use-chat-completion';
import { useInitLLmMeta } from '../hooks/use-init-meta';
import '../style/ground-left.less';
import '../style/system-message-wrap.less';
import DynamicParams from './dynamic-params';
import MessageInput from './message-input';
import MessageContent from './multiple-chat/message-content';
import SystemMessage from './multiple-chat/system-message';
import ParamsSettings from './params-settings';
import ReferenceParams from './reference-params';
import ViewCodeModal from './view-code-modal';
@@ -32,21 +33,16 @@ interface MessageProps {
const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
const { modelList } = props;
const intl = useIntl();
const [searchParams] = useSearchParams();
const selectModel = searchParams.get('model') || '';
const [parameters, setParams] = useState<any>({});
const [systemMessage, setSystemMessage] = useState('');
const [show, setShow] = useState(false);
const [collapse, setCollapse] = useState(false);
const scroller = useRef<any>(null);
const paramsRef = useRef<any>(null);
const [actions, setActions] = useState<MessageItemAction[]>([
'upload',
'delete',
'copy'
]);
const { initialize: innitializeParams } = useOverlayScroller();
const {
submitMessage,
handleStopConversation,
@@ -57,6 +53,24 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
messageList,
loading
} = useChatCompletion(scroller);
const {
handleOnValuesChange,
formRef,
paramsRef,
paramsConfig,
initialValues,
parameters
} = useInitLLmMeta(
{ modelList, isChat: true },
{
defaultValues: {
...llmInitialValues,
model: modelList[0]?.value
},
defaultParamsConfig: ChatParamsConfig,
metaKeys: LLM_METAKEYS
}
);
useImperativeHandle(ref, () => {
return {
@@ -93,8 +107,6 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
setShow(false);
};
const handleSelectModel = () => {};
const handleOnCheck = (e: any) => {
const checked = e.target.checked;
if (checked) {
@@ -104,12 +116,6 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
}
};
useEffect(() => {
if (paramsRef.current) {
innitializeParams(paramsRef.current);
}
}, [innitializeParams]);
return (
<div className="ground-left-wrapper">
<div className="ground-left">
@@ -176,7 +182,6 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
addMessage={handleAddNewMessage}
handleAbortFetch={handleStopConversation}
clearAll={handleClear}
setModelSelections={handleSelectModel}
/>
</div>
</div>
@@ -187,11 +192,13 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
ref={paramsRef}
>
<div className="box">
<ParamsSettings
setParams={setParams}
params={parameters}
selectedModel={selectModel}
<DynamicParams
ref={formRef}
onValuesChange={handleOnValuesChange}
paramsConfig={paramsConfig}
initialValues={initialValues}
modelList={modelList}
showModelSelector={true}
/>
</div>
</div>

@@ -28,13 +28,7 @@ const MultiCompare: React.FC<MultiCompareProps> = ({ modelList, loaded }) => {
const [modelSelections, setModelSelections] = useState<ModelSelectionItem[]>(
[]
);
const [globalParams, setGlobalParams] = useState<Record<string, any>>({
seed: null,
stop: null,
temperature: 1,
top_p: 1,
max_tokens: 1024
});
const [globalParams, setGlobalParams] = useState<Record<string, any>>({});
const [spans, setSpans] = useState<{
span: number;
count: number;

@@ -23,10 +23,13 @@ import React, {
import 'simplebar-react/dist/simplebar.min.css';
import { OpenAIViewCode, Roles, generateMessages } from '../../config';
import CompareContext from '../../config/compare-context';
import { ChatParamsConfig } from '../../config/params-config';
import { MessageItem, ModelSelectionItem } from '../../config/types';
import { LLM_METAKEYS, llmInitialValues } from '../../hooks/config';
import useChatCompletion from '../../hooks/use-chat-completion';
import { useInitLLmMeta } from '../../hooks/use-init-meta';
import '../../style/model-item.less';
import ParamsSettings from '../params-settings';
import DynamicParams from '../dynamic-params';
import ReferenceParams from '../reference-params';
import ViewCodeModal from '../view-code-modal';
import MessageContent from './message-content';
@@ -39,287 +42,300 @@ interface ModelItemProps {
ref: any;
}
const ModelItem: React.FC<ModelItemProps> = forwardRef(
({ model, modelList, instanceId }, ref) => {
const {
globalParams,
setGlobalParams,
setLoadingStatus,
handleDeleteModel,
handleApplySystemChangeToAll,
modelFullList,
actions
} = useContext(CompareContext);
const intl = useIntl();
const isApplyToAllModels = useRef(false);
const [systemMessage, setSystemMessage] = useState<string>('');
const [params, setParams] = useState<Record<string, any>>({
const ModelItem: React.FC<ModelItemProps> = forwardRef((props, ref) => {
const { modelList, model, instanceId } = props;
const {
globalParams,
setGlobalParams,
setLoadingStatus,
handleDeleteModel,
handleApplySystemChangeToAll,
modelFullList,
actions
} = useContext(CompareContext);
const {
handleOnValuesChange,
handleOnModelChange,
setParams,
setInitialValues,
formRef,
paramsConfig,
initialValues,
parameters
} = useInitLLmMeta(props, {
defaultValues: {
...llmInitialValues,
model: model
});
const [show, setShow] = useState(false);
const scroller = useRef<any>(null);
},
defaultParamsConfig: ChatParamsConfig,
metaKeys: LLM_METAKEYS
});
const intl = useIntl();
const isApplyToAllModels = useRef(false);
const [systemMessage, setSystemMessage] = useState<string>('');
const [show, setShow] = useState(false);
const scroller = useRef<any>(null);
const {
submitMessage,
handleAddNewMessage,
handleClear,
setMessageList,
handleStopConversation,
tokenResult,
messageList,
loading
} = useChatCompletion(scroller);
const {
submitMessage,
handleAddNewMessage,
handleClear,
setMessageList,
handleStopConversation,
tokenResult,
messageList,
loading
} = useChatCompletion(scroller);
const viewCodeMessage = useMemo(() => {
return generateMessages([
{ role: Roles.System, content: systemMessage },
...messageList
]);
}, [messageList, systemMessage]);
const viewCodeMessage = useMemo(() => {
return generateMessages([
{ role: Roles.System, content: systemMessage },
...messageList
]);
}, [messageList, systemMessage]);
const abortFetch = () => {
handleStopConversation();
setLoadingStatus(instanceId, false);
};
const abortFetch = () => {
handleStopConversation();
setLoadingStatus(instanceId, false);
};
const handleDelete = () => {
handleDeleteModel(instanceId);
};
const handleDelete = () => {
handleDeleteModel(instanceId);
};
const handleSubmit = (currentMessage: Omit<MessageItem, 'uid'>) => {
const currentMsg =
currentMessage.content || currentMessage.imgs?.length
? currentMessage
: undefined;
const handleSubmit = (currentMessage: Omit<MessageItem, 'uid'>) => {
const currentMsg =
currentMessage.content || currentMessage.imgs?.length
? currentMessage
: undefined;
submitMessage({
system: systemMessage
? { role: Roles.System, content: systemMessage }
: undefined,
current: currentMsg,
parameters: params
submitMessage({
system: systemMessage
? { role: Roles.System, content: systemMessage }
: undefined,
current: currentMsg,
parameters: parameters
});
};
const handleApplyToAllModels = (e: any) => {
isApplyToAllModels.current = e.target.checked;
if (e.target.checked) {
setGlobalParams({
..._.omit(parameters, 'model')
});
};
}
};
const handleApplyToAllModels = (e: any) => {
isApplyToAllModels.current = e.target.checked;
if (e.target.checked) {
const OnValuesChange = useCallback(
(changeValues: any, allValues: Record<string, any>) => {
handleOnValuesChange(changeValues, {
...allValues,
model: parameters.model
});
if (isApplyToAllModels.current) {
setGlobalParams({
..._.omit(params, 'model')
...allValues
});
}
};
},
[parameters, isApplyToAllModels.current]
);
const handleOnValuesChange = useCallback(
(changeValues: any, allValues: Record<string, any>) => {
if (isApplyToAllModels.current) {
setParams({
...params,
...allValues
});
setGlobalParams({
...allValues
});
} else {
setParams({
...params,
...changeValues
});
}
},
[params, isApplyToAllModels.current]
);
const handleCloseViewCode = () => {
setShow(false);
};
const handleCloseViewCode = () => {
setShow(false);
};
const onModelChange = (value: string) => {
handleOnModelChange(value);
handleClear();
};
const handleModelChange = (value: string) => {
setParams({
...params,
model: value
});
handleClear();
};
const actionItems = useMemo(() => {
const list = [
{
label: intl.formatMessage({ id: 'common.button.clear' }),
key: 'clear',
icon: <ClearOutlined />,
danger: false,
onClick: () => {
setMessageList([]);
setSystemMessage('');
}
},
{
label: intl.formatMessage({ id: 'playground.viewcode' }),
key: 'viewcode',
icon: <IconFont type="icon-code" />,
onClick: () => {
setShow(true);
}
const actionItems = useMemo(() => {
const list = [
{
label: intl.formatMessage({ id: 'common.button.clear' }),
key: 'clear',
icon: <ClearOutlined />,
danger: false,
onClick: () => {
setMessageList([]);
setSystemMessage('');
}
},
{
label: intl.formatMessage({ id: 'playground.viewcode' }),
key: 'viewcode',
icon: <IconFont type="icon-code" />,
onClick: () => {
setShow(true);
}
];
if (modelList.length > 2) {
list.push({
label: intl.formatMessage({ id: 'common.button.delete' }),
key: 'delete',
icon: <DeleteOutlined />,
danger: true,
onClick: () => {
handleDelete();
}
});
}
return list;
}, [modelList, intl]);
];
useEffect(() => {
setParams({
...params,
...globalParams
if (modelList.length > 2) {
list.push({
label: intl.formatMessage({ id: 'common.button.delete' }),
key: 'delete',
icon: <DeleteOutlined />,
danger: true,
onClick: () => {
handleDelete();
}
});
}, [globalParams]);
}
return list;
}, [modelList, intl]);
useEffect(() => {
setLoadingStatus(instanceId, loading);
return () => {
setLoadingStatus(instanceId, false);
useEffect(() => {
setParams((prev: any) => {
return {
...prev,
...globalParams
};
}, [loading]);
useImperativeHandle(ref, () => {
});
setInitialValues((prev: any) => {
return {
submit: handleSubmit,
abortFetch,
addNewMessage: handleAddNewMessage,
clear: handleClear,
setSystemMessage,
loading
...prev,
...globalParams
};
});
}, [globalParams]);
return (
<div className="model-item">
<div className="header">
<span className="title">
<Select
style={{ width: '100%' }}
variant="borderless"
options={modelFullList}
onChange={handleModelChange}
value={params.model}
labelRender={(data) => {
return (
<AutoTooltip
ghost
tooltipProps={{
placement: 'right'
}}
minWidth={60}
maxWidth={180}
>
{data.label}
</AutoTooltip>
);
}}
optionRender={(data) => {
return (
<AutoTooltip
ghost
tooltipProps={{
placement: 'right'
}}
minWidth={60}
maxWidth={180}
>
{data.label}
</AutoTooltip>
);
}}
></Select>
</span>
<ReferenceParams usage={tokenResult} scaleable></ReferenceParams>
<span className="action">
<Dropdown
menu={{
items: actionItems
}}
placement="bottomRight"
>
<Button
type="text"
icon={<MoreOutlined style={{ fontSize: '14px' }} />}
size="small"
></Button>
</Dropdown>
<Popover
placement="bottomRight"
content={
<ParamsSettings
showModelSelector={false}
setParams={setParams}
modelList={modelList}
globalParams={globalParams}
selectedModel={params.model}
onValuesChange={handleOnValuesChange}
/>
}
trigger={['click']}
arrow={false}
fresh={true}
title={
<div>
<Checkbox onChange={handleApplyToAllModels}>
{intl.formatMessage({
id: 'playground.compare.applytoall'
})}
</Checkbox>
</div>
}
>
<Button
type="text"
icon={<SettingOutlined />}
size="small"
></Button>
</Popover>
</span>
</div>
<SystemMessage
showApplyToAll={true}
systemMessage={systemMessage}
applyToAll={handleApplySystemChangeToAll}
setSystemMessage={setSystemMessage}
></SystemMessage>
<div className="content" ref={scroller}>
<div>
<MessageContent
messageList={messageList}
setMessageList={setMessageList}
actions={actions}
editable={true}
/>
<Spin spinning={loading} size="small" style={{ width: '100%' }} />
</div>
useEffect(() => {
setLoadingStatus(instanceId, loading);
return () => {
setLoadingStatus(instanceId, false);
};
}, [loading]);
useImperativeHandle(ref, () => {
return {
submit: handleSubmit,
abortFetch,
addNewMessage: handleAddNewMessage,
clear: handleClear,
setSystemMessage,
loading
};
});
return (
<div className="model-item">
<div className="header">
<span className="title">
<Select
style={{ width: '100%' }}
variant="borderless"
options={modelFullList}
onChange={onModelChange}
value={parameters.model}
labelRender={(data) => {
return (
<AutoTooltip
ghost
tooltipProps={{
placement: 'right'
}}
minWidth={60}
maxWidth={180}
>
{data.label}
</AutoTooltip>
);
}}
optionRender={(data) => {
return (
<AutoTooltip
ghost
tooltipProps={{
placement: 'right'
}}
minWidth={60}
maxWidth={180}
>
{data.label}
</AutoTooltip>
);
}}
></Select>
</span>
<ReferenceParams usage={tokenResult} scaleable></ReferenceParams>
<span className="action">
<Dropdown
menu={{
items: actionItems
}}
placement="bottomRight"
>
<Button
type="text"
icon={<MoreOutlined style={{ fontSize: '14px' }} />}
size="small"
></Button>
</Dropdown>
<Popover
placement="bottomRight"
overlayInnerStyle={{ width: 384 }}
content={
<DynamicParams
ref={formRef}
onValuesChange={OnValuesChange}
paramsConfig={paramsConfig}
initialValues={initialValues}
showModelSelector={false}
/>
}
trigger={['click']}
arrow={false}
fresh={true}
title={
<div>
<Checkbox onChange={handleApplyToAllModels}>
{intl.formatMessage({
id: 'playground.compare.applytoall'
})}
</Checkbox>
</div>
}
>
<Button
type="text"
icon={<SettingOutlined />}
size="small"
></Button>
</Popover>
</span>
</div>
<SystemMessage
showApplyToAll={true}
systemMessage={systemMessage}
applyToAll={handleApplySystemChangeToAll}
setSystemMessage={setSystemMessage}
></SystemMessage>
<div className="content" ref={scroller}>
<div>
<MessageContent
messageList={messageList}
setMessageList={setMessageList}
actions={actions}
editable={true}
/>
<Spin spinning={loading} size="small" style={{ width: '100%' }} />
</div>
<ViewCodeModal
{...OpenAIViewCode.chat}
open={show}
payload={{
messages: viewCodeMessage
}}
parameters={params}
onCancel={handleCloseViewCode}
title={intl.formatMessage({ id: 'playground.viewcode' })}
></ViewCodeModal>
</div>
);
}
);
<ViewCodeModal
{...OpenAIViewCode.chat}
open={show}
payload={{
messages: viewCodeMessage
}}
parameters={parameters}
onCancel={handleCloseViewCode}
title={intl.formatMessage({ id: 'playground.viewcode' })}
></ViewCodeModal>
</div>
);
});
export default React.memo(ModelItem);

@@ -23,7 +23,7 @@ export const llmInitialValues = {
stop: null,
temperature: 1,
top_p: 1,
max_tokens: 1024
max_tokens: null
};
export const advancedFieldsDefaultValus = {

@@ -28,7 +28,9 @@ import {
interface MessageProps {
modelList: Global.BaseOption<string>[];
model?: string;
loaded?: boolean;
isChat?: boolean;
ref?: any;
}
@@ -43,7 +45,7 @@
props: MessageProps,
options: InitMetaOptions
) => {
const { modelList } = props;
const { modelList, model, isChat } = props;
const {
metaKeys = {},
defaultValues = {},
@@ -51,7 +53,9 @@ } = options;
} = options;
const formRef = useRef<any>(null);
const [searchParams] = useSearchParams();
const defaultModel = searchParams.get('model') || modelList?.[0]?.value || '';
const defaultModel =
searchParams.get('model') ||
(isChat ? model ?? modelList?.[0]?.value : model);
const [modelMeta, setModelMeta] = useState<any>({});
const [initialValues, setInitialValues] = useState<any>({
...defaultValues,
@@ -103,18 +107,12 @@
return fields?.join(',');
}, [paramsConfig]);
const handleOnValuesChange = useCallback(
(changeValues: Record<string, any>, allValues: Record<string, any>) => {
setParams(allValues);
},
[]
);
const handleOnModelChange = useCallback(
(val: string) => {
if (!val) return;
const model = modelList.find((item) => item.value === val);
const { form: initialData, meta } = extractLLMMeta(model?.meta);
setModelMeta(meta || {});
setInitialValues({
...initialData,
@@ -124,8 +122,31 @@
...initialData,
model: val
});
const config = defaultParamsConfig.map((item) => {
return {
...item,
attrs:
item.name === 'max_tokens'
? { ...item.attrs, max: meta.max_tokens || 16 * 1024 }
: {
...item.attrs
}
};
});
setParamsConfig(config);
},
[modelList, defaultParamsConfig]
);
const handleOnValuesChange = useCallback(
(changeValues: Record<string, any>, allValues: Record<string, any>) => {
if (changeValues.model) {
handleOnModelChange(changeValues.model);
return;
}
setParams(allValues);
},
[modelList]
[handleOnModelChange]
);
useEffect(() => {
@@ -138,7 +159,7 @@
if (paramsRef.current) {
innitializeParams(paramsRef.current);
}
}, [paramsRef.current, innitializeParams]);
}, [innitializeParams]);
return {
extractLLMMeta,
