chore: audio model deploy

main
jialin 1 year ago
parent 5e0772fae9
commit ba76498acc

@ -14,7 +14,7 @@ const isProduction = env === 'production';
const t = Date.now();
export default defineConfig({
proxy: {
...proxy()
...proxy('http://192.168.50.2')
},
history: {
type: 'hash'

@ -148,6 +148,10 @@
gap: 20px;
}
.gap-6 {
gap: 6px;
}
.line-24 {
line-height: 24px;
}

@ -4,6 +4,7 @@ import './styles/dark.less';
interface CodeViewerProps {
code: string;
copyValue?: string;
lang: string;
autodetect?: boolean;
ignoreIllegals?: boolean;
@ -14,6 +15,7 @@ interface CodeViewerProps {
const DarkViewer: React.FC<CodeViewerProps> = (props) => {
const {
code,
copyValue,
lang,
autodetect,
ignoreIllegals,
@ -26,6 +28,7 @@ const DarkViewer: React.FC<CodeViewerProps> = (props) => {
style={props.style}
height={height}
code={code}
copyValue={copyValue}
lang={lang}
theme="dark"
autodetect={autodetect}

@ -4,6 +4,7 @@ import './styles/light.less';
interface CodeViewerProps {
code: string;
copyValue?: string;
lang: string;
autodetect?: boolean;
ignoreIllegals?: boolean;
@ -14,6 +15,7 @@ interface CodeViewerProps {
const LightViewer: React.FC<CodeViewerProps> = (props) => {
const {
code,
copyValue,
lang,
autodetect,
ignoreIllegals,
@ -27,6 +29,7 @@ const LightViewer: React.FC<CodeViewerProps> = (props) => {
style={style}
height={height}
code={code}
copyValue={copyValue}
lang={lang}
theme="light"
autodetect={autodetect}

@ -6,6 +6,7 @@ import { escapeHtml } from './utils';
interface CodeViewerProps {
code: string;
copyValue?: string;
lang: string;
autodetect?: boolean;
ignoreIllegals?: boolean;
@ -17,6 +18,7 @@ interface CodeViewerProps {
const CodeViewer: React.FC<CodeViewerProps> = (props) => {
const {
code,
copyValue,
lang,
autodetect = true,
ignoreIllegals = true,
@ -87,7 +89,7 @@ const CodeViewer: React.FC<CodeViewerProps> = (props) => {
></code>
{copyable && (
<CopyButton
text={code}
text={copyValue || code}
size="small"
style={{ color: '#abb2bf' }}
></CopyButton>

@ -10,10 +10,12 @@ const HighlightCode: React.FC<{
theme?: 'light' | 'dark';
height?: string | number;
style?: React.CSSProperties;
copyValue?: string;
}> = (props) => {
const {
style,
code,
copyValue,
lang = 'bash',
copyable = true,
theme = 'dark',
@ -26,6 +28,7 @@ const HighlightCode: React.FC<{
<CodeViewerDark
lang={lang}
code={code}
copyValue={copyValue}
copyable={copyable}
height={height}
style={style}
@ -35,6 +38,7 @@ const HighlightCode: React.FC<{
style={style}
lang={lang}
code={code}
copyValue={copyValue}
copyable={copyable}
height={height}
/>

@ -12,6 +12,7 @@ interface SystemMessageProps {
label?: React.ReactNode;
height?: number;
onChange: (e: any) => void;
onPaste?: (e: any) => void;
}
const RowTextarea: React.FC<SystemMessageProps> = (props) => {
@ -52,6 +53,9 @@ const RowTextarea: React.FC<SystemMessageProps> = (props) => {
const handleClear = () => {
onChange?.({ target: { value: '' } });
};
// Forward paste events to the parent when an onPaste handler is provided.
const handleOnPaste = (e: any) => props.onPaste?.(e);
return (
<div
@ -83,6 +87,7 @@ const RowTextarea: React.FC<SystemMessageProps> = (props) => {
onBlur={handleBlur}
allowClear={false}
onChange={handleOnChange}
onPaste={handleOnPaste}
></Input.TextArea>
</div>
}

@ -9,7 +9,7 @@ import React, { useRef, useState } from 'react';
import AudioPlayer from './audio-player';
import './styles/index.less';
const audioUrl = require('./ih.mp4');
// const audioUrl = require('./ih.mp4');
interface SpeechContentProps {
prompt: string;
@ -17,6 +17,7 @@ interface SpeechContentProps {
voice: string;
format: string;
speed: number;
audioUrl: string;
}
const SpeechItem: React.FC<SpeechContentProps> = (props) => {
console.log('porps=======', props);
@ -34,13 +35,16 @@ const SpeechItem: React.FC<SpeechContentProps> = (props) => {
return (
<div>
<div className="speech-item">
{/* <audio controls autoPlay={true} src={require('./ih.mp4')}></audio> */}
<div className="voice">
<IconFont type="icon-user_voice" className="font-size-16" />
<span className="text">{props.voice}</span>
</div>
<div className="wrapper">
<AudioPlayer {...props} audioUrl={audioUrl} ref={ref}></AudioPlayer>
<AudioPlayer
{...props}
audioUrl={props.audioUrl}
ref={ref}
></AudioPlayer>
</div>
</div>
<div className="speech-actions">
@ -75,11 +79,11 @@ const SpeechItem: React.FC<SpeechContentProps> = (props) => {
</Tooltip>
</div>
</div>
{collapsed && (
{/* {collapsed && (
<div className="prompt-box">
<div className="prompt">{props.prompt}</div>
</div>
)}
)} */}
</div>
);
};

@ -1,4 +1,5 @@
import { UploadOutlined } from '@ant-design/icons';
import { useIntl } from '@umijs/max';
import { Button, Tooltip, Upload } from 'antd';
import React from 'react';
@ -10,6 +11,7 @@ interface UploadAudioProps {
}
const UploadAudio: React.FC<UploadAudioProps> = (props) => {
const intl = useIntl();
const beforeUpload = (file: any) => {
return true;
};
@ -22,7 +24,13 @@ const UploadAudio: React.FC<UploadAudioProps> = (props) => {
[]
);
return (
<Tooltip title={`Upload an audio file, support for ${props.accept}`}>
<Tooltip
overlayInnerStyle={{ maxWidth: 265, width: 'max-content' }}
title={intl.formatMessage(
{ id: 'playground.audio.uploadfile.tips' },
{ formats: props.accept }
)}
>
<Upload
beforeUpload={beforeUpload}
onChange={handleOnChange}

@ -82,5 +82,13 @@ export default {
'playground.audio.speechtotext.tips':
'Upload an audio file or start recording',
'playground.audio.enablemic':
"Enable microphone access in your browser's settings."
"Enable microphone access in your browser's settings.",
'playground.audio.startrecord': 'Start Recording',
'playground.audio.stoprecord': 'Stop Recording',
'playground.audio.generating.tips': 'Generated text will appear here.',
'playground.audio.uploadfile.tips':
'Please upload an audio file, supported formats: {formats}',
'playground.input.multiplePaste': 'Multi-line paste',
'playground.multiple.on': 'Enable',
'playground.multiple.off': 'Disable'
};

@ -80,5 +80,13 @@ export default {
'playground.audio.speechtotext': '语音转文本',
'playground.audio.texttospeech.tips': '生成的语音将出现在这里',
'playground.audio.speechtotext.tips': '上传音频文件或开始录音',
'playground.audio.enablemic': '请允许浏览器访问麦克风,以便开始录音'
'playground.audio.enablemic': '请允许浏览器访问麦克风,以便开始录音',
'playground.audio.startrecord': '开始录音',
'playground.audio.stoprecord': '停止录音',
'playground.audio.generating.tips': '生成的文本将出现在这里',
'playground.audio.uploadfile.tips': '请上传音频文件,支持格式:{formats}',
'playground.audio.button.generate': '生成文本',
'playground.input.multiplePaste': '多行粘贴',
'playground.multiple.on': '开启',
'playground.multiple.off': '关闭'
};

@ -147,17 +147,24 @@ export async function queryModelScopeModels(
Target?: string;
SingleCriterion?: any[];
Name: string;
filterGGUF?: boolean;
tags?: string[];
tasks?: string[];
},
config?: any
) {
const Criterion = params.filterGGUF
? {
Criterion: [
{ category: 'tags', predicate: 'contains', values: ['gguf'] }
]
}
: {};
const tagsCriterion = params.tags?.map((tag: string) => {
return { category: 'tags', predicate: 'contains', values: [tag] };
});
const tasksCriterion = params.tasks?.map((task: string) => {
return { category: 'tasks', predicate: 'contains', values: [task] };
});
const Criterion =
tagsCriterion?.length || tasksCriterion?.length
? {
Criterion: [...(tagsCriterion || []), ...(tasksCriterion || [])]
}
: {};
const res = await fetch(`${MODEL_SCOPE_LIST_MODEL_API}`, {
method: 'PUT',
signal: config?.signal,
@ -299,3 +306,19 @@ export async function downloadModelFile(
)?.text();
return res;
}
/**
 * Fetch a ModelScope model's `config.json` from its master branch.
 *
 * @param params.name Full repo name, e.g. `org/model`.
 * @param options Optional request options; `options.signal` is an
 *   AbortSignal used to cancel an in-flight request.
 * @returns The parsed contents of config.json.
 * @throws Error including the HTTP status when the request fails, so
 *   callers (and logs) can tell a 404 from a 5xx.
 */
export async function downloadModelScopeModelfile(
  params: { name: string },
  options?: any
) {
  const res = await fetch(
    `${MODE_SCOPE_MODEL_FIELS_API}${params.name}/resolve/master/config.json`,
    {
      method: 'GET',
      signal: options?.signal
    }
  );
  if (!res.ok) {
    // Surface the status instead of a generic message — the old text
    // ('Network response was not ok') gave no clue which request failed.
    throw new Error(
      `Failed to fetch config.json for ${params.name}: ${res.status} ${res.statusText}`
    );
  }
  return res.json();
}

@ -35,6 +35,7 @@ interface AdvanceConfigProps {
gpuOptions: Array<any>;
action: PageActionType;
source: string;
modelTask: string;
}
const AdvanceConfig: React.FC<AdvanceConfigProps> = (props) => {

@ -16,8 +16,11 @@ import React, {
} from 'react';
import { queryGPUList } from '../apis';
import {
HuggingFaceTaskMap,
ModelscopeTaskMap,
backendOptionsMap,
modelSourceMap,
modelTaskMap,
ollamaModelOptions
} from '../config';
import { FormData, GPUListItem } from '../config/types';
@ -45,6 +48,12 @@ const DataForm: React.FC<DataFormProps> = forwardRef((props, ref) => {
const [gpuOptions, setGpuOptions] = useState<
Array<GPUListItem & { label: string; value: string }>
>([]);
const [modelTask, setModelTask] = useState<Record<string, any>>({
type: '',
value: '',
text2speech: false,
speech2text: false
});
const sourceOptions = [
{
@ -116,6 +125,27 @@ const DataForm: React.FC<DataFormProps> = forwardRef((props, ref) => {
const reg = /(-gguf)$/i;
name = _.toLower(name).replace(reg, '');
const modelTask =
HuggingFaceTaskMap.audio.includes(props.selectedModel.task) ||
ModelscopeTaskMap.audio.includes(props.selectedModel.task)
? modelTaskMap.audio
: '';
setModelTask({
value: props.selectedModel.task,
type: modelTask,
text2speech:
HuggingFaceTaskMap[modelTaskMap.textToSpeech] ===
props.selectedModel.task ||
ModelscopeTaskMap[modelTaskMap.textToSpeech] ===
props.selectedModel.task,
speech2text:
HuggingFaceTaskMap[modelTaskMap.speechToText] ===
props.selectedModel.task ||
ModelscopeTaskMap[modelTaskMap.speechToText] ===
props.selectedModel.task
});
if (SEARCH_SOURCE.includes(props.source)) {
form.setFieldsValue({
repo_id: props.selectedModel.name,
@ -321,6 +351,8 @@ const DataForm: React.FC<DataFormProps> = forwardRef((props, ref) => {
if (gpu) {
onOk({
..._.omit(formdata, ['scheduleType']),
speech_to_text: modelTask.speech2text,
text_to_speech: modelTask.text2speech,
gpu_selector: {
gpu_name: gpu.name,
gpu_index: gpu.index,
@ -329,19 +361,24 @@ const DataForm: React.FC<DataFormProps> = forwardRef((props, ref) => {
});
} else {
onOk({
..._.omit(formdata, ['scheduleType'])
..._.omit(formdata, ['scheduleType']),
speech_to_text: modelTask.speech2text,
text_to_speech: modelTask.text2speech
});
}
};
useEffect(() => {
if (action === PageAction.CREATE) {
if (action === PageAction.EDIT) return;
if (modelTask.type === modelTaskMap.audio) {
form.setFieldValue('backend', backendOptionsMap.voxBox);
} else {
form.setFieldValue(
'backend',
isGGUF ? backendOptionsMap.llamaBox : backendOptionsMap.vllm
);
}
}, [isGGUF]);
}, [isGGUF, modelTask]);
useEffect(() => {
handleOnSelectModel();
}, [props.selectedModel.name]);
@ -449,6 +486,7 @@ const DataForm: React.FC<DataFormProps> = forwardRef((props, ref) => {
form={form}
gpuOptions={gpuOptions}
isGGUF={isGGUF}
modelTask={modelTask}
action={action}
source={props.source}
></AdvanceConfig>

@ -23,7 +23,7 @@ interface HFModelItemProps {
source?: string;
tags?: string[];
}
const warningTask = ['audio', 'video'];
const warningTask = ['video'];
const SUPPORTEDSOURCE = [
modelSourceMap.huggingface_value,

@ -10,11 +10,18 @@ import { useIntl } from '@umijs/max';
import { Button, Empty, Spin, Tag, Tooltip } from 'antd';
import { some } from 'lodash';
import 'overlayscrollbars/overlayscrollbars.css';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import React, {
useCallback,
useEffect,
useMemo,
useRef,
useState
} from 'react';
import SimpleBar from 'simplebar-react';
import 'simplebar-react/dist/simplebar.min.css';
import {
downloadModelFile,
downloadModelScopeModelfile,
queryHuggingfaceModelDetail,
queryModelScopeModelDetail
} from '../apis';
@ -37,10 +44,32 @@ const ModelCard: React.FC<{
const [readmeText, setReadmeText] = useState<string | null>(null);
const requestToken = useRef<any>(null);
const axiosTokenRef = useRef<any>(null);
const loadConfigTokenRef = useRef<any>(null);
const loadConfigJsonTokenRef = useRef<any>(null);
const [isGGUFModel, setIsGGUFModel] = useState<boolean>(false);
const [loading, setLoading] = useState<boolean>(false);
const loadFile = async (repo: string, sha: string) => {
// Task tags to display for the selected model, normalized across sources:
// Hugging Face exposes a single `pipeline_tag`; ModelScope exposes a
// `Tasks` array of { Name } objects (filtered to drop empty names).
const modelTags = useMemo(() => {
  if (modelSource === modelSourceMap.huggingface_value) {
    return modelData?.pipeline_tag ? [modelData?.pipeline_tag] : [];
  }
  if (modelSource === modelSourceMap.modelscope_value) {
    return modelData?.Tasks?.map((task: any) => task?.Name)?.filter(
      (val: string) => val
    );
  }
  return [];
}, [modelSource, modelData]);
// Model architecture label shown in the "architecture" tag.
// NOTE(review): `ModelType?.[0]` looks like a ModelScope field — confirm
// it ever exists on Hugging Face model data, otherwise the fallback in
// the huggingface branch is dead.
// NOTE(review): returns undefined when neither source matches — callers
// must treat this memo as optional.
const modelType = useMemo(() => {
  if (modelSource === modelSourceMap.huggingface_value) {
    return modelData?.config?.model_type || modelData?.ModelType?.[0];
  }
  if (modelSource === modelSourceMap.modelscope_value) {
    return modelData?.ModelType?.[0];
  }
}, [modelData, modelSource]);
const loadFile = useCallback(async (repo: string, sha: string) => {
try {
axiosTokenRef.current?.abort?.();
axiosTokenRef.current = new AbortController();
@ -54,12 +83,32 @@ const ModelCard: React.FC<{
signal: axiosTokenRef.current.signal
}
);
console.log('readme++++++++', res);
return res || '';
} catch (error) {
return '';
}
};
}, []);
// Fetch the repo's config.json at the given revision, aborting any
// previous in-flight request first. Returns null on any failure
// (including an abort) so callers can treat a missing config as
// "no extra metadata" rather than an error.
const loadConfig = useCallback(async (repo: string, sha: string) => {
  try {
    loadConfigTokenRef.current?.abort?.();
    loadConfigTokenRef.current = new AbortController();
    const res = await downloadModelFile(
      {
        repo,
        revision: sha,
        path: 'config.json'
      },
      {
        signal: loadConfigTokenRef.current.signal
      }
    );
    return res || null;
  } catch (error) {
    // Best-effort fetch: swallow errors (removed leftover debug logging).
    return null;
  }
}, []);
const removeMetadata = useCallback((str: string) => {
let indexes = [];
@ -81,6 +130,12 @@ const ModelCard: React.FC<{
// huggingface model card data
const getHuggingfaceModelDetail = async () => {
try {
const configjson = await loadConfig(
props.selectedModel.name,
'main'
).catch(() => {
return null;
});
const [modelcard, readme] = await Promise.all([
queryHuggingfaceModelDetail(
{ repo: props.selectedModel.name },
@ -92,11 +147,13 @@ const ModelCard: React.FC<{
]);
setModelData(modelcard);
// remove the meta data from readme
const newReadme = removeMetadata(readme);
setReadmeText(newReadme);
const isGGUF = modelcard.tags?.includes('gguf');
console.log('modelData++++++++++++', isGGUF);
setIsGGUF(isGGUF);
setIsGGUFModel(isGGUF);
} catch (error) {
@ -107,8 +164,30 @@ const ModelCard: React.FC<{
}
};
// Fetch a ModelScope model's config.json, aborting any previous
// in-flight request first. Returns null on failure so callers can
// treat a missing config as "no extra metadata".
const loadModelscopeModelConfig = useCallback(async (name: string) => {
  try {
    loadConfigJsonTokenRef.current?.abort?.();
    loadConfigJsonTokenRef.current = new AbortController();
    return await downloadModelScopeModelfile(
      {
        name: name
      },
      {
        // Fix: AbortController exposes `.signal`, not `.token`.
        // Passing `.token` (undefined) silently made this request
        // non-cancellable.
        signal: loadConfigJsonTokenRef.current.signal
      }
    );
  } catch (error) {
    return null;
  }
}, []);
const getModelScopeModelDetail = async () => {
try {
const configjson = await loadModelscopeModelConfig(
props.selectedModel.name
).catch(() => {
return null;
});
const data = await queryModelScopeModelDetail(
{
name: props.selectedModel.name
@ -121,6 +200,7 @@ const ModelCard: React.FC<{
...data?.Data,
name: `${data.Data?.Path}/${data.Data?.Name}`
});
console.log('modelData++++++++++++', configjson, data?.Data);
setReadmeText(data?.Data?.ReadMeContent);
const isGGUF = some(
data?.Data?.Tags,
@ -230,6 +310,8 @@ const ModelCard: React.FC<{
return () => {
requestToken.current?.cancel?.();
axiosTokenRef.current?.abort?.();
loadConfigTokenRef.current?.abort?.();
loadConfigJsonTokenRef.current?.abort?.();
};
}, []);
@ -243,13 +325,13 @@ const ModelCard: React.FC<{
{modelData ? (
<div className="model-card-wrap">
<div className="flex-center">
{modelData.config?.model_type && (
{modelType && (
<Tag className="tag-item" color="gold">
<span style={{ opacity: 0.65 }}>
<span className="m-r-5">
{intl.formatMessage({ id: 'models.architecture' })}:
</span>
{modelData.config?.model_type}
{modelType}
</span>
</Tag>
)}
@ -258,6 +340,14 @@ const ModelCard: React.FC<{
<span style={{ opacity: 0.65 }}>GGUF</span>
</Tag>
)}
{!!modelTags.length &&
modelTags.map((tag: string, index: number) => {
return (
<Tag className="tag-item" color="geekblue" key={index}>
<span style={{ opacity: 0.65 }}>{tag}</span>
</Tag>
);
})}
</div>
{readmeText && isGGUFModel && (
<div

@ -5,9 +5,12 @@ import _ from 'lodash';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import { queryHuggingfaceModels, queryModelScopeModels } from '../apis';
import {
HuggingFaceTaskMap,
ModelScopeSortType,
ModelSortType,
ModelscopeTaskMap,
modelSourceMap,
modelTaskMap,
ollamaModelOptions
} from '../config';
import SearchStyle from '../style/search-result.less';
@ -23,7 +26,7 @@ interface SearchInputProps {
const SearchModel: React.FC<SearchInputProps> = (props) => {
const intl = useIntl();
const { modelSource, setLoadingModel, onSourceChange, onSelectModel } = props;
const { modelSource, setLoadingModel, onSelectModel } = props;
const [dataSource, setDataSource] = useState<{
repoOptions: any[];
loading: boolean;
@ -44,6 +47,7 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
const axiosTokenRef = useRef<any>(null);
const searchInputRef = useRef<any>('');
const filterGGUFRef = useRef<boolean | undefined>();
const filterTaskRef = useRef<string>('');
const modelFilesSortOptions = useRef<any[]>([
{
label: intl.formatMessage({ id: 'models.sort.trending' }),
@ -64,7 +68,6 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
]);
const handleOnSelectModel = useCallback((item: any) => {
console.log('handleOnSelectModel', item);
onSelectModel(item);
setCurrent(item.id);
}, []);
@ -78,7 +81,7 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
query: searchInputRef.current || '',
sort: sort,
tags: filterGGUFRef.current ? ['gguf'] : [],
task
task: HuggingFaceTaskMap[filterTaskRef.current] || task
}
};
const data = await queryHuggingfaceModels(params, {
@ -102,7 +105,10 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
try {
const params = {
Name: `${searchInputRef.current}`,
filterGGUF: filterGGUFRef.current,
tags: filterGGUFRef.current ? ['gguf'] : [],
tasks: filterTaskRef.current
? ([ModelscopeTaskMap[filterTaskRef.current]] as string[])
: [],
SortBy: ModelScopeSortType[sort]
};
const data = await queryModelScopeModels(params, {
@ -213,6 +219,11 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
handleOnSearchRepo();
};
// Persist the selected task filter and re-run the repo search.
// NOTE(review): `handleOnSearchRepo` comes from the enclosing scope but
// is not in the dependency array — confirm it is stable across renders,
// otherwise this callback may invoke a stale version.
const handleFilterTaskChange = useCallback((value: string) => {
  filterTaskRef.current = value;
  handleOnSearchRepo();
}, []);
const renderHFSearch = () => {
return (
<>
@ -222,15 +233,22 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
modelSource={modelSource}
></SearchInput>
<div className={SearchStyle.filter}>
<span>
{/* <span>
<span className="value">
{intl.formatMessage(
{ id: 'models.search.result' },
{ count: dataSource.repoOptions.length }
)}
</span>
</span>
<span>
</span> */}
<span
style={{
flex: 1,
display: 'flex',
justifyContent: 'space-between',
alignItems: 'center'
}}
>
<Checkbox
onChange={handleFilterGGUFChange}
className="m-r-5"
@ -253,21 +271,43 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
<InfoCircleOutlined className="m-l-4" />
</Tooltip>
</Checkbox>
<Select
allowClear
value={dataSource.sortType}
onChange={handleSortChange}
labelRender={({ label }) => {
return (
<span>
{intl.formatMessage({ id: 'model.deploy.sort' })}: {label}
</span>
);
}}
options={modelFilesSortOptions.current}
size="middle"
style={{ width: '150px' }}
></Select>
<span className="flex gap-6">
<Select
allowClear
value={filterTaskRef.current}
onChange={handleFilterTaskChange}
options={[
{
label: intl.formatMessage({
id: 'playground.audio.texttospeech'
}),
value: modelTaskMap.textToSpeech
},
{
label: intl.formatMessage({
id: 'playground.audio.speechtotext'
}),
value: modelTaskMap.speechToText
}
]}
size="middle"
style={{ width: '140px' }}
></Select>
<Select
value={dataSource.sortType}
onChange={handleSortChange}
labelRender={({ label }) => {
return (
<span>
{intl.formatMessage({ id: 'model.deploy.sort' })}: {label}
</span>
);
}}
options={modelFilesSortOptions.current}
size="middle"
style={{ width: '140px' }}
></Select>
</span>
</span>
</div>
</>

@ -73,6 +73,38 @@ export const backendOptionsMap = {
voxBox: 'vox-box'
};
// Internal task identifiers used throughout the deploy UI.
export const modelTaskMap = {
  textToSpeech: 'text-to-speech',
  speechToText: 'speech-to-text',
  textToText: 'text-to-text',
  textToImage: 'text-to-image',
  audio: 'audio',
  image: 'image'
};
// Internal task id -> ModelScope task name.
export const ModelscopeTaskMap = {
  [modelTaskMap.textToSpeech]: 'text-to-speech',
  // ModelScope spells this 'auto-speech-recognition' (unlike HF's
  // 'automatic-speech-recognition' below).
  [modelTaskMap.speechToText]: 'auto-speech-recognition',
  // NOTE(review): 'TextToText' casing differs from every other value
  // here — confirm this matches the ModelScope API before relying on it.
  [modelTaskMap.textToText]: 'TextToText',
  [modelTaskMap.textToImage]: 'text-to-image',
  // 'audio' groups all audio-related ModelScope tasks.
  audio: ['text-to-speech', 'auto-speech-recognition']
};
// Internal task id -> Hugging Face pipeline tag.
export const HuggingFaceTaskMap = {
  [modelTaskMap.textToSpeech]: 'text-to-speech',
  [modelTaskMap.speechToText]: 'automatic-speech-recognition',
  // NOTE(review): Hugging Face's pipeline tag is 'text2text-generation';
  // 'text-2-text' looks wrong — verify before relying on this entry.
  [modelTaskMap.textToText]: 'text-2-text',
  [modelTaskMap.textToImage]: 'text-to-image',
  // 'audio' groups all audio-related HF pipeline tags.
  audio: ['text-to-speech', 'automatic-speech-recognition']
};
// Known audio model families and the spellings they appear under in tags.
export const AudioModeTypeMap = {
  FunASR: ['FunASR', 'funasr', 'fun-asr', 'fun_asr'],
  Bark: ['Bark', 'bark'],
  Whisper: ['Whisper', 'whisper'],
  CosyVoice: ['CosyVoice', 'cosyvoice', 'cosy-voice', 'cosy_voice']
};
export const modelSourceMap: Record<string, string> = {
huggingface: 'Hugging Face',
ollama_library: 'Ollama Library',

@ -1,3 +1,4 @@
import { MODELS_API } from '@/pages/llmodels/apis';
import { request } from '@umijs/max';
export const CHAT_API = '/v1-openai/chat/completions';
@ -96,17 +97,37 @@ export const textToSpeech = async (params: any, options?: any) => {
if (!res.ok) {
throw new Error('Network response was not ok');
}
return res.json();
const audioBlob = await res.blob();
const audioUrl = URL.createObjectURL(audioBlob);
return audioUrl;
};
// export const speechToText = async (params: any, options?: any) => {
// const res = await fetch(AUDIO_SPEECH_TO_TEXT_API, {
// method: 'POST',
// body: JSON.stringify(params.data),
// signal: params.signal
// });
// if (!res.ok) {
// throw new Error('Network response was not ok');
// }
// return res.json();
// };
export const speechToText = async (params: any, options?: any) => {
const res = await fetch(AUDIO_SPEECH_TO_TEXT_API, {
return request(AUDIO_SPEECH_TO_TEXT_API, {
method: 'POST',
body: JSON.stringify(params.data),
signal: params.signal
data: params.data,
headers: {
'Content-Type': 'multipart/form-data'
}
});
};
/**
 * Fetch the list of voices a TTS model supports.
 * `skipErrorHandler` lets callers handle failures themselves.
 *
 * Removed the unreachable leftover lines after the return (they
 * referenced an undeclared `res` from the old fetch-based
 * implementation and could never execute).
 */
export const queryModelVoices = async (params: { name: string }) => {
  return request(`${MODELS_API}/${params.name}/voices`, {
    method: 'GET',
    skipErrorHandler: true
  });
};

@ -1,4 +1,5 @@
import { AudioOutlined } from '@ant-design/icons';
import { useIntl } from '@umijs/max';
import { Button, Space, Tooltip } from 'antd';
import React, { useCallback, useEffect, useRef, useState } from 'react';
// import '../style/audio-input.less';
@ -19,6 +20,7 @@ interface AudioInputProps {
}
const AudioInput: React.FC<AudioInputProps> = (props) => {
const intl = useIntl();
const [audioOn, setAudioOn] = useState(false);
const [isRecording, setIsRecording] = useState(false);
const [audioPermission, setAudioPermission] = useState(true);
@ -150,9 +152,9 @@ const AudioInput: React.FC<AudioInputProps> = (props) => {
stopRecording();
return;
}
try {
await EnableAudio();
console.log('audioStream:', audioStream.current);
audioRecorder.current = new MediaRecorder(audioStream.current);
const audioChunks: any[] = [];
@ -172,7 +174,7 @@ const AudioInput: React.FC<AudioInputProps> = (props) => {
const audioUrl = URL.createObjectURL(audioBlob);
handleAudioData({
chunks: audioChunks,
chunks: audioBlob,
size: audioBlob.size,
type: audioBlob.type,
url: audioUrl,
@ -188,8 +190,9 @@ const AudioInput: React.FC<AudioInputProps> = (props) => {
startTime.current = Date.now();
audioRecorder.current.start(1000);
generateVisualData();
console.log('start recording');
} catch (error) {
// console.log(error);
console.log('error====', error);
}
};
@ -208,7 +211,13 @@ const AudioInput: React.FC<AudioInputProps> = (props) => {
<div className="audio-input">
<Space size={40} className="btns">
{
<Tooltip title="Start Recording">
<Tooltip
title={
isRecording
? intl.formatMessage({ id: 'playground.audio.stoprecord' })
: intl.formatMessage({ id: 'playground.audio.startrecord' })
}
>
<div
style={{
display: 'flex',
@ -229,17 +238,6 @@ const AudioInput: React.FC<AudioInputProps> = (props) => {
</div>
</Tooltip>
}
{/* {isRecording && (
<Tooltip title="Stop Recording">
<Button
shape="circle"
icon={<IconFont type="icon-stop2"></IconFont>}
size="middle"
type={props.type ?? 'text'}
onClick={stopRecording}
></Button>
</Tooltip>
)} */}
</Space>
</div>
);

@ -32,6 +32,7 @@ type ParamsSettingsProps = {
modelList: Global.BaseOption<string>[];
onValuesChange?: (changeValues: any, value: Record<string, any>) => void;
setParams: (params: any) => void;
onModelChange?: (model: string) => void;
globalParams?: Record<string, any>;
paramsConfig?: ParamsSchema[];
initialValues?: Record<string, any>;
@ -43,6 +44,7 @@ const ParamsSettings: React.FC<ParamsSettingsProps> = forwardRef(
{
setParams,
onValuesChange,
onModelChange,
selectedModel,
globalParams,
initialValues,
@ -84,6 +86,13 @@ const ParamsSettings: React.FC<ParamsSettingsProps> = forwardRef(
}
}, [modelList, showModelSelector, selectedModel, initialValues]);
const handleModelChange = useCallback(
(value: string) => {
onModelChange?.(value);
},
[onModelChange]
);
const handleOnFinish = (values: any) => {
console.log('handleOnFinish', values);
};
@ -239,7 +248,7 @@ const ParamsSettings: React.FC<ParamsSettingsProps> = forwardRef(
}
return null;
});
}, [paramsConfig, params]);
}, [paramsConfig, params, intl]);
return (
<Form
@ -272,6 +281,7 @@ const ParamsSettings: React.FC<ParamsSettingsProps> = forwardRef(
]}
>
<SealSelect
onChange={handleModelChange}
showSearch={true}
options={modelList}
label={intl.formatMessage({ id: 'playground.model' })}

@ -11,7 +11,7 @@ import {
SendOutlined
} from '@ant-design/icons';
import { useIntl, useSearchParams } from '@umijs/max';
import { Button, Segmented, Tabs } from 'antd';
import { Button, Checkbox, Segmented, Tabs } from 'antd';
import classNames from 'classnames';
import { PCA } from 'ml-pca';
import 'overlayscrollbars/overlayscrollbars.css';
@ -72,8 +72,15 @@ const GroundEmbedding: React.FC<MessageProps> = forwardRef((props, ref) => {
>([]);
const [outputType, setOutputType] = useState<string>('chart');
const [outputHeight, setOutputHeight] = useState<number>(180);
const [embeddingData, setEmbeddingData] = useState<string>('');
const [embeddingData, setEmbeddingData] = useState<{
code: string;
copyValue: string;
}>({
code: '',
copyValue: ''
});
const [lessTwoInput, setLessTwoInput] = useState<boolean>(false);
const multiplePasteEnable = useRef<boolean>(true);
const [textList, setTextList] = useState<
{ text: string; uid: number | string; name: string }[]
@ -97,6 +104,7 @@ const GroundEmbedding: React.FC<MessageProps> = forwardRef((props, ref) => {
const { initialize: innitializeParams, updateScrollerPosition } =
useOverlayScroller();
const formRef = useRef<any>(null);
useImperativeHandle(ref, () => {
return {
@ -137,7 +145,15 @@ const GroundEmbedding: React.FC<MessageProps> = forwardRef((props, ref) => {
};
});
setScatterData(list);
setEmbeddingData(JSON.stringify(embeddings, null, 2));
const embeddingJson = embeddings.map((item, index) => {
item.embedding = item.embedding.slice(0, 5);
item.embedding.push(null);
return item;
});
setEmbeddingData({
code: JSON.stringify(embeddingJson, null, 2).replace(/null/g, '...'),
copyValue: JSON.stringify(embeddings, null, 2)
});
} catch (e) {
console.log('error:', e);
}
@ -155,6 +171,7 @@ const GroundEmbedding: React.FC<MessageProps> = forwardRef((props, ref) => {
};
const submitMessage = async (current?: { role: string; content: string }) => {
await formRef.current?.form.validateFields();
if (!parameters.model) return;
try {
@ -255,6 +272,25 @@ const GroundEmbedding: React.FC<MessageProps> = forwardRef((props, ref) => {
setTextList(list);
};
// Multi-line paste (embedding inputs): when enabled, split the pasted
// text on newlines and expand it into one input row per line, starting
// at the row with the given index. Each row gets a fresh uid from the
// input list ref.
// NOTE(review): rows after `index` are discarded by the slice below —
// confirm replacing the tail (rather than inserting) is intended.
const handleOnPaste = useCallback(
  (e: any, index: number) => {
    if (!multiplePasteEnable.current) return;
    const text = e.clipboardData.getData('text');
    if (text) {
      const pastedRows = text.split('\n').map((item: string) => {
        return {
          text: item,
          uid: inputListRef.current?.setMessageId(),
          name: ''
        };
      });
      setTextList([...textList.slice(0, index), ...pastedRows]);
    }
  },
  [textList]
);
const handleClearDocuments = () => {
setTextList([
{
@ -300,7 +336,8 @@ const GroundEmbedding: React.FC<MessageProps> = forwardRef((props, ref) => {
<HighlightCode
height={outputHeight - 20}
theme="light"
code={embeddingData}
code={embeddingData.code}
copyValue={embeddingData.copyValue}
lang="json"
copyable={true}
style={{ marginBottom: 0 }}
@ -355,7 +392,37 @@ const GroundEmbedding: React.FC<MessageProps> = forwardRef((props, ref) => {
</span>
</div>
</h3>
<div className="flex gap-10">
<div className="flex-center gap-10">
<Button className="flex-center" size="middle">
<Checkbox
defaultChecked={multiplePasteEnable.current}
onChange={(e: any) => {
multiplePasteEnable.current = e.target.checked;
}}
>
{intl.formatMessage({
id: 'playground.input.multiplePaste'
})}
</Checkbox>
</Button>
{/* <Tooltip
title={intl.formatMessage({
id: 'playground.input.multiplePaste'
})}
>
<Switch
checkedChildren={intl.formatMessage({
id: 'playground.multiple.on'
})}
unCheckedChildren={intl.formatMessage({
id: 'playground.multiple.off'
})}
defaultChecked={multiplePasteEnable.current}
onChange={(checked) => {
multiplePasteEnable.current = checked;
}}
/>
</Tooltip> */}
<Button size="middle" onClick={handleAddText}>
<PlusOutlined />
{intl.formatMessage({ id: 'playground.embedding.addtext' })}
@ -399,6 +466,7 @@ const GroundEmbedding: React.FC<MessageProps> = forwardRef((props, ref) => {
ref={inputListRef}
textList={textList}
onChange={handleTextListChange}
onPaste={handleOnPaste}
></InputList>
<div style={{ marginTop: 8 }}>
<FileList
@ -505,6 +573,7 @@ const GroundEmbedding: React.FC<MessageProps> = forwardRef((props, ref) => {
>
<div className="box">
<DynamicParams
ref={formRef}
setParams={setParams}
paramsConfig={paramsConfig}
initialValues={initialValues}

@ -283,6 +283,7 @@ const GroundImages: React.FC<MessageProps> = forwardRef((props, ref) => {
if (item.b64_json) {
imgItem.dataUrl += item.b64_json;
}
const progress = _.round(item.progress, 0);
newImageList[item.index] = {
dataUrl: imgItem.dataUrl,
height: '100%',
@ -291,8 +292,8 @@ const GroundImages: React.FC<MessageProps> = forwardRef((props, ref) => {
maxWidth: `${imgSize[0]}px`,
uid: imgItem.uid,
span: imgItem.span,
loading: _.round(item.progress, 0) < 100,
progress: _.round(item.progress, 0)
loading: progress < 100,
progress: progress
};
});
setImageList([...newImageList]);

@ -2,7 +2,7 @@ import useOverlayScroller from '@/hooks/use-overlay-scroller';
import useRequestToken from '@/hooks/use-request-token';
import { ClearOutlined, PlusOutlined, SendOutlined } from '@ant-design/icons';
import { useIntl, useSearchParams } from '@umijs/max';
import { Button, Input, Spin, Tag } from 'antd';
import { Button, Checkbox, Input, Spin, Tag } from 'antd';
import classNames from 'classnames';
import _ from 'lodash';
import 'overlayscrollbars/overlayscrollbars.css';
@ -76,6 +76,8 @@ const GroundReranker: React.FC<MessageProps> = forwardRef((props, ref) => {
const paramsRef = useRef<any>(null);
const messageListLengthCache = useRef<number>(0);
const requestToken = useRef<any>(null);
const formRef = useRef<any>(null);
const multiplePasteEnable = useRef<boolean>(true);
const [fileList, setFileList] = useState<
{
text: string;
@ -186,6 +188,7 @@ const GroundReranker: React.FC<MessageProps> = forwardRef((props, ref) => {
};
const submitMessage = async (current?: { content: string }) => {
await formRef.current?.form.validateFields();
if (!parameters.model) return;
try {
setLoading(true);
@ -340,6 +343,26 @@ const GroundReranker: React.FC<MessageProps> = forwardRef((props, ref) => {
},
[]
);
// Multi-line paste (reranker documents): when enabled, split the pasted
// text on newlines and expand it into one document row per line,
// starting at the row with the given index. Each row gets a fresh uid
// from the input list ref.
// NOTE(review): rows after `index` are discarded by the slice below —
// confirm replacing the tail (rather than inserting) is intended.
const handleOnPaste = useCallback(
  (e: any, index: number) => {
    if (!multiplePasteEnable.current) return;
    const text = e.clipboardData.getData('text');
    if (text) {
      const pastedRows = text.split('\n').map((item: string) => {
        return {
          text: item,
          uid: inputListRef.current?.setMessageId(),
          name: ''
        };
      });
      setTextList([...textList.slice(0, index), ...pastedRows]);
    }
  },
  [textList]
);
const handleOnSort = useCallback(
(list: { text: string; uid: number | string; name: string }[]) => {
const newList = list?.map((item) => {
@ -435,7 +458,37 @@ const GroundReranker: React.FC<MessageProps> = forwardRef((props, ref) => {
</span>
)}
</span>
<div className="flex gap-10">
<div className="flex-center gap-10">
<Button className="flex-center" size="middle">
<Checkbox
defaultChecked={multiplePasteEnable.current}
onChange={(e: any) => {
multiplePasteEnable.current = e.target.checked;
}}
>
{intl.formatMessage({
id: 'playground.input.multiplePaste'
})}
</Checkbox>
</Button>
{/* <Tooltip
title={intl.formatMessage({
id: 'playground.input.multiplePaste'
})}
>
<Switch
checkedChildren={intl.formatMessage({
id: 'playground.multiple.on'
})}
unCheckedChildren={intl.formatMessage({
id: 'playground.multiple.off'
})}
defaultChecked={multiplePasteEnable.current}
onChange={(checked) => {
multiplePasteEnable.current = checked;
}}
/>
</Tooltip> */}
<Button size="middle" onClick={handleAddText}>
<PlusOutlined />
{intl.formatMessage({ id: 'playground.embedding.addtext' })}
@ -460,6 +513,7 @@ const GroundReranker: React.FC<MessageProps> = forwardRef((props, ref) => {
onChange={handleTextListChange}
onSort={handleOnSort}
extra={renderPercent}
onPaste={handleOnPaste}
></InputList>
</div>
</div>
@ -488,6 +542,7 @@ const GroundReranker: React.FC<MessageProps> = forwardRef((props, ref) => {
>
<div className="box">
<DynamicParams
ref={formRef}
setParams={setParams}
params={parameters}
paramsConfig={paramsConfig}

@ -4,7 +4,7 @@ import IconFont from '@/components/icon-font';
import UploadAudio from '@/components/upload-audio';
import useOverlayScroller from '@/hooks/use-overlay-scroller';
import { readAudioFile } from '@/utils/load-audio-file';
import { AudioOutlined, ThunderboltOutlined } from '@ant-design/icons';
import { AudioOutlined, SendOutlined } from '@ant-design/icons';
import { useIntl, useSearchParams } from '@umijs/max';
import { Button, Spin, Tag, Tooltip } from 'antd';
import classNames from 'classnames';
@ -18,7 +18,7 @@ import {
useRef,
useState
} from 'react';
import { CHAT_API, speechToText } from '../apis';
import { speechToText } from '../apis';
import { RealtimeParamsConfig as paramsConfig } from '../config/params-config';
import { MessageItem } from '../config/types';
import '../style/ground-left.less';
@ -26,7 +26,6 @@ import '../style/speech-to-text.less';
import '../style/system-message-wrap.less';
import AudioInput from './audio-input';
import DynamicParams from './dynamic-params';
import MessageContent from './multiple-chat/message-content';
import ViewCodeModal from './view-code-modal';
interface MessageProps {
@ -40,17 +39,17 @@ const initialValues = {
};
const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
const intl = useIntl();
const { modelList } = props;
const messageId = useRef<number>(0);
const [messageList, setMessageList] = useState<MessageItem[]>([
{
content: 'Generating text content...',
content: '',
title: '',
role: '',
uid: messageId.current
}
]);
const intl = useIntl();
const [searchParams] = useSearchParams();
const selectModel = searchParams.get('model') || '';
const [parameters, setParams] = useState<any>({});
@ -70,6 +69,7 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
});
const [isRecording, setIsRecording] = useState(false);
const [recordEnd, setRecordEnd] = useState(false);
const formRef = useRef<any>(null);
const { initialize, updateScrollerPosition } = useOverlayScroller();
const { initialize: innitializeParams } = useOverlayScroller();
@ -95,7 +95,8 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
setLoading(false);
};
const submitMessage = async (current?: { role: string; content: string }) => {
const submitMessage = async () => {
await formRef.current?.form.validateFields();
if (!parameters.model) return;
try {
setLoading(true);
@ -106,17 +107,12 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
controllerRef.current = new AbortController();
const signal = controllerRef.current.signal;
const chatParams = {
const params = {
...parameters,
stream: true,
stream_options: {
include_usage: true
}
file: new File([audioData.data], audioData.name)
};
const result: any = await speechToText({
data: chatParams,
url: CHAT_API,
signal
data: params
});
if (result?.error) {
@ -129,16 +125,18 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
}
setMessageList([
{
content: 'Generating text content...',
content: result.text,
title: '',
role: '',
uid: messageId.current
}
]);
} catch (error) {
// console.log('error:', error);
console.log('error:', error);
} finally {
setLoading(false);
setRecordEnd(false);
setIsRecording(false);
}
};
const handleClear = () => {
@ -160,6 +158,7 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
return {
url: data.url,
name: data.name,
data: data.chunks,
duration: data.duration
};
});
@ -177,7 +176,6 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
const handleUploadChange = useCallback(
async (data: { file: any; fileList: any }) => {
const res = await readAudioFile(data.file.originFileObj);
console.log('res=======', res);
setAudioData(res);
setRecordEnd(true);
},
@ -195,20 +193,12 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
const handleOnRecord = useCallback((val: boolean) => {
setIsRecording(val);
setAudioData(null);
console.log('data===', val);
}, []);
const handleOnGenerate = useCallback(() => {
setMessageList([
{
content: 'Generating text content...',
title: '',
role: '',
uid: messageId.current
}
]);
setRecordEnd(false);
setIsRecording(false);
}, []);
// Submit the recorded/uploaded audio for transcription. Awaiting keeps the
// promise chained so rejections surface here instead of being orphaned.
const handleOnGenerate = async () => {
  await submitMessage();
};
const handleOnDiscard = useCallback(() => {
setRecordEnd(false);
@ -229,6 +219,7 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
></AudioAnimation>
);
}
return (
<div className="tips-text">
<IconFont type={'icon-audio'} style={{ fontSize: 20 }}></IconFont>
@ -238,7 +229,9 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
</div>
);
};
useEffect(() => {
console.log('parameters:', parameters);
}, [parameters]);
useEffect(() => {}, [messageList]);
useEffect(() => {
if (scroller.current) {
@ -271,51 +264,21 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
<div className="ground-left-footer" style={{ flex: 1 }}>
<div className="speech-to-text">
<div className="speech-box">
{isRecording ? (
<>
<AudioInput
type="default"
voiceActivity={true}
onAudioData={handleOnAudioData}
onAudioPermission={handleOnAudioPermission}
onAnalyse={handleOnAnalyse}
onRecord={handleOnRecord}
></AudioInput>
</>
) : (
<>
{/* <Tooltip title="discard">
<Button
onClick={handleOnDiscard}
icon={<DeleteRowOutlined />}
shape="circle"
></Button>
</Tooltip>
<Tooltip title="generate text content">
<Button
type="primary"
onClick={handleOnGenerate}
shape="circle"
icon={<ThunderboltOutlined></ThunderboltOutlined>}
></Button>
</Tooltip> */}
<Tooltip title="Upload an audio file">
<UploadAudio
type="default"
accept=".mp3,.mp4,.wav"
onChange={handleUploadChange}
></UploadAudio>
</Tooltip>
<AudioInput
type="default"
voiceActivity={true}
onAudioData={handleOnAudioData}
onAudioPermission={handleOnAudioPermission}
onAnalyse={handleOnAnalyse}
onRecord={handleOnRecord}
></AudioInput>
</>
{!isRecording && (
<UploadAudio
type="default"
accept=".mp3,.mp4,.wav"
onChange={handleUploadChange}
></UploadAudio>
)}
<AudioInput
type="default"
voiceActivity={true}
onAudioData={handleOnAudioData}
onAudioPermission={handleOnAudioPermission}
onAnalyse={handleOnAnalyse}
onRecord={handleOnRecord}
></AudioInput>
</div>
{audioData ? (
@ -326,24 +289,6 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
name={audioData.name}
duration={audioData.duration}
></AudioPlayer>
{/* <div
style={{
paddingRight: 5,
display: 'flex',
justifyContent: 'flex-end',
marginTop: 30
}}
>
<Tooltip title="generate text content">
<Button
size="middle"
type="primary"
icon={<ThunderboltOutlined></ThunderboltOutlined>}
>
Generata Text Content
</Button>
</Tooltip>
</div> */}
</div>
</div>
) : (
@ -399,20 +344,29 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
className="message-list-wrap"
ref={scroller}
style={{
borderTop: messageList.length
? '1px solid var(--ant-color-split)'
: '1px solid var(--ant-color-split)'
borderTop: '1px solid var(--ant-color-split)'
}}
>
<div className="content" style={{ height: '100%' }}>
<>
<MessageContent
actions={[]}
messageList={messageList[0] ? [messageList[0]] : []}
editable={false}
showTitle={false}
loading={true}
/>
<div
style={{
padding: '8px 14px',
lineHeight: '20px',
display: 'flex',
justifyContent: 'center'
}}
>
{audioData ? (
messageList[0]?.content
) : (
<span className="text-tertiary">
{intl.formatMessage({
id: 'playground.audio.generating.tips'
})}
</span>
)}
</div>
{loading && (
<Spin size="small">
<div style={{ height: '46px' }}></div>
@ -422,12 +376,18 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
</div>
</div>
<div style={{ padding: '16px 32px', textAlign: 'right' }}>
<Tooltip title="generate text content">
<Tooltip
title={intl.formatMessage({
id: 'playground.audio.button.generate'
})}
>
<Button
style={{ width: 46 }}
size="middle"
disabled={!audioData}
type="primary"
onClick={handleOnGenerate}
icon={<ThunderboltOutlined></ThunderboltOutlined>}
icon={<SendOutlined></SendOutlined>}
></Button>
</Tooltip>
</div>
@ -441,6 +401,7 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
>
<div className="box">
<DynamicParams
ref={formRef}
setParams={setParams}
paramsConfig={paramsConfig}
initialValues={initialValues}

@ -1,25 +1,29 @@
import IconFont from '@/components/icon-font';
import SealSelect from '@/components/seal-form/seal-select';
import SpeechContent from '@/components/speech-content';
import useOverlayScroller from '@/hooks/use-overlay-scroller';
import { ThunderboltOutlined } from '@ant-design/icons';
import { SendOutlined } from '@ant-design/icons';
import { useIntl, useSearchParams } from '@umijs/max';
import { Spin } from 'antd';
import { Form, Spin } from 'antd';
import classNames from 'classnames';
import _ from 'lodash';
import 'overlayscrollbars/overlayscrollbars.css';
import {
forwardRef,
memo,
useCallback,
useEffect,
useImperativeHandle,
useMemo,
useRef,
useState
} from 'react';
import { CHAT_API, textToSpeech } from '../apis';
import { CHAT_API, queryModelVoices, textToSpeech } from '../apis';
import { TTSParamsConfig as paramsConfig } from '../config/params-config';
import { MessageItem } from '../config/types';
import { MessageItem, ParamsSchema } from '../config/types';
import '../style/ground-left.less';
import '../style/system-message-wrap.less';
import RerankerParams from './dynamic-params';
import DynamicParams from './dynamic-params';
import MessageInput from './message-input';
import ReferenceParams from './reference-params';
import ViewCodeModal from './view-code-modal';
@ -31,7 +35,7 @@ interface MessageProps {
}
const initialValues = {
voice: 'Alloy',
voice: '',
response_format: 'mp3',
speed: 1
};
@ -41,12 +45,13 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
const messageId = useRef<number>(0);
const [messageList, setMessageList] = useState<
{
prompt: string;
input: string;
voice: string;
format: string;
speed: number;
uid: number;
autoplay: boolean;
audioUrl: string;
}[]
>([]);
@ -54,19 +59,18 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
const [searchParams] = useSearchParams();
const selectModel = searchParams.get('model') || '';
const [parameters, setParams] = useState<any>({});
const [systemMessage, setSystemMessage] = useState('');
const [show, setShow] = useState(false);
const [loading, setLoading] = useState(false);
const [tokenResult, setTokenResult] = useState<any>(null);
const [collapse, setCollapse] = useState(false);
const contentRef = useRef<any>('');
const controllerRef = useRef<any>(null);
const scroller = useRef<any>(null);
const currentMessageRef = useRef<any>(null);
const paramsRef = useRef<any>(null);
const messageListLengthCache = useRef<number>(0);
const checkvalueRef = useRef<any>(true);
const [currentPrompt, setCurrentPrompt] = useState<string>('');
const [voiceList, setVoiceList] = useState<Global.BaseOption<string>[]>([]);
const formRef = useRef<any>(null);
const { initialize, updateScrollerPosition } = useOverlayScroller();
const { initialize: innitializeParams } = useOverlayScroller();
@ -93,6 +97,7 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
};
const submitMessage = async (current?: { role: string; content: string }) => {
await formRef.current?.form.validateFields();
if (!parameters.model) return;
try {
setLoading(true);
@ -104,24 +109,27 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
controllerRef.current = new AbortController();
const signal = controllerRef.current.signal;
const chatParams = {
const params = {
...parameters,
prompt: current?.content || currentPrompt
input: current?.content || currentPrompt
};
const result: any = await textToSpeech({
data: chatParams,
const audioUrl: any = await textToSpeech({
data: params,
url: CHAT_API,
signal
});
console.log('result:', parameters, audioUrl);
setMessageList([
{
prompt: current?.content || currentPrompt,
input: current?.content || currentPrompt,
voice: parameters.voice,
format: parameters.response_format,
speed: parameters.speed,
uid: messageId.current,
autoplay: checkvalueRef.current
autoplay: checkvalueRef.current,
audioUrl: audioUrl
}
]);
} catch (error) {
@ -147,11 +155,68 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
setShow(false);
};
const handleSelectModel = () => {};
// When the selected TTS model changes, load its available voices and
// preselect the first one (both in params state and in the form field).
// On any failure the voice list and selection are reset to empty.
const handleSelectModel = useCallback(
  async (value: string) => {
    const model: any = modelList.find((item) => item.value === value);
    if (!model) return;
    try {
      const res = await queryModelVoices({
        name: model?.modelId as string
      });
      const options = _.map(res.voices || [], (voice: any) => ({
        label: voice,
        value: voice
      }));
      setVoiceList(options);
      setParams((prev: any) => ({
        ...prev,
        voice: options[0]?.value
      }));
      formRef.current?.form.setFieldValue('voice', options[0]?.value);
    } catch (error) {
      // Reset to a blank selection when the voices query fails.
      setVoiceList([]);
      formRef.current?.form.setFieldValue('voice', '');
      setParams((prev: any) => ({
        ...prev,
        voice: ''
      }));
    }
  },
  [modelList]
);
// Record the autoplay checkbox state in a ref (no re-render needed).
const handleOnCheckChange = (e: any) => {
  const { checked } = e.target;
  checkvalueRef.current = checked;
};
// Build one <SealSelect> form item per TTS parameter. The 'voice' field's
// options come from the currently selected model's voice list; all other
// fields use the options declared in the params config.
const renderExtra = useMemo(() => {
  return paramsConfig.map((item: ParamsSchema) => {
    const label = item.label.isLocalized
      ? intl.formatMessage({ id: item.label.text })
      : item.label.text;
    const options = item.name === 'voice' ? voiceList : item.options;
    return (
      <Form.Item name={item.name} rules={item.rules} key={item.name}>
        <SealSelect {...item.attrs} options={options} label={label}></SealSelect>
      </Form.Item>
    );
  });
}, [paramsConfig, intl, voiceList]);
useEffect(() => {
handleSelectModel(parameters.model);
}, [parameters.model, handleSelectModel]);
useEffect(() => {
if (scroller.current) {
initialize(scroller.current);
@ -233,9 +298,8 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
handleSubmit={handleSendMessage}
handleAbortFetch={handleStopConversation}
clearAll={handleClear}
setModelSelections={handleSelectModel}
shouldResetMessage={false}
submitIcon={<ThunderboltOutlined></ThunderboltOutlined>}
submitIcon={<SendOutlined></SendOutlined>}
modelList={modelList}
/>
</div>
@ -247,13 +311,14 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
ref={paramsRef}
>
<div className="box">
<RerankerParams
<DynamicParams
ref={formRef}
setParams={setParams}
paramsConfig={paramsConfig}
initialValues={initialValues}
params={parameters}
selectedModel={selectModel}
modelList={modelList}
extra={renderExtra}
/>
</div>
</div>
@ -261,7 +326,7 @@ const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
<ViewCodeModal
open={show}
payLoad={{
prompt: currentPrompt
input: currentPrompt
}}
api="audio/speech"
clientType="audio.speech"

@ -27,6 +27,7 @@ interface InputListProps {
onChange?: (
textList: { text: string; uid: number | string; name: string }[]
) => void;
onPaste?: (e: any, index: number) => void;
onSort?: (
textList: { text: string; uid: number | string; name: string }[]
) => void;
@ -42,7 +43,8 @@ const InputList: React.FC<InputListProps> = forwardRef(
height,
onSort,
onChange,
extra
extra,
onPaste
},
ref
) => {
@ -181,6 +183,7 @@ const InputList: React.FC<InputListProps> = forwardRef(
// Allocate and return the next unique message id (monotonic counter).
const setMessageId = () => {
  messageId.current += 1;
  return messageId.current;
};
const handleAdd = () => {
@ -222,7 +225,8 @@ const InputList: React.FC<InputListProps> = forwardRef(
useImperativeHandle(ref, () => ({
handleAdd,
handleDelete,
handleTextChange
handleTextChange,
setMessageId
}));
return (
@ -239,6 +243,7 @@ const InputList: React.FC<InputListProps> = forwardRef(
id: 'playground.embedding.inputyourtext'
})}
onChange={(e) => handleTextChange(e.target.value, text)}
onPaste={(e) => onPaste?.(e, index)}
></RowTextarea>
</div>
<span className="btn-group">

@ -5,12 +5,12 @@ export const TTSParamsConfig: ParamsSchema[] = [
type: 'Select',
name: 'voice',
options: [
{ label: 'Alloy', value: 'Alloy' },
{ label: 'Echo', value: 'Echo' },
{ label: 'Fable', value: 'Fable' },
{ label: 'Onyx', value: 'Onyx' },
{ label: 'Nova', value: 'Nova' },
{ label: 'Shimmer', value: 'Shimmer' }
// { label: 'Alloy', value: 'Alloy' },
// { label: 'Echo', value: 'Echo' },
// { label: 'Fable', value: 'Fable' },
// { label: 'Onyx', value: 'Onyx' },
// { label: 'Nova', value: 'Nova' },
// { label: 'Shimmer', value: 'Shimmer' }
],
label: {
text: 'playground.params.voice',
@ -64,25 +64,6 @@ export const TTSParamsConfig: ParamsSchema[] = [
}
]
}
// {
// type: 'TextArea',
// name: 'prompt',
// label: {
// text: 'Prompt',
// isLocalized: false
// },
// attrs: {
// autoSize: {
// minRows: 2,
// maxRows: 3
// }
// },
// rules: [
// {
// required: false
// }
// ]
// }
];
export const RealtimeParamsConfig: ParamsSchema[] = [

@ -2,6 +2,7 @@ import IconFont from '@/components/icon-font';
import breakpoints from '@/config/breakpoints';
import HotKeys from '@/config/hotkeys';
import useWindowResize from '@/hooks/use-window-resize';
import { queryModelsList as queryGPUStackModels } from '@/pages/llmodels/apis';
import { AudioOutlined } from '@ant-design/icons';
import { PageContainer } from '@ant-design/pro-components';
import { useIntl } from '@umijs/max';
@ -102,10 +103,30 @@ const Playground: React.FC = () => {
}
};
// Best-effort fetch of the first 100 GPUStack models; resolves to an
// empty array on any failure so callers never have to handle rejection.
const getGpuStackModels = async () =>
  queryGPUStackModels({ page: 1, perPage: 100 })
    .then((res: any) => res.items || [])
    .catch(() => []);
const fetchData = async () => {
try {
const modelist = await getModelList();
setModelList(modelist);
const [modelist, list] = await Promise.all([
getModelList(),
getGpuStackModels()
]);
const dataMap = list.reduce((acc: any, cur: any) => {
acc[cur.name] = cur;
return acc;
}, {});
const dataList = modelist.map((item: any) => {
item.modelId = dataMap[item.value]?.id;
return item;
});
setModelList(dataList);
} catch (error) {
setLoaded(true);
}

@ -86,7 +86,7 @@
height: fit-content;
top: -10px;
font-size: var(--font-size-middle);
left: calc(50% + 18px);
left: calc(50% + 19px);
transform: translateX(-50%);
background-color: transparent;
}

@ -12,13 +12,20 @@ export const loadAudioData = async (data: any, type: string) => {
audio.addEventListener('loadedmetadata', () => {
const duration = audio.duration;
resolve({ size: fileSize, duration: Math.ceil(duration), url: url });
resolve({
data: audioBlob,
size: fileSize,
type: type,
duration: Math.ceil(duration),
url: url
});
});
audio.addEventListener('ended', () => {
URL.revokeObjectURL(audio.src);
});
} catch (error) {
console.log('error====', error);
reject(error);
}
});
@ -29,7 +36,6 @@ export const readAudioFile = async (file: File) => {
const reader = new FileReader();
reader.onload = async function (e: any) {
try {
// const size = convertFileSize(file.size);
console.log('file====', file);
const arrayBuffer = e.target.result;
const audioData = await loadAudioData(arrayBuffer, file.type);
@ -38,6 +44,7 @@ export const readAudioFile = async (file: File) => {
name: file.name
});
} catch (error) {
console.log('error====', error);
reject(error);
}
};

Loading…
Cancel
Save