parent
f24aef300b
commit
c3473673da
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,10 @@
|
||||
// Flex container that centers the visualization canvas on both axes.
.canvas-wrap {
  display: flex;
  align-items: center;
  justify-content: center;
  text-align: center;

  // Remove the inline-element baseline gap beneath the canvas.
  canvas {
    display: block;
  }
}
|
||||
@ -0,0 +1,121 @@
|
||||
import React, { useEffect } from 'react';
|
||||
import './index.less';
|
||||
|
||||
interface AudioAnimationProps {
|
||||
width: number;
|
||||
height: number;
|
||||
analyserData: {
|
||||
data: Uint8Array;
|
||||
analyser: any;
|
||||
};
|
||||
}
|
||||
|
||||
const AudioAnimation: React.FC<AudioAnimationProps> = (props) => {
|
||||
const { width, height, analyserData } = props;
|
||||
const canvasRef = React.useRef<HTMLCanvasElement>(null);
|
||||
const animationId = React.useRef<number>(0);
|
||||
const isScaled = React.useRef<boolean>(false);
|
||||
const oscillationOffset = React.useRef(0);
|
||||
const direction = React.useRef(1);
|
||||
|
||||
const startAudioVisualization = () => {
|
||||
if (!canvasRef.current || !analyserData.data?.length) return;
|
||||
|
||||
const canvas = canvasRef.current;
|
||||
const canvasCtx = canvas.getContext('2d');
|
||||
if (!canvasCtx) return;
|
||||
|
||||
const WIDTH = (canvas.width = width * 2);
|
||||
const HEIGHT = (canvas.height = height * 2);
|
||||
|
||||
if (!isScaled.current) {
|
||||
canvasCtx.scale(2, 2);
|
||||
isScaled.current = true;
|
||||
}
|
||||
|
||||
const barWidth = 3;
|
||||
const barSpacing = 2;
|
||||
const centerX = HEIGHT / 2;
|
||||
const centerLine = Math.floor(HEIGHT / 2);
|
||||
const jitterAmplitude = 60; // 最大抖动幅度
|
||||
const minJitter = 15; // 最小抖动幅度
|
||||
|
||||
const frameInterval = 2;
|
||||
let frameCount = 0;
|
||||
|
||||
canvasCtx.fillStyle = '#0073EF';
|
||||
|
||||
const draw = () => {
|
||||
frameCount++;
|
||||
if (frameCount % frameInterval !== 0) {
|
||||
animationId.current = requestAnimationFrame(draw);
|
||||
return;
|
||||
}
|
||||
analyserData.analyser?.current?.getByteFrequencyData(analyserData.data);
|
||||
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
|
||||
|
||||
const barCount = analyserData.data.length;
|
||||
const totalWidth = barCount * (barWidth + barSpacing) - barSpacing;
|
||||
let x = centerX - totalWidth / 2 + oscillationOffset.current;
|
||||
oscillationOffset.current += direction.current * 0.5;
|
||||
|
||||
if (oscillationOffset.current > 20 || oscillationOffset.current < -20) {
|
||||
direction.current *= -1;
|
||||
}
|
||||
|
||||
for (let i = 0; i < barCount; i++) {
|
||||
const baseHeight = Math.floor(analyserData.data[i] / 2);
|
||||
const jitter =
|
||||
minJitter +
|
||||
Math.round((Math.random() - 0.5) * (jitterAmplitude - minJitter));
|
||||
const barHeight = baseHeight + jitter;
|
||||
|
||||
const topY = Math.round(centerLine - barHeight / 2);
|
||||
const bottomY = Math.round(centerLine + barHeight / 2);
|
||||
|
||||
canvasCtx.beginPath();
|
||||
canvasCtx.moveTo(x, bottomY);
|
||||
canvasCtx.lineTo(x, topY + 2);
|
||||
canvasCtx.arcTo(x + barWidth, topY + 2, x + barWidth, bottomY, 2);
|
||||
canvasCtx.lineTo(x + barWidth, bottomY);
|
||||
canvasCtx.closePath();
|
||||
canvasCtx.fill();
|
||||
|
||||
x += barWidth + barSpacing;
|
||||
}
|
||||
|
||||
animationId.current = requestAnimationFrame(draw);
|
||||
};
|
||||
|
||||
draw();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!analyserData.data?.length || !analyserData.analyser.current) {
|
||||
canvasRef.current
|
||||
?.getContext('2d')
|
||||
?.clearRect(0, 0, width * 2, height * 2);
|
||||
cancelAnimationFrame(animationId.current);
|
||||
animationId.current = 0;
|
||||
return;
|
||||
}
|
||||
startAudioVisualization();
|
||||
return () => {
|
||||
if (animationId.current) cancelAnimationFrame(animationId.current);
|
||||
};
|
||||
}, [analyserData, width, height]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className="canvas-wrap"
|
||||
style={{
|
||||
width: '100%',
|
||||
height: height
|
||||
}}
|
||||
>
|
||||
<canvas ref={canvasRef} style={{ width, height }}></canvas>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(AudioAnimation);
|
||||
@ -0,0 +1,53 @@
|
||||
// Pill-shaped custom audio-player shell.
.player-wrap {
  display: flex;
  width: 100%;
  border-radius: 36px;
  background-color: var(--ant-color-fill-quaternary);

  // Left section: play button + progress + timestamps.
  .player-ui {
    display: flex;
    flex: 1;
    align-items: center;
    justify-content: flex-start;
    padding: 5px 10px;
  }

  .play-content {
    display: flex;
    flex: 1;
    align-items: center;
    justify-content: flex-start;
  }

  .time {
    // Elapsed-time label sits right after the play button.
    &.current {
      margin-left: 6px;
    }
  }

  // File name stacked above the seek slider.
  .progress-bar {
    display: flex;
    flex: 1;
    flex-direction: column;
    align-items: center;
    justify-content: center;
    margin-inline: 10px;

    .slider {
      width: 100%;
    }

    .file-name {
      height: 20px;
      line-height: 20px;
    }

    // Tighten antd's default slider margins.
    .ant-slider-horizontal {
      margin-block: 2px;
    }
  }

  .speaker {
    margin-left: 10px;
  }
}
|
||||
@ -0,0 +1,191 @@
|
||||
import { formatTime } from '@/utils/index';
|
||||
import { PauseCircleFilled, PlayCircleFilled } from '@ant-design/icons';
|
||||
import { Button, Slider } from 'antd';
|
||||
import React, {
|
||||
forwardRef,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useImperativeHandle
|
||||
} from 'react';
|
||||
import IconFont from '../icon-font';
|
||||
import './index.less';
|
||||
|
||||
interface AudioPlayerProps {
|
||||
autoplay?: boolean;
|
||||
url: string;
|
||||
speed?: number;
|
||||
ref?: any;
|
||||
name: string;
|
||||
height?: number;
|
||||
width?: number;
|
||||
duration?: number;
|
||||
}
|
||||
|
||||
const AudioPlayer: React.FC<AudioPlayerProps> = forwardRef((props, ref) => {
|
||||
const { autoplay = false, speed: defaultSpeed = 1 } = props;
|
||||
const audioRef = React.useRef<HTMLAudioElement>(null);
|
||||
const [audioState, setAudioState] = React.useState<{
|
||||
currentTime: number;
|
||||
duration: number;
|
||||
}>({
|
||||
currentTime: 0,
|
||||
duration: 0
|
||||
});
|
||||
const [playOn, setPlayOn] = React.useState<boolean>(false);
|
||||
const [speakerOn, setSpeakerOn] = React.useState<boolean>(false);
|
||||
const [volume, setVolume] = React.useState<number>(0.5);
|
||||
const [speed, setSpeed] = React.useState<number>(defaultSpeed);
|
||||
const timer = React.useRef<any>(null);
|
||||
|
||||
useImperativeHandle(ref, () => ({
|
||||
play: () => {
|
||||
audioRef.current?.play();
|
||||
},
|
||||
pause: () => {
|
||||
audioRef.current?.pause();
|
||||
}
|
||||
}));
|
||||
|
||||
const handleAudioOnPlay = useCallback(() => {
|
||||
timer.current = setInterval(() => {
|
||||
setAudioState((prestate) => {
|
||||
return {
|
||||
currentTime: Math.ceil(audioRef.current?.currentTime || 0),
|
||||
duration:
|
||||
prestate.duration || Math.ceil(audioRef.current?.duration || 0)
|
||||
};
|
||||
});
|
||||
|
||||
if (audioRef.current?.paused || audioRef.current?.ended) {
|
||||
clearInterval(timer.current);
|
||||
setPlayOn(false);
|
||||
setAudioState((prestate: any) => {
|
||||
return {
|
||||
currentTime: 0,
|
||||
duration: prestate.duration
|
||||
};
|
||||
});
|
||||
}
|
||||
}, 500);
|
||||
}, []);
|
||||
|
||||
const handlePlay = useCallback(() => {
|
||||
if (playOn) {
|
||||
audioRef.current?.pause();
|
||||
} else {
|
||||
audioRef.current?.play();
|
||||
}
|
||||
setPlayOn(!playOn);
|
||||
}, [playOn]);
|
||||
|
||||
const initPlayerConfig = useCallback(() => {
|
||||
// set volume
|
||||
audioRef.current!.volume = volume;
|
||||
// set playback rate
|
||||
audioRef.current!.playbackRate = speed;
|
||||
}, []);
|
||||
|
||||
const handleLoadedMetadata = useCallback(
|
||||
(data: any) => {
|
||||
const duration = Math.ceil(audioRef.current?.duration || 0);
|
||||
setAudioState({
|
||||
currentTime: 0,
|
||||
duration:
|
||||
duration && duration !== Infinity ? duration : props.duration || 0
|
||||
});
|
||||
setPlayOn(autoplay);
|
||||
},
|
||||
[autoplay, props.duration]
|
||||
);
|
||||
|
||||
const handleCurrentChange = useCallback((val: number) => {
|
||||
audioRef.current!.currentTime = val;
|
||||
setAudioState((prestate) => {
|
||||
return {
|
||||
currentTime: val,
|
||||
duration: prestate.duration
|
||||
};
|
||||
});
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (audioRef.current) {
|
||||
initPlayerConfig();
|
||||
}
|
||||
}, [audioRef.current]);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
clearInterval(timer.current);
|
||||
};
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<div className="player-wrap" style={{ width: props.width || '100%' }}>
|
||||
<div className="player-ui">
|
||||
<span className="play-btn">
|
||||
<Button
|
||||
size="middle"
|
||||
type="text"
|
||||
onClick={handlePlay}
|
||||
icon={
|
||||
!playOn ? (
|
||||
<PlayCircleFilled
|
||||
style={{ fontSize: '22px' }}
|
||||
></PlayCircleFilled>
|
||||
) : (
|
||||
<PauseCircleFilled
|
||||
style={{ fontSize: '22px' }}
|
||||
></PauseCircleFilled>
|
||||
)
|
||||
}
|
||||
></Button>
|
||||
</span>
|
||||
<div className="play-content">
|
||||
<span className="time current">
|
||||
{' '}
|
||||
{formatTime(audioState.currentTime)}
|
||||
</span>
|
||||
<div className="progress-bar">
|
||||
<span className="file-name">{props.name}</span>
|
||||
<div className="slider">
|
||||
<Slider
|
||||
tooltip={{ open: false }}
|
||||
min={0}
|
||||
max={audioState.duration}
|
||||
value={audioState.currentTime}
|
||||
onChange={handleCurrentChange}
|
||||
/>
|
||||
</div>
|
||||
<span>{props.speed ? `${props.speed}x` : '1x'}</span>
|
||||
</div>
|
||||
<span className="time">{formatTime(audioState.duration)}</span>
|
||||
</div>
|
||||
<span className="speaker">
|
||||
<Button
|
||||
size="middle"
|
||||
type="text"
|
||||
icon={
|
||||
<IconFont
|
||||
type="icon-SpeakerHigh"
|
||||
style={{ fontSize: '22px' }}
|
||||
></IconFont>
|
||||
}
|
||||
></Button>
|
||||
</span>
|
||||
</div>
|
||||
<audio
|
||||
controls
|
||||
autoPlay={autoplay}
|
||||
src={props.url}
|
||||
ref={audioRef}
|
||||
preload="metadata"
|
||||
style={{ display: 'none' }}
|
||||
onPlay={handleAudioOnPlay}
|
||||
onLoadedMetadata={handleLoadedMetadata}
|
||||
></audio>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
|
||||
export default React.memo(AudioPlayer);
|
||||
@ -1,7 +1,7 @@
|
||||
import { createFromIconfontCN } from '@ant-design/icons';
|
||||
|
||||
const IconFont = createFromIconfontCN({
|
||||
scriptUrl: '//at.alicdn.com/t/c/font_4613488_flbkvujyhg4.js'
|
||||
scriptUrl: '//at.alicdn.com/t/c/font_4613488_f5wastzhj2w.js'
|
||||
});
|
||||
|
||||
export default IconFont;
|
||||
|
||||
@ -0,0 +1,47 @@
|
||||
import React, {
|
||||
forwardRef,
|
||||
useEffect,
|
||||
useImperativeHandle,
|
||||
useRef
|
||||
} from 'react';
|
||||
import useWavesurfer from './hooks/use-wavesurfer';
|
||||
|
||||
interface AudioPlayerProps {
|
||||
autoplay: boolean;
|
||||
audioUrl: string;
|
||||
speed: number;
|
||||
ref?: any;
|
||||
height?: number;
|
||||
width?: number;
|
||||
}
|
||||
|
||||
const AudioPlayer: React.FC<AudioPlayerProps> = forwardRef((props, ref) => {
|
||||
const { autoplay, audioUrl, speed = 1, ...rest } = props;
|
||||
const container = useRef<HTMLDivElement>(null);
|
||||
const { createWavesurfer, play, pause, destroyWavesurfer } = useWavesurfer({
|
||||
container,
|
||||
autoplay: autoplay,
|
||||
url: audioUrl,
|
||||
audioRate: speed,
|
||||
...rest
|
||||
});
|
||||
|
||||
useImperativeHandle(ref, () => {
|
||||
return {
|
||||
play,
|
||||
pause
|
||||
};
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (container.current) {
|
||||
createWavesurfer();
|
||||
}
|
||||
return () => {
|
||||
destroyWavesurfer();
|
||||
};
|
||||
}, [container.current]);
|
||||
return <div ref={container} className="audio-container"></div>;
|
||||
});
|
||||
|
||||
export default React.memo(AudioPlayer);
|
||||
@ -0,0 +1,68 @@
|
||||
import { useRef } from 'react';
|
||||
import WaveSurfer from 'wavesurfer.js';
|
||||
|
||||
interface Options {
|
||||
container: React.RefObject<HTMLDivElement>;
|
||||
waveColor?: string;
|
||||
progressColor?: string;
|
||||
url: string;
|
||||
barWidth?: number;
|
||||
barGap?: number;
|
||||
barRadius?: number;
|
||||
autoplay?: boolean;
|
||||
audioRate?: number;
|
||||
}
|
||||
const useWavesurfer = (options: Options) => {
|
||||
const wavesurfer = useRef<WaveSurfer | null>(null);
|
||||
|
||||
const { container, url, ...rest } = options;
|
||||
|
||||
const createWavesurfer = () => {
|
||||
if (!container.current) {
|
||||
return;
|
||||
}
|
||||
if (wavesurfer.current) {
|
||||
wavesurfer.current.destroy();
|
||||
}
|
||||
wavesurfer.current = WaveSurfer.create({
|
||||
container: container.current,
|
||||
waveColor: '#4096ff',
|
||||
progressColor: 'rgb(100, 0, 100)',
|
||||
url: url,
|
||||
height: 60,
|
||||
barWidth: 2,
|
||||
barGap: 1,
|
||||
barRadius: 2,
|
||||
interact: true,
|
||||
cursorWidth: 0,
|
||||
...rest
|
||||
});
|
||||
};
|
||||
|
||||
const destroyWavesurfer = () => {
|
||||
if (wavesurfer.current) {
|
||||
wavesurfer.current.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
const play = () => {
|
||||
if (wavesurfer.current) {
|
||||
wavesurfer.current.play();
|
||||
}
|
||||
};
|
||||
|
||||
const pause = () => {
|
||||
if (wavesurfer.current) {
|
||||
wavesurfer.current.pause();
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
createWavesurfer,
|
||||
play,
|
||||
pause,
|
||||
destroyWavesurfer
|
||||
};
|
||||
};
|
||||
|
||||
export default useWavesurfer;
|
||||
Binary file not shown.
@ -0,0 +1,20 @@
|
||||
import React from 'react';
|
||||
import SpeechItem from './speech-item';
|
||||
|
||||
interface SpeechContentProps {
|
||||
dataList: any[];
|
||||
loading?: boolean;
|
||||
}
|
||||
|
||||
const SpeechContent: React.FC<SpeechContentProps> = (props) => {
|
||||
console.log('SpeechContent', props);
|
||||
return (
|
||||
<div>
|
||||
{props.dataList.map((item) => (
|
||||
<SpeechItem key={item.uid} {...item} />
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(SpeechContent);
|
||||
@ -0,0 +1,87 @@
|
||||
import IconFont from '@/components/icon-font';
|
||||
import {
|
||||
DownloadOutlined,
|
||||
FileTextOutlined,
|
||||
PlayCircleOutlined
|
||||
} from '@ant-design/icons';
|
||||
import { Button, Tooltip } from 'antd';
|
||||
import React, { useRef, useState } from 'react';
|
||||
import AudioPlayer from './audio-player';
|
||||
import './styles/index.less';
|
||||
|
||||
const audioUrl = require('./ih.mp4');
|
||||
|
||||
interface SpeechContentProps {
|
||||
prompt: string;
|
||||
autoplay: boolean;
|
||||
voice: string;
|
||||
format: string;
|
||||
speed: number;
|
||||
}
|
||||
const SpeechItem: React.FC<SpeechContentProps> = (props) => {
|
||||
console.log('porps=======', props);
|
||||
const [collapsed, setCollapsed] = useState(false);
|
||||
const ref = useRef<HTMLAudioElement>(null);
|
||||
|
||||
const handlePlay = () => {
|
||||
ref.current?.play();
|
||||
};
|
||||
|
||||
const handleCollapse = () => {
|
||||
setCollapsed(!collapsed);
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<div className="speech-item">
|
||||
{/* <audio controls autoPlay={true} src={require('./ih.mp4')}></audio> */}
|
||||
<div className="voice">
|
||||
<IconFont type="icon-user_voice" className="font-size-16" />
|
||||
<span className="text">{props.voice}</span>
|
||||
</div>
|
||||
<div className="wrapper">
|
||||
<AudioPlayer {...props} audioUrl={audioUrl} ref={ref}></AudioPlayer>
|
||||
</div>
|
||||
</div>
|
||||
<div className="speech-actions">
|
||||
<span className="tags">
|
||||
<span className="item">{props.format}</span>
|
||||
<span className="item splitor"></span>
|
||||
<span className="item">{props.speed}x</span>
|
||||
</span>
|
||||
<div className="actions">
|
||||
<Tooltip title="Play">
|
||||
<Button
|
||||
onClick={handlePlay}
|
||||
icon={<PlayCircleOutlined />}
|
||||
type="text"
|
||||
size="small"
|
||||
></Button>
|
||||
</Tooltip>
|
||||
<Tooltip title="Download">
|
||||
<Button
|
||||
icon={<DownloadOutlined />}
|
||||
type="text"
|
||||
size="small"
|
||||
></Button>
|
||||
</Tooltip>
|
||||
<Tooltip title="Show Prompt">
|
||||
<Button
|
||||
icon={<FileTextOutlined />}
|
||||
type="text"
|
||||
size="small"
|
||||
onClick={handleCollapse}
|
||||
></Button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
{collapsed && (
|
||||
<div className="prompt-box">
|
||||
<div className="prompt">{props.prompt}</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(SpeechItem);
|
||||
@ -0,0 +1,93 @@
|
||||
// Row layout for one speech result: voice tag on the left, player filling
// the rest.
.speech-item {
  display: flex;
  align-items: center;
  justify-content: flex-start;

  .voice {
    display: flex;
    align-items: center;
    justify-content: flex-start;
    width: 80px;
    gap: 5px;

    // Bordered chip around the voice name.
    .text {
      display: flex;
      padding: 2px 4px;
      border: 1px solid var(--ant-color-border);
      border-radius: 4px;
    }
  }

  // Player area grows to fill the remaining row width.
  .wrapper {
    display: flex;
    flex: 1;
    align-items: center;
    justify-content: flex-start;

    .audio-container {
      flex: 1;
    }

    .format {
      display: flex;
      padding-left: 5px;
    }
  }

  audio {
    flex: 1;
  }
}

// Expandable prompt text, indented to line up with the player column.
.prompt-box {
  padding-left: 80px;

  .prompt {
    margin-top: 16px;
    padding: 10px;
    border-radius: var(--border-radius-base);
    background-color: var(--ant-color-fill-quaternary);
  }
}

// Tag list on the left, action buttons on the right.
.speech-actions {
  display: flex;
  justify-content: space-between;
  margin-top: 10px;
  padding-left: 80px;

  .actions {
    display: flex;
    align-items: center;
    justify-content: flex-start;
    gap: 15px;

    .anticon {
      font-size: 14px;
    }
  }

  .tags {
    display: flex;
    align-items: center;
    justify-content: flex-start;

    .item {
      display: flex;
      align-items: center;
      justify-content: center;
      padding: 2px;
      border-radius: 4px;
      color: var(--ant-color-text-tertiary);
    }

    // 1x1 dot separator between tags.
    .splitor {
      display: flex;
      width: 1px;
      height: 1px;
      margin: 0 8px;
      border-radius: 2px;
      background-color: var(--ant-color-fill-content-hover);
    }
  }
}
|
||||
@ -0,0 +1,54 @@
|
||||
import { UploadOutlined } from '@ant-design/icons';
|
||||
import { Button, Tooltip, Upload } from 'antd';
|
||||
import React from 'react';
|
||||
|
||||
interface UploadAudioProps {
|
||||
accept?: string;
|
||||
maxCount?: number;
|
||||
type?: 'text' | 'primary' | 'default';
|
||||
onChange?: (data: { file: any; fileList: any[] }) => void;
|
||||
}
|
||||
|
||||
const UploadAudio: React.FC<UploadAudioProps> = (props) => {
|
||||
const beforeUpload = (file: any) => {
|
||||
return true;
|
||||
};
|
||||
|
||||
const handleOnChange = React.useCallback(
|
||||
(data: { file: any; fileList: any }) => {
|
||||
console.log('handleOnChange', data);
|
||||
props.onChange?.(data);
|
||||
},
|
||||
[]
|
||||
);
|
||||
return (
|
||||
<Tooltip title={`Upload an audio file, support for ${props.accept}`}>
|
||||
<Upload
|
||||
beforeUpload={beforeUpload}
|
||||
onChange={handleOnChange}
|
||||
accept={props.accept}
|
||||
multiple={false}
|
||||
maxCount={1}
|
||||
showUploadList={false}
|
||||
fileList={[]}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
alignItems: 'center',
|
||||
gap: 16
|
||||
}}
|
||||
>
|
||||
<Button
|
||||
icon={<UploadOutlined />}
|
||||
type={props.type ?? 'text'}
|
||||
shape="circle"
|
||||
></Button>
|
||||
</div>
|
||||
</Upload>
|
||||
</Tooltip>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(UploadAudio);
|
||||
@ -0,0 +1,246 @@
|
||||
import { AudioOutlined } from '@ant-design/icons';
|
||||
import { Button, Space, Tooltip } from 'antd';
|
||||
import React, { useCallback, useEffect, useRef, useState } from 'react';
|
||||
// import '../style/audio-input.less';
|
||||
|
||||
interface AudioInputProps {
|
||||
onAudioData: (audioData: {
|
||||
chunks: any[];
|
||||
url: string;
|
||||
name: string;
|
||||
duration: number;
|
||||
}) => void;
|
||||
onAnalyse?: (analyseData: any, frequencyBinCount: any) => void;
|
||||
onAudioPermission: (audioPermission: boolean) => void;
|
||||
onRecord?: (isRecording: boolean) => void;
|
||||
voiceActivity?: boolean;
|
||||
type?: 'text' | 'primary' | 'default';
|
||||
}
|
||||
|
||||
const AudioInput: React.FC<AudioInputProps> = (props) => {
|
||||
const [audioOn, setAudioOn] = useState(false);
|
||||
const [isRecording, setIsRecording] = useState(false);
|
||||
const [audioPermission, setAudioPermission] = useState(true);
|
||||
const audioStream = useRef<any>(null);
|
||||
const audioRecorder = useRef<any>(null);
|
||||
const startTime = useRef<number>(0);
|
||||
const audioContext = useRef<any>(null);
|
||||
const analyser = useRef<any>(null);
|
||||
const dataArray = useRef<any>(null);
|
||||
const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
|
||||
const initAudioContext = useCallback(() => {
|
||||
audioContext.current = new (window.AudioContext ||
|
||||
window.webkitAudioContext)();
|
||||
|
||||
analyser.current = audioContext.current.createAnalyser();
|
||||
analyser.current.fftSize = 256;
|
||||
dataArray.current = new Uint8Array(analyser.current.frequencyBinCount);
|
||||
}, []);
|
||||
|
||||
const generateVisualData = useCallback(() => {
|
||||
const source = audioContext.current.createMediaStreamSource(
|
||||
audioStream.current
|
||||
);
|
||||
source.connect(analyser.current);
|
||||
}, []);
|
||||
|
||||
// stop all audio tracks
|
||||
const stopAudioTracks = () => {
|
||||
audioStream.current?.getTracks().forEach((track: any) => {
|
||||
track.stop();
|
||||
});
|
||||
};
|
||||
|
||||
const handleStopRecording = () => {
|
||||
setIsRecording(false);
|
||||
audioRecorder.current?.stop();
|
||||
props.onRecord?.(false);
|
||||
};
|
||||
|
||||
// get all audio tracks
|
||||
const getAudioTracks = () => {
|
||||
const audioTracks = audioStream.current.getAudioTracks();
|
||||
audioTracks.forEach((track: any) => {
|
||||
track.onended = () => {
|
||||
setAudioPermission(false);
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
// check if microphone is on
|
||||
const isMicrophoneOn = () => {
|
||||
return (
|
||||
audioStream.current &&
|
||||
audioStream.current
|
||||
.getTracks()
|
||||
.some((track: any) => track.readyState === 'live')
|
||||
);
|
||||
};
|
||||
|
||||
const microphonePermissionDenied = async () => {
|
||||
const permissionStatus = await navigator.permissions.query({
|
||||
name: 'microphone' as any
|
||||
});
|
||||
|
||||
return permissionStatus.state === 'denied';
|
||||
};
|
||||
|
||||
const checkMicrophonePermission = async () => {
|
||||
try {
|
||||
const permissionStatus = await navigator.permissions.query({
|
||||
name: 'microphone' as any
|
||||
});
|
||||
console.log('permissionStatus:', permissionStatus);
|
||||
if (permissionStatus.state === 'granted') {
|
||||
setAudioPermission(true);
|
||||
props.onAudioPermission(true);
|
||||
} else if (permissionStatus.state === 'denied') {
|
||||
setAudioPermission(false);
|
||||
props.onAudioPermission(false);
|
||||
handleStopRecording();
|
||||
}
|
||||
|
||||
permissionStatus.onchange = () => {
|
||||
console.log('permission changed');
|
||||
checkMicrophonePermission();
|
||||
};
|
||||
} catch (error) {
|
||||
// todo
|
||||
}
|
||||
};
|
||||
|
||||
// open audio
|
||||
const EnableAudio = async () => {
|
||||
try {
|
||||
audioStream.current = await navigator.mediaDevices.getUserMedia({
|
||||
audio: true
|
||||
});
|
||||
getAudioTracks();
|
||||
setAudioOn(true);
|
||||
setAudioPermission(true);
|
||||
initAudioContext();
|
||||
} catch (error) {
|
||||
// console.log(error);
|
||||
}
|
||||
};
|
||||
|
||||
// close audio
|
||||
const disableAudio = () => {
|
||||
stopAudioTracks();
|
||||
setAudioOn(false);
|
||||
handleStopRecording();
|
||||
audioStream.current = null;
|
||||
};
|
||||
|
||||
const stopRecording = () => {
|
||||
audioRecorder.current?.stop();
|
||||
setIsRecording(false);
|
||||
};
|
||||
|
||||
const handleAudioData = (audioData: any) => {
|
||||
props.onAudioData?.(audioData);
|
||||
};
|
||||
|
||||
// start recording
|
||||
const StartRecording = async () => {
|
||||
if (isRecording) {
|
||||
stopRecording();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await EnableAudio();
|
||||
console.log('audioStream:', audioStream.current);
|
||||
audioRecorder.current = new MediaRecorder(audioStream.current);
|
||||
|
||||
const audioChunks: any[] = [];
|
||||
|
||||
audioRecorder.current.ondataavailable = (event: any) => {
|
||||
audioChunks.push(event.data);
|
||||
if (props.voiceActivity) {
|
||||
analyser.current?.getByteFrequencyData(dataArray.current);
|
||||
|
||||
props.onAnalyse?.(dataArray.current, analyser);
|
||||
}
|
||||
};
|
||||
|
||||
// stop recording
|
||||
audioRecorder.current.onstop = () => {
|
||||
const audioBlob = new Blob(audioChunks, { type: 'audio/wav' });
|
||||
const audioUrl = URL.createObjectURL(audioBlob);
|
||||
|
||||
handleAudioData({
|
||||
chunks: audioChunks,
|
||||
size: audioBlob.size,
|
||||
type: audioBlob.type,
|
||||
url: audioUrl,
|
||||
name: `recording-${new Date().toISOString()}.wav`,
|
||||
duration: Math.ceil((Date.now() - startTime.current) / 1000)
|
||||
});
|
||||
|
||||
props.onAnalyse?.([], null);
|
||||
};
|
||||
|
||||
setIsRecording(true);
|
||||
props.onRecord?.(true);
|
||||
startTime.current = Date.now();
|
||||
audioRecorder.current.start(1000);
|
||||
generateVisualData();
|
||||
} catch (error) {
|
||||
// console.log(error);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
handleStopRecording();
|
||||
stopAudioTracks();
|
||||
};
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
checkMicrophonePermission();
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<div className="audio-input">
|
||||
<Space size={40} className="btns">
|
||||
{
|
||||
<Tooltip title="Start Recording">
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
alignItems: 'center',
|
||||
gap: 16
|
||||
}}
|
||||
>
|
||||
<Button
|
||||
disabled={!audioPermission}
|
||||
shape="circle"
|
||||
icon={<AudioOutlined />}
|
||||
size="middle"
|
||||
type={props.type ?? 'text'}
|
||||
danger={isRecording}
|
||||
onClick={StartRecording}
|
||||
></Button>
|
||||
</div>
|
||||
</Tooltip>
|
||||
}
|
||||
{/* {isRecording && (
|
||||
<Tooltip title="Stop Recording">
|
||||
<Button
|
||||
shape="circle"
|
||||
icon={<IconFont type="icon-stop2"></IconFont>}
|
||||
size="middle"
|
||||
type={props.type ?? 'text'}
|
||||
onClick={stopRecording}
|
||||
></Button>
|
||||
</Tooltip>
|
||||
)} */}
|
||||
</Space>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(AudioInput);
|
||||
@ -1,7 +1,475 @@
|
||||
import React from 'react';
|
||||
import AudioAnimation from '@/components/audio-animation';
|
||||
import AudioPlayer from '@/components/audio-player';
|
||||
import IconFont from '@/components/icon-font';
|
||||
import UploadAudio from '@/components/upload-audio';
|
||||
import useOverlayScroller from '@/hooks/use-overlay-scroller';
|
||||
import { fetchChunkedData, readStreamData } from '@/utils/fetch-chunk-data';
|
||||
import { readAudioFile } from '@/utils/load-audio-file';
|
||||
import { AudioOutlined, ThunderboltOutlined } from '@ant-design/icons';
|
||||
import { useIntl, useSearchParams } from '@umijs/max';
|
||||
import { Button, Spin, Tag, Tooltip } from 'antd';
|
||||
import classNames from 'classnames';
|
||||
import _ from 'lodash';
|
||||
import 'overlayscrollbars/overlayscrollbars.css';
|
||||
import {
|
||||
forwardRef,
|
||||
memo,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useImperativeHandle,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState
|
||||
} from 'react';
|
||||
import { CHAT_API } from '../apis';
|
||||
import { Roles, generateMessages } from '../config';
|
||||
import { RealtimeParamsConfig as paramsConfig } from '../config/params-config';
|
||||
import { MessageItem } from '../config/types';
|
||||
import '../style/ground-left.less';
|
||||
import '../style/speech-to-text.less';
|
||||
import '../style/system-message-wrap.less';
|
||||
import AudioInput from './audio-input';
|
||||
import MessageContent from './multiple-chat/message-content';
|
||||
import RerankerParams from './reranker-params';
|
||||
import ViewCodeModal from './view-code-modal';
|
||||
|
||||
const GroundStt = () => {
|
||||
return <div>STT</div>;
|
||||
interface MessageProps {
|
||||
modelList: Global.BaseOption<string>[];
|
||||
loaded?: boolean;
|
||||
ref?: any;
|
||||
}
|
||||
|
||||
const initialValues = {
|
||||
language: 'auto'
|
||||
};
|
||||
|
||||
export default React.memo(GroundStt);
|
||||
const GroundLeft: React.FC<MessageProps> = forwardRef((props, ref) => {
|
||||
const { modelList } = props;
|
||||
const messageId = useRef<number>(0);
|
||||
const [messageList, setMessageList] = useState<MessageItem[]>([]);
|
||||
|
||||
const intl = useIntl();
|
||||
const [searchParams] = useSearchParams();
|
||||
const selectModel = searchParams.get('model') || '';
|
||||
const [parameters, setParams] = useState<any>({});
|
||||
const [systemMessage, setSystemMessage] = useState('');
|
||||
const [show, setShow] = useState(false);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [tokenResult, setTokenResult] = useState<any>(null);
|
||||
const [collapse, setCollapse] = useState(false);
|
||||
const contentRef = useRef<any>('');
|
||||
const controllerRef = useRef<any>(null);
|
||||
const scroller = useRef<any>(null);
|
||||
const currentMessageRef = useRef<any>(null);
|
||||
const paramsRef = useRef<any>(null);
|
||||
const messageListLengthCache = useRef<number>(0);
|
||||
const [audioPermissionOn, setAudioPermissionOn] = useState(true);
|
||||
const [audioData, setAudioData] = useState<any>(null);
|
||||
const [audioChunks, setAudioChunks] = useState<any>({
|
||||
data: [],
|
||||
analyser: null
|
||||
});
|
||||
const [isRecording, setIsRecording] = useState(false);
|
||||
|
||||
const { initialize, updateScrollerPosition } = useOverlayScroller();
|
||||
const { initialize: innitializeParams } = useOverlayScroller();
|
||||
|
||||
useImperativeHandle(ref, () => {
|
||||
return {
|
||||
viewCode() {
|
||||
setShow(true);
|
||||
},
|
||||
setCollapse() {
|
||||
setCollapse(!collapse);
|
||||
},
|
||||
collapse: collapse
|
||||
};
|
||||
});
|
||||
|
||||
const viewCodeMessage = useMemo(() => {
|
||||
return generateMessages([
|
||||
{ role: Roles.System, content: systemMessage },
|
||||
...messageList
|
||||
]);
|
||||
}, [messageList, systemMessage]);
|
||||
|
||||
const setMessageId = () => {
|
||||
messageId.current = messageId.current + 1;
|
||||
};
|
||||
|
||||
const joinMessage = (chunk: any) => {
|
||||
setTokenResult({
|
||||
...(chunk?.usage ?? {})
|
||||
});
|
||||
|
||||
if (!chunk || !_.get(chunk, 'choices', []).length) {
|
||||
return;
|
||||
}
|
||||
contentRef.current =
|
||||
contentRef.current + _.get(chunk, 'choices.0.delta.content', '');
|
||||
setMessageList([
|
||||
...messageList,
|
||||
...currentMessageRef.current,
|
||||
{
|
||||
role: Roles.Assistant,
|
||||
content: contentRef.current,
|
||||
uid: messageId.current
|
||||
}
|
||||
]);
|
||||
};
|
||||
const handleStopConversation = () => {
|
||||
controllerRef.current?.abort?.();
|
||||
setLoading(false);
|
||||
};
|
||||
|
||||
// Submit the conversation to the chat endpoint and stream the reply into
// the message list. `current` is an optional user turn to append first.
const submitMessage = async (current?: { role: string; content: string }) => {
  if (!parameters.model) return;
  try {
    setLoading(true);
    setMessageId();
    setTokenResult(null);

    // Cancel any in-flight request before starting a new one.
    controllerRef.current?.abort?.();
    controllerRef.current = new AbortController();
    const signal = controllerRef.current.signal;
    // Remember the appended user turn so joinMessage can rebuild the list
    // from the pre-submit snapshot on every streamed chunk.
    currentMessageRef.current = current
      ? [
          {
            ...current,
            uid: messageId.current
          }
        ]
      : [];

    // Reset the streamed-content accumulator for the new reply.
    contentRef.current = '';
    setMessageList((pre) => {
      return [...pre, ...currentMessageRef.current];
    });

    // Request payload: system prompt + history + the new user turn.
    const messageParams = [
      { role: Roles.System, content: systemMessage },
      ...messageList,
      ...currentMessageRef.current
    ];

    const messages = generateMessages(messageParams);

    const chatParams = {
      messages: messages,
      ...parameters,
      stream: true,
      // Ask the server to include token usage in the stream.
      stream_options: {
        include_usage: true
      }
    };
    const result: any = await fetchChunkedData({
      data: chatParams,
      url: CHAT_API,
      signal
    });

    // Request-level failure: surface the message and bail out.
    if (result?.error) {
      setTokenResult({
        error: true,
        errorMessage:
          result?.data?.error?.message || result?.data?.message || ''
      });
      return;
    }
    // New uid for the assistant reply that is about to stream in.
    setMessageId();
    const { reader, decoder } = result;
    await readStreamData(reader, decoder, (chunk: any) => {
      // In-stream error chunk: surface it and skip content handling.
      if (chunk?.error) {
        setTokenResult({
          error: true,
          errorMessage: chunk?.error?.message || chunk?.message || ''
        });
        return;
      }
      joinMessage(chunk);
    });
  } catch (error) {
    // Aborts from handleStopConversation land here; intentionally ignored.
    // console.log('error:', error);
  } finally {
    setLoading(false);
  }
};
|
||||
const handleClear = () => {
|
||||
if (!messageList.length) {
|
||||
return;
|
||||
}
|
||||
setMessageId();
|
||||
setMessageList([]);
|
||||
setTokenResult(null);
|
||||
};
|
||||
|
||||
// Render a localized role title for a message bubble.
// NOTE(review): the "00:10" duration is hard-coded — presumably a
// placeholder until real audio durations are wired in; confirm.
const renderTitle = useCallback((role: string) => {
  return (
    <span>
      {intl.formatMessage({ id: `playground.${role}` })}
      <span className="text-tertiary m-l-5">00:10</span>
    </span>
  );
}, []);
|
||||
|
||||
const handleSendMessage = (message: Omit<MessageItem, 'uid'>) => {
|
||||
setLoading(true);
|
||||
setMessageList([
|
||||
...messageList,
|
||||
{
|
||||
role: Roles.User,
|
||||
title: renderTitle(Roles.User),
|
||||
content: 'test data test data',
|
||||
uid: messageId.current
|
||||
}
|
||||
]);
|
||||
|
||||
setTimeout(() => {
|
||||
setMessageList([
|
||||
...messageList,
|
||||
{
|
||||
role: Roles.Assistant,
|
||||
title: renderTitle(Roles.Assistant),
|
||||
content: 'generate by assistant',
|
||||
uid: messageId.current
|
||||
}
|
||||
]);
|
||||
setLoading(false);
|
||||
}, 1000);
|
||||
};
|
||||
|
||||
const handleCloseViewCode = () => {
|
||||
setShow(false);
|
||||
};
|
||||
|
||||
const handleOnAudioData = useCallback(
|
||||
(data: { chunks: Blob[]; url: string; name: string; duration: number }) => {
|
||||
setAudioData(() => {
|
||||
return {
|
||||
url: data.url,
|
||||
name: data.name,
|
||||
duration: data.duration
|
||||
};
|
||||
});
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const handleOnAudioPermission = useCallback((permission: boolean) => {
|
||||
setAudioPermissionOn(permission);
|
||||
}, []);
|
||||
|
||||
const handleUploadChange = useCallback(
|
||||
async (data: { file: any; fileList: any }) => {
|
||||
const res = await readAudioFile(data.file.originFileObj);
|
||||
console.log('res=======', res);
|
||||
setAudioData(res);
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const handleOnAnalyse = useCallback((data: any, analyser: any) => {
|
||||
setAudioChunks((pre: any) => {
|
||||
return {
|
||||
data: data,
|
||||
analyser: analyser
|
||||
};
|
||||
});
|
||||
}, []);
|
||||
const handleOnRecord = useCallback((val: boolean) => {
|
||||
setIsRecording(val);
|
||||
setAudioData(null);
|
||||
}, []);
|
||||
|
||||
// Render the center panel: the live waveform while recording, a hint row
// otherwise, and nothing when mic permission is denied (the permission
// notice is rendered separately in the JSX below).
// NOTE(review): "renderAniamtion" is a typo of "renderAnimation"; kept
// because the render JSX references this exact name.
const renderAniamtion = () => {
  if (!audioPermissionOn) {
    return null;
  }
  if (isRecording) {
    return (
      <AudioAnimation
        height={66}
        width={200}
        analyserData={audioChunks}
      ></AudioAnimation>
    );
  }
  return (
    <div className="tips-text">
      <IconFont type={'icon-audio'} style={{ fontSize: 20 }}></IconFont>
      <span>Upload an audio file or start recording</span>
    </div>
  );
};
|
||||
|
||||
// NOTE(review): this effect is a no-op — looks like a leftover stub;
// safe to remove.
useEffect(() => {}, [messageList]);
|
||||
// Attach the overlay scrollbar to the message-list container once mounted.
// NOTE(review): a ref's `.current` in a dependency array does not trigger
// re-runs when it changes — this relies on other re-renders; confirm.
useEffect(() => {
  if (scroller.current) {
    initialize(scroller.current);
  }
}, [scroller.current, initialize]);
|
||||
|
||||
// Attach the overlay scrollbar to the parameters panel once mounted.
// NOTE(review): same ref-in-deps caveat as the scroller effect above.
useEffect(() => {
  if (paramsRef.current) {
    innitializeParams(paramsRef.current);
  }
}, [paramsRef.current, innitializeParams]);
|
||||
|
||||
// While a reply is streaming, keep the message list pinned to the bottom
// on every list update.
useEffect(() => {
  if (loading) {
    updateScrollerPosition();
  }
}, [messageList, loading]);
|
||||
|
||||
// Autoscroll only when messages were appended (length grew), not when an
// existing message was edited in place.
useEffect(() => {
  if (messageList.length > messageListLengthCache.current) {
    updateScrollerPosition();
  }
  messageListLengthCache.current = messageList.length;
}, [messageList.length]);
|
||||
|
||||
return (
|
||||
<div className="ground-left-wrapper">
|
||||
<div className="ground-left">
|
||||
<div className="ground-left-footer" style={{ flex: 1 }}>
|
||||
<div className="speech-to-text">
|
||||
<div className="speech-box">
|
||||
<Tooltip title="Upload an audio file">
|
||||
<UploadAudio
|
||||
type="default"
|
||||
accept=".mp3,.mp4,.wav"
|
||||
onChange={handleUploadChange}
|
||||
></UploadAudio>
|
||||
</Tooltip>
|
||||
<AudioInput
|
||||
type="default"
|
||||
voiceActivity={true}
|
||||
onAudioData={handleOnAudioData}
|
||||
onAudioPermission={handleOnAudioPermission}
|
||||
onAnalyse={handleOnAnalyse}
|
||||
onRecord={handleOnRecord}
|
||||
></AudioInput>
|
||||
</div>
|
||||
|
||||
{audioData ? (
|
||||
<div className="flex-between flex-center">
|
||||
<div style={{ flex: 1 }}>
|
||||
<AudioPlayer
|
||||
url={audioData.url}
|
||||
name={audioData.name}
|
||||
duration={audioData.duration}
|
||||
></AudioPlayer>
|
||||
<div
|
||||
style={{
|
||||
paddingRight: 5,
|
||||
display: 'flex',
|
||||
justifyContent: 'flex-end',
|
||||
marginTop: 30
|
||||
}}
|
||||
>
|
||||
<Tooltip title="generate text content">
|
||||
<Button
|
||||
size="middle"
|
||||
type="primary"
|
||||
icon={<ThunderboltOutlined></ThunderboltOutlined>}
|
||||
>
|
||||
Generata Text Content
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
renderAniamtion()
|
||||
)}
|
||||
</div>
|
||||
{!audioPermissionOn && (
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
justifyContent: 'center',
|
||||
alignItems: 'center',
|
||||
height: '100%'
|
||||
}}
|
||||
>
|
||||
<span>
|
||||
<Tag
|
||||
style={{
|
||||
width: 36,
|
||||
height: 36,
|
||||
lineHeight: '36px',
|
||||
textAlign: 'center'
|
||||
}}
|
||||
bordered={false}
|
||||
color="error"
|
||||
icon={
|
||||
<AudioOutlined className="font-size-16"></AudioOutlined>
|
||||
}
|
||||
></Tag>
|
||||
</span>
|
||||
<span
|
||||
style={{
|
||||
marginTop: 10,
|
||||
fontSize: 14,
|
||||
fontWeight: 500
|
||||
}}
|
||||
>
|
||||
Enable microphone access in your browser’s settings.
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className="message-list-wrap" ref={scroller}>
|
||||
<>
|
||||
<div className="content" style={{ height: '100%' }}>
|
||||
<>
|
||||
<MessageContent
|
||||
actions={['copy']}
|
||||
messageList={messageList[0] ? [messageList[0]] : []}
|
||||
setMessageList={setMessageList}
|
||||
editable={false}
|
||||
loading={loading}
|
||||
/>
|
||||
{loading && (
|
||||
<Spin size="small">
|
||||
<div style={{ height: '46px' }}></div>
|
||||
</Spin>
|
||||
)}
|
||||
</>
|
||||
</div>
|
||||
</>
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
className={classNames('params-wrapper', {
|
||||
collapsed: collapse
|
||||
})}
|
||||
ref={paramsRef}
|
||||
>
|
||||
<div className="box">
|
||||
<RerankerParams
|
||||
setParams={setParams}
|
||||
paramsConfig={paramsConfig}
|
||||
initialValues={initialValues}
|
||||
params={parameters}
|
||||
selectedModel={selectModel}
|
||||
modelList={modelList}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<ViewCodeModal
|
||||
open={show}
|
||||
payLoad={{
|
||||
messages: viewCodeMessage
|
||||
}}
|
||||
parameters={parameters}
|
||||
onCancel={handleCloseViewCode}
|
||||
title={intl.formatMessage({ id: 'playground.viewcode' })}
|
||||
></ViewCodeModal>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
|
||||
// Memoized export: GroundLeft only re-renders when its props change.
export default memo(GroundLeft);
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,20 @@
|
||||
// Standalone audio-input (recorder) widget container.
.audio-input {
  display: flex;
  flex-direction: column;
  justify-content: center;
  align-items: center;
  background-color: var(--ant-color-fill-quaternary);
  border-radius: 4px;
  padding: 16px;

  // Strip antd's upload chrome so it blends into the panel.
  .ant-upload {
    border: none;
    background-color: transparent !important;
  }

  .btns {
    .anticon {
      font-size: 24px;
    }
  }
}
|
||||
@ -0,0 +1,42 @@
|
||||
// Speech-to-text panel layout: sources row on top, hint/player below.
.speech-to-text {
  padding: 32px;
  height: 100%;
  display: flex;
  gap: 30px;
  flex-direction: column;

  // Hint row shown before any audio exists; fixed 66px height matches the
  // waveform so the layout does not jump when recording starts.
  .tips-text {
    display: flex;
    justify-content: center;
    align-items: center;
    font-size: var(--font-size-large);
    height: 66px;
    padding: 0;
    gap: 10px;
  }

  // Upload button + record button, centered with wide spacing.
  .speech-box {
    display: flex;
    justify-content: center;
    align-items: center;
    gap: 120px;
    flex: 1;
    min-height: 160px;

    // Strip antd's upload chrome.
    .ant-upload {
      border: none;
      background-color: transparent !important;
    }

    .ant-btn {
      width: 60px;
      height: 60px;
    }

    .btns {
      .anticon {
        font-size: 24px;
      }
    }
  }
}
|
||||
@ -0,0 +1,48 @@
|
||||
import { convertFileSize } from './index';
|
||||
|
||||
export const loadAudioData = async (data: any, type: string) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
const audioBlob = new Blob([data], { type: type });
|
||||
const fileSize = convertFileSize(audioBlob.size);
|
||||
|
||||
const audio = document.createElement('audio');
|
||||
const url = URL.createObjectURL(audioBlob);
|
||||
audio.src = url;
|
||||
|
||||
audio.addEventListener('loadedmetadata', () => {
|
||||
const duration = audio.duration;
|
||||
resolve({ size: fileSize, duration: Math.ceil(duration), url: url });
|
||||
});
|
||||
|
||||
audio.addEventListener('ended', () => {
|
||||
URL.revokeObjectURL(audio.src);
|
||||
});
|
||||
} catch (error) {
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export const readAudioFile = async (file: File) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = async function (e: any) {
|
||||
try {
|
||||
// const size = convertFileSize(file.size);
|
||||
console.log('file====', file);
|
||||
const arrayBuffer = e.target.result;
|
||||
const audioData = await loadAudioData(arrayBuffer, file.type);
|
||||
resolve({
|
||||
...(audioData || {}),
|
||||
name: file.name
|
||||
});
|
||||
} catch (error) {
|
||||
reject(error);
|
||||
}
|
||||
};
|
||||
|
||||
reader.onerror = (error) => reject(error);
|
||||
reader.readAsArrayBuffer(file);
|
||||
});
|
||||
};
|
||||
Loading…
Reference in new issue