chore: download log display

main
jialin 2 years ago
parent 7ba7f57b05
commit 30e4e1aa1e

@ -2,8 +2,8 @@ import useSetChunkRequest from '@/hooks/use-chunk-request';
import useContainerScroll from '@/hooks/use-container-scorll';
import Convert from 'ansi-to-html';
import classNames from 'classnames';
import hasAnsi from 'has-ansi';
import { memo, useEffect, useRef, useState } from 'react';
import _ from 'lodash';
import { memo, useCallback, useEffect, useRef, useState } from 'react';
import './index.less';
interface LogsViewerProps {
@ -15,7 +15,7 @@ interface LogsViewerProps {
const LogsViewer: React.FC<LogsViewerProps> = (props) => {
const { height, content, url } = props;
const [nowrap, setNowrap] = useState(false);
const [logsContent, setLogsContent] = useState(content || '');
const [logsContent, setLogsContent] = useState<string[]>([]);
const { setChunkRequest } = useSetChunkRequest();
const chunkRequedtRef = useRef<any>(null);
const scroller = useRef<any>(null);
@ -24,19 +24,50 @@ const LogsViewer: React.FC<LogsViewerProps> = (props) => {
{ toBottom: true }
);
const convert = new Convert();
const convert = new Convert({
newline: true,
escapeXML: true
});
useEffect(() => {
updateScrollerPosition();
}, [logsContent]);
// Count the run of trailing 'A' characters on a line. These are residue of
// ANSI cursor-up sequences (ESC[A) that ansi-to-html leaves behind in
// progress-bar style logs; the count tells us how many lines to rewind.
const getTrailingACount = useCallback((str: string) => {
  const trailing = /A+$/.exec(str);
  return trailing === null ? 0 : trailing[0].length;
}, []);
// Split converted HTML into display lines. A line ending in N trailing 'A'
// characters is ANSI cursor-up residue (ESC[A): it should overwrite the
// entry N positions back (progress-bar updates) instead of appending.
const parseHtmlStr = useCallback(
  (htmlStr: string) => {
    const result: string[] = [];
    const htmlStrArr = _.filter(
      htmlStr?.split?.('<br/>'),
      (item: string) => item
    );
    htmlStrArr.forEach((item: string) => {
      const aCount = getTrailingACount(item);
      if (aCount > 0) {
        // Clamp to 0 so a rewind longer than the buffer cannot produce a
        // negative index (result[-1] = … would set a stray property, not
        // overwrite a line).
        const placeIndex = Math.max(result.length - aCount, 0);
        result[placeIndex] = item.slice(0, -aCount);
      } else {
        result.push(item);
      }
    });
    return result;
  },
  // getTrailingACount is itself a stable useCallback([]), but listing it
  // keeps the deps honest for exhaustive-deps linting.
  [getTrailingACount]
);
// Convert an incoming (possibly ANSI-coded) log chunk to HTML once, split it
// into per-line entries, and replace the rendered list. The diff residue
// here had kept the old string-state branch alongside the new list code,
// redeclaring `htmlStr` (const redeclaration) and pushing a string into the
// string[] state — only the list path is correct now that logsContent is
// string[]. ansi-to-html passes plain text through, so no hasAnsi() gate
// is needed.
const updateContent = (newVal: string) => {
  const htmlStr = convert.toHtml(newVal);
  const list = parseHtmlStr(htmlStr);
  setLogsContent(list);
};
const createChunkConnection = async () => {
@ -68,7 +99,13 @@ const LogsViewer: React.FC<LogsViewerProps> = (props) => {
onWheel={handleContentWheel}
>
<div className={classNames('content', { 'line-break': nowrap })}>
<div className="text">{logsContent}</div>
<div className="text">
{logsContent.map((item, index) => {
return (
<div key={index} dangerouslySetInnerHTML={{ __html: item }} />
);
})}
</div>
</div>
</div>
</div>

@ -0,0 +1,56 @@
// Sample download-log fixture: model-list rows followed by Hugging Face
// "Fetching" progress lines. The trailing 'A' characters on the progress
// lines are residue of ANSI cursor-up (ESC[A) sequences — they exercise
// LogsViewer's line-overwrite parsing.
export const text = `reflection-llama-3.1-70b
Hugging Face / Reflection-Llama-3.1-70B.Q6_K*.gguf
0 / 1
2024-09-10 17:42:25
reflection-llama-3.1-70b-2cDWS
0.42%
2024-09-10 17:42:25
gemma2-27b
Ollama Library / gemma2:27b
0 / 1
2024-09-10 16:54:13
gemma2-27b-m37dz
58.98%
2024-09-10 16:54:13
2024-09-10T17:42:27+08:00 - gpustack.worker.downloaders - INFO - Downloading model leafspark/Reflection-Llama-3.1-70B-GGUF/Reflection-Llama-3.1-70B.Q6_K*.gguf
Fetching 2 files: 0%| | 0/2 [00:00<?, ?it/s]
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.0G/39.9G [00:00<?, ?B/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.0G/39.9G [00:05<2:30:05, 1.98MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.0G/39.9G [00:10<2:26:57, 2.02MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.0G/39.9G [00:15<2:27:42, 2.01MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.1G/39.9G [00:20<2:23:48, 2.06MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.1G/39.9G [00:25<2:22:41, 2.08MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.1G/39.9G [00:30<2:22:08, 2.09MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.1G/39.9G [00:35<2:22:43, 2.08MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.1G/39.9G [00:40<2:22:49, 2.07MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.1G/39.9G [00:45<2:21:54, 2.09MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 55%| | 22.1G/39.9G [00:50<2:21:42, 2.09MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 56%| | 22.1G/39.9G [00:55<2:23:39, 2.06MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 56%| | 22.1G/39.9G [01:01<2:25:09, 2.04MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 56%| | 22.1G/39.9G [01:06<2:23:30, 2.06MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 56%| | 22.2G/39.9G [01:11<2:26:31, 2.01MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 56%| | 22.2G/39.9G [01:16<2:26:04, 2.02MB/s]A
()n-Llama-3.1-70B.Q6_K-00001-of-00002.gguf: 56%| | 22.2G/39.9G [01:21<2:25:13, 2.03MB/s]A`;
export default text;

@ -9,6 +9,7 @@ export default function useContainerScroll(
const scroller = useRef(container);
const optionsRef = useRef(options);
const toBottomFlag = useRef(options?.toBottom);
const timerRef = useRef<any>(null);
const debunceResetWheeled = _.debounce(() => {
isWheeled.current = false;
@ -16,6 +17,7 @@ export default function useContainerScroll(
// Wheel handler for the log container: flags that the user scrolled
// manually (consumers of isWheeled presumably pause auto-scroll-to-bottom
// while it is set — confirm against useContainerScroll). Each wheel event
// cancels any pending debounced reset and re-arms it, so the flag clears
// only after wheel activity has been idle for the debounce interval.
const handleContentWheel = (e: any) => {
  isWheeled.current = true;
  debunceResetWheeled.cancel?.();
  debunceResetWheeled();
};

@ -121,10 +121,13 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
[dataSource]
);
// Record the latest query string in a ref (so async callbacks read the
// current value) and trigger the repo search. Memoized with useCallback so
// the handler identity is stable across renders. The diff residue here had
// left the old plain-function declaration alongside this one, which would
// be a duplicate `const` declaration — only the memoized version remains.
const handlerSearchModels = useCallback(
  async (e: any) => {
    searchInputRef.current = e.target.value;
    handleOnSearchRepo();
  },
  [handleOnSearchRepo]
);
const handleOnOpen = () => {
if (

Loading…
Cancel
Save