chore: watch request counts

main
jialin 1 year ago
parent fb0a2798e7
commit 9f40e2c71e

@ -90,6 +90,7 @@ export default defineConfig({
model: {},
initialState: {},
request: {},
keepalive: ['/playground/text-to-image'],
locale: {
antd: true,
baseNavigator: true,

@ -0,0 +1,17 @@
import { atom, getDefaultStore } from 'jotai';
export const routeCacheAtom = atom<any>(new Map());
export const setRouteCache = (key: string, value: any) => {
const store = getDefaultStore();
const cache = store.get(routeCacheAtom);
cache.set(key, value);
store.set(routeCacheAtom, cache);
};
export const deleteRouteCache = (key: string) => {
const store = getDefaultStore();
const cache = store.get(routeCacheAtom);
cache.delete(key);
store.set(routeCacheAtom, cache);
};
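A minimal usage sketch of the route-cache atom above, mirroring the pattern the playground pages adopt later in this commit: mark the route as busy before a long-running generation and clear the flag in finally, so the layout will not drop its keep-alive cache while work is in flight. The generateImages call is a hypothetical placeholder.

import { setRouteCache } from '@/atoms/route-cache';
import routeCachekey from '@/config/route-cachekey';

// Hypothetical long-running request, used only for illustration.
declare function generateImages(params: Record<string, any>): Promise<void>;

export const submitGeneration = async (params: Record<string, any>) => {
  // Mark the route as busy so dropRouteCache keeps it cached.
  setRouteCache(routeCachekey.playgroundTextToImage, true);
  try {
    await generateImages(params);
  } finally {
    // Idle again; the route may be dropped on the next navigation.
    setRouteCache(routeCachekey.playgroundTextToImage, false);
  }
};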

@ -0,0 +1,45 @@
interface WatchRequest {
id: number;
token: any;
cancel: () => void;
}
declare global {
interface Window {
__GPUSTACK_WATCH_REQUEST_CLEAR__: {
watchIDValue: number;
requestList: WatchRequest[];
};
}
}
window.__GPUSTACK_WATCH_REQUEST_CLEAR__ = {
watchIDValue: 0,
requestList: []
};
export const updateWatchIDValue = () => {
window.__GPUSTACK_WATCH_REQUEST_CLEAR__.watchIDValue =
window.__GPUSTACK_WATCH_REQUEST_CLEAR__.watchIDValue + 1;
return window.__GPUSTACK_WATCH_REQUEST_CLEAR__.watchIDValue;
};
export const updateWatchRequest = (watchToken: WatchRequest) => {
window.__GPUSTACK_WATCH_REQUEST_CLEAR__.requestList.push(watchToken);
};
export const cancelWatchRequest = (n: number) => {
// cancel the oldest n requests and drop them from the list
const requestList = window.__GPUSTACK_WATCH_REQUEST_CLEAR__.requestList;
for (let i = 0; i < n; i++) {
requestList[i]?.cancel?.();
}
window.__GPUSTACK_WATCH_REQUEST_CLEAR__.requestList = requestList.slice(n);
};
export const clearWatchRequestId = (id: number) => {
const requestList = window.__GPUSTACK_WATCH_REQUEST_CLEAR__.requestList;
const newRequestList = requestList.filter((item) => item.id !== id);
window.__GPUSTACK_WATCH_REQUEST_CLEAR__.requestList = newRequestList;
};
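A rough sketch of how the registry above is meant to be driven (the watch URL and the cap of four are illustrative; the real wiring lives in use-chunk-request below): each streaming watch request registers a cancellable token, deregisters itself when cancelled, and the oldest entries are trimmed when too many connections are open.

import axios from 'axios';
import {
  cancelWatchRequest,
  clearWatchRequestId,
  updateWatchIDValue,
  updateWatchRequest
} from '@/atoms/watch-request';

// Illustrative helper: open a watch request and register it globally.
export const startWatch = (url: string) => {
  const source = axios.CancelToken.source();
  const id = updateWatchIDValue();

  const entry = {
    id,
    token: source.token,
    cancel() {
      source.cancel();
      clearWatchRequestId(id); // remove the settled entry from the registry
    }
  };
  updateWatchRequest(entry);

  // Cap the number of open watch connections; cancel the oldest extras.
  const { requestList } = window.__GPUSTACK_WATCH_REQUEST_CLEAR__;
  if (requestList.length > 4) {
    cancelWatchRequest(requestList.length - 4);
  }

  axios.get(url, { cancelToken: source.token }).catch(() => {
    clearWatchRequestId(id); // canceled or failed
  });

  return entry;
};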

@ -0,0 +1,3 @@
export default {
playgroundTextToImage: '/playground/text-to-image'
};

@ -1,3 +1,9 @@
import {
cancelWatchRequest,
clearWatchRequestId,
updateWatchIDValue,
updateWatchRequest
} from '@/atoms/watch-request';
import { WatchEventType } from '@/config';
import { request } from '@umijs/max';
import axios from 'axios';
@ -106,6 +112,7 @@ export const sliceData = (data: string, loaded: number, loadedSize: any) => {
};
const useSetChunkRequest = () => {
const watchRequestList = window.__GPUSTACK_WATCH_REQUEST_CLEAR__.requestList;
const [requestReadyState, setRequestReadyState] = useState(3);
const axiosToken = useRef<any>(null);
const requestConfig = useRef<any>({});
@ -124,17 +131,21 @@ const useSetChunkRequest = () => {
setRequestReadyState(3);
};
const resetResultSchema = (result: any[]) => {
// ============ handle list data ============
// return _.map(result, (data: any) => {
// if (data.type === WatchEventType.DELETE) {
// data.ids = _.map(data.items, (item: any) => item.id);
// }
// data.collection = data.items || [];
// return data;
// });
// ============ handle list data ============
const createAxiosToken = () => {
const { CancelToken } = axios;
const source = CancelToken.source();
const watchID = updateWatchIDValue();
return {
id: watchID,
token: source.token,
cancel() {
source.cancel();
clearWatchRequestId(watchID);
}
};
};
const resetResultSchema = (result: any[]) => {
return _.map(result, (data: any) => {
if (data.type === WatchEventType.DELETE) {
data.ids = data.data?.id ? [data.data.id] : [];
@ -154,6 +165,12 @@ const useSetChunkRequest = () => {
reset();
axiosToken.current?.cancel?.();
axiosToken.current = createAxiosToken();
updateWatchRequest(axiosToken.current);
if (watchRequestList.length >= 4) {
cancelWatchRequest(watchRequestList.length - 4 || 1);
}
try {
const { request: requestData } = await request(url, {
params: {
@ -243,7 +260,8 @@ const useSetChunkRequest = () => {
}, [requestReadyState]);
return {
setChunkRequest
setChunkRequest,
createAxiosToken
};
};
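With createAxiosToken now returned alongside setChunkRequest, a caller can obtain a registered, cancellable token outside the hook's internal request loop. A minimal sketch, assuming an illustrative watch endpoint; the token shape ({ id, token, cancel }) is the one defined above.

import useSetChunkRequest from '@/hooks/use-chunk-request';
import axios from 'axios';
import React, { useEffect, useRef } from 'react';

const Watcher: React.FC = () => {
  const { createAxiosToken } = useSetChunkRequest();
  const tokenRef = useRef<ReturnType<typeof createAxiosToken> | null>(null);

  useEffect(() => {
    // The token is registered in the global watch-request list;
    // cancel() aborts the request and deregisters it.
    tokenRef.current = createAxiosToken();
    axios
      .get('/v1/models?watch=true', { cancelToken: tokenRef.current.token })
      .catch(() => {
        // canceled or failed
      });

    return () => {
      tokenRef.current?.cancel();
    };
  }, []);

  return null;
};

export default Watcher;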

@ -1,5 +1,6 @@
// @ts-nocheck
import { routeCacheAtom } from '@/atoms/route-cache';
import { GPUStackVersionAtom, UpdateCheckAtom, userAtom } from '@/atoms/user';
import ShortCuts, {
modalConfig as ShortCutsConfig
@ -14,6 +15,7 @@ import { ProLayout } from '@ant-design/pro-components';
import {
Link,
Outlet,
dropByCacheKey,
history,
matchRoutes,
useAppData,
@ -94,6 +96,7 @@ export default (props: any) => {
const { initialize: initialize } = useOverlayScroller();
const { initialize: initializeMenu } = useOverlayScroller();
const [userInfo] = useAtom(userAtom);
const [routeCache] = useAtom(routeCacheAtom);
const [version] = useAtom(GPUStackVersionAtom);
const [updateCheck] = useAtom(UpdateCheckAtom);
const location = useLocation();
@ -103,6 +106,8 @@ export default (props: any) => {
const [collapsed, setCollapsed] = useState(false);
const [collapseValue, setCollapseValue] = useState(false);
console.log('routeCache========', routeCache);
const initialInfo = (useModel && useModel('@@initialState')) || {
initialState: undefined,
loading: false,
@ -135,6 +140,15 @@ export default (props: any) => {
});
};
const dropRouteCache = (pathname) => {
console.log('routeCache.keys()========', routeCache.keys());
for (let key of routeCache.keys()) {
if (key !== pathname && !routeCache.get(key)) {
dropByCacheKey(key);
routeCache.delete(key);
}
}
};
const runtimeConfig = {
...initialInfo,
logout: async (userInfo) => {
@ -156,7 +170,7 @@ export default (props: any) => {
};
const newRoutes = filterRoutes(
clientRoutes.filter((route) => route.id === '@@/global-layout'),
clientRoutes.filter((route) => route.id === 'max-tabs'),
(route) => {
return (
(!!route.isLayout && route.id !== '@@/global-layout') ||
@ -307,6 +321,7 @@ export default (props: any) => {
: '/playground';
history.push(pathname);
}
dropRouteCache(pathname);
}}
formatMessage={formatMessage}
menu={{

@ -85,6 +85,6 @@ export default {
'vLLM: For non-GGUF format models, supports x86 Linux only.',
'models.form.backend.voxbox': 'vox-box: For non-GGUF format audio models.',
'models.form.search.gguftips':
'If using Mac or Windows as a worker, check GGUF (uncheck for audio models).',
'If using macOS or Windows as a worker, check GGUF (uncheck for audio models).',
'models.form.button.addlabel': 'Add Label'
};

@ -81,6 +81,6 @@ export default {
'models.form.backend.vllm': 'vLLM: 用于非 GGUF 格式模型,仅支持 x86 Linux',
'models.form.backend.voxbox': 'vox-box: 用于非 GGUF 格式的音频模型',
'models.form.search.gguftips':
'当 Mac 或 Windows 作 worker 时勾选 GGUF,搜索音频模型时取消勾选',
'当 macOS 或 Windows 作 worker 时勾选 GGUF,搜索音频模型时取消勾选',
'models.form.button.addlabel': '添加标签'
};

@ -58,6 +58,8 @@ interface ModelsProps {
handleShowSizeChange?: (page: number, size: number) => void;
handlePageChange: (page: number, pageSize: number | undefined) => void;
handleDeleteSuccess: () => void;
onViewLogs: () => void;
onCancelViewLogs: () => void;
queryParams: {
page: number;
perPage: number;
@ -106,6 +108,8 @@ const Models: React.FC<ModelsProps> = ({
handleSearch,
handlePageChange,
handleDeleteSuccess,
onViewLogs,
onCancelViewLogs,
dataSource,
gpuDeviceList,
workerList,
@ -400,6 +404,7 @@ const Models: React.FC<ModelsProps> = ({
const handleLogModalCancel = useCallback(() => {
setOpenLogModal(false);
onCancelViewLogs();
}, []);
const handleDelete = async (row: any) => {
@ -454,31 +459,38 @@ const Models: React.FC<ModelsProps> = ({
navigate(`/playground/chat?model=${row.name}`);
};
const handleViewLogs = async (row: any) => {
try {
setCurrentInstance({
url: `${MODEL_INSTANCE_API}/${row.id}/logs`,
status: row.state,
id: row.id,
modelId: row.model_id,
tail: InstanceRealLogStatus.includes(row.state) ? undefined : PageSize
});
setOpenLogModal(true);
} catch (error) {
console.log('error:', error);
}
};
const handleDeleteInstace = (row: any, list: ModelInstanceListItem[]) => {
modalRef.current.show({
content: 'models.instances',
okText: 'common.button.delrecreate',
operation: 'common.delete.single.confirm',
name: row.name,
async onOk() {
await deleteModelInstance(row.id);
const handleViewLogs = useCallback(
async (row: any) => {
try {
setCurrentInstance({
url: `${MODEL_INSTANCE_API}/${row.id}/logs`,
status: row.state,
id: row.id,
modelId: row.model_id,
tail: InstanceRealLogStatus.includes(row.state) ? undefined : PageSize
});
setOpenLogModal(true);
onViewLogs();
} catch (error) {
console.log('error:', error);
}
});
};
},
[onViewLogs]
);
const handleDeleteInstace = useCallback(
(row: any, list: ModelInstanceListItem[]) => {
modalRef.current.show({
content: 'models.instances',
okText: 'common.button.delrecreate',
operation: 'common.delete.single.confirm',
name: row.name,
async onOk() {
await deleteModelInstance(row.id);
}
});
},
[deleteModelInstance]
);
const getModelInstances = async (row: any) => {
const params = {
@ -539,7 +551,7 @@ const Models: React.FC<ModelsProps> = ({
handleViewLogs(row);
}
},
[]
[handleViewLogs, handleDeleteInstace]
);
const renderModelTags = useCallback(
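handleViewLogs and handleDeleteInstace are now memoized and listed in the action callback's dependencies. A generic sketch of why that matters (names are illustrative, not the component's real props): omitting a memoized handler from the dependency list would leave the dispatcher calling the closure captured on the first render.

import React, { useCallback } from 'react';

const Example: React.FC<{ onViewLogs: () => void }> = ({ onViewLogs }) => {
  // Recreated whenever onViewLogs changes, so it never calls a stale prop.
  const handleViewLogs = useCallback(
    (row: any) => {
      console.log('view logs for', row.id);
      onViewLogs();
    },
    [onViewLogs]
  );

  // The dispatcher lists handleViewLogs as a dependency; dropping it would
  // freeze the version created on the first render.
  const handleSelect = useCallback(
    (action: string, row: any) => {
      if (action === 'viewlog') {
        handleViewLogs(row);
      }
    },
    [handleViewLogs]
  );

  return (
    <button onClick={() => handleSelect('viewlog', { id: 1 })}>logs</button>
  );
};

export default Example;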

@ -1,9 +1,7 @@
import TableContext from '@/components/seal-table/table-context';
import useSetChunkRequest, {
createAxiosToken
} from '@/hooks/use-chunk-request';
import useSetChunkRequest from '@/hooks/use-chunk-request';
import useUpdateChunkedList from '@/hooks/use-update-chunk-list';
import { queryGpuDevicesList, queryWorkersList } from '@/pages/resources/apis';
import { queryWorkersList } from '@/pages/resources/apis';
import {
GPUDeviceItem,
ListItem as WokerListItem
@ -17,7 +15,7 @@ import { ListItem } from './config/types';
const Models: React.FC = () => {
console.log('model list====1');
const { setChunkRequest } = useSetChunkRequest();
const { setChunkRequest, createAxiosToken } = useSetChunkRequest();
const { setChunkRequest: setModelInstanceChunkRequest } =
useSetChunkRequest();
const [modelInstances, setModelInstances] = useState<any[]>([]);
@ -45,24 +43,16 @@ const Models: React.FC = () => {
const { updateChunkedList, cacheDataListRef } = useUpdateChunkedList({
dataList: dataSource.dataList,
setDataList(list) {
setDataSource({
total: dataSource.total,
loading: false,
dataList: list
setDataSource((pre) => {
return {
total: pre.total,
loading: false,
dataList: list
};
});
}
});
const getDeviceList = async () => {
try {
const data = await queryGpuDevicesList({ page: 1, perPage: 100 });
const gpuDeviceMap = _.groupBy(data.items, 'worker_name');
setGpuDeviceList(data.items || []);
} catch (error) {
// ignore
}
};
const getWorkerList = async () => {
try {
const data = await queryWorkersList({ page: 1, perPage: 100 });
@ -98,7 +88,7 @@ const Models: React.FC = () => {
loading: false,
total: dataSource.total
});
console.log('error', error);
console.log('error+++', error);
} finally {
setFirstLoad(false);
}
@ -122,7 +112,6 @@ const Models: React.FC = () => {
};
const updateInstanceHandler = (list: any) => {
console.log('updateInstanceHandler=====', list);
setModelInstances(list);
};
@ -170,19 +159,16 @@ const Models: React.FC = () => {
[queryParams]
);
useEffect(() => {
if (!firstLoad) {
setTimeout(() => {
createModelsChunkRequest();
createModelsInstanceChunkRequest();
}, 100);
}
return () => {
chunkRequedtRef.current?.current?.cancel?.();
cacheDataListRef.current = [];
chunkInstanceRequedtRef.current?.current?.cancel?.();
};
}, [firstLoad]);
const handleOnViewLogs = useCallback(() => {
chunkRequedtRef.current?.current?.cancel?.();
cacheDataListRef.current = [];
chunkInstanceRequedtRef.current?.current?.cancel?.();
}, []);
const handleOnCancelViewLogs = useCallback(() => {
createModelsChunkRequest();
createModelsInstanceChunkRequest();
}, []);
useEffect(() => {
fetchData();
@ -193,8 +179,37 @@ const Models: React.FC = () => {
useEffect(() => {
getWorkerList();
return () => {
chunkRequedtRef.current?.current?.cancel?.();
cacheDataListRef.current = [];
chunkInstanceRequedtRef.current?.current?.cancel?.();
};
}, []);
useEffect(() => {
if (!firstLoad) {
setTimeout(() => {
createModelsChunkRequest();
createModelsInstanceChunkRequest();
}, 100);
document.addEventListener('visibilitychange', () => {
if (document.visibilityState === 'visible') {
createModelsChunkRequest();
createModelsInstanceChunkRequest();
} else {
chunkRequedtRef.current?.current?.cancel?.();
cacheDataListRef.current = [];
chunkInstanceRequedtRef.current?.current?.cancel?.();
}
});
}
return () => {
document.removeEventListener('visibilitychange', () => {});
};
}, [firstLoad]);
return (
<TableContext.Provider
value={{
@ -207,6 +222,8 @@ const Models: React.FC = () => {
handleSearch={handleSearch}
handlePageChange={handlePageChange}
handleDeleteSuccess={fetchData}
onViewLogs={handleOnViewLogs}
onCancelViewLogs={handleOnCancelViewLogs}
queryParams={queryParams}
loading={dataSource.loading}
total={dataSource.total}
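One caveat with the visibilitychange effect above: removeEventListener is passed a new arrow function, so the original listener is never detached when the effect re-runs or the page unmounts. A sketch of the same pause/resume behaviour with a cleanup that works, using hypothetical helpers in place of the page's own create/cancel calls:

import { useEffect } from 'react';

// Hypothetical stand-ins for the page's chunk-request create/cancel logic.
declare function resumeWatchRequests(): void;
declare function pauseWatchRequests(): void;

export const useVisibilityWatch = (enabled: boolean) => {
  useEffect(() => {
    if (!enabled) return;

    // Keep one handler reference so cleanup can detach the same function.
    const onVisibilityChange = () => {
      if (document.visibilityState === 'visible') {
        resumeWatchRequests();
      } else {
        pauseWatchRequests();
      }
    };

    document.addEventListener('visibilitychange', onVisibilityChange);
    return () => {
      document.removeEventListener('visibilitychange', onVisibilityChange);
    };
  }, [enabled]);
};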

@ -114,9 +114,9 @@ const AudioInput: React.FC<AudioInputProps> = (props) => {
audioStream.current?.getTracks().forEach((track: any) => {
track.stop();
});
scriptProcessor.current.disconnect();
mediaStreamSource.current.disconnect();
audioContext.current.close();
scriptProcessor.current?.disconnect();
mediaStreamSource.current?.disconnect();
audioContext.current?.close();
};
const handleStopRecording = () => {

@ -1,7 +1,9 @@
import { setRouteCache } from '@/atoms/route-cache';
import AlertInfo from '@/components/alert-info';
import IconFont from '@/components/icon-font';
import FieldComponent from '@/components/seal-form/field-component';
import SealSelect from '@/components/seal-form/seal-select';
import routeCachekey from '@/config/route-cachekey';
import useOverlayScroller from '@/hooks/use-overlay-scroller';
import ThumbImg from '@/pages/playground/components/thumb-img';
import { generateRandomNumber } from '@/utils';
@ -208,6 +210,7 @@ const GroundImages: React.FC<MessageProps> = forwardRef((props, ref) => {
setMessageId();
setTokenResult(null);
setCurrentPrompt(current?.content || '');
setRouteCache(routeCachekey.playgroundTextToImage, true);
const imgSize = _.split(finalParameters.size, 'x');
// preview
@ -237,7 +240,7 @@ const GroundImages: React.FC<MessageProps> = forwardRef((props, ref) => {
height: imgSize[1],
width: imgSize[0],
loading: true,
progressType: stream_options.chunk_results ? 'dashboard' : 'line',
progressType: 'dashboard',
preview: false,
uid: setMessageId()
};
@ -316,6 +319,7 @@ const GroundImages: React.FC<MessageProps> = forwardRef((props, ref) => {
setImageList([]);
} finally {
setLoading(false);
setRouteCache(routeCachekey.playgroundTextToImage, false);
}
};
const handleClear = () => {

@ -1,9 +1,11 @@
import { setRouteCache } from '@/atoms/route-cache';
import AlertInfo from '@/components/alert-info';
import SingleImage from '@/components/auto-image/single-image';
import IconFont from '@/components/icon-font';
import CanvasImageEditor from '@/components/image-editor';
import FieldComponent from '@/components/seal-form/field-component';
import SealSelect from '@/components/seal-form/seal-select';
import routeCachekey from '@/config/route-cachekey';
import useOverlayScroller from '@/hooks/use-overlay-scroller';
import UploadImg from '@/pages/playground/components/upload-img';
import { base64ToFile, generateRandomNumber } from '@/utils';
@ -218,6 +220,7 @@ const GroundImages: React.FC<MessageProps> = forwardRef((props, ref) => {
setMessageId();
setTokenResult(null);
setCurrentPrompt(current?.content || '');
setRouteCache(routeCachekey.playgroundTextToImage, true);
const imgSize = _.split(finalParameters.size, 'x');
@ -248,7 +251,7 @@ const GroundImages: React.FC<MessageProps> = forwardRef((props, ref) => {
height: imgSize[1],
width: imgSize[0],
loading: true,
progressType: stream_options.chunk_results ? 'dashboard' : 'line',
progressType: 'dashboard',
preview: false,
uid: setMessageId()
};
@ -275,7 +278,7 @@ const GroundImages: React.FC<MessageProps> = forwardRef((props, ref) => {
const result: any = await fetchChunkedData({
data: params,
url: `http://192.168.50.4:40325/v1/images/edits?t=${Date.now()}`,
url: `${EDIT_IMAGE_API}?t=${Date.now()}`,
signal: requestToken.current.signal
});
if (result.error) {
@ -327,6 +330,7 @@ const GroundImages: React.FC<MessageProps> = forwardRef((props, ref) => {
setImageList([]);
} finally {
setLoading(false);
setRouteCache(routeCachekey.playgroundTextToImage, false);
}
};
const handleClear = () => {

@ -5,7 +5,7 @@ import useWindowResize from '@/hooks/use-window-resize';
import { DiffOutlined, HighlightOutlined } from '@ant-design/icons';
import { PageContainer } from '@ant-design/pro-components';
import { useIntl } from '@umijs/max';
import { Button, Segmented, Space, Tabs, TabsProps } from 'antd';
import { Button, Space, Tabs, TabsProps } from 'antd';
import classNames from 'classnames';
import _ from 'lodash';
import { useCallback, useEffect, useRef, useState } from 'react';
@ -155,14 +155,14 @@ const TextToImages: React.FC = () => {
<span className="font-600">
{intl.formatMessage({ id: 'menu.playground.text2images' })}
</span>
{
{/* {
<Segmented
options={optionsList}
size="middle"
className="m-l-40"
onChange={(key) => setActiveKey(key)}
></Segmented>
}
} */}
</div>
),
breadcrumb: {}
