fix: update evaluation result issue

main
jialin 7 months ago
parent 5f7df3f871
commit 830ee45161

@ -1,4 +1,18 @@
import { atom } from 'jotai';
import { atom, getDefaultStore } from 'jotai';
// models expand keys: create, update, delete
export const modelsExpandKeysAtom = atom<string[]>([]);
export const requestIdAtom = atom<number>(0);
export const setRquestId = () => {
const store = getDefaultStore();
const id = Date.now();
store.set(requestIdAtom, id);
return id;
};
export const getRequestId = () => {
const store = getDefaultStore();
return store.get(requestIdAtom);
};
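
The request id lives in a jotai atom so that plain async helpers, not only React components, can stamp and read it through getDefaultStore. A minimal usage sketch of these helpers; everything around the two imported functions is illustrative, not part of the commit:

import { getRequestId, setRquestId } from '@/atoms/models';

// Non-React code can stamp a new id before starting async work...
const currentId = setRquestId(); // stores Date.now() into requestIdAtom
// ...and later detect whether a newer request was stamped in the meantime.
const isStale = getRequestId() !== currentId;
console.log(isStale); // false until another caller stamps a newer id

// React components can still subscribe with useAtomValue(requestIdAtom) if needed.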

@ -277,6 +277,14 @@ body {
display: none;
}
.ant-pro-base-menu-vertical-item-title {
// height: var(--ant-menu-item-height) !important;
}
.ant-pro-base-menu-vertical-item-title-collapsed {
// height: var(--ant-menu-item-height) !important;
}
.ant-pro-layout {
height: 100vh;
@ -333,12 +341,13 @@ body {
}
// ======== menu style end ============
.ant-menu-submenu-popup {
.ant-menu-sub {
.ant-menu-item-only-child {
height: 40px;
line-height: 40px;
color: var(--ant-color-text);
// color: var(--ant-color-text);
.anticon {
font-size: var(--font-size-middle);

@ -1,3 +1,4 @@
import { getRequestId } from '@/atoms/models';
import ModalFooter from '@/components/modal-footer';
import GSDrawer from '@/components/scroller-modal/gs-drawer';
import { PageActionType } from '@/config/types';
@ -116,7 +117,6 @@ const AddModal: FC<AddModalProps> = (props) => {
warningStatus,
submitAnyway
} = useCheckCompatibility();
const { onSelectModel } = useSelectModel({ gpuOptions: props.gpuOptions });
const form = useRef<any>({});
const intl = useIntl();
@ -132,12 +132,18 @@ const AddModal: FC<AddModalProps> = (props) => {
requestModelId: 0
});
const requestModelIdRef = useRef<number>(0);
const currentSelectedModel = useRef<any>({});
const { run: fetchModelFiles } = useDeferredRequest(
() => modelFileRef.current?.fetchModelFiles?.(),
100
);
const updateSelectedModel = (model: any) => {
currentSelectedModel.current = model;
setSelectedModel(model);
};
/**
* Update the request model id to distinguish
* the evaluation request.
@ -236,10 +242,14 @@ const AddModal: FC<AddModalProps> = (props) => {
);
const handleSelectModelFile = async (item: any, requestModelId: number) => {
if (
evaluateStateRef.current.state !== EvaluateProccess.file ||
requestModelId !== evaluateStateRef.current.requestModelId
) {
console.log(
'handleSelectModelFile:',
item,
requestModelId,
getRequestId(),
evaluateStateRef.current
);
if (requestModelId !== getRequestId()) {
return;
}
form.current?.form?.resetFields(resetFieldsByFile);
@ -251,7 +261,7 @@ const AddModal: FC<AddModalProps> = (props) => {
categories: getCategory(item)
});
console.log('handleSelectModelFile', item);
console.log('handleSelectModelFile>>>>>>>>>>>>', item);
// evaluate the form data when a model file is selected
if (item.fakeName) {
@ -265,7 +275,7 @@ const AddModal: FC<AddModalProps> = (props) => {
};
const handleOnSelectModel = async (item: any) => {
// If the item is empty or the same as the selected model, do nothing
console.log('handleOnSelectModel', item, selectedModel);
handleCancelFiles();
if (
_.isEmpty(item) ||
@ -273,6 +283,7 @@ const AddModal: FC<AddModalProps> = (props) => {
) {
return;
}
console.log('handleOnSelectModel:', item, selectedModel);
setIsGGUF(item.isGGUF);
clearCahceFormValues();
unlockWarningStatus();
@ -280,7 +291,7 @@ const AddModal: FC<AddModalProps> = (props) => {
state: EvaluateProccess.model,
requestModelId: updateRequestModelId()
});
setSelectedModel(item);
updateSelectedModel(item);
form.current?.form?.resetFields(resetFieldsByModel);
const modelInfo = onSelectModel(item, props.source);
@ -306,9 +317,27 @@ const AddModal: FC<AddModalProps> = (props) => {
}
};
const currentModelDuringEvaluate = (item: any) => {
return (
evaluateStateRef.current.state === EvaluateProccess.form &&
item.name === currentSelectedModel.current.name
);
};
const handleOnSelectModelAfterEvaluate = (item: any) => {
console.log(
'handleOnSelectModelAfterEvaluate:',
item.name,
currentSelectedModel.current.name,
warningStatus.type,
currentModelDuringEvaluate(item)
);
if (currentModelDuringEvaluate(item)) {
return;
}
// If the item is empty
setIsGGUF(item.isGGUF);
setSelectedModel(item);
updateSelectedModel(item);
setEvaluteState({
state: EvaluateProccess.model,
requestModelId: updateRequestModelId()
@ -316,12 +345,6 @@ const AddModal: FC<AddModalProps> = (props) => {
handleCancelFiles();
const modelInfo = onSelectModel(item, props.source);
console.log(
'handleOnSelectModelAfterEvaluate',
item,
evaluateStateRef.current
);
if (
evaluateStateRef.current.state === EvaluateProccess.model &&
item.evaluated
@ -329,7 +352,9 @@ const AddModal: FC<AddModalProps> = (props) => {
handleShowCompatibleAlert(item.evaluateResult);
form.current?.setFieldsValue?.({
...getDefaultSpec(item),
...modelInfo,
...(item.name === currentSelectedModel.current.name
? _.omit(modelInfo, ['name'])
: modelInfo),
categories: getCategory(item)
});
}
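
The AddModal changes mirror the selected model into a ref (currentSelectedModel) next to the state setter, so async callbacks that close over stale state can still compare against the latest selection. A condensed sketch of that pattern as a reusable hook; the hook name and generic are illustrative:

import { useRef, useState } from 'react';

// Keep React state (for rendering) and a ref (for async callbacks) in sync.
function useLatestSelection<T>(initial: T) {
  const [selected, setSelected] = useState<T>(initial);
  const latestRef = useRef<T>(initial);

  const update = (value: T) => {
    latestRef.current = value; // readable from inside stale closures
    setSelected(value);        // triggers a re-render
  };

  return { selected, latestRef, update };
}

An async handler can then read latestRef.current to get the most recent selection, no matter which render created its closure.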

@ -1,3 +1,4 @@
import { getRequestId } from '@/atoms/models';
import SimpleOverlay from '@/components/simple-overlay';
import { createAxiosToken } from '@/hooks/use-chunk-request';
import { useIntl } from '@umijs/max';
@ -283,7 +284,7 @@ const HFModelFile: React.FC<HFModelFileProps> = forwardRef((props, ref) => {
handleSelectModelFile({});
return;
}
parentRequestModelId.current = updateEvaluteState?.('file');
parentRequestModelId.current = getRequestId();
checkTokenRef.current?.cancel?.();
axiosTokenRef.current?.abort?.();
axiosTokenRef.current = new AbortController();
@ -292,12 +293,17 @@ const HFModelFile: React.FC<HFModelFileProps> = forwardRef((props, ref) => {
setCurrent('');
try {
let list = [];
const currentParentRequestId = getRequestId();
if (modelSourceMap.huggingface_value === modelSource) {
list = await getHuggingfaceFiles();
} else if (modelSourceMap.modelscope_value === modelSource) {
list = await getModelScopeFiles();
}
if (currentParentRequestId !== getRequestId()) {
return;
}
const newList = generateGroupByFilename(list);
const sortList = _.sortBy(newList, (item: any) => {
return sortType === 'size' ? item.size : item.path;

@ -9,6 +9,7 @@ import {
RightOutlined
} from '@ant-design/icons';
import { useIntl } from '@umijs/max';
import { useBoolean } from 'ahooks';
import { Button, Empty, Spin, Tooltip } from 'antd';
import { some } from 'lodash';
import 'overlayscrollbars/overlayscrollbars.css';
@ -72,6 +73,7 @@ const ModelCard: React.FC<{
loadingModel?: boolean;
modelSource: string;
}> = (props) => {
const [hideMd, { toggle }] = useBoolean();
const { onCollapse, setIsGGUF, collapsed, modelSource } = props;
const intl = useIntl();
const requestSource = useRequestToken();
@ -397,9 +399,20 @@ const ModelCard: React.FC<{
{readmeText && (
<>
<TitleWrapper>
<div className="title">README.md</div>
<div className="flex-center gap-8">
<span className="title">README.md</span>
{/* <Button
onClick={toggle}
size="small"
type="text"
icon={hideMd ? <EyeOutlined /> : <EyeInvisibleOutlined />}
></Button> */}
</div>
</TitleWrapper>
<div className="card-wrapper">
<div
className="card-wrapper"
style={{ width: hideMd ? 0 : 'auto', overflow: 'hidden' }}
>
<MarkdownViewer
generateImgLink={generateModeScopeImgLink}
content={readmeText}
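
The ModelCard hunk adds a useBoolean flag from ahooks and collapses the README pane by zeroing its width (the eye-toggle button itself is still commented out). A minimal sketch of how the toggle would work once enabled, assuming a generic content pane:

import React from 'react';
import { useBoolean } from 'ahooks';
import { Button } from 'antd';
import { EyeInvisibleOutlined, EyeOutlined } from '@ant-design/icons';

const CollapsiblePane: React.FC<{ children: React.ReactNode }> = ({ children }) => {
  const [hidden, { toggle }] = useBoolean(false);
  return (
    <>
      <Button
        size="small"
        type="text"
        onClick={toggle}
        icon={hidden ? <EyeOutlined /> : <EyeInvisibleOutlined />}
      />
      {/* zeroing the width hides the pane but keeps it mounted, so its state survives */}
      <div style={{ width: hidden ? 0 : 'auto', overflow: 'hidden' }}>{children}</div>
    </>
  );
};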

@ -1,3 +1,4 @@
import { getRequestId, setRquestId } from '@/atoms/models';
import { createAxiosToken } from '@/hooks/use-chunk-request';
import { QuestionCircleOutlined } from '@ant-design/icons';
import { useIntl } from '@umijs/max';
@ -68,6 +69,7 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
displayEvaluateStatus,
unlockWarningStatus
} = props;
const [dataSource, setDataSource] = useState<{
repoOptions: any[];
loading: boolean;
@ -95,7 +97,7 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
const filterTaskRef = useRef<string>('');
const timer = useRef<any>(null);
const requestIdRef = useRef<number>(0);
const searchIdRef = useRef<number>(0);
const searchRepoRequestIdRef = useRef<number>(0);
const [query, setQuery] = useState({
page: 1,
perPage: 10,
@ -120,9 +122,9 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
}
]);
const updateSearchId = () => {
searchIdRef.current += 1;
return searchIdRef.current;
const updateSearchRepoRequestId = () => {
searchRepoRequestIdRef.current += 1;
return searchRepoRequestIdRef.current;
};
const updateRequestId = () => {
@ -153,43 +155,32 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
// huggingface
const getModelsFromHuggingface = async (sort: string) => {
const currentSearchId = updateSearchId();
try {
const task: any = searchInputRef.current ? '' : 'text-generation';
const params = {
search: {
query: searchInputRef.current || '',
sort: sort,
tags: filterGGUFRef.current ? ['gguf'] : [],
task: HuggingFaceTaskMap[filterTaskRef.current] || task
}
};
const data = await queryHuggingfaceModels(params, {
signal: axiosTokenRef.current.signal
});
if (searchIdRef.current !== currentSearchId) {
return {
notSameRequest: true
};
}
let list = _.map(data || [], (item: any) => {
return {
...item,
value: item.name,
label: item.name,
isGGUF: checkIsGGUF(item),
source: modelSource
};
});
return list;
} catch (error) {
if (searchIdRef.current !== currentSearchId) {
return {
notSameRequest: true
};
const currentSearchId = setRquestId();
const task: any = searchInputRef.current ? '' : 'text-generation';
const params = {
search: {
query: searchInputRef.current || '',
sort: sort,
tags: filterGGUFRef.current ? ['gguf'] : [],
task: HuggingFaceTaskMap[filterTaskRef.current] || task
}
return [];
};
const data = await queryHuggingfaceModels(params, {
signal: axiosTokenRef.current.signal
});
if (getRequestId() !== currentSearchId) {
throw 'new request has been sent';
}
let list = _.map(data || [], (item: any) => {
return {
...item,
value: item.name,
label: item.name,
isGGUF: checkIsGGUF(item),
source: modelSource
};
});
return list;
};
// modelscope, only modelscope has page and perPage
@ -198,7 +189,7 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
page: number;
perPage?: number;
}) => {
const currentSearchId = updateSearchId();
const currentSearchId = setRquestId();
try {
const params = {
Name: `${searchInputRef.current}`,
@ -213,11 +204,11 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
const data = await queryModelScopeModels(params, {
signal: axiosTokenRef.current.signal
});
if (searchIdRef.current !== currentSearchId) {
return {
notSameRequest: true
};
if (getRequestId() !== currentSearchId) {
throw 'new request has been sent';
}
let list = _.map(_.get(data, 'Data.Model.Models') || [], (item: any) => {
return {
path: item.Path,
@ -249,38 +240,28 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
});
return list;
} catch (error) {
if (searchIdRef.current !== currentSearchId) {
return {
notSameRequest: true
};
}
setQuery((prev) => {
return {
...prev,
page: queryParams.page,
total: 0
page: queryParams.page
};
});
return [];
throw error;
}
};
const getEvaluateResults = async (repoList: any[]) => {
try {
checkTokenRef.current?.cancel?.();
checkTokenRef.current = createAxiosToken();
const evaluations = await evaluationsModelSpec(
{
model_specs: repoList
},
{
token: checkTokenRef.current?.token
}
);
return evaluations.results;
} catch (error) {
return [];
}
checkTokenRef.current?.cancel?.();
checkTokenRef.current = createAxiosToken();
const evaluations = await evaluationsModelSpec(
{
model_specs: repoList
},
{
token: checkTokenRef.current?.token
}
);
return evaluations.results;
};
const handleEvaluate = async (list: any[]) => {
@ -288,6 +269,7 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
return;
}
const currentRequestId = updateRequestId();
const currentSearchId = getRequestId();
try {
const repoList = list.map((item) => {
const res = handleRecognizeAudioModel(item, modelSource);
@ -320,7 +302,11 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
setIsEvaluating(true);
const evaluations = await getEvaluateResults(repoList);
if (requestIdRef.current !== currentRequestId) {
// bind the requestId to the current request and searchId
if (
requestIdRef.current !== currentRequestId &&
currentSearchId !== getRequestId()
) {
return;
}
const resultList = list.map((item, index) => {
@ -349,6 +335,7 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
onSelectModelAfterEvaluate(currentItem);
}
} catch (error) {
// cancel the corresponding request
if (requestIdRef.current === currentRequestId) {
setIsEvaluating(false);
}
@ -369,7 +356,8 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
if (!SUPPORTEDSOURCE.includes(modelSource)) {
return;
}
axiosTokenRef.current?.abort?.('new request');
const currentSearchId = updateSearchRepoRequestId();
axiosTokenRef.current?.abort?.('cancel previous request');
axiosTokenRef.current = new AbortController();
checkTokenRef.current?.cancel?.();
if (timer.current) {
@ -386,9 +374,7 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
let list: any[] = [];
if (modelSource === modelSourceMap.huggingface_value) {
const resultList = await getModelsFromHuggingface(sort);
if (resultList?.notSameRequest) {
return;
}
cacheRepoOptions.current = resultList;
// hf has no page and perPage, so we need to slice the resultList
@ -402,9 +388,7 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
});
} else if (modelSource === modelSourceMap.modelscope_value) {
list = await getModelsFromModelscope(params);
if (list?.notSameRequest) {
return;
}
console.log('list:', list);
cacheRepoOptions.current = list;
}
@ -423,12 +407,12 @@ const SearchModel: React.FC<SearchInputProps> = (props) => {
console.log('error:', error);
setDataSource({
repoOptions: [],
loading: false,
loading: currentSearchId !== searchRepoRequestIdRef.current,
sortType: sort,
networkError: error?.message === 'Failed to fetch'
});
setLoadingModel?.(false);
setLoadingModel?.(currentSearchId !== searchRepoRequestIdRef.current);
displayEvaluateStatus?.({
show: false,
message: ''
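
The SearchModel changes follow one pattern throughout: abort the previous HTTP request, stamp a fresh request id, and after the await compare that stamp with the current id so a late response from a superseded search is discarded. A condensed sketch of that flow with an illustrative fetcher; searchRepos is not a function from the commit:

import { getRequestId, setRquestId } from '@/atoms/models';

let abortCtrl: AbortController | null = null;

const searchRepos = async <T,>(
  fetcher: (signal: AbortSignal) => Promise<T[]>
): Promise<T[]> => {
  abortCtrl?.abort('cancel previous request'); // cancel the in-flight search
  abortCtrl = new AbortController();
  const currentId = setRquestId();             // stamp this search

  const list = await fetcher(abortCtrl.signal);

  if (getRequestId() !== currentId) {
    // a newer search was stamped while this one was in flight
    throw new Error('new request has been sent');
  }
  return list;
};

The component additionally keeps searchRepoRequestIdRef so that, on error, the loading flags are only cleared when the failing request is still the latest one.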

@ -502,7 +502,10 @@ export const useCheckCompatibility = () => {
};
};
const handleDoEvalute = async (formData: FormData) => {
const handleDoEvalute = async (
formData: FormData,
evaluateProccess?: 'model' | 'file' | 'form'
) => {
const currentRequestId = updateRequestId();
const evalutionData = await handleEvaluate(formData);
