Fixed duplicated notifications for automatic annotation (#7595)
klakhov authored Mar 27, 2024
1 parent aec333d commit f5dae55
Showing 6 changed files with 56 additions and 18 deletions.
@@ -0,0 +1,4 @@
+### Fixed
+
+- Duplicated notifications for automatic annotation
+  (<https://github.com/opencv/cvat/pull/7595>)
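The fix targets listener duplication: as the diff below shows, getInferenceStatusAsync in cvat-ui/src/actions/models-actions.ts previously called listen() for every active lambda request each time it ran, so repeated polling attached the same status callback more than once and each update produced duplicated notifications. The commit records which request IDs already have a listener and skips them. A minimal, self-contained sketch of that pattern (the names are illustrative, not the actual CVAT implementation):

// Sketch of the deduplication pattern this commit introduces; illustrative names only.
type AttachListener = (requestID: string) => void;

const listenedRequestIDs: Record<string, boolean> = {};

function listenOnce(requestID: string, attach: AttachListener): void {
    if (requestID in listenedRequestIDs) {
        // A callback is already registered for this request; skip it so that
        // repeated polling cannot produce duplicated notifications.
        return;
    }
    listenedRequestIDs[requestID] = true;
    attach(requestID);
}

// The second call for the same request is a no-op:
listenOnce('req-42', (id) => console.log(`listening to ${id}`));
listenOnce('req-42', (id) => console.log(`listening to ${id}`)); // skipped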
2 changes: 1 addition & 1 deletion cvat-core/src/lambda-manager.ts
@@ -106,7 +106,7 @@ class LambdaManager {

    async listen(
        requestID: string,
-        functionID: string,
+        functionID: string | number,
        callback: (status: RQStatus, progress: number, message?: string) => void,
    ): Promise<void> {
        const model = this.cachedList.find((_model) => _model.id === functionID);
2 changes: 1 addition & 1 deletion cvat-ui/package.json
@@ -1,6 +1,6 @@
{
  "name": "cvat-ui",
-  "version": "1.63.3",
+  "version": "1.63.4",
  "description": "CVAT single-page application",
  "main": "src/index.tsx",
  "scripts": {
30 changes: 22 additions & 8 deletions cvat-ui/src/actions/models-actions.ts
@@ -1,5 +1,5 @@
// Copyright (C) 2020-2022 Intel Corporation
-// Copyright (C) 2022-2023 CVAT.ai Corporation
+// Copyright (C) 2022-2024 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT

@@ -20,6 +20,7 @@ export enum ModelsActionTypes {
    DELETE_MODEL = 'DELETE_MODEL',
    DELETE_MODEL_SUCCESS = 'DELETE_MODEL_SUCCESS',
    DELETE_MODEL_FAILED = 'DELETE_MODEL_FAILED',
+    GET_INFERENCES_SUCCESS = 'GET_INFERENCES_SUCCESS',
    START_INFERENCE_FAILED = 'START_INFERENCE_FAILED',
    GET_INFERENCE_STATUS_SUCCESS = 'GET_INFERENCE_STATUS_SUCCESS',
    GET_INFERENCE_STATUS_FAILED = 'GET_INFERENCE_STATUS_FAILED',
@@ -45,6 +46,9 @@ export const modelsActions = {
        error,
    }),
    fetchMetaFailed: (error: any) => createAction(ModelsActionTypes.FETCH_META_FAILED, { error }),
+    getInferencesSuccess: (requestedInferenceIDs: Record<string, boolean>) => (
+        createAction(ModelsActionTypes.GET_INFERENCES_SUCCESS, { requestedInferenceIDs })
+    ),
    getInferenceStatusSuccess: (taskID: number, activeInference: ActiveInference) => (
        createAction(ModelsActionTypes.GET_INFERENCE_STATUS_SUCCESS, {
            taskID,
@@ -64,9 +68,10 @@ export const modelsActions = {
            error,
        })
    ),
-    cancelInferenceSuccess: (taskID: number) => (
+    cancelInferenceSuccess: (taskID: number, activeInference: ActiveInference) => (
        createAction(ModelsActionTypes.CANCEL_INFERENCE_SUCCESS, {
            taskID,
+            activeInference,
        })
    ),
    cancelInferenceFailed: (taskID: number, error: any) => (
@@ -119,8 +124,9 @@ interface InferenceMeta {

function listen(inferenceMeta: InferenceMeta, dispatch: (action: ModelsActions) => void): void {
    const { taskID, requestID, functionID } = inferenceMeta;
+
    core.lambda
-        .listen(requestID, functionID, (status: RQStatus, progress: number, message: string) => {
+        .listen(requestID, functionID, (status: RQStatus, progress: number, message?: string) => {
            if (status === RQStatus.FAILED || status === RQStatus.UNKNOWN) {
                dispatch(
                    modelsActions.getInferenceStatusFailed(
@@ -129,7 +135,7 @@ function listen(inferenceMeta: InferenceMeta, dispatch: (action: ModelsActions)
                            status,
                            progress,
                            functionID,
-                            error: message,
+                            error: message as string,
                            id: requestID,
                        },
                        new Error(`Inference status for the task ${taskID} is ${status}. ${message}`),
@@ -144,7 +150,7 @@ function listen(inferenceMeta: InferenceMeta, dispatch: (action: ModelsActions)
                        status,
                        progress,
                        functionID,
-                        error: message,
+                        error: message as string,
                        id: requestID,
                    }),
                );
@@ -163,22 +169,29 @@ function listen(inferenceMeta: InferenceMeta, dispatch: (action: ModelsActions)
}

export function getInferenceStatusAsync(): ThunkAction {
-    return async (dispatch): Promise<void> => {
+    return async (dispatch, getState): Promise<void> => {
        const dispatchCallback = (action: ModelsActions): void => {
            dispatch(action);
        };

+        const { requestedInferenceIDs } = getState().models;
+
        try {
            const requests = await core.lambda.requests();
+            const newListenedIDs: Record<string, boolean> = {};
            requests
                .map((request: any): object => ({
                    taskID: +request.function.task,
                    requestID: request.id,
                    functionID: request.function.id,
                }))
                .forEach((inferenceMeta: InferenceMeta): void => {
-                    listen(inferenceMeta, dispatchCallback);
+                    if (!(inferenceMeta.requestID in requestedInferenceIDs)) {
+                        listen(inferenceMeta, dispatchCallback);
+                        newListenedIDs[inferenceMeta.requestID] = true;
+                    }
                });
+            dispatch(modelsActions.getInferencesSuccess(newListenedIDs));
        } catch (error) {
            dispatch(modelsActions.fetchMetaFailed(error));
        }
@@ -201,6 +214,7 @@ export function startInferenceAsync(taskId: number, model: MLModel, body: object
                },
                dispatchCallback,
            );
+            dispatch(modelsActions.getInferencesSuccess({ [requestID]: true }));
        } catch (error) {
            dispatch(modelsActions.startInferenceFailed(taskId, error));
        }
@@ -212,7 +226,7 @@ export function cancelInferenceAsync(taskID: number): ThunkAction {
        try {
            const inference = getState().models.inferences[taskID];
            await core.lambda.cancel(inference.id, inference.functionID);
-            dispatch(modelsActions.cancelInferenceSuccess(taskID));
+            dispatch(modelsActions.cancelInferenceSuccess(taskID, inference));
        } catch (error) {
            dispatch(modelsActions.cancelInferenceFailed(taskID, error));
        }
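With the requestedInferenceIDs guard above, re-dispatching getInferenceStatusAsync (for example from a periodic poll) no longer attaches a second listener to requests that are already tracked. A hypothetical polling helper, purely illustrative and not part of this commit:

// Hypothetical polling helper (not from the CVAT codebase): it only re-dispatches
// the thunk on an interval; the guard inside getInferenceStatusAsync makes the
// repeated dispatches safe.
import { AnyAction, Store } from 'redux';
import { getInferenceStatusAsync } from 'actions/models-actions';

export function startInferencePolling(store: Store, intervalMs = 10000): () => void {
    const timer = setInterval(() => {
        // The cast only satisfies the plain Store typing in this sketch;
        // a thunk-aware dispatch would accept the thunk directly.
        store.dispatch(getInferenceStatusAsync() as unknown as AnyAction);
    }, intervalMs);

    return () => clearInterval(timer); // call the returned function to stop polling
}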
3 changes: 3 additions & 0 deletions cvat-ui/src/reducers/index.ts
@@ -414,6 +414,9 @@ export interface ModelsState {
    reid: MLModel[];
    classifiers: MLModel[];
    totalCount: number;
+    requestedInferenceIDs: {
+        [index: string]: boolean;
+    };
    inferences: {
        [index: number]: ActiveInference;
    };
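The new requestedInferenceIDs map sits next to inferences in ModelsState. A hypothetical selector (not part of this commit) showing how it might be read; CombinedState is assumed here to be the root state type exported from this reducers module:

// Hypothetical selector: checks whether a lambda request already has a listener.
import { CombinedState } from 'reducers';

export function isRequestListened(state: CombinedState, requestID: string): boolean {
    return Boolean(state.models.requestedInferenceIDs[requestID]);
}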
33 changes: 25 additions & 8 deletions cvat-ui/src/reducers/models-reducer.ts
@@ -1,8 +1,9 @@
// Copyright (C) 2020-2022 Intel Corporation
-// Copyright (C) 2022-2023 CVAT.ai Corporation
+// Copyright (C) 2022-2024 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT

+import { omit } from 'lodash';
import { BoundariesActions, BoundariesActionTypes } from 'actions/boundaries-actions';
import { ModelsActionTypes, ModelsActions } from 'actions/models-actions';
import { AuthActionTypes, AuthActions } from 'actions/auth-actions';
@@ -20,6 +21,7 @@ const defaultState: ModelsState = {
    classifiers: [],
    modelRunnerIsVisible: false,
    modelRunnerTask: null,
+    requestedInferenceIDs: {},
    inferences: {},
    totalCount: 0,
    query: {
@@ -88,15 +90,28 @@ export default function (state = defaultState, action: ModelsActions | AuthActio
                modelRunnerTask: null,
            };
        }
+        case ModelsActionTypes.GET_INFERENCES_SUCCESS: {
+            const { requestedInferenceIDs } = state;
+
+            return {
+                ...state,
+                requestedInferenceIDs: {
+                    ...requestedInferenceIDs,
+                    ...action.payload.requestedInferenceIDs,
+                },
+            };
+        }
        case ModelsActionTypes.GET_INFERENCE_STATUS_SUCCESS: {
-            const { inferences } = state;
+            const { inferences, requestedInferenceIDs } = state;

            if (action.payload.activeInference.status === 'finished') {
+                const { taskID, activeInference } = action.payload;
+                const { id: inferenceID } = activeInference;
+
                return {
                    ...state,
-                    inferences: Object.fromEntries(
-                        Object.entries(inferences).filter(([key]): boolean => +key !== action.payload.taskID),
-                    ),
+                    inferences: omit(inferences, taskID),
+                    requestedInferenceIDs: omit(requestedInferenceIDs, inferenceID),
                };
            }

@@ -123,12 +138,14 @@ export default function (state = defaultState, action: ModelsActions | AuthActio
            };
        }
        case ModelsActionTypes.CANCEL_INFERENCE_SUCCESS: {
-            const { inferences } = state;
-            delete inferences[action.payload.taskID];
+            const { inferences, requestedInferenceIDs } = state;
+            const { taskID, activeInference } = action.payload;
+            const { id: inferenceID } = activeInference;

            return {
                ...state,
-                inferences: { ...inferences },
+                inferences: omit(inferences, taskID),
+                requestedInferenceIDs: omit(requestedInferenceIDs, inferenceID),
            };
        }
        case ModelsActionTypes.GET_MODEL_PREVIEW: {
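A note on the reducer change: the old CANCEL_INFERENCE_SUCCESS branch mutated state by running delete inferences[action.payload.taskID] before spreading, while lodash's omit returns a shallow copy without the listed keys and leaves the original object untouched, which is the behaviour a Redux reducer should have. A small standalone illustration:

import { omit } from 'lodash';

const inferences = { 1: { status: 'queued' }, 2: { status: 'started' } };
const next = omit(inferences, 1);

console.log(next);       // { '2': { status: 'started' } }
console.log(inferences); // unchanged: both keys are still present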
