diff --git a/changelog.d/20240515_100610_kirill.9992_data_processing.md b/changelog.d/20240515_100610_kirill.9992_data_processing.md
new file mode 100644
index 00000000000..b8962fa6451
--- /dev/null
+++ b/changelog.d/20240515_100610_kirill.9992_data_processing.md
@@ -0,0 +1,6 @@
+### Added
+
+- A set of features to track background activities: importing/exporting datasets, annotations, or backups, and creating tasks.
+These processes are now listed on the Requests page, which lets a user see the current status of each activity
+and prevents progress from being lost when the browser tab is closed
+()
diff --git a/cvat-core/package.json b/cvat-core/package.json
index 1de340e39f0..030b2cb0e49 100644
--- a/cvat-core/package.json
+++ b/cvat-core/package.json
@@ -1,6 +1,6 @@
 {
     "name": "cvat-core",
-    "version": "15.0.7",
+    "version": "15.1.0",
     "type": "module",
     "description": "Part of Computer Vision Tool which presents an interface for client-side integration",
     "main": "src/api.ts",
diff --git a/cvat-core/src/annotations.ts b/cvat-core/src/annotations.ts
index 3c63f800426..f9b51139d0e 100644
--- a/cvat-core/src/annotations.ts
+++ b/cvat-core/src/annotations.ts
@@ -1,5 +1,5 @@
 // Copyright (C) 2019-2022 Intel Corporation
-// Copyright (C) 2022-2023 CVAT.ai Corporation
+// Copyright (C) 2022-2024 CVAT.ai Corporation
 //
 // SPDX-License-Identifier: MIT
@@ -168,9 +168,9 @@ export function importDataset(
     file: File | string,
     options: {
         convMaskToPoly?: boolean,
-        updateStatusCallback?: (s: string, n: number) => void,
+        updateStatusCallback?: (message: string, progress: number) => void,
     } = {},
-): Promise<void> {
+): Promise<string> {
     const updateStatusCallback = options.updateStatusCallback || (() => {});
     const convMaskToPoly = 'convMaskToPoly' in options ? options.convMaskToPoly : true;
     const adjustedOptions = {
diff --git a/cvat-core/src/api-implementation.ts b/cvat-core/src/api-implementation.ts
index 0bafcd2f32d..1e7bfb5164c 100644
--- a/cvat-core/src/api-implementation.ts
+++ b/cvat-core/src/api-implementation.ts
@@ -9,6 +9,7 @@ import config from './config';
 import PluginRegistry from './plugins';
 import serverProxy from './server-proxy';
 import lambdaManager from './lambda-manager';
+import requestsManager from './requests-manager';
 import {
     isBoolean,
     isInteger,
@@ -61,6 +62,10 @@ export default function implementAPI(cvat: CVATCore): CVATCore {
     implementationMixin(cvat.lambda.listen, lambdaManager.listen.bind(lambdaManager));
     implementationMixin(cvat.lambda.requests, lambdaManager.requests.bind(lambdaManager));
 
+    implementationMixin(cvat.requests.list, requestsManager.list.bind(requestsManager));
+    implementationMixin(cvat.requests.listen, requestsManager.listen.bind(requestsManager));
+    implementationMixin(cvat.requests.cancel, requestsManager.cancel.bind(requestsManager));
+
     implementationMixin(cvat.server.about, async () => {
         const result = await serverProxy.server.about();
         return result;
diff --git a/cvat-core/src/api.ts b/cvat-core/src/api.ts
index d47cf8f4f6e..00a65ac3a84 100644
--- a/cvat-core/src/api.ts
+++ b/cvat-core/src/api.ts
@@ -22,6 +22,7 @@ import Organization from './organization';
 import Webhook from './webhook';
 import AnnotationGuide from './guide';
 import BaseSingleFrameAction from './annotations-actions';
+import { Request } from './request';
 import * as enums from './enums';
@@ -286,6 +287,12 @@ function build(): CVATCore {
         set globalObjectsCounter(value: number) {
             config.globalObjectsCounter = value;
         },
+        get requestsStatusDelay() {
+            return config.requestsStatusDelay;
+        },
+        set requestsStatusDelay(value) {
+            config.requestsStatusDelay = value;
+        },
     },
     client: {
         version: `${pjson.version}`,
@@ -374,6 +381,26 @@ function build(): CVATCore {
                 },
             },
         },
+        requests: {
+            async list() {
+                const result = await PluginRegistry.apiWrapper(cvat.requests.list);
+                return result;
+            },
+            async cancel(rqID: string) {
+                const result = await PluginRegistry.apiWrapper(cvat.requests.cancel, rqID);
+                return result;
+            },
+            async listen(
+                rqID: string,
+                options: {
+                    callback: (request: Request) => void,
+                    initialRequest?: Request,
+                },
+            ) {
+                const result = await PluginRegistry.apiWrapper(cvat.requests.listen, rqID, options);
+                return result;
+            },
+        },
         classes: {
             User,
             Project: implementProject(Project),
diff --git a/cvat-core/src/config.ts b/cvat-core/src/config.ts
index 31357b1b7d4..99d76a72365 100644
--- a/cvat-core/src/config.ts
+++ b/cvat-core/src/config.ts
@@ -17,6 +17,8 @@ const config = {
     },
     onOrganizationChange: null,
     globalObjectsCounter: 0,
+
+    requestsStatusDelay: null,
 };
 
 export default config;
diff --git a/cvat-core/src/exceptions.ts b/cvat-core/src/exceptions.ts
index a40987bac06..dd3728c2506 100644
--- a/cvat-core/src/exceptions.ts
+++ b/cvat-core/src/exceptions.ts
@@ -1,5 +1,5 @@
 // Copyright (C) 2019-2022 Intel Corporation
-// Copyright (C) 2022 CVAT.ai Corporation
+// Copyright (C) 2022-2024 CVAT.ai Corporation
 //
 // SPDX-License-Identifier: MIT
@@ -99,6 +99,8 @@ export class DataError extends Exception {}
 
 export class ScriptingError extends Exception {}
 
+export class RequestError extends Exception {}
+
 export class ServerError extends Exception {
     public code: number;
     constructor(message, code) {
diff --git a/cvat-core/src/index.ts b/cvat-core/src/index.ts
index e83da172525..efd069e919d 100644
--- a/cvat-core/src/index.ts
+++ b/cvat-core/src/index.ts
@@ -32,6 +32,7 @@ import QualityConflict from './quality-conflict';
 import QualitySettings from './quality-settings';
 import AnalyticsReport from './analytics-report';
 import AnnotationGuide from './guide';
+import { Request } from './request';
 import BaseSingleFrameAction, { listActions, registerAction, runActions } from './annotations-actions';
 import {
     ArgumentError, DataError, Exception, ScriptingError, ServerError,
@@ -150,6 +151,17 @@ export default interface CVATCore {
     frames: {
         getMeta: any;
     };
+    requests: {
+        list: () => Promise<PaginatedResource<Request>>;
+        listen: (
+            rqID: string,
+            options: {
+                callback: (request: Request) => void,
+                initialRequest?: Request,
+            }
+        ) => Promise<Request>;
+        cancel: (rqID: string) => Promise<void>;
+    };
     actions: {
         list: typeof listActions;
         register: typeof registerAction;
@@ -166,6 +178,7 @@ export default interface CVATCore {
         };
         onOrganizationChange: (newOrgId: number | null) => void | null;
         globalObjectsCounter: typeof config.globalObjectsCounter;
+        requestsStatusDelay: typeof config.requestsStatusDelay;
     },
     client: {
         version: string;
diff --git a/cvat-core/src/project-implementation.ts b/cvat-core/src/project-implementation.ts
index 028f535cab6..5291dc77431 100644
--- a/cvat-core/src/project-implementation.ts
+++ b/cvat-core/src/project-implementation.ts
@@ -107,7 +107,7 @@ export default function implementProject(Project: typeof ProjectClass): typeof P
     });
 
     Object.defineProperty(Project.prototype.annotations.exportDataset, 'implementation', {
-        value: function exportDatasetImplementation(
+        value: async function exportDatasetImplementation(
             this: ProjectClass,
             format: Parameters[0],
             saveImages: Parameters[1],
             useDefaultSettings: Parameters[2],
@@ -115,12 +115,13 @@ export default function implementProject(Project: typeof ProjectClass): typeof P
             targetStorage: Parameters[3],
             customName: Parameters[4],
         ): ReturnType {
-            return exportDataset(this, format, saveImages, useDefaultSettings, targetStorage, customName);
+            const rqID = await exportDataset(this, format, saveImages, useDefaultSettings, targetStorage, customName);
+            return rqID;
         },
     });
 
     Object.defineProperty(Project.prototype.annotations.importDataset, 'implementation', {
-        value: function importDatasetImplementation(
+        value: async function importDatasetImplementation(
             this: ProjectClass,
             format: Parameters[0],
             useDefaultSettings: Parameters[1],
             sourceStorage: Parameters[2],
@@ -128,18 +129,20 @@ export default function implementProject(Project: typeof ProjectClass): typeof P
             file: Parameters[3],
             options: Parameters[4],
         ): ReturnType {
-            return importDataset(this, format, useDefaultSettings, sourceStorage, file, options);
+            const rqID = await importDataset(this, format, useDefaultSettings, sourceStorage, file, options);
+            return rqID;
         },
     });
 
     Object.defineProperty(Project.prototype.backup, 'implementation', {
-        value: function backupImplementation(
+        value: async function backupImplementation(
             this: ProjectClass,
             targetStorage: Parameters[0],
             useDefaultSettings: Parameters[1],
             fileName: Parameters[2],
         ): ReturnType {
-            return serverProxy.projects.backup(this.id, targetStorage, useDefaultSettings, fileName);
+            const rqID = await serverProxy.projects.backup(this.id, targetStorage, useDefaultSettings, fileName);
+            return rqID;
         },
     });
 
@@ -149,9 +152,8 @@ export default function implementProject(Project: typeof ProjectClass): typeof P
             storage: Parameters[0],
             file: Parameters[1],
         ): ReturnType {
-            const serializedProject = await serverProxy.projects.restore(storage, file);
-            const labels = await serverProxy.labels.get({ project_id: serializedProject.id });
-            return new Project({ ...serializedProject, labels: labels.results });
+            const rqID = await serverProxy.projects.restore(storage, file);
+            return rqID;
         },
     });
diff --git a/cvat-core/src/project.ts b/cvat-core/src/project.ts
index ab033f6388b..831fc2b4f5b 100644
--- a/cvat-core/src/project.ts
+++ b/cvat-core/src/project.ts
@@ -47,7 +47,7 @@ export default class Project {
             convMaskToPoly?: boolean,
             updateStatusCallback?: (s: string, n: number) => void,
         },
-    ) => Promise<void>;
+    ) => Promise<string>;
     };
 
     constructor(initialData: Readonly) {
@@ -246,7 +246,7 @@ export default class Project {
         return result;
     }
 
-    static async restore(storage: Storage, file: File | string): Promise<Project> {
+    static async restore(storage: Storage, file: File | string): Promise<string> {
         const result = await PluginRegistry.apiWrapper.call(this, Project.restore, storage, file);
         return result;
     }
diff --git a/cvat-core/src/request.ts b/cvat-core/src/request.ts
new file mode 100644
index 00000000000..2c04402c8df
--- /dev/null
+++ b/cvat-core/src/request.ts
@@ -0,0 +1,105 @@
+// Copyright (C) 2024 CVAT.ai Corporation
+//
+// SPDX-License-Identifier: MIT
+
+import { RQStatus } from './enums';
+import User from './user';
+import { SerializedRequest } from './server-response-types';
+
+type Operation = {
+    target: string;
+    type: string;
+    format: string;
+    jobID: number | null;
+    taskID: number | null;
+    projectID: number | null;
+};
+
+export class Request {
+    #id: string;
+    #status: RQStatus;
+    #operation: Partial<SerializedRequest['operation']>;
+    #message: string;
+    #progress: number;
+    #resultUrl: string;
+    #resultID: number;
+    #createdDate: string;
+    #startedDate: string;
+    #finishedDate: string;
+    #expiryDate: string;
+    #owner: User;
+
+    constructor(initialData: SerializedRequest) {
+        this.#id = initialData.id;
+        this.#status = initialData.status as RQStatus;
+        this.#operation = initialData.operation;
+        this.#progress = initialData.progress;
+        this.#message = initialData.message;
+        this.#resultUrl = initialData.result_url;
+        this.#resultID = initialData.result_id;
+
+        this.#createdDate = initialData.created_date;
+        this.#startedDate = initialData.started_date;
+        this.#finishedDate = initialData.finished_date;
+        this.#expiryDate = initialData.expiry_date;
+
+        if (initialData.owner) {
+            this.#owner = new User(initialData.owner);
+        }
+    }
+
+    get id(): string {
+        return this.#id;
+    }
+
+    get status(): RQStatus {
+        return this.#status.toLowerCase() as RQStatus;
+    }
+
+    get progress(): number {
+        return this.#progress;
+    }
+
+    get message(): string {
+        return this.#message;
+    }
+
+    get operation(): Operation {
+        return {
+            target: this.#operation.target,
+            type: this.#operation.type,
+            format: this.#operation.format,
+            jobID: this.#operation.job_id,
+            taskID: this.#operation.task_id,
+            projectID: this.#operation.project_id,
+        };
+    }
+
+    get url(): string {
+        return this.#resultUrl;
+    }
+
+    get resultID(): number {
+        return this.#resultID;
+    }
+
+    get createdDate(): string {
+        return this.#createdDate;
+    }
+
+    get startedDate(): string {
+        return this.#startedDate;
+    }
+
+    get finishedDate(): string {
+        return this.#finishedDate;
+    }
+
+    get expiryDate(): string {
+        return this.#expiryDate;
+    }
+
+    get owner(): User {
+        return this.#owner;
+    }
+}
diff --git a/cvat-core/src/requests-manager.ts b/cvat-core/src/requests-manager.ts
new file mode 100644
index 00000000000..429c42dba2f
--- /dev/null
+++ b/cvat-core/src/requests-manager.ts
@@ -0,0 +1,216 @@
+// Copyright (C) 2024 CVAT.ai Corporation
+//
+// SPDX-License-Identifier: MIT
+
+import serverProxy from './server-proxy';
+import { RQStatus } from './enums';
+import { Request } from './request';
+import { RequestError } from './exceptions';
+import { PaginatedResource } from './core-types';
+import config from './config';
+
+const REQUESTS_COUNT = 5;
+const PROGRESS_EPS = 25;
+// Adaptive polling delays (ms): the longer a request stays in the same state,
+// the further apart its status checks are scheduled
+const REQUEST_STATUS_DELAYS = {
+    [RQStatus.STARTED]: [3000, 7000, 13000],
+    [RQStatus.QUEUED]: [7000, 13000, 19000, 29000,
+        41000, 53000, 67000, 79000,
+        101000, 113000, 139000, 163000],
+};
+
+function getRequestStatusDelays(): typeof REQUEST_STATUS_DELAYS {
+    if (config.requestsStatusDelay) {
+        return {
+            [RQStatus.STARTED]: [config.requestsStatusDelay],
+            [RQStatus.QUEUED]: [config.requestsStatusDelay],
+        };
+    }
+    return REQUEST_STATUS_DELAYS;
+}
+
+class RequestsManager {
+    private listening: Record<string, {
+        onUpdate: ((request: Request) => void)[];
+        requestDelayIdx: number | null,
+        request: Request | null,
+        timeout: number | null;
+        promise?: Promise<Request>;
+    }>;
+
+    private requestStack: number[];
+    constructor() {
+        this.listening = {};
+        this.requestStack = [];
+    }
+
+    async list(): Promise<PaginatedResource<Request>> {
+        const result = await serverProxy.requests.list();
+        const requests = result.map((serializedRequest) => new Request({
+            ...serializedRequest,
+        })) as PaginatedResource<Request>;
+        requests.count = requests.length;
+        return requests;
+    }
+
+    async listen(
+        requestID: string,
+        options: {
+            callback: (request: Request) => void,
+            initialRequest?: Request,
+        },
+    ): Promise<Request> {
+        if (!requestID) {
+            throw new Error('Request id is not provided');
+        }
+        const callback = options?.callback;
+        const initialRequest = options?.initialRequest;
+
+        if (requestID in this.listening) {
+            if (callback) {
+                this.listening[requestID].onUpdate.push(callback);
+            }
+            return this.listening[requestID].promise;
+        }
+        const promise = new Promise<Request>((resolve, reject) => {
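+            // Polling loop: timeoutCallback below queries the request status, notifies
+            // subscribers via onUpdate, and re-schedules itself with an adaptive delay
+            // until the request reaches a terminal status, which settles this promise.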
+            const timeoutCallback = async (): Promise<void> => {
+                // Make sure that no more than REQUESTS_COUNT status checks are sent simultaneously;
+                // if that's the case, re-schedule the check
+                const timestamp = Date.now();
+                if (this.requestStack.length >= REQUESTS_COUNT) {
+                    const timestampToCheck = this.requestStack[this.requestStack.length - 1];
+                    const delay = this.delayFor(requestID);
+                    if (timestamp - timestampToCheck < delay) {
+                        this.listening[requestID].timeout = window.setTimeout(timeoutCallback, delay);
+                        return;
+                    }
+                }
+                if (this.requestStack.length >= REQUESTS_COUNT) {
+                    this.requestStack.pop();
+                }
+                this.requestStack.unshift(timestamp);
+
+                try {
+                    const serializedRequest = await serverProxy.requests.status(requestID);
+                    if (requestID in this.listening) {
+                        const request = new Request({ ...serializedRequest });
+                        const { status } = request;
+
+                        const { onUpdate } = this.listening[requestID];
+                        if ([RQStatus.QUEUED, RQStatus.STARTED].includes(status)) {
+                            onUpdate.forEach((update) => update(request));
+                            this.listening[requestID].requestDelayIdx = this.updateRequestDelayIdx(
+                                requestID,
+                                request,
+                            );
+                            this.listening[requestID].request = request;
+                            this.listening[requestID].timeout = window
+                                .setTimeout(timeoutCallback, this.delayFor(requestID));
+                        } else {
+                            delete this.listening[requestID];
+                            if (status === RQStatus.FINISHED) {
+                                onUpdate.forEach((update) => update(request));
+                                resolve(request);
+                            } else {
+                                onUpdate.forEach((update) => update(request));
+                                reject(new RequestError(request.message));
+                            }
+                        }
+                    }
+                } catch (error) {
+                    if (requestID in this.listening) {
+                        const { onUpdate } = this.listening[requestID];
+
+                        onUpdate.forEach((update) => update(new Request({
+                            id: requestID,
+                            status: RQStatus.FAILED,
+                            message: `Could not get the status of request ${requestID}. ${error.toString()}`,
+                        })));
+                        reject(error);
+                    }
+                }
+            };
+
+            if (initialRequest?.status === RQStatus.FAILED) {
+                reject(new RequestError(initialRequest?.message));
+            } else {
+                this.listening[requestID] = {
+                    onUpdate: callback ?
[callback] : [], + timeout: window.setTimeout(timeoutCallback), + request: initialRequest, + requestDelayIdx: 0, + }; + } + }); + + this.listening[requestID] = { + ...this.listening[requestID], + promise, + }; + return promise; + } + + async cancel(rqID: string): Promise { + await serverProxy.requests.cancel(rqID).then(() => { + if (rqID in this.listening) { + clearTimeout(this.listening[rqID].timeout); + delete this.listening[rqID]; + } + }); + } + + private delayFor(rqID: string): number { + const state = this.listening[rqID]; + const { request, requestDelayIdx } = state; + + // request was not checked yet, call it immediately + if (!request) { + return 0; + } + + const addRndComponent = (val: number): number => ( + val + Math.floor(Math.random() * Math.floor(val / 2)) // NOSONAR + ); + + switch (request.status) { + case RQStatus.STARTED: { + return addRndComponent(getRequestStatusDelays()[RQStatus.STARTED][requestDelayIdx]); + } + case RQStatus.QUEUED: { + return addRndComponent(getRequestStatusDelays()[RQStatus.QUEUED][requestDelayIdx]); + } + default: + return 0; + } + } + + private updateRequestDelayIdx(rqID: string, updatedRequest: Request): number { + const state = this.listening[rqID]; + const { requestDelayIdx, request } = state; + + let progress = 0; + if (request) { + progress = request?.progress; + } + + switch (updatedRequest.status) { + case RQStatus.QUEUED: { + return Math.min(requestDelayIdx + 1, getRequestStatusDelays()[RQStatus.QUEUED].length - 1); + } + case RQStatus.STARTED: { + if (Math.round(Math.abs(updatedRequest.progress - progress) * 100) < PROGRESS_EPS) { + return Math.min(requestDelayIdx + 1, getRequestStatusDelays()[RQStatus.STARTED].length - 1); + } + return requestDelayIdx; + } + default: + return requestDelayIdx; + } + } +} + +export default new RequestsManager(); diff --git a/cvat-core/src/server-proxy.ts b/cvat-core/src/server-proxy.ts index c59354b4d24..caa945d8448 100644 --- a/cvat-core/src/server-proxy.ts +++ b/cvat-core/src/server-proxy.ts @@ -18,8 +18,10 @@ import { SerializedInvitationData, SerializedCloudStorage, SerializedFramesMetaData, SerializedCollection, SerializedQualitySettingsData, APIQualitySettingsFilter, SerializedQualityConflictData, APIQualityConflictsFilter, SerializedQualityReportData, APIQualityReportsFilter, SerializedAnalyticsReport, APIAnalyticsReportFilter, + SerializedRequest, } from './server-response-types'; import { PaginatedResource } from './core-types'; +import { Request } from './request'; import { Storage } from './storage'; import { SerializedEvent } from './event'; import { RQStatus, StorageLocation, WebhookSourceType } from './enums'; @@ -29,12 +31,12 @@ import { ServerError } from './exceptions'; type Params = { org: number | string, - use_default_location?: boolean, location?: StorageLocation, cloud_storage_id?: number, format?: string, filename?: string, action?: string, + save_images?: boolean, }; tus.defaultOptions.storeFingerprintForResuming = false; @@ -45,7 +47,6 @@ function enableOrganization(): { org: string } { function configureStorage(storage: Storage, useDefaultLocation = false): Partial { return { - use_default_location: useDefaultLocation, ...(!useDefaultLocation ? { location: storage.location, ...(storage.cloudStorageId ? { @@ -839,31 +840,24 @@ function exportDataset(instanceType: 'projects' | 'jobs' | 'tasks') { name?: string, ) { const { backendAPI } = config; - const baseURL = `${backendAPI}/${instanceType}/${id}/${saveImages ? 
'dataset' : 'annotations'}`; + const baseURL = `${backendAPI}/${instanceType}/${id}/dataset/export`; const params: Params = { ...enableOrganization(), ...configureStorage(targetStorage, useDefaultSettings), ...(name ? { filename: name } : {}), format, + save_images: saveImages, }; - return new Promise((resolve, reject) => { async function request() { - Axios.get(baseURL, { + Axios.post(baseURL, {}, { params, }) .then((response) => { - const isCloudStorage = targetStorage.location === StorageLocation.CLOUD_STORAGE; - const { status } = response; - - if (status === 202) { - setTimeout(request, 3000); - } else if (status === 201) { - params.action = 'download'; - resolve(`${baseURL}?${new URLSearchParams(params).toString()}`); - } else if (isCloudStorage && status === 200) { - resolve(); + if (response.status === 202) { + resolve(response.data.rq_id); } + resolve(); }) .catch((errorData) => { reject(generateError(errorData)); @@ -883,9 +877,9 @@ async function importDataset( file: File | string, options: { convMaskToPoly: boolean, - updateStatusCallback: (s: string, n: number) => void, + updateStatusCallback: (message: string, progress: number) => void, }, -): Promise { +): Promise { const { backendAPI, origin } = config; const params: Params & { conv_mask_to_poly: boolean } = { ...enableOrganization(), @@ -896,45 +890,16 @@ async function importDataset( }; const url = `${backendAPI}/projects/${id}/dataset`; - let rqId: string; - - async function wait() { - return new Promise((resolve, reject) => { - async function requestStatus() { - try { - const response = await Axios.get(url, { - params: { ...params, action: 'import_status', rq_id: rqId }, - }); - if (response.status === 202) { - if (response.data.message) { - options.updateStatusCallback(response.data.message, response.data.progress || 0); - } - setTimeout(requestStatus, 3000); - } else if (response.status === 201) { - resolve(); - } else { - reject(generateError(response)); - } - } catch (error) { - reject(generateError(error)); - } - } - setTimeout(requestStatus, 2000); - }); - } const isCloudStorage = sourceStorage.location === StorageLocation.CLOUD_STORAGE; - if (isCloudStorage) { - try { + try { + if (isCloudStorage) { const response = await Axios.post(url, new FormData(), { params, }); - rqId = response.data.rq_id; - } catch (errorData) { - throw generateError(errorData); + return response.data.rq_id; } - } else { const uploadConfig = { chunkSize: config.uploadChunkSize * 1024 * 1024, endpoint: `${origin}${backendAPI}/projects/${id}/dataset/`, @@ -944,57 +909,47 @@ async function importDataset( options.updateStatusCallback('The dataset is being uploaded to the server', percentage); }, }; - - try { - await Axios.post(url, - new FormData(), { - params, - headers: { 'Upload-Start': true }, - }); - await chunkUpload(file as File, uploadConfig); - const response = await Axios.post(url, - new FormData(), { - params, - headers: { 'Upload-Finish': true }, - }); - rqId = response.data.rq_id; - } catch (errorData) { - throw generateError(errorData); - } - } - try { - return await wait(); + await Axios.post(url, + new FormData(), { + params, + headers: { 'Upload-Start': true }, + }); + await chunkUpload(file as File, uploadConfig); + const response = await Axios.post(url, + new FormData(), { + params, + headers: { 'Upload-Finish': true }, + }); + return response.data.rq_id; } catch (errorData) { throw generateError(errorData); } } -async function backupTask(id: number, targetStorage: Storage, useDefaultSettings: boolean, fileName?: string) { 
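+// Starts a server-side backup export in the background; the promise now resolves with
+// the rq_id of the background request instead of a direct download link.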
+async function backupTask( + id: number, + targetStorage: Storage, + useDefaultSettings: boolean, + fileName?: string, +): Promise { const { backendAPI } = config; const params: Params = { ...enableOrganization(), ...configureStorage(targetStorage, useDefaultSettings), ...(fileName ? { filename: fileName } : {}), }; - const url = `${backendAPI}/tasks/${id}/backup`; + const url = `${backendAPI}/tasks/${id}/backup/export`; - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { async function request() { try { - const response = await Axios.get(url, { + const response = await Axios.post(url, {}, { params, }); - const isCloudStorage = targetStorage.location === StorageLocation.CLOUD_STORAGE; - const { status } = response; - - if (status === 202) { - setTimeout(request, 3000); - } else if (status === 201) { - params.action = 'download'; - resolve(`${url}?${new URLSearchParams(params).toString()}`); - } else if (isCloudStorage && status === 200) { - resolve(); + if (response.status === 202) { + resolve(response.data.rq_id); } + resolve(); } catch (errorData) { reject(generateError(errorData)); } @@ -1004,7 +959,7 @@ async function backupTask(id: number, targetStorage: Storage, useDefaultSettings }); } -async function restoreTask(storage: Storage, file: File | string): Promise { +async function restoreTask(storage: Storage, file: File | string): Promise { const { backendAPI } = config; // keep current default params to 'freeze" them during this request const params: Params = { @@ -1013,40 +968,18 @@ async function restoreTask(storage: Storage, file: File | string): Promise { - async function checkStatus() { - try { - taskData.set('rq_id', response.data.rq_id); - response = await Axios.post(url, taskData, { - params, - }); - if (response.status === 202) { - setTimeout(checkStatus, 3000); - } else { - // to be able to get the task after it was created, pass frozen params - const importedTask = await getTasks({ id: response.data.id, ...params }); - resolve(importedTask[0]); - } - } catch (errorData) { - reject(generateError(errorData)); - } - } - setTimeout(checkStatus); - }); - } const isCloudStorage = storage.location === StorageLocation.CLOUD_STORAGE; + let response; - if (isCloudStorage) { - params.filename = file as string; - response = await Axios.post(url, - new FormData(), { - params, - }); - } else { + try { + if (isCloudStorage) { + params.filename = file as string; + response = await Axios.post(url, + new FormData(), { + params, + }); + return response.data.rq_id; + } const uploadConfig = { chunkSize: config.uploadChunkSize * 1024 * 1024, endpoint: `${origin}${backendAPI}/tasks/backup/`, @@ -1064,8 +997,10 @@ async function restoreTask(storage: Storage, file: File | string): Promise { const { backendAPI } = config; // keep current default params to 'freeze" them during this request const params: Params = { @@ -1082,25 +1017,18 @@ async function backupProject( ...(fileName ? 
{ filename: fileName } : {}), }; - const url = `${backendAPI}/projects/${id}/backup`; + const url = `${backendAPI}/projects/${id}/backup/export`; - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { async function request() { try { - const response = await Axios.get(url, { + const response = await Axios.post(url, {}, { params, }); - const isCloudStorage = targetStorage.location === StorageLocation.CLOUD_STORAGE; - const { status } = response; - - if (status === 202) { - setTimeout(request, 3000); - } else if (status === 201) { - params.action = 'download'; - resolve(`${url}?${new URLSearchParams(params).toString()}`); - } else if (isCloudStorage && status === 200) { - resolve(); + if (response.status === 202) { + resolve(response.data.rq_id); } + resolve(); } catch (errorData) { reject(generateError(errorData)); } @@ -1110,7 +1038,7 @@ async function backupProject( }); } -async function restoreProject(storage: Storage, file: File | string): Promise { +async function restoreProject(storage: Storage, file: File | string): Promise { const { backendAPI } = config; // keep current default params to 'freeze" them during this request const params: Params = { @@ -1119,42 +1047,18 @@ async function restoreProject(storage: Storage, file: File | string): Promise { - async function request() { - try { - projectData.set('rq_id', response.data.rq_id); - response = await Axios.post(`${backendAPI}/projects/backup`, projectData, { - params, - }); - if (response.status === 202) { - setTimeout(request, 3000); - } else { - // to be able to get the task after it was created, pass frozen params - const restoredProject = await getProjects({ id: response.data.id, ...params }); - resolve(restoredProject[0]); - } - } catch (errorData) { - reject(generateError(errorData)); - } - } - - setTimeout(request); - }); - } - const isCloudStorage = storage.location === StorageLocation.CLOUD_STORAGE; + let response; - if (isCloudStorage) { - params.filename = file; - response = await Axios.post(url, - new FormData(), { - params, - }); - } else { + try { + if (isCloudStorage) { + params.filename = file; + response = await Axios.post(url, + new FormData(), { + params, + }); + return response.data.rq_id; + } const uploadConfig = { chunkSize: config.uploadChunkSize * 1024 * 1024, endpoint: `${origin}${backendAPI}/projects/backup/`, @@ -1172,8 +1076,10 @@ async function restoreProject(storage: Storage, file: File | string): Promise = Record = Record void)[]; }>; -const listenToCreateTaskCallbacks: LongProcessListener = {}; - -function listenToCreateTask( - id, onUpdate: (state: RQStatus, progress: number, message: string) => void, -): Promise { - if (id in listenToCreateTaskCallbacks) { - listenToCreateTaskCallbacks[id].onUpdate.push(onUpdate); - // to avoid extra status check requests we do not create any more promises - return listenToCreateTaskCallbacks[id].promise; - } - - const promise = new Promise((resolve, reject) => { - const { backendAPI } = config; - const params = enableOrganization(); - async function checkStatus(): Promise { - try { - const response = await Axios.get(`${backendAPI}/tasks/${id}/status`, { params }); - const state = response.data.state?.toLowerCase(); - if ([RQStatus.QUEUED, RQStatus.STARTED].includes(state)) { - // notify all the subscribtions when data status changed - listenToCreateTaskCallbacks[id].onUpdate.forEach((callback) => { - callback( - state, - response.data.progress || 0, - state === RQStatus.QUEUED ? 
- 'CVAT queued the task to import' : response.data.message, - ); - }); - - setTimeout(checkStatus, state === RQStatus.QUEUED ? 20000 : 5000); - } else if (state === RQStatus.FINISHED) { - const [createdTask] = await getTasks({ id, ...params }); - resolve(createdTask); - } else if (state === RQStatus.FAILED) { - const failMessage = 'Images processing failed'; - listenToCreateTaskCallbacks[id].onUpdate.forEach((callback) => { - callback(state, 0, failMessage); - }); - - reject(new ServerError(filterPythonTraceback(response.data.message), 400)); - } else { - const failMessage = 'Unknown status received'; - listenToCreateTaskCallbacks[id].onUpdate.forEach((callback) => { - callback(state || RQStatus.UNKNOWN, 0, failMessage); - }); - reject( - new ServerError( - `Could not create task. ${failMessage}: ${state}`, - 500, - ), - ); - } - } catch (errorData) { - listenToCreateTaskCallbacks[id].onUpdate.forEach((callback) => { - callback('failed', 0, 'Server request failed'); - }); - reject(generateError(errorData)); - } - } - - setTimeout(checkStatus, 100); - }); - - listenToCreateTaskCallbacks[id] = { - promise, - onUpdate: [onUpdate], - }; - promise.catch(() => { - // do nothing, avoid uncaught promise exceptions - }).finally(() => delete listenToCreateTaskCallbacks[id]); - return promise; -} - async function createTask( taskSpec: Partial, taskDataSpec: any, - onUpdate: (state: RQStatus, progress: number, message: string) => void, -): Promise { + onUpdate: (request: Request) => void, +): Promise<{ taskID: number, rqID: string }> { const { backendAPI, origin } = config; // keep current default params to 'freeze" them during this request const params = enableOrganization(); @@ -1292,7 +1125,12 @@ async function createTask( let response = null; - onUpdate(RQStatus.UNKNOWN, 0, 'CVAT is creating your task'); + onUpdate(new Request({ + status: RQStatus.UNKNOWN, + progress: 0, + message: 'CVAT is creating your task', + })); + try { response = await Axios.post(`${backendAPI}/tasks`, taskSpec, { params, @@ -1301,7 +1139,11 @@ async function createTask( throw generateError(errorData); } - onUpdate(RQStatus.UNKNOWN, 0, 'CVAT is uploading task data to the server'); + onUpdate(new Request({ + status: RQStatus.UNKNOWN, + progress: 0, + message: 'CVAT is uploading task data to the server', + })); async function bulkUpload(taskId, files) { const fileBulks = files.reduce((fileGroups, file) => { @@ -1321,7 +1163,11 @@ async function createTask( taskData.append(`client_files[${idx}]`, element); } const percentage = totalSentSize / totalSize; - onUpdate(RQStatus.UNKNOWN, percentage, 'CVAT is uploading task data to the server'); + onUpdate(new Request({ + status: RQStatus.UNKNOWN, + progress: percentage, + message: 'CVAT is uploading task data to the server', + })); await Axios.post(`${backendAPI}/tasks/${taskId}/data`, taskData, { ...params, headers: { 'Upload-Multiple': true }, @@ -1334,6 +1180,7 @@ async function createTask( } } + let rqID = null; try { await Axios.post(`${backendAPI}/tasks/${response.data.id}/data`, taskData, { @@ -1343,7 +1190,11 @@ async function createTask( const uploadConfig = { endpoint: `${origin}${backendAPI}/tasks/${response.data.id}/data/`, onUpdate: (percentage) => { - onUpdate(RQStatus.UNKNOWN, percentage, 'CVAT is uploading task data to the server'); + onUpdate(new Request({ + status: RQStatus.UNKNOWN, + progress: percentage, + message: 'CVAT is uploading task data to the server', + })); }, chunkSize, totalSize, @@ -1356,11 +1207,12 @@ async function createTask( if (bulkFiles.length > 
0) { await bulkUpload(response.data.id, bulkFiles); } - await Axios.post(`${backendAPI}/tasks/${response.data.id}/data`, + const dataResponse = await Axios.post(`${backendAPI}/tasks/${response.data.id}/data`, taskData, { ...params, headers: { 'Upload-Finish': true }, }); + rqID = dataResponse.data.rq_id; } catch (errorData) { try { await deleteTask(response.data.id, params.org || null); @@ -1370,13 +1222,7 @@ async function createTask( throw generateError(errorData); } - try { - const createdTask = await listenToCreateTask(response.data.id, onUpdate); - return createdTask; - } catch (createException) { - await deleteTask(response.data.id, params.org || null); - throw createException; - } + return { taskID: response.data.id, rqID }; } async function getJobs( @@ -1736,7 +1582,7 @@ async function uploadAnnotations( sourceStorage: Storage, file: File | string, options: { convMaskToPoly: boolean }, -): Promise { +): Promise { const { backendAPI, origin } = config; const params: Params & { conv_mask_to_poly: boolean } = { ...enableOrganization(), @@ -1745,70 +1591,35 @@ async function uploadAnnotations( filename: typeof file === 'string' ? file : file.name, conv_mask_to_poly: options.convMaskToPoly, }; - let rqId: string; const url = `${backendAPI}/${session}s/${id}/annotations`; - async function wait() { - return new Promise((resolve, reject) => { - async function requestStatus() { - try { - const response = await Axios.put( - url, - new FormData(), - { - params: { ...params, rq_id: rqId }, - }, - ); - if (response.status === 202) { - setTimeout(requestStatus, 3000); - } else { - resolve(); - } - } catch (errorData) { - reject(generateError(errorData)); - } - } - setTimeout(requestStatus); - }); - } const isCloudStorage = sourceStorage.location === StorageLocation.CLOUD_STORAGE; - if (isCloudStorage) { - try { + try { + if (isCloudStorage) { const response = await Axios.post(url, new FormData(), { params, }); - rqId = response.data.rq_id; - } catch (errorData) { - throw generateError(errorData); + return response.data.rq_id; } - } else { const chunkSize = config.uploadChunkSize * 1024 * 1024; const uploadConfig = { chunkSize, endpoint: `${origin}${backendAPI}/${session}s/${id}/annotations/`, }; - - try { - await Axios.post(url, - new FormData(), { - params, - headers: { 'Upload-Start': true }, - }); - await chunkUpload(file as File, uploadConfig); - const response = await Axios.post(url, - new FormData(), { - params, - headers: { 'Upload-Finish': true }, - }); - rqId = response.data.rq_id; - } catch (errorData) { - throw generateError(errorData); - } - } - try { - return await wait(); + await Axios.post(url, + new FormData(), { + params, + headers: { 'Upload-Start': true }, + }); + await chunkUpload(file as File, uploadConfig); + const response = await Axios.post(url, + new FormData(), { + params, + headers: { 'Upload-Finish': true }, + }); + return response.data.rq_id; } catch (errorData) { throw generateError(errorData); } @@ -1877,7 +1688,7 @@ async function getLambdaRequests() { } } -async function getRequestStatus(requestID) { +async function getLambdaRequestStatus(requestID) { const { backendAPI } = config; try { @@ -2424,6 +2235,41 @@ async function getAnalyticsReports( } } +async function getRequestsList(): Promise> { + const { backendAPI } = config; + const params = enableOrganization(); + + try { + const response = await fetchAll(`${backendAPI}/requests`, params); + + return response.results; + } catch (errorData) { + throw generateError(errorData); + } +} + +async function 
getRequestStatus(rqID: string): Promise { + const { backendAPI } = config; + + try { + const response = await Axios.get(`${backendAPI}/requests/${rqID}`); + + return response.data; + } catch (errorData) { + throw generateError(errorData); + } +} + +async function cancelRequest(requestID): Promise { + const { backendAPI } = config; + + try { + await Axios.post(`${backendAPI}/requests/${requestID}/cancel`); + } catch (errorData) { + throw generateError(errorData); + } +} + const listenToCreateAnalyticsReportCallbacks: { job: LongProcessListener; task: LongProcessListener; @@ -2542,7 +2388,6 @@ export default Object.freeze({ get: getTasks, save: saveTask, create: createTask, - listenToCreate: listenToCreateTask, delete: deleteTask, exportDataset: exportDataset('tasks'), getPreview: getPreview('tasks'), @@ -2590,7 +2435,7 @@ export default Object.freeze({ lambda: Object.freeze({ list: getLambdaFunctions, - status: getRequestStatus, + status: getLambdaRequestStatus, requests: getLambdaRequests, run: runLambdaRequest, call: callLambdaFunction, @@ -2666,4 +2511,10 @@ export default Object.freeze({ }), }), }), + + requests: Object.freeze({ + list: getRequestsList, + status: getRequestStatus, + cancel: cancelRequest, + }), }); diff --git a/cvat-core/src/server-response-types.ts b/cvat-core/src/server-response-types.ts index 0bcce7cfb67..07a869b43a0 100644 --- a/cvat-core/src/server-response-types.ts +++ b/cvat-core/src/server-response-types.ts @@ -495,3 +495,25 @@ export interface SerializedAPISchema { url: string; }; } + +export interface SerializedRequest { + id?: string; + status: string; + operation?: { + target: string; + type: string; + format: string; + job_id: number | null; + task_id: number | null; + project_id: number | null; + }; + progress?: number; + message: string; + result_url?: string; + result_id?: number; + created_date?: string; + started_date?: string; + finished_date?: string; + expiry_date?: string; + owner?: any; +} diff --git a/cvat-core/src/session-implementation.ts b/cvat-core/src/session-implementation.ts index 823261a4830..4483c111393 100644 --- a/cvat-core/src/session-implementation.ts +++ b/cvat-core/src/session-implementation.ts @@ -4,9 +4,11 @@ // SPDX-License-Identifier: MIT import { omit } from 'lodash'; +import config from './config'; import { ArgumentError } from './exceptions'; import { HistoryActions, JobStage, JobState, JobType, + RQStatus, } from './enums'; import { Task as TaskClass, Job as JobClass } from './session'; import logger from './logger'; @@ -31,6 +33,8 @@ import { importDataset, exportDataset, clearCache, getHistory, } from './annotations'; import AnnotationGuide from './guide'; +import requestsManager from './requests-manager'; +import { Request } from './request'; import User from './user'; // must be called with task/job context @@ -499,7 +503,8 @@ export function implementJob(Job: typeof JobClass): typeof JobClass { file: Parameters[3], options: Parameters[4], ): ReturnType { - return importDataset(this, format, useDefaultLocation, sourceStorage, file, options); + const rqID = await importDataset(this, format, useDefaultLocation, sourceStorage, file, options); + return rqID; }, }); @@ -512,7 +517,8 @@ export function implementJob(Job: typeof JobClass): typeof JobClass { targetStorage: Parameters[3], customName?: Parameters[4], ): ReturnType { - return exportDataset(this, format, saveImages, useDefaultSettings, targetStorage, customName); + const rqID = await exportDataset(this, format, saveImages, useDefaultSettings, targetStorage, 
customName); + return rqID; }, }); @@ -612,7 +618,7 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { Object.defineProperty(Task.prototype.save, 'implementation', { value: async function saveImplementation( this: TaskClass, - onUpdate: Parameters[0], + options: Parameters[0], ): ReturnType { if (typeof this.id !== 'undefined') { // If the task has been already created, we update it @@ -705,7 +711,22 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { ...(typeof this.cloudStorageId !== 'undefined' ? { cloud_storage_id: this.cloudStorageId } : {}), }; - const task = await serverProxy.tasks.create(taskSpec, taskDataSpec, onUpdate); + const { taskID, rqID } = await serverProxy.tasks.create( + taskSpec, + taskDataSpec, + options?.requestStatusCallback || (() => {}), + ); + + await requestsManager.listen(rqID, { + callback: (request: Request) => { + options?.requestStatusCallback(request); + if (request.status === RQStatus.FAILED) { + serverProxy.tasks.delete(taskID, config.organization.organizationSlug || null); + } + }, + }); + + const [task] = await serverProxy.tasks.get({ id: taskID }); const labels = await serverProxy.labels.get({ task_id: task.id }); const jobs = await serverProxy.jobs.get({ filter: JSON.stringify({ and: [{ '==': [{ var: 'task_id' }, task.id] }] }), @@ -723,10 +744,12 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { Object.defineProperty(Task.prototype.listenToCreate, 'implementation', { value: async function listenToCreateImplementation( this: TaskClass, - onUpdate: Parameters[0], + rqID: Parameters[0], + options: Parameters[1], ): ReturnType { if (Number.isInteger(this.id) && this.size === 0) { - const serializedTask = await serverProxy.tasks.listenToCreate(this.id, onUpdate); + const request = await requestsManager.listen(rqID, options); + const [serializedTask] = await serverProxy.tasks.get({ id: request.operation.taskID }); return new Task(omit(serializedTask, ['labels', 'jobs'])); } @@ -752,13 +775,14 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { }); Object.defineProperty(Task.prototype.backup, 'implementation', { - value: function backupImplementation( + value: async function backupImplementation( this: TaskClass, targetStorage: Parameters[0], useDefaultSettings: Parameters[1], fileName: Parameters[2], ): ReturnType { - return serverProxy.tasks.backup(this.id, targetStorage, useDefaultSettings, fileName); + const rqID = await serverProxy.tasks.backup(this.id, targetStorage, useDefaultSettings, fileName); + return rqID; }, }); @@ -768,16 +792,8 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { storage: Parameters[0], file: Parameters[1], ): ReturnType { - const serializedTask = await serverProxy.tasks.restore(storage, file); - // When request task by ID we also need to add labels and jobs to work with them - const labels = await serverProxy.labels.get({ task_id: serializedTask.id }); - const jobs = await serverProxy.jobs.get({ task_id: serializedTask.id }, true); - return new Task({ - ...omit(serializedTask, ['jobs', 'labels']), - progress: serializedTask.jobs, - jobs, - labels: labels.results, - }); + const rqID = await serverProxy.tasks.restore(storage, file); + return rqID; }, }); @@ -797,7 +813,6 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { } const job = this.jobs.filter((_job) => _job.startFrame <= frame && _job.stopFrame >= frame)[0]; - const result = await getFrame( job.id, this.dataChunkSize, 
@@ -1114,7 +1129,7 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { }); Object.defineProperty(Task.prototype.annotations.upload, 'implementation', { - value: function uploadAnnotationsImplementation( + value: async function uploadAnnotationsImplementation( this: TaskClass, format: Parameters[0], useDefaultLocation: Parameters[1], @@ -1122,7 +1137,8 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { file: Parameters[3], options: Parameters[4], ): ReturnType { - return importDataset(this, format, useDefaultLocation, sourceStorage, file, options); + const rqID = await importDataset(this, format, useDefaultLocation, sourceStorage, file, options); + return rqID; }, }); @@ -1145,7 +1161,7 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { }); Object.defineProperty(Task.prototype.annotations.exportDataset, 'implementation', { - value: function exportDatasetImplementation( + value: async function exportDatasetImplementation( this: TaskClass, format: Parameters[0], saveImages: Parameters[1], @@ -1153,7 +1169,8 @@ export function implementTask(Task: typeof TaskClass): typeof TaskClass { targetStorage: Parameters[3], customName: Parameters[4], ): ReturnType { - return exportDataset(this, format, saveImages, useDefaultSettings, targetStorage, customName); + const rqID = await exportDataset(this, format, saveImages, useDefaultSettings, targetStorage, customName); + return rqID; }, }); diff --git a/cvat-core/src/session.ts b/cvat-core/src/session.ts index f4720535120..6209c361912 100644 --- a/cvat-core/src/session.ts +++ b/cvat-core/src/session.ts @@ -8,7 +8,7 @@ import _ from 'lodash'; import { ChunkQuality } from 'cvat-data'; import { ChunkType, DimensionType, HistoryActions, JobStage, - JobState, JobType, RQStatus, StorageLocation, TaskMode, TaskStatus, + JobState, JobType, StorageLocation, TaskMode, TaskStatus, } from './enums'; import { Storage } from './storage'; @@ -24,6 +24,7 @@ import { import AnnotationGuide from './guide'; import { FrameData } from './frames'; import Statistics from './statistics'; +import { Request } from './request'; import logger from './logger'; import Issue from './issue'; import ObjectState from './object-state'; @@ -346,7 +347,7 @@ export class Session { convMaskToPoly?: boolean, updateStatusCallback?: (s: string, n: number) => void, }, - ) => Promise; + ) => Promise; select: (objectStates: ObjectState[], x: number, y: number) => Promise<{ state: ObjectState, distance: number | null, @@ -1117,15 +1118,16 @@ export class Task extends Session { return result; } - async save(onUpdate: (state: RQStatus, progress: number, message: string) => void = () => {}): Promise { - const result = await PluginRegistry.apiWrapper.call(this, Task.prototype.save, onUpdate); + async save(options?: { requestStatusCallback?: (request: Request) => void }): Promise { + const result = await PluginRegistry.apiWrapper.call(this, Task.prototype.save, options); return result; } async listenToCreate( - onUpdate: (state: RQStatus, progress: number, message: string) => void = () => {}, + rqID, + options, ): Promise { - const result = await PluginRegistry.apiWrapper.call(this, Task.prototype.listenToCreate, onUpdate); + const result = await PluginRegistry.apiWrapper.call(this, Task.prototype.listenToCreate, rqID, options); return result; } @@ -1150,7 +1152,7 @@ export class Task extends Session { return result; } - static async restore(storage: Storage, file: File | string): Promise { + static async restore(storage: 
Storage, file: File | string): Promise { const result = await PluginRegistry.apiWrapper.call(this, Task.restore, storage, file); return result; } diff --git a/cvat-core/tests/mocks/dummy-data.mock.cjs b/cvat-core/tests/mocks/dummy-data.mock.cjs index f7a0d309408..3653d62a6cd 100644 --- a/cvat-core/tests/mocks/dummy-data.mock.cjs +++ b/cvat-core/tests/mocks/dummy-data.mock.cjs @@ -3388,6 +3388,38 @@ const webhooksEventsDummyData = { }, } +const requestsDummyData = { + count: 1, + next: null, + previous: null, + results: [ + { + status: "finished", + message: "", + id: "create:task-1", + operation: { + type: "create:task", + target: "task", + project_id: null, + task_id: 1, + job_id: null, + format: null, + name: "create:task-1" + }, + percent: 0, + enqueue_date: "2024-04-26T09:22:43.824811Z", + start_date: "2024-04-26T09:22:43.827597Z", + finished_date: "2024-04-26T09:22:43.884098Z", + expire_date: "2024-04-26T09:31:03.884098", + owner: { + id: 1, + username: "admin", + }, + result_url: null, + } + ] +} + module.exports = { tasksDummyData, tasksDummyLabelsData, @@ -3404,4 +3436,5 @@ module.exports = { webhooksDummyData, webhooksEventsDummyData, jobsDummyData, + requestsDummyData, }; diff --git a/cvat-core/tests/mocks/server-proxy.mock.cjs b/cvat-core/tests/mocks/server-proxy.mock.cjs index af8b00ea3a4..d1cbfa9fe1a 100644 --- a/cvat-core/tests/mocks/server-proxy.mock.cjs +++ b/cvat-core/tests/mocks/server-proxy.mock.cjs @@ -1,5 +1,5 @@ // Copyright (C) 2020-2022 Intel Corporation -// Copyright (C) 2022-2023 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT @@ -19,6 +19,7 @@ const { webhooksDummyData, webhooksEventsDummyData, jobsDummyData, + requestsDummyData, } = require('./dummy-data.mock.cjs'); function QueryStringToJSON(query, ignoreList = []) { @@ -228,8 +229,7 @@ class ServerProxy { labels: JSON.parse(JSON.stringify(taskData.labels)), }); - const createdTask = await getTasks(`?id=${id}`); - return createdTask[0]; + return { taskID: id, rqID: `create:task-${id}` }; } async function deleteTask(id) { @@ -591,6 +591,11 @@ class ServerProxy { return; } + async function getImportRequestStatus() { + const requests = requestsDummyData.results; + return requests[0]; + } + Object.defineProperties( this, Object.freeze({ @@ -698,6 +703,13 @@ class ServerProxy { }), writable: false, }, + + requests: { + value: Object.freeze({ + status: getImportRequestStatus, + }), + writable: false, + }, }), ); } diff --git a/cvat-sdk/cvat_sdk/core/client.py b/cvat-sdk/cvat_sdk/core/client.py index dc713771070..add7ccb5f3d 100644 --- a/cvat-sdk/cvat_sdk/core/client.py +++ b/cvat-sdk/cvat_sdk/core/client.py @@ -21,7 +21,6 @@ from cvat_sdk.api_client import ApiClient, Configuration, exceptions, models from cvat_sdk.core.exceptions import IncompatibleVersionException, InvalidHostException -from cvat_sdk.core.helpers import expect_status from cvat_sdk.core.proxies.issues import CommentsRepo, IssuesRepo from cvat_sdk.core.proxies.jobs import JobsRepo from cvat_sdk.core.proxies.model_proxy import Repo @@ -209,37 +208,26 @@ def logout(self) -> None: def wait_for_completion( self: Client, - url: str, + rq_id: str, *, - success_status: int, status_check_period: Optional[int] = None, - query_params: Optional[Dict[str, Any]] = None, - post_params: Optional[Dict[str, Any]] = None, - method: str = "POST", - positive_statuses: Optional[Sequence[int]] = None, - ) -> urllib3.HTTPResponse: + ) -> Tuple[models.Request, urllib3.HTTPResponse]: if status_check_period is None: 
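+            # fall back to the client-wide default polling period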
status_check_period = self.config.status_check_period - positive_statuses = set(positive_statuses) | {success_status} - while True: sleep(status_check_period) - response = self.api_client.rest_client.request( - method=method, - url=url, - headers=self.api_client.get_common_headers(), - query_params=query_params, - post_params=post_params, - ) + request, response = self.api_client.requests_api.retrieve(rq_id) - self.logger.debug("STATUS %s", response.status) - expect_status(positive_statuses, response) - if response.status == success_status: + if request.status.value == models.RequestStatus.allowed_values[("value",)]["FINISHED"]: break + elif request.status.value == models.RequestStatus.allowed_values[("value",)]["FAILED"]: + raise exceptions.ApiException( + status=request.status, reason=request.message, http_resp=response + ) - return response + return request, response def check_server_version(self, fail_if_unsupported: Optional[bool] = None) -> None: if fail_if_unsupported is None: diff --git a/cvat-sdk/cvat_sdk/core/downloading.py b/cvat-sdk/cvat_sdk/core/downloading.py index fdde8430438..c37765f8114 100644 --- a/cvat-sdk/cvat_sdk/core/downloading.py +++ b/cvat-sdk/cvat_sdk/core/downloading.py @@ -5,11 +5,13 @@ from __future__ import annotations +import json from contextlib import closing from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, Optional from cvat_sdk.api_client.api_client import Endpoint +from cvat_sdk.core.helpers import expect_status from cvat_sdk.core.progress import NullProgressReporter, ProgressReporter from cvat_sdk.core.utils import atomic_writer @@ -86,18 +88,23 @@ def prepare_and_download_file_from_endpoint( url = client.api_map.make_endpoint_url( endpoint.path, kwsub=url_params, query_params=query_params ) - client.wait_for_completion( - url, + + # initialize background process + response = client.api_client.rest_client.request( method="GET", - positive_statuses=[202], - success_status=201, - status_check_period=status_check_period, + url=url, + headers=client.api_client.get_common_headers(), ) - query_params = dict(query_params or {}) - query_params["action"] = "download" - url = client.api_map.make_endpoint_url( - endpoint.path, kwsub=url_params, query_params=query_params + client.logger.debug("STATUS %s", response.status) + expect_status(202, response) + rq_id = json.loads(response.data).get("rq_id") + assert rq_id, "Request identifier was not found in server response" + + # wait until background process will be finished or failed + request, response = client.wait_for_completion( + rq_id, status_check_period=status_check_period ) + downloader = Downloader(client) - downloader.download_file(url, output_path=filename, pbar=pbar) + downloader.download_file(request.result_url, output_path=filename, pbar=pbar) diff --git a/cvat-sdk/cvat_sdk/core/proxies/projects.py b/cvat-sdk/cvat_sdk/core/proxies/projects.py index 939e555fd62..a60483e7d40 100644 --- a/cvat-sdk/cvat_sdk/core/proxies/projects.py +++ b/cvat-sdk/cvat_sdk/core/proxies/projects.py @@ -56,7 +56,6 @@ def import_dataset( DatasetUploader(self._client).upload_file_and_wait( self.api.create_dataset_endpoint, - self.api.retrieve_dataset_endpoint, filename, format_name, url_params={"id": self.id}, @@ -210,16 +209,13 @@ def create_from_backup( logger=self._client.logger.debug, ) - rq_id = json.loads(response.data)["rq_id"] - response = self._client.wait_for_completion( - url, - success_status=201, - positive_statuses=[202], - post_params={"rq_id": rq_id}, - 
status_check_period=status_check_period, + rq_id = json.loads(response.data).get("rq_id") + assert rq_id, "The rq_id was not found in server response" + request, response = self._client.wait_for_completion( + rq_id, status_check_period=status_check_period ) - project_id = json.loads(response.data)["id"] + project_id = request.result_id self._client.logger.info( f"Project has been imported successfully. Project ID: {project_id}" ) diff --git a/cvat-sdk/cvat_sdk/core/proxies/tasks.py b/cvat-sdk/cvat_sdk/core/proxies/tasks.py index 509928f4dd5..2a2a33f6cff 100644 --- a/cvat-sdk/cvat_sdk/core/proxies/tasks.py +++ b/cvat-sdk/cvat_sdk/core/proxies/tasks.py @@ -112,18 +112,22 @@ def upload_data( elif resource_type is ResourceType.SHARE: data["server_files"] = resources - self.api.create_data( + result, _ = self.api.create_data( self.id, data_request=models.DataRequest(**data), ) + rq_id = result.rq_id elif resource_type == ResourceType.LOCAL: url = self._client.api_map.make_endpoint_url( self.api.create_data_endpoint.path, kwsub={"id": self.id} ) - DataUploader(self._client).upload_files( + response = DataUploader(self._client).upload_files( url, list(map(Path, resources)), pbar=pbar, **data ) + response = json.loads(response.data) + rq_id = response.get("rq_id") + assert rq_id, "The rq_id param was not found in the response" if wait_for_completion: if status_check_period is None: @@ -132,27 +136,21 @@ def upload_data( self._client.logger.info("Awaiting for task %s creation...", self.id) while True: sleep(status_check_period) - (status, response) = self.api.retrieve_status(self.id) + request_details, response = self._client.api_client.requests_api.retrieve(rq_id) + status, message = request_details.status, request_details.message self._client.logger.info( "Task %s creation status: %s (message=%s)", self.id, - status.state.value, - status.message, + status, + message, ) - if ( - status.state.value - == models.RqStatusStateEnum.allowed_values[("value",)]["FINISHED"] - ): + if status.value == models.RequestStatus.allowed_values[("value",)]["FINISHED"]: break - elif ( - status.state.value - == models.RqStatusStateEnum.allowed_values[("value",)]["FAILED"] - ): - raise exceptions.ApiException( - status=status.state.value, reason=status.message, http_resp=response - ) + + elif status.value == models.RequestStatus.allowed_values[("value",)]["FAILED"]: + raise exceptions.ApiException(status=status, reason=message, http_resp=response) self.fetch() @@ -431,16 +429,14 @@ def create_from_backup( logger=self._client.logger.debug, ) - rq_id = json.loads(response.data)["rq_id"] - response = self._client.wait_for_completion( - url, - success_status=201, - positive_statuses=[202], - post_params={"rq_id": rq_id}, - status_check_period=status_check_period, + rq_id = json.loads(response.data).get("rq_id") + assert rq_id, "The rq_id was not found in server response" + + request, response = self._client.wait_for_completion( + rq_id, status_check_period=status_check_period ) - task_id = json.loads(response.data)["id"] + task_id = request.result_id self._client.logger.info(f"Task has been imported successfully. 
Task ID: {task_id}") return self.retrieve(task_id) diff --git a/cvat-sdk/cvat_sdk/core/uploading.py b/cvat-sdk/cvat_sdk/core/uploading.py index f13f862e91c..0ccfd902da6 100644 --- a/cvat-sdk/cvat_sdk/core/uploading.py +++ b/cvat-sdk/cvat_sdk/core/uploading.py @@ -7,7 +7,7 @@ import json import os from pathlib import Path -from typing import TYPE_CHECKING, Any, ContextManager, Dict, List, Optional, Sequence, Tuple +from typing import TYPE_CHECKING, Any, ContextManager, Dict, List, Optional, Tuple import requests import urllib3 @@ -199,27 +199,6 @@ def _uploading_task(pbar: ProgressReporter, total_size: int) -> ContextManager[N total=total_size, desc="Uploading data", unit_scale=True, unit="B", unit_divisor=1024 ) - def _wait_for_completion( - self, - url: str, - *, - success_status: int, - status_check_period: Optional[int] = None, - query_params: Optional[Dict[str, Any]] = None, - post_params: Optional[Dict[str, Any]] = None, - method: str = "POST", - positive_statuses: Optional[Sequence[int]] = None, - ) -> urllib3.HTTPResponse: - return self._client.wait_for_completion( - url, - success_status=success_status, - status_check_period=status_check_period, - query_params=query_params, - post_params=post_params, - method=method, - positive_statuses=positive_statuses, - ) - @staticmethod def _make_tus_uploader(api_client: ApiClient, url: str, **kwargs): # Add headers required by CVAT server @@ -289,23 +268,14 @@ def upload_file_and_wait( rq_id = json.loads(response.data).get("rq_id") assert rq_id, "The rq_id was not found in the response" - params["rq_id"] = rq_id - self._wait_for_completion( - url, - success_status=201, - positive_statuses=[202], - status_check_period=status_check_period, - query_params=params, - method="PUT", - ) + self._client.wait_for_completion(rq_id, status_check_period=status_check_period) class DatasetUploader(Uploader): def upload_file_and_wait( self, upload_endpoint: Endpoint, - retrieve_endpoint: Endpoint, filename: Path, format_name: str, *, @@ -321,19 +291,7 @@ def upload_file_and_wait( rq_id = json.loads(response.data).get("rq_id") assert rq_id, "The rq_id was not found in the response" - url = self._client.api_map.make_endpoint_url(retrieve_endpoint.path, kwsub=url_params) - params = { - "action": "import_status", - "rq_id": rq_id, - } - self._wait_for_completion( - url, - success_status=201, - positive_statuses=[202], - status_check_period=status_check_period, - query_params=params, - method="GET", - ) + self._client.wait_for_completion(rq_id, status_check_period=status_check_period) class DataUploader(Uploader): @@ -390,7 +348,7 @@ def upload_files( logger=self._client.logger.debug, ) - self._tus_finish_upload(url, fields=kwargs) + return self._tus_finish_upload(url, fields=kwargs) def _split_files_by_requests( self, filenames: List[Path] diff --git a/cvat-ui/package.json b/cvat-ui/package.json index 1a12cb29142..3d3c97be546 100644 --- a/cvat-ui/package.json +++ b/cvat-ui/package.json @@ -1,6 +1,6 @@ { "name": "cvat-ui", - "version": "1.63.12", + "version": "1.64.0", "description": "CVAT single-page application", "main": "src/index.tsx", "scripts": { diff --git a/cvat-ui/src/actions/export-actions.ts b/cvat-ui/src/actions/export-actions.ts index fe38dc73913..353cc4ae78e 100644 --- a/cvat-ui/src/actions/export-actions.ts +++ b/cvat-ui/src/actions/export-actions.ts @@ -1,13 +1,15 @@ // Copyright (C) 2021-2022 Intel Corporation -// Copyright (C) 2022 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT import { 
ActionUnion, createAction, ThunkAction } from 'utils/redux'; -import { getCore, Storage } from 'cvat-core-wrapper'; - -const core = getCore(); +import { Storage, ProjectOrTaskOrJob, Job } from 'cvat-core-wrapper'; +import { + getInstanceType, RequestInstanceType, listen, RequestsActions, + shouldListenForProgress, +} from './requests-actions'; export enum ExportActionTypes { OPEN_EXPORT_DATASET_MODAL = 'OPEN_EXPORT_DATASET_MODAL', @@ -29,29 +31,33 @@ export const exportActions = { closeExportDatasetModal: (instance: any) => ( createAction(ExportActionTypes.CLOSE_EXPORT_DATASET_MODAL, { instance }) ), - exportDataset: (instance: any, format: string) => ( - createAction(ExportActionTypes.EXPORT_DATASET, { instance, format }) - ), exportDatasetSuccess: ( - instance: any, + instance: ProjectOrTaskOrJob | RequestInstanceType, instanceType: 'project' | 'task' | 'job', format: string, - isLocal: boolean, - resource: 'Dataset' | 'Annotations', + resource: 'dataset' | 'annotations', + target?: 'local' | 'cloudstorage', ) => ( createAction(ExportActionTypes.EXPORT_DATASET_SUCCESS, { instance, instanceType, format, - isLocal, + target, resource, }) ), - exportDatasetFailed: (instance: any, instanceType: 'project' | 'task' | 'job', format: string, error: any) => ( + exportDatasetFailed: ( + instance: ProjectOrTaskOrJob | RequestInstanceType, + instanceType: 'project' | 'task' | 'job', + format: string, + resource: 'dataset' | 'annotations', + error: any, + ) => ( createAction(ExportActionTypes.EXPORT_DATASET_FAILED, { instance, instanceType, format, + resource, error, }) ), @@ -61,68 +67,107 @@ export const exportActions = { closeExportBackupModal: (instance: any) => ( createAction(ExportActionTypes.CLOSE_EXPORT_BACKUP_MODAL, { instance }) ), - exportBackup: (instance: any) => ( - createAction(ExportActionTypes.EXPORT_BACKUP, { instance }) - ), - exportBackupSuccess: (instance: any, instanceType: 'task' | 'project', isLocal: boolean) => ( - createAction(ExportActionTypes.EXPORT_BACKUP_SUCCESS, { instance, instanceType, isLocal }) + exportBackupSuccess: (instance: Exclude | RequestInstanceType, instanceType: 'task' | 'project', target?: 'local' | 'cloudstorage') => ( + createAction(ExportActionTypes.EXPORT_BACKUP_SUCCESS, { instance, instanceType, target }) ), - exportBackupFailed: (instance: any, instanceType: 'task' | 'project', error: any) => ( + exportBackupFailed: (instance: Exclude | RequestInstanceType, instanceType: 'task' | 'project', error: any) => ( createAction(ExportActionTypes.EXPORT_BACKUP_FAILED, { instance, instanceType, error }) ), }; +export async function listenExportDatasetAsync( + rqID: string, + dispatch: (action: ExportActions | RequestsActions) => void, + params: { + instance: ProjectOrTaskOrJob | RequestInstanceType, + format: string, + saveImages: boolean, + }, +): Promise { + const { instance, format, saveImages } = params; + const resource = saveImages ? 'dataset' : 'annotations'; + + const instanceType = getInstanceType(instance); + try { + const result = await listen(rqID, dispatch); + const target = !result?.url ? 
'cloudstorage' : 'local'; + dispatch(exportActions.exportDatasetSuccess( + instance, instanceType, format, resource, target, + )); + } catch (error) { + dispatch(exportActions.exportDatasetFailed(instance, instanceType, format, resource, error)); + } +} + export const exportDatasetAsync = ( - instance: any, + instance: ProjectOrTaskOrJob, format: string, saveImages: boolean, useDefaultSettings: boolean, targetStorage: Storage, name?: string, -): ThunkAction => async (dispatch) => { - dispatch(exportActions.exportDataset(instance, format)); +): ThunkAction => async (dispatch, getState) => { + const state = getState(); - let instanceType: 'project' | 'task' | 'job'; - if (instance instanceof core.classes.Project) { - instanceType = 'project'; - } else if (instance instanceof core.classes.Task) { - instanceType = 'task'; - } else { - instanceType = 'job'; - } + const resource = saveImages ? 'dataset' : 'annotations'; + const instanceType = getInstanceType(instance); try { - const result = await instance.annotations + const rqID = await instance.annotations .exportDataset(format, saveImages, useDefaultSettings, targetStorage, name); - if (result) { - const downloadAnchor = window.document.getElementById('downloadAnchor') as HTMLAnchorElement; - downloadAnchor.href = result; - downloadAnchor.click(); + if (shouldListenForProgress(rqID, state.requests)) { + await listenExportDatasetAsync(rqID, dispatch, { + instance, format, saveImages, + }); + } + if (!rqID) { + dispatch(exportActions.exportDatasetSuccess( + instance, instanceType, format, resource, + )); } - const resource = saveImages ? 'Dataset' : 'Annotations'; - dispatch(exportActions.exportDatasetSuccess(instance, instanceType, format, !!result, resource)); } catch (error) { - dispatch(exportActions.exportDatasetFailed(instance, instanceType, format, error)); + dispatch(exportActions.exportDatasetFailed(instance, instanceType, format, resource, error)); } }; +export async function listenExportBackupAsync( + rqID: string, + dispatch: (action: ExportActions | RequestsActions) => void, + params: { + instance: Exclude | RequestInstanceType, + }, +): Promise { + const { instance } = params; + const instanceType = getInstanceType(instance) as 'project' | 'task'; + + try { + const result = await listen(rqID, dispatch); + const target = !result?.url ? 'cloudstorage' : 'local'; + dispatch(exportActions.exportBackupSuccess(instance, instanceType, target)); + } catch (error) { + dispatch(exportActions.exportBackupFailed(instance, instanceType, error as Error)); + } +} + export const exportBackupAsync = ( - instance: any, + instance: Exclude, targetStorage: Storage, useDefaultSetting: boolean, - fileName?: string, -): ThunkAction => async (dispatch) => { - dispatch(exportActions.exportBackup(instance)); - const instanceType = (instance instanceof core.classes.Project) ? 
'project' : 'task'; + fileName: string, +): ThunkAction => async (dispatch, getState) => { + const state = getState(); + + const instanceType = getInstanceType(instance) as 'project' | 'task'; try { - const result = await instance.backup(targetStorage, useDefaultSetting, fileName); - if (result) { - const downloadAnchor = window.document.getElementById('downloadAnchor') as HTMLAnchorElement; - downloadAnchor.href = result; - downloadAnchor.click(); + const rqID = await instance + .backup(targetStorage, useDefaultSetting, fileName); + if (shouldListenForProgress(rqID, state.requests)) { + await listenExportBackupAsync(rqID, dispatch, { instance }); + } + if (!rqID) { + dispatch(exportActions.exportBackupSuccess(instance, instanceType)); } - dispatch(exportActions.exportBackupSuccess(instance, instanceType, !!result)); } catch (error) { dispatch(exportActions.exportBackupFailed(instance, instanceType, error as Error)); } diff --git a/cvat-ui/src/actions/import-actions.ts b/cvat-ui/src/actions/import-actions.ts index 1dd230b81cc..0fef13c22f0 100644 --- a/cvat-ui/src/actions/import-actions.ts +++ b/cvat-ui/src/actions/import-actions.ts @@ -1,16 +1,20 @@ // Copyright (C) 2021-2022 Intel Corporation -// Copyright (C) 2022 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT import { createAction, ActionUnion, ThunkAction } from 'utils/redux'; import { CombinedState } from 'reducers'; import { - getCore, Storage, Job, Task, Project, + getCore, Storage, Job, Task, Project, ProjectOrTaskOrJob, } from 'cvat-core-wrapper'; import { EventScope } from 'cvat-logger'; import { getProjectsAsync } from './projects-actions'; import { AnnotationActionTypes, fetchAnnotationsAsync } from './annotation-actions'; +import { + getInstanceType, listen, RequestInstanceType, RequestsActions, + shouldListenForProgress, +} from './requests-actions'; const core = getCore(); @@ -29,35 +33,35 @@ export enum ImportActionTypes { } export const importActions = { - openImportDatasetModal: (instance: any) => ( + openImportDatasetModal: (instance: ProjectOrTaskOrJob) => ( createAction(ImportActionTypes.OPEN_IMPORT_DATASET_MODAL, { instance }) ), - closeImportDatasetModal: (instance: any) => ( + closeImportDatasetModal: (instance: ProjectOrTaskOrJob) => ( createAction(ImportActionTypes.CLOSE_IMPORT_DATASET_MODAL, { instance }) ), - importDataset: (instance: any, format: string) => ( + importDataset: (instance: ProjectOrTaskOrJob | RequestInstanceType, format: string) => ( createAction(ImportActionTypes.IMPORT_DATASET, { instance, format }) ), - importDatasetSuccess: (instance: Job | Task | Project, resource: 'dataset' | 'annotation') => ( + importDatasetSuccess: (instance: ProjectOrTaskOrJob | RequestInstanceType, resource: 'dataset' | 'annotation') => ( createAction(ImportActionTypes.IMPORT_DATASET_SUCCESS, { instance, resource }) ), - importDatasetFailed: (instance: Job | Task | Project, resource: 'dataset' | 'annotation', error: any) => ( + importDatasetFailed: (instance: ProjectOrTaskOrJob | RequestInstanceType, resource: 'dataset' | 'annotation', error: any) => ( createAction(ImportActionTypes.IMPORT_DATASET_FAILED, { instance, resource, error, }) ), - importDatasetUpdateStatus: (instance: any, progress: number, status: string) => ( + importDatasetUpdateStatus: (instance: ProjectOrTaskOrJob, progress: number, status: string) => ( createAction(ImportActionTypes.IMPORT_DATASET_UPDATE_STATUS, { instance, progress, status }) ), openImportBackupModal: (instanceType: 'project' | 
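The export thunks above reduce to one pattern: the core call returns an `rqID` (or nothing when there is no background work to track), the thunk attaches a listener only when `shouldListenForProgress` confirms the request is not already being tracked, and a missing `url` on the finished request means the artifact went to cloud storage. A condensed sketch of that flow, with state checks and redux dispatches elided; `startDatasetExport` is an illustrative name, not part of the patch:

```typescript
import { getCore, ProjectOrTaskOrJob, Storage } from 'cvat-core-wrapper';

const core = getCore();

// Illustrative condensation of exportDatasetAsync above.
async function startDatasetExport(
    instance: ProjectOrTaskOrJob,
    format: string,
    targetStorage: Storage,
): Promise<void> {
    // The core method now returns a request id instead of a download URL.
    const rqID: string | undefined = await instance.annotations
        .exportDataset(format, true /* saveImages */, true /* useDefaultSettings */, targetStorage);

    if (!rqID) {
        return; // handled synchronously: report success right away
    }

    const request = await core.requests.listen(rqID, { callback: () => {} });
    // No url on the finished request means the file landed in cloud storage.
    const target = request?.url ? 'local' : 'cloudstorage';
    console.log(`Export finished (${target})`);
}
```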
'task') => ( createAction(ImportActionTypes.OPEN_IMPORT_BACKUP_MODAL, { instanceType }) ), + importBackup: () => createAction(ImportActionTypes.IMPORT_BACKUP), closeImportBackupModal: (instanceType: 'project' | 'task') => ( createAction(ImportActionTypes.CLOSE_IMPORT_BACKUP_MODAL, { instanceType }) ), - importBackup: () => createAction(ImportActionTypes.IMPORT_BACKUP), importBackupSuccess: (instanceId: number, instanceType: 'project' | 'task') => ( createAction(ImportActionTypes.IMPORT_BACKUP_SUCCESS, { instanceId, instanceType }) ), @@ -66,8 +70,27 @@ export const importActions = { ), }; +export async function listenImportDatasetAsync( + rqID: string, + dispatch: (action: ImportActions | RequestsActions) => void, + params: { + instance: ProjectOrTaskOrJob | RequestInstanceType, + }, +): Promise { + const { instance } = params; + + const instanceType = getInstanceType(instance); + const resource = instanceType === 'project' ? 'dataset' : 'annotation'; + try { + await listen(rqID, dispatch); + dispatch(importActions.importDatasetSuccess(instance, resource)); + } catch (error) { + dispatch(importActions.importDatasetFailed(instance, resource, error)); + } +} + export const importDatasetAsync = ( - instance: any, + instance: ProjectOrTaskOrJob, format: string, useDefaultSettings: boolean, sourceStorage: Storage, @@ -75,17 +98,15 @@ export const importDatasetAsync = ( convMaskToPoly: boolean, ): ThunkAction => ( async (dispatch, getState) => { - const resource = instance instanceof core.classes.Project ? 'dataset' : 'annotation'; + const instanceType = getInstanceType(instance); + const resource = instanceType === 'project' ? 'dataset' : 'annotation'; try { const state: CombinedState = getState(); - if (instance instanceof core.classes.Project) { - if (state.import.projects.dataset.current?.[instance.id]) { - throw Error('Only one importing of annotation/dataset allowed at the same time'); - } + if (instanceType === 'project') { dispatch(importActions.importDataset(instance, format)); - await instance.annotations + const rqID = await (instance as Project).annotations .importDataset(format, useDefaultSettings, sourceStorage, file, { convMaskToPoly, updateStatusCallback: (message: string, progress: number) => ( @@ -94,37 +115,42 @@ export const importDatasetAsync = ( )) ), }); - } else if (instance instanceof core.classes.Task) { - if (state.import.tasks.dataset.current?.[instance.id]) { - throw Error('Only one importing of annotation/dataset allowed at the same time'); + if (shouldListenForProgress(rqID, state.requests)) { + await listen(rqID, dispatch); } + } else if (instanceType === 'task') { dispatch(importActions.importDataset(instance, format)); - await instance.annotations.upload(format, useDefaultSettings, sourceStorage, file, { convMaskToPoly }); - } else { // job - if (state.import.tasks.dataset.current?.[instance.taskId]) { - throw Error('Annotations is being uploaded for the task'); - } - if (state.import.jobs.dataset.current?.[instance.id]) { - throw Error('Only one uploading of annotations for a job allowed at the same time'); + const rqID = await (instance as Task).annotations + .upload(format, useDefaultSettings, sourceStorage, file, { + convMaskToPoly, + }); + if (shouldListenForProgress(rqID, state.requests)) { + await listen(rqID, dispatch); } - + } else { // job dispatch(importActions.importDataset(instance, format)); + const rqID = await (instance as Job).annotations + .upload(format, useDefaultSettings, sourceStorage, file, { + convMaskToPoly, + }); + if 
(shouldListenForProgress(rqID, state.requests)) { + await listen(rqID, dispatch); - await instance.annotations.upload(format, useDefaultSettings, sourceStorage, file, { convMaskToPoly }); - await instance.logger.log(EventScope.uploadAnnotations); - await instance.annotations.clear({ reload: true }); - await instance.actions.clear(); + await (instance as Job).logger.log(EventScope.uploadAnnotations); + await (instance as Job).annotations.clear({ reload: true }); + await (instance as Job).actions.clear(); - // first set empty objects list - // to escape some problems in canvas when shape with the same - // clientID has different type (polygon, rectangle) for example - dispatch({ type: AnnotationActionTypes.UPLOAD_JOB_ANNOTATIONS_SUCCESS }); + // first set empty objects list + // to escape some problems in canvas when shape with the same + // clientID has different type (polygon, rectangle) for example + dispatch({ type: AnnotationActionTypes.UPLOAD_JOB_ANNOTATIONS_SUCCESS }); - const relevantInstance = getState().annotation.job.instance; - if (relevantInstance && relevantInstance.id === instance.id) { - setTimeout(() => { - dispatch(fetchAnnotationsAsync()); - }); + const relevantInstance = getState().annotation.job.instance; + if (relevantInstance && relevantInstance.id === instance.id) { + setTimeout(() => { + dispatch(fetchAnnotationsAsync()); + }); + } } } } catch (error) { @@ -139,13 +165,36 @@ export const importDatasetAsync = ( } ); +export async function listenImportBackupAsync( + rqID: string, + dispatch: (action: ImportActions | RequestsActions) => void, + params: { + instanceType: 'project' | 'task', + }, +): Promise { + const { instanceType } = params; + + try { + const result = await listen(rqID, dispatch); + + dispatch(importActions.importBackupSuccess(result?.resultID, instanceType)); + } catch (error) { + dispatch(importActions.importBackupFailed(instanceType, error)); + } +} + export const importBackupAsync = (instanceType: 'project' | 'task', storage: Storage, file: File | string): ThunkAction => ( - async (dispatch) => { + async (dispatch, getState) => { + const state: CombinedState = getState(); + dispatch(importActions.importBackup()); + try { const instanceClass = (instanceType === 'task') ? 
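For jobs, uploading annotations is only half of the flow: after the import request completes, the thunk logs the event, clears the cached annotations (so a shape whose clientID is reused cannot change type under the canvas), and refetches. A trimmed sketch of that sequence under the same assumptions as above, with error handling and dispatches elided:

```typescript
import { getCore, Job, Storage } from 'cvat-core-wrapper';
import { EventScope } from 'cvat-logger';

const core = getCore();

// Trimmed restatement of the job branch of importDatasetAsync above.
async function uploadJobAnnotations(job: Job, format: string, file: File, storage: Storage): Promise<void> {
    const rqID = await job.annotations.upload(format, true, storage, file, { convMaskToPoly: true });

    if (rqID) {
        // Block until the server-side import finishes before touching local state.
        await core.requests.listen(rqID, { callback: () => {} });

        await job.logger.log(EventScope.uploadAnnotations);
        await job.annotations.clear({ reload: true });
        await job.actions.clear();
        // The real thunk dispatches UPLOAD_JOB_ANNOTATIONS_SUCCESS and refetches here.
    }
}
```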
core.classes.Task : core.classes.Project; - const instance = await instanceClass.restore(storage, file); - dispatch(importActions.importBackupSuccess(instance.id, instanceType)); + const rqID = await instanceClass.restore(storage, file); + if (shouldListenForProgress(rqID, state.requests)) { + await listenImportBackupAsync(rqID, dispatch, { instanceType }); + } } catch (error) { dispatch(importActions.importBackupFailed(instanceType, error)); } diff --git a/cvat-ui/src/actions/requests-actions.ts b/cvat-ui/src/actions/requests-actions.ts new file mode 100644 index 00000000000..ed3438a2225 --- /dev/null +++ b/cvat-ui/src/actions/requests-actions.ts @@ -0,0 +1,97 @@ +// Copyright (C) 2024 CVAT.ai Corporation +// +// SPDX-License-Identifier: MIT + +import { ActionUnion, createAction } from 'utils/redux'; +import { RequestsQuery, RequestsState } from 'reducers'; +import { + Request, ProjectOrTaskOrJob, getCore, RQStatus, +} from 'cvat-core-wrapper'; + +const core = getCore(); + +export enum RequestsActionsTypes { + GET_REQUESTS = 'GET_REQUESTS', + GET_REQUESTS_SUCCESS = 'GET_REQUESTS_SUCCESS', + GET_REQUESTS_FAILED = 'GET_REQUESTS_FAILED', + GET_REQUEST_STATUS_SUCCESS = 'GET_REQUEST_STATUS_SUCCESS', + REQUEST_FINISHED = 'REQUEST_FINISHED', + REQUEST_FAILED = 'REQUEST_FAILED', + CANCEL_REQUEST = 'CANCEL_REQUEST', + CANCEL_REQUEST_FAILED = 'CANCEL_REQUEST_FAILED', + DELETE_REQUEST = 'DELETE_REQUEST', + DELETE_REQUEST_FAILED = 'DELETE_REQUEST_FAILED', +} + +export const requestsActions = { + getRequests: (query: RequestsQuery, fetching = true) => ( + createAction(RequestsActionsTypes.GET_REQUESTS, { query, fetching }) + ), + requestFinished: (request: Request) => createAction(RequestsActionsTypes.REQUEST_FINISHED, { request }), + requestFailed: (request: Request) => createAction(RequestsActionsTypes.REQUEST_FAILED, { request }), + getRequestsSuccess: (requests: Awaited>) => createAction( + RequestsActionsTypes.GET_REQUESTS_SUCCESS, { requests }, + ), + getRequestsFailed: (error: any) => createAction(RequestsActionsTypes.GET_REQUESTS_FAILED, { + error, + }), + getRequestStatusSuccess: (request: Request) => ( + createAction(RequestsActionsTypes.GET_REQUEST_STATUS_SUCCESS, { + request, + }) + ), + cancelRequest: (request: Request) => createAction(RequestsActionsTypes.CANCEL_REQUEST, { request }), + cancelRequestFailed: (request: Request, error: any) => createAction( + RequestsActionsTypes.CANCEL_REQUEST_FAILED, { request, error }, + ), +}; + +export type RequestsActions = ActionUnion; + +export interface RequestInstanceType { + id: number; + type: 'project' | 'task' | 'job'; +} + +export function getInstanceType(instance: ProjectOrTaskOrJob | RequestInstanceType): 'project' | 'task' | 'job' { + if (instance instanceof core.classes.Project) { + return 'project'; + } + + if (instance instanceof core.classes.Task) { + return 'task'; + } + + if (instance instanceof core.classes.Job) { + return 'job'; + } + + return instance.type; +} + +export function updateRequestProgress(request: Request, dispatch: (action: RequestsActions) => void): void { + dispatch( + requestsActions.getRequestStatusSuccess(request), + ); +} + +export function shouldListenForProgress(rqID: string | undefined, state: RequestsState): boolean { + return ( + typeof rqID === 'string' && + (!state.requests[rqID] || [RQStatus.FINISHED, RQStatus.FAILED].includes(state.requests[rqID]?.status)) + ); +} + +export function listen( + requestID: string, + dispatch: (action: RequestsActions) => void, + initialRequest?: Request, +) : Promise { + 
return core.requests + .listen(requestID, { + callback: (updatedRequest) => { + updateRequestProgress(updatedRequest, dispatch); + }, + initialRequest, + }); +} diff --git a/cvat-ui/src/actions/requests-async-actions.ts b/cvat-ui/src/actions/requests-async-actions.ts new file mode 100644 index 00000000000..04a5ffd0a5c --- /dev/null +++ b/cvat-ui/src/actions/requests-async-actions.ts @@ -0,0 +1,101 @@ +// Copyright (C) 2024 CVAT.ai Corporation +// +// SPDX-License-Identifier: MIT + +import { ThunkAction } from 'utils/redux'; +import { CombinedState, RequestsQuery, StorageLocation } from 'reducers'; +import { + getCore, RQStatus, Request, Project, Task, Job, +} from 'cvat-core-wrapper'; +import { listenExportBackupAsync, listenExportDatasetAsync } from './export-actions'; +import { RequestInstanceType, listen, requestsActions } from './requests-actions'; +import { listenImportBackupAsync, listenImportDatasetAsync } from './import-actions'; + +const core = getCore(); + +export interface RequestParams { + id: string; + type: string; + instance?: Project | Task | Job; + location?: StorageLocation; +} + +export function getRequestsAsync(query: RequestsQuery): ThunkAction { + return async (dispatch, getState): Promise => { + dispatch(requestsActions.getRequests(query)); + + const state: CombinedState = getState(); + + try { + const requests = await core.requests.list(); + + requests + .filter((request: Request) => [RQStatus.STARTED, RQStatus.QUEUED].includes(request.status)) + .forEach((request: Request): void => { + const { + id: rqID, + operation: { + type, target, format, taskID, projectID, jobID, + }, + } = request; + + if (state.requests.requests[rqID]) { + return; + } + + let instance: RequestInstanceType | null = null; + + const [operationType, operationTarget] = type.split(':'); + if (target === 'task') { + instance = { id: taskID as number, type: target }; + } else if (target === 'job') { + instance = { id: jobID as number, type: target }; + } else if (target === 'project') { + instance = { id: projectID as number, type: target }; + } + + if (operationType === 'export') { + if (operationTarget === 'backup') { + listenExportBackupAsync(rqID, dispatch, { instance: instance as RequestInstanceType }); + } else if (operationTarget === 'dataset' || operationTarget === 'annotations') { + listenExportDatasetAsync( + rqID, + dispatch, + { instance: instance as RequestInstanceType, format, saveImages: type.includes('dataset') }, + ); + } + } else if (operationType === 'import') { + if (operationTarget === 'backup') { + listenImportBackupAsync(rqID, dispatch, { instanceType: (instance as RequestInstanceType).type as 'project' | 'task' }); + } else if (operationTarget === 'dataset' || operationTarget === 'annotations') { + listenImportDatasetAsync( + rqID, + dispatch, + { instance: instance as RequestInstanceType }, + ); + } + } else if (operationType === 'create') { + if (operationTarget === 'task') { + listen(rqID, dispatch); + } + } + }); + dispatch(requestsActions.getRequestsSuccess(requests)); + } catch (error) { + dispatch(requestsActions.getRequestsFailed(error)); + } + }; +} + +export function cancelRequestAsync(request: Request, onSuccess: () => void): ThunkAction { + return async (dispatch): Promise => { + dispatch(requestsActions.cancelRequest(request)); + + try { + await core.requests.cancel(request.id); + onSuccess(); + } catch (error) { + dispatch(requestsActions.cancelRequestFailed(request, error)); + } + }; +} diff --git a/cvat-ui/src/actions/tasks-actions.ts 
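`getRequestsAsync` is what lets progress survive a closed tab: on app init it lists the server-side requests and, for anything still QUEUED or STARTED, re-attaches the listener matching the `operation.type` prefix (`export:*`, `import:*`, `create:task`). A sketch of wiring the fetch and cancellation into a component, assuming the codebase's usual thunk-aware `dispatch`; the hook itself is illustrative:

```typescript
import { useDispatch } from 'react-redux';
import { Request } from 'cvat-core-wrapper';
import { getRequestsAsync, cancelRequestAsync } from 'actions/requests-async-actions';

// Illustrative hook: the imported thunks exist in this patch, the hook does not.
function useRequestControls(): { refresh(): void; cancel(request: Request): void } {
    const dispatch = useDispatch();
    return {
        refresh: (): void => {
            // Re-fetch the request list and re-attach listeners for unfinished entries.
            dispatch(getRequestsAsync({ page: 1 }));
        },
        cancel: (request: Request): void => {
            // Only QUEUED requests may be canceled (enforced again in RequestCard below).
            dispatch(cancelRequestAsync(request, () => console.log(`canceled ${request.id}`)));
        },
    };
}
```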
b/cvat-ui/src/actions/tasks-actions.ts index 8a105feb45b..78a9f55b392 100644 --- a/cvat-ui/src/actions/tasks-actions.ts +++ b/cvat-ui/src/actions/tasks-actions.ts @@ -1,5 +1,5 @@ // Copyright (C) 2019-2022 Intel Corporation -// Copyright (C) 2022-2023 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT @@ -12,6 +12,7 @@ import { filterNull } from 'utils/filter-null'; import { ThunkDispatch, ThunkAction } from 'utils/redux'; import { getInferenceStatusAsync } from './models-actions'; +import { updateRequestProgress } from './requests-actions'; const cvat = getCore(); @@ -259,16 +260,28 @@ ThunkAction { taskInstance.remoteFiles = data.files.remote; try { - const savedTask = await taskInstance.save((status: RQStatus, progress: number, message: string): void => { - if (status === RQStatus.UNKNOWN) { - onProgress?.(`${message} ${progress ? `${Math.floor(progress * 100)}%` : ''}`); - } else if ([RQStatus.QUEUED, RQStatus.STARTED].includes(status)) { - const helperMessage = 'You may close the window.'; + const savedTask = await taskInstance.save({ + requestStatusCallback(request) { + let { message } = request; + let helperMessage = ''; + const { status, progress } = request; + if (!message) { + if ([RQStatus.QUEUED, RQStatus.STARTED].includes(status)) { + message = 'CVAT queued the task to import'; + helperMessage = 'You may close the window.'; + } else if (status === RQStatus.FAILED) { + message = 'Images processing failed'; + } else if (status === RQStatus.FINISHED) { + message = 'Task creation finished'; + } else { + message = 'Unknown status received'; + } + } onProgress?.(`${message} ${progress ? `${Math.floor(progress * 100)}%` : ''}. ${helperMessage}`); - } else { - onProgress?.(`${status}: ${message}`); - } + if (request.id) updateRequestProgress(request, dispatch); + }, }); + dispatch(updateTaskInState(savedTask)); dispatch(getTaskPreviewAsync(savedTask)); return savedTask; diff --git a/cvat-ui/src/components/actions-menu/actions-menu.tsx b/cvat-ui/src/components/actions-menu/actions-menu.tsx index 6f17e335c8a..8a6997dfe2f 100644 --- a/cvat-ui/src/components/actions-menu/actions-menu.tsx +++ b/cvat-ui/src/components/actions-menu/actions-menu.tsx @@ -6,7 +6,6 @@ import './styles.scss'; import React, { useCallback } from 'react'; import Modal from 'antd/lib/modal'; -import { LoadingOutlined } from '@ant-design/icons'; import { DimensionType, CVATCore } from 'cvat-core-wrapper'; import Menu, { MenuInfo } from 'components/dropdown-menu'; import { usePlugins } from 'utils/hooks'; @@ -23,7 +22,6 @@ interface Props { dumpers: AnnotationFormats['dumpers']; inferenceIsActive: boolean; taskDimension: DimensionType; - backupIsActive: boolean; onClickMenu: (params: MenuInfo) => void; } @@ -44,7 +42,6 @@ function ActionsMenuComponent(props: Props): JSX.Element { projectID, bugTracker, inferenceIsActive, - backupIsActive, onClickMenu, } = props; @@ -101,8 +98,6 @@ function ActionsMenuComponent(props: Props): JSX.Element { menuItems.push([( } > Backup Task diff --git a/cvat-ui/src/components/common/styles.scss b/cvat-ui/src/components/common/styles.scss new file mode 100644 index 00000000000..439a0354d26 --- /dev/null +++ b/cvat-ui/src/components/common/styles.scss @@ -0,0 +1,19 @@ +// Copyright (C) 2024 CVAT.ai Corporation +// +// SPDX-License-Identifier: MIT + +@import 'base'; + +.cvat-modal-upload-file-status .ant-modal-body { + display: flex; + align-items: center; + flex-flow: column; + + .ant-progress { + margin-bottom: $grid-unit-size * 2; + } + 
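Task creation now reports progress through a single `requestStatusCallback(request)` instead of positional `(status, progress, message)` arguments, so the same `Request` object can drive both the inline message and the requests store. A sketch of calling `save` with the new callback; task construction details and the fallback string are assumptions, not taken from the patch:

```typescript
import { getCore, Request, RQStatus } from 'cvat-core-wrapper';

const core = getCore();

async function createTaskWithProgress(taskData: Record<string, unknown>): Promise<void> {
    const task = new core.classes.Task(taskData); // construction args assumed
    await task.save({
        requestStatusCallback(request: Request): void {
            const { status, progress, message } = request;
            if ([RQStatus.QUEUED, RQStatus.STARTED].includes(status)) {
                // Server-side phase: the tab can be closed, the request keeps running.
                console.log(`${message} ${progress ? `${Math.floor(progress * 100)}%` : ''}`);
            } else if (status === RQStatus.FAILED) {
                console.error(message || 'Task creation failed'); // real code supplies its own defaults
            }
        },
    });
}
```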
+ .ant-alert { + width: 100%; + } +} \ No newline at end of file diff --git a/cvat-ui/src/components/common/upload-file-status-modal.tsx b/cvat-ui/src/components/common/upload-file-status-modal.tsx new file mode 100644 index 00000000000..5df39000210 --- /dev/null +++ b/cvat-ui/src/components/common/upload-file-status-modal.tsx @@ -0,0 +1,44 @@ +// Copyright (C) 2021-2022 Intel Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation +// +// SPDX-License-Identifier: MIT + +import './styles.scss'; +import React from 'react'; +import { shallowEqual, useSelector } from 'react-redux'; +import Modal from 'antd/lib/modal'; +import Alert from 'antd/lib/alert'; +import Progress from 'antd/lib/progress'; + +import { CombinedState } from 'reducers'; + +function UploadFileStatusModal(): JSX.Element { + const { + id: importingId, + progress: importingProgress, + status: importingStatus, + } = useSelector((state: CombinedState) => { + const { id, progress, status } = state.import.projects.dataset.uploadState; + return { + id, + progress: id === null ? 0 : progress, + status: id === null ? '' : status, + }; + }, shallowEqual); + + return ( + + + + + ); +} + +export default React.memo(UploadFileStatusModal); diff --git a/cvat-ui/src/components/cvat-app.tsx b/cvat-ui/src/components/cvat-app.tsx index 0d023468125..d061a97d532 100644 --- a/cvat-ui/src/components/cvat-app.tsx +++ b/cvat-ui/src/components/cvat-app.tsx @@ -31,6 +31,7 @@ import ExportDatasetModal from 'components/export-dataset/export-dataset-modal'; import ExportBackupModal from 'components/export-backup/export-backup-modal'; import ImportDatasetModal from 'components/import-dataset/import-dataset-modal'; import ImportBackupModal from 'components/import-backup/import-backup-modal'; +import UploadFileStatusModal from 'components/common/upload-file-status-modal'; import JobsPageComponent from 'components/jobs-page/jobs-page'; import ModelsPageComponent from 'components/models-page/models-page'; @@ -59,9 +60,13 @@ import GuidePage from 'components/md-guide/guide-page'; import InvitationsPage from 'components/invitations-page/invitations-page'; +import RequestsPage from 'components/requests-page/requests-page'; + import AnnotationPageContainer from 'containers/annotation-page/annotation-page'; import { Organization, getCore } from 'cvat-core-wrapper'; -import { ErrorState, NotificationsState, PluginsState } from 'reducers'; +import { + ErrorState, NotificationState, NotificationsState, PluginsState, +} from 'reducers'; import { customWaViewHit } from 'utils/environment'; import showPlatformNotification, { platformInfo, @@ -90,6 +95,7 @@ interface CVATAppProps { resetMessages: () => void; loadOrganization: () => void; initInvitations: () => void; + initRequests: () => void; loadServerAPISchema: () => void; userInitialized: boolean; userFetching: boolean; @@ -111,6 +117,8 @@ interface CVATAppProps { pluginComponents: PluginsState['components']; invitationsFetching: boolean; invitationsInitialized: boolean; + requestsFetching: boolean; + requestsInitialized: boolean; serverAPISchemaFetching: boolean; serverAPISchemaInitialized: boolean; isPasswordResetEnabled: boolean; @@ -121,7 +129,6 @@ interface CVATAppState { healthIinitialized: boolean; backendIsHealthy: boolean; } - class CVATApplication extends React.PureComponent { constructor(props: CVATAppProps & RouteComponentProps) { super(props); @@ -278,6 +285,9 @@ class CVATApplication extends React.PureComponent{title} + {notificationState.message} ), - duration: null, + description: 
notificationState?.description && ( + {notificationState?.description} + ), + duration: notificationState.duration || null, }); } @@ -358,10 +375,10 @@ class CVATApplication extends React.PureComponent + { routesToRender } {isModelPluginActive && ( + { loggedInModals.map((Component, idx) => ( ))} diff --git a/cvat-ui/src/components/export-backup/export-backup-modal.tsx b/cvat-ui/src/components/export-backup/export-backup-modal.tsx index f213014590d..ed9be629296 100644 --- a/cvat-ui/src/components/export-backup/export-backup-modal.tsx +++ b/cvat-ui/src/components/export-backup/export-backup-modal.tsx @@ -5,6 +5,7 @@ import './styles.scss'; import React, { useState, useEffect, useCallback } from 'react'; import { useSelector, useDispatch } from 'react-redux'; +import ReactMarkdown from 'react-markdown'; import Modal from 'antd/lib/modal'; import Notification from 'antd/lib/notification'; import Text from 'antd/lib/typography/Text'; @@ -98,11 +99,13 @@ function ExportBackupModal(): JSX.Element { ), ); closeModal(); + + const description = 'Backup export was started. You can check progress [here](/requests)'; Notification.info({ message: 'Backup export started', - description: - 'Backup export was started. ' + - 'Download will start automatically as soon as the file is ready.', + description: ( + {description} + ), className: 'cvat-notification-notice-export-backup-start', }); }, diff --git a/cvat-ui/src/components/export-dataset/export-dataset-modal.tsx b/cvat-ui/src/components/export-dataset/export-dataset-modal.tsx index 56a108f0fce..e6049067b12 100644 --- a/cvat-ui/src/components/export-dataset/export-dataset-modal.tsx +++ b/cvat-ui/src/components/export-dataset/export-dataset-modal.tsx @@ -8,7 +8,7 @@ import React, { useState, useEffect, useCallback } from 'react'; import { connect, useDispatch } from 'react-redux'; import Modal from 'antd/lib/modal'; import Notification from 'antd/lib/notification'; -import { DownloadOutlined, LoadingOutlined } from '@ant-design/icons'; +import { DownloadOutlined } from '@ant-design/icons'; import Text from 'antd/lib/typography/Text'; import Select from 'antd/lib/select'; import Input from 'antd/lib/input'; @@ -19,8 +19,9 @@ import TargetStorageField from 'components/storage/target-storage-field'; import { CombinedState, StorageLocation } from 'reducers'; import { exportActions, exportDatasetAsync } from 'actions/export-actions'; import { - Dumper, Job, Project, Storage, StorageData, Task, + Dumper, ProjectOrTaskOrJob, Job, Project, Storage, StorageData, Task, } from 'cvat-core-wrapper'; +import ReactMarkdown from 'react-markdown'; type FormValues = { selectedFormat: string | undefined; @@ -45,7 +46,6 @@ function ExportDatasetModal(props: StateToProps): JSX.Element { const { dumpers, instance, - current, } = props; const [instanceType, setInstanceType] = useState(''); @@ -101,7 +101,7 @@ function ExportDatasetModal(props: StateToProps): JSX.Element { // have to validate format before so it would not be undefined dispatch( exportDatasetAsync( - instance, + instance as ProjectOrTaskOrJob, values.selectedFormat as string, values.saveImages, useDefaultTargetStorage, @@ -114,15 +114,18 @@ function ExportDatasetModal(props: StateToProps): JSX.Element { ); closeModal(); const resource = values.saveImages ? 'Dataset' : 'Annotations'; + const description = `${resource} export was started for ${instanceType}. 
` + + 'You can check progress and download the file [here](/requests).'; Notification.info({ message: `${resource} export started`, - description: - `${resource} export was started for ${instanceType}. ` + - `Download will start automatically as soon as the ${resource} is ready.`, + description: ( + {description} + ), className: `cvat-notification-notice-export-${instanceType.split(' ')[0]}-start`, }); }, - [instance, instanceType, useDefaultTargetStorage, defaultStorageLocation, defaultStorageCloudId, targetStorage], + [instance, instanceType, useDefaultTargetStorage, + defaultStorageLocation, defaultStorageCloudId, targetStorage], ); return ( @@ -154,23 +157,16 @@ function ExportDatasetModal(props: StateToProps): JSX.Element { (instance instanceof Project && instance.dimension === null), ) .map( - (dumper: Dumper): JSX.Element => { - const pending = (instance && current ? current : []) - .includes(dumper.name); - const disabled = !dumper.enabled || pending; - return ( - - - {dumper.name} - {pending && } - - ); - }, + (dumper: Dumper): JSX.Element => ( + + + {dumper.name} + + ), )} @@ -213,7 +209,6 @@ function ExportDatasetModal(props: StateToProps): JSX.Element { interface StateToProps { dumpers: Dumper[]; instance: Project | Task | Job | null; - current: string[]; } function mapStateToProps(state: CombinedState): StateToProps { @@ -224,9 +219,6 @@ function mapStateToProps(state: CombinedState): StateToProps { return { instance, - current: !instanceType ? [] : ( - state.export[`${instanceType}s` as 'projects' | 'tasks' | 'jobs'] - ).dataset.current[instance.id], dumpers: state.formats.annotationFormats.dumpers, }; } diff --git a/cvat-ui/src/components/header/header.tsx b/cvat-ui/src/components/header/header.tsx index fca95c81e9c..fd2e21162de 100644 --- a/cvat-ui/src/components/header/header.tsx +++ b/cvat-ui/src/components/header/header.tsx @@ -473,6 +473,18 @@ function HeaderComponent(props: Props): JSX.Element { > Cloud Storages + {isModelsPluginActive ? ( )} diff --git a/cvat-ui/src/components/requests-page/empty-list.tsx b/cvat-ui/src/components/requests-page/empty-list.tsx new file mode 100644 index 00000000000..7b1006fdd88 --- /dev/null +++ b/cvat-ui/src/components/requests-page/empty-list.tsx @@ -0,0 +1,30 @@ +// Copyright (C) 2024 CVAT.ai Corporation +// +// SPDX-License-Identifier: MIT + +import React from 'react'; +import Text from 'antd/lib/typography/Text'; +import { Row, Col } from 'antd/lib/grid'; +import Empty from 'antd/lib/empty'; + +export default function EmptyListComponent(): JSX.Element { + return ( +
+ + + + No requests made yet ... + + + + + Start importing/exporting your resources to see progress here + + + + )} + /> +
+ ); +} diff --git a/cvat-ui/src/components/requests-page/request-card.tsx b/cvat-ui/src/components/requests-page/request-card.tsx new file mode 100644 index 00000000000..6ff0ab9c20f --- /dev/null +++ b/cvat-ui/src/components/requests-page/request-card.tsx @@ -0,0 +1,266 @@ +// Copyright (C) 2024 CVAT.ai Corporation +// +// SPDX-License-Identifier: MIT + +import React, { useState } from 'react'; +import { Link } from 'react-router-dom'; + +import { Row, Col } from 'antd/lib/grid'; +import { useDispatch } from 'react-redux'; + +import Card from 'antd/lib/card'; +import Text from 'antd/lib/typography/Text'; +import Progress from 'antd/lib/progress'; +import { MoreOutlined } from '@ant-design/icons'; +import Dropdown from 'antd/lib/dropdown'; +import Button from 'antd/lib/button'; +import { MenuProps } from 'antd/lib/menu'; + +import { RQStatus, Request } from 'cvat-core-wrapper'; + +import moment from 'moment'; +import { cancelRequestAsync } from 'actions/requests-async-actions'; +import StatusMessage from './request-status'; + +export interface Props { + request: Request; +} + +function constructLink(request: Request): string | null { + const { + type, target, jobID, taskID, projectID, + } = request.operation; + + if (request.status === RQStatus.FAILED && type.includes('create')) { + return null; + } + + if (target === 'project' && projectID) { + return `/projects/${projectID}`; + } + if (target === 'task' && taskID) { + return `/tasks/${taskID}`; + } + if (target === 'job' && jobID) { + return `/tasks/${taskID}/jobs/${jobID}`; + } + return null; +} + +function constructName(operation: typeof Request['operation']): string | null { + const { + target, jobID, taskID, projectID, + } = operation; + + if (target === 'project' && projectID) { + return `Project #${projectID}`; + } + if (target === 'task' && taskID) { + return `Task #${taskID}`; + } + if (target === 'job' && jobID) { + return `Job #${jobID}`; + } + return null; +} + +function constructTimestamps(request: Request): JSX.Element { + const started = moment(request.startedDate).format('MMM Do YY, H:mm'); + const finished = moment(request.finishedDate).format('MMM Do YY, H:mm'); + const created = moment(request.createdDate).format('MMM Do YY, H:mm'); + const expired = moment(request.expiryDate).format('MMM Do YY, H:mm'); + const { operation: { type }, url } = request; + + switch (request.status) { + case RQStatus.FINISHED: { + const exportToCloudStorage = type.includes('export') && !url; + if (request.expiryDate && !type.includes('create') && !type.includes('import') && !exportToCloudStorage) { + return ( + <> + + {`Started by ${request.owner.username} on ${started}`} + + + {`Expires on ${expired}`} + + + ); + } + return ( + <> + + {`Started by ${request.owner.username} on ${started}`} + + + {`Finished on ${finished}`} + + + ); + } + case RQStatus.FAILED: { + return ( + + {`Started by ${request.owner.username} on ${started}`} + + ); + } + case RQStatus.STARTED: { + return ( + <> + + {`Enqueued by ${request.owner.username} on ${created}`} + + + {`Started on ${started}`} + + + ); + } + default: { + return ( + + {`Enqueued by ${request.owner.username} on ${created}`} + + ); + } + } +} + +const dimensions = { + xs: 6, + sm: 6, + md: 8, + lg: 8, + xl: 8, + xxl: 6, +}; + +function RequestCard(props: Props): JSX.Element { + const { request } = props; + const { operation } = request; + const { type } = operation; + + const dispatch = useDispatch(); + const [isActive, setIsActive] = useState(true); + + const linkToEntity = constructLink(request); 
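`constructLink` and `constructName` derive everything from `request.operation`; the only request a card cannot link is a failed `create:*`, whose target never came to exist. Hedged expectations for the helpers above, written as if they were exported from the module; the request literals are hand-built fixtures, not real server responses:

```typescript
import { Request, RQStatus } from 'cvat-core-wrapper';

const finishedExport = {
    status: RQStatus.FINISHED,
    operation: { type: 'export:dataset', target: 'task', taskID: 42, projectID: null, jobID: null },
} as unknown as Request;

console.assert(constructLink(finishedExport) === '/tasks/42');
console.assert(constructName(finishedExport.operation) === 'Task #42');

const failedCreate = {
    status: RQStatus.FAILED,
    operation: { type: 'create:task', target: 'task', taskID: 42, projectID: null, jobID: null },
} as unknown as Request;

// A failed create deliberately routes nowhere: the task was never created.
console.assert(constructLink(failedCreate) === null);
```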
+ const percent = request.status === RQStatus.FINISHED ? 100 : request.progress; + const timestamps = constructTimestamps(request); + + const name = constructName(operation); + + const percentProgress = (request.status === RQStatus.FAILED || !percent) ? '' : `${percent.toFixed(2)}%`; + + const style: React.CSSProperties = {}; + if (!isActive) { + style.pointerEvents = 'none'; + style.opacity = 0.5; + } + + const menuItems: NonNullable = []; + if (request?.url) { + menuItems.push({ + key: 'download', + label: 'Download', + onClick: () => { + const downloadAnchor = window.document.getElementById('downloadAnchor') as HTMLAnchorElement; + downloadAnchor.href = request.url; + downloadAnchor.click(); + setIsActive(false); + }, + }); + } + + // only queued requests can be canceled now + if (request.status === RQStatus.QUEUED) { + menuItems.push({ + key: 'cancel', + label: 'Cancel', + onClick: () => { + dispatch(cancelRequestAsync(request, () => { + setIsActive(false); + })); + }, + }); + } + + return ( + + + + + + + {type.split(':').map((word) => word.charAt(0).toUpperCase() + word.slice(1)).join(' ')} + {' '} + + + + {linkToEntity ? + ({name}) : + {name}} + + + {timestamps} + + + + + + + + + { + request.status !== RQStatus.FAILED ? ( + + ) : null + } + + + {percentProgress} + + + { + operation?.format ? ( + + + {operation.format} + + + ) : null + } + + + { + menuItems.length !== 0 ? ( + + )} diff --git a/cvat-ui/src/config.tsx b/cvat-ui/src/config.tsx index 9e97bb00717..c79e72f309b 100644 --- a/cvat-ui/src/config.tsx +++ b/cvat-ui/src/config.tsx @@ -1,5 +1,5 @@ // Copyright (C) 2019-2022 Intel Corporation -// Copyright (C) 2022-2023 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT @@ -132,6 +132,8 @@ const OPENCV_PATH = '/assets/opencv_4.8.0.js'; const LOCAL_STORAGE_SEEN_GUIDES_MEMORY_LIMIT = 10; const LOCAL_STORAGE_LAST_FRAME_MEMORY_LIMIT = 20; +const REQUEST_SUCCESS_NOTIFICATION_DURATION = 5; // seconds + export default { UNDEFINED_ATTRIBUTE_VALUE, NO_BREAK_SPACE, @@ -171,4 +173,5 @@ export default { OPENCV_PATH, LOCAL_STORAGE_SEEN_GUIDES_MEMORY_LIMIT, LOCAL_STORAGE_LAST_FRAME_MEMORY_LIMIT, + REQUEST_SUCCESS_NOTIFICATION_DURATION, }; diff --git a/cvat-ui/src/containers/actions-menu/actions-menu.tsx b/cvat-ui/src/containers/actions-menu/actions-menu.tsx index 07bd9c6e551..0883e8067c6 100644 --- a/cvat-ui/src/containers/actions-menu/actions-menu.tsx +++ b/cvat-ui/src/containers/actions-menu/actions-menu.tsx @@ -26,7 +26,6 @@ interface OwnProps { interface StateToProps { annotationFormats: any; inferenceIsActive: boolean; - backupIsActive: boolean; } interface DispatchToProps { @@ -49,7 +48,6 @@ function mapStateToProps(state: CombinedState, own: OwnProps): StateToProps { return { annotationFormats, inferenceIsActive: tid in state.models.inferences, - backupIsActive: state.export.tasks.backup.current[tid], }; } @@ -82,7 +80,6 @@ function ActionsMenuContainer(props: OwnProps & StateToProps & DispatchToProps): taskInstance, annotationFormats: { loaders, dumpers }, inferenceIsActive, - backupIsActive, showExportModal, showImportModal, deleteTask, @@ -122,7 +119,6 @@ function ActionsMenuContainer(props: OwnProps & StateToProps & DispatchToProps): inferenceIsActive={inferenceIsActive} onClickMenu={onClickMenu} taskDimension={taskInstance.dimension} - backupIsActive={backupIsActive} /> ); } diff --git a/cvat-ui/src/containers/tasks-page/task-item.tsx b/cvat-ui/src/containers/tasks-page/task-item.tsx index 639dce8e408..2131fd64341 100644 --- 
a/cvat-ui/src/containers/tasks-page/task-item.tsx +++ b/cvat-ui/src/containers/tasks-page/task-item.tsx @@ -1,11 +1,11 @@ // Copyright (C) 2020-2022 Intel Corporation -// Copyright (C) 2022-2023 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT import { connect } from 'react-redux'; -import { Task } from 'cvat-core-wrapper'; +import { Task, Request } from 'cvat-core-wrapper'; import { TasksQuery, CombinedState, ActiveInference, PluginComponent, } from 'reducers'; @@ -17,6 +17,7 @@ interface StateToProps { deleted: boolean; taskInstance: any; activeInference: ActiveInference | null; + activeRequest: Request | null; ribbonPlugins: PluginComponent[]; } @@ -34,6 +35,11 @@ interface OwnProps { function mapStateToProps(state: CombinedState, own: OwnProps): StateToProps { const task = state.tasks.current[own.idx]; const { deletes } = state.tasks.activities; + const { requests } = state.requests; + const activeRequest = Object.values(requests).find((request: Request) => { + const { operation: { type, taskID } } = request; + return type === 'create:task' && task.id === taskID; + }); const id = own.taskID; return { @@ -41,6 +47,7 @@ function mapStateToProps(state: CombinedState, own: OwnProps): StateToProps { taskInstance: task, activeInference: state.models.inferences[id] || null, ribbonPlugins: state.plugins.components.taskItem.ribbon, + activeRequest: activeRequest || null, }; } diff --git a/cvat-ui/src/cvat-core-wrapper.ts b/cvat-ui/src/cvat-core-wrapper.ts index ea72f3c6111..48bc735a148 100644 --- a/cvat-ui/src/cvat-core-wrapper.ts +++ b/cvat-ui/src/cvat-core-wrapper.ts @@ -14,14 +14,16 @@ import { ModelProvider } from 'cvat-core/src/lambda-manager'; import { Label, Attribute, } from 'cvat-core/src/labels'; -import { SerializedAttribute, SerializedLabel, SerializedAPISchema } from 'cvat-core/src/server-response-types'; +import { + SerializedAttribute, SerializedLabel, SerializedAPISchema, +} from 'cvat-core/src/server-response-types'; import { Job, Task } from 'cvat-core/src/session'; import Project from 'cvat-core/src/project'; import QualityReport, { QualitySummary } from 'cvat-core/src/quality-report'; import QualityConflict, { AnnotationConflict, ConflictSeverity } from 'cvat-core/src/quality-conflict'; import QualitySettings from 'cvat-core/src/quality-settings'; import { FramesMetaData, FrameData } from 'cvat-core/src/frames'; -import { ServerError } from 'cvat-core/src/exceptions'; +import { ServerError, RequestError } from 'cvat-core/src/exceptions'; import { ShapeType, LabelType, ModelKind, ModelProviders, ModelReturnType, DimensionType, JobType, @@ -38,6 +40,7 @@ import { Dumper } from 'cvat-core/src/annotation-formats'; import { Event } from 'cvat-core/src/event'; import { APIWrapperEnterOptions } from 'cvat-core/src/plugins'; import BaseSingleFrameAction, { ActionParameterType, FrameSelectionType } from 'cvat-core/src/annotations-actions'; +import { Request } from 'cvat-core/src/request'; const cvat: CVATCore = _cvat; @@ -52,6 +55,8 @@ function getCore(): typeof cvat { return cvat; } +type ProjectOrTaskOrJob = Project | Task | Job; + export { getCore, ObjectState, @@ -93,10 +98,12 @@ export { AnalyticsEntry, AnalyticsEntryViewType, ServerError, + RequestError, Event, FrameData, ActionParameterType, FrameSelectionType, + Request, }; export type { @@ -108,4 +115,5 @@ export type { QualitySummary, CVATCore, SerializedAPISchema, + ProjectOrTaskOrJob, }; diff --git a/cvat-ui/src/index.tsx b/cvat-ui/src/index.tsx index 
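With the per-reducer `current` activity maps removed (see the reducer changes below), components locate in-flight work by scanning the central requests store, as `task-item` does for `create:task`. A generic selector in that spirit; `findActiveRequest` is an illustrative name, and the status filter is an addition for clarity rather than part of the patch:

```typescript
import { CombinedState } from 'reducers';
import { Request, RQStatus } from 'cvat-core-wrapper';

// Illustrative generalization of the task-item lookup above.
function findActiveRequest(state: CombinedState, type: string, taskID: number): Request | null {
    const request = Object.values(state.requests.requests).find((r: Request) => (
        r.operation.type === type &&
        r.operation.taskID === taskID &&
        [RQStatus.QUEUED, RQStatus.STARTED].includes(r.status)
    ));
    return request ?? null;
}

// e.g. findActiveRequest(state, 'create:task', task.id) drives the creation progress UI.
```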
2ce49501d91..fbd719816df 100644 --- a/cvat-ui/src/index.tsx +++ b/cvat-ui/src/index.tsx @@ -23,6 +23,7 @@ import createRootReducer from 'reducers/root-reducer'; import { activateOrganizationAsync } from 'actions/organization-actions'; import { resetErrors, resetMessages } from 'actions/notification-actions'; import { getInvitationsAsync } from 'actions/invitations-actions'; +import { getRequestsAsync } from 'actions/requests-async-actions'; import { getServerAPISchemaAsync } from 'actions/server-actions'; import { CombinedState, NotificationsState, PluginsState } from './reducers'; @@ -51,6 +52,8 @@ interface StateToProps { pluginComponents: PluginsState['components']; invitationsFetching: boolean; invitationsInitialized: boolean; + requestsFetching: boolean; + requestsInitialized: boolean; serverAPISchemaFetching: boolean; serverAPISchemaInitialized: boolean; isPasswordResetEnabled: boolean; @@ -68,12 +71,13 @@ interface DispatchToProps { loadUserAgreements: () => void; loadOrganization: () => void; initInvitations: () => void; + initRequests: () => void; loadServerAPISchema: () => void; } function mapStateToProps(state: CombinedState): StateToProps { const { - plugins, auth, formats, about, userAgreements, models, organizations, invitations, serverAPI, + plugins, auth, formats, about, userAgreements, models, organizations, invitations, serverAPI, requests, } = state; return { @@ -97,6 +101,8 @@ function mapStateToProps(state: CombinedState): StateToProps { isModelPluginActive: plugins.list.MODELS, invitationsFetching: invitations.fetching, invitationsInitialized: invitations.initialized, + requestsFetching: requests.fetching, + requestsInitialized: requests.initialized, serverAPISchemaFetching: serverAPI.fetching, serverAPISchemaInitialized: serverAPI.initialized, isPasswordResetEnabled: serverAPI.configuration.isPasswordResetEnabled, @@ -116,6 +122,7 @@ function mapDispatchToProps(dispatch: any): DispatchToProps { resetMessages: (): void => dispatch(resetMessages()), loadOrganization: (): void => dispatch(activateOrganizationAsync()), initInvitations: (): void => dispatch(getInvitationsAsync({ page: 1 }, true)), + initRequests: (): void => dispatch(getRequestsAsync({ page: 1 })), loadServerAPISchema: (): void => dispatch(getServerAPISchemaAsync()), }; } diff --git a/cvat-ui/src/reducers/export-reducer.ts b/cvat-ui/src/reducers/export-reducer.ts index bd944e4e2fa..73edc864945 100644 --- a/cvat-ui/src/reducers/export-reducer.ts +++ b/cvat-ui/src/reducers/export-reducer.ts @@ -1,11 +1,9 @@ // Copyright (C) 2021-2022 Intel Corporation -// Copyright (C) 2022 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT import { ExportActions, ExportActionTypes } from 'actions/export-actions'; -import { omit } from 'lodash'; -import deepCopy from 'utils/deep-copy'; import { ExportState } from '.'; import { defineActititiesField } from './import-reducer'; @@ -13,27 +11,22 @@ import { defineActititiesField } from './import-reducer'; const defaultState: ExportState = { projects: { dataset: { - current: {}, modalInstance: null, }, backup: { modalInstance: null, - current: {}, }, }, tasks: { dataset: { - current: {}, modalInstance: null, }, backup: { modalInstance: null, - current: {}, }, }, jobs: { dataset: { - current: {}, modalInstance: null, }, }, @@ -75,40 +68,6 @@ export default (state: ExportState = defaultState, action: ExportActions): Expor instanceType: null, }; } - case ExportActionTypes.EXPORT_DATASET: { - const { instance, format } = 
action.payload; - const field = defineActititiesField(instance) as 'projects' | 'tasks' | 'jobs'; - - return { - ...state, - [field]: { - ...state[field], - dataset: { - ...state[field].dataset, - current: { - ...state[field].dataset.current, - [instance.id]: !state[field].dataset.current[instance.id] ? [format] : - [...state[field].dataset.current[instance.id], format], - }, - }, - }, - }; - } - case ExportActionTypes.EXPORT_DATASET_FAILED: - case ExportActionTypes.EXPORT_DATASET_SUCCESS: { - const { instance, format } = action.payload; - const field: 'projects' | 'tasks' | 'jobs' = defineActititiesField(instance); - const activities = deepCopy(state[field]); - - activities.dataset.current[instance.id] = activities.dataset.current[instance.id].filter( - (exporterName: string): boolean => exporterName !== format, - ); - - return { - ...state, - [field]: activities, - }; - } case ExportActionTypes.OPEN_EXPORT_BACKUP_MODAL: { const { instance } = action.payload; const field = defineActititiesField(instance) as 'projects' | 'tasks'; @@ -142,41 +101,6 @@ export default (state: ExportState = defaultState, action: ExportActions): Expor instanceType: null, }; } - case ExportActionTypes.EXPORT_BACKUP: { - const { instance } = action.payload; - const field = defineActititiesField(instance) as 'projects' | 'tasks'; - - return { - ...state, - [field]: { - ...state[field], - backup: { - ...state[field].backup, - current: { - ...state[field].backup.current, - [instance.id]: true, - }, - }, - }, - }; - } - case ExportActionTypes.EXPORT_BACKUP_FAILED: - case ExportActionTypes.EXPORT_BACKUP_SUCCESS: { - const { instance } = action.payload; - - const field = defineActititiesField(instance) as 'projects' | 'tasks'; - - return { - ...state, - [field]: { - ...state[field], - backup: { - ...state[field].backup, - current: omit(state[field].backup, instance.id), - }, - }, - }; - } default: return state; } diff --git a/cvat-ui/src/reducers/import-reducer.ts b/cvat-ui/src/reducers/import-reducer.ts index e91ad0cb64c..a967b49a123 100644 --- a/cvat-ui/src/reducers/import-reducer.ts +++ b/cvat-ui/src/reducers/import-reducer.ts @@ -1,32 +1,29 @@ // Copyright (C) 2021-2022 Intel Corporation -// Copyright (C) 2022 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT -import { omit } from 'lodash'; import { ImportActions, ImportActionTypes } from 'actions/import-actions'; -import { getCore } from 'cvat-core-wrapper'; +import { getInstanceType, RequestInstanceType } from 'actions/requests-actions'; +import { ProjectOrTaskOrJob } from 'cvat-core-wrapper'; import { ImportState } from '.'; -const core = getCore(); - const defaultProgress = 0.0; -export function defineActititiesField(instance: any): 'projects' | 'tasks' | 'jobs' { - if (instance instanceof core.classes.Project) { - return 'projects'; - } - if (instance instanceof core.classes.Task) { - return 'tasks'; - } - return 'jobs'; +export function defineActititiesField(instance: ProjectOrTaskOrJob | RequestInstanceType): 'projects' | 'tasks' | 'jobs' { + return `${getInstanceType(instance)}s`; } const defaultState: ImportState = { projects: { dataset: { modalInstance: null, - current: {}, + uploadState: { + id: null, + format: '', + progress: 0, + status: '', + }, }, backup: { modalVisible: false, @@ -36,7 +33,6 @@ const defaultState: ImportState = { tasks: { dataset: { modalInstance: null, - current: {}, }, backup: { modalVisible: false, @@ -46,7 +42,6 @@ const defaultState: ImportState = { jobs: { dataset: { 
modalInstance: null, - current: {}, }, }, instanceType: null, @@ -93,69 +88,54 @@ export default (state: ImportState = defaultState, action: ImportActions): Impor const activitiesField = defineActititiesField(instance); let updatedActivity: { + id: number; format: string; status?: string; progress?: number; - } = { format }; + } = { format, id: instance.id }; if (activitiesField === 'projects') { updatedActivity = { ...updatedActivity, status: 'The file is being uploaded to the server', progress: defaultProgress, }; - } - return { - ...state, - [activitiesField]: { - ...state[activitiesField], - dataset: { - ...state[activitiesField].dataset, - current: { - ...state[activitiesField].dataset.current, - [instance.id]: updatedActivity, + return { + ...state, + [activitiesField]: { + ...state[activitiesField], + dataset: { + ...state[activitiesField].dataset, + uploadState: { + ...state[activitiesField].dataset.uploadState, + ...updatedActivity, + }, }, }, - }, - }; + }; + } + return state; } case ImportActionTypes.IMPORT_DATASET_UPDATE_STATUS: { const { progress, status, instance } = action.payload; const activitiesField = defineActititiesField(instance); - return { - ...state, - [activitiesField]: { - ...state[activitiesField], - dataset: { - ...state[activitiesField].dataset, - current: { - ...state[activitiesField].dataset.current, - [instance.id]: { - ...state[activitiesField].dataset.current[instance.id] as Record, + if (activitiesField === 'projects') { + return { + ...state, + [activitiesField]: { + ...state[activitiesField], + dataset: { + ...state[activitiesField].dataset, + uploadState: { + ...state[activitiesField].dataset.uploadState, progress, status, }, }, }, - }, - }; - } - case ImportActionTypes.IMPORT_DATASET_FAILED: - case ImportActionTypes.IMPORT_DATASET_SUCCESS: { - const { instance } = action.payload; - const activitiesField = defineActititiesField(instance); - const { current } = state[activitiesField].dataset; - - return { - ...state, - [activitiesField]: { - ...state[activitiesField], - dataset: { - ...state[activitiesField].dataset, - current: omit(current, instance.id), - }, - }, - }; + }; + } + return state; } case ImportActionTypes.OPEN_IMPORT_BACKUP_MODAL: { const { instanceType } = action.payload; diff --git a/cvat-ui/src/reducers/index.ts b/cvat-ui/src/reducers/index.ts index 0100d1fc70f..e24be56111d 100644 --- a/cvat-ui/src/reducers/index.ts +++ b/cvat-ui/src/reducers/index.ts @@ -8,6 +8,7 @@ import { Canvas, RectDrawingMethod, CuboidDrawingMethod } from 'cvat-canvas-wrap import { Webhook, MLModel, Organization, Job, Label, User, QualityConflict, FramesMetaData, RQStatus, Event, Invitation, SerializedAPISchema, + Request, } from 'cvat-core-wrapper'; import { IntelligentScissors } from 'utils/opencv-wrapper/intelligent-scissors'; import { KeyMap } from 'utils/mousetrap-react'; @@ -115,37 +116,22 @@ export interface TasksState { export interface ExportState { projects: { dataset: { - current: { - [id: number]: string[]; - }; modalInstance: any | null; }; backup: { - current: { - [id: number]: boolean; - }; modalInstance: any | null; }; }; tasks: { dataset: { - current: { - [id: number]: string[]; - }; modalInstance: any | null; }; backup: { - current: { - [id: number]: boolean; - }; modalInstance: any | null; }; }; jobs: { dataset: { - current: { - [id: number]: string[]; - }; modalInstance: any | null; }; }; @@ -156,12 +142,11 @@ export interface ImportState { projects: { dataset: { modalInstance: any | null; - current: { - [id: number]: { - format: string; - 
progress: number; - status: string; - }; + uploadState: { + id: number | null, + format: string; + progress: number; + status: string; }; }; backup: { @@ -172,9 +157,6 @@ export interface ImportState { tasks: { dataset: { modalInstance: any | null; - current: { - [id: number]: string; - }; }; backup: { modalVisible: boolean; @@ -184,9 +166,6 @@ export interface ImportState { jobs: { dataset: { modalInstance: any | null; - current: { - [id: number]: string; - }; }; }; instanceType: 'project' | 'task' | 'job' | null; @@ -434,6 +413,12 @@ export interface ErrorState { className?: string; } +export interface NotificationState { + message: string; + description?: string; + duration?: number; +} + export interface NotificationsState { errors: { auth: { @@ -578,41 +563,46 @@ export interface NotificationsState { acceptingInvitation: null | ErrorState; decliningInvitation: null | ErrorState; resendingInvitation: null | ErrorState; + }; + requests: { + fetching: null | ErrorState; + canceling: null | ErrorState; + deleting: null | ErrorState; } }; messages: { tasks: { - loadingDone: string; - importingDone: string; - movingDone: string; + loadingDone: null | NotificationState; + importingDone: null | NotificationState; + movingDone: null | NotificationState; }; models: { - inferenceDone: string; + inferenceDone: null | NotificationState; }; auth: { - changePasswordDone: string; - registerDone: string; - requestPasswordResetDone: string; - resetPasswordDone: string; + changePasswordDone: null | NotificationState; + registerDone: null | NotificationState; + requestPasswordResetDone: null | NotificationState; + resetPasswordDone: null | NotificationState; }; projects: { - restoringDone: string; + restoringDone: null | NotificationState; }; exporting: { - dataset: string; - annotation: string; - backup: string; + dataset: null | NotificationState; + annotation: null | NotificationState; + backup: null | NotificationState; }; importing: { - dataset: string; - annotation: string; - backup: string; + dataset: null | NotificationState; + annotation: null | NotificationState; + backup: null | NotificationState; }; invitations: { - newInvitations: string; - acceptInvitationDone: string; - declineInvitationDone: string; - resendingInvitation: string; + newInvitations: null | NotificationState; + acceptInvitationDone: null | NotificationState; + declineInvitationDone: null | NotificationState; + resendingInvitation: null | NotificationState; } }; } @@ -950,6 +940,18 @@ export interface InvitationsState { query: InvitationsQuery; } +export interface RequestsQuery { + page: number; +} + +export interface RequestsState { + fetching: boolean; + initialized: boolean; + requests: Record; + urls: string[]; + query: RequestsQuery; +} + export interface CombinedState { auth: AuthState; projects: ProjectsState; @@ -971,6 +973,7 @@ export interface CombinedState { organizations: OrganizationState; invitations: InvitationsState; webhooks: WebhooksState; + requests: RequestsState; serverAPI: ServerAPIState; } diff --git a/cvat-ui/src/reducers/notifications-reducer.ts b/cvat-ui/src/reducers/notifications-reducer.ts index c4eb38d5fe5..26c2a81fe59 100644 --- a/cvat-ui/src/reducers/notifications-reducer.ts +++ b/cvat-ui/src/reducers/notifications-reducer.ts @@ -5,7 +5,7 @@ import { AnyAction } from 'redux'; -import { Project, ServerError, Task } from 'cvat-core-wrapper'; +import { ServerError, RequestError } from 'cvat-core-wrapper'; import { AuthActionTypes } from 'actions/auth-actions'; import { FormatsActionTypes } from 
'actions/formats-actions'; import { ModelsActionTypes } from 'actions/models-actions'; @@ -17,17 +17,23 @@ import { NotificationsActionType } from 'actions/notification-actions'; import { BoundariesActionTypes } from 'actions/boundaries-actions'; import { UserAgreementsActionTypes } from 'actions/useragreements-actions'; import { ReviewActionTypes } from 'actions/review-actions'; -import { ExportActionTypes } from 'actions/export-actions'; -import { ImportActionTypes } from 'actions/import-actions'; import { CloudStorageActionTypes } from 'actions/cloud-storage-actions'; import { OrganizationActionsTypes } from 'actions/organization-actions'; import { JobsActionTypes } from 'actions/jobs-actions'; import { WebhooksActionsTypes } from 'actions/webhooks-actions'; import { InvitationsActionTypes } from 'actions/invitations-actions'; import { ServerAPIActionTypes } from 'actions/server-actions'; +import { RequestsActionsTypes, getInstanceType } from 'actions/requests-actions'; +import { ImportActionTypes } from 'actions/import-actions'; +import { ExportActionTypes } from 'actions/export-actions'; +import config from 'config'; import { NotificationsState } from '.'; +const shouldLog = (error: Error): boolean => ( + ![ServerError, RequestError].some((ErrorClass) => error instanceof ErrorClass) +); + const defaultState: NotificationsState = { errors: { auth: { @@ -173,40 +179,45 @@ const defaultState: NotificationsState = { decliningInvitation: null, resendingInvitation: null, }, + requests: { + fetching: null, + canceling: null, + deleting: null, + }, }, messages: { tasks: { - loadingDone: '', - importingDone: '', - movingDone: '', + loadingDone: null, + importingDone: null, + movingDone: null, }, models: { - inferenceDone: '', + inferenceDone: null, }, auth: { - changePasswordDone: '', - registerDone: '', - requestPasswordResetDone: '', - resetPasswordDone: '', + changePasswordDone: null, + registerDone: null, + requestPasswordResetDone: null, + resetPasswordDone: null, }, projects: { - restoringDone: '', + restoringDone: null, }, exporting: { - dataset: '', - annotation: '', - backup: '', + dataset: null, + annotation: null, + backup: null, }, importing: { - dataset: '', - annotation: '', - backup: '', + dataset: null, + annotation: null, + backup: null, }, invitations: { - newInvitations: '', - acceptInvitationDone: '', - declineInvitationDone: '', - resendingInvitation: '', + newInvitations: null, + acceptInvitationDone: null, + declineInvitationDone: null, + resendingInvitation: null, }, }, }; @@ -223,7 +234,7 @@ export default function (state = defaultState, action: AnyAction): Notifications authenticated: { message: 'Could not check authentication on the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -239,7 +250,7 @@ export default function (state = defaultState, action: AnyAction): Notifications login: { message: 'Could not login on the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-login-failed', }, }, @@ -256,7 +267,7 @@ export default function (state = defaultState, action: AnyAction): Notifications logout: { message: 'Could not logout from the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -272,7 +283,7 @@ export default function 
(state = defaultState, action: AnyAction): Notifications register: { message: 'Could not register on the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -286,8 +297,10 @@ export default function (state = defaultState, action: AnyAction): Notifications ...state.messages, auth: { ...state.messages.auth, - registerDone: `To use your account, you need to confirm the email address. \ - We have sent an email with a confirmation link to ${action.payload.user.email}.`, + registerDone: { + message: `To use your account, you need to confirm the email address. \ + We have sent an email with a confirmation link to ${action.payload.user.email}.`, + }, }, }, }; @@ -304,7 +317,9 @@ export default function (state = defaultState, action: AnyAction): Notifications ...state.messages, auth: { ...state.messages.auth, - changePasswordDone: 'New password has been saved.', + changePasswordDone: { + message: 'New password has been saved.', + }, }, }, }; @@ -319,7 +334,7 @@ export default function (state = defaultState, action: AnyAction): Notifications changePassword: { message: 'Could not change password', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-change-password-failed', }, }, @@ -333,8 +348,10 @@ export default function (state = defaultState, action: AnyAction): Notifications ...state.messages, auth: { ...state.messages.auth, - requestPasswordResetDone: `Check your email for a link to reset your password. + requestPasswordResetDone: { + message: `Check your email for a link to reset your password. If it doesn’t appear within a few minutes, check your spam folder.`, + }, }, }, }; @@ -349,7 +366,7 @@ export default function (state = defaultState, action: AnyAction): Notifications requestPasswordReset: { message: 'Could not reset password on the server.', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -362,7 +379,9 @@ export default function (state = defaultState, action: AnyAction): Notifications ...state.messages, auth: { ...state.messages.auth, - resetPasswordDone: 'Password has been reset with the new password.', + resetPasswordDone: { + message: 'Password has been reset with the new password.', + }, }, }, }; @@ -377,7 +396,7 @@ export default function (state = defaultState, action: AnyAction): Notifications resetPassword: { message: 'Could not set new password on the server.', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -393,7 +412,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not receive server schema', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -407,7 +426,9 @@ export default function (state = defaultState, action: AnyAction): Notifications ...state.messages, invitations: { ...state.messages.invitations, - newInvitations: 'You\'ve received an invitation to join an organization! [Click here](/invitations) to get details.', + newInvitations: { + message: 'You\'ve received an invitation to join an organization! 
[Click here](/invitations) to get details.', + }, }, }, }; @@ -424,7 +445,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not get invitations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-get-invitations-failed', }, }, @@ -441,7 +462,7 @@ export default function (state = defaultState, action: AnyAction): Notifications acceptingInvitation: { message: 'Could not accept invitation', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-accept-organization-invitation-failed', }, }, @@ -458,7 +479,7 @@ export default function (state = defaultState, action: AnyAction): Notifications decliningInvitation: { message: 'Could not decline invitation', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-decline-organization-invitation-failed', }, }, @@ -475,7 +496,7 @@ export default function (state = defaultState, action: AnyAction): Notifications resendingInvitation: { message: 'Could not resend invitation', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-resend-organization-invitation-failed', }, }, @@ -489,7 +510,9 @@ export default function (state = defaultState, action: AnyAction): Notifications ...state.messages, invitations: { ...state.messages.invitations, - resendingInvitation: 'Invitation was sent successfully', + resendingInvitation: { + message: 'Invitation was sent successfully', + }, }, }, }; @@ -507,7 +530,7 @@ export default function (state = defaultState, action: AnyAction): Notifications 'Could not export dataset for the ' + `[${instanceType} ${instance.id}](/${instanceType}s/${instance.id})`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -515,19 +538,26 @@ export default function (state = defaultState, action: AnyAction): Notifications } case ExportActionTypes.EXPORT_DATASET_SUCCESS: { const { - instance, instanceType, isLocal, resource, + instance, instanceType, resource, target, } = action.payload; - const auxiliaryVerb = resource === 'Dataset' ? 'has' : 'have'; + let description = `Export ${resource} for ${instanceType} ${instance.id} is finished. `; + if (target === 'local') { + description += 'You can [download it here](/requests)'; + } else if (target === 'cloudstorage') { + description = + `Export ${resource} for ${instanceType} ${instance.id} has been uploaded to cloud storage`; + } return { ...state, messages: { ...state.messages, exporting: { ...state.messages.exporting, - dataset: - `${resource} for ${instanceType} ${instance.id} ` + - `${auxiliaryVerb} been ${(isLocal) ? 'downloaded' : 'uploaded'} ` + - `${(isLocal) ? 
'locally' : 'to cloud storage'}`, + dataset: { + message: 'Export is finished', + duration: config.REQUEST_SUCCESS_NOTIFICATION_DURATION, + description, + }, }, }, }; @@ -544,39 +574,50 @@ export default function (state = defaultState, action: AnyAction): Notifications message: `Could not export the ${instanceType} №${instance.id}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, }; } case ExportActionTypes.EXPORT_BACKUP_SUCCESS: { - const { instance, instanceType, isLocal } = action.payload; + const { + instance, instanceType, target, + } = action.payload; + let description = `Backup for the ${instanceType} ${instance.id} is finished. `; + if (target === 'local') { + description += 'You can [download it here](/requests)'; + } else if (target === 'cloudstorage') { + description = + `Backup for the ${instanceType} ${instance.id} has been uploaded to cloud storage`; + } return { ...state, messages: { ...state.messages, exporting: { ...state.messages.exporting, - backup: - `Backup for the ${instanceType} №${instance.id} ` + - `has been ${(isLocal) ? 'downloaded' : 'uploaded'} ` + - `${(isLocal) ? 'locally' : 'to cloud storage'}`, + backup: { + message: 'Backup export is finished', + duration: config.REQUEST_SUCCESS_NOTIFICATION_DURATION, + description, + }, }, }, }; } case ImportActionTypes.IMPORT_DATASET_SUCCESS: { const { instance, resource } = action.payload; - let message = resource === 'annotation' ? + let description = resource === 'annotation' ? 'Annotations have been loaded to the ' : 'Dataset was imported to the '; - if (instance instanceof Project) { - message += `[Project ${instance.id}](/projects/${instance.id})`; - } else if (instance instanceof Task) { - message += `[Task ${instance.id}](/tasks/${instance.id})`; + const instanceType = getInstanceType(instance); + if (instanceType === 'project') { + description += `[Project ${instance.id}](/projects/${instance.id})`; + } else if (instanceType === 'task') { + description += `[Task ${instance.id}](/tasks/${instance.id})`; } else { - message += `[Job ${instance.id}](/jobs/${instance.id})`; + description += `[Job ${instance.id}](/jobs/${instance.id})`; } return { @@ -585,7 +626,11 @@ export default function (state = defaultState, action: AnyAction): Notifications ...state.messages, importing: { ...state.messages.importing, - [resource]: message, + [resource]: { + message: resource === 'annotation' ? 'Annotations import is finished' : 'Dataset import is finished', + duration: config.REQUEST_SUCCESS_NOTIFICATION_DURATION, + description, + }, }, }, }; @@ -594,7 +639,7 @@ export default function (state = defaultState, action: AnyAction): Notifications const { instance, resource } = action.payload; const message = resource === 'annotation' ? 'Could not upload annotation for the ' + - `[task ${instance.taskId || instance.id}](/tasks/${instance.taskId || instance.id})` : + `[task ${instance?.taskId || instance.id}](/tasks/${instance?.taskId || instance.id})` : `Could not import dataset to the [project ${instance.id}](/projects/${instance.id})`; return { ...state, @@ -605,7 +650,7 @@ export default function (state = defaultState, action: AnyAction): Notifications dataset: { message, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-' + `${resource === 'annotation' ?
'load-annotation' : 'import-dataset'}-failed`, }, @@ -615,15 +660,19 @@ export default function (state = defaultState, action: AnyAction): Notifications } case ImportActionTypes.IMPORT_BACKUP_SUCCESS: { const { instanceId, instanceType } = action.payload; + const description = `The ${instanceType} has been restored successfully. + Click [here](/${instanceType}s/${instanceId}) to open`; return { ...state, messages: { ...state.messages, importing: { ...state.messages.importing, - backup: - `The ${instanceType} has been restored successfully. - Click [here](/${instanceType}s/${instanceId}) to open`, + backup: { + message: 'Import backup is finished', + duration: config.REQUEST_SUCCESS_NOTIFICATION_DURATION, + description, + }, }, }, }; @@ -640,7 +689,7 @@ export default function (state = defaultState, action: AnyAction): Notifications message: `Could not restore ${instanceType} backup.`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -656,7 +705,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not fetch tasks', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -673,7 +722,7 @@ export default function (state = defaultState, action: AnyAction): Notifications deleting: { message: `Could not delete the [task ${taskID}](/tasks/${taskID})`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-delete-task-failed', }, }, @@ -690,7 +739,7 @@ export default function (state = defaultState, action: AnyAction): Notifications creating: { message: 'Could not create the task', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-create-task-failed', }, }, @@ -707,7 +756,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not fetch projects', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -723,7 +772,7 @@ export default function (state = defaultState, action: AnyAction): Notifications creating: { message: 'Could not create the project', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-create-project-failed', }, }, @@ -741,7 +790,7 @@ export default function (state = defaultState, action: AnyAction): Notifications updating: { message: `Could not delete [project ${projectId}](/project/${projectId})`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-delete-project-failed', }, }, @@ -758,7 +807,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not get formats from the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -774,7 +823,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could 
not get info about the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -789,8 +838,10 @@ export default function (state = defaultState, action: AnyAction): Notifications ...state.messages, models: { ...state.messages.models, - inferenceDone: 'Automatic annotation accomplished for the ' + + inferenceDone: { + message: 'Automatic annotation accomplished for the ' + `[task ${taskID}](/tasks/${taskID})`, + }, }, }, }; @@ -814,7 +865,7 @@ export default function (state = defaultState, action: AnyAction): Notifications metaFetching: { message: 'Could not fetch models meta information', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -831,7 +882,7 @@ export default function (state = defaultState, action: AnyAction): Notifications inferenceStatusFetching: { message: `Fetching inference status for the [task ${taskID}](/tasks/${taskID})`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -847,7 +898,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not get models from the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -864,7 +915,7 @@ export default function (state = defaultState, action: AnyAction): Notifications starting: { message: `Could not infer model for the [task ${taskID}](/tasks/${taskID})`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -881,7 +932,7 @@ export default function (state = defaultState, action: AnyAction): Notifications canceling: { message: `Could not cancel model inference for the [task ${taskID}](/tasks/${taskID})`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -897,7 +948,7 @@ export default function (state = defaultState, action: AnyAction): Notifications jobFetching: { message: 'Error during fetching a job', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-fetch-job-failed', }, }, @@ -914,7 +965,7 @@ export default function (state = defaultState, action: AnyAction): Notifications frameFetching: { message: `Could not receive frame ${action.payload.number}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -930,7 +981,7 @@ export default function (state = defaultState, action: AnyAction): Notifications saving: { message: 'Could not save annotations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-save-annotations-failed', }, }, @@ -964,7 +1015,7 @@ export default function (state = defaultState, action: AnyAction): Notifications updating: { message: 'Could not update annotations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 
'cvat-notification-notice-update-annotations-failed', }, }, @@ -981,7 +1032,7 @@ export default function (state = defaultState, action: AnyAction): Notifications creating: { message: 'Could not create annotations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -997,7 +1048,7 @@ export default function (state = defaultState, action: AnyAction): Notifications merging: { message: 'Could not merge annotations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1013,7 +1064,7 @@ export default function (state = defaultState, action: AnyAction): Notifications grouping: { message: 'Could not group annotations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1029,7 +1080,7 @@ export default function (state = defaultState, action: AnyAction): Notifications joining: { message: 'Could not join annotations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1045,7 +1096,7 @@ export default function (state = defaultState, action: AnyAction): Notifications slicing: { message: 'Could not slice the object', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1060,7 +1111,7 @@ export default function (state = defaultState, action: AnyAction): Notifications splitting: { message: 'Could not split the track', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1076,7 +1127,7 @@ export default function (state = defaultState, action: AnyAction): Notifications removing: { message: 'Could not remove the object', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-remove-object-failed', }, }, @@ -1093,7 +1144,7 @@ export default function (state = defaultState, action: AnyAction): Notifications propagating: { message: 'Could not propagate the object', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1109,7 +1160,7 @@ export default function (state = defaultState, action: AnyAction): Notifications collectingStatistics: { message: 'Could not collect annotations statistics', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1149,7 +1200,7 @@ export default function (state = defaultState, action: AnyAction): Notifications removeAnnotations: { message: 'Could not remove annotations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1165,7 +1216,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetchingAnnotations: { message: 'Could not fetch annotations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1181,7 +1232,7 @@ export default function (state = defaultState, action: 
AnyAction): Notifications redo: { message: 'Could not redo', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1197,7 +1248,7 @@ export default function (state = defaultState, action: AnyAction): Notifications undo: { message: 'Could not undo', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1213,7 +1264,7 @@ export default function (state = defaultState, action: AnyAction): Notifications search: { message: 'Could not execute search annotations', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1229,7 +1280,7 @@ export default function (state = defaultState, action: AnyAction): Notifications savingLogs: { message: 'Could not send logs to the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1245,7 +1296,7 @@ export default function (state = defaultState, action: AnyAction): Notifications resetError: { message: 'Could not reset the state', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1261,7 +1312,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not get user agreements from the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1277,7 +1328,7 @@ export default function (state = defaultState, action: AnyAction): Notifications finishingIssue: { message: 'Could not open a new issue', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1293,7 +1344,7 @@ export default function (state = defaultState, action: AnyAction): Notifications resolvingIssue: { message: 'Could not resolve the issue', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1309,7 +1360,55 @@ export default function (state = defaultState, action: AnyAction): Notifications reopeningIssue: { message: 'Could not reopen the issue', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), + }, + }, + }, + }; + } + case RequestsActionsTypes.GET_REQUESTS_FAILED: { + return { + ...state, + errors: { + ...state.errors, + requests: { + ...state.errors.requests, + fetching: { + message: 'Could not fetch requests from the server', + reason: action.payload.error, + shouldLog: shouldLog(action.payload.error), + }, + }, + }, + }; + } + case RequestsActionsTypes.CANCEL_REQUEST_FAILED: { + return { + ...state, + errors: { + ...state.errors, + requests: { + ...state.errors.requests, + canceling: { + message: 'Could not cancel the request', + reason: action.payload.error, + shouldLog: shouldLog(action.payload.error), + }, + }, + }, + }; + } + case RequestsActionsTypes.DELETE_REQUEST_FAILED: { + return { + ...state, + errors: { + ...state.errors, + requests: { + ...state.errors.requests, + deleting: { + message: 'Could not delete the request', + reason: action.payload.error, + shouldLog: 
shouldLog(action.payload.error), }, }, }, @@ -1325,7 +1424,7 @@ export default function (state = defaultState, action: AnyAction): Notifications commentingIssue: { message: 'Could not comment the issue', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1341,7 +1440,7 @@ export default function (state = defaultState, action: AnyAction): Notifications submittingReview: { message: `Could not submit review for the job ${action.payload.jobId}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1357,7 +1456,7 @@ export default function (state = defaultState, action: AnyAction): Notifications deletingIssue: { message: 'Could not remove issue from the server', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1389,7 +1488,7 @@ export default function (state = defaultState, action: AnyAction): Notifications jobFetching: { message: 'Could not receive image data', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-fetch-frame-data-from-the-server-failed', }, }, @@ -1423,7 +1522,7 @@ export default function (state = defaultState, action: AnyAction): Notifications deleteFrame: { message: 'Could not delete frame', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1439,7 +1538,7 @@ export default function (state = defaultState, action: AnyAction): Notifications restoreFrame: { message: 'Could not restore frame', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1455,7 +1554,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not fetch cloud storage', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), }, }, }, @@ -1471,7 +1570,7 @@ export default function (state = defaultState, action: AnyAction): Notifications creating: { message: 'Could not create the cloud storage', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-create-cloud-storage-failed', }, }, @@ -1507,7 +1606,7 @@ export default function (state = defaultState, action: AnyAction): Notifications message: `Could not delete cloud storage ${cloudStorageID}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-delete-cloud-storage-failed', }, }, @@ -1525,7 +1624,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: `Could not fetch content for cloud storage #${cloudStorageID}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-fetch-cloud-storage-content-failed', }, }, @@ -1543,7 +1642,7 @@ export default function (state = defaultState, action: AnyAction): Notifications 
fetching: { message: `Could not fetch cloud storage #${cloudStorageID} status`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-fetch-cloud-storage-status-failed', }, }, @@ -1562,7 +1661,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: `Could not fetch preview for cloud storage #${cloudStorageID}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-fetch-cloud-storage-preview-failed', }, }, @@ -1579,7 +1678,7 @@ export default function (state = defaultState, action: AnyAction): Notifications creating: { message: `Could not create organization ${action.payload.slug}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-create-organization-failed', }, }, @@ -1597,7 +1696,7 @@ export default function (state = defaultState, action: AnyAction): Notifications updating: { message: `Could not update organization "${slug}"`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-update-organization-failed', }, }, @@ -1614,7 +1713,7 @@ export default function (state = defaultState, action: AnyAction): Notifications activation: { message: `Could not activate organization ${action.payload.slug || ''}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-activate-organization-failed', }, }, @@ -1631,7 +1730,7 @@ export default function (state = defaultState, action: AnyAction): Notifications deleting: { message: `Could not remove organization ${action.payload.slug}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-remove-organization-failed', }, }, @@ -1648,7 +1747,7 @@ export default function (state = defaultState, action: AnyAction): Notifications inviting: { message: 'Could not invite organization members', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-invite-organization-members-failed', }, }, @@ -1665,7 +1764,7 @@ export default function (state = defaultState, action: AnyAction): Notifications inviting: { message: `Could not invite this member "${action.payload.email}" to the organization`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-invite-organization-member-failed', }, }, @@ -1682,7 +1781,7 @@ export default function (state = defaultState, action: AnyAction): Notifications leaving: { message: 'Could not leave the organization', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-leave-organization-failed', }, }, @@ -1699,7 +1798,7 @@ export default function (state = defaultState, action: AnyAction): Notifications removingMembership: { 
message: `Could not remove member "${action.payload.username}" from the organization`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-remove-organization-member-failed', }, }, @@ -1717,7 +1816,7 @@ export default function (state = defaultState, action: AnyAction): Notifications updatingMembership: { message: `Could not assign role "${role}" to the user "${username}"`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-update-organization-membership-failed', }, }, @@ -1734,7 +1833,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not fetch a list of jobs', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-get-jobs-failed', }, }, @@ -1751,7 +1850,7 @@ export default function (state = defaultState, action: AnyAction): Notifications creating: { message: 'Could not create job', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-create-job-failed', }, }, @@ -1785,7 +1884,7 @@ export default function (state = defaultState, action: AnyAction): Notifications deleting: { message: `Could not delete the job #${jobID}`, reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-delete-job-failed', }, }, @@ -1802,7 +1901,7 @@ export default function (state = defaultState, action: AnyAction): Notifications fetching: { message: 'Could not fetch a list of webhooks', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-get-webhooks-failed', }, }, @@ -1819,7 +1918,7 @@ export default function (state = defaultState, action: AnyAction): Notifications creating: { message: 'Could not create webhook', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-create-webhook-failed', }, }, @@ -1836,7 +1935,7 @@ export default function (state = defaultState, action: AnyAction): Notifications updating: { message: 'Could not update webhook', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-update-webhook-failed', }, }, @@ -1853,7 +1952,7 @@ export default function (state = defaultState, action: AnyAction): Notifications deleting: { message: 'Could not delete webhook', reason: action.payload.error, - shouldLog: !(action.payload.error instanceof ServerError), + shouldLog: shouldLog(action.payload.error), className: 'cvat-notification-notice-delete-webhook-failed', }, }, diff --git a/cvat-ui/src/reducers/requests-reducer.ts b/cvat-ui/src/reducers/requests-reducer.ts new file mode 100644 index 00000000000..fbf205b5f5d --- /dev/null +++ b/cvat-ui/src/reducers/requests-reducer.ts @@ -0,0 +1,70 @@ +// Copyright (C) 2024 CVAT.ai Corporation +// +// SPDX-License-Identifier: MIT + +import { BoundariesActions, 
BoundariesActionTypes } from 'actions/boundaries-actions'; +import { RequestsActionsTypes, RequestsActions } from 'actions/requests-actions'; +import { AuthActionTypes, AuthActions } from 'actions/auth-actions'; +import { RequestsState } from '.'; + +const defaultState: RequestsState = { + initialized: false, + fetching: false, + requests: {}, + urls: [], + query: { + page: 1, + }, +}; + +export default function ( + state = defaultState, + action: RequestsActions | AuthActions | BoundariesActions, +): RequestsState { + switch (action.type) { + case RequestsActionsTypes.GET_REQUESTS: { + const { fetching } = action.payload; + return { + ...state, + fetching, + query: { + ...state.query, + ...action.payload.query, + }, + }; + } + case RequestsActionsTypes.GET_REQUESTS_SUCCESS: { + return { + ...state, + requests: Object.fromEntries(action.payload.requests.map((r) => [r.id, r])), + initialized: true, + fetching: false, + }; + } + case RequestsActionsTypes.GET_REQUESTS_FAILED: { + return { + ...state, + initialized: true, + fetching: false, + }; + } + case RequestsActionsTypes.GET_REQUEST_STATUS_SUCCESS: { + const { requests } = state; + + return { + ...state, + requests: { + ...requests, + [action.payload.request.id]: action.payload.request, + }, + }; + } + case BoundariesActionTypes.RESET_AFTER_ERROR: + case AuthActionTypes.LOGOUT_SUCCESS: { + return { ...defaultState }; + } + default: { + return state; + } + } +} diff --git a/cvat-ui/src/reducers/root-reducer.ts b/cvat-ui/src/reducers/root-reducer.ts index a766325c789..91c300c9d14 100644 --- a/cvat-ui/src/reducers/root-reducer.ts +++ b/cvat-ui/src/reducers/root-reducer.ts @@ -1,5 +1,5 @@ // Copyright (C) 2020-2022 Intel Corporation -// Copyright (C) 2022-2023 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT @@ -24,6 +24,7 @@ import cloudStoragesReducer from './cloud-storages-reducer'; import organizationsReducer from './organizations-reducer'; import webhooksReducer from './webhooks-reducer'; import invitationsReducer from './invitations-reducer'; +import requestsReducer from './requests-reducer'; import serverAPIReducer from './server-api-reducer'; export default function createRootReducer(): Reducer { @@ -48,6 +49,7 @@ export default function createRootReducer(): Reducer { organizations: organizationsReducer, webhooks: webhooksReducer, invitations: invitationsReducer, + requests: requestsReducer, serverAPI: serverAPIReducer, }); } diff --git a/cvat/apps/dataset_manager/bindings.py b/cvat/apps/dataset_manager/bindings.py index d2316195151..d94d6fd39e3 100644 --- a/cvat/apps/dataset_manager/bindings.py +++ b/cvat/apps/dataset_manager/bindings.py @@ -35,6 +35,7 @@ from cvat.apps.engine.models import (AttributeSpec, AttributeType, Data, DimensionType, Job, JobType, Label, LabelType, Project, SegmentType, ShapeType, Task) +from cvat.apps.engine.rq_job_handler import RQJobMetaField from .annotation import AnnotationIR, AnnotationManager, TrackManager from .formats.transformations import MaskConverter, EllipsesToMasks @@ -2241,8 +2242,8 @@ def load_dataset_data(project_annotation, dataset: dm.Dataset, project_data): raise CvatImportError(f'Target project does not have label with name "{label.name}"') for subset_id, subset in enumerate(dataset.subsets().values()): job = rq.get_current_job() - job.meta['status'] = 'Task from dataset is being created...' 
- job.meta['progress'] = (subset_id + job.meta.get('task_progress', 0.)) / len(dataset.subsets().keys()) + job.meta[RQJobMetaField.STATUS] = 'Task from dataset is being created...' + job.meta[RQJobMetaField.PROGRESS] = (subset_id + job.meta.get(RQJobMetaField.TASK_PROGRESS, 0.)) / len(dataset.subsets().keys()) job.save_meta() task_fields = { diff --git a/cvat/apps/dataset_manager/default_settings.py b/cvat/apps/dataset_manager/default_settings.py index 1499bd2857c..a4dd53b0f52 100644 --- a/cvat/apps/dataset_manager/default_settings.py +++ b/cvat/apps/dataset_manager/default_settings.py @@ -4,7 +4,7 @@ import os -DATASET_CACHE_TTL = int(os.getenv("CVAT_DATASET_CACHE_TTL", 60 * 60 * 10)) +DATASET_CACHE_TTL = int(os.getenv("CVAT_DATASET_CACHE_TTL", 60 * 60 * 24)) "Base lifetime for cached exported datasets, in seconds" DATASET_CACHE_LOCK_TIMEOUT = int(os.getenv("CVAT_DATASET_CACHE_LOCK_TIMEOUT", 10)) diff --git a/cvat/apps/dataset_manager/project.py b/cvat/apps/dataset_manager/project.py index 35a283f53d5..7579f241043 100644 --- a/cvat/apps/dataset_manager/project.py +++ b/cvat/apps/dataset_manager/project.py @@ -16,6 +16,7 @@ from cvat.apps.engine.log import DatasetLogManager from cvat.apps.engine.serializers import DataSerializer, TaskWriteSerializer from cvat.apps.engine.task import _create_thread as create_task +from cvat.apps.engine.rq_job_handler import RQJobMetaField from cvat.apps.dataset_manager.task import TaskAnnotation from .annotation import AnnotationIR @@ -179,8 +180,8 @@ def data(self) -> dict: @transaction.atomic def import_dataset_as_project(src_file, project_id, format_name, conv_mask_to_poly): rq_job = rq.get_current_job() - rq_job.meta['status'] = 'Dataset import has been started...' - rq_job.meta['progress'] = 0. + rq_job.meta[RQJobMetaField.STATUS] = 'Dataset import has been started...' + rq_job.meta[RQJobMetaField.PROGRESS] = 0. 
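+ # save_meta() below persists these fields to Redis, where status-polling requests read them to report import progress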
rq_job.save_meta() project = ProjectAnnotationAndData(project_id) diff --git a/cvat/apps/dataset_manager/views.py b/cvat/apps/dataset_manager/views.py index 53cbdd5c03b..cb9e4f0f674 100644 --- a/cvat/apps/dataset_manager/views.py +++ b/cvat/apps/dataset_manager/views.py @@ -39,7 +39,7 @@ def log_exception(logger=None, exc_info=True): exc_info=exc_info) DEFAULT_CACHE_TTL = timedelta(seconds=settings.DATASET_CACHE_TTL) -PROJECT_CACHE_TTL = DEFAULT_CACHE_TTL / 3 +PROJECT_CACHE_TTL = DEFAULT_CACHE_TTL TASK_CACHE_TTL = DEFAULT_CACHE_TTL JOB_CACHE_TTL = DEFAULT_CACHE_TTL TTL_CONSTS = { diff --git a/cvat/apps/engine/background_operations.py b/cvat/apps/engine/background_operations.py new file mode 100644 index 00000000000..b23eafa88a5 --- /dev/null +++ b/cvat/apps/engine/background_operations.py @@ -0,0 +1,680 @@ +# Copyright (C) 2024 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +import os +import os.path as osp +import cvat.apps.dataset_manager as dm + +from abc import ABC, abstractmethod +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Callable, Dict, Optional, Union + +import django_rq +from attrs.converters import to_bool +from django.conf import settings +from django.http.request import HttpRequest +from django.utils import timezone +from django_rq.queues import DjangoRQ +from rest_framework import serializers, status +from rest_framework.response import Response +from rest_framework.reverse import reverse +from rq.job import Job as RQJob +from rq.job import JobStatus as RQJobStatus + +from cvat.apps.engine import models +from cvat.apps.engine.backup import ProjectExporter, TaskExporter, create_backup +from cvat.apps.engine.cloud_provider import export_resource_to_cloud_storage +from cvat.apps.engine.location import StorageType, get_location_configuration +from cvat.apps.engine.log import ServerLogManager +from cvat.apps.engine.models import Location, Project, Task +from cvat.apps.engine.permissions import get_cloud_storage_for_import_or_export +from cvat.apps.engine.rq_job_handler import RQIdManager +from cvat.apps.engine.serializers import RqIdSerializer +from cvat.apps.engine.utils import ( + build_annotations_file_name, + build_backup_file_name, + define_dependent_job, + get_rq_job_meta, + get_rq_lock_by_user, + sendfile, +) +from cvat.apps.events.handlers import handle_dataset_export + +slogger = ServerLogManager(__name__) + + +class _ResourceExportManager(ABC): + QUEUE_NAME = settings.CVAT_QUEUES.EXPORT_DATA.value + + def __init__( + self, + version: int, + db_instance: Union[models.Project, models.Task, models.Job], + *, + export_callback: Callable, + ) -> None: + """ + Args: + version (int): API endpoint version to use. 
Possible options: 1 or 2 + db_instance (Union[models.Project, models.Task, models.Job]): Model instance + export_callback (Callable): Main function that will be executed in the background + """ + self.version = version + self.db_instance = db_instance + self.resource = db_instance.__class__.__name__.lower() + if self.resource not in self.SUPPORTED_RESOURCES: + raise ValueError( + "Unexpected type of db_instance: {}".format(type(db_instance)) + ) + + self.export_callback = export_callback + + @abstractmethod + def export(self) -> Response: + pass + + @abstractmethod + def setup_background_job(self, queue: DjangoRQ, rq_id: str) -> None: + pass + + @abstractmethod + def _handle_rq_job_v1(self, rq_job: RQJob, queue: DjangoRQ) -> Optional[Response]: + pass + + def _handle_rq_job_v2(self, rq_job: RQJob, *args, **kwargs) -> Optional[Response]: + rq_job_status = rq_job.get_status(refresh=False) + if rq_job_status in { + RQJobStatus.FINISHED, + RQJobStatus.FAILED, + RQJobStatus.CANCELED, + RQJobStatus.STOPPED, + }: + rq_job.delete() + return None + + return Response( + data=f"Export process is already {'started' if rq_job_status == RQJobStatus.STARTED else 'queued'}", + status=status.HTTP_409_CONFLICT, + ) + + def handle_rq_job(self, *args, **kwargs) -> Optional[Response]: + if self.version == 1: + return self._handle_rq_job_v1(*args, **kwargs) + elif self.version == 2: + return self._handle_rq_job_v2(*args, **kwargs) + + raise ValueError("Unsupported version") + + @abstractmethod + def get_v1_endpoint_view_name(self) -> str: + pass + + def make_result_url(self) -> str: + view_name = self.get_v1_endpoint_view_name() + result_url = reverse( + view_name, args=[self.db_instance.pk], request=self.request + ) + query_dict = self.request.query_params.copy() + query_dict["action"] = "download" + result_url += "?" 
+ query_dict.urlencode() + + return result_url + + def get_instance_update_time(self) -> datetime: + instance_update_time = timezone.localtime(self.db_instance.updated_date) + if isinstance(self.db_instance, Project): + tasks_update = list( + map( + lambda db_task: timezone.localtime(db_task.updated_date), + self.db_instance.tasks.all(), + ) + ) + instance_update_time = max(tasks_update + [instance_update_time]) + return instance_update_time + + def get_timestamp(self, time_: datetime) -> str: + return datetime.strftime(time_, "%Y_%m_%d_%H_%M_%S") + +class DatasetExportManager(_ResourceExportManager): + SUPPORTED_RESOURCES = {"project", "task", "job"} + + @dataclass + class ExportArgs: + format: str + filename: str + save_images: bool + location_config: Dict[str, Any] + + @property + def location(self) -> Location: + return self.location_config["location"] + + def __init__( + self, + db_instance: Union[models.Project, models.Task, models.Job], + request: HttpRequest, + export_callback: Callable, + save_images: Optional[bool] = None, + *, + version: int = 2, + ) -> None: + super().__init__(version, db_instance, export_callback=export_callback) + self.request = request + + format_name = request.query_params.get("format", "") + filename = request.query_params.get("filename", "") + # save_images may be passed directly by the caller instead of being parsed from the query params + save_images = ( + save_images + if save_images is not None + else to_bool(request.query_params.get("save_images", False)) + ) + + try: + location_config = get_location_configuration( + db_instance=db_instance, + query_params=request.query_params, + field_name=StorageType.TARGET, + ) + except ValueError as ex: + raise serializers.ValidationError(str(ex)) from ex + + location = location_config["location"] + + if location not in Location.list(): + raise serializers.ValidationError( + f"Unexpected location {location} specified for the request" + ) + + self.export_args = self.ExportArgs( + format=format_name, + filename=filename, + save_images=save_images, + location_config=location_config, + ) + + def _handle_rq_job_v1( + self, + rq_job: RQJob, + queue: DjangoRQ, + ) -> Optional[Response]: + action = self.request.query_params.get("action") + if action not in {None, "download"}: + raise serializers.ValidationError( + "Unexpected action specified for the request" + ) + + request_time = rq_job.meta.get("request", {}).get("timestamp") + instance_update_time = self.get_instance_update_time() + if request_time is None or request_time < instance_update_time: + # The result is outdated, so the export needs to be restarted. + # Cancel the current job. + # The new attempt will be made after the last existing job. + # If the server is configured with ONE_RUNNING_JOB_IN_QUEUE_PER_USER, + # we have to enqueue dependent jobs after canceling this one.
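+ # (rq's Job.cancel(enqueue_dependents=True) re-enqueues the job's dependents instead of canceling them along with it)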
+ rq_job.cancel(enqueue_dependents=settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER) + rq_job.delete() + return None + + instance_timestamp = self.get_timestamp(instance_update_time) + + REQUEST_TIMEOUT = 60 + + if action == "download": + if self.export_args.location != Location.LOCAL: + return Response( + 'Action "download" is only supported for a local export location', + status=status.HTTP_400_BAD_REQUEST, + ) + + if not rq_job.is_finished: + return Response( + "Export has not finished", status=status.HTTP_400_BAD_REQUEST + ) + + file_path = rq_job.return_value() + + if not file_path: + return Response( + "No export result file was found for the finished RQ job", + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + with dm.util.get_export_cache_lock(file_path, ttl=REQUEST_TIMEOUT): + if not osp.exists(file_path): + return Response( + "The exported file has expired, please retry exporting", + status=status.HTTP_404_NOT_FOUND, + ) + + filename = self.export_args.filename or build_annotations_file_name( + class_name=self.resource, + identifier=( + self.db_instance.name + if isinstance(self.db_instance, (Task, Project)) + else self.db_instance.id + ), + timestamp=instance_timestamp, + format_name=self.export_args.format, + is_annotation_file=not self.export_args.save_images, + extension=osp.splitext(file_path)[1], + ) + + rq_job.delete() + return sendfile( + self.request, + file_path, + attachment=True, + attachment_filename=filename, + ) + + if rq_job.is_finished: + if self.export_args.location == Location.CLOUD_STORAGE: + rq_job.delete() + return Response(status=status.HTTP_200_OK) + + elif self.export_args.location == Location.LOCAL: + file_path = rq_job.return_value() + + if not file_path: + return Response( + "No export result file was found for the finished RQ job", + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + with dm.util.get_export_cache_lock(file_path, ttl=REQUEST_TIMEOUT): + if osp.exists(file_path): + # Bump the file modification time to prolong the export lifetime, + # as the last access time is not available on every filesystem + os.utime(file_path, None) + + return Response(status=status.HTTP_201_CREATED) + else: + # Cancel and reenqueue the job. + # The new attempt will be made after the last existing job. + # If the server is configured with ONE_RUNNING_JOB_IN_QUEUE_PER_USER, + # we have to enqueue dependent jobs after canceling this one. + rq_job.cancel( + enqueue_dependents=settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER + ) + rq_job.delete() + else: + raise NotImplementedError( + f"Export to {self.export_args.location} location is not implemented yet" + ) + elif rq_job.is_failed: + exc_info = rq_job.meta.get("formatted_exception", str(rq_job.exc_info)) + rq_job.delete() + return Response(exc_info, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + elif ( + rq_job.is_deferred + and rq_job.id not in queue.deferred_job_registry.get_job_ids() + ): + # Sometimes jobs can depend on outdated jobs in the deferred jobs registry. + # They can be fetched by their specific ids, but are not listed by get_job_ids(). + # Presumably, this can happen because of server restarts + # (potentially because the Redis instance backing the queue is in-memory). + # Another potential reason is canceling without enqueueing dependents. + # Such dependencies are never removed or finished, + # as there is no TTL for deferred jobs, + # so the current job can be blocked indefinitely. + + # Cancel the current job and then reenqueue it, considering the current situation.
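+ # (the re-enqueue itself happens in export(): once the stale job is deleted here, handle_rq_job() returns None and setup_background_job() schedules a fresh attempt)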
+ # The new attempt will be made after the last existing job. + # In the case the server is configured with ONE_RUNNING_JOB_IN_QUEUE_PER_USER + # we have to enqueue dependent jobs after canceling one. + rq_job.cancel(enqueue_dependents=settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER) + rq_job.delete() + else: + return Response(status=status.HTTP_202_ACCEPTED) + + def export(self) -> Response: + format_desc = {f.DISPLAY_NAME: f for f in dm.views.get_export_formats()}.get( + self.export_args.format + ) + if format_desc is None: + raise serializers.ValidationError( + "Unknown format specified for the request" + ) + elif not format_desc.ENABLED: + return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED) + + queue: DjangoRQ = django_rq.get_queue(self.QUEUE_NAME) + rq_id = RQIdManager.build( + "export", + self.resource, + self.db_instance.pk, + subresource="dataset" if self.export_args.save_images else "annotations", + anno_format=self.export_args.format, + user_id=self.request.user.id, + ) + + rq_job = queue.fetch_job(rq_id) + + if rq_job: + response = self.handle_rq_job(rq_job, queue) + if response: + return response + + self.setup_background_job(queue, rq_id) + + handle_dataset_export( + self.db_instance, + format_name=self.export_args.format, + cloud_storage_id=self.export_args.location_config.get("storage_id"), + save_images=self.export_args.save_images, + ) + + serializer = RqIdSerializer(data={"rq_id": rq_id}) + serializer.is_valid(raise_exception=True) + + return Response(serializer.data, status=status.HTTP_202_ACCEPTED) + + def setup_background_job( + self, + queue: DjangoRQ, + rq_id: str, + ) -> None: + try: + if self.request.scheme: + server_address = self.request.scheme + "://" + server_address += self.request.get_host() + except Exception: + server_address = None + + cache_ttl = dm.views.get_export_cache_ttl(self.db_instance) + + user_id = self.request.user.id + + func = self.export_callback + func_args = (self.db_instance.id, self.export_args.format, server_address) + result_url = None + + if self.export_args.location == Location.CLOUD_STORAGE: + try: + storage_id = self.export_args.location_config["storage_id"] + except KeyError: + raise serializers.ValidationError( + "Cloud storage location was selected as the destination," + " but cloud storage id was not specified" + ) + + db_storage = get_cloud_storage_for_import_or_export( + storage_id=storage_id, + request=self.request, + is_default=self.export_args.location_config["is_default"], + ) + instance_update_time = self.get_instance_update_time() + instance_timestamp = self.get_timestamp(instance_update_time) + filename_pattern = build_annotations_file_name( + class_name=self.db_instance.__class__.__name__, + identifier=( + self.db_instance.name + if isinstance(self.db_instance, (models.Task, models.Project)) + else self.db_instance.id + ), + timestamp=instance_timestamp, + format_name=self.export_args.format, + is_annotation_file=not self.export_args.save_images, + ) + func = export_resource_to_cloud_storage + func_args = ( + db_storage, + self.export_args.filename, + filename_pattern, + self.export_callback, + ) + func_args + else: + db_storage = None + result_url = self.make_result_url() + + with get_rq_lock_by_user(queue, user_id): + queue.enqueue_call( + func=func, + args=func_args, + job_id=rq_id, + meta=get_rq_job_meta( + request=self.request, db_obj=self.db_instance, result_url=result_url + ), + depends_on=define_dependent_job(queue, user_id, rq_id=rq_id), + result_ttl=cache_ttl.total_seconds(), + 
failure_ttl=cache_ttl.total_seconds(), + ) + + def get_v1_endpoint_view_name(self) -> str: + """ + Get view name of the endpoint for the first API version + + Possible view names: + - project-dataset + - task|job-dataset-export + - project|task|job-annotations + """ + if self.export_args.save_images: + template = "{}-dataset" + ("-export" if self.resource != "project" else "") + else: + template = "{}-annotations" + + return template.format(self.resource) + + +class BackupExportManager(_ResourceExportManager): + SUPPORTED_RESOURCES = {"project", "task"} + + @dataclass + class ExportArgs: + filename: str + location_config: Dict[str, Any] + + @property + def location(self) -> Location: + return self.location_config["location"] + + def __init__( + self, + db_instance: Union[models.Project, models.Task], + request: HttpRequest, + *, + version: int = 2, + ) -> None: + super().__init__(version, db_instance, export_callback=create_backup) + self.request = request + + filename = request.query_params.get("filename", "") + location_config = get_location_configuration( + db_instance=self.db_instance, + query_params=self.request.query_params, + field_name=StorageType.TARGET, + ) + self.export_args = self.ExportArgs(filename, location_config) + + def _handle_rq_job_v1( + self, + rq_job: RQJob, + queue: DjangoRQ, + ) -> Optional[Response]: + last_instance_update_time = timezone.localtime(self.db_instance.updated_date) + timestamp = self.get_timestamp(last_instance_update_time) + + action = self.request.query_params.get("action") + + if action not in (None, "download"): + raise serializers.ValidationError( + "Unexpected action specified for the request" + ) + + if action == "download": + if self.export_args.location != Location.LOCAL: + return Response( + 'Action "download" is only supported for a local backup location', + status=status.HTTP_400_BAD_REQUEST, + ) + + if not rq_job.is_finished: + return Response( + "Backup has not finished", status=status.HTTP_400_BAD_REQUEST + ) + + file_path = rq_job.return_value() + + if not file_path: + return Response( + "A result for exporting job was not found for finished RQ job", + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + elif not os.path.exists(file_path): + return Response( + "The result file does not exist in export cache", + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + filename = self.export_args.filename or build_backup_file_name( + class_name=self.resource, + identifier=self.db_instance.name, + timestamp=timestamp, + extension=os.path.splitext(file_path)[1], + ) + + rq_job.delete() + return sendfile( + self.request, file_path, attachment=True, attachment_filename=filename + ) + + if rq_job.is_finished: + if self.export_args.location == Location.LOCAL: + return Response(status=status.HTTP_201_CREATED) + + elif self.export_args.location == Location.CLOUD_STORAGE: + rq_job.delete() + return Response(status=status.HTTP_200_OK) + else: + raise NotImplementedError() + elif rq_job.is_failed: + exc_info = rq_job.meta.get("formatted_exception", str(rq_job.exc_info)) + rq_job.delete() + return Response(exc_info, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + elif ( + rq_job.is_deferred + and rq_job.id not in queue.deferred_job_registry.get_job_ids() + ): + # Sometimes jobs can depend on outdated jobs in the deferred jobs registry. + # They can be fetched by their specific ids, but are not listed by get_job_ids(). + # Supposedly, this can happen because of the server restarts + # (potentially, because the redis used for the queue is in memory). 
+ # Another potential reason is canceling without enqueueing dependents. + # Such dependencies are never removed or finished, + # as there is no TTL for deferred jobs, + # so the current job can be blocked indefinitely. + + # Cancel the current job and then reenqueue it, considering the current situation. + # The new attempt will be made after the last existing job. + # In the case the server is configured with ONE_RUNNING_JOB_IN_QUEUE_PER_USER + # we have to enqueue dependent jobs after canceling one. + rq_job.cancel(enqueue_dependents=settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER) + rq_job.delete() + else: + return Response(status=status.HTTP_202_ACCEPTED) + + def export(self) -> Response: + queue: DjangoRQ = django_rq.get_queue(self.QUEUE_NAME) + rq_id = RQIdManager.build( + "export", + self.resource, + self.db_instance.pk, + subresource="backup", + user_id=self.request.user.id, + ) + rq_job = queue.fetch_job(rq_id) + + if rq_job: + response = self.handle_rq_job(rq_job, queue) + if response: + return response + + self.setup_background_job(queue, rq_id) + serializer = RqIdSerializer(data={"rq_id": rq_id}) + serializer.is_valid(raise_exception=True) + + return Response(serializer.data, status=status.HTTP_202_ACCEPTED) + + def setup_background_job( + self, + queue: DjangoRQ, + rq_id: str, + ) -> None: + if isinstance(self.db_instance, Task): + logger = slogger.task[self.db_instance.pk] + Exporter = TaskExporter + cache_ttl = dm.views.TASK_CACHE_TTL + else: + logger = slogger.project[self.db_instance.pk] + Exporter = ProjectExporter + cache_ttl = dm.views.PROJECT_CACHE_TTL + + func = self.export_callback + func_args = ( + self.db_instance, + Exporter, + "{}_backup.zip".format(self.resource), + logger, + cache_ttl, + ) + result_url = None + + if self.export_args.location == Location.CLOUD_STORAGE: + try: + storage_id = self.export_args.location_config["storage_id"] + except KeyError: + raise serializers.ValidationError( + "Cloud storage location was selected as the destination," + " but cloud storage id was not specified" + ) + + db_storage = get_cloud_storage_for_import_or_export( + storage_id=storage_id, + request=self.request, + is_default=self.export_args.location_config["is_default"], + ) + + last_instance_update_time = timezone.localtime( + self.db_instance.updated_date + ) + timestamp = self.get_timestamp(last_instance_update_time) + + filename_pattern = build_backup_file_name( + class_name=self.resource, + identifier=self.db_instance.name, + timestamp=timestamp, + ) + func = export_resource_to_cloud_storage + func_args = ( + db_storage, + self.export_args.filename, + filename_pattern, + self.export_callback, + ) + func_args + else: + result_url = self.make_result_url() + + user_id = self.request.user.id + + with get_rq_lock_by_user(queue, user_id): + queue.enqueue_call( + func=func, + args=func_args, + job_id=rq_id, + meta=get_rq_job_meta( + request=self.request, db_obj=self.db_instance, result_url=result_url + ), + depends_on=define_dependent_job(queue, user_id, rq_id=rq_id), + result_ttl=cache_ttl.total_seconds(), + failure_ttl=cache_ttl.total_seconds(), + ) + + def get_v1_endpoint_view_name(self) -> str: + """Get view name of the endpoint for the first API version""" + + return f"{self.resource}-export-backup" diff --git a/cvat/apps/engine/backup.py b/cvat/apps/engine/backup.py index 3ad2d1ff01d..3f345231e50 100644 --- a/cvat/apps/engine/backup.py +++ b/cvat/apps/engine/backup.py @@ -14,11 +14,9 @@ import uuid import mimetypes from zipfile import ZipFile -from datetime import 
datetime from tempfile import NamedTemporaryFile import django_rq -from attr.converters import to_bool from django.conf import settings from django.db import transaction from django.utils import timezone @@ -38,16 +36,17 @@ ProjectReadSerializer, ProjectFileSerializer, TaskFileSerializer, RqIdSerializer) from cvat.apps.engine.utils import ( av_scan_paths, process_failed_job, - get_rq_job_meta, get_import_rq_id, import_resource_with_clean_up_after, - sendfile, define_dependent_job, get_rq_lock_by_user, build_backup_file_name, + get_rq_job_meta, import_resource_with_clean_up_after, + define_dependent_job, get_rq_lock_by_user, ) +from cvat.apps.engine.rq_job_handler import RQIdManager, RQJobMetaField from cvat.apps.engine.models import ( - StorageChoice, StorageMethodChoice, DataChoice, Task, Project, Location) + StorageChoice, StorageMethodChoice, DataChoice, Project, Location) from cvat.apps.engine.task import JobFileMapping, _create_thread -from cvat.apps.engine.cloud_provider import import_resource_from_cloud_storage, export_resource_to_cloud_storage +from cvat.apps.engine.cloud_provider import import_resource_from_cloud_storage from cvat.apps.engine.location import StorageType, get_location_configuration -from cvat.apps.engine.view_utils import get_cloud_storage_for_import_or_export -from cvat.apps.dataset_manager.views import TASK_CACHE_TTL, PROJECT_CACHE_TTL, get_export_cache_dir, log_exception +from cvat.apps.engine.permissions import get_cloud_storage_for_import_or_export +from cvat.apps.dataset_manager.views import get_export_cache_dir, log_exception from cvat.apps.dataset_manager.bindings import CvatImportError slogger = ServerLogManager(__name__) @@ -887,7 +886,7 @@ def _import_project(filename, user, org_id): db_project = project_importer.import_project() return db_project.id -def _create_backup(db_instance, Exporter, output_path, logger, cache_ttl): +def create_backup(db_instance, Exporter, output_path, logger, cache_ttl): try: cache_dir = get_export_cache_dir(db_instance) output_path = os.path.join(cache_dir, output_path) @@ -922,141 +921,10 @@ def _create_backup(db_instance, Exporter, output_path, logger, cache_ttl): log_exception(logger) raise -def export(db_instance, request, queue_name): - action = request.query_params.get('action', None) - filename = request.query_params.get('filename', None) - - if action not in (None, 'download'): - raise serializers.ValidationError( - "Unexpected action specified for the request") - - if isinstance(db_instance, Task): - obj_type = 'task' - logger = slogger.task[db_instance.pk] - Exporter = TaskExporter - cache_ttl = TASK_CACHE_TTL - use_target_storage_conf = request.query_params.get('use_default_location', True) - elif isinstance(db_instance, Project): - obj_type = 'project' - logger = slogger.project[db_instance.pk] - Exporter = ProjectExporter - cache_ttl = PROJECT_CACHE_TTL - use_target_storage_conf = request.query_params.get('use_default_location', True) - else: - raise Exception( - "Unexpected type of db_instance: {}".format(type(db_instance))) - use_settings = to_bool(use_target_storage_conf) - obj = db_instance if use_settings else request.query_params - location_conf = get_location_configuration( - obj=obj, - use_settings=use_settings, - field_name=StorageType.TARGET - ) - - last_instance_update_time = timezone.localtime(db_instance.updated_date) - - queue = django_rq.get_queue(queue_name) - rq_id = f"export:{obj_type}.id{db_instance.pk}-by-{request.user}" - rq_job = queue.fetch_job(rq_id) - - if rq_job: - rq_request = 
rq_job.meta.get('request', None) - request_time = rq_request.get("timestamp", None) if rq_request else None - if request_time is None or request_time < last_instance_update_time: - # in case the server is configured with ONE_RUNNING_JOB_IN_QUEUE_PER_USER - # we have to enqueue dependent jobs after canceling one - rq_job.cancel(enqueue_dependents=settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER) - rq_job.delete() - rq_job = None - - timestamp = datetime.strftime(last_instance_update_time, "%Y_%m_%d_%H_%M_%S") - location = location_conf.get('location') - - if action == "download": - if location != Location.LOCAL: - return Response('Action "download" is only supported for a local backup location', status=status.HTTP_400_BAD_REQUEST) - - if not rq_job or not rq_job.is_finished: - return Response('Backup has not finished', status=status.HTTP_400_BAD_REQUEST) - - file_path = rq_job.return_value() - - if not file_path: - return Response('A result for exporting job was not found for finished RQ job', status=status.HTTP_500_INTERNAL_SERVER_ERROR) - - elif not os.path.exists(file_path): - return Response('The result file does not exist in export cache', status=status.HTTP_500_INTERNAL_SERVER_ERROR) - - filename = filename or build_backup_file_name( - class_name=obj_type, - identifier=db_instance.name, - timestamp=timestamp, - extension=os.path.splitext(file_path)[1] - ) - - rq_job.delete() - return sendfile(request, file_path, attachment=True, - attachment_filename=filename) - - if rq_job: - if rq_job.is_finished: - if location == Location.LOCAL: - return Response(status=status.HTTP_201_CREATED) - - elif location == Location.CLOUD_STORAGE: - rq_job.delete() - return Response(status=status.HTTP_200_OK) - else: - raise NotImplementedError() - elif rq_job.is_failed: - exc_info = rq_job.meta.get('formatted_exception', str(rq_job.exc_info)) - rq_job.delete() - return Response(exc_info, - status=status.HTTP_500_INTERNAL_SERVER_ERROR) - else: - return Response(status=status.HTTP_202_ACCEPTED) - - ttl = dm.views.PROJECT_CACHE_TTL.total_seconds() - user_id = request.user.id - - func = _create_backup if location == Location.LOCAL else export_resource_to_cloud_storage - func_args = (db_instance, Exporter, '{}_backup.zip'.format(obj_type), logger, cache_ttl) - - if location == Location.CLOUD_STORAGE: - try: - storage_id = location_conf['storage_id'] - except KeyError: - raise serializers.ValidationError( - 'Cloud storage location was selected as the destination,' - ' but cloud storage id was not specified') - - db_storage = get_cloud_storage_for_import_or_export( - storage_id=storage_id, request=request, - is_default=location_conf['is_default']) - filename_pattern = build_backup_file_name( - class_name=obj_type, - identifier=db_instance.name, - timestamp=timestamp, - ) - func_args = (db_storage, filename, filename_pattern, _create_backup) + func_args - - with get_rq_lock_by_user(queue, user_id): - queue.enqueue_call( - func=func, - args=func_args, - job_id=rq_id, - meta=get_rq_job_meta(request=request, db_obj=db_instance), - depends_on=define_dependent_job(queue, user_id, rq_id=rq_id), - result_ttl=ttl, - failure_ttl=ttl, - ) - return Response(status=status.HTTP_202_ACCEPTED) - - def _import(importer, request, queue, rq_id, Serializer, file_field_name, location_conf, filename=None): rq_job = queue.fetch_job(rq_id) - if (user_id_from_meta := getattr(rq_job, 'meta', {}).get('user', {}).get('id')) and user_id_from_meta != request.user.id: + if (user_id_from_meta := getattr(rq_job, 'meta', {}).get(RQJobMetaField.USER, 
{}).get('id')) and user_id_from_meta != request.user.id: return Response(status=status.HTTP_403_FORBIDDEN) if not rq_job: @@ -1145,12 +1013,12 @@ def import_project(request, queue_name, filename=None): if 'rq_id' in request.data: rq_id = request.data['rq_id'] else: - rq_id = get_import_rq_id('project', uuid.uuid4(), 'backup', request.user) + rq_id = RQIdManager.build('import', 'project', uuid.uuid4(), subresource='backup') Serializer = ProjectFileSerializer file_field_name = 'project_file' location_conf = get_location_configuration( - obj=request.query_params, + query_params=request.query_params, field_name=StorageType.SOURCE, ) @@ -1168,13 +1036,12 @@ def import_project(request, queue_name, filename=None): ) def import_task(request, queue_name, filename=None): - rq_id = request.data.get('rq_id', get_import_rq_id('task', uuid.uuid4(), 'backup', request.user)) - + rq_id = request.data.get('rq_id', RQIdManager.build('import', 'task', uuid.uuid4(), subresource='backup')) Serializer = TaskFileSerializer file_field_name = 'task_file' location_conf = get_location_configuration( - obj=request.query_params, + query_params=request.query_params, field_name=StorageType.SOURCE ) diff --git a/cvat/apps/engine/filters.py b/cvat/apps/engine/filters.py index e8ab6b46824..5ae98bf3ad8 100644 --- a/cvat/apps/engine/filters.py +++ b/cvat/apps/engine/filters.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: MIT -from typing import Any, Dict, Iterator, Optional +from typing import Any, Dict, Tuple, List, Iterator, Optional, Iterable from functools import reduce import operator import json @@ -16,6 +16,7 @@ from django.db.models.query import QuerySet from django.utils.translation import gettext_lazy as _ from django.utils.encoding import force_str +from django.http import HttpRequest from rest_framework import filters from rest_framework.compat import coreapi, coreschema from rest_framework.exceptions import ValidationError @@ -78,15 +79,16 @@ def get_schema_operation_parameters(self, view): class OrderingFilter(filters.OrderingFilter): ordering_param = 'sort' + reverse_flag = "-" def get_ordering(self, request, queryset, view): ordering = [] lookup_fields = self._get_lookup_fields(request, queryset, view) for term in super().get_ordering(request, queryset, view): flag = '' - if term.startswith("-"): - flag = '-' - term = term[1:] + if term.startswith(self.reverse_flag): + flag = self.reverse_flag + term = term[len(flag):] ordering.append(flag + lookup_fields[term]) return ordering @@ -349,3 +351,186 @@ def get_schema_operation_parameters(self, view): parameter['schema']['enum'] = [c[0] for c in filter_.extra['choices']] parameters.append(parameter) return parameters + + +class _NestedAttributeHandler: + nested_attribute_separator = '.' 
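# The handler declared here resolves dotted lookup paths such as "owner.id"
# against plain objects or dicts, via the DotDict and get_nested_attr members
# defined just below. A rough standalone sketch of that resolution, under the
# same rules (dict keys or attributes, zero-argument callables unwrapped); the
# function name `resolve` is illustrative, not the class API.
from typing import Any as _Any

def resolve(obj: _Any, path: str, sep: str = ".") -> _Any:
    for attr in path.split(sep):
        obj = obj[attr] if isinstance(obj, dict) else getattr(obj, attr)
        if callable(obj):
            obj = obj()  # unwrap zero-argument callables, as the handler does
    return obj

_job_meta = {"owner": {"id": 42}, "status": (lambda: "queued")}
assert resolve(_job_meta, "owner.id") == 42
assert resolve(_job_meta, "status") == "queued"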
+ + class DotDict(dict): + """recursive dot.notation access to dictionary attributes""" + __getattr__ = dict.get + __setattr__ = dict.__setitem__ + __delattr__ = dict.__delitem__ + + def __init__(self, dct: Dict): + for key, value in dct.items(): + if isinstance(value, dict): + value = self.__class__(value) + self[key] = value + + def get_nested_attr(self, obj: Any, nested_attr_path: str) -> Any: + result = obj + for attribute in nested_attr_path.split(self.nested_attribute_separator): + if isinstance(result, dict): + result = self.DotDict(result) + result = getattr(result, attribute) + + if callable(result): + result = result() + + return result + +class NonModelSimpleFilter(SimpleFilter, _NestedAttributeHandler): + """ + A simple filter backend for non-model views, useful for small search queries and manually-edited + requests. + + Argument types are numbers and strings. The only available check is equality. + Operators are not supported (e.g. or, less, greater, not etc.). + """ + + def get_schema_operation_parameters(self, view): + simple_filters = getattr(view, self.filter_fields_attr, None) + simple_filters_schema = getattr(view, 'simple_filters_schema', None) + + parameters = [] + if simple_filters and simple_filters_schema: + for filter_name in simple_filters: + filter_type, filter_choices = simple_filters_schema[filter_name] + parameter = { + 'name': filter_name, + 'in': 'query', + 'description': force_str(self.filter_desc.format_map({ + 'field_name': filter_name + })), + 'schema': { + 'type': filter_type + }, + } + if filter_choices: + parameter['schema']['enum'] = [c[0] for c in filter_choices] + parameters.append(parameter) + return parameters + + def filter_queryset(self, request: HttpRequest, queryset: Iterable, view): + filtered_queryset = queryset + + query_params = request.query_params + filters_to_use = set(query_params) + + simple_filters = getattr(view, self.filter_fields_attr, None) + lookup_fields = self.get_lookup_fields(view) + + if simple_filters and lookup_fields and (intersection := filters_to_use & set(simple_filters)): + filtered_queryset = [] + + for obj in queryset: + fits_filter = False + for field in intersection: + query_param = query_params[field] + + if query_param.isdigit(): + query_param = int(query_param) + + # replace empty string with None + if field == 'org' and not query_param: + query_param = None + + fits_filter = self.get_nested_attr(obj, lookup_fields[field]) == query_param + if not fits_filter: + break + + if fits_filter: + filtered_queryset.append(obj) + + return filtered_queryset + +class NonModelOrderingFilter(OrderingFilter, _NestedAttributeHandler): + """Ordering filter for non-model views. 
+ This filter backend supports the following syntaxes: + ?sort=field + ?sort=-field + ?sort=field1,field2 + ?sort=-field1,-field2 + """ + + def get_ordering(self, request, queryset, view) -> Tuple[List[str], bool]: + ordering = super().get_ordering(request, queryset, view) + result, reverse = [], False + for field in ordering: + if field.startswith(self.reverse_flag): + reverse = True + field = field[len(self.reverse_flag):] + result.append(field) + + return result, reverse + + def filter_queryset(self, request: HttpRequest, queryset: Iterable, view) -> Iterable: + ordering, reverse = self.get_ordering(request, queryset, view) + + if ordering: + return sorted(queryset, key=lambda obj: [self.get_nested_attr(obj, field) for field in ordering], reverse=reverse) + + return queryset + + +class NonModelJsonLogicFilter(JsonLogicFilter, _NestedAttributeHandler): + filter_description = _(dedent(""" + JSON Logic filter. This filter can be used to perform complex filtering by grouping rules.\n + Details about the syntax used can be found at the link: https://jsonlogic.com/\n + """)) + + def _apply_filter(self, rules, lookup_fields, obj): + op, args = next(iter(rules.items())) + if op in ['or', 'and']: + return reduce({ + 'or': any, + 'and': all, + }[op], [self._apply_filter(arg, lookup_fields, obj) for arg in args]) + elif op == '!': + return not self._apply_filter(args, lookup_fields, obj) + elif op == 'var': + var = lookup_fields[args] + var_value = self.get_nested_attr(obj, var) + return var_value is not None + elif op in ['!=', '==', '<', '>', '<=', '>='] and len(args) == 2: + var = lookup_fields[args[0]['var']] + var_value = self.get_nested_attr(obj, var) + return { + '!=': operator.ne, + '==': operator.eq, + '<': operator.lt, + '<=': operator.le, + '>': operator.gt, + '>=': operator.ge, + }[op](var_value, args[1]) + elif op == 'in': + if isinstance(args[0], dict): + var = lookup_fields[args[0]['var']] + var_value = self.get_nested_attr(obj, var) + return operator.contains(args[1], var_value) + else: + var = lookup_fields[args[1]['var']] + var_value = self.get_nested_attr(obj, var) + return operator.contains(args[0], var_value) + elif op == '<=' and len(args) == 3: + var = lookup_fields[args[1]['var']] + var_value = self.get_nested_attr(obj, var) + return var_value >= args[0] and var_value <= args[2] + else: + raise ValidationError(f'filter: {op} operation with {args} arguments is not implemented') + + def filter_queryset(self, request: HttpRequest, queryset: Iterable, view) -> Iterable: + filtered_queryset = queryset + json_rules = request.query_params.get(self.filter_param) + if json_rules: + filtered_queryset = [] + parsed_rules = self._parse_query(json_rules) + lookup_fields = self._get_lookup_fields(view) + + for obj in queryset: + fits_filter = self._apply_filter(parsed_rules, lookup_fields, obj) + if fits_filter: + filtered_queryset.append(obj) + + return filtered_queryset diff --git a/cvat/apps/engine/location.py b/cvat/apps/engine/location.py index 6eb5dc76441..ac6ab77dc07 100644 --- a/cvat/apps/engine/location.py +++ b/cvat/apps/engine/location.py @@ -3,7 +3,7 @@ # SPDX-License-Identifier: MIT from enum import Enum -from typing import Any, Dict, Union +from typing import Any, Dict, Union, Optional from cvat.apps.engine.models import Location, Project, Task, Job @@ -15,16 +15,25 @@ def __str__(self): return self.value def get_location_configuration( - obj: Union[Project, Task, Job, Dict], + query_params: Dict[str, Any], field_name: str, - use_settings: bool = False, + *, + db_instance: 
Optional[Union[Project, Task, Job]] = None, ) -> Dict[str, Any]: + location = query_params.get('location') + + # handle resource import + if not location and not db_instance: + location = Location.LOCAL + + use_default_settings = location is None + location_conf = { - "is_default": use_settings + "is_default": use_default_settings } - if use_settings: - storage = getattr(obj, field_name) if not isinstance(obj, Job) else getattr(obj.segment.task, field_name) + if use_default_settings: + storage = getattr(db_instance, field_name) if not isinstance(db_instance, Job) else getattr(db_instance.segment.task, field_name) if storage is None: location_conf['location'] = Location.LOCAL else: @@ -32,9 +41,16 @@ def get_location_configuration( if cloud_storage_id := storage.cloud_storage_id: location_conf['storage_id'] = cloud_storage_id else: - # obj is query_params - location_conf['location'] = obj.get('location', Location.LOCAL) - if cloud_storage_id := obj.get('cloud_storage_id'): + if location not in Location.list(): + raise ValueError(f"The specified location {location} is not supported") + + cloud_storage_id = query_params.get('cloud_storage_id') + + if location == Location.CLOUD_STORAGE and not cloud_storage_id: + raise ValueError("Cloud storage was selected as location but cloud_storage_id was not specified") + + location_conf['location'] = location + if cloud_storage_id: location_conf['storage_id'] = int(cloud_storage_id) return location_conf diff --git a/cvat/apps/engine/mixins.py b/cvat/apps/engine/mixins.py index 751a1986461..b0ab8315ae0 100644 --- a/cvat/apps/engine/mixins.py +++ b/cvat/apps/engine/mixins.py @@ -12,22 +12,31 @@ from pathlib import Path from tempfile import NamedTemporaryFile from unittest import mock +from textwrap import dedent from typing import Optional, Callable, Dict, Any, Mapping import django_rq from attr.converters import to_bool from django.conf import settings +from django.http import HttpRequest +from drf_spectacular.types import OpenApiTypes +from drf_spectacular.utils import (OpenApiParameter, OpenApiResponse, + extend_schema) from rest_framework import mixins, status +from rest_framework.decorators import action from rest_framework.authentication import SessionAuthentication from rest_framework.response import Response from rest_framework.views import APIView +from cvat.apps.engine.background_operations import (BackupExportManager, + DatasetExportManager) +from cvat.apps.engine.handlers import clear_import_cache from cvat.apps.engine.location import StorageType, get_location_configuration from cvat.apps.engine.log import ServerLogManager from cvat.apps.engine.models import Location -from cvat.apps.engine.serializers import DataSerializer -from cvat.apps.engine.handlers import clear_import_cache -from cvat.apps.engine.utils import get_import_rq_id +from cvat.apps.engine.rq_job_handler import RQIdManager +from cvat.apps.engine.serializers import DataSerializer, RqIdSerializer +from cvat.apps.engine.utils import is_dataset_export slogger = ServerLogManager(__name__) @@ -266,7 +275,7 @@ def init_tus_upload(self, request): if file_exists: # check whether the rq_job is in progress or has been finished/failed object_class_name = self._object.__class__.__name__.lower() - template = get_import_rq_id(object_class_name, self._object.pk, import_type, request.user) + template = RQIdManager.build('import', object_class_name, self._object.pk, subresource=import_type) queue = django_rq.get_queue(settings.CVAT_QUEUES.IMPORT_DATA.value) finished_job_ids = 
queue.finished_job_registry.get_job_ids()
        failed_job_ids = queue.failed_job_registry.get_job_ids()
@@ -386,62 +395,94 @@ def upload_finished(self, request):
         raise NotImplementedError('Must be implemented in the derived class')
 
-class AnnotationMixin:
-    def export_annotations(
+class PartialUpdateModelMixin:
+    """
+    Update fields of a model instance.
+
+    Almost the same as UpdateModelMixin, but has no public PUT / update() method.
+    """
+
+    def _update(self, request, *args, **kwargs):
+        # This method must not be named "update", so that it is not matched with the PUT method
+        return mixins.UpdateModelMixin.update(self, request, *args, **kwargs)
+
+    def perform_update(self, serializer):
+        mixins.UpdateModelMixin.perform_update(self, serializer=serializer)
+
+    def partial_update(self, request, *args, **kwargs):
+        with mock.patch.object(self, 'update', new=self._update, create=True):
+            return mixins.UpdateModelMixin.partial_update(self, request=request, *args, **kwargs)
+
+
+class DatasetMixin:
+    def export_dataset_v1(
         self,
         request,
-        db_obj,
-        export_func,
-        callback: Callable[[int, Optional[str], Optional[str]], str],
+        save_images: bool,
         *,
-        get_data: Optional[Callable[[int], Dict[str, Any]]]= None,
-    ):
-        format_name = request.query_params.get("format", "")
-        action = request.query_params.get("action", "").lower()
-        filename = request.query_params.get("filename", "")
-
-        use_default_location = request.query_params.get("use_default_location", True)
-        use_settings = to_bool(use_default_location)
-        obj = db_obj if use_settings else request.query_params
-        location_conf = get_location_configuration(
-            obj=obj,
-            use_settings=use_settings,
-            field_name=StorageType.TARGET,
-        )
+        get_data: Optional[Callable[[int], Dict[str, Any]]] = None,
+    ) -> Response:
+        if request.query_params.get("format"):
+            callback = self.get_export_callback(save_images)
 
-        object_name = self._object.__class__.__name__.lower()
-        rq_id = f"export:{request.path.strip('/').split('/')[-1]}-for-{object_name}.id{self._object.pk}-in-{format_name.replace(' ', '_')}-format"
-
-        if format_name:
-            return export_func(db_instance=self._object,
-                rq_id=rq_id,
-                request=request,
-                action=action,
-                callback=callback,
-                format_name=format_name,
-                filename=filename,
-                location_conf=location_conf,
-            )
+            dataset_export_manager = DatasetExportManager(self._object, request, callback, save_images=save_images, version=1)
+            return dataset_export_manager.export()
 
         if not get_data:
-            return Response("Format is not specified",status=status.HTTP_400_BAD_REQUEST)
+            return Response("Format is not specified", status=status.HTTP_400_BAD_REQUEST)
 
         data = get_data(self._object.pk)
         return Response(data)
 
+    @extend_schema(
+        summary='Initialize process to export resource as a dataset in a specific format',
+        description=dedent("""\
+            The request `POST /api/<projects|tasks|jobs>/id/dataset/export` will initialize
+            a background process to export a dataset. To check the status of the process,
+            use `GET /api/requests/<rq_id>`, where **rq_id** is the request ID returned in the response for this endpoint.
+        """),
+        parameters=[
+            OpenApiParameter('format', location=OpenApiParameter.QUERY,
+                description='Desired output format name\nYou can get the list of supported formats at:\n/server/annotation/formats',
+                type=OpenApiTypes.STR, required=True),
+            OpenApiParameter('filename', description='Desired output file name',
+                location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False),
+            OpenApiParameter('location', description='Where to save the downloaded dataset',
+                location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False,
+                enum=Location.list()),
+            OpenApiParameter('cloud_storage_id', description='Storage id',
+                location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False),
+            OpenApiParameter('save_images', description='Include images or not',
+                location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, default=False),
+        ],
+        request=OpenApiTypes.NONE,
+        responses={
+            '202': OpenApiResponse(response=RqIdSerializer, description='Exporting has been started'),
+            '405': OpenApiResponse(description='Format is not available'),
+            '409': OpenApiResponse(description='Exporting is already in progress'),
+        },
+    )
+    @action(detail=True, methods=['POST'], serializer_class=None, url_path='dataset/export')
+    def export_dataset_v2(self, request: HttpRequest, pk: int):
+        self._object = self.get_object() # force call of check_object_permissions()
+
+        save_images = is_dataset_export(request)
+        callback = self.get_export_callback(save_images)
+
+        dataset_export_manager = DatasetExportManager(self._object, request, callback, save_images=save_images, version=2)
+        return dataset_export_manager.export()
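# A hypothetical end-to-end client flow for the v2 action above: start the export
# with POST .../dataset/export, then poll the Requests API with the returned rq_id.
# The host, task id, format name, and use of the `requests` library are assumptions
# for illustration; the URL shapes and status values follow the schema annotations
# above and the RequestStatus choices added in serializers.py.
import time
import requests

def export_dataset_and_wait(session: requests.Session, host: str, task_id: int) -> dict:
    response = session.post(
        f"{host}/api/tasks/{task_id}/dataset/export",
        params={"format": "CVAT for images 1.1", "save_images": "true"},
    )
    response.raise_for_status()
    rq_id = response.json()["rq_id"]

    while True:
        # poll the background request until it reaches a final state
        request_state = session.get(f"{host}/api/requests/{rq_id}").json()
        if request_state["status"] in ("finished", "failed"):
            return request_state  # "result_url" is present for finished local exports
        time.sleep(3)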
 
+    # FUTURE-TODO: migrate to new API
     def import_annotations(self, request, db_obj, import_func, rq_func, rq_id_template):
         is_tus_request = request.headers.get('Upload-Length', None) is not None or \
             request.method == 'OPTIONS'
         if is_tus_request:
             return self.init_tus_upload(request)
 
-        use_default_location = request.query_params.get('use_default_location', True)
         conv_mask_to_poly = to_bool(request.query_params.get('conv_mask_to_poly', True))
-        use_settings = to_bool(use_default_location)
-        obj = db_obj if use_settings else request.query_params
         location_conf = get_location_configuration(
-            obj=obj,
-            use_settings=use_settings,
+            db_instance=db_obj,
+            query_params=request.query_params,
             field_name=StorageType.SOURCE,
         )
@@ -462,16 +503,21 @@ def import_annotations(self, request, db_obj, import_func, rq_func, rq_id_templa
         return self.upload_data(request)
 
-class SerializeMixin:
-    def serialize(self, request, export_func):
+
+class BackupMixin:
+    def export_backup_v1(self, request: HttpRequest) -> Response:
         db_object = self.get_object() # force to call check_object_permissions
-        return export_func(
-            db_object,
-            request,
-            queue_name=settings.CVAT_QUEUES.EXPORT_DATA.value,
-        )
 
-    def deserialize(self, request, import_func):
+        export_backup_manager = BackupExportManager(db_object, request, version=1)
+        response = export_backup_manager.export()
+
+        if request.query_params.get('action') != 'download':
+            response.headers['Deprecated'] = True
+
+        return response
+
+    # FUTURE-TODO: migrate to new API
+    def import_backup_v1(self, request: HttpRequest, import_func: Callable) -> Response:
         location = request.query_params.get("location", Location.LOCAL)
         if location == Location.CLOUD_STORAGE:
             file_name = request.query_params.get("filename", "")
@@ -482,24 +528,35 @@ def deserialize(self, request, import_func):
         )
         return self.upload_data(request)
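# For contrast with the v2 backup action declared next: a hypothetical client of
# the legacy v1 flow above, which now advertises its own deprecation. The exact
# v1 route (`GET /api/tasks/<id>/backup`) is an assumption based on the existing
# API surface; the `Deprecated` header matches what export_backup_v1 sets above.
import requests

def start_backup_v1(session: requests.Session, host: str, task_id: int) -> int:
    response = session.get(f"{host}/api/tasks/{task_id}/backup")
    response.raise_for_status()
    if response.headers.get("Deprecated"):
        # export_backup_v1 marks every non-download response as deprecated
        print("v1 backup polling is deprecated; prefer POST .../backup/export")
    return response.status_code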
 
+    @extend_schema(summary='Initiate process to backup resource',
+        description=dedent("""\
+            The request `POST /api/<projects|tasks>/id/backup/export` will initialize
+            a background process to back up a resource. To check the status of the process,
+            use `GET /api/requests/<rq_id>`, where **rq_id** is the request ID returned in the response for this endpoint.
+        """),
+        parameters=[
+            OpenApiParameter('filename', description='Backup file name',
+                location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False),
+            OpenApiParameter('location', description='Where to save the downloaded backup',
+                location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False,
+                enum=Location.list()),
+            OpenApiParameter('cloud_storage_id', description='Storage id',
+                location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False),
+        ],
+        request=OpenApiTypes.NONE,
+        responses={
+            '202': OpenApiResponse(response=RqIdSerializer, description='Creating a backup file has been started'),
+            '400': OpenApiResponse(description='Wrong query parameters were passed'),
+            '409': OpenApiResponse(description='The backup process has already been initiated and is not yet finished'),
+        },
+    )
+    @action(detail=True, methods=['POST'], serializer_class=None, url_path='backup/export')
+    def export_backup_v2(self, request: HttpRequest, pk: int):
+        db_object = self.get_object() # force to call check_object_permissions
 
-class PartialUpdateModelMixin:
-    """
-    Update fields of a model instance.
-
-    Almost the same as UpdateModelMixin, but has no public PUT / update() method.
-    """
-
-    def _update(self, request, *args, **kwargs):
-        # This method must not be named "update" not to be matched with the PUT method
-        return mixins.UpdateModelMixin.update(self, request, *args, **kwargs)
-
-    def perform_update(self, serializer):
-        mixins.UpdateModelMixin.perform_update(self, serializer=serializer)
+        export_backup_manager = BackupExportManager(db_object, request, version=2)
+        return export_backup_manager.export()
 
-    def partial_update(self, request, *args, **kwargs):
-        with mock.patch.object(self, 'update', new=self._update, create=True):
-            return mixins.UpdateModelMixin.partial_update(self, request=request, *args, **kwargs)
 
 class CsrfWorkaroundMixin(APIView):
     """
diff --git a/cvat/apps/engine/permissions.py b/cvat/apps/engine/permissions.py
index 4229f05b367..777b7a0e8f5 100644
--- a/cvat/apps/engine/permissions.py
+++ b/cvat/apps/engine/permissions.py
@@ -6,16 +6,20 @@
 from collections import namedtuple
 from typing import Any, Dict, List, Optional, Sequence, Union, cast
 
+from django.shortcuts import get_object_or_404
 from django.conf import settings
-from rest_framework.exceptions import ValidationError
+from rest_framework.exceptions import ValidationError, PermissionDenied, NotFound
+from rq.job import Job as RQJob
 
+from cvat.apps.engine.rq_job_handler import is_rq_job_owner
 from cvat.apps.iam.permissions import (
     OpenPolicyAgentPermission, StrEnum, get_iam_context, get_membership
 )
 from cvat.apps.organizations.models import Organization
 
 from .models import AnnotationGuide, CloudStorage, Issue, Job, Label, Project, Task
+from cvat.apps.engine.utils import is_dataset_export
 
 def _get_key(d: Dict[str, Any], key_path: Union[str, Sequence[str]]) -> Optional[Any]:
     """
@@ -270,7 +274,9 @@ def get_scopes(request, view, obj):
             ('append_dataset_chunk', 'PATCH'): Scopes.IMPORT_DATASET,
             ('annotations', 'GET'): Scopes.EXPORT_ANNOTATIONS,
             ('dataset', 'GET'): Scopes.IMPORT_DATASET if request.query_params.get('action') == 'import_status' else Scopes.EXPORT_DATASET,
+            ('export_dataset_v2', 'GET'):
Scopes.EXPORT_DATASET if is_dataset_export(request) else Scopes.EXPORT_ANNOTATIONS, ('export_backup', 'GET'): Scopes.EXPORT_BACKUP, + ('export_backup_v2', 'GET'): Scopes.EXPORT_BACKUP, ('import_backup', 'POST'): Scopes.IMPORT_BACKUP, ('append_backup_chunk', 'PATCH'): Scopes.IMPORT_BACKUP, ('append_backup_chunk', 'HEAD'): Scopes.IMPORT_BACKUP, @@ -473,6 +479,7 @@ def get_scopes(request, view, obj) -> List[Scopes]: ('append_annotations_chunk', 'PATCH'): Scopes.UPDATE_ANNOTATIONS, ('append_annotations_chunk', 'HEAD'): Scopes.UPDATE_ANNOTATIONS, ('dataset_export', 'GET'): Scopes.EXPORT_DATASET, + ('export_dataset_v2', 'GET'): Scopes.EXPORT_DATASET if is_dataset_export(request) else Scopes.EXPORT_ANNOTATIONS, ('metadata', 'GET'): Scopes.VIEW_METADATA, ('metadata', 'PATCH'): Scopes.UPDATE_METADATA, ('data', 'GET'): Scopes.VIEW_DATA, @@ -484,6 +491,7 @@ def get_scopes(request, view, obj) -> List[Scopes]: ('append_backup_chunk', 'PATCH'): Scopes.IMPORT_BACKUP, ('append_backup_chunk', 'HEAD'): Scopes.IMPORT_BACKUP, ('export_backup', 'GET'): Scopes.EXPORT_BACKUP, + ('export_backup_v2', 'GET'): Scopes.EXPORT_BACKUP, ('preview', 'GET'): Scopes.VIEW, }.get((view.action, request.method)) @@ -709,6 +717,7 @@ def get_scopes(request, view, obj): ('metadata','PATCH'): Scopes.UPDATE_METADATA, ('issues', 'GET'): Scopes.VIEW, ('dataset_export', 'GET'): Scopes.EXPORT_DATASET, + ('export_dataset_v2', 'GET'): Scopes.EXPORT_DATASET if is_dataset_export(request) else Scopes.EXPORT_ANNOTATIONS, ('preview', 'GET'): Scopes.VIEW, }.get((view.action, request.method)) @@ -1193,3 +1202,91 @@ def get_scopes(request, view, obj): 'destroy': Scopes.DELETE, 'retrieve': Scopes.VIEW, }.get(view.action, None)] + + +class RequestPermission(OpenPolicyAgentPermission): + class Scopes(StrEnum): + LIST = 'list' + VIEW = 'view' + CANCEL = 'cancel' + + @classmethod + def create(cls, request, view, obj: Optional[RQJob], iam_context: Dict): + permissions = [] + if view.basename == 'request': + for scope in cls.get_scopes(request, view, obj): + if scope == cls.Scopes.CANCEL: + parsed_rq_id = obj.parsed_rq_id + + permission_class, resource_scope = { + ('import', 'project', 'dataset'): (ProjectPermission, ProjectPermission.Scopes.IMPORT_DATASET), + ('import', 'project', 'backup'): (ProjectPermission, ProjectPermission.Scopes.IMPORT_BACKUP), + ('import', 'task', 'annotations'): (TaskPermission, TaskPermission.Scopes.IMPORT_ANNOTATIONS), + ('import', 'task', 'backup'): (TaskPermission, TaskPermission.Scopes.IMPORT_BACKUP), + ('import', 'job', 'annotations'): (JobPermission, JobPermission.Scopes.IMPORT_ANNOTATIONS), + ('create', 'task', None): (TaskPermission, TaskPermission.Scopes.VIEW), + ('export', 'project', 'annotations'): (ProjectPermission, ProjectPermission.Scopes.EXPORT_ANNOTATIONS), + ('export', 'project', 'dataset'): (ProjectPermission, ProjectPermission.Scopes.EXPORT_DATASET), + ('export', 'project', 'backup'): (ProjectPermission, ProjectPermission.Scopes.EXPORT_BACKUP), + ('export', 'task', 'annotations'): (TaskPermission, TaskPermission.Scopes.EXPORT_ANNOTATIONS), + ('export', 'task', 'dataset'): (TaskPermission, TaskPermission.Scopes.EXPORT_DATASET), + ('export', 'task', 'backup'): (TaskPermission, TaskPermission.Scopes.EXPORT_BACKUP), + ('export', 'job', 'annotations'): (JobPermission, JobPermission.Scopes.EXPORT_ANNOTATIONS), + ('export', 'job', 'dataset'): (JobPermission, JobPermission.Scopes.EXPORT_DATASET), + }[(parsed_rq_id.action, parsed_rq_id.resource, parsed_rq_id.subresource)] + + + resource = None + if (resource_id := 
parsed_rq_id.identifier) and isinstance(resource_id, int):
+                        resource_model = {
+                            'project': Project,
+                            'task': Task,
+                            'job': Job,
+                        }[parsed_rq_id.resource]
+
+                        try:
+                            resource = resource_model.objects.get(id=resource_id)
+                        except resource_model.DoesNotExist as ex:
+                            raise NotFound(f'The {parsed_rq_id.resource!r} with specified id#{resource_id} does not exist') from ex
+
+                    permissions.append(permission_class.create_base_perm(request, view, scope=resource_scope, iam_context=iam_context, obj=resource))
+
+                if scope != cls.Scopes.LIST:
+                    user_id = request.user.id
+                    if not is_rq_job_owner(obj, user_id):
+                        raise PermissionDenied('You don\'t have permission to perform this action')
+
+        return permissions
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.url = settings.IAM_OPA_DATA_URL + '/requests/allow'
+
+    @staticmethod
+    def get_scopes(request, view, obj) -> List[Scopes]:
+        Scopes = __class__.Scopes
+        return [{
+            ('list', 'GET'): Scopes.LIST,
+            ('retrieve', 'GET'): Scopes.VIEW,
+            ('cancel', 'POST'): Scopes.CANCEL,
+        }.get((view.action, request.method))]
+
+
+    def get_resource(self):
+        return None
+
+def get_cloud_storage_for_import_or_export(
+    storage_id: int, *, request, is_default: bool = False
+) -> CloudStorage:
+    perm = CloudStoragePermission.create_scope_view(None, storage_id=storage_id, request=request)
+    result = perm.check_access()
+    if not result.allow:
+        if is_default:
+            # In this case, the user did not specify the location explicitly
+            error_message = "A cloud storage is selected as the default location. "
+        else:
+            error_message = ""
+        error_message += "You don't have access to this cloud storage"
+        raise PermissionDenied(error_message)
+
+    return get_object_or_404(CloudStorage, pk=storage_id)
diff --git a/cvat/apps/engine/rq_job_handler.py b/cvat/apps/engine/rq_job_handler.py
new file mode 100644
index 00000000000..7c7b41deef9
--- /dev/null
+++ b/cvat/apps/engine/rq_job_handler.py
@@ -0,0 +1,133 @@
+# Copyright (C) 2024 CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
+
+import attrs
+
+from typing import Optional, Union
+from uuid import UUID
+from rq.job import Job as RQJob
+
+class RQJobMetaField:
+    # common fields
+    FORMATTED_EXCEPTION = "formatted_exception"
+    REQUEST = 'request'
+    USER = 'user'
+    PROJECT_ID = 'project_id'
+    TASK_ID = 'task_id'
+    JOB_ID = 'job_id'
+    ORG_ID = 'org_id'
+    ORG_SLUG = 'org_slug'
+    STATUS = 'status'
+    PROGRESS = 'progress'
+    TASK_PROGRESS = 'task_progress'
+    # export specific fields
+    RESULT_URL = 'result_url'
+
+
+def is_rq_job_owner(rq_job: RQJob, user_id: int) -> bool:
+    return rq_job.meta.get(RQJobMetaField.USER, {}).get('id') == user_id
+
+@attrs.define(kw_only=True)
+class RQId:
+    action: str = attrs.field(
+        validator=attrs.validators.instance_of(str)
+    )
+    resource: str = attrs.field(
+        validator=attrs.validators.instance_of(str)
+    )
+    identifier: Union[int, UUID] = attrs.field(
+        validator=attrs.validators.instance_of((int, UUID))
+    )
+    subresource: Optional[str] = attrs.field(
+        validator=attrs.validators.optional(
+            attrs.validators.instance_of(str)
+        )
+    )
+    user_id: Optional[int] = attrs.field(
+        validator=attrs.validators.optional(attrs.validators.instance_of(int))
+    )
+    format: Optional[str] = attrs.field(
+        validator=attrs.validators.optional(attrs.validators.instance_of(str))
+    )
+
+
+class RQIdManager:
+    # RQ ID templates:
+    # import:<resource>-<id>-<subresource>
+    # create:task-<id>
+    # export:<resource>-<id>-<subresource>-in-<format>-format-by-<user_id>
+    # export:<resource>-<id>-backup-by-<user_id>
+
+    @staticmethod
+    def build(
+        action: str,
+        resource: str,
+        identifier: Union[int, UUID],
+
*, + subresource: Optional[str] = None, + user_id: Optional[int] = None, + anno_format: Optional[str] = None, + ) -> str: + if "import" == action: + return f"{action}:{resource}-{identifier}-{subresource}" + elif "export" == action: + if anno_format is None: + return ( + f"{action}:{resource}-{identifier}-{subresource}-by-{user_id}" + ) + format_to_be_used_in_urls = anno_format.replace(" ", "_").replace(".", "@") + return f"{action}:{resource}-{identifier}-{subresource}-in-{format_to_be_used_in_urls}-format-by-{user_id}" + elif "create" == action: + assert "task" == resource + return f"{action}:{resource}-{identifier}" + else: + raise ValueError(f"Unsupported action {action!r} was found") + + @staticmethod + def parse(rq_id: str) -> RQId: + action: Optional[str] = None + resource: Optional[str] = None + identifier: Optional[Union[UUID, int]] = None + subresource: Optional[str] = None + user_id: Optional[int] = None + anno_format: Optional[str] = None + + try: + action_and_resource, unparsed = rq_id.split("-", maxsplit=1) + action, resource = action_and_resource.split(":") + + if "create" == action: + identifier = unparsed + elif "import" == action: + identifier, subresource = unparsed.rsplit("-", maxsplit=1) + else: # action == export + identifier, subresource, unparsed = unparsed.split("-", maxsplit=2) + if "backup" == subresource: + _, user_id = unparsed.split("-") + else: + unparsed, _, user_id = unparsed.rsplit("-", maxsplit=2) + # remove prefix(in-), suffix(-format) and restore original format name + # by replacing special symbols: "_" -> " ", "@" -> "." + anno_format = unparsed[3:-7].replace("_", " ").replace("@", ".") + + if identifier is not None: + if identifier.isdigit(): + identifier = int(identifier) + else: + identifier = UUID(identifier) + + if user_id is not None: + user_id = int(user_id) + + return RQId( + action=action, + resource=resource, + identifier=identifier, + subresource=subresource, + user_id=user_id, + format=anno_format, + ) + + except Exception as ex: + raise ValueError(f"The {rq_id!r} RQ ID cannot be parsed: {str(ex)}") from ex diff --git a/cvat/apps/engine/serializers.py b/cvat/apps/engine/serializers.py index 37c06694dc5..235e6d700dc 100644 --- a/cvat/apps/engine/serializers.py +++ b/cvat/apps/engine/serializers.py @@ -3,27 +3,36 @@ # # SPDX-License-Identifier: MIT +import warnings from copy import copy from inspect import isclass import os import re import shutil import string +import rq.defaults as rq_defaults from tempfile import NamedTemporaryFile import textwrap from typing import Any, Dict, Iterable, Optional, OrderedDict, Union +from rq.job import Job as RQJob, JobStatus as RQJobStatus +from datetime import timezone, timedelta +from decimal import Decimal + from rest_framework import serializers, exceptions from django.contrib.auth.models import User, Group from django.db import transaction +from django.db.models import TextChoices from cvat.apps.dataset_manager.formats.utils import get_label_color +from cvat.apps.engine.utils import parse_exception_message from cvat.apps.engine import models from cvat.apps.engine.cloud_provider import get_cloud_storage_instance, Credentials, Status from cvat.apps.engine.log import ServerLogManager from cvat.apps.engine.permissions import TaskPermission from cvat.apps.engine.utils import parse_specific_attributes, build_field_filter_params, get_list_view_name, reverse +from cvat.apps.engine.rq_job_handler import RQJobMetaField, RQId from drf_spectacular.utils import OpenApiExample, extend_schema_field, 
extend_schema_serializer @@ -836,6 +845,11 @@ class RqStatusSerializer(serializers.Serializer): message = serializers.CharField(allow_blank=True, default="") progress = serializers.FloatField(max_value=100, default=0) + def __init__(self, instance=None, data=..., **kwargs): + warnings.warn("RqStatusSerializer is deprecated, " + "use cvat.apps.engine.serializers.RequestSerializer instead", DeprecationWarning) + super().__init__(instance, data, **kwargs) + class RqIdSerializer(serializers.Serializer): rq_id = serializers.CharField(help_text="Request id") @@ -2185,3 +2199,134 @@ def _update_assets(guide): class Meta: model = models.AnnotationGuide fields = ('id', 'task_id', 'project_id', 'markdown', ) + +class UserIdentifiersSerializer(BasicUserSerializer): + class Meta(BasicUserSerializer.Meta): + fields = ( + "id", + "username", + ) + +class RequestStatus(TextChoices): + QUEUED = "queued" + STARTED = "started" + FAILED = "failed" + FINISHED = "finished" + +class RequestAction(TextChoices): + CREATE = "create" + IMPORT = "import" + EXPORT = "export" + +class RequestTarget(TextChoices): + PROJECT = "project" + TASK = "task" + JOB = "job" + +class RequestSubresource(TextChoices): + ANNOTATIONS = "annotations" + DATASET = "dataset" + BACKUP = "backup" + +class RequestDataOperationSerializer(serializers.Serializer): + type = serializers.CharField() + target = serializers.ChoiceField(choices=RequestTarget.choices) + project_id = serializers.IntegerField(required=False, allow_null=True) + task_id = serializers.IntegerField(required=False, allow_null=True) + job_id = serializers.IntegerField(required=False, allow_null=True) + format = serializers.CharField(required=False, allow_null=True) + + def to_representation(self, rq_job: RQJob) -> Dict[str, Any]: + parsed_rq_id: RQId = rq_job.parsed_rq_id + + return { + "type": ":".join( + [ + parsed_rq_id.action, + parsed_rq_id.subresource or parsed_rq_id.resource, + ] + ), + "target": parsed_rq_id.resource, + "project_id": rq_job.meta[RQJobMetaField.PROJECT_ID], + "task_id": rq_job.meta[RQJobMetaField.TASK_ID], + "job_id": rq_job.meta[RQJobMetaField.JOB_ID], + "format": parsed_rq_id.format, + } + +class RequestSerializer(serializers.Serializer): + # SerializerMethodField is not used here to mark "status" field as required and fix schema generation. + # Marking them as read_only leads to generating type as allOf with one reference to RequestStatus component. 
+ # The client generated using openapi-generator from such a schema contains wrong type like: + # status (bool, date, datetime, dict, float, int, list, str, none_type): [optional] + status = serializers.ChoiceField(source="get_status", choices=RequestStatus.choices) + message = serializers.SerializerMethodField() + id = serializers.CharField() + operation = RequestDataOperationSerializer(source="*") + progress = serializers.SerializerMethodField() + created_date = serializers.DateTimeField(source="created_at") + started_date = serializers.DateTimeField( + required=False, allow_null=True, source="started_at", + ) + finished_date = serializers.DateTimeField( + required=False, allow_null=True, source="ended_at", + ) + expiry_date = serializers.SerializerMethodField() + owner = serializers.SerializerMethodField() + result_url = serializers.URLField(required=False, allow_null=True) + result_id = serializers.IntegerField(required=False, allow_null=True) + + @extend_schema_field(UserIdentifiersSerializer()) + def get_owner(self, rq_job: RQJob) -> Dict[str, Any]: + return UserIdentifiersSerializer(rq_job.meta[RQJobMetaField.USER]).data + + @extend_schema_field( + serializers.FloatField(min_value=0, max_value=1, required=False, allow_null=True) + ) + def get_progress(self, rq_job: RQJob) -> Decimal: + # progress of task creation is stored in "task_progress" field + # progress of project import is stored in "progress" field + return Decimal(rq_job.meta.get(RQJobMetaField.PROGRESS) or rq_job.meta.get(RQJobMetaField.TASK_PROGRESS) or 0.) + + @extend_schema_field(serializers.DateTimeField(required=False, allow_null=True)) + def get_expiry_date(self, rq_job: RQJob) -> Optional[str]: + delta = None + if rq_job.is_finished: + delta = rq_job.result_ttl or rq_defaults.DEFAULT_RESULT_TTL + elif rq_job.is_failed: + delta = rq_job.failure_ttl or rq_defaults.DEFAULT_FAILURE_TTL + + if rq_job.ended_at and delta: + expiry_date = rq_job.ended_at + timedelta(seconds=delta) + return expiry_date.replace(tzinfo=timezone.utc) + + return None + + @extend_schema_field(serializers.CharField(allow_blank=True)) + def get_message(self, rq_job: RQJob) -> str: + rq_job_status = rq_job.get_status() + message = '' + + if RQJobStatus.STARTED == rq_job_status: + message = rq_job.meta.get(RQJobMetaField.STATUS, '') + elif RQJobStatus.FAILED == rq_job_status: + message = rq_job.meta.get( + RQJobMetaField.FORMATTED_EXCEPTION, + parse_exception_message(str(rq_job.exc_info or "Unknown error")), + ) + + return message + + def to_representation(self, rq_job: RQJob) -> Dict[str, Any]: + representation = super().to_representation(rq_job) + + if representation["status"] == RQJobStatus.DEFERRED: + representation["status"] = RQJobStatus.QUEUED + + if representation["status"] == RQJobStatus.FINISHED: + if result_url := rq_job.meta.get(RQJobMetaField.RESULT_URL): + representation["result_url"] = result_url + + if rq_job.parsed_rq_id.action == RequestAction.IMPORT and rq_job.parsed_rq_id.subresource == RequestSubresource.BACKUP: + representation["result_id"] = rq_job.return_value() + + return representation diff --git a/cvat/apps/engine/task.py b/cvat/apps/engine/task.py index ca6eb2b51fd..c44f01e1f35 100644 --- a/cvat/apps/engine/task.py +++ b/cvat/apps/engine/task.py @@ -19,6 +19,7 @@ from django.conf import settings from django.db import transaction +from django.http import HttpRequest from datetime import datetime, timezone from pathlib import Path @@ -26,7 +27,10 @@ from cvat.apps.engine.log import ServerLogManager from 
cvat.apps.engine.media_extractors import (MEDIA_TYPES, ImageListReader, Mpeg4ChunkWriter, Mpeg4CompressedChunkWriter, ValidateDimension, ZipChunkWriter, ZipCompressedChunkWriter, get_mime, sort) -from cvat.apps.engine.utils import av_scan_paths,get_rq_job_meta, define_dependent_job, get_rq_lock_by_user, preload_images +from cvat.apps.engine.utils import ( + av_scan_paths,get_rq_job_meta, define_dependent_job, get_rq_lock_by_user, preload_images +) +from cvat.apps.engine.rq_job_handler import RQIdManager from cvat.utils.http import make_requests_session, PROXIES_FOR_UNTRUSTED_URLS from utils.dataset_manifest import ImageManifestManager, VideoManifestManager, is_manifest from utils.dataset_manifest.core import VideoManifestValidator, is_dataset_manifest @@ -37,21 +41,28 @@ ############################# Low Level server API -def create(db_task, data, request): - """Schedule the task""" +def create( + db_task: models.Task, + data: models.Data, + request: HttpRequest, +) -> str: + """Schedule a background job to create a task and return that job's identifier""" q = django_rq.get_queue(settings.CVAT_QUEUES.IMPORT_DATA.value) user_id = request.user.id + rq_id = RQIdManager.build('create', 'task', db_task.pk) with get_rq_lock_by_user(q, user_id): q.enqueue_call( func=_create_thread, args=(db_task.pk, data), - job_id=f"create:task.id{db_task.pk}", + job_id=rq_id, meta=get_rq_job_meta(request=request, db_obj=db_task), depends_on=define_dependent_job(q, user_id), failure_ttl=settings.IMPORT_CACHE_FAILED_TTL.total_seconds(), ) + return rq_id + ############################# Internal implementation for server API JobFileMapping = List[List[str]] diff --git a/cvat/apps/engine/tests/test_rest_api.py b/cvat/apps/engine/tests/test_rest_api.py index d4a7d602f56..388e6e1e86f 100644 --- a/cvat/apps/engine/tests/test_rest_api.py +++ b/cvat/apps/engine/tests/test_rest_api.py @@ -3039,7 +3039,7 @@ def test_can_remove_export_cache_automatically_after_successful_export(self): task_id = self.tasks[0]["id"] user = self.admin - with mock.patch('cvat.apps.engine.backup.TASK_CACHE_TTL', new=timedelta(hours=10)): + with mock.patch('cvat.apps.dataset_manager.views.TASK_CACHE_TTL', new=timedelta(hours=10)): response = self._run_api_v2_tasks_id_export(task_id, user) self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED) diff --git a/cvat/apps/engine/urls.py b/cvat/apps/engine/urls.py index 9ce74e91453..1755197ebcd 100644 --- a/cvat/apps/engine/urls.py +++ b/cvat/apps/engine/urls.py @@ -24,6 +24,7 @@ router.register('cloudstorages', views.CloudStorageViewSet) router.register('assets', views.AssetsViewSet) router.register('guides', views.AnnotationGuidesViewSet) +router.register('requests', views.RequestViewSet, basename="request") urlpatterns = [ # Entry point for a client diff --git a/cvat/apps/engine/utils.py b/cvat/apps/engine/utils.py index daa7e617a2f..0219bcb0468 100644 --- a/cvat/apps/engine/utils.py +++ b/cvat/apps/engine/utils.py @@ -19,6 +19,7 @@ import logging import platform +from attr.converters import to_bool from datumaro.util.os_util import walk from rq.job import Job, Dependency from django_rq.queues import DjangoRQ @@ -202,7 +203,12 @@ def get_rq_lock_by_user(queue: DjangoRQ, user_id: int) -> Union[Lock, nullcontex return queue.connection.lock(f'{queue.name}-lock-{user_id}', timeout=30) return nullcontext() -def get_rq_job_meta(request, db_obj): +def get_rq_job_meta( + request: HttpRequest, + db_obj: Any, + *, + result_url: Optional[str] = None, +): # to prevent circular import from 
cvat.apps.webhooks.signals import project_id, organization_id from cvat.apps.events.handlers import task_id, job_id, organization_slug @@ -213,7 +219,7 @@ def get_rq_job_meta(request, db_obj): tid = task_id(db_obj) jid = job_id(db_obj) - return { + meta = { 'user': { 'id': getattr(request.user, "id", None), 'username': getattr(request.user, "username", None), @@ -230,6 +236,12 @@ def get_rq_job_meta(request, db_obj): 'job_id': jid, } + + if result_url: + meta['result_url'] = result_url + + return meta + def reverse(viewname, *, args=None, kwargs=None, query_params: Optional[Dict[str, str]] = None, request: Optional[HttpRequest] = None, @@ -267,15 +279,6 @@ def get_list_view_name(model): 'model_name': model._meta.object_name.lower() } -def get_import_rq_id( - resource_type: str, - resource_id: int, - subresource_type: str, - user: str, -) -> str: - # import:---by- - return f"import:{resource_type}-{resource_id}-{subresource_type}-by-{user}" - def import_resource_with_clean_up_after( func: Union[Callable[[str, int, int], int], Callable[[str, int, str, bool], None]], filename: str, @@ -406,3 +409,6 @@ def directory_tree(path, max_depth=None) -> str: for file in files: tree += f"{indent}-{file}\n" return tree + +def is_dataset_export(request: HttpRequest) -> bool: + return to_bool(request.query_params.get('save_images', False)) diff --git a/cvat/apps/engine/view_utils.py b/cvat/apps/engine/view_utils.py index 901c6244782..2acb8bac780 100644 --- a/cvat/apps/engine/view_utils.py +++ b/cvat/apps/engine/view_utils.py @@ -9,18 +9,14 @@ from django.db.models.query import QuerySet from django.http.request import HttpRequest from django.http.response import HttpResponse -from django.shortcuts import get_object_or_404 from rest_framework.decorators import action -from rest_framework.exceptions import PermissionDenied from rest_framework.response import Response from rest_framework.serializers import Serializer from rest_framework.viewsets import GenericViewSet from drf_spectacular.utils import extend_schema from cvat.apps.engine.mixins import UploadMixin -from cvat.apps.engine.models import CloudStorage as CloudStorageModel from cvat.apps.engine.parsers import TusUploadParser -from cvat.apps.engine.permissions import CloudStoragePermission def make_paginated_response( @@ -76,21 +72,6 @@ def list_action(serializer_class: Type[Serializer], **kwargs): return action(**params) -def get_cloud_storage_for_import_or_export( - storage_id: int, *, request, is_default: bool = False -) -> CloudStorageModel: - perm = CloudStoragePermission.create_scope_view(None, storage_id=storage_id, request=request) - result = perm.check_access() - if not result.allow: - if is_default: - # In this case, the user did not specify the location explicitly - error_message = "A cloud storage is selected as the default location. 
" - else: - error_message = "" - error_message += "You don't have access to this cloud storage" - raise PermissionDenied(error_message) - - return get_object_or_404(CloudStorageModel, pk=storage_id) def tus_chunk_action(*, detail: bool, suffix_base: str): def decorator(f): diff --git a/cvat/apps/engine/views.py b/cvat/apps/engine/views.py index b5e4d2cc784..880c78c0089 100644 --- a/cvat/apps/engine/views.py +++ b/cvat/apps/engine/views.py @@ -5,13 +5,16 @@ import os import os.path as osp +import functools from PIL import Image from types import SimpleNamespace -from typing import Optional, Any, Dict, List, cast, Callable, Mapping +from typing import Optional, Any, Dict, List, cast, Callable, Mapping, Iterable import traceback import textwrap +from collections import namedtuple from copy import copy from datetime import datetime +from redis.exceptions import ConnectionError as RedisConnectionError from tempfile import NamedTemporaryFile from textwrap import dedent @@ -24,6 +27,9 @@ from django.db.models.query import Prefetch from django.http import HttpResponse, HttpRequest, HttpResponseNotFound, HttpResponseBadRequest from django.utils import timezone +from django.utils.decorators import method_decorator +from django.views.decorators.cache import never_cache +from django_rq.queues import DjangoRQ from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import ( @@ -40,13 +46,16 @@ from rest_framework.response import Response from rest_framework.settings import api_settings +from rq.job import Job as RQJob, JobStatus as RQJobStatus + import cvat.apps.dataset_manager as dm import cvat.apps.dataset_manager.views # pylint: disable=unused-import -from cvat.apps.engine.cloud_provider import db_storage_to_storage_instance, import_resource_from_cloud_storage, export_resource_to_cloud_storage -from cvat.apps.events.handlers import handle_dataset_export, handle_dataset_import +from cvat.apps.engine.cloud_provider import db_storage_to_storage_instance, import_resource_from_cloud_storage +from cvat.apps.events.handlers import handle_dataset_import from cvat.apps.dataset_manager.bindings import CvatImportError from cvat.apps.dataset_manager.serializers import DatasetFormatsSerializer from cvat.apps.engine.frame_provider import FrameProvider +from cvat.apps.engine.filters import NonModelSimpleFilter, NonModelOrderingFilter, NonModelJsonLogicFilter from cvat.apps.engine.media_extractors import get_mime from cvat.apps.engine.models import ( ClientFile, Job, JobType, Label, SegmentType, Task, Project, Issue, Data, @@ -65,19 +74,21 @@ AssetReadSerializer, AssetWriteSerializer, IssueWriteSerializer, CommentReadSerializer, CommentWriteSerializer, CloudStorageWriteSerializer, CloudStorageReadSerializer, DatasetFileSerializer, - ProjectFileSerializer, TaskFileSerializer, RqIdSerializer, CloudStorageContentSerializer) -from cvat.apps.engine.view_utils import get_cloud_storage_for_import_or_export + ProjectFileSerializer, TaskFileSerializer, RqIdSerializer, CloudStorageContentSerializer, + RequestSerializer, RequestStatus, RequestAction, RequestSubresource, +) +from cvat.apps.engine.permissions import get_cloud_storage_for_import_or_export from utils.dataset_manifest import ImageManifestManager from cvat.apps.engine.utils import ( av_scan_paths, process_failed_job, - parse_exception_message, get_rq_job_meta, get_import_rq_id, + parse_exception_message, get_rq_job_meta, import_resource_with_clean_up_after, sendfile, define_dependent_job, get_rq_lock_by_user, - build_annotations_file_name, ) +from 
cvat.apps.engine.rq_job_handler import RQIdManager, is_rq_job_owner, RQJobMetaField from cvat.apps.engine import backup from cvat.apps.engine.mixins import ( - PartialUpdateModelMixin, UploadMixin, AnnotationMixin, SerializeMixin, CsrfWorkaroundMixin + PartialUpdateModelMixin, UploadMixin, DatasetMixin, BackupMixin, CsrfWorkaroundMixin ) from cvat.apps.engine.location import get_location_configuration, StorageType @@ -247,7 +258,7 @@ def csrf_workaround_is_needed_for_export(query_params: Mapping[str, str]) -> boo ) class ProjectViewSet(viewsets.GenericViewSet, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.CreateModelMixin, mixins.DestroyModelMixin, - PartialUpdateModelMixin, UploadMixin, AnnotationMixin, SerializeMixin, CsrfWorkaroundMixin + PartialUpdateModelMixin, UploadMixin, DatasetMixin, BackupMixin, CsrfWorkaroundMixin ): queryset = models.Project.objects.select_related( 'assignee', 'owner', 'target_storage', 'source_storage', 'annotation_guide', @@ -262,7 +273,9 @@ class ProjectViewSet(viewsets.GenericViewSet, mixins.ListModelMixin, ordering = "-id" lookup_fields = {'owner': 'owner__username', 'assignee': 'assignee__username'} iam_organization_field = 'organization' - IMPORT_RQ_ID_TEMPLATE = get_import_rq_id('project', {}, 'dataset', {}) + IMPORT_RQ_ID_TEMPLATE = RQIdManager.build( + 'import', 'project', {}, subresource='dataset' + ) def get_serializer_class(self): if self.request.method in SAFE_METHODS: @@ -288,6 +301,9 @@ def perform_create(self, serializer, **kwargs): # Required for the extra summary information added in the queryset serializer.instance = self.get_queryset().get(pk=serializer.instance.pk) + def get_export_callback(self, save_images: bool) -> Callable: + return dm.views.export_project_as_dataset if save_images else dm.views.export_project_annotations + @extend_schema(methods=['GET'], summary='Export a project as a dataset / Check dataset import status', description=textwrap.dedent(""" To check the status of the process of importing a project dataset from a file: @@ -296,6 +312,14 @@ def perform_create(self, serializer, **kwargs): Make sure to include this parameter as a query parameter in your subsequent GET /api/projects/id/dataset requests to track the status of the dataset import. Also you should specify action parameter: action=import_status. + + Deprecation warning: + Utilizing this endpoint to export project dataset in + a specific format will be deprecated in one of the next releases. 
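The replacement, listed next, is a two-step asynchronous flow: the POST initiates the export and returns an rq_id, which is then tracked through the requests API. A minimal client-side sketch of the first step (host, credentials, format name and project id are illustrative assumptions, not part of this patch):

```python
# Hedged sketch: start a project dataset export via the new endpoint.
# The host, credentials and project id (1) are assumptions for illustration.
import requests

session = requests.Session()
session.auth = ("user", "password")  # any auth scheme CVAT supports

resp = session.post(
    "http://localhost:8080/api/projects/1/dataset/export",
    params={"format": "CVAT for images 1.1", "save_images": True},
)
resp.raise_for_status()  # expect 202 with an RqId payload
rq_id = resp.json()["rq_id"]
```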
+ Consider using new API: + - POST /api/projects//dataset/export/?save_images=True to initiate export process + - GET /api/requests/ to check process status, + where rq_id is request id returned on initializing request """), parameters=[ OpenApiParameter('format', description='Desired output format name\n' @@ -312,10 +336,11 @@ def perform_create(self, serializer, **kwargs): location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in project to import dataset', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), OpenApiParameter('rq_id', description='rq id', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False), ], + # deprecated=True, FUTURE-TODO: uncomment when new API for result downloading will be implemented responses={ '200': OpenApiResponse(OpenApiTypes.BINARY, description='Download of file started'), '201': OpenApiResponse(description='Output file is ready for downloading'), @@ -340,7 +365,7 @@ def perform_create(self, serializer, **kwargs): location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in the project to import annotations', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), OpenApiParameter('filename', description='Dataset file name', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False), ], @@ -375,18 +400,20 @@ def dataset(self, request, pk): queue = django_rq.get_queue(settings.CVAT_QUEUES.IMPORT_DATA.value) rq_id = request.query_params.get('rq_id') if not rq_id: - return Response('The rq_id param should be specified in the query parameters', status=status.HTTP_400_BAD_REQUEST) - - # check that the user has access to the current rq_job - # We should not return any status of job including "404 not found" for user that has no access for this rq_job - - if self.IMPORT_RQ_ID_TEMPLATE.format(pk, request.user) != rq_id: - return Response(status=status.HTTP_403_FORBIDDEN) + return Response( + 'The rq_id param should be specified in the query parameters', + status=status.HTTP_400_BAD_REQUEST, + ) rq_job = queue.fetch_job(rq_id) + if rq_job is None: return Response(status=status.HTTP_404_NOT_FOUND) - elif rq_job.is_finished: + # check that the user has access to the current rq_job + elif not is_rq_job_owner(rq_job, request.user.id): + return Response(status=status.HTTP_403_FORBIDDEN) + + if rq_job.is_finished: rq_job.delete() return Response(status=status.HTTP_201_CREATED) elif rq_job.is_failed: @@ -394,7 +421,7 @@ def dataset(self, request, pk): return Response( data=str(exc_info), - status=status.HTTP_500_INTERNAL_SERVER_ERROR + status=status.HTTP_500_INTERNAL_SERVER_ERROR, ) else: return Response( @@ -402,15 +429,10 @@ def dataset(self, request, pk): settings.CVAT_QUEUES.IMPORT_DATA.value, rq_id, ), - status=status.HTTP_202_ACCEPTED + status=status.HTTP_202_ACCEPTED, ) else: - return self.export_annotations( - request=request, - db_obj=self._object, - export_func=_export_annotations, - callback=dm.views.export_project_as_dataset - ) + return self.export_dataset_v1(request=request, save_images=True) @tus_chunk_action(detail=True, suffix_base="dataset") def append_dataset_chunk(self, request, pk, file_id): @@ -459,7 +481,16 @@ def upload_finished(self, request): return 
Response(data='Unknown upload was finished', status=status.HTTP_400_BAD_REQUEST) - @extend_schema(summary='Get project annotations', + @extend_schema(summary='Get project annotations or export them as a dataset', + description=textwrap.dedent("""\ + Deprecation warning: + + Using this endpoint to initiate export of annotations as a dataset or to check export status is deprecated. + Consider using new API: + - POST /api/projects//dataset/export?save_images=False to initiate exporting process + - GET /api/requests/ to check export status, + where rq_id is request id returned on initializing request' + """), parameters=[ OpenApiParameter('format', description='Desired output format name\n' 'You can get the list of supported formats at:\n/server/annotation/formats', @@ -475,7 +506,7 @@ def upload_finished(self, request): location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in project to export annotation', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), ], responses={ '200': OpenApiResponse(PolymorphicProxySerializer( @@ -492,16 +523,24 @@ def upload_finished(self, request): serializer_class=LabeledDataSerializer, csrf_workaround_is_needed=csrf_workaround_is_needed_for_export) def annotations(self, request, pk): + # FUTURE-TODO: mark exporting dataset using this endpoint as deprecated when new API for result file downloading will be implemented self._object = self.get_object() # force call of check_object_permissions() - return self.export_annotations( + return self.export_dataset_v1( request=request, - db_obj=self._object, - export_func=_export_annotations, - callback=dm.views.export_project_annotations, + save_images=False, get_data=dm.task.get_job_data, ) @extend_schema(summary='Back up a project', + description=textwrap.dedent("""\ + Deprecation warning: + + This endpoint will be deprecated in one of the next releases. 
+ Consider using new API: + - POST /api/projects//backup/export to initiate backup process + - GET /api/requests/ to check process status, + where rq_id is request id returned on initializing request + """), parameters=[ OpenApiParameter('action', location=OpenApiParameter.QUERY, description='Used to start downloading process after backup file had been created', @@ -515,7 +554,7 @@ def annotations(self, request, pk): location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in project to export backup', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), ], responses={ '200': OpenApiResponse(description='Download of file started'), @@ -525,7 +564,8 @@ def annotations(self, request, pk): @action(methods=['GET'], detail=True, url_path='backup', csrf_workaround_is_needed=csrf_workaround_is_needed_for_backup) def export_backup(self, request, pk=None): - return self.serialize(request, backup.export) + # FUTURE-TODO: mark this endpoint as deprecated when new API for result file downloading will be implemented + return self.export_backup_v1(request) @extend_schema(methods=['POST'], summary='Recreate a project from a backup', description=textwrap.dedent(""" @@ -569,7 +609,7 @@ def export_backup(self, request, pk=None): serializer_class=None, parser_classes=_UPLOAD_PARSER_CLASSES) def import_backup(self, request, pk=None): - return self.deserialize(request, backup.import_project) + return self.import_backup_v1(request, backup.import_project) @tus_chunk_action(detail=False, suffix_base="backup") def append_backup_chunk(self, request, file_id): @@ -777,7 +817,7 @@ def __call__(self, request, start, stop, db_data): class TaskViewSet(viewsets.GenericViewSet, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.CreateModelMixin, mixins.DestroyModelMixin, - PartialUpdateModelMixin, UploadMixin, AnnotationMixin, SerializeMixin, CsrfWorkaroundMixin + PartialUpdateModelMixin, UploadMixin, DatasetMixin, BackupMixin, CsrfWorkaroundMixin ): queryset = Task.objects.select_related( 'data', 'assignee', 'owner', @@ -808,7 +848,9 @@ class TaskViewSet(viewsets.GenericViewSet, mixins.ListModelMixin, ordering_fields = list(filter_fields) ordering = "-id" iam_organization_field = 'organization' - IMPORT_RQ_ID_TEMPLATE = get_import_rq_id('task', {}, 'annotations', {}) + IMPORT_RQ_ID_TEMPLATE = RQIdManager.build( + 'import', 'task', {}, subresource='annotations' + ) def get_serializer_class(self): if self.request.method in SAFE_METHODS: @@ -864,13 +906,21 @@ def get_queryset(self): serializer_class=None, parser_classes=_UPLOAD_PARSER_CLASSES) def import_backup(self, request, pk=None): - return self.deserialize(request, backup.import_task) + return self.import_backup_v1(request, backup.import_task) @tus_chunk_action(detail=False, suffix_base="backup") def append_backup_chunk(self, request, file_id): return self.append_tus_chunk(request, file_id) @extend_schema(summary='Back up a task', + description=textwrap.dedent("""\ + Deprecation warning: + This endpoint will be deprecated in one of the next releases. 
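Backups follow the same pattern as datasets: the endpoints listed next return an rq_id, and progress is then tracked uniformly through the requests API. A hedged polling sketch (the status values come from RequestStatus and the field names from RequestSerializer earlier in this diff; host and credentials are assumptions):

```python
# Hedged sketch: poll GET /api/requests/<rq_id> until the job settles.
import time

import requests

session = requests.Session()
session.auth = ("user", "password")  # assumption

def wait_for_request(rq_id: str, interval: float = 3.0) -> dict:
    """Return the serialized request once it is finished or failed."""
    while True:
        resp = session.get(f"http://localhost:8080/api/requests/{rq_id}")
        resp.raise_for_status()
        request_info = resp.json()
        # "status", "progress" and "message" are RequestSerializer fields
        if request_info["status"] in ("finished", "failed"):
            return request_info
        time.sleep(interval)
```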
+ Consider using new API: + - POST /api/tasks//backup/export to initiate backup process + - GET /api/requests/ to check process status, + where rq_id is request id returned on initializing request + """), parameters=[ OpenApiParameter('action', location=OpenApiParameter.QUERY, description='Used to start downloading process after backup file had been created', type=OpenApiTypes.STR, required=False, enum=['download']), OpenApiParameter('filename', description='Backup file name', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False), OpenApiParameter('location', description='Where need to save downloaded backup', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, enum=Location.list()), OpenApiParameter('cloud_storage_id', description='Storage id', location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in the task to export backup', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), ], responses={ '200': OpenApiResponse(description='Download of file started'), @@ -895,12 +945,13 @@ def append_backup_chunk(self, request, file_id): @action(methods=['GET'], detail=True, url_path='backup', csrf_workaround_is_needed=csrf_workaround_is_needed_for_backup) def export_backup(self, request, pk=None): + # FUTURE-TODO: mark this endpoint as deprecated when new API for result file downloading will be implemented if self.get_object().data is None: return Response( data='Backup of a task without data is not allowed', status=status.HTTP_400_BAD_REQUEST ) - return self.serialize(request, backup.export) + return self.export_backup_v1(request) @transaction.atomic def perform_update(self, serializer): @@ -1087,9 +1138,11 @@ def _handle_upload_data(request): data['stop_frame'] = None # Need to process task data when the transaction is committed - task.create(self._object, data, request) + rq_id = task.create(self._object, data, request) + rq_id_serializer = RqIdSerializer(data={'rq_id': rq_id}) + rq_id_serializer.is_valid(raise_exception=True) - return Response(serializer.data, status=status.HTTP_202_ACCEPTED) + return Response(rq_id_serializer.data, status=status.HTTP_202_ACCEPTED) @transaction.atomic def _handle_upload_backup(request): @@ -1171,7 +1224,7 @@ def _handle_upload_backup(request): https://docs.cvat.ai/docs/manual/advanced/dataset_manifest/ After all data is sent, the operation status can be retrieved via - the /status endpoint. + the `GET /api/requests/`, where **rq_id** is request ID returned for this request. Once data is attached to a task, it cannot be detached or replaced. """.format_map( @@ -1188,7 +1241,18 @@ def _handle_upload_backup(request): description='Finishes data upload. 
Can be combined with Upload-Start header to create task data with one request'), ], responses={ - '202': OpenApiResponse(description=''), + '202': OpenApiResponse( + response=PolymorphicProxySerializer( + component_name='DataResponse', + # FUTURE-FIXME: endpoint should return RqIdSerializer or OpenApiTypes.NONE + # but SDK generated from a schema with nullable RqIdSerializer + # throws an error when tried to convert empty response to a specific type + serializers=[RqIdSerializer, OpenApiTypes.BINARY], + resource_type_field_name=None + ), + + description='Request to attach a data to a task has been accepted' + ), }) @extend_schema(methods=['GET'], summary='Get data of a task', @@ -1245,10 +1309,26 @@ def append_data_chunk(self, request, pk, file_id): self._object = self.get_object() return self.append_tus_chunk(request, file_id) - @extend_schema(methods=['GET'], summary='Get task annotations', + def get_export_callback(self, save_images: bool) -> Callable: + return dm.views.export_task_as_dataset if save_images else dm.views.export_task_annotations + + # TODO: mark this endpoint as deprecated when new endpoint for downloading results will be implemented + @extend_schema(methods=['GET'], summary='Get task annotations or export them as a dataset in a specific format', + description=textwrap.dedent("""\ + Deprecation warning: + + Utilizing this endpoint to export annotations as a dataset in + a specific format will be deprecated in one of the next releases. + + Consider using new API: + - POST /api/tasks//dataset/export?save_images=False to initiate export process + - GET /api/requests/ to check process status, + where rq_id is request id returned on initializing request + """), parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, - description="Desired output format name\nYou can get the list of supported formats at:\n/server/annotation/formats"), + description="Desired output format name\nYou can get the list of supported formats at:\n/server/annotation/formats", + ), OpenApiParameter('filename', description='Desired output file name', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False), OpenApiParameter('action', location=OpenApiParameter.QUERY, @@ -1261,7 +1341,7 @@ def append_data_chunk(self, request, pk, file_id): location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in the task to export annotation', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), ], responses={ '200': OpenApiResponse(PolymorphicProxySerializer( @@ -1315,7 +1395,7 @@ def append_data_chunk(self, request, pk, file_id): location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in task to import annotations', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), OpenApiParameter('filename', description='Annotation file name', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False), ], @@ -1349,16 +1429,14 @@ def annotations(self, request, pk): self._object = self.get_object() # force call of check_object_permissions() if request.method == 'GET': if self._object.data: - return self.export_annotations( + return self.export_dataset_v1( request=request, - db_obj=self._object, - 
export_func=_export_annotations, - callback=dm.views.export_task_annotations, + save_images=False, get_data=dm.task.get_task_data, ) else: - return Response(data="Exporting annotations from a task without data is not allowed", - status=status.HTTP_400_BAD_REQUEST) + return HttpResponseBadRequest("Exporting annotations from a task without data is not allowed") + elif request.method == 'POST' or request.method == 'OPTIONS': # NOTE: initialization process of annotations import format_name = request.query_params.get('format', '') @@ -1373,11 +1451,9 @@ def annotations(self, request, pk): format_name = request.query_params.get('format', '') if format_name: # NOTE: continue process of import annotations - use_settings = to_bool(request.query_params.get('use_default_location', True)) conv_mask_to_poly = to_bool(request.query_params.get('conv_mask_to_poly', True)) - obj = self._object if use_settings else request.query_params location_conf = get_location_configuration( - obj=obj, use_settings=use_settings, field_name=StorageType.SOURCE + db_instance=self._object, query_params=request.query_params, field_name=StorageType.SOURCE ) return _import_annotations( request=request, @@ -1414,23 +1490,30 @@ def append_annotations_chunk(self, request, pk, file_id): self._object = self.get_object() return self.append_tus_chunk(request, file_id) + ### --- DEPRECATED METHOD --- ### @extend_schema( summary='Get the creation status of a task', responses={ '200': RqStatusSerializer, - }) + }, + deprecated=True, + description="This method is deprecated and will be removed in one of the next releases. " + "To check status of task creation, use new common API " + "for managing background operations: GET /api/requests/?action=create&task_id=", + ) @action(detail=True, methods=['GET'], serializer_class=RqStatusSerializer) def status(self, request, pk): self.get_object() # force call of check_object_permissions() response = self._get_rq_response( queue=settings.CVAT_QUEUES.IMPORT_DATA.value, - job_id=f"create:task.id{pk}" + job_id=RQIdManager.build('create', 'task', pk) ) serializer = RqStatusSerializer(data=response) serializer.is_valid(raise_exception=True) - return Response(serializer.data) + return Response(serializer.data, headers={'Deprecation': 'true'}) + ### --- DEPRECATED METHOD--- ### @staticmethod def _get_rq_response(queue, job_id): queue = django_rq.get_queue(queue) @@ -1497,6 +1580,17 @@ def metadata(self, request, pk): return Response(serializer.data) @extend_schema(summary='Export task as a dataset in a specific format', + description=textwrap.dedent("""\ + Deprecation warning: + + Utilizing this endpoint to export task dataset in + a specific format will be deprecated in one of the next releases. 
+ + Consider using new API: + - POST /api/tasks//dataset/export?save_images=True to initiate export process + - GET /api/requests/ to check process status, + where rq_id is request id returned on initializing request + """), parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, description='Desired output format name\nYou can get the list of supported formats at:\n/server/annotation/formats', @@ -1508,7 +1602,7 @@ def metadata(self, request, pk): type=OpenApiTypes.STR, required=False, enum=['download']), OpenApiParameter('use_default_location', description='Use the location that was configured in task to export annotations', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), OpenApiParameter('location', description='Where need to save downloaded dataset', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, enum=Location.list()), @@ -1521,21 +1615,21 @@ def metadata(self, request, pk): '202': OpenApiResponse(description='Exporting has been started'), '400': OpenApiResponse(description='Exporting without data is not allowed'), '405': OpenApiResponse(description='Format is not available'), - }) + }, + ) @action(detail=True, methods=['GET'], serializer_class=None, url_path='dataset', csrf_workaround_is_needed=csrf_workaround_is_needed_for_export) def dataset_export(self, request, pk): + # FUTURE-TODO: mark this endpoint as deprecated when new API for result file downloading will be implemented self._object = self.get_object() # force call of check_object_permissions() if self._object.data: - return self.export_annotations( + return self.export_dataset_v1( request=request, - db_obj=self._object, - export_func=_export_annotations, - callback=dm.views.export_task_as_dataset) - else: - return Response(data="Exporting a dataset from a task without data is not allowed", - status=status.HTTP_400_BAD_REQUEST) + save_images=True + ) + + return HttpResponseBadRequest("Exporting a dataset from a task without data is not allowed") @extend_schema(summary='Get a preview image for a task', responses={ @@ -1598,7 +1692,7 @@ def preview(self, request, pk): ) class JobViewSet(viewsets.GenericViewSet, mixins.ListModelMixin, mixins.CreateModelMixin, mixins.RetrieveModelMixin, PartialUpdateModelMixin, mixins.DestroyModelMixin, - UploadMixin, AnnotationMixin, CsrfWorkaroundMixin + UploadMixin, DatasetMixin, CsrfWorkaroundMixin ): queryset = Job.objects.select_related('assignee', 'segment__task__data', 'segment__task__project', 'segment__task__annotation_guide', 'segment__task__project__annotation_guide', @@ -1622,7 +1716,9 @@ class JobViewSet(viewsets.GenericViewSet, mixins.ListModelMixin, mixins.CreateMo 'project_name': 'segment__task__project__name', 'assignee': 'assignee__username' } - IMPORT_RQ_ID_TEMPLATE = get_import_rq_id('job', {}, 'annotations', {}) + IMPORT_RQ_ID_TEMPLATE = RQIdManager.build( + 'import', 'job', {}, subresource='annotations' + ) def get_queryset(self): queryset = super().get_queryset() @@ -1681,10 +1777,20 @@ def upload_finished(self, request): status=status.HTTP_400_BAD_REQUEST) @extend_schema(methods=['GET'], - summary="Get job annotations", + summary="Get job annotations or export job annotations as a dataset in a specific format", description=textwrap.dedent("""\ If format is specified, a ZIP archive will be returned. Otherwise, the annotations will be returned as a JSON document. 
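For the plain-JSON case just described, a hedged sketch (job id, host and credentials are assumptions):

```python
# Hedged sketch: without "format" the endpoint returns annotations as JSON.
import requests

session = requests.Session()
session.auth = ("user", "password")  # assumption

annotations = session.get("http://localhost:8080/api/jobs/7/annotations").json()
print(len(annotations.get("shapes", [])))  # LabeledDataSerializer payload
```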
+ + Deprecation warning: + + Utilizing this endpoint to export annotations as a dataset in + a specific format will be deprecated in one of the next releases. + + Consider using new API: + - POST /api/jobs//dataset/export?save_images=False to initiate export process + - GET /api/requests/ to check process status, + where rq_id is request id returned on initializing request """), parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, @@ -1702,7 +1808,7 @@ def upload_finished(self, request): location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in the task to export annotation', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), ], responses={ '200': OpenApiResponse(PolymorphicProxySerializer( @@ -1731,7 +1837,7 @@ def upload_finished(self, request): location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in the task to import annotation', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), OpenApiParameter('filename', description='Annotation file name', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False), ], @@ -1760,7 +1866,7 @@ def upload_finished(self, request): location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False), OpenApiParameter('use_default_location', description='Use the location that was configured in the task to import annotation', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), OpenApiParameter('filename', description='Annotation file name', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False), OpenApiParameter('rq_id', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, @@ -1795,11 +1901,10 @@ def upload_finished(self, request): def annotations(self, request, pk): self._object = self.get_object() # force call of check_object_permissions() if request.method == 'GET': - return self.export_annotations( + # FUTURE-TODO: mark as deprecated using this endpoint to export annotations when new API for result file downloading will be implemented + return self.export_dataset_v1( request=request, - db_obj=self._object.segment.task, - export_func=_export_annotations, - callback=dm.views.export_job_annotations, + save_images=False, get_data=dm.task.get_job_data, ) @@ -1816,11 +1921,9 @@ def annotations(self, request, pk): elif request.method == 'PUT': format_name = request.query_params.get('format', '') if format_name: - use_settings = to_bool(request.query_params.get('use_default_location', True)) conv_mask_to_poly = to_bool(request.query_params.get('conv_mask_to_poly', True)) - obj = self._object.segment.task if use_settings else request.query_params location_conf = get_location_configuration( - obj=obj, use_settings=use_settings, field_name=StorageType.SOURCE + db_instance=self._object, query_params=request.query_params, field_name=StorageType.SOURCE ) return _import_annotations( request=request, @@ -1863,6 +1966,14 @@ def append_annotations_chunk(self, request, pk, file_id): @extend_schema(summary='Export job as a dataset in a specific format', + description=textwrap.dedent("""\ + Deprecation warning: + This endpoint will be deprecated in one of the next releases. 
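Whichever resource initiated the work, a finished request exposes its output through RequestSerializer: result_url for exports and result_id for backup imports (see to_representation earlier in this diff). A hedged sketch, reusing session and wait_for_request from the polling sketch above:

```python
# Hedged sketch: consume the result of a finished background request.
rq_id = "..."  # returned when the export or import was initiated (assumption)

request_info = wait_for_request(rq_id)  # helper from the polling sketch

if request_info["status"] == "failed":
    raise RuntimeError(request_info["message"])

if request_info.get("result_url"):
    # Exports: download the prepared archive.
    with session.get(request_info["result_url"], stream=True) as resp:
        resp.raise_for_status()
        with open("export.zip", "wb") as f:
            for chunk in resp.iter_content(chunk_size=1 << 20):
                f.write(chunk)
elif request_info.get("result_id") is not None:
    # Backup imports: id of the task/project created from the backup.
    print("created object id:", request_info["result_id"])
```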
+ Consider using new API: + - POST /api/jobs//dataset/export?save_images=True to initiate export process + - GET /api/requests/ to check process status, + where rq_id is request id returned on initializing request + """), parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, description='Desired output format name\nYou can get the list of supported formats at:\n/server/annotation/formats', @@ -1874,7 +1985,7 @@ def append_annotations_chunk(self, request, pk, file_id): type=OpenApiTypes.STR, required=False, enum=['download']), OpenApiParameter('use_default_location', description='Use the location that was configured in the task to export dataset', location=OpenApiParameter.QUERY, type=OpenApiTypes.BOOL, required=False, - default=True), + default=True, deprecated=True), OpenApiParameter('location', description='Where need to save downloaded dataset', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, enum=Location.list()), @@ -1886,18 +1997,18 @@ def append_annotations_chunk(self, request, pk, file_id): '201': OpenApiResponse(description='Output file is ready for downloading'), '202': OpenApiResponse(description='Exporting has been started'), '405': OpenApiResponse(description='Format is not available'), - }) + }, + ) @action(detail=True, methods=['GET'], serializer_class=None, url_path='dataset', csrf_workaround_is_needed=csrf_workaround_is_needed_for_export) def dataset_export(self, request, pk): + # FUTURE-TODO: mark this endpoint as deprecated when new API for result file downloading will be implemented self._object = self.get_object() # force call of check_object_permissions() - return self.export_annotations( - request=request, - db_obj=self._object.segment.task, - export_func=_export_annotations, - callback=dm.views.export_job_as_dataset - ) + return self.export_dataset_v1(request=request, save_images=True) + + def get_export_callback(self, save_images: bool) -> Callable: + return dm.views.export_job_as_dataset if save_images else dm.views.export_job_annotations @extend_schema(summary='Get data of a job', parameters=[ @@ -2822,7 +2933,7 @@ def perform_destroy(self, instance): instance.delete() def rq_exception_handler(rq_job, exc_type, exc_value, tb): - rq_job.meta["formatted_exception"] = "".join( + rq_job.meta[RQJobMetaField.FORMATTED_EXCEPTION] = "".join( traceback.format_exception_only(exc_type, exc_value)) rq_job.save_meta() @@ -2842,12 +2953,12 @@ def _import_annotations(request, rq_id_template, rq_func, db_obj, format_name, rq_id = request.query_params.get('rq_id') rq_id_should_be_checked = bool(rq_id) if not rq_id: - rq_id = rq_id_template.format(db_obj.pk, request.user) + rq_id = rq_id_template.format(db_obj.pk) queue = django_rq.get_queue(settings.CVAT_QUEUES.IMPORT_DATA.value) rq_job = queue.fetch_job(rq_id) - if rq_id_should_be_checked and rq_id_template.format(db_obj.pk, request.user) != rq_id: + if rq_job and rq_id_should_be_checked and not is_rq_job_owner(rq_job, request.user.id): return Response(status=status.HTTP_403_FORBIDDEN) if rq_job and request.method == 'POST': @@ -2921,7 +3032,7 @@ def _import_annotations(request, rq_id_template, rq_func, db_obj, format_name, failure_ttl=settings.IMPORT_CACHE_FAILED_TTL.total_seconds() ) - handle_dataset_import(db_obj, format_name=format_name, cloud_storage=db_storage) + handle_dataset_import(db_obj, format_name=format_name, cloud_storage_id=db_storage.id if db_storage else None) serializer = RqIdSerializer(data={'rq_id': rq_id}) serializer.is_valid(raise_exception=True) @@ -2944,202 
+3055,6 @@ def _import_annotations(request, rq_id_template, rq_func, db_obj, format_name, return Response(status=status.HTTP_202_ACCEPTED) -def _export_annotations( - db_instance: models.Project | models.Task | models.Job, - rq_id: str, - request: HttpRequest, - format_name: str, - action: str, - callback: Callable[[int, Optional[str], Optional[str]], str], - filename: Optional[str], - location_conf: Dict[str, Any] -): - if action not in {"", "download"}: - raise serializers.ValidationError( - "Unexpected action specified for the request") - - format_desc = {f.DISPLAY_NAME: f - for f in dm.views.get_export_formats()}.get(format_name) - if format_desc is None: - raise serializers.ValidationError( - "Unknown format specified for the request") - elif not format_desc.ENABLED: - return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED) - - instance_update_time = timezone.localtime(db_instance.updated_date) - if isinstance(db_instance, Project): - tasks_update = list(map(lambda db_task: timezone.localtime(db_task.updated_date), db_instance.tasks.all())) - instance_update_time = max(tasks_update + [instance_update_time]) - - queue = django_rq.get_queue(settings.CVAT_QUEUES.EXPORT_DATA.value) - rq_job = queue.fetch_job(rq_id) - - if rq_job: - rq_request = rq_job.meta.get('request', None) - request_time = rq_request.get('timestamp', None) if rq_request else None - if request_time is None or request_time < instance_update_time: - # The result is outdated, need to restart the export. - # Cancel the current job. - # The new attempt will be made after the last existing job. - # In the case the server is configured with ONE_RUNNING_JOB_IN_QUEUE_PER_USER - # we have to enqueue dependent jobs after canceling one. - rq_job.cancel(enqueue_dependents=settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER) - rq_job.delete() - rq_job = None - - location = location_conf.get('location') - if location not in Location.list(): - raise serializers.ValidationError( - f"Unexpected location {location} specified for the request" - ) - - cache_ttl = dm.views.get_export_cache_ttl(db_instance) - - instance_timestamp = datetime.strftime(instance_update_time, "%Y_%m_%d_%H_%M_%S") - is_annotation_file = rq_id.startswith('export:annotations') - - REQUEST_TIMEOUT = 60 - - if action == "download": - if location != Location.LOCAL: - return Response('Action "download" is only supported for a local export location', - status=status.HTTP_400_BAD_REQUEST) - - if not rq_job or not rq_job.is_finished: - return Response('Export has not finished', status=status.HTTP_400_BAD_REQUEST) - - file_path = rq_job.return_value() - - if not file_path: - return Response( - 'A result for exporting job was not found for finished RQ job', - status=status.HTTP_500_INTERNAL_SERVER_ERROR - ) - - with dm.util.get_export_cache_lock(file_path, ttl=REQUEST_TIMEOUT): - if not osp.exists(file_path): - return Response( - "The exported file has expired, please retry exporting", - status=status.HTTP_404_NOT_FOUND - ) - - filename = filename or \ - build_annotations_file_name( - class_name=db_instance.__class__.__name__, - identifier=db_instance.name if isinstance(db_instance, (Task, Project)) else db_instance.id, - timestamp=instance_timestamp, - format_name=format_name, - is_annotation_file=is_annotation_file, - extension=osp.splitext(file_path)[1] - ) - - rq_job.delete() - return sendfile(request, file_path, attachment=True, attachment_filename=filename) - - - if rq_job: - if rq_job.is_finished: - if location == Location.CLOUD_STORAGE: - rq_job.delete() - return 
Response(status=status.HTTP_200_OK) - - elif location == Location.LOCAL: - file_path = rq_job.return_value() - - if not file_path: - return Response( - 'A result for exporting job was not found for finished RQ job', - status=status.HTTP_500_INTERNAL_SERVER_ERROR - ) - - with dm.util.get_export_cache_lock(file_path, ttl=REQUEST_TIMEOUT): - if osp.exists(file_path): - # Update last update time to prolong the export lifetime - # as the last access time is not available on every filesystem - os.utime(file_path, None) - - return Response(status=status.HTTP_201_CREATED) - else: - # Cancel and reenqueue the job. - # The new attempt will be made after the last existing job. - # In the case the server is configured with ONE_RUNNING_JOB_IN_QUEUE_PER_USER - # we have to enqueue dependent jobs after canceling one. - rq_job.cancel(enqueue_dependents=settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER) - rq_job.delete() - else: - raise NotImplementedError(f"Export to {location} location is not implemented yet") - elif rq_job.is_failed: - exc_info = rq_job.meta.get('formatted_exception', str(rq_job.exc_info)) - rq_job.delete() - return Response(exc_info, status=status.HTTP_500_INTERNAL_SERVER_ERROR) - elif rq_job.is_deferred and rq_id not in queue.deferred_job_registry.get_job_ids(): - # Sometimes jobs can depend on outdated jobs in the deferred jobs registry. - # They can be fetched by their specific ids, but are not listed by get_job_ids(). - # Supposedly, this can happen because of the server restarts - # (potentially, because the redis used for the queue is inmemory). - # Another potential reason is canceling without enqueueing dependents. - # Such dependencies are never removed or finished, - # as there is no TTL for deferred jobs, - # so the current job can be blocked indefinitely. - - # Cancel the current job and then reenqueue it, considering the current situation. - # The new attempt will be made after the last existing job. - # In the case the server is configured with ONE_RUNNING_JOB_IN_QUEUE_PER_USER - # we have to enqueue dependent jobs after canceling one. 
- rq_job.cancel(enqueue_dependents=settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER) - rq_job.delete() - else: - return Response(status=status.HTTP_202_ACCEPTED) - try: - if request.scheme: - server_address = request.scheme + '://' - server_address += request.get_host() - except Exception: - server_address = None - - user_id = request.user.id - - func = callback if location == Location.LOCAL else export_resource_to_cloud_storage - func_args = (db_instance.id, format_name, server_address) - - if location == Location.CLOUD_STORAGE: - try: - storage_id = location_conf['storage_id'] - except KeyError: - raise serializers.ValidationError( - 'Cloud storage location was selected as the destination,' - ' but cloud storage id was not specified') - - db_storage = get_cloud_storage_for_import_or_export( - storage_id=storage_id, request=request, - is_default=location_conf['is_default']) - filename_pattern = build_annotations_file_name( - class_name=db_instance.__class__.__name__, - identifier=db_instance.name if isinstance(db_instance, (Task, Project)) else db_instance.id, - timestamp=instance_timestamp, - format_name=format_name, - is_annotation_file=is_annotation_file, - ) - func_args = (db_storage, filename, filename_pattern, callback) + func_args - else: - db_storage = None - - with get_rq_lock_by_user(queue, user_id): - queue.enqueue_call( - func=func, - args=func_args, - job_id=rq_id, - meta=get_rq_job_meta(request=request, db_obj=db_instance), - depends_on=define_dependent_job(queue, user_id, rq_id=rq_id), - result_ttl=cache_ttl.total_seconds(), - failure_ttl=cache_ttl.total_seconds(), - ) - - handle_dataset_export(db_instance, - format_name=format_name, cloud_storage=db_storage, save_images=not is_annotation_file) - - return Response(status=status.HTTP_202_ACCEPTED) - def _import_project_dataset(request, rq_id_template, rq_func, db_obj, format_name, filename=None, conv_mask_to_poly=True, location_conf=None): format_desc = {f.DISPLAY_NAME: f for f in dm.views.get_import_formats()}.get(format_name) @@ -3149,7 +3064,7 @@ def _import_project_dataset(request, rq_id_template, rq_func, db_obj, format_nam elif not format_desc.ENABLED: return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED) - rq_id = rq_id_template.format(db_obj.pk, request.user) + rq_id = rq_id_template.format(db_obj.pk) queue = django_rq.get_queue(settings.CVAT_QUEUES.IMPORT_DATA.value) rq_job = queue.fetch_job(rq_id) @@ -3218,7 +3133,7 @@ def _import_project_dataset(request, rq_id_template, rq_func, db_obj, format_nam failure_ttl=settings.IMPORT_CACHE_FAILED_TTL.total_seconds() ) - handle_dataset_import(db_obj, format_name=format_name, cloud_storage=db_storage) + handle_dataset_import(db_obj, format_name=format_name, cloud_storage_id=db_storage.id if db_storage else None) else: return Response(status=status.HTTP_409_CONFLICT, data='Import job already exists') @@ -3226,3 +3141,212 @@ def _import_project_dataset(request, rq_id_template, rq_func, db_obj, format_nam serializer.is_valid(raise_exception=True) return Response(serializer.data, status=status.HTTP_202_ACCEPTED) + +@extend_schema(tags=['requests']) +@extend_schema_view( + list=extend_schema( + summary='List requests', + responses={ + '200': RequestSerializer(many=True), + } + ), + retrieve=extend_schema( + summary='Get request details', + responses={ + '200': RequestSerializer, + } + ), +) +class RequestViewSet(viewsets.GenericViewSet): + # FUTURE-TODO: support re-enqueue action + # FUTURE-TODO: implement endpoint to download result file + SUPPORTED_QUEUES = ( + 
settings.CVAT_QUEUES.IMPORT_DATA.value, + settings.CVAT_QUEUES.EXPORT_DATA.value, + ) + + serializer_class = RequestSerializer + iam_organization_field = None + filter_backends = [ + NonModelSimpleFilter, + NonModelJsonLogicFilter, + NonModelOrderingFilter, + ] + + ordering_fields = ['created_date', 'status', 'action'] + ordering = '-created_date' + + filter_fields = [ + # RQ job fields + 'status', + # derivatives fields (from meta) + 'project_id', + 'task_id', + 'job_id', + # derivatives fields (from parsed rq_id) + 'action', + 'subresource', + 'format', + ] + + simple_filters = filter_fields + ['org'] + + lookup_fields = { + 'created_date': 'created_at', + 'action': 'parsed_rq_id.action', + 'subresource': 'parsed_rq_id.subresource', + 'status': 'get_status', + 'project_id': 'meta.project_id', + 'task_id': 'meta.task_id', + 'job_id': 'meta.job_id', + 'org': 'meta.org_slug', + } + + SchemaField = namedtuple('SchemaField', ['type', 'choices'], defaults=(None,)) + + simple_filters_schema = { + 'status': SchemaField('string', RequestStatus.choices), + 'project_id': SchemaField('integer'), + 'task_id': SchemaField('integer'), + 'job_id': SchemaField('integer'), + 'action': SchemaField('string', RequestAction.choices), + 'subresource': SchemaField('string', RequestSubresource.choices), + 'format': SchemaField('string'), + 'org': SchemaField('string'), + } + + def get_queryset(self): + return None + + @property + def queues(self) -> Iterable[DjangoRQ]: + return (django_rq.get_queue(queue_name) for queue_name in self.SUPPORTED_QUEUES) + + def _get_rq_jobs_from_queue(self, queue: DjangoRQ, user_id: int) -> List[RQJob]: + job_ids = set(queue.get_job_ids() + + queue.started_job_registry.get_job_ids() + + queue.finished_job_registry.get_job_ids() + + queue.failed_job_registry.get_job_ids() + + queue.deferred_job_registry.get_job_ids() + ) + jobs = [] + for job in queue.job_class.fetch_many(job_ids, queue.connection): + if job and is_rq_job_owner(job, user_id): + try: + parsed_rq_id = RQIdManager.parse(job.id) + except Exception: # nosec B112 + continue + job.parsed_rq_id = parsed_rq_id + jobs.append(job) + + return jobs + + + def _get_rq_jobs(self, user_id: int) -> List[RQJob]: + """ + Get all RQ jobs for a specific user and return them as a list of RQJob objects. + + Parameters: + user_id (int): The ID of the user for whom to retrieve jobs. + + Returns: + List[RQJob]: A list of RQJob objects representing all jobs for the specified user. + """ + all_jobs = [] + for queue in self.queues: + jobs = self._get_rq_jobs_from_queue(queue, user_id) + all_jobs.extend(jobs) + + return all_jobs + + def _get_rq_job_by_id(self, rq_id: str) -> Optional[RQJob]: + """ + Get a RQJob by its ID from the queues. + + Args: + rq_id (str): The ID of the RQJob to retrieve. + + Returns: + Optional[RQJob]: The retrieved RQJob, or None if not found. 
+ """ + try: + parsed_rq_id = RQIdManager.parse(rq_id) + except Exception: + return None + + job: Optional[RQJob] = None + + for queue in self.queues: + job = queue.fetch_job(rq_id) + if job: + job.parsed_rq_id = parsed_rq_id + break + + return job + + def _handle_redis_exceptions(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except RedisConnectionError as ex: + msg = 'Redis service is not available' + slogger.glob.exception(f'{msg}: {str(ex)}') + return Response(msg, status=status.HTTP_503_SERVICE_UNAVAILABLE) + return wrapper + + @method_decorator(never_cache) + @_handle_redis_exceptions + def retrieve(self, request: HttpRequest, pk: str): + job = self._get_rq_job_by_id(pk) + + if not job: + return HttpResponseNotFound(f"There is no request with specified id: {pk}") + + self.check_object_permissions(request, job) + + serializer = self.get_serializer(job, context={'request': request}) + return Response(data=serializer.data, status=status.HTTP_200_OK) + + @method_decorator(never_cache) + @_handle_redis_exceptions + def list(self, request: HttpRequest): + user_id = request.user.id + user_jobs = self._get_rq_jobs(user_id) + + filtered_jobs = self.filter_queryset(user_jobs) + + page = self.paginate_queryset(filtered_jobs) + if page is not None: + serializer = self.get_serializer(page, many=True, context={'request': request}) + return self.get_paginated_response(serializer.data) + + serializer = self.get_serializer(filtered_jobs, many=True, context={'request': request}) + return Response(data=serializer.data, status=status.HTTP_200_OK) + + @extend_schema( + summary='Cancel request', + request=None, + responses={ + '200': OpenApiResponse(description='The request has been cancelled'), + }, + ) + @method_decorator(never_cache) + @action(detail=True, methods=['POST'], url_path='cancel') + @_handle_redis_exceptions + def cancel(self, request: HttpRequest, pk: str): + rq_job = self._get_rq_job_by_id(pk) + + if not rq_job: + return HttpResponseNotFound(f"There is no request with specified id: {pk!r}") + + self.check_object_permissions(request, rq_job) + + if rq_job.get_status(refresh=False) not in {RQJobStatus.QUEUED, RQJobStatus.DEFERRED}: + return HttpResponseBadRequest("Only requests that have not yet been started can be cancelled") + + # FUTURE-TODO: race condition is possible here + rq_job.cancel(enqueue_dependents=settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER) + rq_job.delete() + + return Response(status=status.HTTP_200_OK) diff --git a/cvat/apps/events/export.py b/cvat/apps/events/export.py index da248db4d29..9225f114116 100644 --- a/cvat/apps/events/export.py +++ b/cvat/apps/events/export.py @@ -20,6 +20,7 @@ from cvat.apps.dataset_manager.views import log_exception from cvat.apps.engine.log import ServerLogManager from cvat.apps.engine.utils import sendfile +from cvat.apps.engine.rq_job_handler import RQJobMetaField slogger = ServerLogManager(__name__) @@ -152,7 +153,7 @@ def export(request, filter_query, queue_name): if os.path.exists(file_path): return Response(status=status.HTTP_201_CREATED) elif rq_job.is_failed: - exc_info = rq_job.meta.get('formatted_exception', str(rq_job.exc_info)) + exc_info = rq_job.meta.get(RQJobMetaField.FORMATTED_EXCEPTION, str(rq_job.exc_info)) rq_job.delete() return Response(exc_info, status=status.HTTP_500_INTERNAL_SERVER_ERROR) diff --git a/cvat/apps/events/handlers.py b/cvat/apps/events/handlers.py index 23e5ba2d07e..198bcbb45cf 100644 --- a/cvat/apps/events/handlers.py +++ 
b/cvat/apps/events/handlers.py @@ -26,6 +26,7 @@ from cvat.apps.organizations.serializers import (InvitationReadSerializer, MembershipReadSerializer, OrganizationReadSerializer) +from cvat.apps.engine.rq_job_handler import RQJobMetaField from .cache import get_cache from .event import event_scope, record_server_event @@ -86,11 +87,11 @@ def get_user(instance=None): # Try to get user from rq_job if isinstance(instance, rq.job.Job): - return instance.meta.get("user", None) + return instance.meta.get(RQJobMetaField.USER, None) else: rq_job = rq.get_current_job() if rq_job: - return rq_job.meta.get("user", None) + return rq_job.meta.get(RQJobMetaField.USER, None) if isinstance(instance, User): return instance @@ -103,11 +104,11 @@ def get_request(instance=None): return request if isinstance(instance, rq.job.Job): - return instance.meta.get("request", None) + return instance.meta.get(RQJobMetaField.REQUEST, None) else: rq_job = rq.get_current_job() if rq_job: - return rq_job.meta.get("request", None) + return rq_job.meta.get(RQJobMetaField.REQUEST, None) return None @@ -472,13 +473,13 @@ def handle_dataset_io( action: str, *, format_name: str, - cloud_storage: Optional[CloudStorage], + cloud_storage_id: Optional[int], **payload_fields, ) -> None: payload={"format": format_name, **payload_fields} - if cloud_storage: - payload["cloud_storage"] = {"id": cloud_storage.id} + if cloud_storage_id: + payload["cloud_storage"] = {"id": cloud_storage_id} record_server_event( scope=event_scope(action, "dataset"), @@ -498,26 +499,26 @@ def handle_dataset_export( instance: Union[Project, Task, Job], *, format_name: str, - cloud_storage: Optional[CloudStorage], + cloud_storage_id: Optional[int], save_images: bool, ) -> None: handle_dataset_io(instance, "export", - format_name=format_name, cloud_storage=cloud_storage, save_images=save_images) + format_name=format_name, cloud_storage_id=cloud_storage_id, save_images=save_images) def handle_dataset_import( instance: Union[Project, Task, Job], *, format_name: str, - cloud_storage: Optional[CloudStorage], + cloud_storage_id: Optional[int], ) -> None: - handle_dataset_io(instance, "import", format_name=format_name, cloud_storage=cloud_storage) + handle_dataset_io(instance, "import", format_name=format_name, cloud_storage_id=cloud_storage_id) def handle_rq_exception(rq_job, exc_type, exc_value, tb): - oid = rq_job.meta.get("org_id", None) - oslug = rq_job.meta.get("org_slug", None) - pid = rq_job.meta.get("project_id", None) - tid = rq_job.meta.get("task_id", None) - jid = rq_job.meta.get("job_id", None) + oid = rq_job.meta.get(RQJobMetaField.ORG_ID, None) + oslug = rq_job.meta.get(RQJobMetaField.ORG_SLUG, None) + pid = rq_job.meta.get(RQJobMetaField.PROJECT_ID, None) + tid = rq_job.meta.get(RQJobMetaField.TASK_ID, None) + jid = rq_job.meta.get(RQJobMetaField.JOB_ID, None) uid = user_id(rq_job) uname = user_name(rq_job) uemail = user_email(rq_job) diff --git a/cvat/apps/lambda_manager/permissions.py b/cvat/apps/lambda_manager/permissions.py index 75299114298..e1f78ce58c4 100644 --- a/cvat/apps/lambda_manager/permissions.py +++ b/cvat/apps/lambda_manager/permissions.py @@ -19,7 +19,7 @@ class Scopes(StrEnum): @classmethod def create(cls, request, view, obj, iam_context): permissions = [] - if view.basename == 'function' or view.basename == 'request': + if view.basename == 'lambda_function' or view.basename == 'lambda_request': scopes = cls.get_scopes(request, view, obj) for scope in scopes: self = cls.create_base_perm(request, view, scope, iam_context, obj) @@ -42,13 
+42,13 @@ def __init__(self, **kwargs): def get_scopes(request, view, obj): Scopes = __class__.Scopes return [{ - ('function', 'list'): Scopes.LIST, - ('function', 'retrieve'): Scopes.VIEW, - ('function', 'call'): Scopes.CALL_ONLINE, - ('request', 'create'): Scopes.CALL_OFFLINE, - ('request', 'list'): Scopes.LIST_OFFLINE, - ('request', 'retrieve'): Scopes.CALL_OFFLINE, - ('request', 'destroy'): Scopes.CALL_OFFLINE, + ('lambda_function', 'list'): Scopes.LIST, + ('lambda_function', 'retrieve'): Scopes.VIEW, + ('lambda_function', 'call'): Scopes.CALL_ONLINE, + ('lambda_request', 'create'): Scopes.CALL_OFFLINE, + ('lambda_request', 'list'): Scopes.LIST_OFFLINE, + ('lambda_request', 'retrieve'): Scopes.CALL_OFFLINE, + ('lambda_request', 'destroy'): Scopes.CALL_OFFLINE, }.get((view.basename, view.action), None)] def get_resource(self):
diff --git a/cvat/apps/lambda_manager/urls.py b/cvat/apps/lambda_manager/urls.py index 26029582e36..6dae0edaca7 100644 --- a/cvat/apps/lambda_manager/urls.py +++ b/cvat/apps/lambda_manager/urls.py @@ -13,8 +13,8 @@ # POST (like get HTTP method is mapped to list(...)). One way is to implement # own CustomRouter. But it is simpler just patch the router instance here. router.routes[2].mapping.update({'post': 'call'}) -router.register('functions', views.FunctionViewSet, basename='function') -router.register('requests', views.RequestViewSet, basename='request') +router.register('functions', views.FunctionViewSet, basename='lambda_function') +router.register('requests', views.RequestViewSet, basename='lambda_request') # GET /api/lambda/functions - get list of functions # GET /api/lambda/functions/ - get information about the function
diff --git a/cvat/schema.yml b/cvat/schema.yml index 8d3c331e00e..bf25130a5b6 100644 --- a/cvat/schema.yml +++ b/cvat/schema.yml @@ -2018,7 +2018,18 @@ paths: description: | If format is specified, a ZIP archive will be returned. Otherwise, the annotations will be returned as a JSON document. - summary: Get job annotations + + Deprecation warning: + + Utilizing this endpoint to export annotations as a dataset in + a specific format will be deprecated in one of the next releases. + + Consider using new API: + - POST /api/jobs/<id>/dataset/export?save_images=False to initiate the export process + - GET /api/requests/<rq_id> to check the process status, + where rq_id is the request ID returned on initializing the request + summary: Get job annotations or export job annotations as a dataset in a specific + format parameters: - in: query name: action @@ -2066,6 +2077,7 @@ paths: type: boolean default: true description: Use the location that was configured in the task to export annotation + deprecated: true tags: - jobs security: @@ -2134,6 +2146,7 @@ paths: type: boolean default: true description: Use the location that was configured in the task to import annotation + deprecated: true tags: - jobs requestBody: @@ -2215,6 +2228,7 @@ paths: type: boolean default: true description: Use the location that was configured in the task to import annotation + deprecated: true tags: - jobs requestBody: @@ -2405,6 +2419,13 @@ paths: /api/jobs/{id}/dataset: get: operationId: jobs_retrieve_dataset + description: | + Deprecation warning: + This endpoint will be deprecated in one of the next releases. + Consider using new API: + - POST /api/jobs/<id>/dataset/export?save_images=True to initiate the export process + - GET /api/requests/<rq_id> to check the process status, + where rq_id is the request ID returned on initializing the request summary: Export job as a dataset in a specific format parameters: - in: query @@ -2454,6 +2475,7 @@ paths: type: boolean default: true description: Use the location that was configured in the task to export dataset + deprecated: true tags: - jobs security: @@ -2476,6 +2498,73 @@ paths: description: Exporting has been started '405': description: Format is not available + /api/jobs/{id}/dataset/export: + post: + operationId: jobs_create_dataset_export + description: | + The request `POST /api/jobs/<id>/dataset/export` will initialize + a background process to export a dataset. To check the status of the process, + please use `GET /api/requests/<rq_id>`, where **rq_id** is the request ID returned in the response for this endpoint. + summary: Initialize process to export a resource as a dataset in a specific format + parameters: + - in: query + name: cloud_storage_id + schema: + type: integer + description: Storage id + - in: query + name: filename + schema: + type: string + description: Desired output file name + - in: query + name: format + schema: + type: string + description: |- + Desired output format name + You can get the list of supported formats at: + /server/annotation/formats + required: true + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this job. + required: true + - in: query + name: location + schema: + type: string + enum: + - cloud_storage + - local + description: Where to save the downloaded dataset + - in: query + name: save_images + schema: + type: boolean + default: false + description: Include images or not + tags: + - jobs + security: + - sessionAuth: [] + csrfAuth: [] + tokenAuth: [] + - signatureAuth: [] + - basicAuth: [] + responses: + '202': + content: + application/vnd.cvat+json: + schema: + $ref: '#/components/schemas/RqId' + description: Exporting has been started + '405': + description: Format is not available + '409': + description: Exporting is already in progress /api/jobs/{id}/preview: get: operationId: jobs_retrieve_preview @@ -3455,7 +3544,15 @@ paths: /api/projects/{id}/annotations: get: operationId: projects_retrieve_annotations - summary: Get project annotations + description: | + Deprecation warning: + + Using this endpoint to initiate export of annotations as a dataset or to check export status is deprecated. + Consider using new API: + - POST /api/projects/<id>/dataset/export?save_images=False to initiate the export process + - GET /api/requests/<rq_id> to check the export status, + where rq_id is the request ID returned on initializing the request + summary: Get project annotations or export them as a dataset parameters: - in: query name: action @@ -3504,6 +3601,7 @@ paths: type: boolean default: true description: Use the location that was configured in project to export annotation + deprecated: true tags: - projects security: @@ -3530,6 +3628,14 @@ paths: /api/projects/{id}/backup: get: operationId: projects_retrieve_backup + description: | + Deprecation warning: + + This endpoint will be deprecated in one of the next releases. + Consider using new API: + - POST /api/projects/<id>/backup/export to initiate the backup process + - GET /api/requests/<rq_id> to check the process status, + where rq_id is the request ID returned on initializing the request summary: Back up a project parameters: - in: query @@ -3570,6 +3676,7 @@ paths: type: boolean default: true description: Use the location that was configured in project to export backup + deprecated: true tags: - projects security: @@ -3585,6 +3692,59 @@ paths: description: Output backup file is ready for downloading '202': description: Creating a backup file has been started + /api/projects/{id}/backup/export: + post: + operationId: projects_create_backup_export + description: | + The request `POST /api/projects/<id>/backup/export` will initialize + a background process to back up a resource. To check the status of the process, + please use `GET /api/requests/<rq_id>`, where **rq_id** is the request ID returned in the response for this endpoint. + summary: Initiate process to back up a resource + parameters: + - in: query + name: cloud_storage_id + schema: + type: integer + description: Storage id + - in: query + name: filename + schema: + type: string + description: Backup file name + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this project. + required: true + - in: query + name: location + schema: + type: string + enum: + - cloud_storage + - local + description: Where to save the downloaded backup + tags: + - projects + security: + - sessionAuth: [] + csrfAuth: [] + tokenAuth: [] + - signatureAuth: [] + - basicAuth: [] + responses: + '202': + content: + application/vnd.cvat+json: + schema: + $ref: '#/components/schemas/RqId' + description: Creating a backup file has been started + '400': + description: Wrong query parameters were passed + '409': + description: The backup process has already been initiated and is not yet + finished /api/projects/{id}/dataset/: get: operationId: projects_retrieve_dataset @@ -3596,6 +3756,14 @@ paths: Make sure to include this parameter as a query parameter in your subsequent GET /api/projects/id/dataset requests to track the status of the dataset import. Also you should specify action parameter: action=import_status. + + Deprecation warning: + Utilizing this endpoint to export a project dataset in + a specific format will be deprecated in one of the next releases. + Consider using new API: + - POST /api/projects/<id>/dataset/export?save_images=True to initiate the export process + - GET /api/requests/<rq_id> to check the process status, + where rq_id is the request ID returned on initializing the request summary: Export a project as a dataset / Check dataset import status parameters: - in: query @@ -3650,6 +3818,7 @@ paths: type: boolean default: true description: Use the location that was configured in project to import dataset + deprecated: true tags: - projects security: @@ -3720,6 +3889,7 @@ paths: default: true description: Use the location that was configured in the project to import annotations + deprecated: true tags: - projects requestBody: @@ -3747,6 +3917,73 @@ paths: description: Failed to import dataset '405': description: Format is not available
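All of the new export and backup endpoints follow the same two-step contract: the POST starts a background process and answers 202 with an `rq_id`, and `GET /api/requests/<rq_id>` reports progress until the status becomes `finished` or `failed`. A minimal client-side sketch of that flow (the host, credentials, project id and format name are placeholder assumptions, not part of the patch):

```python
# Minimal sketch of the new export workflow; host and credentials are assumptions.
from time import sleep

import requests

HOST = "http://localhost:8080"   # assumed local CVAT instance
AUTH = ("admin", "password")     # assumed basic-auth credentials


def export_project_dataset(project_id: int, export_format: str) -> dict:
    # Step 1: initiate the background export; the server replies 202 with an RqId body.
    response = requests.post(
        f"{HOST}/api/projects/{project_id}/dataset/export",
        params={"format": export_format, "save_images": True},
        auth=AUTH,
    )
    response.raise_for_status()
    rq_id = response.json()["rq_id"]

    # Step 2: poll the common requests API until a terminal status is reached.
    for _ in range(100):
        details = requests.get(f"{HOST}/api/requests/{rq_id}", auth=AUTH).json()
        if details["status"] in ("finished", "failed"):
            return details
        sleep(1)
    raise TimeoutError(f"Export request {rq_id} did not finish")


details = export_project_dataset(1, "CVAT for images 1.1")
assert details["status"] == "finished", details.get("message")
# On success, details["result_url"] points to the produced archive.
```

The same loop works for job and task dataset exports and for backups; only the initiating POST differs.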
+ /api/projects/{id}/dataset/export: + post: + operationId: projects_create_dataset_export + description: | + The request `POST /api/projects/<id>/dataset/export` will initialize + a background process to export a dataset. To check the status of the process, + please use `GET /api/requests/<rq_id>`, where **rq_id** is the request ID returned in the response for this endpoint. + summary: Initialize process to export a resource as a dataset in a specific format + parameters: + - in: query + name: cloud_storage_id + schema: + type: integer + description: Storage id + - in: query + name: filename + schema: + type: string + description: Desired output file name + - in: query + name: format + schema: + type: string + description: |- + Desired output format name + You can get the list of supported formats at: + /server/annotation/formats + required: true + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this project. + required: true + - in: query + name: location + schema: + type: string + enum: + - cloud_storage + - local + description: Where to save the downloaded dataset + - in: query + name: save_images + schema: + type: boolean + default: false + description: Include images or not + tags: + - projects + security: + - sessionAuth: [] + csrfAuth: [] + tokenAuth: [] + - signatureAuth: [] + - basicAuth: [] + responses: + '202': + content: + application/vnd.cvat+json: + schema: + $ref: '#/components/schemas/RqId' + description: Exporting has been started + '405': + description: Format is not available + '409': + description: Exporting is already in progress /api/projects/{id}/preview: get: operationId: projects_retrieve_preview @@ -4281,6 +4518,156 @@ paths: schema: $ref: '#/components/schemas/QualitySettings' description: '' + /api/requests: + get: + operationId: requests_list + summary: List requests + parameters: + - name: action + in: query + description: A simple equality filter for the action field + schema: + type: string + enum: + - create + - import + - export + - name: filter + required: false + in: query + description: |2- + + JSON Logic filter. This filter can be used to perform complex filtering by grouping rules. + + Details about the syntax used can be found at the link: https://jsonlogic.com/ + + Available filter_fields: ['status', 'project_id', 'task_id', 'job_id', 'action', 'subresource', 'format']. + schema: + type: string + - name: format + in: query + description: A simple equality filter for the format field + schema: + type: string + - name: job_id + in: query + description: A simple equality filter for the job_id field + schema: + type: integer + - name: org + in: query + description: A simple equality filter for the org field + schema: + type: string + - name: page + required: false + in: query + description: A page number within the paginated result set. + schema: + type: integer + - name: page_size + required: false + in: query + description: Number of results to return per page. + schema: + type: integer + - name: project_id + in: query + description: A simple equality filter for the project_id field + schema: + type: integer + - name: sort + required: false + in: query + description: 'Which field to use when ordering the results. Available ordering_fields: + [''created_date'', ''status'', ''action'']' + schema: + type: string + - name: status + in: query + description: A simple equality filter for the status field + schema: + type: string + enum: + - queued + - started + - failed + - finished + - name: subresource + in: query + description: A simple equality filter for the subresource field + schema: + type: string + enum: + - annotations + - dataset + - backup + - name: task_id + in: query + description: A simple equality filter for the task_id field + schema: + type: integer + tags: + - requests + security: + - sessionAuth: [] + csrfAuth: [] + tokenAuth: [] + - signatureAuth: [] + - basicAuth: [] + responses: + '200': + content: + application/vnd.cvat+json: + schema: + $ref: '#/components/schemas/PaginatedRequestList' + description: '' + /api/requests/{id}: + get: + operationId: requests_retrieve + summary: Get request details + parameters: + - in: path + name: id + schema: + type: string + required: true + tags: + - requests + security: + - sessionAuth: [] + csrfAuth: [] + tokenAuth: [] + - signatureAuth: [] + - basicAuth: [] + responses: + '200': + content: + application/vnd.cvat+json: + schema: + $ref: '#/components/schemas/Request' + description: '' + /api/requests/{id}/cancel: + post: + operationId: requests_create_cancel + summary: Cancel request + parameters: + - in: path + name: id + schema: + type: string + required: true + tags: + - requests + security: + - sessionAuth: [] + csrfAuth: [] + tokenAuth: [] + - signatureAuth: [] + - basicAuth: [] + responses: + '200': + description: The request has been cancelled /api/schema/: get: operationId: schema_retrieve @@ -4780,7 +5167,17 @@ paths: /api/tasks/{id}/annotations/: get: operationId: tasks_retrieve_annotations - summary: Get task annotations + description: | + Deprecation warning: + + Utilizing this endpoint to export annotations as a dataset in + a specific format will be deprecated in one of the next releases. + + Consider using new API: + - POST /api/tasks/<id>/dataset/export?save_images=False to initiate the export process + - GET /api/requests/<rq_id> to check the process status, + where rq_id is the request ID returned on initializing the request + summary: Get task annotations or export them as a dataset in a specific format parameters: - in: query name: action @@ -4828,6 +5225,7 @@ paths: type: boolean default: true description: Use the location that was configured in the task to export annotation + deprecated: true tags: - tasks security: @@ -4898,6 +5296,7 @@ paths: type: boolean default: true description: Use the location that was configured in task to import annotations + deprecated: true tags: - tasks requestBody: @@ -5044,6 +5443,13 @@ paths:
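Through the generated Python SDK, the same polling appears as `api_client.requests_api.retrieve(rq_id)`, the call that the updated low-level API docs and test utilities later in this patch rely on. A small sketch, assuming a placeholder server URL, credentials, and request ID:

```python
# Sketch of status polling via the SDK's requests API; configuration values are assumptions.
from time import sleep

from cvat_sdk.api_client import ApiClient, Configuration

configuration = Configuration(
    host="http://localhost:8080",  # assumed server URL
    username="admin",              # assumed credentials
    password="password",
)


def wait_for_request(api_client: ApiClient, rq_id: str, attempts: int = 100):
    # Poll GET /api/requests/<rq_id> until the background process ends.
    for _ in range(attempts):
        (request_details, _) = api_client.requests_api.retrieve(rq_id)
        if request_details.status.value in ("finished", "failed"):
            return request_details
        sleep(1)
    raise TimeoutError(f"Request {rq_id} did not reach a terminal status")


with ApiClient(configuration) as api_client:
    request_details = wait_for_request(api_client, "example-rq-id")  # hypothetical id
    assert request_details.status.value == "finished", request_details.message
```

Note that `retrieve` returns a `(data, response)` pair, so the request details are the first element of the tuple.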
/api/tasks/{id}/backup: get: operationId: tasks_retrieve_backup + description: | + Deprecation warning: + This endpoint will be deprecated in one of the next releases. + Consider using new API: + - POST /api/tasks/<id>/backup/export to initiate the backup process + - GET /api/requests/<rq_id> to check the process status, + where rq_id is the request ID returned on initializing the request summary: Back up a task parameters: - in: query @@ -5084,6 +5490,7 @@ paths: type: boolean default: true description: Use the location that was configured in the task to export backup + deprecated: true tags: - tasks security: @@ -5101,6 +5508,59 @@ paths: description: Creating a backup file has been started '400': description: Backup of a task without data is not allowed + /api/tasks/{id}/backup/export: + post: + operationId: tasks_create_backup_export + description: | + The request `POST /api/tasks/<id>/backup/export` will initialize + a background process to back up a resource. To check the status of the process, + please use `GET /api/requests/<rq_id>`, where **rq_id** is the request ID returned in the response for this endpoint. + summary: Initiate process to back up a resource + parameters: + - in: query + name: cloud_storage_id + schema: + type: integer + description: Storage id + - in: query + name: filename + schema: + type: string + description: Backup file name + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this task. + required: true + - in: query + name: location + schema: + type: string + enum: + - cloud_storage + - local + description: Where to save the downloaded backup + tags: + - tasks + security: + - sessionAuth: [] + csrfAuth: [] + tokenAuth: [] + - signatureAuth: [] + - basicAuth: [] + responses: + '202': + content: + application/vnd.cvat+json: + schema: + $ref: '#/components/schemas/RqId' + description: Creating a backup file has been started + '400': + description: Wrong query parameters were passed + '409': + description: The backup process has already been initiated and is not yet + finished /api/tasks/{id}/data/: get: operationId: tasks_retrieve_data @@ -5196,7 +5656,7 @@ paths: https://docs.cvat.ai/docs/manual/advanced/dataset_manifest/ After all data is sent, the operation status can be retrieved via - the /status endpoint. + the `GET /api/requests/<rq_id>` endpoint, where **rq_id** is the request ID returned for this request. Once data is attached to a task, it cannot be detached or replaced. summary: Attach data to a task @@ -5243,7 +5703,11 @@ paths: - basicAuth: [] responses: '202': - description: No response body + content: + application/vnd.cvat+json: + schema: + $ref: '#/components/schemas/DataResponse' + description: Request to attach data to a task has been accepted /api/tasks/{id}/data/meta: get: operationId: tasks_retrieve_data_meta @@ -5303,6 +5767,16 @@ paths: /api/tasks/{id}/dataset: get: operationId: tasks_retrieve_dataset + description: | + Deprecation warning: + + Utilizing this endpoint to export a task dataset in + a specific format will be deprecated in one of the next releases. + + Consider using new API: + - POST /api/tasks/<id>/dataset/export?save_images=True to initiate the export process + - GET /api/requests/<rq_id> to check the process status, + where rq_id is the request ID returned on initializing the request summary: Export task as a dataset in a specific format parameters: - in: query @@ -5352,6 +5826,7 @@ paths: type: boolean default: true description: Use the location that was configured in task to export annotations + deprecated: true tags: - tasks security: @@ -5376,6 +5851,73 @@ paths: description: Exporting without data is not allowed '405': description: Format is not available + /api/tasks/{id}/dataset/export: + post: + operationId: tasks_create_dataset_export + description: | + The request `POST /api/tasks/<id>/dataset/export` will initialize + a background process to export a dataset. To check the status of the process, + please use `GET /api/requests/<rq_id>`, where **rq_id** is the request ID returned in the response for this endpoint. + summary: Initialize process to export a resource as a dataset in a specific format + parameters: + - in: query + name: cloud_storage_id + schema: + type: integer + description: Storage id + - in: query + name: filename + schema: + type: string + description: Desired output file name + - in: query + name: format + schema: + type: string + description: |- + Desired output format name + You can get the list of supported formats at: + /server/annotation/formats + required: true + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this task. + required: true + - in: query + name: location + schema: + type: string + enum: + - cloud_storage + - local + description: Where to save the downloaded dataset + - in: query + name: save_images + schema: + type: boolean + default: false + description: Include images or not + tags: + - tasks + security: + - sessionAuth: [] + csrfAuth: [] + tokenAuth: [] + - signatureAuth: [] + - basicAuth: [] + responses: + '202': + content: + application/vnd.cvat+json: + schema: + $ref: '#/components/schemas/RqId' + description: Exporting has been started + '405': + description: Format is not available + '409': + description: Exporting is already in progress /api/tasks/{id}/preview: get: operationId: tasks_retrieve_preview @@ -5403,6 +5945,9 @@ paths: /api/tasks/{id}/status: get: operationId: tasks_retrieve_status + description: 'This method is deprecated and will be removed in one of the next + releases. To check the status of task creation, use the new common API for managing + background operations: GET /api/requests/?action=create&task_id=<task_id>' summary: Get the creation status of a task parameters: - in: path name: id schema: type: integer required: true tags: - tasks security: - sessionAuth: [] csrfAuth: [] tokenAuth: [] - signatureAuth: [] - basicAuth: [] + deprecated: true responses: '200': content: @@ -6899,6 +7445,11 @@ components: pass the list of file names in the required order.
required: - image_quality + DataResponse: + oneOf: + - $ref: '#/components/schemas/RqId' + - type: string + format: binary DatasetFileRequest: type: object properties: @@ -8412,6 +8963,26 @@ components: type: array items: $ref: '#/components/schemas/QualitySettings' + PaginatedRequestList: + type: object + properties: + count: + type: integer + example: 123 + next: + type: string + nullable: true + format: uri + example: http://api.example.org/accounts/?page=4 + previous: + type: string + nullable: true + format: uri + example: http://api.example.org/accounts/?page=2 + results: + type: array + items: + $ref: '#/components/schemas/Request' PaginatedTaskReadList: type: object properties: @@ -9274,6 +9845,101 @@ components: - password1 - password2 - username + Request: + type: object + properties: + status: + $ref: '#/components/schemas/RequestStatus' + message: + type: string + readOnly: true + id: + type: string + operation: + $ref: '#/components/schemas/RequestDataOperation' + progress: + type: number + format: double + maximum: 1 + minimum: 0 + nullable: true + readOnly: true + created_date: + type: string + format: date-time + started_date: + type: string + format: date-time + nullable: true + finished_date: + type: string + format: date-time + nullable: true + expiry_date: + type: string + format: date-time + nullable: true + readOnly: true + owner: + allOf: + - $ref: '#/components/schemas/UserIdentifiers' + readOnly: true + result_url: + type: string + format: uri + nullable: true + result_id: + type: integer + nullable: true + required: + - created_date + - id + - operation + - status + RequestDataOperation: + type: object + properties: + type: + type: string + target: + $ref: '#/components/schemas/RequestDataOperationTargetEnum' + project_id: + type: integer + nullable: true + task_id: + type: integer + nullable: true + job_id: + type: integer + nullable: true + format: + type: string + nullable: true + required: + - target + - type + RequestDataOperationTargetEnum: + enum: + - project + - task + - job + type: string + description: |- + * `project` - Project + * `task` - Task + * `job` - Job + RequestStatus: + enum: + - queued + - started + - failed + - finished + type: string + description: |- + * `queued` - Queued + * `started` - Started + * `failed` - Failed + * `finished` - Finished RestAuthDetail: type: object properties: @@ -9976,6 +10642,20 @@ components: required: - groups - username + UserIdentifiers: + type: object + properties: + id: + type: integer + readOnly: true + username: + type: string + description: Required. 150 characters or fewer. Letters, digits and @/./+/-/_ + only. 
+ pattern: ^[\w.@+-]+$ + maxLength: 150 + required: + - username WebhookContentType: enum: - application/json
diff --git a/cvat/settings/base.py b/cvat/settings/base.py index 46b6a075b1f..1f4b3592d69 100644 --- a/cvat/settings/base.py +++ b/cvat/settings/base.py @@ -239,7 +239,7 @@ def generate_secret_key(): LOGIN_URL = 'rest_login' LOGIN_REDIRECT_URL = '/' -OBJECTS_NOT_RELATED_WITH_ORG = ['user', 'function', 'request', 'server',] +OBJECTS_NOT_RELATED_WITH_ORG = ['user', 'lambda_function', 'lambda_request', 'server', 'request'] # ORG settings ORG_INVITATION_CONFIRM = 'No' @@ -636,6 +636,7 @@ class CVAT_QUEUES(Enum): 'SortingMethod': 'cvat.apps.engine.models.SortingMethod', 'WebhookType': 'cvat.apps.webhooks.models.WebhookTypeChoice', 'WebhookContentType': 'cvat.apps.webhooks.models.WebhookContentTypeChoice', + 'RequestStatus': 'cvat.apps.engine.serializers.RequestStatus', }, # Coercion of {pk} to {id} is controlled by SCHEMA_COERCE_PATH_PK. Additionally,
diff --git a/site/content/en/docs/api_sdk/sdk/lowlevel-api.md b/site/content/en/docs/api_sdk/sdk/lowlevel-api.md index 9b1ef5b5358..543fca88bd4 100644 --- a/site/content/en/docs/api_sdk/sdk/lowlevel-api.md +++ b/site/content/en/docs/api_sdk/sdk/lowlevel-api.md @@ -79,8 +79,7 @@ with ApiClient(configuration) as api_client: ) # If we pass binary file objects, we need to specify content type. - # For this endpoint, we don't have response data - (_, response) = api_client.tasks_api.create_data(task.id, + (result, response) = api_client.tasks_api.create_data(task.id, data_request=task_data, _content_type="multipart/form-data", @@ -92,11 +91,13 @@ with ApiClient(configuration) as api_client: # Wait till task data is processed for _ in range(100): - (status, _) = api_client.tasks_api.retrieve_status(task.id) - if status.state.value in ['Finished', 'Failed']: + request_details, response = api_client.requests_api.retrieve(result.rq_id) + status, message = request_details.status, request_details.message + + if status.value in {'finished', 'failed'}: break sleep(0.1) - assert status.state.value == 'Finished', status.message + assert status.value == 'finished', message # Update the task object and check the task size (task, _) = api_client.tasks_api.retrieve(task.id) @@ -393,7 +394,7 @@ please specify `_content_type="multipart/form-data"` in the request parameters: Example: ```python -(_, response) = api_client.tasks_api.create_data( +(result, response) = api_client.tasks_api.create_data( id=42, data_request=models.DataRequest( client_files=[
diff --git a/tests/cypress.base.config.js b/tests/cypress.base.config.js index 3276839086f..f5be6b4d64b 100644 --- a/tests/cypress.base.config.js +++ b/tests/cypress.base.config.js @@ -11,6 +11,7 @@ module.exports = { viewportWidth: 1300, viewportHeight: 960, defaultCommandTimeout: 25000, + requestTimeout: 15000, downloadsFolder: 'cypress/fixtures', env: { user: 'admin',
diff --git a/tests/cypress/e2e/actions_objects/case_37_object_make_copy.js b/tests/cypress/e2e/actions_objects/case_37_object_make_copy.js index 057d1e54831..aed63f1be79 100644 --- a/tests/cypress/e2e/actions_objects/case_37_object_make_copy.js +++ b/tests/cypress/e2e/actions_objects/case_37_object_make_copy.js @@ -139,7 +139,7 @@ context('Object make a copy.', () => { const coordY = 400; for (let id = 1; id < countObject; id++) { // Point doesn't have a context menu - cy.get(`#cvat-objects-sidebar-state-item-${id}`).trigger('mouseover'); + cy.get(`#cvat-objects-sidebar-state-item-${id}`).click();
cy.get(`#cvat_canvas_shape_${id}`).should('have.class', 'cvat_canvas_shape_activated'); cy.get(`#cvat_canvas_shape_${id}`).rightclick({ force: true }); cy.get('.cvat-canvas-context-menu').should('be.visible'); diff --git a/tests/cypress/e2e/actions_objects/test_annotations_saving.js b/tests/cypress/e2e/actions_objects/test_annotations_saving.js index e39a0ae5a6c..a1cc5f9a853 100644 --- a/tests/cypress/e2e/actions_objects/test_annotations_saving.js +++ b/tests/cypress/e2e/actions_objects/test_annotations_saving.js @@ -50,7 +50,7 @@ context('Test annotations saving works correctly', () => { expect(interception.response.statusCode).to.equal(201); taskID = interception.response.body.id; }); - cy.wait('@getJobsRequest', { requestTimeout: 10000 }).then((interception) => { + cy.wait('@getJobsRequest').then((interception) => { expect(interception.response.statusCode).to.equal(200); jobID = interception.response.body.results[0].id; diff --git a/tests/cypress/e2e/actions_projects_models/case_103_project_export.js b/tests/cypress/e2e/actions_projects_models/case_103_project_export.js index 9300706cdb8..b5c252eaa36 100644 --- a/tests/cypress/e2e/actions_projects_models/case_103_project_export.js +++ b/tests/cypress/e2e/actions_projects_models/case_103_project_export.js @@ -39,12 +39,15 @@ context('Export project dataset.', { browser: '!firefox' }, () => { function checkCounTasksInXML(projectParams, expectedCount) { cy.exportProject(projectParams); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); cy.unpackZipArchive(`cypress/fixtures/${projectParams.archiveCustomName}.zip`); cy.readFile('cypress/fixtures/annotations.xml').should('exist').then((xml) => { const tasks = Cypress.$(Cypress.$.parseXML(xml)).find('task').find('name'); expect(tasks.length).to.be.eq(expectedCount); }); + cy.goBack(); } before(() => { @@ -95,7 +98,10 @@ context('Export project dataset.', { browser: '!firefox' }, () => { dumpType: 'CVAT for images', }; cy.exportProject(exportAnnotation); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); it('Export project dataset. Dataset.', () => { @@ -107,11 +113,11 @@ context('Export project dataset.', { browser: '!firefox' }, () => { dumpType: 'CVAT for images', }; cy.exportProject(exportDataset); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { + cy.verifyDownload(file); datasetArchiveName = file; - cy.verifyDownload(datasetArchiveName); }); - cy.verifyNotification(); + cy.goBack(); }); it('Export project dataset. Annotation. Rename an archive.', () => { diff --git a/tests/cypress/e2e/actions_projects_models/case_104_project_export_3d.js b/tests/cypress/e2e/actions_projects_models/case_104_project_export_3d.js index fed46444f75..c5ce30071a3 100644 --- a/tests/cypress/e2e/actions_projects_models/case_104_project_export_3d.js +++ b/tests/cypress/e2e/actions_projects_models/case_104_project_export_3d.js @@ -61,7 +61,10 @@ context('Export project dataset with 3D task.', { browser: '!firefox' }, () => { dumpType: 'Kitti Raw Format', }; cy.exportProject(exportAnnotation3d); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); it('Export project with 3D task. 
Dataset.', () => { @@ -73,11 +76,11 @@ context('Export project dataset with 3D task.', { browser: '!firefox' }, () => { dumpType: 'Sly Point Cloud Format', }; cy.exportProject(exportDataset3d); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { datasetArchiveName = file; cy.verifyDownload(datasetArchiveName); }); - cy.verifyNotification(); + cy.goBack(); }); it('Export project with 3D task. Annotation. Rename a archive.', () => { @@ -90,7 +93,10 @@ context('Export project dataset with 3D task.', { browser: '!firefox' }, () => { archiveCustomName: 'export_project_3d_annotation', }; cy.exportProject(exportAnnotations3dRenameArchive); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); // FIXME: Activate after implementation diff --git a/tests/cypress/e2e/actions_projects_models/case_114_backup_restore_project.js b/tests/cypress/e2e/actions_projects_models/case_114_backup_restore_project.js index 120ee85603c..998e2584dd5 100644 --- a/tests/cypress/e2e/actions_projects_models/case_114_backup_restore_project.js +++ b/tests/cypress/e2e/actions_projects_models/case_114_backup_restore_project.js @@ -88,11 +88,11 @@ context('Backup, restore a project.', { browser: '!firefox' }, () => { describe(`Testing "${caseId}"`, () => { it('Export the project.', () => { cy.backupProject(project.name); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { projectBackupArchiveFullName = file; - cy.verifyDownload(projectBackupArchiveFullName); + cy.verifyDownload(file); }); - cy.verifyNotification(); + cy.goBack(); }); it('Remove and restore the project from backup.', () => { @@ -154,11 +154,11 @@ context('Backup, restore a project with a 3D task.', { browser: '!firefox' }, () describe(`Testing "${caseId}"`, () => { it('Export the project.', () => { cy.backupProject(project.name); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { projectBackupArchiveFullName = file; cy.verifyDownload(projectBackupArchiveFullName); }); - cy.verifyNotification(); + cy.goBack(); }); it('Remove and restore the project from backup.', () => { diff --git a/tests/cypress/e2e/actions_projects_models/case_117_backup_restore_project_to_various_storages.js b/tests/cypress/e2e/actions_projects_models/case_117_backup_restore_project_to_various_storages.js index 78dd48ad79d..a2b09bf5d47 100644 --- a/tests/cypress/e2e/actions_projects_models/case_117_backup_restore_project_to_various_storages.js +++ b/tests/cypress/e2e/actions_projects_models/case_117_backup_restore_project_to_various_storages.js @@ -139,7 +139,10 @@ context('Tests source & target storage for backups.', () => { { location: 'Local' }, false, ); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); it('Export project to default minio bucket', () => { diff --git a/tests/cypress/e2e/actions_tasks/case_52_dump_upload_annotation.js b/tests/cypress/e2e/actions_tasks/case_52_dump_upload_annotation.js index 8371ea9534d..2713d9569e7 100644 --- a/tests/cypress/e2e/actions_tasks/case_52_dump_upload_annotation.js +++ b/tests/cypress/e2e/actions_tasks/case_52_dump_upload_annotation.js @@ -76,11 +76,11 @@ context('Dump/Upload annotation.', { browser: '!firefox' }, () => { archiveCustomName: 'task_export_annotation_custome_name', }; cy.exportJob(exportAnnotationRenameArchive); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { 
annotationArchiveNameCustomName = file; cy.verifyDownload(annotationArchiveNameCustomName); }); - cy.verifyNotification(); + cy.goBack(); }); it('Save job. Dump annotation. Remove annotation. Save job.', () => { @@ -90,11 +90,11 @@ context('Dump/Upload annotation.', { browser: '!firefox' }, () => { format: exportFormat, }; cy.exportJob(exportAnnotation); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { annotationArchiveName = file; cy.verifyDownload(annotationArchiveName); }); - cy.verifyNotification(); + cy.goBack(); cy.removeAnnotations(); cy.saveJob('PUT'); cy.get('#cvat_canvas_shape_1').should('not.exist'); diff --git a/tests/cypress/e2e/actions_tasks/issue_2473_import_annotations_frames_dots_in_name.js b/tests/cypress/e2e/actions_tasks/issue_2473_import_annotations_frames_dots_in_name.js index bacdf12cb3d..7398019d390 100644 --- a/tests/cypress/e2e/actions_tasks/issue_2473_import_annotations_frames_dots_in_name.js +++ b/tests/cypress/e2e/actions_tasks/issue_2473_import_annotations_frames_dots_in_name.js @@ -1,4 +1,5 @@ // Copyright (C) 2021-2022 Intel Corporation +// Copyright (C) 2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT @@ -95,9 +96,8 @@ context('Import annotations for frames with dots in name.', { browser: '!firefox cy.get('.cvat-modal-export-job').contains('button', 'OK').click(); cy.get('.cvat-notification-notice-export-job-start').should('be.visible'); cy.closeNotification('.cvat-notification-notice-export-job-start'); - cy.wait('@dumpAnnotations', { timeout: 5000 }).its('response.statusCode').should('equal', 202); - cy.wait('@dumpAnnotations').its('response.statusCode').should('equal', 201); - cy.verifyNotification(); + cy.downloadExport(); + cy.goBack(); cy.removeAnnotations(); cy.saveJob('PUT'); cy.get('#cvat_canvas_shape_1').should('not.exist'); diff --git a/tests/cypress/e2e/actions_tasks/task_rectangles_only.js b/tests/cypress/e2e/actions_tasks/task_rectangles_only.js index 2216f2c4729..350dd9a4c24 100644 --- a/tests/cypress/e2e/actions_tasks/task_rectangles_only.js +++ b/tests/cypress/e2e/actions_tasks/task_rectangles_only.js @@ -84,7 +84,7 @@ context('Creating a task with only bounding boxes', () => { taskID = interception.response.body.id; expect(interception.response.statusCode).to.be.equal(201); cy.intercept(`/api/tasks/${taskID}`).as('getTask'); - cy.wait('@getTask', { timeout: 10000 }); + cy.wait('@getTask'); cy.get('.cvat-job-item').should('exist').and('be.visible'); cy.openJob(); diff --git a/tests/cypress/e2e/actions_tasks2/case_42_change_label_name_via_label_constructor.js b/tests/cypress/e2e/actions_tasks2/case_42_change_label_name_via_label_constructor.js index 88d355af28a..538654a8ee6 100644 --- a/tests/cypress/e2e/actions_tasks2/case_42_change_label_name_via_label_constructor.js +++ b/tests/cypress/e2e/actions_tasks2/case_42_change_label_name_via_label_constructor.js @@ -21,6 +21,7 @@ context('Changing a label name via label constructor.', () => { it('Set empty label name. Press "Continue" button. Label name is not created. 
Label constructor is closed.', () => { cy.get('.cvat-constructor-viewer-new-item').click(); // Open label constructor cy.contains('[type="submit"]', 'Continue').click(); + cy.contains('[type="submit"]', 'Continue').trigger('mouseout'); cy.contains('[role="alert"]', 'Please specify a name').should('exist').and('be.visible'); cy.contains('[type="button"]', 'Cancel').click(); // Close label constructor }); diff --git a/tests/cypress/e2e/actions_tasks2/case_97_export_import_task.js b/tests/cypress/e2e/actions_tasks2/case_97_export_import_task.js index f30ab61f749..fe2426ea575 100644 --- a/tests/cypress/e2e/actions_tasks2/case_97_export_import_task.js +++ b/tests/cypress/e2e/actions_tasks2/case_97_export_import_task.js @@ -90,16 +90,17 @@ context('Export, import an annotation task.', { browser: '!firefox' }, () => { cy.get('.cvat-modal-export-task').contains('button', 'OK').click(); cy.get('.cvat-notification-notice-export-backup-start').should('be.visible'); cy.closeNotification('.cvat-notification-notice-export-backup-start'); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { taskBackupArchiveFullName = file; cy.verifyDownload(taskBackupArchiveFullName); }); - cy.verifyNotification(); + cy.goBack(); cy.deleteTask(taskName); }); it('Import the task. Check id, labels, shape.', () => { cy.intercept({ method: /PATCH|POST/, url: /\/api\/tasks\/backup.*/ }).as('importTask'); + cy.intercept({ method: /GET/, url: /\/api\/requests.*/ }).as('requestStatus'); cy.get('.cvat-create-task-dropdown').click(); cy.get('.cvat-import-task-button').click(); cy.get('input[type=file]').attachFile(taskBackupArchiveFullName, { subjectType: 'drag-n-drop' }); @@ -111,13 +112,8 @@ context('Export, import an annotation task.', { browser: '!firefox' }, () => { cy.wait('@importTask').its('response.statusCode').should('equal', 202); cy.wait('@importTask').its('response.statusCode').should('equal', 201); cy.wait('@importTask').its('response.statusCode').should('equal', 204); - cy.wait('@importTask').its('response.statusCode').should('equal', 202); - cy.wait('@importTask').then((interception) => { - cy.wrap(interception).its('response.statusCode').should('be.oneOf', [201, 202]); - if (interception.response.statusCode === 202) { - cy.wait('@importTask').its('response.statusCode').should('equal', 201); - } - }); + cy.wait('@requestStatus').its('response.statusCode').should('equal', 200); + cy.contains('The task has been restored successfully. 
Click here to open').should('exist').and('be.visible'); cy.closeNotification('.ant-notification-notice-info'); cy.openTask(taskName); diff --git a/tests/cypress/e2e/actions_tasks2/test_default_attribute.js b/tests/cypress/e2e/actions_tasks2/test_default_attribute.js index ecfc6225416..49bd9694609 100644 --- a/tests/cypress/e2e/actions_tasks2/test_default_attribute.js +++ b/tests/cypress/e2e/actions_tasks2/test_default_attribute.js @@ -62,7 +62,7 @@ context('Test default value for an attribute', () => { expect(interception.response.statusCode).to.equal(201); taskID = interception.response.body.id; }); - cy.wait('@getJobsRequest', { requestTimeout: 10000 }).then((interception) => { + cy.wait('@getJobsRequest').then((interception) => { expect(interception.response.statusCode).to.equal(200); jobID = interception.response.body.results[0].id; }); diff --git a/tests/cypress/e2e/actions_tasks3/case_114_use_default_task_storage_for_import_export_annotations.js b/tests/cypress/e2e/actions_tasks3/case_114_use_default_task_storage_for_import_export_annotations.js index 048f4916a8c..8ebded744f2 100644 --- a/tests/cypress/e2e/actions_tasks3/case_114_use_default_task_storage_for_import_export_annotations.js +++ b/tests/cypress/e2e/actions_tasks3/case_114_use_default_task_storage_for_import_export_annotations.js @@ -153,11 +153,11 @@ context('Tests for source and target storage.', () => { targetStorage: project.advancedConfiguration.targetStorage, }; cy.exportJob(exportParams); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { annotationsArchiveName = file; cy.verifyDownload(annotationsArchiveName); }); - cy.verifyNotification(); + cy.goBack(); // remove annotations cy.removeAnnotations(); diff --git a/tests/cypress/e2e/actions_tasks3/case_118_multi_tasks.js b/tests/cypress/e2e/actions_tasks3/case_118_multi_tasks.js index fc0a7d46f80..97394769ec6 100644 --- a/tests/cypress/e2e/actions_tasks3/case_118_multi_tasks.js +++ b/tests/cypress/e2e/actions_tasks3/case_118_multi_tasks.js @@ -31,7 +31,7 @@ context('Create mutli tasks.', () => { cy.get('.cvat-create-multi-tasks-progress', { timeout: 50000 }).should('exist') .contains(`Total: ${videoFiles.videos.length}`); cy.contains('button', 'Cancel'); - cy.get('.cvat-create-multi-tasks-state').should('exist') + cy.get('.cvat-create-multi-tasks-state', { timeout: 50000 }).should('exist') .contains('Finished'); cy.get('.cvat-notification-create-task-success').within(() => { cy.get('.ant-notification-notice-close').click(); diff --git a/tests/cypress/e2e/actions_tasks3/case_47_export_dataset.js b/tests/cypress/e2e/actions_tasks3/case_47_export_dataset.js index 7694ea60d78..08b941608bf 100644 --- a/tests/cypress/e2e/actions_tasks3/case_47_export_dataset.js +++ b/tests/cypress/e2e/actions_tasks3/case_47_export_dataset.js @@ -34,7 +34,10 @@ context('Export task dataset.', () => { format: exportFormat, }; cy.exportJob(exportDataset); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); it('Export a job as dataset with renaming the archive.', () => { @@ -45,7 +48,10 @@ context('Export task dataset.', () => { archiveCustomName: 'job_export_dataset_custome_name', }; cy.exportJob(exportDataset); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); }); }); diff --git a/tests/cypress/e2e/canvas3d_functionality/case_91_canvas3d_functionality_dump_upload_annotation_point_cloud_format.js 
b/tests/cypress/e2e/canvas3d_functionality/case_91_canvas3d_functionality_dump_upload_annotation_point_cloud_format.js index 3ec73154a1a..b93803c56bb 100644 --- a/tests/cypress/e2e/canvas3d_functionality/case_91_canvas3d_functionality_dump_upload_annotation_point_cloud_format.js +++ b/tests/cypress/e2e/canvas3d_functionality/case_91_canvas3d_functionality_dump_upload_annotation_point_cloud_format.js @@ -55,11 +55,11 @@ context('Canvas 3D functionality. Dump/upload annotation. "Point Cloud" format', format: dumpTypePC, }; cy.exportJob(exportAnnotation); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { annotationPCArchiveName = file; cy.verifyDownload(annotationPCArchiveName); }); - cy.verifyNotification(); + cy.goBack(); }); it('Export with "Point Cloud" format. Renaming the archive', () => { @@ -70,11 +70,11 @@ context('Canvas 3D functionality. Dump/upload annotation. "Point Cloud" format', archiveCustomName: 'job_export_3d_annotation_custome_name_pc_format', }; cy.exportJob(exportAnnotationRenameArchive); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { annotationPCArchiveCustomName = file; cy.verifyDownload(annotationPCArchiveCustomName); }); - cy.verifyNotification(); + cy.goBack(); cy.removeAnnotations(); cy.saveJob('PUT'); cy.get('#cvat-objects-sidebar-state-item-1').should('not.exist'); diff --git a/tests/cypress/e2e/canvas3d_functionality/case_92_canvas3d_functionality_dump_upload_annotation_velodyne_points_format.js b/tests/cypress/e2e/canvas3d_functionality/case_92_canvas3d_functionality_dump_upload_annotation_velodyne_points_format.js index 6383a33caf2..a24e5c7296f 100644 --- a/tests/cypress/e2e/canvas3d_functionality/case_92_canvas3d_functionality_dump_upload_annotation_velodyne_points_format.js +++ b/tests/cypress/e2e/canvas3d_functionality/case_92_canvas3d_functionality_dump_upload_annotation_velodyne_points_format.js @@ -55,11 +55,11 @@ context('Canvas 3D functionality. Dump/upload annotation. "Velodyne Points" form format: dumpTypeVC, }; cy.exportJob(exportAnnotation); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { annotationVCArchiveName = file; cy.verifyDownload(annotationVCArchiveName); }); - cy.verifyNotification(); + cy.goBack(); }); it('Export with "Point Cloud" format. Renaming the archive', () => { @@ -70,11 +70,11 @@ context('Canvas 3D functionality. Dump/upload annotation. "Velodyne Points" form archiveCustomName: 'job_export_3d_annotation_custome_name_vc_format', }; cy.exportJob(exportAnnotationRenameArchive); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { annotationVCArchiveNameCustomName = file; cy.verifyDownload(annotationVCArchiveNameCustomName); }); - cy.verifyNotification(); + cy.goBack(); cy.removeAnnotations(); cy.saveJob('PUT'); cy.get('#cvat-objects-sidebar-state-item-1').should('not.exist'); diff --git a/tests/cypress/e2e/canvas3d_functionality/case_93_canvas3d_functionality_export_dataset.js b/tests/cypress/e2e/canvas3d_functionality/case_93_canvas3d_functionality_export_dataset.js index b4fa11339fc..95262800209 100644 --- a/tests/cypress/e2e/canvas3d_functionality/case_93_canvas3d_functionality_export_dataset.js +++ b/tests/cypress/e2e/canvas3d_functionality/case_93_canvas3d_functionality_export_dataset.js @@ -36,7 +36,10 @@ context('Canvas 3D functionality. 
Export as a dataset.', () => { format: dumpTypePC, }; cy.exportJob(exportDatasetPCFormat); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); it('Export as a dataset with "Velodyne Points" format.', () => { @@ -46,7 +49,10 @@ context('Canvas 3D functionality. Export as a dataset.', () => { format: dumpTypeVC, }; cy.exportJob(exportDatasetVCFormat); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); it('Export as a dataset with renaming the archive.', () => { @@ -57,7 +63,10 @@ context('Canvas 3D functionality. Export as a dataset.', () => { archiveCustomName: 'job_export_3d_dataset_custome_name_vc_format', }; cy.exportJob(exportDatasetVCFormatRenameArchive); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); cy.removeAnnotations(); cy.saveJob('PUT'); }); diff --git a/tests/cypress/e2e/features/annotations_actions.js b/tests/cypress/e2e/features/annotations_actions.js index c63c7e86057..43be5cca595 100644 --- a/tests/cypress/e2e/features/annotations_actions.js +++ b/tests/cypress/e2e/features/annotations_actions.js @@ -382,10 +382,9 @@ context('Testing annotations actions workflow', () => { scrollList: true, }; cy.exportJob(exportAnnotation); - cy.getDownloadFileName().then((file) => { + cy.downloadExport().then((file) => { cy.verifyDownload(file); }); - cy.verifyNotification(); }); }); diff --git a/tests/cypress/e2e/features/skeletons_pipeline.js b/tests/cypress/e2e/features/skeletons_pipeline.js index 1caf3c34724..2164f57b984 100644 --- a/tests/cypress/e2e/features/skeletons_pipeline.js +++ b/tests/cypress/e2e/features/skeletons_pipeline.js @@ -83,7 +83,7 @@ context('Manipulations with skeletons', { scrollBehavior: false }, () => { taskID = interception.response.body.id; expect(interception.response.statusCode).to.be.equal(201); cy.intercept(`/api/tasks/${taskID}`).as('getTask'); - cy.wait('@getTask', { timeout: 10000 }); + cy.wait('@getTask'); cy.get('.cvat-job-item').should('exist').and('be.visible'); cy.openJob(); }); diff --git a/tests/cypress/e2e/issues_prs2/issue_1568_cuboid_dump_annotation.js b/tests/cypress/e2e/issues_prs2/issue_1568_cuboid_dump_annotation.js index 259e5d6c8bc..cfab62819db 100644 --- a/tests/cypress/e2e/issues_prs2/issue_1568_cuboid_dump_annotation.js +++ b/tests/cypress/e2e/issues_prs2/issue_1568_cuboid_dump_annotation.js @@ -37,7 +37,10 @@ context('Dump annotation if cuboid created.', () => { format: exportFormat, }; cy.exportJob(exportAnnotation); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); it('Error notification is not exists.', () => { diff --git a/tests/cypress/e2e/issues_prs2/issue_5274_upload_annotations_different_file_formats.js b/tests/cypress/e2e/issues_prs2/issue_5274_upload_annotations_different_file_formats.js index 8243e5f55ee..1ac3d259dca 100644 --- a/tests/cypress/e2e/issues_prs2/issue_5274_upload_annotations_different_file_formats.js +++ b/tests/cypress/e2e/issues_prs2/issue_5274_upload_annotations_different_file_formats.js @@ -39,7 +39,10 @@ context('Upload annotations in different file formats', () => { cy.saveJob('PATCH', 200, 'saveJobDump'); for (const archive of archives) { cy.exportJob(archive); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); cy.unpackZipArchive(`cypress/fixtures/${archive.archiveCustomName}.zip`, 
archive.archiveCustomName); } }); diff --git a/tests/cypress/e2e/issues_prs2/issue_7428_importing_annotation_from_cloud_after_local_import.js b/tests/cypress/e2e/issues_prs2/issue_7428_importing_annotation_from_cloud_after_local_import.js index 297f6a7f1ca..3f4368beb50 100644 --- a/tests/cypress/e2e/issues_prs2/issue_7428_importing_annotation_from_cloud_after_local_import.js +++ b/tests/cypress/e2e/issues_prs2/issue_7428_importing_annotation_from_cloud_after_local_import.js @@ -95,7 +95,10 @@ context('Incorrect cloud storage filename used in subsequent import.', () => { archiveCustomName: annotationsArchiveNameLocal, }; cy.exportTask(exportParams); - cy.waitForDownload(); + cy.downloadExport().then((file) => { + cy.verifyDownload(file); + }); + cy.goBack(); }); it('Export Annotation to the cloud storage', () => { diff --git a/tests/cypress/support/commands.js b/tests/cypress/support/commands.js index 56f997a798c..af2e7949294 100644 --- a/tests/cypress/support/commands.js +++ b/tests/cypress/support/commands.js @@ -1265,6 +1265,33 @@ Cypress.Commands.add('exportJob', ({ cy.closeNotification('.cvat-notification-notice-export-job-start'); }); +Cypress.Commands.add('downloadExport', () => { + cy.verifyNotification(); + cy.get('.cvat-header-requests-button').click(); + cy.get('.cvat-spinner').should('not.exist'); + cy.get('.cvat-requests-list').should('be.visible'); + cy.get('.cvat-requests-card').first().within(() => { + cy.get('.cvat-requests-page-actions-button').click(); + }); + cy.intercept('GET', '**=download').as('download'); + cy.get('.ant-dropdown') + .not('.ant-dropdown-hidden') + .within(() => { + cy.contains('[role="menuitem"]', 'Download').click(); + }); + cy.wait('@download') + .then((download) => { + const filename = download.response.headers['content-disposition'].split(';')[1].split('filename=')[1]; + // need to remove quotes + return filename.substring(1, filename.length - 1); + }); +}); + +Cypress.Commands.add('goBack', () => { + cy.go('back'); + cy.get('.cvat-spinner').should('not.exist'); +}); + Cypress.Commands.add('renameTask', (oldName, newName) => { cy.get('.cvat-task-details-task-name').within(() => { cy.get('[aria-label="edit"]').click(); diff --git a/tests/cypress/support/commands_projects.js b/tests/cypress/support/commands_projects.js index a722af9c397..d9e2088136c 100644 --- a/tests/cypress/support/commands_projects.js +++ b/tests/cypress/support/commands_projects.js @@ -154,10 +154,10 @@ Cypress.Commands.add('importProject', ({ cy.get('input[type="file"]').last().attachFile(archive, { subjectType: 'drag-n-drop' }); cy.get(`[title="${archive}"]`).should('be.visible'); cy.contains('button', 'OK').click(); - cy.get('.cvat-modal-import-dataset-status').should('be.visible'); + cy.get('.cvat-modal-upload-file-status').should('be.visible'); cy.get('.cvat-notification-notice-import-dataset-start').should('be.visible'); cy.closeNotification('.cvat-notification-notice-import-dataset-start'); - cy.get('.cvat-modal-import-dataset-status').should('not.exist'); + cy.get('.cvat-modal-upload-file-status').should('not.exist'); }); Cypress.Commands.add( @@ -197,6 +197,7 @@ Cypress.Commands.add( Cypress.Commands.add('restoreProject', (archiveWithBackup, sourceStorage = null) => { cy.intercept({ method: /PATCH|POST/, url: /\/api\/projects\/backup.*/ }).as('restoreProject'); + cy.intercept({ method: /GET/, url: /\/api\/requests.*/ }).as('requestStatus'); cy.get('.cvat-create-project-dropdown').click(); cy.get('.cvat-import-project-button').click(); @@ -227,16 +228,10 @@ 
Cypress.Commands.add('restoreProject', (archiveWithBackup, sourceStorage = null) cy.wait('@restoreProject').its('response.statusCode').should('equal', 202); cy.wait('@restoreProject').its('response.statusCode').should('equal', 201); cy.wait('@restoreProject').its('response.statusCode').should('equal', 204); - cy.wait('@restoreProject').its('response.statusCode').should('equal', 202); + cy.wait('@requestStatus').its('response.statusCode').should('equal', 200); } else { cy.wait('@restoreProject').its('response.statusCode').should('equal', 202); } - cy.wait('@restoreProject').then((interception) => { - cy.wrap(interception).its('response.statusCode').should('be.oneOf', [201, 202]); - if (interception.response.statusCode === 202) { - cy.wait('@restoreProject').its('response.statusCode').should('equal', 201); - } - }); cy.contains('The project has been restored successfully. Click here to open') .should('exist') @@ -244,27 +239,8 @@ Cypress.Commands.add('restoreProject', (archiveWithBackup, sourceStorage = null) cy.closeNotification('.ant-notification-notice-info'); }); -Cypress.Commands.add('getDownloadFileName', () => { - cy.intercept('GET', '**=download').as('download'); - cy.wait('@download', { requestTimeout: 10000 }).then((download) => { - const filename = download.response.headers['content-disposition'].split(';')[1].split('filename=')[1]; - // need to remove quotes - return filename.substring(1, filename.length - 1); - }); -}); - Cypress.Commands.add('waitForFileUploadToCloudStorage', () => { - cy.intercept('GET', /.*\/(annotations|dataset|backup)/).as('download'); - cy.wait('@download', { requestTimeout: 7000 }).then((interseption) => { - expect(interseption.response.statusCode).to.be.equal(200); - }); - cy.verifyNotification(); -}); - -Cypress.Commands.add('waitForDownload', () => { - cy.getDownloadFileName().then((filename) => { - cy.verifyDownload(filename); - }); + cy.get('.ant-notification-notice-info').contains('uploaded to cloud storage').should('be.visible'); cy.verifyNotification(); }); diff --git a/tests/python/rest_api/test_remote_url.py b/tests/python/rest_api/test_remote_url.py index a3b0f1c388a..cc50a3284d5 100644 --- a/tests/python/rest_api/test_remote_url.py +++ b/tests/python/rest_api/test_remote_url.py @@ -5,6 +5,7 @@ from http import HTTPStatus from time import sleep +from typing import Any, Dict import pytest @@ -20,14 +21,14 @@ def _post_task_remote_data(username, task_id, resources): return post_method(username, f"tasks/{task_id}/data", data) -def _wait_until_task_is_created(username, task_id): - url = f"tasks/{task_id}/status" +def _wait_until_task_is_created(username: str, rq_id: str) -> Dict[str, Any]: + url = f"requests/{rq_id}" for _ in range(100): response = get_method(username, url) - response_json = response.json() - if response_json["state"] == "Finished" or response_json["state"] == "Failed": - return response + request_details = response.json() + if request_details["status"] in ("finished", "failed"): + return request_details sleep(1) raise Exception("Cannot create task") @@ -39,18 +40,22 @@ class TestCreateFromRemote: def _test_can_create(self, user, task_id, resources): response = _post_task_remote_data(user, task_id, resources) assert response.status_code == HTTPStatus.ACCEPTED + response = response.json() + rq_id = response.get("rq_id") + assert rq_id, "The rq_id param was not found in the server response" - response = _wait_until_task_is_created(user, task_id) - response_json = response.json() - assert response_json["state"] == "Finished" + 
diff --git a/tests/python/rest_api/test_tasks.py b/tests/python/rest_api/test_tasks.py
index e7b57673433..d061c84422c 100644
--- a/tests/python/rest_api/test_tasks.py
+++ b/tests/python/rest_api/test_tasks.py
@@ -800,15 +800,15 @@ def _test_cannot_create_task(self, username, spec, data, **kwargs):
             (task, response) = api_client.tasks_api.create(spec, **kwargs)
             assert response.status == HTTPStatus.CREATED
 
-            (_, response) = api_client.tasks_api.create_data(
+            (result, response) = api_client.tasks_api.create_data(
                 task.id, data_request=deepcopy(data), _content_type="application/json", **kwargs
             )
             assert response.status == HTTPStatus.ACCEPTED
 
-            status = wait_until_task_is_created(api_client.tasks_api, task.id)
-            assert status.state.value == "Failed"
+            request_details = wait_until_task_is_created(api_client.requests_api, result.rq_id)
+            assert request_details.status.value == "failed"
 
-            return status
+            return request_details
 
     def test_can_create_task_with_defined_start_and_stop_frames(self):
         task_spec = {
@@ -1678,8 +1678,8 @@ def test_create_task_with_file_pattern(
             assert response.status == HTTPStatus.OK
             assert task.size == task_size
         else:
-            status = self._test_cannot_create_task(self._USERNAME, task_spec, data_spec)
-            assert "No media data found" in status.message
+            rq_job_details = self._test_cannot_create_task(self._USERNAME, task_spec, data_spec)
+            assert "No media data found" in rq_job_details.message
 
     @pytest.mark.with_external_services
     @pytest.mark.parametrize("use_manifest", [True, False])
diff --git a/tests/python/rest_api/utils.py b/tests/python/rest_api/utils.py
index 4e17932efd8..ea1266ac5aa 100644
--- a/tests/python/rest_api/utils.py
+++ b/tests/python/rest_api/utils.py
@@ -141,11 +141,12 @@ def build_exclude_paths_expr(ignore_fields: Iterator[str]) -> List[str]:
     return exclude_expr_parts
 
 
-def wait_until_task_is_created(api: apis.TasksApi, task_id: int) -> models.RqStatus:
+def wait_until_task_is_created(api: apis.RequestsApi, rq_id: str) -> models.Request:
     for _ in range(100):
-        (status, _) = api.retrieve_status(task_id)
-        if status.state.value in ["Finished", "Failed"]:
-            return status
+        (request_details, _) = api.retrieve(rq_id)
+
+        if request_details.status.value in ("finished", "failed"):
+            return request_details
         sleep(1)
     raise Exception("Cannot create task")
 
@@ -192,7 +193,7 @@ def create_task(username, spec, data, content_type="application/json", **kwargs)
         if sent_upload_start:
             last_kwargs["upload_finish"] = True
 
-        (_, response) = api_client.tasks_api.create_data(
+        (result, response) = api_client.tasks_api.create_data(
             task.id,
             data_request=deepcopy(data),
             _content_type=content_type,
@@ -201,8 +202,8 @@ def create_task(username, spec, data, content_type="application/json", **kwargs)
         )
         assert response.status == HTTPStatus.ACCEPTED
 
-        status = wait_until_task_is_created(api_client.tasks_api, task.id)
-        assert status.state.value == "Finished", status.message
+        request_details = wait_until_task_is_created(api_client.requests_api, result.rq_id)
+        assert request_details.status.value == "finished", request_details.message
 
     return task.id, response_.headers.get("X-Request-Id")
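
For SDK-based callers the migration is mechanical: `create_data` now returns a result object carrying `rq_id`, which is handed to `requests_api` instead of polling the task's status endpoint. A rough usage sketch (the `cvat_sdk` import path and connection details are assumptions, and `task_id`/`data` are hypothetical placeholders; `wait_until_task_is_created` is the suite helper shown above):

    from cvat_sdk.api_client import ApiClient, Configuration  # assumed import path

    config = Configuration(host="http://localhost:8080", username="user", password="password")
    with ApiClient(config) as api_client:
        # 202 Accepted: the server queues the job and returns its request id as `rq_id`
        (result, _) = api_client.tasks_api.create_data(
            task_id, data_request=data, _content_type="application/json",
        )
        request_details = wait_until_task_is_created(api_client.requests_api, result.rq_id)
        assert request_details.status.value == "finished", request_details.message
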
diff --git a/tests/python/shared/utils/resource_import_export.py b/tests/python/shared/utils/resource_import_export.py
index 82997749e8a..37983dbd147 100644
--- a/tests/python/shared/utils/resource_import_export.py
+++ b/tests/python/shared/utils/resource_import_export.py
@@ -3,6 +3,7 @@
 from abc import ABC, abstractstaticmethod
 from contextlib import ExitStack
 from http import HTTPStatus
+from time import sleep
 from typing import Any, Dict, Optional, TypeVar
 
 import pytest
@@ -21,14 +22,12 @@ def _make_custom_resource_params(resource: str, obj: str, cloud_storage_id: int)
         "filename": FILENAME_TEMPLATE.format(obj, resource),
         "location": "cloud_storage",
         "cloud_storage_id": cloud_storage_id,
-        "use_default_location": False,
     }
 
 
 def _make_default_resource_params(resource: str, obj: str) -> Dict[str, Any]:
     return {
         "filename": FILENAME_TEMPLATE.format(obj, resource),
-        "use_default_location": True,
     }
 
 
@@ -91,15 +90,62 @@ def _export_resource_to_cloud_storage(
         _expect_status: Optional[int] = None,
         **kwargs,
     ):
-        _expect_status = _expect_status or HTTPStatus.OK
+        _expect_status = _expect_status or HTTPStatus.ACCEPTED
+        sleep_interval = 0.1
+        number_of_checks = 100
+
+        # initialize the export process
         response = get_method(user, f"{obj}/{obj_id}/{resource}", **kwargs)
+        assert response.status_code == _expect_status
+
+        if _expect_status == HTTPStatus.FORBIDDEN:
+            return
+
+        rq_id = json.loads(response.content).get("rq_id")
+        assert rq_id, "The rq_id was not found in the server response"
+
+        for _ in range(number_of_checks):
+            sleep(sleep_interval)
+            # use the new requests API to check the status of the operation
+            response = get_method(user, f"requests/{rq_id}")
+            assert response.status_code == HTTPStatus.OK
+
+            request_details = json.loads(response.content)
+            status = request_details["status"]
+            assert status in {"started", "queued", "finished", "failed"}
+            if status in {"finished", "failed"}:
+                break
+
+    def _import_resource_from_cloud_storage(
+        self, url: str, *, user: str, _expect_status: Optional[int] = None, **kwargs
+    ) -> None:
+        _expect_status = _expect_status or HTTPStatus.ACCEPTED
+
+        response = post_method(user, url, data=None, **kwargs)
         status = response.status_code
-        while status != _expect_status:
-            assert status == HTTPStatus.ACCEPTED
-            response = get_method(user, f"{obj}/{obj_id}/{resource}", **kwargs)
-            status = response.status_code
+        assert status == _expect_status
+        if status == HTTPStatus.FORBIDDEN:
+            return
+
+        rq_id = response.json().get("rq_id")
+        assert rq_id, "The rq_id parameter was not found in the server response"
+
+        number_of_checks = 100
+        sleep_interval = 0.1
+
+        for _ in range(number_of_checks):
+            sleep(sleep_interval)
+            # use the new requests API to check the status of the operation
+            response = get_method(user, f"requests/{rq_id}")
+            assert response.status_code == HTTPStatus.OK
+
+            request_details = json.loads(response.content)
+            status = request_details["status"]
+            assert status in {"started", "queued", "finished", "failed"}
+            if status in {"finished", "failed"}:
+                break
 
     def _import_annotations_from_cloud_storage(
         self,