Skip to content

Commit

Permalink
Add tests for 6781 issue (#7217)
Browse files Browse the repository at this point in the history
This PR contains REST API tests for
#6781
  • Loading branch information
Marishka17 authored Dec 6, 2023
1 parent a9c3425 commit 67b80c2
Show file tree
Hide file tree
Showing 4 changed files with 121 additions and 31 deletions.
6 changes: 3 additions & 3 deletions tests/python/rest_api/test_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -1258,7 +1258,7 @@ def _export_annotations(self, username, jid, **kwargs):

def test_can_export_dataset(self, admin_user: str, jobs_with_shapes: List):
job = jobs_with_shapes[0]
response = self._export_dataset(admin_user, job["id"], format="CVAT for images 1.1")
response = self._export_dataset(admin_user, job["id"])
assert response.data

def test_non_admin_can_export_dataset(self, users, tasks, jobs_with_shapes):
Expand All @@ -1271,7 +1271,7 @@ def test_non_admin_can_export_dataset(self, users, tasks, jobs_with_shapes):
and tasks[job["task_id"]]["organization"] is None
)
)
response = self._export_dataset(username, job_id, format="CVAT for images 1.1")
response = self._export_dataset(username, job_id)
assert response.data

def test_non_admin_can_export_annotations(self, users, tasks, jobs_with_shapes):
Expand All @@ -1284,7 +1284,7 @@ def test_non_admin_can_export_annotations(self, users, tasks, jobs_with_shapes):
and tasks[job["task_id"]]["organization"] is None
)
)
response = self._export_annotations(username, job_id, format="CVAT for images 1.1")
response = self._export_annotations(username, job_id)
assert response.data

@pytest.mark.parametrize("username, jid", [("admin1", 14)])
Expand Down
61 changes: 46 additions & 15 deletions tests/python/rest_api/test_projects.py
Original file line number Diff line number Diff line change
Expand Up @@ -514,16 +514,16 @@ def _check_cvat_for_video_project_annotations_meta(content, values_to_be_checked

@pytest.mark.usefixtures("restore_db_per_function")
class TestImportExportDatasetProject:
def _test_export_project(self, username, pid, format_name):
def _test_export_project(self, username: str, pid: int, **kwargs):
with make_api_client(username) as api_client:
return export_dataset(
api_client.projects_api.retrieve_dataset_endpoint, id=pid, format=format_name
api_client.projects_api.retrieve_dataset_endpoint, id=pid, **kwargs
)

def _export_annotations(self, username, pid, format_name):
def _export_annotations(self, username: str, pid: int, **kwargs):
with make_api_client(username) as api_client:
return export_dataset(
api_client.projects_api.retrieve_annotations_endpoint, id=pid, format=format_name
api_client.projects_api.retrieve_annotations_endpoint, id=pid, **kwargs
)

def _test_import_project(self, username, project_id, format_name, data):
Expand Down Expand Up @@ -557,7 +557,7 @@ def _test_get_annotations_from_task(self, username, task_id):
def test_can_import_dataset_in_org(self, admin_user):
project_id = 4

response = self._test_export_project(admin_user, project_id, "CVAT for images 1.1")
response = self._test_export_project(admin_user, project_id)

tmp_file = io.BytesIO(response.data)
tmp_file.name = "dataset.zip"
Expand All @@ -571,7 +571,7 @@ def test_can_import_dataset_in_org(self, admin_user):
def test_can_export_and_import_dataset_with_skeletons_coco_keypoints(self, admin_user):
project_id = 5

response = self._test_export_project(admin_user, project_id, "COCO Keypoints 1.0")
response = self._test_export_project(admin_user, project_id, format="COCO Keypoints 1.0")

tmp_file = io.BytesIO(response.data)
tmp_file.name = "dataset.zip"
Expand All @@ -584,7 +584,7 @@ def test_can_export_and_import_dataset_with_skeletons_coco_keypoints(self, admin
def test_can_export_and_import_dataset_with_skeletons_cvat_for_images(self, admin_user):
project_id = 5

response = self._test_export_project(admin_user, project_id, "CVAT for images 1.1")
response = self._test_export_project(admin_user, project_id)

tmp_file = io.BytesIO(response.data)
tmp_file.name = "dataset.zip"
Expand All @@ -597,7 +597,7 @@ def test_can_export_and_import_dataset_with_skeletons_cvat_for_images(self, admi
def test_can_export_and_import_dataset_with_skeletons_cvat_for_video(self, admin_user):
project_id = 5

response = self._test_export_project(admin_user, project_id, "CVAT for video 1.1")
response = self._test_export_project(admin_user, project_id, format="CVAT for video 1.1")

tmp_file = io.BytesIO(response.data)
tmp_file.name = "dataset.zip"
Expand Down Expand Up @@ -643,7 +643,7 @@ def test_can_import_export_dataset_with_some_format(self, format_name):
username = "admin1"
project_id = 4

response = self._test_export_project(username, project_id, format_name)
response = self._test_export_project(username, project_id, format=format_name)

tmp_file = io.BytesIO(response.data)
tmp_file.name = "dataset.zip"
Expand Down Expand Up @@ -693,7 +693,7 @@ def test_exported_project_dataset_structure(
],
}

response = self._export_annotations(username, pid, anno_format)
response = self._export_annotations(username, pid, format=anno_format)
assert response.data
with zipfile.ZipFile(BytesIO(response.data)) as zip_file:
content = zip_file.read(anno_file_name)
Expand All @@ -704,7 +704,7 @@ def test_can_import_export_annotations_with_rotation(self):
username = "admin1"
project_id = 4

response = self._test_export_project(username, project_id, "CVAT for images 1.1")
response = self._test_export_project(username, project_id)

tmp_file = io.BytesIO(response.data)
tmp_file.name = "dataset.zip"
Expand Down Expand Up @@ -732,11 +732,11 @@ def test_can_export_dataset_with_skeleton_labels_with_spaces(self):
username = "admin1"
project_id = 11

self._test_export_project(username, project_id, "COCO Keypoints 1.0")
self._test_export_project(username, project_id, format="COCO Keypoints 1.0")

def test_can_export_dataset_for_empty_project(self, projects):
empty_project = next((p for p in projects if 0 == p["tasks"]["count"]))
self._test_export_project("admin1", empty_project["id"], "COCO 1.0")
self._test_export_project("admin1", empty_project["id"], format="COCO 1.0")

def test_can_export_project_dataset_when_some_tasks_have_no_data(self, projects):
project = next((p for p in projects if 0 < p["tasks"]["count"]))
Expand All @@ -747,7 +747,7 @@ def test_can_export_project_dataset_when_some_tasks_have_no_data(self, projects)
)
assert response.status_code == HTTPStatus.CREATED

self._test_export_project("admin1", project["id"], "COCO 1.0")
self._test_export_project("admin1", project["id"], format="COCO 1.0")

def test_can_export_project_dataset_when_all_tasks_have_no_data(self, projects):
project = next((p for p in projects if 0 == p["tasks"]["count"]))
Expand All @@ -763,7 +763,38 @@ def test_can_export_project_dataset_when_all_tasks_have_no_data(self, projects):
)
assert response.status_code == HTTPStatus.CREATED

self._test_export_project("admin1", project["id"], "COCO 1.0")
self._test_export_project("admin1", project["id"], format="COCO 1.0")

    @pytest.mark.parametrize("cloud_storage_id", [2])
    def test_can_export_and_import_dataset_after_deleting_related_storage(
        self, admin_user, projects, cloud_storage_id: int
    ):
        # Regression test: a project whose source AND target storages point at a
        # cloud storage must remain exportable and importable after that cloud
        # storage is deleted.
        project = next(
            p
            for p in projects
            if p["source_storage"]
            and p["source_storage"]["cloud_storage_id"] == cloud_storage_id
            and p["target_storage"]
            and p["target_storage"]["cloud_storage_id"] == cloud_storage_id
        )
        project_id = project["id"]

        with make_api_client(admin_user) as api_client:
            # Delete the cloud storage the project's storages reference.
            _, response = api_client.cloudstorages_api.destroy(cloud_storage_id)
            assert response.status == HTTPStatus.NO_CONTENT

            # Deleting the storage must clear both storage links on the project.
            result, response = api_client.projects_api.retrieve(project_id)
            assert all([not getattr(result, field) for field in ("source_storage", "target_storage")])

        # Export must still succeed without the (now deleted) target storage...
        response = self._test_export_project(admin_user, project_id)

        with io.BytesIO(response.data) as tmp_file:
            tmp_file.name = "dataset.zip"
            import_data = {
                "dataset_file": tmp_file,
            }

            # ...and the exported archive must import back without the (now
            # deleted) source storage.
            self._test_import_project(admin_user, project_id, "CVAT 1.1", import_data)


@pytest.mark.usefixtures("restore_db_per_function")
Expand Down
72 changes: 62 additions & 10 deletions tests/python/rest_api/test_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -590,13 +590,13 @@ def test_can_split_skeleton_tracks_on_jobs(self, jobs):

@pytest.mark.usefixtures("restore_db_per_class")
class TestGetTaskDataset:
def _test_export_task(self, username, tid, **kwargs):
def _test_export_task(self, username: str, tid: int, **kwargs):
with make_api_client(username) as api_client:
return export_dataset(api_client.tasks_api.retrieve_dataset_endpoint, id=tid, **kwargs)

def test_can_export_task_dataset(self, admin_user, tasks_with_shapes):
task = tasks_with_shapes[0]
response = self._test_export_task(admin_user, task["id"], format="CVAT for images 1.1")
response = self._test_export_task(admin_user, task["id"])
assert response.data

@pytest.mark.parametrize("tid", [21])
Expand Down Expand Up @@ -722,10 +722,29 @@ def test_can_download_task_with_special_chars_in_name(self, admin_user):

task_id, _ = create_task(admin_user, task_spec, task_data)

response = self._test_export_task(admin_user, task_id, format="CVAT for images 1.1")
response = self._test_export_task(admin_user, task_id)
assert response.status == HTTPStatus.OK
assert zipfile.is_zipfile(io.BytesIO(response.data))

    def test_export_dataset_after_deleting_related_cloud_storage(self, admin_user, tasks):
        # Regression test: exporting a task dataset must still work after the
        # cloud storage used as the task's target storage has been deleted.
        related_field = "target_storage"

        # Pick any task whose target storage lives in a cloud storage.
        task = next(
            t for t in tasks if t[related_field] and t[related_field]["location"] == "cloud_storage"
        )
        task_id = task["id"]
        cloud_storage_id = task[related_field]["cloud_storage_id"]

        with make_api_client(admin_user) as api_client:
            _, response = api_client.cloudstorages_api.destroy(cloud_storage_id)
            assert response.status == HTTPStatus.NO_CONTENT

            # The task must no longer reference the deleted storage.
            result, response = api_client.tasks_api.retrieve(task_id)
            assert not result[related_field]

            # The export must still produce a non-empty archive.
            response = export_dataset(api_client.tasks_api.retrieve_dataset_endpoint, id=task["id"])
            assert response.data


@pytest.mark.usefixtures("restore_db_per_function")
@pytest.mark.usefixtures("restore_cvat_data")
Expand Down Expand Up @@ -2433,7 +2452,8 @@ def setup(self, restore_db_per_function, tmp_path: Path, admin_user: str):
self.tmp_dir = tmp_path
self.client = self._make_client()
self.user = admin_user
self.format = "COCO 1.0"
self.export_format = "CVAT for images 1.1"
self.import_format = "CVAT 1.1"

with self.client:
self.client.login((self.user, USER_PASS))
Expand Down Expand Up @@ -2463,11 +2483,11 @@ def test_can_import_annotations_after_previous_unclear_import(
filename = self.tmp_dir / f"task_{task_id}_{Path(f.name).name}_coco.zip"

task = self.client.tasks.retrieve(task_id)
task.export_dataset(self.format, filename, include_images=False)
task.export_dataset(self.export_format, filename, include_images=False)

self._delete_annotations(task_id)

params = {"format": self.format, "filename": filename.name}
params = {"format": self.import_format, "filename": filename.name}
url = self.client.api_map.make_endpoint_url(
self.client.api_client.tasks_api.create_annotations_endpoint.path
).format(id=task_id)
Expand All @@ -2476,7 +2496,7 @@ def test_can_import_annotations_after_previous_unclear_import(
if successful_upload:
# define time required to upload file with annotations
start_time = time()
task.import_annotations(self.format, filename)
task.import_annotations(self.import_format, filename)
required_time = ceil(time() - start_time) * 2
self._delete_annotations(task_id)

Expand All @@ -2500,7 +2520,7 @@ def test_can_import_annotations_after_previous_unclear_import(
if successful_upload:
self._check_annotations(task_id)
self._delete_annotations(task_id)
task.import_annotations(self.format, filename)
task.import_annotations(self.import_format, filename)
self._check_annotations(task_id)

@pytest.mark.skip("Fails sometimes, needs to be fixed")
Expand All @@ -2510,9 +2530,9 @@ def test_check_import_cache_after_previous_interrupted_upload(self, tasks_with_s
with NamedTemporaryFile() as f:
filename = self.tmp_dir / f"task_{task_id}_{Path(f.name).name}_coco.zip"
task = self.client.tasks.retrieve(task_id)
task.export_dataset(self.format, filename, include_images=False)
task.export_dataset(self.export_format, filename, include_images=False)

params = {"format": self.format, "filename": filename.name}
params = {"format": self.import_format, "filename": filename.name}
url = self.client.api_map.make_endpoint_url(
self.client.api_client.tasks_api.create_annotations_endpoint.path
).format(id=task_id)
Expand Down Expand Up @@ -2540,6 +2560,38 @@ def test_check_import_cache_after_previous_interrupted_upload(self, tasks_with_s
break
assert not number_of_files

    def test_import_annotations_after_deleting_related_cloud_storage(
        self, admin_user: str, tasks_with_shapes
    ):
        # Regression test: annotation export/import must keep working after the
        # cloud storage referenced by the task's source storage is deleted.
        related_field = "source_storage"

        task = next(
            t
            for t in tasks_with_shapes
            if t[related_field] and t[related_field]["location"] == "cloud_storage"
        )
        task_id = task["id"]
        cloud_storage_id = task["source_storage"]["cloud_storage_id"]

        # generate temporary destination
        # NOTE(review): the file itself is removed when this block exits; only
        # the unique path is kept — export_dataset() recreates the file below.
        with NamedTemporaryFile(dir=self.tmp_dir, suffix=f"task_{task_id}.zip") as f:
            file_path = Path(f.name)

        task = self.client.tasks.retrieve(task_id)
        self._check_annotations(task_id)  # sanity check before mutating anything

        with make_api_client(admin_user) as api_client:
            _, response = api_client.cloudstorages_api.destroy(cloud_storage_id)
            assert response.status == HTTPStatus.NO_CONTENT

        # Re-fetch: the task must no longer reference the deleted storage.
        task = self.client.tasks.retrieve(task_id)
        assert not getattr(task, related_field)

        # Round-trip: export annotations, wipe them, import them back, verify.
        task.export_dataset(self.export_format, file_path, include_images=False)
        self._delete_annotations(task_id)
        task.import_annotations(self.import_format, file_path)
        self._check_annotations(task_id)


class TestImportWithComplexFilenames:
@staticmethod
Expand Down
13 changes: 10 additions & 3 deletions tests/python/rest_api/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,17 +18,24 @@


def export_dataset(
    endpoint: Endpoint,
    *,
    max_retries: int = 20,
    interval: float = 0.1,
    format: str = "CVAT for images 1.1",  # pylint: disable=redefined-builtin
    **kwargs,
) -> HTTPResponse:
    """Request a dataset export via `endpoint` and download the resulting archive.

    Polls the endpoint until the export job reports HTTP 201 (CREATED),
    sleeping `interval` seconds between at most `max_retries` attempts, then
    performs the ``action="download"`` request and returns its raw HTTP
    response (the archive bytes are available in ``response.data``).

    Raises AssertionError if the export does not finish in time or any
    response has an unexpected status.
    """
    for _ in range(max_retries):
        (_, response) = endpoint.call_with_http_info(**kwargs, format=format, _parse_response=False)
        if response.status == HTTPStatus.CREATED:
            break
        # While the export is still being prepared, the server answers 202.
        assert response.status == HTTPStatus.ACCEPTED
        sleep(interval)
    assert response.status == HTTPStatus.CREATED

    (_, response) = endpoint.call_with_http_info(
        **kwargs, format=format, action="download", _parse_response=False
    )
    assert response.status == HTTPStatus.OK

    return response
Expand Down

0 comments on commit 67b80c2

Please sign in to comment.