diff --git a/cvat/apps/dataset_manager/formats/datumaro/export_templates/plugins/cvat_rest_api_task_images.py b/cvat/apps/dataset_manager/formats/datumaro/export_templates/plugins/cvat_rest_api_task_images.py
index 2af31fcc5d0d..9a7a9f06af85 100644
--- a/cvat/apps/dataset_manager/formats/datumaro/export_templates/plugins/cvat_rest_api_task_images.py
+++ b/cvat/apps/dataset_manager/formats/datumaro/export_templates/plugins/cvat_rest_api_task_images.py
@@ -42,32 +42,19 @@ def _download_image(self, item_id):
             frame_ids=[item_id], outdir=self._cache_dir, quality='original')
 
     def _connect(self):
-        if self._session is not None:
+        if self._cvat_cli is not None:
             return
 
-        session = None
-        try:
-            print("Enter credentials for '%s' to read task data:" % \
-                (self._config.server_url))
-            username = input('User: ')
-            password = getpass.getpass()
+        print("Enter credentials for '%s' to read task data:" % \
+            (self._config.server_url))
+        username = input('User: ')
+        password = getpass.getpass()
 
-            session = requests.Session()
-            session.auth = (username, password)
+        session = requests.Session()
 
-            api = CVAT_API_V1(self._config.server_url)
-            cli = CVAT_CLI(session, api)
-
-            self._session = session
-            self._cvat_cli = cli
-        except Exception:
-            if session is not None:
-                session.close()
-
-    def __del__(self):
-        if hasattr(self, '_session'):
-            if self._session is not None:
-                self._session.close()
+        api = CVAT_API_V1(self._config.server_url)
+        cli = CVAT_CLI(session, api, credentials=(username, password))
+        self._cvat_cli = cli
 
     @staticmethod
     def _image_loader(item_id, extractor):
@@ -101,15 +88,13 @@ def __init__(self, url):
                 size = (entry['height'], entry['width'])
             image = Image(data=self._make_image_loader(item_id),
                 path=self._image_local_path(item_id), size=size)
-            item = DatasetItem(id=item_id, image=image)
+            item = DatasetItem(id=osp.splitext(item_filename)[0], image=image)
             items.append((item.id, item))
 
-        items = sorted(items, key=lambda e: int(e[0]))
         items = OrderedDict(items)
 
         self._items = items
         self._cvat_cli = None
-        self._session = None
 
     def __iter__(self):
         for item in self._items.values():
diff --git a/cvat/apps/dataset_manager/task.py b/cvat/apps/dataset_manager/task.py
index f2c40dfd0a17..246d2ef02c1d 100644
--- a/cvat/apps/dataset_manager/task.py
+++ b/cvat/apps/dataset_manager/task.py
@@ -536,6 +536,14 @@ def init_from_db(self):
     def data(self):
         return self.ir_data.data
 
+    def export(self, dst_file, exporter, host='', **options):
+        task_data = TaskData(
+            annotation_ir=self.ir_data,
+            db_task=self.db_job.segment.task,
+            host=host,
+        )
+        exporter(dst_file, task_data, **options)
+
     def import_annotations(self, src_file, importer):
         task_data = TaskData(
             annotation_ir=AnnotationIR(),
@@ -674,6 +682,21 @@ def delete_job_data(pk):
     annotation = JobAnnotation(pk)
     annotation.delete()
 
+def export_job(job_id, dst_file, format_name,
+        server_url=None, save_images=False):
+    # For big tasks the dump function may run for a long time, and
+    # we don't need to acquire the lock after the job has been initialized from the DB.
+    # But there is a bug with a corrupted dump file when two or
+    # more dump requests are received at the same time:
+    # https://github.com/opencv/cvat/issues/217
+    with transaction.atomic():
+        job = JobAnnotation(job_id)
+        job.init_from_db()
+
+    exporter = make_exporter(format_name)
+    with open(dst_file, 'wb') as f:
+        job.export(f, exporter, host=server_url, save_images=save_images)
+
 @silk_profile(name="GET task data")
 @transaction.atomic
 def get_task_data(pk):
@@ -722,8 +745,7 @@ def export_task(task_id, dst_file, format_name,
 
     exporter = make_exporter(format_name)
     with open(dst_file, 'wb') as f:
-        task.export(f, exporter, host=server_url,
-            save_images=save_images)
+        task.export(f, exporter, host=server_url, save_images=save_images)
 
 @transaction.atomic
 def import_task_annotations(task_id, src_file, format_name):
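Note: below is a minimal usage sketch for the new export_job() helper, assuming it is invoked from server-side code (for example an RQ worker or a Django shell) with an existing job id. The job id, output path, and the "CVAT for images 1.1" format name are illustrative assumptions and not part of this diff; any format name accepted by make_exporter() would work.

    # Hypothetical example (not part of the diff): dump one job's annotations.
    from tempfile import NamedTemporaryFile

    from cvat.apps.dataset_manager.task import export_job

    # Assumed values for illustration only.
    job_id = 42
    with NamedTemporaryFile(suffix='.zip', delete=False) as tmp:
        dst_file = tmp.name

    export_job(job_id, dst_file, 'CVAT for images 1.1',
        server_url='http://localhost:8080', save_images=False)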