Fix rest api task extractor, add job export function #1972

Merged
merged 4 commits into from
Aug 5, 2020
Changes from all commits
@@ -42,32 +42,19 @@ def _download_image(self, item_id):
             frame_ids=[item_id], outdir=self._cache_dir, quality='original')

     def _connect(self):
-        if self._session is not None:
+        if self._cvat_cli is not None:
             return

-        session = None
-        try:
-            print("Enter credentials for '%s' to read task data:" % \
-                (self._config.server_url))
-            username = input('User: ')
-            password = getpass.getpass()
+        print("Enter credentials for '%s' to read task data:" % \
+            (self._config.server_url))
+        username = input('User: ')
+        password = getpass.getpass()

-            session = requests.Session()
-            session.auth = (username, password)
+        session = requests.Session()

-            api = CVAT_API_V1(self._config.server_url)
-            cli = CVAT_CLI(session, api)
-
-            self._session = session
-            self._cvat_cli = cli
-        except Exception:
-            if session is not None:
-                session.close()
-
-    def __del__(self):
-        if hasattr(self, '_session'):
-            if self._session is not None:
-                self._session.close()
+        api = CVAT_API_V1(self._config.server_url)
+        cli = CVAT_CLI(session, api, credentials=(username, password))
+        self._cvat_cli = cli

     @staticmethod
     def _image_loader(item_id, extractor):
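Because the hunk interleaves the old and new bodies, the final shape of the method can be hard to see. Reconstructed from the diff above, the post-change _connect reads roughly as follows (a sketch: the enclosing extractor class, its imports, and attributes such as self._config and self._cvat_cli are assumed from context):

    def _connect(self):
        # Reuse the client if we have already logged in once.
        if self._cvat_cli is not None:
            return

        print("Enter credentials for '%s' to read task data:" % \
            (self._config.server_url))
        username = input('User: ')
        password = getpass.getpass()

        # The session no longer carries basic-auth credentials; authentication
        # is delegated to CVAT_CLI through the new credentials argument, so the
        # try/except and __del__ bookkeeping around the session is gone.
        session = requests.Session()

        api = CVAT_API_V1(self._config.server_url)
        cli = CVAT_CLI(session, api, credentials=(username, password))
        self._cvat_cli = cli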
@@ -101,15 +88,13 @@ def __init__(self, url):
                 size = (entry['height'], entry['width'])
             image = Image(data=self._make_image_loader(item_id),
                 path=self._image_local_path(item_id), size=size)
-            item = DatasetItem(id=item_id, image=image)
+            item = DatasetItem(id=osp.splitext(item_filename)[0], image=image)
             items.append((item.id, item))

         items = sorted(items, key=lambda e: int(e[0]))
         items = OrderedDict(items)
         self._items = items

         self._cvat_cli = None
-        self._session = None

     def __iter__(self):
         for item in self._items.values():
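The other fix in this file changes how dataset items are identified: the id is now the image file name without its extension (via osp.splitext) rather than the raw item_id used before, so items line up with the files referenced in the task. A tiny, self-contained illustration (the file names are made up for the example; osp is the usual os.path alias used in the module):

import os.path as osp

# Only the stem of the file name becomes the DatasetItem id.
for item_filename in ['000017.jpg', 'street_scene_01.png']:
    print(item_filename, '->', osp.splitext(item_filename)[0])
# 000017.jpg -> 000017
# street_scene_01.png -> street_scene_01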
26 changes: 24 additions & 2 deletions cvat/apps/dataset_manager/task.py
@@ -536,6 +536,14 @@ def init_from_db(self):
     def data(self):
         return self.ir_data.data

+    def export(self, dst_file, exporter, host='', **options):
+        task_data = TaskData(
+            annotation_ir=self.ir_data,
+            db_task=self.db_job.segment.task,
+            host=host,
+        )
+        exporter(dst_file, task_data, **options)
+
     def import_annotations(self, src_file, importer):
         task_data = TaskData(
             annotation_ir=AnnotationIR(),
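The new JobAnnotation.export mirrors the task-level export: it wraps the job's annotations (self.ir_data) in a TaskData built from the job's parent task (db_job.segment.task) and hands everything to the exporter callable. A minimal stub showing the calling convention that export relies on; the stub itself is hypothetical, only the exporter(dst_file, task_data, **options) signature comes from the diff:

def dummy_exporter(dst_file, task_data, save_images=False, **options):
    # dst_file is an already-open binary file object; a real exporter would
    # serialize task_data (annotations, frame info, ...) into it.
    dst_file.write(b'serialized annotations would go here')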
@@ -674,6 +682,21 @@ def delete_job_data(pk):
     annotation = JobAnnotation(pk)
     annotation.delete()

+def export_job(job_id, dst_file, format_name,
+        server_url=None, save_images=False):
+    # For big tasks the dump function may run for a long time, and
+    # we don't need to hold the lock after the task has been initialized from the DB.
+    # But there is a bug with corrupted dump files when 2 or
+    # more dump requests are received at the same time:
+    # https://github.com/opencv/cvat/issues/217
+    with transaction.atomic():
+        job = JobAnnotation(job_id)
+        job.init_from_db()
+
+    exporter = make_exporter(format_name)
+    with open(dst_file, 'wb') as f:
+        job.export(f, exporter, host=server_url, save_images=save_images)
+
 @silk_profile(name="GET task data")
 @transaction.atomic
 def get_task_data(pk):
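With export_job in place, a single job can be dumped with the same flow as export_task below. A hedged usage sketch (the job id, output path, and server URL are examples; the format name must be one registered with make_exporter, e.g. CVAT's own XML format):

from cvat.apps.dataset_manager.task import export_job

# Dump only the annotations of job 42; save_images=False skips the frames.
export_job(42, '/tmp/job_42_annotations.zip',
    format_name='CVAT for images 1.1',
    server_url='http://localhost:8080', save_images=False)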
@@ -722,8 +745,7 @@ def export_task(task_id, dst_file, format_name,

     exporter = make_exporter(format_name)
     with open(dst_file, 'wb') as f:
-        task.export(f, exporter, host=server_url,
-            save_images=save_images)
+        task.export(f, exporter, host=server_url, save_images=save_images)

 @transaction.atomic
 def import_task_annotations(task_id, src_file, format_name):