Skip to content

Commit

Permalink
t
Browse files Browse the repository at this point in the history
  • Loading branch information
Marishka17 committed Nov 28, 2024
1 parent a6b6f0f commit c61a74b
Show file tree
Hide file tree
Showing 4 changed files with 20 additions and 12 deletions.
11 changes: 10 additions & 1 deletion cvat/apps/dataset_manager/default_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,14 @@

import os
import warnings
from django.core.exceptions import ImproperlyConfigured


DATASET_CACHE_TTL = int(os.getenv("CVAT_DATASET_CACHE_TTL", 60 * 60 * 24))
"Base lifetime for cached exported datasets, in seconds"

default_dataset_export_lock_ttl = 60 * 5

DATASET_EXPORT_LOCK_TTL = int(os.getenv("CVAT_DATASET_EXPORT_LOCK_TTL", default_dataset_export_lock_ttl))
"""
Default lifetime for the export cache lock, in seconds.
Expand All @@ -17,6 +20,12 @@
The lock will be automatically extended as needed for the duration of the worker process.
"""

# prevent automatic lock release while extending a lock by setting a slightly lower value
DATASET_EXPORT_LOCK_EXTEND_INTERVAL = DATASET_EXPORT_LOCK_TTL - 10

if DATASET_EXPORT_LOCK_EXTEND_INTERVAL < 5:
raise ImproperlyConfigured("Recheck value of DATASET_EXPORT_LOCK_TTL")

DATASET_CACHE_LOCK_ACQUIRE_TIMEOUT = os.getenv("CVAT_DATASET_CACHE_LOCK_TIMEOUT")
"Timeout for cache lock acquiring, in seconds"

Expand All @@ -26,7 +35,7 @@
"The CVAT_DATASET_CACHE_LOCK_TIMEOUT is deprecated, "
"use DATASET_CACHE_LOCK_ACQUIRE_TIMEOUT instead", DeprecationWarning)
else:
default_dataset_lock_acquire_timeout = default_dataset_export_lock_ttl + 5
default_dataset_lock_acquire_timeout = default_dataset_export_lock_ttl + 30
"""
Set default lock acquire timeout to the default lock lifetime + small buffer
to handle possible cases when a lock wasn't released by the worker process
Expand Down
4 changes: 2 additions & 2 deletions cvat/apps/dataset_manager/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,8 @@ def log_exception(logger: logging.Logger | None = None, exc_info: bool = True):
EXPORT_CACHE_LOCK_ACQUIRE_TIMEOUT = timedelta(seconds=settings.DATASET_CACHE_LOCK_ACQUIRE_TIMEOUT)
EXPORT_LOCKED_RETRY_INTERVAL = timedelta(seconds=settings.DATASET_EXPORT_LOCKED_RETRY_INTERVAL)
EXPORT_LOCK_TTL = settings.DATASET_EXPORT_LOCK_TTL
# prevent lock auto releasing when extending a lock by setting a slightly lower value
EXPORT_LOCK_EXTEND_INTERVAL = EXPORT_LOCK_TTL - 2

EXPORT_LOCK_EXTEND_INTERVAL = settings.DATASET_EXPORT_LOCK_EXTEND_INTERVAL


def get_export_cache_ttl(db_instance: str | Project | Task | Job) -> timedelta:
Expand Down
1 change: 0 additions & 1 deletion cvat/rqworker.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@

import cvat.utils.remote_debugger as debug


DefaultWorker = Worker


Expand Down
16 changes: 8 additions & 8 deletions tests/python/rest_api/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def initialize_export(endpoint: Endpoint, *, expect_forbidden: bool = False, **k
def wait_and_download_v1(
endpoint: Endpoint,
*,
max_retries: int = 100,
max_retries: int = 50,
interval: float = 0.1,
download_result: bool = True,
**kwargs,
Expand Down Expand Up @@ -75,7 +75,7 @@ def wait_and_download_v1(
def export_v1(
endpoint: Endpoint,
*,
max_retries: int = 100,
max_retries: int = 50,
interval: float = 0.1,
expect_forbidden: bool = False,
wait_result: bool = True,
Expand Down Expand Up @@ -115,7 +115,7 @@ def wait_and_download_v2(
api_client: ApiClient,
rq_id: str,
*,
max_retries: int = 100,
max_retries: int = 50,
interval: float = 0.1,
download_result: bool = True,
) -> Optional[bytes]:
Expand Down Expand Up @@ -153,7 +153,7 @@ def wait_and_download_v2(
def export_v2(
endpoint: Endpoint,
*,
max_retries: int = 100,
max_retries: int = 50,
interval: float = 0.1,
expect_forbidden: bool = False,
wait_result: bool = True,
Expand Down Expand Up @@ -196,7 +196,7 @@ def export_dataset(
], # make this parameter required to be sure that all tests were updated and both API versions are used
*,
save_images: bool,
max_retries: int = 30,
max_retries: int = 50,
interval: float = 0.1,
format: str = "CVAT for images 1.1", # pylint: disable=redefined-builtin
**kwargs,
Expand Down Expand Up @@ -278,7 +278,7 @@ def export_backup(
int, tuple[int]
], # make this parameter required to be sure that all tests were updated and both API versions are used
*,
max_retries: int = 30,
max_retries: int = 50,
interval: float = 0.1,
**kwargs,
) -> Optional[bytes]:
Expand Down Expand Up @@ -326,7 +326,7 @@ def export_task_backup(
def import_resource(
endpoint: Endpoint,
*,
max_retries: int = 30,
max_retries: int = 50,
interval: float = 0.1,
expect_forbidden: bool = False,
wait_result: bool = True,
Expand Down Expand Up @@ -372,7 +372,7 @@ def import_resource(
def import_backup(
api: Union[ProjectsApi, TasksApi],
*,
max_retries: int = 30,
max_retries: int = 50,
interval: float = 0.1,
**kwargs,
) -> None:
Expand Down

0 comments on commit c61a74b

Please sign in to comment.