Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor way of working with RQ meta #9082

Open
wants to merge 29 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 16 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
2a2dc86
Add new API && remove deprecated && refactor working with rq meta
Marishka17 Feb 7, 2025
e3d6da9
Remove outdated tests
Marishka17 Feb 7, 2025
a01a8a8
Resolve conflicts
Marishka17 Feb 7, 2025
0f71c08
Rename method
Marishka17 Feb 7, 2025
a22e10f
Split RQMeta into several classes && small fixes
Marishka17 Feb 9, 2025
19a322a
Black code
Marishka17 Feb 9, 2025
f535d32
f
Marishka17 Feb 9, 2025
3c9895b
Fix typo
Marishka17 Feb 9, 2025
1762178
Revert some changes
Marishka17 Feb 10, 2025
472763c
Sort imports
Marishka17 Feb 10, 2025
d9eeecb
Remove commented code
Marishka17 Feb 10, 2025
b8f8a88
Fix var usage
Marishka17 Feb 10, 2025
6208de4
Small fixes
Marishka17 Feb 10, 2025
c9bbe48
Fix types
Marishka17 Feb 10, 2025
3b9aefc
Fix meta update
Marishka17 Feb 12, 2025
7ea0523
Resolve conflicts
Marishka17 Feb 14, 2025
0253b4e
apply comments
Marishka17 Feb 17, 2025
d15ae47
rename module && move define_dependent_job into rq.py
Marishka17 Feb 17, 2025
a16f179
Fix imports sorting
Marishka17 Feb 17, 2025
2d8fd91
Fix exception class used
Marishka17 Feb 17, 2025
f8908cb
Merge branch 'develop' into mk/refactor_working_with_rq_meta
Marishka17 Feb 17, 2025
583c09a
Resolve conflicts
Marishka17 Feb 20, 2025
78cf972
Use descriptors
Marishka17 Feb 20, 2025
0ac2010
Merge branch 'develop' into mk/refactor_working_with_rq_meta
Marishka17 Feb 20, 2025
eedf1b5
black
Marishka17 Feb 20, 2025
8ca66ff
Fix merge
Marishka17 Feb 21, 2025
9ce85ac
Small improvements
Marishka17 Feb 21, 2025
06e81a3
refactor a bit
Marishka17 Feb 21, 2025
a0b7a74
fix typos
Marishka17 Feb 21, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions cvat/apps/dataset_manager/bindings.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@
ShapeType,
Task,
)
from cvat.apps.engine.rq_job_handler import RQJobMetaField
from cvat.apps.engine.rq_job_handler import ImportRQMeta

from ..engine.log import ServerLogManager
from .annotation import AnnotationIR, AnnotationManager, TrackManager
Expand Down Expand Up @@ -2452,9 +2452,10 @@ def load_dataset_data(project_annotation, dataset: dm.Dataset, project_data):
raise CvatImportError(f'Target project does not have label with name "{label.name}"')
for subset_id, subset in enumerate(dataset.subsets().values()):
job = rq.get_current_job()
job.meta[RQJobMetaField.STATUS] = 'Task from dataset is being created...'
job.meta[RQJobMetaField.PROGRESS] = (subset_id + job.meta.get(RQJobMetaField.TASK_PROGRESS, 0.)) / len(dataset.subsets().keys())
job.save_meta()
job_meta = ImportRQMeta.from_job(job)
job_meta.status = 'Task from dataset is being created...'
job_meta.progress = (subset_id + (job_meta.task_progress or 0.)) / len(dataset.subsets().keys())
job_meta.save()

task_fields = {
'project': project_annotation.db_project,
Expand Down
9 changes: 5 additions & 4 deletions cvat/apps/dataset_manager/project.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
from cvat.apps.engine import models
from cvat.apps.engine.log import DatasetLogManager
from cvat.apps.engine.model_utils import bulk_create
from cvat.apps.engine.rq_job_handler import RQJobMetaField
from cvat.apps.engine.rq_job_handler import ImportRQMeta
from cvat.apps.engine.serializers import DataSerializer, TaskWriteSerializer
from cvat.apps.engine.task import _create_thread as create_task

Expand Down Expand Up @@ -198,9 +198,10 @@ def data(self) -> dict:
@transaction.atomic
def import_dataset_as_project(src_file, project_id, format_name, conv_mask_to_poly):
rq_job = rq.get_current_job()
rq_job.meta[RQJobMetaField.STATUS] = 'Dataset import has been started...'
rq_job.meta[RQJobMetaField.PROGRESS] = 0.
rq_job.save_meta()
rq_job_meta = ImportRQMeta.from_job(rq_job)
rq_job_meta.status = 'Dataset import has been started...'
rq_job_meta.progress = 0.
rq_job_meta.save()

project = ProjectAnnotationAndData(project_id)
project.init_from_db()
Expand Down
7 changes: 4 additions & 3 deletions cvat/apps/dataset_manager/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
import cvat.apps.dataset_manager.task as task
from cvat.apps.engine.log import ServerLogManager
from cvat.apps.engine.models import Job, Project, Task
from cvat.apps.engine.rq_job_handler import RQMeta
from cvat.apps.engine.rq_job_handler import ExportRQMeta
from cvat.apps.engine.utils import get_rq_lock_by_user

from .formats.registry import EXPORT_FORMATS, IMPORT_FORMATS
Expand Down Expand Up @@ -88,7 +88,8 @@ def _patched_retry(*_1, **_2):
settings.CVAT_QUEUES.EXPORT_DATA.value
)

user_id = current_rq_job.meta.get('user', {}).get('id') or -1
rq_job_meta = ExportRQMeta.from_job(current_rq_job)
user_id = rq_job_meta.user.id or -1

with get_rq_lock_by_user(settings.CVAT_QUEUES.EXPORT_DATA.value, user_id):
scheduled_rq_job: rq.job.Job = scheduler.enqueue_in(
Expand All @@ -97,7 +98,7 @@ def _patched_retry(*_1, **_2):
*current_rq_job.args,
**current_rq_job.kwargs,
job_id=current_rq_job.id,
meta=RQMeta.reset_meta_on_retry(current_rq_job.meta),
meta=rq_job_meta.reset_meta_on_retry(),
job_ttl=current_rq_job.ttl,
job_result_ttl=current_rq_job.result_ttl,
job_description=current_rq_job.description,
Expand Down
29 changes: 17 additions & 12 deletions cvat/apps/engine/background.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,14 +37,13 @@
Task,
)
from cvat.apps.engine.permissions import get_cloud_storage_for_import_or_export
from cvat.apps.engine.rq_job_handler import RQId, RQJobMetaField
from cvat.apps.engine.rq_job_handler import ExportRQMeta, RQId
from cvat.apps.engine.serializers import RqIdSerializer
from cvat.apps.engine.types import ExtendedRequest
from cvat.apps.engine.utils import (
build_annotations_file_name,
build_backup_file_name,
define_dependent_job,
get_rq_job_meta,
get_rq_lock_by_user,
get_rq_lock_for_job,
sendfile,
Expand Down Expand Up @@ -229,7 +228,7 @@ def _handle_rq_job_v1(
) -> Optional[Response]:

def is_result_outdated() -> bool:
return rq_job.meta[RQJobMetaField.REQUEST]["timestamp"] < instance_update_time
return ExportRQMeta.from_job(rq_job).request.timestamp < instance_update_time

def handle_local_download() -> Response:
with dm.util.get_export_cache_lock(
Expand Down Expand Up @@ -342,7 +341,7 @@ def handle_local_download() -> Response:
f"Export to {self.export_args.location} location is not implemented yet"
)
elif rq_job_status == RQJobStatus.FAILED:
exc_info = rq_job.meta.get(RQJobMetaField.FORMATTED_EXCEPTION, str(rq_job.exc_info))
exc_info = ExportRQMeta.from_job(rq_job).formatted_exception or str(rq_job.exc_info)
rq_job.delete()
return Response(exc_info, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
elif (
Expand Down Expand Up @@ -478,16 +477,19 @@ def setup_background_job(
result_url = self.make_result_url()

with get_rq_lock_by_user(queue, user_id):
meta = ExportRQMeta.build_for(
request=self.request,
db_obj=self.db_instance,
result_url=result_url,
)
queue.enqueue_call(
func=func,
args=func_args,
kwargs={
"server_url": server_address,
},
job_id=rq_id,
meta=get_rq_job_meta(
request=self.request, db_obj=self.db_instance, result_url=result_url
),
meta=meta,
depends_on=define_dependent_job(queue, user_id, rq_id=rq_id),
result_ttl=cache_ttl.total_seconds(),
failure_ttl=cache_ttl.total_seconds(),
Expand Down Expand Up @@ -548,7 +550,7 @@ def _handle_rq_job_v1(
) -> Optional[Response]:

def is_result_outdated() -> bool:
return rq_job.meta[RQJobMetaField.REQUEST]["timestamp"] < last_instance_update_time
return ExportRQMeta.from_job(rq_job).request.timestamp < last_instance_update_time

last_instance_update_time = timezone.localtime(self.db_instance.updated_date)
timestamp = self.get_timestamp(last_instance_update_time)
Expand Down Expand Up @@ -644,7 +646,7 @@ def is_result_outdated() -> bool:
f"Export to {self.export_args.location} location is not implemented yet"
)
elif rq_job_status == RQJobStatus.FAILED:
exc_info = rq_job.meta.get(RQJobMetaField.FORMATTED_EXCEPTION, str(rq_job.exc_info))
exc_info = ExportRQMeta.from_job(rq_job).formatted_exception or str(rq_job.exc_info)
rq_job.delete()
return Response(exc_info, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
elif (
Expand Down Expand Up @@ -756,13 +758,16 @@ def setup_background_job(
user_id = self.request.user.id

with get_rq_lock_by_user(queue, user_id):
meta = ExportRQMeta.build_for(
request=self.request,
db_obj=self.db_instance,
result_url=result_url,
)
queue.enqueue_call(
func=func,
args=func_args,
job_id=rq_id,
meta=get_rq_job_meta(
request=self.request, db_obj=self.db_instance, result_url=result_url
),
meta=meta,
depends_on=define_dependent_job(queue, user_id, rq_id=rq_id),
result_ttl=cache_ttl.total_seconds(),
failure_ttl=cache_ttl.total_seconds(),
Expand Down
21 changes: 12 additions & 9 deletions cvat/apps/engine/backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@
StorageMethodChoice,
)
from cvat.apps.engine.permissions import get_cloud_storage_for_import_or_export
from cvat.apps.engine.rq_job_handler import RQId, RQJobMetaField
from cvat.apps.engine.rq_job_handler import ImportRQMeta, RQId
from cvat.apps.engine.serializers import (
AnnotationGuideWriteSerializer,
AssetWriteSerializer,
Expand All @@ -90,7 +90,6 @@
from cvat.apps.engine.utils import (
av_scan_paths,
define_dependent_job,
get_rq_job_meta,
get_rq_lock_by_user,
import_resource_with_clean_up_after,
process_failed_job,
Expand Down Expand Up @@ -1180,6 +1179,7 @@ def create_backup(
log_exception(logger)
raise


def _import(
importer: TaskImporter | ProjectImporter,
request: ExtendedRequest,
Expand All @@ -1192,9 +1192,6 @@ def _import(
):
rq_job = queue.fetch_job(rq_id)

if (user_id_from_meta := getattr(rq_job, 'meta', {}).get(RQJobMetaField.USER, {}).get('id')) and user_id_from_meta != request.user.id:
return Response(status=status.HTTP_403_FORBIDDEN)

if not rq_job:
org_id = getattr(request.iam_context['organization'], 'id', None)
location = location_conf.get('location')
Expand Down Expand Up @@ -1239,19 +1236,25 @@ def _import(
user_id = request.user.id

with get_rq_lock_by_user(queue, user_id):
meta = ImportRQMeta.build_for(
request=request,
db_obj=None,
tmp_file=filename,
)
rq_job = queue.enqueue_call(
func=func,
args=func_args,
job_id=rq_id,
meta={
'tmp_file': filename,
**get_rq_job_meta(request=request, db_obj=None)
},
meta=meta,
depends_on=define_dependent_job(queue, user_id),
result_ttl=settings.IMPORT_CACHE_SUCCESS_TTL.total_seconds(),
failure_ttl=settings.IMPORT_CACHE_FAILED_TTL.total_seconds()
)
else:
rq_job_meta = ImportRQMeta.from_job(rq_job)
if rq_job_meta.user.id != request.user.id:
return Response(status=status.HTTP_403_FORBIDDEN)

if rq_job.is_finished:
project_id = rq_job.return_value()
rq_job.delete()
Expand Down
9 changes: 5 additions & 4 deletions cvat/apps/engine/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@
ZipCompressedChunkWriter,
load_image,
)
from cvat.apps.engine.rq_job_handler import RQJobMetaField
from cvat.apps.engine.rq_job_handler import RQMetaWithFailureInfo
from cvat.apps.engine.utils import (
CvatChunkTimestampMismatchError,
format_list,
Expand Down Expand Up @@ -107,9 +107,10 @@ def wait_for_rq_job(rq_job: rq.job.Job):
if job_status in ("finished",):
return
elif job_status in ("failed",):
job_meta = rq_job.get_meta()
exc_type = job_meta.get(RQJobMetaField.EXCEPTION_TYPE, Exception)
exc_args = job_meta.get(RQJobMetaField.EXCEPTION_ARGS, ("Cannot create chunk",))
rq_job.get_meta() # refresh from Redis
job_meta = RQMetaWithFailureInfo.from_job(rq_job)
exc_type = job_meta.exc_type or Exception
exc_args = job_meta.exc_args or ("Cannot create chunk",)
raise exc_type(*exc_args)

time.sleep(settings.CVAT_CHUNK_CREATE_CHECK_INTERVAL)
Expand Down
Loading
Loading