Skip to content

Commit 8f319d1

Browse files
vdusek and claude authored
refactor: use DeprecationWarning instead of logger.warning (#687)
## Summary - Replace `logger.warning()` with `warnings.warn(DeprecationWarning)` for deprecated params in `batch_add_requests()` (both sync and async) to match the pattern used in `DatasetClient.download_items()` - `DeprecationWarning` is the Python standard — visible by default, caught by `pytest -W error::DeprecationWarning`, and filterable via `warnings.filterwarnings()` - Remove unused `logging` import --- Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent 324f2e9 commit 8f319d1

File tree

1 file changed

+21
-8
lines changed

1 file changed

+21
-8
lines changed

src/apify_client/_resource_clients/request_queue.py

Lines changed: 21 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
from __future__ import annotations
22

33
import asyncio
4-
import logging
54
import math
5+
import warnings
66
from collections.abc import Iterable
77
from queue import Queue
88
from typing import TYPE_CHECKING, Any
@@ -45,9 +45,6 @@
4545
from apify_client._models import GeneralAccess
4646
from apify_client._types import Timeout
4747

48-
49-
logger = logging.getLogger(__name__)
50-
5148
_RQ_MAX_REQUESTS_PER_BATCH = 25
5249
_MAX_PAYLOAD_SIZE_BYTES = 9 * 1024 * 1024 # 9 MB
5350
_SAFETY_BUFFER_PERCENT = 0.01 / 100 # 0.01%
@@ -394,9 +391,17 @@ def batch_add_requests(
394391
Result containing lists of processed and unprocessed requests.
395392
"""
396393
if max_unprocessed_requests_retries:
397-
logger.warning('`max_unprocessed_requests_retries` is deprecated and not used anymore.')
394+
warnings.warn(
395+
'`max_unprocessed_requests_retries` is deprecated and not used anymore.',
396+
DeprecationWarning,
397+
stacklevel=2,
398+
)
398399
if min_delay_between_unprocessed_requests_retries:
399-
logger.warning('`min_delay_between_unprocessed_requests_retries` is deprecated and not used anymore.')
400+
warnings.warn(
401+
'`min_delay_between_unprocessed_requests_retries` is deprecated and not used anymore.',
402+
DeprecationWarning,
403+
stacklevel=2,
404+
)
400405

401406
if max_parallel != 1:
402407
raise NotImplementedError('max_parallel is only supported in async client')
@@ -923,9 +928,17 @@ async def batch_add_requests(
923928
Result containing lists of processed and unprocessed requests.
924929
"""
925930
if max_unprocessed_requests_retries:
926-
logger.warning('`max_unprocessed_requests_retries` is deprecated and not used anymore.')
931+
warnings.warn(
932+
'`max_unprocessed_requests_retries` is deprecated and not used anymore.',
933+
DeprecationWarning,
934+
stacklevel=2,
935+
)
927936
if min_delay_between_unprocessed_requests_retries:
928-
logger.warning('`min_delay_between_unprocessed_requests_retries` is deprecated and not used anymore.')
937+
warnings.warn(
938+
'`min_delay_between_unprocessed_requests_retries` is deprecated and not used anymore.',
939+
DeprecationWarning,
940+
stacklevel=2,
941+
)
929942

930943
requests_as_dicts = [
931944
(RequestInput.model_validate(r) if isinstance(r, dict) else r).model_dump(by_alias=True, exclude_none=True)

0 commit comments

Comments (0)