Skip to content

Commit 9ae0e59

Browse files
kevinkjt2000
authored and dpkp committed
use absolute imports everywhere (#1362)
1 parent c0df771 commit 9ae0e59

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

43 files changed

+144
-144
lines changed

Diff for: kafka/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from __future__ import absolute_import
22

33
__title__ = 'kafka'
4-
from .version import __version__
4+
from kafka.version import __version__
55
__author__ = 'Dana Powers'
66
__license__ = 'Apache License 2.0'
77
__copyright__ = 'Copyright 2016 Dana Powers, David Arthur, and Contributors'

Diff for: kafka/client_async.py

+12-12
Original file line numberDiff line numberDiff line change
@@ -13,26 +13,26 @@
1313
import selectors # pylint: disable=import-error
1414
except ImportError:
1515
# vendored backport module
16-
from .vendor import selectors34 as selectors
16+
from kafka.vendor import selectors34 as selectors
1717

1818
import socket
1919
import time
2020

2121
from kafka.vendor import six
2222

23-
from .cluster import ClusterMetadata
24-
from .conn import BrokerConnection, ConnectionStates, collect_hosts, get_ip_port_afi
25-
from . import errors as Errors
26-
from .future import Future
27-
from .metrics import AnonMeasurable
28-
from .metrics.stats import Avg, Count, Rate
29-
from .metrics.stats.rate import TimeUnit
30-
from .protocol.metadata import MetadataRequest
31-
from .util import Dict, WeakMethod
23+
from kafka.cluster import ClusterMetadata
24+
from kafka.conn import BrokerConnection, ConnectionStates, collect_hosts, get_ip_port_afi
25+
from kafka import errors as Errors
26+
from kafka.future import Future
27+
from kafka.metrics import AnonMeasurable
28+
from kafka.metrics.stats import Avg, Count, Rate
29+
from kafka.metrics.stats.rate import TimeUnit
30+
from kafka.protocol.metadata import MetadataRequest
31+
from kafka.util import Dict, WeakMethod
3232
# Although this looks unused, it actually monkey-patches socket.socketpair()
3333
# and should be left in as long as we're using socket.socketpair() in this file
34-
from .vendor import socketpair
35-
from .version import __version__
34+
from kafka.vendor import socketpair
35+
from kafka.version import __version__
3636

3737
if six.PY2:
3838
ConnectionError = None

Diff for: kafka/cluster.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,9 @@
88

99
from kafka.vendor import six
1010

11-
from . import errors as Errors
12-
from .future import Future
13-
from .structs import BrokerMetadata, PartitionMetadata, TopicPartition
11+
from kafka import errors as Errors
12+
from kafka.future import Future
13+
from kafka.structs import BrokerMetadata, PartitionMetadata, TopicPartition
1414

1515
log = logging.getLogger(__name__)
1616

Diff for: kafka/conn.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
import selectors # pylint: disable=import-error
1313
except ImportError:
1414
# vendored backport module
15-
from .vendor import selectors34 as selectors
15+
from kafka.vendor import selectors34 as selectors
1616

1717
import socket
1818
import struct
@@ -858,8 +858,8 @@ def check_version(self, timeout=2, strict=False):
858858
# vanilla MetadataRequest. If the server did not recognize the first
859859
# request, both will be failed with a ConnectionError that wraps
860860
# socket.error (32, 54, or 104)
861-
from .protocol.admin import ApiVersionRequest, ListGroupsRequest
862-
from .protocol.commit import OffsetFetchRequest, GroupCoordinatorRequest
861+
from kafka.protocol.admin import ApiVersionRequest, ListGroupsRequest
862+
from kafka.protocol.commit import OffsetFetchRequest, GroupCoordinatorRequest
863863

864864
# Socket errors are logged as exceptions and can alarm users. Mute them
865865
from logging import Filter

Diff for: kafka/consumer/__init__.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
from __future__ import absolute_import
22

3-
from .simple import SimpleConsumer
4-
from .multiprocess import MultiProcessConsumer
5-
from .group import KafkaConsumer
3+
from kafka.consumer.simple import SimpleConsumer
4+
from kafka.consumer.multiprocess import MultiProcessConsumer
5+
from kafka.consumer.group import KafkaConsumer
66

77
__all__ = [
88
'SimpleConsumer', 'MultiProcessConsumer', 'KafkaConsumer'

Diff for: kafka/consumer/multiprocess.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -8,15 +8,15 @@
88

99
from kafka.vendor.six.moves import queue # pylint: disable=import-error
1010

11-
from ..common import KafkaError
12-
from .base import (
11+
from kafka.common import KafkaError
12+
from kafka.consumer.base import (
1313
Consumer,
1414
AUTO_COMMIT_MSG_COUNT, AUTO_COMMIT_INTERVAL,
1515
NO_MESSAGES_WAIT_TIME_SECONDS,
1616
FULL_QUEUE_WAIT_TIME_SECONDS,
1717
MAX_BACKOFF_SECONDS,
1818
)
19-
from .simple import SimpleConsumer
19+
from kafka.consumer.simple import SimpleConsumer
2020

2121

2222
log = logging.getLogger(__name__)

Diff for: kafka/consumer/simple.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from kafka.vendor import six
1313
from kafka.vendor.six.moves import queue # pylint: disable=import-error
1414

15-
from .base import (
15+
from kafka.consumer.base import (
1616
Consumer,
1717
FETCH_DEFAULT_BLOCK_TIMEOUT,
1818
AUTO_COMMIT_MSG_COUNT,
@@ -24,7 +24,7 @@
2424
ITER_TIMEOUT_SECONDS,
2525
NO_MESSAGES_WAIT_TIME_SECONDS
2626
)
27-
from ..common import (
27+
from kafka.common import (
2828
FetchRequestPayload, KafkaError, OffsetRequestPayload,
2929
ConsumerFetchSizeTooSmall,
3030
UnknownTopicOrPartitionError, NotLeaderForPartitionError,

Diff for: kafka/coordinator/assignors/range.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@
55

66
from kafka.vendor import six
77

8-
from .abstract import AbstractPartitionAssignor
9-
from ..protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
8+
from kafka.coordinator.assignors.abstract import AbstractPartitionAssignor
9+
from kafka.coordinator.protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
1010

1111
log = logging.getLogger(__name__)
1212

Diff for: kafka/coordinator/assignors/roundrobin.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,9 @@
66

77
from kafka.vendor import six
88

9-
from .abstract import AbstractPartitionAssignor
10-
from ...common import TopicPartition
11-
from ..protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
9+
from kafka.coordinator.assignors.abstract import AbstractPartitionAssignor
10+
from kafka.common import TopicPartition
11+
from kafka.coordinator.protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
1212

1313
log = logging.getLogger(__name__)
1414

Diff for: kafka/coordinator/base.py

+7-7
Original file line numberDiff line numberDiff line change
@@ -10,13 +10,13 @@
1010

1111
from kafka.vendor import six
1212

13-
from .heartbeat import Heartbeat
14-
from .. import errors as Errors
15-
from ..future import Future
16-
from ..metrics import AnonMeasurable
17-
from ..metrics.stats import Avg, Count, Max, Rate
18-
from ..protocol.commit import GroupCoordinatorRequest, OffsetCommitRequest
19-
from ..protocol.group import (HeartbeatRequest, JoinGroupRequest,
13+
from kafka.coordinator.heartbeat import Heartbeat
14+
from kafka import errors as Errors
15+
from kafka.future import Future
16+
from kafka.metrics import AnonMeasurable
17+
from kafka.metrics.stats import Avg, Count, Max, Rate
18+
from kafka.protocol.commit import GroupCoordinatorRequest, OffsetCommitRequest
19+
from kafka.protocol.group import (HeartbeatRequest, JoinGroupRequest,
2020
LeaveGroupRequest, SyncGroupRequest)
2121

2222
log = logging.getLogger('kafka.coordinator')

Diff for: kafka/coordinator/consumer.py

+11-11
Original file line numberDiff line numberDiff line change
@@ -7,17 +7,17 @@
77

88
from kafka.vendor import six
99

10-
from .base import BaseCoordinator, Generation
11-
from .assignors.range import RangePartitionAssignor
12-
from .assignors.roundrobin import RoundRobinPartitionAssignor
13-
from .protocol import ConsumerProtocol
14-
from .. import errors as Errors
15-
from ..future import Future
16-
from ..metrics import AnonMeasurable
17-
from ..metrics.stats import Avg, Count, Max, Rate
18-
from ..protocol.commit import OffsetCommitRequest, OffsetFetchRequest
19-
from ..structs import OffsetAndMetadata, TopicPartition
20-
from ..util import WeakMethod
10+
from kafka.coordinator.base import BaseCoordinator, Generation
11+
from kafka.coordinator.assignors.range import RangePartitionAssignor
12+
from kafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor
13+
from kafka.coordinator.protocol import ConsumerProtocol
14+
from kafka import errors as Errors
15+
from kafka.future import Future
16+
from kafka.metrics import AnonMeasurable
17+
from kafka.metrics.stats import Avg, Count, Max, Rate
18+
from kafka.protocol.commit import OffsetCommitRequest, OffsetFetchRequest
19+
from kafka.structs import OffsetAndMetadata, TopicPartition
20+
from kafka.util import WeakMethod
2121

2222

2323
log = logging.getLogger(__name__)

Diff for: kafka/metrics/__init__.py

+8-8
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,13 @@
11
from __future__ import absolute_import
22

3-
from .compound_stat import NamedMeasurable
4-
from .dict_reporter import DictReporter
5-
from .kafka_metric import KafkaMetric
6-
from .measurable import AnonMeasurable
7-
from .metric_config import MetricConfig
8-
from .metric_name import MetricName
9-
from .metrics import Metrics
10-
from .quota import Quota
3+
from kafka.metrics.compound_stat import NamedMeasurable
4+
from kafka.metrics.dict_reporter import DictReporter
5+
from kafka.metrics.kafka_metric import KafkaMetric
6+
from kafka.metrics.measurable import AnonMeasurable
7+
from kafka.metrics.metric_config import MetricConfig
8+
from kafka.metrics.metric_name import MetricName
9+
from kafka.metrics.metrics import Metrics
10+
from kafka.metrics.quota import Quota
1111

1212
__all__ = [
1313
'AnonMeasurable', 'DictReporter', 'KafkaMetric', 'MetricConfig',

Diff for: kafka/metrics/stats/__init__.py

+10-10
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,15 @@
11
from __future__ import absolute_import
22

3-
from .avg import Avg
4-
from .count import Count
5-
from .histogram import Histogram
6-
from .max_stat import Max
7-
from .min_stat import Min
8-
from .percentile import Percentile
9-
from .percentiles import Percentiles
10-
from .rate import Rate
11-
from .sensor import Sensor
12-
from .total import Total
3+
from kafka.metrics.stats.avg import Avg
4+
from kafka.metrics.stats.count import Count
5+
from kafka.metrics.stats.histogram import Histogram
6+
from kafka.metrics.stats.max_stat import Max
7+
from kafka.metrics.stats.min_stat import Min
8+
from kafka.metrics.stats.percentile import Percentile
9+
from kafka.metrics.stats.percentiles import Percentiles
10+
from kafka.metrics.stats.rate import Rate
11+
from kafka.metrics.stats.sensor import Sensor
12+
from kafka.metrics.stats.total import Total
1313

1414
__all__ = [
1515
'Avg', 'Count', 'Histogram', 'Max', 'Min', 'Percentile', 'Percentiles',

Diff for: kafka/partitioner/__init__.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
from __future__ import absolute_import
22

3-
from .default import DefaultPartitioner
4-
from .hashed import HashedPartitioner, Murmur2Partitioner, LegacyPartitioner
5-
from .roundrobin import RoundRobinPartitioner
3+
from kafka.partitioner.default import DefaultPartitioner
4+
from kafka.partitioner.hashed import HashedPartitioner, Murmur2Partitioner, LegacyPartitioner
5+
from kafka.partitioner.roundrobin import RoundRobinPartitioner
66

77
__all__ = [
88
'DefaultPartitioner', 'RoundRobinPartitioner', 'HashedPartitioner',

Diff for: kafka/partitioner/default.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import random
44

5-
from .hashed import murmur2
5+
from kafka.partitioner.hashed import murmur2
66

77

88
class DefaultPartitioner(object):

Diff for: kafka/partitioner/hashed.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from kafka.vendor import six
44

5-
from .base import Partitioner
5+
from kafka.partitioner.base import Partitioner
66

77

88
class Murmur2Partitioner(Partitioner):

Diff for: kafka/partitioner/roundrobin.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from __future__ import absolute_import
22

3-
from .base import Partitioner
3+
from kafka.partitioner.base import Partitioner
44

55

66
class RoundRobinPartitioner(Partitioner):

Diff for: kafka/producer/__init__.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
from __future__ import absolute_import
22

3-
from .kafka import KafkaProducer
4-
from .simple import SimpleProducer
5-
from .keyed import KeyedProducer
3+
from kafka.producer.kafka import KafkaProducer
4+
from kafka.producer.simple import SimpleProducer
5+
from kafka.producer.keyed import KeyedProducer
66

77
__all__ = [
88
'KafkaProducer',

Diff for: kafka/producer/buffer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
import threading
66
import time
77

8-
from ..metrics.stats import Rate
8+
from kafka.metrics.stats import Rate
99

1010
import kafka.errors as Errors
1111

Diff for: kafka/producer/future.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
import collections
44
import threading
55

6-
from .. import errors as Errors
7-
from ..future import Future
6+
from kafka import errors as Errors
7+
from kafka.future import Future
88

99

1010
class FutureProduceResult(Future):

Diff for: kafka/producer/kafka.py

+14-14
Original file line numberDiff line numberDiff line change
@@ -8,20 +8,20 @@
88
import time
99
import weakref
1010

11-
from ..vendor import six
12-
13-
from .. import errors as Errors
14-
from ..client_async import KafkaClient, selectors
15-
from ..codec import has_gzip, has_snappy, has_lz4
16-
from ..metrics import MetricConfig, Metrics
17-
from ..partitioner.default import DefaultPartitioner
18-
from ..record.default_records import DefaultRecordBatchBuilder
19-
from ..record.legacy_records import LegacyRecordBatchBuilder
20-
from ..serializer import Serializer
21-
from ..structs import TopicPartition
22-
from .future import FutureRecordMetadata, FutureProduceResult
23-
from .record_accumulator import AtomicInteger, RecordAccumulator
24-
from .sender import Sender
11+
from kafka.vendor import six
12+
13+
from kafka import errors as Errors
14+
from kafka.client_async import KafkaClient, selectors
15+
from kafka.codec import has_gzip, has_snappy, has_lz4
16+
from kafka.metrics import MetricConfig, Metrics
17+
from kafka.partitioner.default import DefaultPartitioner
18+
from kafka.record.default_records import DefaultRecordBatchBuilder
19+
from kafka.record.legacy_records import LegacyRecordBatchBuilder
20+
from kafka.serializer import Serializer
21+
from kafka.structs import TopicPartition
22+
from kafka.producer.future import FutureRecordMetadata, FutureProduceResult
23+
from kafka.producer.record_accumulator import AtomicInteger, RecordAccumulator
24+
from kafka.producer.sender import Sender
2525

2626

2727
log = logging.getLogger(__name__)

Diff for: kafka/producer/keyed.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
import logging
44
import warnings
55

6-
from .base import Producer
7-
from ..partitioner import HashedPartitioner
6+
from kafka.producer.base import Producer
7+
from kafka.partitioner import HashedPartitioner
88

99

1010
log = logging.getLogger(__name__)

Diff for: kafka/producer/record_accumulator.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -6,10 +6,10 @@
66
import threading
77
import time
88

9-
from .. import errors as Errors
10-
from .buffer import SimpleBufferPool
11-
from .future import FutureRecordMetadata, FutureProduceResult
12-
from ..structs import TopicPartition
9+
from kafka import errors as Errors
10+
from kafka.producer.buffer import SimpleBufferPool
11+
from kafka.producer.future import FutureRecordMetadata, FutureProduceResult
12+
from kafka.structs import TopicPartition
1313
from kafka.record.memory_records import MemoryRecordsBuilder
1414
from kafka.record.legacy_records import LegacyRecordBatchBuilder
1515

0 commit comments

Comments (0)