Skip to content

Commit e4a911e

Browse files
Ivan Gorbachev authored and jenkins committed
finagle-core: Track queue delay and send latency in pipelining client
Problem: We need to measure the delays between submission to the Netty epoll loop queue and task execution, as well as the latencies for message sending, including socket writes.

Solution: Add `epoll_queue_delay_ns` and `message_send_latency_ns` stats to `PipeliningClientPushSession`.

Differential Revision: https://phabricator.twitter.biz/D1185421
1 parent dc5f647 commit e4a911e

File tree

3 files changed

+37
-6
lines changed

3 files changed

+37
-6
lines changed

CHANGELOG.rst

+2
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,8 @@ Runtime Behavior Changes
2121
Previously it was `[0, dur)`, which could result in `next.duration < duration`
2222
for arbitrary long invocation chains. ``PHAB_ID=D1182252``
2323
* finagle-core: `Backoff.equalJittered` is now deprecated and falls back to `exponentialJittered`. ``PHAB_ID=D1182535``
24+
* finagle-core: `PipeliningClientPushSession` now collects stats `epoll_queue_delay_ns` and `message_send_latency_ns`.
25+
``PHAB_ID=D1185421``
2426

2527
New Features
2628
~~~~~~~~~~~~

finagle-core/src/main/scala/com/twitter/finagle/pushsession/PipeliningClientPushSession.scala

+33-5
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,12 @@ import com.twitter.finagle.Failure
55
import com.twitter.finagle.FailureFlags
66
import com.twitter.finagle.Service
77
import com.twitter.finagle.Status
8+
import com.twitter.finagle.stats.DefaultStatsReceiver
9+
import com.twitter.finagle.stats.HistogramFormat
10+
import com.twitter.finagle.stats.MetricBuilder
11+
import com.twitter.finagle.stats.MetricBuilder.HistogramType
12+
import com.twitter.finagle.stats.MetricUsageHint
13+
import com.twitter.finagle.stats.StatsReceiver
814
import com.twitter.logging.Logger
915
import com.twitter.util._
1016

@@ -22,13 +28,29 @@ import com.twitter.util._
2228
* result of another request unless the connection is stuck, and does not
2329
* look like it will make progress. Use `stallTimeout` to configure this timeout.
2430
*/
25-
final class PipeliningClientPushSession[In, Out](
31+
class PipeliningClientPushSession[In, Out](
2632
handle: PushChannelHandle[In, Out],
2733
stallTimeout: Duration,
28-
timer: Timer)
34+
timer: Timer,
35+
statsReceiver: StatsReceiver = DefaultStatsReceiver)
2936
extends PushSession[In, Out](handle) { self =>
3037

3138
private[this] val logger = Logger.get
39+
private[this] val scopedStatsReceived = statsReceiver.scope("pipelining_client")
40+
private[this] val epollQueueDelay = scopedStatsReceived.stat(
41+
MetricBuilder(metricType = HistogramType)
42+
.withHistogramFormat(HistogramFormat.FullSummary)
43+
.withPercentiles(0.5, 0.9, 0.99, 0.999, 0.9999)
44+
.withMetricUsageHints(Set(MetricUsageHint.HighContention))
45+
.withName("epoll_queue_delay_ns")
46+
)
47+
private[this] val messageSendLatency = scopedStatsReceived.stat(
48+
MetricBuilder(metricType = HistogramType)
49+
.withHistogramFormat(HistogramFormat.FullSummary)
50+
.withPercentiles(0.5, 0.9, 0.99, 0.999, 0.9999)
51+
.withMetricUsageHints(Set(MetricUsageHint.HighContention))
52+
.withName("message_send_latency_ns")
53+
)
3254

3355
// used only within SerialExecutor
3456
private[this] val h_queue = new java.util.ArrayDeque[Promise[In]]()
@@ -93,7 +115,9 @@ final class PipeliningClientPushSession[In, Out](
93115
})
94116
}
95117
}
96-
handle.serialExecutor.execute(new Runnable { def run(): Unit = handleDispatch(request, p) })
118+
119+
val requestStartTime = System.nanoTime()
120+
handle.serialExecutor.execute(() => handleDispatch(request, p, requestStartTime))
97121
p
98122
}
99123

@@ -123,12 +147,16 @@ final class PipeliningClientPushSession[In, Out](
123147
}
124148
}
125149

126-
private[this] def handleDispatch(request: Out, p: Promise[In]): Unit = {
150+
private[this] def handleDispatch(request: Out, p: Promise[In], requestStartTime: Long): Unit = {
151+
val handleStartTime = System.nanoTime()
152+
epollQueueDelay.add(handleStartTime - requestStartTime)
127153
if (!h_running) p.setException(new ChannelClosedException(handle.remoteAddress))
128154
else {
129155
h_queue.offer(p)
130156
h_queueSize += 1
131-
handle.sendAndForget(request)
157+
handle.send(request) { _ =>
158+
messageSendLatency.add(System.nanoTime() - handleStartTime)
159+
}
132160
}
133161
}
134162
}

finagle-memcached/src/main/scala/com/twitter/finagle/Memcached.scala

+2-1
Original file line numberDiff line numberDiff line change
@@ -238,7 +238,8 @@ object Memcached extends finagle.Client[Command, Response] with finagle.Server[C
238238
new PipeliningClientPushSession[Response, Command](
239239
handle,
240240
params[StalledPipelineTimeout].timeout,
241-
params[finagle.param.Timer].timer
241+
params[finagle.param.Timer].timer,
242+
statsReceiver = params[Stats].statsReceiver,
242243
)
243244
)
244245
}

0 commit comments

Comments
 (0)