Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,8 @@ private void writeHistogramSupport(HistogramSupport histogramSupport) {
double max = isTimeBased ? histogramSnapshot.max(baseTimeUnit) : histogramSnapshot.max();
long count = histogramSnapshot.count();

addMaxGaugeForTimer(id, tags, max);
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

addMaxGaugeForHistogramSupport?
Since this is for both Timer and DistributionSummary.


// if percentiles configured, use summary
if (histogramSnapshot.percentileValues().length != 0) {
buildSummaryDataPoint(histogramSupport, tags, startTimeNanos, total, count, isTimeBased, histogramSnapshot);
Expand Down Expand Up @@ -154,6 +156,20 @@ private static Optional<ExponentialHistogramSnapShot> getExponentialHistogramSna
return Optional.empty();
}

private void addMaxGaugeForTimer(Meter.Id id, Iterable<KeyValue> tags, double max) {
String metricName = id.getName() + ".max";
Metric.Builder metricBuilder = getOrCreateMetricBuilder(id.withName(metricName), DataCase.GAUGE);
if (!metricBuilder.hasGauge()) {
metricBuilder.setGauge(io.opentelemetry.proto.metrics.v1.Gauge.newBuilder());
}

metricBuilder.getGaugeBuilder()
.addDataPoints(NumberDataPoint.newBuilder()
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It seems addDataPoints has overloads accepting both NumberDataPoint.Builder and NumberDataPoint.
writeGauge (which is quite similar to this) passes a NumberDataPoint (it calls .build()) while this one passes the builder. Shouldn't the two be consistent?

.setTimeUnixNano(TimeUnit.MILLISECONDS.toNanos(clock.wallTime()))
.setAsDouble(max)
.addAllAttributes(tags));
}

private void writeFunctionTimer(FunctionTimer functionTimer) {
Metric.Builder builder = getOrCreateMetricBuilder(functionTimer.getId(), DataCase.HISTOGRAM);
HistogramDataPoint.Builder histogramDataPoint = HistogramDataPoint.newBuilder()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -107,11 +107,13 @@ void collectorShouldExportMetrics() throws Exception {
// see: https://github.com/open-telemetry/opentelemetry-collector-contrib/pull/20519
matchesPattern("(?s)^.*test_timer_milliseconds_sum\\{.+} 123\\.0\\n.*$"),
matchesPattern("(?s)^.*test_timer_milliseconds_bucket\\{.+,le=\"\\+Inf\"} 1\\n.*$"),
matchesPattern("(?s)^.*test_timer_max_milliseconds\\{.+} 123\\.0\n.*$"),
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Since this is a separate gauge, there should be two more lines, something like this should cover them:

containsString("# HELP test_timer_max_milliseconds \n"),
containsString("# TYPE test_timer_max_milliseconds gauge\n"),


containsString("# HELP test_ds \n"),
containsString("# TYPE test_ds histogram\n"),
matchesPattern("(?s)^.*test_ds_count\\{.+} 1\\n.*$"),
matchesPattern("(?s)^.*test_ds_sum\\{.+} 24\\.0\\n.*$"),
matchesPattern("(?s)^.*test_ds_max\\{.+} 24\\.0\\n.*$"),
matchesPattern("(?s)^.*test_ds_bucket\\{.+,le=\"\\+Inf\"} 1\\n.*$")
);
// @formatter:on
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.time.Duration;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

Expand Down Expand Up @@ -111,7 +112,9 @@ void timer() {
timer.record(111, TimeUnit.MILLISECONDS);
clock.add(otlpConfig().step());
timer.record(4, TimeUnit.MILLISECONDS);
assertThat(writeToMetric(timer).toString()).isEqualTo(
List<Metric> metrics = writeToMetrics(timer);
Metric metric = metrics.stream().filter(Metric::hasHistogram).findFirst().orElseThrow();
assertThat(metric.toString()).isEqualTo(
"name: \"web.requests\"\n" + "description: \"timing web requests\"\n" + "unit: \"milliseconds\"\n"
+ "histogram {\n" + " data_points {\n" + " start_time_unix_nano: 1000000\n"
+ " time_unix_nano: 60001000000\n" + " count: 4\n" + " sum: 202.0\n" + " }\n"
Expand All @@ -128,7 +131,9 @@ void timerWithHistogram() {
clock.add(otlpConfig().step());
timer.record(4, TimeUnit.MILLISECONDS);

assertThat(writeToMetric(timer).toString())
List<Metric> metrics = writeToMetrics(timer);
Metric metric = metrics.stream().filter(Metric::hasHistogram).findFirst().orElseThrow();
assertThat(metric.toString())
.isEqualTo("name: \"http.client.requests\"\n" + "unit: \"milliseconds\"\n" + "histogram {\n"
+ " data_points {\n" + " start_time_unix_nano: 1000000\n" + " time_unix_nano: 60001000000\n"
+ " count: 5\n" + " sum: 60202.0\n" + " bucket_counts: 0\n" + " bucket_counts: 0\n"
Expand Down Expand Up @@ -198,13 +203,14 @@ void timerWithPercentiles() {
timer.record(77, TimeUnit.MILLISECONDS);
timer.record(111, TimeUnit.MILLISECONDS);

assertThat(writeToMetric(timer).toString())
.isEqualTo("name: \"service.requests\"\n" + "unit: \"milliseconds\"\n" + "summary {\n" + " data_points {\n"
+ " start_time_unix_nano: 1000000\n" + " time_unix_nano: 1000000\n" + " count: 3\n"
+ " sum: 198.0\n" + " quantile_values {\n" + " quantile: 0.5\n"
+ " value: 79.167488\n" + " }\n" + " quantile_values {\n" + " quantile: 0.9\n"
+ " value: 112.72192\n" + " }\n" + " quantile_values {\n" + " quantile: 0.99\n"
+ " value: 112.72192\n" + " }\n" + " }\n" + "}\n");
List<Metric> metrics = writeToMetrics(timer);
Metric metric = metrics.stream().filter(Metric::hasSummary).findFirst().orElseThrow();
assertThat(metric.toString()).isEqualTo("name: \"service.requests\"\n" + "unit: \"milliseconds\"\n"
+ "summary {\n" + " data_points {\n" + " start_time_unix_nano: 1000000\n"
+ " time_unix_nano: 1000000\n" + " count: 3\n" + " sum: 198.0\n" + " quantile_values {\n"
+ " quantile: 0.5\n" + " value: 79.167488\n" + " }\n" + " quantile_values {\n"
+ " quantile: 0.9\n" + " value: 112.72192\n" + " }\n" + " quantile_values {\n"
+ " quantile: 0.99\n" + " value: 112.72192\n" + " }\n" + " }\n" + "}\n");
}

@Test
Expand All @@ -230,10 +236,11 @@ void distributionSummary() {
size.record(2233);
clock.add(otlpConfig().step());
size.record(204);

assertThat(writeToMetric(size).toString()).isEqualTo("name: \"http.response.size\"\n" + "unit: \"bytes\"\n"
+ "histogram {\n" + " data_points {\n" + " start_time_unix_nano: 1000000\n"
+ " time_unix_nano: 60001000000\n" + " count: 4\n" + " sum: 2552.0\n" + " }\n"
List<Metric> metrics = writeToMetrics(size);
Metric metric = metrics.stream().filter(Metric::hasHistogram).findFirst().orElseThrow();
assertThat(metric.toString()).isEqualTo("name: \"http.response.size\"\n" + "unit: \"bytes\"\n" + "histogram {\n"
+ " data_points {\n" + " start_time_unix_nano: 1000000\n" + " time_unix_nano: 60001000000\n"
+ " count: 4\n" + " sum: 2552.0\n" + " }\n"
+ " aggregation_temporality: AGGREGATION_TEMPORALITY_CUMULATIVE\n" + "}\n");
}

Expand Down Expand Up @@ -471,7 +478,7 @@ void distributionSummaryWithHistogram() {
+ " explicit_bounds: 3.8430716820228234E18\n" + " explicit_bounds: 4.2273788502251054E18\n"
+ " }\n" + " aggregation_temporality: AGGREGATION_TEMPORALITY_CUMULATIVE\n" + "}\n";
String[] expectedLines = expected.split("\n");
String actual = writeToMetric(size).toString();
String actual = writeToMetrics(size).stream().filter(Metric::hasHistogram).findFirst().orElseThrow().toString();
String[] actualLines = actual.split("\n");
assertThat(actualLines).hasSameSizeAs(expectedLines);
for (int i = 0; i < actualLines.length; i++) {
Expand Down Expand Up @@ -505,13 +512,14 @@ void distributionSummaryWithPercentiles() {
clock.add(otlpConfig().step());
size.record(204);

assertThat(writeToMetric(size).toString())
.isEqualTo("name: \"http.response.size\"\n" + "unit: \"bytes\"\n" + "summary {\n" + " data_points {\n"
+ " start_time_unix_nano: 1000000\n" + " time_unix_nano: 60001000000\n" + " count: 4\n"
+ " sum: 2552.0\n" + " quantile_values {\n" + " quantile: 0.5\n" + " value: 200.0\n"
+ " }\n" + " quantile_values {\n" + " quantile: 0.9\n" + " value: 200.0\n"
+ " }\n" + " quantile_values {\n" + " quantile: 0.99\n" + " value: 200.0\n"
+ " }\n" + " }\n" + "}\n");
List<Metric> metrics = writeToMetrics(size);
Metric metric = metrics.stream().filter(Metric::hasSummary).findFirst().orElseThrow();
assertThat(metric.toString()).isEqualTo("name: \"http.response.size\"\n" + "unit: \"bytes\"\n" + "summary {\n"
+ " data_points {\n" + " start_time_unix_nano: 1000000\n" + " time_unix_nano: 60001000000\n"
+ " count: 4\n" + " sum: 2552.0\n" + " quantile_values {\n" + " quantile: 0.5\n"
+ " value: 200.0\n" + " }\n" + " quantile_values {\n" + " quantile: 0.9\n"
+ " value: 200.0\n" + " }\n" + " quantile_values {\n" + " quantile: 0.99\n"
+ " value: 200.0\n" + " }\n" + " }\n" + "}\n");
}

private double extractValue(String line) {
Expand All @@ -525,19 +533,22 @@ void longTaskTimer() {
LongTaskTimer.Sample task2 = taskTimer.start();
this.clock.add(otlpConfig().step().multipliedBy(3));

assertThat(writeToMetric(taskTimer).toString())
.isEqualTo("name: \"checkout.batch\"\n" + "unit: \"milliseconds\"\n" + "histogram {\n" + " data_points {\n"
+ " start_time_unix_nano: 1000000\n" + " time_unix_nano: 180001000000\n" + " count: 2\n"
+ " sum: 360000.0\n" + " }\n"
+ " aggregation_temporality: AGGREGATION_TEMPORALITY_CUMULATIVE\n" + "}\n");
List<Metric> metrics = writeToMetrics(taskTimer);
Metric metric = metrics.stream().filter(Metric::hasHistogram).findFirst().orElseThrow();
assertThat(metric.toString()).isEqualTo("name: \"checkout.batch\"\n" + "unit: \"milliseconds\"\n"
+ "histogram {\n" + " data_points {\n" + " start_time_unix_nano: 1000000\n"
+ " time_unix_nano: 180001000000\n" + " count: 2\n" + " sum: 360000.0\n" + " }\n"
+ " aggregation_temporality: AGGREGATION_TEMPORALITY_CUMULATIVE\n" + "}\n");

task1.stop();
task2.stop();
this.clock.add(otlpConfig().step());

// this is not right that count/sum reset, but it's the same thing we do with
// prometheus
assertThat(writeToMetric(taskTimer).toString())
metrics = writeToMetrics(taskTimer);
metric = metrics.stream().filter(Metric::hasHistogram).findFirst().orElseThrow();
assertThat(metric.toString())
.isEqualTo("name: \"checkout.batch\"\n" + "unit: \"milliseconds\"\n" + "histogram {\n" + " data_points {\n"
+ " start_time_unix_nano: 1000000\n" + " time_unix_nano: 240001000000\n" + " sum: 0.0\n"
+ " }\n" + " aggregation_temporality: AGGREGATION_TEMPORALITY_CUMULATIVE\n" + "}\n");
Expand Down Expand Up @@ -568,7 +579,8 @@ void testExponentialHistogramWithTimer() {
timer.record(Duration.ofMillis(100));
timer.record(Duration.ofMillis(1000));

Metric metric = writeToMetric(timer);
List<Metric> metrics = writeToMetrics(timer);
Metric metric = metrics.stream().filter(Metric::hasExponentialHistogram).findFirst().orElseThrow();
assertThat(metric.getExponentialHistogram().getDataPointsCount()).isPositive();

ExponentialHistogramDataPoint exponentialHistogramDataPoint = metric.getExponentialHistogram().getDataPoints(0);
Expand All @@ -585,7 +597,8 @@ void testExponentialHistogramWithTimer() {
clock.add(exponentialHistogramOtlpConfig().step());
timer.record(Duration.ofMillis(10000));

metric = writeToMetric(timer);
metrics = writeToMetrics(timer);
metric = metrics.stream().filter(Metric::hasExponentialHistogram).findFirst().orElseThrow();
exponentialHistogramDataPoint = metric.getExponentialHistogram().getDataPoints(0);
assertThat(exponentialHistogramDataPoint.getTimeUnixNano() - previousEndTime)
.isEqualTo(otlpConfig().step().toNanos());
Expand All @@ -611,7 +624,8 @@ void testExponentialHistogramDs() {
ds.record(100);
ds.record(1000);

Metric metric = writeToMetric(ds);
List<Metric> metrics = writeToMetrics(ds);
Metric metric = metrics.stream().filter(Metric::hasExponentialHistogram).findFirst().orElseThrow();
assertThat(metric.getExponentialHistogram().getDataPointsCount()).isPositive();

ExponentialHistogramDataPoint exponentialHistogramDataPoint = metric.getExponentialHistogram().getDataPoints(0);
Expand All @@ -628,7 +642,8 @@ void testExponentialHistogramDs() {
clock.add(exponentialHistogramOtlpConfig().step());
ds.record(10000);

metric = writeToMetric(ds);
metrics = writeToMetrics(ds);
metric = metrics.stream().filter(Metric::hasExponentialHistogram).findFirst().orElseThrow();
exponentialHistogramDataPoint = metric.getExponentialHistogram().getDataPoints(0);
assertThat(exponentialHistogramDataPoint.getTimeUnixNano() - previousEndTime)
.isEqualTo(otlpConfig().step().toNanos());
Expand Down
Loading