diff --git a/CHANGELOG.md b/CHANGELOG.md index 089a76a0f1d..8248e8d8b35 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -136,6 +136,8 @@ For notes on migrating to 2.x / 0.200.x see [the upgrade guide](doc/upgrade-to-2 * (user-facing): `DEFAULT_ATTRIBUTE_VALUE_COUNT_LIMIT` has been removed, please use `128` instead * (user-facing): `DEFAULT_SPAN_ATTRIBUTE_PER_EVENT_COUNT_LIMIT` has been removed, please use `128` instead * (user-facing): `DEFAULT_SPAN_ATTRIBUTE_PER_LINK_COUNT_LIMIT` has been removed, please use `128` instead +* refactor!: convert all SDK timestamps from HrTime to bigint [#5522](https://github.com/open-telemetry/opentelemetry-js/pull/5522) @dyladan + * Times are now internally represented as `bigint` literals. This simplifies time math and makes the export pipeline more efficient. ### :rocket: (Enhancement) diff --git a/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts b/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts index 258fedc9788..fe09b2f4d5d 100644 --- a/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts +++ b/experimental/packages/opentelemetry-exporter-prometheus/src/PrometheusSerializer.ts @@ -23,8 +23,8 @@ import { DataPoint, Histogram, } from '@opentelemetry/sdk-metrics'; -import { hrTimeToMilliseconds } from '@opentelemetry/core'; import { Resource } from '@opentelemetry/resources'; +import { nanosecondsToMilliseconds } from '@opentelemetry/core'; type PrometheusDataTypeLiteral = | 'counter' @@ -282,7 +282,7 @@ export class PrometheusSerializer { name = enforcePrometheusNamingConvention(name, data); const { value, attributes } = dataPoint; - const timestamp = hrTimeToMilliseconds(dataPoint.endTime); + const timestamp = nanosecondsToMilliseconds(dataPoint.endTimeUnixNano); results += stringify( name, attributes, @@ -303,7 +303,7 @@ export class PrometheusSerializer { name = enforcePrometheusNamingConvention(name, data); const attributes = 
dataPoint.attributes; const histogram = dataPoint.value; - const timestamp = hrTimeToMilliseconds(dataPoint.endTime); + const timestamp = nanosecondsToMilliseconds(dataPoint.endTimeUnixNano); /** Histogram["bucket"] is not typed with `number` */ for (const key of ['count', 'sum'] as ('count' | 'sum')[]) { const value = histogram[key]; diff --git a/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/assertionUtils.ts b/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/assertionUtils.ts index fa67861326b..487bd460add 100644 --- a/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/assertionUtils.ts +++ b/experimental/packages/opentelemetry-instrumentation-grpc/test/utils/assertionUtils.ts @@ -18,10 +18,6 @@ import { SpanKind, SpanStatusCode } from '@opentelemetry/api'; import * as assert from 'assert'; import type { status as GrpcStatus } from '@grpc/grpc-js'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; -import { - hrTimeToMilliseconds, - hrTimeToMicroseconds, -} from '@opentelemetry/core'; import { SEMATTRS_NET_PEER_NAME, SEMATTRS_NET_PEER_PORT, @@ -52,13 +48,11 @@ export const assertSpan = ( assert.strictEqual(span.spanContext().spanId.length, 16); assert.strictEqual(span.kind, kind); - assert.ok(span.endTime); + assert.ok(span.endTimeUnixNano); assert.strictEqual(span.links.length, 0); - assert.ok( - hrTimeToMicroseconds(span.startTime) < hrTimeToMicroseconds(span.endTime) - ); - assert.ok(hrTimeToMilliseconds(span.endTime) > 0); + assert.ok(span.startTimeUnixNano < span.endTimeUnixNano); + assert.ok(span.endTimeUnixNano > 0); if (span.kind === SpanKind.SERVER) { assert.ok(span.spanContext()); diff --git a/experimental/packages/opentelemetry-instrumentation-http/test/utils/assertSpan.ts b/experimental/packages/opentelemetry-instrumentation-http/test/utils/assertSpan.ts index 25ab861fb2e..a2e4ffd320b 100644 --- a/experimental/packages/opentelemetry-instrumentation-http/test/utils/assertSpan.ts +++ 
b/experimental/packages/opentelemetry-instrumentation-http/test/utils/assertSpan.ts @@ -19,7 +19,6 @@ import { SpanStatus, Exception, } from '@opentelemetry/api'; -import { hrTimeToNanoseconds } from '@opentelemetry/core'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import { SEMATTRS_HTTP_METHOD, @@ -108,8 +107,11 @@ export const assertSpan = ( } ); - assert.ok(span.endTime, 'must be finished'); - assert.ok(hrTimeToNanoseconds(span.duration), 'must have positive duration'); + assert.ok(span.ended, 'must be finished'); + assert.ok( + span.endTimeUnixNano > span.startTimeUnixNano, + 'must have positive duration' + ); if (validations.reqHeaders) { const userAgent = validations.reqHeaders['user-agent']; diff --git a/experimental/packages/otlp-transformer/src/common/utils.ts b/experimental/packages/otlp-transformer/src/common/utils.ts index 756e80f4a4d..8b9cb10064a 100644 --- a/experimental/packages/otlp-transformer/src/common/utils.ts +++ b/experimental/packages/otlp-transformer/src/common/utils.ts @@ -16,7 +16,6 @@ import type { OtlpEncodingOptions, Fixed64, LongBits } from './internal-types'; import { HrTime } from '@opentelemetry/api'; -import { hrTimeToNanoseconds } from '@opentelemetry/core'; import { hexToBinary } from './hex-to-binary'; export function hrTimeToNanos(hrTime: HrTime): bigint { @@ -30,20 +29,15 @@ export function toLongBits(value: bigint): LongBits { return { low, high }; } -export function encodeAsLongBits(hrTime: HrTime): LongBits { - const nanos = hrTimeToNanos(hrTime); +export function encodeAsLongBits(nanos: bigint): LongBits { return toLongBits(nanos); } -export function encodeAsString(hrTime: HrTime): string { - const nanos = hrTimeToNanos(hrTime); +export function encodeAsString(nanos: bigint): string { return nanos.toString(); } -const encodeTimestamp = - typeof BigInt !== 'undefined' ? 
encodeAsString : hrTimeToNanoseconds; - -export type HrTimeEncodeFunction = (hrTime: HrTime) => Fixed64; +export type UnixNanosEncodeFunction = (nanos: bigint) => Fixed64; export type SpanContextEncodeFunction = ( spanContext: string ) => string | Uint8Array; @@ -52,7 +46,7 @@ export type OptionalSpanContextEncodeFunction = ( ) => string | Uint8Array | undefined; export interface Encoder { - encodeHrTime: HrTimeEncodeFunction; + encodeBigIntNanos: UnixNanosEncodeFunction; encodeSpanContext: SpanContextEncodeFunction; encodeOptionalSpanContext: OptionalSpanContextEncodeFunction; } @@ -67,7 +61,7 @@ function optionalHexToBinary(str: string | undefined): Uint8Array | undefined { } const DEFAULT_ENCODER: Encoder = { - encodeHrTime: encodeAsLongBits, + encodeBigIntNanos: toLongBits, encodeSpanContext: hexToBinary, encodeOptionalSpanContext: optionalHexToBinary, }; @@ -80,7 +74,7 @@ export function getOtlpEncoder(options?: OtlpEncodingOptions): Encoder { const useLongBits = options.useLongBits ?? true; const useHex = options.useHex ?? false; return { - encodeHrTime: useLongBits ? encodeAsLongBits : encodeTimestamp, + encodeBigIntNanos: useLongBits ? toLongBits : encodeAsString, encodeSpanContext: useHex ? identity : hexToBinary, encodeOptionalSpanContext: useHex ? 
identity : optionalHexToBinary, }; diff --git a/experimental/packages/otlp-transformer/src/logs/internal.ts b/experimental/packages/otlp-transformer/src/logs/internal.ts index 8c54e72a097..880db7909f6 100644 --- a/experimental/packages/otlp-transformer/src/logs/internal.ts +++ b/experimental/packages/otlp-transformer/src/logs/internal.ts @@ -94,8 +94,8 @@ function logRecordsToResourceLogs( function toLogRecord(log: ReadableLogRecord, encoder: Encoder): ILogRecord { return { - timeUnixNano: encoder.encodeHrTime(log.hrTime), - observedTimeUnixNano: encoder.encodeHrTime(log.hrTimeObserved), + timeUnixNano: encoder.encodeBigIntNanos(log.timeUnixNano), + observedTimeUnixNano: encoder.encodeBigIntNanos(log.timeUnixNanoObserved), severityNumber: toSeverityNumber(log.severityNumber), severityText: log.severityText, body: toAnyValue(log.body), diff --git a/experimental/packages/otlp-transformer/src/metrics/internal.ts b/experimental/packages/otlp-transformer/src/metrics/internal.ts index edcc0cddce8..352b8e8d661 100644 --- a/experimental/packages/otlp-transformer/src/metrics/internal.ts +++ b/experimental/packages/otlp-transformer/src/metrics/internal.ts @@ -118,8 +118,8 @@ function toSingularDataPoint( ) { const out: INumberDataPoint = { attributes: toAttributes(dataPoint.attributes), - startTimeUnixNano: encoder.encodeHrTime(dataPoint.startTime), - timeUnixNano: encoder.encodeHrTime(dataPoint.endTime), + startTimeUnixNano: encoder.encodeBigIntNanos(dataPoint.startTimeUnixNano), + timeUnixNano: encoder.encodeBigIntNanos(dataPoint.endTimeUnixNano), }; switch (valueType) { @@ -161,8 +161,8 @@ function toHistogramDataPoints( sum: histogram.sum, min: histogram.min, max: histogram.max, - startTimeUnixNano: encoder.encodeHrTime(dataPoint.startTime), - timeUnixNano: encoder.encodeHrTime(dataPoint.endTime), + startTimeUnixNano: encoder.encodeBigIntNanos(dataPoint.startTimeUnixNano), + timeUnixNano: encoder.encodeBigIntNanos(dataPoint.endTimeUnixNano), }; }); } @@ -189,8 +189,8 @@ 
function toExponentialHistogramDataPoints( }, scale: histogram.scale, zeroCount: histogram.zeroCount, - startTimeUnixNano: encoder.encodeHrTime(dataPoint.startTime), - timeUnixNano: encoder.encodeHrTime(dataPoint.endTime), + startTimeUnixNano: encoder.encodeBigIntNanos(dataPoint.startTimeUnixNano), + timeUnixNano: encoder.encodeBigIntNanos(dataPoint.endTimeUnixNano), }; }); } diff --git a/experimental/packages/otlp-transformer/src/trace/internal.ts b/experimental/packages/otlp-transformer/src/trace/internal.ts index bbc4d4a53b8..841072a9346 100644 --- a/experimental/packages/otlp-transformer/src/trace/internal.ts +++ b/experimental/packages/otlp-transformer/src/trace/internal.ts @@ -48,8 +48,8 @@ export function sdkSpanToOtlpSpan(span: ReadableSpan, encoder: Encoder): ISpan { name: span.name, // Span kind is offset by 1 because the API does not define a value for unset kind: span.kind == null ? 0 : span.kind + 1, - startTimeUnixNano: encoder.encodeHrTime(span.startTime), - endTimeUnixNano: encoder.encodeHrTime(span.endTime), + startTimeUnixNano: encoder.encodeBigIntNanos(span.startTimeUnixNano), + endTimeUnixNano: encoder.encodeBigIntNanos(span.endTimeUnixNano), attributes: toAttributes(span.attributes), droppedAttributesCount: span.droppedAttributesCount, events: span.events.map(event => toOtlpSpanEvent(event, encoder)), @@ -83,7 +83,7 @@ export function toOtlpSpanEvent( ? 
toAttributes(timedEvent.attributes) : [], name: timedEvent.name, - timeUnixNano: encoder.encodeHrTime(timedEvent.time), + timeUnixNano: encoder.encodeBigIntNanos(timedEvent.timeUnixNano), droppedAttributesCount: timedEvent.droppedAttributesCount || 0, }; } diff --git a/experimental/packages/otlp-transformer/test/common.test.ts b/experimental/packages/otlp-transformer/test/common.test.ts index 38444c48914..d5e7c5807d9 100644 --- a/experimental/packages/otlp-transformer/test/common.test.ts +++ b/experimental/packages/otlp-transformer/test/common.test.ts @@ -72,7 +72,7 @@ describe('common', () => { describe('otlp encoder', () => { it('defaults to long timestamps and binary encoding given no options', () => { const encoder = getOtlpEncoder(); - assert.deepStrictEqual(encoder.encodeHrTime([1697978649, 99870675]), { + assert.deepStrictEqual(encoder.encodeBigIntNanos(1697978649099870675n), { low: 3352011219, high: 395341461, }); @@ -92,7 +92,7 @@ describe('common', () => { it('defaults to long timestamps and base64 encoding given empty options', () => { const encoder = getOtlpEncoder({}); - assert.deepStrictEqual(encoder.encodeHrTime([1697978649, 99870675]), { + assert.deepStrictEqual(encoder.encodeBigIntNanos(1697978649099870675n), { low: 3352011219, high: 395341461, }); @@ -110,14 +110,6 @@ describe('common', () => { ); }); - it('can encode HrTime as string', () => { - const encoder = getOtlpEncoder({ useLongBits: false }); - assert.deepStrictEqual( - encoder.encodeHrTime([1697978649, 99870675]), - '1697978649099870675' - ); - }); - it('can encode span context as hex', () => { const encoder = getOtlpEncoder({ useHex: true }); assert.deepStrictEqual(encoder.encodeSpanContext(traceId), traceId); diff --git a/experimental/packages/otlp-transformer/test/logs.test.ts b/experimental/packages/otlp-transformer/test/logs.test.ts index 05da91f89ab..fefafc170e1 100644 --- a/experimental/packages/otlp-transformer/test/logs.test.ts +++ 
b/experimental/packages/otlp-transformer/test/logs.test.ts @@ -181,6 +181,8 @@ describe('Logs', () => { name: 'scope_name_2', }; const log_fragment_1 = { + timeUnixNano: 1680253513123241635n, + timeUnixNanoObserved: 1683526948965142784n, hrTime: [1680253513, 123241635] as HrTime, hrTimeObserved: [1683526948, 965142784] as HrTime, attributes: { @@ -197,6 +199,8 @@ describe('Logs', () => { }, }; const log_fragment_2 = { + timeUnixNano: 1680253797687038506n, + timeUnixNanoObserved: 1680253797687038506n, hrTime: [1680253797, 687038506] as HrTime, hrTimeObserved: [1680253797, 687038506] as HrTime, attributes: { diff --git a/experimental/packages/otlp-transformer/test/metrics.test.ts b/experimental/packages/otlp-transformer/test/metrics.test.ts index f97d6497b62..c9831e2f308 100644 --- a/experimental/packages/otlp-transformer/test/metrics.test.ts +++ b/experimental/packages/otlp-transformer/test/metrics.test.ts @@ -24,14 +24,17 @@ import { import * as assert from 'assert'; import { createExportMetricsServiceRequest } from '../src/metrics/internal'; import { EAggregationTemporality } from '../src/metrics/internal-types'; -import { hrTime, hrTimeToNanoseconds } from '@opentelemetry/core'; +import { getTimeOrigin, millisecondsToNanoseconds } from '@opentelemetry/core'; import * as root from '../src/generated/root'; -import { encodeAsLongBits, encodeAsString } from '../src/common/utils'; +import { toLongBits } from '../src/common/utils'; import { ProtobufMetricsSerializer } from '../src/metrics/protobuf'; import { JsonMetricsSerializer } from '../src/metrics/json'; -const START_TIME = hrTime(); -const END_TIME = hrTime(); +const START_TIME = millisecondsToNanoseconds( + performance.now() + getTimeOrigin() +); +const END_TIME = millisecondsToNanoseconds(performance.now() + getTimeOrigin()); + const ATTRIBUTES = { 'string-attribute': 'some attribute value', 'int-attribute': 1, @@ -119,8 +122,10 @@ describe('Metrics', () => { dataPoints: [ { value: value, - startTime: 
START_TIME, - endTime: END_TIME, + startTimeUnixNano: START_TIME, + endTimeUnixNano: END_TIME, + startTime: [0, 0], + endTime: [0, 0], attributes: ATTRIBUTES, }, ], @@ -144,8 +149,10 @@ describe('Metrics', () => { dataPoints: [ { value: value, - startTime: START_TIME, - endTime: END_TIME, + startTimeUnixNano: START_TIME, + endTimeUnixNano: END_TIME, + startTime: [0, 0], + endTime: [0, 0], attributes: ATTRIBUTES, }, ], @@ -169,8 +176,10 @@ describe('Metrics', () => { dataPoints: [ { value: value, - startTime: START_TIME, - endTime: END_TIME, + startTimeUnixNano: START_TIME, + endTimeUnixNano: END_TIME, + startTime: [0, 0], + endTime: [0, 0], attributes: ATTRIBUTES, }, ], @@ -194,8 +203,10 @@ describe('Metrics', () => { dataPoints: [ { value: value, - startTime: START_TIME, - endTime: END_TIME, + startTimeUnixNano: START_TIME, + endTimeUnixNano: END_TIME, + startTime: [0, 0], + endTime: [0, 0], attributes: ATTRIBUTES, }, ], @@ -215,8 +226,10 @@ describe('Metrics', () => { dataPoints: [ { value: value, - startTime: START_TIME, - endTime: END_TIME, + startTimeUnixNano: START_TIME, + endTimeUnixNano: END_TIME, + startTime: [0, 0], + endTime: [0, 0], attributes: ATTRIBUTES, }, ], @@ -253,8 +266,10 @@ describe('Metrics', () => { counts: counts, }, }, - startTime: START_TIME, - endTime: END_TIME, + startTimeUnixNano: START_TIME, + endTimeUnixNano: END_TIME, + startTime: [0, 0], + endTime: [0, 0], attributes: ATTRIBUTES, }, ], @@ -293,8 +308,10 @@ describe('Metrics', () => { positive: positive, negative: negative, }, - startTime: START_TIME, - endTime: END_TIME, + startTimeUnixNano: START_TIME, + endTimeUnixNano: END_TIME, + startTime: [0, 0], + endTime: [0, 0], attributes: ATTRIBUTES, }, ], @@ -346,8 +363,8 @@ describe('Metrics', () => { dataPoints: [ { attributes: expectedAttributes, - startTimeUnixNano: encodeAsLongBits(START_TIME), - timeUnixNano: encodeAsLongBits(END_TIME), + startTimeUnixNano: toLongBits(START_TIME), + timeUnixNano: toLongBits(END_TIME), asInt: 10, }, 
], @@ -389,8 +406,8 @@ describe('Metrics', () => { dataPoints: [ { attributes: expectedAttributes, - startTimeUnixNano: encodeAsLongBits(START_TIME), - timeUnixNano: encodeAsLongBits(END_TIME), + startTimeUnixNano: toLongBits(START_TIME), + timeUnixNano: toLongBits(END_TIME), asInt: 10, }, ], @@ -433,8 +450,8 @@ describe('Metrics', () => { dataPoints: [ { attributes: expectedAttributes, - startTimeUnixNano: encodeAsLongBits(START_TIME), - timeUnixNano: encodeAsLongBits(END_TIME), + startTimeUnixNano: toLongBits(START_TIME), + timeUnixNano: toLongBits(END_TIME), asInt: 10, }, ], @@ -477,8 +494,8 @@ describe('Metrics', () => { dataPoints: [ { attributes: expectedAttributes, - startTimeUnixNano: encodeAsLongBits(START_TIME), - timeUnixNano: encodeAsLongBits(END_TIME), + startTimeUnixNano: toLongBits(START_TIME), + timeUnixNano: toLongBits(END_TIME), asInt: 10, }, ], @@ -519,8 +536,8 @@ describe('Metrics', () => { dataPoints: [ { attributes: expectedAttributes, - startTimeUnixNano: encodeAsLongBits(START_TIME), - timeUnixNano: encodeAsLongBits(END_TIME), + startTimeUnixNano: toLongBits(START_TIME), + timeUnixNano: toLongBits(END_TIME), asDouble: 10.5, }, ], @@ -577,8 +594,8 @@ describe('Metrics', () => { sum: 9, min: 1, max: 8, - startTimeUnixNano: encodeAsLongBits(START_TIME), - timeUnixNano: encodeAsLongBits(END_TIME), + startTimeUnixNano: toLongBits(START_TIME), + timeUnixNano: toLongBits(END_TIME), }, ], }, @@ -631,8 +648,8 @@ describe('Metrics', () => { sum: 9, min: undefined, max: undefined, - startTimeUnixNano: encodeAsLongBits(START_TIME), - timeUnixNano: encodeAsLongBits(END_TIME), + startTimeUnixNano: toLongBits(START_TIME), + timeUnixNano: toLongBits(END_TIME), }, ], }, @@ -697,8 +714,8 @@ describe('Metrics', () => { bucketCounts: [1, 0, 0, 0, 1, 0, 1, 0], }, negative: { offset: 0, bucketCounts: [0] }, - startTimeUnixNano: encodeAsLongBits(START_TIME), - timeUnixNano: encodeAsLongBits(END_TIME), + startTimeUnixNano: toLongBits(START_TIME), + timeUnixNano: 
toLongBits(END_TIME), }, ], }, @@ -759,8 +776,8 @@ describe('Metrics', () => { bucketCounts: [1, 0, 0, 0, 1, 0, 1, 0], }, negative: { offset: 0, bucketCounts: [0] }, - startTimeUnixNano: encodeAsLongBits(START_TIME), - timeUnixNano: encodeAsLongBits(END_TIME), + startTimeUnixNano: toLongBits(START_TIME), + timeUnixNano: toLongBits(END_TIME), }, ], }, @@ -813,8 +830,8 @@ describe('Metrics', () => { dataPoints: [ { attributes: expectedAttributes, - startTimeUnixNano: hrTimeToNanoseconds(START_TIME), - timeUnixNano: hrTimeToNanoseconds(END_TIME), + startTimeUnixNano: Number(START_TIME), + timeUnixNano: Number(END_TIME), asInt: 10, }, ], @@ -886,8 +903,8 @@ describe('Metrics', () => { dataPoints: [ { attributes: expectedAttributes, - startTimeUnixNano: encodeAsString(START_TIME), - timeUnixNano: encodeAsString(END_TIME), + startTimeUnixNano: START_TIME.toString(), + timeUnixNano: END_TIME.toString(), asInt: 10, }, ], diff --git a/experimental/packages/otlp-transformer/test/trace.test.ts b/experimental/packages/otlp-transformer/test/trace.test.ts index 2cada313d13..14c07c417f5 100644 --- a/experimental/packages/otlp-transformer/test/trace.test.ts +++ b/experimental/packages/otlp-transformer/test/trace.test.ts @@ -250,13 +250,13 @@ describe('Trace', () => { traceFlags: TraceFlags.SAMPLED, }, attributes: { 'string-attribute': 'some attribute value' }, - duration: [1, 300000000], - endTime: [1640715558, 642725388], + endTimeUnixNano: 1640715558642725388n, ended: true, events: [ { name: 'some event', - time: [1640715558, 542725388], + timeUnixNano: 1640715558542725388n, + time: [0, 0], attributes: { 'event-attribute': 'some string value', }, @@ -284,13 +284,15 @@ describe('Trace', () => { ], name: 'span-name', resource, - startTime: [1640715557, 342725388], + startTimeUnixNano: 1640715557342725388n, status: { code: SpanStatusCode.OK, }, droppedAttributesCount: 0, droppedEventsCount: 0, droppedLinksCount: 0, + startTime: [0, 0], // wrong on purpose - included for 
compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. should not be used }; }); diff --git a/experimental/packages/sdk-logs/src/LogRecord.ts b/experimental/packages/sdk-logs/src/LogRecord.ts index f1f9c99506e..f04370887db 100644 --- a/experimental/packages/sdk-logs/src/LogRecord.ts +++ b/experimental/packages/sdk-logs/src/LogRecord.ts @@ -18,9 +18,9 @@ import { AttributeValue, diag } from '@opentelemetry/api'; import type * as logsAPI from '@opentelemetry/api-logs'; import * as api from '@opentelemetry/api'; import { - timeInputToHrTime, isAttributeValue, InstrumentationScope, + nanosToHrTime, } from '@opentelemetry/core'; import type { Resource } from '@opentelemetry/resources'; @@ -28,10 +28,12 @@ import type { ReadableLogRecord } from './export/ReadableLogRecord'; import type { LogRecordLimits } from './types'; import { AnyValue, LogAttributes, LogBody } from '@opentelemetry/api-logs'; import { LoggerProviderSharedState } from './internal/LoggerProviderSharedState'; +import { timeInputToNano } from '@opentelemetry/core'; +import { HrTime } from '@opentelemetry/api'; export class LogRecord implements ReadableLogRecord { - readonly hrTime: api.HrTime; - readonly hrTimeObserved: api.HrTime; + readonly timeUnixNano: bigint; + readonly timeUnixNanoObserved: bigint; readonly spanContext?: api.SpanContext; readonly resource: Resource; readonly instrumentationScope: InstrumentationScope; @@ -78,6 +80,14 @@ export class LogRecord implements ReadableLogRecord { return this.totalAttributesCount - Object.keys(this.attributes).length; } + get hrTime(): HrTime { + return nanosToHrTime(this.timeUnixNano); + } + + get hrTimeObserved(): HrTime { + return nanosToHrTime(this.timeUnixNanoObserved); + } + constructor( _sharedState: LoggerProviderSharedState, instrumentationScope: InstrumentationScope, @@ -94,8 +104,8 @@ export class LogRecord implements ReadableLogRecord { } = logRecord; const now = Date.now(); - this.hrTime = 
timeInputToHrTime(timestamp ?? now); - this.hrTimeObserved = timeInputToHrTime(observedTimestamp ?? now); + this.timeUnixNano = timeInputToNano(timestamp ?? now); + this.timeUnixNanoObserved = timeInputToNano(observedTimestamp ?? now); if (context) { const spanContext = api.trace.getSpanContext(context); diff --git a/experimental/packages/sdk-logs/src/export/ConsoleLogRecordExporter.ts b/experimental/packages/sdk-logs/src/export/ConsoleLogRecordExporter.ts index d7de3c08c31..6cc350cd545 100644 --- a/experimental/packages/sdk-logs/src/export/ConsoleLogRecordExporter.ts +++ b/experimental/packages/sdk-logs/src/export/ConsoleLogRecordExporter.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { ExportResult, hrTimeToMicroseconds } from '@opentelemetry/core'; +import { ExportResult } from '@opentelemetry/core'; import { ExportResultCode } from '@opentelemetry/core'; import type { ReadableLogRecord } from './ReadableLogRecord'; @@ -58,7 +58,7 @@ export class ConsoleLogRecordExporter implements LogRecordExporter { attributes: logRecord.resource.attributes, }, instrumentationScope: logRecord.instrumentationScope, - timestamp: hrTimeToMicroseconds(logRecord.hrTime), + timestamp: logRecord.timeUnixNano, traceId: logRecord.spanContext?.traceId, spanId: logRecord.spanContext?.spanId, traceFlags: logRecord.spanContext?.traceFlags, diff --git a/experimental/packages/sdk-logs/src/export/ReadableLogRecord.ts b/experimental/packages/sdk-logs/src/export/ReadableLogRecord.ts index ab110104d1f..32e181b589f 100644 --- a/experimental/packages/sdk-logs/src/export/ReadableLogRecord.ts +++ b/experimental/packages/sdk-logs/src/export/ReadableLogRecord.ts @@ -24,7 +24,11 @@ import type { } from '@opentelemetry/api-logs'; export interface ReadableLogRecord { + readonly timeUnixNano: bigint; + readonly timeUnixNanoObserved: bigint; + /** @deprecated please use timeUnixNano */ readonly hrTime: HrTime; + /** @deprecated please use timeUnixNanoObserved */ readonly hrTimeObserved: 
HrTime; readonly spanContext?: SpanContext; readonly severityText?: string; diff --git a/experimental/packages/sdk-logs/test/common/LogRecord.test.ts b/experimental/packages/sdk-logs/test/common/LogRecord.test.ts index 573fd817bf8..cf96cfab687 100644 --- a/experimental/packages/sdk-logs/test/common/LogRecord.test.ts +++ b/experimental/packages/sdk-logs/test/common/LogRecord.test.ts @@ -25,8 +25,6 @@ import { TraceFlags, } from '@opentelemetry/api'; import * as logsAPI from '@opentelemetry/api-logs'; -import type { HrTime } from '@opentelemetry/api'; -import { hrTimeToMilliseconds, timeInputToHrTime } from '@opentelemetry/core'; import { defaultResource } from '@opentelemetry/resources'; import { @@ -38,8 +36,11 @@ import { import { invalidAttributes, validAttributes } from './utils'; import { LoggerProviderSharedState } from '../../src/internal/LoggerProviderSharedState'; import { reconfigureLimits } from '../../src/config'; - -const performanceTimeOrigin: HrTime = [1, 1]; +import { + millisecondsToNanoseconds, + timeInputToNano, + getTimeOrigin, +} from '@opentelemetry/core'; const setup = (logRecordLimits?: LogRecordLimits, data?: logsAPI.LogRecord) => { const instrumentationScope = { @@ -70,10 +71,9 @@ describe('LogRecord', () => { it('should have a default timestamp', () => { const { logRecord } = setup(); - assert.ok(logRecord.hrTime !== undefined); + assert.ok(logRecord.timeUnixNano !== undefined); assert.ok( - hrTimeToMilliseconds(logRecord.hrTime) > - hrTimeToMilliseconds(performanceTimeOrigin) + logRecord.timeUnixNano > millisecondsToNanoseconds(getTimeOrigin()) ); }); @@ -100,8 +100,8 @@ describe('LogRecord', () => { logRecordData ); assert.deepStrictEqual( - logRecord.hrTime, - timeInputToHrTime(logRecordData.timestamp!) + logRecord.timeUnixNano, + timeInputToNano(logRecordData.timestamp!) 
); assert.strictEqual( logRecord.severityNumber, diff --git a/experimental/packages/shim-opencensus/src/metric-transform.ts b/experimental/packages/shim-opencensus/src/metric-transform.ts index 2ea91f0d5ae..6bda3fcdddf 100644 --- a/experimental/packages/shim-opencensus/src/metric-transform.ts +++ b/experimental/packages/shim-opencensus/src/metric-transform.ts @@ -16,6 +16,7 @@ import * as oc from '@opencensus/core'; import { Attributes, HrTime, ValueType, diag } from '@opentelemetry/api'; +import { hrTimeToNanoseconds } from '@opentelemetry/core'; import { AggregationTemporality, DataPoint, @@ -34,7 +35,6 @@ interface MappedType { | DataPointType.SUM | DataPointType.HISTOGRAM; } -const ZEROED_HRTIME: HrTime = [0, 0]; export function mapOcMetric(metric: oc.Metric): MetricData | null { const { description, name, unit, type } = metric.descriptor; @@ -169,16 +169,19 @@ function dataPoints( const attributes = zipOcLabels(metric.descriptor.labelKeys, ts.labelValues); // use zeroed hrTime if it is undefined, which probably shouldn't happen - const startTime = ocTimestampToHrTime(ts.startTimestamp) ?? ZEROED_HRTIME; + const startTimeUnixNano = ocTimestampToNanos(ts.startTimestamp) ?? 0n; + const startTime = ocTimestampToHrTime(ts.startTimestamp) ?? [0, 0]; // points should be an array with a single value, so this will return a single point per // attribute set. return ts.points.map( (point): DataPoint => ({ + startTimeUnixNano, startTime, attributes, value: valueMapper(point.value), - endTime: ocTimestampToHrTime(point.timestamp) ?? ZEROED_HRTIME, + endTimeUnixNano: ocTimestampToNanos(point.timestamp) ?? 0n, + endTime: ocTimestampToHrTime(point.timestamp) ?? [0, 0], }) ); }); @@ -191,6 +194,13 @@ function ocTimestampToHrTime(ts: oc.Timestamp | undefined): HrTime | null { return [ts.seconds, ts.nanos ?? 
0]; } +function ocTimestampToNanos(ts: oc.Timestamp | undefined): bigint | null { + if (ts === undefined || ts.seconds === null) { + return null; + } + return hrTimeToNanoseconds([ts.seconds, ts.nanos ?? 0]); +} + function zipOcLabels( labelKeys: oc.LabelKey[], labelValues: oc.LabelValue[] diff --git a/experimental/packages/shim-opencensus/test/ShimSpan.test.ts b/experimental/packages/shim-opencensus/test/ShimSpan.test.ts index b8c84ca5706..670b9686ab6 100644 --- a/experimental/packages/shim-opencensus/test/ShimSpan.test.ts +++ b/experimental/packages/shim-opencensus/test/ShimSpan.test.ts @@ -66,7 +66,7 @@ describe('ShimSpan', () => { }); assert.strictEqual(span.events.length, 1); - const [{ time, ...event }] = span.events; + const [{ timeUnixNano, time, ...event }] = span.events; assert.deepStrictEqual(event, { attributes: { foo: 'bar', @@ -85,7 +85,7 @@ describe('ShimSpan', () => { }); assert.strictEqual(span.events.length, 1); - const [{ time, ...event }] = span.events; + const [{ timeUnixNano, time, ...event }] = span.events; assert.deepStrictEqual(event, { attributes: { 'message.event.size.compressed': 15, diff --git a/experimental/packages/shim-opencensus/test/metric-transform.test.ts b/experimental/packages/shim-opencensus/test/metric-transform.test.ts index afab45388d5..ceb5c596b1a 100644 --- a/experimental/packages/shim-opencensus/test/metric-transform.test.ts +++ b/experimental/packages/shim-opencensus/test/metric-transform.test.ts @@ -55,8 +55,10 @@ describe('metric-transform', () => { dataPoints: [ { attributes: { key1: 'value1', key2: 'value2' }, - endTime: [20, 20], + endTimeUnixNano: 20_000_000_020n, + startTimeUnixNano: 10_000_000_010n, startTime: [10, 10], + endTime: [20, 20], value: 5, }, ], @@ -97,8 +99,10 @@ describe('metric-transform', () => { dataPoints: [ { attributes: { key1: 'value1', key2: 'value2' }, - endTime: [20, 20], + endTimeUnixNano: 20_000_000_020n, + startTimeUnixNano: 10_000_000_010n, startTime: [10, 10], + endTime: [20, 20], 
value: 5.5, }, ], @@ -159,8 +163,10 @@ describe('metric-transform', () => { dataPoints: [ { attributes: { key1: 'value1', key2: 'value2' }, - endTime: [20, 20], + endTimeUnixNano: 20_000_000_020n, + startTimeUnixNano: 10_000_000_010n, startTime: [10, 10], + endTime: [20, 20], value: { buckets: { boundaries: [1, 10, 100], @@ -207,8 +213,10 @@ describe('metric-transform', () => { dataPoints: [ { attributes: { key1: 'value1', key2: 'value2' }, - endTime: [20, 20], + endTimeUnixNano: 20_000_000_020n, + startTimeUnixNano: 10_000_000_010n, startTime: [10, 10], + endTime: [20, 20], value: 5, }, ], @@ -248,8 +256,10 @@ describe('metric-transform', () => { dataPoints: [ { attributes: { key1: 'value1', key2: 'value2' }, - endTime: [20, 20], + endTimeUnixNano: 20_000_000_020n, + startTimeUnixNano: 10_000_000_010n, startTime: [10, 10], + endTime: [20, 20], value: 5.5, }, ], diff --git a/packages/opentelemetry-core/src/common/time.ts b/packages/opentelemetry-core/src/common/time.ts index 89f5bfd3f60..d04329cacb3 100644 --- a/packages/opentelemetry-core/src/common/time.ts +++ b/packages/opentelemetry-core/src/common/time.ts @@ -36,6 +36,9 @@ export function millisToHrTime(epochMillis: number): api.HrTime { return [seconds, nanos]; } +/** + * start time of the process in milliseconds since the epoch + */ export function getTimeOrigin(): number { let timeOrigin = performance.timeOrigin; if (typeof timeOrigin !== 'number') { @@ -50,12 +53,11 @@ export function getTimeOrigin(): number { * @param performanceNow */ export function hrTime(performanceNow?: number): api.HrTime { - const timeOrigin = millisToHrTime(getTimeOrigin()); const now = millisToHrTime( typeof performanceNow === 'number' ? 
performanceNow : performance.now() ); - return addHrTimes(timeOrigin, now); + return addHrTimes(millisToHrTime(getTimeOrigin()), now); } /** @@ -64,19 +66,30 @@ export function hrTime(performanceNow?: number): api.HrTime { * @param time */ export function timeInputToHrTime(time: api.TimeInput): api.HrTime { - // process.hrtime if (isTimeInputHrTime(time)) { return time as api.HrTime; + } + return nanosToHrTime(timeInputToNano(time)); +} + +/** + * + * Converts a TimeInput to a nanosecond unix timestamp + * @param time + */ +export function timeInputToNano(time: api.TimeInput): bigint { + if (isTimeInputHrTime(time)) { + return hrTimeToNanoseconds(time); } else if (typeof time === 'number') { // Must be a performance.now() if it's smaller than process start time. if (time < getTimeOrigin()) { - return hrTime(time); + return millisecondsToNanoseconds(time + getTimeOrigin()); } else { // epoch milliseconds or performance.timeOrigin - return millisToHrTime(time); + return millisecondsToNanoseconds(time); } } else if (time instanceof Date) { - return millisToHrTime(time.getTime()); + return millisecondsToNanoseconds(time.getTime()); } else { throw TypeError('Invalid input type'); } @@ -120,8 +133,31 @@ export function hrTimeToTimeStamp(time: api.HrTime): string { * Convert hrTime to nanoseconds. 
* @param time */ -export function hrTimeToNanoseconds(time: api.HrTime): number { - return time[0] * SECOND_TO_NANOSECONDS + time[1]; +export function hrTimeToNanoseconds(time: api.HrTime): bigint { + return BigInt(time[0]) * 1_000_000_000n + BigInt(time[1]); +} + +export function millisecondsToNanoseconds(millis: number): bigint { + if (Number.isInteger(millis)) { + return BigInt(millis) * 1_000_000n; + } else { + const out = + BigInt(Math.trunc(millis)) * 1000000n + + BigInt(Math.round((millis % 1) * 1000000)); + return out; + } +} + +export function nanosecondsToMilliseconds(nanos: bigint): number { + return Number(nanos / 1_000_000n); +} + +export function nanosecondsToMicroseconds(nanos: bigint): number { + return Number(nanos / 1_000n); +} + +export function nanosToHrTime(nanos: bigint): api.HrTime { + return [Number(nanos / 1_000_000_000n), Number(nanos % 1_000_000_000n)]; } /** diff --git a/packages/opentelemetry-core/src/index.ts b/packages/opentelemetry-core/src/index.ts index 0be041c381f..a347810688e 100644 --- a/packages/opentelemetry-core/src/index.ts +++ b/packages/opentelemetry-core/src/index.ts @@ -30,11 +30,16 @@ export { hrTimeToMicroseconds, hrTimeToMilliseconds, hrTimeToNanoseconds, + millisecondsToNanoseconds, + nanosecondsToMilliseconds, + nanosecondsToMicroseconds, + nanosToHrTime, hrTimeToTimeStamp, isTimeInput, isTimeInputHrTime, millisToHrTime, timeInputToHrTime, + timeInputToNano, } from './common/time'; export { ErrorHandler, InstrumentationScope } from './common/types'; export { ExportResult, ExportResultCode } from './ExportResult'; diff --git a/packages/opentelemetry-core/test/common/time.test.ts b/packages/opentelemetry-core/test/common/time.test.ts index 1c8df7016ed..c3ce4d2ccf2 100644 --- a/packages/opentelemetry-core/test/common/time.test.ts +++ b/packages/opentelemetry-core/test/common/time.test.ts @@ -15,12 +15,10 @@ */ import * as assert from 'assert'; -import { otperformance as performance } from '../../src/platform'; import * 
as sinon from 'sinon'; import * as api from '@opentelemetry/api'; import { hrTime, - timeInputToHrTime, hrTimeDuration, hrTimeToNanoseconds, hrTimeToMilliseconds, @@ -28,6 +26,9 @@ import { hrTimeToTimeStamp, isTimeInput, addHrTimes, + millisecondsToNanoseconds, + timeInputToNano, + timeInputToHrTime, } from '../../src/common/time'; describe('time', () => { @@ -150,6 +151,54 @@ describe('time', () => { }); }); + describe('#timeInputToNano', () => { + it('should convert Date hrTime', () => { + const timeInput = new Date(1609297640313); + const output = timeInputToNano(timeInput); + assert.deepStrictEqual(output, 1609297640313000000n); + }); + + it('should convert epoch milliseconds hrTime', () => { + const timeInput = Date.now(); + const output = timeInputToNano(timeInput); + assert.deepStrictEqual(output, millisecondsToNanoseconds(timeInput)); + }); + + it('should convert arbitrary epoch milliseconds (with sub-millis precision) hrTime', () => { + sinon.stub(performance, 'timeOrigin').value(111.5); + const inputs = [ + // [ input, expected ] + [1609297640313, 1609297640313000000n], + // inevitable precision loss without decimal arithmetics. 
+ [1609297640313.333, 1609297640313333008n], + // eslint-disable-next-line @typescript-eslint/no-loss-of-precision + [1609297640313.333333333, 1609297640313333252n], + ] as const; + for (const [idx, input] of inputs.entries()) { + const output = timeInputToNano(input[0]); + assert.deepStrictEqual(output, input[1], `input[${idx}]: ${input}`); + } + }); + + it('should convert performance.now() hrTime', () => { + sinon.stub(performance, 'timeOrigin').value(111.5); + + const timeInput = 11.9; + const output = timeInputToNano(timeInput); + + assert.deepStrictEqual(output, 123400000n); + }); + + it('should not convert hrtime hrTime', () => { + sinon.stub(performance, 'timeOrigin').value(111.5); + + const timeInput: [number, number] = [3138971, 245466222]; + const output = timeInputToNano(timeInput); + + assert.deepStrictEqual(output, 3138971245466222n); + }); + }); + describe('#hrTimeDuration', () => { it('should return duration', () => { const startTime: api.HrTime = [22, 400000000]; @@ -180,7 +229,18 @@ describe('time', () => { describe('#hrTimeToNanoseconds', () => { it('should return nanoseconds', () => { const output = hrTimeToNanoseconds([1, 200000000]); - assert.deepStrictEqual(output, 1200000000); + assert.deepStrictEqual(output, 1200000000n); + }); + }); + + describe('#millisecondsToNanoseconds', () => { + it('should convert to nanoseconds', () => { + assert.strictEqual(millisecondsToNanoseconds(123), 123_000_000n); + assert.strictEqual( + millisecondsToNanoseconds(123.123_456_789), + 123_123_457n + ); + assert.strictEqual(millisecondsToNanoseconds(1066.969834), 1066969834n); }); }); @@ -204,9 +264,6 @@ describe('time', () => { it('should return true for a date', () => { assert.strictEqual(isTimeInput(new Date()), true); }); - it('should return true for an array with 2 elements type number', () => { - assert.strictEqual(isTimeInput([1, 1]), true); - }); it('should return FALSE for different cases for an array ', () => { assert.strictEqual(isTimeInput([1, 1, 1]), 
false); assert.strictEqual(isTimeInput([1]), false); diff --git a/packages/opentelemetry-exporter-jaeger/src/transform.ts b/packages/opentelemetry-exporter-jaeger/src/transform.ts index 20e5cffed89..f78b971f8b6 100644 --- a/packages/opentelemetry-exporter-jaeger/src/transform.ts +++ b/packages/opentelemetry-exporter-jaeger/src/transform.ts @@ -17,8 +17,8 @@ import { Link, SpanStatusCode, SpanKind } from '@opentelemetry/api'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import { - hrTimeToMilliseconds, - hrTimeToMicroseconds, + nanosecondsToMicroseconds, + nanosecondsToMilliseconds, } from '@opentelemetry/core'; import { ThriftSpan, @@ -126,7 +126,7 @@ export function spanToThrift(span: ReadableSpan): ThriftSpan { value: event.droppedAttributesCount, }); } - return { timestamp: hrTimeToMilliseconds(event.time), fields }; + return { timestamp: nanosecondsToMilliseconds(event.timeUnixNano), fields }; }); const spanLogs: ThriftLog[] = ThriftUtils.getThriftLogs(logs); @@ -138,8 +138,12 @@ export function spanToThrift(span: ReadableSpan): ThriftSpan { operationName: span.name, references: spanLinksToThriftRefs(span.links), flags: span.spanContext().traceFlags || DEFAULT_FLAGS, - startTime: Utils.encodeInt64(hrTimeToMicroseconds(span.startTime)), - duration: Utils.encodeInt64(hrTimeToMicroseconds(span.duration)), + startTime: Utils.encodeInt64( + nanosecondsToMicroseconds(span.startTimeUnixNano) + ), + duration: Utils.encodeInt64( + nanosecondsToMicroseconds(span.endTimeUnixNano - span.startTimeUnixNano) + ), tags: spanTags, logs: spanLogs, }; diff --git a/packages/opentelemetry-exporter-jaeger/test/jaeger.test.ts b/packages/opentelemetry-exporter-jaeger/test/jaeger.test.ts index 1fe726769c8..743ea133203 100644 --- a/packages/opentelemetry-exporter-jaeger/test/jaeger.test.ts +++ b/packages/opentelemetry-exporter-jaeger/test/jaeger.test.ts @@ -36,8 +36,10 @@ describe('JaegerExporter', () => { traceFlags: TraceFlags.NONE, }; }, - startTime: [1566156729, 
709], - endTime: [1566156731, 709], + startTimeUnixNano: 1566156729000000709n, + endTimeUnixNano: 1566156731000000709n, + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. should not be used ended: true, status: { code: api.SpanStatusCode.ERROR, @@ -45,7 +47,6 @@ describe('JaegerExporter', () => { attributes: {}, links: [], events: [], - duration: [32, 800000000], resource: resourceFromAttributes({ [SEMRESATTRS_SERVICE_NAME]: 'opentelemetry', }), diff --git a/packages/opentelemetry-exporter-jaeger/test/transform.test.ts b/packages/opentelemetry-exporter-jaeger/test/transform.test.ts index f4f63efac43..724aa5d99df 100644 --- a/packages/opentelemetry-exporter-jaeger/test/transform.test.ts +++ b/packages/opentelemetry-exporter-jaeger/test/transform.test.ts @@ -14,17 +14,17 @@ * limitations under the License. */ -import * as assert from 'assert'; -import { spanToThrift } from '../src/transform'; -import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; +import * as api from '@opentelemetry/api'; +import { SpanStatusCode, TraceFlags } from '@opentelemetry/api'; +import { nanosecondsToMicroseconds } from '@opentelemetry/core'; import { emptyResource, resourceFromAttributes, } from '@opentelemetry/resources'; -import * as api from '@opentelemetry/api'; -import { ThriftUtils, Utils, ThriftReferenceType } from '../src/types'; -import { hrTimeToMicroseconds } from '@opentelemetry/core'; -import { SpanStatusCode, TraceFlags } from '@opentelemetry/api'; +import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; +import * as assert from 'assert'; +import { spanToThrift } from '../src/transform'; +import { ThriftReferenceType, ThriftUtils, Utils } from '../src/types'; describe('transform', () => { const spanContext = () => { @@ -41,8 +41,10 @@ describe('transform', () => { name: 'my-span', kind: api.SpanKind.INTERNAL, spanContext, - startTime: [1566156729, 709], - 
endTime: [1566156731, 709], + startTimeUnixNano: 1566156729000000709n, + endTimeUnixNano: 1566156731000000709n, + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. should not be used ended: true, status: { code: api.SpanStatusCode.OK, @@ -72,10 +74,10 @@ describe('transform', () => { attributes: { error: true, }, - time: [1566156729, 809], + timeUnixNano: 1566156729000000809n, + time: [0, 0], }, ], - duration: [32, 800000000], resource: resourceFromAttributes({ service: 'ui', version: 1, @@ -110,7 +112,9 @@ describe('transform', () => { assert.deepStrictEqual(thriftSpan.flags, 1); assert.deepStrictEqual( thriftSpan.startTime, - Utils.encodeInt64(hrTimeToMicroseconds(readableSpan.startTime)) + Utils.encodeInt64( + nanosecondsToMicroseconds(readableSpan.startTimeUnixNano) + ) ); assert.strictEqual(thriftSpan.tags.length, 9); const [tag1, tag2, tag3, tag4, tag5, tag6, tag7] = thriftSpan.tags; @@ -169,6 +173,8 @@ describe('transform', () => { name: 'my-span1', kind: api.SpanKind.CLIENT, spanContext, + startTimeUnixNano: 1566156729000000709n, + endTimeUnixNano: 1566156731000000709n, startTime: [1566156729, 709], endTime: [1566156731, 709], ended: true, @@ -179,7 +185,6 @@ describe('transform', () => { attributes: {}, links: [], events: [], - duration: [32, 800000000], resource: emptyResource(), instrumentationScope: { name: 'default', @@ -231,6 +236,8 @@ describe('transform', () => { name: 'my-span', kind: api.SpanKind.INTERNAL, spanContext, + startTimeUnixNano: 1566156729000000709n, + endTimeUnixNano: 1566156731000000709n, startTime: [1566156729, 709], endTime: [1566156731, 709], ended: true, @@ -253,7 +260,6 @@ describe('transform', () => { }, ], events: [], - duration: [32, 800000000], resource: emptyResource(), instrumentationScope: { name: 'default', @@ -291,6 +297,8 @@ describe('transform', () => { traceFlags: TraceFlags.NONE, }; }, + startTimeUnixNano: 
1566156729000000709n, + endTimeUnixNano: 1566156731000000709n, startTime: [1566156729, 709], endTime: [1566156731, 709], ended: true, @@ -301,7 +309,6 @@ describe('transform', () => { attributes: {}, links: [], events: [], - duration: [32, 800000000], resource: emptyResource(), instrumentationScope: { name: 'default', @@ -328,6 +335,8 @@ describe('transform', () => { name: 'my-span', kind: api.SpanKind.INTERNAL, spanContext, + startTimeUnixNano: 1566156729000000709n, + endTimeUnixNano: 1566156731000000709n, startTime: [1566156729, 709], endTime: [1566156731, 709], ended: true, @@ -359,10 +368,10 @@ describe('transform', () => { attributes: { error: true, }, - time: [1566156729, 809], + timeUnixNano: 1566156729000000809n, + time: [0, 0], }, ], - duration: [32, 800000000], resource: resourceFromAttributes({ service: 'ui', version: 1, diff --git a/packages/opentelemetry-exporter-zipkin/src/transform.ts b/packages/opentelemetry-exporter-zipkin/src/transform.ts index 40543571237..bf694d5d9a7 100644 --- a/packages/opentelemetry-exporter-zipkin/src/transform.ts +++ b/packages/opentelemetry-exporter-zipkin/src/transform.ts @@ -16,8 +16,8 @@ import * as api from '@opentelemetry/api'; import { ReadableSpan, TimedEvent } from '@opentelemetry/sdk-trace-base'; -import { hrTimeToMicroseconds } from '@opentelemetry/core'; import * as zipkinTypes from './types'; +import { nanosecondsToMicroseconds } from '@opentelemetry/core'; const ZIPKIN_SPAN_KIND_MAPPING = { [api.SpanKind.CLIENT]: zipkinTypes.SpanKind.CLIENT, @@ -47,15 +47,16 @@ export function toZipkinSpan( name: span.name, id: span.spanContext().spanId, kind: ZIPKIN_SPAN_KIND_MAPPING[span.kind], - timestamp: hrTimeToMicroseconds(span.startTime), - duration: Math.round(hrTimeToMicroseconds(span.duration)), + timestamp: nanosecondsToMicroseconds(span.startTimeUnixNano), + duration: nanosecondsToMicroseconds( + span.endTimeUnixNano - span.startTimeUnixNano + ), localEndpoint: { serviceName }, tags: _toZipkinTags(span, 
statusCodeTagName, statusErrorTagName), annotations: span.events.length ? _toZipkinAnnotations(span.events) : undefined, }; - return zipkinSpan; } @@ -111,7 +112,7 @@ export function _toZipkinAnnotations( events: TimedEvent[] ): zipkinTypes.Annotation[] { return events.map(event => ({ - timestamp: Math.round(hrTimeToMicroseconds(event.time)), + timestamp: nanosecondsToMicroseconds(event.timeUnixNano), value: event.name, })); } diff --git a/packages/opentelemetry-exporter-zipkin/test/common/transform.test.ts b/packages/opentelemetry-exporter-zipkin/test/common/transform.test.ts index 2fd0e8c493e..62bf5326f45 100644 --- a/packages/opentelemetry-exporter-zipkin/test/common/transform.test.ts +++ b/packages/opentelemetry-exporter-zipkin/test/common/transform.test.ts @@ -16,10 +16,8 @@ import * as api from '@opentelemetry/api'; import { - hrTime, - hrTimeDuration, - hrTimeToMicroseconds, - millisToHrTime, + millisecondsToNanoseconds, + nanosecondsToMicroseconds, } from '@opentelemetry/core'; import { Resource } from '@opentelemetry/resources'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; @@ -58,17 +56,15 @@ const spanContext: api.SpanContext = { }; const currentTime = Date.now(); const durationMs = 10; -const startTime = hrTime(currentTime - durationMs); -const endTime = hrTime(currentTime); -const duration = millisToHrTime(durationMs); +const startTime = millisecondsToNanoseconds(currentTime - durationMs); +const endTime = millisecondsToNanoseconds(currentTime); function getSpan(options: Partial): ReadableSpan { const span = { name: options.name || 'my-span', kind: typeof options.kind === 'number' ? 
options.kind : api.SpanKind.SERVER, - startTime: options.startTime || startTime, - endTime: options.endTime || endTime, - duration: options.duration || duration, + startTimeUnixNano: options.startTimeUnixNano || startTime, + endTimeUnixNano: options.endTimeUnixNano || endTime, spanContext: () => spanContext, parentSpanContext: options.parentSpanContext || parentSpanContext, attributes: options.attributes || {}, @@ -96,7 +92,8 @@ describe('transform', () => { events: [ { name: 'my-event', - time: hrTime(Date.now() + 5), + timeUnixNano: millisecondsToNanoseconds(Date.now() + 5), + time: [0, 0], attributes: { key3: 'value 3' }, }, ], @@ -113,11 +110,11 @@ describe('transform', () => { annotations: [ { value: 'my-event', - timestamp: Math.round(hrTimeToMicroseconds(span.events[0].time)), + timestamp: nanosecondsToMicroseconds(span.events[0].timeUnixNano), }, ], - duration: Math.round( - hrTimeToMicroseconds(hrTimeDuration(span.startTime, span.endTime)) + duration: nanosecondsToMicroseconds( + span.endTimeUnixNano - span.startTimeUnixNano ), id: span.spanContext().spanId, localEndpoint: { @@ -136,7 +133,7 @@ describe('transform', () => { 'telemetry.sdk.name': 'opentelemetry', 'telemetry.sdk.version': VERSION, }, - timestamp: hrTimeToMicroseconds(span.startTime), + timestamp: nanosecondsToMicroseconds(span.startTimeUnixNano), traceId: span.spanContext().traceId, }); it("should skip parentSpanId if doesn't exist", () => { @@ -153,8 +150,8 @@ describe('transform', () => { assert.deepStrictEqual(zipkinSpan, { kind: 'SERVER', annotations: undefined, - duration: Math.round( - hrTimeToMicroseconds(hrTimeDuration(span.startTime, span.endTime)) + duration: nanosecondsToMicroseconds( + span.endTimeUnixNano - span.startTimeUnixNano ), id: span.spanContext().spanId, localEndpoint: { @@ -171,7 +168,7 @@ describe('transform', () => { 'telemetry.sdk.name': 'opentelemetry', 'telemetry.sdk.version': VERSION, }, - timestamp: hrTimeToMicroseconds(span.startTime), + timestamp: 
Number(span.startTimeUnixNano / 1_000n), traceId: span.spanContext().traceId, }); }); @@ -200,7 +197,7 @@ describe('transform', () => { kind: item.zipkin, annotations: undefined, duration: Math.round( - hrTimeToMicroseconds(hrTimeDuration(span.startTime, span.endTime)) + Number((span.endTimeUnixNano - span.startTimeUnixNano) / 1000n) ), id: span.spanContext().spanId, localEndpoint: { @@ -217,7 +214,7 @@ describe('transform', () => { 'telemetry.sdk.name': 'opentelemetry', 'telemetry.sdk.version': VERSION, }, - timestamp: hrTimeToMicroseconds(span.startTime), + timestamp: nanosecondsToMicroseconds(span.startTimeUnixNano), traceId: span.spanContext().traceId, }); }) @@ -335,12 +332,18 @@ describe('transform', () => { describe('_toZipkinAnnotations', () => { it('should convert OpenTelemetry events to Zipkin annotations', () => { + const now = Date.now(); const span = getSpan({ events: [ - { name: 'my-event1', time: hrTime(Date.now()) }, + { + name: 'my-event1', + timeUnixNano: BigInt(now) * 1_000_000n, + time: [0, 0], + }, { name: 'my-event2', - time: hrTime(Date.now()), + timeUnixNano: BigInt(now + 10) * 1_000_000n, + time: [0, 0], attributes: { key1: 'value1' }, }, ], @@ -350,11 +353,11 @@ describe('transform', () => { assert.deepStrictEqual(annotations, [ { value: 'my-event1', - timestamp: Math.round(hrTimeToMicroseconds(span.events[0].time)), + timestamp: Math.round(Number(span.events[0].timeUnixNano / 1000n)), }, { value: 'my-event2', - timestamp: Math.round(hrTimeToMicroseconds(span.events[1].time)), + timestamp: Math.round(Number(span.events[1].timeUnixNano / 1000n)), }, ]); }); diff --git a/packages/opentelemetry-exporter-zipkin/test/helper.ts b/packages/opentelemetry-exporter-zipkin/test/helper.ts index b3b8ed6191a..f469c892795 100644 --- a/packages/opentelemetry-exporter-zipkin/test/helper.ts +++ b/packages/opentelemetry-exporter-zipkin/test/helper.ts @@ -35,14 +35,15 @@ export const mockedReadableSpan: ReadableSpan = { traceId: 
'1f1008dc8e270e85c40a0d7c3939b278', traceFlags: TraceFlags.SAMPLED, }, - startTime: [1574120165, 429803070], - endTime: [1574120165, 438688070], + startTimeUnixNano: 1574120165429803070n, + endTimeUnixNano: 1574120165438688070n, + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. should not be used ended: true, status: { code: SpanStatusCode.OK }, attributes: { component: 'foo' }, links: [], events: [], - duration: [0, 8885000], resource: resourceFromAttributes({ service: 'ui', version: 1, diff --git a/packages/opentelemetry-exporter-zipkin/test/node/zipkin.test.ts b/packages/opentelemetry-exporter-zipkin/test/node/zipkin.test.ts index c0cfa334de0..9638b0a9ef3 100644 --- a/packages/opentelemetry-exporter-zipkin/test/node/zipkin.test.ts +++ b/packages/opentelemetry-exporter-zipkin/test/node/zipkin.test.ts @@ -19,8 +19,8 @@ import * as nock from 'nock'; import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; import { ExportResult, - hrTimeToMicroseconds, ExportResultCode, + millisecondsToNanoseconds, } from '@opentelemetry/core'; import * as api from '@opentelemetry/api'; import { @@ -32,8 +32,6 @@ import * as zipkinTypes from '../../src/types'; import { TraceFlags } from '@opentelemetry/api'; import { SEMRESATTRS_SERVICE_NAME } from '@opentelemetry/semantic-conventions'; -const MICROS_PER_SECS = 1e6; - function getReadableSpan() { const startTime = 1566156729709; const duration = 2000; @@ -47,10 +45,13 @@ function getReadableSpan() { traceFlags: TraceFlags.NONE, }; }, - startTime: [startTime, 0], - endTime: [startTime + duration, 0], + startTimeUnixNano: millisecondsToNanoseconds(startTime), + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. 
should not be used + endTimeUnixNano: + millisecondsToNanoseconds(startTime) + + millisecondsToNanoseconds(duration), ended: true, - duration: [duration, 0], status: { code: api.SpanStatusCode.OK, }, @@ -153,10 +154,14 @@ describe('Zipkin Exporter - node', () => { traceFlags: TraceFlags.NONE, }; }, - startTime: [startTime, 0], - endTime: [startTime + duration, 0], + startTimeUnixNano: millisecondsToNanoseconds(startTime), + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTimeUnixNano: + millisecondsToNanoseconds(startTime) + + millisecondsToNanoseconds(duration), + ended: true, - duration: [duration, 0], status: { code: api.SpanStatusCode.OK, }, @@ -168,7 +173,10 @@ describe('Zipkin Exporter - node', () => { events: [ { name: 'my-event', - time: [startTime + 10, 0], + timeUnixNano: + millisecondsToNanoseconds(startTime) + + millisecondsToNanoseconds(10), + time: [0, 0], attributes: { key3: 'value3' }, }, ], @@ -188,10 +196,14 @@ describe('Zipkin Exporter - node', () => { traceFlags: TraceFlags.NONE, }; }, - startTime: [startTime, 0], - endTime: [startTime + duration, 0], + startTimeUnixNano: millisecondsToNanoseconds(startTime), + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. 
should not be used + endTimeUnixNano: + millisecondsToNanoseconds(startTime) + + millisecondsToNanoseconds(duration), + ended: true, - duration: [duration, 0], status: { code: api.SpanStatusCode.OK, }, @@ -218,10 +230,10 @@ describe('Zipkin Exporter - node', () => { annotations: [ { value: 'my-event', - timestamp: (startTime + 10) * MICROS_PER_SECS, + timestamp: (startTime + 10) * 1000, }, ], - duration: duration * MICROS_PER_SECS, + duration: duration * 1000, id: span1.spanContext().spanId, localEndpoint: { serviceName: 'my-service', @@ -233,12 +245,12 @@ describe('Zipkin Exporter - node', () => { key2: 'value2', 'otel.status_code': 'OK', }, - timestamp: startTime * MICROS_PER_SECS, + timestamp: startTime * 1000, traceId: span1.spanContext().traceId, }, // Span 2 { - duration: duration * MICROS_PER_SECS, + duration: duration * 1000, id: span2.spanContext().spanId, kind: 'SERVER', localEndpoint: { @@ -248,7 +260,7 @@ describe('Zipkin Exporter - node', () => { tags: { 'otel.status_code': 'OK', }, - timestamp: hrTimeToMicroseconds([startTime, 0]), + timestamp: startTime * 1000, traceId: span2.spanContext().traceId, }, ]); @@ -376,10 +388,14 @@ describe('Zipkin Exporter - node', () => { spanId: '6e0c63257de34c92', traceFlags: TraceFlags.NONE, }), - startTime: [startTime, 0], - endTime: [startTime + duration, 0], + startTimeUnixNano: millisecondsToNanoseconds(startTime), + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. 
should not be used + endTimeUnixNano: + millisecondsToNanoseconds(startTime) + + millisecondsToNanoseconds(duration), + ended: true, - duration: [duration, 0], status: { code: api.SpanStatusCode.OK, }, @@ -391,7 +407,8 @@ describe('Zipkin Exporter - node', () => { events: [ { name: 'my-event', - time: [startTime + 10, 0], + timeUnixNano: millisecondsToNanoseconds(startTime) + 10_000_000_000n, + time: [0, 0], attributes: { key3: 'value3' }, }, ], @@ -411,10 +428,14 @@ describe('Zipkin Exporter - node', () => { spanId: '6e0c63257de34c92', traceFlags: TraceFlags.NONE, }), - startTime: [startTime, 0], - endTime: [startTime + duration, 0], + startTimeUnixNano: millisecondsToNanoseconds(startTime), + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTimeUnixNano: + millisecondsToNanoseconds(startTime) + + millisecondsToNanoseconds(duration), + ended: true, - duration: [duration, 0], status: { code: api.SpanStatusCode.OK, }, @@ -475,10 +496,14 @@ describe('Zipkin Exporter - node', () => { spanId: '6e0c63257de34c92', traceFlags: TraceFlags.NONE, }), - startTime: [startTime, 0], - endTime: [startTime + duration, 0], + startTimeUnixNano: millisecondsToNanoseconds(startTime), + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. 
should not be used + endTimeUnixNano: + millisecondsToNanoseconds(startTime) + + millisecondsToNanoseconds(duration), + ended: true, - duration: [duration, 0], status: { code: api.SpanStatusCode.OK, }, @@ -491,7 +516,8 @@ describe('Zipkin Exporter - node', () => { events: [ { name: 'my-event', - time: [startTime + 10, 0], + timeUnixNano: millisecondsToNanoseconds(startTime) + 10_000_000_000n, + time: [0, 0], attributes: { key3: 'value3' }, }, ], @@ -509,10 +535,14 @@ describe('Zipkin Exporter - node', () => { spanId: '6e0c63257de34c92', traceFlags: TraceFlags.NONE, }), - startTime: [startTime, 0], - endTime: [startTime + duration, 0], + startTimeUnixNano: millisecondsToNanoseconds(startTime), + startTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTime: [0, 0], // wrong on purpose - included for compatibility. should not be used + endTimeUnixNano: + millisecondsToNanoseconds(startTime) + + millisecondsToNanoseconds(duration), + ended: true, - duration: [duration, 0], status: { code: api.SpanStatusCode.OK, }, diff --git a/packages/opentelemetry-sdk-trace-base/src/Span.ts b/packages/opentelemetry-sdk-trace-base/src/Span.ts index a6d42085589..8298dafeb11 100644 --- a/packages/opentelemetry-sdk-trace-base/src/Span.ts +++ b/packages/opentelemetry-sdk-trace-base/src/Span.ts @@ -15,14 +15,14 @@ */ import { + Span as APISpan, + Attributes, + AttributeValue, Context, diag, Exception, HrTime, Link, - Span as APISpan, - Attributes, - AttributeValue, SpanContext, SpanKind, SpanStatus, @@ -30,15 +30,13 @@ import { TimeInput, } from '@opentelemetry/api'; import { - addHrTimes, - millisToHrTime, - getTimeOrigin, - hrTime, - hrTimeDuration, + hrTimeToNanoseconds, InstrumentationScope, isAttributeValue, isTimeInput, isTimeInputHrTime, + millisecondsToNanoseconds, + nanosToHrTime, otperformance, sanitizeAttributes, } from '@opentelemetry/core'; @@ -48,8 +46,8 @@ import { SEMATTRS_EXCEPTION_STACKTRACE, SEMATTRS_EXCEPTION_TYPE, } from 
'@opentelemetry/semantic-conventions'; -import { ReadableSpan } from './export/ReadableSpan'; import { ExceptionEventName } from './enums'; +import { ReadableSpan } from './export/ReadableSpan'; import { SpanProcessor } from './SpanProcessor'; import { TimedEvent } from './TimedEvent'; import { SpanLimits } from './types'; @@ -87,7 +85,6 @@ export class SpanImpl implements Span { readonly attributes: Attributes = {}; readonly links: Link[] = []; readonly events: TimedEvent[] = []; - readonly startTime: HrTime; readonly resource: Resource; readonly instrumentationScope: InstrumentationScope; @@ -99,15 +96,16 @@ export class SpanImpl implements Span { status: SpanStatus = { code: SpanStatusCode.UNSET, }; - endTime: HrTime = [0, 0]; private _ended = false; - private _duration: HrTime = [-1, -1]; + private _startTime: bigint; + private _endTime: bigint = -1n; + private readonly _spanProcessor: SpanProcessor; private readonly _spanLimits: SpanLimits; private readonly _attributeValueLengthLimit: number; private readonly _performanceStartTime: number; - private readonly _performanceOffset: number; + private readonly _performanceOffsetNanos: bigint; private readonly _startTimeProvided: boolean; /** @@ -118,8 +116,9 @@ export class SpanImpl implements Span { this._spanContext = opts.spanContext; this._performanceStartTime = otperformance.now(); - this._performanceOffset = - now - (this._performanceStartTime + getTimeOrigin()); + this._performanceOffsetNanos = millisecondsToNanoseconds( + now - this._performanceStartTime + ); this._startTimeProvided = opts.startTime != null; this._spanLimits = opts.spanLimits; this._attributeValueLengthLimit = @@ -130,7 +129,7 @@ export class SpanImpl implements Span { this.parentSpanContext = opts.parentSpanContext; this.kind = opts.kind; this.links = opts.links || []; - this.startTime = this._getTime(opts.startTime ?? now); + this._startTime = this._getTime(opts.startTime ?? 
now); this.resource = opts.resource; this.instrumentationScope = opts.scope; @@ -141,6 +140,22 @@ export class SpanImpl implements Span { this._spanProcessor.onStart(this, opts.context); } + get startTimeUnixNano(): bigint { + return this._startTime; + } + + get startTime(): HrTime { + return nanosToHrTime(this._startTime); + } + + get endTimeUnixNano(): bigint { + return this._endTime; + } + + get endTime(): HrTime { + return nanosToHrTime(this._endTime); + } + spanContext(): SpanContext { return this._spanContext; } @@ -219,11 +234,13 @@ export class SpanImpl implements Span { } const attributes = sanitizeAttributes(attributesOrStartTime); + const timeUnixNano = this._getTime(timeStamp); this.events.push({ name, attributes, - time: this._getTime(timeStamp), + timeUnixNano, + time: nanosToHrTime(timeUnixNano), droppedAttributesCount: 0, }); return this; @@ -272,17 +289,15 @@ export class SpanImpl implements Span { } this._ended = true; - this.endTime = this._getTime(endTime); - this._duration = hrTimeDuration(this.startTime, this.endTime); + this._endTime = this._getTime(endTime); - if (this._duration[0] < 0) { + if (this._endTime < this._startTime) { diag.warn( 'Inconsistent start and end time, startTime > endTime. 
Setting span duration to 0ms.', - this.startTime, - this.endTime + this._startTime, + this._endTime ); - this.endTime = this.startTime.slice() as HrTime; - this._duration = [0, 0]; + this._endTime = this._startTime; } if (this._droppedEventsCount > 0) { @@ -294,33 +309,44 @@ export class SpanImpl implements Span { this._spanProcessor.onEnd(this); } - private _getTime(inp?: TimeInput): HrTime { + /** + * + * @param inp time input from the user + * @returns timestamp in nanoseconds from the epoch + */ + private _getTime(inp?: TimeInput): bigint { if (typeof inp === 'number' && inp <= otperformance.now()) { // must be a performance timestamp // apply correction and convert to hrtime - return hrTime(inp + this._performanceOffset); + return millisecondsToNanoseconds(inp) + this._performanceOffsetNanos; } if (typeof inp === 'number') { - return millisToHrTime(inp); + return millisecondsToNanoseconds(inp); + } + + if (typeof inp === 'bigint') { + return inp; } if (inp instanceof Date) { - return millisToHrTime(inp.getTime()); + return millisecondsToNanoseconds(inp.getTime()); } if (isTimeInputHrTime(inp)) { - return inp; + return hrTimeToNanoseconds(inp); } if (this._startTimeProvided) { // if user provided a time for the start manually // we can't use duration to calculate event/end times - return millisToHrTime(Date.now()); + return millisecondsToNanoseconds(Date.now()); } - const msDuration = otperformance.now() - this._performanceStartTime; - return addHrTimes(this.startTime, millisToHrTime(msDuration)); + const nanoDuration = millisecondsToNanoseconds( + otperformance.now() - this._performanceStartTime + ); + return this._startTime + nanoDuration; } isRecording(): boolean { @@ -356,10 +382,6 @@ export class SpanImpl implements Span { } } - get duration(): HrTime { - return this._duration; - } - get ended(): boolean { return this._ended; } diff --git a/packages/opentelemetry-sdk-trace-base/src/TimedEvent.ts b/packages/opentelemetry-sdk-trace-base/src/TimedEvent.ts 
index 068caa432d3..5d042873212 100644 --- a/packages/opentelemetry-sdk-trace-base/src/TimedEvent.ts +++ b/packages/opentelemetry-sdk-trace-base/src/TimedEvent.ts @@ -14,13 +14,16 @@ * limitations under the License. */ -import { HrTime, Attributes } from '@opentelemetry/api'; +import { Attributes, HrTime } from '@opentelemetry/api'; /** * Represents a timed event. * A timed event is an event with a timestamp. */ export interface TimedEvent { + /** the timestamp of the event */ + timeUnixNano: bigint; + /** @deprecated please use timeUnixNano */ time: HrTime; /** The name of the event. */ name: string; diff --git a/packages/opentelemetry-sdk-trace-base/src/export/ConsoleSpanExporter.ts b/packages/opentelemetry-sdk-trace-base/src/export/ConsoleSpanExporter.ts index 148a2a81281..05901cd5ef6 100644 --- a/packages/opentelemetry-sdk-trace-base/src/export/ConsoleSpanExporter.ts +++ b/packages/opentelemetry-sdk-trace-base/src/export/ConsoleSpanExporter.ts @@ -14,13 +14,9 @@ * limitations under the License. 
*/ -import { SpanExporter } from './SpanExporter'; +import { ExportResult, ExportResultCode } from '@opentelemetry/core'; import { ReadableSpan } from './ReadableSpan'; -import { - ExportResult, - ExportResultCode, - hrTimeToMicroseconds, -} from '@opentelemetry/core'; +import { SpanExporter } from './SpanExporter'; /** * This is implementation of {@link SpanExporter} that prints spans to the @@ -74,8 +70,8 @@ export class ConsoleSpanExporter implements SpanExporter { name: span.name, id: span.spanContext().spanId, kind: span.kind, - timestamp: hrTimeToMicroseconds(span.startTime), - duration: hrTimeToMicroseconds(span.duration), + startTimeUnixNano: span.startTimeUnixNano, + endTimeUnixNano: span.endTimeUnixNano, attributes: span.attributes, status: span.status, events: span.events, diff --git a/packages/opentelemetry-sdk-trace-base/src/export/ReadableSpan.ts b/packages/opentelemetry-sdk-trace-base/src/export/ReadableSpan.ts index 06eb5f5d8af..0a6176abe4d 100644 --- a/packages/opentelemetry-sdk-trace-base/src/export/ReadableSpan.ts +++ b/packages/opentelemetry-sdk-trace-base/src/export/ReadableSpan.ts @@ -15,15 +15,15 @@ */ import { - SpanKind, - SpanStatus, Attributes, HrTime, Link, SpanContext, + SpanKind, + SpanStatus, } from '@opentelemetry/api'; -import { Resource } from '@opentelemetry/resources'; import { InstrumentationScope } from '@opentelemetry/core'; +import { Resource } from '@opentelemetry/resources'; import { TimedEvent } from '../TimedEvent'; export interface ReadableSpan { @@ -31,13 +31,14 @@ readonly kind: SpanKind; readonly spanContext: () => SpanContext; readonly parentSpanContext?: SpanContext; + readonly startTimeUnixNano: bigint; + readonly endTimeUnixNano: bigint; readonly startTime: HrTime; readonly endTime: HrTime; readonly status: SpanStatus; readonly attributes: Attributes; readonly links: Link[]; readonly events: TimedEvent[]; - readonly duration: HrTime; readonly ended: boolean; readonly resource: 
Resource; readonly instrumentationScope: InstrumentationScope; diff --git a/packages/opentelemetry-sdk-trace-base/test/common/Span.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/Span.test.ts index f2f498ca616..3fb624ab4af 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/Span.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/Span.test.ts @@ -15,21 +15,18 @@ */ import { + Attributes, + AttributeValue, diag, - SpanStatusCode, Exception, ROOT_CONTEXT, SpanContext, SpanKind, + SpanStatusCode, TraceFlags, - HrTime, - Attributes, - AttributeValue, } from '@opentelemetry/api'; import { - hrTimeDuration, - hrTimeToMilliseconds, - hrTimeToNanoseconds, + nanosecondsToMilliseconds, otperformance as performance, } from '@opentelemetry/core'; import { @@ -41,20 +38,20 @@ import * as assert from 'assert'; import * as sinon from 'sinon'; import { BasicTracerProvider, Span, SpanProcessor } from '../../src'; import { SpanImpl } from '../../src/Span'; -import { invalidAttributes, validAttributes } from './util'; import { Tracer } from '../../src/Tracer'; import { DEFAULT_ATTRIBUTE_COUNT_LIMIT, DEFAULT_ATTRIBUTE_VALUE_LENGTH_LIMIT, } from '../../src/utility'; +import { invalidAttributes, validAttributes } from './util'; -const performanceTimeOrigin: HrTime = [1, 1]; +const performanceTimeOrigin = 1_000_000_001n; describe('Span', () => { beforeEach(() => { sinon .stub(performance, 'timeOrigin') - .value(hrTimeToMilliseconds(performanceTimeOrigin)); + .value(nanosecondsToMilliseconds(performanceTimeOrigin)); }); afterEach(() => { sinon.restore(); @@ -105,10 +102,7 @@ describe('Span', () => { spanLimits: tracer.getSpanLimits(), spanProcessor: tracer['_spanProcessor'], }); - assert.ok( - hrTimeToMilliseconds(span.startTime) > - hrTimeToMilliseconds(performanceTimeOrigin) - ); + assert.ok(span.startTimeUnixNano > performanceTimeOrigin); }); it('should have valid endTime', () => { @@ -123,19 +117,19 @@ describe('Span', () => { spanProcessor: 
tracer['_spanProcessor'], }); span.end(); + assert.ok(span.endTimeUnixNano != null); assert.ok( - hrTimeToNanoseconds(span.endTime) >= hrTimeToNanoseconds(span.startTime), + span.endTimeUnixNano >= span.startTimeUnixNano, 'end time must be bigger or equal start time' ); assert.ok( - hrTimeToMilliseconds(span.endTime) > - hrTimeToMilliseconds(performanceTimeOrigin), + span.endTimeUnixNano > performanceTimeOrigin, 'end time must be bigger than time origin' ); }); - it('should have a duration', () => { + it('should have a duration', async () => { const span = new SpanImpl({ scope: tracer.instrumentationScope, resource: tracer['_resource'], @@ -146,8 +140,11 @@ describe('Span', () => { spanLimits: tracer.getSpanLimits(), spanProcessor: tracer['_spanProcessor'], }); + // browsers may return the same timestamp twice if performance timer is called in quick succession to prevent timing attacks + await new Promise(resolve => setTimeout(resolve, 10)); span.end(); - assert.ok(hrTimeToNanoseconds(span.duration) >= 0); + assert.ok(span.endTimeUnixNano != null); + assert.ok(span.endTimeUnixNano > span.startTimeUnixNano); }); it('should ensure duration is never negative even if provided with inconsistent times', () => { @@ -162,12 +159,12 @@ describe('Span', () => { spanProcessor: tracer['_spanProcessor'], }); // @ts-expect-error writing readonly property. performance time origin is mocked to return ms value of [1,1] - span['_performanceOffset'] = 0; - span.end(hrTimeToMilliseconds(span.startTime) - 1); - assert.ok(hrTimeToNanoseconds(span.duration) >= 0); + span['_performanceOffsetNanos'] = 0n; + span.end(nanosecondsToMilliseconds(span.startTimeUnixNano) - 1); + assert.ok(span.endTimeUnixNano! 
=== span.startTimeUnixNano); }); - it('should have valid event.time', () => { + it('should have valid event.timeUnixNano', () => { const span = new SpanImpl({ scope: tracer.instrumentationScope, resource: tracer['_resource'], @@ -179,10 +176,7 @@ describe('Span', () => { spanProcessor: tracer['_spanProcessor'], }); span.addEvent('my-event'); - assert.ok( - hrTimeToMilliseconds(span.events[0].time) > - hrTimeToMilliseconds(performanceTimeOrigin) - ); + assert.ok(span.events[0].timeUnixNano > performanceTimeOrigin); }); it('should have an entered time for event', () => { @@ -198,14 +192,14 @@ describe('Span', () => { spanLimits: tracer.getSpanLimits(), spanProcessor: tracer['_spanProcessor'], }); - const eventTimeMS = 123; - const spanStartTime = hrTimeToMilliseconds(span.startTime); - const eventTime = spanStartTime + eventTimeMS; + const eventTime = nanosecondsToMilliseconds(span.startTimeUnixNano) + 123; span.addEvent('my-event', undefined, eventTime); - const diff = hrTimeDuration(span.startTime, span.events[0].time); - assert.strictEqual(hrTimeToMilliseconds(diff), 123); + assert.strictEqual( + span.events[0].timeUnixNano - span.startTimeUnixNano, + 123_000_000n + ); }); describe('when 2nd param is "TimeInput" type', () => { @@ -222,14 +216,14 @@ describe('Span', () => { spanLimits: tracer.getSpanLimits(), spanProcessor: tracer['_spanProcessor'], }); - const eventTimeMS = 123; - const spanStartTime = hrTimeToMilliseconds(span.startTime); - const eventTime = spanStartTime + eventTimeMS; + const eventTime = nanosecondsToMilliseconds(span.startTimeUnixNano) + 123; span.addEvent('my-event', eventTime); - const diff = hrTimeDuration(span.startTime, span.events[0].time); - assert.strictEqual(hrTimeToMilliseconds(diff), 123); + assert.strictEqual( + span.events[0].timeUnixNano - span.startTimeUnixNano, + 123_000_000n + ); }); }); @@ -1168,21 +1162,21 @@ describe('Span', () => { const [event] = span.events; assert.deepStrictEqual(event.name, 'sent'); 
assert.deepStrictEqual(event.attributes, {}); - assert.ok(event.time[0] > 0); + assert.ok(event.timeUnixNano >= span.startTimeUnixNano); span.addEvent('rev', { attr1: 'value', attr2: 123, attr3: true }); assert.strictEqual(span.events.length, 2); const [event1, event2] = span.events; assert.deepStrictEqual(event1.name, 'sent'); assert.deepStrictEqual(event1.attributes, {}); - assert.ok(event1.time[0] > 0); + assert.ok(event1.timeUnixNano >= span.startTimeUnixNano); assert.deepStrictEqual(event2.name, 'rev'); assert.deepStrictEqual(event2.attributes, { attr1: 'value', attr2: 123, attr3: true, }); - assert.ok(event2.time[0] > 0); + assert.ok(event2.timeUnixNano >= span.startTimeUnixNano); span.end(); // shouldn't add new event @@ -1230,8 +1224,9 @@ describe('Span', () => { }); const endTime = Date.now(); span.end(endTime); + const spanEndTime = span.endTimeUnixNano; span.end(endTime + 10); - assert.deepStrictEqual(span.endTime[0], Math.trunc(endTime / 1000)); + assert.strictEqual(span.endTimeUnixNano, spanEndTime); }); it('should update name', () => { @@ -1402,7 +1397,7 @@ describe('Span', () => { assert.deepStrictEqual(event.attributes, { 'exception.message': 'boom', }); - assert.ok(event.time[0] > 0); + assert.ok(event.timeUnixNano >= span.startTimeUnixNano); }); }); @@ -1434,7 +1429,7 @@ describe('Span', () => { span.recordException(error); const event = span.events[0]; - assert.ok(event.time[0] > 0); + assert.ok(event.timeUnixNano >= span.startTimeUnixNano); assert.strictEqual(event.name, 'exception'); assert.ok(event.attributes); @@ -1464,11 +1459,11 @@ describe('Span', () => { spanProcessor: tracer['_spanProcessor'], }); // @ts-expect-error writing readonly property. 
performance time origin is mocked to return ms value of [1,1] - span['_performanceOffset'] = 0; + span['_performanceOffsetNanos'] = 0n; assert.strictEqual(span.events.length, 0); span.recordException('boom', [0, 123]); const event = span.events[0]; - assert.deepStrictEqual(event.time, [0, 123]); + assert.deepStrictEqual(event.timeUnixNano, 123n); }); }); diff --git a/packages/opentelemetry-sdk-trace-base/test/common/export/ConsoleSpanExporter.test.ts b/packages/opentelemetry-sdk-trace-base/test/common/export/ConsoleSpanExporter.test.ts index d89edd8d615..aa1fa8489e8 100644 --- a/packages/opentelemetry-sdk-trace-base/test/common/export/ConsoleSpanExporter.test.ts +++ b/packages/opentelemetry-sdk-trace-base/test/common/export/ConsoleSpanExporter.test.ts @@ -79,7 +79,7 @@ describe('ConsoleSpanExporter', () => { const expectedKeys = [ 'attributes', - 'duration', + 'endTimeUnixNano', 'events', 'id', 'instrumentationScope', @@ -88,21 +88,23 @@ describe('ConsoleSpanExporter', () => { 'name', 'parentSpanContext', 'resource', + 'startTimeUnixNano', 'status', - 'timestamp', 'traceId', 'traceState', ].join(','); - assert.ok(firstSpan.name === 'foo'); - assert.ok(firstEvent.name === 'foobar'); - assert.ok(consoleSpan.id === firstSpan.spanContext().spanId); - assert.ok(keys === expectedKeys, 'expectedKeys'); - assert.ok( - firstSpan.instrumentationScope.name === instrumentationScopeName + assert.strictEqual(firstSpan.name, 'foo'); + assert.strictEqual(firstEvent.name, 'foobar'); + assert.strictEqual(consoleSpan.id, firstSpan.spanContext().spanId); + assert.strictEqual(keys, expectedKeys); + assert.strictEqual( + firstSpan.instrumentationScope.name, + instrumentationScopeName ); - assert.ok( - firstSpan.instrumentationScope.version === instrumentationScopeVersion + assert.strictEqual( + firstSpan.instrumentationScope.version, + instrumentationScopeVersion ); assert.ok(spyExport.calledOnce); diff --git a/packages/opentelemetry-sdk-trace-web/test/utils.test.ts 
b/packages/opentelemetry-sdk-trace-web/test/utils.test.ts index 46e9ed33c26..e7a9eb3d329 100644 --- a/packages/opentelemetry-sdk-trace-web/test/utils.test.ts +++ b/packages/opentelemetry-sdk-trace-web/test/utils.test.ts @@ -53,7 +53,7 @@ function createResource( startTime: HrTime, addToStart: number ): PerformanceResourceTiming { - const fetchStart = core.hrTimeToNanoseconds(startTime) + 1; + const fetchStart = Number(core.hrTimeToNanoseconds(startTime)) + 1; const responseEnd = fetchStart + addToStart; const million = 1000 * 1000; // used to convert nano to milli const defaultResource = { @@ -366,7 +366,9 @@ describe('utils', () => { beforeEach(() => { const time = createHrTime(startTime, 500); sinon.stub(performance, 'timeOrigin').value(0); - sinon.stub(performance, 'now').callsFake(() => hrTimeToNanoseconds(time)); + sinon + .stub(performance, 'now') + .callsFake(() => Number(hrTimeToNanoseconds(time))); }); describe('when resources are empty', () => { diff --git a/packages/opentelemetry-shim-opentracing/test/Shim.test.ts b/packages/opentelemetry-shim-opentracing/test/Shim.test.ts index 0823cb43180..e7a8db41983 100644 --- a/packages/opentelemetry-shim-opentracing/test/Shim.test.ts +++ b/packages/opentelemetry-shim-opentracing/test/Shim.test.ts @@ -14,16 +14,6 @@ * limitations under the License. 
*/ -import * as assert from 'assert'; -import * as opentracing from 'opentracing'; -import { BasicTracerProvider, Span } from '@opentelemetry/sdk-trace-base'; -import { SpanContextShim, SpanShim, TracerShim } from '../src/shim'; -import { - CompositePropagator, - W3CBaggagePropagator, - W3CTraceContextPropagator, - hrTimeToMilliseconds, -} from '@opentelemetry/core'; import { defaultTextMapGetter, defaultTextMapSetter, @@ -33,14 +23,24 @@ import { SpanStatusCode, trace, } from '@opentelemetry/api'; -import { performance } from 'perf_hooks'; +import { + CompositePropagator, + millisecondsToNanoseconds, + W3CBaggagePropagator, + W3CTraceContextPropagator, +} from '@opentelemetry/core'; import { B3Propagator } from '@opentelemetry/propagator-b3'; import { JaegerPropagator } from '@opentelemetry/propagator-jaeger'; +import { BasicTracerProvider, Span } from '@opentelemetry/sdk-trace-base'; import { SEMATTRS_EXCEPTION_MESSAGE, SEMATTRS_EXCEPTION_STACKTRACE, SEMATTRS_EXCEPTION_TYPE, } from '@opentelemetry/semantic-conventions'; +import * as assert from 'assert'; +import * as opentracing from 'opentracing'; +import { performance } from 'perf_hooks'; +import { SpanContextShim, SpanShim, TracerShim } from '../src/shim'; describe('OpenTracing Shim', () => { const compositePropagator = new CompositePropagator({ @@ -260,12 +260,11 @@ describe('OpenTracing Shim', () => { const otSpan = (span as SpanShim).getSpan() as Span; - const adjustment = (otSpan as any)['_performanceOffset']; - assert.strictEqual(otSpan.links.length, 1); assert.deepStrictEqual( - hrTimeToMilliseconds(otSpan.startTime), - now + adjustment + performance.timeOrigin + otSpan.startTimeUnixNano, + millisecondsToNanoseconds(now) + + (otSpan as any)['_performanceOffsetNanos'] ); assert.deepStrictEqual(otSpan.attributes, opentracingOptions.tags); }); @@ -413,15 +412,15 @@ describe('OpenTracing Shim', () => { }); describe('key-value pairs', () => { - const tomorrow = new Date().setDate(new Date().getDate() + 1); + 
const tomorrow = Date.now() + 86_400_000; it('names event after event attribute', () => { const kvLogs = { event: 'fun-time', user: 'meow', value: 123 }; span.log(kvLogs, tomorrow); assert.strictEqual(otSpan.events[0].name, 'fun-time'); assert.strictEqual( - otSpan.events[0].time[0], - Math.trunc(tomorrow / 1000) + otSpan.events[0].timeUnixNano, + millisecondsToNanoseconds(tomorrow) ); assert.deepStrictEqual(otSpan.events[0].attributes, kvLogs); }); @@ -431,8 +430,8 @@ describe('OpenTracing Shim', () => { span.log(kvLogs, tomorrow); assert.strictEqual(otSpan.events[0].name, 'log'); assert.strictEqual( - otSpan.events[0].time[0], - Math.trunc(tomorrow / 1000) + otSpan.events[0].timeUnixNano, + millisecondsToNanoseconds(tomorrow) ); assert.deepStrictEqual(otSpan.events[0].attributes, kvLogs); }); @@ -446,8 +445,8 @@ describe('OpenTracing Shim', () => { span.log(kvLogs, tomorrow); assert.strictEqual(otSpan.events[0].name, 'exception'); assert.strictEqual( - otSpan.events[0].time[0], - Math.trunc(tomorrow / 1000) + otSpan.events[0].timeUnixNano, + millisecondsToNanoseconds(tomorrow) ); const expectedAttributes = { [SEMATTRS_EXCEPTION_MESSAGE]: 'boom', @@ -469,8 +468,8 @@ describe('OpenTracing Shim', () => { span.log(kvLogs, tomorrow); assert.strictEqual(otSpan.events[0].name, 'exception'); assert.strictEqual( - otSpan.events[0].time[0], - Math.trunc(tomorrow / 1000) + otSpan.events[0].timeUnixNano, + millisecondsToNanoseconds(tomorrow) ); const expectedAttributes = { event: 'error', @@ -495,12 +494,11 @@ describe('OpenTracing Shim', () => { it('sets explicit end timestamp', () => { const now = performance.now(); + const expected = + millisecondsToNanoseconds(now) + + (otSpan as any)['_performanceOffsetNanos']; span.finish(now); - const adjustment = (otSpan as any)['_performanceOffset']; - assert.deepStrictEqual( - hrTimeToMilliseconds(otSpan.endTime), - now + adjustment + performance.timeOrigin - ); + assert.deepStrictEqual(otSpan.endTimeUnixNano, expected); }); 
it('can set and retrieve baggage', () => { diff --git a/packages/sdk-metrics/src/Instruments.ts b/packages/sdk-metrics/src/Instruments.ts index 6d60792effe..2011af1a3eb 100644 --- a/packages/sdk-metrics/src/Instruments.ts +++ b/packages/sdk-metrics/src/Instruments.ts @@ -30,13 +30,13 @@ import { ObservableGauge, ObservableUpDownCounter, } from '@opentelemetry/api'; -import { millisToHrTime } from '@opentelemetry/core'; import { InstrumentDescriptor } from './InstrumentDescriptor'; import { ObservableRegistry } from './state/ObservableRegistry'; import { AsyncWritableMetricStorage, WritableMetricStorage, } from './state/WritableMetricStorage'; +import { millisecondsToNanoseconds } from '@opentelemetry/core'; export class SyncInstrument { constructor( @@ -72,7 +72,7 @@ export class SyncInstrument { value, attributes, context, - millisToHrTime(Date.now()) + millisecondsToNanoseconds(Date.now()) ); } } diff --git a/packages/sdk-metrics/src/aggregator/Drop.ts b/packages/sdk-metrics/src/aggregator/Drop.ts index f968bdf4d68..bffe8fe3516 100644 --- a/packages/sdk-metrics/src/aggregator/Drop.ts +++ b/packages/sdk-metrics/src/aggregator/Drop.ts @@ -14,7 +14,6 @@ * limitations under the License. 
*/ -import { HrTime } from '@opentelemetry/api'; import { AggregationTemporality } from '../export/AggregationTemporality'; import { MetricData, MetricDescriptor } from '../export/MetricData'; import { Maybe } from '../utils'; @@ -40,7 +39,7 @@ export class DropAggregator implements Aggregator { _descriptor: MetricDescriptor, _aggregationTemporality: AggregationTemporality, _accumulationByAttributes: AccumulationRecord[], - _endTime: HrTime + _endTime: bigint ): Maybe { return undefined; } diff --git a/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts b/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts index af9600c758e..e7e6aa7bc6f 100644 --- a/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts +++ b/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts @@ -26,7 +26,7 @@ import { ExponentialHistogramMetricData, InstrumentType, } from '../export/MetricData'; -import { diag, HrTime } from '@opentelemetry/api'; +import { diag } from '@opentelemetry/api'; import { Maybe } from '../utils'; import { AggregationTemporality } from '../export/AggregationTemporality'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; @@ -34,6 +34,7 @@ import { Buckets } from './exponential-histogram/Buckets'; import { getMapping } from './exponential-histogram/mapping/getMapping'; import { Mapping } from './exponential-histogram/mapping/types'; import { nextGreaterSquare } from './exponential-histogram/util'; +import { nanosToHrTime } from '@opentelemetry/core'; /** * Internal value type for ExponentialHistogramAggregation. 
@@ -65,7 +66,7 @@ const MIN_MAX_SIZE = 2; export class ExponentialHistogramAccumulation implements Accumulation { constructor( - public startTime: HrTime = startTime, + public startTimeUnixNano: bigint = startTimeUnixNano, private _maxSize = DEFAULT_MAX_SIZE, private _recordMinMax = true, private _sum = 0, @@ -94,15 +95,14 @@ export class ExponentialHistogramAccumulation implements Accumulation { /** * Sets the start time for this accumulation - * @param {HrTime} startTime + * @param {bigint} startTimeUnixNano */ - setStartTime(startTime: HrTime): void { - this.startTime = startTime; + setStartTime(startTimeUnixNano: bigint): void { + this.startTimeUnixNano = startTimeUnixNano; } /** * Returns the datapoint representation of this accumulation - * @param {HrTime} startTime */ toPointValue(): InternalHistogram { return { @@ -239,7 +239,7 @@ export class ExponentialHistogramAccumulation implements Accumulation { } } - this.startTime = previous.startTime; + this.startTimeUnixNano = previous.startTimeUnixNano; this._sum += previous.sum; this._count += previous.count; this._zeroCount += previous.zeroCount; @@ -277,7 +277,7 @@ export class ExponentialHistogramAccumulation implements Accumulation { */ clone(): ExponentialHistogramAccumulation { return new ExponentialHistogramAccumulation( - this.startTime, + this.startTimeUnixNano, this._maxSize, this._recordMinMax, this._sum, @@ -531,9 +531,9 @@ export class ExponentialHistogramAggregator private readonly _recordMinMax: boolean ) {} - createAccumulation(startTime: HrTime) { + createAccumulation(startTimeUnixNano: bigint) { return new ExponentialHistogramAccumulation( - startTime, + startTimeUnixNano, this._maxSize, this._recordMinMax ); @@ -569,7 +569,7 @@ export class ExponentialHistogramAggregator descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord[], - endTime: HrTime + endTimeUnixNano: bigint ): Maybe { return { descriptor, @@ -587,8 +587,10 @@ 
export class ExponentialHistogramAggregator return { attributes, - startTime: accumulation.startTime, - endTime, + startTimeUnixNano: accumulation.startTimeUnixNano, + startTime: nanosToHrTime(accumulation.startTimeUnixNano), + endTimeUnixNano, + endTime: nanosToHrTime(endTimeUnixNano), value: { min: pointValue.hasMinMax ? pointValue.min : undefined, max: pointValue.hasMinMax ? pointValue.max : undefined, diff --git a/packages/sdk-metrics/src/aggregator/Histogram.ts b/packages/sdk-metrics/src/aggregator/Histogram.ts index 09fae6c122b..4deda7f7cca 100644 --- a/packages/sdk-metrics/src/aggregator/Histogram.ts +++ b/packages/sdk-metrics/src/aggregator/Histogram.ts @@ -25,10 +25,10 @@ import { HistogramMetricData, InstrumentType, } from '../export/MetricData'; -import { HrTime } from '@opentelemetry/api'; import { binarySearchUB, Maybe } from '../utils'; import { AggregationTemporality } from '../export/AggregationTemporality'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; +import { nanosToHrTime } from '@opentelemetry/core'; /** * Internal value type for HistogramAggregation. 
@@ -65,7 +65,7 @@ function createNewEmptyCheckpoint(boundaries: number[]): InternalHistogram { export class HistogramAccumulation implements Accumulation { constructor( - public startTime: HrTime, + public startTimeUnixNano: bigint, private readonly _boundaries: number[], private _recordMinMax = true, private _current: InternalHistogram = createNewEmptyCheckpoint(_boundaries) @@ -91,8 +91,8 @@ export class HistogramAccumulation implements Accumulation { this._current.buckets.counts[idx] += 1; } - setStartTime(startTime: HrTime): void { - this.startTime = startTime; + setStartTime(startTimeUnixNano: bigint): void { + this.startTimeUnixNano = startTimeUnixNano; } toPointValue(): InternalHistogram { @@ -116,9 +116,9 @@ export class HistogramAggregator implements Aggregator { private readonly _recordMinMax: boolean ) {} - createAccumulation(startTime: HrTime) { + createAccumulation(startTimeUnixNano: bigint) { return new HistogramAccumulation( - startTime, + startTimeUnixNano, this._boundaries, this._recordMinMax ); @@ -161,7 +161,7 @@ export class HistogramAggregator implements Aggregator { } return new HistogramAccumulation( - previous.startTime, + previous.startTimeUnixNano, previousValue.buckets.boundaries, this._recordMinMax, { @@ -199,7 +199,7 @@ export class HistogramAggregator implements Aggregator { } return new HistogramAccumulation( - current.startTime, + current.startTimeUnixNano, previousValue.buckets.boundaries, this._recordMinMax, { @@ -220,7 +220,7 @@ export class HistogramAggregator implements Aggregator { descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord[], - endTime: HrTime + endTimeUnixNano: bigint ): Maybe { return { descriptor, @@ -238,8 +238,10 @@ export class HistogramAggregator implements Aggregator { return { attributes, - startTime: accumulation.startTime, - endTime, + startTimeUnixNano: accumulation.startTimeUnixNano, + startTime: 
nanosToHrTime(accumulation.startTimeUnixNano), + endTimeUnixNano, + endTime: nanosToHrTime(endTimeUnixNano), value: { min: pointValue.hasMinMax ? pointValue.min : undefined, max: pointValue.hasMinMax ? pointValue.max : undefined, diff --git a/packages/sdk-metrics/src/aggregator/LastValue.ts b/packages/sdk-metrics/src/aggregator/LastValue.ts index 667344eed53..c9cbab57d54 100644 --- a/packages/sdk-metrics/src/aggregator/LastValue.ts +++ b/packages/sdk-metrics/src/aggregator/LastValue.ts @@ -21,27 +21,26 @@ import { AggregatorKind, LastValue, } from './types'; -import { HrTime } from '@opentelemetry/api'; -import { millisToHrTime, hrTimeToMicroseconds } from '@opentelemetry/core'; import { DataPointType, GaugeMetricData } from '../export/MetricData'; import { Maybe } from '../utils'; import { AggregationTemporality } from '../export/AggregationTemporality'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; +import { millisecondsToNanoseconds, nanosToHrTime } from '@opentelemetry/core'; export class LastValueAccumulation implements Accumulation { constructor( - public startTime: HrTime, + public startTimeUnixNano: bigint, private _current: number = 0, - public sampleTime: HrTime = [0, 0] + public sampleTime: bigint = 0n ) {} record(value: number): void { this._current = value; - this.sampleTime = millisToHrTime(Date.now()); + this.sampleTime = millisecondsToNanoseconds(Date.now()); } - setStartTime(startTime: HrTime): void { - this.startTime = startTime; + setStartTime(startTimeUnixNano: bigint): void { + this.startTimeUnixNano = startTimeUnixNano; } toPointValue(): LastValue { @@ -53,8 +52,8 @@ export class LastValueAccumulation implements Accumulation { export class LastValueAggregator implements Aggregator { public kind: AggregatorKind.LAST_VALUE = AggregatorKind.LAST_VALUE; - createAccumulation(startTime: HrTime) { - return new LastValueAccumulation(startTime); + createAccumulation(startTimeUnixNano: bigint) { + return new 
LastValueAccumulation(startTimeUnixNano); } /** @@ -66,14 +65,10 @@ export class LastValueAggregator implements Aggregator { previous: LastValueAccumulation, delta: LastValueAccumulation ): LastValueAccumulation { - // nanoseconds may lose precisions. const latestAccumulation = - hrTimeToMicroseconds(delta.sampleTime) >= - hrTimeToMicroseconds(previous.sampleTime) - ? delta - : previous; + delta.sampleTime >= previous.sampleTime ? delta : previous; return new LastValueAccumulation( - previous.startTime, + previous.startTimeUnixNano, latestAccumulation.toPointValue(), latestAccumulation.sampleTime ); @@ -91,12 +86,9 @@ export class LastValueAggregator implements Aggregator { ): LastValueAccumulation { // nanoseconds may lose precisions. const latestAccumulation = - hrTimeToMicroseconds(current.sampleTime) >= - hrTimeToMicroseconds(previous.sampleTime) - ? current - : previous; + current.sampleTime >= previous.sampleTime ? current : previous; return new LastValueAccumulation( - current.startTime, + current.startTimeUnixNano, latestAccumulation.toPointValue(), latestAccumulation.sampleTime ); @@ -106,7 +98,7 @@ export class LastValueAggregator implements Aggregator { descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord[], - endTime: HrTime + endTimeUnixNano: bigint ): Maybe { return { descriptor, @@ -115,8 +107,10 @@ export class LastValueAggregator implements Aggregator { dataPoints: accumulationByAttributes.map(([attributes, accumulation]) => { return { attributes, - startTime: accumulation.startTime, - endTime, + startTimeUnixNano: accumulation.startTimeUnixNano, + startTime: nanosToHrTime(accumulation.startTimeUnixNano), + endTimeUnixNano, + endTime: nanosToHrTime(endTimeUnixNano), value: accumulation.toPointValue(), }; }), diff --git a/packages/sdk-metrics/src/aggregator/Sum.ts b/packages/sdk-metrics/src/aggregator/Sum.ts index fc1bfe7f652..38c52eb80d8 100644 --- 
a/packages/sdk-metrics/src/aggregator/Sum.ts +++ b/packages/sdk-metrics/src/aggregator/Sum.ts @@ -21,15 +21,15 @@ import { Accumulation, AccumulationRecord, } from './types'; -import { HrTime } from '@opentelemetry/api'; import { DataPointType, SumMetricData } from '../export/MetricData'; import { Maybe } from '../utils'; import { AggregationTemporality } from '../export/AggregationTemporality'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; +import { nanosToHrTime } from '@opentelemetry/core'; export class SumAccumulation implements Accumulation { constructor( - public startTime: HrTime, + public startTimeUnixNano: bigint, public monotonic: boolean, private _current: number = 0, public reset = false @@ -42,8 +42,8 @@ export class SumAccumulation implements Accumulation { this._current += value; } - setStartTime(startTime: HrTime): void { - this.startTime = startTime; + setStartTime(startTimeUnixNano: bigint): void { + this.startTimeUnixNano = startTimeUnixNano; } toPointValue(): Sum { @@ -57,8 +57,8 @@ export class SumAggregator implements Aggregator { constructor(public monotonic: boolean) {} - createAccumulation(startTime: HrTime) { - return new SumAccumulation(startTime, this.monotonic); + createAccumulation(startTimeUnixNano: bigint) { + return new SumAccumulation(startTimeUnixNano, this.monotonic); } /** @@ -69,14 +69,14 @@ export class SumAggregator implements Aggregator { const deltaPv = delta.toPointValue(); if (delta.reset) { return new SumAccumulation( - delta.startTime, + delta.startTimeUnixNano, this.monotonic, deltaPv, delta.reset ); } return new SumAccumulation( - previous.startTime, + previous.startTimeUnixNano, this.monotonic, prevPv + deltaPv ); @@ -95,14 +95,14 @@ export class SumAggregator implements Aggregator { */ if (this.monotonic && prevPv > currPv) { return new SumAccumulation( - current.startTime, + current.startTimeUnixNano, this.monotonic, currPv, true ); } return new SumAccumulation( - current.startTime, + 
current.startTimeUnixNano, this.monotonic, currPv - prevPv ); @@ -112,7 +112,7 @@ export class SumAggregator implements Aggregator { descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord[], - endTime: HrTime + endTimeUnixNano: bigint ): Maybe { return { descriptor, @@ -121,8 +121,10 @@ export class SumAggregator implements Aggregator { dataPoints: accumulationByAttributes.map(([attributes, accumulation]) => { return { attributes, - startTime: accumulation.startTime, - endTime, + startTimeUnixNano: accumulation.startTimeUnixNano, + startTime: nanosToHrTime(accumulation.startTimeUnixNano), + endTimeUnixNano, + endTime: nanosToHrTime(endTimeUnixNano), value: accumulation.toPointValue(), }; }), diff --git a/packages/sdk-metrics/src/aggregator/types.ts b/packages/sdk-metrics/src/aggregator/types.ts index 7f9596788c1..3d7b6d61e43 100644 --- a/packages/sdk-metrics/src/aggregator/types.ts +++ b/packages/sdk-metrics/src/aggregator/types.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { HrTime, Attributes } from '@opentelemetry/api'; +import { Attributes } from '@opentelemetry/api'; import { AggregationTemporality } from '../export/AggregationTemporality'; import { MetricData } from '../export/MetricData'; import { Maybe } from '../utils'; @@ -85,7 +85,7 @@ export interface ExponentialHistogram { * An Aggregator accumulation state. */ export interface Accumulation { - setStartTime(startTime: HrTime): void; + setStartTime(startTimeUnixNano: bigint): void; record(value: number): void; } @@ -102,7 +102,7 @@ export interface Aggregator { /** * Create a clean state of accumulation. */ - createAccumulation(startTime: HrTime): T; + createAccumulation(startTimeUnixNano: bigint): T; /** * Returns the result of the merge of the given accumulations. @@ -131,13 +131,13 @@ export interface Aggregator { * @param descriptor the metric descriptor. 
* @param aggregationTemporality the temporality of the resulting {@link MetricData} * @param accumulationByAttributes the array of attributes and accumulation pairs. - * @param endTime the end time of the metric data. + * @param endTimeUnixNano the end time of the metric data. * @return the {@link MetricData} that this {@link Aggregator} will produce. */ toMetricData( descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord[], - endTime: HrTime + endTimeUnixNano: bigint ): Maybe; } diff --git a/packages/sdk-metrics/src/exemplar/AlignedHistogramBucketExemplarReservoir.ts b/packages/sdk-metrics/src/exemplar/AlignedHistogramBucketExemplarReservoir.ts index b7b9c471fd8..d96f533bda1 100644 --- a/packages/sdk-metrics/src/exemplar/AlignedHistogramBucketExemplarReservoir.ts +++ b/packages/sdk-metrics/src/exemplar/AlignedHistogramBucketExemplarReservoir.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { Context, HrTime, Attributes } from '@opentelemetry/api'; +import { Context, Attributes } from '@opentelemetry/api'; import { FixedSizeExemplarReservoirBase } from './ExemplarReservoir'; /** @@ -31,7 +31,7 @@ export class AlignedHistogramBucketExemplarReservoir extends FixedSizeExemplarRe private _findBucketIndex( value: number, - _timestamp: HrTime, + _timestamp: bigint, _attributes: Attributes, _ctx: Context ) { @@ -45,7 +45,7 @@ export class AlignedHistogramBucketExemplarReservoir extends FixedSizeExemplarRe offer( value: number, - timestamp: HrTime, + timestamp: bigint, attributes: Attributes, ctx: Context ): void { diff --git a/packages/sdk-metrics/src/exemplar/AlwaysSampleExemplarFilter.ts b/packages/sdk-metrics/src/exemplar/AlwaysSampleExemplarFilter.ts index b2e7fb161d6..f5ed542de61 100644 --- a/packages/sdk-metrics/src/exemplar/AlwaysSampleExemplarFilter.ts +++ b/packages/sdk-metrics/src/exemplar/AlwaysSampleExemplarFilter.ts @@ -14,13 +14,13 @@ * limitations under the License. 
*/ -import { Context, HrTime, Attributes } from '@opentelemetry/api'; +import { Context, Attributes } from '@opentelemetry/api'; import { ExemplarFilter } from './ExemplarFilter'; export class AlwaysSampleExemplarFilter implements ExemplarFilter { shouldSample( _value: number, - _timestamp: HrTime, + _timestamp: bigint, _attributes: Attributes, _ctx: Context ): boolean { diff --git a/packages/sdk-metrics/src/exemplar/Exemplar.ts b/packages/sdk-metrics/src/exemplar/Exemplar.ts index 4de413b1022..3cf8e99188d 100644 --- a/packages/sdk-metrics/src/exemplar/Exemplar.ts +++ b/packages/sdk-metrics/src/exemplar/Exemplar.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { HrTime, Attributes } from '@opentelemetry/api'; +import { Attributes } from '@opentelemetry/api'; /** * A representation of an exemplar, which is a sample input measurement. @@ -31,8 +31,8 @@ export type Exemplar = { // The value of the measurement that was recorded. value: number; - // timestamp is the exact time when this exemplar was recorded - timestamp: HrTime; + // timestamp is the exact time when this exemplar was recorded in nanoseconds + timestamp: bigint; // (Optional) Span ID of the exemplar trace. // span_id may be missing if the measurement is not recorded inside a trace diff --git a/packages/sdk-metrics/src/exemplar/ExemplarFilter.ts b/packages/sdk-metrics/src/exemplar/ExemplarFilter.ts index ad5a79d9edd..601e14e9a80 100644 --- a/packages/sdk-metrics/src/exemplar/ExemplarFilter.ts +++ b/packages/sdk-metrics/src/exemplar/ExemplarFilter.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { Context, HrTime, Attributes } from '@opentelemetry/api'; +import { Attributes, Context } from '@opentelemetry/api'; /** * This interface represents a ExemplarFilter. 
Exemplar filters are @@ -32,7 +32,7 @@ export interface ExemplarFilter { */ shouldSample( value: number, - timestamp: HrTime, + timestamp: bigint, attributes: Attributes, ctx: Context ): boolean; diff --git a/packages/sdk-metrics/src/exemplar/ExemplarReservoir.ts b/packages/sdk-metrics/src/exemplar/ExemplarReservoir.ts index 229f4991bd4..268545871b6 100644 --- a/packages/sdk-metrics/src/exemplar/ExemplarReservoir.ts +++ b/packages/sdk-metrics/src/exemplar/ExemplarReservoir.ts @@ -16,7 +16,6 @@ import { Context, - HrTime, isSpanContextValid, trace, Attributes, @@ -30,7 +29,7 @@ export interface ExemplarReservoir { /** Offers a measurement to be sampled. */ offer( value: number, - timestamp: HrTime, + timestamp: bigint, attributes: Attributes, ctx: Context ): void; @@ -49,14 +48,14 @@ export interface ExemplarReservoir { class ExemplarBucket { private value: number = 0; private attributes: Attributes = {}; - private timestamp: HrTime = [0, 0]; + private timestamp: bigint = 0n; private spanId?: string; private traceId?: string; private _offered: boolean = false; offer( value: number, - timestamp: HrTime, + timestamp: bigint, attributes: Attributes, ctx: Context ) { @@ -89,7 +88,7 @@ class ExemplarBucket { }; this.attributes = {}; this.value = 0; - this.timestamp = [0, 0]; + this.timestamp = 0n; this.spanId = undefined; this.traceId = undefined; this._offered = false; @@ -113,7 +112,7 @@ export abstract class FixedSizeExemplarReservoirBase abstract offer( value: number, - timestamp: HrTime, + timestamp: bigint, attributes: Attributes, ctx: Context ): void; diff --git a/packages/sdk-metrics/src/exemplar/NeverSampleExemplarFilter.ts b/packages/sdk-metrics/src/exemplar/NeverSampleExemplarFilter.ts index 8df455d41eb..0c02f0d90a1 100644 --- a/packages/sdk-metrics/src/exemplar/NeverSampleExemplarFilter.ts +++ b/packages/sdk-metrics/src/exemplar/NeverSampleExemplarFilter.ts @@ -14,13 +14,13 @@ * limitations under the License. 
*/ -import { Context, HrTime, Attributes } from '@opentelemetry/api'; +import { Attributes, Context } from '@opentelemetry/api'; import { ExemplarFilter } from './ExemplarFilter'; export class NeverSampleExemplarFilter implements ExemplarFilter { shouldSample( _value: number, - _timestamp: HrTime, + _timestamp: bigint, _attributes: Attributes, _ctx: Context ): boolean { diff --git a/packages/sdk-metrics/src/exemplar/SimpleFixedSizeExemplarReservoir.ts b/packages/sdk-metrics/src/exemplar/SimpleFixedSizeExemplarReservoir.ts index 659f9cee505..025abe0d252 100644 --- a/packages/sdk-metrics/src/exemplar/SimpleFixedSizeExemplarReservoir.ts +++ b/packages/sdk-metrics/src/exemplar/SimpleFixedSizeExemplarReservoir.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { Context, HrTime, Attributes } from '@opentelemetry/api'; +import { Context, Attributes } from '@opentelemetry/api'; import { FixedSizeExemplarReservoirBase } from './ExemplarReservoir'; /** @@ -36,7 +36,7 @@ export class SimpleFixedSizeExemplarReservoir extends FixedSizeExemplarReservoir private _findBucketIndex( _value: number, - _timestamp: HrTime, + _timestamp: bigint, _attributes: Attributes, _ctx: Context ) { @@ -48,7 +48,7 @@ export class SimpleFixedSizeExemplarReservoir extends FixedSizeExemplarReservoir offer( value: number, - timestamp: HrTime, + timestamp: bigint, attributes: Attributes, ctx: Context ): void { diff --git a/packages/sdk-metrics/src/exemplar/WithTraceExemplarFilter.ts b/packages/sdk-metrics/src/exemplar/WithTraceExemplarFilter.ts index 36834612571..9e9274f82f8 100644 --- a/packages/sdk-metrics/src/exemplar/WithTraceExemplarFilter.ts +++ b/packages/sdk-metrics/src/exemplar/WithTraceExemplarFilter.ts @@ -16,7 +16,6 @@ import { Context, - HrTime, isSpanContextValid, trace, TraceFlags, @@ -27,7 +26,7 @@ import { ExemplarFilter } from './ExemplarFilter'; export class WithTraceExemplarFilter implements ExemplarFilter { shouldSample( value: number, - timestamp: HrTime, + 
timestamp: bigint, attributes: Attributes, ctx: Context ): boolean { diff --git a/packages/sdk-metrics/src/export/MetricData.ts b/packages/sdk-metrics/src/export/MetricData.ts index 02a94b49899..b722fef22fb 100644 --- a/packages/sdk-metrics/src/export/MetricData.ts +++ b/packages/sdk-metrics/src/export/MetricData.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { HrTime, Attributes, ValueType } from '@opentelemetry/api'; +import { Attributes, HrTime, ValueType } from '@opentelemetry/api'; import { InstrumentationScope } from '@opentelemetry/core'; import { Resource } from '@opentelemetry/resources'; import { AggregationTemporality } from './AggregationTemporality'; @@ -153,20 +153,29 @@ export enum DataPointType { */ export interface DataPoint { /** - * The start epoch timestamp of the DataPoint, usually the time when + * The start epoch timestamp in nanoseconds of the DataPoint, usually the time when * the metric was created when the preferred AggregationTemporality is * CUMULATIVE, or last collection time otherwise. */ + readonly startTimeUnixNano: bigint; + + /** @deprecated please use startTimeUnixNano */ readonly startTime: HrTime; + /** - * The end epoch timestamp when data were collected, usually it represents + * The end epoch timestamp in nanoseconds when data were collected, usually it represents * the moment when `MetricReader.collect` was called. */ + readonly endTimeUnixNano: bigint; + + /** @deprecated please use endTimeUnixNano */ readonly endTime: HrTime; + /** * The attributes associated with this DataPoint. */ readonly attributes: Attributes; + /** * The value for this DataPoint. The type of the value is indicated by the * {@link DataPointType}. 
diff --git a/packages/sdk-metrics/src/state/AsyncMetricStorage.ts b/packages/sdk-metrics/src/state/AsyncMetricStorage.ts index 4f8651915a6..2279dd8e5f3 100644 --- a/packages/sdk-metrics/src/state/AsyncMetricStorage.ts +++ b/packages/sdk-metrics/src/state/AsyncMetricStorage.ts @@ -14,7 +14,6 @@ * limitations under the License. */ -import { HrTime } from '@opentelemetry/api'; import { Accumulation, Aggregator } from '../aggregator/types'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; import { MetricStorage } from './MetricStorage'; @@ -57,7 +56,7 @@ export class AsyncMetricStorage> ); } - record(measurements: AttributeHashMap, observationTime: HrTime) { + record(measurements: AttributeHashMap, observationTime: bigint) { const processed = new AttributeHashMap(); Array.from(measurements.entries()).forEach(([attributes, value]) => { processed.set(this._attributesProcessor.process(attributes), value); @@ -74,7 +73,7 @@ export class AsyncMetricStorage> */ collect( collector: MetricCollectorHandle, - collectionTime: HrTime + collectionTime: bigint ): Maybe { const accumulations = this._deltaMetricStorage.collect(); diff --git a/packages/sdk-metrics/src/state/DeltaMetricProcessor.ts b/packages/sdk-metrics/src/state/DeltaMetricProcessor.ts index 926bbf952ff..2a946e10875 100644 --- a/packages/sdk-metrics/src/state/DeltaMetricProcessor.ts +++ b/packages/sdk-metrics/src/state/DeltaMetricProcessor.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { Context, HrTime, Attributes } from '@opentelemetry/api'; +import { Context, Attributes } from '@opentelemetry/api'; import { Maybe, hashAttributes } from '../utils'; import { Accumulation, Aggregator } from '../aggregator/types'; import { AttributeHashMap } from './HashMap'; @@ -47,7 +47,7 @@ export class DeltaMetricProcessor> { value: number, attributes: Attributes, _context: Context, - collectionTime: HrTime + collectionTime: bigint ) { let accumulation = this._activeCollectionStorage.get(attributes); @@ -70,7 +70,7 @@ export class DeltaMetricProcessor> { batchCumulate( measurements: AttributeHashMap, - collectionTime: HrTime + collectionTime: bigint ) { Array.from(measurements.entries()).forEach( ([attributes, value, hashCode]) => { diff --git a/packages/sdk-metrics/src/state/MeterSharedState.ts b/packages/sdk-metrics/src/state/MeterSharedState.ts index 4ff18fec7e5..cf828dae506 100644 --- a/packages/sdk-metrics/src/state/MeterSharedState.ts +++ b/packages/sdk-metrics/src/state/MeterSharedState.ts @@ -14,7 +14,6 @@ * limitations under the License. */ -import { HrTime } from '@opentelemetry/api'; import { InstrumentationScope } from '@opentelemetry/core'; import { MetricCollectOptions } from '../export/MetricProducer'; import { ScopeMetrics } from '../export/MetricData'; @@ -73,13 +72,13 @@ export class MeterSharedState { /** * @param collector opaque handle of {@link MetricCollector} which initiated the collection. - * @param collectionTime the HrTime at which the collection was initiated. + * @param collectionTime the time in nanoseconds at which the collection was initiated. * @param options options for collection. * @returns the list of metric data collected. 
*/ async collect( collector: MetricCollectorHandle, - collectionTime: HrTime, + collectionTime: bigint, options?: MetricCollectOptions ): Promise { /** diff --git a/packages/sdk-metrics/src/state/MetricCollector.ts b/packages/sdk-metrics/src/state/MetricCollector.ts index 02faf0ae531..80bcbad084e 100644 --- a/packages/sdk-metrics/src/state/MetricCollector.ts +++ b/packages/sdk-metrics/src/state/MetricCollector.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { millisToHrTime } from '@opentelemetry/core'; +import { millisecondsToNanoseconds } from '@opentelemetry/core'; import { AggregationTemporalitySelector } from '../export/AggregationSelector'; import { CollectionResult, @@ -38,7 +38,7 @@ export class MetricCollector implements MetricProducer { ) {} async collect(options?: MetricCollectOptions): Promise { - const collectionTime = millisToHrTime(Date.now()); + const collectionTime = millisecondsToNanoseconds(Date.now()); const scopeMetrics: ScopeMetrics[] = []; const errors: unknown[] = []; diff --git a/packages/sdk-metrics/src/state/MetricStorage.ts b/packages/sdk-metrics/src/state/MetricStorage.ts index e959ea0329c..48434f84448 100644 --- a/packages/sdk-metrics/src/state/MetricStorage.ts +++ b/packages/sdk-metrics/src/state/MetricStorage.ts @@ -14,7 +14,6 @@ * limitations under the License. 
*/ -import { HrTime } from '@opentelemetry/api'; import { MetricData } from '../export/MetricData'; import { Maybe } from '../utils'; import { MetricCollectorHandle } from './MetricCollector'; @@ -39,7 +38,7 @@ export abstract class MetricStorage { */ abstract collect( collector: MetricCollectorHandle, - collectionTime: HrTime + collectionTime: bigint ): Maybe; getInstrumentDescriptor(): Readonly { diff --git a/packages/sdk-metrics/src/state/MultiWritableMetricStorage.ts b/packages/sdk-metrics/src/state/MultiWritableMetricStorage.ts index 9a60a0573cd..611b64d64d2 100644 --- a/packages/sdk-metrics/src/state/MultiWritableMetricStorage.ts +++ b/packages/sdk-metrics/src/state/MultiWritableMetricStorage.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { Context, HrTime, Attributes } from '@opentelemetry/api'; +import { Context, Attributes } from '@opentelemetry/api'; import { WritableMetricStorage } from './WritableMetricStorage'; /** @@ -27,7 +27,7 @@ export class MultiMetricStorage implements WritableMetricStorage { value: number, attributes: Attributes, context: Context, - recordTime: HrTime + recordTime: bigint ) { this._backingStorages.forEach(it => { it.record(value, attributes, context, recordTime); diff --git a/packages/sdk-metrics/src/state/ObservableRegistry.ts b/packages/sdk-metrics/src/state/ObservableRegistry.ts index 5e25686c6a6..b6db0366b22 100644 --- a/packages/sdk-metrics/src/state/ObservableRegistry.ts +++ b/packages/sdk-metrics/src/state/ObservableRegistry.ts @@ -16,7 +16,6 @@ import { diag, - HrTime, BatchObservableCallback, Observable, ObservableCallback, @@ -119,7 +118,7 @@ export class ObservableRegistry { * @returns a promise of rejected reasons for invoking callbacks. 
*/ async observe( - collectionTime: HrTime, + collectionTime: bigint, timeoutMillis?: number ): Promise { const callbackFutures = this._observeCallbacks( @@ -142,7 +141,7 @@ export class ObservableRegistry { return rejections; } - private _observeCallbacks(observationTime: HrTime, timeoutMillis?: number) { + private _observeCallbacks(observationTime: bigint, timeoutMillis?: number) { return this._callbacks.map(async ({ callback, instrument }) => { const observableResult = new ObservableResultImpl( instrument._descriptor.name, @@ -162,7 +161,7 @@ export class ObservableRegistry { } private _observeBatchCallbacks( - observationTime: HrTime, + observationTime: bigint, timeoutMillis?: number ) { return this._batchCallbacks.map(async ({ callback, instruments }) => { diff --git a/packages/sdk-metrics/src/state/SyncMetricStorage.ts b/packages/sdk-metrics/src/state/SyncMetricStorage.ts index 10786c3dec7..47dc24917c3 100644 --- a/packages/sdk-metrics/src/state/SyncMetricStorage.ts +++ b/packages/sdk-metrics/src/state/SyncMetricStorage.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { Context, HrTime, Attributes } from '@opentelemetry/api'; +import { Context, Attributes } from '@opentelemetry/api'; import { WritableMetricStorage } from './WritableMetricStorage'; import { Accumulation, Aggregator } from '../aggregator/types'; import { InstrumentDescriptor } from '../InstrumentDescriptor'; @@ -60,7 +60,7 @@ export class SyncMetricStorage> value: number, attributes: Attributes, context: Context, - recordTime: HrTime + recordTime: bigint ) { attributes = this._attributesProcessor.process(attributes, context); this._deltaMetricStorage.record(value, attributes, context, recordTime); @@ -74,7 +74,7 @@ export class SyncMetricStorage> */ collect( collector: MetricCollectorHandle, - collectionTime: HrTime + collectionTime: bigint ): Maybe { const accumulations = this._deltaMetricStorage.collect(); diff --git a/packages/sdk-metrics/src/state/TemporalMetricProcessor.ts b/packages/sdk-metrics/src/state/TemporalMetricProcessor.ts index 967b6f81815..35ef5586b07 100644 --- a/packages/sdk-metrics/src/state/TemporalMetricProcessor.ts +++ b/packages/sdk-metrics/src/state/TemporalMetricProcessor.ts @@ -14,7 +14,6 @@ * limitations under the License. */ -import { HrTime } from '@opentelemetry/api'; import { Accumulation, AccumulationRecord, @@ -38,7 +37,7 @@ interface LastReportedHistory> { /** * The timestamp the data was reported. */ - collectionTime: HrTime; + collectionTime: bigint; /** * The AggregationTemporality used to aggregate reports. 
*/ @@ -83,7 +82,7 @@ export class TemporalMetricProcessor> { collector: MetricCollectorHandle, instrumentDescriptor: InstrumentDescriptor, currentAccumulations: AttributeHashMap, - collectionTime: HrTime + collectionTime: bigint ): Maybe { this._stashAccumulations(currentAccumulations); const unreportedAccumulations = @@ -150,7 +149,7 @@ export class TemporalMetricProcessor> { instrumentDescriptor, aggregationTemporality, accumulationRecords, - /* endTime */ collectionTime + /* endTimeUnixNano */ collectionTime ); } @@ -211,7 +210,7 @@ export class TemporalMetricProcessor> { static calibrateStartTime>( last: AttributeHashMap, current: AttributeHashMap, - lastCollectionTime: HrTime + lastCollectionTime: bigint ) { for (const [key, hash] of last.keys()) { const currentAccumulation = current.get(key, hash); diff --git a/packages/sdk-metrics/src/state/WritableMetricStorage.ts b/packages/sdk-metrics/src/state/WritableMetricStorage.ts index 6d738156acf..f396fa7e6fd 100644 --- a/packages/sdk-metrics/src/state/WritableMetricStorage.ts +++ b/packages/sdk-metrics/src/state/WritableMetricStorage.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { Context, HrTime, Attributes } from '@opentelemetry/api'; +import { Context, Attributes } from '@opentelemetry/api'; import { AttributeHashMap } from './HashMap'; /** @@ -29,7 +29,7 @@ export interface WritableMetricStorage { value: number, attributes: Attributes, context: Context, - recordTime: HrTime + recordTime: bigint ): void; } @@ -41,5 +41,5 @@ export interface WritableMetricStorage { */ export interface AsyncWritableMetricStorage { /** Records a batch of measurements. 
*/ - record(measurements: AttributeHashMap, observationTime: HrTime): void; + record(measurements: AttributeHashMap, observationTime: bigint): void; } diff --git a/packages/sdk-metrics/test/ExemplarFilter.test.ts b/packages/sdk-metrics/test/ExemplarFilter.test.ts index df472fe9414..4d5d47f9212 100644 --- a/packages/sdk-metrics/test/ExemplarFilter.test.ts +++ b/packages/sdk-metrics/test/ExemplarFilter.test.ts @@ -35,20 +35,14 @@ describe('ExemplarFilter', () => { describe('AlwaysSampleExemplarFilter', () => { it('should return true always for shouldSample', () => { const filter = new AlwaysSampleExemplarFilter(); - assert.strictEqual( - filter.shouldSample(10, [0, 0], {}, ROOT_CONTEXT), - true - ); + assert.strictEqual(filter.shouldSample(10, 0n, {}, ROOT_CONTEXT), true); }); }); describe('NeverSampleExemplarFilter', () => { it('should return false always for shouldSample', () => { const filter = new NeverSampleExemplarFilter(); - assert.strictEqual( - filter.shouldSample(1, [0, 0], {}, ROOT_CONTEXT), - false - ); + assert.strictEqual(filter.shouldSample(1, 0n, {}, ROOT_CONTEXT), false); }); }); @@ -61,7 +55,7 @@ describe('ExemplarFilter', () => { traceFlags: TraceFlags.NONE, }; const ctx = trace.setSpanContext(ROOT_CONTEXT, spanContext); - assert.strictEqual(filter.shouldSample(5.3, [0, 0], {}, ctx), false); + assert.strictEqual(filter.shouldSample(5.3, 0n, {}, ctx), false); }); it('should return true for shouldSample when the trace is sampled', () => { @@ -72,7 +66,7 @@ describe('ExemplarFilter', () => { traceFlags: TraceFlags.SAMPLED, }; const ctx = trace.setSpanContext(ROOT_CONTEXT, spanContext); - assert.strictEqual(filter.shouldSample(5.3, [0, 0], {}, ctx), true); + assert.strictEqual(filter.shouldSample(5.3, 0n, {}, ctx), true); }); }); }); diff --git a/packages/sdk-metrics/test/ExemplarReservoir.test.ts b/packages/sdk-metrics/test/ExemplarReservoir.test.ts index 8da5006e3d1..562e6825aaa 100644 --- a/packages/sdk-metrics/test/ExemplarReservoir.test.ts +++ 
b/packages/sdk-metrics/test/ExemplarReservoir.test.ts @@ -20,13 +20,13 @@ import { TraceFlags, trace, } from '@opentelemetry/api'; -import { hrTime } from '@opentelemetry/core'; import * as assert from 'assert'; import { SimpleFixedSizeExemplarReservoir, AlignedHistogramBucketExemplarReservoir, } from '../src/exemplar'; +import { getTimeOrigin, millisecondsToNanoseconds } from '@opentelemetry/core'; describe('ExemplarReservoir', () => { const TRACE_ID = 'd4cda95b652f4a1592b449d5929fda1b'; @@ -47,7 +47,12 @@ describe('ExemplarReservoir', () => { }; const ctx = trace.setSpanContext(ROOT_CONTEXT, spanContext); - reservoir.offer(1, hrTime(), {}, ctx); + reservoir.offer( + 1, + millisecondsToNanoseconds(performance.now() + getTimeOrigin()), + {}, + ctx + ); const exemplars = reservoir.collect({}); assert.strictEqual(exemplars.length, 1); assert.strictEqual(exemplars[0].traceId, TRACE_ID); @@ -59,7 +64,7 @@ describe('ExemplarReservoir', () => { const reservoir = new SimpleFixedSizeExemplarReservoir(1); reservoir.offer( 1, - hrTime(), + millisecondsToNanoseconds(performance.now() + getTimeOrigin()), { key1: 'value1', key2: 'value2' }, ROOT_CONTEXT ); @@ -72,9 +77,24 @@ describe('ExemplarReservoir', () => { const reservoir = new AlignedHistogramBucketExemplarReservoir([ 0, 5, 10, 25, 50, 75, ]); - reservoir.offer(52, hrTime(), { bucket: '5' }, ROOT_CONTEXT); - reservoir.offer(7, hrTime(), { bucket: '3' }, ROOT_CONTEXT); - reservoir.offer(6, hrTime(), { bucket: '3' }, ROOT_CONTEXT); + reservoir.offer( + 52, + millisecondsToNanoseconds(performance.now() + getTimeOrigin()), + { bucket: '5' }, + ROOT_CONTEXT + ); + reservoir.offer( + 7, + millisecondsToNanoseconds(performance.now() + getTimeOrigin()), + { bucket: '3' }, + ROOT_CONTEXT + ); + reservoir.offer( + 6, + millisecondsToNanoseconds(performance.now() + getTimeOrigin()), + { bucket: '3' }, + ROOT_CONTEXT + ); const exemplars = reservoir.collect({ bucket: '3' }); assert.strictEqual(exemplars.length, 2); 
assert.strictEqual(exemplars[0].value, 6); diff --git a/packages/sdk-metrics/test/Instruments.test.ts b/packages/sdk-metrics/test/Instruments.test.ts index 0db3b4ff241..983c8727e38 100644 --- a/packages/sdk-metrics/test/Instruments.test.ts +++ b/packages/sdk-metrics/test/Instruments.test.ts @@ -876,8 +876,8 @@ async function validateExport( metric.dataPoints[idx], expectedDataPoint.attributes ?? {}, expectedDataPoint.value as any, - expectedDataPoint.startTime, - expectedDataPoint.endTime + expectedDataPoint.startTimeUnixNano, + expectedDataPoint.endTimeUnixNano ); } } diff --git a/packages/sdk-metrics/test/aggregator/Drop.test.ts b/packages/sdk-metrics/test/aggregator/Drop.test.ts index b66ece90ac8..a90768e2fb0 100644 --- a/packages/sdk-metrics/test/aggregator/Drop.test.ts +++ b/packages/sdk-metrics/test/aggregator/Drop.test.ts @@ -14,7 +14,6 @@ * limitations under the License. */ -import { HrTime } from '@opentelemetry/api'; import * as assert from 'assert'; import { AggregationTemporality } from '../../src'; import { DropAggregator } from '../../src/aggregator'; @@ -51,14 +50,14 @@ describe('DropAggregator', () => { it('no exceptions', () => { const aggregator = new DropAggregator(); - const endTime: HrTime = [1, 1]; + const endTimeUnixNano = 1_000_000_001n; assert.strictEqual( aggregator.toMetricData( defaultInstrumentDescriptor, AggregationTemporality.CUMULATIVE, [[{}, undefined]], - endTime + endTimeUnixNano ), undefined ); diff --git a/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts b/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts index 1dd57faa540..9f2ff082550 100644 --- a/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts +++ b/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { HrTime, ValueType } from '@opentelemetry/api'; +import { ValueType } from '@opentelemetry/api'; import { AggregationTemporality, DataPointType, @@ -45,7 +45,7 @@ describe('ExponentialHistogramAccumulation', () => { * this must finish with offset=-1 (all scales). */ it('handles alternating growth: scenario 1', () => { - const accumulation = new ExponentialHistogramAccumulation([0, 0], 4); + const accumulation = new ExponentialHistogramAccumulation(0n, 4); accumulation.record(2); accumulation.record(4); accumulation.record(1); @@ -62,7 +62,7 @@ describe('ExponentialHistogramAccumulation', () => { * holds range [4, 16). */ it('handles alternating growth: scenario 2', () => { - const accumulation = new ExponentialHistogramAccumulation([0, 0], 4); + const accumulation = new ExponentialHistogramAccumulation(0n, 4); accumulation.record(2); accumulation.record(2); accumulation.record(4); @@ -84,7 +84,7 @@ describe('ExponentialHistogramAccumulation', () => { [0.5, 1, 2], [0.5, 2, 1], ].forEach(row => { - const accumulation = new ExponentialHistogramAccumulation([0, 0], 2); + const accumulation = new ExponentialHistogramAccumulation(0n, 2); row.forEach(value => { accumulation.record(value); }); @@ -106,7 +106,7 @@ describe('ExponentialHistogramAccumulation', () => { [4, 1, 2], [4, 2, 1], ].forEach(row => { - const accumulation = new ExponentialHistogramAccumulation([0, 0], 2); + const accumulation = new ExponentialHistogramAccumulation(0n, 2); row.forEach(value => { accumulation.record(value); }); @@ -129,7 +129,7 @@ describe('ExponentialHistogramAccumulation', () => { [0.25, 1, 0.5], [0.25, 0.5, 1], ].forEach(row => { - const accumulation = new ExponentialHistogramAccumulation([0, 0], 2); + const accumulation = new ExponentialHistogramAccumulation(0n, 2); row.forEach(value => { accumulation.record(value); }); @@ -152,7 +152,7 @@ describe('ExponentialHistogramAccumulation', () => { for (const initScale of [0, 4]) { for (let step = maxSize; step < 4 * maxSize; 
step++) { const accumulation = new ExponentialHistogramAccumulation( - [0, 0], + 0n, maxSize ); let mapper = getMapping(initScale); @@ -217,7 +217,7 @@ describe('ExponentialHistogramAccumulation', () => { }); it('ignores NaN', () => { - const accumulation = new ExponentialHistogramAccumulation([0, 0], 1); + const accumulation = new ExponentialHistogramAccumulation(0n, 1); accumulation.record(NaN); @@ -232,9 +232,9 @@ describe('ExponentialHistogramAccumulation', () => { }); describe('merge', () => { it('handles simple (even) case', () => { - const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc2 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc0 = new ExponentialHistogramAccumulation(0n, 4); + const acc1 = new ExponentialHistogramAccumulation(0n, 4); + const acc2 = new ExponentialHistogramAccumulation(0n, 4); for (let i = 0; i < 4; i++) { const v1 = 2 << i; @@ -267,9 +267,9 @@ describe('ExponentialHistogramAccumulation', () => { }); it('handles simple (odd) case', () => { - const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc2 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc0 = new ExponentialHistogramAccumulation(0n, 4); + const acc1 = new ExponentialHistogramAccumulation(0n, 4); + const acc2 = new ExponentialHistogramAccumulation(0n, 4); for (let i = 0; i < 4; i++) { const v1 = 2 << i; @@ -312,9 +312,9 @@ describe('ExponentialHistogramAccumulation', () => { size: number, incr: number ) => { - const aHist = new ExponentialHistogramAccumulation([0, 0], size); - const bHist = new ExponentialHistogramAccumulation([0, 0], size); - const cHist = new ExponentialHistogramAccumulation([0, 0], size); + const aHist = new ExponentialHistogramAccumulation(0n, size); + const bHist = new ExponentialHistogramAccumulation(0n, size); + const cHist = new ExponentialHistogramAccumulation(0n, 
size); a.forEach(av => { aHist.updateByIncrement(av, incr); @@ -361,8 +361,8 @@ describe('ExponentialHistogramAccumulation', () => { }); describe('diff', () => { it('handles simple case', () => { - const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc0 = new ExponentialHistogramAccumulation(0n, 4); + const acc1 = new ExponentialHistogramAccumulation(0n, 4); for (let i = 0; i < 4; i++) { const v1 = 2 << i; @@ -387,9 +387,9 @@ describe('ExponentialHistogramAccumulation', () => { }); it('trims trailing 0 buckets after diff', () => { - const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc2 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc0 = new ExponentialHistogramAccumulation(0n, 4); + const acc1 = new ExponentialHistogramAccumulation(0n, 4); + const acc2 = new ExponentialHistogramAccumulation(0n, 4); for (let i = 0; i < 4; i++) { const v1 = 2 << i; @@ -423,9 +423,9 @@ describe('ExponentialHistogramAccumulation', () => { }); it('trims leading 0 buckets after diff', () => { - const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc2 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc0 = new ExponentialHistogramAccumulation(0n, 4); + const acc1 = new ExponentialHistogramAccumulation(0n, 4); + const acc2 = new ExponentialHistogramAccumulation(0n, 4); for (let i = 0; i < 4; i++) { const v1 = 2 << i; @@ -458,9 +458,9 @@ describe('ExponentialHistogramAccumulation', () => { }); it('handles all zero bucket case', () => { - const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc2 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc0 = new ExponentialHistogramAccumulation(0n, 4); + const acc1 = new 
ExponentialHistogramAccumulation(0n, 4); + const acc2 = new ExponentialHistogramAccumulation(0n, 4); for (let i = 0; i < 4; i++) { const v1 = 2 << i; @@ -487,8 +487,8 @@ describe('ExponentialHistogramAccumulation', () => { }); describe('clone()', () => { it('makes a deep copy', () => { - const acc0 = new ExponentialHistogramAccumulation([0, 0], 4); - const acc1 = new ExponentialHistogramAccumulation([0, 0], 4); + const acc0 = new ExponentialHistogramAccumulation(0n, 4); + const acc1 = new ExponentialHistogramAccumulation(0n, 4); for (let i = 0; i < 4; i++) { const v = 2 << i; @@ -523,7 +523,7 @@ describe('ExponentialHistogramAccumulation', () => { describe('toPointValue()', () => { it('returns representation of histogram internals', () => { - const acc = new ExponentialHistogramAccumulation([0, 0], 4); + const acc = new ExponentialHistogramAccumulation(0n, 4); for (let i = 0; i < 4; i++) { acc.record(2 << i); @@ -546,7 +546,7 @@ describe('ExponentialHistogramAccumulation', () => { describe('min max size', () => { it('auto-corrects to min max', () => { - const acc: any = new ExponentialHistogramAccumulation([0, 0], 0); + const acc: any = new ExponentialHistogramAccumulation(0n, 0); assert.strictEqual(acc['_maxSize'], 2); }); }); @@ -556,9 +556,9 @@ describe('ExponentialHistogramAggregation', () => { describe('merge', () => { it('merges and does not mutate args', () => { const agg = new ExponentialHistogramAggregator(4, true); - const acc0 = agg.createAccumulation([0, 0]); - const acc1 = agg.createAccumulation([0, 0]); - const acc2 = agg.createAccumulation([0, 0]); + const acc0 = agg.createAccumulation(0n); + const acc1 = agg.createAccumulation(0n); + const acc2 = agg.createAccumulation(0n); acc0.record(2 << 0); acc0.record(2 << 1); @@ -589,20 +589,20 @@ describe('ExponentialHistogramAggregation', () => { assert.deepStrictEqual(acc1.toPointValue(), acc1Snapshot); }); - it("keeps the previous point's startTime", () => { + it("keeps the previous point's 
startTimeUnixNano", () => { const agg = new ExponentialHistogramAggregator(4, true); - const acc0 = agg.createAccumulation([0, 0]); - const acc1 = agg.createAccumulation([3, 0]); + const acc0 = agg.createAccumulation(0n); + const acc1 = agg.createAccumulation(3_000_000_000n); const result = agg.merge(acc0, acc1); - assert.strictEqual(result.startTime, acc0.startTime); + assert.strictEqual(result.startTimeUnixNano, acc0.startTimeUnixNano); }); it('handles zero-length buckets in source histogram', () => { // https://github.com/open-telemetry/opentelemetry-js/issues/4450 - const delta = new ExponentialHistogramAccumulation([0, 0], 160); + const delta = new ExponentialHistogramAccumulation(0n, 160); delta.updateByIncrement(0.0, 2); // A histogram with zero count of two and empty buckets - const previous = new ExponentialHistogramAccumulation([0, 0], 160); + const previous = new ExponentialHistogramAccumulation(0n, 160); previous.updateByIncrement(0, 1); previous.updateByIncrement(0.000979, 41); //Bucket: (0.00097656, 0.0010198], Count: 41, Index: -160 previous.updateByIncrement(0.001959, 17); //Bucket: (0.00195313, 0.0020396], Count: 17, Index: -144 @@ -645,9 +645,9 @@ describe('ExponentialHistogramAggregation', () => { describe('diff', () => { it('diffs and does not mutate args', () => { const agg = new ExponentialHistogramAggregator(4, true); - const acc0 = agg.createAccumulation([0, 0]); - const acc1 = agg.createAccumulation([0, 0]); - const acc2 = agg.createAccumulation([0, 0]); + const acc0 = agg.createAccumulation(0n); + const acc1 = agg.createAccumulation(0n); + const acc2 = agg.createAccumulation(0n); acc0.record(2 << 0); acc0.record(2 << 1); @@ -681,11 +681,11 @@ describe('ExponentialHistogramAggregation', () => { describe('toMetricData', () => { it('should transform to expected data with recordMinMax = true', () => { - const startTime: HrTime = [0, 0]; - const endTime: HrTime = [1, 1]; + const startTimeUnixNano = 0n; + const endTimeUnixNano = 1_000_000_001n; 
const agg = new ExponentialHistogramAggregator(4, true); - const acc = agg.createAccumulation(startTime); + const acc = agg.createAccumulation(startTimeUnixNano); acc.record(2); acc.record(-2); @@ -696,7 +696,7 @@ describe('ExponentialHistogramAggregation', () => { defaultInstrumentDescriptor, AggregationTemporality.CUMULATIVE, [[{}, acc]], - endTime + endTimeUnixNano ); const expected: MetricData = { @@ -706,8 +706,10 @@ describe('ExponentialHistogramAggregation', () => { dataPoints: [ { attributes: {}, - startTime, - endTime, + startTimeUnixNano, + endTimeUnixNano, + startTime: [0, 0], + endTime: [1, 1], value: { min: -4, max: 4, @@ -732,11 +734,11 @@ describe('ExponentialHistogramAggregation', () => { }); it('should transform to expected data with recordMinMax = false', () => { - const startTime: HrTime = [0, 0]; - const endTime: HrTime = [1, 1]; + const startTimeUnixNano = 0n; + const endTimeUnixNano = 1_000_000_001n; const agg = new ExponentialHistogramAggregator(4, false); - const acc = agg.createAccumulation(startTime); + const acc = agg.createAccumulation(startTimeUnixNano); acc.record(2); acc.record(-2); @@ -747,7 +749,7 @@ describe('ExponentialHistogramAggregation', () => { defaultInstrumentDescriptor, AggregationTemporality.CUMULATIVE, [[{}, acc]], - endTime + endTimeUnixNano ); const expected: MetricData = { @@ -757,8 +759,10 @@ describe('ExponentialHistogramAggregation', () => { dataPoints: [ { attributes: {}, - startTime, - endTime, + startTimeUnixNano, + endTimeUnixNano, + startTime: [0, 0], + endTime: [1, 1], value: { min: undefined, max: undefined, @@ -785,10 +789,10 @@ describe('ExponentialHistogramAggregation', () => { function testSum(instrumentType: InstrumentType, expectSum: boolean) { const agg = new ExponentialHistogramAggregator(4, true); - const startTime: HrTime = [0, 0]; - const endTime: HrTime = [1, 1]; + const startTimeUnixNano = 0n; + const endTimeUnixNano = 1_000_000_001n; - const acc = agg.createAccumulation(startTime); + const acc 
= agg.createAccumulation(startTimeUnixNano); acc.record(0); acc.record(1); acc.record(4); @@ -804,7 +808,7 @@ describe('ExponentialHistogramAggregation', () => { }, AggregationTemporality.CUMULATIVE, [[{}, acc]], - endTime + endTimeUnixNano ); assert.notStrictEqual(aggregatedData, undefined); diff --git a/packages/sdk-metrics/test/aggregator/Histogram.test.ts b/packages/sdk-metrics/test/aggregator/Histogram.test.ts index ff95b635143..f1b110b385b 100644 --- a/packages/sdk-metrics/test/aggregator/Histogram.test.ts +++ b/packages/sdk-metrics/test/aggregator/Histogram.test.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { HrTime, ValueType } from '@opentelemetry/api'; +import { ValueType } from '@opentelemetry/api'; import * as assert from 'assert'; import { AggregationTemporality, @@ -32,7 +32,7 @@ describe('HistogramAggregator', () => { describe('createAccumulation', () => { it('no exceptions on createAccumulation', () => { const aggregator = new HistogramAggregator([1, 10, 100], true); - const accumulation = aggregator.createAccumulation([0, 0]); + const accumulation = aggregator.createAccumulation(0n); assert.ok(accumulation instanceof HistogramAccumulation); }); }); @@ -40,15 +40,15 @@ describe('HistogramAggregator', () => { describe('merge', () => { it('no exceptions', () => { const aggregator = new HistogramAggregator([1, 10, 100], true); - const prev = aggregator.createAccumulation([0, 0]); + const prev = aggregator.createAccumulation(0n); prev.record(0); prev.record(1); - const delta = aggregator.createAccumulation([1, 1]); + const delta = aggregator.createAccumulation(1_000_000_001n); delta.record(2); delta.record(11); - const expected = aggregator.createAccumulation([0, 0]); + const expected = aggregator.createAccumulation(0n); // replay actions on prev expected.record(0); expected.record(1); @@ -61,11 +61,11 @@ describe('HistogramAggregator', () => { it('with only negatives', () => { const aggregator = new HistogramAggregator([1, 10, 100], 
true); - const prev = aggregator.createAccumulation([0, 0]); + const prev = aggregator.createAccumulation(0n); prev.record(-10); prev.record(-20); - const delta = aggregator.createAccumulation([1, 1]); + const delta = aggregator.createAccumulation(1_000_000_001n); delta.record(-5); delta.record(-30); @@ -84,15 +84,15 @@ describe('HistogramAggregator', () => { it('with single bucket', function () { const aggregator = new HistogramAggregator([], true); - const prev = aggregator.createAccumulation([0, 0]); + const prev = aggregator.createAccumulation(0n); prev.record(0); prev.record(1); - const delta = aggregator.createAccumulation([1, 1]); + const delta = aggregator.createAccumulation(1_000_000_001n); delta.record(2); delta.record(11); - const expected = new HistogramAccumulation([0, 0], [], true, { + const expected = new HistogramAccumulation(0n, [], true, { buckets: { boundaries: [], counts: [4], @@ -110,11 +110,11 @@ describe('HistogramAggregator', () => { describe('diff', () => { it('no exceptions', () => { const aggregator = new HistogramAggregator([1, 10, 100], true); - const prev = aggregator.createAccumulation([0, 0]); + const prev = aggregator.createAccumulation(0n); prev.record(0); prev.record(1); - const curr = aggregator.createAccumulation([1, 1]); + const curr = aggregator.createAccumulation(1_000_000_001n); // replay actions on prev curr.record(0); curr.record(1); @@ -122,28 +122,33 @@ describe('HistogramAggregator', () => { curr.record(2); curr.record(11); - const expected = new HistogramAccumulation([1, 1], [1, 10, 100], true, { - buckets: { - boundaries: [1, 10, 100], - counts: [0, 1, 1, 0], - }, - count: 2, - sum: 13, - hasMinMax: false, - min: Infinity, - max: -Infinity, - }); + const expected = new HistogramAccumulation( + 1_000_000_001n, + [1, 10, 100], + true, + { + buckets: { + boundaries: [1, 10, 100], + counts: [0, 1, 1, 0], + }, + count: 2, + sum: 13, + hasMinMax: false, + min: Infinity, + max: -Infinity, + } + ); 
assert.deepStrictEqual(aggregator.diff(prev, curr), expected); }); it('with single bucket', function () { const aggregator = new HistogramAggregator([], true); - const prev = aggregator.createAccumulation([0, 0]); + const prev = aggregator.createAccumulation(0n); prev.record(0); prev.record(1); - const curr = aggregator.createAccumulation([1, 1]); + const curr = aggregator.createAccumulation(1_000_000_001n); // replay actions on prev curr.record(0); curr.record(1); @@ -151,7 +156,7 @@ describe('HistogramAggregator', () => { curr.record(2); curr.record(11); - const expected = new HistogramAccumulation([1, 1], [], true, { + const expected = new HistogramAccumulation(1_000_000_001n, [], true, { buckets: { boundaries: [], counts: [2], @@ -171,9 +176,9 @@ describe('HistogramAggregator', () => { it('should transform to expected data with recordMinMax = true', () => { const aggregator = new HistogramAggregator([1, 10, 100], true); - const startTime: HrTime = [0, 0]; - const endTime: HrTime = [1, 1]; - const accumulation = aggregator.createAccumulation(startTime); + const startTimeUnixNano = 0n; + const endTimeUnixNano = 1_000_000_001n; + const accumulation = aggregator.createAccumulation(startTimeUnixNano); accumulation.record(0); accumulation.record(1); @@ -184,8 +189,10 @@ describe('HistogramAggregator', () => { dataPoints: [ { attributes: {}, - startTime, - endTime, + startTimeUnixNano, + endTimeUnixNano, + startTime: [0, 0], + endTime: [1, 1], value: { buckets: { boundaries: [1, 10, 100], @@ -204,7 +211,7 @@ describe('HistogramAggregator', () => { defaultInstrumentDescriptor, AggregationTemporality.CUMULATIVE, [[{}, accumulation]], - endTime + endTimeUnixNano ), expected ); @@ -213,9 +220,9 @@ describe('HistogramAggregator', () => { it('should transform to expected data with recordMinMax = false', () => { const aggregator = new HistogramAggregator([1, 10, 100], false); - const startTime: HrTime = [0, 0]; - const endTime: HrTime = [1, 1]; - const accumulation = 
aggregator.createAccumulation(startTime); + const startTimeUnixNano = 0n; + const endTimeUnixNano = 1_000_000_001n; + const accumulation = aggregator.createAccumulation(startTimeUnixNano); accumulation.record(0); accumulation.record(1); @@ -226,8 +233,10 @@ describe('HistogramAggregator', () => { dataPoints: [ { attributes: {}, - startTime, - endTime, + startTimeUnixNano, + endTimeUnixNano, + startTime: [0, 0], + endTime: [1, 1], value: { buckets: { boundaries: [1, 10, 100], @@ -246,7 +255,7 @@ describe('HistogramAggregator', () => { defaultInstrumentDescriptor, AggregationTemporality.CUMULATIVE, [[{}, accumulation]], - endTime + endTimeUnixNano ), expected ); @@ -255,9 +264,9 @@ describe('HistogramAggregator', () => { it('should transform to expected data with empty boundaries', () => { const aggregator = new HistogramAggregator([], false); - const startTime: HrTime = [0, 0]; - const endTime: HrTime = [1, 1]; - const accumulation = aggregator.createAccumulation(startTime); + const startTimeUnixNano = 0n; + const endTimeUnixNano = 1_000_000_001n; + const accumulation = aggregator.createAccumulation(startTimeUnixNano); accumulation.record(0); accumulation.record(1); @@ -268,8 +277,10 @@ describe('HistogramAggregator', () => { dataPoints: [ { attributes: {}, - startTime, - endTime, + startTimeUnixNano, + endTimeUnixNano, + startTime: [0, 0], + endTime: [1, 1], value: { buckets: { boundaries: [], @@ -288,7 +299,7 @@ describe('HistogramAggregator', () => { defaultInstrumentDescriptor, AggregationTemporality.CUMULATIVE, [[{}, accumulation]], - endTime + endTimeUnixNano ), expected ); @@ -297,10 +308,10 @@ describe('HistogramAggregator', () => { function testSum(instrumentType: InstrumentType, expectSum: boolean) { const aggregator = new HistogramAggregator([1, 10, 100], true); - const startTime: HrTime = [0, 0]; - const endTime: HrTime = [1, 1]; + const startTimeUnixNano = 0n; + const endTimeUnixNano = 1_000_000_001n; - const accumulation = 
aggregator.createAccumulation(startTime); + const accumulation = aggregator.createAccumulation(startTimeUnixNano); accumulation.record(0); accumulation.record(1); accumulation.record(4); @@ -316,7 +327,7 @@ describe('HistogramAggregator', () => { }, AggregationTemporality.CUMULATIVE, [[{}, accumulation]], - endTime + endTimeUnixNano ); assert.notStrictEqual(aggregatedData, undefined); @@ -347,7 +358,7 @@ describe('HistogramAggregator', () => { describe('HistogramAccumulation', () => { describe('record', () => { it('no exceptions on record', () => { - const accumulation = new HistogramAccumulation([0, 0], [1, 10, 100]); + const accumulation = new HistogramAccumulation(0n, [1, 10, 100]); for (const value of commonValues) { accumulation.record(value); @@ -355,7 +366,7 @@ describe('HistogramAccumulation', () => { }); it('ignores NaN', () => { - const accumulation = new HistogramAccumulation([0, 0], [1, 10, 100]); + const accumulation = new HistogramAccumulation(0n, [1, 10, 100]); accumulation.record(NaN); @@ -370,9 +381,9 @@ describe('HistogramAccumulation', () => { describe('setStartTime', () => { it('should set start time', () => { - const accumulation = new HistogramAccumulation([0, 0], [1, 10, 100]); - accumulation.setStartTime([1, 1]); - assert.deepStrictEqual(accumulation.startTime, [1, 1]); + const accumulation = new HistogramAccumulation(0n, [1, 10, 100]); + accumulation.setStartTime(1_000_000_001n); + assert.deepStrictEqual(accumulation.startTimeUnixNano, 1_000_000_001n); }); }); }); diff --git a/packages/sdk-metrics/test/aggregator/LastValue.test.ts b/packages/sdk-metrics/test/aggregator/LastValue.test.ts index a6e314230ab..60242c749d4 100644 --- a/packages/sdk-metrics/test/aggregator/LastValue.test.ts +++ b/packages/sdk-metrics/test/aggregator/LastValue.test.ts @@ -14,7 +14,6 @@ * limitations under the License. 
*/ -import { HrTime } from '@opentelemetry/api'; import * as assert from 'assert'; import * as sinon from 'sinon'; import { AggregationTemporality } from '../../src'; @@ -39,7 +38,7 @@ describe('LastValueAggregator', () => { describe('createAccumulation', () => { it('no exceptions on createAccumulation', () => { const aggregator = new LastValueAggregator(); - const accumulation = aggregator.createAccumulation([0, 0]); + const accumulation = aggregator.createAccumulation(0n); assert.ok(accumulation instanceof LastValueAccumulation); }); }); @@ -47,21 +46,21 @@ describe('LastValueAggregator', () => { describe('merge', () => { it('no exceptions', () => { const aggregator = new LastValueAggregator(); - const prev = aggregator.createAccumulation([0, 0]); - const delta = aggregator.createAccumulation([1, 1]); + const prev = aggregator.createAccumulation(0n); + const delta = aggregator.createAccumulation(1_000_000_001n); prev.record(2); delta.record(3); - const expected = new LastValueAccumulation([0, 0], 3, delta.sampleTime); + const expected = new LastValueAccumulation(0n, 3, delta.sampleTime); assert.deepStrictEqual(aggregator.merge(prev, delta), expected); }); it('return the newly sampled accumulation', () => { const aggregator = new LastValueAggregator(); - const accumulation1 = aggregator.createAccumulation([0, 0]); - const accumulation2 = aggregator.createAccumulation([1, 1]); + const accumulation1 = aggregator.createAccumulation(0n); + const accumulation2 = aggregator.createAccumulation(1_000_000_001n); accumulation1.record(2); clock.tick(100); @@ -73,7 +72,7 @@ describe('LastValueAggregator', () => { assert.deepStrictEqual( aggregator.merge(accumulation1, accumulation2), new LastValueAccumulation( - accumulation1.startTime, + accumulation1.startTimeUnixNano, 4, accumulation1.sampleTime ) @@ -81,7 +80,7 @@ describe('LastValueAggregator', () => { assert.deepStrictEqual( aggregator.merge(accumulation2, accumulation1), new LastValueAccumulation( - 
accumulation2.startTime, + accumulation2.startTimeUnixNano, 4, accumulation1.sampleTime ) @@ -92,21 +91,25 @@ describe('LastValueAggregator', () => { describe('diff', () => { it('no exceptions', () => { const aggregator = new LastValueAggregator(); - const prev = aggregator.createAccumulation([0, 0]); - const curr = aggregator.createAccumulation([1, 1]); + const prev = aggregator.createAccumulation(0n); + const curr = aggregator.createAccumulation(1_000_000_001n); prev.record(2); curr.record(3); - const expected = new LastValueAccumulation([1, 1], 3, curr.sampleTime); + const expected = new LastValueAccumulation( + 1_000_000_001n, + 3, + curr.sampleTime + ); assert.deepStrictEqual(aggregator.diff(prev, curr), expected); }); it('return the newly sampled accumulation', () => { const aggregator = new LastValueAggregator(); - const accumulation1 = aggregator.createAccumulation([0, 0]); - const accumulation2 = aggregator.createAccumulation([1, 1]); + const accumulation1 = aggregator.createAccumulation(0n); + const accumulation2 = aggregator.createAccumulation(1_000_000_001n); accumulation1.record(2); accumulation2.record(3); @@ -117,7 +120,7 @@ describe('LastValueAggregator', () => { assert.deepStrictEqual( aggregator.diff(accumulation1, accumulation2), new LastValueAccumulation( - accumulation2.startTime, + accumulation2.startTimeUnixNano, 4, accumulation1.sampleTime ) @@ -125,7 +128,7 @@ describe('LastValueAggregator', () => { assert.deepStrictEqual( aggregator.diff(accumulation2, accumulation1), new LastValueAccumulation( - accumulation1.startTime, + accumulation1.startTimeUnixNano, 4, accumulation1.sampleTime ) @@ -137,9 +140,9 @@ describe('LastValueAggregator', () => { it('transform without exception', () => { const aggregator = new LastValueAggregator(); - const startTime: HrTime = [0, 0]; - const endTime: HrTime = [1, 1]; - const accumulation = aggregator.createAccumulation(startTime); + const startTimeUnixNano = 0n; + const endTimeUnixNano = 1_000_000_001n; + 
const accumulation = aggregator.createAccumulation(startTimeUnixNano); accumulation.record(1); accumulation.record(2); accumulation.record(1); @@ -152,8 +155,10 @@ describe('LastValueAggregator', () => { dataPoints: [ { attributes: {}, - startTime, - endTime, + startTimeUnixNano, + endTimeUnixNano, + startTime: [0, 0], + endTime: [1, 1], value: 4, }, ], @@ -163,7 +168,7 @@ describe('LastValueAggregator', () => { defaultInstrumentDescriptor, AggregationTemporality.CUMULATIVE, [[{}, accumulation]], - endTime + endTimeUnixNano ), expected ); @@ -174,7 +179,7 @@ describe('LastValueAggregator', () => { describe('LastValueAccumulation', () => { describe('record', () => { it('no exceptions on record', () => { - const accumulation = new LastValueAccumulation([0, 0]); + const accumulation = new LastValueAccumulation(0n); for (const value of commonValues) { accumulation.record(value); @@ -184,9 +189,9 @@ describe('LastValueAccumulation', () => { describe('setStartTime', () => { it('should set start time', () => { - const accumulation = new LastValueAccumulation([0, 0]); - accumulation.setStartTime([1, 1]); - assert.deepStrictEqual(accumulation.startTime, [1, 1]); + const accumulation = new LastValueAccumulation(0n); + accumulation.setStartTime(1_000_000_001n); + assert.deepStrictEqual(accumulation.startTimeUnixNano, 1_000_000_001n); }); }); }); diff --git a/packages/sdk-metrics/test/aggregator/Sum.test.ts b/packages/sdk-metrics/test/aggregator/Sum.test.ts index 0e619d1b2b8..17fea78aee7 100644 --- a/packages/sdk-metrics/test/aggregator/Sum.test.ts +++ b/packages/sdk-metrics/test/aggregator/Sum.test.ts @@ -14,7 +14,6 @@ * limitations under the License. 
*/ -import { HrTime } from '@opentelemetry/api'; import * as assert from 'assert'; import { AggregationTemporality } from '../../src'; import { SumAccumulation, SumAggregator } from '../../src/aggregator'; @@ -25,7 +24,7 @@ describe('SumAggregator', () => { describe('createAccumulation', () => { it('no exceptions on createAccumulation', () => { const aggregator = new SumAggregator(true); - const accumulation = aggregator.createAccumulation([0, 0]); + const accumulation = aggregator.createAccumulation(0n); assert.ok(accumulation instanceof SumAccumulation); }); }); @@ -33,15 +32,15 @@ describe('SumAggregator', () => { describe('merge', () => { it('no exceptions', () => { const aggregator = new SumAggregator(true); - const prev = aggregator.createAccumulation([0, 0]); + const prev = aggregator.createAccumulation(0n); prev.record(1); prev.record(2); - const delta = aggregator.createAccumulation([1, 1]); + const delta = aggregator.createAccumulation(1_000_000_001n); delta.record(3); delta.record(4); - const expected = new SumAccumulation([0, 0], true, 1 + 2 + 3 + 4); + const expected = new SumAccumulation(0n, true, 1 + 2 + 3 + 4); assert.deepStrictEqual(aggregator.merge(prev, delta), expected); }); }); @@ -49,11 +48,11 @@ describe('SumAggregator', () => { describe('diff', () => { it('non-monotonic', () => { const aggregator = new SumAggregator(false); - const prev = aggregator.createAccumulation([0, 0]); + const prev = aggregator.createAccumulation(0n); prev.record(1); prev.record(2); - const curr = aggregator.createAccumulation([1, 1]); + const curr = aggregator.createAccumulation(1_000_000_001n); // replay actions performed on prev curr.record(1); curr.record(2); @@ -61,21 +60,21 @@ describe('SumAggregator', () => { curr.record(3); curr.record(4); - const expected = new SumAccumulation([1, 1], false, 3 + 4); + const expected = new SumAccumulation(1_000_000_001n, false, 3 + 4); assert.deepStrictEqual(aggregator.diff(prev, curr), expected); }); it('monotonic', () => { 
const aggregator = new SumAggregator(true); - const prev = aggregator.createAccumulation([0, 0]); + const prev = aggregator.createAccumulation(0n); prev.record(10); // Create a new record that indicates a reset. - const curr = aggregator.createAccumulation([1, 1]); + const curr = aggregator.createAccumulation(1_000_000_001n); curr.record(3); // Diff result detected reset. - const expected = new SumAccumulation([1, 1], true, 3, true); + const expected = new SumAccumulation(1_000_000_001n, true, 3, true); assert.deepStrictEqual(aggregator.diff(prev, curr), expected); }); }); @@ -84,9 +83,9 @@ describe('SumAggregator', () => { it('transform without exception', () => { const aggregator = new SumAggregator(true); - const startTime: HrTime = [0, 0]; - const endTime: HrTime = [1, 1]; - const accumulation = aggregator.createAccumulation(startTime); + const startTimeUnixNano = 0n; + const endTimeUnixNano = 1_000_000_001n; + const accumulation = aggregator.createAccumulation(startTimeUnixNano); accumulation.record(1); accumulation.record(2); @@ -98,8 +97,10 @@ describe('SumAggregator', () => { dataPoints: [ { attributes: {}, - startTime, - endTime, + startTimeUnixNano, + endTimeUnixNano, + startTime: [0, 0], + endTime: [1, 1], value: 3, }, ], @@ -109,7 +110,7 @@ describe('SumAggregator', () => { defaultInstrumentDescriptor, AggregationTemporality.CUMULATIVE, [[{}, accumulation]], - endTime + endTimeUnixNano ), expected ); @@ -121,7 +122,7 @@ describe('SumAccumulation', () => { describe('record', () => { it('no exceptions on record', () => { for (const monotonic of [true, false]) { - const accumulation = new SumAccumulation([0, 0], monotonic); + const accumulation = new SumAccumulation(0n, monotonic); for (const value of commonValues) { accumulation.record(value); @@ -130,7 +131,7 @@ describe('SumAccumulation', () => { }); it('should ignore negative values on monotonic sum', () => { - const accumulation = new SumAccumulation([0, 0], true); + const accumulation = new 
SumAccumulation(0n, true); accumulation.record(1); accumulation.record(-1); assert.strictEqual(accumulation.toPointValue(), 1); @@ -139,9 +140,9 @@ describe('SumAccumulation', () => { describe('setStartTime', () => { it('should set start time', () => { - const accumulation = new SumAccumulation([0, 0], true); - accumulation.setStartTime([1, 1]); - assert.deepStrictEqual(accumulation.startTime, [1, 1]); + const accumulation = new SumAccumulation(0n, true); + accumulation.setStartTime(1_000_000_001n); + assert.deepStrictEqual(accumulation.startTimeUnixNano, 1_000_000_001n); }); }); }); diff --git a/packages/sdk-metrics/test/export/MetricReader.test.ts b/packages/sdk-metrics/test/export/MetricReader.test.ts index de8081381bb..c5558721f6a 100644 --- a/packages/sdk-metrics/test/export/MetricReader.test.ts +++ b/packages/sdk-metrics/test/export/MetricReader.test.ts @@ -51,7 +51,9 @@ const testScopeMetrics: ScopeMetrics[] = [ { attributes: {}, value: 1, + startTimeUnixNano: 0n, startTime: [0, 0], + endTimeUnixNano: 1_000_000_000n, endTime: [1, 0], }, ], diff --git a/packages/sdk-metrics/test/export/PeriodicExportingMetricReader.test.ts b/packages/sdk-metrics/test/export/PeriodicExportingMetricReader.test.ts index eacb28e2634..c2810aba8f1 100644 --- a/packages/sdk-metrics/test/export/PeriodicExportingMetricReader.test.ts +++ b/packages/sdk-metrics/test/export/PeriodicExportingMetricReader.test.ts @@ -151,6 +151,8 @@ describe('PeriodicExportingMetricReader', () => { // Sample hr time datapoints. 
startTime: [12345, 678901234], endTime: [12345, 678901234], + startTimeUnixNano: 12345678901234n, + endTimeUnixNano: 12345678901234n, attributes: {}, value: 1, }, diff --git a/packages/sdk-metrics/test/regression/cumulative-exponential-histogram.test.ts b/packages/sdk-metrics/test/regression/cumulative-exponential-histogram.test.ts index 32dab127fb8..efa6a464295 100644 --- a/packages/sdk-metrics/test/regression/cumulative-exponential-histogram.test.ts +++ b/packages/sdk-metrics/test/regression/cumulative-exponential-histogram.test.ts @@ -36,12 +36,12 @@ describe('cumulative-exponential-histogram', () => { sinon.restore(); }); - it('Cumulative Histogram should have the same startTime every collection', async () => { + it('Cumulative Histogram should have the same startTimeUnixNano every collection', async () => { // Works fine and passes await doTest({ type: AggregationType.EXPLICIT_BUCKET_HISTOGRAM }); }); - it('Cumulative ExponentialHistogram should have the same startTime every collection', async () => { + it('Cumulative ExponentialHistogram should have the same startTimeUnixNano every collection', async () => { // Fails await doTest({ type: AggregationType.EXPONENTIAL_HISTOGRAM }); }); @@ -78,8 +78,8 @@ describe('cumulative-exponential-histogram', () => { resourceMetrics2.scopeMetrics[0].metrics[0].dataPoints[0]; assert.deepStrictEqual( - dataPoint1.startTime, - dataPoint2.startTime, + dataPoint1.startTimeUnixNano, + dataPoint2.startTimeUnixNano, 'The start time should be the same across cumulative collections' ); }; diff --git a/packages/sdk-metrics/test/state/AsyncMetricStorage.test.ts b/packages/sdk-metrics/test/state/AsyncMetricStorage.test.ts index 30dfde48897..5da2f2107d0 100644 --- a/packages/sdk-metrics/test/state/AsyncMetricStorage.test.ts +++ b/packages/sdk-metrics/test/state/AsyncMetricStorage.test.ts @@ -30,7 +30,6 @@ import { ObservableCallbackDelegate, } from '../util'; import { ObservableInstrument } from '../../src/Instruments'; -import { HrTime 
} from '@opentelemetry/api'; const deltaCollector: MetricCollectorHandle = { selectAggregationTemporality: () => AggregationTemporality.DELTA, @@ -69,7 +68,7 @@ describe('AsyncMetricStorage', () => { observableResult.observe(3, { key: '3' }); }); { - const collectionTime: HrTime = [0, 0]; + const collectionTime = 0n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect(deltaCollector, collectionTime); @@ -101,7 +100,7 @@ describe('AsyncMetricStorage', () => { delegate.setDelegate(observableResult => {}); // The attributes should not be memorized if no measurement was reported. { - const collectionTime: HrTime = [1, 1]; + const collectionTime = 1_000_000_001n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect(deltaCollector, collectionTime); @@ -114,7 +113,7 @@ describe('AsyncMetricStorage', () => { observableResult.observe(6, { key: '3' }); }); { - const collectionTime: HrTime = [2, 2]; + const collectionTime = 2_000_000_002n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect(deltaCollector, collectionTime); @@ -167,9 +166,9 @@ describe('AsyncMetricStorage', () => { delegate.setDelegate(observableResult => { observableResult.observe(100, { key: '1' }); }); - let lastCollectionTime: HrTime; + let lastCollectionTime: bigint; { - const collectionTime: HrTime = [0, 0]; + const collectionTime = 0n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect(deltaCollector, collectionTime); @@ -191,7 +190,7 @@ describe('AsyncMetricStorage', () => { }); // The result data should not be diff-ed to be a negative value { - const collectionTime: HrTime = [1, 1]; + const collectionTime = 1_000_000_001n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect(deltaCollector, collectionTime); @@ -213,7 +212,7 @@ describe('AsyncMetricStorage', () => { }); // The result data should now be a delta to the previous 
collection { - const collectionTime: HrTime = [2, 2]; + const collectionTime = 2_000_000_002n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect(deltaCollector, collectionTime); @@ -251,9 +250,9 @@ describe('AsyncMetricStorage', () => { delegate.setDelegate(observableResult => { observableResult.observe(100, { key: '1' }); }); - let lastCollectionTime: HrTime; + let lastCollectionTime: bigint; { - const collectionTime: HrTime = [0, 0]; + const collectionTime = 0n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect(deltaCollector, collectionTime); @@ -275,7 +274,7 @@ describe('AsyncMetricStorage', () => { }); // The result data should be a delta to the previous collection { - const collectionTime: HrTime = [0, 0]; + const collectionTime = 0n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect(deltaCollector, collectionTime); @@ -297,7 +296,7 @@ describe('AsyncMetricStorage', () => { }); // The result data should be a delta to the previous collection { - const collectionTime: HrTime = [2, 2]; + const collectionTime = 2_000_000_002n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect(deltaCollector, collectionTime); @@ -338,37 +337,37 @@ describe('AsyncMetricStorage', () => { observableResult.observe(2, { key: '2' }); observableResult.observe(3, { key: '3' }); }); - let startTime: HrTime; + let startTimeUnixNano: bigint; { - const collectionTime: HrTime = [0, 0]; + const collectionTime = 0n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect( cumulativeCollector, collectionTime ); - startTime = collectionTime; + startTimeUnixNano = collectionTime; assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 3); assertDataPoint( metric.dataPoints[0], { key: '1' }, 1, - startTime, + startTimeUnixNano, collectionTime ); assertDataPoint( metric.dataPoints[1], { key: 
'2' }, 2, - startTime, + startTimeUnixNano, collectionTime ); assertDataPoint( metric.dataPoints[2], { key: '3' }, 3, - startTime, + startTimeUnixNano, collectionTime ); } @@ -376,7 +375,7 @@ describe('AsyncMetricStorage', () => { delegate.setDelegate(observableResult => {}); // The attributes should be memorized even if no measurement was reported. { - const collectionTime: HrTime = [1, 1]; + const collectionTime = 1_000_000_001n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect( cumulativeCollector, @@ -389,21 +388,21 @@ describe('AsyncMetricStorage', () => { metric.dataPoints[0], { key: '1' }, 1, - startTime, + startTimeUnixNano, collectionTime ); assertDataPoint( metric.dataPoints[1], { key: '2' }, 2, - startTime, + startTimeUnixNano, collectionTime ); assertDataPoint( metric.dataPoints[2], { key: '3' }, 3, - startTime, + startTimeUnixNano, collectionTime ); } @@ -414,7 +413,7 @@ describe('AsyncMetricStorage', () => { observableResult.observe(6, { key: '3' }); }); { - const collectionTime: HrTime = [2, 2]; + const collectionTime = 2_000_000_002n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect( cumulativeCollector, @@ -427,21 +426,21 @@ describe('AsyncMetricStorage', () => { metric.dataPoints[0], { key: '1' }, 4, - startTime, + startTimeUnixNano, collectionTime ); assertDataPoint( metric.dataPoints[1], { key: '2' }, 5, - startTime, + startTimeUnixNano, collectionTime ); assertDataPoint( metric.dataPoints[2], { key: '3' }, 6, - startTime, + startTimeUnixNano, collectionTime ); } @@ -469,23 +468,23 @@ describe('AsyncMetricStorage', () => { delegate.setDelegate(observableResult => { observableResult.observe(100, { key: '1' }); }); - let startTime: HrTime; + let startTimeUnixNano: bigint; { - const collectionTime: HrTime = [0, 0]; + const collectionTime = 0n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect( cumulativeCollector, collectionTime ); - 
startTime = collectionTime; + startTimeUnixNano = collectionTime; assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); assertDataPoint( metric.dataPoints[0], { key: '1' }, 100, - startTime, + startTimeUnixNano, collectionTime ); } @@ -496,7 +495,7 @@ describe('AsyncMetricStorage', () => { }); // The result data should not be diff-ed to be a negative value { - const collectionTime: HrTime = [1, 1]; + const collectionTime = 1_000_000_001n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect( cumulativeCollector, @@ -505,7 +504,7 @@ describe('AsyncMetricStorage', () => { assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - // The startTime should be reset. + // The startTimeUnixNano should be reset. assertDataPoint( metric.dataPoints[0], { key: '1' }, @@ -513,7 +512,7 @@ describe('AsyncMetricStorage', () => { collectionTime, collectionTime ); - startTime = collectionTime; + startTimeUnixNano = collectionTime; } // Observe a new data point @@ -522,7 +521,7 @@ describe('AsyncMetricStorage', () => { }); // The result data should now be a delta to the previous collection { - const collectionTime: HrTime = [2, 2]; + const collectionTime = 2_000_000_002n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect( cumulativeCollector, @@ -535,7 +534,7 @@ describe('AsyncMetricStorage', () => { metric.dataPoints[0], { key: '1' }, 50, - startTime, + startTimeUnixNano, collectionTime ); } @@ -563,23 +562,23 @@ describe('AsyncMetricStorage', () => { delegate.setDelegate(observableResult => { observableResult.observe(100, { key: '1' }); }); - let startTime: HrTime; + let startTimeUnixNano: bigint; { - const collectionTime: HrTime = [0, 0]; + const collectionTime = 0n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect( cumulativeCollector, collectionTime ); - startTime = collectionTime; + 
startTimeUnixNano = collectionTime; assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); assertDataPoint( metric.dataPoints[0], { key: '1' }, 100, - startTime, + startTimeUnixNano, collectionTime ); } @@ -590,7 +589,7 @@ describe('AsyncMetricStorage', () => { }); // The result data should be a delta to the previous collection { - const collectionTime: HrTime = [1, 1]; + const collectionTime = 1_000_000_001n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect( cumulativeCollector, @@ -599,12 +598,12 @@ describe('AsyncMetricStorage', () => { assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - // No reset on the value or the startTime + // No reset on the value or the startTimeUnixNano assertDataPoint( metric.dataPoints[0], { key: '1' }, 1, - startTime, + startTimeUnixNano, collectionTime ); } @@ -615,7 +614,7 @@ describe('AsyncMetricStorage', () => { }); // The result data should be a delta to the previous collection { - const collectionTime: HrTime = [2, 2]; + const collectionTime = 2_000_000_002n; await observableRegistry.observe(collectionTime); const metric = metricStorage.collect( cumulativeCollector, @@ -628,7 +627,7 @@ describe('AsyncMetricStorage', () => { metric.dataPoints[0], { key: '1' }, 50, - startTime, + startTimeUnixNano, collectionTime ); } diff --git a/packages/sdk-metrics/test/state/DeltaMetricProcessor.test.ts b/packages/sdk-metrics/test/state/DeltaMetricProcessor.test.ts index a14f89df53a..132eeb81f58 100644 --- a/packages/sdk-metrics/test/state/DeltaMetricProcessor.test.ts +++ b/packages/sdk-metrics/test/state/DeltaMetricProcessor.test.ts @@ -28,12 +28,7 @@ describe('DeltaMetricProcessor', () => { for (const value of commonValues) { for (const attributes of commonAttributes) { - metricProcessor.record( - value, - attributes, - api.context.active(), - [0, 0] - ); + metricProcessor.record(value, attributes, api.context.active(), 
0n); } } }); @@ -43,12 +38,7 @@ describe('DeltaMetricProcessor', () => { for (const value of commonValues) { for (const attributes of commonAttributes) { - metricProcessor.record( - value, - attributes, - api.context.active(), - [0, 0] - ); + metricProcessor.record(value, attributes, api.context.active(), 0n); } } }); @@ -64,7 +54,7 @@ describe('DeltaMetricProcessor', () => { measurements.set(attributes, value); } } - metricProcessor.batchCumulate(measurements, [0, 0]); + metricProcessor.batchCumulate(measurements, 0n); }); it('no exceptions on record with no-drop aggregator', () => { @@ -76,7 +66,7 @@ describe('DeltaMetricProcessor', () => { measurements.set(attributes, value); } } - metricProcessor.batchCumulate(measurements, [0, 0]); + metricProcessor.batchCumulate(measurements, 0n); }); it('should compute the diff of accumulations', () => { @@ -85,7 +75,7 @@ describe('DeltaMetricProcessor', () => { { const measurements = new AttributeHashMap(); measurements.set({}, 10); - metricProcessor.batchCumulate(measurements, [0, 0]); + metricProcessor.batchCumulate(measurements, 0n); const accumulations = metricProcessor.collect(); const accumulation = accumulations.get({}); assert.strictEqual(accumulation?.toPointValue(), 10); @@ -94,7 +84,7 @@ describe('DeltaMetricProcessor', () => { { const measurements = new AttributeHashMap(); measurements.set({}, 21); - metricProcessor.batchCumulate(measurements, [0, 0]); + metricProcessor.batchCumulate(measurements, 0n); const accumulations = metricProcessor.collect(); const accumulation = accumulations.get({}); assert.strictEqual(accumulation?.toPointValue(), 11); @@ -107,13 +97,13 @@ describe('DeltaMetricProcessor', () => { { const measurements = new AttributeHashMap(); measurements.set({}, 10); - metricProcessor.batchCumulate(measurements, [0, 0]); + metricProcessor.batchCumulate(measurements, 0n); } { const measurements = new AttributeHashMap(); measurements.set({}, 20); - metricProcessor.batchCumulate(measurements, [1, 1]); + 
metricProcessor.batchCumulate(measurements, 1_000_000_001n); } const accumulations = metricProcessor.collect(); @@ -133,7 +123,7 @@ describe('DeltaMetricProcessor', () => { measurements.set({ attribute: '1' }, 10); measurements.set({ attribute: '2' }, 20); measurements.set({ attribute: '3' }, 30); - metricProcessor.batchCumulate(measurements, [0, 0]); + metricProcessor.batchCumulate(measurements, 0n); } const accumulations = metricProcessor.collect(); @@ -155,9 +145,19 @@ describe('DeltaMetricProcessor', () => { it('should export', () => { const metricProcessor = new DeltaMetricProcessor(new SumAggregator(true)); - metricProcessor.record(1, { attribute: '1' }, api.ROOT_CONTEXT, [0, 0]); - metricProcessor.record(2, { attribute: '1' }, api.ROOT_CONTEXT, [1, 1]); - metricProcessor.record(1, { attribute: '2' }, api.ROOT_CONTEXT, [2, 2]); + metricProcessor.record(1, { attribute: '1' }, api.ROOT_CONTEXT, 0n); + metricProcessor.record( + 2, + { attribute: '1' }, + api.ROOT_CONTEXT, + 1_000_000_001n + ); + metricProcessor.record( + 1, + { attribute: '2' }, + api.ROOT_CONTEXT, + 2_000_000_002n + ); let accumulations = metricProcessor.collect(); assert.strictEqual(accumulations.size, 2); diff --git a/packages/sdk-metrics/test/state/MetricStorageRegistry.test.ts b/packages/sdk-metrics/test/state/MetricStorageRegistry.test.ts index 35e3faa1fa9..e5a73fc6643 100644 --- a/packages/sdk-metrics/test/state/MetricStorageRegistry.test.ts +++ b/packages/sdk-metrics/test/state/MetricStorageRegistry.test.ts @@ -17,7 +17,6 @@ import { MetricStorageRegistry } from '../../src/state/MetricStorageRegistry'; import { diag, ValueType } from '@opentelemetry/api'; import { MetricStorage } from '../../src/state/MetricStorage'; -import { HrTime } from '@opentelemetry/api'; import { MetricCollectorHandle } from '../../src/state/MetricCollector'; import { MetricData, InstrumentType } from '../../src'; import { Maybe } from '../../src/utils'; @@ -34,7 +33,7 @@ import { InstrumentDescriptor } from 
'../../src/InstrumentDescriptor'; class TestMetricStorage extends MetricStorage { collect( collector: MetricCollectorHandle, - collectionTime: HrTime + collectionTime: bigint ): Maybe { return undefined; } diff --git a/packages/sdk-metrics/test/state/MultiWritableMetricStorage.test.ts b/packages/sdk-metrics/test/state/MultiWritableMetricStorage.test.ts index ed4c1259209..7844f10cfc7 100644 --- a/packages/sdk-metrics/test/state/MultiWritableMetricStorage.test.ts +++ b/packages/sdk-metrics/test/state/MultiWritableMetricStorage.test.ts @@ -16,7 +16,6 @@ import * as api from '@opentelemetry/api'; import { Attributes } from '@opentelemetry/api'; -import { hrTime } from '@opentelemetry/core'; import * as assert from 'assert'; import { MultiMetricStorage } from '../../src/state/MultiWritableMetricStorage'; import { WritableMetricStorage } from '../../src/state/WritableMetricStorage'; @@ -26,6 +25,7 @@ import { commonValues, Measurement, } from '../util'; +import { millisecondsToNanoseconds } from '@opentelemetry/core'; describe('MultiMetricStorage', () => { describe('record', () => { @@ -34,7 +34,7 @@ describe('MultiMetricStorage', () => { for (const value of commonValues) { for (const attribute of commonAttributes) { - metricStorage.record(value, attribute, api.context.active(), [0, 0]); + metricStorage.record(value, attribute, api.context.active(), 0n); } } }); @@ -63,7 +63,12 @@ describe('MultiMetricStorage', () => { for (const attributes of commonAttributes) { const context = api.context.active(); expectedMeasurements.push({ value, attributes, context }); - metricStorage.record(value, attributes, context, hrTime()); + metricStorage.record( + value, + attributes, + context, + millisecondsToNanoseconds(Date.now()) + ); } } diff --git a/packages/sdk-metrics/test/state/SyncMetricStorage.test.ts b/packages/sdk-metrics/test/state/SyncMetricStorage.test.ts index 8dfb256de91..8edeff95b3a 100644 --- a/packages/sdk-metrics/test/state/SyncMetricStorage.test.ts +++ 
b/packages/sdk-metrics/test/state/SyncMetricStorage.test.ts @@ -53,7 +53,7 @@ describe('SyncMetricStorage', () => { for (const value of commonValues) { for (const attributes of commonAttributes) { - metricStorage.record(value, attributes, api.context.active(), [0, 0]); + metricStorage.record(value, attributes, api.context.active(), 0n); } } }); @@ -69,31 +69,37 @@ describe('SyncMetricStorage', () => { [deltaCollector] ); - metricStorage.record(1, {}, api.context.active(), [0, 0]); - metricStorage.record(2, {}, api.context.active(), [1, 1]); - metricStorage.record(3, {}, api.context.active(), [2, 2]); + metricStorage.record(1, {}, api.context.active(), 0n); + metricStorage.record(2, {}, api.context.active(), 1_000_000_001n); + metricStorage.record(3, {}, api.context.active(), 2_000_000_002n); { - const metric = metricStorage.collect(deltaCollector, [3, 3]); + const metric = metricStorage.collect(deltaCollector, 3_000_000_003n); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 6, [0, 0], [3, 3]); + assertDataPoint(metric.dataPoints[0], {}, 6, 0n, 3_000_000_003n); } // The attributes should not be memorized. 
{ - const metric = metricStorage.collect(deltaCollector, [4, 4]); + const metric = metricStorage.collect(deltaCollector, 4_000_000_004n); assert.strictEqual(metric, undefined); } - metricStorage.record(1, {}, api.context.active(), [5, 5]); + metricStorage.record(1, {}, api.context.active(), 5_000_000_005n); { - const metric = metricStorage.collect(deltaCollector, [6, 6]); + const metric = metricStorage.collect(deltaCollector, 6_000_000_006n); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 1, [5, 5], [6, 6]); + assertDataPoint( + metric.dataPoints[0], + {}, + 1, + 5_000_000_005n, + 6_000_000_006n + ); } }); }); @@ -106,33 +112,42 @@ describe('SyncMetricStorage', () => { createNoopAttributesProcessor(), [cumulativeCollector] ); - metricStorage.record(1, {}, api.context.active(), [0, 0]); - metricStorage.record(2, {}, api.context.active(), [1, 1]); - metricStorage.record(3, {}, api.context.active(), [2, 2]); + metricStorage.record(1, {}, api.context.active(), 0n); + metricStorage.record(2, {}, api.context.active(), 1_000_000_001n); + metricStorage.record(3, {}, api.context.active(), 2_000_000_002n); { - const metric = metricStorage.collect(cumulativeCollector, [3, 3]); + const metric = metricStorage.collect( + cumulativeCollector, + 3_000_000_003n + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 6, [0, 0], [3, 3]); + assertDataPoint(metric.dataPoints[0], {}, 6, 0n, 3_000_000_003n); } // The attributes should be memorized. 
{ - const metric = metricStorage.collect(cumulativeCollector, [4, 4]); + const metric = metricStorage.collect( + cumulativeCollector, + 4_000_000_004n + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 6, [0, 0], [4, 4]); + assertDataPoint(metric.dataPoints[0], {}, 6, 0n, 4_000_000_004n); } - metricStorage.record(1, {}, api.context.active(), [5, 5]); + metricStorage.record(1, {}, api.context.active(), 5_000_000_005n); { - const metric = metricStorage.collect(cumulativeCollector, [6, 6]); + const metric = metricStorage.collect( + cumulativeCollector, + 6_000_000_006n + ); assertMetricData(metric, DataPointType.SUM); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 7, [0, 0], [6, 6]); + assertDataPoint(metric.dataPoints[0], {}, 7, 0n, 6_000_000_006n); } }); }); diff --git a/packages/sdk-metrics/test/state/TemporalMetricProcessor.test.ts b/packages/sdk-metrics/test/state/TemporalMetricProcessor.test.ts index 932d45c5f15..a5f3e30e232 100644 --- a/packages/sdk-metrics/test/state/TemporalMetricProcessor.test.ts +++ b/packages/sdk-metrics/test/state/TemporalMetricProcessor.test.ts @@ -59,13 +59,13 @@ describe('TemporalMetricProcessor', () => { const temporalMetricStorage = new TemporalMetricProcessor(aggregator, [ deltaCollector1, ]); - deltaMetricStorage.record(1, {}, api.context.active(), [1, 1]); + deltaMetricStorage.record(1, {}, api.context.active(), 1_000_000_001n); { const metric = temporalMetricStorage.buildMetrics( deltaCollector1, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [2, 2] + 2_000_000_002n ); assertMetricData( @@ -75,16 +75,22 @@ describe('TemporalMetricProcessor', () => { AggregationTemporality.DELTA ); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [2, 2]); + assertDataPoint( + metric.dataPoints[0], + {}, + 1, + 1_000_000_001n, + 2_000_000_002n + 
); } - deltaMetricStorage.record(2, {}, api.context.active(), [3, 3]); + deltaMetricStorage.record(2, {}, api.context.active(), 3_000_000_003n); { const metric = temporalMetricStorage.buildMetrics( deltaCollector1, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [4, 4] + 4_000_000_004n ); assertMetricData( @@ -95,7 +101,13 @@ describe('TemporalMetricProcessor', () => { ); assert.strictEqual(metric.dataPoints.length, 1); // Time span: (lastCollectionTime, collectionTime) - assertDataPoint(metric.dataPoints[0], {}, 2, [2, 2], [4, 4]); + assertDataPoint( + metric.dataPoints[0], + {}, + 2, + 2_000_000_002n, + 4_000_000_004n + ); } { @@ -103,7 +115,7 @@ describe('TemporalMetricProcessor', () => { deltaCollector1, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [5, 5] + 5_000_000_005n ); // nothing recorded -> nothing collected @@ -124,13 +136,13 @@ describe('TemporalMetricProcessor', () => { deltaCollector2, ]); - deltaMetricStorage.record(1, {}, api.context.active(), [1, 1]); + deltaMetricStorage.record(1, {}, api.context.active(), 1_000_000_001n); { const metric = temporalMetricStorage.buildMetrics( deltaCollector1, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [2, 2] + 2_000_000_002n ); assertMetricData( @@ -140,7 +152,13 @@ describe('TemporalMetricProcessor', () => { AggregationTemporality.DELTA ); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [2, 2]); + assertDataPoint( + metric.dataPoints[0], + {}, + 1, + 1_000_000_001n, + 2_000_000_002n + ); } { @@ -148,7 +166,7 @@ describe('TemporalMetricProcessor', () => { deltaCollector2, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [3, 3] + 3_000_000_003n ); assertMetricData( @@ -158,7 +176,13 @@ describe('TemporalMetricProcessor', () => { AggregationTemporality.DELTA ); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [3, 3]); + assertDataPoint( + 
metric.dataPoints[0], + {}, + 1, + 1_000_000_001n, + 3_000_000_003n + ); } }); }); @@ -176,13 +200,13 @@ describe('TemporalMetricProcessor', () => { cumulativeCollector1, ]); - deltaMetricStorage.record(1, {}, api.context.active(), [1, 1]); + deltaMetricStorage.record(1, {}, api.context.active(), 1_000_000_001n); { const metric = temporalMetricStorage.buildMetrics( cumulativeCollector1, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [2, 2] + 2_000_000_002n ); assertMetricData( @@ -192,16 +216,22 @@ describe('TemporalMetricProcessor', () => { AggregationTemporality.CUMULATIVE ); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [2, 2]); + assertDataPoint( + metric.dataPoints[0], + {}, + 1, + 1_000_000_001n, + 2_000_000_002n + ); } - deltaMetricStorage.record(2, {}, api.context.active(), [3, 3]); + deltaMetricStorage.record(2, {}, api.context.active(), 3_000_000_003n); { const metric = temporalMetricStorage.buildMetrics( cumulativeCollector1, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [4, 4] + 4_000_000_004n ); assertMetricData( @@ -211,7 +241,13 @@ describe('TemporalMetricProcessor', () => { AggregationTemporality.CUMULATIVE ); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 3, [1, 1], [4, 4]); + assertDataPoint( + metric.dataPoints[0], + {}, + 3, + 1_000_000_001n, + 4_000_000_004n + ); } // selectAggregationTemporality should be called only once. 
@@ -228,13 +264,13 @@ describe('TemporalMetricProcessor', () => { deltaCollector1, ]); - deltaMetricStorage.record(1, {}, api.context.active(), [1, 1]); + deltaMetricStorage.record(1, {}, api.context.active(), 1_000_000_001n); { const metric = temporalMetricStorage.buildMetrics( cumulativeCollector1, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [2, 2] + 2_000_000_002n ); assertMetricData( @@ -244,16 +280,22 @@ describe('TemporalMetricProcessor', () => { AggregationTemporality.CUMULATIVE ); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 1, [1, 1], [2, 2]); + assertDataPoint( + metric.dataPoints[0], + {}, + 1, + 1_000_000_001n, + 2_000_000_002n + ); } - deltaMetricStorage.record(2, {}, api.context.active(), [3, 3]); + deltaMetricStorage.record(2, {}, api.context.active(), 3_000_000_003n); { const metric = temporalMetricStorage.buildMetrics( deltaCollector1, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [4, 4] + 4_000_000_004n ); assertMetricData( @@ -263,14 +305,20 @@ describe('TemporalMetricProcessor', () => { AggregationTemporality.DELTA ); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 3, [1, 1], [4, 4]); + assertDataPoint( + metric.dataPoints[0], + {}, + 3, + 1_000_000_001n, + 4_000_000_004n + ); } { const metric = temporalMetricStorage.buildMetrics( cumulativeCollector1, defaultInstrumentDescriptor, deltaMetricStorage.collect(), - [5, 5] + 5_000_000_005n ); assertMetricData( @@ -280,7 +328,13 @@ describe('TemporalMetricProcessor', () => { AggregationTemporality.CUMULATIVE ); assert.strictEqual(metric.dataPoints.length, 1); - assertDataPoint(metric.dataPoints[0], {}, 3, [1, 1], [5, 5]); + assertDataPoint( + metric.dataPoints[0], + {}, + 3, + 1_000_000_001n, + 5_000_000_005n + ); } }); }); diff --git a/packages/sdk-metrics/test/util.ts b/packages/sdk-metrics/test/util.ts index 9b10820acf3..6df75827025 100644 --- 
a/packages/sdk-metrics/test/util.ts +++ b/packages/sdk-metrics/test/util.ts @@ -36,7 +36,6 @@ import { ScopeMetrics, } from '../src/export/MetricData'; import { isNotNullish } from '../src/utils'; -import { HrTime } from '@opentelemetry/api'; import { Histogram } from '../src/aggregator/types'; import { AggregationTemporality } from '../src/export/AggregationTemporality'; @@ -125,27 +124,29 @@ export function assertDataPoint( actual: unknown, attributes: Attributes, point: Histogram | number, - startTime?: HrTime, - endTime?: HrTime + startTimeUnixNano?: bigint, + endTimeUnixNano?: bigint ): asserts actual is DataPoint { const it = actual as DataPoint; assert.deepStrictEqual(it.attributes, attributes); assert.deepStrictEqual(it.value, point); - if (startTime) { + if (startTimeUnixNano) { assert.deepStrictEqual( - it.startTime, - startTime, - 'startTime should be equal' + it.startTimeUnixNano, + startTimeUnixNano, + 'startTimeUnixNano should be equal' ); } else { - assert.ok(Array.isArray(it.startTime)); - assert.strictEqual(it.startTime.length, 2, 'startTime should be equal'); + assert.ok(typeof it.startTimeUnixNano === 'bigint'); } - if (endTime) { - assert.deepStrictEqual(it.endTime, endTime, 'endTime should be equal'); + if (endTimeUnixNano) { + assert.deepStrictEqual( + it.endTimeUnixNano, + endTimeUnixNano, + 'endTimeUnixNano should be equal' + ); } else { - assert.ok(Array.isArray(it.endTime)); - assert.strictEqual(it.endTime.length, 2, 'endTime should be equal'); + assert.ok(typeof it.endTimeUnixNano === 'bigint'); } }