diff --git a/CHANGELOG.md b/CHANGELOG.md
index bcf7edf86e2..1fed4e59044 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -15,8 +15,9 @@ For experimental package changes, see the [experimental CHANGELOG](experimental/
 
 ### :bug: (Bug Fix)
 
-* fix(sdk-metrics): handle zero bucket counts in exponential histogram merge
-[#4459](https://github.com/open-telemetry/opentelemetry-js/pull/4459) @mwear
+* fix(sdk-metrics): handle zero bucket counts in exponential histogram merge [#4459](https://github.com/open-telemetry/opentelemetry-js/pull/4459) @mwear
+* fix(sdk-metrics): ignore `NaN` value recordings in Histograms [#4455](https://github.com/open-telemetry/opentelemetry-js/pull/4455) @pichlermarc
+  * fixes a bug where recording `NaN` on a histogram would result in the sum of bucket count values not matching the overall count
 
 ### :books: (Refine Doc)
diff --git a/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts b/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts
index b2d9e36f8f9..6368cf82c30 100644
--- a/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts
+++ b/experimental/packages/opentelemetry-exporter-prometheus/test/PrometheusSerializer.test.ts
@@ -622,7 +622,6 @@ describe('PrometheusSerializer', () => {
     it('should serialize non-finite values', async () => {
       const serializer = new PrometheusSerializer();
       const cases = [
-        [NaN, 'NaN'],
         [-Infinity, '-Inf'],
         [+Infinity, '+Inf'],
       ] as [number, string][];
diff --git a/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts b/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts
index c35e056a9f6..d13042c3305 100644
--- a/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts
+++ b/packages/sdk-metrics/src/aggregator/ExponentialHistogram.ts
@@ -193,6 +193,12 @@ export class ExponentialHistogramAccumulation implements Accumulation {
    * @param increment
    */
   updateByIncrement(value: number, increment: number) {
+    // NaN does not fall into any bucket, is not zero, and should not be counted;
+    // it is also never greater than max nor less than min, so there is nothing for us to do here.
+    if (Number.isNaN(value)) {
+      return;
+    }
+
     if (value > this._max) {
       this._max = value;
     }
diff --git a/packages/sdk-metrics/src/aggregator/Histogram.ts b/packages/sdk-metrics/src/aggregator/Histogram.ts
index 60e5e8df05d..94cf9060393 100644
--- a/packages/sdk-metrics/src/aggregator/Histogram.ts
+++ b/packages/sdk-metrics/src/aggregator/Histogram.ts
@@ -72,6 +72,12 @@ export class HistogramAccumulation implements Accumulation {
   ) {}
 
   record(value: number): void {
+    // NaN does not fall into any bucket, is not zero, and should not be counted;
+    // it is also never greater than max nor less than min, so there is nothing for us to do here.
+    if (Number.isNaN(value)) {
+      return;
+    }
+
     this._current.count += 1;
     this._current.sum += value;
 
diff --git a/packages/sdk-metrics/test/Instruments.test.ts b/packages/sdk-metrics/test/Instruments.test.ts
index ab466c295a1..4b354518310 100644
--- a/packages/sdk-metrics/test/Instruments.test.ts
+++ b/packages/sdk-metrics/test/Instruments.test.ts
@@ -247,6 +247,7 @@ describe('Instruments', () => {
       upDownCounter.add(1.1, { foo: 'bar' });
       // non-number values should be ignored.
       upDownCounter.add('1' as any);
+
       await validateExport(deltaReader, {
         dataPointType: DataPointType.SUM,
         isMonotonic: false,
@@ -506,6 +507,7 @@ describe('Instruments', () => {
       histogram.record(0.1, { foo: 'bar' });
       // non-number values should be ignored.
      histogram.record('1' as any);
+      histogram.record(NaN);
 
       await validateExport(deltaReader, {
         dataPointType: DataPointType.HISTOGRAM,
diff --git a/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts b/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts
index 27120eb8a25..cdeb4cc7618 100644
--- a/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts
+++ b/packages/sdk-metrics/test/aggregator/ExponentialHistogram.test.ts
@@ -215,6 +215,20 @@ describe('ExponentialHistogramAccumulation', () => {
         }
       });
     });
+
+    it('ignores NaN', () => {
+      const accumulation = new ExponentialHistogramAccumulation([0, 0], 1);
+
+      accumulation.record(NaN);
+
+      assert.strictEqual(accumulation.scale, 0);
+      assert.strictEqual(accumulation.max, -Infinity);
+      assert.strictEqual(accumulation.min, Infinity);
+      assert.strictEqual(accumulation.sum, 0);
+      assert.strictEqual(accumulation.count, 0);
+      assert.deepStrictEqual(getCounts(accumulation.positive), []);
+      assert.deepStrictEqual(getCounts(accumulation.negative), []);
+    });
   });
   describe('merge', () => {
     it('handles simple (even) case', () => {
diff --git a/packages/sdk-metrics/test/aggregator/Histogram.test.ts b/packages/sdk-metrics/test/aggregator/Histogram.test.ts
index 5d03477d3ee..b0daeac402a 100644
--- a/packages/sdk-metrics/test/aggregator/Histogram.test.ts
+++ b/packages/sdk-metrics/test/aggregator/Histogram.test.ts
@@ -257,6 +257,19 @@ describe('HistogramAccumulation', () => {
       accumulation.record(value);
     }
   });
+
+  it('ignores NaN', () => {
+    const accumulation = new HistogramAccumulation([0, 0], [1, 10, 100]);
+
+    accumulation.record(NaN);
+
+    const pointValue = accumulation.toPointValue();
+    assert.strictEqual(pointValue.max, -Infinity);
+    assert.strictEqual(pointValue.min, Infinity);
+    assert.strictEqual(pointValue.sum, 0);
+    assert.strictEqual(pointValue.count, 0);
+    assert.deepStrictEqual(pointValue.buckets.counts, [0, 0, 0, 0]);
+  });
 });
 
 describe('setStartTime', () => {
diff --git a/packages/sdk-metrics/test/regression/histogram-recording-nan.test.ts b/packages/sdk-metrics/test/regression/histogram-recording-nan.test.ts
new file mode 100644
index 00000000000..a37c9c8e92e
--- /dev/null
+++ b/packages/sdk-metrics/test/regression/histogram-recording-nan.test.ts
@@ -0,0 +1,99 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import * as assert from 'assert';
+import {
+  Aggregation,
+  AggregationTemporality,
+  MeterProvider,
+  MetricReader,
+  DataPoint,
+  ExponentialHistogram,
+  Histogram,
+} from '../../src';
+import { TestMetricReader } from '../export/TestMetricReader';
+
+describe('histogram-recording-nan', () => {
+  it('exponential histogram should not count NaN', async () => {
+    const reader = new TestMetricReader({
+      aggregationTemporalitySelector() {
+        return AggregationTemporality.CUMULATIVE;
+      },
+      aggregationSelector(type) {
+        return Aggregation.ExponentialHistogram();
+      },
+    });
+    const meterProvider = new MeterProvider({
+      readers: [reader],
+    });
+
+    const meter = meterProvider.getMeter('my-meter');
+    const hist = meter.createHistogram('testhist');
+
+    hist.record(1);
+    hist.record(2);
+    hist.record(4);
+    hist.record(NaN);
+
+    const resourceMetrics1 = await collectNoErrors(reader);
+    const dataPoint1 = resourceMetrics1.scopeMetrics[0].metrics[0]
+      .dataPoints[0] as DataPoint<ExponentialHistogram>;
+
+    assert.deepStrictEqual(
+      dataPoint1.value.count,
+      3,
+      'Sum of bucket count values should match overall count'
+    );
+  });
+
+  it('explicit bucket histogram should not count NaN', async () => {
+    const reader = new TestMetricReader({
+      aggregationTemporalitySelector() {
+        return AggregationTemporality.CUMULATIVE;
+      },
+      aggregationSelector(type) {
+        return Aggregation.Histogram();
+      },
+    });
+    const meterProvider = new MeterProvider({
+      readers: [reader],
+    });
+
+    const meter = meterProvider.getMeter('my-meter');
+    const hist = meter.createHistogram('testhist');
+
+    hist.record(1);
+    hist.record(2);
+    hist.record(4);
+    hist.record(NaN);
+
+    const resourceMetrics1 = await collectNoErrors(reader);
+    const dataPoint1 = resourceMetrics1.scopeMetrics[0].metrics[0]
+      .dataPoints[0] as DataPoint<Histogram>;
+
+    assert.deepStrictEqual(
+      dataPoint1.value.count,
+      3,
+      'Sum of bucket count values should match overall count'
+    );
+  });
+
+  const collectNoErrors = async (reader: MetricReader) => {
+    const { resourceMetrics, errors } = await reader.collect();
+    assert.strictEqual(errors.length, 0);
+    return resourceMetrics;
+  };
+});
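Illustrative usage sketch (not part of the patch above): it shows the behavior this change establishes using only public `@opentelemetry/sdk-metrics` APIs; the `PeriodicExportingMetricReader`/`ConsoleMetricExporter` wiring and the meter and instrument names are assumptions made for the example and do not appear in the diff.

import {
  ConsoleMetricExporter,
  MeterProvider,
  PeriodicExportingMetricReader,
} from '@opentelemetry/sdk-metrics';

// Any MetricReader works; a periodic console exporter keeps the sketch self-contained.
const meterProvider = new MeterProvider({
  readers: [
    new PeriodicExportingMetricReader({ exporter: new ConsoleMetricExporter() }),
  ],
});

const histogram = meterProvider
  .getMeter('example-meter')
  .createHistogram('example.histogram');

histogram.record(1);
histogram.record(2);
histogram.record(4);
histogram.record(NaN); // ignored: contributes to neither count, sum, min, max, nor any bucket

// With this fix, the exported histogram data point reports count === 3 and sum === 7,
// so the sum of the bucket counts matches the overall count.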