@@ -446,6 +446,36 @@ describe('renderChartConfig', () => {
     AggregationTemporality: 2, // Cumulative
     ...point,
   }));
+  const histPointsE = [
+    {
+      BucketCounts: [1, 1, 1, 1, 1, 1],
+      ExplicitBounds: [1, 2, 5, 8, 13],
+      TimeUnix: new Date(now),
+      ResourceAttributes: { host: 'test-a' },
+    },
+    {
+      BucketCounts: [2, 2, 2, 2, 2, 2],
+      ExplicitBounds: [1, 2, 5, 8, 13],
+      TimeUnix: new Date(now + ms('5s')),
+      ResourceAttributes: { host: 'test-b' },
+    },
+    {
+      BucketCounts: [2, 1, 2, 1, 2, 1],
+      ExplicitBounds: [1, 2, 5, 8, 13],
+      TimeUnix: new Date(now + ms('1m')),
+      ResourceAttributes: { host: 'test-a' },
+    },
+    {
+      BucketCounts: [3, 3, 2, 2, 3, 3],
+      ExplicitBounds: [1, 2, 5, 8, 13],
+      TimeUnix: new Date(now + ms('65s')),
+      ResourceAttributes: { host: 'test-b' },
+    },
+  ].map(point => ({
+    MetricName: 'test.multiple_series',
+    AggregationTemporality: 2, // Cumulative
+    ...point,
+  }));
 
   await Promise.all([
     bulkInsertMetricsGauge([...gaugePointsA, ...gaugePointsB]),
@@ -462,6 +492,7 @@ describe('renderChartConfig', () => {
       ...histPointsB,
       ...histPointsC,
       ...histPointsD,
+      ...histPointsE,
     ]),
   ]);
 });
@@ -655,6 +686,10 @@ describe('renderChartConfig', () => {
   });
 
   it('calculates min_rate/max_rate correctly for sum metrics', async () => {
+    // Raw data is:
+    // MIN_VARIANT_0: [0, 1, 8, 0, 7, 7, 15, 17, 0, 42]
+    // MIN_VARIANT_1: [0, 2, 9, 0, 15, 25, 35, 57, 0, 92]
+    //
     // Based on the data inserted in the fixture, the expected stream of values
     // for each series after adjusting for the zero reset should be:
     // MIN_VARIANT_0: [0, 1, 8, 8, 15, 15, 23, 25, 25, 67]
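A minimal sketch of the zero-reset adjustment the comment above describes, using the fixture's raw MIN_VARIANT_0 values. The helper name is illustrative only; the actual adjustment is presumably performed by the ClickHouse query that `renderChartConfig` generates, not by a JS helper.

```ts
// Illustrative only: whenever a cumulative counter drops, carry the last
// observed value forward as an offset so the series stays monotonic.
function adjustForCounterReset(raw: number[]): number[] {
  let offset = 0;
  let prev = 0;
  return raw.map(value => {
    if (value < prev) {
      offset += prev; // counter reset detected
    }
    prev = value;
    return value + offset;
  });
}

// MIN_VARIANT_0 from the fixture:
adjustForCounterReset([0, 1, 8, 0, 7, 7, 15, 17, 0, 42]);
// => [0, 1, 8, 8, 15, 15, 23, 25, 25, 67]
```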
@@ -725,20 +760,10 @@ describe('renderChartConfig', () => {
       Since the AggregationTemporality is 2(cumulative), we need to calculate the delta between the two points:
       delta: [10, 10, 10] - [0, 0, 0] = [10, 10, 10]
 
-      Total observations: 10 + 10 + 10 = 30
-      Cumulative counts: [10, 20, 30]
-      p50 point:
-        Rank = 0.5 * 30 = 15
-        This falls in the second bucket (since 10 < 15 ≤ 20)
-
       We need to interpolate between the lower and upper bounds of the second bucket:
-        Lower bound: 10
-        Upper bound: 30
-        Position in bucket: (15 - 10) / (20 - 10) = 0.5
-        Interpolated value: 10 + (30 - 10) * 0.5 = 10 + 10 = 20
-
-      Thus the first point value would be 0 since it's at the start of the bounds.
-      The second point value would be 20 since that is the median point value delta from the first point.
+      cum sum = [10, 20, 30]
+      rank = 0.5 * 30 = 15 (between bounds 10 - 30)
+      interpolate: 10 + ((15 - 10) / 30) * (30 - 10) = 13.3333
     */
     const query = await renderChartConfig(
       {
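The updated expectation of 13.3333 can be reproduced with a small sketch of the interpolation arithmetic spelled out in the new comment. The function name and signature are illustrative; the real computation happens inside the generated ClickHouse SQL.

```ts
// Reproduces the arithmetic from the comment above (illustrative only).
// `lower` and `upper` are the bucket bounds the rank falls between.
function interpolate(lower: number, upper: number, rank: number): number {
  return lower + ((rank - lower) / upper) * (upper - lower);
}

const rank = 0.5 * 30; // cum sum total is 30, so the p50 rank is 15
interpolate(10, 30, rank); // => 13.333...
```

The same formula also yields the 2.9 expectation in the new multi-series test below: `interpolate(2, 5, 3.5) === 2.9`.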
@@ -952,6 +977,57 @@ describe('renderChartConfig', () => {
     expect(res).toMatchSnapshot();
   });
 
+  it('should include multiple data points in percentile computation (p50)', async () => {
+    /*
+      bounds: [1, 2, 5, 8, 13]
+      host = test-a:
+        p1 = [1, 1, 1, 1, 1, 1]
+        p2 = [2, 1, 2, 1, 2, 1]
+      host = test-b:
+        p1 = [2, 2, 2, 2, 2, 2]
+        p2 = [3, 3, 2, 2, 3, 3]
+
+      Compute the diff between adjacent points for each unique host (deltaSumForEach):
+        host = test-a, diff = [1, 0, 1, 0, 1, 0]
+        host = test-b, diff = [1, 1, 0, 0, 1, 1]
+
+      Sum the diffs together to obtain a combined count for the different series:
+        sum elements(d) = [2, 1, 1, 0, 2, 1]
+
+      Now compute the p50 value:
+        sum(d) = 7
+        cum sum = [2, 3, 4, 4, 6, 7]
+        rank = 0.5 * 7 = 3.5 (between bounds 2 - 5)
+        interpolate: 2 + ((3.5 - 2) / 5) * (5 - 2) = 2.9
+
+      Since all the points fall within a single granularity interval, the result should be a
+      single row with the value 2.9.
+    */
+    const query = await renderChartConfig(
+      {
+        select: [
+          {
+            aggFn: 'quantile',
+            level: 0.5,
+            metricName: 'test.multiple_series',
+            metricType: MetricsDataType.Histogram,
+            valueExpression: 'Value',
+          },
+        ],
+        from: metricSource.from,
+        where: '',
+        metricTables: TEST_METRIC_TABLES,
+        dateRange: [new Date(now), new Date(now + ms('2m'))],
+        granularity: '5 minute',
+        timestampValueExpression: metricSource.timestampValueExpression,
+        connection: connection.id,
+      },
+      metadata,
+    );
+    const res = await queryData(query);
+    expect(res).toMatchSnapshot();
+  });
+
   // HDX-1515: Handle counter reset in histogram metric in the same way that the counter reset
   // is handled for sum metrics.
   it.skip('three_timestamps_bounded histogram with reset (p50)', async () => {
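For reference, the end-to-end arithmetic that the new test's comment walks through can be sketched as below. The host names, bucket counts, and formula come from the test fixture and comment; the intermediate variables are illustrative, and the production query does the equivalent work in ClickHouse SQL.

```ts
// Sketch of the multi-series histogram p50 computation described above.
const bounds = [1, 2, 5, 8, 13];
const series = {
  'test-a': { p1: [1, 1, 1, 1, 1, 1], p2: [2, 1, 2, 1, 2, 1] },
  'test-b': { p1: [2, 2, 2, 2, 2, 2], p2: [3, 3, 2, 2, 3, 3] },
};

// Per-host delta between adjacent cumulative points, then element-wise sum.
const deltas = Object.values(series).map(({ p1, p2 }) =>
  p2.map((count, i) => count - p1[i]),
);
const combined = deltas.reduce((acc, d) => acc.map((v, i) => v + d[i]));
// combined = [2, 1, 1, 0, 2, 1]

// Cumulative counts and the p50 rank.
let running = 0;
const cum = combined.map(v => (running += v)); // [2, 3, 4, 4, 6, 7]
const rank = 0.5 * cum[cum.length - 1]; // 3.5

// Rank 3.5 falls between cumulative counts 3 and 4, i.e. bounds 2 and 5.
const [lower, upper] = [2, 5];
const p50 = lower + ((rank - lower) / upper) * (upper - lower); // 2.9
```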