Skip to content

Commit de10070

Browse files
authored
Merge branch 'v2' into aaron/rrweb-client-side
2 parents ad66255 + b16c8e1 commit de10070

33 files changed

+1029
-173
lines changed

.changeset/honest-balloons-walk.md

+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
---
2+
"@hyperdx/api": patch
3+
"@hyperdx/app": patch
4+
---
5+
6+
chore: bump node version to v22

.changeset/honest-fishes-love.md

+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
---
2+
"@hyperdx/common-utils": patch
3+
"@hyperdx/app": patch
4+
---
5+
6+
feat: compute charts ratio

.changeset/mighty-crabs-fry.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@hyperdx/app": patch
3+
---
4+
5+
feat: add saved filters for searches

.changeset/old-rules-check.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@hyperdx/common-utils": patch
3+
---
4+
5+
Fixes the histogram query to perform quantile calculation across all data points

.github/workflows/main.yml

+3-3
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ concurrency:
1010
jobs:
1111
lint:
1212
timeout-minutes: 8
13-
runs-on: ubuntu-20.04
13+
runs-on: ubuntu-24.04
1414
steps:
1515
- name: Checkout
1616
uses: actions/checkout@v4
@@ -30,7 +30,7 @@ jobs:
3030
run: make ci-lint
3131
unit:
3232
timeout-minutes: 8
33-
runs-on: ubuntu-20.04
33+
runs-on: ubuntu-24.04
3434
steps:
3535
- name: Checkout
3636
uses: actions/checkout@v4
@@ -48,7 +48,7 @@ jobs:
4848
run: make ci-unit
4949
integration:
5050
timeout-minutes: 8
51-
runs-on: ubuntu-20.04
51+
runs-on: ubuntu-24.04
5252
steps:
5353
- name: Checkout
5454
uses: actions/checkout@v4

.github/workflows/push.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ concurrency:
88
jobs:
99
push-downstream:
1010
timeout-minutes: 5
11-
runs-on: ubuntu-20.04
11+
runs-on: ubuntu-24.04
1212
steps:
1313
- uses: actions/github-script@v7
1414
env:

.github/workflows/pushv2.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ concurrency:
88
jobs:
99
push-downstream:
1010
timeout-minutes: 5
11-
runs-on: ubuntu-20.04
11+
runs-on: ubuntu-24.04
1212
steps:
1313
- uses: actions/github-script@v7
1414
env:

.github/workflows/release.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ permissions:
1010
jobs:
1111
publish_common_utils:
1212
name: Publish @hyperdx/common-utils
13-
runs-on: ubuntu-latest
13+
runs-on: ubuntu-24.04
1414
outputs:
1515
changeset_outputs_hasChangesets:
1616
${{ steps.changesets.outputs.hasChangesets }}
@@ -44,7 +44,7 @@ jobs:
4444
release:
4545
name: Release
4646
needs: publish_common_utils
47-
runs-on: ubuntu-latest
47+
runs-on: ubuntu-24.04
4848
strategy:
4949
matrix:
5050
release:

.nvmrc

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
v18.20.3
1+
v22.14.0

docker/fullstack/Dockerfile

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
# - API (Node)
33
# - App (Frontend)
44

5-
ARG NODE_VERSION=18.20.3
5+
ARG NODE_VERSION=22.14.0
66

77
# base #############################################################################################
88
FROM node:${NODE_VERSION}-alpine AS base

docker/local/Dockerfile

+1-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
# - Allow persisting settings on disk
1313
# - Limiting persisted data with some auto rotation
1414

15-
ARG NODE_VERSION=18.20.3
15+
ARG NODE_VERSION=22.14.0
1616
ARG CLICKHOUSE_VERSION=24
1717
ARG OTEL_COLLECTOR_VERSION=0.117.0
1818
# Get Node base image to copy over Node binaries

packages/api/Dockerfile

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
## base #############################################################################################
2-
FROM node:18.20.3-alpine AS base
2+
FROM node:22.14.0-alpine AS base
33

44
WORKDIR /app
55

@@ -30,7 +30,7 @@ RUN rm -rf node_modules && yarn workspaces focus @hyperdx/api --production
3030

3131
## prod ############################################################################################
3232

33-
FROM node:18.20.3-alpine AS prod
33+
FROM node:22.14.0-alpine AS prod
3434

3535
ARG CODE_VERSION
3636

packages/api/src/clickhouse/__tests__/__snapshots__/renderChartConfig.test.ts.snap

+19-10
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,15 @@ Array [
6060
]
6161
`;
6262

63+
exports[`renderChartConfig Query Metrics should include multiple data points in percentile computation (p50) 1`] = `
64+
Array [
65+
Object {
66+
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
67+
"any(toFloat64OrNull(toString(Rate)))": 2.9,
68+
},
69+
]
70+
`;
71+
6372
exports[`renderChartConfig Query Metrics single avg gauge with group-by 1`] = `
6473
Array [
6574
Object {
@@ -192,11 +201,11 @@ exports[`renderChartConfig Query Metrics two_timestamps_bounded histogram (p25)
192201
Array [
193202
Object {
194203
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
195-
"sum(toFloat64OrNull(toString(Rate)))": 0,
204+
"any(toFloat64OrNull(toString(Rate)))": 0,
196205
},
197206
Object {
198207
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
199-
"sum(toFloat64OrNull(toString(Rate)))": 10,
208+
"any(toFloat64OrNull(toString(Rate)))": 10,
200209
},
201210
]
202211
`;
@@ -205,11 +214,11 @@ exports[`renderChartConfig Query Metrics two_timestamps_bounded histogram (p50)
205214
Array [
206215
Object {
207216
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
208-
"sum(toFloat64OrNull(toString(Rate)))": 0,
217+
"any(toFloat64OrNull(toString(Rate)))": 0,
209218
},
210219
Object {
211220
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
212-
"sum(toFloat64OrNull(toString(Rate)))": 20,
221+
"any(toFloat64OrNull(toString(Rate)))": 13.333333333333332,
213222
},
214223
]
215224
`;
@@ -218,11 +227,11 @@ exports[`renderChartConfig Query Metrics two_timestamps_bounded histogram (p90)
218227
Array [
219228
Object {
220229
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
221-
"sum(toFloat64OrNull(toString(Rate)))": 0,
230+
"any(toFloat64OrNull(toString(Rate)))": 0,
222231
},
223232
Object {
224233
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
225-
"sum(toFloat64OrNull(toString(Rate)))": 30,
234+
"any(toFloat64OrNull(toString(Rate)))": 30,
226235
},
227236
]
228237
`;
@@ -231,11 +240,11 @@ exports[`renderChartConfig Query Metrics two_timestamps_lower_bound_inf histogra
231240
Array [
232241
Object {
233242
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
234-
"sum(toFloat64OrNull(toString(Rate)))": 0,
243+
"any(toFloat64OrNull(toString(Rate)))": 0,
235244
},
236245
Object {
237246
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
238-
"sum(toFloat64OrNull(toString(Rate)))": 1,
247+
"any(toFloat64OrNull(toString(Rate)))": 1,
239248
},
240249
]
241250
`;
@@ -244,11 +253,11 @@ exports[`renderChartConfig Query Metrics two_timestamps_upper_bound_inf histogra
244253
Array [
245254
Object {
246255
"__hdx_time_bucket": "2022-01-05T00:00:00Z",
247-
"sum(toFloat64OrNull(toString(Rate)))": 0,
256+
"any(toFloat64OrNull(toString(Rate)))": 0,
248257
},
249258
Object {
250259
"__hdx_time_bucket": "2022-01-05T00:01:00Z",
251-
"sum(toFloat64OrNull(toString(Rate)))": 30,
260+
"any(toFloat64OrNull(toString(Rate)))": 30,
252261
},
253262
]
254263
`;

packages/api/src/clickhouse/__tests__/renderChartConfig.test.ts

+89-13
Original file line numberDiff line numberDiff line change
@@ -446,6 +446,36 @@ describe('renderChartConfig', () => {
446446
AggregationTemporality: 2, // Cumulative
447447
...point,
448448
}));
449+
const histPointsE = [
450+
{
451+
BucketCounts: [1, 1, 1, 1, 1, 1],
452+
ExplicitBounds: [1, 2, 5, 8, 13],
453+
TimeUnix: new Date(now),
454+
ResourceAttributes: { host: 'test-a' },
455+
},
456+
{
457+
BucketCounts: [2, 2, 2, 2, 2, 2],
458+
ExplicitBounds: [1, 2, 5, 8, 13],
459+
TimeUnix: new Date(now + ms('5s')),
460+
ResourceAttributes: { host: 'test-b' },
461+
},
462+
{
463+
BucketCounts: [2, 1, 2, 1, 2, 1],
464+
ExplicitBounds: [1, 2, 5, 8, 13],
465+
TimeUnix: new Date(now + ms('1m')),
466+
ResourceAttributes: { host: 'test-a' },
467+
},
468+
{
469+
BucketCounts: [3, 3, 2, 2, 3, 3],
470+
ExplicitBounds: [1, 2, 5, 8, 13],
471+
TimeUnix: new Date(now + ms('65s')),
472+
ResourceAttributes: { host: 'test-b' },
473+
},
474+
].map(point => ({
475+
MetricName: 'test.multiple_series',
476+
AggregationTemporality: 2, // Cumulative
477+
...point,
478+
}));
449479

450480
await Promise.all([
451481
bulkInsertMetricsGauge([...gaugePointsA, ...gaugePointsB]),
@@ -462,6 +492,7 @@ describe('renderChartConfig', () => {
462492
...histPointsB,
463493
...histPointsC,
464494
...histPointsD,
495+
...histPointsE,
465496
]),
466497
]);
467498
});
@@ -655,6 +686,10 @@ describe('renderChartConfig', () => {
655686
});
656687

657688
it('calculates min_rate/max_rate correctly for sum metrics', async () => {
689+
// Raw Data is
690+
// MIN_VARIANT_0: [0, 1, 8, 0, 7, 7, 15, 17, 0, 42]
691+
// MIN_VARIANT_1: [0, 2, 9, 0, 15, 25, 35, 57, 0, 92]
692+
//
658693
// Based on the data inserted in the fixture, the expected stream of values
659694
// for each series after adjusting for the zero reset should be:
660695
// MIN_VARIANT_0: [0, 1, 8, 8, 15, 15, 23, 25, 25, 67]
@@ -725,20 +760,10 @@ describe('renderChartConfig', () => {
725760
Since the AggregationTemporality is 2(cumulative), we need to calculate the delta between the two points:
726761
delta: [10, 10, 10] - [0, 0, 0] = [10, 10, 10]
727762
728-
Total observations: 10 + 10 + 10 = 30
729-
Cumulative counts: [10, 20, 30]
730-
p50 point:
731-
Rank = 0.5 * 30 = 15
732-
This falls in the second bucket (since 10 < 15 ≤ 20)
733-
734763
We need to interpolate between the lower and upper bounds of the second bucket:
735-
Lower bound: 10
736-
Upper bound: 30
737-
Position in bucket: (15 - 10) / (20 - 10) = 0.5
738-
Interpolated value: 10 + (30 - 10) * 0.5 = 10 + 10 = 20
739-
740-
Thus the first point value would be 0 since it's at the start of the bounds.
741-
The second point value would be 20 since that is the median point value delta from the first point.
764+
cum sum = [10, 20, 30]
765+
rank = 0.5 * 30 = 15 (between bounds 10 - 30)
766+
interpolate: 10 + ((15 - 10) / 30) * (30 - 10) = 13.3333
742767
*/
743768
const query = await renderChartConfig(
744769
{
@@ -952,6 +977,57 @@ describe('renderChartConfig', () => {
952977
expect(res).toMatchSnapshot();
953978
});
954979

980+
it('should include multiple data points in percentile computation (p50)', async () => {
981+
/*
982+
bounds: [1, 2, 5, 8, 13]
983+
host = test-a:
984+
p1 = [1, 1, 1, 1, 1, 1]
985+
p2 = [2, 1, 2, 1, 2, 1]
986+
host = test-b:
987+
p1 = [2, 2, 2, 2, 2, 2]
988+
p2 = [3, 3, 2, 2, 3, 3]
989+
990+
Compute the diff between adjacent points for each unique host (deltaSumForEach)
991+
host = test-a, diff = [1, 0, 1, 0, 1, 0]
992+
host = test-b, diff = [1, 1, 0, 0, 1, 1]
993+
994+
Sum the diffs together to obtain a combined count for the different series
995+
sum elements(d) = [2, 1, 1, 0, 2, 1]
996+
997+
Now compute the p50 value:
998+
sum(d) = 7
999+
cum sum = [2, 3, 4, 4, 6, 7]
1000+
rank = 0.5 * 7 = 3.5 (between bounds 2 - 5)
1001+
interpolate: 2 + ((3.5 - 2) / 5) * (5 - 2) = 2.9
1002+
1003+
Since all the points fall within a single granularity interval the result should be a single row
1004+
with the value 2.9.
1005+
*/
1006+
const query = await renderChartConfig(
1007+
{
1008+
select: [
1009+
{
1010+
aggFn: 'quantile',
1011+
level: 0.5,
1012+
metricName: 'test.multiple_series',
1013+
metricType: MetricsDataType.Histogram,
1014+
valueExpression: 'Value',
1015+
},
1016+
],
1017+
from: metricSource.from,
1018+
where: '',
1019+
metricTables: TEST_METRIC_TABLES,
1020+
dateRange: [new Date(now), new Date(now + ms('2m'))],
1021+
granularity: '5 minute',
1022+
timestampValueExpression: metricSource.timestampValueExpression,
1023+
connection: connection.id,
1024+
},
1025+
metadata,
1026+
);
1027+
const res = await queryData(query);
1028+
expect(res).toMatchSnapshot();
1029+
});
1030+
9551031
// HDX-1515: Handle counter reset in histogram metric in the same way that the counter reset
9561032
// is handled for sum metrics.
9571033
it.skip('three_timestamps_bounded histogram with reset (p50)', async () => {

packages/app/Dockerfile

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
## base #############################################################################################
2-
FROM node:18.20.3-alpine AS base
2+
FROM node:22.14.0-alpine AS base
33
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
44
RUN apk add --no-cache libc6-compat
55
WORKDIR /app
@@ -41,7 +41,7 @@ RUN rm -rf node_modules && yarn workspaces focus @hyperdx/app --production
4141

4242

4343
## prod ############################################################################################
44-
FROM node:18.20.3-alpine AS prod
44+
FROM node:22.14.0-alpine AS prod
4545
WORKDIR /app
4646

4747
ENV NODE_ENV production

packages/app/package.json

+1
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
"lint:styles": "stylelint **/*/*.{css,scss}",
1717
"ci:lint": "yarn lint && yarn tsc --noEmit && yarn lint:styles --quiet",
1818
"ci:unit": "jest --ci --coverage",
19+
"dev:unit": "jest --watchAll --detectOpenHandles",
1920
"storybook": "storybook dev -p 6006",
2021
"storybook:build": "storybook build",
2122
"knip": "knip"

packages/app/src/DBSearchPage.tsx

+1
Original file line numberDiff line numberDiff line change
@@ -1249,6 +1249,7 @@ function DBSearchPage() {
12491249
dateRange: searchedTimeRange,
12501250
with: aliasWith,
12511251
}}
1252+
sourceId={inputSourceObj?.id}
12521253
{...searchFilters}
12531254
/>
12541255
</ErrorBoundary>

0 commit comments

Comments
 (0)