useReplayData.tsx
import {useCallback, useMemo, useRef} from 'react';
import {ALL_ACCESS_PROJECTS} from 'sentry/constants/pageFilters';
import useFetchParallelPages from 'sentry/utils/api/useFetchParallelPages';
import useFetchSequentialPages from 'sentry/utils/api/useFetchSequentialPages';
import {DiscoverDatasets} from 'sentry/utils/discover/types';
import parseLinkHeader from 'sentry/utils/parseLinkHeader';
import type {ApiQueryKey} from 'sentry/utils/queryClient';
import {useApiQuery, useQueryClient} from 'sentry/utils/queryClient';
import {mapResponseToReplayRecord} from 'sentry/utils/replays/replayDataUtils';
import type RequestError from 'sentry/utils/requestError/requestError';
import useProjects from 'sentry/utils/useProjects';
import type {ReplayError, ReplayRecord} from 'sentry/views/replays/types';

type Options = {
/**
* The organization slug
*/
orgSlug: string;
/**
* The replayId
*/
replayId: string;
/**
* Default: 50
* You can override this for testing
*/
errorsPerPage?: number;
/**
* Default: 100
* You can override this for testing
*/
segmentsPerPage?: number;
};

interface Result {
attachments: unknown[];
errors: ReplayError[];
fetchError: undefined | RequestError;
fetching: boolean;
onRetry: () => void;
projectSlug: string | null;
replayRecord: ReplayRecord | undefined;
}

/**
* A react hook to load core replay data over the network.
*
* Core replay data includes:
* 1. The root replay EventTransaction object
* - This includes `startTimestamp` and `tags`
* 2. RRWeb, Breadcrumb, and Span attachment data
* - We make an API call to get a list of segments; each segment contains a
* list of attachments
* - There may be a few large segments, or many small segments; it depends:
* if the replay has many events/errors there will be many small segments,
* whereas if the page changes rapidly across each pageload there will be
* larger, but potentially fewer, segments.
* 3. Related Event data
* - Event details are not part of the attachments payload, so we have to
* request them separately
*
* This function should stay focused on loading data over the network.
* Front-end processing, filtering and re-mixing of the different data streams
* must be delegated to the `ReplayReader` class.
*
* @param {Options} options Where to find the root replay event, plus optional page sizes for testing
* @returns A unified result of the network requests: the replay record with its
* attachments and related errors, ready to be handed to `ReplayReader`, or a fetch error.
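*
* @example
* // Minimal usage sketch; the slugs below are illustrative placeholders.
* const {fetching, fetchError, replayRecord, attachments, errors, onRetry} =
* useReplayData({orgSlug: 'my-org', replayId: 'a1b2c3d4e5f6'});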
*/
function useReplayData({
replayId,
orgSlug,
errorsPerPage = 50,
segmentsPerPage = 100,
}: Options): Result {
const hasFetchedAttachments = useRef(false);
const projects = useProjects();
const queryClient = useQueryClient();

// Fetch every field of the replay. The TS type definition lists every field
// that's available. It's easier to ask for them all and not have to deal with
// partial types or nullable fields.
// We're overfetching for sure.
const {
data: replayData,
isFetching: isFetchingReplay,
error: fetchReplayError,
} = useApiQuery<{data: unknown}>([`/organizations/${orgSlug}/replays/${replayId}/`], {
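// Cache indefinitely; `onRetry` (via `clearQueryCache` below) explicitly
// invalidates this query when a refetch is needed.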
staleTime: Infinity,
retry: false,
});

const replayRecord = useMemo(
() => (replayData?.data ? mapResponseToReplayRecord(replayData.data) : undefined),
[replayData?.data]
);
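
// The recording-segments endpoint is project-scoped, so the project slug
// must be resolved from the replay record before attachments can load.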
const projectSlug = useMemo(() => {
if (!replayRecord) {
return null;
}
return projects.projects.find(p => p.id === replayRecord.project_id)?.slug ?? null;
}, [replayRecord, projects.projects]);

const getAttachmentsQueryKey = useCallback(
({cursor, per_page}: {cursor: string; per_page: number}): ApiQueryKey => {
return [
`/projects/${orgSlug}/${projectSlug}/replays/${replayId}/recording-segments/`,
{
query: {
download: true,
per_page,
cursor,
},
},
];
},
[orgSlug, projectSlug, replayId]
);

const {
pages: attachmentPages,
isFetching: isFetchingAttachments,
error: fetchAttachmentsError,
} = useFetchParallelPages({
enabled: !fetchReplayError && Boolean(projectSlug) && Boolean(replayRecord),
hits: replayRecord?.count_segments ?? 0,
getQueryKey: getAttachmentsQueryKey,
perPage: segmentsPerPage,
});
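
// Fetch error events that happened during the replay. The same
// `replays-events-meta` endpoint is queried twice: here against the
// Discover dataset, and below against the Issue Platform dataset.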
const getErrorsQueryKey = useCallback(
({cursor, per_page}: {cursor: string; per_page: number}): ApiQueryKey => {
// Clone the `finished_at` time and bump it up one second because finishedAt
// has the `ms` portion truncated, while replays-events-meta operates on
// timestamps with `ms` attached. So finishedAt could be at time `12:00:00.000Z`
// while the event is saved with `12:00:00.450Z`.
const finishedAtClone = new Date(replayRecord?.finished_at ?? '');
finishedAtClone.setSeconds(finishedAtClone.getSeconds() + 1);
return [
`/organizations/${orgSlug}/replays-events-meta/`,
{
query: {
dataset: DiscoverDatasets.DISCOVER,
start: replayRecord?.started_at.toISOString(),
end: finishedAtClone.toISOString(),
project: ALL_ACCESS_PROJECTS,
query: `replayId:[${replayRecord?.id}]`,
per_page,
cursor,
},
},
];
},
[orgSlug, replayRecord]
);

const getPlatformErrorsQueryKey = useCallback(
({cursor, per_page}: {cursor: string; per_page: number}): ApiQueryKey => {
// Clone the `finished_at` time and bump it up one second because finishedAt
// has the `ms` portion truncated, while replays-events-meta operates on
// timestamps with `ms` attached. So finishedAt could be at time `12:00:00.000Z`
// while the event is saved with `12:00:00.450Z`.
const finishedAtClone = new Date(replayRecord?.finished_at ?? '');
finishedAtClone.setSeconds(finishedAtClone.getSeconds() + 1);
return [
`/organizations/${orgSlug}/replays-events-meta/`,
{
query: {
dataset: DiscoverDatasets.ISSUE_PLATFORM,
start: replayRecord?.started_at.toISOString(),
end: finishedAtClone.toISOString(),
project: ALL_ACCESS_PROJECTS,
query: `replayId:[${replayRecord?.id}]`,
per_page,
cursor,
},
},
];
},
[orgSlug, replayRecord]
);

const {
pages: errorPages,
isFetching: isFetchingErrors,
getLastResponseHeader: lastErrorsResponseHeader,
} = useFetchParallelPages<{data: ReplayError[]}>({
enabled: !fetchReplayError && Boolean(projectSlug) && Boolean(replayRecord),
hits: replayRecord?.count_errors ?? 0,
getQueryKey: getErrorsQueryKey,
perPage: errorsPerPage,
});

const linkHeader = lastErrorsResponseHeader?.('Link') ?? null;
const links = parseLinkHeader(linkHeader);
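
// The parallel fetch above is sized by `count_errors`, which can be stale
// or zero. If the Link header reports more results (or there was no count
// to parallelize on), page through the remainder sequentially.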
const {pages: extraErrorPages, isFetching: isFetchingExtraErrors} =
useFetchSequentialPages<{data: ReplayError[]}>({
enabled:
!fetchReplayError &&
!isFetchingErrors &&
(!replayRecord?.count_errors || Boolean(links.next?.results)),
initialCursor: links.next?.cursor,
getQueryKey: getErrorsQueryKey,
perPage: errorsPerPage,
});
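
// Issue-platform events have no count on the replay record to parallelize
// against, so they are always fetched by paging sequentially from the start.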
const {pages: platformErrorPages, isFetching: isFetchingPlatformErrors} =
useFetchSequentialPages<{data: ReplayError[]}>({
enabled: true,
getQueryKey: getPlatformErrorsQueryKey,
perPage: errorsPerPage,
});

const clearQueryCache = useCallback(() => {
queryClient.invalidateQueries({
queryKey: [`/organizations/${orgSlug}/replays/${replayId}/`],
});
queryClient.invalidateQueries({
queryKey: [
`/projects/${orgSlug}/${projectSlug}/replays/${replayId}/recording-segments/`,
],
});
// The next invalidation isn't optimized: it clears the cached error
// events for all replayIds, not just this one.
queryClient.invalidateQueries({
queryKey: [`/organizations/${orgSlug}/replays-events-meta/`],
});
}, [orgSlug, replayId, projectSlug, queryClient]);

return useMemo(() => {
// This hook can enter a state where `fetching` below is false
// before it is entirely ready (i.e. it has not fetched
// attachments yet). This can cause downstream components to
// think it is no longer fetching and to display an error
// because there are no attachments. The check below requires
// that we have attempted to fetch attachments at least once (or that
// the fetch errored) before we toggle the fetching state to false.
hasFetchedAttachments.current =
hasFetchedAttachments.current || isFetchingAttachments;
const fetching =
isFetchingReplay ||
isFetchingAttachments ||
isFetchingErrors ||
isFetchingExtraErrors ||
isFetchingPlatformErrors ||
(!hasFetchedAttachments.current &&
!fetchAttachmentsError &&
Boolean(replayRecord?.count_segments));
const allErrors = errorPages
.concat(extraErrorPages)
.concat(platformErrorPages)
.flatMap(page => page.data);
return {
attachments: attachmentPages.flat(2),
errors: allErrors,
fetchError: fetchReplayError ?? undefined,
fetching,
onRetry: clearQueryCache,
projectSlug,
replayRecord,
};
}, [
attachmentPages,
clearQueryCache,
errorPages,
extraErrorPages,
fetchReplayError,
fetchAttachmentsError,
isFetchingAttachments,
isFetchingErrors,
isFetchingExtraErrors,
isFetchingPlatformErrors,
isFetchingReplay,
platformErrorPages,
projectSlug,
replayRecord,
]);
}
export default useReplayData;