@@ -5,7 +5,6 @@ import { Client } from 'pg'
5
5
type PostgresPlugin = Plugin < {
6
6
global : {
7
7
pgClient : Client
8
- buffer : ReturnType < typeof createBuffer >
9
8
eventsToIgnore : Set < string >
10
9
sanitizedTableName : string
11
10
}
@@ -17,37 +16,54 @@ type PostgresPlugin = Plugin<{
17
16
tableName : string
18
17
dbUsername : string
19
18
dbPassword : string
20
- uploadSeconds : string
21
- uploadMegabytes : string
22
19
eventsToIgnore : string
23
- isHeroku : 'Yes' | 'No'
20
+ hasSelfSignedCert : 'Yes' | 'No'
24
21
}
25
22
} >
26
23
27
24
type PostgresMeta = PluginMeta < PostgresPlugin >
28
25
29
26
// One PostHog event flattened into the exact column values inserted into Postgres.
interface ParsedEvent {
    // Optional: filled in at insert time (generateUuid) when the event has none.
    uuid?: string
    eventName: string
    // The next four fields are JSON-serialized in exportEvents before batching.
    properties: string
    elements: string
    set: string
    set_once: string
    distinct_id: string
    team_id: number
    // Taken from properties.$ip with event.ip as fallback; may be null.
    ip: string | null
    site_url: string
    // ISO-8601 string built from the event's timestamp/now/sent_at fields.
    timestamp: string
}
42
39
43
- type InsertQueryValue = string | number
44
-
45
40
// Payload handed to the uploadBatchToPostgres job by exportEvents.
interface UploadJobPayload {
    batch: ParsedEvent[]
    // Random id used to correlate log lines belonging to one batch.
    batchId: number
    // Presumably incremented when the batch is re-enqueued; the retry logic
    // lives in insertBatchIntoPostgres (not fully visible here) — TODO confirm.
    retriesPerformedSoFar: number
}
50
45
46
+ const randomBytes = ( ) : string => {
47
+ return ( ( ( 1 + Math . random ( ) ) * 0x10000 ) | 0 ) . toString ( 16 ) . substring ( 1 )
48
+ }
49
+
50
+ const generateUuid = ( ) : string => {
51
+ return (
52
+ randomBytes ( ) +
53
+ randomBytes ( ) +
54
+ '-' +
55
+ randomBytes ( ) +
56
+ '-3' +
57
+ randomBytes ( ) . substr ( 0 , 2 ) +
58
+ '-' +
59
+ randomBytes ( ) +
60
+ '-' +
61
+ randomBytes ( ) +
62
+ randomBytes ( ) +
63
+ randomBytes ( )
64
+ ) . toLowerCase ( )
65
+ }
66
+
51
67
export const jobs : PostgresPlugin [ 'jobs' ] = {
52
68
uploadBatchToPostgres : async ( payload : UploadJobPayload , meta : PostgresMeta ) => {
53
69
await insertBatchIntoPostgres ( payload , meta )
@@ -66,9 +82,6 @@ export const setupPlugin: PostgresPlugin['setupPlugin'] = async (meta) => {
66
82
}
67
83
}
68
84
69
- const uploadMegabytes = Math . max ( 1 , Math . min ( parseInt ( config . uploadMegabytes ) || 1 , 10 ) )
70
- const uploadSeconds = Math . max ( 1 , Math . min ( parseInt ( config . uploadSeconds ) || 1 , 600 ) )
71
-
72
85
global . sanitizedTableName = sanitizeSqlIdentifier ( config . tableName )
73
86
74
87
const queryError = await executeQuery (
@@ -93,103 +106,102 @@ export const setupPlugin: PostgresPlugin['setupPlugin'] = async (meta) => {
93
106
throw new Error ( `Unable to connect to PostgreSQL instance and create table with error: ${ queryError . message } ` )
94
107
}
95
108
96
- global . buffer = createBuffer ( {
97
- limit : uploadMegabytes * 1024 * 1024 ,
98
- timeoutSeconds : uploadSeconds ,
99
- onFlush : async ( batch ) => {
100
- await insertBatchIntoPostgres (
101
- { batch, batchId : Math . floor ( Math . random ( ) * 1000000 ) , retriesPerformedSoFar : 0 } ,
102
- meta
103
- )
104
- } ,
105
- } )
106
-
107
109
global . eventsToIgnore = new Set (
108
110
config . eventsToIgnore ? config . eventsToIgnore . split ( ',' ) . map ( ( event ) => event . trim ( ) ) : null
109
111
)
110
112
}
111
113
112
- export async function onEvent ( event : PluginEvent , { global } : PostgresMeta ) {
113
- const {
114
- event : eventName ,
115
- properties,
116
- $set,
117
- $set_once,
118
- distinct_id,
119
- team_id,
120
- site_url,
121
- now,
122
- sent_at,
123
- uuid,
124
- ..._discard
125
- } = event
126
-
127
- const ip = properties ?. [ '$ip' ] || event . ip
128
- const timestamp = event . timestamp || properties ?. timestamp || now || sent_at
129
- let ingestedProperties = properties
130
- let elements = [ ]
131
-
132
- // only move prop to elements for the $autocapture action
133
- if ( eventName === '$autocapture' && properties && '$elements' in properties ) {
134
- const { $elements, ...props } = properties
135
- ingestedProperties = props
136
- elements = $elements
137
- }
114
+ export async function exportEvents ( events : PluginEvent [ ] , { global, jobs } : PostgresMeta ) {
115
+ const batch : ParsedEvent [ ] = [ ]
116
+ for ( const event of events ) {
117
+ const {
118
+ event : eventName ,
119
+ properties,
120
+ $set,
121
+ $set_once,
122
+ distinct_id,
123
+ team_id,
124
+ site_url,
125
+ now,
126
+ sent_at,
127
+ uuid,
128
+ ..._discard
129
+ } = event
130
+
131
+ if ( global . eventsToIgnore . has ( eventName ) ) {
132
+ continue
133
+ }
134
+
135
+ const ip = properties ?. [ '$ip' ] || event . ip
136
+ const timestamp = event . timestamp || properties ?. timestamp || now || sent_at
137
+ let ingestedProperties = properties
138
+ let elements = [ ]
139
+
140
+ // only move prop to elements for the $autocapture action
141
+ if ( eventName === '$autocapture' && properties && '$elements' in properties ) {
142
+ const { $elements, ...props } = properties
143
+ ingestedProperties = props
144
+ elements = $elements
145
+ }
138
146
139
- const parsedEvent = {
140
- uuid,
141
- eventName,
142
- properties : JSON . stringify ( ingestedProperties || { } ) ,
143
- elements : JSON . stringify ( elements || { } ) ,
144
- set : JSON . stringify ( $set || { } ) ,
145
- set_once : JSON . stringify ( $set_once || { } ) ,
146
- distinct_id,
147
- team_id,
148
- ip,
149
- site_url,
150
- timestamp : new Date ( timestamp ) . toISOString ( ) ,
147
+ const parsedEvent : ParsedEvent = {
148
+ uuid,
149
+ eventName,
150
+ properties : JSON . stringify ( ingestedProperties || { } ) ,
151
+ elements : JSON . stringify ( elements || { } ) ,
152
+ set : JSON . stringify ( $set || { } ) ,
153
+ set_once : JSON . stringify ( $set_once || { } ) ,
154
+ distinct_id,
155
+ team_id,
156
+ ip,
157
+ site_url,
158
+ timestamp : new Date ( timestamp ) . toISOString ( ) ,
159
+ }
160
+
161
+ batch . push ( parsedEvent )
151
162
}
152
163
153
- if ( ! global . eventsToIgnore . has ( eventName ) ) {
154
- global . buffer . add ( parsedEvent )
164
+ if ( batch . length > 0 ) {
165
+ await jobs
166
+ . uploadBatchToPostgres ( { batch, batchId : Math . floor ( Math . random ( ) * 1000000 ) , retriesPerformedSoFar : 0 } )
167
+ . runNow ( )
155
168
}
156
169
}
157
170
158
171
export const insertBatchIntoPostgres = async ( payload : UploadJobPayload , { global, jobs, config } : PostgresMeta ) => {
159
- let values : InsertQueryValue [ ] = [ ]
172
+ let values : any [ ] = [ ]
160
173
let valuesString = ''
161
174
162
175
for ( let i = 0 ; i < payload . batch . length ; ++ i ) {
163
176
const { uuid, eventName, properties, elements, set, set_once, distinct_id, team_id, ip, site_url, timestamp } =
164
177
payload . batch [ i ]
165
178
179
+
166
180
// Creates format: ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11), ($12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22)
167
181
valuesString += ' ('
168
182
for ( let j = 1 ; j <= 11 ; ++ j ) {
169
183
valuesString += `$${ 11 * i + j } ${ j === 11 ? '' : ', ' } `
170
184
}
171
185
valuesString += `)${ i === payload . batch . length - 1 ? '' : ',' } `
172
-
173
- values = [
174
- ...values ,
175
- ...[
176
- uuid ,
177
- eventName ,
178
- properties ,
179
- elements ,
180
- set ,
181
- set_once ,
182
- distinct_id ,
183
- team_id ,
184
- ip ,
185
- site_url ,
186
- timestamp ,
187
- ] ,
188
- ]
186
+
187
+ values = values . concat ( [
188
+ uuid || generateUuid ( ) ,
189
+ eventName ,
190
+ properties ,
191
+ elements ,
192
+ set ,
193
+ set_once ,
194
+ distinct_id ,
195
+ team_id ,
196
+ ip ,
197
+ site_url ,
198
+ timestamp ,
199
+ ] )
189
200
}
190
201
191
202
console . log (
192
- `(Batch Id: ${ payload . batchId } ) Flushing ${ payload . batch . length } event${ payload . batch . length > 1 ? 's' : ''
203
+ `(Batch Id: ${ payload . batchId } ) Flushing ${ payload . batch . length } event${
204
+ payload . batch . length > 1 ? 's' : ''
193
205
} to Postgres instance`
194
206
)
195
207
@@ -217,42 +229,38 @@ export const insertBatchIntoPostgres = async (payload: UploadJobPayload, { globa
217
229
}
218
230
219
231
const executeQuery = async ( query : string , values : any [ ] , config : PostgresMeta [ 'config' ] ) : Promise < Error | null > => {
220
- const basicConnectionOptions = config . databaseUrl ? {
221
- connectionString : config . databaseUrl
222
- } : {
223
- user : config . dbUsername ,
224
- password : config . dbPassword ,
225
- host : config . host ,
226
- database : config . dbName ,
227
- port : parseInt ( config . port ) ,
228
- }
229
- const pgClient = new Client (
230
- {
231
- ... basicConnectionOptions ,
232
- ssl : {
233
- rejectUnauthorized : config . isHeroku === "No"
234
- }
235
- }
236
- )
232
+ const basicConnectionOptions = config . databaseUrl
233
+ ? {
234
+ connectionString : config . databaseUrl ,
235
+ }
236
+ : {
237
+ user : config . dbUsername ,
238
+ password : config . dbPassword ,
239
+ host : config . host ,
240
+ database : config . dbName ,
241
+ port : parseInt ( config . port ) ,
242
+ }
243
+ const pgClient = new Client ( {
244
+ ... basicConnectionOptions ,
245
+ ssl : {
246
+ rejectUnauthorized : config . hasSelfSignedCert === 'No' ,
247
+ } ,
248
+ } )
237
249
238
250
await pgClient . connect ( )
239
251
240
252
let error : Error | null = null
241
253
try {
242
254
await pgClient . query ( query , values )
243
255
} catch ( err ) {
244
- error = err
256
+ error = err as Error
245
257
}
246
258
247
259
await pgClient . end ( )
248
260
249
261
return error
250
262
}
251
263
252
- export const teardownPlugin : PostgresPlugin [ 'teardownPlugin' ] = ( { global } ) => {
253
- global . buffer . flush ( )
254
- }
255
-
256
264
const sanitizeSqlIdentifier = ( unquotedIdentifier : string ) : string => {
257
265
return unquotedIdentifier . replace ( / [ ^ \w \d _ ] + / g, '' )
258
266
}
0 commit comments