@@ -153,131 +153,144 @@ def export(
     def shutdown(self) -> None:
         raise NotImplementedError()
 
-    def convert_to_timeseries(
+    def _convert_to_timeseries(
         self, export_records: Sequence[ExportRecord]
     ) -> Sequence[TimeSeries]:
         converter_map = {
-            MinMaxSumCountAggregator: self.convert_from_min_max_sum_count,
-            SumAggregator: self.convert_from_sum,
-            HistogramAggregator: self.convert_from_histogram,
-            LastValueAggregator: self.convert_from_last_value,
-            ValueObserverAggregator: self.convert_from_last_value,
+            MinMaxSumCountAggregator: self._convert_from_min_max_sum_count,
+            SumAggregator: self._convert_from_sum,
+            HistogramAggregator: self._convert_from_histogram,
+            LastValueAggregator: self._convert_from_last_value,
+            ValueObserverAggregator: self._convert_from_value_observer,
         }
         timeseries = []
         for export_record in export_records:
             aggregator_type = type(export_record.aggregator)
             converter = converter_map.get(aggregator_type)
-            if not converter:
+            if converter is None:
                 raise ValueError(
                     str(aggregator_type) + " conversion is not supported"
                 )
             timeseries.extend(converter(export_record))
         return timeseries
 
-    def convert_from_sum(self, sum_record: ExportRecord) -> TimeSeries:
-        name = sum_record.instrument.name
-        value = sum_record.aggregator.checkpoint
-        return [self.create_timeseries(sum_record, name, value)]
+    def _convert_from_sum(self, sum_record: ExportRecord) -> TimeSeries:
+        return [
+            self._create_timeseries(
+                sum_record,
+                sum_record.instrument.name,
+                sum_record.aggregator.checkpoint,
+            )
+        ]
 
-    def convert_from_min_max_sum_count(
+    def _convert_from_min_max_sum_count(
         self, min_max_sum_count_record: ExportRecord
     ) -> TimeSeries:
         timeseries = []
-        agg_types = ["min", "max", "sum", "count"]
-        for agg_type in agg_types:
+        for agg_type in ["min", "max", "sum", "count"]:
             name = min_max_sum_count_record.instrument.name + "_" + agg_type
             value = getattr(
                 min_max_sum_count_record.aggregator.checkpoint, agg_type
             )
             timeseries.append(
-                self.create_timeseries(min_max_sum_count_record, name, value)
+                self._create_timeseries(min_max_sum_count_record, name, value)
             )
         return timeseries
 
-    def convert_from_histogram(
+    def _convert_from_histogram(
         self, histogram_record: ExportRecord
     ) -> TimeSeries:
         count = 0
         timeseries = []
         for bound in histogram_record.aggregator.checkpoint.keys():
-            bb = "+Inf" if bound == float("inf") else str(bound)
-            name = (
-                histogram_record.instrument.name + '_bucket{le="' + bb + '"}'
-            )
+            bound_str = "+Inf" if bound == float("inf") else str(bound)
             value = histogram_record.aggregator.checkpoint[bound]
             timeseries.append(
-                self.create_timeseries(histogram_record, name, value)
+                self._create_timeseries(
+                    histogram_record,
+                    histogram_record.instrument.name,
+                    value,
+                    labels=[("le", bound_str)],
+                )
             )
             count += value
         name = histogram_record.instrument.name + "_count"
         timeseries.append(
-            self.create_timeseries(histogram_record, name, float(count))
+            self._create_timeseries(histogram_record, name, float(count))
         )
         return timeseries
 
-    def convert_from_last_value(
+    def _convert_from_last_value(
         self, last_value_record: ExportRecord
     ) -> TimeSeries:
-        name = last_value_record.instrument.name
-        value = last_value_record.aggregator.checkpoint
-        return [self.create_timeseries(last_value_record, name, value)]
+        return [
+            self._create_timeseries(
+                last_value_record,
+                last_value_record.instrument.name,
+                last_value_record.aggregator.checkpoint,
+            )
+        ]
 
-    def convert_from_value_observer(
+    def _convert_from_value_observer(
         self, value_observer_record: ExportRecord
     ) -> TimeSeries:
         timeseries = []
-        agg_types = ["min", "max", "sum", "count", "last"]
-        for agg_type in agg_types:
-            name = value_observer_record.instrument.name + "_" + agg_type
-            value = getattr(
-                value_observer_record.aggregator.checkpoint, agg_type
-            )
+        for agg_type in ["min", "max", "sum", "count", "last"]:
             timeseries.append(
-                self.create_timeseries(value_observer_record, name, value)
+                self._create_timeseries(
+                    value_observer_record,
+                    value_observer_record.instrument.name + "_" + agg_type,
+                    getattr(
+                        value_observer_record.aggregator.checkpoint, agg_type
+                    ),
+                )
             )
         return timeseries
 
     # TODO: Implement convert from quantile once supported by SDK for Prometheus Summaries
-    def convert_from_quantile(
+    def _convert_from_quantile(
         self, summary_record: ExportRecord
     ) -> TimeSeries:
         raise NotImplementedError()
 
     # pylint: disable=no-member
-    def create_timeseries(
-        self, export_record: ExportRecord, name, value: float
+    def _create_timeseries(
+        self, export_record: ExportRecord, name, value: float, labels=None
     ) -> TimeSeries:
         timeseries = TimeSeries()
-        # Add name label, record labels and resource labels
-        timeseries.labels.append(self.create_label("__name__", name))
-        resource_attributes = export_record.resource.attributes
-        for label_name, label_value in resource_attributes.items():
-            timeseries.labels.append(
-                self.create_label(label_name, label_value)
-            )
-        for label in export_record.labels:
-            if label[0] not in resource_attributes.keys():
-                timeseries.labels.append(self.create_label(label[0], label[1]))
-        # Add sample
-        timeseries.samples.append(
-            self.create_sample(
-                export_record.aggregator.last_update_timestamp, value
-            )
-        )
-        return timeseries
+        seen = set()
+
+        def add_label(label_name, label_value):
+            # Label name must contain only alphanumeric characters and underscores
+            label_name = re.sub("[^\\w_]", "_", label_name)
+            if label_name not in seen:
+                label = Label()
+                label.name = label_name
+                label.value = label_value
+                timeseries.labels.append(label)
+                seen.add(label_name)
+
+        add_label("__name__", name)
+        if labels:
+            for [label_name, label_value] in labels:
+                add_label(label_name, label_value)
+        if export_record.resource.attributes:
+            for (
+                label_name,
+                label_value,
+            ) in export_record.resource.attributes.items():
+                add_label(label_name, label_value)
+        if export_record.labels:
+            for [label_name, label_value] in export_record.labels:
+                add_label(label_name, label_value)
 
-    def create_sample(self, timestamp: int, value: float) -> Sample:
         sample = Sample()
-        sample.timestamp = int(timestamp / 1000000)
+        sample.timestamp = int(
+            export_record.aggregator.last_update_timestamp / 1000000
+        )
         sample.value = value
-        return sample
-
-    def create_label(self, name: str, value: str) -> Label:
-        label = Label()
-        # Label name must contain only alphanumeric characters and underscores
-        label.name = re.sub("[^0-9a-zA-Z_]+", "_", name)
-        label.value = value
-        return label
+        timeseries.samples.append(sample)
+        return timeseries
 
     def build_message(self, timeseries: Sequence[TimeSeries]) -> bytes:
         raise NotImplementedError()
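
Reviewer note: below is a minimal, standalone sketch of the label handling that the new `_create_timeseries` / `add_label` closure implements, so the precedence and sanitization rules can be tried outside the exporter. The helper name `build_labels` and the sample inputs are illustrative assumptions, not part of this PR; the logic mirrors the diff above: label names are reduced to word characters, and duplicates are dropped first-write-wins in the order `__name__`, explicit labels (e.g. `le`), resource attributes, then record labels.

```python
import re


def build_labels(name, extra_labels, resource_attributes, record_labels):
    # Illustrative sketch only -- mirrors the add_label closure in _create_timeseries.
    seen = set()
    labels = []

    def add_label(label_name, label_value):
        # Label names are restricted to word characters; everything else becomes "_".
        label_name = re.sub("[^\\w_]", "_", label_name)
        if label_name not in seen:  # first writer wins
            labels.append((label_name, label_value))
            seen.add(label_name)

    add_label("__name__", name)
    for label_name, label_value in extra_labels:
        add_label(label_name, label_value)
    for label_name, label_value in resource_attributes.items():
        add_label(label_name, label_value)
    for label_name, label_value in record_labels:
        add_label(label_name, label_value)
    return labels


print(
    build_labels(
        "http_requests_bucket",
        [("le", "+Inf")],
        {"service.name": "api"},        # "." is sanitized to "_"
        [("service_name", "ignored")],  # dropped: duplicate after sanitization
    )
)
# [('__name__', 'http_requests_bucket'), ('le', '+Inf'), ('service_name', 'api')]
```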