Commit f971041

Heavily refactored tests and code after review and fixed issues in Histogram Conversion
1 parent df62a17 · commit f971041

2 files changed: +219 −161 lines


exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py

+77 −64
@@ -153,131 +153,144 @@ def export(
     def shutdown(self) -> None:
         raise NotImplementedError()
 
-    def convert_to_timeseries(
+    def _convert_to_timeseries(
         self, export_records: Sequence[ExportRecord]
     ) -> Sequence[TimeSeries]:
         converter_map = {
-            MinMaxSumCountAggregator: self.convert_from_min_max_sum_count,
-            SumAggregator: self.convert_from_sum,
-            HistogramAggregator: self.convert_from_histogram,
-            LastValueAggregator: self.convert_from_last_value,
-            ValueObserverAggregator: self.convert_from_last_value,
+            MinMaxSumCountAggregator: self._convert_from_min_max_sum_count,
+            SumAggregator: self._convert_from_sum,
+            HistogramAggregator: self._convert_from_histogram,
+            LastValueAggregator: self._convert_from_last_value,
+            ValueObserverAggregator: self._convert_from_value_observer,
         }
         timeseries = []
         for export_record in export_records:
             aggregator_type = type(export_record.aggregator)
             converter = converter_map.get(aggregator_type)
-            if not converter:
+            if converter is None:
                 raise ValueError(
                     str(aggregator_type) + " conversion is not supported"
                 )
             timeseries.extend(converter(export_record))
         return timeseries
 
-    def convert_from_sum(self, sum_record: ExportRecord) -> TimeSeries:
-        name = sum_record.instrument.name
-        value = sum_record.aggregator.checkpoint
-        return [self.create_timeseries(sum_record, name, value)]
+    def _convert_from_sum(self, sum_record: ExportRecord) -> TimeSeries:
+        return [
+            self._create_timeseries(
+                sum_record,
+                sum_record.instrument.name,
+                sum_record.aggregator.checkpoint,
+            )
+        ]
 
-    def convert_from_min_max_sum_count(
+    def _convert_from_min_max_sum_count(
         self, min_max_sum_count_record: ExportRecord
     ) -> TimeSeries:
         timeseries = []
-        agg_types = ["min", "max", "sum", "count"]
-        for agg_type in agg_types:
+        for agg_type in ["min", "max", "sum", "count"]:
             name = min_max_sum_count_record.instrument.name + "_" + agg_type
             value = getattr(
                 min_max_sum_count_record.aggregator.checkpoint, agg_type
             )
             timeseries.append(
-                self.create_timeseries(min_max_sum_count_record, name, value)
+                self._create_timeseries(min_max_sum_count_record, name, value)
             )
         return timeseries
 
-    def convert_from_histogram(
+    def _convert_from_histogram(
         self, histogram_record: ExportRecord
     ) -> TimeSeries:
         count = 0
         timeseries = []
         for bound in histogram_record.aggregator.checkpoint.keys():
-            bb = "+Inf" if bound == float("inf") else str(bound)
-            name = (
-                histogram_record.instrument.name + '_bucket{le="' + bb + '"}'
-            )
+            bound_str = "+Inf" if bound == float("inf") else str(bound)
             value = histogram_record.aggregator.checkpoint[bound]
             timeseries.append(
-                self.create_timeseries(histogram_record, name, value)
+                self._create_timeseries(
+                    histogram_record,
+                    histogram_record.instrument.name,
+                    value,
+                    labels=[("le", bound_str)],
+                )
             )
             count += value
         name = histogram_record.instrument.name + "_count"
         timeseries.append(
-            self.create_timeseries(histogram_record, name, float(count))
+            self._create_timeseries(histogram_record, name, float(count))
         )
         return timeseries
 
-    def convert_from_last_value(
+    def _convert_from_last_value(
         self, last_value_record: ExportRecord
     ) -> TimeSeries:
-        name = last_value_record.instrument.name
-        value = last_value_record.aggregator.checkpoint
-        return [self.create_timeseries(last_value_record, name, value)]
+        return [
+            self._create_timeseries(
+                last_value_record,
+                last_value_record.instrument.name,
+                last_value_record.aggregator.checkpoint,
+            )
+        ]
 
-    def convert_from_value_observer(
+    def _convert_from_value_observer(
         self, value_observer_record: ExportRecord
     ) -> TimeSeries:
         timeseries = []
-        agg_types = ["min", "max", "sum", "count", "last"]
-        for agg_type in agg_types:
-            name = value_observer_record.instrument.name + "_" + agg_type
-            value = getattr(
-                value_observer_record.aggregator.checkpoint, agg_type
-            )
+        for agg_type in ["min", "max", "sum", "count", "last"]:
             timeseries.append(
-                self.create_timeseries(value_observer_record, name, value)
+                self._create_timeseries(
+                    value_observer_record,
+                    value_observer_record.instrument.name + "_" + agg_type,
+                    getattr(
+                        value_observer_record.aggregator.checkpoint, agg_type
+                    ),
+                )
             )
         return timeseries
 
     # TODO: Implement convert from quantile once supported by SDK for Prometheus Summaries
-    def convert_from_quantile(
+    def _convert_from_quantile(
         self, summary_record: ExportRecord
     ) -> TimeSeries:
         raise NotImplementedError()
 
     # pylint: disable=no-member
-    def create_timeseries(
-        self, export_record: ExportRecord, name, value: float
+    def _create_timeseries(
+        self, export_record: ExportRecord, name, value: float, labels=None
     ) -> TimeSeries:
         timeseries = TimeSeries()
-        # Add name label, record labels and resource labels
-        timeseries.labels.append(self.create_label("__name__", name))
-        resource_attributes = export_record.resource.attributes
-        for label_name, label_value in resource_attributes.items():
-            timeseries.labels.append(
-                self.create_label(label_name, label_value)
-            )
-        for label in export_record.labels:
-            if label[0] not in resource_attributes.keys():
-                timeseries.labels.append(self.create_label(label[0], label[1]))
-        # Add sample
-        timeseries.samples.append(
-            self.create_sample(
-                export_record.aggregator.last_update_timestamp, value
-            )
-        )
-        return timeseries
+        seen = set()
+
+        def add_label(label_name, label_value):
+            # Label name must contain only alphanumeric characters and underscores
+            label_name = re.sub("[^\\w_]", "_", label_name)
+            if label_name not in seen:
+                label = Label()
+                label.name = label_name
+                label.value = label_value
+                timeseries.labels.append(label)
+                seen.add(label_name)
+
+        add_label("__name__", name)
+        if labels:
+            for [label_name, label_value] in labels:
+                add_label(label_name, label_value)
+        if export_record.resource.attributes:
+            for (
+                label_name,
+                label_value,
+            ) in export_record.resource.attributes.items():
+                add_label(label_name, label_value)
+        if export_record.labels:
+            for [label_name, label_value] in export_record.labels:
+                add_label(label_name, label_value)
 
-    def create_sample(self, timestamp: int, value: float) -> Sample:
         sample = Sample()
-        sample.timestamp = int(timestamp / 1000000)
+        sample.timestamp = int(
+            export_record.aggregator.last_update_timestamp / 1000000
+        )
         sample.value = value
-        return sample
-
-    def create_label(self, name: str, value: str) -> Label:
-        label = Label()
-        # Label name must contain only alphanumeric characters and underscores
-        label.name = re.sub("[^0-9a-zA-Z_]+", "_", name)
-        label.value = value
-        return label
+        timeseries.samples.append(sample)
+        return timeseries
 
     def build_message(self, timeseries: Sequence[TimeSeries]) -> bytes:
         raise NotImplementedError()
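
The histogram fix above is easiest to see in isolation: every bucket keeps the instrument name and carries its bound as an "le" label, instead of baking '_bucket{le="..."}' into the metric name, and a separate "_count" series accumulates the total. A minimal standalone sketch of that mapping, using a hypothetical convert_histogram_checkpoint helper and plain (name, labels, value) tuples rather than the exporter's TimeSeries protobufs:

from typing import Dict, List, Tuple


def convert_histogram_checkpoint(
    name: str, checkpoint: Dict[float, int]
) -> List[Tuple[str, Dict[str, str], float]]:
    # Mirrors the _convert_from_histogram loop above: one series per bucket,
    # sharing the instrument name and distinguished only by the "le" label.
    series = []
    count = 0
    for bound, value in checkpoint.items():
        bound_str = "+Inf" if bound == float("inf") else str(bound)
        series.append((name, {"le": bound_str}, value))
        count += value
    # The running total is exported as its own "<name>_count" series.
    series.append((name + "_count", {}, float(count)))
    return series


print(convert_histogram_checkpoint("latency", {5.0: 3, 10.0: 2, float("inf"): 1}))
# [('latency', {'le': '5.0'}, 3), ('latency', {'le': '10.0'}, 2),
#  ('latency', {'le': '+Inf'}, 1), ('latency_count', {}, 6.0)]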

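The rewritten _create_timeseries also funnels every label through a single helper, so the metric name, explicit labels, resource attributes, and record labels are all sanitized the same way and duplicates are dropped first-wins. A small sketch of just that step, assuming a hypothetical build_labels helper that returns a plain dict instead of Label protobufs:

import re


def build_labels(*label_groups):
    # First-wins merge: non-word characters in label names are replaced with
    # underscores, and a name that has already been added is ignored.
    seen = set()
    labels = {}
    for group in label_groups:
        for label_name, label_value in group:
            label_name = re.sub(r"[^\w]", "_", label_name)
            if label_name not in seen:
                seen.add(label_name)
                labels[label_name] = label_value
    return labels


print(build_labels(
    [("__name__", "latency_count")],
    [("le", "+Inf")],
    [("service.name", "checkout"), ("le", "0.5")],
))
# {'__name__': 'latency_count', 'le': '+Inf', 'service_name': 'checkout'}

Here the later ("le", "0.5") from the record labels is ignored because the explicit bucket label was added first, matching the precedence order used in the diff.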