|
12 | 12 | # See the License for the specific language governing permissions and
|
13 | 13 | # limitations under the License.
|
14 | 14 |
|
| 15 | +import logging |
| 16 | +import re |
15 | 17 | from typing import Dict, Sequence
|
16 | 18 |
|
| 19 | +from opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2 import ( |
| 20 | + WriteRequest, |
| 21 | +) |
17 | 22 | from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import (
|
18 | 23 | Label,
|
19 | 24 | Sample,
|
|
24 | 29 | MetricsExporter,
|
25 | 30 | MetricsExportResult,
|
26 | 31 | )
|
| 32 | +from opentelemetry.sdk.metrics.export.aggregate import ( |
| 33 | + HistogramAggregator, |
| 34 | + LastValueAggregator, |
| 35 | + MinMaxSumCountAggregator, |
| 36 | + SumAggregator, |
| 37 | + ValueObserverAggregator, |
| 38 | +) |
| 39 | + |
| 40 | +logger = logging.getLogger(__name__) |
27 | 41 |
|
28 | 42 |
|
29 | 43 | class PrometheusRemoteWriteMetricsExporter(MetricsExporter):
|
@@ -55,6 +69,14 @@ def __init__(
|
55 | 69 | self.tls_config = tls_config
|
56 | 70 | self.proxies = proxies
|
57 | 71 |
|
| 72 | + self.converter_map = { |
| 73 | + MinMaxSumCountAggregator: self._convert_from_min_max_sum_count, |
| 74 | + SumAggregator: self._convert_from_sum, |
| 75 | + HistogramAggregator: self._convert_from_histogram, |
| 76 | + LastValueAggregator: self._convert_from_last_value, |
| 77 | + ValueObserverAggregator: self._convert_from_value_observer, |
| 78 | + } |
| 79 | + |
    @property
    def endpoint(self):
        # Read-only accessor for the configured remote-write endpoint URL
        # (set during __init__ via the endpoint setter).
        return self._endpoint
|
@@ -142,50 +164,145 @@ def export(
|
    def shutdown(self) -> None:
        # Exporter teardown (flushing/closing the HTTP session) is not
        # implemented yet.
        raise NotImplementedError()
|
144 | 166 |
|
145 |
| - def convert_to_timeseries( |
| 167 | + def _convert_to_timeseries( |
146 | 168 | self, export_records: Sequence[ExportRecord]
|
147 | 169 | ) -> Sequence[TimeSeries]:
|
148 |
| - raise NotImplementedError() |
149 |
| - |
150 |
| - def convert_from_sum(self, sum_record: ExportRecord) -> TimeSeries: |
151 |
| - raise NotImplementedError() |
| 170 | + timeseries = [] |
| 171 | + for export_record in export_records: |
| 172 | + aggregator_type = type(export_record.aggregator) |
| 173 | + converter = self.converter_map.get(aggregator_type) |
| 174 | + if converter: |
| 175 | + timeseries.extend(converter(export_record)) |
| 176 | + else: |
| 177 | + logger.warning( |
| 178 | + "%s aggregator is not supported, record dropped", |
| 179 | + aggregator_type, |
| 180 | + ) |
| 181 | + return timeseries |
152 | 182 |
|
153 |
| - def convert_from_min_max_sum_count( |
| 183 | + def _convert_from_sum( |
| 184 | + self, sum_record: ExportRecord |
| 185 | + ) -> Sequence[TimeSeries]: |
| 186 | + return [ |
| 187 | + self._create_timeseries( |
| 188 | + sum_record, |
| 189 | + sum_record.instrument.name + "_sum", |
| 190 | + sum_record.aggregator.checkpoint, |
| 191 | + ) |
| 192 | + ] |
| 193 | + |
| 194 | + def _convert_from_min_max_sum_count( |
154 | 195 | self, min_max_sum_count_record: ExportRecord
|
155 |
| - ) -> TimeSeries: |
156 |
| - raise NotImplementedError() |
157 |
| - |
158 |
| - def convert_from_histogram( |
| 196 | + ) -> Sequence[TimeSeries]: |
| 197 | + timeseries = [] |
| 198 | + for agg_type in ["min", "max", "sum", "count"]: |
| 199 | + name = min_max_sum_count_record.instrument.name + "_" + agg_type |
| 200 | + value = getattr( |
| 201 | + min_max_sum_count_record.aggregator.checkpoint, agg_type |
| 202 | + ) |
| 203 | + timeseries.append( |
| 204 | + self._create_timeseries(min_max_sum_count_record, name, value) |
| 205 | + ) |
| 206 | + return timeseries |
| 207 | + |
| 208 | + def _convert_from_histogram( |
159 | 209 | self, histogram_record: ExportRecord
|
160 |
| - ) -> TimeSeries: |
161 |
| - raise NotImplementedError() |
| 210 | + ) -> Sequence[TimeSeries]: |
| 211 | + timeseries = [] |
| 212 | + for bound in histogram_record.aggregator.checkpoint.keys(): |
| 213 | + bound_str = "+Inf" if bound == float("inf") else str(bound) |
| 214 | + value = histogram_record.aggregator.checkpoint[bound] |
| 215 | + timeseries.append( |
| 216 | + self._create_timeseries( |
| 217 | + histogram_record, |
| 218 | + histogram_record.instrument.name + "_histogram", |
| 219 | + value, |
| 220 | + extra_label=("le", bound_str), |
| 221 | + ) |
| 222 | + ) |
| 223 | + return timeseries |
162 | 224 |
|
163 |
| - def convert_from_last_value( |
| 225 | + def _convert_from_last_value( |
164 | 226 | self, last_value_record: ExportRecord
|
165 |
| - ) -> TimeSeries: |
166 |
| - raise NotImplementedError() |
167 |
| - |
168 |
| - def convert_from_value_observer( |
| 227 | + ) -> Sequence[TimeSeries]: |
| 228 | + return [ |
| 229 | + self._create_timeseries( |
| 230 | + last_value_record, |
| 231 | + last_value_record.instrument.name + "_last", |
| 232 | + last_value_record.aggregator.checkpoint, |
| 233 | + ) |
| 234 | + ] |
| 235 | + |
| 236 | + def _convert_from_value_observer( |
169 | 237 | self, value_observer_record: ExportRecord
|
170 |
| - ) -> TimeSeries: |
171 |
| - raise NotImplementedError() |
| 238 | + ) -> Sequence[TimeSeries]: |
| 239 | + timeseries = [] |
| 240 | + for agg_type in ["min", "max", "sum", "count", "last"]: |
| 241 | + timeseries.append( |
| 242 | + self._create_timeseries( |
| 243 | + value_observer_record, |
| 244 | + value_observer_record.instrument.name + "_" + agg_type, |
| 245 | + getattr( |
| 246 | + value_observer_record.aggregator.checkpoint, agg_type |
| 247 | + ), |
| 248 | + ) |
| 249 | + ) |
| 250 | + return timeseries |
172 | 251 |
|
173 |
| - def convert_from_quantile( |
    # TODO: Implement convert from quantile once supported by SDK for Prometheus Summaries
    def _convert_from_quantile(
        self, summary_record: ExportRecord
    ) -> Sequence[TimeSeries]:
        # Deliberate stub: quantile aggregation is not yet available in the
        # OpenTelemetry SDK, so Prometheus summary conversion cannot be
        # implemented yet (see TODO above).
        raise NotImplementedError()
|
177 | 257 |
|
178 | 258 | # pylint: disable=no-member
|
179 |
| - def create_timeseries( |
180 |
| - self, export_record: ExportRecord, name, value: float |
| 259 | + def _create_timeseries( |
| 260 | + self, |
| 261 | + export_record: ExportRecord, |
| 262 | + name: str, |
| 263 | + value: float, |
| 264 | + extra_label: (str, str) = None, |
181 | 265 | ) -> TimeSeries:
|
182 |
| - raise NotImplementedError() |
183 |
| - |
184 |
| - def create_sample(self, timestamp: int, value: float) -> Sample: |
185 |
| - raise NotImplementedError() |
| 266 | + timeseries = TimeSeries() |
| 267 | + seen = set() |
| 268 | + |
| 269 | + def add_label(label_name: str, label_value: str): |
| 270 | + # Label name must contain only alphanumeric characters and underscores |
| 271 | + label_name = re.sub("[^\\w_]", "_", label_name) |
| 272 | + if label_name not in seen: |
| 273 | + label = Label() |
| 274 | + label.name = label_name |
| 275 | + label.value = label_value |
| 276 | + timeseries.labels.append(label) |
| 277 | + seen.add(label_name) |
| 278 | + else: |
| 279 | + logger.warning( |
| 280 | + "Duplicate label with name %s and value %s", |
| 281 | + label_name, |
| 282 | + label_value, |
| 283 | + ) |
186 | 284 |
|
187 |
| - def create_label(self, name: str, value: str) -> Label: |
188 |
| - raise NotImplementedError() |
| 285 | + # The __name__ label is required by PromQL as its value appears as the metric_name |
| 286 | + add_label("__name__", name) |
| 287 | + if extra_label: |
| 288 | + add_label(extra_label[0], extra_label[1]) |
| 289 | + if export_record.resource.attributes: |
| 290 | + for ( |
| 291 | + label_name, |
| 292 | + label_value, |
| 293 | + ) in export_record.resource.attributes.items(): |
| 294 | + add_label(label_name, str(label_value)) |
| 295 | + if export_record.labels: |
| 296 | + for [label_name, label_value] in export_record.labels: |
| 297 | + add_label(label_name, label_value) |
| 298 | + |
| 299 | + sample = Sample() |
| 300 | + sample.timestamp = int( |
| 301 | + export_record.aggregator.last_update_timestamp / 1000000 |
| 302 | + ) |
| 303 | + sample.value = value |
| 304 | + timeseries.samples.append(sample) |
| 305 | + return timeseries |
189 | 306 |
|
    def build_message(self, timeseries: Sequence[TimeSeries]) -> bytes:
        # Not implemented yet: will serialize the timeseries into a
        # remote-write request body — presumably a WriteRequest protobuf
        # (imported above); TODO confirm once implemented.
        raise NotImplementedError()
|
|
0 commit comments