From 64634b4a768e52d192529800f1a835dfd1abee9f Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Tue, 9 Aug 2022 12:19:00 -0400 Subject: [PATCH 01/16] Proto & Tox updates - add Tox commands to setup & test the exporter - add the Prometheus Remote Write Proto files & update them. - added a mini-script that wraps the protoc command to regen. them --- .../README.rst | 322 ++++++++++++++ .../examples/Dockerfile | 8 + .../examples/README.md | 42 ++ .../examples/cortex-config.yml | 100 +++++ .../examples/docker-compose.yml | 33 ++ .../examples/requirements.txt | 7 + .../examples/sampleapp.py | 153 +++++++ .../proto/generate-proto-py.sh | 6 + .../gen/gogoproto/gogo.proto | 144 ++++++ .../prometheus_remote_write/gen/remote.proto | 86 ++++ .../prometheus_remote_write/gen/types.proto | 119 +++++ .../setup.cfg | 51 +++ .../setup.py | 32 ++ .../prometheus_remote_write/__init__.py | 384 ++++++++++++++++ .../gen/gogoproto/gogo_pb2.py | 101 +++++ .../prometheus_remote_write/gen/remote_pb2.py | 44 ++ .../prometheus_remote_write/gen/types_pb2.py | 65 +++ .../prometheus_remote_write/version.py | 15 + .../tests/__init__.py | 13 + .../test_prometheus_remote_write_exporter.py | 414 ++++++++++++++++++ tox.ini | 7 + 21 files changed, 2146 insertions(+) create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/README.rst create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/examples/docker-compose.yml create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/examples/requirements.txt create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py create mode 100755 
exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/remote.proto create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/types.proto create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/setup.py create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/tests/__init__.py create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst b/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst new file mode 100644 index 0000000000..1f5dc01404 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst @@ -0,0 +1,322 @@ +OpenTelemetry Prometheus Remote Write Exporter 
+========================================================= + +This package contains an exporter to send `OTLP`_ metrics from the +`OpenTelemetry Python SDK`_ directly to a `Prometheus Remote Write integrated backend`_ +(such as Cortex or Thanos) without having to run an instance of the +Prometheus server. The latest `types.proto`_ and `remote.proto`_ +protocol buffers are used to create the WriteRequest. The image below shows the +two Prometheus exporters in the OpenTelemetry Python SDK. + +Pipeline 1 illustrates the setup required for a `Prometheus "pull" exporter`_. + +Pipeline 2 illustrates the setup required for the Prometheus Remote +Write exporter. + +|Prometheus SDK pipelines| + +The Prometheus Remote Write Exporter is a "push" based exporter and only +works with the OpenTelemetry `push controller`_. The controller +periodically collects data and passes it to the exporter. This exporter +then converts the data into `timeseries`_ and sends it to the Remote +Write integrated backend through HTTP POST requests. The metrics +collection datapath is shown below: + + +See the ``examples`` folder for a demo usage of this exporter + +Table of Contents +================= + +- `Summary`_ +- `Table of Contents`_ + + - `Installation`_ + - `Quickstart`_ + - `Examples`_ + - `Configuring the Exporter`_ + - `Securing the Exporter`_ + + - `Authentication`_ + - `TLS`_ + + - `Supported Aggregators`_ + - `Error Handling`_ + - `Contributing`_ + + - `Design Doc`_ + +Installation +------------ +Prerequisites +~~~~~~~~~~~~~ +1. Install the snappy c-library + **DEB**: ``sudo apt-get install libsnappy-dev`` + + **RPM**: ``sudo yum install libsnappy-devel`` + + **OSX/Brew**: ``brew install snappy`` + + **Windows**: ``pip install python_snappy-0.5-cp36-cp36m-win_amd64.whl`` + +Exporter +~~~~~~~~ + +- To install from the latest PyPi release, run + ``pip install opentelemetry-exporter-prometheus-remote-write`` + + +Quickstart +---------- + +.. 
code:: python + + from opentelemetry import metrics + from opentelemetry.sdk.metrics import MeterProvider + from opentelemetry.exporter.prometheus_remote_write import ( + PrometheusRemoteWriteMetricsExporter + ) + + # Sets the global MeterProvider instance + metrics.set_meter_provider(MeterProvider()) + + # The Meter is responsible for creating and recording metrics. Each meter has a unique name, which we set as the module's name here. + meter = metrics.get_meter(__name__) + + exporter = PrometheusRemoteWriteMetricsExporter(endpoint="endpoint_here") # add other params as needed + + metrics.get_meter_provider().start_pipeline(meter, exporter, 5) + + +Examples +-------- + +This example uses `Docker Compose`_ to set up: + +1. A Python program that creates 5 instruments with 5 unique aggregators + and a randomized load generator +2. An instance of `Cortex`_ to receive the metrics data +3. An instance of `Grafana`_ to visualize the exported data + +Requirements +~~~~~~~~~~~~ + +- Have Docker Compose `installed`_ + +*Users do not need to install Python as the app will be run in the +Docker Container* + +Instructions +~~~~~~~~~~~~ + +1. Run ``docker-compose up -d`` in the ``examples/`` directory + +The ``-d`` flag causes all services to run in detached mode and frees up +your terminal session. This also causes no logs to show up. Users can +attach themselves to the service’s logs manually using +``docker logs ${CONTAINER_ID} --follow`` + +2. Log into the Grafana instance at http://localhost:3000 + + - login credentials are ``username: admin`` and ``password: admin`` + - There may be an additional screen on setting a new password. This + can be skipped and is optional + +3. Navigate to the ``Data Sources`` page + + - Look for a gear icon on the left sidebar and select + ``Data Sources`` + +4. 
Add a new Prometheus Data Source + + - Use ``http://cortex:9009/api/prom`` as the URL + - Set the scrape interval to ``2s`` to make updates + appear quickly **(Optional)** + - click ``Save & Test`` + +5. Go to ``Metrics Explore`` to query metrics + + - Look for a compass icon on the left sidebar + - click ``Metrics`` for a dropdown list of all the available metrics + - Adjust time range by clicking the ``Last 6 hours`` + button on the upper right side of the graph **(Optional)** + - Set up auto-refresh by selecting an option under the + dropdown next to the refresh button on the upper right side of the + graph **(Optional)** + - Click the refresh button and data should show up on the graph + +6. Shutdown the services when finished + + - Run ``docker-compose down`` in the examples directory + +Configuring the Exporter +------------------------ + +The exporter can be configured through parameters passed to the +constructor. Here are all the options: + +- ``endpoint``: url where data will be sent **(Required)** +- ``basic_auth``: username and password for authentication + **(Optional)** +- ``headers``: additional headers for remote write request as + determined by the remote write backend's API **(Optional)** +- ``timeout``: timeout for requests to the remote write endpoint in + seconds **(Optional)** +- ``proxies``: dict mapping request proxy protocols to proxy urls + **(Optional)** +- ``tls_config``: configuration for remote write TLS settings + **(Optional)** + +Example with all the configuration options: + +.. 
code:: python + + exporter = PrometheusRemoteWriteMetricsExporter( + endpoint="http://localhost:9009/api/prom/push", + timeout=30, + basic_auth={ + "username": "user", + "password": "pass123", + }, + headers={ + "X-Scope-Org-ID": "5", + "Authorization": "Bearer mytoken123", + }, + proxies={ + "http": "http://10.10.1.10:3000", + "https": "http://10.10.1.10:1080", + }, + tls_config={ + "cert_file": "path/to/file", + "key_file": "path/to/file", + "ca_file": "path_to_file", + "insecure_skip_verify": true, # for developing purposes + } + ) + +Securing the Exporter +--------------------- + +Authentication +~~~~~~~~~~~~~~ + +The exporter provides two forms of authentication which are shown below. +Users can add their own custom authentication by setting the appropriate +values in the ``headers`` dictionary + +1. Basic Authentication Basic authentication sets a HTTP Authorization + header containing a base64 encoded username/password pair. See `RFC + 7617`_ for more information. This + +.. code:: python + + exporter = PrometheusRemoteWriteMetricsExporter( + basic_auth={"username": "base64user", "password": "base64pass"} + ) + +2. Bearer Token Authentication This custom configuration can be achieved + by passing in a custom ``header`` to the constructor. See `RFC 6750`_ + for more information. + +.. code:: python + + header = { + "Authorization": "Bearer mytoken123" + } + +TLS +~~~ + +Users can add TLS to the exporter's HTTP Client by providing certificate +and key files in the ``tls_config`` parameter. + +Supported Aggregators +--------------------- +Behaviour of these aggregators is outlined in the `OpenTelemetry Specification `_. +All aggregators are converted into the `timeseries`_ data format. However, method in +which they are converted `differs `_ from aggregator to aggregator. A +map of the conversion methods can be found `here `_. 
+ ++------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ +| **OpenTelemetry Aggregator** | **Equivalent Prometheus Data Type** | **Behaviour** | ++------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ +| Sum | Counter | Metric value can only go up or be reset to 0 | ++------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ +| MinMaxSumCount | Gauge | Metric value can arbitrarily increment or decrement | ++------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ +| Histogram | Histogram | Unlike the Prometheus histogram, the OpenTelemetry Histogram does not provide a sum of all observed values | ++------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ +| LastValue | N/A | Metric only contains the most recently observed value | ++------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ +| ValueObserver | N/A | Similar to MinMaxSumCount but also contains LastValue | ++------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ + + +Error Handling +-------------- + +In general, errors are raised by the calling function. The exception is +for failed requests where any error status code is logged as a warning +instead. 
+ +This is because the exporter does not implement any retry logic as data that +failed to export will be dropped. + +For example, consider a situation where a user increments a Counter +instrument 5 times and an export happens between each increment. If the +exports happen like so: + +:: + + SUCCESS FAIL FAIL SUCCESS SUCCESS + 1 2 3 4 5 + +Then the received data will be: + +:: + + 1 4 5 + +Contributing +------------ + +If you would like to learn more about the exporter's structure and +design decisions please view the design document below + +Design Doc +~~~~~~~~~~ + +`Design Document`_ + +This document is stored elsewhere as it contains large images which will +significantly increase the size of this repo. + +.. _Summary: #opentelemetry-python-sdk-prometheus-remote-write-exporter +.. _Table of Contents: #table-of-contents +.. _Installation: #installation +.. _Quickstart: #quickstart +.. _Examples: #examples +.. _Configuring the Exporter: #configuring-the-exporter +.. _Securing the Exporter: #securing-the-exporter +.. _Authentication: #authentication +.. _TLS: #tls +.. _Supported Aggregators: #supported-aggregators +.. _Error Handling: #error-handling +.. _Contributing: #contributing +.. _Design Doc: #design-doc +.. |Prometheus SDK pipelines| image:: https://user-images.githubusercontent.com/20804975/100285430-e320fd80-2f3e-11eb-8217-a562c559153c.png +.. _RFC 7617: https://tools.ietf.org/html/rfc7617 +.. _RFC 6750: https://tools.ietf.org/html/rfc6750 +.. _Design Document: https://github.com/open-o11y/docs/blob/master/python-prometheus-remote-write/design-doc.md +.. _OTLP: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/otlp.md +.. _OpenTelemetry Python SDK: https://github.com/open-telemetry/opentelemetry-python +.. _Prometheus "pull" exporter: https://github.com/open-telemetry/opentelemetry-python/tree/main/exporter/opentelemetry-exporter-prometheus +.. 
_Prometheus Remote Write integrated backend: https://prometheus.io/docs/operating/integrations/ +.. _types.proto: https://github.com/prometheus/prometheus/blob/master/prompb/types.proto +.. _remote.proto: https://github.com/prometheus/prometheus/blob/master/prompb/remote.proto +.. _push controller: https://github.com/open-telemetry/opentelemetry-python/blob/main/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py#L22 +.. _timeseries: https://prometheus.io/docs/concepts/data_model/ +.. _Docker Compose: https://docs.docker.com/compose/ +.. _Cortex: https://cortexmetrics.io/ +.. _Grafana: https://grafana.com/ +.. _installed: https://docs.docker.com/compose/install/ diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile new file mode 100644 index 0000000000..09ce8cc323 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile @@ -0,0 +1,8 @@ +FROM python:3.7 +WORKDIR /code + +COPY . . +RUN apt-get update -y && apt-get install libsnappy-dev -y +RUN pip install -e . +RUN pip install -r ./examples/requirements.txt +CMD ["python", "./examples/sampleapp.py"] diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md new file mode 100644 index 0000000000..91f7ead578 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md @@ -0,0 +1,42 @@ +# Prometheus Remote Write Exporter Example +This example uses [Docker Compose](https://docs.docker.com/compose/) to set up: + +1. A Python program that creates 5 instruments with 5 unique +aggregators and a randomized load generator +2. An instance of [Cortex](https://cortexmetrics.io/) to receive the metrics +data +3. 
An instance of [Grafana](https://grafana.com/) to visualize the exported +data + +## Requirements +* Have Docker Compose [installed](https://docs.docker.com/compose/install/) + +*Users do not need to install Python as the app will be run in the Docker Container* + +## Instructions +1. Run `docker-compose up -d` in the `examples/` directory + +The `-d` flag causes all services to run in detached mode and frees up your +terminal session. This also causes no logs to show up. Users can attach themselves to the service's logs manually using `docker logs ${CONTAINER_ID} --follow` + +2. Log into the Grafana instance at [http://localhost:3000](http://localhost:3000) + * login credentials are `username: admin` and `password: admin` + * There may be an additional screen on setting a new password. This can be skipped and is optional + +3. Navigate to the `Data Sources` page + * Look for a gear icon on the left sidebar and select `Data Sources` + +4. Add a new Prometheus Data Source + * Use `http://cortex:9009/api/prom` as the URL + * (OPTIONAL) set the scrape interval to `2s` to make updates appear quickly + * click `Save & Test` + +5. Go to `Metrics Explore` to query metrics + * Look for a compass icon on the left sidebar + * click `Metrics` for a dropdown list of all the available metrics + * (OPTIONAL) Adjust time range by clicking the `Last 6 hours` button on the upper right side of the graph + * (OPTIONAL) Set up auto-refresh by selecting an option under the dropdown next to the refresh button on the upper right side of the graph + * Click the refresh button and data should show up on the graph + +6. 
Shutdown the services when finished + * Run `docker-compose down` in the examples directory \ No newline at end of file diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml new file mode 100644 index 0000000000..37bd6473d6 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml @@ -0,0 +1,100 @@ +# This Cortex Config is copied from the Cortex Project documentation +# Source: https://github.com/cortexproject/cortex/blob/master/docs/configuration/single-process-config.yaml + +# Configuration for running Cortex in single-process mode. +# This configuration should not be used in production. +# It is only for getting started and development. + +# Disable the requirement that every request to Cortex has a +# X-Scope-OrgID header. `fake` will be substituted in instead. +auth_enabled: false + +server: + http_listen_port: 9009 + + # Configure the server to allow messages up to 100MB. + grpc_server_max_recv_msg_size: 104857600 + grpc_server_max_send_msg_size: 104857600 + grpc_server_max_concurrent_streams: 1000 + +distributor: + shard_by_all_labels: true + pool: + health_check_ingesters: true + +ingester_client: + grpc_client_config: + # Configure the client to allow messages up to 100MB. + max_recv_msg_size: 104857600 + max_send_msg_size: 104857600 + use_gzip_compression: true + +ingester: + # We want our ingesters to flush chunks at the same time to optimise + # deduplication opportunities. + spread_flushes: true + chunk_age_jitter: 0 + + walconfig: + wal_enabled: true + recover_from_wal: true + wal_dir: /tmp/cortex/wal + + lifecycler: + # The address to advertise for this ingester. Will be autodiscovered by + # looking up address on eth0 or en0; can be specified if this fails. + # address: 127.0.0.1 + + # We want to start immediately and flush on shutdown. 
+ join_after: 0 + min_ready_duration: 0s + final_sleep: 0s + num_tokens: 512 + tokens_file_path: /tmp/cortex/wal/tokens + + # Use an in memory ring store, so we don't need to launch a Consul. + ring: + kvstore: + store: inmemory + replication_factor: 1 + +# Use local storage - BoltDB for the index, and the filesystem +# for the chunks. +schema: + configs: + - from: 2019-07-29 + store: boltdb + object_store: filesystem + schema: v10 + index: + prefix: index_ + period: 1w + +storage: + boltdb: + directory: /tmp/cortex/index + + filesystem: + directory: /tmp/cortex/chunks + + delete_store: + store: boltdb + +purger: + object_store_type: filesystem + +frontend_worker: + # Configure the frontend worker in the querier to match worker count + # to max_concurrent on the queriers. + match_max_concurrent: true + +# Configure the ruler to scan the /tmp/cortex/rules directory for prometheus +# rules: https://prometheus.io/docs/prometheus/latest/configuration/recording_rules/#recording-rules +ruler: + enable_api: true + enable_sharding: false + storage: + type: local + local: + directory: /tmp/cortex/rules + diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/docker-compose.yml b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/docker-compose.yml new file mode 100644 index 0000000000..61e6f4981e --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/docker-compose.yml @@ -0,0 +1,33 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +version: "3.8" + +services: + cortex: + image: quay.io/cortexproject/cortex:v1.5.0 + command: + - -config.file=./config/cortex-config.yml + volumes: + - ./cortex-config.yml:/config/cortex-config.yml:ro + ports: + - 9009:9009 + grafana: + image: grafana/grafana:latest + ports: + - 3000:3000 + sample_app: + build: + context: ../ + dockerfile: ./examples/Dockerfile diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/requirements.txt b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/requirements.txt new file mode 100644 index 0000000000..f049aac258 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/requirements.txt @@ -0,0 +1,7 @@ +psutil +protobuf>=3.13.0 +requests>=2.25.0 +python-snappy>=0.5.4 +opentelemetry-api +opentelemetry-sdk +opentelemetry-proto diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py new file mode 100644 index 0000000000..69f7a068ea --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py @@ -0,0 +1,153 @@ +import logging +import random +import sys +import time +from logging import INFO + +import psutil + +from opentelemetry import metrics +from opentelemetry.exporter.prometheus_remote_write import ( + PrometheusRemoteWriteMetricsExporter, +) +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export.aggregate import ( + HistogramAggregator, + LastValueAggregator, + MinMaxSumCountAggregator, + SumAggregator, +) +from opentelemetry.sdk.metrics.view import View, ViewConfig + +logging.basicConfig(stream=sys.stdout, level=logging.INFO) +logger = logging.getLogger(__name__) + +metrics.set_meter_provider(MeterProvider()) +meter = metrics.get_meter(__name__) +exporter = 
PrometheusRemoteWriteMetricsExporter( + endpoint="http://cortex:9009/api/prom/push", + headers={"X-Scope-Org-ID": "5"}, +) +metrics.get_meter_provider().start_pipeline(meter, exporter, 1) +testing_labels = {"environment": "testing"} + + +# Callback to gather cpu usage +def get_cpu_usage_callback(observer): + for (number, percent) in enumerate(psutil.cpu_percent(percpu=True)): + labels = {"cpu_number": str(number)} + observer.observe(percent, labels) + + +# Callback to gather RAM usage +def get_ram_usage_callback(observer): + ram_percent = psutil.virtual_memory().percent + observer.observe(ram_percent, {}) + + +requests_counter = meter.create_counter( + name="requests", + description="number of requests", + unit="1", + value_type=int, +) + +request_min_max = meter.create_counter( + name="requests_min_max", + description="min max sum count of requests", + unit="1", + value_type=int, +) + +request_last_value = meter.create_counter( + name="requests_last_value", + description="last value number of requests", + unit="1", + value_type=int, +) + +requests_size = meter.create_valuerecorder( + name="requests_size", + description="size of requests", + unit="1", + value_type=int, +) + +requests_size_histogram = meter.create_valuerecorder( + name="requests_size_histogram", + description="histogram of request_size", + unit="1", + value_type=int, +) +requests_active = meter.create_updowncounter( + name="requests_active", + description="number of active requests", + unit="1", + value_type=int, +) + +meter.register_sumobserver( + callback=get_ram_usage_callback, + name="ram_usage", + description="ram usage", + unit="1", + value_type=float, +) + +meter.register_valueobserver( + callback=get_cpu_usage_callback, + name="cpu_percent", + description="per-cpu usage", + unit="1", + value_type=float, +) + + +counter_view1 = View( + requests_counter, + SumAggregator, + label_keys=["environment"], + view_config=ViewConfig.LABEL_KEYS, +) +counter_view2 = View( + request_min_max, + 
MinMaxSumCountAggregator, + label_keys=["os_type"], + view_config=ViewConfig.LABEL_KEYS, +) + +counter_view3 = View( + request_last_value, + LastValueAggregator, + label_keys=["environment"], + view_config=ViewConfig.UNGROUPED, +) +size_view = View( + requests_size_histogram, + HistogramAggregator, + label_keys=["environment"], + aggregator_config={"bounds": [20, 40, 60, 80, 100]}, + view_config=ViewConfig.UNGROUPED, +) +meter.register_view(counter_view1) +meter.register_view(counter_view2) +meter.register_view(counter_view3) +meter.register_view(size_view) + +# Load generator +num = random.randint(0, 1000) +while True: + # counters + requests_counter.add(num % 131 + 200, testing_labels) + request_min_max.add(num % 181 + 200, testing_labels) + request_last_value.add(num % 101 + 200, testing_labels) + + # updown counter + requests_active.add(num % 7231 + 200, testing_labels) + + # value observers + requests_size.record(num % 6101 + 100, testing_labels) + requests_size_histogram.record(num % 113, testing_labels) + logger.log(level=INFO, msg="completed metrics collection cycle") + time.sleep(1) + num += 9791 diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh new file mode 100755 index 0000000000..97ffa8084c --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +# Used libprotoc 3.21.1 +SRC_DIR=opentelemetry/exporter/prometheus_remote_write/gen/ +DST_DIR=../src/opentelemetry/exporter/prometheus_remote_write/gen/ +protoc -I . 
--python_out=../src ${SRC_DIR}/gogoproto/gogo.proto ${SRC_DIR}/remote.proto ${SRC_DIR}/types.proto diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto new file mode 100644 index 0000000000..b80c85653f --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto @@ -0,0 +1,144 @@ +// Protocol Buffers for Go with Gadgets +// +// Copyright (c) 2013, The GoGo Authors. All rights reserved. +// http://github.com/gogo/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; +package gogoproto; + +import "google/protobuf/descriptor.proto"; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "GoGoProtos"; +option go_package = "github.com/gogo/protobuf/gogoproto"; + +extend google.protobuf.EnumOptions { + optional bool goproto_enum_prefix = 62001; + optional bool goproto_enum_stringer = 62021; + optional bool enum_stringer = 62022; + optional string enum_customname = 62023; + optional bool enumdecl = 62024; +} + +extend google.protobuf.EnumValueOptions { + optional string enumvalue_customname = 66001; +} + +extend google.protobuf.FileOptions { + optional bool goproto_getters_all = 63001; + optional bool goproto_enum_prefix_all = 63002; + optional bool goproto_stringer_all = 63003; + optional bool verbose_equal_all = 63004; + optional bool face_all = 63005; + optional bool gostring_all = 63006; + optional bool populate_all = 63007; + optional bool stringer_all = 63008; + optional bool onlyone_all = 63009; + + optional bool equal_all = 63013; + optional bool description_all = 63014; + optional bool testgen_all = 63015; + optional bool benchgen_all = 63016; + optional bool marshaler_all = 63017; + optional bool unmarshaler_all = 63018; + optional bool stable_marshaler_all = 63019; + + optional bool sizer_all = 63020; + + optional bool goproto_enum_stringer_all = 63021; + optional bool enum_stringer_all = 63022; + + optional bool unsafe_marshaler_all = 63023; + 
optional bool unsafe_unmarshaler_all = 63024; + + optional bool goproto_extensions_map_all = 63025; + optional bool goproto_unrecognized_all = 63026; + optional bool gogoproto_import = 63027; + optional bool protosizer_all = 63028; + optional bool compare_all = 63029; + optional bool typedecl_all = 63030; + optional bool enumdecl_all = 63031; + + optional bool goproto_registration = 63032; + optional bool messagename_all = 63033; + + optional bool goproto_sizecache_all = 63034; + optional bool goproto_unkeyed_all = 63035; +} + +extend google.protobuf.MessageOptions { + optional bool goproto_getters = 64001; + optional bool goproto_stringer = 64003; + optional bool verbose_equal = 64004; + optional bool face = 64005; + optional bool gostring = 64006; + optional bool populate = 64007; + optional bool stringer = 67008; + optional bool onlyone = 64009; + + optional bool equal = 64013; + optional bool description = 64014; + optional bool testgen = 64015; + optional bool benchgen = 64016; + optional bool marshaler = 64017; + optional bool unmarshaler = 64018; + optional bool stable_marshaler = 64019; + + optional bool sizer = 64020; + + optional bool unsafe_marshaler = 64023; + optional bool unsafe_unmarshaler = 64024; + + optional bool goproto_extensions_map = 64025; + optional bool goproto_unrecognized = 64026; + + optional bool protosizer = 64028; + optional bool compare = 64029; + + optional bool typedecl = 64030; + + optional bool messagename = 64033; + + optional bool goproto_sizecache = 64034; + optional bool goproto_unkeyed = 64035; +} + +extend google.protobuf.FieldOptions { + optional bool nullable = 65001; + optional bool embed = 65002; + optional string customtype = 65003; + optional string customname = 65004; + optional string jsontag = 65005; + optional string moretags = 65006; + optional string casttype = 65007; + optional string castkey = 65008; + optional string castvalue = 65009; + + optional bool stdtime = 65010; + optional bool stdduration = 65011; + 
optional bool wktpointer = 65012; + +} diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/remote.proto b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/remote.proto new file mode 100644 index 0000000000..51bce1ed55 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/remote.proto @@ -0,0 +1,86 @@ +// Copyright 2016 Prometheus Team +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; +package prometheus; + +option go_package = "prompb"; + +import "opentelemetry/exporter/prometheus_remote_write/gen/types.proto"; +import "opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"; + +message WriteRequest { + repeated prometheus.TimeSeries timeseries = 1 [(gogoproto.nullable) = false]; + // Cortex uses this field to determine the source of the write request. + // We reserve it to avoid any compatibility issues. + reserved 2; + repeated prometheus.MetricMetadata metadata = 3 [(gogoproto.nullable) = false]; +} + +// ReadRequest represents a remote read request. +message ReadRequest { + repeated Query queries = 1; + + enum ResponseType { + // Server will return a single ReadResponse message with matched series that includes list of raw samples. + // It's recommended to use streamed response types instead. 
+ // + // Response headers: + // Content-Type: "application/x-protobuf" + // Content-Encoding: "snappy" + SAMPLES = 0; + // Server will stream a delimited ChunkedReadResponse message that contains XOR encoded chunks for a single series. + // Each message is following varint size and fixed size bigendian uint32 for CRC32 Castagnoli checksum. + // + // Response headers: + // Content-Type: "application/x-streamed-protobuf; proto=prometheus.ChunkedReadResponse" + // Content-Encoding: "" + STREAMED_XOR_CHUNKS = 1; + } + + // accepted_response_types allows negotiating the content type of the response. + // + // Response types are taken from the list in the FIFO order. If no response type in `accepted_response_types` is + // implemented by server, error is returned. + // For request that do not contain `accepted_response_types` field the SAMPLES response type will be used. + repeated ResponseType accepted_response_types = 2; +} + +// ReadResponse is a response when response_type equals SAMPLES. +message ReadResponse { + // In same order as the request's queries. + repeated QueryResult results = 1; +} + +message Query { + int64 start_timestamp_ms = 1; + int64 end_timestamp_ms = 2; + repeated prometheus.LabelMatcher matchers = 3; + prometheus.ReadHints hints = 4; +} + +message QueryResult { + // Samples within a time series must be ordered by time. + repeated prometheus.TimeSeries timeseries = 1; +} + +// ChunkedReadResponse is a response when response_type equals STREAMED_XOR_CHUNKS. +// We strictly stream full series after series, optionally split by time. This means that a single frame can contain +// partition of the single series, but once a new series is started to be streamed it means that no more chunks will +// be sent for previous one. Series are returned sorted in the same way TSDB block are internally. 
+message ChunkedReadResponse { + repeated prometheus.ChunkedSeries chunked_series = 1; + + // query_index represents an index of the query from ReadRequest.queries these chunks relates to. + int64 query_index = 2; +} diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/types.proto b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/types.proto new file mode 100644 index 0000000000..176f807a71 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/types.proto @@ -0,0 +1,119 @@ +// Copyright 2017 Prometheus Team +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; +package prometheus; + +option go_package = "prompb"; + +import "opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"; + +message MetricMetadata { + enum MetricType { + UNKNOWN = 0; + COUNTER = 1; + GAUGE = 2; + HISTOGRAM = 3; + GAUGEHISTOGRAM = 4; + SUMMARY = 5; + INFO = 6; + STATESET = 7; + } + + // Represents the metric type, these match the set from Prometheus. + // Refer to model/textparse/interface.go for details. 
+ MetricType type = 1; + string metric_family_name = 2; + string help = 4; + string unit = 5; +} + +message Sample { + double value = 1; + // timestamp is in ms format, see model/timestamp/timestamp.go for + // conversion from time.Time to Prometheus timestamp. + int64 timestamp = 2; +} + +message Exemplar { + // Optional, can be empty. + repeated Label labels = 1 [(gogoproto.nullable) = false]; + double value = 2; + // timestamp is in ms format, see model/timestamp/timestamp.go for + // conversion from time.Time to Prometheus timestamp. + int64 timestamp = 3; +} + +// TimeSeries represents samples and labels for a single time series. +message TimeSeries { + // For a timeseries to be valid, and for the samples and exemplars + // to be ingested by the remote system properly, the labels field is required. + repeated Label labels = 1 [(gogoproto.nullable) = false]; + repeated Sample samples = 2 [(gogoproto.nullable) = false]; + repeated Exemplar exemplars = 3 [(gogoproto.nullable) = false]; +} + +message Label { + string name = 1; + string value = 2; +} + +message Labels { + repeated Label labels = 1 [(gogoproto.nullable) = false]; +} + +// Matcher specifies a rule, which can match or set of labels or not. +message LabelMatcher { + enum Type { + EQ = 0; + NEQ = 1; + RE = 2; + NRE = 3; + } + Type type = 1; + string name = 2; + string value = 3; +} + +message ReadHints { + int64 step_ms = 1; // Query step size in milliseconds. + string func = 2; // String representation of surrounding function or aggregation. + int64 start_ms = 3; // Start time in milliseconds. + int64 end_ms = 4; // End time in milliseconds. + repeated string grouping = 5; // List of label names used in aggregation. + bool by = 6; // Indicate whether it is without or by. + int64 range_ms = 7; // Range vector selector range in milliseconds. +} + +// Chunk represents a TSDB chunk. +// Time range [min, max] is inclusive. 
+message Chunk { + int64 min_time_ms = 1; + int64 max_time_ms = 2; + + // We require this to match chunkenc.Encoding. + enum Encoding { + UNKNOWN = 0; + XOR = 1; + } + Encoding type = 3; + bytes data = 4; +} + +// ChunkedSeries represents single, encoded time series. +message ChunkedSeries { + // Labels should be sorted. + repeated Label labels = 1 [(gogoproto.nullable) = false]; + // Chunks will be in start time order and may overlap. + repeated Chunk chunks = 2 [(gogoproto.nullable) = false]; +} diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg new file mode 100644 index 0000000000..88b8cc1ce6 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg @@ -0,0 +1,51 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +[metadata] +name = opentelemetry-exporter-prometheus-remote-write +description = Prometheus Remote Write Metrics Exporter for OpenTelemetry +long_description = file: README.rst +long_description_content_type = text/x-rst +author = OpenTelemetry Authors +author_email = cncf-opentelemetry-contributors@lists.cncf.io +url = https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/exporter/opentelemetry-exporter-prometheus-remote-write +platforms = any +license = Apache-2.0 +classifiers = + Development Status :: 4 - Beta + Intended Audience :: Developers + License :: OSI Approved :: Apache Software License + Programming Language :: Python + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + +[options] +python_requires = >=3.7 +package_dir= + =src +packages=find_namespace: +install_requires = + # NOTE: "snappy" on PyPI is an unrelated package; the snappy module comes from python-snappy below + #protobuf >= 3.13.0 + protobuf == 3.20.0 + requests == 2.25.0 + opentelemetry-api == 1.12.0rc2 + opentelemetry-sdk == 1.12.0rc2 + python-snappy >= 0.5.4 + +[options.packages.find] +where = src diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.py b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.py new file mode 100644 index 0000000000..b2d9a5a47c --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.py @@ -0,0 +1,32 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import setuptools + +BASE_DIR = os.path.dirname(__file__) +VERSION_FILENAME = os.path.join( + BASE_DIR, + "src", + "opentelemetry", + "exporter", + "prometheus_remote_write", + "version.py", +) +PACKAGE_INFO = {} +with open(VERSION_FILENAME) as f: + exec(f.read(), PACKAGE_INFO) + +setuptools.setup(version=PACKAGE_INFO["__version__"]) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py new file mode 100644 index 0000000000..0e808283c3 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -0,0 +1,384 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import re +from typing import Dict, Sequence + +import requests +import snappy + +from opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2 import ( + WriteRequest, +) +from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( + Label, + Sample, + TimeSeries, +) +from opentelemetry.sdk.metrics.export import ( + MetricExporter, + MetricExportResult, + AggregationTemporality, + Gauge, + Sum, + Histogram, +) +#from opentelemetry.sdk.metrics.export.aggregate import ( +# HistogramAggregator, +# LastValueAggregator, +# MinMaxSumCountAggregator, +# SumAggregator, +# ValueObserverAggregator, +#) + +logger = logging.getLogger(__name__) + + +class PrometheusRemoteWriteMetricsExporter(MetricExporter): + """ + Prometheus remote write metric exporter for OpenTelemetry. + + Args: + endpoint: url where data will be sent (Required) + basic_auth: username and password for authentication (Optional) + headers: additional headers for remote write request (Optional) + timeout: timeout for remote write requests in seconds, defaults to 30 (Optional) + proxies: dict mapping request proxy protocols to proxy urls (Optional) + tls_config: configuration for remote write TLS settings (Optional) + """ + + def __init__( + self, + endpoint: str, + basic_auth: Dict = None, + headers: Dict = None, + timeout: int = 30, + tls_config: Dict = None, + proxies: Dict = None, + ): + self.endpoint = endpoint + self.basic_auth = basic_auth + self.headers = headers + self.timeout = timeout + self.tls_config = tls_config + self.proxies = proxies + + self.converter_map = { + Sum: self._convert_from_sum, + Histogram: self._convert_from_histogram, + Gauge: self._convert_from_gauge, + } + + + @property + def endpoint(self): + return self._endpoint + + @endpoint.setter + def endpoint(self, endpoint: str): + if endpoint == "": + raise ValueError("endpoint required") + self._endpoint = endpoint + + @property + def basic_auth(self): + return self._basic_auth + + 
@basic_auth.setter + def basic_auth(self, basic_auth: Dict): + if basic_auth: + if "username" not in basic_auth: + raise ValueError("username required in basic_auth") + if "password_file" in basic_auth: + if "password" in basic_auth: + raise ValueError( + "basic_auth cannot contain password and password_file" + ) + with open(basic_auth["password_file"]) as file: + basic_auth["password"] = file.readline().strip() + elif "password" not in basic_auth: + raise ValueError("password required in basic_auth") + self._basic_auth = basic_auth + + @property + def timeout(self): + return self._timeout + + @timeout.setter + def timeout(self, timeout: int): + if timeout <= 0: + raise ValueError("timeout must be greater than 0") + self._timeout = timeout + + @property + def tls_config(self): + return self._tls_config + + @tls_config.setter + def tls_config(self, tls_config: Dict): + if tls_config: + new_config = {} + if "ca_file" in tls_config: + new_config["ca_file"] = tls_config["ca_file"] + if "cert_file" in tls_config and "key_file" in tls_config: + new_config["cert_file"] = tls_config["cert_file"] + new_config["key_file"] = tls_config["key_file"] + elif "cert_file" in tls_config or "key_file" in tls_config: + raise ValueError( + "tls_config requires both cert_file and key_file" + ) + if "insecure_skip_verify" in tls_config: + new_config["insecure_skip_verify"] = tls_config[ + "insecure_skip_verify" + ] + self._tls_config = tls_config + + @property + def proxies(self): + return self._proxies + + @proxies.setter + def proxies(self, proxies: Dict): + self._proxies = proxies + + @property + def headers(self): + return self._headers + + @headers.setter + def headers(self, headers: Dict): + self._headers = headers + + def export( + self, export_records: Sequence[ExportRecord] + ) -> MetricsExportResult: + if not export_records: + return MetricsExportResult.SUCCESS + timeseries = self._convert_to_timeseries(export_records) + if not timeseries: + logger.error( + "All records contain 
unsupported aggregators, export aborted" + ) + return MetricsExportResult.FAILURE + message = self._build_message(timeseries) + headers = self._build_headers() + return self._send_message(message, headers) + + def shutdown(self) -> None: + pass + + def _translate_data(self, data: MetricsData): + rw_timeseries = [] + + def _convert_to_timeseries( + self, export_records: Sequence[ExportRecord] + ) -> Sequence[TimeSeries]: + timeseries = [] + for export_record in export_records: + aggregator_type = type(export_record.aggregator) + converter = self.converter_map.get(aggregator_type) + if converter: + timeseries.extend(converter(export_record)) + else: + logger.warning( + "%s aggregator is not supported, record dropped", + aggregator_type, + ) + return timeseries + + def _convert_from_sum( + self, sum_record: ExportRecord + ) -> Sequence[TimeSeries]: + return [ + self._create_timeseries( + sum_record, + sum_record.instrument.name + "_sum", + sum_record.aggregator.checkpoint, + ) + ] + + def _convert_from_gauge(self, gauge_record): + raise NotImplementedError("Do this") + def _convert_from_min_max_sum_count( + self, min_max_sum_count_record: ExportRecord + ) -> Sequence[TimeSeries]: + timeseries = [] + for agg_type in ["min", "max", "sum", "count"]: + name = min_max_sum_count_record.instrument.name + "_" + agg_type + value = getattr( + min_max_sum_count_record.aggregator.checkpoint, agg_type + ) + timeseries.append( + self._create_timeseries(min_max_sum_count_record, name, value) + ) + return timeseries + + def _convert_from_histogram( + self, histogram_record: ExportRecord + ) -> Sequence[TimeSeries]: + timeseries = [] + for bound in histogram_record.aggregator.checkpoint.keys(): + bound_str = "+Inf" if bound == float("inf") else str(bound) + value = histogram_record.aggregator.checkpoint[bound] + timeseries.append( + self._create_timeseries( + histogram_record, + histogram_record.instrument.name + "_histogram", + value, + extra_label=("le", bound_str), + ) + ) + return 
timeseries + + def _convert_from_last_value( + self, last_value_record: ExportRecord + ) -> Sequence[TimeSeries]: + return [ + self._create_timeseries( + last_value_record, + last_value_record.instrument.name + "_last", + last_value_record.aggregator.checkpoint, + ) + ] + + def _convert_from_value_observer( + self, value_observer_record: ExportRecord + ) -> Sequence[TimeSeries]: + timeseries = [] + for agg_type in ["min", "max", "sum", "count", "last"]: + timeseries.append( + self._create_timeseries( + value_observer_record, + value_observer_record.instrument.name + "_" + agg_type, + getattr( + value_observer_record.aggregator.checkpoint, agg_type + ), + ) + ) + return timeseries + + # TODO: Implement convert from quantile once supported by SDK for Prometheus Summaries + def _convert_from_quantile( + self, summary_record: ExportRecord + ) -> Sequence[TimeSeries]: + raise NotImplementedError() + + # pylint: disable=no-member,no-self-use + def _create_timeseries( + self, + export_record: ExportRecord, + name: str, + value: float, + extra_label: (str, str) = None, + ) -> TimeSeries: + timeseries = TimeSeries() + seen = set() + + def add_label(label_name: str, label_value: str): + # Label name must contain only alphanumeric characters and underscores + label_name = re.sub("[^\\w_]", "_", label_name) + if label_name not in seen: + label = Label() + label.name = label_name + label.value = label_value + timeseries.labels.append(label) + seen.add(label_name) + else: + logger.warning( + "Duplicate label with name %s and value %s", + label_name, + label_value, + ) + + # The __name__ label is required by PromQL as its value appears as the metric_name + add_label("__name__", name) + if extra_label: + add_label(extra_label[0], extra_label[1]) + if export_record.resource.attributes: + for ( + label_name, + label_value, + ) in export_record.resource.attributes.items(): + add_label(label_name, str(label_value)) + if export_record.labels: + for [label_name, label_value] in 
export_record.labels: + add_label(label_name, label_value) + + sample = Sample() + sample.timestamp = int( + export_record.aggregator.last_update_timestamp / 1000000 + ) + sample.value = value + timeseries.samples.append(sample) + return timeseries + + # pylint: disable=no-member,no-self-use + def _build_message(self, timeseries: Sequence[TimeSeries]) -> bytes: + write_request = WriteRequest() + write_request.timeseries.extend(timeseries) + serialized_message = write_request.SerializeToString() + return snappy.compress(serialized_message) + + def _build_headers(self) -> Dict: + headers = { + "Content-Encoding": "snappy", + "Content-Type": "application/x-protobuf", + "X-Prometheus-Remote-Write-Version": "0.1.0", + } + if self.headers: + for header_name, header_value in self.headers.items(): + headers[header_name] = header_value + return headers + + def _send_message( + self, message: bytes, headers: Dict + ) -> MetricsExportResult: + auth = None + if self.basic_auth: + auth = (self.basic_auth["username"], self.basic_auth["password"]) + + cert = None + verify = True + if self.tls_config: + if "ca_file" in self.tls_config: + verify = self.tls_config["ca_file"] + elif "insecure_skip_verify" in self.tls_config: + verify = self.tls_config["insecure_skip_verify"] + + if ( + "cert_file" in self.tls_config + and "key_file" in self.tls_config + ): + cert = ( + self.tls_config["cert_file"], + self.tls_config["key_file"], + ) + try: + response = requests.post( + self.endpoint, + data=message, + headers=headers, + auth=auth, + timeout=self.timeout, + proxies=self.proxies, + cert=cert, + verify=verify, + ) + if not response.ok: + response.raise_for_status() + except requests.exceptions.RequestException as err: + logger.error("Export POST request failed with reason: %s", err) + return MetricsExportResult.FAILURE + return MetricsExportResult.SUCCESS diff --git 
a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py new file mode 100644 index 0000000000..35e48b214a --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\x12\tgogoproto\x1a google/protobuf/descriptor.proto:;\n\x13goproto_enum_prefix\x12\x1c.google.protobuf.EnumOptions\x18\xb1\xe4\x03 \x01(\x08:=\n\x15goproto_enum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc5\xe4\x03 \x01(\x08:5\n\renum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc6\xe4\x03 \x01(\x08:7\n\x0f\x65num_customname\x12\x1c.google.protobuf.EnumOptions\x18\xc7\xe4\x03 \x01(\t:0\n\x08\x65numdecl\x12\x1c.google.protobuf.EnumOptions\x18\xc8\xe4\x03 \x01(\x08:A\n\x14\x65numvalue_customname\x12!.google.protobuf.EnumValueOptions\x18\xd1\x83\x04 \x01(\t:;\n\x13goproto_getters_all\x12\x1c.google.protobuf.FileOptions\x18\x99\xec\x03 \x01(\x08:?\n\x17goproto_enum_prefix_all\x12\x1c.google.protobuf.FileOptions\x18\x9a\xec\x03 
\x01(\x08:<\n\x14goproto_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\x9b\xec\x03 \x01(\x08:9\n\x11verbose_equal_all\x12\x1c.google.protobuf.FileOptions\x18\x9c\xec\x03 \x01(\x08:0\n\x08\x66\x61\x63\x65_all\x12\x1c.google.protobuf.FileOptions\x18\x9d\xec\x03 \x01(\x08:4\n\x0cgostring_all\x12\x1c.google.protobuf.FileOptions\x18\x9e\xec\x03 \x01(\x08:4\n\x0cpopulate_all\x12\x1c.google.protobuf.FileOptions\x18\x9f\xec\x03 \x01(\x08:4\n\x0cstringer_all\x12\x1c.google.protobuf.FileOptions\x18\xa0\xec\x03 \x01(\x08:3\n\x0bonlyone_all\x12\x1c.google.protobuf.FileOptions\x18\xa1\xec\x03 \x01(\x08:1\n\tequal_all\x12\x1c.google.protobuf.FileOptions\x18\xa5\xec\x03 \x01(\x08:7\n\x0f\x64\x65scription_all\x12\x1c.google.protobuf.FileOptions\x18\xa6\xec\x03 \x01(\x08:3\n\x0btestgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa7\xec\x03 \x01(\x08:4\n\x0c\x62\x65nchgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa8\xec\x03 \x01(\x08:5\n\rmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xa9\xec\x03 \x01(\x08:7\n\x0funmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaa\xec\x03 \x01(\x08:<\n\x14stable_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xab\xec\x03 \x01(\x08:1\n\tsizer_all\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\x03 \x01(\x08:A\n\x19goproto_enum_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xad\xec\x03 \x01(\x08:9\n\x11\x65num_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xae\xec\x03 \x01(\x08:<\n\x14unsafe_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaf\xec\x03 \x01(\x08:>\n\x16unsafe_unmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xb0\xec\x03 \x01(\x08:B\n\x1agoproto_extensions_map_all\x12\x1c.google.protobuf.FileOptions\x18\xb1\xec\x03 \x01(\x08:@\n\x18goproto_unrecognized_all\x12\x1c.google.protobuf.FileOptions\x18\xb2\xec\x03 \x01(\x08:8\n\x10gogoproto_import\x12\x1c.google.protobuf.FileOptions\x18\xb3\xec\x03 \x01(\x08:6\n\x0eprotosizer_all\x12\x1c.google.protobuf.FileOptions\x18\xb4\xec\x03 
\x01(\x08:3\n\x0b\x63ompare_all\x12\x1c.google.protobuf.FileOptions\x18\xb5\xec\x03 \x01(\x08:4\n\x0ctypedecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb6\xec\x03 \x01(\x08:4\n\x0c\x65numdecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb7\xec\x03 \x01(\x08:<\n\x14goproto_registration\x12\x1c.google.protobuf.FileOptions\x18\xb8\xec\x03 \x01(\x08:7\n\x0fmessagename_all\x12\x1c.google.protobuf.FileOptions\x18\xb9\xec\x03 \x01(\x08:=\n\x15goproto_sizecache_all\x12\x1c.google.protobuf.FileOptions\x18\xba\xec\x03 \x01(\x08:;\n\x13goproto_unkeyed_all\x12\x1c.google.protobuf.FileOptions\x18\xbb\xec\x03 \x01(\x08::\n\x0fgoproto_getters\x12\x1f.google.protobuf.MessageOptions\x18\x81\xf4\x03 \x01(\x08:;\n\x10goproto_stringer\x12\x1f.google.protobuf.MessageOptions\x18\x83\xf4\x03 \x01(\x08:8\n\rverbose_equal\x12\x1f.google.protobuf.MessageOptions\x18\x84\xf4\x03 \x01(\x08:/\n\x04\x66\x61\x63\x65\x12\x1f.google.protobuf.MessageOptions\x18\x85\xf4\x03 \x01(\x08:3\n\x08gostring\x12\x1f.google.protobuf.MessageOptions\x18\x86\xf4\x03 \x01(\x08:3\n\x08populate\x12\x1f.google.protobuf.MessageOptions\x18\x87\xf4\x03 \x01(\x08:3\n\x08stringer\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x8b\x04 \x01(\x08:2\n\x07onlyone\x12\x1f.google.protobuf.MessageOptions\x18\x89\xf4\x03 \x01(\x08:0\n\x05\x65qual\x12\x1f.google.protobuf.MessageOptions\x18\x8d\xf4\x03 \x01(\x08:6\n\x0b\x64\x65scription\x12\x1f.google.protobuf.MessageOptions\x18\x8e\xf4\x03 \x01(\x08:2\n\x07testgen\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xf4\x03 \x01(\x08:3\n\x08\x62\x65nchgen\x12\x1f.google.protobuf.MessageOptions\x18\x90\xf4\x03 \x01(\x08:4\n\tmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x91\xf4\x03 \x01(\x08:6\n\x0bunmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x92\xf4\x03 \x01(\x08:;\n\x10stable_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x93\xf4\x03 \x01(\x08:0\n\x05sizer\x12\x1f.google.protobuf.MessageOptions\x18\x94\xf4\x03 
\x01(\x08:;\n\x10unsafe_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x97\xf4\x03 \x01(\x08:=\n\x12unsafe_unmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x98\xf4\x03 \x01(\x08:A\n\x16goproto_extensions_map\x12\x1f.google.protobuf.MessageOptions\x18\x99\xf4\x03 \x01(\x08:?\n\x14goproto_unrecognized\x12\x1f.google.protobuf.MessageOptions\x18\x9a\xf4\x03 \x01(\x08:5\n\nprotosizer\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xf4\x03 \x01(\x08:2\n\x07\x63ompare\x12\x1f.google.protobuf.MessageOptions\x18\x9d\xf4\x03 \x01(\x08:3\n\x08typedecl\x12\x1f.google.protobuf.MessageOptions\x18\x9e\xf4\x03 \x01(\x08:6\n\x0bmessagename\x12\x1f.google.protobuf.MessageOptions\x18\xa1\xf4\x03 \x01(\x08:<\n\x11goproto_sizecache\x12\x1f.google.protobuf.MessageOptions\x18\xa2\xf4\x03 \x01(\x08::\n\x0fgoproto_unkeyed\x12\x1f.google.protobuf.MessageOptions\x18\xa3\xf4\x03 \x01(\x08:1\n\x08nullable\x12\x1d.google.protobuf.FieldOptions\x18\xe9\xfb\x03 \x01(\x08:.\n\x05\x65mbed\x12\x1d.google.protobuf.FieldOptions\x18\xea\xfb\x03 \x01(\x08:3\n\ncustomtype\x12\x1d.google.protobuf.FieldOptions\x18\xeb\xfb\x03 \x01(\t:3\n\ncustomname\x12\x1d.google.protobuf.FieldOptions\x18\xec\xfb\x03 \x01(\t:0\n\x07jsontag\x12\x1d.google.protobuf.FieldOptions\x18\xed\xfb\x03 \x01(\t:1\n\x08moretags\x12\x1d.google.protobuf.FieldOptions\x18\xee\xfb\x03 \x01(\t:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xef\xfb\x03 \x01(\t:0\n\x07\x63\x61stkey\x12\x1d.google.protobuf.FieldOptions\x18\xf0\xfb\x03 \x01(\t:2\n\tcastvalue\x12\x1d.google.protobuf.FieldOptions\x18\xf1\xfb\x03 \x01(\t:0\n\x07stdtime\x12\x1d.google.protobuf.FieldOptions\x18\xf2\xfb\x03 \x01(\x08:4\n\x0bstdduration\x12\x1d.google.protobuf.FieldOptions\x18\xf3\xfb\x03 \x01(\x08:3\n\nwktpointer\x12\x1d.google.protobuf.FieldOptions\x18\xf4\xfb\x03 \x01(\x08\x42\x45\n\x13\x63om.google.protobufB\nGoGoProtosZ\"github.com/gogo/protobuf/gogoproto') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.gogoproto.gogo_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_prefix) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_stringer) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_stringer) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_customname) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enumdecl) + google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(enumvalue_customname) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_getters_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_prefix_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(verbose_equal_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(face_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gostring_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(populate_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(onlyone_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(equal_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(description_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(testgen_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(benchgen_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(marshaler_all) + 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unmarshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stable_marshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(sizer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enum_stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_marshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_unmarshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_extensions_map_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unrecognized_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gogoproto_import) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(protosizer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(compare_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(typedecl_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enumdecl_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_registration) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(messagename_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_sizecache_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unkeyed_all) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_getters) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_stringer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(verbose_equal) + 
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(face) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(gostring) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(populate) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stringer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(onlyone) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(equal) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(description) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(testgen) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(benchgen) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(marshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unmarshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stable_marshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sizer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_marshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_unmarshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_extensions_map) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unrecognized) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(protosizer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(compare) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(typedecl) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(messagename) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_sizecache) + 
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unkeyed) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(nullable) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(embed) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customtype) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customname) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(jsontag) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(moretags) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(casttype) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castkey) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castvalue) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdtime) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdduration) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(wktpointer) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nGoGoProtosZ\"github.com/gogo/protobuf/gogoproto' +# @@protoc_insertion_point(module_scope) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py new file mode 100644 index 0000000000..a274dbf204 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/exporter/prometheus_remote_write/gen/remote.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.exporter.prometheus_remote_write.gen import types_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_types__pb2 +from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n?opentelemetry/exporter/prometheus_remote_write/gen/remote.proto\x12\nprometheus\x1a>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\"z\n\x0cWriteRequest\x12\x30\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeriesB\x04\xc8\xde\x1f\x00\x12\x32\n\x08metadata\x18\x03 \x03(\x0b\x32\x1a.prometheus.MetricMetadataB\x04\xc8\xde\x1f\x00J\x04\x08\x02\x10\x03\"\xae\x01\n\x0bReadRequest\x12\"\n\x07queries\x18\x01 \x03(\x0b\x32\x11.prometheus.Query\x12\x45\n\x17\x61\x63\x63\x65pted_response_types\x18\x02 \x03(\x0e\x32$.prometheus.ReadRequest.ResponseType\"4\n\x0cResponseType\x12\x0b\n\x07SAMPLES\x10\x00\x12\x17\n\x13STREAMED_XOR_CHUNKS\x10\x01\"8\n\x0cReadResponse\x12(\n\x07results\x18\x01 \x03(\x0b\x32\x17.prometheus.QueryResult\"\x8f\x01\n\x05Query\x12\x1a\n\x12start_timestamp_ms\x18\x01 \x01(\x03\x12\x18\n\x10\x65nd_timestamp_ms\x18\x02 \x01(\x03\x12*\n\x08matchers\x18\x03 \x03(\x0b\x32\x18.prometheus.LabelMatcher\x12$\n\x05hints\x18\x04 \x01(\x0b\x32\x15.prometheus.ReadHints\"9\n\x0bQueryResult\x12*\n\ntimeseries\x18\x01 
\x03(\x0b\x32\x16.prometheus.TimeSeries\"]\n\x13\x43hunkedReadResponse\x12\x31\n\x0e\x63hunked_series\x18\x01 \x03(\x0b\x32\x19.prometheus.ChunkedSeries\x12\x13\n\x0bquery_index\x18\x02 \x01(\x03\x42\x08Z\x06prompbb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'Z\006prompb' + _WRITEREQUEST.fields_by_name['timeseries']._options = None + _WRITEREQUEST.fields_by_name['timeseries']._serialized_options = b'\310\336\037\000' + _WRITEREQUEST.fields_by_name['metadata']._options = None + _WRITEREQUEST.fields_by_name['metadata']._serialized_options = b'\310\336\037\000' + _WRITEREQUEST._serialized_start=216 + _WRITEREQUEST._serialized_end=338 + _READREQUEST._serialized_start=341 + _READREQUEST._serialized_end=515 + _READREQUEST_RESPONSETYPE._serialized_start=463 + _READREQUEST_RESPONSETYPE._serialized_end=515 + _READRESPONSE._serialized_start=517 + _READRESPONSE._serialized_end=573 + _QUERY._serialized_start=576 + _QUERY._serialized_end=719 + _QUERYRESULT._serialized_start=721 + _QUERYRESULT._serialized_end=778 + _CHUNKEDREADRESPONSE._serialized_start=780 + _CHUNKEDREADRESPONSE._serialized_end=873 +# @@protoc_insertion_point(module_scope) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py new file mode 100644 index 0000000000..d519e03423 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/exporter/prometheus_remote_write/gen/types.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x12\nprometheus\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\"\xf8\x01\n\x0eMetricMetadata\x12\x33\n\x04type\x18\x01 \x01(\x0e\x32%.prometheus.MetricMetadata.MetricType\x12\x1a\n\x12metric_family_name\x18\x02 \x01(\t\x12\x0c\n\x04help\x18\x04 \x01(\t\x12\x0c\n\x04unit\x18\x05 \x01(\t\"y\n\nMetricType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\t\n\x05GAUGE\x10\x02\x12\r\n\tHISTOGRAM\x10\x03\x12\x12\n\x0eGAUGEHISTOGRAM\x10\x04\x12\x0b\n\x07SUMMARY\x10\x05\x12\x08\n\x04INFO\x10\x06\x12\x0c\n\x08STATESET\x10\x07\"*\n\x06Sample\x12\r\n\x05value\x18\x01 \x01(\x01\x12\x11\n\ttimestamp\x18\x02 \x01(\x03\"U\n\x08\x45xemplar\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\r\n\x05value\x18\x02 \x01(\x01\x12\x11\n\ttimestamp\x18\x03 \x01(\x03\"\x8f\x01\n\nTimeSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12)\n\x07samples\x18\x02 \x03(\x0b\x32\x12.prometheus.SampleB\x04\xc8\xde\x1f\x00\x12-\n\texemplars\x18\x03 \x03(\x0b\x32\x14.prometheus.ExemplarB\x04\xc8\xde\x1f\x00\"$\n\x05Label\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"1\n\x06Labels\x12\'\n\x06labels\x18\x01 
\x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\"\x82\x01\n\x0cLabelMatcher\x12+\n\x04type\x18\x01 \x01(\x0e\x32\x1d.prometheus.LabelMatcher.Type\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\"(\n\x04Type\x12\x06\n\x02\x45Q\x10\x00\x12\x07\n\x03NEQ\x10\x01\x12\x06\n\x02RE\x10\x02\x12\x07\n\x03NRE\x10\x03\"|\n\tReadHints\x12\x0f\n\x07step_ms\x18\x01 \x01(\x03\x12\x0c\n\x04\x66unc\x18\x02 \x01(\t\x12\x10\n\x08start_ms\x18\x03 \x01(\x03\x12\x0e\n\x06\x65nd_ms\x18\x04 \x01(\x03\x12\x10\n\x08grouping\x18\x05 \x03(\t\x12\n\n\x02\x62y\x18\x06 \x01(\x08\x12\x10\n\x08range_ms\x18\x07 \x01(\x03\"\x8b\x01\n\x05\x43hunk\x12\x13\n\x0bmin_time_ms\x18\x01 \x01(\x03\x12\x13\n\x0bmax_time_ms\x18\x02 \x01(\x03\x12(\n\x04type\x18\x03 \x01(\x0e\x32\x1a.prometheus.Chunk.Encoding\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\" \n\x08\x45ncoding\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03XOR\x10\x01\"a\n\rChunkedSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\'\n\x06\x63hunks\x18\x02 \x03(\x0b\x32\x11.prometheus.ChunkB\x04\xc8\xde\x1f\x00\x42\x08Z\x06prompbb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.types_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'Z\006prompb' + _EXEMPLAR.fields_by_name['labels']._options = None + _EXEMPLAR.fields_by_name['labels']._serialized_options = b'\310\336\037\000' + _TIMESERIES.fields_by_name['labels']._options = None + _TIMESERIES.fields_by_name['labels']._serialized_options = b'\310\336\037\000' + _TIMESERIES.fields_by_name['samples']._options = None + _TIMESERIES.fields_by_name['samples']._serialized_options = b'\310\336\037\000' + _TIMESERIES.fields_by_name['exemplars']._options = None + _TIMESERIES.fields_by_name['exemplars']._serialized_options = 
b'\310\336\037\000' + _LABELS.fields_by_name['labels']._options = None + _LABELS.fields_by_name['labels']._serialized_options = b'\310\336\037\000' + _CHUNKEDSERIES.fields_by_name['labels']._options = None + _CHUNKEDSERIES.fields_by_name['labels']._serialized_options = b'\310\336\037\000' + _CHUNKEDSERIES.fields_by_name['chunks']._options = None + _CHUNKEDSERIES.fields_by_name['chunks']._serialized_options = b'\310\336\037\000' + _METRICMETADATA._serialized_start=152 + _METRICMETADATA._serialized_end=400 + _METRICMETADATA_METRICTYPE._serialized_start=279 + _METRICMETADATA_METRICTYPE._serialized_end=400 + _SAMPLE._serialized_start=402 + _SAMPLE._serialized_end=444 + _EXEMPLAR._serialized_start=446 + _EXEMPLAR._serialized_end=531 + _TIMESERIES._serialized_start=534 + _TIMESERIES._serialized_end=677 + _LABEL._serialized_start=679 + _LABEL._serialized_end=715 + _LABELS._serialized_start=717 + _LABELS._serialized_end=766 + _LABELMATCHER._serialized_start=769 + _LABELMATCHER._serialized_end=899 + _LABELMATCHER_TYPE._serialized_start=859 + _LABELMATCHER_TYPE._serialized_end=899 + _READHINTS._serialized_start=901 + _READHINTS._serialized_end=1025 + _CHUNK._serialized_start=1028 + _CHUNK._serialized_end=1167 + _CHUNK_ENCODING._serialized_start=1135 + _CHUNK_ENCODING._serialized_end=1167 + _CHUNKEDSERIES._serialized_start=1169 + _CHUNKEDSERIES._serialized_end=1266 +# @@protoc_insertion_point(module_scope) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py new file mode 100644 index 0000000000..ebb75f6c11 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py @@ -0,0 +1,15 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = "0.18.dev0" diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/__init__.py new file mode 100644 index 0000000000..b0a6f42841 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py new file mode 100644 index 0000000000..f76596d807 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py @@ -0,0 +1,414 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +from unittest.mock import patch + +from opentelemetry.exporter.prometheus_remote_write import ( + PrometheusRemoteWriteMetricsExporter, +) +from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( + Label, + TimeSeries, +) +from opentelemetry.sdk.metrics import Counter +from opentelemetry.sdk.metrics.export import ExportRecord, MetricsExportResult +from opentelemetry.sdk.metrics.export.aggregate import ( + HistogramAggregator, + LastValueAggregator, + MinMaxSumCountAggregator, + SumAggregator, + ValueObserverAggregator, +) +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.util import get_dict_as_key + + +class TestValidation(unittest.TestCase): + # Test cases to ensure exporter parameter validation works as intended + def test_valid_standard_param(self): + exporter = PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint", + ) + self.assertEqual(exporter.endpoint, "/prom/test_endpoint") + + def test_valid_basic_auth_param(self): + exporter = PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint", + basic_auth={ + "username": "test_username", + "password": "test_password", + }, + ) + self.assertEqual(exporter.basic_auth["username"], "test_username") + self.assertEqual(exporter.basic_auth["password"], "test_password") + + def test_invalid_no_endpoint_param(self): + with self.assertRaises(ValueError): + PrometheusRemoteWriteMetricsExporter("") + + def test_invalid_no_username_param(self): + with self.assertRaises(ValueError): + PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint", + basic_auth={"password": "test_password"}, + ) + + def test_invalid_no_password_param(self): + with self.assertRaises(ValueError): + PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint", + basic_auth={"username": "test_username"}, + ) + + def test_invalid_conflicting_passwords_param(self): + with self.assertRaises(ValueError): + 
PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint", + basic_auth={ + "username": "test_username", + "password": "test_password", + "password_file": "test_file", + }, + ) + + def test_invalid_timeout_param(self): + with self.assertRaises(ValueError): + PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint", timeout=0 + ) + + def test_valid_tls_config_param(self): + tls_config = { + "ca_file": "test_ca_file", + "cert_file": "test_cert_file", + "key_file": "test_key_file", + "insecure_skip_verify": True, + } + exporter = PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint", tls_config=tls_config + ) + self.assertEqual(exporter.tls_config["ca_file"], tls_config["ca_file"]) + self.assertEqual( + exporter.tls_config["cert_file"], tls_config["cert_file"] + ) + self.assertEqual( + exporter.tls_config["key_file"], tls_config["key_file"] + ) + self.assertEqual( + exporter.tls_config["insecure_skip_verify"], + tls_config["insecure_skip_verify"], + ) + + # if cert_file is provided, then key_file must also be provided + def test_invalid_tls_config_cert_only_param(self): + tls_config = {"cert_file": "value"} + with self.assertRaises(ValueError): + PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint", tls_config=tls_config + ) + + # if cert_file is provided, then key_file must also be provided + def test_invalid_tls_config_key_only_param(self): + tls_config = {"cert_file": "value"} + with self.assertRaises(ValueError): + PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint", tls_config=tls_config + ) + + +class TestConversion(unittest.TestCase): + # Initializes test data that is reused across tests + def setUp(self): + self.exporter = PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint" + ) + + # Ensures conversion to timeseries function works with valid aggregation types + def test_valid_convert_to_timeseries(self): + test_records = [ + ExportRecord( + Counter("testname", 
"testdesc", "testunit", int, None), + None, + SumAggregator(), + Resource({}), + ), + ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + None, + MinMaxSumCountAggregator(), + Resource({}), + ), + ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + None, + HistogramAggregator(), + Resource({}), + ), + ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + None, + LastValueAggregator(), + Resource({}), + ), + ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + None, + ValueObserverAggregator(), + Resource({}), + ), + ] + for record in test_records: + record.aggregator.update(5) + record.aggregator.take_checkpoint() + data = self.exporter._convert_to_timeseries(test_records) + self.assertIsInstance(data, list) + self.assertEqual(len(data), 13) + for timeseries in data: + self.assertIsInstance(timeseries, TimeSeries) + + # Ensures conversion to timeseries fails for unsupported aggregation types + def test_invalid_convert_to_timeseries(self): + data = self.exporter._convert_to_timeseries( + [ExportRecord(None, None, None, Resource({}))] + ) + self.assertIsInstance(data, list) + self.assertEqual(len(data), 0) + + # Ensures sum aggregator is correctly converted to timeseries + def test_convert_from_sum(self): + sum_record = ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + None, + SumAggregator(), + Resource({}), + ) + sum_record.aggregator.update(3) + sum_record.aggregator.update(2) + sum_record.aggregator.take_checkpoint() + + expected_timeseries = self.exporter._create_timeseries( + sum_record, "testname_sum", 5.0 + ) + timeseries = self.exporter._convert_from_sum(sum_record) + self.assertEqual(timeseries[0], expected_timeseries) + + # Ensures sum min_max_count aggregator is correctly converted to timeseries + def test_convert_from_min_max_sum_count(self): + min_max_sum_count_record = ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + 
None, + MinMaxSumCountAggregator(), + Resource({}), + ) + min_max_sum_count_record.aggregator.update(5) + min_max_sum_count_record.aggregator.update(1) + min_max_sum_count_record.aggregator.take_checkpoint() + + expected_min_timeseries = self.exporter._create_timeseries( + min_max_sum_count_record, "testname_min", 1.0 + ) + expected_max_timeseries = self.exporter._create_timeseries( + min_max_sum_count_record, "testname_max", 5.0 + ) + expected_sum_timeseries = self.exporter._create_timeseries( + min_max_sum_count_record, "testname_sum", 6.0 + ) + expected_count_timeseries = self.exporter._create_timeseries( + min_max_sum_count_record, "testname_count", 2.0 + ) + + timeseries = self.exporter._convert_from_min_max_sum_count( + min_max_sum_count_record + ) + self.assertEqual(timeseries[0], expected_min_timeseries) + self.assertEqual(timeseries[1], expected_max_timeseries) + self.assertEqual(timeseries[2], expected_sum_timeseries) + self.assertEqual(timeseries[3], expected_count_timeseries) + + # Ensures histogram aggregator is correctly converted to timeseries + def test_convert_from_histogram(self): + histogram_record = ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + None, + HistogramAggregator(), + Resource({}), + ) + histogram_record.aggregator.update(5) + histogram_record.aggregator.update(2) + histogram_record.aggregator.update(-1) + histogram_record.aggregator.take_checkpoint() + + expected_le_0_timeseries = self.exporter._create_timeseries( + histogram_record, "testname_histogram", 1.0, ("le", "0") + ) + expected_le_inf_timeseries = self.exporter._create_timeseries( + histogram_record, "testname_histogram", 2.0, ("le", "+Inf") + ) + timeseries = self.exporter._convert_from_histogram(histogram_record) + self.assertEqual(timeseries[0], expected_le_0_timeseries) + self.assertEqual(timeseries[1], expected_le_inf_timeseries) + + # Ensures last value aggregator is correctly converted to timeseries + def test_convert_from_last_value(self): + 
last_value_record = ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + None, + LastValueAggregator(), + Resource({}), + ) + last_value_record.aggregator.update(1) + last_value_record.aggregator.update(5) + last_value_record.aggregator.take_checkpoint() + + expected_timeseries = self.exporter._create_timeseries( + last_value_record, "testname_last", 5.0 + ) + timeseries = self.exporter._convert_from_last_value(last_value_record) + self.assertEqual(timeseries[0], expected_timeseries) + + # Ensures value observer aggregator is correctly converted to timeseries + def test_convert_from_value_observer(self): + value_observer_record = ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + None, + ValueObserverAggregator(), + Resource({}), + ) + value_observer_record.aggregator.update(5) + value_observer_record.aggregator.update(1) + value_observer_record.aggregator.update(2) + value_observer_record.aggregator.take_checkpoint() + + expected_min_timeseries = self.exporter._create_timeseries( + value_observer_record, "testname_min", 1.0 + ) + expected_max_timeseries = self.exporter._create_timeseries( + value_observer_record, "testname_max", 5.0 + ) + expected_sum_timeseries = self.exporter._create_timeseries( + value_observer_record, "testname_sum", 8.0 + ) + expected_count_timeseries = self.exporter._create_timeseries( + value_observer_record, "testname_count", 3.0 + ) + expected_last_timeseries = self.exporter._create_timeseries( + value_observer_record, "testname_last", 2.0 + ) + timeseries = self.exporter._convert_from_value_observer( + value_observer_record + ) + self.assertEqual(timeseries[0], expected_min_timeseries) + self.assertEqual(timeseries[1], expected_max_timeseries) + self.assertEqual(timeseries[2], expected_sum_timeseries) + self.assertEqual(timeseries[3], expected_count_timeseries) + self.assertEqual(timeseries[4], expected_last_timeseries) + + # Ensures quantile aggregator is correctly converted to timeseries + # 
TODO: Add test_convert_from_quantile once method is implemented + + # Ensures timeseries produced contains appropriate sample and labels + def test_create_timeseries(self): + def create_label(name, value): + label = Label() + label.name = name + label.value = value + return label + + sum_aggregator = SumAggregator() + sum_aggregator.update(5) + sum_aggregator.take_checkpoint() + export_record = ExportRecord( + Counter("testname", "testdesc", "testunit", int, None), + get_dict_as_key({"record_name": "record_value"}), + sum_aggregator, + Resource({"resource_name": "resource_value"}), + ) + + expected_timeseries = TimeSeries() + expected_timeseries.labels.append( # pylint:disable=E1101 + create_label("__name__", "testname") + ) + expected_timeseries.labels.append( # pylint:disable=E1101 + create_label("resource_name", "resource_value") + ) + expected_timeseries.labels.append( # pylint:disable=E1101 + create_label("record_name", "record_value") + ) + + sample = expected_timeseries.samples.add() # pylint:disable=E1101 + sample.timestamp = int(sum_aggregator.last_update_timestamp / 1000000) + sample.value = 5.0 + + timeseries = self.exporter._create_timeseries( + export_record, "testname", 5.0 + ) + self.assertEqual(timeseries, expected_timeseries) + + +class TestExport(unittest.TestCase): + # Initializes test data that is reused across tests + def setUp(self): + self.exporter = PrometheusRemoteWriteMetricsExporter( + endpoint="/prom/test_endpoint" + ) + + # Ensures export is successful with valid export_records and config + @patch("requests.post") + def test_valid_export(self, mock_post): + mock_post.return_value.configure_mock(**{"status_code": 200}) + test_metric = Counter("testname", "testdesc", "testunit", int, None) + labels = get_dict_as_key({"environment": "testing"}) + record = ExportRecord( + test_metric, labels, SumAggregator(), Resource({}) + ) + result = self.exporter.export([record]) + self.assertIs(result, MetricsExportResult.SUCCESS) + 
self.assertEqual(mock_post.call_count, 1) + + result = self.exporter.export([]) + self.assertIs(result, MetricsExportResult.SUCCESS) + + def test_invalid_export(self): + record = ExportRecord(None, None, None, None) + result = self.exporter.export([record]) + self.assertIs(result, MetricsExportResult.FAILURE) + + @patch("requests.post") + def test_valid_send_message(self, mock_post): + mock_post.return_value.configure_mock(**{"ok": True}) + result = self.exporter._send_message(bytes(), {}) + self.assertEqual(mock_post.call_count, 1) + self.assertEqual(result, MetricsExportResult.SUCCESS) + + def test_invalid_send_message(self): + result = self.exporter._send_message(bytes(), {}) + self.assertEqual(result, MetricsExportResult.FAILURE) + + # Verifies that build_message calls snappy.compress and returns SerializedString + @patch("snappy.compress", return_value=bytes()) + def test_build_message(self, mock_compress): + message = self.exporter._build_message([TimeSeries()]) + self.assertEqual(mock_compress.call_count, 1) + self.assertIsInstance(message, bytes) + + # Ensure correct headers are added when valid config is provided + def test_build_headers(self): + self.exporter.headers = {"Custom Header": "test_header"} + + headers = self.exporter._build_headers() + self.assertEqual(headers["Content-Encoding"], "snappy") + self.assertEqual(headers["Content-Type"], "application/x-protobuf") + self.assertEqual(headers["X-Prometheus-Remote-Write-Version"], "0.1.0") + self.assertEqual(headers["Custom Header"], "test_header") diff --git a/tox.ini b/tox.ini index 4c028e2ccc..081b721634 100644 --- a/tox.ini +++ b/tox.ini @@ -104,6 +104,9 @@ envlist = ; opentelemetry-exporter-richconsole py3{7,8,9,10}-test-exporter-richconsole + ; opentelemetry-exporter-prometheus-remote-write + py3{6,7,8,9,10}-test-exporter-prometheus-remote-write + ; opentelemetry-instrumentation-mysql py3{7,8,9,10}-test-instrumentation-mysql pypy3-test-instrumentation-mysql @@ -300,6 +303,7 @@ changedir = 
test-propagator-aws: propagator/opentelemetry-propagator-aws-xray/tests test-propagator-ot-trace: propagator/opentelemetry-propagator-ot-trace/tests test-exporter-richconsole: exporter/opentelemetry-exporter-richconsole/tests + test-exporter-prometheus-remote-write: exporter/opentelemetry-exporter-prometheus-remote-write/tests commands_pre = ; Install without -e to test the actual installation @@ -387,6 +391,8 @@ commands_pre = richconsole: pip install flaky {toxinidir}/exporter/opentelemetry-exporter-richconsole[test] + prometheus: pip install {toxinidir}/exporter/opentelemetry-exporter-prometheus-remote-write[test] + sklearn: pip install {toxinidir}/instrumentation/opentelemetry-instrumentation-sklearn[test] sqlalchemy{11,14}: pip install {toxinidir}/instrumentation/opentelemetry-instrumentation-sqlalchemy[test] @@ -498,6 +504,7 @@ commands_pre = python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-aws-lambda[test] python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-system-metrics[test] python -m pip install -e {toxinidir}/exporter/opentelemetry-exporter-richconsole[test] + python -m pip install -e {toxinidir}/exporter/opentelemetry-exporter-prometheus-remote-write[test] python -m pip install -e {toxinidir}/sdk-extension/opentelemetry-sdk-extension-aws[test] python -m pip install -e {toxinidir}/propagator/opentelemetry-propagator-aws-xray[test] python -m pip install -e {toxinidir}/propagator/opentelemetry-propagator-ot-trace[test] From 6db88241b0e92318694354f38a2302456b088640 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Thu, 11 Aug 2022 14:37:07 -0400 Subject: [PATCH 02/16] Add some tests & test infra. 
--- .../prometheus_remote_write/__init__.py | 141 +++++++++++------- .../tests/conftest.py | 66 ++++++++ .../test_prometheus_remote_write_exporter.py | 65 +++++++- 3 files changed, 210 insertions(+), 62 deletions(-) create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py index 0e808283c3..b9d7a3ab0e 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -16,6 +16,8 @@ import re from typing import Dict, Sequence +from collections import defaultdict +from itertools import chain import requests import snappy @@ -30,10 +32,12 @@ from opentelemetry.sdk.metrics.export import ( MetricExporter, MetricExportResult, - AggregationTemporality, Gauge, Sum, Histogram, + MetricExportResult, + MetricsData, + Metric, ) #from opentelemetry.sdk.metrics.export.aggregate import ( # HistogramAggregator, @@ -162,8 +166,8 @@ def headers(self, headers: Dict): self._headers = headers def export( - self, export_records: Sequence[ExportRecord] - ) -> MetricsExportResult: + self, export_records + ) ->MetricExportResult: if not export_records: return MetricsExportResult.SUCCESS timeseries = self._convert_to_timeseries(export_records) @@ -181,9 +185,82 @@ def shutdown(self) -> None: def _translate_data(self, data: MetricsData): rw_timeseries = [] - + + for resource_metrics in data.resource_metrics: + resource = resource_metrics.resource + # OTLP Data model suggests combining some attrs into job/instance + # Should we do that here? 
+ resource_labels = self._get_resource_labels(resource.attributes) + # Scope name/version probably not too useful from a labeling perspective + for scope_metrics in resource_metrics.scope_metrics: + for metric in scope_metrics.metrics: + rw_timeseries.extend( self._parse_metric(metric,resource_labels) ) + + def _get_resource_labels(self,attrs): + """ Converts Resource Attributes to Prometheus Labels based on + OTLP Metric Data Model's recommendations on Resource Attributes + """ + return [ (n,str(v)) for n,v in resource.attributes.items() ] + + def _parse_metric(self, metric: Metric, resource_labels: Sequence) -> Sequence[TimeSeries]: + """ + Parses the Metric & lower objects, then converts the output into + OM TimeSeries. Returns a List of TimeSeries objects based on one Metric + """ + # datapoints have attributes associated with them. these would be sent + # to RW as different metrics: name & labels is a unique time series + sample_sets = defaultdict(list) + if isinstance(metric.data,(Gauge,Sum)): + for dp in metric.data.data_points: + attrs,sample = self._parse_data_point(dp) + sample_sets[attrs].append(sample) + elif isinstance(metric.data,(HistogramType)): + raise NotImplementedError("Coming sooN!") + else: + logger.warn("Unsupported Metric Type: %s",type(metric.data)) + return [] + + # Create the metric name, will be a label later + if metric.unit: + #Prom. naming guidelines add unit to the name + name =f"{metric.name}_{metric.unit}" + else: + name = metric.name + + timeseries = [] + for labels, samples in sample_sets.items(): + ts = TimeSeries() + ts.labels.append(self._label("__name__",name)) + for label_name,label_value in chain(resource_labels,labels): + # Previous implementation did not str() the names... 
+ ts.labels.append(self._label(label_name,str(label_value))) + for value,timestamp in samples: + ts.samples.append(self._sample(value,timestamp)) + timeseries.append(ts) + return timeseries + + def _sample(self,value,timestamp :int): + sample = Sample() + sample.value = value + sample.timestamp = timestamp + return sample + + def _label(self,name:str,value:str): + label = Label() + label.name = name + label.value = value + return label + + def _parse_data_point(self, data_point): + + attrs = tuple(data_point.attributes.items()) + #TODO: Optimize? create Sample here + # remote write time is in milliseconds + sample = (data_point.value,(data_point.time_unix_nano // 1_000_000)) + return attrs,sample + def _convert_to_timeseries( - self, export_records: Sequence[ExportRecord] + self, export_records ) -> Sequence[TimeSeries]: timeseries = [] for export_record in export_records: @@ -199,7 +276,7 @@ def _convert_to_timeseries( return timeseries def _convert_from_sum( - self, sum_record: ExportRecord + self, sum_record ) -> Sequence[TimeSeries]: return [ self._create_timeseries( @@ -211,22 +288,9 @@ def _convert_from_sum( def _convert_from_gauge(self, gauge_record): raise NotImplementedError("Do this") - def _convert_from_min_max_sum_count( - self, min_max_sum_count_record: ExportRecord - ) -> Sequence[TimeSeries]: - timeseries = [] - for agg_type in ["min", "max", "sum", "count"]: - name = min_max_sum_count_record.instrument.name + "_" + agg_type - value = getattr( - min_max_sum_count_record.aggregator.checkpoint, agg_type - ) - timeseries.append( - self._create_timeseries(min_max_sum_count_record, name, value) - ) - return timeseries def _convert_from_histogram( - self, histogram_record: ExportRecord + self, histogram_record ) -> Sequence[TimeSeries]: timeseries = [] for bound in histogram_record.aggregator.checkpoint.keys(): @@ -242,43 +306,10 @@ def _convert_from_histogram( ) return timeseries - def _convert_from_last_value( - self, last_value_record: ExportRecord - ) 
-> Sequence[TimeSeries]: - return [ - self._create_timeseries( - last_value_record, - last_value_record.instrument.name + "_last", - last_value_record.aggregator.checkpoint, - ) - ] - - def _convert_from_value_observer( - self, value_observer_record: ExportRecord - ) -> Sequence[TimeSeries]: - timeseries = [] - for agg_type in ["min", "max", "sum", "count", "last"]: - timeseries.append( - self._create_timeseries( - value_observer_record, - value_observer_record.instrument.name + "_" + agg_type, - getattr( - value_observer_record.aggregator.checkpoint, agg_type - ), - ) - ) - return timeseries - - # TODO: Implement convert from quantile once supported by SDK for Prometheus Summaries - def _convert_from_quantile( - self, summary_record: ExportRecord - ) -> Sequence[TimeSeries]: - raise NotImplementedError() - # pylint: disable=no-member,no-self-use def _create_timeseries( self, - export_record: ExportRecord, + export_record, name: str, value: float, extra_label: (str, str) = None, @@ -344,7 +375,7 @@ def _build_headers(self) -> Dict: def _send_message( self, message: bytes, headers: Dict - ) -> MetricsExportResult: + ) -> MetricExportResult: auth = None if self.basic_auth: auth = (self.basic_auth["username"], self.basic_auth["password"]) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py new file mode 100644 index 0000000000..fee338c723 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py @@ -0,0 +1,66 @@ + + +import random +import pytest + +import opentelemetry.test.metrictestutil as metric_util#import _generate_gauge, _generate_sum + +from opentelemetry.sdk.metrics.export import ( + AggregationTemporality, + Histogram, + HistogramDataPoint, + Sum, + Gauge, + MetricExportResult, + MetricsData, + ResourceMetrics, + ScopeMetrics, + Metric, +) + +from opentelemetry.exporter.prometheus_remote_write import ( + 
PrometheusRemoteWriteMetricsExporter, +) +@pytest.fixture +def prom_rw(): + return PrometheusRemoteWriteMetricsExporter("http://victoria:8428/api/v1/write") + + + +@pytest.fixture +def generate_metrics_data(data): + pass + + + +@pytest.fixture +def metric_histogram(): + dp = HistogramDataPoint( + attributes={"foo": "bar", "baz": 42}, + start_time_unix_nano=1641946016139533244, + time_unix_nano=1641946016139533244, + count=random.randint(1,10), + sum=random.randint(42,420), + bucket_counts=[1, 4], + explicit_bounds=[10.0, 20.0], + min=8, + max=18, + ) + data = Histogram( + [dp], + AggregationTemporality.CUMULATIVE, + ) + return Metric( + "test_histogram", + "foo", + "tu", + data=data, + ) + +@pytest.fixture +def metric(request): + if request.param == "gauge": + return metric_util._generate_gauge("test_gauge",random.randint(0,100)) + elif request.param == "sum": + return metric_util._generate_sum("test_sum",random.randint(0,9_999_999_999)) + diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py index f76596d807..196bfe6092 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py @@ -23,17 +23,68 @@ TimeSeries, ) from opentelemetry.sdk.metrics import Counter -from opentelemetry.sdk.metrics.export import ExportRecord, MetricsExportResult -from opentelemetry.sdk.metrics.export.aggregate import ( - HistogramAggregator, - LastValueAggregator, - MinMaxSumCountAggregator, - SumAggregator, - ValueObserverAggregator, +#from opentelemetry.sdk.metrics.export import ExportRecord, MetricExportResult +#from opentelemetry.sdk.metrics.export.aggregate import ( +# HistogramAggregator, +# LastValueAggregator, +# MinMaxSumCountAggregator, +# SumAggregator, 
+# ValueObserverAggregator, +#) + +from opentelemetry.sdk.metrics.export import ( + NumberDataPoint, ) from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.util import get_dict_as_key +import pytest + +def test_parse_data_point(prom_rw): + + attrs = {"Foo" : "Bar","Baz" : 42} + timestamp = 1641946016139533244 + value = 242.42 + dp = NumberDataPoint( + attrs, + 0, + timestamp, + value + ) + labels, sample = prom_rw._parse_data_point(dp) + assert labels == (("Foo", "Bar"),("Baz", 42)) + assert sample == (value,timestamp // 1_000_000) + +@pytest.mark.parametrize("metric",[ + "gauge", + "sum", +],indirect=["metric"]) +def test_parse_metric(metric,prom_rw): + # We have 1 data point & 5 labels total + attributes = { + "service" : "foo", + "id" : 42, + } + + series = prom_rw._parse_metric(metric,tuple(attributes.items())) + assert len(series) == 1 + + #Build out the expected attributes and check they all made it as labels + proto_out = series[0] + number_data_point = metric.data.data_points[0] + attributes.update(number_data_point.attributes) + attributes["__name__"] = metric.name +f"_{metric.unit}" + + for label in proto_out.labels: + assert label.value == str(attributes[label.name]) + + # Ensure we have one sample with the correct time & value + assert len(series.samples) == 1 + sample = proto_out.samples[0] + assert sample.timestamp == (number_data_point.time_unix_nano // 1_000_000) + assert sample.value == number_data_point.value + + class TestValidation(unittest.TestCase): # Test cases to ensure exporter parameter validation works as intended From b115a467f78f978f3003b787dd0c583f4cbdf395 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Wed, 21 Sep 2022 11:30:37 -0400 Subject: [PATCH 03/16] Update test cases --- .../setup.cfg | 4 +- .../prometheus_remote_write/__init__.py | 138 +++--- .../tests/conftest.py | 36 +- .../test_prometheus_remote_write_exporter.py | 444 ++++++------------ 4 files changed, 240 insertions(+), 382 deletions(-) diff --git 
a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg index 88b8cc1ce6..f08c4176b0 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg @@ -34,13 +34,11 @@ classifiers = Programming Language :: Python :: 3.8 [options] -python_requires = >=3.5 +python_requires = >=3.8 package_dir= =src packages=find_namespace: install_requires = - snappy >= 2.8 - #protobuf >= 3.13.0 protobuf == 3.20.0 requests == 2.25.0 opentelemetry-api == 1.12.0rc2 diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py index b9d7a3ab0e..d895867b23 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -71,6 +71,7 @@ def __init__( timeout: int = 30, tls_config: Dict = None, proxies: Dict = None, + resources_as_labels : bool = True, ): self.endpoint = endpoint self.basic_auth = basic_auth @@ -78,12 +79,7 @@ def __init__( self.timeout = timeout self.tls_config = tls_config self.proxies = proxies - - self.converter_map = { - Sum: self._convert_from_sum, - Histogram: self._convert_from_histogram, - Gauge: self._convert_from_gauge, - } + self.resources_as_labels = resources_as_labels @property @@ -166,16 +162,16 @@ def headers(self, headers: Dict): self._headers = headers def export( - self, export_records + self,metrics_data : MetricsData ) ->MetricExportResult: - if not export_records: - return MetricsExportResult.SUCCESS - timeseries = self._convert_to_timeseries(export_records) + if not metrics_data: + return 
MetricExportResult.SUCCESS + timeseries = self._translate_data(metrics_data) if not timeseries: logger.error( "All records contain unsupported aggregators, export aborted" ) - return MetricsExportResult.FAILURE + return MetricExportResult.FAILURE message = self._build_message(timeseries) headers = self._build_headers() return self._send_message(message, headers) @@ -183,54 +179,56 @@ def export( def shutdown(self) -> None: pass - def _translate_data(self, data: MetricsData): + def _translate_data(self, data: MetricsData) -> Sequence[TimeSeries]: rw_timeseries = [] for resource_metrics in data.resource_metrics: resource = resource_metrics.resource # OTLP Data model suggests combining some attrs into job/instance # Should we do that here? - resource_labels = self._get_resource_labels(resource.attributes) + if self.resources_as_labels: + resource_labels = [ (n,str(v)) for n,v in resource.attributes.items() ] + else: + resource_labels = [] # Scope name/version probably not too useful from a labeling perspective for scope_metrics in resource_metrics.scope_metrics: for metric in scope_metrics.metrics: rw_timeseries.extend( self._parse_metric(metric,resource_labels) ) - - def _get_resource_labels(self,attrs): - """ Converts Resource Attributes to Prometheus Labels based on - OTLP Metric Data Model's recommendations on Resource Attributes - """ - return [ (n,str(v)) for n,v in resource.attributes.items() ] + return rw_timeseries def _parse_metric(self, metric: Metric, resource_labels: Sequence) -> Sequence[TimeSeries]: """ Parses the Metric & lower objects, then converts the output into OM TimeSeries. Returns a List of TimeSeries objects based on one Metric """ + + + # Create the metric name, will be a label later + if metric.unit: + #Prom. naming guidelines add unit to the name + name =f"{metric.name}_{metric.unit}" + else: + name = metric.name + # datapoints have attributes associated with them. 
these would be sent # to RW as different metrics: name & labels is a unique time series sample_sets = defaultdict(list) if isinstance(metric.data,(Gauge,Sum)): for dp in metric.data.data_points: - attrs,sample = self._parse_data_point(dp) + attrs,sample = self._parse_data_point(dp,name) sample_sets[attrs].append(sample) - elif isinstance(metric.data,(HistogramType)): - raise NotImplementedError("Coming sooN!") + elif isinstance(metric.data,Histogram): + for dp in metric.data.data_points: + dp_result = self._parse_histogram_data_point(dp,name) + for attrs,sample in dp_result: + sample_sets[attrs].append(sample) else: logger.warn("Unsupported Metric Type: %s",type(metric.data)) return [] - # Create the metric name, will be a label later - if metric.unit: - #Prom. naming guidelines add unit to the name - name =f"{metric.name}_{metric.unit}" - else: - name = metric.name - timeseries = [] for labels, samples in sample_sets.items(): ts = TimeSeries() - ts.labels.append(self._label("__name__",name)) for label_name,label_value in chain(resource_labels,labels): # Previous implementation did not str() the names... 
ts.labels.append(self._label(label_name,str(label_value))) @@ -239,23 +237,61 @@ def _parse_metric(self, metric: Metric, resource_labels: Sequence) -> Sequence[T timeseries.append(ts) return timeseries - def _sample(self,value,timestamp :int): + def _sample(self,value: int,timestamp :int) -> Sample: sample = Sample() sample.value = value sample.timestamp = timestamp return sample - def _label(self,name:str,value:str): + def _label(self,name:str,value:str) -> Label: label = Label() label.name = name label.value = value return label - def _parse_data_point(self, data_point): + def _parse_histogram_data_point(self, data_point, name): + + #if (len(data_point.explicit_bounds)+1) != len(data_point.bucket_counts): + # raise ValueError("Number of buckets must be 1 more than the explicit bounds!") + + sample_attr_pairs = [] + + base_attrs = [(n,v) for n,v in data_point.attributes.items()] + timestamp = data_point.time_unix_nano // 1_000_000 + + + def handle_bucket(value,bound=None,name_override=None): + # Metric Level attributes + the bucket boundry attribute + name + ts_attrs = base_attrs.copy() + ts_attrs.append(("__name__",name_override or name)) + if bound: + ts_attrs.append(("le",str(bound))) + # Value is count of values in each bucket + ts_sample = (value,timestamp) + return tuple(ts_attrs), ts_sample - attrs = tuple(data_point.attributes.items()) - #TODO: Optimize? 
create Sample here - # remote write time is in milliseconds + for bound_pos,bound in enumerate(data_point.explicit_bounds): + sample_attr_pairs.append( + handle_bucket(data_point.bucket_counts[bound_pos],bound) + ) + + # Add the last label for implicit +inf bucket + sample_attr_pairs.append( + handle_bucket(data_point.bucket_counts[-1],bound="+Inf") + ) + + #Lastly, add series for count & sum + sample_attr_pairs.append( + handle_bucket(data_point.sum,name_override=f"{name}_sum") + ) + sample_attr_pairs.append( + handle_bucket(data_point.count,name_override=f"{name}_count") + ) + return sample_attr_pairs + + def _parse_data_point(self, data_point,name=None): + + attrs = tuple(data_point.attributes.items()) + (("__name__",name),) sample = (data_point.value,(data_point.time_unix_nano // 1_000_000)) return attrs,sample @@ -275,27 +311,17 @@ def _convert_to_timeseries( ) return timeseries - def _convert_from_sum( - self, sum_record - ) -> Sequence[TimeSeries]: - return [ - self._create_timeseries( - sum_record, - sum_record.instrument.name + "_sum", - sum_record.aggregator.checkpoint, - ) - ] - - def _convert_from_gauge(self, gauge_record): - raise NotImplementedError("Do this") - def _convert_from_histogram( - self, histogram_record + self, histogram: Histogram, ) -> Sequence[TimeSeries]: - timeseries = [] - for bound in histogram_record.aggregator.checkpoint.keys(): + sample_sets = defaultdict(list) + + base_attrs = [self._label(n,v) for n,v in histogram.attributes] + for bound in histogram.explicit_bounds: bound_str = "+Inf" if bound == float("inf") else str(bound) - value = histogram_record.aggregator.checkpoint[bound] + # General attributes apply + ts_attrs = base_attrs.copy.append(self._label("le",str(bound))) + sample_sets[attrs].append(sample) timeseries.append( self._create_timeseries( histogram_record, @@ -411,5 +437,5 @@ def _send_message( response.raise_for_status() except requests.exceptions.RequestException as err: logger.error("Export POST request failed 
with reason: %s", err) - return MetricsExportResult.FAILURE - return MetricsExportResult.SUCCESS + return MetricExportResult.FAILURE + return MetricExportResult.SUCCESS diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py index fee338c723..67b6f8ec00 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py @@ -26,41 +26,39 @@ def prom_rw(): return PrometheusRemoteWriteMetricsExporter("http://victoria:8428/api/v1/write") - @pytest.fixture -def generate_metrics_data(data): - pass - - +def metric(request): + if hasattr(request,"param"): + type_ = request.param + else: + type_ = random.choice(["gauge","sum"]) + if type_ == "gauge": + return metric_util._generate_gauge("test_gauge",random.randint(0,100)) + elif type_ == "sum": + return metric_util._generate_sum("test_sum",random.randint(0,9_999_999_999)) + elif type_ == "histogram": + return _generate_histogram("test_histogram") -@pytest.fixture -def metric_histogram(): +def _generate_histogram(name): dp = HistogramDataPoint( attributes={"foo": "bar", "baz": 42}, start_time_unix_nano=1641946016139533244, time_unix_nano=1641946016139533244, - count=random.randint(1,10), - sum=random.randint(42,420), + count=5, + sum=420, bucket_counts=[1, 4], - explicit_bounds=[10.0, 20.0], + explicit_bounds=[10.0], min=8, - max=18, + max=80, ) data = Histogram( [dp], AggregationTemporality.CUMULATIVE, ) return Metric( - "test_histogram", + name, "foo", "tu", data=data, ) -@pytest.fixture -def metric(request): - if request.param == "gauge": - return metric_util._generate_gauge("test_gauge",random.randint(0,100)) - elif request.param == "sum": - return metric_util._generate_sum("test_sum",random.randint(0,9_999_999_999)) - diff --git 
a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py index 196bfe6092..24e9f86510 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py @@ -14,6 +14,7 @@ import unittest from unittest.mock import patch +import snappy from opentelemetry.exporter.prometheus_remote_write import ( PrometheusRemoteWriteMetricsExporter, @@ -23,7 +24,7 @@ TimeSeries, ) from opentelemetry.sdk.metrics import Counter -#from opentelemetry.sdk.metrics.export import ExportRecord, MetricExportResult +#from opentelemetry.sdk.metrics.export import MetricExportResult #from opentelemetry.sdk.metrics.export.aggregate import ( # HistogramAggregator, # LastValueAggregator, @@ -34,7 +35,15 @@ from opentelemetry.sdk.metrics.export import ( NumberDataPoint, + HistogramDataPoint, + Histogram, + MetricsData, + ScopeMetrics, + ResourceMetrics, + MetricExportResult, ) + +from opentelemetry.sdk.util.instrumentation import InstrumentationScope from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.util import get_dict_as_key @@ -51,39 +60,89 @@ def test_parse_data_point(prom_rw): timestamp, value ) - labels, sample = prom_rw._parse_data_point(dp) - assert labels == (("Foo", "Bar"),("Baz", 42)) + name = "abc123_42" + labels, sample = prom_rw._parse_data_point(dp,name) + + assert labels == (("Foo", "Bar"),("Baz", 42),("__name__",name)) assert sample == (value,timestamp // 1_000_000) +def test_parse_histogram_dp(prom_rw): + attrs = {"foo": "bar", "baz": 42} + timestamp = 1641946016139533244 + bounds = [10.0, 20.0] + dp = HistogramDataPoint( + attributes=attrs, + start_time_unix_nano=1641946016139533244, + time_unix_nano=timestamp, + count=9, + sum=180, + bucket_counts=[1, 4, 
4], + explicit_bounds=bounds, + min=8, + max=80, + ) + name = "foo_histogram" + label_sample_pairs = prom_rw._parse_histogram_data_point(dp,name) + timestamp = timestamp // 1_000_000 + bounds.append("+Inf") + for pos,bound in enumerate(bounds): + # We have to attributes, we kinda assume the bucket label is last... + assert ("le",str(bound)) == label_sample_pairs[pos][0][-1] + # Check and make sure we are putting the bucket counts in there + assert (dp.bucket_counts[pos],timestamp) == label_sample_pairs[pos][1] + + # Last two are the sum & total count + pos +=1 + assert ("__name__",f"{name}_sum") in label_sample_pairs[pos][0] + assert (dp.sum,timestamp) == label_sample_pairs[pos][1] + + pos +=1 + assert ("__name__",f"{name}_count") in label_sample_pairs[pos][0] + assert (dp.count,timestamp) == label_sample_pairs[pos][1] + @pytest.mark.parametrize("metric",[ "gauge", "sum", + "histogram", ],indirect=["metric"]) def test_parse_metric(metric,prom_rw): - # We have 1 data point & 5 labels total + """ + Ensures output from parse_metrics are TimeSeries with expected data/size + """ attributes = { "service" : "foo", - "id" : 42, + "bool" : True, } + assert len(metric.data.data_points) == 1, "We can only support a single datapoint in tests" series = prom_rw._parse_metric(metric,tuple(attributes.items())) - assert len(series) == 1 - - #Build out the expected attributes and check they all made it as labels - proto_out = series[0] - number_data_point = metric.data.data_points[0] - attributes.update(number_data_point.attributes) - attributes["__name__"] = metric.name +f"_{metric.unit}" - - for label in proto_out.labels: - assert label.value == str(attributes[label.name]) - - # Ensure we have one sample with the correct time & value - assert len(series.samples) == 1 - sample = proto_out.samples[0] - assert sample.timestamp == (number_data_point.time_unix_nano // 1_000_000) - assert sample.value == number_data_point.value - + timestamp = metric.data.data_points[0].time_unix_nano 
// 1_000_000 + for single_series in series: + labels = str(single_series.labels) + # Its a bit easier to validate these stringified where we dont have to + # worry about ordering and protobuf TimeSeries object structure + # This doesn't guarantee the labels aren't mixed up, but our other + # test cases already do. + assert "__name__" in labels + assert metric.name in labels + combined_attrs = list(attributes.items()) + list(metric.data.data_points[0].attributes.items()) + for name,value in combined_attrs: + assert name in labels + assert str(value) in labels + if isinstance(metric.data,Histogram): + values = [ + metric.data.data_points[0].count, + metric.data.data_points[0].sum, + metric.data.data_points[0].bucket_counts[0], + metric.data.data_points[0].bucket_counts[1], + ] + else: + values = [ + metric.data.data_points[0].value, + ] + for sample in single_series.samples: + assert sample.timestamp == timestamp + assert sample.value in values class TestValidation(unittest.TestCase): @@ -179,287 +238,64 @@ def test_invalid_tls_config_key_only_param(self): ) -class TestConversion(unittest.TestCase): - # Initializes test data that is reused across tests - def setUp(self): - self.exporter = PrometheusRemoteWriteMetricsExporter( - endpoint="/prom/test_endpoint" - ) - - # Ensures conversion to timeseries function works with valid aggregation types - def test_valid_convert_to_timeseries(self): - test_records = [ - ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - None, - SumAggregator(), - Resource({}), - ), - ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - None, - MinMaxSumCountAggregator(), - Resource({}), - ), - ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - None, - HistogramAggregator(), - Resource({}), - ), - ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - None, - LastValueAggregator(), - Resource({}), - ), - ExportRecord( - Counter("testname", "testdesc", 
"testunit", int, None), - None, - ValueObserverAggregator(), - Resource({}), - ), - ] - for record in test_records: - record.aggregator.update(5) - record.aggregator.take_checkpoint() - data = self.exporter._convert_to_timeseries(test_records) - self.assertIsInstance(data, list) - self.assertEqual(len(data), 13) - for timeseries in data: - self.assertIsInstance(timeseries, TimeSeries) - - # Ensures conversion to timeseries fails for unsupported aggregation types - def test_invalid_convert_to_timeseries(self): - data = self.exporter._convert_to_timeseries( - [ExportRecord(None, None, None, Resource({}))] - ) - self.assertIsInstance(data, list) - self.assertEqual(len(data), 0) - - # Ensures sum aggregator is correctly converted to timeseries - def test_convert_from_sum(self): - sum_record = ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - None, - SumAggregator(), - Resource({}), - ) - sum_record.aggregator.update(3) - sum_record.aggregator.update(2) - sum_record.aggregator.take_checkpoint() - - expected_timeseries = self.exporter._create_timeseries( - sum_record, "testname_sum", 5.0 - ) - timeseries = self.exporter._convert_from_sum(sum_record) - self.assertEqual(timeseries[0], expected_timeseries) - - # Ensures sum min_max_count aggregator is correctly converted to timeseries - def test_convert_from_min_max_sum_count(self): - min_max_sum_count_record = ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - None, - MinMaxSumCountAggregator(), - Resource({}), - ) - min_max_sum_count_record.aggregator.update(5) - min_max_sum_count_record.aggregator.update(1) - min_max_sum_count_record.aggregator.take_checkpoint() - - expected_min_timeseries = self.exporter._create_timeseries( - min_max_sum_count_record, "testname_min", 1.0 - ) - expected_max_timeseries = self.exporter._create_timeseries( - min_max_sum_count_record, "testname_max", 5.0 - ) - expected_sum_timeseries = self.exporter._create_timeseries( - 
min_max_sum_count_record, "testname_sum", 6.0 - ) - expected_count_timeseries = self.exporter._create_timeseries( - min_max_sum_count_record, "testname_count", 2.0 - ) - timeseries = self.exporter._convert_from_min_max_sum_count( - min_max_sum_count_record - ) - self.assertEqual(timeseries[0], expected_min_timeseries) - self.assertEqual(timeseries[1], expected_max_timeseries) - self.assertEqual(timeseries[2], expected_sum_timeseries) - self.assertEqual(timeseries[3], expected_count_timeseries) - - # Ensures histogram aggregator is correctly converted to timeseries - def test_convert_from_histogram(self): - histogram_record = ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - None, - HistogramAggregator(), - Resource({}), - ) - histogram_record.aggregator.update(5) - histogram_record.aggregator.update(2) - histogram_record.aggregator.update(-1) - histogram_record.aggregator.take_checkpoint() +# Ensures export is successful with valid export_records and config +@patch("requests.post") +def test_valid_export(mock_post,prom_rw,metric): + metric = metric + mock_post.return_value.configure_mock(**{"status_code": 200}) + labels = get_dict_as_key({"environment": "testing"}) - expected_le_0_timeseries = self.exporter._create_timeseries( - histogram_record, "testname_histogram", 1.0, ("le", "0") - ) - expected_le_inf_timeseries = self.exporter._create_timeseries( - histogram_record, "testname_histogram", 2.0, ("le", "+Inf") - ) - timeseries = self.exporter._convert_from_histogram(histogram_record) - self.assertEqual(timeseries[0], expected_le_0_timeseries) - self.assertEqual(timeseries[1], expected_le_inf_timeseries) - - # Ensures last value aggregator is correctly converted to timeseries - def test_convert_from_last_value(self): - last_value_record = ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - None, - LastValueAggregator(), - Resource({}), - ) - last_value_record.aggregator.update(1) - 
last_value_record.aggregator.update(5) - last_value_record.aggregator.take_checkpoint() - - expected_timeseries = self.exporter._create_timeseries( - last_value_record, "testname_last", 5.0 - ) - timeseries = self.exporter._convert_from_last_value(last_value_record) - self.assertEqual(timeseries[0], expected_timeseries) - - # Ensures value observer aggregator is correctly converted to timeseries - def test_convert_from_value_observer(self): - value_observer_record = ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - None, - ValueObserverAggregator(), - Resource({}), - ) - value_observer_record.aggregator.update(5) - value_observer_record.aggregator.update(1) - value_observer_record.aggregator.update(2) - value_observer_record.aggregator.take_checkpoint() - - expected_min_timeseries = self.exporter._create_timeseries( - value_observer_record, "testname_min", 1.0 - ) - expected_max_timeseries = self.exporter._create_timeseries( - value_observer_record, "testname_max", 5.0 - ) - expected_sum_timeseries = self.exporter._create_timeseries( - value_observer_record, "testname_sum", 8.0 - ) - expected_count_timeseries = self.exporter._create_timeseries( - value_observer_record, "testname_count", 3.0 - ) - expected_last_timeseries = self.exporter._create_timeseries( - value_observer_record, "testname_last", 2.0 - ) - timeseries = self.exporter._convert_from_value_observer( - value_observer_record - ) - self.assertEqual(timeseries[0], expected_min_timeseries) - self.assertEqual(timeseries[1], expected_max_timeseries) - self.assertEqual(timeseries[2], expected_sum_timeseries) - self.assertEqual(timeseries[3], expected_count_timeseries) - self.assertEqual(timeseries[4], expected_last_timeseries) - - # Ensures quantile aggregator is correctly converted to timeseries - # TODO: Add test_convert_from_quantile once method is implemented - - # Ensures timeseries produced contains appropriate sample and labels - def test_create_timeseries(self): - def 
create_label(name, value): - label = Label() - label.name = name - label.value = value - return label - - sum_aggregator = SumAggregator() - sum_aggregator.update(5) - sum_aggregator.take_checkpoint() - export_record = ExportRecord( - Counter("testname", "testdesc", "testunit", int, None), - get_dict_as_key({"record_name": "record_value"}), - sum_aggregator, - Resource({"resource_name": "resource_value"}), - ) - - expected_timeseries = TimeSeries() - expected_timeseries.labels.append( # pylint:disable=E1101 - create_label("__name__", "testname") - ) - expected_timeseries.labels.append( # pylint:disable=E1101 - create_label("resource_name", "resource_value") - ) - expected_timeseries.labels.append( # pylint:disable=E1101 - create_label("record_name", "record_value") - ) - - sample = expected_timeseries.samples.add() # pylint:disable=E1101 - sample.timestamp = int(sum_aggregator.last_update_timestamp / 1000000) - sample.value = 5.0 - - timeseries = self.exporter._create_timeseries( - export_record, "testname", 5.0 - ) - self.assertEqual(timeseries, expected_timeseries) - - -class TestExport(unittest.TestCase): - # Initializes test data that is reused across tests - def setUp(self): - self.exporter = PrometheusRemoteWriteMetricsExporter( - endpoint="/prom/test_endpoint" - ) - - # Ensures export is successful with valid export_records and config - @patch("requests.post") - def test_valid_export(self, mock_post): - mock_post.return_value.configure_mock(**{"status_code": 200}) - test_metric = Counter("testname", "testdesc", "testunit", int, None) - labels = get_dict_as_key({"environment": "testing"}) - record = ExportRecord( - test_metric, labels, SumAggregator(), Resource({}) - ) - result = self.exporter.export([record]) - self.assertIs(result, MetricsExportResult.SUCCESS) - self.assertEqual(mock_post.call_count, 1) - - result = self.exporter.export([]) - self.assertIs(result, MetricsExportResult.SUCCESS) - - def test_invalid_export(self): - record = ExportRecord(None, 
None, None, None) - result = self.exporter.export([record]) - self.assertIs(result, MetricsExportResult.FAILURE) - - @patch("requests.post") - def test_valid_send_message(self, mock_post): - mock_post.return_value.configure_mock(**{"ok": True}) - result = self.exporter._send_message(bytes(), {}) - self.assertEqual(mock_post.call_count, 1) - self.assertEqual(result, MetricsExportResult.SUCCESS) - - def test_invalid_send_message(self): - result = self.exporter._send_message(bytes(), {}) - self.assertEqual(result, MetricsExportResult.FAILURE) - - # Verifies that build_message calls snappy.compress and returns SerializedString - @patch("snappy.compress", return_value=bytes()) - def test_build_message(self, mock_compress): - message = self.exporter._build_message([TimeSeries()]) - self.assertEqual(mock_compress.call_count, 1) - self.assertIsInstance(message, bytes) - - # Ensure correct headers are added when valid config is provided - def test_build_headers(self): - self.exporter.headers = {"Custom Header": "test_header"} - - headers = self.exporter._build_headers() - self.assertEqual(headers["Content-Encoding"], "snappy") - self.assertEqual(headers["Content-Type"], "application/x-protobuf") - self.assertEqual(headers["X-Prometheus-Remote-Write-Version"], "0.1.0") - self.assertEqual(headers["Custom Header"], "test_header") + # Assumed a "None" for Scope or Resource aren't valid, so build them here + scope = ScopeMetrics( + InstrumentationScope(name="prom-rw-test"), + [metric], + None + ) + resource = ResourceMetrics( + Resource({"service.name" : "foo"}), + [scope], + None + ) + record = MetricsData([resource]) + + result = prom_rw.export(record) + assert result == MetricExportResult.SUCCESS + assert mock_post.call_count == 1 + + result = prom_rw.export([]) + assert result == MetricExportResult.SUCCESS + +def test_invalid_export(prom_rw): + record = MetricsData([]) + + result = prom_rw.export(record) + assert result == MetricExportResult.FAILURE + 
+@patch("requests.post") +def test_valid_send_message(mock_post,prom_rw): + mock_post.return_value.configure_mock(**{"ok": True}) + result = prom_rw._send_message(bytes(), {}) + assert mock_post.call_count == 1 + assert result == MetricExportResult.SUCCESS + +def test_invalid_send_message(prom_rw): + result = prom_rw._send_message(bytes(), {}) + assert result == MetricExportResult.FAILURE + +# Verifies that build_message calls snappy.compress and returns SerializedString +@patch("snappy.compress", return_value=bytes()) +def test_build_message(mock_compress,prom_rw): + message = prom_rw._build_message([TimeSeries()]) + assert mock_compress.call_count == 1 + assert isinstance(message, bytes) + +# Ensure correct headers are added when valid config is provided +def test_build_headers(prom_rw): + prom_rw.headers = {"Custom Header": "test_header"} + + headers = prom_rw._build_headers() + assert headers["Content-Encoding"] == "snappy" + assert headers["Content-Type"] == "application/x-protobuf" + assert headers["X-Prometheus-Remote-Write-Version"] == "0.1.0" + assert headers["Custom Header"] == "test_header" From ad84c56f7a9797c4beb317d2a6975ceea6ca9cc3 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Mon, 26 Sep 2022 09:26:35 -0400 Subject: [PATCH 04/16] More fixes from Example run - Fix some naming issues,add a regex to correct these - Update setup.cfg versions - Update the example app for newer sdk syntax --- .../examples/Dockerfile | 9 +- .../examples/sampleapp.py | 95 +++-------- .../setup.cfg | 10 +- .../prometheus_remote_write/__init__.py | 149 ++++++------------ .../tests/conftest.py | 4 +- .../test_prometheus_remote_write_exporter.py | 23 ++- 6 files changed, 101 insertions(+), 189 deletions(-) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile index 09ce8cc323..f3fca0d568 100644 --- 
a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile @@ -1,8 +1,11 @@ -FROM python:3.7 -WORKDIR /code +FROM python:3.8 -COPY . . RUN apt-get update -y && apt-get install libsnappy-dev -y + +WORKDIR /code +COPY . . + RUN pip install -e . RUN pip install -r ./examples/requirements.txt + CMD ["python", "./examples/sampleapp.py"] diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py index 69f7a068ea..3a764c5683 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py @@ -7,132 +7,85 @@ import psutil from opentelemetry import metrics + +from opentelemetry.metrics import ( + Observation, + get_meter_provider, + set_meter_provider, +) from opentelemetry.exporter.prometheus_remote_write import ( PrometheusRemoteWriteMetricsExporter, ) from opentelemetry.sdk.metrics import MeterProvider -from opentelemetry.sdk.metrics.export.aggregate import ( - HistogramAggregator, - LastValueAggregator, - MinMaxSumCountAggregator, - SumAggregator, -) -from opentelemetry.sdk.metrics.view import View, ViewConfig +from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader logging.basicConfig(stream=sys.stdout, level=logging.INFO) logger = logging.getLogger(__name__) -metrics.set_meter_provider(MeterProvider()) -meter = metrics.get_meter(__name__) + +testing_labels = {"environment": "testing"} + exporter = PrometheusRemoteWriteMetricsExporter( endpoint="http://cortex:9009/api/prom/push", headers={"X-Scope-Org-ID": "5"}, ) -metrics.get_meter_provider().start_pipeline(meter, exporter, 1) -testing_labels = {"environment": "testing"} - +reader = PeriodicExportingMetricReader(exporter,1000) +provider = MeterProvider(metric_readers=[reader]) 
+metrics.set_meter_provider(provider) +meter = metrics.get_meter(__name__) # Callback to gather cpu usage def get_cpu_usage_callback(observer): for (number, percent) in enumerate(psutil.cpu_percent(percpu=True)): labels = {"cpu_number": str(number)} - observer.observe(percent, labels) + yield Observation(percent, labels) # Callback to gather RAM usage def get_ram_usage_callback(observer): ram_percent = psutil.virtual_memory().percent - observer.observe(ram_percent, {}) + yield Observation(ram_percent, {}) requests_counter = meter.create_counter( name="requests", description="number of requests", unit="1", - value_type=int, ) request_min_max = meter.create_counter( name="requests_min_max", description="min max sum count of requests", unit="1", - value_type=int, ) request_last_value = meter.create_counter( name="requests_last_value", description="last value number of requests", unit="1", - value_type=int, ) -requests_size = meter.create_valuerecorder( - name="requests_size", - description="size of requests", - unit="1", - value_type=int, -) - -requests_size_histogram = meter.create_valuerecorder( - name="requests_size_histogram", - description="histogram of request_size", - unit="1", - value_type=int, -) -requests_active = meter.create_updowncounter( +requests_active = meter.create_up_down_counter( name="requests_active", description="number of active requests", unit="1", - value_type=int, ) -meter.register_sumobserver( - callback=get_ram_usage_callback, +meter.create_observable_counter( + callbacks=[get_ram_usage_callback], name="ram_usage", description="ram usage", unit="1", - value_type=float, ) -meter.register_valueobserver( - callback=get_cpu_usage_callback, +meter.create_observable_up_down_counter( + callbacks=[get_cpu_usage_callback], name="cpu_percent", description="per-cpu usage", unit="1", - value_type=float, -) - - -counter_view1 = View( - requests_counter, - SumAggregator, - label_keys=["environment"], - view_config=ViewConfig.LABEL_KEYS, -) 
-counter_view2 = View( - request_min_max, - MinMaxSumCountAggregator, - label_keys=["os_type"], - view_config=ViewConfig.LABEL_KEYS, ) -counter_view3 = View( - request_last_value, - LastValueAggregator, - label_keys=["environment"], - view_config=ViewConfig.UNGROUPED, -) -size_view = View( - requests_size_histogram, - HistogramAggregator, - label_keys=["environment"], - aggregator_config={"bounds": [20, 40, 60, 80, 100]}, - view_config=ViewConfig.UNGROUPED, -) -meter.register_view(counter_view1) -meter.register_view(counter_view2) -meter.register_view(counter_view3) -meter.register_view(size_view) +request_latency = meter.create_histogram("request_latency") # Load generator num = random.randint(0, 1000) @@ -145,9 +98,7 @@ def get_ram_usage_callback(observer): # updown counter requests_active.add(num % 7231 + 200, testing_labels) - # value observers - requests_size.record(num % 6101 + 100, testing_labels) - requests_size_histogram.record(num % 113, testing_labels) + request_latency.record(num % 92,testing_labels) logger.log(level=INFO, msg="completed metrics collection cycle") time.sleep(1) num += 9791 diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg index f08c4176b0..40fe91b4fb 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg @@ -39,11 +39,11 @@ package_dir= =src packages=find_namespace: install_requires = - protobuf == 3.20.0 - requests == 2.25.0 - opentelemetry-api == 1.12.0rc2 - opentelemetry-sdk == 1.12.0rc2 - python-snappy >= 0.5.4 + protobuf ~= 4.21 + requests ~= 2.28 + opentelemetry-api == 1.12.0 + opentelemetry-sdk == 1.12.0 + python-snappy ~= 0.6 [options.packages.find] where = src diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py 
b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py index d895867b23..006ee1f5d7 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -30,25 +30,28 @@ TimeSeries, ) from opentelemetry.sdk.metrics.export import ( - MetricExporter, - MetricExportResult, + AggregationTemporality, Gauge, Sum, Histogram, + MetricExporter, MetricExportResult, MetricsData, Metric, ) -#from opentelemetry.sdk.metrics.export.aggregate import ( -# HistogramAggregator, -# LastValueAggregator, -# MinMaxSumCountAggregator, -# SumAggregator, -# ValueObserverAggregator, -#) +from opentelemetry.sdk.metrics import ( + Counter, + Histogram as ClientHistogram, + ObservableCounter, + ObservableGauge, + ObservableUpDownCounter, + UpDownCounter, +) logger = logging.getLogger(__name__) +PROMETHEUS_NAME_REGEX = re.compile(r'[^\w:]') +PROMETHEUS_LABEL_REGEX = re.compile(r'[^\w]') class PrometheusRemoteWriteMetricsExporter(MetricExporter): """ @@ -72,6 +75,8 @@ def __init__( tls_config: Dict = None, proxies: Dict = None, resources_as_labels : bool = True, + preferred_temporality: Dict[type, AggregationTemporality] = None, + preferred_aggregation: Dict = None, ): self.endpoint = endpoint self.basic_auth = basic_auth @@ -81,6 +86,18 @@ def __init__( self.proxies = proxies self.resources_as_labels = resources_as_labels + if not preferred_temporality: + preferred_temporality = { + Counter: AggregationTemporality.CUMULATIVE, + UpDownCounter: AggregationTemporality.CUMULATIVE, + ClientHistogram: AggregationTemporality.CUMULATIVE, + ObservableCounter: AggregationTemporality.CUMULATIVE, + ObservableUpDownCounter: AggregationTemporality.CUMULATIVE, + ObservableGauge: AggregationTemporality.CUMULATIVE, + } + logger.error("Calling MetricExporter") 
+ + super().__init__(preferred_temporality,preferred_aggregation) @property def endpoint(self): @@ -162,7 +179,9 @@ def headers(self, headers: Dict): self._headers = headers def export( - self,metrics_data : MetricsData + self, + metrics_data : MetricsData, + timeout_millis: float = 10_000, ) ->MetricExportResult: if not metrics_data: return MetricExportResult.SUCCESS @@ -176,9 +195,6 @@ def export( headers = self._build_headers() return self._send_message(message, headers) - def shutdown(self) -> None: - pass - def _translate_data(self, data: MetricsData) -> Sequence[TimeSeries]: rw_timeseries = [] @@ -245,10 +261,18 @@ def _sample(self,value: int,timestamp :int) -> Sample: def _label(self,name:str,value:str) -> Label: label = Label() - label.name = name + label.name = PROMETHEUS_LABEL_REGEX.sub("_",name) label.value = value return label + def _sanitize_name(self,name): + # I Think Prometheus requires names to NOT start with a number this + # would not catch that, but do cover the other cases. The naming rules + # don't explicit say this, but the supplied regex implies it. 
+ # Got a little weird trying to do substitution with it, but can be + # fixed if we allow numeric beginnings to metric names + return PROMETHEUS_NAME_REGEX.sub("_",name) + def _parse_histogram_data_point(self, data_point, name): #if (len(data_point.explicit_bounds)+1) != len(data_point.bucket_counts): @@ -263,7 +287,7 @@ def _parse_histogram_data_point(self, data_point, name): def handle_bucket(value,bound=None,name_override=None): # Metric Level attributes + the bucket boundry attribute + name ts_attrs = base_attrs.copy() - ts_attrs.append(("__name__",name_override or name)) + ts_attrs.append(("__name__",self._sanitize_name(name_override or name))) if bound: ts_attrs.append(("le",str(bound))) # Value is count of values in each bucket @@ -291,96 +315,10 @@ def handle_bucket(value,bound=None,name_override=None): def _parse_data_point(self, data_point,name=None): - attrs = tuple(data_point.attributes.items()) + (("__name__",name),) + attrs = tuple(data_point.attributes.items()) + (("__name__",self._sanitize_name(name)),) sample = (data_point.value,(data_point.time_unix_nano // 1_000_000)) return attrs,sample - def _convert_to_timeseries( - self, export_records - ) -> Sequence[TimeSeries]: - timeseries = [] - for export_record in export_records: - aggregator_type = type(export_record.aggregator) - converter = self.converter_map.get(aggregator_type) - if converter: - timeseries.extend(converter(export_record)) - else: - logger.warning( - "%s aggregator is not supported, record dropped", - aggregator_type, - ) - return timeseries - - def _convert_from_histogram( - self, histogram: Histogram, - ) -> Sequence[TimeSeries]: - sample_sets = defaultdict(list) - - base_attrs = [self._label(n,v) for n,v in histogram.attributes] - for bound in histogram.explicit_bounds: - bound_str = "+Inf" if bound == float("inf") else str(bound) - # General attributes apply - ts_attrs = base_attrs.copy.append(self._label("le",str(bound))) - sample_sets[attrs].append(sample) - 
timeseries.append( - self._create_timeseries( - histogram_record, - histogram_record.instrument.name + "_histogram", - value, - extra_label=("le", bound_str), - ) - ) - return timeseries - - # pylint: disable=no-member,no-self-use - def _create_timeseries( - self, - export_record, - name: str, - value: float, - extra_label: (str, str) = None, - ) -> TimeSeries: - timeseries = TimeSeries() - seen = set() - - def add_label(label_name: str, label_value: str): - # Label name must contain only alphanumeric characters and underscores - label_name = re.sub("[^\\w_]", "_", label_name) - if label_name not in seen: - label = Label() - label.name = label_name - label.value = label_value - timeseries.labels.append(label) - seen.add(label_name) - else: - logger.warning( - "Duplicate label with name %s and value %s", - label_name, - label_value, - ) - - # The __name__ label is required by PromQL as its value appears as the metric_name - add_label("__name__", name) - if extra_label: - add_label(extra_label[0], extra_label[1]) - if export_record.resource.attributes: - for ( - label_name, - label_value, - ) in export_record.resource.attributes.items(): - add_label(label_name, str(label_value)) - if export_record.labels: - for [label_name, label_value] in export_record.labels: - add_label(label_name, label_value) - - sample = Sample() - sample.timestamp = int( - export_record.aggregator.last_update_timestamp / 1000000 - ) - sample.value = value - timeseries.samples.append(sample) - return timeseries - # pylint: disable=no-member,no-self-use def _build_message(self, timeseries: Sequence[TimeSeries]) -> bytes: write_request = WriteRequest() @@ -439,3 +377,10 @@ def _send_message( logger.error("Export POST request failed with reason: %s", err) return MetricExportResult.FAILURE return MetricExportResult.SUCCESS + + def force_flush(self, timeout_millis: float = 10_000) -> bool: + return True + + def shutdown(self) -> None: + pass + diff --git 
a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py index 67b6f8ec00..d9ef67fd8d 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py @@ -33,9 +33,9 @@ def metric(request): else: type_ = random.choice(["gauge","sum"]) if type_ == "gauge": - return metric_util._generate_gauge("test_gauge",random.randint(0,100)) + return metric_util._generate_gauge("test.gauge",random.randint(0,100)) elif type_ == "sum": - return metric_util._generate_sum("test_sum",random.randint(0,9_999_999_999)) + return metric_util._generate_sum("test.sum",random.randint(0,9_999_999_999)) elif type_ == "histogram": return _generate_histogram("test_histogram") diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py index 24e9f86510..3ebcb58708 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py @@ -18,6 +18,8 @@ from opentelemetry.exporter.prometheus_remote_write import ( PrometheusRemoteWriteMetricsExporter, + PROMETHEUS_LABEL_REGEX, + PROMETHEUS_NAME_REGEX, ) from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( Label, @@ -49,6 +51,16 @@ import pytest +@pytest.mark.parametrize("name,result",[ + ("abc.124","abc_124"), + (":abc", ":abc"), + ("abc.name.hi","abc_name_hi"), + ("service.name...","service_name___"), +]) +def test_name_regex(name,result,prom_rw): + assert prom_rw._sanitize_name(name) == result + + def test_parse_data_point(prom_rw): attrs = {"Foo" : "Bar","Baz" : 42} @@ -60,9 +72,10 @@ def test_parse_data_point(prom_rw): 
timestamp, value ) - name = "abc123_42" + name = "abc.123_42" labels, sample = prom_rw._parse_data_point(dp,name) + name = "abc_123_42" assert labels == (("Foo", "Bar"),("Baz", 42),("__name__",name)) assert sample == (value,timestamp // 1_000_000) @@ -110,8 +123,8 @@ def test_parse_metric(metric,prom_rw): Ensures output from parse_metrics are TimeSeries with expected data/size """ attributes = { - "service" : "foo", - "bool" : True, + "service_name" : "foo", + "bool_value" : True, } assert len(metric.data.data_points) == 1, "We can only support a single datapoint in tests" @@ -124,10 +137,10 @@ def test_parse_metric(metric,prom_rw): # This doesn't guarantee the labels aren't mixed up, but our other # test cases already do. assert "__name__" in labels - assert metric.name in labels + assert PROMETHEUS_NAME_REGEX.sub("_",metric.name) in labels combined_attrs = list(attributes.items()) + list(metric.data.data_points[0].attributes.items()) for name,value in combined_attrs: - assert name in labels + assert PROMETHEUS_LABEL_REGEX.sub("_",name) in labels assert str(value) in labels if isinstance(metric.data,Histogram): values = [ From f45a968d3d5d4610f834cbe606908da87e064c84 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Tue, 27 Sep 2022 14:10:27 -0400 Subject: [PATCH 05/16] Ran linter --- .../examples/sampleapp.py | 11 +- .../prometheus_remote_write/__init__.py | 123 +++---- .../gen/gogoproto/gogo_pb2.py | 323 +++++++++++++----- .../prometheus_remote_write/gen/remote_pb2.py | 63 ++-- .../prometheus_remote_write/gen/types_pb2.py | 111 +++--- .../tests/conftest.py | 37 +- .../test_prometheus_remote_write_exporter.py | 150 ++++---- 7 files changed, 517 insertions(+), 301 deletions(-) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py index 3a764c5683..120c8eee3a 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py 
+++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py @@ -7,15 +7,14 @@ import psutil from opentelemetry import metrics - +from opentelemetry.exporter.prometheus_remote_write import ( + PrometheusRemoteWriteMetricsExporter, +) from opentelemetry.metrics import ( Observation, get_meter_provider, set_meter_provider, ) -from opentelemetry.exporter.prometheus_remote_write import ( - PrometheusRemoteWriteMetricsExporter, -) from opentelemetry.sdk.metrics import MeterProvider from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader @@ -29,7 +28,7 @@ endpoint="http://cortex:9009/api/prom/push", headers={"X-Scope-Org-ID": "5"}, ) -reader = PeriodicExportingMetricReader(exporter,1000) +reader = PeriodicExportingMetricReader(exporter, 1000) provider = MeterProvider(metric_readers=[reader]) metrics.set_meter_provider(provider) meter = metrics.get_meter(__name__) @@ -98,7 +97,7 @@ def get_ram_usage_callback(observer): # updown counter requests_active.add(num % 7231 + 200, testing_labels) - request_latency.record(num % 92,testing_labels) + request_latency.record(num % 92, testing_labels) logger.log(level=INFO, msg="completed metrics collection cycle") time.sleep(1) num += 9791 diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py index 006ee1f5d7..210a1866f1 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -14,10 +14,10 @@ import logging import re -from typing import Dict, Sequence - from collections import defaultdict from itertools import chain +from typing import Dict, Sequence + import requests import snappy @@ -29,29 +29,30 
@@ Sample, TimeSeries, ) +from opentelemetry.sdk.metrics import Counter +from opentelemetry.sdk.metrics import Histogram as ClientHistogram +from opentelemetry.sdk.metrics import ( + ObservableCounter, + ObservableGauge, + ObservableUpDownCounter, + UpDownCounter, +) from opentelemetry.sdk.metrics.export import ( AggregationTemporality, Gauge, - Sum, Histogram, + Metric, MetricExporter, MetricExportResult, MetricsData, - Metric, -) -from opentelemetry.sdk.metrics import ( - Counter, - Histogram as ClientHistogram, - ObservableCounter, - ObservableGauge, - ObservableUpDownCounter, - UpDownCounter, + Sum, ) logger = logging.getLogger(__name__) -PROMETHEUS_NAME_REGEX = re.compile(r'[^\w:]') -PROMETHEUS_LABEL_REGEX = re.compile(r'[^\w]') +PROMETHEUS_NAME_REGEX = re.compile(r"[^\w:]") +PROMETHEUS_LABEL_REGEX = re.compile(r"[^\w]") + class PrometheusRemoteWriteMetricsExporter(MetricExporter): """ @@ -74,7 +75,7 @@ def __init__( timeout: int = 30, tls_config: Dict = None, proxies: Dict = None, - resources_as_labels : bool = True, + resources_as_labels: bool = True, preferred_temporality: Dict[type, AggregationTemporality] = None, preferred_aggregation: Dict = None, ): @@ -95,9 +96,8 @@ def __init__( ObservableUpDownCounter: AggregationTemporality.CUMULATIVE, ObservableGauge: AggregationTemporality.CUMULATIVE, } - logger.error("Calling MetricExporter") - super().__init__(preferred_temporality,preferred_aggregation) + super().__init__(preferred_temporality, preferred_aggregation) @property def endpoint(self): @@ -180,9 +180,9 @@ def headers(self, headers: Dict): def export( self, - metrics_data : MetricsData, + metrics_data: MetricsData, timeout_millis: float = 10_000, - ) ->MetricExportResult: + ) -> MetricExportResult: if not metrics_data: return MetricExportResult.SUCCESS timeseries = self._translate_data(metrics_data) @@ -203,121 +203,129 @@ def _translate_data(self, data: MetricsData) -> Sequence[TimeSeries]: # OTLP Data model suggests combining some attrs into 
job/instance # Should we do that here? if self.resources_as_labels: - resource_labels = [ (n,str(v)) for n,v in resource.attributes.items() ] + resource_labels = [ + (n, str(v)) for n, v in resource.attributes.items() + ] else: resource_labels = [] # Scope name/version probably not too useful from a labeling perspective for scope_metrics in resource_metrics.scope_metrics: for metric in scope_metrics.metrics: - rw_timeseries.extend( self._parse_metric(metric,resource_labels) ) + rw_timeseries.extend( + self._parse_metric(metric, resource_labels) + ) return rw_timeseries - def _parse_metric(self, metric: Metric, resource_labels: Sequence) -> Sequence[TimeSeries]: + def _parse_metric( + self, metric: Metric, resource_labels: Sequence + ) -> Sequence[TimeSeries]: """ Parses the Metric & lower objects, then converts the output into OM TimeSeries. Returns a List of TimeSeries objects based on one Metric """ - # Create the metric name, will be a label later if metric.unit: - #Prom. naming guidelines add unit to the name - name =f"{metric.name}_{metric.unit}" + # Prom. naming guidelines add unit to the name + name = f"{metric.name}_{metric.unit}" else: name = metric.name # datapoints have attributes associated with them. 
these would be sent # to RW as different metrics: name & labels is a unique time series sample_sets = defaultdict(list) - if isinstance(metric.data,(Gauge,Sum)): + if isinstance(metric.data, (Gauge, Sum)): for dp in metric.data.data_points: - attrs,sample = self._parse_data_point(dp,name) + attrs, sample = self._parse_data_point(dp, name) sample_sets[attrs].append(sample) - elif isinstance(metric.data,Histogram): + elif isinstance(metric.data, Histogram): for dp in metric.data.data_points: - dp_result = self._parse_histogram_data_point(dp,name) - for attrs,sample in dp_result: + dp_result = self._parse_histogram_data_point(dp, name) + for attrs, sample in dp_result: sample_sets[attrs].append(sample) else: - logger.warn("Unsupported Metric Type: %s",type(metric.data)) + logger.warn("Unsupported Metric Type: %s", type(metric.data)) return [] timeseries = [] for labels, samples in sample_sets.items(): ts = TimeSeries() - for label_name,label_value in chain(resource_labels,labels): + for label_name, label_value in chain(resource_labels, labels): # Previous implementation did not str() the names... 
- ts.labels.append(self._label(label_name,str(label_value))) - for value,timestamp in samples: - ts.samples.append(self._sample(value,timestamp)) + ts.labels.append(self._label(label_name, str(label_value))) + for value, timestamp in samples: + ts.samples.append(self._sample(value, timestamp)) timeseries.append(ts) return timeseries - def _sample(self,value: int,timestamp :int) -> Sample: + def _sample(self, value: int, timestamp: int) -> Sample: sample = Sample() sample.value = value sample.timestamp = timestamp return sample - def _label(self,name:str,value:str) -> Label: + def _label(self, name: str, value: str) -> Label: label = Label() - label.name = PROMETHEUS_LABEL_REGEX.sub("_",name) + label.name = PROMETHEUS_LABEL_REGEX.sub("_", name) label.value = value return label - def _sanitize_name(self,name): + def _sanitize_name(self, name): # I Think Prometheus requires names to NOT start with a number this # would not catch that, but do cover the other cases. The naming rules # don't explicit say this, but the supplied regex implies it. 
# Got a little weird trying to do substitution with it, but can be # fixed if we allow numeric beginnings to metric names - return PROMETHEUS_NAME_REGEX.sub("_",name) + return PROMETHEUS_NAME_REGEX.sub("_", name) def _parse_histogram_data_point(self, data_point, name): - #if (len(data_point.explicit_bounds)+1) != len(data_point.bucket_counts): + # if (len(data_point.explicit_bounds)+1) != len(data_point.bucket_counts): # raise ValueError("Number of buckets must be 1 more than the explicit bounds!") sample_attr_pairs = [] - base_attrs = [(n,v) for n,v in data_point.attributes.items()] + base_attrs = [(n, v) for n, v in data_point.attributes.items()] timestamp = data_point.time_unix_nano // 1_000_000 - - def handle_bucket(value,bound=None,name_override=None): + def handle_bucket(value, bound=None, name_override=None): # Metric Level attributes + the bucket boundry attribute + name ts_attrs = base_attrs.copy() - ts_attrs.append(("__name__",self._sanitize_name(name_override or name))) + ts_attrs.append( + ("__name__", self._sanitize_name(name_override or name)) + ) if bound: - ts_attrs.append(("le",str(bound))) + ts_attrs.append(("le", str(bound))) # Value is count of values in each bucket - ts_sample = (value,timestamp) + ts_sample = (value, timestamp) return tuple(ts_attrs), ts_sample - for bound_pos,bound in enumerate(data_point.explicit_bounds): + for bound_pos, bound in enumerate(data_point.explicit_bounds): sample_attr_pairs.append( - handle_bucket(data_point.bucket_counts[bound_pos],bound) + handle_bucket(data_point.bucket_counts[bound_pos], bound) ) # Add the last label for implicit +inf bucket sample_attr_pairs.append( - handle_bucket(data_point.bucket_counts[-1],bound="+Inf") + handle_bucket(data_point.bucket_counts[-1], bound="+Inf") ) - #Lastly, add series for count & sum + # Lastly, add series for count & sum sample_attr_pairs.append( - handle_bucket(data_point.sum,name_override=f"{name}_sum") + handle_bucket(data_point.sum, name_override=f"{name}_sum") ) 
sample_attr_pairs.append( - handle_bucket(data_point.count,name_override=f"{name}_count") + handle_bucket(data_point.count, name_override=f"{name}_count") ) return sample_attr_pairs - def _parse_data_point(self, data_point,name=None): + def _parse_data_point(self, data_point, name=None): - attrs = tuple(data_point.attributes.items()) + (("__name__",self._sanitize_name(name)),) - sample = (data_point.value,(data_point.time_unix_nano // 1_000_000)) - return attrs,sample + attrs = tuple(data_point.attributes.items()) + ( + ("__name__", self._sanitize_name(name)), + ) + sample = (data_point.value, (data_point.time_unix_nano // 1_000_000)) + return attrs, sample # pylint: disable=no-member,no-self-use def _build_message(self, timeseries: Sequence[TimeSeries]) -> bytes: @@ -383,4 +391,3 @@ def force_flush(self, timeout_millis: float = 10_000) -> bool: def shutdown(self) -> None: pass - diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py index 35e48b214a..d5cce2a857 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py @@ -6,96 +6,257 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 +from google.protobuf import ( + descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2, +) -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\nGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\x12\tgogoproto\x1a google/protobuf/descriptor.proto:;\n\x13goproto_enum_prefix\x12\x1c.google.protobuf.EnumOptions\x18\xb1\xe4\x03 \x01(\x08:=\n\x15goproto_enum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc5\xe4\x03 \x01(\x08:5\n\renum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc6\xe4\x03 \x01(\x08:7\n\x0f\x65num_customname\x12\x1c.google.protobuf.EnumOptions\x18\xc7\xe4\x03 \x01(\t:0\n\x08\x65numdecl\x12\x1c.google.protobuf.EnumOptions\x18\xc8\xe4\x03 \x01(\x08:A\n\x14\x65numvalue_customname\x12!.google.protobuf.EnumValueOptions\x18\xd1\x83\x04 \x01(\t:;\n\x13goproto_getters_all\x12\x1c.google.protobuf.FileOptions\x18\x99\xec\x03 \x01(\x08:?\n\x17goproto_enum_prefix_all\x12\x1c.google.protobuf.FileOptions\x18\x9a\xec\x03 \x01(\x08:<\n\x14goproto_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\x9b\xec\x03 \x01(\x08:9\n\x11verbose_equal_all\x12\x1c.google.protobuf.FileOptions\x18\x9c\xec\x03 \x01(\x08:0\n\x08\x66\x61\x63\x65_all\x12\x1c.google.protobuf.FileOptions\x18\x9d\xec\x03 \x01(\x08:4\n\x0cgostring_all\x12\x1c.google.protobuf.FileOptions\x18\x9e\xec\x03 \x01(\x08:4\n\x0cpopulate_all\x12\x1c.google.protobuf.FileOptions\x18\x9f\xec\x03 \x01(\x08:4\n\x0cstringer_all\x12\x1c.google.protobuf.FileOptions\x18\xa0\xec\x03 \x01(\x08:3\n\x0bonlyone_all\x12\x1c.google.protobuf.FileOptions\x18\xa1\xec\x03 \x01(\x08:1\n\tequal_all\x12\x1c.google.protobuf.FileOptions\x18\xa5\xec\x03 \x01(\x08:7\n\x0f\x64\x65scription_all\x12\x1c.google.protobuf.FileOptions\x18\xa6\xec\x03 \x01(\x08:3\n\x0btestgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa7\xec\x03 \x01(\x08:4\n\x0c\x62\x65nchgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa8\xec\x03 \x01(\x08:5\n\rmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xa9\xec\x03 \x01(\x08:7\n\x0funmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaa\xec\x03 
\x01(\x08:<\n\x14stable_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xab\xec\x03 \x01(\x08:1\n\tsizer_all\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\x03 \x01(\x08:A\n\x19goproto_enum_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xad\xec\x03 \x01(\x08:9\n\x11\x65num_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xae\xec\x03 \x01(\x08:<\n\x14unsafe_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaf\xec\x03 \x01(\x08:>\n\x16unsafe_unmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xb0\xec\x03 \x01(\x08:B\n\x1agoproto_extensions_map_all\x12\x1c.google.protobuf.FileOptions\x18\xb1\xec\x03 \x01(\x08:@\n\x18goproto_unrecognized_all\x12\x1c.google.protobuf.FileOptions\x18\xb2\xec\x03 \x01(\x08:8\n\x10gogoproto_import\x12\x1c.google.protobuf.FileOptions\x18\xb3\xec\x03 \x01(\x08:6\n\x0eprotosizer_all\x12\x1c.google.protobuf.FileOptions\x18\xb4\xec\x03 \x01(\x08:3\n\x0b\x63ompare_all\x12\x1c.google.protobuf.FileOptions\x18\xb5\xec\x03 \x01(\x08:4\n\x0ctypedecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb6\xec\x03 \x01(\x08:4\n\x0c\x65numdecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb7\xec\x03 \x01(\x08:<\n\x14goproto_registration\x12\x1c.google.protobuf.FileOptions\x18\xb8\xec\x03 \x01(\x08:7\n\x0fmessagename_all\x12\x1c.google.protobuf.FileOptions\x18\xb9\xec\x03 \x01(\x08:=\n\x15goproto_sizecache_all\x12\x1c.google.protobuf.FileOptions\x18\xba\xec\x03 \x01(\x08:;\n\x13goproto_unkeyed_all\x12\x1c.google.protobuf.FileOptions\x18\xbb\xec\x03 \x01(\x08::\n\x0fgoproto_getters\x12\x1f.google.protobuf.MessageOptions\x18\x81\xf4\x03 \x01(\x08:;\n\x10goproto_stringer\x12\x1f.google.protobuf.MessageOptions\x18\x83\xf4\x03 \x01(\x08:8\n\rverbose_equal\x12\x1f.google.protobuf.MessageOptions\x18\x84\xf4\x03 \x01(\x08:/\n\x04\x66\x61\x63\x65\x12\x1f.google.protobuf.MessageOptions\x18\x85\xf4\x03 \x01(\x08:3\n\x08gostring\x12\x1f.google.protobuf.MessageOptions\x18\x86\xf4\x03 
\x01(\x08:3\n\x08populate\x12\x1f.google.protobuf.MessageOptions\x18\x87\xf4\x03 \x01(\x08:3\n\x08stringer\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x8b\x04 \x01(\x08:2\n\x07onlyone\x12\x1f.google.protobuf.MessageOptions\x18\x89\xf4\x03 \x01(\x08:0\n\x05\x65qual\x12\x1f.google.protobuf.MessageOptions\x18\x8d\xf4\x03 \x01(\x08:6\n\x0b\x64\x65scription\x12\x1f.google.protobuf.MessageOptions\x18\x8e\xf4\x03 \x01(\x08:2\n\x07testgen\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xf4\x03 \x01(\x08:3\n\x08\x62\x65nchgen\x12\x1f.google.protobuf.MessageOptions\x18\x90\xf4\x03 \x01(\x08:4\n\tmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x91\xf4\x03 \x01(\x08:6\n\x0bunmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x92\xf4\x03 \x01(\x08:;\n\x10stable_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x93\xf4\x03 \x01(\x08:0\n\x05sizer\x12\x1f.google.protobuf.MessageOptions\x18\x94\xf4\x03 \x01(\x08:;\n\x10unsafe_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x97\xf4\x03 \x01(\x08:=\n\x12unsafe_unmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x98\xf4\x03 \x01(\x08:A\n\x16goproto_extensions_map\x12\x1f.google.protobuf.MessageOptions\x18\x99\xf4\x03 \x01(\x08:?\n\x14goproto_unrecognized\x12\x1f.google.protobuf.MessageOptions\x18\x9a\xf4\x03 \x01(\x08:5\n\nprotosizer\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xf4\x03 \x01(\x08:2\n\x07\x63ompare\x12\x1f.google.protobuf.MessageOptions\x18\x9d\xf4\x03 \x01(\x08:3\n\x08typedecl\x12\x1f.google.protobuf.MessageOptions\x18\x9e\xf4\x03 \x01(\x08:6\n\x0bmessagename\x12\x1f.google.protobuf.MessageOptions\x18\xa1\xf4\x03 \x01(\x08:<\n\x11goproto_sizecache\x12\x1f.google.protobuf.MessageOptions\x18\xa2\xf4\x03 \x01(\x08::\n\x0fgoproto_unkeyed\x12\x1f.google.protobuf.MessageOptions\x18\xa3\xf4\x03 \x01(\x08:1\n\x08nullable\x12\x1d.google.protobuf.FieldOptions\x18\xe9\xfb\x03 \x01(\x08:.\n\x05\x65mbed\x12\x1d.google.protobuf.FieldOptions\x18\xea\xfb\x03 
\x01(\x08:3\n\ncustomtype\x12\x1d.google.protobuf.FieldOptions\x18\xeb\xfb\x03 \x01(\t:3\n\ncustomname\x12\x1d.google.protobuf.FieldOptions\x18\xec\xfb\x03 \x01(\t:0\n\x07jsontag\x12\x1d.google.protobuf.FieldOptions\x18\xed\xfb\x03 \x01(\t:1\n\x08moretags\x12\x1d.google.protobuf.FieldOptions\x18\xee\xfb\x03 \x01(\t:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xef\xfb\x03 \x01(\t:0\n\x07\x63\x61stkey\x12\x1d.google.protobuf.FieldOptions\x18\xf0\xfb\x03 \x01(\t:2\n\tcastvalue\x12\x1d.google.protobuf.FieldOptions\x18\xf1\xfb\x03 \x01(\t:0\n\x07stdtime\x12\x1d.google.protobuf.FieldOptions\x18\xf2\xfb\x03 \x01(\x08:4\n\x0bstdduration\x12\x1d.google.protobuf.FieldOptions\x18\xf3\xfb\x03 \x01(\x08:3\n\nwktpointer\x12\x1d.google.protobuf.FieldOptions\x18\xf4\xfb\x03 \x01(\x08\x42\x45\n\x13\x63om.google.protobufB\nGoGoProtosZ\"github.com/gogo/protobuf/gogoproto') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\nGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\x12\tgogoproto\x1a google/protobuf/descriptor.proto:;\n\x13goproto_enum_prefix\x12\x1c.google.protobuf.EnumOptions\x18\xb1\xe4\x03 \x01(\x08:=\n\x15goproto_enum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc5\xe4\x03 \x01(\x08:5\n\renum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc6\xe4\x03 \x01(\x08:7\n\x0f\x65num_customname\x12\x1c.google.protobuf.EnumOptions\x18\xc7\xe4\x03 \x01(\t:0\n\x08\x65numdecl\x12\x1c.google.protobuf.EnumOptions\x18\xc8\xe4\x03 \x01(\x08:A\n\x14\x65numvalue_customname\x12!.google.protobuf.EnumValueOptions\x18\xd1\x83\x04 \x01(\t:;\n\x13goproto_getters_all\x12\x1c.google.protobuf.FileOptions\x18\x99\xec\x03 \x01(\x08:?\n\x17goproto_enum_prefix_all\x12\x1c.google.protobuf.FileOptions\x18\x9a\xec\x03 \x01(\x08:<\n\x14goproto_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\x9b\xec\x03 \x01(\x08:9\n\x11verbose_equal_all\x12\x1c.google.protobuf.FileOptions\x18\x9c\xec\x03 
\x01(\x08:0\n\x08\x66\x61\x63\x65_all\x12\x1c.google.protobuf.FileOptions\x18\x9d\xec\x03 \x01(\x08:4\n\x0cgostring_all\x12\x1c.google.protobuf.FileOptions\x18\x9e\xec\x03 \x01(\x08:4\n\x0cpopulate_all\x12\x1c.google.protobuf.FileOptions\x18\x9f\xec\x03 \x01(\x08:4\n\x0cstringer_all\x12\x1c.google.protobuf.FileOptions\x18\xa0\xec\x03 \x01(\x08:3\n\x0bonlyone_all\x12\x1c.google.protobuf.FileOptions\x18\xa1\xec\x03 \x01(\x08:1\n\tequal_all\x12\x1c.google.protobuf.FileOptions\x18\xa5\xec\x03 \x01(\x08:7\n\x0f\x64\x65scription_all\x12\x1c.google.protobuf.FileOptions\x18\xa6\xec\x03 \x01(\x08:3\n\x0btestgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa7\xec\x03 \x01(\x08:4\n\x0c\x62\x65nchgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa8\xec\x03 \x01(\x08:5\n\rmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xa9\xec\x03 \x01(\x08:7\n\x0funmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaa\xec\x03 \x01(\x08:<\n\x14stable_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xab\xec\x03 \x01(\x08:1\n\tsizer_all\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\x03 \x01(\x08:A\n\x19goproto_enum_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xad\xec\x03 \x01(\x08:9\n\x11\x65num_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xae\xec\x03 \x01(\x08:<\n\x14unsafe_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaf\xec\x03 \x01(\x08:>\n\x16unsafe_unmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xb0\xec\x03 \x01(\x08:B\n\x1agoproto_extensions_map_all\x12\x1c.google.protobuf.FileOptions\x18\xb1\xec\x03 \x01(\x08:@\n\x18goproto_unrecognized_all\x12\x1c.google.protobuf.FileOptions\x18\xb2\xec\x03 \x01(\x08:8\n\x10gogoproto_import\x12\x1c.google.protobuf.FileOptions\x18\xb3\xec\x03 \x01(\x08:6\n\x0eprotosizer_all\x12\x1c.google.protobuf.FileOptions\x18\xb4\xec\x03 \x01(\x08:3\n\x0b\x63ompare_all\x12\x1c.google.protobuf.FileOptions\x18\xb5\xec\x03 \x01(\x08:4\n\x0ctypedecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb6\xec\x03 
\x01(\x08:4\n\x0c\x65numdecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb7\xec\x03 \x01(\x08:<\n\x14goproto_registration\x12\x1c.google.protobuf.FileOptions\x18\xb8\xec\x03 \x01(\x08:7\n\x0fmessagename_all\x12\x1c.google.protobuf.FileOptions\x18\xb9\xec\x03 \x01(\x08:=\n\x15goproto_sizecache_all\x12\x1c.google.protobuf.FileOptions\x18\xba\xec\x03 \x01(\x08:;\n\x13goproto_unkeyed_all\x12\x1c.google.protobuf.FileOptions\x18\xbb\xec\x03 \x01(\x08::\n\x0fgoproto_getters\x12\x1f.google.protobuf.MessageOptions\x18\x81\xf4\x03 \x01(\x08:;\n\x10goproto_stringer\x12\x1f.google.protobuf.MessageOptions\x18\x83\xf4\x03 \x01(\x08:8\n\rverbose_equal\x12\x1f.google.protobuf.MessageOptions\x18\x84\xf4\x03 \x01(\x08:/\n\x04\x66\x61\x63\x65\x12\x1f.google.protobuf.MessageOptions\x18\x85\xf4\x03 \x01(\x08:3\n\x08gostring\x12\x1f.google.protobuf.MessageOptions\x18\x86\xf4\x03 \x01(\x08:3\n\x08populate\x12\x1f.google.protobuf.MessageOptions\x18\x87\xf4\x03 \x01(\x08:3\n\x08stringer\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x8b\x04 \x01(\x08:2\n\x07onlyone\x12\x1f.google.protobuf.MessageOptions\x18\x89\xf4\x03 \x01(\x08:0\n\x05\x65qual\x12\x1f.google.protobuf.MessageOptions\x18\x8d\xf4\x03 \x01(\x08:6\n\x0b\x64\x65scription\x12\x1f.google.protobuf.MessageOptions\x18\x8e\xf4\x03 \x01(\x08:2\n\x07testgen\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xf4\x03 \x01(\x08:3\n\x08\x62\x65nchgen\x12\x1f.google.protobuf.MessageOptions\x18\x90\xf4\x03 \x01(\x08:4\n\tmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x91\xf4\x03 \x01(\x08:6\n\x0bunmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x92\xf4\x03 \x01(\x08:;\n\x10stable_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x93\xf4\x03 \x01(\x08:0\n\x05sizer\x12\x1f.google.protobuf.MessageOptions\x18\x94\xf4\x03 \x01(\x08:;\n\x10unsafe_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x97\xf4\x03 \x01(\x08:=\n\x12unsafe_unmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x98\xf4\x03 
\x01(\x08:A\n\x16goproto_extensions_map\x12\x1f.google.protobuf.MessageOptions\x18\x99\xf4\x03 \x01(\x08:?\n\x14goproto_unrecognized\x12\x1f.google.protobuf.MessageOptions\x18\x9a\xf4\x03 \x01(\x08:5\n\nprotosizer\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xf4\x03 \x01(\x08:2\n\x07\x63ompare\x12\x1f.google.protobuf.MessageOptions\x18\x9d\xf4\x03 \x01(\x08:3\n\x08typedecl\x12\x1f.google.protobuf.MessageOptions\x18\x9e\xf4\x03 \x01(\x08:6\n\x0bmessagename\x12\x1f.google.protobuf.MessageOptions\x18\xa1\xf4\x03 \x01(\x08:<\n\x11goproto_sizecache\x12\x1f.google.protobuf.MessageOptions\x18\xa2\xf4\x03 \x01(\x08::\n\x0fgoproto_unkeyed\x12\x1f.google.protobuf.MessageOptions\x18\xa3\xf4\x03 \x01(\x08:1\n\x08nullable\x12\x1d.google.protobuf.FieldOptions\x18\xe9\xfb\x03 \x01(\x08:.\n\x05\x65mbed\x12\x1d.google.protobuf.FieldOptions\x18\xea\xfb\x03 \x01(\x08:3\n\ncustomtype\x12\x1d.google.protobuf.FieldOptions\x18\xeb\xfb\x03 \x01(\t:3\n\ncustomname\x12\x1d.google.protobuf.FieldOptions\x18\xec\xfb\x03 \x01(\t:0\n\x07jsontag\x12\x1d.google.protobuf.FieldOptions\x18\xed\xfb\x03 \x01(\t:1\n\x08moretags\x12\x1d.google.protobuf.FieldOptions\x18\xee\xfb\x03 \x01(\t:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xef\xfb\x03 \x01(\t:0\n\x07\x63\x61stkey\x12\x1d.google.protobuf.FieldOptions\x18\xf0\xfb\x03 \x01(\t:2\n\tcastvalue\x12\x1d.google.protobuf.FieldOptions\x18\xf1\xfb\x03 \x01(\t:0\n\x07stdtime\x12\x1d.google.protobuf.FieldOptions\x18\xf2\xfb\x03 \x01(\x08:4\n\x0bstdduration\x12\x1d.google.protobuf.FieldOptions\x18\xf3\xfb\x03 \x01(\x08:3\n\nwktpointer\x12\x1d.google.protobuf.FieldOptions\x18\xf4\xfb\x03 \x01(\x08\x42\x45\n\x13\x63om.google.protobufB\nGoGoProtosZ"github.com/gogo/protobuf/gogoproto' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.gogoproto.gogo_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + 
"opentelemetry.exporter.prometheus_remote_write.gen.gogoproto.gogo_pb2", + globals(), +) if _descriptor._USE_C_DESCRIPTORS == False: - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_prefix) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_stringer) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_stringer) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_customname) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enumdecl) - google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(enumvalue_customname) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_getters_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_prefix_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_stringer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(verbose_equal_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(face_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gostring_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(populate_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stringer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(onlyone_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(equal_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(description_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(testgen_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(benchgen_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(marshaler_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unmarshaler_all) - 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stable_marshaler_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(sizer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_stringer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enum_stringer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_marshaler_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_unmarshaler_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_extensions_map_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unrecognized_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gogoproto_import) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(protosizer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(compare_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(typedecl_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enumdecl_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_registration) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(messagename_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_sizecache_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unkeyed_all) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_getters) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_stringer) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(verbose_equal) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(face) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(gostring) 
- google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(populate) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stringer) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(onlyone) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(equal) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(description) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(testgen) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(benchgen) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(marshaler) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unmarshaler) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stable_marshaler) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sizer) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_marshaler) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_unmarshaler) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_extensions_map) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unrecognized) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(protosizer) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(compare) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(typedecl) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(messagename) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_sizecache) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unkeyed) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(nullable) - 
google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(embed) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customtype) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customname) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(jsontag) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(moretags) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(casttype) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castkey) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castvalue) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdtime) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdduration) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(wktpointer) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + goproto_enum_prefix + ) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + goproto_enum_stringer + ) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + enum_stringer + ) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + enum_customname + ) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + enumdecl + ) + google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension( + enumvalue_customname + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_getters_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_enum_prefix_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_stringer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + verbose_equal_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + face_all + ) + 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + gostring_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + populate_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + stringer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + onlyone_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + equal_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + description_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + testgen_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + benchgen_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + marshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + unmarshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + stable_marshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + sizer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_enum_stringer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + enum_stringer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + unsafe_marshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + unsafe_unmarshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_extensions_map_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_unrecognized_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + gogoproto_import + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + protosizer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + compare_all 
+ ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + typedecl_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + enumdecl_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_registration + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + messagename_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_sizecache_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_unkeyed_all + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_getters + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_stringer + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + verbose_equal + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + face + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + gostring + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + populate + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + stringer + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + onlyone + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + equal + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + description + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + testgen + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + benchgen + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + marshaler + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + unmarshaler + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + stable_marshaler + ) + 
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + sizer + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + unsafe_marshaler + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + unsafe_unmarshaler + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_extensions_map + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_unrecognized + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + protosizer + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + compare + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + typedecl + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + messagename + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_sizecache + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_unkeyed + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + nullable + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + embed + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + customtype + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + customname + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + jsontag + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + moretags + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + casttype + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + castkey + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + castvalue + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + stdtime + ) + 
google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + stdduration + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + wktpointer + ) - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nGoGoProtosZ\"github.com/gogo/protobuf/gogoproto' + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nGoGoProtosZ"github.com/gogo/protobuf/gogoproto' # @@protoc_insertion_point(module_scope) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py index a274dbf204..09d13a7a09 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py @@ -6,39 +6,54 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.exporter.prometheus_remote_write.gen import types_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_types__pb2 -from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2 +from opentelemetry.exporter.prometheus_remote_write.gen import ( + types_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_types__pb2, +) +from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import ( + gogo_pb2 as 
opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2, +) -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n?opentelemetry/exporter/prometheus_remote_write/gen/remote.proto\x12\nprometheus\x1a>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\"z\n\x0cWriteRequest\x12\x30\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeriesB\x04\xc8\xde\x1f\x00\x12\x32\n\x08metadata\x18\x03 \x03(\x0b\x32\x1a.prometheus.MetricMetadataB\x04\xc8\xde\x1f\x00J\x04\x08\x02\x10\x03\"\xae\x01\n\x0bReadRequest\x12\"\n\x07queries\x18\x01 \x03(\x0b\x32\x11.prometheus.Query\x12\x45\n\x17\x61\x63\x63\x65pted_response_types\x18\x02 \x03(\x0e\x32$.prometheus.ReadRequest.ResponseType\"4\n\x0cResponseType\x12\x0b\n\x07SAMPLES\x10\x00\x12\x17\n\x13STREAMED_XOR_CHUNKS\x10\x01\"8\n\x0cReadResponse\x12(\n\x07results\x18\x01 \x03(\x0b\x32\x17.prometheus.QueryResult\"\x8f\x01\n\x05Query\x12\x1a\n\x12start_timestamp_ms\x18\x01 \x01(\x03\x12\x18\n\x10\x65nd_timestamp_ms\x18\x02 \x01(\x03\x12*\n\x08matchers\x18\x03 \x03(\x0b\x32\x18.prometheus.LabelMatcher\x12$\n\x05hints\x18\x04 \x01(\x0b\x32\x15.prometheus.ReadHints\"9\n\x0bQueryResult\x12*\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeries\"]\n\x13\x43hunkedReadResponse\x12\x31\n\x0e\x63hunked_series\x18\x01 \x03(\x0b\x32\x19.prometheus.ChunkedSeries\x12\x13\n\x0bquery_index\x18\x02 \x01(\x03\x42\x08Z\x06prompbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n?opentelemetry/exporter/prometheus_remote_write/gen/remote.proto\x12\nprometheus\x1a>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"z\n\x0cWriteRequest\x12\x30\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeriesB\x04\xc8\xde\x1f\x00\x12\x32\n\x08metadata\x18\x03 
\x03(\x0b\x32\x1a.prometheus.MetricMetadataB\x04\xc8\xde\x1f\x00J\x04\x08\x02\x10\x03"\xae\x01\n\x0bReadRequest\x12"\n\x07queries\x18\x01 \x03(\x0b\x32\x11.prometheus.Query\x12\x45\n\x17\x61\x63\x63\x65pted_response_types\x18\x02 \x03(\x0e\x32$.prometheus.ReadRequest.ResponseType"4\n\x0cResponseType\x12\x0b\n\x07SAMPLES\x10\x00\x12\x17\n\x13STREAMED_XOR_CHUNKS\x10\x01"8\n\x0cReadResponse\x12(\n\x07results\x18\x01 \x03(\x0b\x32\x17.prometheus.QueryResult"\x8f\x01\n\x05Query\x12\x1a\n\x12start_timestamp_ms\x18\x01 \x01(\x03\x12\x18\n\x10\x65nd_timestamp_ms\x18\x02 \x01(\x03\x12*\n\x08matchers\x18\x03 \x03(\x0b\x32\x18.prometheus.LabelMatcher\x12$\n\x05hints\x18\x04 \x01(\x0b\x32\x15.prometheus.ReadHints"9\n\x0bQueryResult\x12*\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeries"]\n\x13\x43hunkedReadResponse\x12\x31\n\x0e\x63hunked_series\x18\x01 \x03(\x0b\x32\x19.prometheus.ChunkedSeries\x12\x13\n\x0bquery_index\x18\x02 \x01(\x03\x42\x08Z\x06prompbb\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2", + globals(), +) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'Z\006prompb' - _WRITEREQUEST.fields_by_name['timeseries']._options = None - _WRITEREQUEST.fields_by_name['timeseries']._serialized_options = b'\310\336\037\000' - _WRITEREQUEST.fields_by_name['metadata']._options = None - _WRITEREQUEST.fields_by_name['metadata']._serialized_options = b'\310\336\037\000' - _WRITEREQUEST._serialized_start=216 - _WRITEREQUEST._serialized_end=338 - _READREQUEST._serialized_start=341 - _READREQUEST._serialized_end=515 - _READREQUEST_RESPONSETYPE._serialized_start=463 - _READREQUEST_RESPONSETYPE._serialized_end=515 - 
_READRESPONSE._serialized_start=517 - _READRESPONSE._serialized_end=573 - _QUERY._serialized_start=576 - _QUERY._serialized_end=719 - _QUERYRESULT._serialized_start=721 - _QUERYRESULT._serialized_end=778 - _CHUNKEDREADRESPONSE._serialized_start=780 - _CHUNKEDREADRESPONSE._serialized_end=873 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"Z\006prompb" + _WRITEREQUEST.fields_by_name["timeseries"]._options = None + _WRITEREQUEST.fields_by_name[ + "timeseries" + ]._serialized_options = b"\310\336\037\000" + _WRITEREQUEST.fields_by_name["metadata"]._options = None + _WRITEREQUEST.fields_by_name[ + "metadata" + ]._serialized_options = b"\310\336\037\000" + _WRITEREQUEST._serialized_start = 216 + _WRITEREQUEST._serialized_end = 338 + _READREQUEST._serialized_start = 341 + _READREQUEST._serialized_end = 515 + _READREQUEST_RESPONSETYPE._serialized_start = 463 + _READREQUEST_RESPONSETYPE._serialized_end = 515 + _READRESPONSE._serialized_start = 517 + _READRESPONSE._serialized_end = 573 + _QUERY._serialized_start = 576 + _QUERY._serialized_end = 719 + _QUERYRESULT._serialized_start = 721 + _QUERYRESULT._serialized_end = 778 + _CHUNKEDREADRESPONSE._serialized_start = 780 + _CHUNKEDREADRESPONSE._serialized_end = 873 # @@protoc_insertion_point(module_scope) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py index d519e03423..a58e0194ee 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py @@ -6,60 +6,81 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf 
import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2 +from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import ( + gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2, +) -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x12\nprometheus\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\"\xf8\x01\n\x0eMetricMetadata\x12\x33\n\x04type\x18\x01 \x01(\x0e\x32%.prometheus.MetricMetadata.MetricType\x12\x1a\n\x12metric_family_name\x18\x02 \x01(\t\x12\x0c\n\x04help\x18\x04 \x01(\t\x12\x0c\n\x04unit\x18\x05 \x01(\t\"y\n\nMetricType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\t\n\x05GAUGE\x10\x02\x12\r\n\tHISTOGRAM\x10\x03\x12\x12\n\x0eGAUGEHISTOGRAM\x10\x04\x12\x0b\n\x07SUMMARY\x10\x05\x12\x08\n\x04INFO\x10\x06\x12\x0c\n\x08STATESET\x10\x07\"*\n\x06Sample\x12\r\n\x05value\x18\x01 \x01(\x01\x12\x11\n\ttimestamp\x18\x02 \x01(\x03\"U\n\x08\x45xemplar\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\r\n\x05value\x18\x02 \x01(\x01\x12\x11\n\ttimestamp\x18\x03 \x01(\x03\"\x8f\x01\n\nTimeSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12)\n\x07samples\x18\x02 \x03(\x0b\x32\x12.prometheus.SampleB\x04\xc8\xde\x1f\x00\x12-\n\texemplars\x18\x03 \x03(\x0b\x32\x14.prometheus.ExemplarB\x04\xc8\xde\x1f\x00\"$\n\x05Label\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"1\n\x06Labels\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\"\x82\x01\n\x0cLabelMatcher\x12+\n\x04type\x18\x01 
\x01(\x0e\x32\x1d.prometheus.LabelMatcher.Type\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\"(\n\x04Type\x12\x06\n\x02\x45Q\x10\x00\x12\x07\n\x03NEQ\x10\x01\x12\x06\n\x02RE\x10\x02\x12\x07\n\x03NRE\x10\x03\"|\n\tReadHints\x12\x0f\n\x07step_ms\x18\x01 \x01(\x03\x12\x0c\n\x04\x66unc\x18\x02 \x01(\t\x12\x10\n\x08start_ms\x18\x03 \x01(\x03\x12\x0e\n\x06\x65nd_ms\x18\x04 \x01(\x03\x12\x10\n\x08grouping\x18\x05 \x03(\t\x12\n\n\x02\x62y\x18\x06 \x01(\x08\x12\x10\n\x08range_ms\x18\x07 \x01(\x03\"\x8b\x01\n\x05\x43hunk\x12\x13\n\x0bmin_time_ms\x18\x01 \x01(\x03\x12\x13\n\x0bmax_time_ms\x18\x02 \x01(\x03\x12(\n\x04type\x18\x03 \x01(\x0e\x32\x1a.prometheus.Chunk.Encoding\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\" \n\x08\x45ncoding\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03XOR\x10\x01\"a\n\rChunkedSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\'\n\x06\x63hunks\x18\x02 \x03(\x0b\x32\x11.prometheus.ChunkB\x04\xc8\xde\x1f\x00\x42\x08Z\x06prompbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x12\nprometheus\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"\xf8\x01\n\x0eMetricMetadata\x12\x33\n\x04type\x18\x01 \x01(\x0e\x32%.prometheus.MetricMetadata.MetricType\x12\x1a\n\x12metric_family_name\x18\x02 \x01(\t\x12\x0c\n\x04help\x18\x04 \x01(\t\x12\x0c\n\x04unit\x18\x05 \x01(\t"y\n\nMetricType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\t\n\x05GAUGE\x10\x02\x12\r\n\tHISTOGRAM\x10\x03\x12\x12\n\x0eGAUGEHISTOGRAM\x10\x04\x12\x0b\n\x07SUMMARY\x10\x05\x12\x08\n\x04INFO\x10\x06\x12\x0c\n\x08STATESET\x10\x07"*\n\x06Sample\x12\r\n\x05value\x18\x01 \x01(\x01\x12\x11\n\ttimestamp\x18\x02 \x01(\x03"U\n\x08\x45xemplar\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\r\n\x05value\x18\x02 \x01(\x01\x12\x11\n\ttimestamp\x18\x03 
\x01(\x03"\x8f\x01\n\nTimeSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12)\n\x07samples\x18\x02 \x03(\x0b\x32\x12.prometheus.SampleB\x04\xc8\xde\x1f\x00\x12-\n\texemplars\x18\x03 \x03(\x0b\x32\x14.prometheus.ExemplarB\x04\xc8\xde\x1f\x00"$\n\x05Label\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"1\n\x06Labels\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00"\x82\x01\n\x0cLabelMatcher\x12+\n\x04type\x18\x01 \x01(\x0e\x32\x1d.prometheus.LabelMatcher.Type\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t"(\n\x04Type\x12\x06\n\x02\x45Q\x10\x00\x12\x07\n\x03NEQ\x10\x01\x12\x06\n\x02RE\x10\x02\x12\x07\n\x03NRE\x10\x03"|\n\tReadHints\x12\x0f\n\x07step_ms\x18\x01 \x01(\x03\x12\x0c\n\x04\x66unc\x18\x02 \x01(\t\x12\x10\n\x08start_ms\x18\x03 \x01(\x03\x12\x0e\n\x06\x65nd_ms\x18\x04 \x01(\x03\x12\x10\n\x08grouping\x18\x05 \x03(\t\x12\n\n\x02\x62y\x18\x06 \x01(\x08\x12\x10\n\x08range_ms\x18\x07 \x01(\x03"\x8b\x01\n\x05\x43hunk\x12\x13\n\x0bmin_time_ms\x18\x01 \x01(\x03\x12\x13\n\x0bmax_time_ms\x18\x02 \x01(\x03\x12(\n\x04type\x18\x03 \x01(\x0e\x32\x1a.prometheus.Chunk.Encoding\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c" \n\x08\x45ncoding\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03XOR\x10\x01"a\n\rChunkedSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\'\n\x06\x63hunks\x18\x02 \x03(\x0b\x32\x11.prometheus.ChunkB\x04\xc8\xde\x1f\x00\x42\x08Z\x06prompbb\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.types_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "opentelemetry.exporter.prometheus_remote_write.gen.types_pb2", + globals(), +) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'Z\006prompb' - 
_EXEMPLAR.fields_by_name['labels']._options = None - _EXEMPLAR.fields_by_name['labels']._serialized_options = b'\310\336\037\000' - _TIMESERIES.fields_by_name['labels']._options = None - _TIMESERIES.fields_by_name['labels']._serialized_options = b'\310\336\037\000' - _TIMESERIES.fields_by_name['samples']._options = None - _TIMESERIES.fields_by_name['samples']._serialized_options = b'\310\336\037\000' - _TIMESERIES.fields_by_name['exemplars']._options = None - _TIMESERIES.fields_by_name['exemplars']._serialized_options = b'\310\336\037\000' - _LABELS.fields_by_name['labels']._options = None - _LABELS.fields_by_name['labels']._serialized_options = b'\310\336\037\000' - _CHUNKEDSERIES.fields_by_name['labels']._options = None - _CHUNKEDSERIES.fields_by_name['labels']._serialized_options = b'\310\336\037\000' - _CHUNKEDSERIES.fields_by_name['chunks']._options = None - _CHUNKEDSERIES.fields_by_name['chunks']._serialized_options = b'\310\336\037\000' - _METRICMETADATA._serialized_start=152 - _METRICMETADATA._serialized_end=400 - _METRICMETADATA_METRICTYPE._serialized_start=279 - _METRICMETADATA_METRICTYPE._serialized_end=400 - _SAMPLE._serialized_start=402 - _SAMPLE._serialized_end=444 - _EXEMPLAR._serialized_start=446 - _EXEMPLAR._serialized_end=531 - _TIMESERIES._serialized_start=534 - _TIMESERIES._serialized_end=677 - _LABEL._serialized_start=679 - _LABEL._serialized_end=715 - _LABELS._serialized_start=717 - _LABELS._serialized_end=766 - _LABELMATCHER._serialized_start=769 - _LABELMATCHER._serialized_end=899 - _LABELMATCHER_TYPE._serialized_start=859 - _LABELMATCHER_TYPE._serialized_end=899 - _READHINTS._serialized_start=901 - _READHINTS._serialized_end=1025 - _CHUNK._serialized_start=1028 - _CHUNK._serialized_end=1167 - _CHUNK_ENCODING._serialized_start=1135 - _CHUNK_ENCODING._serialized_end=1167 - _CHUNKEDSERIES._serialized_start=1169 - _CHUNKEDSERIES._serialized_end=1266 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"Z\006prompb" + 
_EXEMPLAR.fields_by_name["labels"]._options = None + _EXEMPLAR.fields_by_name[ + "labels" + ]._serialized_options = b"\310\336\037\000" + _TIMESERIES.fields_by_name["labels"]._options = None + _TIMESERIES.fields_by_name[ + "labels" + ]._serialized_options = b"\310\336\037\000" + _TIMESERIES.fields_by_name["samples"]._options = None + _TIMESERIES.fields_by_name[ + "samples" + ]._serialized_options = b"\310\336\037\000" + _TIMESERIES.fields_by_name["exemplars"]._options = None + _TIMESERIES.fields_by_name[ + "exemplars" + ]._serialized_options = b"\310\336\037\000" + _LABELS.fields_by_name["labels"]._options = None + _LABELS.fields_by_name["labels"]._serialized_options = b"\310\336\037\000" + _CHUNKEDSERIES.fields_by_name["labels"]._options = None + _CHUNKEDSERIES.fields_by_name[ + "labels" + ]._serialized_options = b"\310\336\037\000" + _CHUNKEDSERIES.fields_by_name["chunks"]._options = None + _CHUNKEDSERIES.fields_by_name[ + "chunks" + ]._serialized_options = b"\310\336\037\000" + _METRICMETADATA._serialized_start = 152 + _METRICMETADATA._serialized_end = 400 + _METRICMETADATA_METRICTYPE._serialized_start = 279 + _METRICMETADATA_METRICTYPE._serialized_end = 400 + _SAMPLE._serialized_start = 402 + _SAMPLE._serialized_end = 444 + _EXEMPLAR._serialized_start = 446 + _EXEMPLAR._serialized_end = 531 + _TIMESERIES._serialized_start = 534 + _TIMESERIES._serialized_end = 677 + _LABEL._serialized_start = 679 + _LABEL._serialized_end = 715 + _LABELS._serialized_start = 717 + _LABELS._serialized_end = 766 + _LABELMATCHER._serialized_start = 769 + _LABELMATCHER._serialized_end = 899 + _LABELMATCHER_TYPE._serialized_start = 859 + _LABELMATCHER_TYPE._serialized_end = 899 + _READHINTS._serialized_start = 901 + _READHINTS._serialized_end = 1025 + _CHUNK._serialized_start = 1028 + _CHUNK._serialized_end = 1167 + _CHUNK_ENCODING._serialized_start = 1135 + _CHUNK_ENCODING._serialized_end = 1167 + _CHUNKEDSERIES._serialized_start = 1169 + _CHUNKEDSERIES._serialized_end = 1266 # 
@@protoc_insertion_point(module_scope) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py index d9ef67fd8d..fe0a3c8af8 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py @@ -1,44 +1,50 @@ - - import random -import pytest -import opentelemetry.test.metrictestutil as metric_util#import _generate_gauge, _generate_sum +import pytest +import opentelemetry.test.metrictestutil as metric_util # import _generate_gauge, _generate_sum +from opentelemetry.exporter.prometheus_remote_write import ( + PrometheusRemoteWriteMetricsExporter, +) from opentelemetry.sdk.metrics.export import ( AggregationTemporality, + Gauge, Histogram, HistogramDataPoint, - Sum, - Gauge, + Metric, MetricExportResult, MetricsData, ResourceMetrics, ScopeMetrics, - Metric, + Sum, ) -from opentelemetry.exporter.prometheus_remote_write import ( - PrometheusRemoteWriteMetricsExporter, -) + @pytest.fixture def prom_rw(): - return PrometheusRemoteWriteMetricsExporter("http://victoria:8428/api/v1/write") + return PrometheusRemoteWriteMetricsExporter( + "http://victoria:8428/api/v1/write" + ) @pytest.fixture def metric(request): - if hasattr(request,"param"): + if hasattr(request, "param"): type_ = request.param else: - type_ = random.choice(["gauge","sum"]) + type_ = random.choice(["gauge", "sum"]) if type_ == "gauge": - return metric_util._generate_gauge("test.gauge",random.randint(0,100)) + return metric_util._generate_gauge( + "test.gauge", random.randint(0, 100) + ) elif type_ == "sum": - return metric_util._generate_sum("test.sum",random.randint(0,9_999_999_999)) + return metric_util._generate_sum( + "test.sum", random.randint(0, 9_999_999_999) + ) elif type_ == "histogram": return _generate_histogram("test_histogram") + def _generate_histogram(name): dp = HistogramDataPoint( 
attributes={"foo": "bar", "baz": 42}, @@ -61,4 +67,3 @@ def _generate_histogram(name): "tu", data=data, ) - diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py index 3ebcb58708..77b742ce4b 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py @@ -14,70 +14,69 @@ import unittest from unittest.mock import patch + +import pytest import snappy from opentelemetry.exporter.prometheus_remote_write import ( - PrometheusRemoteWriteMetricsExporter, PROMETHEUS_LABEL_REGEX, PROMETHEUS_NAME_REGEX, + PrometheusRemoteWriteMetricsExporter, ) from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( Label, TimeSeries, ) from opentelemetry.sdk.metrics import Counter -#from opentelemetry.sdk.metrics.export import MetricExportResult -#from opentelemetry.sdk.metrics.export.aggregate import ( -# HistogramAggregator, -# LastValueAggregator, -# MinMaxSumCountAggregator, -# SumAggregator, -# ValueObserverAggregator, -#) - from opentelemetry.sdk.metrics.export import ( - NumberDataPoint, - HistogramDataPoint, Histogram, + HistogramDataPoint, + MetricExportResult, MetricsData, - ScopeMetrics, + NumberDataPoint, ResourceMetrics, - MetricExportResult, + ScopeMetrics, ) - -from opentelemetry.sdk.util.instrumentation import InstrumentationScope from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.util import get_dict_as_key +from opentelemetry.sdk.util.instrumentation import InstrumentationScope + +# from opentelemetry.sdk.metrics.export import MetricExportResult +# from opentelemetry.sdk.metrics.export.aggregate import ( +# HistogramAggregator, +# LastValueAggregator, +# MinMaxSumCountAggregator, +# SumAggregator, +# 
ValueObserverAggregator, +# ) -import pytest -@pytest.mark.parametrize("name,result",[ - ("abc.124","abc_124"), - (":abc", ":abc"), - ("abc.name.hi","abc_name_hi"), - ("service.name...","service_name___"), -]) -def test_name_regex(name,result,prom_rw): +@pytest.mark.parametrize( + "name,result", + [ + ("abc.124", "abc_124"), + (":abc", ":abc"), + ("abc.name.hi", "abc_name_hi"), + ("service.name...", "service_name___"), + ], +) +def test_name_regex(name, result, prom_rw): assert prom_rw._sanitize_name(name) == result def test_parse_data_point(prom_rw): - attrs = {"Foo" : "Bar","Baz" : 42} + attrs = {"Foo": "Bar", "Baz": 42} timestamp = 1641946016139533244 value = 242.42 - dp = NumberDataPoint( - attrs, - 0, - timestamp, - value - ) + dp = NumberDataPoint(attrs, 0, timestamp, value) name = "abc.123_42" - labels, sample = prom_rw._parse_data_point(dp,name) + labels, sample = prom_rw._parse_data_point(dp, name) name = "abc_123_42" - assert labels == (("Foo", "Bar"),("Baz", 42),("__name__",name)) - assert sample == (value,timestamp // 1_000_000) + assert labels == (("Foo", "Bar"), ("Baz", 42), ("__name__", name)) + assert sample == (value, timestamp // 1_000_000) + def test_parse_histogram_dp(prom_rw): attrs = {"foo": "bar", "baz": 42} @@ -95,40 +94,47 @@ def test_parse_histogram_dp(prom_rw): max=80, ) name = "foo_histogram" - label_sample_pairs = prom_rw._parse_histogram_data_point(dp,name) + label_sample_pairs = prom_rw._parse_histogram_data_point(dp, name) timestamp = timestamp // 1_000_000 bounds.append("+Inf") - for pos,bound in enumerate(bounds): + for pos, bound in enumerate(bounds): # We have to attributes, we kinda assume the bucket label is last... 
- assert ("le",str(bound)) == label_sample_pairs[pos][0][-1] + assert ("le", str(bound)) == label_sample_pairs[pos][0][-1] # Check and make sure we are putting the bucket counts in there - assert (dp.bucket_counts[pos],timestamp) == label_sample_pairs[pos][1] + assert (dp.bucket_counts[pos], timestamp) == label_sample_pairs[pos][1] # Last two are the sum & total count - pos +=1 - assert ("__name__",f"{name}_sum") in label_sample_pairs[pos][0] - assert (dp.sum,timestamp) == label_sample_pairs[pos][1] - - pos +=1 - assert ("__name__",f"{name}_count") in label_sample_pairs[pos][0] - assert (dp.count,timestamp) == label_sample_pairs[pos][1] - -@pytest.mark.parametrize("metric",[ - "gauge", - "sum", - "histogram", -],indirect=["metric"]) -def test_parse_metric(metric,prom_rw): + pos += 1 + assert ("__name__", f"{name}_sum") in label_sample_pairs[pos][0] + assert (dp.sum, timestamp) == label_sample_pairs[pos][1] + + pos += 1 + assert ("__name__", f"{name}_count") in label_sample_pairs[pos][0] + assert (dp.count, timestamp) == label_sample_pairs[pos][1] + + +@pytest.mark.parametrize( + "metric", + [ + "gauge", + "sum", + "histogram", + ], + indirect=["metric"], +) +def test_parse_metric(metric, prom_rw): """ Ensures output from parse_metrics are TimeSeries with expected data/size """ attributes = { - "service_name" : "foo", - "bool_value" : True, + "service_name": "foo", + "bool_value": True, } - assert len(metric.data.data_points) == 1, "We can only support a single datapoint in tests" - series = prom_rw._parse_metric(metric,tuple(attributes.items())) + assert ( + len(metric.data.data_points) == 1 + ), "We can only support a single datapoint in tests" + series = prom_rw._parse_metric(metric, tuple(attributes.items())) timestamp = metric.data.data_points[0].time_unix_nano // 1_000_000 for single_series in series: labels = str(single_series.labels) @@ -137,12 +143,14 @@ def test_parse_metric(metric,prom_rw): # This doesn't guarantee the labels aren't mixed up, but our 
other # test cases already do. assert "__name__" in labels - assert PROMETHEUS_NAME_REGEX.sub("_",metric.name) in labels - combined_attrs = list(attributes.items()) + list(metric.data.data_points[0].attributes.items()) - for name,value in combined_attrs: - assert PROMETHEUS_LABEL_REGEX.sub("_",name) in labels + assert PROMETHEUS_NAME_REGEX.sub("_", metric.name) in labels + combined_attrs = list(attributes.items()) + list( + metric.data.data_points[0].attributes.items() + ) + for name, value in combined_attrs: + assert PROMETHEUS_LABEL_REGEX.sub("_", name) in labels assert str(value) in labels - if isinstance(metric.data,Histogram): + if isinstance(metric.data, Histogram): values = [ metric.data.data_points[0].count, metric.data.data_points[0].sum, @@ -251,24 +259,19 @@ def test_invalid_tls_config_key_only_param(self): ) - # Ensures export is successful with valid export_records and config @patch("requests.post") -def test_valid_export(mock_post,prom_rw,metric): +def test_valid_export(mock_post, prom_rw, metric): metric = metric mock_post.return_value.configure_mock(**{"status_code": 200}) labels = get_dict_as_key({"environment": "testing"}) # Assumed a "None" for Scope or Resource aren't valid, so build them here scope = ScopeMetrics( - InstrumentationScope(name="prom-rw-test"), - [metric], - None + InstrumentationScope(name="prom-rw-test"), [metric], None ) resource = ResourceMetrics( - Resource({"service.name" : "foo"}), - [scope], - None + Resource({"service.name": "foo"}), [scope], None ) record = MetricsData([resource]) @@ -279,30 +282,35 @@ def test_valid_export(mock_post,prom_rw,metric): result = prom_rw.export([]) assert result == MetricExportResult.SUCCESS + def test_invalid_export(prom_rw): record = MetricsData([]) result = prom_rw.export(record) assert result == MetricExportResult.FAILURE + @patch("requests.post") -def test_valid_send_message(mock_post,prom_rw): +def test_valid_send_message(mock_post, prom_rw): 
mock_post.return_value.configure_mock(**{"ok": True}) result = prom_rw._send_message(bytes(), {}) assert mock_post.call_count == 1 assert result == MetricExportResult.SUCCESS + def test_invalid_send_message(prom_rw): result = prom_rw._send_message(bytes(), {}) assert result == MetricExportResult.FAILURE + # Verifies that build_message calls snappy.compress and returns SerializedString @patch("snappy.compress", return_value=bytes()) -def test_build_message(mock_compress,prom_rw): +def test_build_message(mock_compress, prom_rw): message = prom_rw._build_message([TimeSeries()]) assert mock_compress.call_count == 1 assert isinstance(message, bytes) + # Ensure correct headers are added when valid config is provided def test_build_headers(prom_rw): prom_rw.headers = {"Custom Header": "test_header"} From 5c2ad66f77f35609f0fc61f8ef4e3f914ec799e8 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Tue, 27 Sep 2022 14:14:12 -0400 Subject: [PATCH 06/16] Version update this would be released as part of 1.13.0 --- .../opentelemetry/exporter/prometheus_remote_write/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py index ebb75f6c11..b9536c2461 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "0.18.dev0" +__version__ = "1.13.0" From 24836336b45c9c5b5918b10b8531501a94c2e2b4 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Fri, 30 Sep 2022 09:50:33 -0400 Subject: [PATCH 07/16] Update setup.cfg to use >=3.7 instead of 3.8 --- .../opentelemetry-exporter-prometheus-remote-write/setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg index 40fe91b4fb..8a28aefa47 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg @@ -34,7 +34,7 @@ classifiers = Programming Language :: Python :: 3.8 [options] -python_requires = >=3.8 +python_requires = >=3.7 package_dir= =src packages=find_namespace: From 11b9e8f63acd25b183c502727f2eff40a7e6a759 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Tue, 4 Oct 2022 11:16:10 -0400 Subject: [PATCH 08/16] Address automated checks - Fix flake8 issues - Correct spelling mistakes (except false positive 'ro' in docker compose) - Updated Changelog. 
Not sure if this is worthy --- .flake8 | 1 + CHANGELOG.md | 3 ++- .../README.rst | 4 ++-- .../examples/README.md | 6 +++--- .../examples/sampleapp.py | 7 ++----- .../exporter/prometheus_remote_write/__init__.py | 2 +- .../tests/conftest.py | 6 ------ .../tests/test_prometheus_remote_write_exporter.py | 14 -------------- 8 files changed, 11 insertions(+), 32 deletions(-) diff --git a/.flake8 b/.flake8 index dd55df608a..84b003d4b0 100644 --- a/.flake8 +++ b/.flake8 @@ -16,6 +16,7 @@ exclude = target __pycache__ exporter/opentelemetry-exporter-jaeger/src/opentelemetry/exporter/jaeger/gen/ + exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/ exporter/opentelemetry-exporter-jaeger/build/* docs/examples/opentelemetry-example-app/src/opentelemetry_example_app/grpc/gen/ docs/examples/opentelemetry-example-app/build/* diff --git a/CHANGELOG.md b/CHANGELOG.md index c2ee7393e1..20e6c3d751 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,7 +33,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ([#1253](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1253)) - Add metric instrumentation in starlette ([#1327](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1327)) - +- Add metric exporter for Prometheus Remote Write + ([#1359](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1359)) ### Fixed diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst b/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst index 1f5dc01404..8c93044b38 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst @@ -95,7 +95,7 @@ This example uses `Docker Compose`_ to set up: 1. A Python program that creates 5 instruments with 5 unique aggregators and a randomized load generator -2. 
An instance of `Cortex`_ to recieve the metrics data +2. An instance of `Cortex`_ to receive the metrics data 3. An instance of `Grafana`_ to visualizse the exported data Requirements @@ -143,7 +143,7 @@ attach themselves to the service’s logs manually using - Set up auto-refresh by selecting an option under the dropdown next to the refresh button on the upper right side of the graph **(Optional)** - - Click the refresh button and data should show up on hte graph + - Click the refresh button and data should show up on the graph 6. Shutdown the services when finished diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md index 91f7ead578..72c60015c4 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md @@ -3,7 +3,7 @@ This example uses [Docker Compose](https://docs.docker.com/compose/) to set up: 1. A Python program that creates 5 instruments with 5 unique aggregators and a randomized load generator -2. An instance of [Cortex](https://cortexmetrics.io/) to recieve the metrics +2. An instance of [Cortex](https://cortexmetrics.io/) to receive the metrics data 3. An instance of [Grafana](https://grafana.com/) to visualizse the exported data @@ -36,7 +36,7 @@ terminal session. This also causes no logs to show up. Users can attach themselv * click `Metrics` for a dropdown list of all the available metrics * (OPTIONAL) Adjust time range by clicking the `Last 6 hours` button on the upper right side of the graph * (OPTIONAL) Set up auto-refresh by selecting an option under the dropdown next to the refresh button on the upper right side of the graph - * Click the refresh button and data should show up on hte graph + * Click the refresh button and data should show up on the graph 6. 
Shutdown the services when finished - * Run `docker-compose down` in the examples directory \ No newline at end of file + * Run `docker-compose down` in the examples directory diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py index 120c8eee3a..d7957bc8d6 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py @@ -10,11 +10,7 @@ from opentelemetry.exporter.prometheus_remote_write import ( PrometheusRemoteWriteMetricsExporter, ) -from opentelemetry.metrics import ( - Observation, - get_meter_provider, - set_meter_provider, -) +from opentelemetry.metrics import Observation from opentelemetry.sdk.metrics import MeterProvider from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader @@ -33,6 +29,7 @@ metrics.set_meter_provider(provider) meter = metrics.get_meter(__name__) + # Callback to gather cpu usage def get_cpu_usage_callback(observer): for (number, percent) in enumerate(psutil.cpu_percent(percpu=True)): diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py index 210a1866f1..811fe90ff6 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -289,7 +289,7 @@ def _parse_histogram_data_point(self, data_point, name): timestamp = data_point.time_unix_nano // 1_000_000 def handle_bucket(value, bound=None, name_override=None): - # Metric Level attributes + the bucket boundry attribute + name + # Metric Level attributes + the 
bucket boundary attribute + name ts_attrs = base_attrs.copy() ts_attrs.append( ("__name__", self._sanitize_name(name_override or name)) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py index fe0a3c8af8..aa7114306f 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py @@ -8,15 +8,9 @@ ) from opentelemetry.sdk.metrics.export import ( AggregationTemporality, - Gauge, Histogram, HistogramDataPoint, Metric, - MetricExportResult, - MetricsData, - ResourceMetrics, - ScopeMetrics, - Sum, ) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py index 77b742ce4b..1f61d30e43 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py @@ -16,7 +16,6 @@ from unittest.mock import patch import pytest -import snappy from opentelemetry.exporter.prometheus_remote_write import ( PROMETHEUS_LABEL_REGEX, @@ -24,10 +23,8 @@ PrometheusRemoteWriteMetricsExporter, ) from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( - Label, TimeSeries, ) -from opentelemetry.sdk.metrics import Counter from opentelemetry.sdk.metrics.export import ( Histogram, HistogramDataPoint, @@ -38,18 +35,8 @@ ScopeMetrics, ) from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.util import get_dict_as_key from opentelemetry.sdk.util.instrumentation import InstrumentationScope -# from opentelemetry.sdk.metrics.export import MetricExportResult -# from opentelemetry.sdk.metrics.export.aggregate import ( -# HistogramAggregator, -# 
LastValueAggregator, -# MinMaxSumCountAggregator, -# SumAggregator, -# ValueObserverAggregator, -# ) - @pytest.mark.parametrize( "name,result", @@ -264,7 +251,6 @@ def test_invalid_tls_config_key_only_param(self): def test_valid_export(mock_post, prom_rw, metric): metric = metric mock_post.return_value.configure_mock(**{"status_code": 200}) - labels = get_dict_as_key({"environment": "testing"}) # Assumed a "None" for Scope or Resource aren't valid, so build them here scope = ScopeMetrics( From d134a5271b90b91ad3c7d4954712315886fd1a0f Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Wed, 12 Oct 2022 09:45:18 -0400 Subject: [PATCH 09/16] Updates from review - Update to pyproject.toml & float deps - Update README.rst - Updates to automate pulling & generating protobuf files - Add README for protobuf generation --- .../README.rst | 319 +---------------- .../proto/README.md | 3 + .../proto/generate-proto-py.sh | 48 ++- .../pyproject.toml | 51 +++ .../setup.cfg | 49 --- .../setup.py | 32 -- .../gen/gogoproto/gogo_pb2.py | 323 +++++------------- .../prometheus_remote_write/gen/remote_pb2.py | 63 ++-- .../prometheus_remote_write/gen/types_pb2.py | 111 +++--- 9 files changed, 265 insertions(+), 734 deletions(-) create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/proto/README.md create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/pyproject.toml delete mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg delete mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/setup.py diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst b/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst index 8c93044b38..b38dc8efdb 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst @@ -1,322 +1,31 @@ OpenTelemetry Prometheus Remote Write Exporter 
-========================================================= +============================================== + +|pypi| + +.. |pypi| image:: https://badge.fury.io/py/opentelemetry-exporter-prometheus-remote-write.svg + :target: https://pypi.org/project/opentelemetry-exporter-prometheus-remote-write/ This package contains an exporter to send `OTLP`_ metrics from the `OpenTelemetry Python SDK`_ directly to a `Prometheus Remote Write integrated backend`_ (such as Cortex or Thanos) without having to run an instance of the -Prometheus server. The latest `types.proto`_ and `remote.proto`_ -protocol buffers are used to create the WriteRequest. The image below shows the -two Prometheus exporters in the OpenTelemetry Python SDK. - -Pipeline 1 illustrates the setup required for a `Prometheus "pull" exporter`_. - -Pipeline 2 illustrates the setup required for the Prometheus Remote -Write exporter. - -|Prometheus SDK pipelines| - -The Prometheus Remote Write Exporter is a "push" based exporter and only -works with the OpenTelemetry `push controller`_. The controller -periodically collects data and passes it to the exporter. This exporter -then converts the data into `timeseries`_ and sends it to the Remote -Write integrated backend through HTTP POST requests. The metrics -collection datapath is shown below: - - -See the ``examples`` folder for a demo usage of this exporter - -Table of Contents -================= - -- `Summary`_ -- `Table of Contents`_ +Prometheus server. - - `Installation`_ - - `Quickstart`_ - - `Examples`_ - - `Configuring the Exporter`_ - - `Securing the Exporter`_ - - - `Authentication`_ - - `TLS`_ - - - `Supported Aggregators`_ - - `Error Handling`_ - - `Contributing`_ - - - `Design Doc`_ Installation ------------ -Prerequisites -~~~~~~~~~~~~~ -1. 
Install the snappy c-library - **DEB**: ``sudo apt-get install libsnappy-dev`` - - **RPM**: ``sudo yum install libsnappy-devel`` - - **OSX/Brew**: ``brew install snappy`` - - **Windows**: ``pip install python_snappy-0.5-cp36-cp36m-win_amd64.whl`` - -Exporter -~~~~~~~~ - -- To install from the latest PyPi release, run - ``pip install opentelemetry-exporter-prometheus-remote-write`` - - -Quickstart ----------- - -.. code:: python - - from opentelemetry import metrics - from opentelemetry.sdk.metrics import MeterProvider - from opentelemetry.exporter.prometheus_remote_write import ( - PrometheusRemoteWriteMetricsExporter - ) - - # Sets the global MeterProvider instance - metrics.set_meter_provider(MeterProvider()) - - # The Meter is responsible for creating and recording metrics. Each meter has a unique name, which we set as the module's name here. - meter = metrics.get_meter(__name__) - - exporter = PrometheusRemoteWriteMetricsExporter(endpoint="endpoint_here") # add other params as needed - - metrics.get_meter_provider().start_pipeline(meter, exporter, 5) - - -Examples --------- - -This example uses `Docker Compose`_ to set up: - -1. A Python program that creates 5 instruments with 5 unique aggregators - and a randomized load generator -2. An instance of `Cortex`_ to receive the metrics data -3. An instance of `Grafana`_ to visualizse the exported data - -Requirements -~~~~~~~~~~~~ - -- Have Docker Compose `installed`_ - -*Users do not need to install Python as the app will be run in the -Docker Container* - -Instructions -~~~~~~~~~~~~ - -1. Run ``docker-compose up -d`` in the the ``examples/`` directory - -The ``-d`` flag causes all services to run in detached mode and frees up -your terminal session. This also causes no logs to show up. Users can -attach themselves to the service’s logs manually using -``docker logs ${CONTAINER_ID} --follow`` - -2. 
Log into the Grafana instance at http://localhost:3000 - - - login credentials are ``username: admin`` and ``password: admin`` - - There may be an additional screen on setting a new password. This - can be skipped and is optional - -3. Navigate to the ``Data Sources`` page - - - Look for a gear icon on the left sidebar and select - ``Data Sources`` - -4. Add a new Prometheus Data Source - - - Use ``http://cortex:9009/api/prom`` as the URL - - Set the scrape interval to ``2s`` to make updates - appear quickly **(Optional)** - - click ``Save & Test`` - -5. Go to ``Metrics Explore`` to query metrics - - - Look for a compass icon on the left sidebar - - click ``Metrics`` for a dropdown list of all the available metrics - - Adjust time range by clicking the ``Last 6 hours`` - button on the upper right side of the graph **(Optional)** - - Set up auto-refresh by selecting an option under the - dropdown next to the refresh button on the upper right side of the - graph **(Optional)** - - Click the refresh button and data should show up on the graph - -6. Shutdown the services when finished - - - Run ``docker-compose down`` in the examples directory - -Configuring the Exporter ------------------------- - -The exporter can be configured through parameters passed to the -constructor. Here are all the options: - -- ``endpoint``: url where data will be sent **(Required)** -- ``basic_auth``: username and password for authentication - **(Optional)** -- ``headers``: additional headers for remote write request as - determined by the remote write backend's API **(Optional)** -- ``timeout``: timeout for requests to the remote write endpoint in - seconds **(Optional)** -- ``proxies``: dict mapping request proxy protocols to proxy urls - **(Optional)** -- ``tls_config``: configuration for remote write TLS settings - **(Optional)** - -Example with all the configuration options: - -.. 
code:: python - - exporter = PrometheusRemoteWriteMetricsExporter( - endpoint="http://localhost:9009/api/prom/push", - timeout=30, - basic_auth={ - "username": "user", - "password": "pass123", - }, - headers={ - "X-Scope-Org-ID": "5", - "Authorization": "Bearer mytoken123", - }, - proxies={ - "http": "http://10.10.1.10:3000", - "https": "http://10.10.1.10:1080", - }, - tls_config={ - "cert_file": "path/to/file", - "key_file": "path/to/file", - "ca_file": "path_to_file", - "insecure_skip_verify": true, # for developing purposes - } - ) - -Securing the Exporter ---------------------- - -Authentication -~~~~~~~~~~~~~~ - -The exporter provides two forms of authentication which are shown below. -Users can add their own custom authentication by setting the appropriate -values in the ``headers`` dictionary - -1. Basic Authentication Basic authentication sets a HTTP Authorization - header containing a base64 encoded username/password pair. See `RFC - 7617`_ for more information. This - -.. code:: python - - exporter = PrometheusRemoteWriteMetricsExporter( - basic_auth={"username": "base64user", "password": "base64pass"} - ) - -2. Bearer Token Authentication This custom configuration can be achieved - by passing in a custom ``header`` to the constructor. See `RFC 6750`_ - for more information. - -.. code:: python - - header = { - "Authorization": "Bearer mytoken123" - } - -TLS -~~~ - -Users can add TLS to the exporter's HTTP Client by providing certificate -and key files in the ``tls_config`` parameter. - -Supported Aggregators ---------------------- -Behaviour of these aggregators is outlined in the `OpenTelemetry Specification `_. -All aggregators are converted into the `timeseries`_ data format. However, method in -which they are converted `differs `_ from aggregator to aggregator. A -map of the conversion methods can be found `here `_. 
- -+------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ -| **OpenTelemetry Aggregator** | **Equivalent Prometheus Data Type** | **Behaviour** | -+------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ -| Sum | Counter | Metric value can only go up or be reset to 0 | -+------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ -| MinMaxSumCount | Gauge | Metric value can arbitrarily increment or decrement | -+------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ -| Histogram | Histogram | Unlike the Prometheus histogram, the OpenTelemetry Histogram does not provide a sum of all observed values | -+------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ -| LastValue | N/A | Metric only contains the most recently observed value | -+------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ -| ValueObserver | N/A | Similar to MinMaxSumCount but also contains LastValue | -+------------------------------+-------------------------------------+------------------------------------------------------------------------------------------------------------+ - - -Error Handling --------------- - -In general, errors are raised by the calling function. The exception is -for failed requests where any error status code is logged as a warning -instead. 
- -This is because the exporter does not implement any retry logic as data that -failed to export will be dropped. - -For example, consider a situation where a user increments a Counter -instrument 5 times and an export happens between each increment. If the -exports happen like so: - -:: - - SUCCESS FAIL FAIL SUCCESS SUCCESS - 1 2 3 4 5 - -Then the received data will be: :: - 1 4 5 + pip install opentelemetry-exporter-prometheus-remote-write -Contributing ------------- - -If you would like to learn more about the exporter's structure and -design decisions please view the design document below -Design Doc -~~~~~~~~~~ +.. _OpenTelemetry: https://github.com/open-telemetry/opentelemetry-python/ +.. _Prometheus Remote Write integrated backend: https://prometheus.io/docs/operating/integrations/ -`Design Document`_ -This document is stored elsewhere as it contains large images which will -significantly increase the size of this repo. +References +---------- -.. _Summary: #opentelemetry-python-sdk-prometheus-remote-write-exporter -.. _Table of Contents: #table-of-contents -.. _Installation: #installation -.. _Quickstart: #quickstart -.. _Examples: #examples -.. _Configuring the Exporter: #configuring-the-exporter -.. _Securing the Exporter: #securing-the-exporter -.. _Authentication: #authentication -.. _TLS: #tls -.. _Supported Aggregators: #supported-aggregators -.. _Error Handling: #error-handling -.. _Contributing: #contributing -.. _Design Doc: #design-doc -.. |Prometheus SDK pipelines| image:: https://user-images.githubusercontent.com/20804975/100285430-e320fd80-2f3e-11eb-8217-a562c559153c.png -.. _RFC 7617: https://tools.ietf.org/html/rfc7617 -.. _RFC 6750: https://tools.ietf.org/html/rfc6750 -.. _Design Document: https://github.com/open-o11y/docs/blob/master/python-prometheus-remote-write/design-doc.md -.. _OTLP: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/otlp.md -.. 
_OpenTelemetry Python SDK: https://github.com/open-telemetry/opentelemetry-python -.. _Prometheus "pull" exporter: https://github.com/open-telemetry/opentelemetry-python/tree/main/exporter/opentelemetry-exporter-prometheus -.. _Prometheus Remote Write integrated backend: https://prometheus.io/docs/operating/integrations/ -.. _types.proto: https://github.com/prometheus/prometheus/blob/master/prompb/types.proto -.. _remote.proto: https://github.com/prometheus/prometheus/blob/master/prompb/remote.proto -.. _push controller: https://github.com/open-telemetry/opentelemetry-python/blob/main/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py#L22 -.. _timeseries: https://prometheus.io/docs/concepts/data_model/ -.. _Docker Compose: https://docs.docker.com/compose/ -.. _Cortex: https://cortexmetrics.io/ -.. _Grafana: https://grafana.com/ -.. _installed: https://docs.docker.com/compose/install/ +* `OpenTelemetry Project `_ +* `Prometheus Remote Write Integration `_ diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/README.md b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/README.md new file mode 100644 index 0000000000..23fdaa392d --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/README.md @@ -0,0 +1,3 @@ +## Instructions +1. Install protobuf tools. Can use your package manager or download from [GitHub](https://github.com/protocolbuffers/protobuf/releases/tag/v21.7) +2. 
Run `generate-proto-py.sh` from inside the `proto/` directory diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh index 97ffa8084c..a444ddecce 100755 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh @@ -1,6 +1,52 @@ #!/bin/bash -# Used libprotoc 3.21.1 +PROM_VERSION=v2.39.0 +PROTO_VERSION=v1.3.2 + +# SRC_DIR is from protoc perspective. ie its the destination for our checkouts/clones SRC_DIR=opentelemetry/exporter/prometheus_remote_write/gen/ DST_DIR=../src/opentelemetry/exporter/prometheus_remote_write/gen/ + +echo "Creating our destination directory" +mkdir -p ${SRC_DIR}/gogoproto + +# Clone prometheus +echo "Grabbing Prometheus protobuf files" +git clone --filter=blob:none --sparse https://github.com/prometheus/prometheus.git +cd prometheus +git checkout ${PROM_VERSION} +git sparse-checkout set prompb +cd .. + + +# We also need gogo.proto which is in the protobuf Repo +# Could also try to pull this locally from the install location of protobuf +# but that will be harder in a platform agnostic way. +echo "Grabbing gogo.proto" +git clone --filter=blob:none --sparse https://github.com/gogo/protobuf.git +cd protobuf +git checkout ${PROTO_VERSION} +git sparse-checkout set /gogoproto/gogo.proto +cd .. + +# Move the proto files into our structure +echo "Moving proto files to ${SRC_DIR}" +cp prometheus/prompb/remote.proto prometheus/prompb/types.proto ${SRC_DIR} +cp protobuf/gogoproto/gogo.proto ${SRC_DIR}/gogoproto/ + + +# A bit of a hack, but we need to fix the imports to fit the python structure. 
+# using sed to find the 3 files and point them at each other using OUR structure +echo "Fixing imports" +sed -i 's/import "types.proto";/import "opentelemetry\/exporter\/prometheus_remote_write\/gen\/types.proto";/' ${SRC_DIR}/remote.proto +sed -i 's/import "gogoproto\/gogo.proto";/import "opentelemetry\/exporter\/prometheus_remote_write\/gen\/gogoproto\/gogo.proto";/' ${SRC_DIR}/remote.proto +sed -i 's/import "gogoproto\/gogo.proto";/import "opentelemetry\/exporter\/prometheus_remote_write\/gen\/gogoproto\/gogo.proto";/' ${SRC_DIR}/types.proto + + +# Cleanup the repos +echo "Removing clones..." +rm -rf protobuf prometheus + +# Used libprotoc 3.21.1 & protoc 21.7 +echo "Compiling proto files to Python" protoc -I . --python_out=../src ${SRC_DIR}/gogoproto/gogo.proto ${SRC_DIR}/remote.proto ${SRC_DIR}/types.proto diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/pyproject.toml b/exporter/opentelemetry-exporter-prometheus-remote-write/pyproject.toml new file mode 100644 index 0000000000..a9db5c2e08 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/pyproject.toml @@ -0,0 +1,51 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + + +[project] +name = "opentelemetry-exporter-prometheus-remote-write" +dynamic = ["version"] +description = "Prometheus Remote Write Metrics Exporter for OpenTelemetry" +readme = "README.rst" +license = "Apache-2.0" +requires-python = ">=3.7" +authors = [ + { name = "OpenTelemetry Authors", email = "cncf-opentelemetry-contributors@lists.cncf.io" }, +] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", +] +dependencies = [ + 
"protobuf ~= 4.21", + "requests ~= 2.28", + "opentelemetry-api ~= 1.13.0", + "opentelemetry-sdk ~= 1.13.0", + "python-snappy ~= 0.6", +] + +[project.optional-dependencies] +test = [] + +[project.urls] +Homepage = "https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/exporter/opentelemetry-exporter-prometheus-remote-write" + +[tool.hatch.version] +path = "src/opentelemetry/exporter/prometheus_remote_write/version.py" + +[tool.hatch.build.targets.sdist] +include = [ + "/src", + "/tests", +] + +[tool.hatch.build.targets.wheel] +packages = ["src/opentelemetry"] diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg deleted file mode 100644 index 8a28aefa47..0000000000 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright The OpenTelemetry Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -[metadata] -name = opentelemetry-exporter-prometheus-remote-write -description = Prometheus Remote Write Metrics Exporter for OpenTelemetry -long_description = file: README.rst -long_description_content_type = text/x-rst -author = OpenTelemetry Authors -author_email = cncf-opentelemetry-contributors@lists.cncf.io -url = https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/exporter/opentelemetry-exporter-prometheus-remote-write -platforms = any -license = Apache-2.0 -classifiers = - Development Status :: 4 - Beta - Intended Audience :: Developers - License :: OSI Approved :: Apache Software License - Programming Language :: Python - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.5 - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 - -[options] -python_requires = >=3.7 -package_dir= - =src -packages=find_namespace: -install_requires = - protobuf ~= 4.21 - requests ~= 2.28 - opentelemetry-api == 1.12.0 - opentelemetry-sdk == 1.12.0 - python-snappy ~= 0.6 - -[options.packages.find] -where = src diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.py b/exporter/opentelemetry-exporter-prometheus-remote-write/setup.py deleted file mode 100644 index b2d9a5a47c..0000000000 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/setup.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright The OpenTelemetry Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import setuptools - -BASE_DIR = os.path.dirname(__file__) -VERSION_FILENAME = os.path.join( - BASE_DIR, - "src", - "opentelemetry", - "exporter", - "prometheus_remote_write", - "version.py", -) -PACKAGE_INFO = {} -with open(VERSION_FILENAME) as f: - exec(f.read(), PACKAGE_INFO) - -setuptools.setup(version=PACKAGE_INFO["__version__"]) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py index d5cce2a857..35e48b214a 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py @@ -6,257 +6,96 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from google.protobuf import ( - descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2, -) +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\nGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\x12\tgogoproto\x1a google/protobuf/descriptor.proto:;\n\x13goproto_enum_prefix\x12\x1c.google.protobuf.EnumOptions\x18\xb1\xe4\x03 \x01(\x08:=\n\x15goproto_enum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc5\xe4\x03 \x01(\x08:5\n\renum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc6\xe4\x03 
\x01(\x08:7\n\x0f\x65num_customname\x12\x1c.google.protobuf.EnumOptions\x18\xc7\xe4\x03 \x01(\t:0\n\x08\x65numdecl\x12\x1c.google.protobuf.EnumOptions\x18\xc8\xe4\x03 \x01(\x08:A\n\x14\x65numvalue_customname\x12!.google.protobuf.EnumValueOptions\x18\xd1\x83\x04 \x01(\t:;\n\x13goproto_getters_all\x12\x1c.google.protobuf.FileOptions\x18\x99\xec\x03 \x01(\x08:?\n\x17goproto_enum_prefix_all\x12\x1c.google.protobuf.FileOptions\x18\x9a\xec\x03 \x01(\x08:<\n\x14goproto_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\x9b\xec\x03 \x01(\x08:9\n\x11verbose_equal_all\x12\x1c.google.protobuf.FileOptions\x18\x9c\xec\x03 \x01(\x08:0\n\x08\x66\x61\x63\x65_all\x12\x1c.google.protobuf.FileOptions\x18\x9d\xec\x03 \x01(\x08:4\n\x0cgostring_all\x12\x1c.google.protobuf.FileOptions\x18\x9e\xec\x03 \x01(\x08:4\n\x0cpopulate_all\x12\x1c.google.protobuf.FileOptions\x18\x9f\xec\x03 \x01(\x08:4\n\x0cstringer_all\x12\x1c.google.protobuf.FileOptions\x18\xa0\xec\x03 \x01(\x08:3\n\x0bonlyone_all\x12\x1c.google.protobuf.FileOptions\x18\xa1\xec\x03 \x01(\x08:1\n\tequal_all\x12\x1c.google.protobuf.FileOptions\x18\xa5\xec\x03 \x01(\x08:7\n\x0f\x64\x65scription_all\x12\x1c.google.protobuf.FileOptions\x18\xa6\xec\x03 \x01(\x08:3\n\x0btestgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa7\xec\x03 \x01(\x08:4\n\x0c\x62\x65nchgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa8\xec\x03 \x01(\x08:5\n\rmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xa9\xec\x03 \x01(\x08:7\n\x0funmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaa\xec\x03 \x01(\x08:<\n\x14stable_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xab\xec\x03 \x01(\x08:1\n\tsizer_all\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\x03 \x01(\x08:A\n\x19goproto_enum_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xad\xec\x03 \x01(\x08:9\n\x11\x65num_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xae\xec\x03 \x01(\x08:<\n\x14unsafe_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaf\xec\x03 
\x01(\x08:>\n\x16unsafe_unmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xb0\xec\x03 \x01(\x08:B\n\x1agoproto_extensions_map_all\x12\x1c.google.protobuf.FileOptions\x18\xb1\xec\x03 \x01(\x08:@\n\x18goproto_unrecognized_all\x12\x1c.google.protobuf.FileOptions\x18\xb2\xec\x03 \x01(\x08:8\n\x10gogoproto_import\x12\x1c.google.protobuf.FileOptions\x18\xb3\xec\x03 \x01(\x08:6\n\x0eprotosizer_all\x12\x1c.google.protobuf.FileOptions\x18\xb4\xec\x03 \x01(\x08:3\n\x0b\x63ompare_all\x12\x1c.google.protobuf.FileOptions\x18\xb5\xec\x03 \x01(\x08:4\n\x0ctypedecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb6\xec\x03 \x01(\x08:4\n\x0c\x65numdecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb7\xec\x03 \x01(\x08:<\n\x14goproto_registration\x12\x1c.google.protobuf.FileOptions\x18\xb8\xec\x03 \x01(\x08:7\n\x0fmessagename_all\x12\x1c.google.protobuf.FileOptions\x18\xb9\xec\x03 \x01(\x08:=\n\x15goproto_sizecache_all\x12\x1c.google.protobuf.FileOptions\x18\xba\xec\x03 \x01(\x08:;\n\x13goproto_unkeyed_all\x12\x1c.google.protobuf.FileOptions\x18\xbb\xec\x03 \x01(\x08::\n\x0fgoproto_getters\x12\x1f.google.protobuf.MessageOptions\x18\x81\xf4\x03 \x01(\x08:;\n\x10goproto_stringer\x12\x1f.google.protobuf.MessageOptions\x18\x83\xf4\x03 \x01(\x08:8\n\rverbose_equal\x12\x1f.google.protobuf.MessageOptions\x18\x84\xf4\x03 \x01(\x08:/\n\x04\x66\x61\x63\x65\x12\x1f.google.protobuf.MessageOptions\x18\x85\xf4\x03 \x01(\x08:3\n\x08gostring\x12\x1f.google.protobuf.MessageOptions\x18\x86\xf4\x03 \x01(\x08:3\n\x08populate\x12\x1f.google.protobuf.MessageOptions\x18\x87\xf4\x03 \x01(\x08:3\n\x08stringer\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x8b\x04 \x01(\x08:2\n\x07onlyone\x12\x1f.google.protobuf.MessageOptions\x18\x89\xf4\x03 \x01(\x08:0\n\x05\x65qual\x12\x1f.google.protobuf.MessageOptions\x18\x8d\xf4\x03 \x01(\x08:6\n\x0b\x64\x65scription\x12\x1f.google.protobuf.MessageOptions\x18\x8e\xf4\x03 \x01(\x08:2\n\x07testgen\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xf4\x03 
\x01(\x08:3\n\x08\x62\x65nchgen\x12\x1f.google.protobuf.MessageOptions\x18\x90\xf4\x03 \x01(\x08:4\n\tmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x91\xf4\x03 \x01(\x08:6\n\x0bunmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x92\xf4\x03 \x01(\x08:;\n\x10stable_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x93\xf4\x03 \x01(\x08:0\n\x05sizer\x12\x1f.google.protobuf.MessageOptions\x18\x94\xf4\x03 \x01(\x08:;\n\x10unsafe_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x97\xf4\x03 \x01(\x08:=\n\x12unsafe_unmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x98\xf4\x03 \x01(\x08:A\n\x16goproto_extensions_map\x12\x1f.google.protobuf.MessageOptions\x18\x99\xf4\x03 \x01(\x08:?\n\x14goproto_unrecognized\x12\x1f.google.protobuf.MessageOptions\x18\x9a\xf4\x03 \x01(\x08:5\n\nprotosizer\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xf4\x03 \x01(\x08:2\n\x07\x63ompare\x12\x1f.google.protobuf.MessageOptions\x18\x9d\xf4\x03 \x01(\x08:3\n\x08typedecl\x12\x1f.google.protobuf.MessageOptions\x18\x9e\xf4\x03 \x01(\x08:6\n\x0bmessagename\x12\x1f.google.protobuf.MessageOptions\x18\xa1\xf4\x03 \x01(\x08:<\n\x11goproto_sizecache\x12\x1f.google.protobuf.MessageOptions\x18\xa2\xf4\x03 \x01(\x08::\n\x0fgoproto_unkeyed\x12\x1f.google.protobuf.MessageOptions\x18\xa3\xf4\x03 \x01(\x08:1\n\x08nullable\x12\x1d.google.protobuf.FieldOptions\x18\xe9\xfb\x03 \x01(\x08:.\n\x05\x65mbed\x12\x1d.google.protobuf.FieldOptions\x18\xea\xfb\x03 \x01(\x08:3\n\ncustomtype\x12\x1d.google.protobuf.FieldOptions\x18\xeb\xfb\x03 \x01(\t:3\n\ncustomname\x12\x1d.google.protobuf.FieldOptions\x18\xec\xfb\x03 \x01(\t:0\n\x07jsontag\x12\x1d.google.protobuf.FieldOptions\x18\xed\xfb\x03 \x01(\t:1\n\x08moretags\x12\x1d.google.protobuf.FieldOptions\x18\xee\xfb\x03 \x01(\t:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xef\xfb\x03 \x01(\t:0\n\x07\x63\x61stkey\x12\x1d.google.protobuf.FieldOptions\x18\xf0\xfb\x03 
\x01(\t:2\n\tcastvalue\x12\x1d.google.protobuf.FieldOptions\x18\xf1\xfb\x03 \x01(\t:0\n\x07stdtime\x12\x1d.google.protobuf.FieldOptions\x18\xf2\xfb\x03 \x01(\x08:4\n\x0bstdduration\x12\x1d.google.protobuf.FieldOptions\x18\xf3\xfb\x03 \x01(\x08:3\n\nwktpointer\x12\x1d.google.protobuf.FieldOptions\x18\xf4\xfb\x03 \x01(\x08\x42\x45\n\x13\x63om.google.protobufB\nGoGoProtosZ"github.com/gogo/protobuf/gogoproto' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\x12\tgogoproto\x1a google/protobuf/descriptor.proto:;\n\x13goproto_enum_prefix\x12\x1c.google.protobuf.EnumOptions\x18\xb1\xe4\x03 \x01(\x08:=\n\x15goproto_enum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc5\xe4\x03 \x01(\x08:5\n\renum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc6\xe4\x03 \x01(\x08:7\n\x0f\x65num_customname\x12\x1c.google.protobuf.EnumOptions\x18\xc7\xe4\x03 \x01(\t:0\n\x08\x65numdecl\x12\x1c.google.protobuf.EnumOptions\x18\xc8\xe4\x03 \x01(\x08:A\n\x14\x65numvalue_customname\x12!.google.protobuf.EnumValueOptions\x18\xd1\x83\x04 \x01(\t:;\n\x13goproto_getters_all\x12\x1c.google.protobuf.FileOptions\x18\x99\xec\x03 \x01(\x08:?\n\x17goproto_enum_prefix_all\x12\x1c.google.protobuf.FileOptions\x18\x9a\xec\x03 \x01(\x08:<\n\x14goproto_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\x9b\xec\x03 \x01(\x08:9\n\x11verbose_equal_all\x12\x1c.google.protobuf.FileOptions\x18\x9c\xec\x03 \x01(\x08:0\n\x08\x66\x61\x63\x65_all\x12\x1c.google.protobuf.FileOptions\x18\x9d\xec\x03 \x01(\x08:4\n\x0cgostring_all\x12\x1c.google.protobuf.FileOptions\x18\x9e\xec\x03 \x01(\x08:4\n\x0cpopulate_all\x12\x1c.google.protobuf.FileOptions\x18\x9f\xec\x03 \x01(\x08:4\n\x0cstringer_all\x12\x1c.google.protobuf.FileOptions\x18\xa0\xec\x03 \x01(\x08:3\n\x0bonlyone_all\x12\x1c.google.protobuf.FileOptions\x18\xa1\xec\x03 \x01(\x08:1\n\tequal_all\x12\x1c.google.protobuf.FileOptions\x18\xa5\xec\x03 
\x01(\x08:7\n\x0f\x64\x65scription_all\x12\x1c.google.protobuf.FileOptions\x18\xa6\xec\x03 \x01(\x08:3\n\x0btestgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa7\xec\x03 \x01(\x08:4\n\x0c\x62\x65nchgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa8\xec\x03 \x01(\x08:5\n\rmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xa9\xec\x03 \x01(\x08:7\n\x0funmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaa\xec\x03 \x01(\x08:<\n\x14stable_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xab\xec\x03 \x01(\x08:1\n\tsizer_all\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\x03 \x01(\x08:A\n\x19goproto_enum_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xad\xec\x03 \x01(\x08:9\n\x11\x65num_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xae\xec\x03 \x01(\x08:<\n\x14unsafe_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaf\xec\x03 \x01(\x08:>\n\x16unsafe_unmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xb0\xec\x03 \x01(\x08:B\n\x1agoproto_extensions_map_all\x12\x1c.google.protobuf.FileOptions\x18\xb1\xec\x03 \x01(\x08:@\n\x18goproto_unrecognized_all\x12\x1c.google.protobuf.FileOptions\x18\xb2\xec\x03 \x01(\x08:8\n\x10gogoproto_import\x12\x1c.google.protobuf.FileOptions\x18\xb3\xec\x03 \x01(\x08:6\n\x0eprotosizer_all\x12\x1c.google.protobuf.FileOptions\x18\xb4\xec\x03 \x01(\x08:3\n\x0b\x63ompare_all\x12\x1c.google.protobuf.FileOptions\x18\xb5\xec\x03 \x01(\x08:4\n\x0ctypedecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb6\xec\x03 \x01(\x08:4\n\x0c\x65numdecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb7\xec\x03 \x01(\x08:<\n\x14goproto_registration\x12\x1c.google.protobuf.FileOptions\x18\xb8\xec\x03 \x01(\x08:7\n\x0fmessagename_all\x12\x1c.google.protobuf.FileOptions\x18\xb9\xec\x03 \x01(\x08:=\n\x15goproto_sizecache_all\x12\x1c.google.protobuf.FileOptions\x18\xba\xec\x03 \x01(\x08:;\n\x13goproto_unkeyed_all\x12\x1c.google.protobuf.FileOptions\x18\xbb\xec\x03 
\x01(\x08::\n\x0fgoproto_getters\x12\x1f.google.protobuf.MessageOptions\x18\x81\xf4\x03 \x01(\x08:;\n\x10goproto_stringer\x12\x1f.google.protobuf.MessageOptions\x18\x83\xf4\x03 \x01(\x08:8\n\rverbose_equal\x12\x1f.google.protobuf.MessageOptions\x18\x84\xf4\x03 \x01(\x08:/\n\x04\x66\x61\x63\x65\x12\x1f.google.protobuf.MessageOptions\x18\x85\xf4\x03 \x01(\x08:3\n\x08gostring\x12\x1f.google.protobuf.MessageOptions\x18\x86\xf4\x03 \x01(\x08:3\n\x08populate\x12\x1f.google.protobuf.MessageOptions\x18\x87\xf4\x03 \x01(\x08:3\n\x08stringer\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x8b\x04 \x01(\x08:2\n\x07onlyone\x12\x1f.google.protobuf.MessageOptions\x18\x89\xf4\x03 \x01(\x08:0\n\x05\x65qual\x12\x1f.google.protobuf.MessageOptions\x18\x8d\xf4\x03 \x01(\x08:6\n\x0b\x64\x65scription\x12\x1f.google.protobuf.MessageOptions\x18\x8e\xf4\x03 \x01(\x08:2\n\x07testgen\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xf4\x03 \x01(\x08:3\n\x08\x62\x65nchgen\x12\x1f.google.protobuf.MessageOptions\x18\x90\xf4\x03 \x01(\x08:4\n\tmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x91\xf4\x03 \x01(\x08:6\n\x0bunmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x92\xf4\x03 \x01(\x08:;\n\x10stable_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x93\xf4\x03 \x01(\x08:0\n\x05sizer\x12\x1f.google.protobuf.MessageOptions\x18\x94\xf4\x03 \x01(\x08:;\n\x10unsafe_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x97\xf4\x03 \x01(\x08:=\n\x12unsafe_unmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x98\xf4\x03 \x01(\x08:A\n\x16goproto_extensions_map\x12\x1f.google.protobuf.MessageOptions\x18\x99\xf4\x03 \x01(\x08:?\n\x14goproto_unrecognized\x12\x1f.google.protobuf.MessageOptions\x18\x9a\xf4\x03 \x01(\x08:5\n\nprotosizer\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xf4\x03 \x01(\x08:2\n\x07\x63ompare\x12\x1f.google.protobuf.MessageOptions\x18\x9d\xf4\x03 \x01(\x08:3\n\x08typedecl\x12\x1f.google.protobuf.MessageOptions\x18\x9e\xf4\x03 
\x01(\x08:6\n\x0bmessagename\x12\x1f.google.protobuf.MessageOptions\x18\xa1\xf4\x03 \x01(\x08:<\n\x11goproto_sizecache\x12\x1f.google.protobuf.MessageOptions\x18\xa2\xf4\x03 \x01(\x08::\n\x0fgoproto_unkeyed\x12\x1f.google.protobuf.MessageOptions\x18\xa3\xf4\x03 \x01(\x08:1\n\x08nullable\x12\x1d.google.protobuf.FieldOptions\x18\xe9\xfb\x03 \x01(\x08:.\n\x05\x65mbed\x12\x1d.google.protobuf.FieldOptions\x18\xea\xfb\x03 \x01(\x08:3\n\ncustomtype\x12\x1d.google.protobuf.FieldOptions\x18\xeb\xfb\x03 \x01(\t:3\n\ncustomname\x12\x1d.google.protobuf.FieldOptions\x18\xec\xfb\x03 \x01(\t:0\n\x07jsontag\x12\x1d.google.protobuf.FieldOptions\x18\xed\xfb\x03 \x01(\t:1\n\x08moretags\x12\x1d.google.protobuf.FieldOptions\x18\xee\xfb\x03 \x01(\t:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xef\xfb\x03 \x01(\t:0\n\x07\x63\x61stkey\x12\x1d.google.protobuf.FieldOptions\x18\xf0\xfb\x03 \x01(\t:2\n\tcastvalue\x12\x1d.google.protobuf.FieldOptions\x18\xf1\xfb\x03 \x01(\t:0\n\x07stdtime\x12\x1d.google.protobuf.FieldOptions\x18\xf2\xfb\x03 \x01(\x08:4\n\x0bstdduration\x12\x1d.google.protobuf.FieldOptions\x18\xf3\xfb\x03 \x01(\x08:3\n\nwktpointer\x12\x1d.google.protobuf.FieldOptions\x18\xf4\xfb\x03 \x01(\x08\x42\x45\n\x13\x63om.google.protobufB\nGoGoProtosZ\"github.com/gogo/protobuf/gogoproto') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, - "opentelemetry.exporter.prometheus_remote_write.gen.gogoproto.gogo_pb2", - globals(), -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.gogoproto.gogo_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( - goproto_enum_prefix - ) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( - goproto_enum_stringer - ) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( - enum_stringer - ) - 
google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( - enum_customname - ) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( - enumdecl - ) - google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension( - enumvalue_customname - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - goproto_getters_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - goproto_enum_prefix_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - goproto_stringer_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - verbose_equal_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - face_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - gostring_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - populate_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - stringer_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - onlyone_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - equal_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - description_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - testgen_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - benchgen_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - marshaler_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - unmarshaler_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - stable_marshaler_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - sizer_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - goproto_enum_stringer_all - ) - 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - enum_stringer_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - unsafe_marshaler_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - unsafe_unmarshaler_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - goproto_extensions_map_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - goproto_unrecognized_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - gogoproto_import - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - protosizer_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - compare_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - typedecl_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - enumdecl_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - goproto_registration - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - messagename_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - goproto_sizecache_all - ) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( - goproto_unkeyed_all - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - goproto_getters - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - goproto_stringer - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - verbose_equal - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - face - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - gostring - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - populate - ) - 
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - stringer - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - onlyone - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - equal - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - description - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - testgen - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - benchgen - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - marshaler - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - unmarshaler - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - stable_marshaler - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - sizer - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - unsafe_marshaler - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - unsafe_unmarshaler - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - goproto_extensions_map - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - goproto_unrecognized - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - protosizer - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - compare - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - typedecl - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - messagename - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - goproto_sizecache - ) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( - goproto_unkeyed - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - nullable - ) - 
google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - embed - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - customtype - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - customname - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - jsontag - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - moretags - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - casttype - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - castkey - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - castvalue - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - stdtime - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - stdduration - ) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( - wktpointer - ) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_prefix) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_stringer) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_stringer) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_customname) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enumdecl) + google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(enumvalue_customname) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_getters_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_prefix_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(verbose_equal_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(face_all) + 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gostring_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(populate_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(onlyone_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(equal_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(description_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(testgen_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(benchgen_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(marshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unmarshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stable_marshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(sizer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enum_stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_marshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_unmarshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_extensions_map_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unrecognized_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gogoproto_import) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(protosizer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(compare_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(typedecl_all) + 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enumdecl_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_registration) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(messagename_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_sizecache_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unkeyed_all) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_getters) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_stringer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(verbose_equal) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(face) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(gostring) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(populate) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stringer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(onlyone) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(equal) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(description) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(testgen) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(benchgen) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(marshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unmarshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stable_marshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sizer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_marshaler) + 
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_unmarshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_extensions_map) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unrecognized) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(protosizer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(compare) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(typedecl) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(messagename) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_sizecache) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unkeyed) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(nullable) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(embed) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customtype) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customname) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(jsontag) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(moretags) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(casttype) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castkey) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castvalue) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdtime) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdduration) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(wktpointer) - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nGoGoProtosZ"github.com/gogo/protobuf/gogoproto' + DESCRIPTOR._options = None + 
DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nGoGoProtosZ\"github.com/gogo/protobuf/gogoproto' # @@protoc_insertion_point(module_scope) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py index 09d13a7a09..a274dbf204 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py @@ -6,54 +6,39 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.exporter.prometheus_remote_write.gen import ( - types_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_types__pb2, -) -from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import ( - gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2, -) +from opentelemetry.exporter.prometheus_remote_write.gen import types_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_types__pb2 +from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - 
b'\n?opentelemetry/exporter/prometheus_remote_write/gen/remote.proto\x12\nprometheus\x1a>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"z\n\x0cWriteRequest\x12\x30\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeriesB\x04\xc8\xde\x1f\x00\x12\x32\n\x08metadata\x18\x03 \x03(\x0b\x32\x1a.prometheus.MetricMetadataB\x04\xc8\xde\x1f\x00J\x04\x08\x02\x10\x03"\xae\x01\n\x0bReadRequest\x12"\n\x07queries\x18\x01 \x03(\x0b\x32\x11.prometheus.Query\x12\x45\n\x17\x61\x63\x63\x65pted_response_types\x18\x02 \x03(\x0e\x32$.prometheus.ReadRequest.ResponseType"4\n\x0cResponseType\x12\x0b\n\x07SAMPLES\x10\x00\x12\x17\n\x13STREAMED_XOR_CHUNKS\x10\x01"8\n\x0cReadResponse\x12(\n\x07results\x18\x01 \x03(\x0b\x32\x17.prometheus.QueryResult"\x8f\x01\n\x05Query\x12\x1a\n\x12start_timestamp_ms\x18\x01 \x01(\x03\x12\x18\n\x10\x65nd_timestamp_ms\x18\x02 \x01(\x03\x12*\n\x08matchers\x18\x03 \x03(\x0b\x32\x18.prometheus.LabelMatcher\x12$\n\x05hints\x18\x04 \x01(\x0b\x32\x15.prometheus.ReadHints"9\n\x0bQueryResult\x12*\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeries"]\n\x13\x43hunkedReadResponse\x12\x31\n\x0e\x63hunked_series\x18\x01 \x03(\x0b\x32\x19.prometheus.ChunkedSeries\x12\x13\n\x0bquery_index\x18\x02 \x01(\x03\x42\x08Z\x06prompbb\x06proto3' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n?opentelemetry/exporter/prometheus_remote_write/gen/remote.proto\x12\nprometheus\x1a>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\"z\n\x0cWriteRequest\x12\x30\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeriesB\x04\xc8\xde\x1f\x00\x12\x32\n\x08metadata\x18\x03 \x03(\x0b\x32\x1a.prometheus.MetricMetadataB\x04\xc8\xde\x1f\x00J\x04\x08\x02\x10\x03\"\xae\x01\n\x0bReadRequest\x12\"\n\x07queries\x18\x01 
\x03(\x0b\x32\x11.prometheus.Query\x12\x45\n\x17\x61\x63\x63\x65pted_response_types\x18\x02 \x03(\x0e\x32$.prometheus.ReadRequest.ResponseType\"4\n\x0cResponseType\x12\x0b\n\x07SAMPLES\x10\x00\x12\x17\n\x13STREAMED_XOR_CHUNKS\x10\x01\"8\n\x0cReadResponse\x12(\n\x07results\x18\x01 \x03(\x0b\x32\x17.prometheus.QueryResult\"\x8f\x01\n\x05Query\x12\x1a\n\x12start_timestamp_ms\x18\x01 \x01(\x03\x12\x18\n\x10\x65nd_timestamp_ms\x18\x02 \x01(\x03\x12*\n\x08matchers\x18\x03 \x03(\x0b\x32\x18.prometheus.LabelMatcher\x12$\n\x05hints\x18\x04 \x01(\x0b\x32\x15.prometheus.ReadHints\"9\n\x0bQueryResult\x12*\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeries\"]\n\x13\x43hunkedReadResponse\x12\x31\n\x0e\x63hunked_series\x18\x01 \x03(\x0b\x32\x19.prometheus.ChunkedSeries\x12\x13\n\x0bquery_index\x18\x02 \x01(\x03\x42\x08Z\x06prompbb\x06proto3') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, - "opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2", - globals(), -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b"Z\006prompb" - _WRITEREQUEST.fields_by_name["timeseries"]._options = None - _WRITEREQUEST.fields_by_name[ - "timeseries" - ]._serialized_options = b"\310\336\037\000" - _WRITEREQUEST.fields_by_name["metadata"]._options = None - _WRITEREQUEST.fields_by_name[ - "metadata" - ]._serialized_options = b"\310\336\037\000" - _WRITEREQUEST._serialized_start = 216 - _WRITEREQUEST._serialized_end = 338 - _READREQUEST._serialized_start = 341 - _READREQUEST._serialized_end = 515 - _READREQUEST_RESPONSETYPE._serialized_start = 463 - _READREQUEST_RESPONSETYPE._serialized_end = 515 - _READRESPONSE._serialized_start = 517 - _READRESPONSE._serialized_end = 573 - _QUERY._serialized_start = 576 - 
_QUERY._serialized_end = 719 - _QUERYRESULT._serialized_start = 721 - _QUERYRESULT._serialized_end = 778 - _CHUNKEDREADRESPONSE._serialized_start = 780 - _CHUNKEDREADRESPONSE._serialized_end = 873 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'Z\006prompb' + _WRITEREQUEST.fields_by_name['timeseries']._options = None + _WRITEREQUEST.fields_by_name['timeseries']._serialized_options = b'\310\336\037\000' + _WRITEREQUEST.fields_by_name['metadata']._options = None + _WRITEREQUEST.fields_by_name['metadata']._serialized_options = b'\310\336\037\000' + _WRITEREQUEST._serialized_start=216 + _WRITEREQUEST._serialized_end=338 + _READREQUEST._serialized_start=341 + _READREQUEST._serialized_end=515 + _READREQUEST_RESPONSETYPE._serialized_start=463 + _READREQUEST_RESPONSETYPE._serialized_end=515 + _READRESPONSE._serialized_start=517 + _READRESPONSE._serialized_end=573 + _QUERY._serialized_start=576 + _QUERY._serialized_end=719 + _QUERYRESULT._serialized_start=721 + _QUERYRESULT._serialized_end=778 + _CHUNKEDREADRESPONSE._serialized_start=780 + _CHUNKEDREADRESPONSE._serialized_end=873 # @@protoc_insertion_point(module_scope) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py index a58e0194ee..d519e03423 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py @@ -6,81 +6,60 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from 
opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import ( - gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2, -) +from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x12\nprometheus\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"\xf8\x01\n\x0eMetricMetadata\x12\x33\n\x04type\x18\x01 \x01(\x0e\x32%.prometheus.MetricMetadata.MetricType\x12\x1a\n\x12metric_family_name\x18\x02 \x01(\t\x12\x0c\n\x04help\x18\x04 \x01(\t\x12\x0c\n\x04unit\x18\x05 \x01(\t"y\n\nMetricType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\t\n\x05GAUGE\x10\x02\x12\r\n\tHISTOGRAM\x10\x03\x12\x12\n\x0eGAUGEHISTOGRAM\x10\x04\x12\x0b\n\x07SUMMARY\x10\x05\x12\x08\n\x04INFO\x10\x06\x12\x0c\n\x08STATESET\x10\x07"*\n\x06Sample\x12\r\n\x05value\x18\x01 \x01(\x01\x12\x11\n\ttimestamp\x18\x02 \x01(\x03"U\n\x08\x45xemplar\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\r\n\x05value\x18\x02 \x01(\x01\x12\x11\n\ttimestamp\x18\x03 \x01(\x03"\x8f\x01\n\nTimeSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12)\n\x07samples\x18\x02 \x03(\x0b\x32\x12.prometheus.SampleB\x04\xc8\xde\x1f\x00\x12-\n\texemplars\x18\x03 \x03(\x0b\x32\x14.prometheus.ExemplarB\x04\xc8\xde\x1f\x00"$\n\x05Label\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"1\n\x06Labels\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00"\x82\x01\n\x0cLabelMatcher\x12+\n\x04type\x18\x01 \x01(\x0e\x32\x1d.prometheus.LabelMatcher.Type\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 
\x01(\t"(\n\x04Type\x12\x06\n\x02\x45Q\x10\x00\x12\x07\n\x03NEQ\x10\x01\x12\x06\n\x02RE\x10\x02\x12\x07\n\x03NRE\x10\x03"|\n\tReadHints\x12\x0f\n\x07step_ms\x18\x01 \x01(\x03\x12\x0c\n\x04\x66unc\x18\x02 \x01(\t\x12\x10\n\x08start_ms\x18\x03 \x01(\x03\x12\x0e\n\x06\x65nd_ms\x18\x04 \x01(\x03\x12\x10\n\x08grouping\x18\x05 \x03(\t\x12\n\n\x02\x62y\x18\x06 \x01(\x08\x12\x10\n\x08range_ms\x18\x07 \x01(\x03"\x8b\x01\n\x05\x43hunk\x12\x13\n\x0bmin_time_ms\x18\x01 \x01(\x03\x12\x13\n\x0bmax_time_ms\x18\x02 \x01(\x03\x12(\n\x04type\x18\x03 \x01(\x0e\x32\x1a.prometheus.Chunk.Encoding\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c" \n\x08\x45ncoding\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03XOR\x10\x01"a\n\rChunkedSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\'\n\x06\x63hunks\x18\x02 \x03(\x0b\x32\x11.prometheus.ChunkB\x04\xc8\xde\x1f\x00\x42\x08Z\x06prompbb\x06proto3' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x12\nprometheus\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\"\xf8\x01\n\x0eMetricMetadata\x12\x33\n\x04type\x18\x01 \x01(\x0e\x32%.prometheus.MetricMetadata.MetricType\x12\x1a\n\x12metric_family_name\x18\x02 \x01(\t\x12\x0c\n\x04help\x18\x04 \x01(\t\x12\x0c\n\x04unit\x18\x05 \x01(\t\"y\n\nMetricType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\t\n\x05GAUGE\x10\x02\x12\r\n\tHISTOGRAM\x10\x03\x12\x12\n\x0eGAUGEHISTOGRAM\x10\x04\x12\x0b\n\x07SUMMARY\x10\x05\x12\x08\n\x04INFO\x10\x06\x12\x0c\n\x08STATESET\x10\x07\"*\n\x06Sample\x12\r\n\x05value\x18\x01 \x01(\x01\x12\x11\n\ttimestamp\x18\x02 \x01(\x03\"U\n\x08\x45xemplar\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\r\n\x05value\x18\x02 \x01(\x01\x12\x11\n\ttimestamp\x18\x03 \x01(\x03\"\x8f\x01\n\nTimeSeries\x12\'\n\x06labels\x18\x01 
\x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12)\n\x07samples\x18\x02 \x03(\x0b\x32\x12.prometheus.SampleB\x04\xc8\xde\x1f\x00\x12-\n\texemplars\x18\x03 \x03(\x0b\x32\x14.prometheus.ExemplarB\x04\xc8\xde\x1f\x00\"$\n\x05Label\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"1\n\x06Labels\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\"\x82\x01\n\x0cLabelMatcher\x12+\n\x04type\x18\x01 \x01(\x0e\x32\x1d.prometheus.LabelMatcher.Type\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\"(\n\x04Type\x12\x06\n\x02\x45Q\x10\x00\x12\x07\n\x03NEQ\x10\x01\x12\x06\n\x02RE\x10\x02\x12\x07\n\x03NRE\x10\x03\"|\n\tReadHints\x12\x0f\n\x07step_ms\x18\x01 \x01(\x03\x12\x0c\n\x04\x66unc\x18\x02 \x01(\t\x12\x10\n\x08start_ms\x18\x03 \x01(\x03\x12\x0e\n\x06\x65nd_ms\x18\x04 \x01(\x03\x12\x10\n\x08grouping\x18\x05 \x03(\t\x12\n\n\x02\x62y\x18\x06 \x01(\x08\x12\x10\n\x08range_ms\x18\x07 \x01(\x03\"\x8b\x01\n\x05\x43hunk\x12\x13\n\x0bmin_time_ms\x18\x01 \x01(\x03\x12\x13\n\x0bmax_time_ms\x18\x02 \x01(\x03\x12(\n\x04type\x18\x03 \x01(\x0e\x32\x1a.prometheus.Chunk.Encoding\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\" \n\x08\x45ncoding\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03XOR\x10\x01\"a\n\rChunkedSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\'\n\x06\x63hunks\x18\x02 \x03(\x0b\x32\x11.prometheus.ChunkB\x04\xc8\xde\x1f\x00\x42\x08Z\x06prompbb\x06proto3') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, - "opentelemetry.exporter.prometheus_remote_write.gen.types_pb2", - globals(), -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.types_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b"Z\006prompb" - _EXEMPLAR.fields_by_name["labels"]._options = None - 
_EXEMPLAR.fields_by_name[ - "labels" - ]._serialized_options = b"\310\336\037\000" - _TIMESERIES.fields_by_name["labels"]._options = None - _TIMESERIES.fields_by_name[ - "labels" - ]._serialized_options = b"\310\336\037\000" - _TIMESERIES.fields_by_name["samples"]._options = None - _TIMESERIES.fields_by_name[ - "samples" - ]._serialized_options = b"\310\336\037\000" - _TIMESERIES.fields_by_name["exemplars"]._options = None - _TIMESERIES.fields_by_name[ - "exemplars" - ]._serialized_options = b"\310\336\037\000" - _LABELS.fields_by_name["labels"]._options = None - _LABELS.fields_by_name["labels"]._serialized_options = b"\310\336\037\000" - _CHUNKEDSERIES.fields_by_name["labels"]._options = None - _CHUNKEDSERIES.fields_by_name[ - "labels" - ]._serialized_options = b"\310\336\037\000" - _CHUNKEDSERIES.fields_by_name["chunks"]._options = None - _CHUNKEDSERIES.fields_by_name[ - "chunks" - ]._serialized_options = b"\310\336\037\000" - _METRICMETADATA._serialized_start = 152 - _METRICMETADATA._serialized_end = 400 - _METRICMETADATA_METRICTYPE._serialized_start = 279 - _METRICMETADATA_METRICTYPE._serialized_end = 400 - _SAMPLE._serialized_start = 402 - _SAMPLE._serialized_end = 444 - _EXEMPLAR._serialized_start = 446 - _EXEMPLAR._serialized_end = 531 - _TIMESERIES._serialized_start = 534 - _TIMESERIES._serialized_end = 677 - _LABEL._serialized_start = 679 - _LABEL._serialized_end = 715 - _LABELS._serialized_start = 717 - _LABELS._serialized_end = 766 - _LABELMATCHER._serialized_start = 769 - _LABELMATCHER._serialized_end = 899 - _LABELMATCHER_TYPE._serialized_start = 859 - _LABELMATCHER_TYPE._serialized_end = 899 - _READHINTS._serialized_start = 901 - _READHINTS._serialized_end = 1025 - _CHUNK._serialized_start = 1028 - _CHUNK._serialized_end = 1167 - _CHUNK_ENCODING._serialized_start = 1135 - _CHUNK_ENCODING._serialized_end = 1167 - _CHUNKEDSERIES._serialized_start = 1169 - _CHUNKEDSERIES._serialized_end = 1266 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options 
= b'Z\006prompb' + _EXEMPLAR.fields_by_name['labels']._options = None + _EXEMPLAR.fields_by_name['labels']._serialized_options = b'\310\336\037\000' + _TIMESERIES.fields_by_name['labels']._options = None + _TIMESERIES.fields_by_name['labels']._serialized_options = b'\310\336\037\000' + _TIMESERIES.fields_by_name['samples']._options = None + _TIMESERIES.fields_by_name['samples']._serialized_options = b'\310\336\037\000' + _TIMESERIES.fields_by_name['exemplars']._options = None + _TIMESERIES.fields_by_name['exemplars']._serialized_options = b'\310\336\037\000' + _LABELS.fields_by_name['labels']._options = None + _LABELS.fields_by_name['labels']._serialized_options = b'\310\336\037\000' + _CHUNKEDSERIES.fields_by_name['labels']._options = None + _CHUNKEDSERIES.fields_by_name['labels']._serialized_options = b'\310\336\037\000' + _CHUNKEDSERIES.fields_by_name['chunks']._options = None + _CHUNKEDSERIES.fields_by_name['chunks']._serialized_options = b'\310\336\037\000' + _METRICMETADATA._serialized_start=152 + _METRICMETADATA._serialized_end=400 + _METRICMETADATA_METRICTYPE._serialized_start=279 + _METRICMETADATA_METRICTYPE._serialized_end=400 + _SAMPLE._serialized_start=402 + _SAMPLE._serialized_end=444 + _EXEMPLAR._serialized_start=446 + _EXEMPLAR._serialized_end=531 + _TIMESERIES._serialized_start=534 + _TIMESERIES._serialized_end=677 + _LABEL._serialized_start=679 + _LABEL._serialized_end=715 + _LABELS._serialized_start=717 + _LABELS._serialized_end=766 + _LABELMATCHER._serialized_start=769 + _LABELMATCHER._serialized_end=899 + _LABELMATCHER_TYPE._serialized_start=859 + _LABELMATCHER_TYPE._serialized_end=899 + _READHINTS._serialized_start=901 + _READHINTS._serialized_end=1025 + _CHUNK._serialized_start=1028 + _CHUNK._serialized_end=1167 + _CHUNK_ENCODING._serialized_start=1135 + _CHUNK_ENCODING._serialized_end=1167 + _CHUNKEDSERIES._serialized_start=1169 + _CHUNKEDSERIES._serialized_end=1266 # @@protoc_insertion_point(module_scope) From 
15a03b4a569a59e97a6233a5e30ba840182db0d8 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Wed, 12 Oct 2022 10:17:57 -0400 Subject: [PATCH 10/16] Fix the shutdown method --- .../opentelemetry/exporter/prometheus_remote_write/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py index 811fe90ff6..25167880dd 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -389,5 +389,5 @@ def _send_message( def force_flush(self, timeout_millis: float = 10_000) -> bool: return True - def shutdown(self) -> None: + def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None: pass From b8e249d0299a88a0aa9a26f2019ce41858c3bd24 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Tue, 18 Oct 2022 09:23:39 -0400 Subject: [PATCH 11/16] Remove extra proto files & update regex --- .../gen/gogoproto/gogo.proto | 144 ------------------ .../prometheus_remote_write/gen/remote.proto | 86 ----------- .../prometheus_remote_write/gen/types.proto | 119 --------------- .../prometheus_remote_write/__init__.py | 26 +++- .../test_prometheus_remote_write_exporter.py | 14 +- 5 files changed, 27 insertions(+), 362 deletions(-) delete mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto delete mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/remote.proto delete mode 100644 
exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/types.proto diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto deleted file mode 100644 index b80c85653f..0000000000 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto +++ /dev/null @@ -1,144 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -syntax = "proto2"; -package gogoproto; - -import "google/protobuf/descriptor.proto"; - -option java_package = "com.google.protobuf"; -option java_outer_classname = "GoGoProtos"; -option go_package = "github.com/gogo/protobuf/gogoproto"; - -extend google.protobuf.EnumOptions { - optional bool goproto_enum_prefix = 62001; - optional bool goproto_enum_stringer = 62021; - optional bool enum_stringer = 62022; - optional string enum_customname = 62023; - optional bool enumdecl = 62024; -} - -extend google.protobuf.EnumValueOptions { - optional string enumvalue_customname = 66001; -} - -extend google.protobuf.FileOptions { - optional bool goproto_getters_all = 63001; - optional bool goproto_enum_prefix_all = 63002; - optional bool goproto_stringer_all = 63003; - optional bool verbose_equal_all = 63004; - optional bool face_all = 63005; - optional bool gostring_all = 63006; - optional bool populate_all = 63007; - optional bool stringer_all = 63008; - optional bool onlyone_all = 63009; - - optional bool equal_all = 63013; - optional bool description_all = 63014; - optional bool testgen_all = 63015; - optional bool benchgen_all = 63016; - optional bool marshaler_all = 63017; - optional bool unmarshaler_all = 63018; - optional bool stable_marshaler_all = 63019; - - optional bool sizer_all = 63020; - - optional bool goproto_enum_stringer_all = 63021; - optional bool enum_stringer_all = 63022; - - optional bool unsafe_marshaler_all = 63023; - 
optional bool unsafe_unmarshaler_all = 63024; - - optional bool goproto_extensions_map_all = 63025; - optional bool goproto_unrecognized_all = 63026; - optional bool gogoproto_import = 63027; - optional bool protosizer_all = 63028; - optional bool compare_all = 63029; - optional bool typedecl_all = 63030; - optional bool enumdecl_all = 63031; - - optional bool goproto_registration = 63032; - optional bool messagename_all = 63033; - - optional bool goproto_sizecache_all = 63034; - optional bool goproto_unkeyed_all = 63035; -} - -extend google.protobuf.MessageOptions { - optional bool goproto_getters = 64001; - optional bool goproto_stringer = 64003; - optional bool verbose_equal = 64004; - optional bool face = 64005; - optional bool gostring = 64006; - optional bool populate = 64007; - optional bool stringer = 67008; - optional bool onlyone = 64009; - - optional bool equal = 64013; - optional bool description = 64014; - optional bool testgen = 64015; - optional bool benchgen = 64016; - optional bool marshaler = 64017; - optional bool unmarshaler = 64018; - optional bool stable_marshaler = 64019; - - optional bool sizer = 64020; - - optional bool unsafe_marshaler = 64023; - optional bool unsafe_unmarshaler = 64024; - - optional bool goproto_extensions_map = 64025; - optional bool goproto_unrecognized = 64026; - - optional bool protosizer = 64028; - optional bool compare = 64029; - - optional bool typedecl = 64030; - - optional bool messagename = 64033; - - optional bool goproto_sizecache = 64034; - optional bool goproto_unkeyed = 64035; -} - -extend google.protobuf.FieldOptions { - optional bool nullable = 65001; - optional bool embed = 65002; - optional string customtype = 65003; - optional string customname = 65004; - optional string jsontag = 65005; - optional string moretags = 65006; - optional string casttype = 65007; - optional string castkey = 65008; - optional string castvalue = 65009; - - optional bool stdtime = 65010; - optional bool stdduration = 65011; - 
optional bool wktpointer = 65012; - -} diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/remote.proto b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/remote.proto deleted file mode 100644 index 51bce1ed55..0000000000 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/remote.proto +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2016 Prometheus Team -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; -package prometheus; - -option go_package = "prompb"; - -import "opentelemetry/exporter/prometheus_remote_write/gen/types.proto"; -import "opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"; - -message WriteRequest { - repeated prometheus.TimeSeries timeseries = 1 [(gogoproto.nullable) = false]; - // Cortex uses this field to determine the source of the write request. - // We reserve it to avoid any compatibility issues. - reserved 2; - repeated prometheus.MetricMetadata metadata = 3 [(gogoproto.nullable) = false]; -} - -// ReadRequest represents a remote read request. -message ReadRequest { - repeated Query queries = 1; - - enum ResponseType { - // Server will return a single ReadResponse message with matched series that includes list of raw samples. - // It's recommended to use streamed response types instead. 
- // - // Response headers: - // Content-Type: "application/x-protobuf" - // Content-Encoding: "snappy" - SAMPLES = 0; - // Server will stream a delimited ChunkedReadResponse message that contains XOR encoded chunks for a single series. - // Each message is following varint size and fixed size bigendian uint32 for CRC32 Castagnoli checksum. - // - // Response headers: - // Content-Type: "application/x-streamed-protobuf; proto=prometheus.ChunkedReadResponse" - // Content-Encoding: "" - STREAMED_XOR_CHUNKS = 1; - } - - // accepted_response_types allows negotiating the content type of the response. - // - // Response types are taken from the list in the FIFO order. If no response type in `accepted_response_types` is - // implemented by server, error is returned. - // For request that do not contain `accepted_response_types` field the SAMPLES response type will be used. - repeated ResponseType accepted_response_types = 2; -} - -// ReadResponse is a response when response_type equals SAMPLES. -message ReadResponse { - // In same order as the request's queries. - repeated QueryResult results = 1; -} - -message Query { - int64 start_timestamp_ms = 1; - int64 end_timestamp_ms = 2; - repeated prometheus.LabelMatcher matchers = 3; - prometheus.ReadHints hints = 4; -} - -message QueryResult { - // Samples within a time series must be ordered by time. - repeated prometheus.TimeSeries timeseries = 1; -} - -// ChunkedReadResponse is a response when response_type equals STREAMED_XOR_CHUNKS. -// We strictly stream full series after series, optionally split by time. This means that a single frame can contain -// partition of the single series, but once a new series is started to be streamed it means that no more chunks will -// be sent for previous one. Series are returned sorted in the same way TSDB block are internally. 
-message ChunkedReadResponse { - repeated prometheus.ChunkedSeries chunked_series = 1; - - // query_index represents an index of the query from ReadRequest.queries these chunks relates to. - int64 query_index = 2; -} diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/types.proto b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/types.proto deleted file mode 100644 index 176f807a71..0000000000 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/opentelemetry/exporter/prometheus_remote_write/gen/types.proto +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2017 Prometheus Team -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; -package prometheus; - -option go_package = "prompb"; - -import "opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"; - -message MetricMetadata { - enum MetricType { - UNKNOWN = 0; - COUNTER = 1; - GAUGE = 2; - HISTOGRAM = 3; - GAUGEHISTOGRAM = 4; - SUMMARY = 5; - INFO = 6; - STATESET = 7; - } - - // Represents the metric type, these match the set from Prometheus. - // Refer to model/textparse/interface.go for details. 
- MetricType type = 1; - string metric_family_name = 2; - string help = 4; - string unit = 5; -} - -message Sample { - double value = 1; - // timestamp is in ms format, see model/timestamp/timestamp.go for - // conversion from time.Time to Prometheus timestamp. - int64 timestamp = 2; -} - -message Exemplar { - // Optional, can be empty. - repeated Label labels = 1 [(gogoproto.nullable) = false]; - double value = 2; - // timestamp is in ms format, see model/timestamp/timestamp.go for - // conversion from time.Time to Prometheus timestamp. - int64 timestamp = 3; -} - -// TimeSeries represents samples and labels for a single time series. -message TimeSeries { - // For a timeseries to be valid, and for the samples and exemplars - // to be ingested by the remote system properly, the labels field is required. - repeated Label labels = 1 [(gogoproto.nullable) = false]; - repeated Sample samples = 2 [(gogoproto.nullable) = false]; - repeated Exemplar exemplars = 3 [(gogoproto.nullable) = false]; -} - -message Label { - string name = 1; - string value = 2; -} - -message Labels { - repeated Label labels = 1 [(gogoproto.nullable) = false]; -} - -// Matcher specifies a rule, which can match or set of labels or not. -message LabelMatcher { - enum Type { - EQ = 0; - NEQ = 1; - RE = 2; - NRE = 3; - } - Type type = 1; - string name = 2; - string value = 3; -} - -message ReadHints { - int64 step_ms = 1; // Query step size in milliseconds. - string func = 2; // String representation of surrounding function or aggregation. - int64 start_ms = 3; // Start time in milliseconds. - int64 end_ms = 4; // End time in milliseconds. - repeated string grouping = 5; // List of label names used in aggregation. - bool by = 6; // Indicate whether it is without or by. - int64 range_ms = 7; // Range vector selector range in milliseconds. -} - -// Chunk represents a TSDB chunk. -// Time range [min, max] is inclusive. 
-message Chunk { - int64 min_time_ms = 1; - int64 max_time_ms = 2; - - // We require this to match chunkenc.Encoding. - enum Encoding { - UNKNOWN = 0; - XOR = 1; - } - Encoding type = 3; - bytes data = 4; -} - -// ChunkedSeries represents single, encoded time series. -message ChunkedSeries { - // Labels should be sorted. - repeated Label labels = 1 [(gogoproto.nullable) = false]; - // Chunks will be in start time order and may overlap. - repeated Chunk chunks = 2 [(gogoproto.nullable) = false]; -} diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py index 25167880dd..68f2c37a6e 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -50,8 +50,9 @@ logger = logging.getLogger(__name__) -PROMETHEUS_NAME_REGEX = re.compile(r"[^\w:]") -PROMETHEUS_LABEL_REGEX = re.compile(r"[^\w]") +PROMETHEUS_NAME_REGEX = re.compile(r"^\d|[^\w:]") +PROMETHEUS_LABEL_REGEX = re.compile(r"^\d|[^\w]") +UNDERSCORE_REGEX = re.compile(r"_+") class PrometheusRemoteWriteMetricsExporter(MetricExporter): @@ -266,17 +267,28 @@ def _sample(self, value: int, timestamp: int) -> Sample: def _label(self, name: str, value: str) -> Label: label = Label() - label.name = PROMETHEUS_LABEL_REGEX.sub("_", name) + label.name = self._sanitize_string(name,"label") label.value = value return label - def _sanitize_name(self, name): + def _sanitize_string(self, string: str, type_: str) -> str: # I Think Prometheus requires names to NOT start with a number this # would not catch that, but do cover the other cases. The naming rules # don't explicit say this, but the supplied regex implies it. 
# Got a little weird trying to do substitution with it, but can be # fixed if we allow numeric beginnings to metric names - return PROMETHEUS_NAME_REGEX.sub("_", name) + if type_ == "name": + sanitized = PROMETHEUS_NAME_REGEX.sub("_", string) + elif type_ == "label": + sanitized = PROMETHEUS_LABEL_REGEX.sub("_", string) + else: + raise TypeError(f"Unsupported string type: {type_}") + + # Remove consecutive underscores + # TODO: Unfortunately this clobbbers __name__ + #sanitized = UNDERSCORE_REGEX.sub("_",sanitized) + + return sanitized def _parse_histogram_data_point(self, data_point, name): @@ -292,7 +304,7 @@ def handle_bucket(value, bound=None, name_override=None): # Metric Level attributes + the bucket boundary attribute + name ts_attrs = base_attrs.copy() ts_attrs.append( - ("__name__", self._sanitize_name(name_override or name)) + ("__name__", self._sanitize_string(name_override or name,"name")) ) if bound: ts_attrs.append(("le", str(bound))) @@ -322,7 +334,7 @@ def handle_bucket(value, bound=None, name_override=None): def _parse_data_point(self, data_point, name=None): attrs = tuple(data_point.attributes.items()) + ( - ("__name__", self._sanitize_name(name)), + ("__name__", self._sanitize_string(name,"name")), ) sample = (data_point.value, (data_point.time_unix_nano // 1_000_000)) return attrs, sample diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py index 1f61d30e43..c3150396fc 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py @@ -18,8 +18,6 @@ import pytest from opentelemetry.exporter.prometheus_remote_write import ( - PROMETHEUS_LABEL_REGEX, - PROMETHEUS_NAME_REGEX, PrometheusRemoteWriteMetricsExporter, ) from 
opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( @@ -45,11 +43,15 @@ (":abc", ":abc"), ("abc.name.hi", "abc_name_hi"), ("service.name...", "service_name___"), + ("4hellowor:ld5∂©∑","_hellowor:ld5___"), ], ) -def test_name_regex(name, result, prom_rw): - assert prom_rw._sanitize_name(name) == result +def test_regex(name, result, prom_rw): + assert prom_rw._sanitize_string(name,"name") == result +def test_regex_invalid(prom_rw): + with pytest.raises(TypeError): + prom_rw("foo_bar","A random type") def test_parse_data_point(prom_rw): @@ -130,12 +132,12 @@ def test_parse_metric(metric, prom_rw): # This doesn't guarantee the labels aren't mixed up, but our other # test cases already do. assert "__name__" in labels - assert PROMETHEUS_NAME_REGEX.sub("_", metric.name) in labels + assert prom_rw._sanitize_string(metric.name,"name") in labels combined_attrs = list(attributes.items()) + list( metric.data.data_points[0].attributes.items() ) for name, value in combined_attrs: - assert PROMETHEUS_LABEL_REGEX.sub("_", name) in labels + assert prom_rw._sanitize_string(name,"label") in labels assert str(value) in labels if isinstance(metric.data, Histogram): values = [ From 1cf17d777576230faba203af72c4a753c87ada2d Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Thu, 20 Oct 2022 13:21:58 -0400 Subject: [PATCH 12/16] More updates from PR review --- .../examples/sampleapp.py | 14 ++++++++++++++ .../pyproject.toml | 4 ++-- .../exporter/prometheus_remote_write/__init__.py | 14 +++++++------- .../exporter/prometheus_remote_write/version.py | 2 +- .../tests/conftest.py | 2 +- .../tests/test_prometheus_remote_write_exporter.py | 12 +++++++----- pyproject.toml | 1 + 7 files changed, 33 insertions(+), 16 deletions(-) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py index d7957bc8d6..40e217d22c 100644 --- 
a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py @@ -1,3 +1,17 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import logging import random import sys diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/pyproject.toml b/exporter/opentelemetry-exporter-prometheus-remote-write/pyproject.toml index a9db5c2e08..49ae48d397 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/pyproject.toml +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/pyproject.toml @@ -27,8 +27,8 @@ classifiers = [ dependencies = [ "protobuf ~= 4.21", "requests ~= 2.28", - "opentelemetry-api ~= 1.13.0", - "opentelemetry-sdk ~= 1.13.0", + "opentelemetry-api ~= 1.12", + "opentelemetry-sdk ~= 1.12", "python-snappy ~= 0.6", ] diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py index 68f2c37a6e..d1d42ef283 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -267,7 +267,7 @@ def _sample(self, value: int, 
timestamp: int) -> Sample: def _label(self, name: str, value: str) -> Label: label = Label() - label.name = self._sanitize_string(name,"label") + label.name = self._sanitize_string(name, "label") label.value = value return label @@ -286,15 +286,12 @@ def _sanitize_string(self, string: str, type_: str) -> str: # Remove consecutive underscores # TODO: Unfortunately this clobbbers __name__ - #sanitized = UNDERSCORE_REGEX.sub("_",sanitized) + # sanitized = UNDERSCORE_REGEX.sub("_",sanitized) return sanitized def _parse_histogram_data_point(self, data_point, name): - # if (len(data_point.explicit_bounds)+1) != len(data_point.bucket_counts): - # raise ValueError("Number of buckets must be 1 more than the explicit bounds!") - sample_attr_pairs = [] base_attrs = [(n, v) for n, v in data_point.attributes.items()] @@ -304,7 +301,10 @@ def handle_bucket(value, bound=None, name_override=None): # Metric Level attributes + the bucket boundary attribute + name ts_attrs = base_attrs.copy() ts_attrs.append( - ("__name__", self._sanitize_string(name_override or name,"name")) + ( + "__name__", + self._sanitize_string(name_override or name, "name"), + ) ) if bound: ts_attrs.append(("le", str(bound))) @@ -334,7 +334,7 @@ def handle_bucket(value, bound=None, name_override=None): def _parse_data_point(self, data_point, name=None): attrs = tuple(data_point.attributes.items()) + ( - ("__name__", self._sanitize_string(name,"name")), + ("__name__", self._sanitize_string(name, "name")), ) sample = (data_point.value, (data_point.time_unix_nano // 1_000_000)) return attrs, sample diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py index b9536c2461..09b3473b7d 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py +++ 
b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.13.0" +__version__ = "0.34b0" diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py index aa7114306f..9e50919bd8 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py @@ -2,7 +2,7 @@ import pytest -import opentelemetry.test.metrictestutil as metric_util # import _generate_gauge, _generate_sum +import opentelemetry.test.metrictestutil as metric_util from opentelemetry.exporter.prometheus_remote_write import ( PrometheusRemoteWriteMetricsExporter, ) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py index c3150396fc..4d8c925ecf 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py @@ -43,15 +43,17 @@ (":abc", ":abc"), ("abc.name.hi", "abc_name_hi"), ("service.name...", "service_name___"), - ("4hellowor:ld5∂©∑","_hellowor:ld5___"), + ("4hellowor:ld5∂©∑", "_hellowor:ld5___"), ], ) def test_regex(name, result, prom_rw): - assert prom_rw._sanitize_string(name,"name") == result + assert prom_rw._sanitize_string(name, "name") == result + def test_regex_invalid(prom_rw): with pytest.raises(TypeError): - prom_rw("foo_bar","A random type") + prom_rw("foo_bar", "A random type") + def test_parse_data_point(prom_rw): @@ -132,12 +134,12 @@ def test_parse_metric(metric, 
prom_rw): # This doesn't guarantee the labels aren't mixed up, but our other # test cases already do. assert "__name__" in labels - assert prom_rw._sanitize_string(metric.name,"name") in labels + assert prom_rw._sanitize_string(metric.name, "name") in labels combined_attrs = list(attributes.items()) + list( metric.data.data_points[0].attributes.items() ) for name, value in combined_attrs: - assert prom_rw._sanitize_string(name,"label") in labels + assert prom_rw._sanitize_string(name, "label") in labels assert str(value) in labels if isinstance(metric.data, Histogram): values = [ diff --git a/pyproject.toml b/pyproject.toml index c1a64c5240..bd14c6605d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,6 +6,7 @@ exclude = ''' | \.tox | venv | build + | gen | dist ) ''' From e83d9b5d2c887d0c0f88fc46bce52ee2bd34947b Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Wed, 26 Oct 2022 09:40:52 -0400 Subject: [PATCH 13/16] Undo adding 'gen' to ignore list for black - Was causing issues with other tools. I just ran our formatting tool on the generated files to avoid issues. - add a gitignore here to ensure our mimicked import setup doesn't get committed. 
--- .../proto/.gitignore | 1 + .../proto/generate-proto-py.sh | 5 + .../gen/gogoproto/gogo_pb2.py | 323 +++++++++++++----- .../prometheus_remote_write/gen/remote_pb2.py | 63 ++-- .../prometheus_remote_write/gen/types_pb2.py | 111 +++--- pyproject.toml | 1 - 6 files changed, 353 insertions(+), 151 deletions(-) create mode 100644 exporter/opentelemetry-exporter-prometheus-remote-write/proto/.gitignore diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/.gitignore b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/.gitignore new file mode 100644 index 0000000000..25138d1941 --- /dev/null +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/.gitignore @@ -0,0 +1 @@ +opentelemetry diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh index a444ddecce..3cde0bd1ac 100755 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/proto/generate-proto-py.sh @@ -7,6 +7,8 @@ PROTO_VERSION=v1.3.2 SRC_DIR=opentelemetry/exporter/prometheus_remote_write/gen/ DST_DIR=../src/opentelemetry/exporter/prometheus_remote_write/gen/ +#TODO: +# Check that black & protoc are installed properly echo "Creating our destination directory" mkdir -p ${SRC_DIR}/gogoproto @@ -50,3 +52,6 @@ rm -rf protobuf prometheus # Used libprotoc 3.21.1 & protoc 21.7 echo "Compiling proto files to Python" protoc -I . --python_out=../src ${SRC_DIR}/gogoproto/gogo.proto ${SRC_DIR}/remote.proto ${SRC_DIR}/types.proto + +echo "Running formatting on the generated files" +../../../scripts/eachdist.py format --path $PWD/.. 
diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py index 35e48b214a..d5cce2a857 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo_pb2.py @@ -6,96 +6,257 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 +from google.protobuf import ( + descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2, +) -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\x12\tgogoproto\x1a google/protobuf/descriptor.proto:;\n\x13goproto_enum_prefix\x12\x1c.google.protobuf.EnumOptions\x18\xb1\xe4\x03 \x01(\x08:=\n\x15goproto_enum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc5\xe4\x03 \x01(\x08:5\n\renum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc6\xe4\x03 \x01(\x08:7\n\x0f\x65num_customname\x12\x1c.google.protobuf.EnumOptions\x18\xc7\xe4\x03 \x01(\t:0\n\x08\x65numdecl\x12\x1c.google.protobuf.EnumOptions\x18\xc8\xe4\x03 \x01(\x08:A\n\x14\x65numvalue_customname\x12!.google.protobuf.EnumValueOptions\x18\xd1\x83\x04 \x01(\t:;\n\x13goproto_getters_all\x12\x1c.google.protobuf.FileOptions\x18\x99\xec\x03 \x01(\x08:?\n\x17goproto_enum_prefix_all\x12\x1c.google.protobuf.FileOptions\x18\x9a\xec\x03 
\x01(\x08:<\n\x14goproto_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\x9b\xec\x03 \x01(\x08:9\n\x11verbose_equal_all\x12\x1c.google.protobuf.FileOptions\x18\x9c\xec\x03 \x01(\x08:0\n\x08\x66\x61\x63\x65_all\x12\x1c.google.protobuf.FileOptions\x18\x9d\xec\x03 \x01(\x08:4\n\x0cgostring_all\x12\x1c.google.protobuf.FileOptions\x18\x9e\xec\x03 \x01(\x08:4\n\x0cpopulate_all\x12\x1c.google.protobuf.FileOptions\x18\x9f\xec\x03 \x01(\x08:4\n\x0cstringer_all\x12\x1c.google.protobuf.FileOptions\x18\xa0\xec\x03 \x01(\x08:3\n\x0bonlyone_all\x12\x1c.google.protobuf.FileOptions\x18\xa1\xec\x03 \x01(\x08:1\n\tequal_all\x12\x1c.google.protobuf.FileOptions\x18\xa5\xec\x03 \x01(\x08:7\n\x0f\x64\x65scription_all\x12\x1c.google.protobuf.FileOptions\x18\xa6\xec\x03 \x01(\x08:3\n\x0btestgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa7\xec\x03 \x01(\x08:4\n\x0c\x62\x65nchgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa8\xec\x03 \x01(\x08:5\n\rmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xa9\xec\x03 \x01(\x08:7\n\x0funmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaa\xec\x03 \x01(\x08:<\n\x14stable_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xab\xec\x03 \x01(\x08:1\n\tsizer_all\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\x03 \x01(\x08:A\n\x19goproto_enum_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xad\xec\x03 \x01(\x08:9\n\x11\x65num_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xae\xec\x03 \x01(\x08:<\n\x14unsafe_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaf\xec\x03 \x01(\x08:>\n\x16unsafe_unmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xb0\xec\x03 \x01(\x08:B\n\x1agoproto_extensions_map_all\x12\x1c.google.protobuf.FileOptions\x18\xb1\xec\x03 \x01(\x08:@\n\x18goproto_unrecognized_all\x12\x1c.google.protobuf.FileOptions\x18\xb2\xec\x03 \x01(\x08:8\n\x10gogoproto_import\x12\x1c.google.protobuf.FileOptions\x18\xb3\xec\x03 \x01(\x08:6\n\x0eprotosizer_all\x12\x1c.google.protobuf.FileOptions\x18\xb4\xec\x03 
\x01(\x08:3\n\x0b\x63ompare_all\x12\x1c.google.protobuf.FileOptions\x18\xb5\xec\x03 \x01(\x08:4\n\x0ctypedecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb6\xec\x03 \x01(\x08:4\n\x0c\x65numdecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb7\xec\x03 \x01(\x08:<\n\x14goproto_registration\x12\x1c.google.protobuf.FileOptions\x18\xb8\xec\x03 \x01(\x08:7\n\x0fmessagename_all\x12\x1c.google.protobuf.FileOptions\x18\xb9\xec\x03 \x01(\x08:=\n\x15goproto_sizecache_all\x12\x1c.google.protobuf.FileOptions\x18\xba\xec\x03 \x01(\x08:;\n\x13goproto_unkeyed_all\x12\x1c.google.protobuf.FileOptions\x18\xbb\xec\x03 \x01(\x08::\n\x0fgoproto_getters\x12\x1f.google.protobuf.MessageOptions\x18\x81\xf4\x03 \x01(\x08:;\n\x10goproto_stringer\x12\x1f.google.protobuf.MessageOptions\x18\x83\xf4\x03 \x01(\x08:8\n\rverbose_equal\x12\x1f.google.protobuf.MessageOptions\x18\x84\xf4\x03 \x01(\x08:/\n\x04\x66\x61\x63\x65\x12\x1f.google.protobuf.MessageOptions\x18\x85\xf4\x03 \x01(\x08:3\n\x08gostring\x12\x1f.google.protobuf.MessageOptions\x18\x86\xf4\x03 \x01(\x08:3\n\x08populate\x12\x1f.google.protobuf.MessageOptions\x18\x87\xf4\x03 \x01(\x08:3\n\x08stringer\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x8b\x04 \x01(\x08:2\n\x07onlyone\x12\x1f.google.protobuf.MessageOptions\x18\x89\xf4\x03 \x01(\x08:0\n\x05\x65qual\x12\x1f.google.protobuf.MessageOptions\x18\x8d\xf4\x03 \x01(\x08:6\n\x0b\x64\x65scription\x12\x1f.google.protobuf.MessageOptions\x18\x8e\xf4\x03 \x01(\x08:2\n\x07testgen\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xf4\x03 \x01(\x08:3\n\x08\x62\x65nchgen\x12\x1f.google.protobuf.MessageOptions\x18\x90\xf4\x03 \x01(\x08:4\n\tmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x91\xf4\x03 \x01(\x08:6\n\x0bunmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x92\xf4\x03 \x01(\x08:;\n\x10stable_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x93\xf4\x03 \x01(\x08:0\n\x05sizer\x12\x1f.google.protobuf.MessageOptions\x18\x94\xf4\x03 
\x01(\x08:;\n\x10unsafe_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x97\xf4\x03 \x01(\x08:=\n\x12unsafe_unmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x98\xf4\x03 \x01(\x08:A\n\x16goproto_extensions_map\x12\x1f.google.protobuf.MessageOptions\x18\x99\xf4\x03 \x01(\x08:?\n\x14goproto_unrecognized\x12\x1f.google.protobuf.MessageOptions\x18\x9a\xf4\x03 \x01(\x08:5\n\nprotosizer\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xf4\x03 \x01(\x08:2\n\x07\x63ompare\x12\x1f.google.protobuf.MessageOptions\x18\x9d\xf4\x03 \x01(\x08:3\n\x08typedecl\x12\x1f.google.protobuf.MessageOptions\x18\x9e\xf4\x03 \x01(\x08:6\n\x0bmessagename\x12\x1f.google.protobuf.MessageOptions\x18\xa1\xf4\x03 \x01(\x08:<\n\x11goproto_sizecache\x12\x1f.google.protobuf.MessageOptions\x18\xa2\xf4\x03 \x01(\x08::\n\x0fgoproto_unkeyed\x12\x1f.google.protobuf.MessageOptions\x18\xa3\xf4\x03 \x01(\x08:1\n\x08nullable\x12\x1d.google.protobuf.FieldOptions\x18\xe9\xfb\x03 \x01(\x08:.\n\x05\x65mbed\x12\x1d.google.protobuf.FieldOptions\x18\xea\xfb\x03 \x01(\x08:3\n\ncustomtype\x12\x1d.google.protobuf.FieldOptions\x18\xeb\xfb\x03 \x01(\t:3\n\ncustomname\x12\x1d.google.protobuf.FieldOptions\x18\xec\xfb\x03 \x01(\t:0\n\x07jsontag\x12\x1d.google.protobuf.FieldOptions\x18\xed\xfb\x03 \x01(\t:1\n\x08moretags\x12\x1d.google.protobuf.FieldOptions\x18\xee\xfb\x03 \x01(\t:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xef\xfb\x03 \x01(\t:0\n\x07\x63\x61stkey\x12\x1d.google.protobuf.FieldOptions\x18\xf0\xfb\x03 \x01(\t:2\n\tcastvalue\x12\x1d.google.protobuf.FieldOptions\x18\xf1\xfb\x03 \x01(\t:0\n\x07stdtime\x12\x1d.google.protobuf.FieldOptions\x18\xf2\xfb\x03 \x01(\x08:4\n\x0bstdduration\x12\x1d.google.protobuf.FieldOptions\x18\xf3\xfb\x03 \x01(\x08:3\n\nwktpointer\x12\x1d.google.protobuf.FieldOptions\x18\xf4\xfb\x03 \x01(\x08\x42\x45\n\x13\x63om.google.protobufB\nGoGoProtosZ\"github.com/gogo/protobuf/gogoproto') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + 
b'\nGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\x12\tgogoproto\x1a google/protobuf/descriptor.proto:;\n\x13goproto_enum_prefix\x12\x1c.google.protobuf.EnumOptions\x18\xb1\xe4\x03 \x01(\x08:=\n\x15goproto_enum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc5\xe4\x03 \x01(\x08:5\n\renum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc6\xe4\x03 \x01(\x08:7\n\x0f\x65num_customname\x12\x1c.google.protobuf.EnumOptions\x18\xc7\xe4\x03 \x01(\t:0\n\x08\x65numdecl\x12\x1c.google.protobuf.EnumOptions\x18\xc8\xe4\x03 \x01(\x08:A\n\x14\x65numvalue_customname\x12!.google.protobuf.EnumValueOptions\x18\xd1\x83\x04 \x01(\t:;\n\x13goproto_getters_all\x12\x1c.google.protobuf.FileOptions\x18\x99\xec\x03 \x01(\x08:?\n\x17goproto_enum_prefix_all\x12\x1c.google.protobuf.FileOptions\x18\x9a\xec\x03 \x01(\x08:<\n\x14goproto_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\x9b\xec\x03 \x01(\x08:9\n\x11verbose_equal_all\x12\x1c.google.protobuf.FileOptions\x18\x9c\xec\x03 \x01(\x08:0\n\x08\x66\x61\x63\x65_all\x12\x1c.google.protobuf.FileOptions\x18\x9d\xec\x03 \x01(\x08:4\n\x0cgostring_all\x12\x1c.google.protobuf.FileOptions\x18\x9e\xec\x03 \x01(\x08:4\n\x0cpopulate_all\x12\x1c.google.protobuf.FileOptions\x18\x9f\xec\x03 \x01(\x08:4\n\x0cstringer_all\x12\x1c.google.protobuf.FileOptions\x18\xa0\xec\x03 \x01(\x08:3\n\x0bonlyone_all\x12\x1c.google.protobuf.FileOptions\x18\xa1\xec\x03 \x01(\x08:1\n\tequal_all\x12\x1c.google.protobuf.FileOptions\x18\xa5\xec\x03 \x01(\x08:7\n\x0f\x64\x65scription_all\x12\x1c.google.protobuf.FileOptions\x18\xa6\xec\x03 \x01(\x08:3\n\x0btestgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa7\xec\x03 \x01(\x08:4\n\x0c\x62\x65nchgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa8\xec\x03 \x01(\x08:5\n\rmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xa9\xec\x03 \x01(\x08:7\n\x0funmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaa\xec\x03 
\x01(\x08:<\n\x14stable_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xab\xec\x03 \x01(\x08:1\n\tsizer_all\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\x03 \x01(\x08:A\n\x19goproto_enum_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xad\xec\x03 \x01(\x08:9\n\x11\x65num_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xae\xec\x03 \x01(\x08:<\n\x14unsafe_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaf\xec\x03 \x01(\x08:>\n\x16unsafe_unmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xb0\xec\x03 \x01(\x08:B\n\x1agoproto_extensions_map_all\x12\x1c.google.protobuf.FileOptions\x18\xb1\xec\x03 \x01(\x08:@\n\x18goproto_unrecognized_all\x12\x1c.google.protobuf.FileOptions\x18\xb2\xec\x03 \x01(\x08:8\n\x10gogoproto_import\x12\x1c.google.protobuf.FileOptions\x18\xb3\xec\x03 \x01(\x08:6\n\x0eprotosizer_all\x12\x1c.google.protobuf.FileOptions\x18\xb4\xec\x03 \x01(\x08:3\n\x0b\x63ompare_all\x12\x1c.google.protobuf.FileOptions\x18\xb5\xec\x03 \x01(\x08:4\n\x0ctypedecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb6\xec\x03 \x01(\x08:4\n\x0c\x65numdecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb7\xec\x03 \x01(\x08:<\n\x14goproto_registration\x12\x1c.google.protobuf.FileOptions\x18\xb8\xec\x03 \x01(\x08:7\n\x0fmessagename_all\x12\x1c.google.protobuf.FileOptions\x18\xb9\xec\x03 \x01(\x08:=\n\x15goproto_sizecache_all\x12\x1c.google.protobuf.FileOptions\x18\xba\xec\x03 \x01(\x08:;\n\x13goproto_unkeyed_all\x12\x1c.google.protobuf.FileOptions\x18\xbb\xec\x03 \x01(\x08::\n\x0fgoproto_getters\x12\x1f.google.protobuf.MessageOptions\x18\x81\xf4\x03 \x01(\x08:;\n\x10goproto_stringer\x12\x1f.google.protobuf.MessageOptions\x18\x83\xf4\x03 \x01(\x08:8\n\rverbose_equal\x12\x1f.google.protobuf.MessageOptions\x18\x84\xf4\x03 \x01(\x08:/\n\x04\x66\x61\x63\x65\x12\x1f.google.protobuf.MessageOptions\x18\x85\xf4\x03 \x01(\x08:3\n\x08gostring\x12\x1f.google.protobuf.MessageOptions\x18\x86\xf4\x03 
\x01(\x08:3\n\x08populate\x12\x1f.google.protobuf.MessageOptions\x18\x87\xf4\x03 \x01(\x08:3\n\x08stringer\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x8b\x04 \x01(\x08:2\n\x07onlyone\x12\x1f.google.protobuf.MessageOptions\x18\x89\xf4\x03 \x01(\x08:0\n\x05\x65qual\x12\x1f.google.protobuf.MessageOptions\x18\x8d\xf4\x03 \x01(\x08:6\n\x0b\x64\x65scription\x12\x1f.google.protobuf.MessageOptions\x18\x8e\xf4\x03 \x01(\x08:2\n\x07testgen\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xf4\x03 \x01(\x08:3\n\x08\x62\x65nchgen\x12\x1f.google.protobuf.MessageOptions\x18\x90\xf4\x03 \x01(\x08:4\n\tmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x91\xf4\x03 \x01(\x08:6\n\x0bunmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x92\xf4\x03 \x01(\x08:;\n\x10stable_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x93\xf4\x03 \x01(\x08:0\n\x05sizer\x12\x1f.google.protobuf.MessageOptions\x18\x94\xf4\x03 \x01(\x08:;\n\x10unsafe_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x97\xf4\x03 \x01(\x08:=\n\x12unsafe_unmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x98\xf4\x03 \x01(\x08:A\n\x16goproto_extensions_map\x12\x1f.google.protobuf.MessageOptions\x18\x99\xf4\x03 \x01(\x08:?\n\x14goproto_unrecognized\x12\x1f.google.protobuf.MessageOptions\x18\x9a\xf4\x03 \x01(\x08:5\n\nprotosizer\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xf4\x03 \x01(\x08:2\n\x07\x63ompare\x12\x1f.google.protobuf.MessageOptions\x18\x9d\xf4\x03 \x01(\x08:3\n\x08typedecl\x12\x1f.google.protobuf.MessageOptions\x18\x9e\xf4\x03 \x01(\x08:6\n\x0bmessagename\x12\x1f.google.protobuf.MessageOptions\x18\xa1\xf4\x03 \x01(\x08:<\n\x11goproto_sizecache\x12\x1f.google.protobuf.MessageOptions\x18\xa2\xf4\x03 \x01(\x08::\n\x0fgoproto_unkeyed\x12\x1f.google.protobuf.MessageOptions\x18\xa3\xf4\x03 \x01(\x08:1\n\x08nullable\x12\x1d.google.protobuf.FieldOptions\x18\xe9\xfb\x03 \x01(\x08:.\n\x05\x65mbed\x12\x1d.google.protobuf.FieldOptions\x18\xea\xfb\x03 
\x01(\x08:3\n\ncustomtype\x12\x1d.google.protobuf.FieldOptions\x18\xeb\xfb\x03 \x01(\t:3\n\ncustomname\x12\x1d.google.protobuf.FieldOptions\x18\xec\xfb\x03 \x01(\t:0\n\x07jsontag\x12\x1d.google.protobuf.FieldOptions\x18\xed\xfb\x03 \x01(\t:1\n\x08moretags\x12\x1d.google.protobuf.FieldOptions\x18\xee\xfb\x03 \x01(\t:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xef\xfb\x03 \x01(\t:0\n\x07\x63\x61stkey\x12\x1d.google.protobuf.FieldOptions\x18\xf0\xfb\x03 \x01(\t:2\n\tcastvalue\x12\x1d.google.protobuf.FieldOptions\x18\xf1\xfb\x03 \x01(\t:0\n\x07stdtime\x12\x1d.google.protobuf.FieldOptions\x18\xf2\xfb\x03 \x01(\x08:4\n\x0bstdduration\x12\x1d.google.protobuf.FieldOptions\x18\xf3\xfb\x03 \x01(\x08:3\n\nwktpointer\x12\x1d.google.protobuf.FieldOptions\x18\xf4\xfb\x03 \x01(\x08\x42\x45\n\x13\x63om.google.protobufB\nGoGoProtosZ"github.com/gogo/protobuf/gogoproto' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.gogoproto.gogo_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "opentelemetry.exporter.prometheus_remote_write.gen.gogoproto.gogo_pb2", + globals(), +) if _descriptor._USE_C_DESCRIPTORS == False: - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_prefix) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_stringer) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_stringer) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_customname) - google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enumdecl) - google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(enumvalue_customname) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_getters_all) - 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_prefix_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_stringer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(verbose_equal_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(face_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gostring_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(populate_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stringer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(onlyone_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(equal_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(description_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(testgen_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(benchgen_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(marshaler_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unmarshaler_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stable_marshaler_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(sizer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_stringer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enum_stringer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_marshaler_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_unmarshaler_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_extensions_map_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unrecognized_all) - 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gogoproto_import) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(protosizer_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(compare_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(typedecl_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enumdecl_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_registration) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(messagename_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_sizecache_all) - google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unkeyed_all) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_getters) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_stringer) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(verbose_equal) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(face) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(gostring) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(populate) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stringer) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(onlyone) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(equal) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(description) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(testgen) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(benchgen) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(marshaler) - 
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unmarshaler) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stable_marshaler) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sizer) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_marshaler) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_unmarshaler) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_extensions_map) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unrecognized) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(protosizer) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(compare) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(typedecl) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(messagename) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_sizecache) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unkeyed) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(nullable) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(embed) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customtype) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customname) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(jsontag) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(moretags) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(casttype) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castkey) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castvalue) - 
google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdtime) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdduration) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(wktpointer) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + goproto_enum_prefix + ) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + goproto_enum_stringer + ) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + enum_stringer + ) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + enum_customname + ) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension( + enumdecl + ) + google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension( + enumvalue_customname + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_getters_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_enum_prefix_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_stringer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + verbose_equal_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + face_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + gostring_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + populate_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + stringer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + onlyone_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + equal_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + description_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + testgen_all + ) + 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + benchgen_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + marshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + unmarshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + stable_marshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + sizer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_enum_stringer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + enum_stringer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + unsafe_marshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + unsafe_unmarshaler_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_extensions_map_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_unrecognized_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + gogoproto_import + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + protosizer_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + compare_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + typedecl_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + enumdecl_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_registration + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + messagename_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_sizecache_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_unkeyed_all + ) + 
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_getters + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_stringer + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + verbose_equal + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + face + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + gostring + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + populate + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + stringer + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + onlyone + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + equal + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + description + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + testgen + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + benchgen + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + marshaler + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + unmarshaler + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + stable_marshaler + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + sizer + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + unsafe_marshaler + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + unsafe_unmarshaler + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_extensions_map + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_unrecognized + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + protosizer + ) + 
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + compare + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + typedecl + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + messagename + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_sizecache + ) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension( + goproto_unkeyed + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + nullable + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + embed + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + customtype + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + customname + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + jsontag + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + moretags + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + casttype + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + castkey + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + castvalue + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + stdtime + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + stdduration + ) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension( + wktpointer + ) - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nGoGoProtosZ\"github.com/gogo/protobuf/gogoproto' + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nGoGoProtosZ"github.com/gogo/protobuf/gogoproto' # @@protoc_insertion_point(module_scope) diff --git 
a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py index a274dbf204..09d13a7a09 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/remote_pb2.py @@ -6,39 +6,54 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.exporter.prometheus_remote_write.gen import types_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_types__pb2 -from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2 +from opentelemetry.exporter.prometheus_remote_write.gen import ( + types_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_types__pb2, +) +from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import ( + gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2, +) -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n?opentelemetry/exporter/prometheus_remote_write/gen/remote.proto\x12\nprometheus\x1a>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\"z\n\x0cWriteRequest\x12\x30\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeriesB\x04\xc8\xde\x1f\x00\x12\x32\n\x08metadata\x18\x03 
\x03(\x0b\x32\x1a.prometheus.MetricMetadataB\x04\xc8\xde\x1f\x00J\x04\x08\x02\x10\x03\"\xae\x01\n\x0bReadRequest\x12\"\n\x07queries\x18\x01 \x03(\x0b\x32\x11.prometheus.Query\x12\x45\n\x17\x61\x63\x63\x65pted_response_types\x18\x02 \x03(\x0e\x32$.prometheus.ReadRequest.ResponseType\"4\n\x0cResponseType\x12\x0b\n\x07SAMPLES\x10\x00\x12\x17\n\x13STREAMED_XOR_CHUNKS\x10\x01\"8\n\x0cReadResponse\x12(\n\x07results\x18\x01 \x03(\x0b\x32\x17.prometheus.QueryResult\"\x8f\x01\n\x05Query\x12\x1a\n\x12start_timestamp_ms\x18\x01 \x01(\x03\x12\x18\n\x10\x65nd_timestamp_ms\x18\x02 \x01(\x03\x12*\n\x08matchers\x18\x03 \x03(\x0b\x32\x18.prometheus.LabelMatcher\x12$\n\x05hints\x18\x04 \x01(\x0b\x32\x15.prometheus.ReadHints\"9\n\x0bQueryResult\x12*\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeries\"]\n\x13\x43hunkedReadResponse\x12\x31\n\x0e\x63hunked_series\x18\x01 \x03(\x0b\x32\x19.prometheus.ChunkedSeries\x12\x13\n\x0bquery_index\x18\x02 \x01(\x03\x42\x08Z\x06prompbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n?opentelemetry/exporter/prometheus_remote_write/gen/remote.proto\x12\nprometheus\x1a>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"z\n\x0cWriteRequest\x12\x30\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeriesB\x04\xc8\xde\x1f\x00\x12\x32\n\x08metadata\x18\x03 \x03(\x0b\x32\x1a.prometheus.MetricMetadataB\x04\xc8\xde\x1f\x00J\x04\x08\x02\x10\x03"\xae\x01\n\x0bReadRequest\x12"\n\x07queries\x18\x01 \x03(\x0b\x32\x11.prometheus.Query\x12\x45\n\x17\x61\x63\x63\x65pted_response_types\x18\x02 \x03(\x0e\x32$.prometheus.ReadRequest.ResponseType"4\n\x0cResponseType\x12\x0b\n\x07SAMPLES\x10\x00\x12\x17\n\x13STREAMED_XOR_CHUNKS\x10\x01"8\n\x0cReadResponse\x12(\n\x07results\x18\x01 \x03(\x0b\x32\x17.prometheus.QueryResult"\x8f\x01\n\x05Query\x12\x1a\n\x12start_timestamp_ms\x18\x01 \x01(\x03\x12\x18\n\x10\x65nd_timestamp_ms\x18\x02 
\x01(\x03\x12*\n\x08matchers\x18\x03 \x03(\x0b\x32\x18.prometheus.LabelMatcher\x12$\n\x05hints\x18\x04 \x01(\x0b\x32\x15.prometheus.ReadHints"9\n\x0bQueryResult\x12*\n\ntimeseries\x18\x01 \x03(\x0b\x32\x16.prometheus.TimeSeries"]\n\x13\x43hunkedReadResponse\x12\x31\n\x0e\x63hunked_series\x18\x01 \x03(\x0b\x32\x19.prometheus.ChunkedSeries\x12\x13\n\x0bquery_index\x18\x02 \x01(\x03\x42\x08Z\x06prompbb\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2", + globals(), +) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'Z\006prompb' - _WRITEREQUEST.fields_by_name['timeseries']._options = None - _WRITEREQUEST.fields_by_name['timeseries']._serialized_options = b'\310\336\037\000' - _WRITEREQUEST.fields_by_name['metadata']._options = None - _WRITEREQUEST.fields_by_name['metadata']._serialized_options = b'\310\336\037\000' - _WRITEREQUEST._serialized_start=216 - _WRITEREQUEST._serialized_end=338 - _READREQUEST._serialized_start=341 - _READREQUEST._serialized_end=515 - _READREQUEST_RESPONSETYPE._serialized_start=463 - _READREQUEST_RESPONSETYPE._serialized_end=515 - _READRESPONSE._serialized_start=517 - _READRESPONSE._serialized_end=573 - _QUERY._serialized_start=576 - _QUERY._serialized_end=719 - _QUERYRESULT._serialized_start=721 - _QUERYRESULT._serialized_end=778 - _CHUNKEDREADRESPONSE._serialized_start=780 - _CHUNKEDREADRESPONSE._serialized_end=873 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"Z\006prompb" + _WRITEREQUEST.fields_by_name["timeseries"]._options = None + _WRITEREQUEST.fields_by_name[ + "timeseries" + ]._serialized_options = b"\310\336\037\000" + _WRITEREQUEST.fields_by_name["metadata"]._options = None + 
_WRITEREQUEST.fields_by_name[ + "metadata" + ]._serialized_options = b"\310\336\037\000" + _WRITEREQUEST._serialized_start = 216 + _WRITEREQUEST._serialized_end = 338 + _READREQUEST._serialized_start = 341 + _READREQUEST._serialized_end = 515 + _READREQUEST_RESPONSETYPE._serialized_start = 463 + _READREQUEST_RESPONSETYPE._serialized_end = 515 + _READRESPONSE._serialized_start = 517 + _READRESPONSE._serialized_end = 573 + _QUERY._serialized_start = 576 + _QUERY._serialized_end = 719 + _QUERYRESULT._serialized_start = 721 + _QUERYRESULT._serialized_end = 778 + _CHUNKEDREADRESPONSE._serialized_start = 780 + _CHUNKEDREADRESPONSE._serialized_end = 873 # @@protoc_insertion_point(module_scope) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py index d519e03423..a58e0194ee 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/gen/types_pb2.py @@ -6,60 +6,81 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2 +from opentelemetry.exporter.prometheus_remote_write.gen.gogoproto import ( + gogo_pb2 as opentelemetry_dot_exporter_dot_prometheus__remote__write_dot_gen_dot_gogoproto_dot_gogo__pb2, +) -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x12\nprometheus\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto\"\xf8\x01\n\x0eMetricMetadata\x12\x33\n\x04type\x18\x01 \x01(\x0e\x32%.prometheus.MetricMetadata.MetricType\x12\x1a\n\x12metric_family_name\x18\x02 \x01(\t\x12\x0c\n\x04help\x18\x04 \x01(\t\x12\x0c\n\x04unit\x18\x05 \x01(\t\"y\n\nMetricType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\t\n\x05GAUGE\x10\x02\x12\r\n\tHISTOGRAM\x10\x03\x12\x12\n\x0eGAUGEHISTOGRAM\x10\x04\x12\x0b\n\x07SUMMARY\x10\x05\x12\x08\n\x04INFO\x10\x06\x12\x0c\n\x08STATESET\x10\x07\"*\n\x06Sample\x12\r\n\x05value\x18\x01 \x01(\x01\x12\x11\n\ttimestamp\x18\x02 \x01(\x03\"U\n\x08\x45xemplar\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\r\n\x05value\x18\x02 \x01(\x01\x12\x11\n\ttimestamp\x18\x03 \x01(\x03\"\x8f\x01\n\nTimeSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12)\n\x07samples\x18\x02 \x03(\x0b\x32\x12.prometheus.SampleB\x04\xc8\xde\x1f\x00\x12-\n\texemplars\x18\x03 \x03(\x0b\x32\x14.prometheus.ExemplarB\x04\xc8\xde\x1f\x00\"$\n\x05Label\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"1\n\x06Labels\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\"\x82\x01\n\x0cLabelMatcher\x12+\n\x04type\x18\x01 \x01(\x0e\x32\x1d.prometheus.LabelMatcher.Type\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\"(\n\x04Type\x12\x06\n\x02\x45Q\x10\x00\x12\x07\n\x03NEQ\x10\x01\x12\x06\n\x02RE\x10\x02\x12\x07\n\x03NRE\x10\x03\"|\n\tReadHints\x12\x0f\n\x07step_ms\x18\x01 \x01(\x03\x12\x0c\n\x04\x66unc\x18\x02 \x01(\t\x12\x10\n\x08start_ms\x18\x03 \x01(\x03\x12\x0e\n\x06\x65nd_ms\x18\x04 \x01(\x03\x12\x10\n\x08grouping\x18\x05 \x03(\t\x12\n\n\x02\x62y\x18\x06 \x01(\x08\x12\x10\n\x08range_ms\x18\x07 
\x01(\x03\"\x8b\x01\n\x05\x43hunk\x12\x13\n\x0bmin_time_ms\x18\x01 \x01(\x03\x12\x13\n\x0bmax_time_ms\x18\x02 \x01(\x03\x12(\n\x04type\x18\x03 \x01(\x0e\x32\x1a.prometheus.Chunk.Encoding\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\" \n\x08\x45ncoding\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03XOR\x10\x01\"a\n\rChunkedSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\'\n\x06\x63hunks\x18\x02 \x03(\x0b\x32\x11.prometheus.ChunkB\x04\xc8\xde\x1f\x00\x42\x08Z\x06prompbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n>opentelemetry/exporter/prometheus_remote_write/gen/types.proto\x12\nprometheus\x1aGopentelemetry/exporter/prometheus_remote_write/gen/gogoproto/gogo.proto"\xf8\x01\n\x0eMetricMetadata\x12\x33\n\x04type\x18\x01 \x01(\x0e\x32%.prometheus.MetricMetadata.MetricType\x12\x1a\n\x12metric_family_name\x18\x02 \x01(\t\x12\x0c\n\x04help\x18\x04 \x01(\t\x12\x0c\n\x04unit\x18\x05 \x01(\t"y\n\nMetricType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\t\n\x05GAUGE\x10\x02\x12\r\n\tHISTOGRAM\x10\x03\x12\x12\n\x0eGAUGEHISTOGRAM\x10\x04\x12\x0b\n\x07SUMMARY\x10\x05\x12\x08\n\x04INFO\x10\x06\x12\x0c\n\x08STATESET\x10\x07"*\n\x06Sample\x12\r\n\x05value\x18\x01 \x01(\x01\x12\x11\n\ttimestamp\x18\x02 \x01(\x03"U\n\x08\x45xemplar\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\r\n\x05value\x18\x02 \x01(\x01\x12\x11\n\ttimestamp\x18\x03 \x01(\x03"\x8f\x01\n\nTimeSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12)\n\x07samples\x18\x02 \x03(\x0b\x32\x12.prometheus.SampleB\x04\xc8\xde\x1f\x00\x12-\n\texemplars\x18\x03 \x03(\x0b\x32\x14.prometheus.ExemplarB\x04\xc8\xde\x1f\x00"$\n\x05Label\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"1\n\x06Labels\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00"\x82\x01\n\x0cLabelMatcher\x12+\n\x04type\x18\x01 
\x01(\x0e\x32\x1d.prometheus.LabelMatcher.Type\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t"(\n\x04Type\x12\x06\n\x02\x45Q\x10\x00\x12\x07\n\x03NEQ\x10\x01\x12\x06\n\x02RE\x10\x02\x12\x07\n\x03NRE\x10\x03"|\n\tReadHints\x12\x0f\n\x07step_ms\x18\x01 \x01(\x03\x12\x0c\n\x04\x66unc\x18\x02 \x01(\t\x12\x10\n\x08start_ms\x18\x03 \x01(\x03\x12\x0e\n\x06\x65nd_ms\x18\x04 \x01(\x03\x12\x10\n\x08grouping\x18\x05 \x03(\t\x12\n\n\x02\x62y\x18\x06 \x01(\x08\x12\x10\n\x08range_ms\x18\x07 \x01(\x03"\x8b\x01\n\x05\x43hunk\x12\x13\n\x0bmin_time_ms\x18\x01 \x01(\x03\x12\x13\n\x0bmax_time_ms\x18\x02 \x01(\x03\x12(\n\x04type\x18\x03 \x01(\x0e\x32\x1a.prometheus.Chunk.Encoding\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c" \n\x08\x45ncoding\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03XOR\x10\x01"a\n\rChunkedSeries\x12\'\n\x06labels\x18\x01 \x03(\x0b\x32\x11.prometheus.LabelB\x04\xc8\xde\x1f\x00\x12\'\n\x06\x63hunks\x18\x02 \x03(\x0b\x32\x11.prometheus.ChunkB\x04\xc8\xde\x1f\x00\x42\x08Z\x06prompbb\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.exporter.prometheus_remote_write.gen.types_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "opentelemetry.exporter.prometheus_remote_write.gen.types_pb2", + globals(), +) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'Z\006prompb' - _EXEMPLAR.fields_by_name['labels']._options = None - _EXEMPLAR.fields_by_name['labels']._serialized_options = b'\310\336\037\000' - _TIMESERIES.fields_by_name['labels']._options = None - _TIMESERIES.fields_by_name['labels']._serialized_options = b'\310\336\037\000' - _TIMESERIES.fields_by_name['samples']._options = None - _TIMESERIES.fields_by_name['samples']._serialized_options = b'\310\336\037\000' - _TIMESERIES.fields_by_name['exemplars']._options = None - 
_TIMESERIES.fields_by_name['exemplars']._serialized_options = b'\310\336\037\000' - _LABELS.fields_by_name['labels']._options = None - _LABELS.fields_by_name['labels']._serialized_options = b'\310\336\037\000' - _CHUNKEDSERIES.fields_by_name['labels']._options = None - _CHUNKEDSERIES.fields_by_name['labels']._serialized_options = b'\310\336\037\000' - _CHUNKEDSERIES.fields_by_name['chunks']._options = None - _CHUNKEDSERIES.fields_by_name['chunks']._serialized_options = b'\310\336\037\000' - _METRICMETADATA._serialized_start=152 - _METRICMETADATA._serialized_end=400 - _METRICMETADATA_METRICTYPE._serialized_start=279 - _METRICMETADATA_METRICTYPE._serialized_end=400 - _SAMPLE._serialized_start=402 - _SAMPLE._serialized_end=444 - _EXEMPLAR._serialized_start=446 - _EXEMPLAR._serialized_end=531 - _TIMESERIES._serialized_start=534 - _TIMESERIES._serialized_end=677 - _LABEL._serialized_start=679 - _LABEL._serialized_end=715 - _LABELS._serialized_start=717 - _LABELS._serialized_end=766 - _LABELMATCHER._serialized_start=769 - _LABELMATCHER._serialized_end=899 - _LABELMATCHER_TYPE._serialized_start=859 - _LABELMATCHER_TYPE._serialized_end=899 - _READHINTS._serialized_start=901 - _READHINTS._serialized_end=1025 - _CHUNK._serialized_start=1028 - _CHUNK._serialized_end=1167 - _CHUNK_ENCODING._serialized_start=1135 - _CHUNK_ENCODING._serialized_end=1167 - _CHUNKEDSERIES._serialized_start=1169 - _CHUNKEDSERIES._serialized_end=1266 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"Z\006prompb" + _EXEMPLAR.fields_by_name["labels"]._options = None + _EXEMPLAR.fields_by_name[ + "labels" + ]._serialized_options = b"\310\336\037\000" + _TIMESERIES.fields_by_name["labels"]._options = None + _TIMESERIES.fields_by_name[ + "labels" + ]._serialized_options = b"\310\336\037\000" + _TIMESERIES.fields_by_name["samples"]._options = None + _TIMESERIES.fields_by_name[ + "samples" + ]._serialized_options = b"\310\336\037\000" + _TIMESERIES.fields_by_name["exemplars"]._options = None 
+ _TIMESERIES.fields_by_name[ + "exemplars" + ]._serialized_options = b"\310\336\037\000" + _LABELS.fields_by_name["labels"]._options = None + _LABELS.fields_by_name["labels"]._serialized_options = b"\310\336\037\000" + _CHUNKEDSERIES.fields_by_name["labels"]._options = None + _CHUNKEDSERIES.fields_by_name[ + "labels" + ]._serialized_options = b"\310\336\037\000" + _CHUNKEDSERIES.fields_by_name["chunks"]._options = None + _CHUNKEDSERIES.fields_by_name[ + "chunks" + ]._serialized_options = b"\310\336\037\000" + _METRICMETADATA._serialized_start = 152 + _METRICMETADATA._serialized_end = 400 + _METRICMETADATA_METRICTYPE._serialized_start = 279 + _METRICMETADATA_METRICTYPE._serialized_end = 400 + _SAMPLE._serialized_start = 402 + _SAMPLE._serialized_end = 444 + _EXEMPLAR._serialized_start = 446 + _EXEMPLAR._serialized_end = 531 + _TIMESERIES._serialized_start = 534 + _TIMESERIES._serialized_end = 677 + _LABEL._serialized_start = 679 + _LABEL._serialized_end = 715 + _LABELS._serialized_start = 717 + _LABELS._serialized_end = 766 + _LABELMATCHER._serialized_start = 769 + _LABELMATCHER._serialized_end = 899 + _LABELMATCHER_TYPE._serialized_start = 859 + _LABELMATCHER_TYPE._serialized_end = 899 + _READHINTS._serialized_start = 901 + _READHINTS._serialized_end = 1025 + _CHUNK._serialized_start = 1028 + _CHUNK._serialized_end = 1167 + _CHUNK_ENCODING._serialized_start = 1135 + _CHUNK_ENCODING._serialized_end = 1167 + _CHUNKEDSERIES._serialized_start = 1169 + _CHUNKEDSERIES._serialized_end = 1266 # @@protoc_insertion_point(module_scope) diff --git a/pyproject.toml b/pyproject.toml index bd14c6605d..c1a64c5240 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,6 @@ exclude = ''' | \.tox | venv | build - | gen | dist ) ''' From 54ac259677379b6f079b32646083b1f9690ed1d7 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Fri, 28 Oct 2022 10:19:25 -0400 Subject: [PATCH 14/16] Fixes from pylint output - Cannot fix the syntax failures on 'examples/*' directory tho... 
--- .../examples/cortex-config.yml | 1 + .../prometheus_remote_write/__init__.py | 27 ++++++++++++------- .../tests/conftest.py | 7 +++-- .../test_prometheus_remote_write_exporter.py | 13 ++++----- 4 files changed, 29 insertions(+), 19 deletions(-) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml index 37bd6473d6..e3451b94c2 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml @@ -7,6 +7,7 @@ # Disable the requirement that every request to Cortex has a # X-Scope-OrgID header. `fake` will be substituted in instead. +# pylint: skip-file auth_enabled: false server: diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py index d1d42ef283..0adfcb6d33 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/src/opentelemetry/exporter/prometheus_remote_write/__init__.py @@ -21,10 +21,10 @@ import requests import snappy -from opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2 import ( +from opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2 import ( # pylint: disable=no-name-in-module WriteRequest, ) -from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( +from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( # pylint: disable=no-name-in-module Label, Sample, TimeSeries, @@ -124,7 +124,9 @@ def basic_auth(self, basic_auth: Dict): raise ValueError( "basic_auth cannot contain password and password_file" ) - with 
open(basic_auth["password_file"]) as file: + with open( # pylint: disable=unspecified-encoding + basic_auth["password_file"] + ) as file: basic_auth["password"] = file.readline().strip() elif "password" not in basic_auth: raise ValueError("password required in basic_auth") @@ -183,6 +185,7 @@ def export( self, metrics_data: MetricsData, timeout_millis: float = 10_000, + **kwargs, ) -> MetricExportResult: if not metrics_data: return MetricExportResult.SUCCESS @@ -245,9 +248,13 @@ def _parse_metric( for attrs, sample in dp_result: sample_sets[attrs].append(sample) else: - logger.warn("Unsupported Metric Type: %s", type(metric.data)) + logger.warning("Unsupported Metric Type: %s", type(metric.data)) return [] + return self._convert_to_timeseries(sample_sets, resource_labels) + def _convert_to_timeseries( + self, sample_sets: Sequence[tuple], resource_labels: Sequence + ) -> Sequence[TimeSeries]: timeseries = [] for labels, samples in sample_sets.items(): ts = TimeSeries() @@ -259,7 +266,8 @@ def _parse_metric( timeseries.append(ts) return timeseries - def _sample(self, value: int, timestamp: int) -> Sample: + @staticmethod + def _sample(value: int, timestamp: int) -> Sample: sample = Sample() sample.value = value sample.timestamp = timestamp @@ -271,7 +279,8 @@ def _label(self, name: str, value: str) -> Label: label.value = value return label - def _sanitize_string(self, string: str, type_: str) -> str: + @staticmethod + def _sanitize_string(string: str, type_: str) -> str: # I Think Prometheus requires names to NOT start with a number this # would not catch that, but do cover the other cases. The naming rules # don't explicit say this, but the supplied regex implies it. 
@@ -294,7 +303,7 @@ def _parse_histogram_data_point(self, data_point, name): sample_attr_pairs = [] - base_attrs = [(n, v) for n, v in data_point.attributes.items()] + base_attrs = list(data_point.attributes.items()) timestamp = data_point.time_unix_nano // 1_000_000 def handle_bucket(value, bound=None, name_override=None): @@ -339,8 +348,8 @@ def _parse_data_point(self, data_point, name=None): sample = (data_point.value, (data_point.time_unix_nano // 1_000_000)) return attrs, sample - # pylint: disable=no-member,no-self-use - def _build_message(self, timeseries: Sequence[TimeSeries]) -> bytes: + @staticmethod + def _build_message(timeseries: Sequence[TimeSeries]) -> bytes: write_request = WriteRequest() write_request.timeseries.extend(timeseries) serialized_message = write_request.SerializeToString() diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py index 9e50919bd8..259de7b7a2 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/conftest.py @@ -27,17 +27,20 @@ def metric(request): type_ = request.param else: type_ = random.choice(["gauge", "sum"]) + if type_ == "gauge": return metric_util._generate_gauge( "test.gauge", random.randint(0, 100) ) - elif type_ == "sum": + if type_ == "sum": return metric_util._generate_sum( "test.sum", random.randint(0, 9_999_999_999) ) - elif type_ == "histogram": + if type_ == "histogram": return _generate_histogram("test_histogram") + raise ValueError(f"Unsupported metric type '{type_}'.") + def _generate_histogram(name): dp = HistogramDataPoint( diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py index 4d8c925ecf..4579baad68 100644 --- 
a/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py +++ b/exporter/opentelemetry-exporter-prometheus-remote-write/tests/test_prometheus_remote_write_exporter.py @@ -20,7 +20,7 @@ from opentelemetry.exporter.prometheus_remote_write import ( PrometheusRemoteWriteMetricsExporter, ) -from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( +from opentelemetry.exporter.prometheus_remote_write.gen.types_pb2 import ( # pylint: disable=E0611 TimeSeries, ) from opentelemetry.sdk.metrics.export import ( @@ -95,13 +95,11 @@ def test_parse_histogram_dp(prom_rw): assert (dp.bucket_counts[pos], timestamp) == label_sample_pairs[pos][1] # Last two are the sum & total count - pos += 1 - assert ("__name__", f"{name}_sum") in label_sample_pairs[pos][0] - assert (dp.sum, timestamp) == label_sample_pairs[pos][1] + assert ("__name__", f"{name}_sum") in label_sample_pairs[-2][0] + assert (dp.sum, timestamp) == label_sample_pairs[-2][1] - pos += 1 - assert ("__name__", f"{name}_count") in label_sample_pairs[pos][0] - assert (dp.count, timestamp) == label_sample_pairs[pos][1] + assert ("__name__", f"{name}_count") in label_sample_pairs[-1][0] + assert (dp.count, timestamp) == label_sample_pairs[-1][1] @pytest.mark.parametrize( @@ -253,7 +251,6 @@ def test_invalid_tls_config_key_only_param(self): # Ensures export is successful with valid export_records and config @patch("requests.post") def test_valid_export(mock_post, prom_rw, metric): - metric = metric mock_post.return_value.configure_mock(**{"status_code": 200}) # Assumed a "None" for Scope or Resource aren't valid, so build them here From e0f25d2f73710b0e55779105b427557ed9f030ff Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Fri, 28 Oct 2022 12:07:06 -0400 Subject: [PATCH 15/16] Move to `examples` dir to `example` - avoids the pylint issue. 
This is probably better anyway as the example is focused on setting up an environment to test exporting, rather than providing multiple examples. --- .../{examples => example}/Dockerfile | 0 .../{examples => example}/README.md | 0 .../{examples => example}/cortex-config.yml | 0 .../{examples => example}/docker-compose.yml | 0 .../{examples => example}/requirements.txt | 0 .../{examples => example}/sampleapp.py | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename exporter/opentelemetry-exporter-prometheus-remote-write/{examples => example}/Dockerfile (100%) rename exporter/opentelemetry-exporter-prometheus-remote-write/{examples => example}/README.md (100%) rename exporter/opentelemetry-exporter-prometheus-remote-write/{examples => example}/cortex-config.yml (100%) rename exporter/opentelemetry-exporter-prometheus-remote-write/{examples => example}/docker-compose.yml (100%) rename exporter/opentelemetry-exporter-prometheus-remote-write/{examples => example}/requirements.txt (100%) rename exporter/opentelemetry-exporter-prometheus-remote-write/{examples => example}/sampleapp.py (100%) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile b/exporter/opentelemetry-exporter-prometheus-remote-write/example/Dockerfile similarity index 100% rename from exporter/opentelemetry-exporter-prometheus-remote-write/examples/Dockerfile rename to exporter/opentelemetry-exporter-prometheus-remote-write/example/Dockerfile diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md b/exporter/opentelemetry-exporter-prometheus-remote-write/example/README.md similarity index 100% rename from exporter/opentelemetry-exporter-prometheus-remote-write/examples/README.md rename to exporter/opentelemetry-exporter-prometheus-remote-write/example/README.md diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml 
b/exporter/opentelemetry-exporter-prometheus-remote-write/example/cortex-config.yml similarity index 100% rename from exporter/opentelemetry-exporter-prometheus-remote-write/examples/cortex-config.yml rename to exporter/opentelemetry-exporter-prometheus-remote-write/example/cortex-config.yml diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/docker-compose.yml b/exporter/opentelemetry-exporter-prometheus-remote-write/example/docker-compose.yml similarity index 100% rename from exporter/opentelemetry-exporter-prometheus-remote-write/examples/docker-compose.yml rename to exporter/opentelemetry-exporter-prometheus-remote-write/example/docker-compose.yml diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/requirements.txt b/exporter/opentelemetry-exporter-prometheus-remote-write/example/requirements.txt similarity index 100% rename from exporter/opentelemetry-exporter-prometheus-remote-write/examples/requirements.txt rename to exporter/opentelemetry-exporter-prometheus-remote-write/example/requirements.txt diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py b/exporter/opentelemetry-exporter-prometheus-remote-write/example/sampleapp.py similarity index 100% rename from exporter/opentelemetry-exporter-prometheus-remote-write/examples/sampleapp.py rename to exporter/opentelemetry-exporter-prometheus-remote-write/example/sampleapp.py From 0572e16fde15a8f81b06a3a72864bf2c3ad4a964 Mon Sep 17 00:00:00 2001 From: Youssef Gamal Date: Mon, 31 Oct 2022 08:51:35 -0400 Subject: [PATCH 16/16] Cleanup README --- .../README.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst b/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst index b38dc8efdb..6ed5c5ebcf 100644 --- a/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst +++ 
b/exporter/opentelemetry-exporter-prometheus-remote-write/README.rst @@ -6,10 +6,8 @@ OpenTelemetry Prometheus Remote Write Exporter .. |pypi| image:: https://badge.fury.io/py/opentelemetry-exporter-prometheus-remote-write.svg :target: https://pypi.org/project/opentelemetry-exporter-prometheus-remote-write/ -This package contains an exporter to send `OTLP`_ metrics from the -`OpenTelemetry Python SDK`_ directly to a `Prometheus Remote Write integrated backend`_ -(such as Cortex or Thanos) without having to run an instance of the -Prometheus server. +This package contains an exporter to send metrics from the OpenTelemetry Python SDK directly to a Prometheus Remote Write integrated backend +(such as Cortex or Thanos) without having to run an instance of the Prometheus server. Installation