
Commit 7e8778d: add confluent kafka instrumentation

Committed May 31, 2022 (parent commit 1677ee2)

File tree: 13 files changed, +326 -86 lines
 

‎.github/component_owners.yml

+4

@@ -11,6 +11,10 @@ components:
     - oxeye-nikolay
     - nikosokolik
 
+  instrumentation/opentelemetry-instrumentation-confluent-kafka:
+    - oxeye-dorkolog
+    - dorkolog
+
   propagator/opentelemetry-propagator-aws-xray:
     - NathanielRN

‎CHANGELOG.md

+5

@@ -13,6 +13,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)
 - cleanup type hints for textmap `Getter` and `Setter` classes
   ([#1106](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1106))
 
+### Added
+- Added `opentelemetry-instrumentation-confluent-kafka`
+  ([#1111](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1111))
+
+
 ## [1.12.0rc1-0.31b0](https://github.com/open-telemetry/opentelemetry-python/releases/tag/v1.12.0rc1-0.31b0) - 2022-05-17
 

‎instrumentation/README.md

+1

@@ -10,6 +10,7 @@
 | [opentelemetry-instrumentation-boto3sqs](./opentelemetry-instrumentation-boto3sqs) | boto3 ~= 1.0 |
 | [opentelemetry-instrumentation-botocore](./opentelemetry-instrumentation-botocore) | botocore ~= 1.0 |
 | [opentelemetry-instrumentation-celery](./opentelemetry-instrumentation-celery) | celery >= 4.0, < 6.0 |
+| [opentelemetry-instrumentation-confluent-kafka](./opentelemetry-instrumentation-confluent-kafka) | confluent-kafka ~= 1.8.2 |
 | [opentelemetry-instrumentation-dbapi](./opentelemetry-instrumentation-dbapi) | dbapi |
 | [opentelemetry-instrumentation-django](./opentelemetry-instrumentation-django) | django >= 1.10 |
 | [opentelemetry-instrumentation-elasticsearch](./opentelemetry-instrumentation-elasticsearch) | elasticsearch >= 2.0 |
instrumentation/opentelemetry-instrumentation-confluent-kafka/README.rst

+23

@@ -0,0 +1,23 @@
+OpenTelemetry confluent-kafka Instrumentation
+=============================================
+
+|pypi|
+
+.. |pypi| image:: https://badge.fury.io/py/opentelemetry-instrumentation-confluent-kafka.svg
+   :target: https://pypi.org/project/opentelemetry-instrumentation-confluent-kafka/
+
+This library allows tracing requests made by the confluent-kafka library.
+
+Installation
+------------
+
+::
+
+    pip install opentelemetry-instrumentation-confluent-kafka
+
+
+References
+----------
+
+* `OpenTelemetry confluent-kafka Tracing <https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/confluent-kafka/confluent-kafka.html>`_
+* `OpenTelemetry Project <https://opentelemetry.io/>`_
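
Note: for orientation, a minimal usage sketch of the new package (not part of the diff). It drives the `instrument_producer` API added in `__init__.py` below; the broker address and topic name are hypothetical placeholders:

    # Sketch only: broker address and topic are placeholders.
    from confluent_kafka import Producer

    from opentelemetry.instrumentation.confluent_kafka import (
        ConfluentKafkaInstrumentor,
    )

    # Wrap a plain producer in a tracing proxy; produce() then records
    # PRODUCER spans and injects trace context into the message headers.
    producer = Producer({"bootstrap.servers": "localhost:9092"})
    producer = ConfluentKafkaInstrumentor.instrument_producer(producer)

    producer.produce("my-topic", b"hello")
    producer.flush()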
instrumentation/opentelemetry-instrumentation-confluent-kafka/setup.cfg

+57

@@ -0,0 +1,57 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+[metadata]
+name = opentelemetry-instrumentation-confluent-kafka
+description = OpenTelemetry Confluent Kafka instrumentation
+long_description = file: README.rst
+long_description_content_type = text/x-rst
+author = OpenTelemetry Authors
+author_email = cncf-opentelemetry-contributors@lists.cncf.io
+url = https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation/opentelemetry-instrumentation-confluent-kafka
+platforms = any
+license = Apache-2.0
+classifiers =
+    Development Status :: 4 - Beta
+    Intended Audience :: Developers
+    License :: OSI Approved :: Apache Software License
+    Programming Language :: Python
+    Programming Language :: Python :: 3
+    Programming Language :: Python :: 3.6
+    Programming Language :: Python :: 3.7
+    Programming Language :: Python :: 3.8
+    Programming Language :: Python :: 3.9
+    Programming Language :: Python :: 3.10
+
+[options]
+python_requires = >=3.6
+package_dir=
+    =src
+packages=find_namespace:
+
+install_requires =
+    opentelemetry-api ~= 1.3
+    wrapt >= 1.0.0, < 2.0.0
+
+[options.extras_require]
+test =
+    # add any test dependencies here
+    confluent-kafka ~= 1.8.2
+
+[options.packages.find]
+where = src
+
+[options.entry_points]
+opentelemetry_instrumentor =
+    confluent_kafka = opentelemetry.instrumentation.confluent_kafka:ConfluentKafkaInstrumentor
instrumentation/opentelemetry-instrumentation-confluent-kafka/setup.py

+99

@@ -0,0 +1,99 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# DO NOT EDIT. THIS FILE WAS AUTOGENERATED FROM templates/instrumentation_setup.py.txt.
+# RUN `python scripts/generate_setup.py` TO REGENERATE.
+
+
+import distutils.cmd
+import json
+import os
+from configparser import ConfigParser
+
+import setuptools
+
+config = ConfigParser()
+config.read("setup.cfg")
+
+# We provide extras_require parameter to setuptools.setup later which
+# overwrites the extras_require section from setup.cfg. To support extras_require
+# section in setup.cfg, we load it here and merge it with the extras_require param.
+extras_require = {}
+if "options.extras_require" in config:
+    for key, value in config["options.extras_require"].items():
+        extras_require[key] = [v for v in value.split("\n") if v.strip()]
+
+BASE_DIR = os.path.dirname(__file__)
+PACKAGE_INFO = {}
+
+VERSION_FILENAME = os.path.join(
+    BASE_DIR,
+    "src",
+    "opentelemetry",
+    "instrumentation",
+    "confluent_kafka",
+    "version.py",
+)
+with open(VERSION_FILENAME, encoding="utf-8") as f:
+    exec(f.read(), PACKAGE_INFO)
+
+PACKAGE_FILENAME = os.path.join(
+    BASE_DIR,
+    "src",
+    "opentelemetry",
+    "instrumentation",
+    "confluent_kafka",
+    "package.py",
+)
+with open(PACKAGE_FILENAME, encoding="utf-8") as f:
+    exec(f.read(), PACKAGE_INFO)
+
+# Mark any instruments/runtime dependencies as test dependencies as well.
+extras_require["instruments"] = PACKAGE_INFO["_instruments"]
+test_deps = extras_require.get("test", [])
+for dep in extras_require["instruments"]:
+    test_deps.append(dep)
+
+extras_require["test"] = test_deps
+
+
+class JSONMetadataCommand(distutils.cmd.Command):
+
+    description = (
+        "print out package metadata as JSON. This is used by OpenTelemetry dev scripts to ",
+        "auto-generate code in other places",
+    )
+    user_options = []
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        metadata = {
+            "name": config["metadata"]["name"],
+            "version": PACKAGE_INFO["__version__"],
+            "instruments": PACKAGE_INFO["_instruments"],
+        }
+        print(json.dumps(metadata))
+
+
+setuptools.setup(
+    cmdclass={"meta": JSONMetadataCommand},
+    version=PACKAGE_INFO["__version__"],
+    extras_require=extras_require,
+)
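
Aside: with `cmdclass={"meta": JSONMetadataCommand}` registered, the command can be run from the package directory. A sketch of the expected invocation and output, assuming the `0.31b0` version and `_instruments` values added elsewhere in this commit:

    $ python setup.py meta
    {"name": "opentelemetry-instrumentation-confluent-kafka", "version": "0.31b0", "instruments": ["confluent-kafka ~= 1.8.2"]}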

‎instrumentation/opentelemetry-instrumentation-confluent-kafka/src/opentelemetry/instrumentation/confluent_kafka/__init__.py

+92 -47

@@ -101,24 +101,30 @@ def instrument_consumer(consumer: Consumer, tracer_provider=None)
 
 import confluent_kafka
 import wrapt
-from confluent_kafka import Producer, Consumer
-from opentelemetry import trace, propagate, context
+from confluent_kafka import Consumer, Producer
+
+from opentelemetry import context, propagate, trace
+from opentelemetry.instrumentation.confluent_kafka.package import _instruments
+from opentelemetry.instrumentation.confluent_kafka.utils import (
+    KafkaPropertiesExtractor,
+    _enrich_span,
+    _get_span_name,
+    _kafka_getter,
+    _kafka_setter,
+)
+from opentelemetry.instrumentation.confluent_kafka.version import __version__
 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
-from opentelemetry.semconv.trace import MessagingOperationValues
-from opentelemetry.trace import Tracer, Link, SpanKind
 from opentelemetry.instrumentation.utils import unwrap
-
-from kafka_instrumentation.package import _instruments
-from kafka_instrumentation.utils import KafkaPropertiesExtractor, _get_span_name, \
-    _kafka_setter, _enrich_span, _kafka_getter
-from kafka_instrumentation.version import __version__
+from opentelemetry.semconv.trace import MessagingOperationValues
+from opentelemetry.trace import Link, SpanKind, Tracer
 
 
 class AutoInstrumentedProducer(Producer):
-    def __init__(self, config):
-        super().__init__(config)
 
-    def produce(self, topic, value=None, *args, **kwargs):
+    # This method is deliberately implemented in order to allow wrapt to wrap this function
+    def produce(
+        self, topic, value=None, *args, **kwargs
+    ):  # pylint: disable=keyword-arg-before-vararg,useless-super-delegation
         super().produce(topic, value, *args, **kwargs)
 
 
@@ -127,12 +133,12 @@ def __init__(self, config):
         super().__init__(config)
         self._current_consume_span = None
 
-    def poll(self, timeout=-1):
+    # This method is deliberately implemented in order to allow wrapt to wrap this function
+    def poll(self, timeout=-1):  # pylint: disable=useless-super-delegation
         return super().poll(timeout)
 
 
 class ProxiedProducer(Producer):
-
     def __init__(self, producer: Producer, tracer: Tracer):
         self._producer = producer
         self._tracer = tracer
@@ -143,19 +149,22 @@ def flush(self, timeout=-1):
     def poll(self, timeout=-1):
         self._producer.poll(timeout)
 
-    def produce(self, topic, value=None, *args, **kwargs):
+    def produce(
+        self, topic, value=None, *args, **kwargs
+    ):  # pylint: disable=keyword-arg-before-vararg
         new_kwargs = kwargs.copy()
-        new_kwargs['topic'] = topic
-        new_kwargs['value'] = value
+        new_kwargs["topic"] = topic
+        new_kwargs["value"] = value
 
-        return ConfluentKafkaInstrumentor.wrap_produce(self._producer.produce, self, self._tracer, args, new_kwargs)
+        return ConfluentKafkaInstrumentor.wrap_produce(
+            self._producer.produce, self, self._tracer, args, new_kwargs
+        )
 
     def original_producer(self):
         return self._producer
 
 
 class ProxiedConsumer(Consumer):
-
     def __init__(self, consumer: Consumer, tracer: Tracer):
         self._consumer = consumer
         self._tracer = tracer
@@ -165,19 +174,29 @@ def __init__(self, consumer: Consumer, tracer: Tracer):
     def committed(self, partitions, timeout=-1):
         return self._consumer.committed(partitions, timeout)
 
-    def consume(self, num_messages=1, *args, **kwargs):
+    def consume(
+        self, num_messages=1, *args, **kwargs
+    ):  # pylint: disable=keyword-arg-before-vararg
         return self._consumer.consume(num_messages, *args, **kwargs)
 
-    def get_watermark_offsets(self, partition, timeout=-1, *args, **kwargs):
-        return self._consumer.get_watermark_offsets(partition, timeout, *args, **kwargs)
+    def get_watermark_offsets(
+        self, partition, timeout=-1, *args, **kwargs
+    ):  # pylint: disable=keyword-arg-before-vararg
+        return self._consumer.get_watermark_offsets(
+            partition, timeout, *args, **kwargs
+        )
 
     def offsets_for_times(self, partitions, timeout=-1):
         return self._consumer.offsets_for_times(partitions, timeout)
 
     def poll(self, timeout=-1):
-        return ConfluentKafkaInstrumentor.wrap_poll(self._consumer.poll, self, self._tracer, [timeout], {})
+        return ConfluentKafkaInstrumentor.wrap_poll(
+            self._consumer.poll, self, self._tracer, [timeout], {}
+        )
 
-    def subscribe(self, topics, on_assign=lambda *args: None, *args, **kwargs):
+    def subscribe(
+        self, topics, on_assign=lambda *args: None, *args, **kwargs
+    ):  # pylint: disable=keyword-arg-before-vararg
         self._consumer.subscribe(topics, on_assign, *args, **kwargs)
 
     def original_consumer(self):
@@ -189,8 +208,11 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
     See `BaseInstrumentor`
     """
 
+    # pylint: disable=attribute-defined-outside-init
     @staticmethod
-    def instrument_producer(producer: Producer, tracer_provider=None) -> ProxiedProducer:
+    def instrument_producer(
+        producer: Producer, tracer_provider=None
+    ) -> ProxiedProducer:
         tracer = trace.get_tracer(
             __name__, __version__, tracer_provider=tracer_provider
         )
@@ -200,7 +222,9 @@ def instrument_producer(producer: Producer, tracer_provider=None) -> ProxiedProd
         return manual_producer
 
     @staticmethod
-    def instrument_consumer(consumer: Consumer, tracer_provider=None) -> ProxiedConsumer:
+    def instrument_consumer(
+        consumer: Consumer, tracer_provider=None
+    ) -> ProxiedConsumer:
         tracer = trace.get_tracer(
             __name__, __version__, tracer_provider=tracer_provider
         )
@@ -210,14 +234,16 @@ def instrument_consumer(consumer: Consumer, tracer_provider=None) -> ProxiedCons
         return manual_consumer
 
     @staticmethod
-    def uninstrument_producer(producer) -> Producer:
+    def uninstrument_producer(producer: Producer) -> Producer:
         if isinstance(producer, ProxiedProducer):
             return producer.original_producer()
+        return producer
 
     @staticmethod
-    def uninstrument_consumer(consumer) -> Consumer:
+    def uninstrument_consumer(consumer: Consumer) -> Consumer:
         if isinstance(consumer, ProxiedConsumer):
             return consumer.original_consumer()
+        return consumer
 
     def instrumentation_dependencies(self) -> Collection[str]:
         return _instruments
@@ -237,16 +263,26 @@ def _instrument(self, **kwargs):
         self._tracer = tracer
 
         def _inner_wrap_produce(func, instance, args, kwargs):
-            return ConfluentKafkaInstrumentor.wrap_produce(func, instance, self._tracer, args, kwargs)
+            return ConfluentKafkaInstrumentor.wrap_produce(
+                func, instance, self._tracer, args, kwargs
+            )
 
         def _inner_wrap_poll(func, instance, args, kwargs):
-            return ConfluentKafkaInstrumentor.wrap_poll(func, instance, self._tracer, args, kwargs)
+            return ConfluentKafkaInstrumentor.wrap_poll(
+                func, instance, self._tracer, args, kwargs
+            )
 
-        wrapt.wrap_function_wrapper("kafka_instrumentation",
-                                    "AutoInstrumentedProducer.produce", _inner_wrap_produce)
+        wrapt.wrap_function_wrapper(
+            AutoInstrumentedProducer,
+            "produce",
+            _inner_wrap_produce,
+        )
 
-        wrapt.wrap_function_wrapper("kafka_instrumentation",
-                                    "AutoInstrumentedConsumer.poll", _inner_wrap_poll)
+        wrapt.wrap_function_wrapper(
+            AutoInstrumentedConsumer,
+            "poll",
+            _inner_wrap_poll,
+        )
 
     def _uninstrument(self, **kwargs):
         confluent_kafka.Producer = self._original_kafka_producer
@@ -261,22 +297,29 @@ def wrap_produce(func, instance, tracer, args, kwargs):
         if not topic:
             topic = args[0]
 
-        span_name = _get_span_name("send", topic)
-        with tracer.start_as_current_span(name=span_name, kind=trace.SpanKind.PRODUCER) as span:
-            headers = KafkaPropertiesExtractor.extract_produce_headers(args, kwargs)
+        span_name = _get_span_name("send", topic)
+        with tracer.start_as_current_span(
+            name=span_name, kind=trace.SpanKind.PRODUCER
+        ) as span:
+            headers = KafkaPropertiesExtractor.extract_produce_headers(
+                args, kwargs
+            )
             if headers is None:
                 headers = []
                 kwargs["headers"] = headers
 
             topic = KafkaPropertiesExtractor.extract_produce_topic(args)
-            bootstrap_servers = KafkaPropertiesExtractor.extract_bootstrap_servers(instance)
-            _enrich_span(span, topic, bootstrap_servers, operation=MessagingOperationValues.RECEIVE)  # Replace
+            _enrich_span(
+                span,
+                topic,
+                operation=MessagingOperationValues.RECEIVE,
+            )  # Replace
             propagate.inject(
                 headers,
                 setter=_kafka_setter,
            )
            return func(*args, **kwargs)
-
+
 
     @staticmethod
     def wrap_poll(func, instance, tracer, args, kwargs):
         if instance._current_consume_span:
@@ -285,7 +328,9 @@ def wrap_poll(func, instance, tracer, args, kwargs):
             instance._current_consume_span.end()
             instance._current_consume_span = None
 
-        with tracer.start_as_current_span("recv", end_on_exit=True, kind=trace.SpanKind.CONSUMER) as span:
+        with tracer.start_as_current_span(
+            "recv", end_on_exit=True, kind=trace.SpanKind.CONSUMER
+        ):
             record = func(*args, **kwargs)
             if record:
                 links = []
@@ -296,20 +341,20 @@ def wrap_poll(func, instance, tracer, args, kwargs):
                         links.append(Link(context=item.get_span_context()))
 
                 instance._current_consume_span = tracer.start_span(
-                    name=f"{record.topic()} process", links=links, kind=SpanKind.CONSUMER
+                    name=f"{record.topic()} process",
+                    links=links,
+                    kind=SpanKind.CONSUMER,
                 )
 
-                bootstrap_servers = KafkaPropertiesExtractor.extract_bootstrap_servers(instance)
                 _enrich_span(
                     instance._current_consume_span,
                     record.topic(),
-                    bootstrap_servers,
                     record.partition(),
                     record.offset(),
                     operation=MessagingOperationValues.PROCESS,
-
                 )
             instance._current_context_token = context.attach(
-                trace.set_span_in_context(instance._current_consume_span))
+                trace.set_span_in_context(instance._current_consume_span)
+            )
 
-        return record
+        return record
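
Note: the pieces above support both manual wrapping (via `instrument_producer` / `instrument_consumer`, as exercised in the tests below) and global auto-instrumentation through `BaseInstrumentor`. A hedged sketch of the latter; it assumes, based on the `_uninstrument` restore logic above, that `instrument()` swaps `confluent_kafka.Producer`/`Consumer` for the auto-instrumented subclasses (broker and topic are placeholders):

    import confluent_kafka

    from opentelemetry.instrumentation.confluent_kafka import (
        ConfluentKafkaInstrumentor,
    )

    # Assumption: after instrument(), newly created Producer/Consumer objects
    # are the AutoInstrumentedProducer/AutoInstrumentedConsumer wrapped above.
    ConfluentKafkaInstrumentor().instrument()

    producer = confluent_kafka.Producer({"bootstrap.servers": "localhost:9092"})
    producer.produce("my-topic", b"payload")  # goes through wrap_produce
    producer.flush()

    ConfluentKafkaInstrumentor().uninstrument()  # restores the original classes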

‎instrumentation/opentelemetry-instrumentation-confluent-kafka/src/opentelemetry/instrumentation/confluent_kafka/package.py

+1 -1

@@ -13,4 +13,4 @@
 # limitations under the License.
 
 
-_instruments = ("confluent-kafka ~= 1.8.2",)
+_instruments = ("confluent-kafka ~= 1.8.2",)

‎instrumentation/opentelemetry-instrumentation-confluent-kafka/src/opentelemetry/instrumentation/confluent_kafka/utils.py

+11 -13

@@ -1,16 +1,12 @@
-import json
-import trace
 from logging import getLogger
 from typing import List, Optional
 
-from opentelemetry import propagate
 from opentelemetry.propagators import textmap
 from opentelemetry.semconv.trace import (
-    SpanAttributes,
-    MessagingOperationValues,
     MessagingDestinationKindValues,
+    MessagingOperationValues,
+    SpanAttributes,
 )
-from opentelemetry.trace import Link, SpanKind
 
 _LOG = getLogger(__name__)
 
@@ -68,26 +64,25 @@ def set(self, carrier: textmap.CarrierT, key: str, value: str) -> None:
 
 
 _kafka_getter = KafkaContextGetter()
+
+
 def _enrich_span(
     span,
     topic,
-    bootstrap_servers: List[str],
     partition: Optional[int] = None,
    offset: Optional[int] = None,
     operation: Optional[MessagingOperationValues] = None,
-
 ):
 
     if not span.is_recording():
         return
 
     span.set_attribute(SpanAttributes.MESSAGING_SYSTEM, "kafka")
     span.set_attribute(SpanAttributes.MESSAGING_DESTINATION, topic)
-    span.set_attribute(SpanAttributes.MESSAGING_KAFKA_PARTITION, partition)
 
-    span.set_attribute(
-        SpanAttributes.MESSAGING_URL, json.dumps(bootstrap_servers)
-    )
+    if partition:
+        span.set_attribute(SpanAttributes.MESSAGING_KAFKA_PARTITION, partition)
+
     span.set_attribute(
         SpanAttributes.MESSAGING_DESTINATION_KIND,
         MessagingDestinationKindValues.QUEUE.value,
@@ -101,7 +96,10 @@ def _enrich_span(
     # https://stackoverflow.com/questions/65935155/identify-and-find-specific-message-in-kafka-topic
     # A message within Kafka is uniquely defined by its topic name, topic partition and offset.
     if partition and offset and topic:
-        span.set_attribute(SpanAttributes.MESSAGING_MESSAGE_ID, f"{topic}.{partition}.{offset}")
+        span.set_attribute(
+            SpanAttributes.MESSAGING_MESSAGE_ID,
+            f"{topic}.{partition}.{offset}",
+        )
 
 
 _kafka_setter = KafkaContextSetter()
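
Note: the getter/setter pair adapts confluent-kafka's header format (a list of `(key, value)` tuples) to the OpenTelemetry propagation API. A sketch under that assumption; the `KafkaContextSetter`/`KafkaContextGetter` bodies are not shown in this diff:

    from opentelemetry import propagate, trace
    from opentelemetry.instrumentation.confluent_kafka.utils import (
        _kafka_getter,
        _kafka_setter,
    )

    headers = []  # travels with the Kafka record
    with trace.get_tracer(__name__).start_as_current_span("send"):
        # Writes e.g. a traceparent entry into the header list.
        propagate.inject(headers, setter=_kafka_setter)

    # Consumer side: recover the producer's context from the headers.
    parent_ctx = propagate.extract(headers, getter=_kafka_getter)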

‎instrumentation/opentelemetry-instrumentation-confluent-kafka/src/opentelemetry/instrumentation/confluent_kafka/version.py

+1 -1

@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "0.27b0"
+__version__ = "0.31b0"

‎instrumentation/opentelemetry-instrumentation-confluent-kafka/tests/test_instrumentation.py

+27 -24

@@ -13,41 +13,44 @@
 # limitations under the License.
 from unittest import TestCase
 
-
-from confluent_kafka import Producer, Consumer
-
-from kafka_instrumentation import ConfluentKafkaInstrumentor, ProxiedProducer, ProxiedConsumer
+from confluent_kafka import Consumer, Producer
+from opentelemetry.instrumentation.confluent_kafka import (
+    ConfluentKafkaInstrumentor,
+    ProxiedConsumer,
+    ProxiedProducer,
+)
 
 
 class TestConfluentKafka(TestCase):
     def test_instrument_api(self) -> None:
         instrumentation = ConfluentKafkaInstrumentor()
 
-        p = Producer({'bootstrap.servers': 'localhost:29092'})
-        p = instrumentation.instrument_producer(p)
-
-        self.assertEqual(p.__class__, ProxiedProducer)
+        producer = Producer({"bootstrap.servers": "localhost:29092"})
+        producer = instrumentation.instrument_producer(producer)
 
-        p = instrumentation.uninstrument_producer(p)
-        self.assertEqual(p.__class__, Producer)
+        self.assertEqual(producer.__class__, ProxiedProducer)
 
-        p = Producer({'bootstrap.servers': 'localhost:29092'})
-        p = instrumentation.instrument_producer(p)
+        producer = instrumentation.uninstrument_producer(producer)
+        self.assertEqual(producer.__class__, Producer)
 
-        self.assertEqual(p.__class__, ProxiedProducer)
+        producer = Producer({"bootstrap.servers": "localhost:29092"})
+        producer = instrumentation.instrument_producer(producer)
 
-        p = instrumentation.uninstrument_producer(p)
-        self.assertEqual(p.__class__, Producer)
+        self.assertEqual(producer.__class__, ProxiedProducer)
 
-        c = Consumer({
-            'bootstrap.servers': 'localhost:29092',
-            'group.id': 'mygroup',
-            'auto.offset.reset': 'earliest'
-        })
+        producer = instrumentation.uninstrument_producer(producer)
+        self.assertEqual(producer.__class__, Producer)
 
-        c = instrumentation.instrument_consumer(c)
-        self.assertEqual(c.__class__, ProxiedConsumer)
+        consumer = Consumer(
+            {
+                "bootstrap.servers": "localhost:29092",
+                "group.id": "mygroup",
+                "auto.offset.reset": "earliest",
+            }
+        )
 
-        c = instrumentation.uninstrument_consumer(c)
-        self.assertEqual(c.__class__, Consumer)
+        consumer = instrumentation.instrument_consumer(consumer)
+        self.assertEqual(consumer.__class__, ProxiedConsumer)
 
+        consumer = instrumentation.uninstrument_consumer(consumer)
+        self.assertEqual(consumer.__class__, Consumer)

‎opentelemetry-contrib-instrumentations/setup.cfg

+1

@@ -37,6 +37,7 @@ install_requires =
     opentelemetry-instrumentation-boto3sqs==0.31b0
     opentelemetry-instrumentation-botocore==0.31b0
     opentelemetry-instrumentation-celery==0.31b0
+    opentelemetry-instrumentation-confluent-kafka==0.31b0
     opentelemetry-instrumentation-dbapi==0.31b0
     opentelemetry-instrumentation-django==0.31b0
     opentelemetry-instrumentation-elasticsearch==0.31b0

‎opentelemetry-instrumentation/src/opentelemetry/instrumentation/bootstrap_gen.py

+4

@@ -48,6 +48,10 @@
         "library": "celery >= 4.0, < 6.0",
         "instrumentation": "opentelemetry-instrumentation-celery==0.31b0",
     },
+    "confluent-kafka": {
+        "library": "confluent-kafka ~= 1.8.2",
+        "instrumentation": "opentelemetry-instrumentation-confluent-kafka==0.31b0",
+    },
     "django": {
         "library": "django >= 1.10",
         "instrumentation": "opentelemetry-instrumentation-django==0.31b0",
