diff --git a/.circleci/config.yml b/.circleci/config.yml index de13b1e4..4ce55bef 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -99,35 +99,6 @@ jobs: - *persist_to_workspace_step - *save_cache_step - opentracer: - docker: - - *test_runner - resource_class: *resource_class - steps: - - checkout - - *restore_cache_step - - run: scripts/run-tox-scenario '^py..-opentracer' - - *persist_to_workspace_step - - *save_cache_step - - integration: - docker: - - <<: *test_runner - env: - TEST_DATADOG_INTEGRATION: 1 - - image: datadog/docker-dd-agent - env: - - DD_APM_ENABLED=true - - DD_BIND_HOST=0.0.0.0 - - DD_API_KEY=invalid_key_but_this_is_fine - resource_class: *resource_class - steps: - - checkout - - *restore_cache_step - - run: scripts/run-tox-scenario '^py..-integration$' - - *persist_to_workspace_step - - *save_cache_step - futures: docker: - *test_runner @@ -150,14 +121,14 @@ jobs: - *persist_to_workspace_step - *save_cache_step - ddtracerun: + oteltracerun: docker: - *test_runner - image: redis:4.0-alpine resource_class: *resource_class steps: - checkout - - run: scripts/run-tox-scenario '^py..-ddtracerun$' + - run: scripts/run-tox-scenario '^py..-oteltracerun$' - *persist_to_workspace_step test_utils: @@ -193,17 +164,6 @@ jobs: - *persist_to_workspace_step - *save_cache_step - pylons: - docker: - - *test_runner - resource_class: *resource_class - steps: - - checkout - - *restore_cache_step - - run: scripts/run-tox-scenario '^pylons_contrib-' - - *persist_to_workspace_step - - *save_cache_step - aiohttp: docker: - *test_runner @@ -298,7 +258,7 @@ jobs: steps: - checkout - *restore_cache_step - - run: tox -e 'falcon_contrib{,_autopatch}-{py27,py34,py35,py36}-falcon{10,11,12,13,14}' --result-json /tmp/falcon.results + - run: tox -e 'falcon_contrib{,_autopatch}-{py34,py35,py36}-falcon{10,11,12,13,14}' --result-json /tmp/falcon.results - *persist_to_workspace_step - *save_cache_step @@ -309,9 +269,9 @@ jobs: - image: memcached:1.5-alpine - image: datadog/docker-dd-agent env: - - DD_APM_ENABLED=true - - DD_BIND_HOST=0.0.0.0 - - DD_API_KEY=invalid_key_but_this_is_fine + - OTEL_APM_ENABLED=true + - OTEL_BIND_HOST=0.0.0.0 + - OTEL_API_KEY=invalid_key_but_this_is_fine resource_class: *resource_class steps: - checkout @@ -413,24 +373,6 @@ jobs: - *persist_to_workspace_step - *save_cache_step - mysqldb: - docker: - - *test_runner - - image: mysql:5.7 - env: - - MYSQL_ROOT_PASSWORD=admin - - MYSQL_PASSWORD=test - - MYSQL_USER=test - - MYSQL_DATABASE=test - resource_class: *resource_class - steps: - - checkout - - *restore_cache_step - - run: tox -e 'wait' mysql - - run: scripts/run-tox-scenario '^mysqldb_contrib-.*-mysqldb' - - *persist_to_workspace_step - - *save_cache_step - pymysql: docker: - *test_runner @@ -700,7 +642,7 @@ jobs: - run: command: | mkdir -p /tmp/test-reports - tox -e 'benchmarks-{py27,py34,py35,py36,py37}' --result-json /tmp/benchmarks.results -- --benchmark-storage=file:///tmp/test-reports/ --benchmark-autosave + tox -e 'benchmarks-{py34,py35,py36,py37}' --result-json /tmp/benchmarks.results -- --benchmark-storage=file:///tmp/test-reports/ --benchmark-autosave - store_test_results: path: /tmp/test-reports - store_artifacts: @@ -860,7 +802,7 @@ workflows: - dbapi: requires: - flake8 - - ddtracerun: + - oteltracerun: requires: - flake8 - django: @@ -887,9 +829,6 @@ workflows: - httplib: requires: - flake8 - - integration: - requires: - - flake8 - internal: requires: - flake8 @@ -911,24 +850,15 @@ workflows: - mysqlconnector: requires: - flake8 - - mysqldb: - 
requires: - - flake8 - mysqlpython: requires: - flake8 - - opentracer: - requires: - - flake8 - psycopg: requires: - flake8 - pylibmc: requires: - flake8 - - pylons: - requires: - - flake8 - pymemcache: requires: - flake8 @@ -997,7 +927,7 @@ workflows: - celery - consul - dbapi - - ddtracerun + - oteltracerun - django - elasticsearch - falcon @@ -1006,7 +936,6 @@ workflows: - gevent - grpc - httplib - - integration - internal - jinja2 - kombu @@ -1014,12 +943,9 @@ workflows: - molten - mongoengine - mysqlconnector - - mysqldb - mysqlpython - - opentracer - psycopg - pylibmc - - pylons - pymemcache - pymongo - pymysql diff --git a/README.md b/README.md index d9ac8226..e6d73c90 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,10 @@ -# dd-trace-py +# otel-trace-py -[![CircleCI](https://circleci.com/gh/DataDog/dd-trace-py/tree/master.svg?style=svg)](https://circleci.com/gh/DataDog/dd-trace-py/tree/master) -[![Pyversions](https://img.shields.io/pypi/pyversions/ddtrace.svg?style=flat)](https://pypi.org/project/ddtrace/) -[![PypiVersions](https://img.shields.io/pypi/v/ddtrace.svg)](https://pypi.org/project/ddtrace/) -[![OpenTracing Badge](https://img.shields.io/badge/OpenTracing-enabled-blue.svg)](http://pypi.datadoghq.com/trace/docs/installation_quickstart.html#opentracing) +[![CircleCI](https://circleci.com/gh/DataDog/otel-trace-py/tree/master.svg?style=svg)](https://circleci.com/gh/DataDog/otel-trace-py/tree/master) +[![Pyversions](https://img.shields.io/pypi/pyversions/oteltrace.svg?style=flat)](https://pypi.org/project/oteltrace/) +[![PypiVersions](https://img.shields.io/pypi/v/oteltrace.svg)](https://pypi.org/project/oteltrace/) -`ddtrace` is Datadog's tracing library for Python. It is used to trace requests +`oteltrace` is OpenTelemetry's tracing library for Python. It is used to trace requests as they flow across web servers, databases and microservices so that developers have great visiblity into bottlenecks and troublesome requests. diff --git a/Rakefile b/Rakefile index ea90004a..d82c7b2a 100644 --- a/Rakefile +++ b/Rakefile @@ -36,20 +36,20 @@ namespace :pypi do end task :confirm do - ddtrace_version = get_version + oteltrace_version = get_version - if get_branch.downcase != 'tags/v#{ddtrace_version}' - print "WARNING: Expected current commit to be tagged as 'tags/v#{ddtrace_version}, instead we are on '#{get_branch}', proceed anyways [y|N]? " + if get_branch.downcase != 'tags/v#{oteltrace_version}' + print "WARNING: Expected current commit to be tagged as 'tags/v#{oteltrace_version}, instead we are on '#{get_branch}', proceed anyways [y|N]? 
" $stdout.flush abort if $stdin.gets.to_s.strip.downcase != 'y' end - puts "WARNING: This task will build and release new wheels to https://pypi.org/project/ddtrace/, this action cannot be undone" - print " To proceed please type the version '#{ddtrace_version}': " + puts "WARNING: This task will build and release new wheels to https://pypi.org/project/oteltrace/, this action cannot be undone" + print " To proceed please type the version '#{oteltrace_version}': " $stdout.flush - abort if $stdin.gets.to_s.strip.downcase != ddtrace_version + abort if $stdin.gets.to_s.strip.downcase != oteltrace_version end task :clean do diff --git a/ddtrace/api.py b/ddtrace/api.py deleted file mode 100644 index bca92cb1..00000000 --- a/ddtrace/api.py +++ /dev/null @@ -1,279 +0,0 @@ -# stdlib -import time -import ddtrace -from json import loads -import socket - -# project -from .encoding import get_encoder, JSONEncoder -from .compat import httplib, PYTHON_VERSION, PYTHON_INTERPRETER, get_connection_response -from .internal.logger import get_logger -from .internal.runtime import container -from .payload import Payload, PayloadFull -from .utils.deprecation import deprecated - - -log = get_logger(__name__) - - -_VERSIONS = {'v0.4': {'traces': '/v0.4/traces', - 'services': '/v0.4/services', - 'compatibility_mode': False, - 'fallback': 'v0.3'}, - 'v0.3': {'traces': '/v0.3/traces', - 'services': '/v0.3/services', - 'compatibility_mode': False, - 'fallback': 'v0.2'}, - 'v0.2': {'traces': '/v0.2/traces', - 'services': '/v0.2/services', - 'compatibility_mode': True, - 'fallback': None}} - - -class Response(object): - """ - Custom API Response object to represent a response from calling the API. - - We do this to ensure we know expected properties will exist, and so we - can call `resp.read()` and load the body once into an instance before we - close the HTTPConnection used for the request. - """ - __slots__ = ['status', 'body', 'reason', 'msg'] - - def __init__(self, status=None, body=None, reason=None, msg=None): - self.status = status - self.body = body - self.reason = reason - self.msg = msg - - @classmethod - def from_http_response(cls, resp): - """ - Build a ``Response`` from the provided ``HTTPResponse`` object. - - This function will call `.read()` to consume the body of the ``HTTPResponse`` object. - - :param resp: ``HTTPResponse`` object to build the ``Response`` from - :type resp: ``HTTPResponse`` - :rtype: ``Response`` - :returns: A new ``Response`` - """ - return cls( - status=resp.status, - body=resp.read(), - reason=getattr(resp, 'reason', None), - msg=getattr(resp, 'msg', None), - ) - - def get_json(self): - """Helper to parse the body of this request as JSON""" - try: - body = self.body - if not body: - log.debug('Empty reply from Datadog Agent, %r', self) - return - - if not isinstance(body, str) and hasattr(body, 'decode'): - body = body.decode('utf-8') - - if hasattr(body, 'startswith') and body.startswith('OK'): - # This typically happens when using a priority-sampling enabled - # library with an outdated agent. It still works, but priority sampling - # will probably send too many traces, so the next step is to upgrade agent. 
- log.debug('Cannot parse Datadog Agent response, please make sure your Datadog Agent is up to date') - return - - return loads(body) - except (ValueError, TypeError) as err: - log.debug('Unable to parse Datadog Agent JSON response: %s %r', err, body) - - def __repr__(self): - return '{0}(status={1!r}, body={2!r}, reason={3!r}, msg={4!r})'.format( - self.__class__.__name__, - self.status, - self.body, - self.reason, - self.msg, - ) - - -class UDSHTTPConnection(httplib.HTTPConnection): - """An HTTP connection established over a Unix Domain Socket.""" - - # It's "important" to keep the hostname and port arguments here; while there are not used by the connection - # mechanism, they are actually used as HTTP headers such as `Host`. - def __init__(self, path, https, *args, **kwargs): - if https: - httplib.HTTPSConnection.__init__(self, *args, **kwargs) - else: - httplib.HTTPConnection.__init__(self, *args, **kwargs) - self.path = path - - def connect(self): - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.connect(self.path) - self.sock = sock - - -class API(object): - """ - Send data to the trace agent using the HTTP protocol and JSON format - """ - - TRACE_COUNT_HEADER = 'X-Datadog-Trace-Count' - - # Default timeout when establishing HTTP connection and sending/receiving from socket. - # This ought to be enough as the agent is local - TIMEOUT = 2 - - def __init__(self, hostname, port, uds_path=None, https=False, headers=None, encoder=None, priority_sampling=False): - """Create a new connection to the Tracer API. - - :param hostname: The hostname. - :param port: The TCP port to use. - :param uds_path: The path to use if the connection is to be established with a Unix Domain Socket. - :param headers: The headers to pass along the request. - :param encoder: The encoder to use to serialize data. - :param priority_sampling: Whether to use priority sampling. 
- """ - self.hostname = hostname - self.port = int(port) - self.uds_path = uds_path - self.https = https - - self._headers = headers or {} - self._version = None - - if priority_sampling: - self._set_version('v0.4', encoder=encoder) - else: - self._set_version('v0.3', encoder=encoder) - - self._headers.update({ - 'Datadog-Meta-Lang': 'python', - 'Datadog-Meta-Lang-Version': PYTHON_VERSION, - 'Datadog-Meta-Lang-Interpreter': PYTHON_INTERPRETER, - 'Datadog-Meta-Tracer-Version': ddtrace.__version__, - }) - - # Add container information if we have it - self._container_info = container.get_container_info() - if self._container_info and self._container_info.container_id: - self._headers.update({ - 'Datadog-Container-Id': self._container_info.container_id, - }) - - def __str__(self): - if self.uds_path: - return 'unix://' + self.uds_path - if self.https: - scheme = 'https://' - else: - scheme = 'http://' - return '%s%s:%s' % (scheme, self.hostname, self.port) - - def _set_version(self, version, encoder=None): - if version not in _VERSIONS: - version = 'v0.2' - if version == self._version: - return - self._version = version - self._traces = _VERSIONS[version]['traces'] - self._services = _VERSIONS[version]['services'] - self._fallback = _VERSIONS[version]['fallback'] - self._compatibility_mode = _VERSIONS[version]['compatibility_mode'] - if self._compatibility_mode: - self._encoder = JSONEncoder() - else: - self._encoder = encoder or get_encoder() - # overwrite the Content-type with the one chosen in the Encoder - self._headers.update({'Content-Type': self._encoder.content_type}) - - def _downgrade(self): - """ - Downgrades the used encoder and API level. This method must fallback to a safe - encoder and API, so that it will success despite users' configurations. This action - ensures that the compatibility mode is activated so that the downgrade will be - executed only once. - """ - self._set_version(self._fallback) - - def send_traces(self, traces): - """Send traces to the API. - - :param traces: A list of traces. - :return: The list of API HTTP responses. - """ - if not traces: - return [] - - start = time.time() - responses = [] - payload = Payload(encoder=self._encoder) - for trace in traces: - try: - payload.add_trace(trace) - except PayloadFull: - # Is payload full or is the trace too big? - # If payload is not empty, then using a new Payload might allow us to fit the trace. - # Let's flush the Payload and try to put the trace in a new empty Payload. - if not payload.empty: - responses.append(self._flush(payload)) - # Create a new payload - payload = Payload(encoder=self._encoder) - try: - # Add the trace that we were unable to add in that iteration - payload.add_trace(trace) - except PayloadFull: - # If the trace does not fit in a payload on its own, that's bad. Drop it. - log.warning('Trace %r is too big to fit in a payload, dropping it', trace) - - # Check that the Payload is not empty: - # it could be empty if the last trace was too big to fit. 
- if not payload.empty: - responses.append(self._flush(payload)) - - log.debug('reported %d traces in %.5fs', len(traces), time.time() - start) - - return responses - - def _flush(self, payload): - try: - response = self._put(self._traces, payload.get_payload(), payload.length) - except (httplib.HTTPException, OSError, IOError) as e: - return e - - # the API endpoint is not available so we should downgrade the connection and re-try the call - if response.status in [404, 415] and self._fallback: - log.debug("calling endpoint '%s' but received %s; downgrading API", self._traces, response.status) - self._downgrade() - return self._flush(payload) - - return response - - @deprecated(message='Sending services to the API is no longer necessary', version='1.0.0') - def send_services(self, *args, **kwargs): - return - - def _put(self, endpoint, data, count): - headers = self._headers.copy() - headers[self.TRACE_COUNT_HEADER] = str(count) - - if self.uds_path is None: - if self.https: - conn = httplib.HTTPSConnection(self.hostname, self.port, timeout=self.TIMEOUT) - else: - conn = httplib.HTTPConnection(self.hostname, self.port, timeout=self.TIMEOUT) - else: - conn = UDSHTTPConnection(self.uds_path, self.https, self.hostname, self.port, timeout=self.TIMEOUT) - - try: - conn.request('PUT', endpoint, data, headers) - - # Parse the HTTPResponse into an API.Response - # DEV: This will call `resp.read()` which must happen before the `conn.close()` below, - # if we call `.close()` then all future `.read()` calls will return `b''` - resp = get_connection_response(conn) - return Response.from_http_response(resp) - finally: - conn.close() diff --git a/ddtrace/bootstrap/sitecustomize.py b/ddtrace/bootstrap/sitecustomize.py deleted file mode 100644 index 10b39dd8..00000000 --- a/ddtrace/bootstrap/sitecustomize.py +++ /dev/null @@ -1,147 +0,0 @@ -""" -Bootstrapping code that is run when using the `ddtrace-run` Python entrypoint -Add all monkey-patching that needs to run by default here -""" - -import os -import imp -import sys -import logging - -from ddtrace.utils.formats import asbool, get_env -from ddtrace.internal.logger import get_logger -from ddtrace import constants - -logs_injection = asbool(get_env('logs', 'injection')) -DD_LOG_FORMAT = '%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] {}- %(message)s'.format( - '[dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s] ' if logs_injection else '' -) - -if logs_injection: - # immediately patch logging if trace id injected - from ddtrace import patch - patch(logging=True) - -debug = os.environ.get('DATADOG_TRACE_DEBUG') - -# Set here a default logging format for basicConfig - -# DEV: Once basicConfig is called here, future calls to it cannot be used to -# change the formatter since it applies the formatter to the root handler only -# upon initializing it the first time. 
-# See https://github.com/python/cpython/blob/112e4afd582515fcdcc0cde5012a4866e5cfda12/Lib/logging/__init__.py#L1550 -if debug and debug.lower() == 'true': - logging.basicConfig(level=logging.DEBUG, format=DD_LOG_FORMAT) -else: - logging.basicConfig(format=DD_LOG_FORMAT) - -log = get_logger(__name__) - -EXTRA_PATCHED_MODULES = { - 'bottle': True, - 'django': True, - 'falcon': True, - 'flask': True, - 'pylons': True, - 'pyramid': True, -} - - -def update_patched_modules(): - modules_to_patch = os.environ.get('DATADOG_PATCH_MODULES') - if not modules_to_patch: - return - for patch in modules_to_patch.split(','): - if len(patch.split(':')) != 2: - log.debug('skipping malformed patch instruction') - continue - - module, should_patch = patch.split(':') - if should_patch.lower() not in ['true', 'false']: - log.debug('skipping malformed patch instruction for %s', module) - continue - - EXTRA_PATCHED_MODULES.update({module: should_patch.lower() == 'true'}) - - -def add_global_tags(tracer): - tags = {} - for tag in os.environ.get('DD_TRACE_GLOBAL_TAGS', '').split(','): - tag_name, _, tag_value = tag.partition(':') - if not tag_name or not tag_value: - log.debug('skipping malformed tracer tag') - continue - - tags[tag_name] = tag_value - tracer.set_tags(tags) - - -try: - from ddtrace import tracer - patch = True - - # Respect DATADOG_* environment variables in global tracer configuration - # TODO: these variables are deprecated; use utils method and update our documentation - # correct prefix should be DD_* - enabled = os.environ.get('DATADOG_TRACE_ENABLED') - hostname = os.environ.get('DD_AGENT_HOST', os.environ.get('DATADOG_TRACE_AGENT_HOSTNAME')) - port = os.environ.get('DATADOG_TRACE_AGENT_PORT') - priority_sampling = os.environ.get('DATADOG_PRIORITY_SAMPLING') - - opts = {} - - if enabled and enabled.lower() == 'false': - opts['enabled'] = False - patch = False - if hostname: - opts['hostname'] = hostname - if port: - opts['port'] = int(port) - if priority_sampling: - opts['priority_sampling'] = asbool(priority_sampling) - - opts['collect_metrics'] = asbool(get_env('runtime_metrics', 'enabled')) - - if opts: - tracer.configure(**opts) - - if logs_injection: - EXTRA_PATCHED_MODULES.update({'logging': True}) - - if patch: - update_patched_modules() - from ddtrace import patch_all - patch_all(**EXTRA_PATCHED_MODULES) - - if 'DATADOG_ENV' in os.environ: - tracer.set_tags({constants.ENV_KEY: os.environ['DATADOG_ENV']}) - - if 'DD_TRACE_GLOBAL_TAGS' in os.environ: - add_global_tags(tracer) - - # Ensure sitecustomize.py is properly called if available in application directories: - # * exclude `bootstrap_dir` from the search - # * find a user `sitecustomize.py` module - # * import that module via `imp` - bootstrap_dir = os.path.dirname(__file__) - path = list(sys.path) - - if bootstrap_dir in path: - path.remove(bootstrap_dir) - - try: - (f, path, description) = imp.find_module('sitecustomize', path) - except ImportError: - pass - else: - # `sitecustomize.py` found, load it - log.debug('sitecustomize from user found in: %s', path) - imp.load_module('sitecustomize', f, path, description) - - # Loading status used in tests to detect if the `sitecustomize` has been - # properly loaded without exceptions. This must be the last action in the module - # when the execution ends with a success. 
- loaded = True -except Exception: - loaded = False - log.warning('error configuring Datadog tracing', exc_info=True) diff --git a/ddtrace/contrib/pylons/constants.py b/ddtrace/contrib/pylons/constants.py deleted file mode 100644 index ae0fb424..00000000 --- a/ddtrace/contrib/pylons/constants.py +++ /dev/null @@ -1 +0,0 @@ -CONFIG_MIDDLEWARE = '__datadog_middleware' diff --git a/ddtrace/contrib/pyramid/constants.py b/ddtrace/contrib/pyramid/constants.py deleted file mode 100644 index 176699d7..00000000 --- a/ddtrace/contrib/pyramid/constants.py +++ /dev/null @@ -1,6 +0,0 @@ -SETTINGS_SERVICE = 'datadog_trace_service' -SETTINGS_TRACER = 'datadog_tracer' -SETTINGS_TRACE_ENABLED = 'datadog_trace_enabled' -SETTINGS_DISTRIBUTED_TRACING = 'datadog_distributed_tracing' -SETTINGS_ANALYTICS_ENABLED = 'datadog_analytics_enabled' -SETTINGS_ANALYTICS_SAMPLE_RATE = 'datadog_analytics_sample_rate' diff --git a/ddtrace/contrib/redis/tracers.py b/ddtrace/contrib/redis/tracers.py deleted file mode 100644 index 62912ce0..00000000 --- a/ddtrace/contrib/redis/tracers.py +++ /dev/null @@ -1,20 +0,0 @@ -from redis import StrictRedis - -from ...utils.deprecation import deprecated - - -DEFAULT_SERVICE = 'redis' - - -@deprecated(message='Use patching instead (see the docs).', version='1.0.0') -def get_traced_redis(ddtracer, service=DEFAULT_SERVICE, meta=None): - return _get_traced_redis(ddtracer, StrictRedis, service, meta) - - -@deprecated(message='Use patching instead (see the docs).', version='1.0.0') -def get_traced_redis_from(ddtracer, baseclass, service=DEFAULT_SERVICE, meta=None): - return _get_traced_redis(ddtracer, baseclass, service, meta) - - -def _get_traced_redis(ddtracer, baseclass, service, meta): - return baseclass diff --git a/ddtrace/contrib/tornado/constants.py b/ddtrace/contrib/tornado/constants.py deleted file mode 100644 index 7052ee3d..00000000 --- a/ddtrace/contrib/tornado/constants.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -This module defines Tornado settings that are shared between -integration modules. -""" -CONFIG_KEY = 'datadog_trace' -REQUEST_CONTEXT_KEY = 'datadog_context' -REQUEST_SPAN_KEY = '__datadog_request_span' -FUTURE_SPAN_KEY = '__datadog_future_span' -PARENT_SPAN_KEY = '__datadog_parent_span' diff --git a/ddtrace/encoding.py b/ddtrace/encoding.py deleted file mode 100644 index f650966b..00000000 --- a/ddtrace/encoding.py +++ /dev/null @@ -1,128 +0,0 @@ -import json -import struct - -from .internal.logger import get_logger - - -# Try to import msgpack, fallback to just JSON if something went wrong -# DEV: We are ok with the pure Python fallback for msgpack if the C-extension failed to install -try: - from ddtrace.vendor import msgpack - # DEV: `use_bin_type` only exists since `0.4.0`, but we vendor a more recent version - MSGPACK_PARAMS = {'use_bin_type': True} - MSGPACK_ENCODING = True -except ImportError: - # fallback to JSON - MSGPACK_PARAMS = {} - MSGPACK_ENCODING = False - -log = get_logger(__name__) - - -class Encoder(object): - """ - Encoder interface that provides the logic to encode traces and service. - """ - def __init__(self): - """ - When extending the ``Encoder`` class, ``headers`` must be set because - they're returned by the encoding methods, so that the API transport doesn't - need to know what is the right header to suggest the decoding format to the - agent - """ - self.content_type = '' - - def encode_traces(self, traces): - """ - Encodes a list of traces, expecting a list of items where each items - is a list of spans. 
Before dump the string in a serialized format all - traces are normalized, calling the ``to_dict()`` method. The traces - nesting is not changed. - - :param traces: A list of traces that should be serialized - """ - normalized_traces = [[span.to_dict() for span in trace] for trace in traces] - return self.encode(normalized_traces) - - def encode_trace(self, trace): - """ - Encodes a trace, expecting a list of spans. Before dump the string in a - serialized format all traces are normalized, calling the ``to_dict()`` method. - The traces nesting is not changed. - - :param trace: A list of traces that should be serialized - """ - return self.encode([span.to_dict() for span in trace]) - - def encode(self, obj): - """ - Defines the underlying format used during traces or services encoding. - This method must be implemented and should only be used by the internal functions. - """ - raise NotImplementedError - - def decode(self, data): - """ - Defines the underlying format used during traces or services encoding. - This method must be implemented and should only be used by the internal functions. - """ - raise NotImplementedError - - def join_encoded(self, objs): - """Helper used to join a list of encoded objects into an encoded list of objects""" - raise NotImplementedError - - -class JSONEncoder(Encoder): - def __init__(self): - # TODO[manu]: add instructions about how users can switch to Msgpack - log.debug('using JSON encoder; application performance may be degraded') - self.content_type = 'application/json' - - def encode(self, obj): - return json.dumps(obj) - - def decode(self, data): - return json.loads(data) - - def join_encoded(self, objs): - """Join a list of encoded objects together as a json array""" - return '[' + ','.join(objs) + ']' - - -class MsgpackEncoder(Encoder): - def __init__(self): - log.debug('using Msgpack encoder') - self.content_type = 'application/msgpack' - - def encode(self, obj): - return msgpack.packb(obj) - - def decode(self, data): - return msgpack.unpackb(data) - - def join_encoded(self, objs): - """Join a list of encoded objects together as a msgpack array""" - buf = b''.join(objs) - - # Prepend array header to buffer - # https://github.com/msgpack/msgpack-python/blob/f46523b1af7ff2d408da8500ea36a4f9f2abe915/msgpack/fallback.py#L948-L955 - count = len(objs) - if count <= 0xf: - return struct.pack('B', 0x90 + count) + buf - elif count <= 0xffff: - return struct.pack('>BH', 0xdc, count) + buf - else: - return struct.pack('>BI', 0xdd, count) + buf - - -def get_encoder(): - """ - Switching logic that choose the best encoder for the API transport. - The default behavior is to use Msgpack if we have a CPP implementation - installed, falling back to the Python built-in JSON encoder. - """ - if MSGPACK_ENCODING: - return MsgpackEncoder() - else: - return JSONEncoder() diff --git a/ddtrace/internal/README.md b/ddtrace/internal/README.md deleted file mode 100644 index 5cb38087..00000000 --- a/ddtrace/internal/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Internal -This internal module is used to define and document an internal only API for `ddtrace`. - -These modules are not intended to be used outside of `ddtrace`. - -The APIs found within `ddtrace.internal` are subject to breaking changes at any time -and do not follow the semver versioning scheme of the `ddtrace` package. 
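Aside on the encoder removed above: `MsgpackEncoder.join_encoded` concatenates traces that were already packed individually and prepends a hand-built msgpack array header, so the agent can decode the buffer as one array without re-encoding every trace. A minimal sketch of that idea, assuming the standalone `msgpack` package is installed (the tracer itself uses a vendored copy):

```python
# Sketch of the join_encoded() logic from the deleted ddtrace/encoding.py.
# Assumes the standalone `msgpack` package; the library ships a vendored copy instead.
import struct
import msgpack


def join_encoded(objs):
    """Join already-packed msgpack objects into one packed msgpack array."""
    buf = b''.join(objs)
    count = len(objs)
    if count <= 0xf:           # fixarray header covers up to 15 elements
        return struct.pack('B', 0x90 + count) + buf
    elif count <= 0xffff:      # array 16
        return struct.pack('>BH', 0xdc, count) + buf
    else:                      # array 32
        return struct.pack('>BI', 0xdd, count) + buf


traces = [[{'name': 'parent'}], [{'name': 'child'}]]
joined = join_encoded([msgpack.packb(t) for t in traces])
# Prepending the array header to the concatenated payloads is byte-for-byte
# equivalent to packing the whole list in one call.
assert joined == msgpack.packb(traces)
```

The point of the manual header is that each trace can be encoded (and size-checked against the payload limit) as it arrives, while the final buffer still decodes as a single msgpack array.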
diff --git a/ddtrace/opentracer/__init__.py b/ddtrace/opentracer/__init__.py deleted file mode 100644 index cf5e0412..00000000 --- a/ddtrace/opentracer/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from .tracer import Tracer -from .helpers import set_global_tracer - -__all__ = [ - 'Tracer', - 'set_global_tracer', -] diff --git a/ddtrace/opentracer/helpers.py b/ddtrace/opentracer/helpers.py deleted file mode 100644 index f088c3f7..00000000 --- a/ddtrace/opentracer/helpers.py +++ /dev/null @@ -1,16 +0,0 @@ -import opentracing -import ddtrace - -""" -Helper routines for Datadog OpenTracing. -""" - - -def set_global_tracer(tracer): - """Sets the global tracers to the given tracer.""" - - # overwrite the opentracer reference - opentracing.tracer = tracer - - # overwrite the Datadog tracer reference - ddtrace.tracer = tracer._dd_tracer diff --git a/ddtrace/opentracer/propagation/__init__.py b/ddtrace/opentracer/propagation/__init__.py deleted file mode 100644 index 28f5ad62..00000000 --- a/ddtrace/opentracer/propagation/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .http import HTTPPropagator - - -__all__ = [ - 'HTTPPropagator', -] diff --git a/ddtrace/opentracer/propagation/binary.py b/ddtrace/opentracer/propagation/binary.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ddtrace/opentracer/propagation/http.py b/ddtrace/opentracer/propagation/http.py deleted file mode 100644 index 9652c8ea..00000000 --- a/ddtrace/opentracer/propagation/http.py +++ /dev/null @@ -1,79 +0,0 @@ -from opentracing import InvalidCarrierException, SpanContextCorruptedException -from ddtrace.propagation.http import HTTPPropagator as DDHTTPPropagator - -from ...internal.logger import get_logger -from ..span_context import SpanContext -from .propagator import Propagator - - -log = get_logger(__name__) - -HTTP_BAGGAGE_PREFIX = 'ot-baggage-' -HTTP_BAGGAGE_PREFIX_LEN = len(HTTP_BAGGAGE_PREFIX) - - -class HTTPPropagator(Propagator): - """OpenTracing compatible HTTP_HEADER and TEXT_MAP format propagator. - - `HTTPPropagator` provides compatibility by using existing OpenTracing - compatible methods from the ddtracer along with new logic supporting the - outstanding OpenTracing-defined functionality. - """ - - __slots__ = ['_dd_propagator'] - - def __init__(self): - self._dd_propagator = DDHTTPPropagator() - - def inject(self, span_context, carrier): - """Inject a span context into a carrier. - - *span_context* is injected into the carrier by first using an - :class:`ddtrace.propagation.http.HTTPPropagator` to inject the ddtracer - specific fields. - - Then the baggage is injected into *carrier*. - - :param span_context: span context to inject. - - :param carrier: carrier to inject into. - """ - if not isinstance(carrier, dict): - raise InvalidCarrierException('propagator expects carrier to be a dict') - - self._dd_propagator.inject(span_context._dd_context, carrier) - - # Add the baggage - if span_context.baggage is not None: - for key in span_context.baggage: - carrier[HTTP_BAGGAGE_PREFIX + key] = span_context.baggage[key] - - def extract(self, carrier): - """Extract a span context from a carrier. - - :class:`ddtrace.propagation.http.HTTPPropagator` is used to extract - ddtracer supported fields into a `ddtrace.Context` context which is - combined with new logic to extract the baggage which is returned in an - OpenTracing compatible span context. - - :param carrier: carrier to extract from. - - :return: extracted span context. 
- """ - if not isinstance(carrier, dict): - raise InvalidCarrierException('propagator expects carrier to be a dict') - - ddspan_ctx = self._dd_propagator.extract(carrier) - - # if the dd propagator fails then it will return a new empty span - # context (with trace_id=None), we however want to raise an exception - # if this occurs. - if not ddspan_ctx.trace_id: - raise SpanContextCorruptedException('failed to extract span context') - - baggage = {} - for key in carrier: - if key.startswith(HTTP_BAGGAGE_PREFIX): - baggage[key[HTTP_BAGGAGE_PREFIX_LEN:]] = carrier[key] - - return SpanContext(ddcontext=ddspan_ctx, baggage=baggage) diff --git a/ddtrace/opentracer/propagation/propagator.py b/ddtrace/opentracer/propagation/propagator.py deleted file mode 100644 index b7f7cda8..00000000 --- a/ddtrace/opentracer/propagation/propagator.py +++ /dev/null @@ -1,15 +0,0 @@ -from abc import ABCMeta, abstractmethod - -# ref: https://stackoverflow.com/a/38668373 -ABC = ABCMeta('ABC', (object,), {'__slots__': ()}) - - -class Propagator(ABC): - - @abstractmethod - def inject(self, span_context, carrier): - pass - - @abstractmethod - def extract(self, carrier): - pass diff --git a/ddtrace/opentracer/propagation/text.py b/ddtrace/opentracer/propagation/text.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ddtrace/opentracer/settings.py b/ddtrace/opentracer/settings.py deleted file mode 100644 index f4a5bee3..00000000 --- a/ddtrace/opentracer/settings.py +++ /dev/null @@ -1,34 +0,0 @@ -from collections import namedtuple - - -CONFIG_KEY_NAMES = [ - 'AGENT_HOSTNAME', - 'AGENT_HTTPS', - 'AGENT_PORT', - 'DEBUG', - 'ENABLED', - 'GLOBAL_TAGS', - 'SAMPLER', - 'PRIORITY_SAMPLING', - 'SETTINGS', -] - -# Keys used for the configuration dict -ConfigKeyNames = namedtuple('ConfigKeyNames', CONFIG_KEY_NAMES) - -ConfigKeys = ConfigKeyNames( - AGENT_HOSTNAME='agent_hostname', - AGENT_HTTPS='agent_https', - AGENT_PORT='agent_port', - DEBUG='debug', - ENABLED='enabled', - GLOBAL_TAGS='global_tags', - SAMPLER='sampler', - PRIORITY_SAMPLING='priority_sampling', - SETTINGS='settings', -) - - -def config_invalid_keys(config): - """Returns a list of keys that exist in *config* and not in KEYS.""" - return [key for key in config.keys() if key not in ConfigKeys] diff --git a/ddtrace/opentracer/span.py b/ddtrace/opentracer/span.py deleted file mode 100644 index 7342c4b4..00000000 --- a/ddtrace/opentracer/span.py +++ /dev/null @@ -1,160 +0,0 @@ -import threading - -from opentracing import Span as OpenTracingSpan -from opentracing.ext import tags as OTTags -from ddtrace.span import Span as DatadogSpan -from ddtrace.ext import errors -from .tags import Tags - -from .span_context import SpanContext - - -class Span(OpenTracingSpan): - """Datadog implementation of :class:`opentracing.Span`""" - - def __init__(self, tracer, context, operation_name): - if context is not None: - context = SpanContext(ddcontext=context._dd_context, - baggage=context.baggage) - else: - context = SpanContext() - - super(Span, self).__init__(tracer, context) - - self.finished = False - self._lock = threading.Lock() - # use a datadog span - self._dd_span = DatadogSpan(tracer._dd_tracer, operation_name, - context=context._dd_context) - - def finish(self, finish_time=None): - """Finish the span. - - This calls finish on the ddspan. 
- - :param finish_time: specify a custom finish time with a unix timestamp - per time.time() - :type timestamp: float - """ - if self.finished: - return - - # finish the datadog span - self._dd_span.finish(finish_time) - self.finished = True - - def set_baggage_item(self, key, value): - """Sets a baggage item in the span context of this span. - - Baggage is used to propagate state between spans. - - :param key: baggage item key - :type key: str - - :param value: baggage item value - :type value: a type that can be compat.stringify()'d - - :rtype: Span - :return: itself for chaining calls - """ - new_ctx = self.context.with_baggage_item(key, value) - with self._lock: - self._context = new_ctx - return self - - def get_baggage_item(self, key): - """Gets a baggage item from the span context of this span. - - :param key: baggage item key - :type key: str - - :rtype: str - :return: the baggage value for the given key or ``None``. - """ - return self.context.get_baggage_item(key) - - def set_operation_name(self, operation_name): - """Set the operation name.""" - self._dd_span.name = operation_name - - def log_kv(self, key_values, timestamp=None): - """Add a log record to this span. - - Passes on relevant opentracing key values onto the datadog span. - - :param key_values: a dict of string keys and values of any type - :type key_values: dict - - :param timestamp: a unix timestamp per time.time() - :type timestamp: float - - :return: the span itself, for call chaining - :rtype: Span - """ - - # match opentracing defined keys to datadog functionality - # opentracing/specification/blob/1be630515dafd4d2a468d083300900f89f28e24d/semantic_conventions.md#log-fields-table - for key, val in key_values.items(): - if key == 'event' and val == 'error': - # TODO: not sure if it's actually necessary to set the error manually - self._dd_span.error = 1 - self.set_tag('error', 1) - elif key == 'error' or key == 'error.object': - self.set_tag(errors.ERROR_TYPE, val) - elif key == 'message': - self.set_tag(errors.ERROR_MSG, val) - elif key == 'stack': - self.set_tag(errors.ERROR_STACK, val) - else: - pass - - return self - - def set_tag(self, key, value): - """Set a tag on the span. - - This sets the tag on the underlying datadog span. - """ - if key == Tags.SPAN_TYPE: - self._dd_span.span_type = value - elif key == Tags.SERVICE_NAME: - self._dd_span.service = value - elif key == Tags.RESOURCE_NAME or key == OTTags.DATABASE_STATEMENT: - self._dd_span.resource = value - elif key == OTTags.PEER_HOSTNAME: - self._dd_span.set_tag(Tags.TARGET_HOST, value) - elif key == OTTags.PEER_PORT: - self._dd_span.set_tag(Tags.TARGET_PORT, value) - elif key == Tags.SAMPLING_PRIORITY: - self._dd_span.context.sampling_priority = value - else: - self._dd_span.set_tag(key, value) - - def _get_tag(self, key): - """Gets a tag from the span. - - This method retrieves the tag from the underlying datadog span. 
- """ - return self._dd_span.get_tag(key) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type: - self._dd_span.set_exc_info(exc_type, exc_val, exc_tb) - - # note: self.finish() AND _dd_span.__exit__ will call _span.finish() but - # it is idempotent - self._dd_span.__exit__(exc_type, exc_val, exc_tb) - self.finish() - - def _associate_dd_span(self, ddspan): - """Associates a DD span with this span.""" - # get the datadog span context - self._dd_span = ddspan - self.context._dd_context = ddspan.context - - @property - def _dd_context(self): - return self._dd_span.context diff --git a/ddtrace/opentracer/span_context.py b/ddtrace/opentracer/span_context.py deleted file mode 100644 index 35794228..00000000 --- a/ddtrace/opentracer/span_context.py +++ /dev/null @@ -1,50 +0,0 @@ -from opentracing import SpanContext as OpenTracingSpanContext - -from ddtrace.context import Context as DatadogContext - - -class SpanContext(OpenTracingSpanContext): - """Implementation of the OpenTracing span context.""" - - def __init__(self, trace_id=None, span_id=None, - sampling_priority=None, baggage=None, ddcontext=None): - # create a new dict for the baggage if it is not provided - # NOTE: it would be preferable to use opentracing.SpanContext.EMPTY_BAGGAGE - # but it is mutable. - # see: opentracing-python/blob/8775c7bfc57fd66e1c8bcf9a54d3e434d37544f9/opentracing/span.py#L30 - baggage = baggage or {} - - if ddcontext is not None: - self._dd_context = ddcontext - else: - self._dd_context = DatadogContext( - trace_id=trace_id, - span_id=span_id, - sampling_priority=sampling_priority, - ) - - self._baggage = dict(baggage) - - @property - def baggage(self): - return self._baggage - - def set_baggage_item(self, key, value): - """Sets a baggage item in this span context. - - Note that this operation mutates the baggage of this span context - """ - self.baggage[key] = value - - def with_baggage_item(self, key, value): - """Returns a copy of this span with a new baggage item. - - Useful for instantiating new child span contexts. 
- """ - baggage = dict(self._baggage) - baggage[key] = value - return SpanContext(ddcontext=self._dd_context, baggage=baggage) - - def get_baggage_item(self, key): - """Gets a baggage item in this span context.""" - return self.baggage.get(key, None) diff --git a/ddtrace/opentracer/tags.py b/ddtrace/opentracer/tags.py deleted file mode 100644 index 9b413277..00000000 --- a/ddtrace/opentracer/tags.py +++ /dev/null @@ -1,21 +0,0 @@ -from collections import namedtuple - -TAG_NAMES = [ - 'RESOURCE_NAME', - 'SAMPLING_PRIORITY', - 'SERVICE_NAME', - 'SPAN_TYPE', - 'TARGET_HOST', - 'TARGET_PORT', -] - -TagNames = namedtuple('TagNames', TAG_NAMES) - -Tags = TagNames( - RESOURCE_NAME='resource.name', - SAMPLING_PRIORITY='sampling.priority', - SERVICE_NAME='service.name', - TARGET_HOST='out.host', - TARGET_PORT='out.port', - SPAN_TYPE='span.type', -) diff --git a/ddtrace/opentracer/tracer.py b/ddtrace/opentracer/tracer.py deleted file mode 100644 index e2537311..00000000 --- a/ddtrace/opentracer/tracer.py +++ /dev/null @@ -1,299 +0,0 @@ -import opentracing -from opentracing import Format -from opentracing.scope_managers import ThreadLocalScopeManager - -import ddtrace -from ddtrace import Tracer as DatadogTracer -from ddtrace.constants import FILTERS_KEY -from ddtrace.settings import ConfigException -from ddtrace.utils import merge_dicts -from ddtrace.utils.config import get_application_name - -from ..internal.logger import get_logger -from .propagation import HTTPPropagator -from .span import Span -from .span_context import SpanContext -from .settings import ConfigKeys as keys, config_invalid_keys -from .utils import get_context_provider_for_scope_manager - -log = get_logger(__name__) - -DEFAULT_CONFIG = { - keys.AGENT_HOSTNAME: 'localhost', - keys.AGENT_HTTPS: False, - keys.AGENT_PORT: 8126, - keys.DEBUG: False, - keys.ENABLED: True, - keys.GLOBAL_TAGS: {}, - keys.SAMPLER: None, - keys.PRIORITY_SAMPLING: None, - keys.SETTINGS: { - FILTERS_KEY: [], - }, -} - - -class Tracer(opentracing.Tracer): - """A wrapper providing an OpenTracing API for the Datadog tracer.""" - - def __init__(self, service_name=None, config=None, scope_manager=None, dd_tracer=None): - """Initialize a new Datadog opentracer. - - :param service_name: (optional) the name of the service that this - tracer will be used with. Note if not provided, a service name will - try to be determined based off of ``sys.argv``. If this fails a - :class:`ddtrace.settings.ConfigException` will be raised. - :param config: (optional) a configuration object to specify additional - options. See the documentation for further information. - :param scope_manager: (optional) the scope manager for this tracer to - use. The available managers are listed in the Python OpenTracing repo - here: https://github.com/opentracing/opentracing-python#scope-managers. - If ``None`` is provided, defaults to - :class:`opentracing.scope_managers.ThreadLocalScopeManager`. - :param dd_tracer: (optional) the Datadog tracer for this tracer to use. This - should only be passed if a custom Datadog tracer is being used. Defaults - to the global ``ddtrace.tracer`` tracer. 
- """ - # Merge the given config with the default into a new dict - config = config or {} - self._config = merge_dicts(DEFAULT_CONFIG, config) - - # Pull out commonly used properties for performance - self._service_name = service_name or get_application_name() - self._enabled = self._config.get(keys.ENABLED) - self._debug = self._config.get(keys.DEBUG) - - if self._debug: - # Ensure there are no typos in any of the keys - invalid_keys = config_invalid_keys(self._config) - if invalid_keys: - str_invalid_keys = ','.join(invalid_keys) - raise ConfigException('invalid key(s) given (%s)'.format(str_invalid_keys)) - - if not self._service_name: - raise ConfigException(""" Cannot detect the \'service_name\'. - Please set the \'service_name=\' - keyword argument. - """) - - self._scope_manager = scope_manager or ThreadLocalScopeManager() - - dd_context_provider = get_context_provider_for_scope_manager(self._scope_manager) - - self._dd_tracer = dd_tracer or ddtrace.tracer or DatadogTracer() - self._dd_tracer.set_tags(self._config.get(keys.GLOBAL_TAGS)) - self._dd_tracer.configure(enabled=self._enabled, - hostname=self._config.get(keys.AGENT_HOSTNAME), - https=self._config.get(keys.AGENT_HTTPS), - port=self._config.get(keys.AGENT_PORT), - sampler=self._config.get(keys.SAMPLER), - settings=self._config.get(keys.SETTINGS), - priority_sampling=self._config.get(keys.PRIORITY_SAMPLING), - context_provider=dd_context_provider, - ) - self._propagators = { - Format.HTTP_HEADERS: HTTPPropagator(), - Format.TEXT_MAP: HTTPPropagator(), - } - - @property - def scope_manager(self): - """Returns the scope manager being used by this tracer.""" - return self._scope_manager - - def start_active_span(self, operation_name, child_of=None, references=None, - tags=None, start_time=None, ignore_active_span=False, - finish_on_close=True): - """Returns a newly started and activated `Scope`. - The returned `Scope` supports with-statement contexts. For example:: - - with tracer.start_active_span('...') as scope: - scope.span.set_tag('http.method', 'GET') - do_some_work() - # Span.finish() is called as part of Scope deactivation through - # the with statement. - - It's also possible to not finish the `Span` when the `Scope` context - expires:: - - with tracer.start_active_span('...', - finish_on_close=False) as scope: - scope.span.set_tag('http.method', 'GET') - do_some_work() - # Span.finish() is not called as part of Scope deactivation as - # `finish_on_close` is `False`. - - :param operation_name: name of the operation represented by the new - span from the perspective of the current service. - :param child_of: (optional) a Span or SpanContext instance representing - the parent in a REFERENCE_CHILD_OF Reference. If specified, the - `references` parameter must be omitted. - :param references: (optional) a list of Reference objects that identify - one or more parent SpanContexts. (See the Reference documentation - for detail). - :param tags: an optional dictionary of Span Tags. The caller gives up - ownership of that dictionary, because the Tracer may use it as-is - to avoid extra data copying. - :param start_time: an explicit Span start time as a unix timestamp per - time.time(). - :param ignore_active_span: (optional) an explicit flag that ignores - the current active `Scope` and creates a root `Span`. - :param finish_on_close: whether span should automatically be finished - when `Scope.close()` is called. - :return: a `Scope`, already registered via the `ScopeManager`. 
- """ - otspan = self.start_span( - operation_name=operation_name, - child_of=child_of, - references=references, - tags=tags, - start_time=start_time, - ignore_active_span=ignore_active_span, - ) - - # activate this new span - scope = self._scope_manager.activate(otspan, finish_on_close) - - return scope - - def start_span(self, operation_name=None, child_of=None, references=None, - tags=None, start_time=None, ignore_active_span=False): - """Starts and returns a new Span representing a unit of work. - - Starting a root Span (a Span with no causal references):: - - tracer.start_span('...') - - Starting a child Span (see also start_child_span()):: - - tracer.start_span( - '...', - child_of=parent_span) - - Starting a child Span in a more verbose way:: - - tracer.start_span( - '...', - references=[opentracing.child_of(parent_span)]) - - Note: the precedence when defining a relationship is the following: - (highest) - 1. *child_of* - 2. *references* - 3. `scope_manager.active` (unless *ignore_active_span* is True) - 4. None - (lowest) - - Currently Datadog only supports `child_of` references. - - :param operation_name: name of the operation represented by the new - span from the perspective of the current service. - :param child_of: (optional) a Span or SpanContext instance representing - the parent in a REFERENCE_CHILD_OF Reference. If specified, the - `references` parameter must be omitted. - :param references: (optional) a list of Reference objects that identify - one or more parent SpanContexts. (See the Reference documentation - for detail) - :param tags: an optional dictionary of Span Tags. The caller gives up - ownership of that dictionary, because the Tracer may use it as-is - to avoid extra data copying. - :param start_time: an explicit Span start time as a unix timestamp per - time.time() - :param ignore_active_span: an explicit flag that ignores the current - active `Scope` and creates a root `Span`. - :return: an already-started Span instance. 
- """ - ot_parent = None # 'ot_parent' is more readable than 'child_of' - ot_parent_context = None # the parent span's context - dd_parent = None # the child_of to pass to the ddtracer - - if child_of is not None: - ot_parent = child_of # 'ot_parent' is more readable than 'child_of' - elif references and isinstance(references, list): - # we currently only support child_of relations to one span - ot_parent = references[0].referenced_context - - # - whenever child_of is not None ddspans with parent-child - # relationships will share a ddcontext which maintains a hierarchy of - # ddspans for the execution flow - # - when child_of is a ddspan then the ddtracer uses this ddspan to - # create the child ddspan - # - when child_of is a ddcontext then the ddtracer uses the ddcontext to - # get_current_span() for the parent - if ot_parent is None and not ignore_active_span: - # attempt to get the parent span from the scope manager - scope = self._scope_manager.active - parent_span = getattr(scope, 'span', None) - ot_parent_context = getattr(parent_span, 'context', None) - # we want the ddcontext of the active span in order to maintain the - # ddspan hierarchy - dd_parent = getattr(ot_parent_context, '_dd_context', None) - - # if we cannot get the context then try getting it from the DD tracer - # this emulates the behaviour of tracer.trace() - if dd_parent is None: - dd_parent = self._dd_tracer.get_call_context() - elif ot_parent is not None and isinstance(ot_parent, Span): - # a span is given to use as a parent - ot_parent_context = ot_parent.context - dd_parent = ot_parent._dd_span - elif ot_parent is not None and isinstance(ot_parent, SpanContext): - # a span context is given to use to find the parent ddspan - dd_parent = ot_parent._dd_context - elif ot_parent is None: - # user wants to create a new parent span we don't have to do - # anything - pass - else: - raise TypeError('invalid span configuration given') - - # create a new otspan and ddspan using the ddtracer and associate it - # with the new otspan - ddspan = self._dd_tracer.start_span( - name=operation_name, - child_of=dd_parent, - service=self._service_name, - ) - - # set the start time if one is specified - ddspan.start = start_time or ddspan.start - if tags is not None: - ddspan.set_tags(tags) - - otspan = Span(self, ot_parent_context, operation_name) - # sync up the OT span with the DD span - otspan._associate_dd_span(ddspan) - - return otspan - - def inject(self, span_context, format, carrier): - """Injects a span context into a carrier. - - :param span_context: span context to inject. - :param format: format to encode the span context with. - :param carrier: the carrier of the encoded span context. - """ - propagator = self._propagators.get(format, None) - - if propagator is None: - raise opentracing.UnsupportedFormatException - - propagator.inject(span_context, carrier) - - def extract(self, format, carrier): - """Extracts a span context from a carrier. - - :param format: format that the carrier is encoded with. - :param carrier: the carrier to extract from. 
- """ - propagator = self._propagators.get(format, None) - - if propagator is None: - raise opentracing.UnsupportedFormatException - - # we have to manually activate the returned context from a distributed - # trace - ot_span_ctx = propagator.extract(carrier) - dd_span_ctx = ot_span_ctx._dd_context - self._dd_tracer.context_provider.activate(dd_span_ctx) - return ot_span_ctx diff --git a/ddtrace/opentracer/utils.py b/ddtrace/opentracer/utils.py deleted file mode 100644 index 06953ba0..00000000 --- a/ddtrace/opentracer/utils.py +++ /dev/null @@ -1,22 +0,0 @@ -# DEV: If `asyncio` or `gevent` are unavailable we do not throw an error, -# `context_provider` will just not be set and we'll get an `AttributeError` instead - - -def get_context_provider_for_scope_manager(scope_manager): - """Returns the context_provider to use with a given scope_manager.""" - - scope_manager_type = type(scope_manager).__name__ - - # avoid having to import scope managers which may not be compatible - # with the version of python being used - if scope_manager_type == 'AsyncioScopeManager': - import ddtrace.contrib.asyncio - dd_context_provider = ddtrace.contrib.asyncio.context_provider - elif scope_manager_type == 'GeventScopeManager': - import ddtrace.contrib.gevent - dd_context_provider = ddtrace.contrib.gevent.context_provider - else: - from ddtrace.provider import DefaultContextProvider - dd_context_provider = DefaultContextProvider() - - return dd_context_provider diff --git a/ddtrace/payload.py b/ddtrace/payload.py deleted file mode 100644 index df5cb295..00000000 --- a/ddtrace/payload.py +++ /dev/null @@ -1,90 +0,0 @@ -from .encoding import get_encoder - - -class PayloadFull(Exception): - """The payload is full.""" - pass - - -class Payload(object): - """ - Trace agent API payload buffer class - - This class is used to encoded and store traces to build the payload we send to - the trace agent. - - DEV: We encoded and buffer traces so that we can reliable determine the size of - the payload easily so we can flush based on the payload size. 
- """ - __slots__ = ('traces', 'size', 'encoder', 'max_payload_size') - - # Trace agent limit payload size of 10 MB - # 5 MB should be a good average efficient size - DEFAULT_MAX_PAYLOAD_SIZE = 5 * 1000000 - - def __init__(self, encoder=None, max_payload_size=DEFAULT_MAX_PAYLOAD_SIZE): - """ - Constructor for Payload - - :param encoder: The encoded to use, default is the default encoder - :type encoder: ``ddtrace.encoding.Encoder`` - :param max_payload_size: The max number of bytes a payload should be before - being considered full (default: 5mb) - """ - self.max_payload_size = max_payload_size - self.encoder = encoder or get_encoder() - self.traces = [] - self.size = 0 - - def add_trace(self, trace): - """ - Encode and append a trace to this payload - - :param trace: A trace to append - :type trace: A list of ``ddtrace.span.Span``s - """ - # No trace or empty trace was given, ignore - if not trace: - return - - # Encode the trace, append, and add it's length to the size - encoded = self.encoder.encode_trace(trace) - if len(encoded) + self.size > self.max_payload_size: - raise PayloadFull() - self.traces.append(encoded) - self.size += len(encoded) - - @property - def length(self): - """ - Get the number of traces in this payload - - :returns: The number of traces in the payload - :rtype: int - """ - return len(self.traces) - - @property - def empty(self): - """ - Whether this payload is empty or not - - :returns: Whether this payload is empty or not - :rtype: bool - """ - return self.length == 0 - - def get_payload(self): - """ - Get the fully encoded payload - - :returns: The fully encoded payload - :rtype: str | bytes - """ - # DEV: `self.traces` is an array of encoded traces, `join_encoded` joins them together - return self.encoder.join_encoded(self.traces) - - def __repr__(self): - """Get the string representation of this payload""" - return '{0}(length={1}, size={2} B, max_payload_size={3} B)'.format( - self.__class__.__name__, self.length, self.size, self.max_payload_size) diff --git a/ddtrace/vendor/dogstatsd/__init__.py b/ddtrace/vendor/dogstatsd/__init__.py deleted file mode 100644 index 0e93d752..00000000 --- a/ddtrace/vendor/dogstatsd/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -""" -Copyright (c) 2015, Datadog -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Datadog nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""" - -from .base import DogStatsd, statsd # noqa diff --git a/ddtrace/vendor/dogstatsd/base.py b/ddtrace/vendor/dogstatsd/base.py deleted file mode 100644 index f0167cf8..00000000 --- a/ddtrace/vendor/dogstatsd/base.py +++ /dev/null @@ -1,425 +0,0 @@ -#!/usr/bin/env python -""" -DogStatsd is a Python client for DogStatsd, a Statsd fork for Datadog. -""" -# stdlib -from random import random -import logging -import os -import socket -from threading import Lock - -# datadog -from .context import TimedContextManagerDecorator -from .route import get_default_route -from .compat import text - -# Logging -log = logging.getLogger('datadog.dogstatsd') - -# Default config -DEFAULT_HOST = 'localhost' -DEFAULT_PORT = 8125 - -# Tag name of entity_id -ENTITY_ID_TAG_NAME = "dd.internal.entity_id" - - -class DogStatsd(object): - OK, WARNING, CRITICAL, UNKNOWN = (0, 1, 2, 3) - - def __init__(self, host=DEFAULT_HOST, port=DEFAULT_PORT, max_buffer_size=50, namespace=None, - constant_tags=None, use_ms=False, use_default_route=False, - socket_path=None): - """ - Initialize a DogStatsd object. - - >>> statsd = DogStatsd() - - :envvar DD_AGENT_HOST: the host of the DogStatsd server. - If set, it overrides default value. - :type DD_AGENT_HOST: string - - :envvar DD_DOGSTATSD_PORT: the port of the DogStatsd server. - If set, it overrides default value. - :type DD_DOGSTATSD_PORT: integer - - :param host: the host of the DogStatsd server. - :type host: string - - :param port: the port of the DogStatsd server. - :type port: integer - - :param max_buffer_size: Maximum number of metrics to buffer before sending to the server - if sending metrics in batch - :type max_buffer_size: integer - - :param namespace: Namespace to prefix all metric names - :type namespace: string - - :param constant_tags: Tags to attach to all metrics - :type constant_tags: list of strings - - :param use_ms: Report timed values in milliseconds instead of seconds (default False) - :type use_ms: boolean - - :envvar DATADOG_TAGS: Tags to attach to every metric reported by dogstatsd client - :type DATADOG_TAGS: list of strings - - :envvar DD_ENTITY_ID: Tag to identify the client entity. - :type DD_ENTITY_ID: string - - :param use_default_route: Dynamically set the DogStatsd host to the default route - (Useful when running the client in a container) (Linux only) - :type use_default_route: boolean - - :param socket_path: Communicate with dogstatsd through a UNIX socket instead of - UDP. 
If set, disables UDP transmission (Linux only) - :type socket_path: string - """ - - self.lock = Lock() - - # Check host and port env vars - agent_host = os.environ.get('DD_AGENT_HOST') - if agent_host and host == DEFAULT_HOST: - host = agent_host - - dogstatsd_port = os.environ.get('DD_DOGSTATSD_PORT') - if dogstatsd_port and port == DEFAULT_PORT: - try: - port = int(dogstatsd_port) - except ValueError: - log.warning("Port number provided in DD_DOGSTATSD_PORT env var is not an integer: \ - %s, using %s as port number", dogstatsd_port, port) - - # Connection - if socket_path is not None: - self.socket_path = socket_path - self.host = None - self.port = None - else: - self.socket_path = None - self.host = self.resolve_host(host, use_default_route) - self.port = int(port) - - # Socket - self.socket = None - self.max_buffer_size = max_buffer_size - self._send = self._send_to_server - self.encoding = 'utf-8' - - # Options - env_tags = [tag for tag in os.environ.get('DATADOG_TAGS', '').split(',') if tag] - if constant_tags is None: - constant_tags = [] - self.constant_tags = constant_tags + env_tags - entity_id = os.environ.get('DD_ENTITY_ID') - if entity_id: - entity_tag = '{name}:{value}'.format(name=ENTITY_ID_TAG_NAME, value=entity_id) - self.constant_tags.append(entity_tag) - if namespace is not None: - namespace = text(namespace) - self.namespace = namespace - self.use_ms = use_ms - - def __enter__(self): - self.open_buffer(self.max_buffer_size) - return self - - def __exit__(self, type, value, traceback): - self.close_buffer() - - @staticmethod - def resolve_host(host, use_default_route): - """ - Resolve the DogStatsd host. - - Args: - host (string): host - use_default_route (bool): use the system default route as host - (overrides the `host` parameter) - """ - if not use_default_route: - return host - - return get_default_route() - - def get_socket(self): - """ - Return a connected socket. - - Note: connect the socket before assigning it to the class instance to - avoid bad thread race conditions. - """ - with self.lock: - if not self.socket: - if self.socket_path is not None: - sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) - sock.connect(self.socket_path) - sock.setblocking(0) - self.socket = sock - else: - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.connect((self.host, self.port)) - self.socket = sock - - return self.socket - - def open_buffer(self, max_buffer_size=50): - """ - Open a buffer to send a batch of metrics in one packet. - - You can also use this as a context manager. - - >>> with DogStatsd() as batch: - >>> batch.gauge('users.online', 123) - >>> batch.gauge('active.connections', 1001) - """ - self.max_buffer_size = max_buffer_size - self.buffer = [] - self._send = self._send_to_buffer - - def close_buffer(self): - """ - Flush the buffer and switch back to single metric packets. - """ - self._send = self._send_to_server - - if self.buffer: - # Only send packets if there are packets to send - self._flush_buffer() - - def gauge(self, metric, value, tags=None, sample_rate=1): - """ - Record the value of a gauge, optionally setting a list of tags and a - sample rate. - - >>> statsd.gauge('users.online', 123) - >>> statsd.gauge('active.connections', 1001, tags=["protocol:http"]) - """ - return self._report(metric, 'g', value, tags, sample_rate) - - def increment(self, metric, value=1, tags=None, sample_rate=1): - """ - Increment a counter, optionally setting a value, tags and a sample - rate. 
- - >>> statsd.increment('page.views') - >>> statsd.increment('files.transferred', 124) - """ - self._report(metric, 'c', value, tags, sample_rate) - - def decrement(self, metric, value=1, tags=None, sample_rate=1): - """ - Decrement a counter, optionally setting a value, tags and a sample - rate. - - >>> statsd.decrement('files.remaining') - >>> statsd.decrement('active.connections', 2) - """ - metric_value = -value if value else value - self._report(metric, 'c', metric_value, tags, sample_rate) - - def histogram(self, metric, value, tags=None, sample_rate=1): - """ - Sample a histogram value, optionally setting tags and a sample rate. - - >>> statsd.histogram('uploaded.file.size', 1445) - >>> statsd.histogram('album.photo.count', 26, tags=["gender:female"]) - """ - self._report(metric, 'h', value, tags, sample_rate) - - def distribution(self, metric, value, tags=None, sample_rate=1): - """ - Send a global distribution value, optionally setting tags and a sample rate. - - >>> statsd.distribution('uploaded.file.size', 1445) - >>> statsd.distribution('album.photo.count', 26, tags=["gender:female"]) - - This is a beta feature that must be enabled specifically for your organization. - """ - self._report(metric, 'd', value, tags, sample_rate) - - def timing(self, metric, value, tags=None, sample_rate=1): - """ - Record a timing, optionally setting tags and a sample rate. - - >>> statsd.timing("query.response.time", 1234) - """ - self._report(metric, 'ms', value, tags, sample_rate) - - def timed(self, metric=None, tags=None, sample_rate=1, use_ms=None): - """ - A decorator or context manager that will measure the distribution of a - function's/context's run time. Optionally specify a list of tags or a - sample rate. If the metric is not defined as a decorator, the module - name and function name will be used. The metric is required as a context - manager. - :: - - @statsd.timed('user.query.time', sample_rate=0.5) - def get_user(user_id): - # Do what you need to ... - pass - - # Is equivalent to ... - with statsd.timed('user.query.time', sample_rate=0.5): - # Do what you need to ... - pass - - # Is equivalent to ... - start = time.time() - try: - get_user(user_id) - finally: - statsd.timing('user.query.time', time.time() - start) - """ - return TimedContextManagerDecorator(self, metric, tags, sample_rate, use_ms) - - def set(self, metric, value, tags=None, sample_rate=1): - """ - Sample a set value. - - >>> statsd.set('visitors.uniques', 999) - """ - self._report(metric, 's', value, tags, sample_rate) - - def close_socket(self): - """ - Closes connected socket if connected. - """ - if self.socket: - self.socket.close() - self.socket = None - - def _report(self, metric, metric_type, value, tags, sample_rate): - """ - Create a metric packet and send it. 
- - More information about the packets' format: http://docs.datadoghq.com/guides/dogstatsd/ - """ - if value is None: - return - - if sample_rate != 1 and random() > sample_rate: - return - - # Resolve the full tag list - tags = self._add_constant_tags(tags) - - # Create/format the metric packet - payload = "%s%s:%s|%s%s%s" % ( - (self.namespace + ".") if self.namespace else "", - metric, - value, - metric_type, - ("|@" + text(sample_rate)) if sample_rate != 1 else "", - ("|#" + ",".join(tags)) if tags else "", - ) - - # Send it - self._send(payload) - - def _send_to_server(self, packet): - try: - # If set, use socket directly - (self.socket or self.get_socket()).send(packet.encode(self.encoding)) - except socket.timeout: - # dogstatsd is overflowing, drop the packets (mimicks the UDP behaviour) - return - except (socket.error, socket.herror, socket.gaierror) as se: - log.warning("Error submitting packet: {}, dropping the packet and closing the socket".format(se)) - self.close_socket() - except Exception as e: - log.error("Unexpected error: %s", str(e)) - return - - def _send_to_buffer(self, packet): - self.buffer.append(packet) - if len(self.buffer) >= self.max_buffer_size: - self._flush_buffer() - - def _flush_buffer(self): - self._send_to_server("\n".join(self.buffer)) - self.buffer = [] - - def _escape_event_content(self, string): - return string.replace('\n', '\\n') - - def _escape_service_check_message(self, string): - return string.replace('\n', '\\n').replace('m:', 'm\\:') - - def event(self, title, text, alert_type=None, aggregation_key=None, - source_type_name=None, date_happened=None, priority=None, - tags=None, hostname=None): - """ - Send an event. Attributes are the same as the Event API. - http://docs.datadoghq.com/api/ - - >>> statsd.event('Man down!', 'This server needs assistance.') - >>> statsd.event('The web server restarted', 'The web server is up again', alert_type='success') # NOQA - """ - title = self._escape_event_content(title) - text = self._escape_event_content(text) - - # Append all client level tags to every event - tags = self._add_constant_tags(tags) - - string = u'_e{%d,%d}:%s|%s' % (len(title), len(text), title, text) - if date_happened: - string = '%s|d:%d' % (string, date_happened) - if hostname: - string = '%s|h:%s' % (string, hostname) - if aggregation_key: - string = '%s|k:%s' % (string, aggregation_key) - if priority: - string = '%s|p:%s' % (string, priority) - if source_type_name: - string = '%s|s:%s' % (string, source_type_name) - if alert_type: - string = '%s|t:%s' % (string, alert_type) - if tags: - string = '%s|#%s' % (string, ','.join(tags)) - - if len(string) > 8 * 1024: - raise Exception(u'Event "%s" payload is too big (more than 8KB), ' - 'event discarded' % title) - - self._send(string) - - def service_check(self, check_name, status, tags=None, timestamp=None, - hostname=None, message=None): - """ - Send a service check run. 
- - >>> statsd.service_check('my_service.check_name', DogStatsd.WARNING) - """ - message = self._escape_service_check_message(message) if message is not None else '' - - string = u'_sc|{0}|{1}'.format(check_name, status) - - # Append all client level tags to every status check - tags = self._add_constant_tags(tags) - - if timestamp: - string = u'{0}|d:{1}'.format(string, timestamp) - if hostname: - string = u'{0}|h:{1}'.format(string, hostname) - if tags: - string = u'{0}|#{1}'.format(string, ','.join(tags)) - if message: - string = u'{0}|m:{1}'.format(string, message) - - self._send(string) - - def _add_constant_tags(self, tags): - if self.constant_tags: - if tags: - return tags + self.constant_tags - else: - return self.constant_tags - return tags - - -statsd = DogStatsd() diff --git a/ddtrace/vendor/dogstatsd/compat.py b/ddtrace/vendor/dogstatsd/compat.py deleted file mode 100644 index bff3899a..00000000 --- a/ddtrace/vendor/dogstatsd/compat.py +++ /dev/null @@ -1,107 +0,0 @@ -# flake8: noqa -""" -Imports for compatibility with Python 2, Python 3 and Google App Engine. -""" -from functools import wraps -import logging -import socket -import sys - - -def _is_py_version_higher_than(major, minor=0): - """ - Assert that the Python version is higher than `$maj.$min`. - """ - return sys.version_info >= (major, minor) - - -def is_p3k(): - """ - Assert that Python is version 3 or higher. - """ - return _is_py_version_higher_than(3) - - -def is_higher_py35(): - """ - Assert that Python is version 3.5 or higher. - """ - return _is_py_version_higher_than(3, 5) - - -get_input = input - -# Python 3.x -if is_p3k(): - from io import StringIO - import builtins - import configparser - import urllib.request as url_lib, urllib.error, urllib.parse - - imap = map - text = str - - def iteritems(d): - return iter(d.items()) - - def iternext(iter): - return next(iter) - - -# Python 2.x -else: - import __builtin__ as builtins - from cStringIO import StringIO - from itertools import imap - import ConfigParser as configparser - import urllib2 as url_lib - - get_input = raw_input - text = unicode - - def iteritems(d): - return d.iteritems() - - def iternext(iter): - return iter.next() - - -# Python > 3.5 -if is_higher_py35(): - from asyncio import iscoroutinefunction - -# Others -else: - def iscoroutinefunction(*args, **kwargs): - return False - -# Optional requirements -try: - from UserDict import IterableUserDict -except ImportError: - from collections import UserDict as IterableUserDict - -try: - from configparser import ConfigParser -except ImportError: - from ConfigParser import ConfigParser - -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - -try: - import pkg_resources as pkg -except ImportError: - pkg = None - -#Python 2.6.x -try: - from logging import NullHandler -except ImportError: - from logging import Handler - - class NullHandler(Handler): - def emit(self, record): - pass diff --git a/ddtrace/vendor/dogstatsd/context.py b/ddtrace/vendor/dogstatsd/context.py deleted file mode 100644 index f4e7a57a..00000000 --- a/ddtrace/vendor/dogstatsd/context.py +++ /dev/null @@ -1,79 +0,0 @@ -# stdlib -from functools import wraps -from time import time - -# datadog -from .compat import ( - is_higher_py35, - iscoroutinefunction, -) - - -if is_higher_py35(): - from .context_async import _get_wrapped_co -else: - def _get_wrapped_co(self, func): - raise NotImplementedError( - u"Decorator `timed` compatibility with coroutine functions" - u" requires Python 3.5 or higher." 
- ) - - -class TimedContextManagerDecorator(object): - """ - A context manager and a decorator which will report the elapsed time in - the context OR in a function call. - """ - def __init__(self, statsd, metric=None, tags=None, sample_rate=1, use_ms=None): - self.statsd = statsd - self.metric = metric - self.tags = tags - self.sample_rate = sample_rate - self.use_ms = use_ms - self.elapsed = None - - def __call__(self, func): - """ - Decorator which returns the elapsed time of the function call. - - Default to the function name if metric was not provided. - """ - if not self.metric: - self.metric = '%s.%s' % (func.__module__, func.__name__) - - # Coroutines - if iscoroutinefunction(func): - return _get_wrapped_co(self, func) - - # Others - @wraps(func) - def wrapped(*args, **kwargs): - start = time() - try: - return func(*args, **kwargs) - finally: - self._send(start) - return wrapped - - def __enter__(self): - if not self.metric: - raise TypeError("Cannot used timed without a metric!") - self._start = time() - return self - - def __exit__(self, type, value, traceback): - # Report the elapsed time of the context manager. - self._send(self._start) - - def _send(self, start): - elapsed = time() - start - use_ms = self.use_ms if self.use_ms is not None else self.statsd.use_ms - elapsed = int(round(1000 * elapsed)) if use_ms else elapsed - self.statsd.timing(self.metric, elapsed, self.tags, self.sample_rate) - self.elapsed = elapsed - - def start(self): - self.__enter__() - - def stop(self): - self.__exit__(None, None, None) diff --git a/ddtrace/vendor/dogstatsd/context_async.py b/ddtrace/vendor/dogstatsd/context_async.py deleted file mode 100644 index 97debc88..00000000 --- a/ddtrace/vendor/dogstatsd/context_async.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Decorator `timed` for coroutine methods. - -Warning: requires Python 3.5 or higher. -""" -# stdlib -from functools import wraps -from time import time - - -def _get_wrapped_co(self, func): - """ - `timed` wrapper for coroutine methods. - """ - @wraps(func) - async def wrapped_co(*args, **kwargs): - start = time() - try: - result = await func(*args, **kwargs) - return result - finally: - self._send(start) - return wrapped_co diff --git a/ddtrace/vendor/dogstatsd/route.py b/ddtrace/vendor/dogstatsd/route.py deleted file mode 100644 index 59c23967..00000000 --- a/ddtrace/vendor/dogstatsd/route.py +++ /dev/null @@ -1,38 +0,0 @@ -""" -Helper(s), resolve the system's default interface. -""" -# stdlib -import socket -import struct - - -class UnresolvableDefaultRoute(Exception): - """ - Unable to resolve system's default route. - """ - - -def get_default_route(): - """ - Return the system default interface using the proc filesystem. 
- - Returns: - string: default route - - Raises: - `NotImplementedError`: No proc filesystem is found (non-Linux systems) - `StopIteration`: No default route found - """ - try: - with open('/proc/net/route') as f: - for line in f.readlines(): - fields = line.strip().split() - if fields[1] == '00000000': - return socket.inet_ntoa(struct.pack(' -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #if PY_VERSION_HEX >= 0x02070000 - #define HAVE_LONG_LONG - #endif -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#ifdef PYPY_VERSION - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#elif defined(PYSTON_VERSION) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE 
- #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #elif !defined(CYTHON_USE_PYLONG_INTERNALS) - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) - #endif - #ifndef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) - #endif - #ifndef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } -# else -# define 
CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int32 uint32_t; - #endif - #endif -#else - #include -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) && __cplusplus >= 201103L - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #elif __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__ ) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif - -#ifndef __cplusplus - #error "Cython files generated with the C++ option must be compiled with a C++ compiler." -#endif -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #else - #define CYTHON_INLINE inline - #endif -#endif -template -void __Pyx_call_destructor(T& x) { - x.~T(); -} -template -class __Pyx_FakeReference { - public: - __Pyx_FakeReference() : ptr(NULL) { } - __Pyx_FakeReference(const T& ref) : ptr(const_cast(&ref)) { } - T *operator->() { return ptr; } - T *operator&() { return ptr; } - operator T&() { return *ptr; } - template bool operator ==(U other) { return *ptr == other; } - template bool operator !=(U other) { return *ptr != other; } - private: - T *ptr; -}; - -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) - #define Py_OptimizeFlag 0 -#endif -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) - #define __Pyx_DefaultClassType PyClass_Type -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) - #define __Pyx_DefaultClassType PyType_Type -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - 
#define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords -#endif -#if CYTHON_FAST_PYCCALL -#define __Pyx_PyFastCFunction_Check(func)\ - ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) -#else -#define __Pyx_PyFastCFunction_Check(func) 0 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 - #define PyMem_RawMalloc(n) PyMem_Malloc(n) - #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) - #define PyMem_RawFree(p) PyMem_Free(p) -#endif -#if CYTHON_COMPILING_IN_PYSTON - #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -#else -#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) -#endif -#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) - #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact - #define PyObject_Unicode PyObject_Str -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) -#else - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t PyInt_AsLong -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : (Py_INCREF(func), func)) -#else - #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(WIN32) || defined(MS_WINDOWS) - #define _USE_MATH_DEFINES -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - - -#define __PYX_ERR(f_index, lineno, Ln_error) \ -{ \ - __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ -} - -#ifndef __PYX_EXTERN_C - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__msgpack___cmsgpack -#define __PYX_HAVE_API__msgpack___cmsgpack -/* Early includes */ -#include -#include -#include "pythread.h" -#include "pack.h" -#include "buff_converter.h" -#include -#include -#include "unpack.h" -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 1 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "ascii" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define __Pyx_sst_abs(value) ((value<0) 
? -value : value) -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ 
-static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -static PyObject *__pyx_m = NULL; -static PyObject *__pyx_d; -static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; -static PyObject *__pyx_empty_tuple; -static PyObject *__pyx_empty_bytes; -static PyObject *__pyx_empty_unicode; -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm= __FILE__; -static const char *__pyx_filename; - - -static const char *__pyx_f[] = { - "msgpack/_packer.pyx", - "msgpack/_unpacker.pyx", - "stringsource", - "msgpack/_cmsgpack.pyx", - "type.pxd", - "bool.pxd", - "complex.pxd", -}; - -/* "msgpack/_unpacker.pyx":13 - * from libc.string cimport * - * from libc.limits cimport * - * ctypedef unsigned long long uint64_t # <<<<<<<<<<<<<< - * - * from ddtrace.vendor.msgpack.exceptions import ( - */ -typedef unsigned PY_LONG_LONG __pyx_t_7msgpack_9_cmsgpack_uint64_t; - -/*--- Type declarations ---*/ -struct __pyx_obj_7msgpack_9_cmsgpack_Packer; -struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker; -struct __pyx_opt_args_7msgpack_9_cmsgpack_6Packer__pack; -struct __pyx_opt_args_7msgpack_9_cmsgpack_8Unpacker__unpack; - -/* "msgpack/_packer.pyx":148 - * self.pk.buf = NULL - * - * cdef int _pack(self, object o, int nest_limit=DEFAULT_RECURSE_LIMIT) except -1: # <<<<<<<<<<<<<< - * cdef long long llval - * cdef unsigned long long ullval - */ -struct __pyx_opt_args_7msgpack_9_cmsgpack_6Packer__pack { - int __pyx_n; - int nest_limit; -}; - -/* "msgpack/_unpacker.pyx":477 - * self.file_like = None - * - * cdef object _unpack(self, execute_fn execute, bint iter=0): # <<<<<<<<<<<<<< - * cdef int ret - * cdef object obj - */ -struct __pyx_opt_args_7msgpack_9_cmsgpack_8Unpacker__unpack { - int __pyx_n; - int iter; -}; - -/* "msgpack/_packer.pyx":54 - * - * - * cdef class Packer(object): # <<<<<<<<<<<<<< - * """ - * MessagePack Packer - */ -struct __pyx_obj_7msgpack_9_cmsgpack_Packer { - PyObject_HEAD - struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *__pyx_vtab; - struct msgpack_packer pk; - PyObject *_default; - PyObject *_bencoding; - PyObject *_berrors; - char const *encoding; - char const *unicode_errors; - int strict_types; - PyBoolObject *use_float; - int autoreset; -}; - - -/* "msgpack/_unpacker.pyx":229 - * - * - * cdef class Unpacker(object): # <<<<<<<<<<<<<< - * """Streaming unpacker. 
- * - */ -struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker { - PyObject_HEAD - struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *__pyx_vtab; - unpack_context ctx; - char *buf; - Py_ssize_t buf_size; - Py_ssize_t buf_head; - Py_ssize_t buf_tail; - PyObject *file_like; - PyObject *file_like_read; - Py_ssize_t read_size; - PyObject *object_hook; - PyObject *object_pairs_hook; - PyObject *list_hook; - PyObject *ext_hook; - PyObject *encoding; - PyObject *unicode_errors; - Py_ssize_t max_buffer_size; - __pyx_t_7msgpack_9_cmsgpack_uint64_t stream_offset; -}; - - - -/* "msgpack/_packer.pyx":54 - * - * - * cdef class Packer(object): # <<<<<<<<<<<<<< - * """ - * MessagePack Packer - */ - -struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer { - int (*_pack)(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *, PyObject *, struct __pyx_opt_args_7msgpack_9_cmsgpack_6Packer__pack *__pyx_optional_args); - PyObject *(*pack)(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *, PyObject *, int __pyx_skip_dispatch); -}; -static struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *__pyx_vtabptr_7msgpack_9_cmsgpack_Packer; - - -/* "msgpack/_unpacker.pyx":229 - * - * - * cdef class Unpacker(object): # <<<<<<<<<<<<<< - * """Streaming unpacker. - * - */ - -struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker { - PyObject *(*append_buffer)(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *, void *, Py_ssize_t); - PyObject *(*read_from_file)(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *); - PyObject *(*_unpack)(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *, execute_fn, struct __pyx_opt_args_7msgpack_9_cmsgpack_8Unpacker__unpack *__pyx_optional_args); -}; -static struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *__pyx_vtabptr_7msgpack_9_cmsgpack_Unpacker; - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, int); - void (*DECREF)(void*, PyObject*, int); - void (*GOTREF)(void*, PyObject*, int); - void (*GIVEREF)(void*, PyObject*, int); - void* (*SetupContext)(const char*, int, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) -#endif - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) - 
#define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* KeywordStringCheck.proto */ -static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) 
-#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ - const char* function_name); - -/* ExtTypeTest.proto */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); - -/* GetTopmostException.proto */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); -#endif - -/* SaveResetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -#else -#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) -#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) -#endif - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* GetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); -#endif - -/* PyCFunctionFastCall.proto */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); -#else -#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); -#else -#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + 
Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif - -/* PyObjectCall2Args.proto */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* SwapException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); -#endif - -/* IterFinish.proto */ -static CYTHON_INLINE int __Pyx_IterFinish(void); - -/* PyObjectCallNoArg.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); -#else -#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) -#endif - -/* PyObjectGetMethod.proto */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); - -/* PyObjectCallMethod0.proto */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); - -/* RaiseNeedMoreValuesToUnpack.proto */ -static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); - -/* RaiseTooManyValuesToUnpack.proto */ -static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); - -/* UnpackItemEndCheck.proto */ -static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); - -/* RaiseNoneIterError.proto */ -static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); - -/* UnpackTupleError.proto */ -static void __Pyx_UnpackTupleError(PyObject *, Py_ssize_t index); - -/* UnpackTuple2.proto */ -#define __Pyx_unpack_tuple2(tuple, value1, value2, is_tuple, has_known_size, decref_tuple)\ - (likely(is_tuple || PyTuple_Check(tuple)) ?\ - (likely(has_known_size || PyTuple_GET_SIZE(tuple) == 2) ?\ - __Pyx_unpack_tuple2_exact(tuple, value1, value2, decref_tuple) :\ - (__Pyx_UnpackTupleError(tuple, 2), -1)) :\ - __Pyx_unpack_tuple2_generic(tuple, value1, value2, has_known_size, decref_tuple)) -static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( - PyObject* tuple, PyObject** value1, PyObject** value2, int decref_tuple); -static int __Pyx_unpack_tuple2_generic( - PyObject* tuple, PyObject** value1, PyObject** value2, int has_known_size, int decref_tuple); - -/* dict_iter.proto */ -static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict, PyObject* method_name, - Py_ssize_t* p_orig_length, int* p_is_dict); -static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* dict_or_iter, Py_ssize_t orig_length, Py_ssize_t* ppos, - PyObject** pkey, PyObject** pvalue, PyObject** pitem, int is_dict); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) 
-#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* ReRaiseException.proto */ -static CYTHON_INLINE void __Pyx_ReraiseException(void); - -/* None.proto */ -static CYTHON_INLINE Py_ssize_t __Pyx_div_Py_ssize_t(Py_ssize_t, Py_ssize_t); - -/* BuildPyUnicode.proto */ -static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, int clength, - int prepend_sign, char padding_char); - -/* CIntToPyUnicode.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_From_int(int value, Py_ssize_t width, char padding_char, char format_char); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* SetVTable.proto */ -static int __Pyx_SetVtable(PyObject *dict, void *vtable); - -/* SetupReduce.proto */ -static int __Pyx_setup_reduce(PyObject* type_obj); - -/* TypeImport.proto */ -#ifndef __PYX_HAVE_RT_ImportType_proto -#define __PYX_HAVE_RT_ImportType_proto -enum __Pyx_ImportType_CheckSize { - 
__Pyx_ImportType_CheckSize_Error = 0, - __Pyx_ImportType_CheckSize_Warn = 1, - __Pyx_ImportType_CheckSize_Ignore = 2 -}; -static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); -#endif - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_As_PY_LONG_LONG(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_As_unsigned_PY_LONG_LONG(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE char __Pyx_PyInt_As_char(PyObject *); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static int __Pyx_check_binary_version(void); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - -static int __pyx_f_7msgpack_9_cmsgpack_6Packer__pack(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_o, struct __pyx_opt_args_7msgpack_9_cmsgpack_6Packer__pack *__pyx_optional_args); /* proto*/ -static PyObject *__pyx_f_7msgpack_9_cmsgpack_6Packer_pack(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_obj, int __pyx_skip_dispatch); /* proto*/ -static PyObject 
*__pyx_f_7msgpack_9_cmsgpack_8Unpacker_append_buffer(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, void *__pyx_v__buf, Py_ssize_t __pyx_v__buf_len); /* proto*/ -static PyObject *__pyx_f_7msgpack_9_cmsgpack_8Unpacker_read_from_file(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto*/ -static PyObject *__pyx_f_7msgpack_9_cmsgpack_8Unpacker__unpack(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, execute_fn __pyx_v_execute, struct __pyx_opt_args_7msgpack_9_cmsgpack_8Unpacker__unpack *__pyx_optional_args); /* proto*/ - -/* Module declarations from 'cpython.version' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.type' */ -static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; - -/* Module declarations from 'libc.string' */ - -/* Module declarations from 'libc.stdio' */ - -/* Module declarations from 'cpython.object' */ - -/* Module declarations from 'cpython.ref' */ - -/* Module declarations from 'cpython.exc' */ - -/* Module declarations from 'cpython.module' */ - -/* Module declarations from 'cpython.mem' */ - -/* Module declarations from 'cpython.tuple' */ - -/* Module declarations from 'cpython.list' */ - -/* Module declarations from 'cpython.sequence' */ - -/* Module declarations from 'cpython.mapping' */ - -/* Module declarations from 'cpython.iterator' */ - -/* Module declarations from 'cpython.number' */ - -/* Module declarations from 'cpython.int' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.bool' */ -static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; - -/* Module declarations from 'cpython.long' */ - -/* Module declarations from 'cpython.float' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.complex' */ -static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; - -/* Module declarations from 'cpython.string' */ - -/* Module declarations from 'cpython.unicode' */ - -/* Module declarations from 'cpython.dict' */ - -/* Module declarations from 'cpython.instance' */ - -/* Module declarations from 'cpython.function' */ - -/* Module declarations from 'cpython.method' */ - -/* Module declarations from 'cpython.weakref' */ - -/* Module declarations from 'cpython.getargs' */ - -/* Module declarations from 'cpython.pythread' */ - -/* Module declarations from 'cpython.pystate' */ - -/* Module declarations from 'cpython.cobject' */ - -/* Module declarations from 'cpython.oldbuffer' */ - -/* Module declarations from 'cpython.set' */ - -/* Module declarations from 'cpython.buffer' */ - -/* Module declarations from 'cpython.bytes' */ - -/* Module declarations from 'cpython.pycapsule' */ - -/* Module declarations from 'cpython' */ - -/* Module declarations from 'cpython.bytearray' */ - -/* Module declarations from 'libc.stdlib' */ - -/* Module declarations from 'libc.limits' */ - -/* Module declarations from 'msgpack._cmsgpack' */ -static PyTypeObject *__pyx_ptype_7msgpack_9_cmsgpack_Packer = 0; -static PyTypeObject *__pyx_ptype_7msgpack_9_cmsgpack_Unpacker = 0; -static int __pyx_v_7msgpack_9_cmsgpack_DEFAULT_RECURSE_LIMIT; -static PY_LONG_LONG __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT; -static CYTHON_INLINE int __pyx_f_7msgpack_9_cmsgpack_PyBytesLike_Check(PyObject *); /*proto*/ -static CYTHON_INLINE int __pyx_f_7msgpack_9_cmsgpack_PyBytesLike_CheckExact(PyObject *); /*proto*/ -static CYTHON_INLINE PyObject *__pyx_f_7msgpack_9_cmsgpack_init_ctx(unpack_context *, PyObject *, PyObject *, PyObject *, PyObject *, 
int, int, int, char const *, char const *, Py_ssize_t, Py_ssize_t, Py_ssize_t, Py_ssize_t, Py_ssize_t); /*proto*/ -static CYTHON_INLINE int __pyx_f_7msgpack_9_cmsgpack_get_data_from_buffer(PyObject *, Py_buffer *, char **, Py_ssize_t *, int *); /*proto*/ -#define __Pyx_MODULE_NAME "msgpack._cmsgpack" -extern int __pyx_module_is_main_msgpack___cmsgpack; -int __pyx_module_is_main_msgpack___cmsgpack = 0; - -/* Implementation of 'msgpack._cmsgpack' */ -static PyObject *__pyx_builtin_MemoryError; -static PyObject *__pyx_builtin_DeprecationWarning; -static PyObject *__pyx_builtin_TypeError; -static PyObject *__pyx_builtin_ValueError; -static PyObject *__pyx_builtin_OverflowError; -static PyObject *__pyx_builtin_RuntimeError; -static PyObject *__pyx_builtin_NotImplementedError; -static PyObject *__pyx_builtin_BufferError; -static PyObject *__pyx_builtin_RuntimeWarning; -static PyObject *__pyx_builtin_AssertionError; -static PyObject *__pyx_builtin_StopIteration; -static const char __pyx_k_d[] = "d"; -static const char __pyx_k_buf[] = "buf"; -static const char __pyx_k_ctx[] = "ctx"; -static const char __pyx_k_obj[] = "obj"; -static const char __pyx_k_off[] = "off"; -static const char __pyx_k_raw[] = "raw"; -static const char __pyx_k_ret[] = "ret"; -static const char __pyx_k_cenc[] = "cenc"; -static const char __pyx_k_cerr[] = "cerr"; -static const char __pyx_k_code[] = "code"; -static const char __pyx_k_data[] = "data"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_pack[] = "pack"; -static const char __pyx_k_read[] = "read"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_view[] = "view"; -static const char __pyx_k_items[] = "items"; -static const char __pyx_k_Packer[] = "Packer"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_kwargs[] = "kwargs"; -static const char __pyx_k_packed[] = "packed"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_stream[] = "stream"; -static const char __pyx_k_unpack[] = "unpack"; -static const char __pyx_k_ExtType[] = "ExtType"; -static const char __pyx_k_buf_len[] = "buf_len"; -static const char __pyx_k_default[] = "default"; -static const char __pyx_k_unpackb[] = "unpackb"; -static const char __pyx_k_Unpacker[] = "Unpacker"; -static const char __pyx_k_encoding[] = "encoding"; -static const char __pyx_k_ext_hook[] = "ext_hook"; -static const char __pyx_k_getstate[] = "__getstate__"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_typecode[] = "typecode"; -static const char __pyx_k_use_list[] = "use_list"; -static const char __pyx_k_ExtraData[] = "ExtraData"; -static const char __pyx_k_OutOfData[] = "OutOfData"; -static const char __pyx_k_TypeError[] = "TypeError"; -static const char __pyx_k_autoreset[] = "autoreset"; -static const char __pyx_k_file_like[] = "file_like"; -static const char __pyx_k_list_hook[] = "list_hook"; -static const char __pyx_k_read_size[] = "read_size"; -static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; -static const char __pyx_k_BufferFull[] = "BufferFull"; -static const char __pyx_k_StackError[] = "StackError"; -static const char __pyx_k_ValueError[] = "ValueError"; -static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; -static const char __pyx_k_BufferError[] = "BufferError"; -static const char __pyx_k_FormatError[] = "FormatError"; -static const char __pyx_k_MemoryError[] = "MemoryError"; -static const char __pyx_k_max_bin_len[] = 
"max_bin_len"; -static const char __pyx_k_max_ext_len[] = "max_ext_len"; -static const char __pyx_k_max_map_len[] = "max_map_len"; -static const char __pyx_k_max_str_len[] = "max_str_len"; -static const char __pyx_k_object_hook[] = "object_hook"; -static const char __pyx_k_RuntimeError[] = "RuntimeError"; -static const char __pyx_k_new_protocol[] = "new_protocol"; -static const char __pyx_k_strict_types[] = "strict_types"; -static const char __pyx_k_use_bin_type[] = "use_bin_type"; -static const char __pyx_k_OverflowError[] = "OverflowError"; -static const char __pyx_k_StopIteration[] = "StopIteration"; -static const char __pyx_k_max_array_len[] = "max_array_len"; -static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; -static const char __pyx_k_AssertionError[] = "AssertionError"; -static const char __pyx_k_RuntimeWarning[] = "RuntimeWarning"; -static const char __pyx_k_internal_error[] = "internal error"; -static const char __pyx_k_strict_map_key[] = "strict_map_key"; -static const char __pyx_k_unicode_errors[] = "unicode_errors"; -static const char __pyx_k_max_buffer_size[] = "max_buffer_size"; -static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; -static const char __pyx_k_use_single_float[] = "use_single_float"; -static const char __pyx_k_dict_is_too_large[] = "dict is too large"; -static const char __pyx_k_list_is_too_large[] = "list is too large"; -static const char __pyx_k_msgpack__cmsgpack[] = "msgpack._cmsgpack"; -static const char __pyx_k_object_pairs_hook[] = "object_pairs_hook"; -static const char __pyx_k_DeprecationWarning[] = "DeprecationWarning"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_NotImplementedError[] = "NotImplementedError"; -static const char __pyx_k_Unpack_failed_error[] = "Unpack failed: error = "; -static const char __pyx_k_EXT_data_is_too_large[] = "EXT data is too large"; -static const char __pyx_k_msgpack__unpacker_pyx[] = "msgpack/_unpacker.pyx"; -static const char __pyx_k_No_more_data_to_unpack[] = "No more data to unpack."; -static const char __pyx_k_ddtrace_vendor_msgpack[] = "ddtrace.vendor.msgpack"; -static const char __pyx_k_memoryview_is_too_large[] = "memoryview is too large"; -static const char __pyx_k_could_not_get_memoryview[] = "could not get memoryview"; -static const char __pyx_k_recursion_limit_exceeded[] = "recursion limit exceeded."; -static const char __pyx_k_Integer_value_out_of_range[] = "Integer value out of range"; -static const char __pyx_k_default_must_be_a_callable[] = "default must be a callable."; -static const char __pyx_k_default_read_extended_type[] = "default_read_extended_type"; -static const char __pyx_k_ext_hook_must_be_a_callable[] = "ext_hook must be a callable."; -static const char __pyx_k_unicode_string_is_too_large[] = "unicode string is too large"; -static const char __pyx_k_list_hook_must_be_a_callable[] = "list_hook must be a callable."; -static const char __pyx_k_Unpack_failed_incomplete_input[] = "Unpack failed: incomplete input"; -static const char __pyx_k_object_hook_must_be_a_callable[] = "object_hook must be a callable."; -static const char __pyx_k_file_like_read_must_be_a_callab[] = "`file_like.read` must be a callable."; -static const char __pyx_k_unpacker_feed_is_not_be_able_to[] = "unpacker.feed() is not be able to use with `file_like`."; -static const char __pyx_k_Cannot_decode_extended_type_with[] = "Cannot decode extended type with typecode=%d"; -static const char __pyx_k_Unable_to_allocate_internal_buff[] = "Unable to 
allocate internal buffer."; -static const char __pyx_k_Unable_to_enlarge_internal_buffe[] = "Unable to enlarge internal buffer."; -static const char __pyx_k_cannot_unpack_from_multi_byte_ob[] = "cannot unpack from multi-byte object"; -static const char __pyx_k_could_not_get_buffer_for_memoryv[] = "could not get buffer for memoryview"; -static const char __pyx_k_ddtrace_vendor_msgpack_exception[] = "ddtrace.vendor.msgpack.exceptions"; -static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to non-trivial __cinit__"; -static const char __pyx_k_object_pairs_hook_and_object_hoo[] = "object_pairs_hook and object_hook are mutually exclusive."; -static const char __pyx_k_object_pairs_hook_must_be_a_call[] = "object_pairs_hook must be a callable."; -static const char __pyx_k_read_size_should_be_less_or_equa[] = "read_size should be less or equal to max_buffer_size"; -static const char __pyx_k_using_old_buffer_interface_to_un[] = "using old buffer interface to unpack %s; this leads to unpacking errors if slicing is used and will be removed in a future version"; -static PyObject *__pyx_n_s_AssertionError; -static PyObject *__pyx_n_s_BufferError; -static PyObject *__pyx_n_s_BufferFull; -static PyObject *__pyx_kp_u_Cannot_decode_extended_type_with; -static PyObject *__pyx_n_s_DeprecationWarning; -static PyObject *__pyx_kp_u_EXT_data_is_too_large; -static PyObject *__pyx_n_s_ExtType; -static PyObject *__pyx_n_s_ExtraData; -static PyObject *__pyx_n_s_FormatError; -static PyObject *__pyx_kp_u_Integer_value_out_of_range; -static PyObject *__pyx_n_s_MemoryError; -static PyObject *__pyx_kp_u_No_more_data_to_unpack; -static PyObject *__pyx_n_s_NotImplementedError; -static PyObject *__pyx_n_s_OutOfData; -static PyObject *__pyx_n_s_OverflowError; -static PyObject *__pyx_n_s_Packer; -static PyObject *__pyx_n_s_RuntimeError; -static PyObject *__pyx_n_s_RuntimeWarning; -static PyObject *__pyx_n_s_StackError; -static PyObject *__pyx_n_s_StopIteration; -static PyObject *__pyx_n_s_TypeError; -static PyObject *__pyx_kp_u_Unable_to_allocate_internal_buff; -static PyObject *__pyx_kp_u_Unable_to_enlarge_internal_buffe; -static PyObject *__pyx_kp_u_Unpack_failed_error; -static PyObject *__pyx_kp_u_Unpack_failed_incomplete_input; -static PyObject *__pyx_n_s_Unpacker; -static PyObject *__pyx_n_s_ValueError; -static PyObject *__pyx_n_s_autoreset; -static PyObject *__pyx_n_s_buf; -static PyObject *__pyx_n_s_buf_len; -static PyObject *__pyx_kp_u_cannot_unpack_from_multi_byte_ob; -static PyObject *__pyx_n_s_cenc; -static PyObject *__pyx_n_s_cerr; -static PyObject *__pyx_n_s_cline_in_traceback; -static PyObject *__pyx_n_s_code; -static PyObject *__pyx_kp_u_could_not_get_buffer_for_memoryv; -static PyObject *__pyx_kp_u_could_not_get_memoryview; -static PyObject *__pyx_n_s_ctx; -static PyObject *__pyx_n_u_d; -static PyObject *__pyx_n_s_data; -static PyObject *__pyx_n_s_ddtrace_vendor_msgpack; -static PyObject *__pyx_n_s_ddtrace_vendor_msgpack_exception; -static PyObject *__pyx_n_s_default; -static PyObject *__pyx_kp_u_default_must_be_a_callable; -static PyObject *__pyx_n_s_default_read_extended_type; -static PyObject *__pyx_kp_u_dict_is_too_large; -static PyObject *__pyx_n_s_encoding; -static PyObject *__pyx_n_s_ext_hook; -static PyObject *__pyx_kp_u_ext_hook_must_be_a_callable; -static PyObject *__pyx_n_s_file_like; -static PyObject *__pyx_kp_u_file_like_read_must_be_a_callab; -static PyObject *__pyx_n_s_getstate; -static PyObject *__pyx_n_s_import; -static PyObject *__pyx_kp_u_internal_error; 
-static PyObject *__pyx_n_s_items; -static PyObject *__pyx_n_s_kwargs; -static PyObject *__pyx_n_s_list_hook; -static PyObject *__pyx_kp_u_list_hook_must_be_a_callable; -static PyObject *__pyx_kp_u_list_is_too_large; -static PyObject *__pyx_n_s_main; -static PyObject *__pyx_n_s_max_array_len; -static PyObject *__pyx_n_s_max_bin_len; -static PyObject *__pyx_n_s_max_buffer_size; -static PyObject *__pyx_n_s_max_ext_len; -static PyObject *__pyx_n_s_max_map_len; -static PyObject *__pyx_n_s_max_str_len; -static PyObject *__pyx_kp_u_memoryview_is_too_large; -static PyObject *__pyx_n_s_msgpack__cmsgpack; -static PyObject *__pyx_kp_s_msgpack__unpacker_pyx; -static PyObject *__pyx_n_s_name; -static PyObject *__pyx_n_s_new_protocol; -static PyObject *__pyx_kp_s_no_default___reduce___due_to_non; -static PyObject *__pyx_n_s_obj; -static PyObject *__pyx_n_s_object_hook; -static PyObject *__pyx_kp_u_object_hook_must_be_a_callable; -static PyObject *__pyx_n_s_object_pairs_hook; -static PyObject *__pyx_kp_u_object_pairs_hook_and_object_hoo; -static PyObject *__pyx_kp_u_object_pairs_hook_must_be_a_call; -static PyObject *__pyx_n_s_off; -static PyObject *__pyx_n_s_pack; -static PyObject *__pyx_n_s_packed; -static PyObject *__pyx_n_s_pyx_vtable; -static PyObject *__pyx_n_s_raw; -static PyObject *__pyx_n_s_read; -static PyObject *__pyx_n_s_read_size; -static PyObject *__pyx_kp_u_read_size_should_be_less_or_equa; -static PyObject *__pyx_kp_u_recursion_limit_exceeded; -static PyObject *__pyx_n_s_reduce; -static PyObject *__pyx_n_s_reduce_cython; -static PyObject *__pyx_n_s_reduce_ex; -static PyObject *__pyx_n_s_ret; -static PyObject *__pyx_n_s_setstate; -static PyObject *__pyx_n_s_setstate_cython; -static PyObject *__pyx_n_s_stream; -static PyObject *__pyx_n_s_strict_map_key; -static PyObject *__pyx_n_s_strict_types; -static PyObject *__pyx_n_s_test; -static PyObject *__pyx_n_s_typecode; -static PyObject *__pyx_n_s_unicode_errors; -static PyObject *__pyx_kp_u_unicode_string_is_too_large; -static PyObject *__pyx_n_s_unpack; -static PyObject *__pyx_n_s_unpackb; -static PyObject *__pyx_kp_u_unpacker_feed_is_not_be_able_to; -static PyObject *__pyx_n_s_use_bin_type; -static PyObject *__pyx_n_s_use_list; -static PyObject *__pyx_n_s_use_single_float; -static PyObject *__pyx_kp_u_using_old_buffer_interface_to_un; -static PyObject *__pyx_n_s_view; -static int __pyx_pf_7msgpack_9_cmsgpack_6Packer___cinit__(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self); /* proto */ -static int __pyx_pf_7msgpack_9_cmsgpack_6Packer_2__init__(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_default, PyObject *__pyx_v_encoding, PyObject *__pyx_v_unicode_errors, int __pyx_v_use_single_float, int __pyx_v_autoreset, int __pyx_v_use_bin_type, int __pyx_v_strict_types); /* proto */ -static void __pyx_pf_7msgpack_9_cmsgpack_6Packer_4__dealloc__(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_6pack(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_obj); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_8pack_ext_type(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_typecode, PyObject *__pyx_v_data); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_10pack_array_header(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PY_LONG_LONG __pyx_v_size); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_12pack_map_header(struct 
__pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PY_LONG_LONG __pyx_v_size); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_14pack_map_pairs(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_pairs); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_16reset(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_18bytes(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_20getbuffer(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_22__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_24__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_default_read_extended_type(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_typecode, CYTHON_UNUSED PyObject *__pyx_v_data); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_2unpackb(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_packed, PyObject *__pyx_v_object_hook, PyObject *__pyx_v_list_hook, int __pyx_v_use_list, int __pyx_v_raw, int __pyx_v_strict_map_key, PyObject *__pyx_v_encoding, PyObject *__pyx_v_unicode_errors, PyObject *__pyx_v_object_pairs_hook, PyObject *__pyx_v_ext_hook, Py_ssize_t __pyx_v_max_str_len, Py_ssize_t __pyx_v_max_bin_len, Py_ssize_t __pyx_v_max_array_len, Py_ssize_t __pyx_v_max_map_len, Py_ssize_t __pyx_v_max_ext_len); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_4unpack(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_stream, PyObject *__pyx_v_kwargs); /* proto */ -static int __pyx_pf_7msgpack_9_cmsgpack_8Unpacker___cinit__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static void __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_2__dealloc__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static int __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_4__init__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, PyObject *__pyx_v_file_like, Py_ssize_t __pyx_v_read_size, int __pyx_v_use_list, int __pyx_v_raw, int __pyx_v_strict_map_key, PyObject *__pyx_v_object_hook, PyObject *__pyx_v_object_pairs_hook, PyObject *__pyx_v_list_hook, PyObject *__pyx_v_encoding, PyObject *__pyx_v_unicode_errors, Py_ssize_t __pyx_v_max_buffer_size, PyObject *__pyx_v_ext_hook, Py_ssize_t __pyx_v_max_str_len, Py_ssize_t __pyx_v_max_bin_len, Py_ssize_t __pyx_v_max_array_len, Py_ssize_t __pyx_v_max_map_len, Py_ssize_t __pyx_v_max_ext_len); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_6feed(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, PyObject *__pyx_v_next_bytes); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_8read_bytes(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, Py_ssize_t __pyx_v_nbytes); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_10unpack(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_12skip(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_14read_array_header(struct 
__pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_16read_map_header(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_18tell(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_20__iter__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_22__next__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_24__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_26__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_tp_new_7msgpack_9_cmsgpack_Packer(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static PyObject *__pyx_tp_new_7msgpack_9_cmsgpack_Unpacker(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static PyObject *__pyx_int_0; -static int __pyx_k__3; -static PyObject *__pyx_k__22; -static PyObject *__pyx_k__24; -static PyObject *__pyx_tuple_; -static PyObject *__pyx_tuple__2; -static PyObject *__pyx_tuple__4; -static PyObject *__pyx_tuple__5; -static PyObject *__pyx_tuple__6; -static PyObject *__pyx_tuple__7; -static PyObject *__pyx_tuple__8; -static PyObject *__pyx_tuple__9; -static PyObject *__pyx_tuple__10; -static PyObject *__pyx_tuple__11; -static PyObject *__pyx_tuple__12; -static PyObject *__pyx_tuple__13; -static PyObject *__pyx_tuple__14; -static PyObject *__pyx_tuple__15; -static PyObject *__pyx_tuple__16; -static PyObject *__pyx_tuple__17; -static PyObject *__pyx_tuple__18; -static PyObject *__pyx_tuple__19; -static PyObject *__pyx_tuple__20; -static PyObject *__pyx_tuple__21; -static PyObject *__pyx_tuple__23; -static PyObject *__pyx_tuple__25; -static PyObject *__pyx_tuple__26; -static PyObject *__pyx_tuple__27; -static PyObject *__pyx_tuple__28; -static PyObject *__pyx_tuple__29; -static PyObject *__pyx_tuple__30; -static PyObject *__pyx_tuple__31; -static PyObject *__pyx_tuple__32; -static PyObject *__pyx_tuple__34; -static PyObject *__pyx_tuple__36; -static PyObject *__pyx_codeobj__33; -static PyObject *__pyx_codeobj__35; -static PyObject *__pyx_codeobj__37; -/* Late includes */ - -/* "msgpack/_packer.pyx":46 - * - * - * cdef inline int PyBytesLike_Check(object o): # <<<<<<<<<<<<<< - * return PyBytes_Check(o) or PyByteArray_Check(o) - * - */ - -static CYTHON_INLINE int __pyx_f_7msgpack_9_cmsgpack_PyBytesLike_Check(PyObject *__pyx_v_o) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - __Pyx_RefNannySetupContext("PyBytesLike_Check", 0); - - /* "msgpack/_packer.pyx":47 - * - * cdef inline int PyBytesLike_Check(object o): - * return PyBytes_Check(o) or PyByteArray_Check(o) # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_2 = PyBytes_Check(__pyx_v_o); - if (!__pyx_t_2) { - } else { - __pyx_t_1 = __pyx_t_2; - goto __pyx_L3_bool_binop_done; - } - __pyx_t_2 = PyByteArray_Check(__pyx_v_o); - __pyx_t_1 = __pyx_t_2; - __pyx_L3_bool_binop_done:; - __pyx_r = __pyx_t_1; - goto __pyx_L0; - - /* "msgpack/_packer.pyx":46 - * - * - * cdef inline int PyBytesLike_Check(object o): # <<<<<<<<<<<<<< - * return PyBytes_Check(o) or 
PyByteArray_Check(o) - * - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":50 - * - * - * cdef inline int PyBytesLike_CheckExact(object o): # <<<<<<<<<<<<<< - * return PyBytes_CheckExact(o) or PyByteArray_CheckExact(o) - * - */ - -static CYTHON_INLINE int __pyx_f_7msgpack_9_cmsgpack_PyBytesLike_CheckExact(PyObject *__pyx_v_o) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - __Pyx_RefNannySetupContext("PyBytesLike_CheckExact", 0); - - /* "msgpack/_packer.pyx":51 - * - * cdef inline int PyBytesLike_CheckExact(object o): - * return PyBytes_CheckExact(o) or PyByteArray_CheckExact(o) # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_2 = PyBytes_CheckExact(__pyx_v_o); - if (!__pyx_t_2) { - } else { - __pyx_t_1 = __pyx_t_2; - goto __pyx_L3_bool_binop_done; - } - __pyx_t_2 = PyByteArray_CheckExact(__pyx_v_o); - __pyx_t_1 = __pyx_t_2; - __pyx_L3_bool_binop_done:; - __pyx_r = __pyx_t_1; - goto __pyx_L0; - - /* "msgpack/_packer.pyx":50 - * - * - * cdef inline int PyBytesLike_CheckExact(object o): # <<<<<<<<<<<<<< - * return PyBytes_CheckExact(o) or PyByteArray_CheckExact(o) - * - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":107 - * cdef bint autoreset - * - * def __cinit__(self): # <<<<<<<<<<<<<< - * cdef int buf_size = 1024*1024 - * self.pk.buf = PyMem_Malloc(buf_size) - */ - -/* Python wrapper */ -static int __pyx_pw_7msgpack_9_cmsgpack_6Packer_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7msgpack_9_cmsgpack_6Packer_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); - if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { - __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} - if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer___cinit__(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7msgpack_9_cmsgpack_6Packer___cinit__(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self) { - int __pyx_v_buf_size; - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - __Pyx_RefNannySetupContext("__cinit__", 0); - - /* "msgpack/_packer.pyx":108 - * - * def __cinit__(self): - * cdef int buf_size = 1024*1024 # <<<<<<<<<<<<<< - * self.pk.buf = PyMem_Malloc(buf_size) - * if self.pk.buf == NULL: - */ - __pyx_v_buf_size = 0x100000; - - /* "msgpack/_packer.pyx":109 - * def __cinit__(self): - * cdef int buf_size = 1024*1024 - * self.pk.buf = PyMem_Malloc(buf_size) # <<<<<<<<<<<<<< - * if self.pk.buf == NULL: - * raise MemoryError("Unable to allocate internal buffer.") - */ - __pyx_v_self->pk.buf = ((char *)PyMem_Malloc(__pyx_v_buf_size)); - - /* "msgpack/_packer.pyx":110 - * cdef int buf_size = 1024*1024 - * self.pk.buf = PyMem_Malloc(buf_size) - * if self.pk.buf == NULL: # <<<<<<<<<<<<<< - * raise MemoryError("Unable to allocate internal buffer.") - * self.pk.buf_size = buf_size - */ - __pyx_t_1 = ((__pyx_v_self->pk.buf == NULL) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_packer.pyx":111 - * 
self.pk.buf = PyMem_Malloc(buf_size) - * if self.pk.buf == NULL: - * raise MemoryError("Unable to allocate internal buffer.") # <<<<<<<<<<<<<< - * self.pk.buf_size = buf_size - * self.pk.length = 0 - */ - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_MemoryError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 111, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(0, 111, __pyx_L1_error) - - /* "msgpack/_packer.pyx":110 - * cdef int buf_size = 1024*1024 - * self.pk.buf = PyMem_Malloc(buf_size) - * if self.pk.buf == NULL: # <<<<<<<<<<<<<< - * raise MemoryError("Unable to allocate internal buffer.") - * self.pk.buf_size = buf_size - */ - } - - /* "msgpack/_packer.pyx":112 - * if self.pk.buf == NULL: - * raise MemoryError("Unable to allocate internal buffer.") - * self.pk.buf_size = buf_size # <<<<<<<<<<<<<< - * self.pk.length = 0 - * - */ - __pyx_v_self->pk.buf_size = __pyx_v_buf_size; - - /* "msgpack/_packer.pyx":113 - * raise MemoryError("Unable to allocate internal buffer.") - * self.pk.buf_size = buf_size - * self.pk.length = 0 # <<<<<<<<<<<<<< - * - * def __init__(self, default=None, encoding=None, unicode_errors=None, - */ - __pyx_v_self->pk.length = 0; - - /* "msgpack/_packer.pyx":107 - * cdef bint autoreset - * - * def __cinit__(self): # <<<<<<<<<<<<<< - * cdef int buf_size = 1024*1024 - * self.pk.buf = PyMem_Malloc(buf_size) - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":115 - * self.pk.length = 0 - * - * def __init__(self, default=None, encoding=None, unicode_errors=None, # <<<<<<<<<<<<<< - * bint use_single_float=False, bint autoreset=True, bint use_bin_type=False, - * bint strict_types=False): - */ - -/* Python wrapper */ -static int __pyx_pw_7msgpack_9_cmsgpack_6Packer_3__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7msgpack_9_cmsgpack_6Packer_3__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_default = 0; - PyObject *__pyx_v_encoding = 0; - PyObject *__pyx_v_unicode_errors = 0; - int __pyx_v_use_single_float; - int __pyx_v_autoreset; - int __pyx_v_use_bin_type; - int __pyx_v_strict_types; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_default,&__pyx_n_s_encoding,&__pyx_n_s_unicode_errors,&__pyx_n_s_use_single_float,&__pyx_n_s_autoreset,&__pyx_n_s_use_bin_type,&__pyx_n_s_strict_types,0}; - PyObject* values[7] = {0,0,0,0,0,0,0}; - values[0] = ((PyObject *)Py_None); - values[1] = ((PyObject *)Py_None); - values[2] = ((PyObject *)Py_None); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = 
PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_default); - if (value) { values[0] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 1: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_encoding); - if (value) { values[1] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 2: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_unicode_errors); - if (value) { values[2] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 3: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_use_single_float); - if (value) { values[3] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 4: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_autoreset); - if (value) { values[4] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 5: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_use_bin_type); - if (value) { values[5] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 6: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_strict_types); - if (value) { values[6] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 115, __pyx_L3_error) - } - } else { - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_default = values[0]; - __pyx_v_encoding = values[1]; - __pyx_v_unicode_errors = values[2]; - if (values[3]) { - __pyx_v_use_single_float = __Pyx_PyObject_IsTrue(values[3]); if (unlikely((__pyx_v_use_single_float == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 116, __pyx_L3_error) - } else { - - /* "msgpack/_packer.pyx":116 - * - * def __init__(self, default=None, encoding=None, unicode_errors=None, - * bint use_single_float=False, bint autoreset=True, bint use_bin_type=False, # <<<<<<<<<<<<<< - * bint strict_types=False): - * if encoding is not None: - */ - __pyx_v_use_single_float = ((int)0); - } - if (values[4]) { - __pyx_v_autoreset = __Pyx_PyObject_IsTrue(values[4]); if (unlikely((__pyx_v_autoreset == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 116, __pyx_L3_error) - } else { - __pyx_v_autoreset = ((int)1); - } - if (values[5]) { - __pyx_v_use_bin_type = __Pyx_PyObject_IsTrue(values[5]); if (unlikely((__pyx_v_use_bin_type == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 116, __pyx_L3_error) - } else { - __pyx_v_use_bin_type = ((int)0); - } - if (values[6]) { - __pyx_v_strict_types = __Pyx_PyObject_IsTrue(values[6]); if (unlikely((__pyx_v_strict_types == (int)-1) 
&& PyErr_Occurred())) __PYX_ERR(0, 117, __pyx_L3_error) - } else { - - /* "msgpack/_packer.pyx":117 - * def __init__(self, default=None, encoding=None, unicode_errors=None, - * bint use_single_float=False, bint autoreset=True, bint use_bin_type=False, - * bint strict_types=False): # <<<<<<<<<<<<<< - * if encoding is not None: - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated.", 1) - */ - __pyx_v_strict_types = ((int)0); - } - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 7, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 115, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_2__init__(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self), __pyx_v_default, __pyx_v_encoding, __pyx_v_unicode_errors, __pyx_v_use_single_float, __pyx_v_autoreset, __pyx_v_use_bin_type, __pyx_v_strict_types); - - /* "msgpack/_packer.pyx":115 - * self.pk.length = 0 - * - * def __init__(self, default=None, encoding=None, unicode_errors=None, # <<<<<<<<<<<<<< - * bint use_single_float=False, bint autoreset=True, bint use_bin_type=False, - * bint strict_types=False): - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7msgpack_9_cmsgpack_6Packer_2__init__(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_default, PyObject *__pyx_v_encoding, PyObject *__pyx_v_unicode_errors, int __pyx_v_use_single_float, int __pyx_v_autoreset, int __pyx_v_use_bin_type, int __pyx_v_strict_types) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - char const *__pyx_t_5; - char const *__pyx_t_6; - __Pyx_RefNannySetupContext("__init__", 0); - - /* "msgpack/_packer.pyx":118 - * bint use_single_float=False, bint autoreset=True, bint use_bin_type=False, - * bint strict_types=False): - * if encoding is not None: # <<<<<<<<<<<<<< - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated.", 1) - * self.use_float = use_single_float - */ - __pyx_t_1 = (__pyx_v_encoding != Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "msgpack/_packer.pyx":119 - * bint strict_types=False): - * if encoding is not None: - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated.", 1) # <<<<<<<<<<<<<< - * self.use_float = use_single_float - * self.strict_types = strict_types - */ - __pyx_t_3 = PyErr_WarnEx(__pyx_builtin_DeprecationWarning, ((char *)"encoding is deprecated."), 1); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 119, __pyx_L1_error) - - /* "msgpack/_packer.pyx":118 - * bint use_single_float=False, bint autoreset=True, bint use_bin_type=False, - * bint strict_types=False): - * if encoding is not None: # <<<<<<<<<<<<<< - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated.", 1) - * self.use_float = use_single_float - */ - } - - /* "msgpack/_packer.pyx":120 - * if encoding is not None: - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated.", 1) - * self.use_float = use_single_float # <<<<<<<<<<<<<< - * self.strict_types = strict_types - * self.autoreset = autoreset - */ - __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_use_single_float); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 120, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - if 
(!(likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_7cpython_4bool_bool)))) __PYX_ERR(0, 120, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_4); - __Pyx_GOTREF(__pyx_v_self->use_float); - __Pyx_DECREF(((PyObject *)__pyx_v_self->use_float)); - __pyx_v_self->use_float = ((PyBoolObject *)__pyx_t_4); - __pyx_t_4 = 0; - - /* "msgpack/_packer.pyx":121 - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated.", 1) - * self.use_float = use_single_float - * self.strict_types = strict_types # <<<<<<<<<<<<<< - * self.autoreset = autoreset - * self.pk.use_bin_type = use_bin_type - */ - __pyx_v_self->strict_types = __pyx_v_strict_types; - - /* "msgpack/_packer.pyx":122 - * self.use_float = use_single_float - * self.strict_types = strict_types - * self.autoreset = autoreset # <<<<<<<<<<<<<< - * self.pk.use_bin_type = use_bin_type - * if default is not None: - */ - __pyx_v_self->autoreset = __pyx_v_autoreset; - - /* "msgpack/_packer.pyx":123 - * self.strict_types = strict_types - * self.autoreset = autoreset - * self.pk.use_bin_type = use_bin_type # <<<<<<<<<<<<<< - * if default is not None: - * if not PyCallable_Check(default): - */ - __pyx_v_self->pk.use_bin_type = __pyx_v_use_bin_type; - - /* "msgpack/_packer.pyx":124 - * self.autoreset = autoreset - * self.pk.use_bin_type = use_bin_type - * if default is not None: # <<<<<<<<<<<<<< - * if not PyCallable_Check(default): - * raise TypeError("default must be a callable.") - */ - __pyx_t_2 = (__pyx_v_default != Py_None); - __pyx_t_1 = (__pyx_t_2 != 0); - if (__pyx_t_1) { - - /* "msgpack/_packer.pyx":125 - * self.pk.use_bin_type = use_bin_type - * if default is not None: - * if not PyCallable_Check(default): # <<<<<<<<<<<<<< - * raise TypeError("default must be a callable.") - * self._default = default - */ - __pyx_t_1 = ((!(PyCallable_Check(__pyx_v_default) != 0)) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_packer.pyx":126 - * if default is not None: - * if not PyCallable_Check(default): - * raise TypeError("default must be a callable.") # <<<<<<<<<<<<<< - * self._default = default - * - */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 126, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(0, 126, __pyx_L1_error) - - /* "msgpack/_packer.pyx":125 - * self.pk.use_bin_type = use_bin_type - * if default is not None: - * if not PyCallable_Check(default): # <<<<<<<<<<<<<< - * raise TypeError("default must be a callable.") - * self._default = default - */ - } - - /* "msgpack/_packer.pyx":124 - * self.autoreset = autoreset - * self.pk.use_bin_type = use_bin_type - * if default is not None: # <<<<<<<<<<<<<< - * if not PyCallable_Check(default): - * raise TypeError("default must be a callable.") - */ - } - - /* "msgpack/_packer.pyx":127 - * if not PyCallable_Check(default): - * raise TypeError("default must be a callable.") - * self._default = default # <<<<<<<<<<<<<< - * - * self._bencoding = encoding - */ - __Pyx_INCREF(__pyx_v_default); - __Pyx_GIVEREF(__pyx_v_default); - __Pyx_GOTREF(__pyx_v_self->_default); - __Pyx_DECREF(__pyx_v_self->_default); - __pyx_v_self->_default = __pyx_v_default; - - /* "msgpack/_packer.pyx":129 - * self._default = default - * - * self._bencoding = encoding # <<<<<<<<<<<<<< - * if encoding is None: - * if PY_MAJOR_VERSION < 3: - */ - __Pyx_INCREF(__pyx_v_encoding); - __Pyx_GIVEREF(__pyx_v_encoding); - __Pyx_GOTREF(__pyx_v_self->_bencoding); - 
__Pyx_DECREF(__pyx_v_self->_bencoding); - __pyx_v_self->_bencoding = __pyx_v_encoding; - - /* "msgpack/_packer.pyx":130 - * - * self._bencoding = encoding - * if encoding is None: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * self.encoding = 'utf-8' - */ - __pyx_t_1 = (__pyx_v_encoding == Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "msgpack/_packer.pyx":131 - * self._bencoding = encoding - * if encoding is None: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * self.encoding = 'utf-8' - * else: - */ - __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0); - if (__pyx_t_2) { - - /* "msgpack/_packer.pyx":132 - * if encoding is None: - * if PY_MAJOR_VERSION < 3: - * self.encoding = 'utf-8' # <<<<<<<<<<<<<< - * else: - * self.encoding = NULL - */ - __pyx_v_self->encoding = ((char const *)"utf-8"); - - /* "msgpack/_packer.pyx":131 - * self._bencoding = encoding - * if encoding is None: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * self.encoding = 'utf-8' - * else: - */ - goto __pyx_L7; - } - - /* "msgpack/_packer.pyx":134 - * self.encoding = 'utf-8' - * else: - * self.encoding = NULL # <<<<<<<<<<<<<< - * else: - * self.encoding = self._bencoding - */ - /*else*/ { - __pyx_v_self->encoding = NULL; - } - __pyx_L7:; - - /* "msgpack/_packer.pyx":130 - * - * self._bencoding = encoding - * if encoding is None: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * self.encoding = 'utf-8' - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":136 - * self.encoding = NULL - * else: - * self.encoding = self._bencoding # <<<<<<<<<<<<<< - * - * self._berrors = unicode_errors - */ - /*else*/ { - __pyx_t_5 = __Pyx_PyObject_AsString(__pyx_v_self->_bencoding); if (unlikely((!__pyx_t_5) && PyErr_Occurred())) __PYX_ERR(0, 136, __pyx_L1_error) - __pyx_v_self->encoding = __pyx_t_5; - } - __pyx_L6:; - - /* "msgpack/_packer.pyx":138 - * self.encoding = self._bencoding - * - * self._berrors = unicode_errors # <<<<<<<<<<<<<< - * if unicode_errors is None: - * self.unicode_errors = NULL - */ - __Pyx_INCREF(__pyx_v_unicode_errors); - __Pyx_GIVEREF(__pyx_v_unicode_errors); - __Pyx_GOTREF(__pyx_v_self->_berrors); - __Pyx_DECREF(__pyx_v_self->_berrors); - __pyx_v_self->_berrors = __pyx_v_unicode_errors; - - /* "msgpack/_packer.pyx":139 - * - * self._berrors = unicode_errors - * if unicode_errors is None: # <<<<<<<<<<<<<< - * self.unicode_errors = NULL - * else: - */ - __pyx_t_2 = (__pyx_v_unicode_errors == Py_None); - __pyx_t_1 = (__pyx_t_2 != 0); - if (__pyx_t_1) { - - /* "msgpack/_packer.pyx":140 - * self._berrors = unicode_errors - * if unicode_errors is None: - * self.unicode_errors = NULL # <<<<<<<<<<<<<< - * else: - * self.unicode_errors = self._berrors - */ - __pyx_v_self->unicode_errors = NULL; - - /* "msgpack/_packer.pyx":139 - * - * self._berrors = unicode_errors - * if unicode_errors is None: # <<<<<<<<<<<<<< - * self.unicode_errors = NULL - * else: - */ - goto __pyx_L8; - } - - /* "msgpack/_packer.pyx":142 - * self.unicode_errors = NULL - * else: - * self.unicode_errors = self._berrors # <<<<<<<<<<<<<< - * - * def __dealloc__(self): - */ - /*else*/ { - __pyx_t_6 = __Pyx_PyObject_AsString(__pyx_v_self->_berrors); if (unlikely((!__pyx_t_6) && PyErr_Occurred())) __PYX_ERR(0, 142, __pyx_L1_error) - __pyx_v_self->unicode_errors = __pyx_t_6; - } - __pyx_L8:; - - /* "msgpack/_packer.pyx":115 - * self.pk.length = 0 - * - * def __init__(self, default=None, encoding=None, unicode_errors=None, # <<<<<<<<<<<<<< - * bint use_single_float=False, bint autoreset=True, bint use_bin_type=False, - * bint 
strict_types=False): - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":144 - * self.unicode_errors = self._berrors - * - * def __dealloc__(self): # <<<<<<<<<<<<<< - * PyMem_Free(self.pk.buf) - * self.pk.buf = NULL - */ - -/* Python wrapper */ -static void __pyx_pw_7msgpack_9_cmsgpack_6Packer_5__dealloc__(PyObject *__pyx_v_self); /*proto*/ -static void __pyx_pw_7msgpack_9_cmsgpack_6Packer_5__dealloc__(PyObject *__pyx_v_self) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); - __pyx_pf_7msgpack_9_cmsgpack_6Packer_4__dealloc__(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -static void __pyx_pf_7msgpack_9_cmsgpack_6Packer_4__dealloc__(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__dealloc__", 0); - - /* "msgpack/_packer.pyx":145 - * - * def __dealloc__(self): - * PyMem_Free(self.pk.buf) # <<<<<<<<<<<<<< - * self.pk.buf = NULL - * - */ - PyMem_Free(__pyx_v_self->pk.buf); - - /* "msgpack/_packer.pyx":146 - * def __dealloc__(self): - * PyMem_Free(self.pk.buf) - * self.pk.buf = NULL # <<<<<<<<<<<<<< - * - * cdef int _pack(self, object o, int nest_limit=DEFAULT_RECURSE_LIMIT) except -1: - */ - __pyx_v_self->pk.buf = NULL; - - /* "msgpack/_packer.pyx":144 - * self.unicode_errors = self._berrors - * - * def __dealloc__(self): # <<<<<<<<<<<<<< - * PyMem_Free(self.pk.buf) - * self.pk.buf = NULL - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -/* "msgpack/_packer.pyx":148 - * self.pk.buf = NULL - * - * cdef int _pack(self, object o, int nest_limit=DEFAULT_RECURSE_LIMIT) except -1: # <<<<<<<<<<<<<< - * cdef long long llval - * cdef unsigned long long ullval - */ - -static int __pyx_f_7msgpack_9_cmsgpack_6Packer__pack(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_o, struct __pyx_opt_args_7msgpack_9_cmsgpack_6Packer__pack *__pyx_optional_args) { - int __pyx_v_nest_limit = __pyx_k__3; - PY_LONG_LONG __pyx_v_llval; - unsigned PY_LONG_LONG __pyx_v_ullval; - long __pyx_v_longval; - float __pyx_v_fval; - double __pyx_v_dval; - char *__pyx_v_rawval; - int __pyx_v_ret; - PyObject *__pyx_v_d = 0; - Py_ssize_t __pyx_v_L; - int __pyx_v_default_used; - int __pyx_v_strict_types; - Py_buffer __pyx_v_view; - CYTHON_UNUSED PyObject *__pyx_v_oe = NULL; - PyObject *__pyx_v_k = NULL; - PyObject *__pyx_v_v = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - unsigned PY_LONG_LONG __pyx_t_7; - PY_LONG_LONG __pyx_t_8; - int __pyx_t_9; - PyObject *__pyx_t_10 = NULL; - PyObject *__pyx_t_11 = NULL; - int __pyx_t_12; - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - PyObject *__pyx_t_15 = NULL; - int __pyx_t_16; - char const *__pyx_t_17; - PyObject *__pyx_t_18 = NULL; - PyObject *__pyx_t_19 = NULL; - PyObject *__pyx_t_20 = NULL; - PyObject *__pyx_t_21 = NULL; - PyObject *__pyx_t_22 = NULL; - PyObject *__pyx_t_23 = NULL; - long __pyx_t_24; - float __pyx_t_25; - double __pyx_t_26; - Py_ssize_t __pyx_t_27; - PyObject *__pyx_t_28; - char 
*__pyx_t_29; - Py_ssize_t __pyx_t_30; - struct __pyx_opt_args_7msgpack_9_cmsgpack_6Packer__pack __pyx_t_31; - PyObject *(*__pyx_t_32)(PyObject *); - __Pyx_RefNannySetupContext("_pack", 0); - if (__pyx_optional_args) { - if (__pyx_optional_args->__pyx_n > 0) { - __pyx_v_nest_limit = __pyx_optional_args->nest_limit; - } - } - __Pyx_INCREF(__pyx_v_o); - - /* "msgpack/_packer.pyx":158 - * cdef dict d - * cdef Py_ssize_t L - * cdef int default_used = 0 # <<<<<<<<<<<<<< - * cdef bint strict_types = self.strict_types - * cdef Py_buffer view - */ - __pyx_v_default_used = 0; - - /* "msgpack/_packer.pyx":159 - * cdef Py_ssize_t L - * cdef int default_used = 0 - * cdef bint strict_types = self.strict_types # <<<<<<<<<<<<<< - * cdef Py_buffer view - * - */ - __pyx_t_1 = __pyx_v_self->strict_types; - __pyx_v_strict_types = __pyx_t_1; - - /* "msgpack/_packer.pyx":162 - * cdef Py_buffer view - * - * if nest_limit < 0: # <<<<<<<<<<<<<< - * raise ValueError("recursion limit exceeded.") - * - */ - __pyx_t_1 = ((__pyx_v_nest_limit < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_packer.pyx":163 - * - * if nest_limit < 0: - * raise ValueError("recursion limit exceeded.") # <<<<<<<<<<<<<< - * - * while True: - */ - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 163, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(0, 163, __pyx_L1_error) - - /* "msgpack/_packer.pyx":162 - * cdef Py_buffer view - * - * if nest_limit < 0: # <<<<<<<<<<<<<< - * raise ValueError("recursion limit exceeded.") - * - */ - } - - /* "msgpack/_packer.pyx":165 - * raise ValueError("recursion limit exceeded.") - * - * while True: # <<<<<<<<<<<<<< - * if o is None: - * ret = msgpack_pack_nil(&self.pk) - */ - while (1) { - - /* "msgpack/_packer.pyx":166 - * - * while True: - * if o is None: # <<<<<<<<<<<<<< - * ret = msgpack_pack_nil(&self.pk) - * elif PyBool_Check(o) if strict_types else isinstance(o, bool): - */ - __pyx_t_1 = (__pyx_v_o == Py_None); - __pyx_t_3 = (__pyx_t_1 != 0); - if (__pyx_t_3) { - - /* "msgpack/_packer.pyx":167 - * while True: - * if o is None: - * ret = msgpack_pack_nil(&self.pk) # <<<<<<<<<<<<<< - * elif PyBool_Check(o) if strict_types else isinstance(o, bool): - * if o: - */ - __pyx_v_ret = msgpack_pack_nil((&__pyx_v_self->pk)); - - /* "msgpack/_packer.pyx":166 - * - * while True: - * if o is None: # <<<<<<<<<<<<<< - * ret = msgpack_pack_nil(&self.pk) - * elif PyBool_Check(o) if strict_types else isinstance(o, bool): - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":168 - * if o is None: - * ret = msgpack_pack_nil(&self.pk) - * elif PyBool_Check(o) if strict_types else isinstance(o, bool): # <<<<<<<<<<<<<< - * if o: - * ret = msgpack_pack_true(&self.pk) - */ - if ((__pyx_v_strict_types != 0)) { - __pyx_t_3 = PyBool_Check(__pyx_v_o); - } else { - __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_o, __pyx_ptype_7cpython_4bool_bool); - __pyx_t_3 = __pyx_t_1; - } - __pyx_t_1 = (__pyx_t_3 != 0); - if (__pyx_t_1) { - - /* "msgpack/_packer.pyx":169 - * ret = msgpack_pack_nil(&self.pk) - * elif PyBool_Check(o) if strict_types else isinstance(o, bool): - * if o: # <<<<<<<<<<<<<< - * ret = msgpack_pack_true(&self.pk) - * else: - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_o); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 169, __pyx_L1_error) - if (__pyx_t_1) { - - /* "msgpack/_packer.pyx":170 - * elif PyBool_Check(o) if strict_types else isinstance(o, bool): - * if o: - * 
ret = msgpack_pack_true(&self.pk) # <<<<<<<<<<<<<< - * else: - * ret = msgpack_pack_false(&self.pk) - */ - __pyx_v_ret = msgpack_pack_true((&__pyx_v_self->pk)); - - /* "msgpack/_packer.pyx":169 - * ret = msgpack_pack_nil(&self.pk) - * elif PyBool_Check(o) if strict_types else isinstance(o, bool): - * if o: # <<<<<<<<<<<<<< - * ret = msgpack_pack_true(&self.pk) - * else: - */ - goto __pyx_L7; - } - - /* "msgpack/_packer.pyx":172 - * ret = msgpack_pack_true(&self.pk) - * else: - * ret = msgpack_pack_false(&self.pk) # <<<<<<<<<<<<<< - * elif PyLong_CheckExact(o) if strict_types else PyLong_Check(o): - * # PyInt_Check(long) is True for Python 3. - */ - /*else*/ { - __pyx_v_ret = msgpack_pack_false((&__pyx_v_self->pk)); - } - __pyx_L7:; - - /* "msgpack/_packer.pyx":168 - * if o is None: - * ret = msgpack_pack_nil(&self.pk) - * elif PyBool_Check(o) if strict_types else isinstance(o, bool): # <<<<<<<<<<<<<< - * if o: - * ret = msgpack_pack_true(&self.pk) - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":173 - * else: - * ret = msgpack_pack_false(&self.pk) - * elif PyLong_CheckExact(o) if strict_types else PyLong_Check(o): # <<<<<<<<<<<<<< - * # PyInt_Check(long) is True for Python 3. - * # So we should test long before int. - */ - if ((__pyx_v_strict_types != 0)) { - __pyx_t_1 = PyLong_CheckExact(__pyx_v_o); - } else { - __pyx_t_1 = PyLong_Check(__pyx_v_o); - } - __pyx_t_3 = (__pyx_t_1 != 0); - if (__pyx_t_3) { - - /* "msgpack/_packer.pyx":176 - * # PyInt_Check(long) is True for Python 3. - * # So we should test long before int. - * try: # <<<<<<<<<<<<<< - * if o > 0: - * ullval = o - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_4, &__pyx_t_5, &__pyx_t_6); - __Pyx_XGOTREF(__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_5); - __Pyx_XGOTREF(__pyx_t_6); - /*try:*/ { - - /* "msgpack/_packer.pyx":177 - * # So we should test long before int. - * try: - * if o > 0: # <<<<<<<<<<<<<< - * ullval = o - * ret = msgpack_pack_unsigned_long_long(&self.pk, ullval) - */ - __pyx_t_2 = PyObject_RichCompare(__pyx_v_o, __pyx_int_0, Py_GT); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 177, __pyx_L8_error) - __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 177, __pyx_L8_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__pyx_t_3) { - - /* "msgpack/_packer.pyx":178 - * try: - * if o > 0: - * ullval = o # <<<<<<<<<<<<<< - * ret = msgpack_pack_unsigned_long_long(&self.pk, ullval) - * else: - */ - __pyx_t_7 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_v_o); if (unlikely((__pyx_t_7 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 178, __pyx_L8_error) - __pyx_v_ullval = __pyx_t_7; - - /* "msgpack/_packer.pyx":179 - * if o > 0: - * ullval = o - * ret = msgpack_pack_unsigned_long_long(&self.pk, ullval) # <<<<<<<<<<<<<< - * else: - * llval = o - */ - __pyx_v_ret = msgpack_pack_unsigned_long_long((&__pyx_v_self->pk), __pyx_v_ullval); - - /* "msgpack/_packer.pyx":177 - * # So we should test long before int. 
- * try: - * if o > 0: # <<<<<<<<<<<<<< - * ullval = o - * ret = msgpack_pack_unsigned_long_long(&self.pk, ullval) - */ - goto __pyx_L16; - } - - /* "msgpack/_packer.pyx":181 - * ret = msgpack_pack_unsigned_long_long(&self.pk, ullval) - * else: - * llval = o # <<<<<<<<<<<<<< - * ret = msgpack_pack_long_long(&self.pk, llval) - * except OverflowError as oe: - */ - /*else*/ { - __pyx_t_8 = __Pyx_PyInt_As_PY_LONG_LONG(__pyx_v_o); if (unlikely((__pyx_t_8 == (PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 181, __pyx_L8_error) - __pyx_v_llval = __pyx_t_8; - - /* "msgpack/_packer.pyx":182 - * else: - * llval = o - * ret = msgpack_pack_long_long(&self.pk, llval) # <<<<<<<<<<<<<< - * except OverflowError as oe: - * if not default_used and self._default is not None: - */ - __pyx_v_ret = msgpack_pack_long_long((&__pyx_v_self->pk), __pyx_v_llval); - } - __pyx_L16:; - - /* "msgpack/_packer.pyx":176 - * # PyInt_Check(long) is True for Python 3. - * # So we should test long before int. - * try: # <<<<<<<<<<<<<< - * if o > 0: - * ullval = o - */ - } - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - goto __pyx_L15_try_end; - __pyx_L8_error:; - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "msgpack/_packer.pyx":183 - * llval = o - * ret = msgpack_pack_long_long(&self.pk, llval) - * except OverflowError as oe: # <<<<<<<<<<<<<< - * if not default_used and self._default is not None: - * o = self._default(o) - */ - __pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_OverflowError); - if (__pyx_t_9) { - __Pyx_AddTraceback("msgpack._cmsgpack.Packer._pack", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_2, &__pyx_t_10, &__pyx_t_11) < 0) __PYX_ERR(0, 183, __pyx_L10_except_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_t_10); - __Pyx_GOTREF(__pyx_t_11); - __Pyx_INCREF(__pyx_t_10); - __pyx_v_oe = __pyx_t_10; - /*try:*/ { - - /* "msgpack/_packer.pyx":184 - * ret = msgpack_pack_long_long(&self.pk, llval) - * except OverflowError as oe: - * if not default_used and self._default is not None: # <<<<<<<<<<<<<< - * o = self._default(o) - * default_used = True - */ - __pyx_t_1 = ((!(__pyx_v_default_used != 0)) != 0); - if (__pyx_t_1) { - } else { - __pyx_t_3 = __pyx_t_1; - goto __pyx_L25_bool_binop_done; - } - __pyx_t_1 = (__pyx_v_self->_default != Py_None); - __pyx_t_12 = (__pyx_t_1 != 0); - __pyx_t_3 = __pyx_t_12; - __pyx_L25_bool_binop_done:; - if (likely(__pyx_t_3)) { - - /* "msgpack/_packer.pyx":185 - * except OverflowError as oe: - * if not default_used and self._default is not None: - * o = self._default(o) # <<<<<<<<<<<<<< - * default_used = True - * continue - */ - __Pyx_INCREF(__pyx_v_self->_default); - __pyx_t_14 = __pyx_v_self->_default; __pyx_t_15 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_14))) { - __pyx_t_15 = PyMethod_GET_SELF(__pyx_t_14); - if (likely(__pyx_t_15)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); - __Pyx_INCREF(__pyx_t_15); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_14, function); - } - } - __pyx_t_13 = (__pyx_t_15) ? 
__Pyx_PyObject_Call2Args(__pyx_t_14, __pyx_t_15, __pyx_v_o) : __Pyx_PyObject_CallOneArg(__pyx_t_14, __pyx_v_o); - __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; - if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 185, __pyx_L22_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_DECREF_SET(__pyx_v_o, __pyx_t_13); - __pyx_t_13 = 0; - - /* "msgpack/_packer.pyx":186 - * if not default_used and self._default is not None: - * o = self._default(o) - * default_used = True # <<<<<<<<<<<<<< - * continue - * else: - */ - __pyx_v_default_used = 1; - - /* "msgpack/_packer.pyx":187 - * o = self._default(o) - * default_used = True - * continue # <<<<<<<<<<<<<< - * else: - * raise OverflowError("Integer value out of range") - */ - goto __pyx_L19_continue; - - /* "msgpack/_packer.pyx":184 - * ret = msgpack_pack_long_long(&self.pk, llval) - * except OverflowError as oe: - * if not default_used and self._default is not None: # <<<<<<<<<<<<<< - * o = self._default(o) - * default_used = True - */ - } - - /* "msgpack/_packer.pyx":189 - * continue - * else: - * raise OverflowError("Integer value out of range") # <<<<<<<<<<<<<< - * elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o): - * longval = o - */ - /*else*/ { - __pyx_t_13 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 189, __pyx_L22_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_Raise(__pyx_t_13, 0, 0, 0); - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __PYX_ERR(0, 189, __pyx_L22_error) - } - } - - /* "msgpack/_packer.pyx":183 - * llval = o - * ret = msgpack_pack_long_long(&self.pk, llval) - * except OverflowError as oe: # <<<<<<<<<<<<<< - * if not default_used and self._default is not None: - * o = self._default(o) - */ - /*finally:*/ { - __pyx_L22_error:; - /*exception exit:*/{ - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __pyx_t_18 = 0; __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; - __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; - __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; - if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_21, &__pyx_t_22, &__pyx_t_23); - if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_18, &__pyx_t_19, &__pyx_t_20) < 0)) __Pyx_ErrFetch(&__pyx_t_18, &__pyx_t_19, &__pyx_t_20); - __Pyx_XGOTREF(__pyx_t_18); - __Pyx_XGOTREF(__pyx_t_19); - __Pyx_XGOTREF(__pyx_t_20); - __Pyx_XGOTREF(__pyx_t_21); - __Pyx_XGOTREF(__pyx_t_22); - __Pyx_XGOTREF(__pyx_t_23); - __pyx_t_9 = __pyx_lineno; __pyx_t_16 = __pyx_clineno; __pyx_t_17 = __pyx_filename; - { - __Pyx_DECREF(__pyx_v_oe); - __pyx_v_oe = NULL; - } - if (PY_MAJOR_VERSION >= 3) { - __Pyx_XGIVEREF(__pyx_t_21); - __Pyx_XGIVEREF(__pyx_t_22); - __Pyx_XGIVEREF(__pyx_t_23); - __Pyx_ExceptionReset(__pyx_t_21, __pyx_t_22, __pyx_t_23); - } - __Pyx_XGIVEREF(__pyx_t_18); - __Pyx_XGIVEREF(__pyx_t_19); - __Pyx_XGIVEREF(__pyx_t_20); - __Pyx_ErrRestore(__pyx_t_18, __pyx_t_19, __pyx_t_20); - __pyx_t_18 = 0; __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; - __pyx_lineno = __pyx_t_9; __pyx_clineno = __pyx_t_16; __pyx_filename = __pyx_t_17; - goto __pyx_L10_except_error; - } - __pyx_L19_continue: { - __Pyx_DECREF(__pyx_v_oe); - __pyx_v_oe = NULL; - goto __pyx_L18_except_continue; - } - } - __pyx_L18_except_continue:; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - goto __pyx_L14_try_continue; - } - 
goto __pyx_L10_except_error; - __pyx_L10_except_error:; - - /* "msgpack/_packer.pyx":176 - * # PyInt_Check(long) is True for Python 3. - * # So we should test long before int. - * try: # <<<<<<<<<<<<<< - * if o > 0: - * ullval = o - */ - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); - goto __pyx_L1_error; - __pyx_L14_try_continue:; - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); - goto __pyx_L4_continue; - __pyx_L15_try_end:; - } - - /* "msgpack/_packer.pyx":173 - * else: - * ret = msgpack_pack_false(&self.pk) - * elif PyLong_CheckExact(o) if strict_types else PyLong_Check(o): # <<<<<<<<<<<<<< - * # PyInt_Check(long) is True for Python 3. - * # So we should test long before int. - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":190 - * else: - * raise OverflowError("Integer value out of range") - * elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o): # <<<<<<<<<<<<<< - * longval = o - * ret = msgpack_pack_long(&self.pk, longval) - */ - if ((__pyx_v_strict_types != 0)) { - __pyx_t_3 = PyInt_CheckExact(__pyx_v_o); - } else { - __pyx_t_3 = PyInt_Check(__pyx_v_o); - } - __pyx_t_12 = (__pyx_t_3 != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":191 - * raise OverflowError("Integer value out of range") - * elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o): - * longval = o # <<<<<<<<<<<<<< - * ret = msgpack_pack_long(&self.pk, longval) - * elif PyFloat_CheckExact(o) if strict_types else PyFloat_Check(o): - */ - __pyx_t_24 = __Pyx_PyInt_As_long(__pyx_v_o); if (unlikely((__pyx_t_24 == (long)-1) && PyErr_Occurred())) __PYX_ERR(0, 191, __pyx_L1_error) - __pyx_v_longval = __pyx_t_24; - - /* "msgpack/_packer.pyx":192 - * elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o): - * longval = o - * ret = msgpack_pack_long(&self.pk, longval) # <<<<<<<<<<<<<< - * elif PyFloat_CheckExact(o) if strict_types else PyFloat_Check(o): - * if self.use_float: - */ - __pyx_v_ret = msgpack_pack_long((&__pyx_v_self->pk), __pyx_v_longval); - - /* "msgpack/_packer.pyx":190 - * else: - * raise OverflowError("Integer value out of range") - * elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o): # <<<<<<<<<<<<<< - * longval = o - * ret = msgpack_pack_long(&self.pk, longval) - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":193 - * longval = o - * ret = msgpack_pack_long(&self.pk, longval) - * elif PyFloat_CheckExact(o) if strict_types else PyFloat_Check(o): # <<<<<<<<<<<<<< - * if self.use_float: - * fval = o - */ - if ((__pyx_v_strict_types != 0)) { - __pyx_t_12 = PyFloat_CheckExact(__pyx_v_o); - } else { - __pyx_t_12 = PyFloat_Check(__pyx_v_o); - } - __pyx_t_3 = (__pyx_t_12 != 0); - if (__pyx_t_3) { - - /* "msgpack/_packer.pyx":194 - * ret = msgpack_pack_long(&self.pk, longval) - * elif PyFloat_CheckExact(o) if strict_types else PyFloat_Check(o): - * if self.use_float: # <<<<<<<<<<<<<< - * fval = o - * ret = msgpack_pack_float(&self.pk, fval) - */ - __pyx_t_3 = __Pyx_PyObject_IsTrue(((PyObject *)__pyx_v_self->use_float)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 194, __pyx_L1_error) - if (__pyx_t_3) { - - /* "msgpack/_packer.pyx":195 - * elif PyFloat_CheckExact(o) if strict_types else PyFloat_Check(o): - * if self.use_float: - * fval = o # <<<<<<<<<<<<<< - * ret = msgpack_pack_float(&self.pk, fval) - * else: - */ - __pyx_t_25 = __pyx_PyFloat_AsFloat(__pyx_v_o); if 
(unlikely((__pyx_t_25 == (float)-1) && PyErr_Occurred())) __PYX_ERR(0, 195, __pyx_L1_error) - __pyx_v_fval = __pyx_t_25; - - /* "msgpack/_packer.pyx":196 - * if self.use_float: - * fval = o - * ret = msgpack_pack_float(&self.pk, fval) # <<<<<<<<<<<<<< - * else: - * dval = o - */ - __pyx_v_ret = msgpack_pack_float((&__pyx_v_self->pk), __pyx_v_fval); - - /* "msgpack/_packer.pyx":194 - * ret = msgpack_pack_long(&self.pk, longval) - * elif PyFloat_CheckExact(o) if strict_types else PyFloat_Check(o): - * if self.use_float: # <<<<<<<<<<<<<< - * fval = o - * ret = msgpack_pack_float(&self.pk, fval) - */ - goto __pyx_L31; - } - - /* "msgpack/_packer.pyx":198 - * ret = msgpack_pack_float(&self.pk, fval) - * else: - * dval = o # <<<<<<<<<<<<<< - * ret = msgpack_pack_double(&self.pk, dval) - * elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o): - */ - /*else*/ { - __pyx_t_26 = __pyx_PyFloat_AsDouble(__pyx_v_o); if (unlikely((__pyx_t_26 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 198, __pyx_L1_error) - __pyx_v_dval = __pyx_t_26; - - /* "msgpack/_packer.pyx":199 - * else: - * dval = o - * ret = msgpack_pack_double(&self.pk, dval) # <<<<<<<<<<<<<< - * elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o): - * L = len(o) - */ - __pyx_v_ret = msgpack_pack_double((&__pyx_v_self->pk), __pyx_v_dval); - } - __pyx_L31:; - - /* "msgpack/_packer.pyx":193 - * longval = o - * ret = msgpack_pack_long(&self.pk, longval) - * elif PyFloat_CheckExact(o) if strict_types else PyFloat_Check(o): # <<<<<<<<<<<<<< - * if self.use_float: - * fval = o - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":200 - * dval = o - * ret = msgpack_pack_double(&self.pk, dval) - * elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o): # <<<<<<<<<<<<<< - * L = len(o) - * if L > ITEM_LIMIT: - */ - if ((__pyx_v_strict_types != 0)) { - __pyx_t_16 = __pyx_f_7msgpack_9_cmsgpack_PyBytesLike_CheckExact(__pyx_v_o); - } else { - __pyx_t_16 = __pyx_f_7msgpack_9_cmsgpack_PyBytesLike_Check(__pyx_v_o); - } - __pyx_t_3 = (__pyx_t_16 != 0); - if (__pyx_t_3) { - - /* "msgpack/_packer.pyx":201 - * ret = msgpack_pack_double(&self.pk, dval) - * elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o): - * L = len(o) # <<<<<<<<<<<<<< - * if L > ITEM_LIMIT: - * PyErr_Format(ValueError, b"%.200s object is too large", Py_TYPE(o).tp_name) - */ - __pyx_t_27 = PyObject_Length(__pyx_v_o); if (unlikely(__pyx_t_27 == ((Py_ssize_t)-1))) __PYX_ERR(0, 201, __pyx_L1_error) - __pyx_v_L = __pyx_t_27; - - /* "msgpack/_packer.pyx":202 - * elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o): - * L = len(o) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * PyErr_Format(ValueError, b"%.200s object is too large", Py_TYPE(o).tp_name) - * rawval = o - */ - __pyx_t_3 = ((__pyx_v_L > __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT) != 0); - if (__pyx_t_3) { - - /* "msgpack/_packer.pyx":203 - * L = len(o) - * if L > ITEM_LIMIT: - * PyErr_Format(ValueError, b"%.200s object is too large", Py_TYPE(o).tp_name) # <<<<<<<<<<<<<< - * rawval = o - * ret = msgpack_pack_bin(&self.pk, L) - */ - __pyx_t_28 = PyErr_Format(__pyx_builtin_ValueError, ((char *)"%.200s object is too large"), Py_TYPE(__pyx_v_o)->tp_name); if (unlikely(__pyx_t_28 == ((PyObject *)NULL))) __PYX_ERR(0, 203, __pyx_L1_error) - - /* "msgpack/_packer.pyx":202 - * elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o): - * L = len(o) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * PyErr_Format(ValueError, b"%.200s object is 
too large", Py_TYPE(o).tp_name) - * rawval = o - */ - } - - /* "msgpack/_packer.pyx":204 - * if L > ITEM_LIMIT: - * PyErr_Format(ValueError, b"%.200s object is too large", Py_TYPE(o).tp_name) - * rawval = o # <<<<<<<<<<<<<< - * ret = msgpack_pack_bin(&self.pk, L) - * if ret == 0: - */ - __pyx_t_29 = __Pyx_PyObject_AsWritableString(__pyx_v_o); if (unlikely((!__pyx_t_29) && PyErr_Occurred())) __PYX_ERR(0, 204, __pyx_L1_error) - __pyx_v_rawval = __pyx_t_29; - - /* "msgpack/_packer.pyx":205 - * PyErr_Format(ValueError, b"%.200s object is too large", Py_TYPE(o).tp_name) - * rawval = o - * ret = msgpack_pack_bin(&self.pk, L) # <<<<<<<<<<<<<< - * if ret == 0: - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - */ - __pyx_v_ret = msgpack_pack_bin((&__pyx_v_self->pk), __pyx_v_L); - - /* "msgpack/_packer.pyx":206 - * rawval = o - * ret = msgpack_pack_bin(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o): - */ - __pyx_t_3 = ((__pyx_v_ret == 0) != 0); - if (__pyx_t_3) { - - /* "msgpack/_packer.pyx":207 - * ret = msgpack_pack_bin(&self.pk, L) - * if ret == 0: - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) # <<<<<<<<<<<<<< - * elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o): - * if self.encoding == NULL and self.unicode_errors == NULL: - */ - __pyx_v_ret = msgpack_pack_raw_body((&__pyx_v_self->pk), __pyx_v_rawval, __pyx_v_L); - - /* "msgpack/_packer.pyx":206 - * rawval = o - * ret = msgpack_pack_bin(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o): - */ - } - - /* "msgpack/_packer.pyx":200 - * dval = o - * ret = msgpack_pack_double(&self.pk, dval) - * elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o): # <<<<<<<<<<<<<< - * L = len(o) - * if L > ITEM_LIMIT: - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":208 - * if ret == 0: - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o): # <<<<<<<<<<<<<< - * if self.encoding == NULL and self.unicode_errors == NULL: - * ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT); - */ - if ((__pyx_v_strict_types != 0)) { - __pyx_t_3 = PyUnicode_CheckExact(__pyx_v_o); - } else { - __pyx_t_3 = PyUnicode_Check(__pyx_v_o); - } - __pyx_t_12 = (__pyx_t_3 != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":209 - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o): - * if self.encoding == NULL and self.unicode_errors == NULL: # <<<<<<<<<<<<<< - * ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT); - * if ret == -2: - */ - __pyx_t_3 = ((__pyx_v_self->encoding == NULL) != 0); - if (__pyx_t_3) { - } else { - __pyx_t_12 = __pyx_t_3; - goto __pyx_L35_bool_binop_done; - } - __pyx_t_3 = ((__pyx_v_self->unicode_errors == NULL) != 0); - __pyx_t_12 = __pyx_t_3; - __pyx_L35_bool_binop_done:; - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":210 - * elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o): - * if self.encoding == NULL and self.unicode_errors == NULL: - * ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT); # <<<<<<<<<<<<<< - * if ret == -2: - * raise ValueError("unicode string is too large") - */ - __pyx_v_ret = msgpack_pack_unicode((&__pyx_v_self->pk), __pyx_v_o, __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT); - - /* 
"msgpack/_packer.pyx":211 - * if self.encoding == NULL and self.unicode_errors == NULL: - * ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT); - * if ret == -2: # <<<<<<<<<<<<<< - * raise ValueError("unicode string is too large") - * else: - */ - __pyx_t_12 = ((__pyx_v_ret == -2L) != 0); - if (unlikely(__pyx_t_12)) { - - /* "msgpack/_packer.pyx":212 - * ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT); - * if ret == -2: - * raise ValueError("unicode string is too large") # <<<<<<<<<<<<<< - * else: - * o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors) - */ - __pyx_t_11 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 212, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_Raise(__pyx_t_11, 0, 0, 0); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __PYX_ERR(0, 212, __pyx_L1_error) - - /* "msgpack/_packer.pyx":211 - * if self.encoding == NULL and self.unicode_errors == NULL: - * ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT); - * if ret == -2: # <<<<<<<<<<<<<< - * raise ValueError("unicode string is too large") - * else: - */ - } - - /* "msgpack/_packer.pyx":209 - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o): - * if self.encoding == NULL and self.unicode_errors == NULL: # <<<<<<<<<<<<<< - * ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT); - * if ret == -2: - */ - goto __pyx_L34; - } - - /* "msgpack/_packer.pyx":214 - * raise ValueError("unicode string is too large") - * else: - * o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors) # <<<<<<<<<<<<<< - * L = len(o) - * if L > ITEM_LIMIT: - */ - /*else*/ { - __pyx_t_11 = PyUnicode_AsEncodedString(__pyx_v_o, __pyx_v_self->encoding, __pyx_v_self->unicode_errors); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 214, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_DECREF_SET(__pyx_v_o, __pyx_t_11); - __pyx_t_11 = 0; - - /* "msgpack/_packer.pyx":215 - * else: - * o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors) - * L = len(o) # <<<<<<<<<<<<<< - * if L > ITEM_LIMIT: - * raise ValueError("unicode string is too large") - */ - __pyx_t_27 = PyObject_Length(__pyx_v_o); if (unlikely(__pyx_t_27 == ((Py_ssize_t)-1))) __PYX_ERR(0, 215, __pyx_L1_error) - __pyx_v_L = __pyx_t_27; - - /* "msgpack/_packer.pyx":216 - * o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors) - * L = len(o) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("unicode string is too large") - * ret = msgpack_pack_raw(&self.pk, L) - */ - __pyx_t_12 = ((__pyx_v_L > __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT) != 0); - if (unlikely(__pyx_t_12)) { - - /* "msgpack/_packer.pyx":217 - * L = len(o) - * if L > ITEM_LIMIT: - * raise ValueError("unicode string is too large") # <<<<<<<<<<<<<< - * ret = msgpack_pack_raw(&self.pk, L) - * if ret == 0: - */ - __pyx_t_11 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 217, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_Raise(__pyx_t_11, 0, 0, 0); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __PYX_ERR(0, 217, __pyx_L1_error) - - /* "msgpack/_packer.pyx":216 - * o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors) - * L = len(o) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("unicode string is too large") - * ret = msgpack_pack_raw(&self.pk, L) - */ - } - - /* "msgpack/_packer.pyx":218 - * if L > ITEM_LIMIT: - * raise ValueError("unicode 
string is too large") - * ret = msgpack_pack_raw(&self.pk, L) # <<<<<<<<<<<<<< - * if ret == 0: - * rawval = o - */ - __pyx_v_ret = msgpack_pack_raw((&__pyx_v_self->pk), __pyx_v_L); - - /* "msgpack/_packer.pyx":219 - * raise ValueError("unicode string is too large") - * ret = msgpack_pack_raw(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * rawval = o - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - */ - __pyx_t_12 = ((__pyx_v_ret == 0) != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":220 - * ret = msgpack_pack_raw(&self.pk, L) - * if ret == 0: - * rawval = o # <<<<<<<<<<<<<< - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyDict_CheckExact(o): - */ - __pyx_t_29 = __Pyx_PyObject_AsWritableString(__pyx_v_o); if (unlikely((!__pyx_t_29) && PyErr_Occurred())) __PYX_ERR(0, 220, __pyx_L1_error) - __pyx_v_rawval = __pyx_t_29; - - /* "msgpack/_packer.pyx":221 - * if ret == 0: - * rawval = o - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) # <<<<<<<<<<<<<< - * elif PyDict_CheckExact(o): - * d = o - */ - __pyx_v_ret = msgpack_pack_raw_body((&__pyx_v_self->pk), __pyx_v_rawval, __pyx_v_L); - - /* "msgpack/_packer.pyx":219 - * raise ValueError("unicode string is too large") - * ret = msgpack_pack_raw(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * rawval = o - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - */ - } - } - __pyx_L34:; - - /* "msgpack/_packer.pyx":208 - * if ret == 0: - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o): # <<<<<<<<<<<<<< - * if self.encoding == NULL and self.unicode_errors == NULL: - * ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT); - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":222 - * rawval = o - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyDict_CheckExact(o): # <<<<<<<<<<<<<< - * d = o - * L = len(d) - */ - __pyx_t_12 = (PyDict_CheckExact(__pyx_v_o) != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":223 - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyDict_CheckExact(o): - * d = o # <<<<<<<<<<<<<< - * L = len(d) - * if L > ITEM_LIMIT: - */ - __pyx_t_11 = __pyx_v_o; - __Pyx_INCREF(__pyx_t_11); - __pyx_v_d = ((PyObject*)__pyx_t_11); - __pyx_t_11 = 0; - - /* "msgpack/_packer.pyx":224 - * elif PyDict_CheckExact(o): - * d = o - * L = len(d) # <<<<<<<<<<<<<< - * if L > ITEM_LIMIT: - * raise ValueError("dict is too large") - */ - if (unlikely(__pyx_v_d == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 224, __pyx_L1_error) - } - __pyx_t_27 = PyDict_Size(__pyx_v_d); if (unlikely(__pyx_t_27 == ((Py_ssize_t)-1))) __PYX_ERR(0, 224, __pyx_L1_error) - __pyx_v_L = __pyx_t_27; - - /* "msgpack/_packer.pyx":225 - * d = o - * L = len(d) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("dict is too large") - * ret = msgpack_pack_map(&self.pk, L) - */ - __pyx_t_12 = ((__pyx_v_L > __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT) != 0); - if (unlikely(__pyx_t_12)) { - - /* "msgpack/_packer.pyx":226 - * L = len(d) - * if L > ITEM_LIMIT: - * raise ValueError("dict is too large") # <<<<<<<<<<<<<< - * ret = msgpack_pack_map(&self.pk, L) - * if ret == 0: - */ - __pyx_t_11 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 226, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_Raise(__pyx_t_11, 0, 0, 0); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __PYX_ERR(0, 226, __pyx_L1_error) - - /* "msgpack/_packer.pyx":225 
- * d = o - * L = len(d) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("dict is too large") - * ret = msgpack_pack_map(&self.pk, L) - */ - } - - /* "msgpack/_packer.pyx":227 - * if L > ITEM_LIMIT: - * raise ValueError("dict is too large") - * ret = msgpack_pack_map(&self.pk, L) # <<<<<<<<<<<<<< - * if ret == 0: - * for k, v in d.items(): - */ - __pyx_v_ret = msgpack_pack_map((&__pyx_v_self->pk), __pyx_v_L); - - /* "msgpack/_packer.pyx":228 - * raise ValueError("dict is too large") - * ret = msgpack_pack_map(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * for k, v in d.items(): - * ret = self._pack(k, nest_limit-1) - */ - __pyx_t_12 = ((__pyx_v_ret == 0) != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":229 - * ret = msgpack_pack_map(&self.pk, L) - * if ret == 0: - * for k, v in d.items(): # <<<<<<<<<<<<<< - * ret = self._pack(k, nest_limit-1) - * if ret != 0: break - */ - __pyx_t_27 = 0; - if (unlikely(__pyx_v_d == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); - __PYX_ERR(0, 229, __pyx_L1_error) - } - __pyx_t_10 = __Pyx_dict_iterator(__pyx_v_d, 1, __pyx_n_s_items, (&__pyx_t_30), (&__pyx_t_16)); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 229, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_XDECREF(__pyx_t_11); - __pyx_t_11 = __pyx_t_10; - __pyx_t_10 = 0; - while (1) { - __pyx_t_9 = __Pyx_dict_iter_next(__pyx_t_11, __pyx_t_30, &__pyx_t_27, &__pyx_t_10, &__pyx_t_2, NULL, __pyx_t_16); - if (unlikely(__pyx_t_9 == 0)) break; - if (unlikely(__pyx_t_9 == -1)) __PYX_ERR(0, 229, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_GOTREF(__pyx_t_2); - __Pyx_XDECREF_SET(__pyx_v_k, __pyx_t_10); - __pyx_t_10 = 0; - __Pyx_XDECREF_SET(__pyx_v_v, __pyx_t_2); - __pyx_t_2 = 0; - - /* "msgpack/_packer.pyx":230 - * if ret == 0: - * for k, v in d.items(): - * ret = self._pack(k, nest_limit-1) # <<<<<<<<<<<<<< - * if ret != 0: break - * ret = self._pack(v, nest_limit-1) - */ - __pyx_t_31.__pyx_n = 1; - __pyx_t_31.nest_limit = (__pyx_v_nest_limit - 1); - __pyx_t_9 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *)__pyx_v_self->__pyx_vtab)->_pack(__pyx_v_self, __pyx_v_k, &__pyx_t_31); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 230, __pyx_L1_error) - __pyx_v_ret = __pyx_t_9; - - /* "msgpack/_packer.pyx":231 - * for k, v in d.items(): - * ret = self._pack(k, nest_limit-1) - * if ret != 0: break # <<<<<<<<<<<<<< - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break - */ - __pyx_t_12 = ((__pyx_v_ret != 0) != 0); - if (__pyx_t_12) { - goto __pyx_L43_break; - } - - /* "msgpack/_packer.pyx":232 - * ret = self._pack(k, nest_limit-1) - * if ret != 0: break - * ret = self._pack(v, nest_limit-1) # <<<<<<<<<<<<<< - * if ret != 0: break - * elif not strict_types and PyDict_Check(o): - */ - __pyx_t_31.__pyx_n = 1; - __pyx_t_31.nest_limit = (__pyx_v_nest_limit - 1); - __pyx_t_9 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *)__pyx_v_self->__pyx_vtab)->_pack(__pyx_v_self, __pyx_v_v, &__pyx_t_31); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 232, __pyx_L1_error) - __pyx_v_ret = __pyx_t_9; - - /* "msgpack/_packer.pyx":233 - * if ret != 0: break - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break # <<<<<<<<<<<<<< - * elif not strict_types and PyDict_Check(o): - * L = len(o) - */ - __pyx_t_12 = ((__pyx_v_ret != 0) != 0); - if (__pyx_t_12) { - goto __pyx_L43_break; - } - } - __pyx_L43_break:; - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - - /* "msgpack/_packer.pyx":228 - * raise ValueError("dict is too 
large") - * ret = msgpack_pack_map(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * for k, v in d.items(): - * ret = self._pack(k, nest_limit-1) - */ - } - - /* "msgpack/_packer.pyx":222 - * rawval = o - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyDict_CheckExact(o): # <<<<<<<<<<<<<< - * d = o - * L = len(d) - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":234 - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break - * elif not strict_types and PyDict_Check(o): # <<<<<<<<<<<<<< - * L = len(o) - * if L > ITEM_LIMIT: - */ - __pyx_t_3 = ((!(__pyx_v_strict_types != 0)) != 0); - if (__pyx_t_3) { - } else { - __pyx_t_12 = __pyx_t_3; - goto __pyx_L46_bool_binop_done; - } - __pyx_t_3 = (PyDict_Check(__pyx_v_o) != 0); - __pyx_t_12 = __pyx_t_3; - __pyx_L46_bool_binop_done:; - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":235 - * if ret != 0: break - * elif not strict_types and PyDict_Check(o): - * L = len(o) # <<<<<<<<<<<<<< - * if L > ITEM_LIMIT: - * raise ValueError("dict is too large") - */ - __pyx_t_30 = PyObject_Length(__pyx_v_o); if (unlikely(__pyx_t_30 == ((Py_ssize_t)-1))) __PYX_ERR(0, 235, __pyx_L1_error) - __pyx_v_L = __pyx_t_30; - - /* "msgpack/_packer.pyx":236 - * elif not strict_types and PyDict_Check(o): - * L = len(o) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("dict is too large") - * ret = msgpack_pack_map(&self.pk, L) - */ - __pyx_t_12 = ((__pyx_v_L > __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT) != 0); - if (unlikely(__pyx_t_12)) { - - /* "msgpack/_packer.pyx":237 - * L = len(o) - * if L > ITEM_LIMIT: - * raise ValueError("dict is too large") # <<<<<<<<<<<<<< - * ret = msgpack_pack_map(&self.pk, L) - * if ret == 0: - */ - __pyx_t_11 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 237, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_Raise(__pyx_t_11, 0, 0, 0); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __PYX_ERR(0, 237, __pyx_L1_error) - - /* "msgpack/_packer.pyx":236 - * elif not strict_types and PyDict_Check(o): - * L = len(o) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("dict is too large") - * ret = msgpack_pack_map(&self.pk, L) - */ - } - - /* "msgpack/_packer.pyx":238 - * if L > ITEM_LIMIT: - * raise ValueError("dict is too large") - * ret = msgpack_pack_map(&self.pk, L) # <<<<<<<<<<<<<< - * if ret == 0: - * for k, v in o.items(): - */ - __pyx_v_ret = msgpack_pack_map((&__pyx_v_self->pk), __pyx_v_L); - - /* "msgpack/_packer.pyx":239 - * raise ValueError("dict is too large") - * ret = msgpack_pack_map(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * for k, v in o.items(): - * ret = self._pack(k, nest_limit-1) - */ - __pyx_t_12 = ((__pyx_v_ret == 0) != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":240 - * ret = msgpack_pack_map(&self.pk, L) - * if ret == 0: - * for k, v in o.items(): # <<<<<<<<<<<<<< - * ret = self._pack(k, nest_limit-1) - * if ret != 0: break - */ - __pyx_t_30 = 0; - if (unlikely(__pyx_v_o == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); - __PYX_ERR(0, 240, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_dict_iterator(__pyx_v_o, 0, __pyx_n_s_items, (&__pyx_t_27), (&__pyx_t_16)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 240, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_11); - __pyx_t_11 = __pyx_t_2; - __pyx_t_2 = 0; - while (1) { - __pyx_t_9 = __Pyx_dict_iter_next(__pyx_t_11, __pyx_t_27, &__pyx_t_30, &__pyx_t_2, &__pyx_t_10, NULL, __pyx_t_16); - if 
(unlikely(__pyx_t_9 == 0)) break; - if (unlikely(__pyx_t_9 == -1)) __PYX_ERR(0, 240, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_t_10); - __Pyx_XDECREF_SET(__pyx_v_k, __pyx_t_2); - __pyx_t_2 = 0; - __Pyx_XDECREF_SET(__pyx_v_v, __pyx_t_10); - __pyx_t_10 = 0; - - /* "msgpack/_packer.pyx":241 - * if ret == 0: - * for k, v in o.items(): - * ret = self._pack(k, nest_limit-1) # <<<<<<<<<<<<<< - * if ret != 0: break - * ret = self._pack(v, nest_limit-1) - */ - __pyx_t_31.__pyx_n = 1; - __pyx_t_31.nest_limit = (__pyx_v_nest_limit - 1); - __pyx_t_9 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *)__pyx_v_self->__pyx_vtab)->_pack(__pyx_v_self, __pyx_v_k, &__pyx_t_31); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 241, __pyx_L1_error) - __pyx_v_ret = __pyx_t_9; - - /* "msgpack/_packer.pyx":242 - * for k, v in o.items(): - * ret = self._pack(k, nest_limit-1) - * if ret != 0: break # <<<<<<<<<<<<<< - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break - */ - __pyx_t_12 = ((__pyx_v_ret != 0) != 0); - if (__pyx_t_12) { - goto __pyx_L51_break; - } - - /* "msgpack/_packer.pyx":243 - * ret = self._pack(k, nest_limit-1) - * if ret != 0: break - * ret = self._pack(v, nest_limit-1) # <<<<<<<<<<<<<< - * if ret != 0: break - * elif type(o) is ExtType if strict_types else isinstance(o, ExtType): - */ - __pyx_t_31.__pyx_n = 1; - __pyx_t_31.nest_limit = (__pyx_v_nest_limit - 1); - __pyx_t_9 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *)__pyx_v_self->__pyx_vtab)->_pack(__pyx_v_self, __pyx_v_v, &__pyx_t_31); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 243, __pyx_L1_error) - __pyx_v_ret = __pyx_t_9; - - /* "msgpack/_packer.pyx":244 - * if ret != 0: break - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break # <<<<<<<<<<<<<< - * elif type(o) is ExtType if strict_types else isinstance(o, ExtType): - * # This should be before Tuple because ExtType is namedtuple. - */ - __pyx_t_12 = ((__pyx_v_ret != 0) != 0); - if (__pyx_t_12) { - goto __pyx_L51_break; - } - } - __pyx_L51_break:; - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - - /* "msgpack/_packer.pyx":239 - * raise ValueError("dict is too large") - * ret = msgpack_pack_map(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * for k, v in o.items(): - * ret = self._pack(k, nest_limit-1) - */ - } - - /* "msgpack/_packer.pyx":234 - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break - * elif not strict_types and PyDict_Check(o): # <<<<<<<<<<<<<< - * L = len(o) - * if L > ITEM_LIMIT: - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":245 - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break - * elif type(o) is ExtType if strict_types else isinstance(o, ExtType): # <<<<<<<<<<<<<< - * # This should be before Tuple because ExtType is namedtuple. 
- * longval = o.code - */ - if ((__pyx_v_strict_types != 0)) { - __pyx_t_11 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_7cpython_4type_type), __pyx_v_o); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 245, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GetModuleGlobalName(__pyx_t_10, __pyx_n_s_ExtType); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 245, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_3 = (__pyx_t_11 == __pyx_t_10); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_t_12 = __pyx_t_3; - } else { - __Pyx_GetModuleGlobalName(__pyx_t_10, __pyx_n_s_ExtType); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 245, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_3 = PyObject_IsInstance(__pyx_v_o, __pyx_t_10); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 245, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_t_12 = __pyx_t_3; - } - __pyx_t_3 = (__pyx_t_12 != 0); - if (__pyx_t_3) { - - /* "msgpack/_packer.pyx":247 - * elif type(o) is ExtType if strict_types else isinstance(o, ExtType): - * # This should be before Tuple because ExtType is namedtuple. - * longval = o.code # <<<<<<<<<<<<<< - * rawval = o.data - * L = len(o.data) - */ - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_o, __pyx_n_s_code); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 247, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_24 = __Pyx_PyInt_As_long(__pyx_t_10); if (unlikely((__pyx_t_24 == (long)-1) && PyErr_Occurred())) __PYX_ERR(0, 247, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_v_longval = __pyx_t_24; - - /* "msgpack/_packer.pyx":248 - * # This should be before Tuple because ExtType is namedtuple. - * longval = o.code - * rawval = o.data # <<<<<<<<<<<<<< - * L = len(o.data) - * if L > ITEM_LIMIT: - */ - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_o, __pyx_n_s_data); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 248, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_29 = __Pyx_PyObject_AsWritableString(__pyx_t_10); if (unlikely((!__pyx_t_29) && PyErr_Occurred())) __PYX_ERR(0, 248, __pyx_L1_error) - __pyx_v_rawval = __pyx_t_29; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - - /* "msgpack/_packer.pyx":249 - * longval = o.code - * rawval = o.data - * L = len(o.data) # <<<<<<<<<<<<<< - * if L > ITEM_LIMIT: - * raise ValueError("EXT data is too large") - */ - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_o, __pyx_n_s_data); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 249, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_27 = PyObject_Length(__pyx_t_10); if (unlikely(__pyx_t_27 == ((Py_ssize_t)-1))) __PYX_ERR(0, 249, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_v_L = __pyx_t_27; - - /* "msgpack/_packer.pyx":250 - * rawval = o.data - * L = len(o.data) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("EXT data is too large") - * ret = msgpack_pack_ext(&self.pk, longval, L) - */ - __pyx_t_3 = ((__pyx_v_L > __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT) != 0); - if (unlikely(__pyx_t_3)) { - - /* "msgpack/_packer.pyx":251 - * L = len(o.data) - * if L > ITEM_LIMIT: - * raise ValueError("EXT data is too large") # <<<<<<<<<<<<<< - * ret = msgpack_pack_ext(&self.pk, longval, L) - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - */ - __pyx_t_10 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 251, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_Raise(__pyx_t_10, 0, 0, 0); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __PYX_ERR(0, 
251, __pyx_L1_error) - - /* "msgpack/_packer.pyx":250 - * rawval = o.data - * L = len(o.data) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("EXT data is too large") - * ret = msgpack_pack_ext(&self.pk, longval, L) - */ - } - - /* "msgpack/_packer.pyx":252 - * if L > ITEM_LIMIT: - * raise ValueError("EXT data is too large") - * ret = msgpack_pack_ext(&self.pk, longval, L) # <<<<<<<<<<<<<< - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)): - */ - __pyx_v_ret = msgpack_pack_ext((&__pyx_v_self->pk), __pyx_v_longval, __pyx_v_L); - - /* "msgpack/_packer.pyx":253 - * raise ValueError("EXT data is too large") - * ret = msgpack_pack_ext(&self.pk, longval, L) - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) # <<<<<<<<<<<<<< - * elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)): - * L = len(o) - */ - __pyx_v_ret = msgpack_pack_raw_body((&__pyx_v_self->pk), __pyx_v_rawval, __pyx_v_L); - - /* "msgpack/_packer.pyx":245 - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break - * elif type(o) is ExtType if strict_types else isinstance(o, ExtType): # <<<<<<<<<<<<<< - * # This should be before Tuple because ExtType is namedtuple. - * longval = o.code - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":254 - * ret = msgpack_pack_ext(&self.pk, longval, L) - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)): # <<<<<<<<<<<<<< - * L = len(o) - * if L > ITEM_LIMIT: - */ - if ((__pyx_v_strict_types != 0)) { - __pyx_t_3 = PyList_CheckExact(__pyx_v_o); - } else { - __pyx_t_1 = PyTuple_Check(__pyx_v_o); - if (!__pyx_t_1) { - } else { - __pyx_t_12 = __pyx_t_1; - goto __pyx_L55_bool_binop_done; - } - __pyx_t_1 = PyList_Check(__pyx_v_o); - __pyx_t_12 = __pyx_t_1; - __pyx_L55_bool_binop_done:; - __pyx_t_3 = __pyx_t_12; - } - __pyx_t_12 = (__pyx_t_3 != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":255 - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)): - * L = len(o) # <<<<<<<<<<<<<< - * if L > ITEM_LIMIT: - * raise ValueError("list is too large") - */ - __pyx_t_27 = PyObject_Length(__pyx_v_o); if (unlikely(__pyx_t_27 == ((Py_ssize_t)-1))) __PYX_ERR(0, 255, __pyx_L1_error) - __pyx_v_L = __pyx_t_27; - - /* "msgpack/_packer.pyx":256 - * elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)): - * L = len(o) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("list is too large") - * ret = msgpack_pack_array(&self.pk, L) - */ - __pyx_t_12 = ((__pyx_v_L > __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT) != 0); - if (unlikely(__pyx_t_12)) { - - /* "msgpack/_packer.pyx":257 - * L = len(o) - * if L > ITEM_LIMIT: - * raise ValueError("list is too large") # <<<<<<<<<<<<<< - * ret = msgpack_pack_array(&self.pk, L) - * if ret == 0: - */ - __pyx_t_10 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 257, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_Raise(__pyx_t_10, 0, 0, 0); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __PYX_ERR(0, 257, __pyx_L1_error) - - /* "msgpack/_packer.pyx":256 - * elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)): - * L = len(o) - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError("list is too large") - * ret = 
msgpack_pack_array(&self.pk, L) - */ - } - - /* "msgpack/_packer.pyx":258 - * if L > ITEM_LIMIT: - * raise ValueError("list is too large") - * ret = msgpack_pack_array(&self.pk, L) # <<<<<<<<<<<<<< - * if ret == 0: - * for v in o: - */ - __pyx_v_ret = msgpack_pack_array((&__pyx_v_self->pk), __pyx_v_L); - - /* "msgpack/_packer.pyx":259 - * raise ValueError("list is too large") - * ret = msgpack_pack_array(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * for v in o: - * ret = self._pack(v, nest_limit-1) - */ - __pyx_t_12 = ((__pyx_v_ret == 0) != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":260 - * ret = msgpack_pack_array(&self.pk, L) - * if ret == 0: - * for v in o: # <<<<<<<<<<<<<< - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break - */ - if (likely(PyList_CheckExact(__pyx_v_o)) || PyTuple_CheckExact(__pyx_v_o)) { - __pyx_t_10 = __pyx_v_o; __Pyx_INCREF(__pyx_t_10); __pyx_t_27 = 0; - __pyx_t_32 = NULL; - } else { - __pyx_t_27 = -1; __pyx_t_10 = PyObject_GetIter(__pyx_v_o); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 260, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_32 = Py_TYPE(__pyx_t_10)->tp_iternext; if (unlikely(!__pyx_t_32)) __PYX_ERR(0, 260, __pyx_L1_error) - } - for (;;) { - if (likely(!__pyx_t_32)) { - if (likely(PyList_CheckExact(__pyx_t_10))) { - if (__pyx_t_27 >= PyList_GET_SIZE(__pyx_t_10)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_11 = PyList_GET_ITEM(__pyx_t_10, __pyx_t_27); __Pyx_INCREF(__pyx_t_11); __pyx_t_27++; if (unlikely(0 < 0)) __PYX_ERR(0, 260, __pyx_L1_error) - #else - __pyx_t_11 = PySequence_ITEM(__pyx_t_10, __pyx_t_27); __pyx_t_27++; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 260, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - #endif - } else { - if (__pyx_t_27 >= PyTuple_GET_SIZE(__pyx_t_10)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_11 = PyTuple_GET_ITEM(__pyx_t_10, __pyx_t_27); __Pyx_INCREF(__pyx_t_11); __pyx_t_27++; if (unlikely(0 < 0)) __PYX_ERR(0, 260, __pyx_L1_error) - #else - __pyx_t_11 = PySequence_ITEM(__pyx_t_10, __pyx_t_27); __pyx_t_27++; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 260, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - #endif - } - } else { - __pyx_t_11 = __pyx_t_32(__pyx_t_10); - if (unlikely(!__pyx_t_11)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 260, __pyx_L1_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_11); - } - __Pyx_XDECREF_SET(__pyx_v_v, __pyx_t_11); - __pyx_t_11 = 0; - - /* "msgpack/_packer.pyx":261 - * if ret == 0: - * for v in o: - * ret = self._pack(v, nest_limit-1) # <<<<<<<<<<<<<< - * if ret != 0: break - * elif PyMemoryView_Check(o): - */ - __pyx_t_31.__pyx_n = 1; - __pyx_t_31.nest_limit = (__pyx_v_nest_limit - 1); - __pyx_t_16 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *)__pyx_v_self->__pyx_vtab)->_pack(__pyx_v_self, __pyx_v_v, &__pyx_t_31); if (unlikely(__pyx_t_16 == ((int)-1))) __PYX_ERR(0, 261, __pyx_L1_error) - __pyx_v_ret = __pyx_t_16; - - /* "msgpack/_packer.pyx":262 - * for v in o: - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break # <<<<<<<<<<<<<< - * elif PyMemoryView_Check(o): - * if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0: - */ - __pyx_t_12 = ((__pyx_v_ret != 0) != 0); - if (__pyx_t_12) { - goto __pyx_L60_break; - } - - /* "msgpack/_packer.pyx":260 - * ret = msgpack_pack_array(&self.pk, L) - * if ret == 0: - * for v in o: # <<<<<<<<<<<<<< - * ret = 
self._pack(v, nest_limit-1) - * if ret != 0: break - */ - } - __pyx_L60_break:; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - - /* "msgpack/_packer.pyx":259 - * raise ValueError("list is too large") - * ret = msgpack_pack_array(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * for v in o: - * ret = self._pack(v, nest_limit-1) - */ - } - - /* "msgpack/_packer.pyx":254 - * ret = msgpack_pack_ext(&self.pk, longval, L) - * ret = msgpack_pack_raw_body(&self.pk, rawval, L) - * elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)): # <<<<<<<<<<<<<< - * L = len(o) - * if L > ITEM_LIMIT: - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":263 - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break - * elif PyMemoryView_Check(o): # <<<<<<<<<<<<<< - * if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0: - * raise ValueError("could not get buffer for memoryview") - */ - __pyx_t_12 = (PyMemoryView_Check(__pyx_v_o) != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":264 - * if ret != 0: break - * elif PyMemoryView_Check(o): - * if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0: # <<<<<<<<<<<<<< - * raise ValueError("could not get buffer for memoryview") - * L = view.len - */ - __pyx_t_16 = PyObject_GetBuffer(__pyx_v_o, (&__pyx_v_view), PyBUF_SIMPLE); if (unlikely(__pyx_t_16 == ((int)-1))) __PYX_ERR(0, 264, __pyx_L1_error) - __pyx_t_12 = ((__pyx_t_16 != 0) != 0); - if (unlikely(__pyx_t_12)) { - - /* "msgpack/_packer.pyx":265 - * elif PyMemoryView_Check(o): - * if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0: - * raise ValueError("could not get buffer for memoryview") # <<<<<<<<<<<<<< - * L = view.len - * if L > ITEM_LIMIT: - */ - __pyx_t_10 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 265, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_Raise(__pyx_t_10, 0, 0, 0); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __PYX_ERR(0, 265, __pyx_L1_error) - - /* "msgpack/_packer.pyx":264 - * if ret != 0: break - * elif PyMemoryView_Check(o): - * if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0: # <<<<<<<<<<<<<< - * raise ValueError("could not get buffer for memoryview") - * L = view.len - */ - } - - /* "msgpack/_packer.pyx":266 - * if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0: - * raise ValueError("could not get buffer for memoryview") - * L = view.len # <<<<<<<<<<<<<< - * if L > ITEM_LIMIT: - * PyBuffer_Release(&view); - */ - __pyx_t_27 = __pyx_v_view.len; - __pyx_v_L = __pyx_t_27; - - /* "msgpack/_packer.pyx":267 - * raise ValueError("could not get buffer for memoryview") - * L = view.len - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * PyBuffer_Release(&view); - * raise ValueError("memoryview is too large") - */ - __pyx_t_12 = ((__pyx_v_L > __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT) != 0); - if (unlikely(__pyx_t_12)) { - - /* "msgpack/_packer.pyx":268 - * L = view.len - * if L > ITEM_LIMIT: - * PyBuffer_Release(&view); # <<<<<<<<<<<<<< - * raise ValueError("memoryview is too large") - * ret = msgpack_pack_bin(&self.pk, L) - */ - PyBuffer_Release((&__pyx_v_view)); - - /* "msgpack/_packer.pyx":269 - * if L > ITEM_LIMIT: - * PyBuffer_Release(&view); - * raise ValueError("memoryview is too large") # <<<<<<<<<<<<<< - * ret = msgpack_pack_bin(&self.pk, L) - * if ret == 0: - */ - __pyx_t_10 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 269, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_Raise(__pyx_t_10, 0, 0, 0); - 
__Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __PYX_ERR(0, 269, __pyx_L1_error) - - /* "msgpack/_packer.pyx":267 - * raise ValueError("could not get buffer for memoryview") - * L = view.len - * if L > ITEM_LIMIT: # <<<<<<<<<<<<<< - * PyBuffer_Release(&view); - * raise ValueError("memoryview is too large") - */ - } - - /* "msgpack/_packer.pyx":270 - * PyBuffer_Release(&view); - * raise ValueError("memoryview is too large") - * ret = msgpack_pack_bin(&self.pk, L) # <<<<<<<<<<<<<< - * if ret == 0: - * ret = msgpack_pack_raw_body(&self.pk, view.buf, L) - */ - __pyx_v_ret = msgpack_pack_bin((&__pyx_v_self->pk), __pyx_v_L); - - /* "msgpack/_packer.pyx":271 - * raise ValueError("memoryview is too large") - * ret = msgpack_pack_bin(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * ret = msgpack_pack_raw_body(&self.pk, view.buf, L) - * PyBuffer_Release(&view); - */ - __pyx_t_12 = ((__pyx_v_ret == 0) != 0); - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":272 - * ret = msgpack_pack_bin(&self.pk, L) - * if ret == 0: - * ret = msgpack_pack_raw_body(&self.pk, view.buf, L) # <<<<<<<<<<<<<< - * PyBuffer_Release(&view); - * elif not default_used and self._default: - */ - __pyx_v_ret = msgpack_pack_raw_body((&__pyx_v_self->pk), ((char *)__pyx_v_view.buf), __pyx_v_L); - - /* "msgpack/_packer.pyx":271 - * raise ValueError("memoryview is too large") - * ret = msgpack_pack_bin(&self.pk, L) - * if ret == 0: # <<<<<<<<<<<<<< - * ret = msgpack_pack_raw_body(&self.pk, view.buf, L) - * PyBuffer_Release(&view); - */ - } - - /* "msgpack/_packer.pyx":273 - * if ret == 0: - * ret = msgpack_pack_raw_body(&self.pk, view.buf, L) - * PyBuffer_Release(&view); # <<<<<<<<<<<<<< - * elif not default_used and self._default: - * o = self._default(o) - */ - PyBuffer_Release((&__pyx_v_view)); - - /* "msgpack/_packer.pyx":263 - * ret = self._pack(v, nest_limit-1) - * if ret != 0: break - * elif PyMemoryView_Check(o): # <<<<<<<<<<<<<< - * if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0: - * raise ValueError("could not get buffer for memoryview") - */ - goto __pyx_L6; - } - - /* "msgpack/_packer.pyx":274 - * ret = msgpack_pack_raw_body(&self.pk, view.buf, L) - * PyBuffer_Release(&view); - * elif not default_used and self._default: # <<<<<<<<<<<<<< - * o = self._default(o) - * default_used = 1 - */ - __pyx_t_3 = ((!(__pyx_v_default_used != 0)) != 0); - if (__pyx_t_3) { - } else { - __pyx_t_12 = __pyx_t_3; - goto __pyx_L65_bool_binop_done; - } - __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_self->_default); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 274, __pyx_L1_error) - __pyx_t_12 = __pyx_t_3; - __pyx_L65_bool_binop_done:; - if (__pyx_t_12) { - - /* "msgpack/_packer.pyx":275 - * PyBuffer_Release(&view); - * elif not default_used and self._default: - * o = self._default(o) # <<<<<<<<<<<<<< - * default_used = 1 - * continue - */ - __Pyx_INCREF(__pyx_v_self->_default); - __pyx_t_11 = __pyx_v_self->_default; __pyx_t_2 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_11))) { - __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_11); - if (likely(__pyx_t_2)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_11); - __Pyx_INCREF(__pyx_t_2); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_11, function); - } - } - __pyx_t_10 = (__pyx_t_2) ? 
__Pyx_PyObject_Call2Args(__pyx_t_11, __pyx_t_2, __pyx_v_o) : __Pyx_PyObject_CallOneArg(__pyx_t_11, __pyx_v_o); - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 275, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __Pyx_DECREF_SET(__pyx_v_o, __pyx_t_10); - __pyx_t_10 = 0; - - /* "msgpack/_packer.pyx":276 - * elif not default_used and self._default: - * o = self._default(o) - * default_used = 1 # <<<<<<<<<<<<<< - * continue - * else: - */ - __pyx_v_default_used = 1; - - /* "msgpack/_packer.pyx":277 - * o = self._default(o) - * default_used = 1 - * continue # <<<<<<<<<<<<<< - * else: - * PyErr_Format(TypeError, b"can not serialize '%.200s' object", Py_TYPE(o).tp_name) - */ - goto __pyx_L4_continue; - - /* "msgpack/_packer.pyx":274 - * ret = msgpack_pack_raw_body(&self.pk, view.buf, L) - * PyBuffer_Release(&view); - * elif not default_used and self._default: # <<<<<<<<<<<<<< - * o = self._default(o) - * default_used = 1 - */ - } - - /* "msgpack/_packer.pyx":279 - * continue - * else: - * PyErr_Format(TypeError, b"can not serialize '%.200s' object", Py_TYPE(o).tp_name) # <<<<<<<<<<<<<< - * return ret - * - */ - /*else*/ { - __pyx_t_28 = PyErr_Format(__pyx_builtin_TypeError, ((char *)"can not serialize '%.200s' object"), Py_TYPE(__pyx_v_o)->tp_name); if (unlikely(__pyx_t_28 == ((PyObject *)NULL))) __PYX_ERR(0, 279, __pyx_L1_error) - } - __pyx_L6:; - - /* "msgpack/_packer.pyx":280 - * else: - * PyErr_Format(TypeError, b"can not serialize '%.200s' object", Py_TYPE(o).tp_name) - * return ret # <<<<<<<<<<<<<< - * - * cpdef pack(self, object obj): - */ - __pyx_r = __pyx_v_ret; - goto __pyx_L0; - __pyx_L4_continue:; - } - - /* "msgpack/_packer.pyx":148 - * self.pk.buf = NULL - * - * cdef int _pack(self, object o, int nest_limit=DEFAULT_RECURSE_LIMIT) except -1: # <<<<<<<<<<<<<< - * cdef long long llval - * cdef unsigned long long ullval - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_XDECREF(__pyx_t_11); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_14); - __Pyx_XDECREF(__pyx_t_15); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer._pack", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_d); - __Pyx_XDECREF(__pyx_v_oe); - __Pyx_XDECREF(__pyx_v_k); - __Pyx_XDECREF(__pyx_v_v); - __Pyx_XDECREF(__pyx_v_o); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":282 - * return ret - * - * cpdef pack(self, object obj): # <<<<<<<<<<<<<< - * cdef int ret - * try: - */ - -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_7pack(PyObject *__pyx_v_self, PyObject *__pyx_v_obj); /*proto*/ -static PyObject *__pyx_f_7msgpack_9_cmsgpack_6Packer_pack(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_obj, int __pyx_skip_dispatch) { - int __pyx_v_ret; - PyObject *__pyx_v_buf = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - int __pyx_t_8; - struct __pyx_opt_args_7msgpack_9_cmsgpack_6Packer__pack __pyx_t_9; - int __pyx_t_10; - __Pyx_RefNannySetupContext("pack", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject 
*)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_pack); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 282, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_7msgpack_9_cmsgpack_6Packer_7pack)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_obj) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_obj); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 282, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "msgpack/_packer.pyx":284 - * cpdef pack(self, object obj): - * cdef int ret - * try: # <<<<<<<<<<<<<< - * ret = self._pack(obj, DEFAULT_RECURSE_LIMIT) - * except: - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7); - __Pyx_XGOTREF(__pyx_t_5); - __Pyx_XGOTREF(__pyx_t_6); - __Pyx_XGOTREF(__pyx_t_7); - /*try:*/ { - - /* "msgpack/_packer.pyx":285 - * cdef int ret - * try: - * ret = self._pack(obj, DEFAULT_RECURSE_LIMIT) # <<<<<<<<<<<<<< - * except: - * self.pk.length = 0 - */ - __pyx_t_9.__pyx_n = 1; - __pyx_t_9.nest_limit = __pyx_v_7msgpack_9_cmsgpack_DEFAULT_RECURSE_LIMIT; - __pyx_t_8 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *)__pyx_v_self->__pyx_vtab)->_pack(__pyx_v_self, __pyx_v_obj, &__pyx_t_9); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 285, __pyx_L3_error) - __pyx_v_ret = __pyx_t_8; - - /* "msgpack/_packer.pyx":284 - * cpdef pack(self, object obj): - * cdef int ret - * try: # <<<<<<<<<<<<<< - * ret = self._pack(obj, DEFAULT_RECURSE_LIMIT) - * except: - */ - } - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - goto __pyx_L8_try_end; - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 
0; - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - - /* "msgpack/_packer.pyx":286 - * try: - * ret = self._pack(obj, DEFAULT_RECURSE_LIMIT) - * except: # <<<<<<<<<<<<<< - * self.pk.length = 0 - * raise - */ - /*except:*/ { - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3) < 0) __PYX_ERR(0, 286, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_t_3); - - /* "msgpack/_packer.pyx":287 - * ret = self._pack(obj, DEFAULT_RECURSE_LIMIT) - * except: - * self.pk.length = 0 # <<<<<<<<<<<<<< - * raise - * if ret: # should not happen. - */ - __pyx_v_self->pk.length = 0; - - /* "msgpack/_packer.pyx":288 - * except: - * self.pk.length = 0 - * raise # <<<<<<<<<<<<<< - * if ret: # should not happen. - * raise RuntimeError("internal error") - */ - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ErrRestoreWithState(__pyx_t_1, __pyx_t_2, __pyx_t_3); - __pyx_t_1 = 0; __pyx_t_2 = 0; __pyx_t_3 = 0; - __PYX_ERR(0, 288, __pyx_L5_except_error) - } - __pyx_L5_except_error:; - - /* "msgpack/_packer.pyx":284 - * cpdef pack(self, object obj): - * cdef int ret - * try: # <<<<<<<<<<<<<< - * ret = self._pack(obj, DEFAULT_RECURSE_LIMIT) - * except: - */ - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_XGIVEREF(__pyx_t_7); - __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); - goto __pyx_L1_error; - __pyx_L8_try_end:; - } - - /* "msgpack/_packer.pyx":289 - * self.pk.length = 0 - * raise - * if ret: # should not happen. # <<<<<<<<<<<<<< - * raise RuntimeError("internal error") - * if self.autoreset: - */ - __pyx_t_10 = (__pyx_v_ret != 0); - if (unlikely(__pyx_t_10)) { - - /* "msgpack/_packer.pyx":290 - * raise - * if ret: # should not happen. - * raise RuntimeError("internal error") # <<<<<<<<<<<<<< - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 290, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(0, 290, __pyx_L1_error) - - /* "msgpack/_packer.pyx":289 - * self.pk.length = 0 - * raise - * if ret: # should not happen. # <<<<<<<<<<<<<< - * raise RuntimeError("internal error") - * if self.autoreset: - */ - } - - /* "msgpack/_packer.pyx":291 - * if ret: # should not happen. 
- * raise RuntimeError("internal error") - * if self.autoreset: # <<<<<<<<<<<<<< - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - */ - __pyx_t_10 = (__pyx_v_self->autoreset != 0); - if (__pyx_t_10) { - - /* "msgpack/_packer.pyx":292 - * raise RuntimeError("internal error") - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) # <<<<<<<<<<<<<< - * self.pk.length = 0 - * return buf - */ - __pyx_t_3 = PyBytes_FromStringAndSize(__pyx_v_self->pk.buf, __pyx_v_self->pk.length); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 292, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_v_buf = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - - /* "msgpack/_packer.pyx":293 - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 # <<<<<<<<<<<<<< - * return buf - * - */ - __pyx_v_self->pk.length = 0; - - /* "msgpack/_packer.pyx":294 - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - * return buf # <<<<<<<<<<<<<< - * - * def pack_ext_type(self, typecode, data): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_buf); - __pyx_r = __pyx_v_buf; - goto __pyx_L0; - - /* "msgpack/_packer.pyx":291 - * if ret: # should not happen. - * raise RuntimeError("internal error") - * if self.autoreset: # <<<<<<<<<<<<<< - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - */ - } - - /* "msgpack/_packer.pyx":282 - * return ret - * - * cpdef pack(self, object obj): # <<<<<<<<<<<<<< - * cdef int ret - * try: - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_buf); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_7pack(PyObject *__pyx_v_self, PyObject *__pyx_v_obj); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_6pack[] = "Packer.pack(self, obj)"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_7pack(PyObject *__pyx_v_self, PyObject *__pyx_v_obj) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pack (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_6pack(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self), ((PyObject *)__pyx_v_obj)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_6pack(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_obj) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("pack", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_7msgpack_9_cmsgpack_6Packer_pack(__pyx_v_self, __pyx_v_obj, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 282, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* 
"msgpack/_packer.pyx":296 - * return buf - * - * def pack_ext_type(self, typecode, data): # <<<<<<<<<<<<<< - * msgpack_pack_ext(&self.pk, typecode, len(data)) - * msgpack_pack_raw_body(&self.pk, data, len(data)) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_9pack_ext_type(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_8pack_ext_type[] = "Packer.pack_ext_type(self, typecode, data)"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_9pack_ext_type(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_typecode = 0; - PyObject *__pyx_v_data = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pack_ext_type (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_typecode,&__pyx_n_s_data,0}; - PyObject* values[2] = {0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_typecode)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("pack_ext_type", 1, 2, 2, 1); __PYX_ERR(0, 296, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "pack_ext_type") < 0)) __PYX_ERR(0, 296, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - } - __pyx_v_typecode = values[0]; - __pyx_v_data = values[1]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("pack_ext_type", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 296, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack_ext_type", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_8pack_ext_type(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self), __pyx_v_typecode, __pyx_v_data); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_8pack_ext_type(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_typecode, PyObject *__pyx_v_data) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - char __pyx_t_1; - Py_ssize_t __pyx_t_2; - char *__pyx_t_3; - __Pyx_RefNannySetupContext("pack_ext_type", 0); - - /* "msgpack/_packer.pyx":297 - * - * def pack_ext_type(self, typecode, data): - * msgpack_pack_ext(&self.pk, typecode, len(data)) # <<<<<<<<<<<<<< - * msgpack_pack_raw_body(&self.pk, data, len(data)) - * - */ - __pyx_t_1 = __Pyx_PyInt_As_char(__pyx_v_typecode); if (unlikely((__pyx_t_1 == (char)-1) && PyErr_Occurred())) __PYX_ERR(0, 297, __pyx_L1_error) - __pyx_t_2 = 
PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 297, __pyx_L1_error) - (void)(msgpack_pack_ext((&__pyx_v_self->pk), __pyx_t_1, __pyx_t_2)); - - /* "msgpack/_packer.pyx":298 - * def pack_ext_type(self, typecode, data): - * msgpack_pack_ext(&self.pk, typecode, len(data)) - * msgpack_pack_raw_body(&self.pk, data, len(data)) # <<<<<<<<<<<<<< - * - * def pack_array_header(self, long long size): - */ - __pyx_t_3 = __Pyx_PyObject_AsWritableString(__pyx_v_data); if (unlikely((!__pyx_t_3) && PyErr_Occurred())) __PYX_ERR(0, 298, __pyx_L1_error) - __pyx_t_2 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 298, __pyx_L1_error) - (void)(msgpack_pack_raw_body((&__pyx_v_self->pk), __pyx_t_3, __pyx_t_2)); - - /* "msgpack/_packer.pyx":296 - * return buf - * - * def pack_ext_type(self, typecode, data): # <<<<<<<<<<<<<< - * msgpack_pack_ext(&self.pk, typecode, len(data)) - * msgpack_pack_raw_body(&self.pk, data, len(data)) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack_ext_type", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":300 - * msgpack_pack_raw_body(&self.pk, data, len(data)) - * - * def pack_array_header(self, long long size): # <<<<<<<<<<<<<< - * if size > ITEM_LIMIT: - * raise ValueError - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_11pack_array_header(PyObject *__pyx_v_self, PyObject *__pyx_arg_size); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_10pack_array_header[] = "Packer.pack_array_header(self, long long size)"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_11pack_array_header(PyObject *__pyx_v_self, PyObject *__pyx_arg_size) { - PY_LONG_LONG __pyx_v_size; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pack_array_header (wrapper)", 0); - assert(__pyx_arg_size); { - __pyx_v_size = __Pyx_PyInt_As_PY_LONG_LONG(__pyx_arg_size); if (unlikely((__pyx_v_size == (PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 300, __pyx_L3_error) - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack_array_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_10pack_array_header(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self), ((PY_LONG_LONG)__pyx_v_size)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_10pack_array_header(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PY_LONG_LONG __pyx_v_size) { - int __pyx_v_ret; - PyObject *__pyx_v_buf = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - __Pyx_RefNannySetupContext("pack_array_header", 0); - - /* "msgpack/_packer.pyx":301 - * - * def pack_array_header(self, long long size): - * if size > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError - * cdef int ret = msgpack_pack_array(&self.pk, size) - */ - __pyx_t_1 = ((__pyx_v_size > __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_packer.pyx":302 - * def 
pack_array_header(self, long long size): - * if size > ITEM_LIMIT: - * raise ValueError # <<<<<<<<<<<<<< - * cdef int ret = msgpack_pack_array(&self.pk, size) - * if ret == -1: - */ - __Pyx_Raise(__pyx_builtin_ValueError, 0, 0, 0); - __PYX_ERR(0, 302, __pyx_L1_error) - - /* "msgpack/_packer.pyx":301 - * - * def pack_array_header(self, long long size): - * if size > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError - * cdef int ret = msgpack_pack_array(&self.pk, size) - */ - } - - /* "msgpack/_packer.pyx":303 - * if size > ITEM_LIMIT: - * raise ValueError - * cdef int ret = msgpack_pack_array(&self.pk, size) # <<<<<<<<<<<<<< - * if ret == -1: - * raise MemoryError - */ - __pyx_v_ret = msgpack_pack_array((&__pyx_v_self->pk), __pyx_v_size); - - /* "msgpack/_packer.pyx":304 - * raise ValueError - * cdef int ret = msgpack_pack_array(&self.pk, size) - * if ret == -1: # <<<<<<<<<<<<<< - * raise MemoryError - * elif ret: # should not happen - */ - __pyx_t_1 = ((__pyx_v_ret == -1L) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_packer.pyx":305 - * cdef int ret = msgpack_pack_array(&self.pk, size) - * if ret == -1: - * raise MemoryError # <<<<<<<<<<<<<< - * elif ret: # should not happen - * raise TypeError - */ - PyErr_NoMemory(); __PYX_ERR(0, 305, __pyx_L1_error) - - /* "msgpack/_packer.pyx":304 - * raise ValueError - * cdef int ret = msgpack_pack_array(&self.pk, size) - * if ret == -1: # <<<<<<<<<<<<<< - * raise MemoryError - * elif ret: # should not happen - */ - } - - /* "msgpack/_packer.pyx":306 - * if ret == -1: - * raise MemoryError - * elif ret: # should not happen # <<<<<<<<<<<<<< - * raise TypeError - * if self.autoreset: - */ - __pyx_t_1 = (__pyx_v_ret != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_packer.pyx":307 - * raise MemoryError - * elif ret: # should not happen - * raise TypeError # <<<<<<<<<<<<<< - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - */ - __Pyx_Raise(__pyx_builtin_TypeError, 0, 0, 0); - __PYX_ERR(0, 307, __pyx_L1_error) - - /* "msgpack/_packer.pyx":306 - * if ret == -1: - * raise MemoryError - * elif ret: # should not happen # <<<<<<<<<<<<<< - * raise TypeError - * if self.autoreset: - */ - } - - /* "msgpack/_packer.pyx":308 - * elif ret: # should not happen - * raise TypeError - * if self.autoreset: # <<<<<<<<<<<<<< - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - */ - __pyx_t_1 = (__pyx_v_self->autoreset != 0); - if (__pyx_t_1) { - - /* "msgpack/_packer.pyx":309 - * raise TypeError - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) # <<<<<<<<<<<<<< - * self.pk.length = 0 - * return buf - */ - __pyx_t_2 = PyBytes_FromStringAndSize(__pyx_v_self->pk.buf, __pyx_v_self->pk.length); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 309, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_v_buf = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "msgpack/_packer.pyx":310 - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 # <<<<<<<<<<<<<< - * return buf - * - */ - __pyx_v_self->pk.length = 0; - - /* "msgpack/_packer.pyx":311 - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - * return buf # <<<<<<<<<<<<<< - * - * def pack_map_header(self, long long size): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_buf); - __pyx_r = __pyx_v_buf; - goto __pyx_L0; - - /* "msgpack/_packer.pyx":308 - * elif ret: # should not happen - * raise TypeError - * if self.autoreset: 
# <<<<<<<<<<<<<< - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - */ - } - - /* "msgpack/_packer.pyx":300 - * msgpack_pack_raw_body(&self.pk, data, len(data)) - * - * def pack_array_header(self, long long size): # <<<<<<<<<<<<<< - * if size > ITEM_LIMIT: - * raise ValueError - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack_array_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_buf); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":313 - * return buf - * - * def pack_map_header(self, long long size): # <<<<<<<<<<<<<< - * if size > ITEM_LIMIT: - * raise ValueError - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_13pack_map_header(PyObject *__pyx_v_self, PyObject *__pyx_arg_size); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_12pack_map_header[] = "Packer.pack_map_header(self, long long size)"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_13pack_map_header(PyObject *__pyx_v_self, PyObject *__pyx_arg_size) { - PY_LONG_LONG __pyx_v_size; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pack_map_header (wrapper)", 0); - assert(__pyx_arg_size); { - __pyx_v_size = __Pyx_PyInt_As_PY_LONG_LONG(__pyx_arg_size); if (unlikely((__pyx_v_size == (PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 313, __pyx_L3_error) - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack_map_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_12pack_map_header(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self), ((PY_LONG_LONG)__pyx_v_size)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_12pack_map_header(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PY_LONG_LONG __pyx_v_size) { - int __pyx_v_ret; - PyObject *__pyx_v_buf = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - __Pyx_RefNannySetupContext("pack_map_header", 0); - - /* "msgpack/_packer.pyx":314 - * - * def pack_map_header(self, long long size): - * if size > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError - * cdef int ret = msgpack_pack_map(&self.pk, size) - */ - __pyx_t_1 = ((__pyx_v_size > __pyx_v_7msgpack_9_cmsgpack_ITEM_LIMIT) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_packer.pyx":315 - * def pack_map_header(self, long long size): - * if size > ITEM_LIMIT: - * raise ValueError # <<<<<<<<<<<<<< - * cdef int ret = msgpack_pack_map(&self.pk, size) - * if ret == -1: - */ - __Pyx_Raise(__pyx_builtin_ValueError, 0, 0, 0); - __PYX_ERR(0, 315, __pyx_L1_error) - - /* "msgpack/_packer.pyx":314 - * - * def pack_map_header(self, long long size): - * if size > ITEM_LIMIT: # <<<<<<<<<<<<<< - * raise ValueError - * cdef int ret = msgpack_pack_map(&self.pk, size) - */ - } - - /* "msgpack/_packer.pyx":316 - * if size > ITEM_LIMIT: - * raise ValueError - * cdef int ret = msgpack_pack_map(&self.pk, size) # <<<<<<<<<<<<<< - * if ret == -1: - * raise MemoryError - */ - __pyx_v_ret = 
msgpack_pack_map((&__pyx_v_self->pk), __pyx_v_size); - - /* "msgpack/_packer.pyx":317 - * raise ValueError - * cdef int ret = msgpack_pack_map(&self.pk, size) - * if ret == -1: # <<<<<<<<<<<<<< - * raise MemoryError - * elif ret: # should not happen - */ - __pyx_t_1 = ((__pyx_v_ret == -1L) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_packer.pyx":318 - * cdef int ret = msgpack_pack_map(&self.pk, size) - * if ret == -1: - * raise MemoryError # <<<<<<<<<<<<<< - * elif ret: # should not happen - * raise TypeError - */ - PyErr_NoMemory(); __PYX_ERR(0, 318, __pyx_L1_error) - - /* "msgpack/_packer.pyx":317 - * raise ValueError - * cdef int ret = msgpack_pack_map(&self.pk, size) - * if ret == -1: # <<<<<<<<<<<<<< - * raise MemoryError - * elif ret: # should not happen - */ - } - - /* "msgpack/_packer.pyx":319 - * if ret == -1: - * raise MemoryError - * elif ret: # should not happen # <<<<<<<<<<<<<< - * raise TypeError - * if self.autoreset: - */ - __pyx_t_1 = (__pyx_v_ret != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_packer.pyx":320 - * raise MemoryError - * elif ret: # should not happen - * raise TypeError # <<<<<<<<<<<<<< - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - */ - __Pyx_Raise(__pyx_builtin_TypeError, 0, 0, 0); - __PYX_ERR(0, 320, __pyx_L1_error) - - /* "msgpack/_packer.pyx":319 - * if ret == -1: - * raise MemoryError - * elif ret: # should not happen # <<<<<<<<<<<<<< - * raise TypeError - * if self.autoreset: - */ - } - - /* "msgpack/_packer.pyx":321 - * elif ret: # should not happen - * raise TypeError - * if self.autoreset: # <<<<<<<<<<<<<< - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - */ - __pyx_t_1 = (__pyx_v_self->autoreset != 0); - if (__pyx_t_1) { - - /* "msgpack/_packer.pyx":322 - * raise TypeError - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) # <<<<<<<<<<<<<< - * self.pk.length = 0 - * return buf - */ - __pyx_t_2 = PyBytes_FromStringAndSize(__pyx_v_self->pk.buf, __pyx_v_self->pk.length); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 322, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_v_buf = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "msgpack/_packer.pyx":323 - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 # <<<<<<<<<<<<<< - * return buf - * - */ - __pyx_v_self->pk.length = 0; - - /* "msgpack/_packer.pyx":324 - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - * return buf # <<<<<<<<<<<<<< - * - * def pack_map_pairs(self, object pairs): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_buf); - __pyx_r = __pyx_v_buf; - goto __pyx_L0; - - /* "msgpack/_packer.pyx":321 - * elif ret: # should not happen - * raise TypeError - * if self.autoreset: # <<<<<<<<<<<<<< - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - */ - } - - /* "msgpack/_packer.pyx":313 - * return buf - * - * def pack_map_header(self, long long size): # <<<<<<<<<<<<<< - * if size > ITEM_LIMIT: - * raise ValueError - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack_map_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_buf); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":326 
- * return buf - * - * def pack_map_pairs(self, object pairs): # <<<<<<<<<<<<<< - * """ - * Pack *pairs* as msgpack map type. - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_15pack_map_pairs(PyObject *__pyx_v_self, PyObject *__pyx_v_pairs); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_14pack_map_pairs[] = "Packer.pack_map_pairs(self, pairs)\n\n Pack *pairs* as msgpack map type.\n\n *pairs* should be a sequence of pairs.\n (`len(pairs)` and `for k, v in pairs:` should be supported.)\n "; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_15pack_map_pairs(PyObject *__pyx_v_self, PyObject *__pyx_v_pairs) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pack_map_pairs (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_14pack_map_pairs(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self), ((PyObject *)__pyx_v_pairs)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_14pack_map_pairs(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, PyObject *__pyx_v_pairs) { - int __pyx_v_ret; - PyObject *__pyx_v_k = NULL; - PyObject *__pyx_v_v = NULL; - PyObject *__pyx_v_buf = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - Py_ssize_t __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *(*__pyx_t_4)(PyObject *); - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *(*__pyx_t_9)(PyObject *); - int __pyx_t_10; - __Pyx_RefNannySetupContext("pack_map_pairs", 0); - - /* "msgpack/_packer.pyx":333 - * (`len(pairs)` and `for k, v in pairs:` should be supported.) 
- * """ - * cdef int ret = msgpack_pack_map(&self.pk, len(pairs)) # <<<<<<<<<<<<<< - * if ret == 0: - * for k, v in pairs: - */ - __pyx_t_1 = PyObject_Length(__pyx_v_pairs); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 333, __pyx_L1_error) - __pyx_v_ret = msgpack_pack_map((&__pyx_v_self->pk), __pyx_t_1); - - /* "msgpack/_packer.pyx":334 - * """ - * cdef int ret = msgpack_pack_map(&self.pk, len(pairs)) - * if ret == 0: # <<<<<<<<<<<<<< - * for k, v in pairs: - * ret = self._pack(k) - */ - __pyx_t_2 = ((__pyx_v_ret == 0) != 0); - if (__pyx_t_2) { - - /* "msgpack/_packer.pyx":335 - * cdef int ret = msgpack_pack_map(&self.pk, len(pairs)) - * if ret == 0: - * for k, v in pairs: # <<<<<<<<<<<<<< - * ret = self._pack(k) - * if ret != 0: break - */ - if (likely(PyList_CheckExact(__pyx_v_pairs)) || PyTuple_CheckExact(__pyx_v_pairs)) { - __pyx_t_3 = __pyx_v_pairs; __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0; - __pyx_t_4 = NULL; - } else { - __pyx_t_1 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_pairs); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 335, __pyx_L1_error) - } - for (;;) { - if (likely(!__pyx_t_4)) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_3)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_5 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_1); __Pyx_INCREF(__pyx_t_5); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 335, __pyx_L1_error) - #else - __pyx_t_5 = PySequence_ITEM(__pyx_t_3, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - #endif - } else { - if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_3)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_1); __Pyx_INCREF(__pyx_t_5); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 335, __pyx_L1_error) - #else - __pyx_t_5 = PySequence_ITEM(__pyx_t_3, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - #endif - } - } else { - __pyx_t_5 = __pyx_t_4(__pyx_t_3); - if (unlikely(!__pyx_t_5)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 335, __pyx_L1_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_5); - } - if ((likely(PyTuple_CheckExact(__pyx_t_5))) || (PyList_CheckExact(__pyx_t_5))) { - PyObject* sequence = __pyx_t_5; - Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); - if (unlikely(size != 2)) { - if (size > 2) __Pyx_RaiseTooManyValuesError(2); - else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); - __PYX_ERR(0, 335, __pyx_L1_error) - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - if (likely(PyTuple_CheckExact(sequence))) { - __pyx_t_6 = PyTuple_GET_ITEM(sequence, 0); - __pyx_t_7 = PyTuple_GET_ITEM(sequence, 1); - } else { - __pyx_t_6 = PyList_GET_ITEM(sequence, 0); - __pyx_t_7 = PyList_GET_ITEM(sequence, 1); - } - __Pyx_INCREF(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - #else - __pyx_t_6 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - #endif - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - } else { - Py_ssize_t 
index = -1; - __pyx_t_8 = PyObject_GetIter(__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_9 = Py_TYPE(__pyx_t_8)->tp_iternext; - index = 0; __pyx_t_6 = __pyx_t_9(__pyx_t_8); if (unlikely(!__pyx_t_6)) goto __pyx_L6_unpacking_failed; - __Pyx_GOTREF(__pyx_t_6); - index = 1; __pyx_t_7 = __pyx_t_9(__pyx_t_8); if (unlikely(!__pyx_t_7)) goto __pyx_L6_unpacking_failed; - __Pyx_GOTREF(__pyx_t_7); - if (__Pyx_IternextUnpackEndCheck(__pyx_t_9(__pyx_t_8), 2) < 0) __PYX_ERR(0, 335, __pyx_L1_error) - __pyx_t_9 = NULL; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - goto __pyx_L7_unpacking_done; - __pyx_L6_unpacking_failed:; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_9 = NULL; - if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); - __PYX_ERR(0, 335, __pyx_L1_error) - __pyx_L7_unpacking_done:; - } - __Pyx_XDECREF_SET(__pyx_v_k, __pyx_t_6); - __pyx_t_6 = 0; - __Pyx_XDECREF_SET(__pyx_v_v, __pyx_t_7); - __pyx_t_7 = 0; - - /* "msgpack/_packer.pyx":336 - * if ret == 0: - * for k, v in pairs: - * ret = self._pack(k) # <<<<<<<<<<<<<< - * if ret != 0: break - * ret = self._pack(v) - */ - __pyx_t_10 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *)__pyx_v_self->__pyx_vtab)->_pack(__pyx_v_self, __pyx_v_k, NULL); if (unlikely(__pyx_t_10 == ((int)-1))) __PYX_ERR(0, 336, __pyx_L1_error) - __pyx_v_ret = __pyx_t_10; - - /* "msgpack/_packer.pyx":337 - * for k, v in pairs: - * ret = self._pack(k) - * if ret != 0: break # <<<<<<<<<<<<<< - * ret = self._pack(v) - * if ret != 0: break - */ - __pyx_t_2 = ((__pyx_v_ret != 0) != 0); - if (__pyx_t_2) { - goto __pyx_L5_break; - } - - /* "msgpack/_packer.pyx":338 - * ret = self._pack(k) - * if ret != 0: break - * ret = self._pack(v) # <<<<<<<<<<<<<< - * if ret != 0: break - * if ret == -1: - */ - __pyx_t_10 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer *)__pyx_v_self->__pyx_vtab)->_pack(__pyx_v_self, __pyx_v_v, NULL); if (unlikely(__pyx_t_10 == ((int)-1))) __PYX_ERR(0, 338, __pyx_L1_error) - __pyx_v_ret = __pyx_t_10; - - /* "msgpack/_packer.pyx":339 - * if ret != 0: break - * ret = self._pack(v) - * if ret != 0: break # <<<<<<<<<<<<<< - * if ret == -1: - * raise MemoryError - */ - __pyx_t_2 = ((__pyx_v_ret != 0) != 0); - if (__pyx_t_2) { - goto __pyx_L5_break; - } - - /* "msgpack/_packer.pyx":335 - * cdef int ret = msgpack_pack_map(&self.pk, len(pairs)) - * if ret == 0: - * for k, v in pairs: # <<<<<<<<<<<<<< - * ret = self._pack(k) - * if ret != 0: break - */ - } - __pyx_L5_break:; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "msgpack/_packer.pyx":334 - * """ - * cdef int ret = msgpack_pack_map(&self.pk, len(pairs)) - * if ret == 0: # <<<<<<<<<<<<<< - * for k, v in pairs: - * ret = self._pack(k) - */ - } - - /* "msgpack/_packer.pyx":340 - * ret = self._pack(v) - * if ret != 0: break - * if ret == -1: # <<<<<<<<<<<<<< - * raise MemoryError - * elif ret: # should not happen - */ - __pyx_t_2 = ((__pyx_v_ret == -1L) != 0); - if (unlikely(__pyx_t_2)) { - - /* "msgpack/_packer.pyx":341 - * if ret != 0: break - * if ret == -1: - * raise MemoryError # <<<<<<<<<<<<<< - * elif ret: # should not happen - * raise TypeError - */ - PyErr_NoMemory(); __PYX_ERR(0, 341, __pyx_L1_error) - - /* "msgpack/_packer.pyx":340 - * ret = self._pack(v) - * if ret != 0: break - * if ret == -1: # <<<<<<<<<<<<<< - * raise MemoryError - * elif ret: # should not happen - */ - } - - /* "msgpack/_packer.pyx":342 - * if ret == -1: - * raise MemoryError - * elif ret: # 
should not happen # <<<<<<<<<<<<<< - * raise TypeError - * if self.autoreset: - */ - __pyx_t_2 = (__pyx_v_ret != 0); - if (unlikely(__pyx_t_2)) { - - /* "msgpack/_packer.pyx":343 - * raise MemoryError - * elif ret: # should not happen - * raise TypeError # <<<<<<<<<<<<<< - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - */ - __Pyx_Raise(__pyx_builtin_TypeError, 0, 0, 0); - __PYX_ERR(0, 343, __pyx_L1_error) - - /* "msgpack/_packer.pyx":342 - * if ret == -1: - * raise MemoryError - * elif ret: # should not happen # <<<<<<<<<<<<<< - * raise TypeError - * if self.autoreset: - */ - } - - /* "msgpack/_packer.pyx":344 - * elif ret: # should not happen - * raise TypeError - * if self.autoreset: # <<<<<<<<<<<<<< - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - */ - __pyx_t_2 = (__pyx_v_self->autoreset != 0); - if (__pyx_t_2) { - - /* "msgpack/_packer.pyx":345 - * raise TypeError - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) # <<<<<<<<<<<<<< - * self.pk.length = 0 - * return buf - */ - __pyx_t_3 = PyBytes_FromStringAndSize(__pyx_v_self->pk.buf, __pyx_v_self->pk.length); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 345, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_v_buf = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - - /* "msgpack/_packer.pyx":346 - * if self.autoreset: - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 # <<<<<<<<<<<<<< - * return buf - * - */ - __pyx_v_self->pk.length = 0; - - /* "msgpack/_packer.pyx":347 - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - * return buf # <<<<<<<<<<<<<< - * - * def reset(self): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_buf); - __pyx_r = __pyx_v_buf; - goto __pyx_L0; - - /* "msgpack/_packer.pyx":344 - * elif ret: # should not happen - * raise TypeError - * if self.autoreset: # <<<<<<<<<<<<<< - * buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * self.pk.length = 0 - */ - } - - /* "msgpack/_packer.pyx":326 - * return buf - * - * def pack_map_pairs(self, object pairs): # <<<<<<<<<<<<<< - * """ - * Pack *pairs* as msgpack map type. - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.pack_map_pairs", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_k); - __Pyx_XDECREF(__pyx_v_v); - __Pyx_XDECREF(__pyx_v_buf); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":349 - * return buf - * - * def reset(self): # <<<<<<<<<<<<<< - * """Reset internal buffer. 
- * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_17reset(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_16reset[] = "Packer.reset(self)\nReset internal buffer.\n\n This method is usaful only when autoreset=False.\n "; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_17reset(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("reset (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_16reset(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_16reset(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("reset", 0); - - /* "msgpack/_packer.pyx":354 - * This method is usaful only when autoreset=False. - * """ - * self.pk.length = 0 # <<<<<<<<<<<<<< - * - * def bytes(self): - */ - __pyx_v_self->pk.length = 0; - - /* "msgpack/_packer.pyx":349 - * return buf - * - * def reset(self): # <<<<<<<<<<<<<< - * """Reset internal buffer. - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":356 - * self.pk.length = 0 - * - * def bytes(self): # <<<<<<<<<<<<<< - * """Return internal buffer contents as bytes object""" - * return PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_19bytes(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_18bytes[] = "Packer.bytes(self)\nReturn internal buffer contents as bytes object"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_19bytes(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("bytes (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_18bytes(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_18bytes(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("bytes", 0); - - /* "msgpack/_packer.pyx":358 - * def bytes(self): - * """Return internal buffer contents as bytes object""" - * return PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) # <<<<<<<<<<<<<< - * - * def getbuffer(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyBytes_FromStringAndSize(__pyx_v_self->pk.buf, __pyx_v_self->pk.length); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "msgpack/_packer.pyx":356 - * self.pk.length = 0 - * - * def bytes(self): # <<<<<<<<<<<<<< - * """Return internal buffer contents as bytes object""" - * return PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.bytes", __pyx_clineno, __pyx_lineno, __pyx_filename); - 
__pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_packer.pyx":360 - * return PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * - * def getbuffer(self): # <<<<<<<<<<<<<< - * """Return view of internal buffer.""" - * return buff_to_buff(self.pk.buf, self.pk.length) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_21getbuffer(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_20getbuffer[] = "Packer.getbuffer(self)\nReturn view of internal buffer."; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_21getbuffer(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("getbuffer (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_20getbuffer(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_20getbuffer(struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("getbuffer", 0); - - /* "msgpack/_packer.pyx":362 - * def getbuffer(self): - * """Return view of internal buffer.""" - * return buff_to_buff(self.pk.buf, self.pk.length) # <<<<<<<<<<<<<< - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = buff_to_buff(__pyx_v_self->pk.buf, __pyx_v_self->pk.length); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 362, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "msgpack/_packer.pyx":360 - * return PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - * - * def getbuffer(self): # <<<<<<<<<<<<<< - * """Return view of internal buffer.""" - * return buff_to_buff(self.pk.buf, self.pk.length) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.getbuffer", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_23__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_22__reduce_cython__[] = "Packer.__reduce_cython__(self)"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_23__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_22__reduce_cython__(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_22__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree 
fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__13, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(2, 2, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_25__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_6Packer_24__setstate_cython__[] = "Packer.__setstate_cython__(self, __pyx_state)"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_6Packer_25__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_6Packer_24__setstate_cython__(((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_6Packer_24__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7msgpack_9_cmsgpack_Packer *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":4 - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__14, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(2, 4, __pyx_L1_error) - - /* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Packer.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* 
"msgpack/_unpacker.pyx":57 - * void unpack_clear(unpack_context* ctx) - * - * cdef inline init_ctx(unpack_context *ctx, # <<<<<<<<<<<<<< - * object object_hook, object object_pairs_hook, - * object list_hook, object ext_hook, - */ - -static CYTHON_INLINE PyObject *__pyx_f_7msgpack_9_cmsgpack_init_ctx(unpack_context *__pyx_v_ctx, PyObject *__pyx_v_object_hook, PyObject *__pyx_v_object_pairs_hook, PyObject *__pyx_v_list_hook, PyObject *__pyx_v_ext_hook, int __pyx_v_use_list, int __pyx_v_raw, int __pyx_v_strict_map_key, char const *__pyx_v_encoding, char const *__pyx_v_unicode_errors, Py_ssize_t __pyx_v_max_str_len, Py_ssize_t __pyx_v_max_bin_len, Py_ssize_t __pyx_v_max_array_len, Py_ssize_t __pyx_v_max_map_len, Py_ssize_t __pyx_v_max_ext_len) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - __Pyx_RefNannySetupContext("init_ctx", 0); - - /* "msgpack/_unpacker.pyx":65 - * Py_ssize_t max_array_len, Py_ssize_t max_map_len, - * Py_ssize_t max_ext_len): - * unpack_init(ctx) # <<<<<<<<<<<<<< - * ctx.user.use_list = use_list - * ctx.user.raw = raw - */ - unpack_init(__pyx_v_ctx); - - /* "msgpack/_unpacker.pyx":66 - * Py_ssize_t max_ext_len): - * unpack_init(ctx) - * ctx.user.use_list = use_list # <<<<<<<<<<<<<< - * ctx.user.raw = raw - * ctx.user.strict_map_key = strict_map_key - */ - __pyx_v_ctx->user.use_list = __pyx_v_use_list; - - /* "msgpack/_unpacker.pyx":67 - * unpack_init(ctx) - * ctx.user.use_list = use_list - * ctx.user.raw = raw # <<<<<<<<<<<<<< - * ctx.user.strict_map_key = strict_map_key - * ctx.user.object_hook = ctx.user.list_hook = NULL - */ - __pyx_v_ctx->user.raw = __pyx_v_raw; - - /* "msgpack/_unpacker.pyx":68 - * ctx.user.use_list = use_list - * ctx.user.raw = raw - * ctx.user.strict_map_key = strict_map_key # <<<<<<<<<<<<<< - * ctx.user.object_hook = ctx.user.list_hook = NULL - * ctx.user.max_str_len = max_str_len - */ - __pyx_v_ctx->user.strict_map_key = __pyx_v_strict_map_key; - - /* "msgpack/_unpacker.pyx":69 - * ctx.user.raw = raw - * ctx.user.strict_map_key = strict_map_key - * ctx.user.object_hook = ctx.user.list_hook = NULL # <<<<<<<<<<<<<< - * ctx.user.max_str_len = max_str_len - * ctx.user.max_bin_len = max_bin_len - */ - __pyx_v_ctx->user.object_hook = ((PyObject *)NULL); - __pyx_v_ctx->user.list_hook = ((PyObject *)NULL); - - /* "msgpack/_unpacker.pyx":70 - * ctx.user.strict_map_key = strict_map_key - * ctx.user.object_hook = ctx.user.list_hook = NULL - * ctx.user.max_str_len = max_str_len # <<<<<<<<<<<<<< - * ctx.user.max_bin_len = max_bin_len - * ctx.user.max_array_len = max_array_len - */ - __pyx_v_ctx->user.max_str_len = __pyx_v_max_str_len; - - /* "msgpack/_unpacker.pyx":71 - * ctx.user.object_hook = ctx.user.list_hook = NULL - * ctx.user.max_str_len = max_str_len - * ctx.user.max_bin_len = max_bin_len # <<<<<<<<<<<<<< - * ctx.user.max_array_len = max_array_len - * ctx.user.max_map_len = max_map_len - */ - __pyx_v_ctx->user.max_bin_len = __pyx_v_max_bin_len; - - /* "msgpack/_unpacker.pyx":72 - * ctx.user.max_str_len = max_str_len - * ctx.user.max_bin_len = max_bin_len - * ctx.user.max_array_len = max_array_len # <<<<<<<<<<<<<< - * ctx.user.max_map_len = max_map_len - * ctx.user.max_ext_len = max_ext_len - */ - __pyx_v_ctx->user.max_array_len = __pyx_v_max_array_len; - - /* "msgpack/_unpacker.pyx":73 - * ctx.user.max_bin_len = max_bin_len - * ctx.user.max_array_len = max_array_len - * ctx.user.max_map_len = max_map_len # <<<<<<<<<<<<<< - * ctx.user.max_ext_len = 
max_ext_len - * - */ - __pyx_v_ctx->user.max_map_len = __pyx_v_max_map_len; - - /* "msgpack/_unpacker.pyx":74 - * ctx.user.max_array_len = max_array_len - * ctx.user.max_map_len = max_map_len - * ctx.user.max_ext_len = max_ext_len # <<<<<<<<<<<<<< - * - * if object_hook is not None and object_pairs_hook is not None: - */ - __pyx_v_ctx->user.max_ext_len = __pyx_v_max_ext_len; - - /* "msgpack/_unpacker.pyx":76 - * ctx.user.max_ext_len = max_ext_len - * - * if object_hook is not None and object_pairs_hook is not None: # <<<<<<<<<<<<<< - * raise TypeError("object_pairs_hook and object_hook are mutually exclusive.") - * - */ - __pyx_t_2 = (__pyx_v_object_hook != Py_None); - __pyx_t_3 = (__pyx_t_2 != 0); - if (__pyx_t_3) { - } else { - __pyx_t_1 = __pyx_t_3; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_3 = (__pyx_v_object_pairs_hook != Py_None); - __pyx_t_2 = (__pyx_t_3 != 0); - __pyx_t_1 = __pyx_t_2; - __pyx_L4_bool_binop_done:; - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":77 - * - * if object_hook is not None and object_pairs_hook is not None: - * raise TypeError("object_pairs_hook and object_hook are mutually exclusive.") # <<<<<<<<<<<<<< - * - * if object_hook is not None: - */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__15, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 77, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 77, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":76 - * ctx.user.max_ext_len = max_ext_len - * - * if object_hook is not None and object_pairs_hook is not None: # <<<<<<<<<<<<<< - * raise TypeError("object_pairs_hook and object_hook are mutually exclusive.") - * - */ - } - - /* "msgpack/_unpacker.pyx":79 - * raise TypeError("object_pairs_hook and object_hook are mutually exclusive.") - * - * if object_hook is not None: # <<<<<<<<<<<<<< - * if not PyCallable_Check(object_hook): - * raise TypeError("object_hook must be a callable.") - */ - __pyx_t_1 = (__pyx_v_object_hook != Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "msgpack/_unpacker.pyx":80 - * - * if object_hook is not None: - * if not PyCallable_Check(object_hook): # <<<<<<<<<<<<<< - * raise TypeError("object_hook must be a callable.") - * ctx.user.object_hook = object_hook - */ - __pyx_t_2 = ((!(PyCallable_Check(__pyx_v_object_hook) != 0)) != 0); - if (unlikely(__pyx_t_2)) { - - /* "msgpack/_unpacker.pyx":81 - * if object_hook is not None: - * if not PyCallable_Check(object_hook): - * raise TypeError("object_hook must be a callable.") # <<<<<<<<<<<<<< - * ctx.user.object_hook = object_hook - * - */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__16, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 81, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":80 - * - * if object_hook is not None: - * if not PyCallable_Check(object_hook): # <<<<<<<<<<<<<< - * raise TypeError("object_hook must be a callable.") - * ctx.user.object_hook = object_hook - */ - } - - /* "msgpack/_unpacker.pyx":82 - * if not PyCallable_Check(object_hook): - * raise TypeError("object_hook must be a callable.") - * ctx.user.object_hook = object_hook # <<<<<<<<<<<<<< - * - * if object_pairs_hook is None: - */ - __pyx_v_ctx->user.object_hook = ((PyObject *)__pyx_v_object_hook); - - /* "msgpack/_unpacker.pyx":79 - * raise 
TypeError("object_pairs_hook and object_hook are mutually exclusive.") - * - * if object_hook is not None: # <<<<<<<<<<<<<< - * if not PyCallable_Check(object_hook): - * raise TypeError("object_hook must be a callable.") - */ - } - - /* "msgpack/_unpacker.pyx":84 - * ctx.user.object_hook = object_hook - * - * if object_pairs_hook is None: # <<<<<<<<<<<<<< - * ctx.user.has_pairs_hook = False - * else: - */ - __pyx_t_2 = (__pyx_v_object_pairs_hook == Py_None); - __pyx_t_1 = (__pyx_t_2 != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":85 - * - * if object_pairs_hook is None: - * ctx.user.has_pairs_hook = False # <<<<<<<<<<<<<< - * else: - * if not PyCallable_Check(object_pairs_hook): - */ - __pyx_v_ctx->user.has_pairs_hook = 0; - - /* "msgpack/_unpacker.pyx":84 - * ctx.user.object_hook = object_hook - * - * if object_pairs_hook is None: # <<<<<<<<<<<<<< - * ctx.user.has_pairs_hook = False - * else: - */ - goto __pyx_L8; - } - - /* "msgpack/_unpacker.pyx":87 - * ctx.user.has_pairs_hook = False - * else: - * if not PyCallable_Check(object_pairs_hook): # <<<<<<<<<<<<<< - * raise TypeError("object_pairs_hook must be a callable.") - * ctx.user.object_hook = object_pairs_hook - */ - /*else*/ { - __pyx_t_1 = ((!(PyCallable_Check(__pyx_v_object_pairs_hook) != 0)) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":88 - * else: - * if not PyCallable_Check(object_pairs_hook): - * raise TypeError("object_pairs_hook must be a callable.") # <<<<<<<<<<<<<< - * ctx.user.object_hook = object_pairs_hook - * ctx.user.has_pairs_hook = True - */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__17, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 88, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 88, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":87 - * ctx.user.has_pairs_hook = False - * else: - * if not PyCallable_Check(object_pairs_hook): # <<<<<<<<<<<<<< - * raise TypeError("object_pairs_hook must be a callable.") - * ctx.user.object_hook = object_pairs_hook - */ - } - - /* "msgpack/_unpacker.pyx":89 - * if not PyCallable_Check(object_pairs_hook): - * raise TypeError("object_pairs_hook must be a callable.") - * ctx.user.object_hook = object_pairs_hook # <<<<<<<<<<<<<< - * ctx.user.has_pairs_hook = True - * - */ - __pyx_v_ctx->user.object_hook = ((PyObject *)__pyx_v_object_pairs_hook); - - /* "msgpack/_unpacker.pyx":90 - * raise TypeError("object_pairs_hook must be a callable.") - * ctx.user.object_hook = object_pairs_hook - * ctx.user.has_pairs_hook = True # <<<<<<<<<<<<<< - * - * if list_hook is not None: - */ - __pyx_v_ctx->user.has_pairs_hook = 1; - } - __pyx_L8:; - - /* "msgpack/_unpacker.pyx":92 - * ctx.user.has_pairs_hook = True - * - * if list_hook is not None: # <<<<<<<<<<<<<< - * if not PyCallable_Check(list_hook): - * raise TypeError("list_hook must be a callable.") - */ - __pyx_t_1 = (__pyx_v_list_hook != Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "msgpack/_unpacker.pyx":93 - * - * if list_hook is not None: - * if not PyCallable_Check(list_hook): # <<<<<<<<<<<<<< - * raise TypeError("list_hook must be a callable.") - * ctx.user.list_hook = list_hook - */ - __pyx_t_2 = ((!(PyCallable_Check(__pyx_v_list_hook) != 0)) != 0); - if (unlikely(__pyx_t_2)) { - - /* "msgpack/_unpacker.pyx":94 - * if list_hook is not None: - * if not PyCallable_Check(list_hook): - * raise TypeError("list_hook must be a callable.") # <<<<<<<<<<<<<< - * ctx.user.list_hook = 
list_hook - * - */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__18, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 94, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 94, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":93 - * - * if list_hook is not None: - * if not PyCallable_Check(list_hook): # <<<<<<<<<<<<<< - * raise TypeError("list_hook must be a callable.") - * ctx.user.list_hook = list_hook - */ - } - - /* "msgpack/_unpacker.pyx":95 - * if not PyCallable_Check(list_hook): - * raise TypeError("list_hook must be a callable.") - * ctx.user.list_hook = list_hook # <<<<<<<<<<<<<< - * - * if ext_hook is not None: - */ - __pyx_v_ctx->user.list_hook = ((PyObject *)__pyx_v_list_hook); - - /* "msgpack/_unpacker.pyx":92 - * ctx.user.has_pairs_hook = True - * - * if list_hook is not None: # <<<<<<<<<<<<<< - * if not PyCallable_Check(list_hook): - * raise TypeError("list_hook must be a callable.") - */ - } - - /* "msgpack/_unpacker.pyx":97 - * ctx.user.list_hook = list_hook - * - * if ext_hook is not None: # <<<<<<<<<<<<<< - * if not PyCallable_Check(ext_hook): - * raise TypeError("ext_hook must be a callable.") - */ - __pyx_t_2 = (__pyx_v_ext_hook != Py_None); - __pyx_t_1 = (__pyx_t_2 != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":98 - * - * if ext_hook is not None: - * if not PyCallable_Check(ext_hook): # <<<<<<<<<<<<<< - * raise TypeError("ext_hook must be a callable.") - * ctx.user.ext_hook = ext_hook - */ - __pyx_t_1 = ((!(PyCallable_Check(__pyx_v_ext_hook) != 0)) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":99 - * if ext_hook is not None: - * if not PyCallable_Check(ext_hook): - * raise TypeError("ext_hook must be a callable.") # <<<<<<<<<<<<<< - * ctx.user.ext_hook = ext_hook - * - */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__19, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 99, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 99, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":98 - * - * if ext_hook is not None: - * if not PyCallable_Check(ext_hook): # <<<<<<<<<<<<<< - * raise TypeError("ext_hook must be a callable.") - * ctx.user.ext_hook = ext_hook - */ - } - - /* "msgpack/_unpacker.pyx":100 - * if not PyCallable_Check(ext_hook): - * raise TypeError("ext_hook must be a callable.") - * ctx.user.ext_hook = ext_hook # <<<<<<<<<<<<<< - * - * ctx.user.encoding = encoding - */ - __pyx_v_ctx->user.ext_hook = ((PyObject *)__pyx_v_ext_hook); - - /* "msgpack/_unpacker.pyx":97 - * ctx.user.list_hook = list_hook - * - * if ext_hook is not None: # <<<<<<<<<<<<<< - * if not PyCallable_Check(ext_hook): - * raise TypeError("ext_hook must be a callable.") - */ - } - - /* "msgpack/_unpacker.pyx":102 - * ctx.user.ext_hook = ext_hook - * - * ctx.user.encoding = encoding # <<<<<<<<<<<<<< - * ctx.user.unicode_errors = unicode_errors - * - */ - __pyx_v_ctx->user.encoding = __pyx_v_encoding; - - /* "msgpack/_unpacker.pyx":103 - * - * ctx.user.encoding = encoding - * ctx.user.unicode_errors = unicode_errors # <<<<<<<<<<<<<< - * - * def default_read_extended_type(typecode, data): - */ - __pyx_v_ctx->user.unicode_errors = __pyx_v_unicode_errors; - - /* "msgpack/_unpacker.pyx":57 - * void unpack_clear(unpack_context* ctx) - * - * cdef inline init_ctx(unpack_context *ctx, # <<<<<<<<<<<<<< - * object object_hook, object object_pairs_hook, - * object list_hook, 
object ext_hook, - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("msgpack._cmsgpack.init_ctx", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":105 - * ctx.user.unicode_errors = unicode_errors - * - * def default_read_extended_type(typecode, data): # <<<<<<<<<<<<<< - * raise NotImplementedError("Cannot decode extended type with typecode=%d" % typecode) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_1default_read_extended_type(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_default_read_extended_type[] = "default_read_extended_type(typecode, data)"; -static PyMethodDef __pyx_mdef_7msgpack_9_cmsgpack_1default_read_extended_type = {"default_read_extended_type", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7msgpack_9_cmsgpack_1default_read_extended_type, METH_VARARGS|METH_KEYWORDS, __pyx_doc_7msgpack_9_cmsgpack_default_read_extended_type}; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_1default_read_extended_type(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_typecode = 0; - CYTHON_UNUSED PyObject *__pyx_v_data = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("default_read_extended_type (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_typecode,&__pyx_n_s_data,0}; - PyObject* values[2] = {0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_typecode)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("default_read_extended_type", 1, 2, 2, 1); __PYX_ERR(1, 105, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "default_read_extended_type") < 0)) __PYX_ERR(1, 105, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - } - __pyx_v_typecode = values[0]; - __pyx_v_data = values[1]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("default_read_extended_type", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 105, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("msgpack._cmsgpack.default_read_extended_type", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_default_read_extended_type(__pyx_self, __pyx_v_typecode, __pyx_v_data); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; 
-} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_default_read_extended_type(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_typecode, CYTHON_UNUSED PyObject *__pyx_v_data) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - __Pyx_RefNannySetupContext("default_read_extended_type", 0); - - /* "msgpack/_unpacker.pyx":106 - * - * def default_read_extended_type(typecode, data): - * raise NotImplementedError("Cannot decode extended type with typecode=%d" % typecode) # <<<<<<<<<<<<<< - * - * cdef inline int get_data_from_buffer(object obj, - */ - __pyx_t_1 = __Pyx_PyUnicode_FormatSafe(__pyx_kp_u_Cannot_decode_extended_type_with, __pyx_v_typecode); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 106, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_builtin_NotImplementedError, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 106, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_Raise(__pyx_t_2, 0, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(1, 106, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":105 - * ctx.user.unicode_errors = unicode_errors - * - * def default_read_extended_type(typecode, data): # <<<<<<<<<<<<<< - * raise NotImplementedError("Cannot decode extended type with typecode=%d" % typecode) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("msgpack._cmsgpack.default_read_extended_type", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":108 - * raise NotImplementedError("Cannot decode extended type with typecode=%d" % typecode) - * - * cdef inline int get_data_from_buffer(object obj, # <<<<<<<<<<<<<< - * Py_buffer *view, - * char **buf, - */ - -static CYTHON_INLINE int __pyx_f_7msgpack_9_cmsgpack_get_data_from_buffer(PyObject *__pyx_v_obj, Py_buffer *__pyx_v_view, char **__pyx_v_buf, Py_ssize_t *__pyx_v_buffer_len, int *__pyx_v_new_protocol) { - PyObject *__pyx_v_contiguous = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - char *__pyx_t_6; - __Pyx_RefNannySetupContext("get_data_from_buffer", 0); - - /* "msgpack/_unpacker.pyx":115 - * cdef object contiguous - * cdef Py_buffer tmp - * if PyObject_CheckBuffer(obj): # <<<<<<<<<<<<<< - * new_protocol[0] = 1 - * if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1: - */ - __pyx_t_1 = (PyObject_CheckBuffer(__pyx_v_obj) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":116 - * cdef Py_buffer tmp - * if PyObject_CheckBuffer(obj): - * new_protocol[0] = 1 # <<<<<<<<<<<<<< - * if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1: - * raise - */ - (__pyx_v_new_protocol[0]) = 1; - - /* "msgpack/_unpacker.pyx":117 - * if PyObject_CheckBuffer(obj): - * new_protocol[0] = 1 - * if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1: # <<<<<<<<<<<<<< - * raise - * if view.itemsize != 1: - */ - __pyx_t_2 = PyObject_GetBuffer(__pyx_v_obj, __pyx_v_view, PyBUF_FULL_RO); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(1, 117, __pyx_L1_error) - __pyx_t_1 = ((__pyx_t_2 == -1L) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":118 - * new_protocol[0] = 1 - * if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1: - * raise # <<<<<<<<<<<<<< - * if 
view.itemsize != 1: - * PyBuffer_Release(view) - */ - __Pyx_ReraiseException(); __PYX_ERR(1, 118, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":117 - * if PyObject_CheckBuffer(obj): - * new_protocol[0] = 1 - * if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1: # <<<<<<<<<<<<<< - * raise - * if view.itemsize != 1: - */ - } - - /* "msgpack/_unpacker.pyx":119 - * if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1: - * raise - * if view.itemsize != 1: # <<<<<<<<<<<<<< - * PyBuffer_Release(view) - * raise BufferError("cannot unpack from multi-byte object") - */ - __pyx_t_1 = ((__pyx_v_view->itemsize != 1) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":120 - * raise - * if view.itemsize != 1: - * PyBuffer_Release(view) # <<<<<<<<<<<<<< - * raise BufferError("cannot unpack from multi-byte object") - * if PyBuffer_IsContiguous(view, b'A') == 0: - */ - PyBuffer_Release(__pyx_v_view); - - /* "msgpack/_unpacker.pyx":121 - * if view.itemsize != 1: - * PyBuffer_Release(view) - * raise BufferError("cannot unpack from multi-byte object") # <<<<<<<<<<<<<< - * if PyBuffer_IsContiguous(view, b'A') == 0: - * PyBuffer_Release(view) - */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_BufferError, __pyx_tuple__20, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 121, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(1, 121, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":119 - * if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1: - * raise - * if view.itemsize != 1: # <<<<<<<<<<<<<< - * PyBuffer_Release(view) - * raise BufferError("cannot unpack from multi-byte object") - */ - } - - /* "msgpack/_unpacker.pyx":122 - * PyBuffer_Release(view) - * raise BufferError("cannot unpack from multi-byte object") - * if PyBuffer_IsContiguous(view, b'A') == 0: # <<<<<<<<<<<<<< - * PyBuffer_Release(view) - * # create a contiguous copy and get buffer - */ - __pyx_t_1 = ((PyBuffer_IsContiguous(__pyx_v_view, 'A') == 0) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":123 - * raise BufferError("cannot unpack from multi-byte object") - * if PyBuffer_IsContiguous(view, b'A') == 0: - * PyBuffer_Release(view) # <<<<<<<<<<<<<< - * # create a contiguous copy and get buffer - * contiguous = PyMemoryView_GetContiguous(obj, PyBUF_READ, b'C') - */ - PyBuffer_Release(__pyx_v_view); - - /* "msgpack/_unpacker.pyx":125 - * PyBuffer_Release(view) - * # create a contiguous copy and get buffer - * contiguous = PyMemoryView_GetContiguous(obj, PyBUF_READ, b'C') # <<<<<<<<<<<<<< - * PyObject_GetBuffer(contiguous, view, PyBUF_SIMPLE) - * # view must hold the only reference to contiguous, - */ - __pyx_t_3 = PyMemoryView_GetContiguous(__pyx_v_obj, PyBUF_READ, 'C'); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 125, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_v_contiguous = __pyx_t_3; - __pyx_t_3 = 0; - - /* "msgpack/_unpacker.pyx":126 - * # create a contiguous copy and get buffer - * contiguous = PyMemoryView_GetContiguous(obj, PyBUF_READ, b'C') - * PyObject_GetBuffer(contiguous, view, PyBUF_SIMPLE) # <<<<<<<<<<<<<< - * # view must hold the only reference to contiguous, - * # so memory is freed when view is released - */ - __pyx_t_2 = PyObject_GetBuffer(__pyx_v_contiguous, __pyx_v_view, PyBUF_SIMPLE); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(1, 126, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":129 - * # view must hold the only reference to contiguous, - * # so memory is freed when view is released - * Py_DECREF(contiguous) # 
<<<<<<<<<<<<<< - * buffer_len[0] = view.len - * buf[0] = view.buf - */ - Py_DECREF(__pyx_v_contiguous); - - /* "msgpack/_unpacker.pyx":122 - * PyBuffer_Release(view) - * raise BufferError("cannot unpack from multi-byte object") - * if PyBuffer_IsContiguous(view, b'A') == 0: # <<<<<<<<<<<<<< - * PyBuffer_Release(view) - * # create a contiguous copy and get buffer - */ - } - - /* "msgpack/_unpacker.pyx":130 - * # so memory is freed when view is released - * Py_DECREF(contiguous) - * buffer_len[0] = view.len # <<<<<<<<<<<<<< - * buf[0] = view.buf - * return 1 - */ - __pyx_t_4 = __pyx_v_view->len; - (__pyx_v_buffer_len[0]) = __pyx_t_4; - - /* "msgpack/_unpacker.pyx":131 - * Py_DECREF(contiguous) - * buffer_len[0] = view.len - * buf[0] = view.buf # <<<<<<<<<<<<<< - * return 1 - * else: - */ - (__pyx_v_buf[0]) = ((char *)__pyx_v_view->buf); - - /* "msgpack/_unpacker.pyx":132 - * buffer_len[0] = view.len - * buf[0] = view.buf - * return 1 # <<<<<<<<<<<<<< - * else: - * new_protocol[0] = 0 - */ - __pyx_r = 1; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":115 - * cdef object contiguous - * cdef Py_buffer tmp - * if PyObject_CheckBuffer(obj): # <<<<<<<<<<<<<< - * new_protocol[0] = 1 - * if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1: - */ - } - - /* "msgpack/_unpacker.pyx":134 - * return 1 - * else: - * new_protocol[0] = 0 # <<<<<<<<<<<<<< - * if PyObject_AsReadBuffer(obj, buf, buffer_len) == -1: - * raise BufferError("could not get memoryview") - */ - /*else*/ { - (__pyx_v_new_protocol[0]) = 0; - - /* "msgpack/_unpacker.pyx":135 - * else: - * new_protocol[0] = 0 - * if PyObject_AsReadBuffer(obj, buf, buffer_len) == -1: # <<<<<<<<<<<<<< - * raise BufferError("could not get memoryview") - * PyErr_WarnEx(RuntimeWarning, - */ - __pyx_t_2 = PyObject_AsReadBuffer(__pyx_v_obj, ((void const **)__pyx_v_buf), __pyx_v_buffer_len); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(1, 135, __pyx_L1_error) - __pyx_t_1 = ((__pyx_t_2 == -1L) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":136 - * new_protocol[0] = 0 - * if PyObject_AsReadBuffer(obj, buf, buffer_len) == -1: - * raise BufferError("could not get memoryview") # <<<<<<<<<<<<<< - * PyErr_WarnEx(RuntimeWarning, - * "using old buffer interface to unpack %s; " - */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_BufferError, __pyx_tuple__21, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 136, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(1, 136, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":135 - * else: - * new_protocol[0] = 0 - * if PyObject_AsReadBuffer(obj, buf, buffer_len) == -1: # <<<<<<<<<<<<<< - * raise BufferError("could not get memoryview") - * PyErr_WarnEx(RuntimeWarning, - */ - } - - /* "msgpack/_unpacker.pyx":140 - * "using old buffer interface to unpack %s; " - * "this leads to unpacking errors if slicing is used and " - * "will be removed in a future version" % type(obj), # <<<<<<<<<<<<<< - * 1) - * return 1 - */ - __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_7cpython_4type_type), __pyx_v_obj); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 140, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = PyUnicode_Format(__pyx_kp_u_using_old_buffer_interface_to_un, __pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 140, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_6 = __Pyx_PyObject_AsWritableString(__pyx_t_5); if (unlikely((!__pyx_t_6) && PyErr_Occurred())) __PYX_ERR(1, 140, 
__pyx_L1_error) - - /* "msgpack/_unpacker.pyx":137 - * if PyObject_AsReadBuffer(obj, buf, buffer_len) == -1: - * raise BufferError("could not get memoryview") - * PyErr_WarnEx(RuntimeWarning, # <<<<<<<<<<<<<< - * "using old buffer interface to unpack %s; " - * "this leads to unpacking errors if slicing is used and " - */ - __pyx_t_2 = PyErr_WarnEx(__pyx_builtin_RuntimeWarning, __pyx_t_6, 1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(1, 137, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - - /* "msgpack/_unpacker.pyx":142 - * "will be removed in a future version" % type(obj), - * 1) - * return 1 # <<<<<<<<<<<<<< - * - * def unpackb(object packed, object object_hook=None, object list_hook=None, - */ - __pyx_r = 1; - goto __pyx_L0; - } - - /* "msgpack/_unpacker.pyx":108 - * raise NotImplementedError("Cannot decode extended type with typecode=%d" % typecode) - * - * cdef inline int get_data_from_buffer(object obj, # <<<<<<<<<<<<<< - * Py_buffer *view, - * char **buf, - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("msgpack._cmsgpack.get_data_from_buffer", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_contiguous); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":144 - * return 1 - * - * def unpackb(object packed, object object_hook=None, object list_hook=None, # <<<<<<<<<<<<<< - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * encoding=None, unicode_errors=None, - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_3unpackb(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_2unpackb[] = "unpackb(packed, object_hook=None, list_hook=None, bool use_list=True, bool raw=True, bool strict_map_key=False, encoding=None, unicode_errors=None, object_pairs_hook=None, ext_hook=ExtType, Py_ssize_t max_str_len=-1, Py_ssize_t max_bin_len=-1, Py_ssize_t max_array_len=-1, Py_ssize_t max_map_len=-1, Py_ssize_t max_ext_len=-1)\n\n Unpack packed_bytes to object. 
Returns an unpacked object.\n\n Raises ``ExtraData`` when *packed* contains extra bytes.\n Raises ``ValueError`` when *packed* is incomplete.\n Raises ``FormatError`` when *packed* is not valid msgpack.\n Raises ``StackError`` when *packed* contains too nested.\n Other exceptions can be raised during unpacking.\n\n See :class:`Unpacker` for options.\n\n *max_xxx_len* options are configured automatically from ``len(packed)``.\n "; -static PyMethodDef __pyx_mdef_7msgpack_9_cmsgpack_3unpackb = {"unpackb", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7msgpack_9_cmsgpack_3unpackb, METH_VARARGS|METH_KEYWORDS, __pyx_doc_7msgpack_9_cmsgpack_2unpackb}; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_3unpackb(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_packed = 0; - PyObject *__pyx_v_object_hook = 0; - PyObject *__pyx_v_list_hook = 0; - int __pyx_v_use_list; - int __pyx_v_raw; - int __pyx_v_strict_map_key; - PyObject *__pyx_v_encoding = 0; - PyObject *__pyx_v_unicode_errors = 0; - PyObject *__pyx_v_object_pairs_hook = 0; - PyObject *__pyx_v_ext_hook = 0; - Py_ssize_t __pyx_v_max_str_len; - Py_ssize_t __pyx_v_max_bin_len; - Py_ssize_t __pyx_v_max_array_len; - Py_ssize_t __pyx_v_max_map_len; - Py_ssize_t __pyx_v_max_ext_len; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("unpackb (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_packed,&__pyx_n_s_object_hook,&__pyx_n_s_list_hook,&__pyx_n_s_use_list,&__pyx_n_s_raw,&__pyx_n_s_strict_map_key,&__pyx_n_s_encoding,&__pyx_n_s_unicode_errors,&__pyx_n_s_object_pairs_hook,&__pyx_n_s_ext_hook,&__pyx_n_s_max_str_len,&__pyx_n_s_max_bin_len,&__pyx_n_s_max_array_len,&__pyx_n_s_max_map_len,&__pyx_n_s_max_ext_len,0}; - PyObject* values[15] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; - values[1] = ((PyObject *)Py_None); - values[2] = ((PyObject *)Py_None); - - /* "msgpack/_unpacker.pyx":146 - * def unpackb(object packed, object object_hook=None, object list_hook=None, - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * encoding=None, unicode_errors=None, # <<<<<<<<<<<<<< - * object_pairs_hook=None, ext_hook=ExtType, - * Py_ssize_t max_str_len=-1, - */ - values[6] = ((PyObject *)Py_None); - values[7] = ((PyObject *)Py_None); - - /* "msgpack/_unpacker.pyx":147 - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * encoding=None, unicode_errors=None, - * object_pairs_hook=None, ext_hook=ExtType, # <<<<<<<<<<<<<< - * Py_ssize_t max_str_len=-1, - * Py_ssize_t max_bin_len=-1, - */ - values[8] = ((PyObject *)Py_None); - values[9] = __pyx_k__22; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); - CYTHON_FALLTHROUGH; - case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); - CYTHON_FALLTHROUGH; - case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); - CYTHON_FALLTHROUGH; - case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); - CYTHON_FALLTHROUGH; - case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); - CYTHON_FALLTHROUGH; - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - 
CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_packed)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_object_hook); - if (value) { values[1] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 2: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_list_hook); - if (value) { values[2] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 3: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_use_list); - if (value) { values[3] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 4: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_raw); - if (value) { values[4] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 5: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_strict_map_key); - if (value) { values[5] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 6: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_encoding); - if (value) { values[6] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 7: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_unicode_errors); - if (value) { values[7] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 8: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_object_pairs_hook); - if (value) { values[8] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 9: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_ext_hook); - if (value) { values[9] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 10: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_str_len); - if (value) { values[10] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 11: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_bin_len); - if (value) { values[11] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 12: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_array_len); - if (value) { values[12] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 13: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_map_len); - if (value) { values[13] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 14: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_ext_len); - if (value) { values[14] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "unpackb") < 0)) __PYX_ERR(1, 144, __pyx_L3_error) - } - } else { - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); - 
CYTHON_FALLTHROUGH; - case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); - CYTHON_FALLTHROUGH; - case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); - CYTHON_FALLTHROUGH; - case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); - CYTHON_FALLTHROUGH; - case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); - CYTHON_FALLTHROUGH; - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_packed = values[0]; - __pyx_v_object_hook = values[1]; - __pyx_v_list_hook = values[2]; - if (values[3]) { - __pyx_v_use_list = __Pyx_PyObject_IsTrue(values[3]); if (unlikely((__pyx_v_use_list == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 145, __pyx_L3_error) - } else { - - /* "msgpack/_unpacker.pyx":145 - * - * def unpackb(object packed, object object_hook=None, object list_hook=None, - * bint use_list=True, bint raw=True, bint strict_map_key=False, # <<<<<<<<<<<<<< - * encoding=None, unicode_errors=None, - * object_pairs_hook=None, ext_hook=ExtType, - */ - __pyx_v_use_list = ((int)1); - } - if (values[4]) { - __pyx_v_raw = __Pyx_PyObject_IsTrue(values[4]); if (unlikely((__pyx_v_raw == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 145, __pyx_L3_error) - } else { - __pyx_v_raw = ((int)1); - } - if (values[5]) { - __pyx_v_strict_map_key = __Pyx_PyObject_IsTrue(values[5]); if (unlikely((__pyx_v_strict_map_key == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 145, __pyx_L3_error) - } else { - __pyx_v_strict_map_key = ((int)0); - } - __pyx_v_encoding = values[6]; - __pyx_v_unicode_errors = values[7]; - __pyx_v_object_pairs_hook = values[8]; - __pyx_v_ext_hook = values[9]; - if (values[10]) { - __pyx_v_max_str_len = __Pyx_PyIndex_AsSsize_t(values[10]); if (unlikely((__pyx_v_max_str_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 148, __pyx_L3_error) - } else { - __pyx_v_max_str_len = ((Py_ssize_t)-1L); - } - if (values[11]) { - __pyx_v_max_bin_len = __Pyx_PyIndex_AsSsize_t(values[11]); if (unlikely((__pyx_v_max_bin_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 149, __pyx_L3_error) - } else { - __pyx_v_max_bin_len = ((Py_ssize_t)-1L); - } - if (values[12]) { - __pyx_v_max_array_len = __Pyx_PyIndex_AsSsize_t(values[12]); if (unlikely((__pyx_v_max_array_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 150, __pyx_L3_error) - } else { - __pyx_v_max_array_len = ((Py_ssize_t)-1L); - } - if (values[13]) { - __pyx_v_max_map_len = __Pyx_PyIndex_AsSsize_t(values[13]); if (unlikely((__pyx_v_max_map_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 151, __pyx_L3_error) - } else { - __pyx_v_max_map_len = ((Py_ssize_t)-1L); - } - if (values[14]) { - __pyx_v_max_ext_len = __Pyx_PyIndex_AsSsize_t(values[14]); if (unlikely((__pyx_v_max_ext_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 152, __pyx_L3_error) - } else { - 
__pyx_v_max_ext_len = ((Py_ssize_t)-1L); - } - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("unpackb", 0, 1, 15, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 144, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("msgpack._cmsgpack.unpackb", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_2unpackb(__pyx_self, __pyx_v_packed, __pyx_v_object_hook, __pyx_v_list_hook, __pyx_v_use_list, __pyx_v_raw, __pyx_v_strict_map_key, __pyx_v_encoding, __pyx_v_unicode_errors, __pyx_v_object_pairs_hook, __pyx_v_ext_hook, __pyx_v_max_str_len, __pyx_v_max_bin_len, __pyx_v_max_array_len, __pyx_v_max_map_len, __pyx_v_max_ext_len); - - /* "msgpack/_unpacker.pyx":144 - * return 1 - * - * def unpackb(object packed, object object_hook=None, object list_hook=None, # <<<<<<<<<<<<<< - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * encoding=None, unicode_errors=None, - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_2unpackb(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_packed, PyObject *__pyx_v_object_hook, PyObject *__pyx_v_list_hook, int __pyx_v_use_list, int __pyx_v_raw, int __pyx_v_strict_map_key, PyObject *__pyx_v_encoding, PyObject *__pyx_v_unicode_errors, PyObject *__pyx_v_object_pairs_hook, PyObject *__pyx_v_ext_hook, Py_ssize_t __pyx_v_max_str_len, Py_ssize_t __pyx_v_max_bin_len, Py_ssize_t __pyx_v_max_array_len, Py_ssize_t __pyx_v_max_map_len, Py_ssize_t __pyx_v_max_ext_len) { - unpack_context __pyx_v_ctx; - Py_ssize_t __pyx_v_off; - int __pyx_v_ret; - Py_buffer __pyx_v_view; - char *__pyx_v_buf; - Py_ssize_t __pyx_v_buf_len; - char const *__pyx_v_cenc; - char const *__pyx_v_cerr; - int __pyx_v_new_protocol; - PyObject *__pyx_v_obj = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - int __pyx_t_3; - char const *__pyx_t_4; - char const *__pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_t_7; - char const *__pyx_t_8; - PyObject *__pyx_t_9 = NULL; - PyObject *__pyx_t_10 = NULL; - PyObject *__pyx_t_11 = NULL; - PyObject *__pyx_t_12 = NULL; - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - PyObject *__pyx_t_15 = NULL; - PyObject *__pyx_t_16 = NULL; - PyObject *__pyx_t_17 = NULL; - PyObject *__pyx_t_18 = NULL; - __Pyx_RefNannySetupContext("unpackb", 0); - - /* "msgpack/_unpacker.pyx":167 - * """ - * cdef unpack_context ctx - * cdef Py_ssize_t off = 0 # <<<<<<<<<<<<<< - * cdef int ret - * - */ - __pyx_v_off = 0; - - /* "msgpack/_unpacker.pyx":171 - * - * cdef Py_buffer view - * cdef char* buf = NULL # <<<<<<<<<<<<<< - * cdef Py_ssize_t buf_len - * cdef const char* cenc = NULL - */ - __pyx_v_buf = NULL; - - /* "msgpack/_unpacker.pyx":173 - * cdef char* buf = NULL - * cdef Py_ssize_t buf_len - * cdef const char* cenc = NULL # <<<<<<<<<<<<<< - * cdef const char* cerr = NULL - * cdef int new_protocol = 0 - */ - __pyx_v_cenc = NULL; - - /* "msgpack/_unpacker.pyx":174 - * cdef Py_ssize_t buf_len - * cdef const char* cenc = NULL - * cdef const char* cerr = NULL # <<<<<<<<<<<<<< - * cdef int new_protocol = 0 - * - */ - __pyx_v_cerr = NULL; - - /* "msgpack/_unpacker.pyx":175 - * cdef const char* cenc = NULL - * cdef const char* cerr = NULL - * cdef int new_protocol = 0 # <<<<<<<<<<<<<< - * - * if encoding is not None: - */ - __pyx_v_new_protocol = 0; - - /* 
"msgpack/_unpacker.pyx":177 - * cdef int new_protocol = 0 - * - * if encoding is not None: # <<<<<<<<<<<<<< - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) - * cenc = encoding - */ - __pyx_t_1 = (__pyx_v_encoding != Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "msgpack/_unpacker.pyx":178 - * - * if encoding is not None: - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) # <<<<<<<<<<<<<< - * cenc = encoding - * - */ - __pyx_t_3 = PyErr_WarnEx(__pyx_builtin_DeprecationWarning, ((char *)"encoding is deprecated, Use raw=False instead."), 1); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(1, 178, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":179 - * if encoding is not None: - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) - * cenc = encoding # <<<<<<<<<<<<<< - * - * if unicode_errors is not None: - */ - __pyx_t_4 = __Pyx_PyObject_AsString(__pyx_v_encoding); if (unlikely((!__pyx_t_4) && PyErr_Occurred())) __PYX_ERR(1, 179, __pyx_L1_error) - __pyx_v_cenc = __pyx_t_4; - - /* "msgpack/_unpacker.pyx":177 - * cdef int new_protocol = 0 - * - * if encoding is not None: # <<<<<<<<<<<<<< - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) - * cenc = encoding - */ - } - - /* "msgpack/_unpacker.pyx":181 - * cenc = encoding - * - * if unicode_errors is not None: # <<<<<<<<<<<<<< - * cerr = unicode_errors - * - */ - __pyx_t_2 = (__pyx_v_unicode_errors != Py_None); - __pyx_t_1 = (__pyx_t_2 != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":182 - * - * if unicode_errors is not None: - * cerr = unicode_errors # <<<<<<<<<<<<<< - * - * get_data_from_buffer(packed, &view, &buf, &buf_len, &new_protocol) - */ - __pyx_t_5 = __Pyx_PyObject_AsString(__pyx_v_unicode_errors); if (unlikely((!__pyx_t_5) && PyErr_Occurred())) __PYX_ERR(1, 182, __pyx_L1_error) - __pyx_v_cerr = __pyx_t_5; - - /* "msgpack/_unpacker.pyx":181 - * cenc = encoding - * - * if unicode_errors is not None: # <<<<<<<<<<<<<< - * cerr = unicode_errors - * - */ - } - - /* "msgpack/_unpacker.pyx":184 - * cerr = unicode_errors - * - * get_data_from_buffer(packed, &view, &buf, &buf_len, &new_protocol) # <<<<<<<<<<<<<< - * - * if max_str_len == -1: - */ - __pyx_t_3 = __pyx_f_7msgpack_9_cmsgpack_get_data_from_buffer(__pyx_v_packed, (&__pyx_v_view), (&__pyx_v_buf), (&__pyx_v_buf_len), (&__pyx_v_new_protocol)); if (unlikely(__pyx_t_3 == ((int)0))) __PYX_ERR(1, 184, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":186 - * get_data_from_buffer(packed, &view, &buf, &buf_len, &new_protocol) - * - * if max_str_len == -1: # <<<<<<<<<<<<<< - * max_str_len = buf_len - * if max_bin_len == -1: - */ - __pyx_t_1 = ((__pyx_v_max_str_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":187 - * - * if max_str_len == -1: - * max_str_len = buf_len # <<<<<<<<<<<<<< - * if max_bin_len == -1: - * max_bin_len = buf_len - */ - __pyx_v_max_str_len = __pyx_v_buf_len; - - /* "msgpack/_unpacker.pyx":186 - * get_data_from_buffer(packed, &view, &buf, &buf_len, &new_protocol) - * - * if max_str_len == -1: # <<<<<<<<<<<<<< - * max_str_len = buf_len - * if max_bin_len == -1: - */ - } - - /* "msgpack/_unpacker.pyx":188 - * if max_str_len == -1: - * max_str_len = buf_len - * if max_bin_len == -1: # <<<<<<<<<<<<<< - * max_bin_len = buf_len - * if max_array_len == -1: - */ - __pyx_t_1 = ((__pyx_v_max_bin_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":189 - * max_str_len = 
buf_len - * if max_bin_len == -1: - * max_bin_len = buf_len # <<<<<<<<<<<<<< - * if max_array_len == -1: - * max_array_len = buf_len - */ - __pyx_v_max_bin_len = __pyx_v_buf_len; - - /* "msgpack/_unpacker.pyx":188 - * if max_str_len == -1: - * max_str_len = buf_len - * if max_bin_len == -1: # <<<<<<<<<<<<<< - * max_bin_len = buf_len - * if max_array_len == -1: - */ - } - - /* "msgpack/_unpacker.pyx":190 - * if max_bin_len == -1: - * max_bin_len = buf_len - * if max_array_len == -1: # <<<<<<<<<<<<<< - * max_array_len = buf_len - * if max_map_len == -1: - */ - __pyx_t_1 = ((__pyx_v_max_array_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":191 - * max_bin_len = buf_len - * if max_array_len == -1: - * max_array_len = buf_len # <<<<<<<<<<<<<< - * if max_map_len == -1: - * max_map_len = buf_len//2 - */ - __pyx_v_max_array_len = __pyx_v_buf_len; - - /* "msgpack/_unpacker.pyx":190 - * if max_bin_len == -1: - * max_bin_len = buf_len - * if max_array_len == -1: # <<<<<<<<<<<<<< - * max_array_len = buf_len - * if max_map_len == -1: - */ - } - - /* "msgpack/_unpacker.pyx":192 - * if max_array_len == -1: - * max_array_len = buf_len - * if max_map_len == -1: # <<<<<<<<<<<<<< - * max_map_len = buf_len//2 - * if max_ext_len == -1: - */ - __pyx_t_1 = ((__pyx_v_max_map_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":193 - * max_array_len = buf_len - * if max_map_len == -1: - * max_map_len = buf_len//2 # <<<<<<<<<<<<<< - * if max_ext_len == -1: - * max_ext_len = buf_len - */ - __pyx_v_max_map_len = __Pyx_div_Py_ssize_t(__pyx_v_buf_len, 2); - - /* "msgpack/_unpacker.pyx":192 - * if max_array_len == -1: - * max_array_len = buf_len - * if max_map_len == -1: # <<<<<<<<<<<<<< - * max_map_len = buf_len//2 - * if max_ext_len == -1: - */ - } - - /* "msgpack/_unpacker.pyx":194 - * if max_map_len == -1: - * max_map_len = buf_len//2 - * if max_ext_len == -1: # <<<<<<<<<<<<<< - * max_ext_len = buf_len - * - */ - __pyx_t_1 = ((__pyx_v_max_ext_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":195 - * max_map_len = buf_len//2 - * if max_ext_len == -1: - * max_ext_len = buf_len # <<<<<<<<<<<<<< - * - * try: - */ - __pyx_v_max_ext_len = __pyx_v_buf_len; - - /* "msgpack/_unpacker.pyx":194 - * if max_map_len == -1: - * max_map_len = buf_len//2 - * if max_ext_len == -1: # <<<<<<<<<<<<<< - * max_ext_len = buf_len - * - */ - } - - /* "msgpack/_unpacker.pyx":197 - * max_ext_len = buf_len - * - * try: # <<<<<<<<<<<<<< - * init_ctx(&ctx, object_hook, object_pairs_hook, list_hook, ext_hook, - * use_list, raw, strict_map_key, cenc, cerr, - */ - /*try:*/ { - - /* "msgpack/_unpacker.pyx":198 - * - * try: - * init_ctx(&ctx, object_hook, object_pairs_hook, list_hook, ext_hook, # <<<<<<<<<<<<<< - * use_list, raw, strict_map_key, cenc, cerr, - * max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len) - */ - __pyx_t_6 = __pyx_f_7msgpack_9_cmsgpack_init_ctx((&__pyx_v_ctx), __pyx_v_object_hook, __pyx_v_object_pairs_hook, __pyx_v_list_hook, __pyx_v_ext_hook, __pyx_v_use_list, __pyx_v_raw, __pyx_v_strict_map_key, __pyx_v_cenc, __pyx_v_cerr, __pyx_v_max_str_len, __pyx_v_max_bin_len, __pyx_v_max_array_len, __pyx_v_max_map_len, __pyx_v_max_ext_len); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 198, __pyx_L11_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - - /* "msgpack/_unpacker.pyx":201 - * use_list, raw, strict_map_key, cenc, cerr, - * max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len) - * ret = unpack_construct(&ctx, buf, buf_len, 
&off) # <<<<<<<<<<<<<< - * finally: - * if new_protocol: - */ - __pyx_t_3 = unpack_construct((&__pyx_v_ctx), __pyx_v_buf, __pyx_v_buf_len, (&__pyx_v_off)); if (unlikely(__pyx_t_3 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(1, 201, __pyx_L11_error) - __pyx_v_ret = __pyx_t_3; - } - - /* "msgpack/_unpacker.pyx":203 - * ret = unpack_construct(&ctx, buf, buf_len, &off) - * finally: - * if new_protocol: # <<<<<<<<<<<<<< - * PyBuffer_Release(&view); - * - */ - /*finally:*/ { - /*normal exit:*/{ - __pyx_t_1 = (__pyx_v_new_protocol != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":204 - * finally: - * if new_protocol: - * PyBuffer_Release(&view); # <<<<<<<<<<<<<< - * - * if ret == 1: - */ - PyBuffer_Release((&__pyx_v_view)); - - /* "msgpack/_unpacker.pyx":203 - * ret = unpack_construct(&ctx, buf, buf_len, &off) - * finally: - * if new_protocol: # <<<<<<<<<<<<<< - * PyBuffer_Release(&view); - * - */ - } - goto __pyx_L12; - } - __pyx_L11_error:; - /*exception exit:*/{ - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14); - if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11) < 0)) __Pyx_ErrFetch(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11); - __Pyx_XGOTREF(__pyx_t_9); - __Pyx_XGOTREF(__pyx_t_10); - __Pyx_XGOTREF(__pyx_t_11); - __Pyx_XGOTREF(__pyx_t_12); - __Pyx_XGOTREF(__pyx_t_13); - __Pyx_XGOTREF(__pyx_t_14); - __pyx_t_3 = __pyx_lineno; __pyx_t_7 = __pyx_clineno; __pyx_t_8 = __pyx_filename; - { - __pyx_t_1 = (__pyx_v_new_protocol != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":204 - * finally: - * if new_protocol: - * PyBuffer_Release(&view); # <<<<<<<<<<<<<< - * - * if ret == 1: - */ - PyBuffer_Release((&__pyx_v_view)); - - /* "msgpack/_unpacker.pyx":203 - * ret = unpack_construct(&ctx, buf, buf_len, &off) - * finally: - * if new_protocol: # <<<<<<<<<<<<<< - * PyBuffer_Release(&view); - * - */ - } - } - if (PY_MAJOR_VERSION >= 3) { - __Pyx_XGIVEREF(__pyx_t_12); - __Pyx_XGIVEREF(__pyx_t_13); - __Pyx_XGIVEREF(__pyx_t_14); - __Pyx_ExceptionReset(__pyx_t_12, __pyx_t_13, __pyx_t_14); - } - __Pyx_XGIVEREF(__pyx_t_9); - __Pyx_XGIVEREF(__pyx_t_10); - __Pyx_XGIVEREF(__pyx_t_11); - __Pyx_ErrRestore(__pyx_t_9, __pyx_t_10, __pyx_t_11); - __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; - __pyx_lineno = __pyx_t_3; __pyx_clineno = __pyx_t_7; __pyx_filename = __pyx_t_8; - goto __pyx_L1_error; - } - __pyx_L12:; - } - - /* "msgpack/_unpacker.pyx":206 - * PyBuffer_Release(&view); - * - * if ret == 1: # <<<<<<<<<<<<<< - * obj = unpack_data(&ctx) - * if off < buf_len: - */ - __pyx_t_1 = ((__pyx_v_ret == 1) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":207 - * - * if ret == 1: - * obj = unpack_data(&ctx) # <<<<<<<<<<<<<< - * if off < buf_len: - * raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off)) - */ - __pyx_t_6 = unpack_data((&__pyx_v_ctx)); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 207, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_v_obj = __pyx_t_6; - __pyx_t_6 = 0; - - /* "msgpack/_unpacker.pyx":208 - * if ret == 1: - * obj = unpack_data(&ctx) - * if off < buf_len: # <<<<<<<<<<<<<< - * raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off)) - * return obj - */ - __pyx_t_1 = ((__pyx_v_off < __pyx_v_buf_len) != 0); - if (unlikely(__pyx_t_1)) { 
- - /* "msgpack/_unpacker.pyx":209 - * obj = unpack_data(&ctx) - * if off < buf_len: - * raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off)) # <<<<<<<<<<<<<< - * return obj - * unpack_clear(&ctx) - */ - __Pyx_GetModuleGlobalName(__pyx_t_15, __pyx_n_s_ExtraData); if (unlikely(!__pyx_t_15)) __PYX_ERR(1, 209, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_15); - __pyx_t_16 = PyBytes_FromStringAndSize((__pyx_v_buf + __pyx_v_off), (__pyx_v_buf_len - __pyx_v_off)); if (unlikely(!__pyx_t_16)) __PYX_ERR(1, 209, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_16); - __pyx_t_17 = NULL; - __pyx_t_7 = 0; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_15))) { - __pyx_t_17 = PyMethod_GET_SELF(__pyx_t_15); - if (likely(__pyx_t_17)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); - __Pyx_INCREF(__pyx_t_17); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_15, function); - __pyx_t_7 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_15)) { - PyObject *__pyx_temp[3] = {__pyx_t_17, __pyx_v_obj, __pyx_t_16}; - __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 209, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; - } else - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_15)) { - PyObject *__pyx_temp[3] = {__pyx_t_17, __pyx_v_obj, __pyx_t_16}; - __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 209, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; - } else - #endif - { - __pyx_t_18 = PyTuple_New(2+__pyx_t_7); if (unlikely(!__pyx_t_18)) __PYX_ERR(1, 209, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_18); - if (__pyx_t_17) { - __Pyx_GIVEREF(__pyx_t_17); PyTuple_SET_ITEM(__pyx_t_18, 0, __pyx_t_17); __pyx_t_17 = NULL; - } - __Pyx_INCREF(__pyx_v_obj); - __Pyx_GIVEREF(__pyx_v_obj); - PyTuple_SET_ITEM(__pyx_t_18, 0+__pyx_t_7, __pyx_v_obj); - __Pyx_GIVEREF(__pyx_t_16); - PyTuple_SET_ITEM(__pyx_t_18, 1+__pyx_t_7, __pyx_t_16); - __pyx_t_16 = 0; - __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_15, __pyx_t_18, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 209, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_18); __pyx_t_18 = 0; - } - __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; - __Pyx_Raise(__pyx_t_6, 0, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __PYX_ERR(1, 209, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":208 - * if ret == 1: - * obj = unpack_data(&ctx) - * if off < buf_len: # <<<<<<<<<<<<<< - * raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off)) - * return obj - */ - } - - /* "msgpack/_unpacker.pyx":210 - * if off < buf_len: - * raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off)) - * return obj # <<<<<<<<<<<<<< - * unpack_clear(&ctx) - * if ret == 0: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_obj); - __pyx_r = __pyx_v_obj; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":206 - * PyBuffer_Release(&view); - * - * if ret == 1: # <<<<<<<<<<<<<< - * obj = unpack_data(&ctx) - * if off < buf_len: - */ - } - - /* "msgpack/_unpacker.pyx":211 - * raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off)) - * return obj - * unpack_clear(&ctx) # <<<<<<<<<<<<<< - * if ret == 0: - * raise ValueError("Unpack failed: incomplete input") - */ - unpack_clear((&__pyx_v_ctx)); - 
- /* "msgpack/_unpacker.pyx":212 - * return obj - * unpack_clear(&ctx) - * if ret == 0: # <<<<<<<<<<<<<< - * raise ValueError("Unpack failed: incomplete input") - * elif ret == -2: - */ - switch (__pyx_v_ret) { - case 0: - - /* "msgpack/_unpacker.pyx":213 - * unpack_clear(&ctx) - * if ret == 0: - * raise ValueError("Unpack failed: incomplete input") # <<<<<<<<<<<<<< - * elif ret == -2: - * raise FormatError - */ - __pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__23, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 213, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_Raise(__pyx_t_6, 0, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __PYX_ERR(1, 213, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":212 - * return obj - * unpack_clear(&ctx) - * if ret == 0: # <<<<<<<<<<<<<< - * raise ValueError("Unpack failed: incomplete input") - * elif ret == -2: - */ - break; - case -2L: - - /* "msgpack/_unpacker.pyx":215 - * raise ValueError("Unpack failed: incomplete input") - * elif ret == -2: - * raise FormatError # <<<<<<<<<<<<<< - * elif ret == -3: - * raise StackError - */ - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_FormatError); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 215, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_Raise(__pyx_t_6, 0, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __PYX_ERR(1, 215, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":214 - * if ret == 0: - * raise ValueError("Unpack failed: incomplete input") - * elif ret == -2: # <<<<<<<<<<<<<< - * raise FormatError - * elif ret == -3: - */ - break; - case -3L: - - /* "msgpack/_unpacker.pyx":217 - * raise FormatError - * elif ret == -3: - * raise StackError # <<<<<<<<<<<<<< - * raise ValueError("Unpack failed: error = %d" % (ret,)) - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_StackError); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 217, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_Raise(__pyx_t_6, 0, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __PYX_ERR(1, 217, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":216 - * elif ret == -2: - * raise FormatError - * elif ret == -3: # <<<<<<<<<<<<<< - * raise StackError - * raise ValueError("Unpack failed: error = %d" % (ret,)) - */ - break; - default: break; - } - - /* "msgpack/_unpacker.pyx":218 - * elif ret == -3: - * raise StackError - * raise ValueError("Unpack failed: error = %d" % (ret,)) # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_6 = __Pyx_PyUnicode_From_int(__pyx_v_ret, 0, ' ', 'd'); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 218, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_15 = __Pyx_PyUnicode_Concat(__pyx_kp_u_Unpack_failed_error, __pyx_t_6); if (unlikely(!__pyx_t_15)) __PYX_ERR(1, 218, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_15); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_t_6 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_15); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 218, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; - __Pyx_Raise(__pyx_t_6, 0, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __PYX_ERR(1, 218, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":144 - * return 1 - * - * def unpackb(object packed, object object_hook=None, object list_hook=None, # <<<<<<<<<<<<<< - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * encoding=None, unicode_errors=None, - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_15); - __Pyx_XDECREF(__pyx_t_16); - __Pyx_XDECREF(__pyx_t_17); - 
__Pyx_XDECREF(__pyx_t_18); - __Pyx_AddTraceback("msgpack._cmsgpack.unpackb", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_obj); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":221 - * - * - * def unpack(object stream, **kwargs): # <<<<<<<<<<<<<< - * PyErr_WarnEx( - * DeprecationWarning, - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_5unpack(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_4unpack[] = "unpack(stream, **kwargs)"; -static PyMethodDef __pyx_mdef_7msgpack_9_cmsgpack_5unpack = {"unpack", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7msgpack_9_cmsgpack_5unpack, METH_VARARGS|METH_KEYWORDS, __pyx_doc_7msgpack_9_cmsgpack_4unpack}; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_5unpack(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_stream = 0; - PyObject *__pyx_v_kwargs = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("unpack (wrapper)", 0); - __pyx_v_kwargs = PyDict_New(); if (unlikely(!__pyx_v_kwargs)) return NULL; - __Pyx_GOTREF(__pyx_v_kwargs); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_stream,0}; - PyObject* values[1] = {0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_stream)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, __pyx_v_kwargs, values, pos_args, "unpack") < 0)) __PYX_ERR(1, 221, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - } - __pyx_v_stream = values[0]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("unpack", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 221, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_DECREF(__pyx_v_kwargs); __pyx_v_kwargs = 0; - __Pyx_AddTraceback("msgpack._cmsgpack.unpack", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_4unpack(__pyx_self, __pyx_v_stream, __pyx_v_kwargs); - - /* function exit code */ - __Pyx_XDECREF(__pyx_v_kwargs); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_4unpack(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_stream, PyObject *__pyx_v_kwargs) { - PyObject *__pyx_v_data = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - __Pyx_RefNannySetupContext("unpack", 0); - - /* "msgpack/_unpacker.pyx":222 - * - * def unpack(object stream, **kwargs): - * PyErr_WarnEx( # <<<<<<<<<<<<<< - * DeprecationWarning, - * "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", 1) - */ - __pyx_t_1 = 
PyErr_WarnEx(__pyx_builtin_DeprecationWarning, ((char *)"Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead."), 1); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(1, 222, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":225 - * DeprecationWarning, - * "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", 1) - * data = stream.read() # <<<<<<<<<<<<<< - * return unpackb(data, **kwargs) - * - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_stream, __pyx_n_s_read); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 225, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4) : __Pyx_PyObject_CallNoArg(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 225, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_data = __pyx_t_2; - __pyx_t_2 = 0; - - /* "msgpack/_unpacker.pyx":226 - * "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", 1) - * data = stream.read() - * return unpackb(data, **kwargs) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_unpackb); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 226, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 226, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_v_data); - __Pyx_GIVEREF(__pyx_v_data); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_data); - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_3, __pyx_v_kwargs); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 226, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":221 - * - * - * def unpack(object stream, **kwargs): # <<<<<<<<<<<<<< - * PyErr_WarnEx( - * DeprecationWarning, - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("msgpack._cmsgpack.unpack", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_data); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":337 - * cdef uint64_t stream_offset - * - * def __cinit__(self): # <<<<<<<<<<<<<< - * self.buf = NULL - * - */ - -/* Python wrapper */ -static int __pyx_pw_7msgpack_9_cmsgpack_8Unpacker_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7msgpack_9_cmsgpack_8Unpacker_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); - if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { - __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} - if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, 
"__cinit__", 0))) return -1; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker___cinit__(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7msgpack_9_cmsgpack_8Unpacker___cinit__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__cinit__", 0); - - /* "msgpack/_unpacker.pyx":338 - * - * def __cinit__(self): - * self.buf = NULL # <<<<<<<<<<<<<< - * - * def __dealloc__(self): - */ - __pyx_v_self->buf = NULL; - - /* "msgpack/_unpacker.pyx":337 - * cdef uint64_t stream_offset - * - * def __cinit__(self): # <<<<<<<<<<<<<< - * self.buf = NULL - * - */ - - /* function exit code */ - __pyx_r = 0; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":340 - * self.buf = NULL - * - * def __dealloc__(self): # <<<<<<<<<<<<<< - * PyMem_Free(self.buf) - * self.buf = NULL - */ - -/* Python wrapper */ -static void __pyx_pw_7msgpack_9_cmsgpack_8Unpacker_3__dealloc__(PyObject *__pyx_v_self); /*proto*/ -static void __pyx_pw_7msgpack_9_cmsgpack_8Unpacker_3__dealloc__(PyObject *__pyx_v_self) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); - __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_2__dealloc__(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -static void __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_2__dealloc__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__dealloc__", 0); - - /* "msgpack/_unpacker.pyx":341 - * - * def __dealloc__(self): - * PyMem_Free(self.buf) # <<<<<<<<<<<<<< - * self.buf = NULL - * - */ - PyMem_Free(__pyx_v_self->buf); - - /* "msgpack/_unpacker.pyx":342 - * def __dealloc__(self): - * PyMem_Free(self.buf) - * self.buf = NULL # <<<<<<<<<<<<<< - * - * def __init__(self, file_like=None, Py_ssize_t read_size=0, - */ - __pyx_v_self->buf = NULL; - - /* "msgpack/_unpacker.pyx":340 - * self.buf = NULL - * - * def __dealloc__(self): # <<<<<<<<<<<<<< - * PyMem_Free(self.buf) - * self.buf = NULL - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -/* "msgpack/_unpacker.pyx":344 - * self.buf = NULL - * - * def __init__(self, file_like=None, Py_ssize_t read_size=0, # <<<<<<<<<<<<<< - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * object object_hook=None, object object_pairs_hook=None, object list_hook=None, - */ - -/* Python wrapper */ -static int __pyx_pw_7msgpack_9_cmsgpack_8Unpacker_5__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7msgpack_9_cmsgpack_8Unpacker_5__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_file_like = 0; - Py_ssize_t __pyx_v_read_size; - int __pyx_v_use_list; - int __pyx_v_raw; - int __pyx_v_strict_map_key; - PyObject *__pyx_v_object_hook = 0; - PyObject *__pyx_v_object_pairs_hook = 0; - PyObject *__pyx_v_list_hook = 0; - PyObject *__pyx_v_encoding = 0; - PyObject *__pyx_v_unicode_errors = 0; - Py_ssize_t __pyx_v_max_buffer_size; - PyObject *__pyx_v_ext_hook = 0; - Py_ssize_t __pyx_v_max_str_len; - Py_ssize_t __pyx_v_max_bin_len; - Py_ssize_t __pyx_v_max_array_len; - Py_ssize_t __pyx_v_max_map_len; - Py_ssize_t __pyx_v_max_ext_len; - int __pyx_r; - __Pyx_RefNannyDeclarations - 
__Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_file_like,&__pyx_n_s_read_size,&__pyx_n_s_use_list,&__pyx_n_s_raw,&__pyx_n_s_strict_map_key,&__pyx_n_s_object_hook,&__pyx_n_s_object_pairs_hook,&__pyx_n_s_list_hook,&__pyx_n_s_encoding,&__pyx_n_s_unicode_errors,&__pyx_n_s_max_buffer_size,&__pyx_n_s_ext_hook,&__pyx_n_s_max_str_len,&__pyx_n_s_max_bin_len,&__pyx_n_s_max_array_len,&__pyx_n_s_max_map_len,&__pyx_n_s_max_ext_len,0}; - PyObject* values[17] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; - values[0] = ((PyObject *)Py_None); - - /* "msgpack/_unpacker.pyx":346 - * def __init__(self, file_like=None, Py_ssize_t read_size=0, - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * object object_hook=None, object object_pairs_hook=None, object list_hook=None, # <<<<<<<<<<<<<< - * encoding=None, unicode_errors=None, Py_ssize_t max_buffer_size=0, - * object ext_hook=ExtType, - */ - values[5] = ((PyObject *)Py_None); - values[6] = ((PyObject *)Py_None); - values[7] = ((PyObject *)Py_None); - - /* "msgpack/_unpacker.pyx":347 - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * object object_hook=None, object object_pairs_hook=None, object list_hook=None, - * encoding=None, unicode_errors=None, Py_ssize_t max_buffer_size=0, # <<<<<<<<<<<<<< - * object ext_hook=ExtType, - * Py_ssize_t max_str_len=-1, - */ - values[8] = ((PyObject *)Py_None); - values[9] = ((PyObject *)Py_None); - values[11] = __pyx_k__24; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 17: values[16] = PyTuple_GET_ITEM(__pyx_args, 16); - CYTHON_FALLTHROUGH; - case 16: values[15] = PyTuple_GET_ITEM(__pyx_args, 15); - CYTHON_FALLTHROUGH; - case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); - CYTHON_FALLTHROUGH; - case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); - CYTHON_FALLTHROUGH; - case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); - CYTHON_FALLTHROUGH; - case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); - CYTHON_FALLTHROUGH; - case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); - CYTHON_FALLTHROUGH; - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_file_like); - if (value) { values[0] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 1: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_read_size); - if (value) { values[1] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 2: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, 
__pyx_n_s_use_list); - if (value) { values[2] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 3: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_raw); - if (value) { values[3] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 4: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_strict_map_key); - if (value) { values[4] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 5: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_object_hook); - if (value) { values[5] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 6: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_object_pairs_hook); - if (value) { values[6] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 7: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_list_hook); - if (value) { values[7] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 8: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_encoding); - if (value) { values[8] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 9: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_unicode_errors); - if (value) { values[9] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 10: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_buffer_size); - if (value) { values[10] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 11: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_ext_hook); - if (value) { values[11] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 12: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_str_len); - if (value) { values[12] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 13: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_bin_len); - if (value) { values[13] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 14: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_array_len); - if (value) { values[14] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 15: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_map_len); - if (value) { values[15] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 16: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_ext_len); - if (value) { values[16] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(1, 344, __pyx_L3_error) - } - } else { - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 17: values[16] = PyTuple_GET_ITEM(__pyx_args, 16); - CYTHON_FALLTHROUGH; - case 16: values[15] = PyTuple_GET_ITEM(__pyx_args, 15); - CYTHON_FALLTHROUGH; - case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); - CYTHON_FALLTHROUGH; - case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); - CYTHON_FALLTHROUGH; - case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); - CYTHON_FALLTHROUGH; - case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); - CYTHON_FALLTHROUGH; - case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); - CYTHON_FALLTHROUGH; - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: 
values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_file_like = values[0]; - if (values[1]) { - __pyx_v_read_size = __Pyx_PyIndex_AsSsize_t(values[1]); if (unlikely((__pyx_v_read_size == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 344, __pyx_L3_error) - } else { - __pyx_v_read_size = ((Py_ssize_t)0); - } - if (values[2]) { - __pyx_v_use_list = __Pyx_PyObject_IsTrue(values[2]); if (unlikely((__pyx_v_use_list == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 345, __pyx_L3_error) - } else { - - /* "msgpack/_unpacker.pyx":345 - * - * def __init__(self, file_like=None, Py_ssize_t read_size=0, - * bint use_list=True, bint raw=True, bint strict_map_key=False, # <<<<<<<<<<<<<< - * object object_hook=None, object object_pairs_hook=None, object list_hook=None, - * encoding=None, unicode_errors=None, Py_ssize_t max_buffer_size=0, - */ - __pyx_v_use_list = ((int)1); - } - if (values[3]) { - __pyx_v_raw = __Pyx_PyObject_IsTrue(values[3]); if (unlikely((__pyx_v_raw == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 345, __pyx_L3_error) - } else { - __pyx_v_raw = ((int)1); - } - if (values[4]) { - __pyx_v_strict_map_key = __Pyx_PyObject_IsTrue(values[4]); if (unlikely((__pyx_v_strict_map_key == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 345, __pyx_L3_error) - } else { - __pyx_v_strict_map_key = ((int)0); - } - __pyx_v_object_hook = values[5]; - __pyx_v_object_pairs_hook = values[6]; - __pyx_v_list_hook = values[7]; - __pyx_v_encoding = values[8]; - __pyx_v_unicode_errors = values[9]; - if (values[10]) { - __pyx_v_max_buffer_size = __Pyx_PyIndex_AsSsize_t(values[10]); if (unlikely((__pyx_v_max_buffer_size == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 347, __pyx_L3_error) - } else { - __pyx_v_max_buffer_size = ((Py_ssize_t)0); - } - __pyx_v_ext_hook = values[11]; - if (values[12]) { - __pyx_v_max_str_len = __Pyx_PyIndex_AsSsize_t(values[12]); if (unlikely((__pyx_v_max_str_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 349, __pyx_L3_error) - } else { - __pyx_v_max_str_len = ((Py_ssize_t)-1L); - } - if (values[13]) { - __pyx_v_max_bin_len = __Pyx_PyIndex_AsSsize_t(values[13]); if (unlikely((__pyx_v_max_bin_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 350, __pyx_L3_error) - } else { - __pyx_v_max_bin_len = ((Py_ssize_t)-1L); - } - if (values[14]) { - __pyx_v_max_array_len = __Pyx_PyIndex_AsSsize_t(values[14]); if (unlikely((__pyx_v_max_array_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 351, __pyx_L3_error) - } else { - __pyx_v_max_array_len = ((Py_ssize_t)-1L); - } - if (values[15]) { - __pyx_v_max_map_len = __Pyx_PyIndex_AsSsize_t(values[15]); if (unlikely((__pyx_v_max_map_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 352, __pyx_L3_error) - } else { - __pyx_v_max_map_len = ((Py_ssize_t)-1L); - } - if (values[16]) { - __pyx_v_max_ext_len = 
__Pyx_PyIndex_AsSsize_t(values[16]); if (unlikely((__pyx_v_max_ext_len == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 353, __pyx_L3_error) - } else { - __pyx_v_max_ext_len = ((Py_ssize_t)-1L); - } - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 17, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 344, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_4__init__(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self), __pyx_v_file_like, __pyx_v_read_size, __pyx_v_use_list, __pyx_v_raw, __pyx_v_strict_map_key, __pyx_v_object_hook, __pyx_v_object_pairs_hook, __pyx_v_list_hook, __pyx_v_encoding, __pyx_v_unicode_errors, __pyx_v_max_buffer_size, __pyx_v_ext_hook, __pyx_v_max_str_len, __pyx_v_max_bin_len, __pyx_v_max_array_len, __pyx_v_max_map_len, __pyx_v_max_ext_len); - - /* "msgpack/_unpacker.pyx":344 - * self.buf = NULL - * - * def __init__(self, file_like=None, Py_ssize_t read_size=0, # <<<<<<<<<<<<<< - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * object object_hook=None, object object_pairs_hook=None, object list_hook=None, - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_4__init__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, PyObject *__pyx_v_file_like, Py_ssize_t __pyx_v_read_size, int __pyx_v_use_list, int __pyx_v_raw, int __pyx_v_strict_map_key, PyObject *__pyx_v_object_hook, PyObject *__pyx_v_object_pairs_hook, PyObject *__pyx_v_list_hook, PyObject *__pyx_v_encoding, PyObject *__pyx_v_unicode_errors, Py_ssize_t __pyx_v_max_buffer_size, PyObject *__pyx_v_ext_hook, Py_ssize_t __pyx_v_max_str_len, Py_ssize_t __pyx_v_max_bin_len, Py_ssize_t __pyx_v_max_array_len, Py_ssize_t __pyx_v_max_map_len, Py_ssize_t __pyx_v_max_ext_len) { - char const *__pyx_v_cenc; - char const *__pyx_v_cerr; - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - Py_ssize_t __pyx_t_3; - Py_ssize_t __pyx_t_4; - long __pyx_t_5; - int __pyx_t_6; - int __pyx_t_7; - char const *__pyx_t_8; - char const *__pyx_t_9; - __Pyx_RefNannySetupContext("__init__", 0); - - /* "msgpack/_unpacker.pyx":354 - * Py_ssize_t max_map_len=-1, - * Py_ssize_t max_ext_len=-1): - * cdef const char *cenc=NULL, # <<<<<<<<<<<<<< - * cdef const char *cerr=NULL - * - */ - __pyx_v_cenc = NULL; - - /* "msgpack/_unpacker.pyx":355 - * Py_ssize_t max_ext_len=-1): - * cdef const char *cenc=NULL, - * cdef const char *cerr=NULL # <<<<<<<<<<<<<< - * - * self.object_hook = object_hook - */ - __pyx_v_cerr = NULL; - - /* "msgpack/_unpacker.pyx":357 - * cdef const char *cerr=NULL - * - * self.object_hook = object_hook # <<<<<<<<<<<<<< - * self.object_pairs_hook = object_pairs_hook - * self.list_hook = list_hook - */ - __Pyx_INCREF(__pyx_v_object_hook); - __Pyx_GIVEREF(__pyx_v_object_hook); - __Pyx_GOTREF(__pyx_v_self->object_hook); - __Pyx_DECREF(__pyx_v_self->object_hook); - __pyx_v_self->object_hook = __pyx_v_object_hook; - - /* "msgpack/_unpacker.pyx":358 - * - * self.object_hook = object_hook - * self.object_pairs_hook = object_pairs_hook # <<<<<<<<<<<<<< - * self.list_hook = list_hook - * self.ext_hook = ext_hook - */ - __Pyx_INCREF(__pyx_v_object_pairs_hook); - 
__Pyx_GIVEREF(__pyx_v_object_pairs_hook); - __Pyx_GOTREF(__pyx_v_self->object_pairs_hook); - __Pyx_DECREF(__pyx_v_self->object_pairs_hook); - __pyx_v_self->object_pairs_hook = __pyx_v_object_pairs_hook; - - /* "msgpack/_unpacker.pyx":359 - * self.object_hook = object_hook - * self.object_pairs_hook = object_pairs_hook - * self.list_hook = list_hook # <<<<<<<<<<<<<< - * self.ext_hook = ext_hook - * - */ - __Pyx_INCREF(__pyx_v_list_hook); - __Pyx_GIVEREF(__pyx_v_list_hook); - __Pyx_GOTREF(__pyx_v_self->list_hook); - __Pyx_DECREF(__pyx_v_self->list_hook); - __pyx_v_self->list_hook = __pyx_v_list_hook; - - /* "msgpack/_unpacker.pyx":360 - * self.object_pairs_hook = object_pairs_hook - * self.list_hook = list_hook - * self.ext_hook = ext_hook # <<<<<<<<<<<<<< - * - * self.file_like = file_like - */ - __Pyx_INCREF(__pyx_v_ext_hook); - __Pyx_GIVEREF(__pyx_v_ext_hook); - __Pyx_GOTREF(__pyx_v_self->ext_hook); - __Pyx_DECREF(__pyx_v_self->ext_hook); - __pyx_v_self->ext_hook = __pyx_v_ext_hook; - - /* "msgpack/_unpacker.pyx":362 - * self.ext_hook = ext_hook - * - * self.file_like = file_like # <<<<<<<<<<<<<< - * if file_like: - * self.file_like_read = file_like.read - */ - __Pyx_INCREF(__pyx_v_file_like); - __Pyx_GIVEREF(__pyx_v_file_like); - __Pyx_GOTREF(__pyx_v_self->file_like); - __Pyx_DECREF(__pyx_v_self->file_like); - __pyx_v_self->file_like = __pyx_v_file_like; - - /* "msgpack/_unpacker.pyx":363 - * - * self.file_like = file_like - * if file_like: # <<<<<<<<<<<<<< - * self.file_like_read = file_like.read - * if not PyCallable_Check(self.file_like_read): - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_file_like); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(1, 363, __pyx_L1_error) - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":364 - * self.file_like = file_like - * if file_like: - * self.file_like_read = file_like.read # <<<<<<<<<<<<<< - * if not PyCallable_Check(self.file_like_read): - * raise TypeError("`file_like.read` must be a callable.") - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_file_like, __pyx_n_s_read); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 364, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->file_like_read); - __Pyx_DECREF(__pyx_v_self->file_like_read); - __pyx_v_self->file_like_read = __pyx_t_2; - __pyx_t_2 = 0; - - /* "msgpack/_unpacker.pyx":365 - * if file_like: - * self.file_like_read = file_like.read - * if not PyCallable_Check(self.file_like_read): # <<<<<<<<<<<<<< - * raise TypeError("`file_like.read` must be a callable.") - * - */ - __pyx_t_2 = __pyx_v_self->file_like_read; - __Pyx_INCREF(__pyx_t_2); - __pyx_t_1 = ((!(PyCallable_Check(__pyx_t_2) != 0)) != 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":366 - * self.file_like_read = file_like.read - * if not PyCallable_Check(self.file_like_read): - * raise TypeError("`file_like.read` must be a callable.") # <<<<<<<<<<<<<< - * - * if max_str_len == -1: - */ - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__25, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 366, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(1, 366, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":365 - * if file_like: - * self.file_like_read = file_like.read - * if not PyCallable_Check(self.file_like_read): # <<<<<<<<<<<<<< - * raise TypeError("`file_like.read` must be a callable.") - * - */ - } - - /* "msgpack/_unpacker.pyx":363 - * - * 
self.file_like = file_like - * if file_like: # <<<<<<<<<<<<<< - * self.file_like_read = file_like.read - * if not PyCallable_Check(self.file_like_read): - */ - } - - /* "msgpack/_unpacker.pyx":368 - * raise TypeError("`file_like.read` must be a callable.") - * - * if max_str_len == -1: # <<<<<<<<<<<<<< - * max_str_len = max_buffer_size or 1024*1024 - * if max_bin_len == -1: - */ - __pyx_t_1 = ((__pyx_v_max_str_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":369 - * - * if max_str_len == -1: - * max_str_len = max_buffer_size or 1024*1024 # <<<<<<<<<<<<<< - * if max_bin_len == -1: - * max_bin_len = max_buffer_size or 1024*1024 - */ - if (!__pyx_v_max_buffer_size) { - } else { - __pyx_t_3 = __pyx_v_max_buffer_size; - goto __pyx_L6_bool_binop_done; - } - __pyx_t_3 = 0x100000; - __pyx_L6_bool_binop_done:; - __pyx_v_max_str_len = __pyx_t_3; - - /* "msgpack/_unpacker.pyx":368 - * raise TypeError("`file_like.read` must be a callable.") - * - * if max_str_len == -1: # <<<<<<<<<<<<<< - * max_str_len = max_buffer_size or 1024*1024 - * if max_bin_len == -1: - */ - } - - /* "msgpack/_unpacker.pyx":370 - * if max_str_len == -1: - * max_str_len = max_buffer_size or 1024*1024 - * if max_bin_len == -1: # <<<<<<<<<<<<<< - * max_bin_len = max_buffer_size or 1024*1024 - * if max_array_len == -1: - */ - __pyx_t_1 = ((__pyx_v_max_bin_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":371 - * max_str_len = max_buffer_size or 1024*1024 - * if max_bin_len == -1: - * max_bin_len = max_buffer_size or 1024*1024 # <<<<<<<<<<<<<< - * if max_array_len == -1: - * max_array_len = max_buffer_size or 128*1024 - */ - if (!__pyx_v_max_buffer_size) { - } else { - __pyx_t_3 = __pyx_v_max_buffer_size; - goto __pyx_L9_bool_binop_done; - } - __pyx_t_3 = 0x100000; - __pyx_L9_bool_binop_done:; - __pyx_v_max_bin_len = __pyx_t_3; - - /* "msgpack/_unpacker.pyx":370 - * if max_str_len == -1: - * max_str_len = max_buffer_size or 1024*1024 - * if max_bin_len == -1: # <<<<<<<<<<<<<< - * max_bin_len = max_buffer_size or 1024*1024 - * if max_array_len == -1: - */ - } - - /* "msgpack/_unpacker.pyx":372 - * if max_bin_len == -1: - * max_bin_len = max_buffer_size or 1024*1024 - * if max_array_len == -1: # <<<<<<<<<<<<<< - * max_array_len = max_buffer_size or 128*1024 - * if max_map_len == -1: - */ - __pyx_t_1 = ((__pyx_v_max_array_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":373 - * max_bin_len = max_buffer_size or 1024*1024 - * if max_array_len == -1: - * max_array_len = max_buffer_size or 128*1024 # <<<<<<<<<<<<<< - * if max_map_len == -1: - * max_map_len = max_buffer_size//2 or 32*1024 - */ - if (!__pyx_v_max_buffer_size) { - } else { - __pyx_t_3 = __pyx_v_max_buffer_size; - goto __pyx_L12_bool_binop_done; - } - __pyx_t_3 = 0x20000; - __pyx_L12_bool_binop_done:; - __pyx_v_max_array_len = __pyx_t_3; - - /* "msgpack/_unpacker.pyx":372 - * if max_bin_len == -1: - * max_bin_len = max_buffer_size or 1024*1024 - * if max_array_len == -1: # <<<<<<<<<<<<<< - * max_array_len = max_buffer_size or 128*1024 - * if max_map_len == -1: - */ - } - - /* "msgpack/_unpacker.pyx":374 - * if max_array_len == -1: - * max_array_len = max_buffer_size or 128*1024 - * if max_map_len == -1: # <<<<<<<<<<<<<< - * max_map_len = max_buffer_size//2 or 32*1024 - * if max_ext_len == -1: - */ - __pyx_t_1 = ((__pyx_v_max_map_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":375 - * max_array_len = max_buffer_size or 128*1024 - * if max_map_len == -1: - * max_map_len = max_buffer_size//2 or 
32*1024 # <<<<<<<<<<<<<< - * if max_ext_len == -1: - * max_ext_len = max_buffer_size or 1024*1024 - */ - __pyx_t_4 = __Pyx_div_Py_ssize_t(__pyx_v_max_buffer_size, 2); - if (!__pyx_t_4) { - } else { - __pyx_t_3 = __pyx_t_4; - goto __pyx_L15_bool_binop_done; - } - __pyx_t_3 = 0x8000; - __pyx_L15_bool_binop_done:; - __pyx_v_max_map_len = __pyx_t_3; - - /* "msgpack/_unpacker.pyx":374 - * if max_array_len == -1: - * max_array_len = max_buffer_size or 128*1024 - * if max_map_len == -1: # <<<<<<<<<<<<<< - * max_map_len = max_buffer_size//2 or 32*1024 - * if max_ext_len == -1: - */ - } - - /* "msgpack/_unpacker.pyx":376 - * if max_map_len == -1: - * max_map_len = max_buffer_size//2 or 32*1024 - * if max_ext_len == -1: # <<<<<<<<<<<<<< - * max_ext_len = max_buffer_size or 1024*1024 - * - */ - __pyx_t_1 = ((__pyx_v_max_ext_len == -1L) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":377 - * max_map_len = max_buffer_size//2 or 32*1024 - * if max_ext_len == -1: - * max_ext_len = max_buffer_size or 1024*1024 # <<<<<<<<<<<<<< - * - * if not max_buffer_size: - */ - if (!__pyx_v_max_buffer_size) { - } else { - __pyx_t_3 = __pyx_v_max_buffer_size; - goto __pyx_L18_bool_binop_done; - } - __pyx_t_3 = 0x100000; - __pyx_L18_bool_binop_done:; - __pyx_v_max_ext_len = __pyx_t_3; - - /* "msgpack/_unpacker.pyx":376 - * if max_map_len == -1: - * max_map_len = max_buffer_size//2 or 32*1024 - * if max_ext_len == -1: # <<<<<<<<<<<<<< - * max_ext_len = max_buffer_size or 1024*1024 - * - */ - } - - /* "msgpack/_unpacker.pyx":379 - * max_ext_len = max_buffer_size or 1024*1024 - * - * if not max_buffer_size: # <<<<<<<<<<<<<< - * max_buffer_size = INT_MAX - * if read_size > max_buffer_size: - */ - __pyx_t_1 = ((!(__pyx_v_max_buffer_size != 0)) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":380 - * - * if not max_buffer_size: - * max_buffer_size = INT_MAX # <<<<<<<<<<<<<< - * if read_size > max_buffer_size: - * raise ValueError("read_size should be less or equal to max_buffer_size") - */ - __pyx_v_max_buffer_size = INT_MAX; - - /* "msgpack/_unpacker.pyx":379 - * max_ext_len = max_buffer_size or 1024*1024 - * - * if not max_buffer_size: # <<<<<<<<<<<<<< - * max_buffer_size = INT_MAX - * if read_size > max_buffer_size: - */ - } - - /* "msgpack/_unpacker.pyx":381 - * if not max_buffer_size: - * max_buffer_size = INT_MAX - * if read_size > max_buffer_size: # <<<<<<<<<<<<<< - * raise ValueError("read_size should be less or equal to max_buffer_size") - * if not read_size: - */ - __pyx_t_1 = ((__pyx_v_read_size > __pyx_v_max_buffer_size) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":382 - * max_buffer_size = INT_MAX - * if read_size > max_buffer_size: - * raise ValueError("read_size should be less or equal to max_buffer_size") # <<<<<<<<<<<<<< - * if not read_size: - * read_size = min(max_buffer_size, 1024**2) - */ - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__26, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 382, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(1, 382, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":381 - * if not max_buffer_size: - * max_buffer_size = INT_MAX - * if read_size > max_buffer_size: # <<<<<<<<<<<<<< - * raise ValueError("read_size should be less or equal to max_buffer_size") - * if not read_size: - */ - } - - /* "msgpack/_unpacker.pyx":383 - * if read_size > max_buffer_size: - * raise ValueError("read_size should be less or equal to max_buffer_size") - * 
if not read_size: # <<<<<<<<<<<<<< - * read_size = min(max_buffer_size, 1024**2) - * self.max_buffer_size = max_buffer_size - */ - __pyx_t_1 = ((!(__pyx_v_read_size != 0)) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":384 - * raise ValueError("read_size should be less or equal to max_buffer_size") - * if not read_size: - * read_size = min(max_buffer_size, 1024**2) # <<<<<<<<<<<<<< - * self.max_buffer_size = max_buffer_size - * self.read_size = read_size - */ - __pyx_t_5 = 0x100000; - __pyx_t_3 = __pyx_v_max_buffer_size; - if (((__pyx_t_5 < __pyx_t_3) != 0)) { - __pyx_t_4 = __pyx_t_5; - } else { - __pyx_t_4 = __pyx_t_3; - } - __pyx_v_read_size = __pyx_t_4; - - /* "msgpack/_unpacker.pyx":383 - * if read_size > max_buffer_size: - * raise ValueError("read_size should be less or equal to max_buffer_size") - * if not read_size: # <<<<<<<<<<<<<< - * read_size = min(max_buffer_size, 1024**2) - * self.max_buffer_size = max_buffer_size - */ - } - - /* "msgpack/_unpacker.pyx":385 - * if not read_size: - * read_size = min(max_buffer_size, 1024**2) - * self.max_buffer_size = max_buffer_size # <<<<<<<<<<<<<< - * self.read_size = read_size - * self.buf = PyMem_Malloc(read_size) - */ - __pyx_v_self->max_buffer_size = __pyx_v_max_buffer_size; - - /* "msgpack/_unpacker.pyx":386 - * read_size = min(max_buffer_size, 1024**2) - * self.max_buffer_size = max_buffer_size - * self.read_size = read_size # <<<<<<<<<<<<<< - * self.buf = PyMem_Malloc(read_size) - * if self.buf == NULL: - */ - __pyx_v_self->read_size = __pyx_v_read_size; - - /* "msgpack/_unpacker.pyx":387 - * self.max_buffer_size = max_buffer_size - * self.read_size = read_size - * self.buf = PyMem_Malloc(read_size) # <<<<<<<<<<<<<< - * if self.buf == NULL: - * raise MemoryError("Unable to allocate internal buffer.") - */ - __pyx_v_self->buf = ((char *)PyMem_Malloc(__pyx_v_read_size)); - - /* "msgpack/_unpacker.pyx":388 - * self.read_size = read_size - * self.buf = PyMem_Malloc(read_size) - * if self.buf == NULL: # <<<<<<<<<<<<<< - * raise MemoryError("Unable to allocate internal buffer.") - * self.buf_size = read_size - */ - __pyx_t_1 = ((__pyx_v_self->buf == NULL) != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":389 - * self.buf = PyMem_Malloc(read_size) - * if self.buf == NULL: - * raise MemoryError("Unable to allocate internal buffer.") # <<<<<<<<<<<<<< - * self.buf_size = read_size - * self.buf_head = 0 - */ - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_MemoryError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 389, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(1, 389, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":388 - * self.read_size = read_size - * self.buf = PyMem_Malloc(read_size) - * if self.buf == NULL: # <<<<<<<<<<<<<< - * raise MemoryError("Unable to allocate internal buffer.") - * self.buf_size = read_size - */ - } - - /* "msgpack/_unpacker.pyx":390 - * if self.buf == NULL: - * raise MemoryError("Unable to allocate internal buffer.") - * self.buf_size = read_size # <<<<<<<<<<<<<< - * self.buf_head = 0 - * self.buf_tail = 0 - */ - __pyx_v_self->buf_size = __pyx_v_read_size; - - /* "msgpack/_unpacker.pyx":391 - * raise MemoryError("Unable to allocate internal buffer.") - * self.buf_size = read_size - * self.buf_head = 0 # <<<<<<<<<<<<<< - * self.buf_tail = 0 - * self.stream_offset = 0 - */ - __pyx_v_self->buf_head = 0; - - /* "msgpack/_unpacker.pyx":392 - * self.buf_size = read_size - * self.buf_head = 0 - * 
self.buf_tail = 0 # <<<<<<<<<<<<<< - * self.stream_offset = 0 - * - */ - __pyx_v_self->buf_tail = 0; - - /* "msgpack/_unpacker.pyx":393 - * self.buf_head = 0 - * self.buf_tail = 0 - * self.stream_offset = 0 # <<<<<<<<<<<<<< - * - * if encoding is not None: - */ - __pyx_v_self->stream_offset = 0; - - /* "msgpack/_unpacker.pyx":395 - * self.stream_offset = 0 - * - * if encoding is not None: # <<<<<<<<<<<<<< - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) - * self.encoding = encoding - */ - __pyx_t_1 = (__pyx_v_encoding != Py_None); - __pyx_t_6 = (__pyx_t_1 != 0); - if (__pyx_t_6) { - - /* "msgpack/_unpacker.pyx":396 - * - * if encoding is not None: - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) # <<<<<<<<<<<<<< - * self.encoding = encoding - * cenc = encoding - */ - __pyx_t_7 = PyErr_WarnEx(__pyx_builtin_DeprecationWarning, ((char *)"encoding is deprecated, Use raw=False instead."), 1); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(1, 396, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":397 - * if encoding is not None: - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) - * self.encoding = encoding # <<<<<<<<<<<<<< - * cenc = encoding - * - */ - __Pyx_INCREF(__pyx_v_encoding); - __Pyx_GIVEREF(__pyx_v_encoding); - __Pyx_GOTREF(__pyx_v_self->encoding); - __Pyx_DECREF(__pyx_v_self->encoding); - __pyx_v_self->encoding = __pyx_v_encoding; - - /* "msgpack/_unpacker.pyx":398 - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) - * self.encoding = encoding - * cenc = encoding # <<<<<<<<<<<<<< - * - * if unicode_errors is not None: - */ - __pyx_t_8 = __Pyx_PyObject_AsString(__pyx_v_encoding); if (unlikely((!__pyx_t_8) && PyErr_Occurred())) __PYX_ERR(1, 398, __pyx_L1_error) - __pyx_v_cenc = __pyx_t_8; - - /* "msgpack/_unpacker.pyx":395 - * self.stream_offset = 0 - * - * if encoding is not None: # <<<<<<<<<<<<<< - * PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) - * self.encoding = encoding - */ - } - - /* "msgpack/_unpacker.pyx":400 - * cenc = encoding - * - * if unicode_errors is not None: # <<<<<<<<<<<<<< - * self.unicode_errors = unicode_errors - * cerr = unicode_errors - */ - __pyx_t_6 = (__pyx_v_unicode_errors != Py_None); - __pyx_t_1 = (__pyx_t_6 != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":401 - * - * if unicode_errors is not None: - * self.unicode_errors = unicode_errors # <<<<<<<<<<<<<< - * cerr = unicode_errors - * - */ - __Pyx_INCREF(__pyx_v_unicode_errors); - __Pyx_GIVEREF(__pyx_v_unicode_errors); - __Pyx_GOTREF(__pyx_v_self->unicode_errors); - __Pyx_DECREF(__pyx_v_self->unicode_errors); - __pyx_v_self->unicode_errors = __pyx_v_unicode_errors; - - /* "msgpack/_unpacker.pyx":402 - * if unicode_errors is not None: - * self.unicode_errors = unicode_errors - * cerr = unicode_errors # <<<<<<<<<<<<<< - * - * init_ctx(&self.ctx, object_hook, object_pairs_hook, list_hook, - */ - __pyx_t_9 = __Pyx_PyObject_AsString(__pyx_v_unicode_errors); if (unlikely((!__pyx_t_9) && PyErr_Occurred())) __PYX_ERR(1, 402, __pyx_L1_error) - __pyx_v_cerr = __pyx_t_9; - - /* "msgpack/_unpacker.pyx":400 - * cenc = encoding - * - * if unicode_errors is not None: # <<<<<<<<<<<<<< - * self.unicode_errors = unicode_errors - * cerr = unicode_errors - */ - } - - /* "msgpack/_unpacker.pyx":404 - * cerr = unicode_errors - * - * init_ctx(&self.ctx, object_hook, object_pairs_hook, list_hook, # <<<<<<<<<<<<<< - * ext_hook, 
use_list, raw, strict_map_key, cenc, cerr, - * max_str_len, max_bin_len, max_array_len, - */ - __pyx_t_2 = __pyx_f_7msgpack_9_cmsgpack_init_ctx((&__pyx_v_self->ctx), __pyx_v_object_hook, __pyx_v_object_pairs_hook, __pyx_v_list_hook, __pyx_v_ext_hook, __pyx_v_use_list, __pyx_v_raw, __pyx_v_strict_map_key, __pyx_v_cenc, __pyx_v_cerr, __pyx_v_max_str_len, __pyx_v_max_bin_len, __pyx_v_max_array_len, __pyx_v_max_map_len, __pyx_v_max_ext_len); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 404, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "msgpack/_unpacker.pyx":344 - * self.buf = NULL - * - * def __init__(self, file_like=None, Py_ssize_t read_size=0, # <<<<<<<<<<<<<< - * bint use_list=True, bint raw=True, bint strict_map_key=False, - * object object_hook=None, object object_pairs_hook=None, object list_hook=None, - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":409 - * max_map_len, max_ext_len) - * - * def feed(self, object next_bytes): # <<<<<<<<<<<<<< - * """Append `next_bytes` to internal buffer.""" - * cdef Py_buffer pybuff - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_7feed(PyObject *__pyx_v_self, PyObject *__pyx_v_next_bytes); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_8Unpacker_6feed[] = "Unpacker.feed(self, next_bytes)\nAppend `next_bytes` to internal buffer."; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_7feed(PyObject *__pyx_v_self, PyObject *__pyx_v_next_bytes) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("feed (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_6feed(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self), ((PyObject *)__pyx_v_next_bytes)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_6feed(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, PyObject *__pyx_v_next_bytes) { - Py_buffer __pyx_v_pybuff; - int __pyx_v_new_protocol; - char *__pyx_v_buf; - Py_ssize_t __pyx_v_buf_len; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - int __pyx_t_5; - char const *__pyx_t_6; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - PyObject *__pyx_t_10 = NULL; - PyObject *__pyx_t_11 = NULL; - PyObject *__pyx_t_12 = NULL; - __Pyx_RefNannySetupContext("feed", 0); - - /* "msgpack/_unpacker.pyx":412 - * """Append `next_bytes` to internal buffer.""" - * cdef Py_buffer pybuff - * cdef int new_protocol = 0 # <<<<<<<<<<<<<< - * cdef char* buf - * cdef Py_ssize_t buf_len - */ - __pyx_v_new_protocol = 0; - - /* "msgpack/_unpacker.pyx":416 - * cdef Py_ssize_t buf_len - * - * if self.file_like is not None: # <<<<<<<<<<<<<< - * raise AssertionError( - * "unpacker.feed() is not be able to use with `file_like`.") - */ - __pyx_t_1 = (__pyx_v_self->file_like != Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (unlikely(__pyx_t_2)) { - - /* "msgpack/_unpacker.pyx":417 - * - * if self.file_like is not None: - * raise AssertionError( # <<<<<<<<<<<<<< - * "unpacker.feed() is not be able to use with `file_like`.") - * - */ - __pyx_t_3 
= __Pyx_PyObject_Call(__pyx_builtin_AssertionError, __pyx_tuple__27, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 417, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(1, 417, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":416 - * cdef Py_ssize_t buf_len - * - * if self.file_like is not None: # <<<<<<<<<<<<<< - * raise AssertionError( - * "unpacker.feed() is not be able to use with `file_like`.") - */ - } - - /* "msgpack/_unpacker.pyx":420 - * "unpacker.feed() is not be able to use with `file_like`.") - * - * get_data_from_buffer(next_bytes, &pybuff, &buf, &buf_len, &new_protocol) # <<<<<<<<<<<<<< - * try: - * self.append_buffer(buf, buf_len) - */ - __pyx_t_4 = __pyx_f_7msgpack_9_cmsgpack_get_data_from_buffer(__pyx_v_next_bytes, (&__pyx_v_pybuff), (&__pyx_v_buf), (&__pyx_v_buf_len), (&__pyx_v_new_protocol)); if (unlikely(__pyx_t_4 == ((int)0))) __PYX_ERR(1, 420, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":421 - * - * get_data_from_buffer(next_bytes, &pybuff, &buf, &buf_len, &new_protocol) - * try: # <<<<<<<<<<<<<< - * self.append_buffer(buf, buf_len) - * finally: - */ - /*try:*/ { - - /* "msgpack/_unpacker.pyx":422 - * get_data_from_buffer(next_bytes, &pybuff, &buf, &buf_len, &new_protocol) - * try: - * self.append_buffer(buf, buf_len) # <<<<<<<<<<<<<< - * finally: - * if new_protocol: - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self->__pyx_vtab)->append_buffer(__pyx_v_self, __pyx_v_buf, __pyx_v_buf_len); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 422, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - - /* "msgpack/_unpacker.pyx":424 - * self.append_buffer(buf, buf_len) - * finally: - * if new_protocol: # <<<<<<<<<<<<<< - * PyBuffer_Release(&pybuff) - * - */ - /*finally:*/ { - /*normal exit:*/{ - __pyx_t_2 = (__pyx_v_new_protocol != 0); - if (__pyx_t_2) { - - /* "msgpack/_unpacker.pyx":425 - * finally: - * if new_protocol: - * PyBuffer_Release(&pybuff) # <<<<<<<<<<<<<< - * - * cdef append_buffer(self, void* _buf, Py_ssize_t _buf_len): - */ - PyBuffer_Release((&__pyx_v_pybuff)); - - /* "msgpack/_unpacker.pyx":424 - * self.append_buffer(buf, buf_len) - * finally: - * if new_protocol: # <<<<<<<<<<<<<< - * PyBuffer_Release(&pybuff) - * - */ - } - goto __pyx_L6; - } - __pyx_L5_error:; - /*exception exit:*/{ - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); - if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9) < 0)) __Pyx_ErrFetch(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9); - __Pyx_XGOTREF(__pyx_t_7); - __Pyx_XGOTREF(__pyx_t_8); - __Pyx_XGOTREF(__pyx_t_9); - __Pyx_XGOTREF(__pyx_t_10); - __Pyx_XGOTREF(__pyx_t_11); - __Pyx_XGOTREF(__pyx_t_12); - __pyx_t_4 = __pyx_lineno; __pyx_t_5 = __pyx_clineno; __pyx_t_6 = __pyx_filename; - { - __pyx_t_2 = (__pyx_v_new_protocol != 0); - if (__pyx_t_2) { - - /* "msgpack/_unpacker.pyx":425 - * finally: - * if new_protocol: - * PyBuffer_Release(&pybuff) # <<<<<<<<<<<<<< - * - * cdef append_buffer(self, void* _buf, Py_ssize_t _buf_len): - */ - PyBuffer_Release((&__pyx_v_pybuff)); - - /* "msgpack/_unpacker.pyx":424 - * self.append_buffer(buf, buf_len) - * finally: - * if new_protocol: # <<<<<<<<<<<<<< - * PyBuffer_Release(&pybuff) - * - */ 
- } - } - if (PY_MAJOR_VERSION >= 3) { - __Pyx_XGIVEREF(__pyx_t_10); - __Pyx_XGIVEREF(__pyx_t_11); - __Pyx_XGIVEREF(__pyx_t_12); - __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); - } - __Pyx_XGIVEREF(__pyx_t_7); - __Pyx_XGIVEREF(__pyx_t_8); - __Pyx_XGIVEREF(__pyx_t_9); - __Pyx_ErrRestore(__pyx_t_7, __pyx_t_8, __pyx_t_9); - __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; - __pyx_lineno = __pyx_t_4; __pyx_clineno = __pyx_t_5; __pyx_filename = __pyx_t_6; - goto __pyx_L1_error; - } - __pyx_L6:; - } - - /* "msgpack/_unpacker.pyx":409 - * max_map_len, max_ext_len) - * - * def feed(self, object next_bytes): # <<<<<<<<<<<<<< - * """Append `next_bytes` to internal buffer.""" - * cdef Py_buffer pybuff - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.feed", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":427 - * PyBuffer_Release(&pybuff) - * - * cdef append_buffer(self, void* _buf, Py_ssize_t _buf_len): # <<<<<<<<<<<<<< - * cdef: - * char* buf = self.buf - */ - -static PyObject *__pyx_f_7msgpack_9_cmsgpack_8Unpacker_append_buffer(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, void *__pyx_v__buf, Py_ssize_t __pyx_v__buf_len) { - char *__pyx_v_buf; - char *__pyx_v_new_buf; - Py_ssize_t __pyx_v_head; - Py_ssize_t __pyx_v_tail; - Py_ssize_t __pyx_v_buf_size; - Py_ssize_t __pyx_v_new_size; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - char *__pyx_t_1; - Py_ssize_t __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - Py_ssize_t __pyx_t_5; - Py_ssize_t __pyx_t_6; - __Pyx_RefNannySetupContext("append_buffer", 0); - - /* "msgpack/_unpacker.pyx":429 - * cdef append_buffer(self, void* _buf, Py_ssize_t _buf_len): - * cdef: - * char* buf = self.buf # <<<<<<<<<<<<<< - * char* new_buf - * Py_ssize_t head = self.buf_head - */ - __pyx_t_1 = __pyx_v_self->buf; - __pyx_v_buf = __pyx_t_1; - - /* "msgpack/_unpacker.pyx":431 - * char* buf = self.buf - * char* new_buf - * Py_ssize_t head = self.buf_head # <<<<<<<<<<<<<< - * Py_ssize_t tail = self.buf_tail - * Py_ssize_t buf_size = self.buf_size - */ - __pyx_t_2 = __pyx_v_self->buf_head; - __pyx_v_head = __pyx_t_2; - - /* "msgpack/_unpacker.pyx":432 - * char* new_buf - * Py_ssize_t head = self.buf_head - * Py_ssize_t tail = self.buf_tail # <<<<<<<<<<<<<< - * Py_ssize_t buf_size = self.buf_size - * Py_ssize_t new_size - */ - __pyx_t_2 = __pyx_v_self->buf_tail; - __pyx_v_tail = __pyx_t_2; - - /* "msgpack/_unpacker.pyx":433 - * Py_ssize_t head = self.buf_head - * Py_ssize_t tail = self.buf_tail - * Py_ssize_t buf_size = self.buf_size # <<<<<<<<<<<<<< - * Py_ssize_t new_size - * - */ - __pyx_t_2 = __pyx_v_self->buf_size; - __pyx_v_buf_size = __pyx_t_2; - - /* "msgpack/_unpacker.pyx":436 - * Py_ssize_t new_size - * - * if tail + _buf_len > buf_size: # <<<<<<<<<<<<<< - * if ((tail - head) + _buf_len) <= buf_size: - * # move to front. - */ - __pyx_t_3 = (((__pyx_v_tail + __pyx_v__buf_len) > __pyx_v_buf_size) != 0); - if (__pyx_t_3) { - - /* "msgpack/_unpacker.pyx":437 - * - * if tail + _buf_len > buf_size: - * if ((tail - head) + _buf_len) <= buf_size: # <<<<<<<<<<<<<< - * # move to front. 
- * memmove(buf, buf + head, tail - head) - */ - __pyx_t_3 = ((((__pyx_v_tail - __pyx_v_head) + __pyx_v__buf_len) <= __pyx_v_buf_size) != 0); - if (__pyx_t_3) { - - /* "msgpack/_unpacker.pyx":439 - * if ((tail - head) + _buf_len) <= buf_size: - * # move to front. - * memmove(buf, buf + head, tail - head) # <<<<<<<<<<<<<< - * tail -= head - * head = 0 - */ - (void)(memmove(__pyx_v_buf, (__pyx_v_buf + __pyx_v_head), (__pyx_v_tail - __pyx_v_head))); - - /* "msgpack/_unpacker.pyx":440 - * # move to front. - * memmove(buf, buf + head, tail - head) - * tail -= head # <<<<<<<<<<<<<< - * head = 0 - * else: - */ - __pyx_v_tail = (__pyx_v_tail - __pyx_v_head); - - /* "msgpack/_unpacker.pyx":441 - * memmove(buf, buf + head, tail - head) - * tail -= head - * head = 0 # <<<<<<<<<<<<<< - * else: - * # expand buffer. - */ - __pyx_v_head = 0; - - /* "msgpack/_unpacker.pyx":437 - * - * if tail + _buf_len > buf_size: - * if ((tail - head) + _buf_len) <= buf_size: # <<<<<<<<<<<<<< - * # move to front. - * memmove(buf, buf + head, tail - head) - */ - goto __pyx_L4; - } - - /* "msgpack/_unpacker.pyx":444 - * else: - * # expand buffer. - * new_size = (tail-head) + _buf_len # <<<<<<<<<<<<<< - * if new_size > self.max_buffer_size: - * raise BufferFull - */ - /*else*/ { - __pyx_v_new_size = ((__pyx_v_tail - __pyx_v_head) + __pyx_v__buf_len); - - /* "msgpack/_unpacker.pyx":445 - * # expand buffer. - * new_size = (tail-head) + _buf_len - * if new_size > self.max_buffer_size: # <<<<<<<<<<<<<< - * raise BufferFull - * new_size = min(new_size*2, self.max_buffer_size) - */ - __pyx_t_3 = ((__pyx_v_new_size > __pyx_v_self->max_buffer_size) != 0); - if (unlikely(__pyx_t_3)) { - - /* "msgpack/_unpacker.pyx":446 - * new_size = (tail-head) + _buf_len - * if new_size > self.max_buffer_size: - * raise BufferFull # <<<<<<<<<<<<<< - * new_size = min(new_size*2, self.max_buffer_size) - * new_buf = PyMem_Malloc(new_size) - */ - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_BufferFull); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 446, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 446, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":445 - * # expand buffer. 
- * new_size = (tail-head) + _buf_len - * if new_size > self.max_buffer_size: # <<<<<<<<<<<<<< - * raise BufferFull - * new_size = min(new_size*2, self.max_buffer_size) - */ - } - - /* "msgpack/_unpacker.pyx":447 - * if new_size > self.max_buffer_size: - * raise BufferFull - * new_size = min(new_size*2, self.max_buffer_size) # <<<<<<<<<<<<<< - * new_buf = PyMem_Malloc(new_size) - * if new_buf == NULL: - */ - __pyx_t_2 = __pyx_v_self->max_buffer_size; - __pyx_t_5 = (__pyx_v_new_size * 2); - if (((__pyx_t_2 < __pyx_t_5) != 0)) { - __pyx_t_6 = __pyx_t_2; - } else { - __pyx_t_6 = __pyx_t_5; - } - __pyx_v_new_size = __pyx_t_6; - - /* "msgpack/_unpacker.pyx":448 - * raise BufferFull - * new_size = min(new_size*2, self.max_buffer_size) - * new_buf = PyMem_Malloc(new_size) # <<<<<<<<<<<<<< - * if new_buf == NULL: - * # self.buf still holds old buffer and will be freed during - */ - __pyx_v_new_buf = ((char *)PyMem_Malloc(__pyx_v_new_size)); - - /* "msgpack/_unpacker.pyx":449 - * new_size = min(new_size*2, self.max_buffer_size) - * new_buf = PyMem_Malloc(new_size) - * if new_buf == NULL: # <<<<<<<<<<<<<< - * # self.buf still holds old buffer and will be freed during - * # obj destruction - */ - __pyx_t_3 = ((__pyx_v_new_buf == NULL) != 0); - if (unlikely(__pyx_t_3)) { - - /* "msgpack/_unpacker.pyx":452 - * # self.buf still holds old buffer and will be freed during - * # obj destruction - * raise MemoryError("Unable to enlarge internal buffer.") # <<<<<<<<<<<<<< - * memcpy(new_buf, buf + head, tail - head) - * PyMem_Free(buf) - */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_MemoryError, __pyx_tuple__28, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 452, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 452, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":449 - * new_size = min(new_size*2, self.max_buffer_size) - * new_buf = PyMem_Malloc(new_size) - * if new_buf == NULL: # <<<<<<<<<<<<<< - * # self.buf still holds old buffer and will be freed during - * # obj destruction - */ - } - - /* "msgpack/_unpacker.pyx":453 - * # obj destruction - * raise MemoryError("Unable to enlarge internal buffer.") - * memcpy(new_buf, buf + head, tail - head) # <<<<<<<<<<<<<< - * PyMem_Free(buf) - * - */ - (void)(memcpy(__pyx_v_new_buf, (__pyx_v_buf + __pyx_v_head), (__pyx_v_tail - __pyx_v_head))); - - /* "msgpack/_unpacker.pyx":454 - * raise MemoryError("Unable to enlarge internal buffer.") - * memcpy(new_buf, buf + head, tail - head) - * PyMem_Free(buf) # <<<<<<<<<<<<<< - * - * buf = new_buf - */ - PyMem_Free(__pyx_v_buf); - - /* "msgpack/_unpacker.pyx":456 - * PyMem_Free(buf) - * - * buf = new_buf # <<<<<<<<<<<<<< - * buf_size = new_size - * tail -= head - */ - __pyx_v_buf = __pyx_v_new_buf; - - /* "msgpack/_unpacker.pyx":457 - * - * buf = new_buf - * buf_size = new_size # <<<<<<<<<<<<<< - * tail -= head - * head = 0 - */ - __pyx_v_buf_size = __pyx_v_new_size; - - /* "msgpack/_unpacker.pyx":458 - * buf = new_buf - * buf_size = new_size - * tail -= head # <<<<<<<<<<<<<< - * head = 0 - * - */ - __pyx_v_tail = (__pyx_v_tail - __pyx_v_head); - - /* "msgpack/_unpacker.pyx":459 - * buf_size = new_size - * tail -= head - * head = 0 # <<<<<<<<<<<<<< - * - * memcpy(buf + tail, (_buf), _buf_len) - */ - __pyx_v_head = 0; - } - __pyx_L4:; - - /* "msgpack/_unpacker.pyx":436 - * Py_ssize_t new_size - * - * if tail + _buf_len > buf_size: # <<<<<<<<<<<<<< - * if ((tail - head) + _buf_len) <= buf_size: - * # move to front. 
- */ - } - - /* "msgpack/_unpacker.pyx":461 - * head = 0 - * - * memcpy(buf + tail, (_buf), _buf_len) # <<<<<<<<<<<<<< - * self.buf = buf - * self.buf_head = head - */ - (void)(memcpy((__pyx_v_buf + __pyx_v_tail), ((char *)__pyx_v__buf), __pyx_v__buf_len)); - - /* "msgpack/_unpacker.pyx":462 - * - * memcpy(buf + tail, (_buf), _buf_len) - * self.buf = buf # <<<<<<<<<<<<<< - * self.buf_head = head - * self.buf_size = buf_size - */ - __pyx_v_self->buf = __pyx_v_buf; - - /* "msgpack/_unpacker.pyx":463 - * memcpy(buf + tail, (_buf), _buf_len) - * self.buf = buf - * self.buf_head = head # <<<<<<<<<<<<<< - * self.buf_size = buf_size - * self.buf_tail = tail + _buf_len - */ - __pyx_v_self->buf_head = __pyx_v_head; - - /* "msgpack/_unpacker.pyx":464 - * self.buf = buf - * self.buf_head = head - * self.buf_size = buf_size # <<<<<<<<<<<<<< - * self.buf_tail = tail + _buf_len - * - */ - __pyx_v_self->buf_size = __pyx_v_buf_size; - - /* "msgpack/_unpacker.pyx":465 - * self.buf_head = head - * self.buf_size = buf_size - * self.buf_tail = tail + _buf_len # <<<<<<<<<<<<<< - * - * cdef read_from_file(self): - */ - __pyx_v_self->buf_tail = (__pyx_v_tail + __pyx_v__buf_len); - - /* "msgpack/_unpacker.pyx":427 - * PyBuffer_Release(&pybuff) - * - * cdef append_buffer(self, void* _buf, Py_ssize_t _buf_len): # <<<<<<<<<<<<<< - * cdef: - * char* buf = self.buf - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.append_buffer", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":467 - * self.buf_tail = tail + _buf_len - * - * cdef read_from_file(self): # <<<<<<<<<<<<<< - * next_bytes = self.file_like_read( - * min(self.read_size, - */ - -static PyObject *__pyx_f_7msgpack_9_cmsgpack_8Unpacker_read_from_file(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - PyObject *__pyx_v_next_bytes = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - Py_ssize_t __pyx_t_3; - Py_ssize_t __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - int __pyx_t_8; - char *__pyx_t_9; - __Pyx_RefNannySetupContext("read_from_file", 0); - - /* "msgpack/_unpacker.pyx":470 - * next_bytes = self.file_like_read( - * min(self.read_size, - * self.max_buffer_size - (self.buf_tail - self.buf_head) # <<<<<<<<<<<<<< - * )) - * if next_bytes: - */ - __pyx_t_2 = (__pyx_v_self->max_buffer_size - (__pyx_v_self->buf_tail - __pyx_v_self->buf_head)); - - /* "msgpack/_unpacker.pyx":469 - * cdef read_from_file(self): - * next_bytes = self.file_like_read( - * min(self.read_size, # <<<<<<<<<<<<<< - * self.max_buffer_size - (self.buf_tail - self.buf_head) - * )) - */ - __pyx_t_3 = __pyx_v_self->read_size; - - /* "msgpack/_unpacker.pyx":470 - * next_bytes = self.file_like_read( - * min(self.read_size, - * self.max_buffer_size - (self.buf_tail - self.buf_head) # <<<<<<<<<<<<<< - * )) - * if next_bytes: - */ - if (((__pyx_t_2 < __pyx_t_3) != 0)) { - __pyx_t_4 = __pyx_t_2; - } else { - __pyx_t_4 = __pyx_t_3; - } - __pyx_t_5 = PyInt_FromSsize_t(__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 470, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_INCREF(__pyx_v_self->file_like_read); - __pyx_t_6 = __pyx_v_self->file_like_read; __pyx_t_7 = NULL; - if (CYTHON_UNPACK_METHODS 
&& likely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - } - } - __pyx_t_1 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 468, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_v_next_bytes = __pyx_t_1; - __pyx_t_1 = 0; - - /* "msgpack/_unpacker.pyx":472 - * self.max_buffer_size - (self.buf_tail - self.buf_head) - * )) - * if next_bytes: # <<<<<<<<<<<<<< - * self.append_buffer(PyBytes_AsString(next_bytes), PyBytes_Size(next_bytes)) - * else: - */ - __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_v_next_bytes); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(1, 472, __pyx_L1_error) - if (__pyx_t_8) { - - /* "msgpack/_unpacker.pyx":473 - * )) - * if next_bytes: - * self.append_buffer(PyBytes_AsString(next_bytes), PyBytes_Size(next_bytes)) # <<<<<<<<<<<<<< - * else: - * self.file_like = None - */ - __pyx_t_9 = PyBytes_AsString(__pyx_v_next_bytes); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(1, 473, __pyx_L1_error) - __pyx_t_4 = PyBytes_Size(__pyx_v_next_bytes); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1L))) __PYX_ERR(1, 473, __pyx_L1_error) - __pyx_t_1 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self->__pyx_vtab)->append_buffer(__pyx_v_self, __pyx_t_9, __pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 473, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "msgpack/_unpacker.pyx":472 - * self.max_buffer_size - (self.buf_tail - self.buf_head) - * )) - * if next_bytes: # <<<<<<<<<<<<<< - * self.append_buffer(PyBytes_AsString(next_bytes), PyBytes_Size(next_bytes)) - * else: - */ - goto __pyx_L3; - } - - /* "msgpack/_unpacker.pyx":475 - * self.append_buffer(PyBytes_AsString(next_bytes), PyBytes_Size(next_bytes)) - * else: - * self.file_like = None # <<<<<<<<<<<<<< - * - * cdef object _unpack(self, execute_fn execute, bint iter=0): - */ - /*else*/ { - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_self->file_like); - __Pyx_DECREF(__pyx_v_self->file_like); - __pyx_v_self->file_like = Py_None; - } - __pyx_L3:; - - /* "msgpack/_unpacker.pyx":467 - * self.buf_tail = tail + _buf_len - * - * cdef read_from_file(self): # <<<<<<<<<<<<<< - * next_bytes = self.file_like_read( - * min(self.read_size, - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.read_from_file", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_next_bytes); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":477 - * self.file_like = None - * - * cdef object _unpack(self, execute_fn execute, bint iter=0): # <<<<<<<<<<<<<< - * cdef int ret - * cdef object obj - */ - -static PyObject *__pyx_f_7msgpack_9_cmsgpack_8Unpacker__unpack(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, execute_fn __pyx_v_execute, struct __pyx_opt_args_7msgpack_9_cmsgpack_8Unpacker__unpack 
*__pyx_optional_args) { - int __pyx_v_iter = ((int)0); - int __pyx_v_ret; - PyObject *__pyx_v_obj = 0; - Py_ssize_t __pyx_v_prev_head; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - Py_ssize_t __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - int __pyx_t_8; - __Pyx_RefNannySetupContext("_unpack", 0); - if (__pyx_optional_args) { - if (__pyx_optional_args->__pyx_n > 0) { - __pyx_v_iter = __pyx_optional_args->iter; - } - } - - /* "msgpack/_unpacker.pyx":482 - * cdef Py_ssize_t prev_head - * - * if self.buf_head >= self.buf_tail and self.file_like is not None: # <<<<<<<<<<<<<< - * self.read_from_file() - * - */ - __pyx_t_2 = ((__pyx_v_self->buf_head >= __pyx_v_self->buf_tail) != 0); - if (__pyx_t_2) { - } else { - __pyx_t_1 = __pyx_t_2; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_2 = (__pyx_v_self->file_like != Py_None); - __pyx_t_3 = (__pyx_t_2 != 0); - __pyx_t_1 = __pyx_t_3; - __pyx_L4_bool_binop_done:; - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":483 - * - * if self.buf_head >= self.buf_tail and self.file_like is not None: - * self.read_from_file() # <<<<<<<<<<<<<< - * - * while 1: - */ - __pyx_t_4 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self->__pyx_vtab)->read_from_file(__pyx_v_self); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 483, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - - /* "msgpack/_unpacker.pyx":482 - * cdef Py_ssize_t prev_head - * - * if self.buf_head >= self.buf_tail and self.file_like is not None: # <<<<<<<<<<<<<< - * self.read_from_file() - * - */ - } - - /* "msgpack/_unpacker.pyx":485 - * self.read_from_file() - * - * while 1: # <<<<<<<<<<<<<< - * prev_head = self.buf_head - * if prev_head >= self.buf_tail: - */ - while (1) { - - /* "msgpack/_unpacker.pyx":486 - * - * while 1: - * prev_head = self.buf_head # <<<<<<<<<<<<<< - * if prev_head >= self.buf_tail: - * if iter: - */ - __pyx_t_5 = __pyx_v_self->buf_head; - __pyx_v_prev_head = __pyx_t_5; - - /* "msgpack/_unpacker.pyx":487 - * while 1: - * prev_head = self.buf_head - * if prev_head >= self.buf_tail: # <<<<<<<<<<<<<< - * if iter: - * raise StopIteration("No more data to unpack.") - */ - __pyx_t_1 = ((__pyx_v_prev_head >= __pyx_v_self->buf_tail) != 0); - if (__pyx_t_1) { - - /* "msgpack/_unpacker.pyx":488 - * prev_head = self.buf_head - * if prev_head >= self.buf_tail: - * if iter: # <<<<<<<<<<<<<< - * raise StopIteration("No more data to unpack.") - * else: - */ - __pyx_t_1 = (__pyx_v_iter != 0); - if (unlikely(__pyx_t_1)) { - - /* "msgpack/_unpacker.pyx":489 - * if prev_head >= self.buf_tail: - * if iter: - * raise StopIteration("No more data to unpack.") # <<<<<<<<<<<<<< - * else: - * raise OutOfData("No more data to unpack.") - */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_StopIteration, __pyx_tuple__29, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 489, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 489, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":488 - * prev_head = self.buf_head - * if prev_head >= self.buf_tail: - * if iter: # <<<<<<<<<<<<<< - * raise StopIteration("No more data to unpack.") - * else: - */ - } - - /* "msgpack/_unpacker.pyx":491 - * raise StopIteration("No more data to unpack.") - * else: - * raise OutOfData("No more data to unpack.") # <<<<<<<<<<<<<< - * - * ret = execute(&self.ctx, self.buf, self.buf_tail, 
&self.buf_head) - */ - /*else*/ { - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_OutOfData); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 491, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - } - } - __pyx_t_4 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_kp_u_No_more_data_to_unpack) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_kp_u_No_more_data_to_unpack); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 491, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 491, __pyx_L1_error) - } - - /* "msgpack/_unpacker.pyx":487 - * while 1: - * prev_head = self.buf_head - * if prev_head >= self.buf_tail: # <<<<<<<<<<<<<< - * if iter: - * raise StopIteration("No more data to unpack.") - */ - } - - /* "msgpack/_unpacker.pyx":493 - * raise OutOfData("No more data to unpack.") - * - * ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head) # <<<<<<<<<<<<<< - * self.stream_offset += self.buf_head - prev_head - * - */ - __pyx_t_8 = __pyx_v_execute((&__pyx_v_self->ctx), __pyx_v_self->buf, __pyx_v_self->buf_tail, (&__pyx_v_self->buf_head)); if (unlikely(__pyx_t_8 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(1, 493, __pyx_L1_error) - __pyx_v_ret = __pyx_t_8; - - /* "msgpack/_unpacker.pyx":494 - * - * ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head) - * self.stream_offset += self.buf_head - prev_head # <<<<<<<<<<<<<< - * - * if ret == 1: - */ - __pyx_v_self->stream_offset = (__pyx_v_self->stream_offset + (__pyx_v_self->buf_head - __pyx_v_prev_head)); - - /* "msgpack/_unpacker.pyx":496 - * self.stream_offset += self.buf_head - prev_head - * - * if ret == 1: # <<<<<<<<<<<<<< - * obj = unpack_data(&self.ctx) - * unpack_init(&self.ctx) - */ - switch (__pyx_v_ret) { - case 1: - - /* "msgpack/_unpacker.pyx":497 - * - * if ret == 1: - * obj = unpack_data(&self.ctx) # <<<<<<<<<<<<<< - * unpack_init(&self.ctx) - * return obj - */ - __pyx_t_4 = unpack_data((&__pyx_v_self->ctx)); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 497, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_v_obj = __pyx_t_4; - __pyx_t_4 = 0; - - /* "msgpack/_unpacker.pyx":498 - * if ret == 1: - * obj = unpack_data(&self.ctx) - * unpack_init(&self.ctx) # <<<<<<<<<<<<<< - * return obj - * elif ret == 0: - */ - unpack_init((&__pyx_v_self->ctx)); - - /* "msgpack/_unpacker.pyx":499 - * obj = unpack_data(&self.ctx) - * unpack_init(&self.ctx) - * return obj # <<<<<<<<<<<<<< - * elif ret == 0: - * if self.file_like is not None: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_obj); - __pyx_r = __pyx_v_obj; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":496 - * self.stream_offset += self.buf_head - prev_head - * - * if ret == 1: # <<<<<<<<<<<<<< - * obj = unpack_data(&self.ctx) - * unpack_init(&self.ctx) - */ - break; - case 0: - - /* "msgpack/_unpacker.pyx":501 - * return obj - * elif ret == 0: - * if self.file_like is not None: # <<<<<<<<<<<<<< - * self.read_from_file() - * continue - */ - __pyx_t_1 = (__pyx_v_self->file_like != Py_None); - __pyx_t_3 = (__pyx_t_1 != 0); - if (__pyx_t_3) { - - /* "msgpack/_unpacker.pyx":502 - * elif ret == 0: - * if 
self.file_like is not None: - * self.read_from_file() # <<<<<<<<<<<<<< - * continue - * if iter: - */ - __pyx_t_4 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self->__pyx_vtab)->read_from_file(__pyx_v_self); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 502, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - - /* "msgpack/_unpacker.pyx":503 - * if self.file_like is not None: - * self.read_from_file() - * continue # <<<<<<<<<<<<<< - * if iter: - * raise StopIteration("No more data to unpack.") - */ - goto __pyx_L6_continue; - - /* "msgpack/_unpacker.pyx":501 - * return obj - * elif ret == 0: - * if self.file_like is not None: # <<<<<<<<<<<<<< - * self.read_from_file() - * continue - */ - } - - /* "msgpack/_unpacker.pyx":504 - * self.read_from_file() - * continue - * if iter: # <<<<<<<<<<<<<< - * raise StopIteration("No more data to unpack.") - * else: - */ - __pyx_t_3 = (__pyx_v_iter != 0); - if (unlikely(__pyx_t_3)) { - - /* "msgpack/_unpacker.pyx":505 - * continue - * if iter: - * raise StopIteration("No more data to unpack.") # <<<<<<<<<<<<<< - * else: - * raise OutOfData("No more data to unpack.") - */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_StopIteration, __pyx_tuple__29, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 505, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 505, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":504 - * self.read_from_file() - * continue - * if iter: # <<<<<<<<<<<<<< - * raise StopIteration("No more data to unpack.") - * else: - */ - } - - /* "msgpack/_unpacker.pyx":507 - * raise StopIteration("No more data to unpack.") - * else: - * raise OutOfData("No more data to unpack.") # <<<<<<<<<<<<<< - * elif ret == -2: - * raise FormatError - */ - /*else*/ { - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_OutOfData); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 507, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - } - } - __pyx_t_4 = (__pyx_t_7) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_kp_u_No_more_data_to_unpack) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_kp_u_No_more_data_to_unpack); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 507, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 507, __pyx_L1_error) - } - - /* "msgpack/_unpacker.pyx":500 - * unpack_init(&self.ctx) - * return obj - * elif ret == 0: # <<<<<<<<<<<<<< - * if self.file_like is not None: - * self.read_from_file() - */ - break; - case -2L: - - /* "msgpack/_unpacker.pyx":509 - * raise OutOfData("No more data to unpack.") - * elif ret == -2: - * raise FormatError # <<<<<<<<<<<<<< - * elif ret == -3: - * raise StackError - */ - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_FormatError); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 509, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 509, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":508 - * else: - * raise OutOfData("No more data to unpack.") - * elif ret == -2: # <<<<<<<<<<<<<< - * raise FormatError - * elif ret == -3: - */ - break; - case -3L: - - /* "msgpack/_unpacker.pyx":511 - * raise FormatError - * elif ret == -3: - * raise StackError # <<<<<<<<<<<<<< - * else: - * raise ValueError("Unpack failed: error = %d" % (ret,)) - */ - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_StackError); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 511, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 511, __pyx_L1_error) - - /* "msgpack/_unpacker.pyx":510 - * elif ret == -2: - * raise FormatError - * elif ret == -3: # <<<<<<<<<<<<<< - * raise StackError - * else: - */ - break; - default: - - /* "msgpack/_unpacker.pyx":513 - * raise StackError - * else: - * raise ValueError("Unpack failed: error = %d" % (ret,)) # <<<<<<<<<<<<<< - * - * def read_bytes(self, Py_ssize_t nbytes): - */ - __pyx_t_4 = __Pyx_PyUnicode_From_int(__pyx_v_ret, 0, ' ', 'd'); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 513, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_6 = __Pyx_PyUnicode_Concat(__pyx_kp_u_Unpack_failed_error, __pyx_t_4); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 513, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 513, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(1, 513, __pyx_L1_error) - break; - } - __pyx_L6_continue:; - } - - /* "msgpack/_unpacker.pyx":477 - * self.file_like = None - * - * cdef object _unpack(self, execute_fn execute, bint iter=0): # <<<<<<<<<<<<<< - * cdef int ret - * cdef object obj - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker._unpack", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_obj); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":515 - * raise ValueError("Unpack failed: error = %d" % (ret,)) - 
* - * def read_bytes(self, Py_ssize_t nbytes): # <<<<<<<<<<<<<< - * """Read a specified number of raw bytes from the stream""" - * cdef Py_ssize_t nread - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_9read_bytes(PyObject *__pyx_v_self, PyObject *__pyx_arg_nbytes); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_8Unpacker_8read_bytes[] = "Unpacker.read_bytes(self, Py_ssize_t nbytes)\nRead a specified number of raw bytes from the stream"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_9read_bytes(PyObject *__pyx_v_self, PyObject *__pyx_arg_nbytes) { - Py_ssize_t __pyx_v_nbytes; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("read_bytes (wrapper)", 0); - assert(__pyx_arg_nbytes); { - __pyx_v_nbytes = __Pyx_PyIndex_AsSsize_t(__pyx_arg_nbytes); if (unlikely((__pyx_v_nbytes == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 515, __pyx_L3_error) - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.read_bytes", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_8read_bytes(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self), ((Py_ssize_t)__pyx_v_nbytes)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_8read_bytes(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, Py_ssize_t __pyx_v_nbytes) { - Py_ssize_t __pyx_v_nread; - PyObject *__pyx_v_ret = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - Py_ssize_t __pyx_t_1; - Py_ssize_t __pyx_t_2; - Py_ssize_t __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - int __pyx_t_6; - int __pyx_t_7; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - PyObject *__pyx_t_10 = NULL; - __Pyx_RefNannySetupContext("read_bytes", 0); - - /* "msgpack/_unpacker.pyx":518 - * """Read a specified number of raw bytes from the stream""" - * cdef Py_ssize_t nread - * nread = min(self.buf_tail - self.buf_head, nbytes) # <<<<<<<<<<<<<< - * ret = PyBytes_FromStringAndSize(self.buf + self.buf_head, nread) - * self.buf_head += nread - */ - __pyx_t_1 = __pyx_v_nbytes; - __pyx_t_2 = (__pyx_v_self->buf_tail - __pyx_v_self->buf_head); - if (((__pyx_t_1 < __pyx_t_2) != 0)) { - __pyx_t_3 = __pyx_t_1; - } else { - __pyx_t_3 = __pyx_t_2; - } - __pyx_v_nread = __pyx_t_3; - - /* "msgpack/_unpacker.pyx":519 - * cdef Py_ssize_t nread - * nread = min(self.buf_tail - self.buf_head, nbytes) - * ret = PyBytes_FromStringAndSize(self.buf + self.buf_head, nread) # <<<<<<<<<<<<<< - * self.buf_head += nread - * if len(ret) < nbytes and self.file_like is not None: - */ - __pyx_t_4 = PyBytes_FromStringAndSize((__pyx_v_self->buf + __pyx_v_self->buf_head), __pyx_v_nread); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 519, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_v_ret = __pyx_t_4; - __pyx_t_4 = 0; - - /* "msgpack/_unpacker.pyx":520 - * nread = min(self.buf_tail - self.buf_head, nbytes) - * ret = PyBytes_FromStringAndSize(self.buf + self.buf_head, nread) - * self.buf_head += nread # <<<<<<<<<<<<<< - * if len(ret) < nbytes and self.file_like is not None: - * ret += self.file_like.read(nbytes - len(ret)) - */ - __pyx_v_self->buf_head = (__pyx_v_self->buf_head + __pyx_v_nread); - - /* "msgpack/_unpacker.pyx":521 - * ret = PyBytes_FromStringAndSize(self.buf + self.buf_head, nread) - * 
self.buf_head += nread - * if len(ret) < nbytes and self.file_like is not None: # <<<<<<<<<<<<<< - * ret += self.file_like.read(nbytes - len(ret)) - * return ret - */ - __pyx_t_3 = PyObject_Length(__pyx_v_ret); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 521, __pyx_L1_error) - __pyx_t_6 = ((__pyx_t_3 < __pyx_v_nbytes) != 0); - if (__pyx_t_6) { - } else { - __pyx_t_5 = __pyx_t_6; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_6 = (__pyx_v_self->file_like != Py_None); - __pyx_t_7 = (__pyx_t_6 != 0); - __pyx_t_5 = __pyx_t_7; - __pyx_L4_bool_binop_done:; - if (__pyx_t_5) { - - /* "msgpack/_unpacker.pyx":522 - * self.buf_head += nread - * if len(ret) < nbytes and self.file_like is not None: - * ret += self.file_like.read(nbytes - len(ret)) # <<<<<<<<<<<<<< - * return ret - * - */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->file_like, __pyx_n_s_read); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 522, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = PyObject_Length(__pyx_v_ret); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 522, __pyx_L1_error) - __pyx_t_9 = PyInt_FromSsize_t((__pyx_v_nbytes - __pyx_t_3)); if (unlikely(!__pyx_t_9)) __PYX_ERR(1, 522, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_9); - __pyx_t_10 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { - __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_8); - if (likely(__pyx_t_10)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); - __Pyx_INCREF(__pyx_t_10); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_8, function); - } - } - __pyx_t_4 = (__pyx_t_10) ? __Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_10, __pyx_t_9) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_9); - __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 522, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_8 = PyNumber_InPlaceAdd(__pyx_v_ret, __pyx_t_4); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 522, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF_SET(__pyx_v_ret, __pyx_t_8); - __pyx_t_8 = 0; - - /* "msgpack/_unpacker.pyx":521 - * ret = PyBytes_FromStringAndSize(self.buf + self.buf_head, nread) - * self.buf_head += nread - * if len(ret) < nbytes and self.file_like is not None: # <<<<<<<<<<<<<< - * ret += self.file_like.read(nbytes - len(ret)) - * return ret - */ - } - - /* "msgpack/_unpacker.pyx":523 - * if len(ret) < nbytes and self.file_like is not None: - * ret += self.file_like.read(nbytes - len(ret)) - * return ret # <<<<<<<<<<<<<< - * - * def unpack(self): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_ret); - __pyx_r = __pyx_v_ret; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":515 - * raise ValueError("Unpack failed: error = %d" % (ret,)) - * - * def read_bytes(self, Py_ssize_t nbytes): # <<<<<<<<<<<<<< - * """Read a specified number of raw bytes from the stream""" - * cdef Py_ssize_t nread - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.read_bytes", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_ret); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":525 - * return ret - * - * def unpack(self): # <<<<<<<<<<<<<< - * """Unpack one object - * - */ - -/* Python 
wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_11unpack(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_8Unpacker_10unpack[] = "Unpacker.unpack(self)\nUnpack one object\n\n Raises `OutOfData` when there are no more bytes to unpack.\n "; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_11unpack(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("unpack (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_10unpack(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_10unpack(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("unpack", 0); - - /* "msgpack/_unpacker.pyx":530 - * Raises `OutOfData` when there are no more bytes to unpack. - * """ - * return self._unpack(unpack_construct) # <<<<<<<<<<<<<< - * - * def skip(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self->__pyx_vtab)->_unpack(__pyx_v_self, unpack_construct, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 530, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":525 - * return ret - * - * def unpack(self): # <<<<<<<<<<<<<< - * """Unpack one object - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.unpack", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":532 - * return self._unpack(unpack_construct) - * - * def skip(self): # <<<<<<<<<<<<<< - * """Read and ignore one object, returning None - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_13skip(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_8Unpacker_12skip[] = "Unpacker.skip(self)\nRead and ignore one object, returning None\n\n Raises `OutOfData` when there are no more bytes to unpack.\n "; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_13skip(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("skip (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_12skip(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_12skip(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("skip", 0); - - /* "msgpack/_unpacker.pyx":537 - * Raises `OutOfData` when there are no more bytes to unpack. 
- * """ - * return self._unpack(unpack_skip) # <<<<<<<<<<<<<< - * - * def read_array_header(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self->__pyx_vtab)->_unpack(__pyx_v_self, unpack_skip, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 537, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":532 - * return self._unpack(unpack_construct) - * - * def skip(self): # <<<<<<<<<<<<<< - * """Read and ignore one object, returning None - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.skip", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":539 - * return self._unpack(unpack_skip) - * - * def read_array_header(self): # <<<<<<<<<<<<<< - * """assuming the next object is an array, return its size n, such that - * the next n unpack() calls will iterate over its contents. - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_15read_array_header(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_8Unpacker_14read_array_header[] = "Unpacker.read_array_header(self)\nassuming the next object is an array, return its size n, such that\n the next n unpack() calls will iterate over its contents.\n\n Raises `OutOfData` when there are no more bytes to unpack.\n "; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_15read_array_header(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("read_array_header (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_14read_array_header(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_14read_array_header(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("read_array_header", 0); - - /* "msgpack/_unpacker.pyx":545 - * Raises `OutOfData` when there are no more bytes to unpack. - * """ - * return self._unpack(read_array_header) # <<<<<<<<<<<<<< - * - * def read_map_header(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self->__pyx_vtab)->_unpack(__pyx_v_self, read_array_header, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 545, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":539 - * return self._unpack(unpack_skip) - * - * def read_array_header(self): # <<<<<<<<<<<<<< - * """assuming the next object is an array, return its size n, such that - * the next n unpack() calls will iterate over its contents. 
- */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.read_array_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":547 - * return self._unpack(read_array_header) - * - * def read_map_header(self): # <<<<<<<<<<<<<< - * """assuming the next object is a map, return its size n, such that the - * next n * 2 unpack() calls will iterate over its key-value pairs. - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_17read_map_header(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_8Unpacker_16read_map_header[] = "Unpacker.read_map_header(self)\nassuming the next object is a map, return its size n, such that the\n next n * 2 unpack() calls will iterate over its key-value pairs.\n\n Raises `OutOfData` when there are no more bytes to unpack.\n "; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_17read_map_header(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("read_map_header (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_16read_map_header(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_16read_map_header(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("read_map_header", 0); - - /* "msgpack/_unpacker.pyx":553 - * Raises `OutOfData` when there are no more bytes to unpack. - * """ - * return self._unpack(read_map_header) # <<<<<<<<<<<<<< - * - * def tell(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self->__pyx_vtab)->_unpack(__pyx_v_self, read_map_header, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 553, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":547 - * return self._unpack(read_array_header) - * - * def read_map_header(self): # <<<<<<<<<<<<<< - * """assuming the next object is a map, return its size n, such that the - * next n * 2 unpack() calls will iterate over its key-value pairs. 
- */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.read_map_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":555 - * return self._unpack(read_map_header) - * - * def tell(self): # <<<<<<<<<<<<<< - * return self.stream_offset - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_19tell(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_8Unpacker_18tell[] = "Unpacker.tell(self)"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_19tell(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("tell (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_18tell(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_18tell(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("tell", 0); - - /* "msgpack/_unpacker.pyx":556 - * - * def tell(self): - * return self.stream_offset # <<<<<<<<<<<<<< - * - * def __iter__(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_v_self->stream_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 556, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":555 - * return self._unpack(read_map_header) - * - * def tell(self): # <<<<<<<<<<<<<< - * return self.stream_offset - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.tell", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":558 - * return self.stream_offset - * - * def __iter__(self): # <<<<<<<<<<<<<< - * return self - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_21__iter__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_21__iter__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_20__iter__(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_20__iter__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__iter__", 0); - - /* "msgpack/_unpacker.pyx":559 - * - * def __iter__(self): - * return self # <<<<<<<<<<<<<< - * - * def __next__(self): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((PyObject *)__pyx_v_self)); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":558 - * return self.stream_offset - * - * def __iter__(self): # <<<<<<<<<<<<<< - * return self - * - */ - - /* function exit 
code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "msgpack/_unpacker.pyx":561 - * return self - * - * def __next__(self): # <<<<<<<<<<<<<< - * return self._unpack(unpack_construct, 1) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_23__next__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_23__next__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__next__ (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_22__next__(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_22__next__(struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - struct __pyx_opt_args_7msgpack_9_cmsgpack_8Unpacker__unpack __pyx_t_2; - __Pyx_RefNannySetupContext("__next__", 0); - - /* "msgpack/_unpacker.pyx":562 - * - * def __next__(self): - * return self._unpack(unpack_construct, 1) # <<<<<<<<<<<<<< - * - * # for debug. - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2.__pyx_n = 1; - __pyx_t_2.iter = 1; - __pyx_t_1 = ((struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self->__pyx_vtab)->_unpack(__pyx_v_self, unpack_construct, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 562, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "msgpack/_unpacker.pyx":561 - * return self - * - * def __next__(self): # <<<<<<<<<<<<<< - * return self._unpack(unpack_construct, 1) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.__next__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_25__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_8Unpacker_24__reduce_cython__[] = "Unpacker.__reduce_cython__(self)"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_25__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_24__reduce_cython__(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_24__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * 
raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__30, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(2, 2, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_27__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static char __pyx_doc_7msgpack_9_cmsgpack_8Unpacker_26__setstate_cython__[] = "Unpacker.__setstate_cython__(self, __pyx_state)"; -static PyObject *__pyx_pw_7msgpack_9_cmsgpack_8Unpacker_27__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7msgpack_9_cmsgpack_8Unpacker_26__setstate_cython__(((struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7msgpack_9_cmsgpack_8Unpacker_26__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7msgpack_9_cmsgpack_Unpacker *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":4 - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__31, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(2, 4, __pyx_L1_error) - - /* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("msgpack._cmsgpack.Unpacker.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} -static struct __pyx_vtabstruct_7msgpack_9_cmsgpack_Packer __pyx_vtable_7msgpack_9_cmsgpack_Packer; - -static PyObject *__pyx_tp_new_7msgpack_9_cmsgpack_Packer(PyTypeObject *t, 
CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_7msgpack_9_cmsgpack_Packer *p; - PyObject *o; - if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)o); - p->__pyx_vtab = __pyx_vtabptr_7msgpack_9_cmsgpack_Packer; - p->_default = Py_None; Py_INCREF(Py_None); - p->_bencoding = Py_None; Py_INCREF(Py_None); - p->_berrors = Py_None; Py_INCREF(Py_None); - p->use_float = ((PyBoolObject *)Py_None); Py_INCREF(Py_None); - if (unlikely(__pyx_pw_7msgpack_9_cmsgpack_6Packer_1__cinit__(o, __pyx_empty_tuple, NULL) < 0)) goto bad; - return o; - bad: - Py_DECREF(o); o = 0; - return NULL; -} - -static void __pyx_tp_dealloc_7msgpack_9_cmsgpack_Packer(PyObject *o) { - struct __pyx_obj_7msgpack_9_cmsgpack_Packer *p = (struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - #endif - PyObject_GC_UnTrack(o); - { - PyObject *etype, *eval, *etb; - PyErr_Fetch(&etype, &eval, &etb); - ++Py_REFCNT(o); - __pyx_pw_7msgpack_9_cmsgpack_6Packer_5__dealloc__(o); - --Py_REFCNT(o); - PyErr_Restore(etype, eval, etb); - } - Py_CLEAR(p->_default); - Py_CLEAR(p->_bencoding); - Py_CLEAR(p->_berrors); - Py_CLEAR(p->use_float); - (*Py_TYPE(o)->tp_free)(o); -} - -static int __pyx_tp_traverse_7msgpack_9_cmsgpack_Packer(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_7msgpack_9_cmsgpack_Packer *p = (struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)o; - if (p->_default) { - e = (*v)(p->_default, a); if (e) return e; - } - if (p->_bencoding) { - e = (*v)(p->_bencoding, a); if (e) return e; - } - if (p->_berrors) { - e = (*v)(p->_berrors, a); if (e) return e; - } - if (p->use_float) { - e = (*v)(((PyObject *)p->use_float), a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_7msgpack_9_cmsgpack_Packer(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_7msgpack_9_cmsgpack_Packer *p = (struct __pyx_obj_7msgpack_9_cmsgpack_Packer *)o; - tmp = ((PyObject*)p->_default); - p->_default = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_bencoding); - p->_bencoding = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_berrors); - p->_berrors = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->use_float); - p->use_float = ((PyBoolObject *)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyMethodDef __pyx_methods_7msgpack_9_cmsgpack_Packer[] = { - {"pack", (PyCFunction)__pyx_pw_7msgpack_9_cmsgpack_6Packer_7pack, METH_O, __pyx_doc_7msgpack_9_cmsgpack_6Packer_6pack}, - {"pack_ext_type", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7msgpack_9_cmsgpack_6Packer_9pack_ext_type, METH_VARARGS|METH_KEYWORDS, __pyx_doc_7msgpack_9_cmsgpack_6Packer_8pack_ext_type}, - {"pack_array_header", (PyCFunction)__pyx_pw_7msgpack_9_cmsgpack_6Packer_11pack_array_header, METH_O, __pyx_doc_7msgpack_9_cmsgpack_6Packer_10pack_array_header}, - {"pack_map_header", (PyCFunction)__pyx_pw_7msgpack_9_cmsgpack_6Packer_13pack_map_header, METH_O, __pyx_doc_7msgpack_9_cmsgpack_6Packer_12pack_map_header}, - {"pack_map_pairs", (PyCFunction)__pyx_pw_7msgpack_9_cmsgpack_6Packer_15pack_map_pairs, METH_O, 
[Removed: remainder of the Cython-generated C source for the vendored `msgpack._cmsgpack` extension (imported from `ddtrace.vendor.msgpack`). The deleted lines are machine-generated and contain: the tail of the Packer method table, the Packer type object and its docstring, the Unpacker allocation, deallocation, GC traverse/clear, method table, type object and docstring, the module definition, the interned-string and cached-constant tables, the `__Pyx_modinit_*` helpers, and the module exec code that imports `ExtType` and the `BufferFull`/`OutOfData`/`ExtraData`/`FormatError`/`StackError` exceptions from `ddtrace.vendor.msgpack`. Generated file; contents elided.]
!= NULL) ? 0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); - if (unlikely(!result)) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *more_or_less; - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - PyErr_Format(PyExc_TypeError, - "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", - func_name, more_or_less, num_expected, - (num_expected == 1) ? 
"" : "s", num_found); -} - -/* KeywordStringCheck */ -static int __Pyx_CheckKeywordStrings( - PyObject *kwdict, - const char* function_name, - int kw_allowed) -{ - PyObject* key = 0; - Py_ssize_t pos = 0; -#if CYTHON_COMPILING_IN_PYPY - if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) - goto invalid_keyword; - return 1; -#else - while (PyDict_Next(kwdict, &pos, &key, 0)) { - #if PY_MAJOR_VERSION < 3 - if (unlikely(!PyString_Check(key))) - #endif - if (unlikely(!PyUnicode_Check(key))) - goto invalid_keyword_type; - } - if ((!kw_allowed) && unlikely(key)) - goto invalid_keyword; - return 1; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - return 0; -#endif -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif - return 0; -} - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -} -#endif - -/* RaiseException */ -#if PY_MAJOR_VERSION < 3 -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, - CYTHON_UNUSED PyObject *cause) { - __Pyx_PyThreadState_declare - Py_XINCREF(type); - if (!value || value == Py_None) - value = NULL; - else - Py_INCREF(value); - if (!tb || tb == Py_None) - tb = NULL; - else { - Py_INCREF(tb); - if (!PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto raise_error; - } - } - if (PyType_Check(type)) { -#if CYTHON_COMPILING_IN_PYPY - if (!value) { - Py_INCREF(Py_None); - value = Py_None; - } -#endif - PyErr_NormalizeException(&type, &value, &tb); - } else { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - value = type; - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - } - 
__Pyx_PyThreadState_assign - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} -#else -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - PyObject* owned_instance = NULL; - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - } else if (PyExceptionClass_Check(type)) { - PyObject *instance_class = NULL; - if (value && PyExceptionInstance_Check(value)) { - instance_class = (PyObject*) Py_TYPE(value); - if (instance_class != type) { - int is_subclass = PyObject_IsSubclass(instance_class, type); - if (!is_subclass) { - instance_class = NULL; - } else if (unlikely(is_subclass == -1)) { - goto bad; - } else { - type = instance_class; - } - } - } - if (!instance_class) { - PyObject *args; - if (!value) - args = PyTuple_New(0); - else if (PyTuple_Check(value)) { - Py_INCREF(value); - args = value; - } else - args = PyTuple_Pack(1, value); - if (!args) - goto bad; - owned_instance = PyObject_Call(type, args, NULL); - Py_DECREF(args); - if (!owned_instance) - goto bad; - value = owned_instance; - if (!PyExceptionInstance_Check(value)) { - PyErr_Format(PyExc_TypeError, - "calling %R should have returned an instance of " - "BaseException, not %R", - type, Py_TYPE(value)); - goto bad; - } - } - } else { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - if (cause) { - PyObject *fixed_cause; - if (cause == Py_None) { - fixed_cause = NULL; - } else if (PyExceptionClass_Check(cause)) { - fixed_cause = PyObject_CallObject(cause, NULL); - if (fixed_cause == NULL) - goto bad; - } else if (PyExceptionInstance_Check(cause)) { - fixed_cause = cause; - Py_INCREF(fixed_cause); - } else { - PyErr_SetString(PyExc_TypeError, - "exception causes must derive from " - "BaseException"); - goto bad; - } - PyException_SetCause(value, fixed_cause); - } - PyErr_SetObject(type, value); - if (tb) { -#if CYTHON_COMPILING_IN_PYPY - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); - Py_INCREF(tb); - PyErr_Restore(tmp_type, tmp_value, tb); - Py_XDECREF(tmp_tb); -#else - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } -#endif - } -bad: - Py_XDECREF(owned_instance); - return; -} -#endif - -/* RaiseDoubleKeywords */ -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, - PyObject* kw_name) -{ - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION >= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - 
while (PyDict_Next(kwds, &pos, &key, &value)) { - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; - continue; - } - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = (**name == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - return -1; -} - -/* ExtTypeTest */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - if (likely(__Pyx_TypeCheck(obj, type))) - return 1; - PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", - Py_TYPE(obj)->tp_name, type->tp_name); - return 0; -} - -/* GetTopmostException */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * -__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) -{ - _PyErr_StackItem *exc_info = tstate->exc_info; - while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && - exc_info->previous_item != NULL) - { - exc_info = exc_info->previous_item; - } - return exc_info; -} -#endif - -/* SaveResetException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - *type = exc_info->exc_type; - *value = exc_info->exc_value; - *tb = exc_info->exc_traceback; - #else - *type = tstate->exc_type; - *value = tstate->exc_value; - *tb = tstate->exc_traceback; - #endif - 
Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); -} -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = type; - exc_info->exc_value = value; - exc_info->exc_traceback = tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = type; - tstate->exc_value = value; - tstate->exc_traceback = tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -#endif - -/* PyErrExceptionMatches */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; icurexc_type; - if (exc_type == err) return 1; - if (unlikely(!exc_type)) return 0; - if (unlikely(PyTuple_Check(err))) - return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); - return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); -} -#endif - -/* GetException */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) -#endif -{ - PyObject *local_type, *local_value, *local_tb; -#if CYTHON_FAST_THREAD_STATE - PyObject *tmp_type, *tmp_value, *tmp_tb; - local_type = tstate->curexc_type; - local_value = tstate->curexc_value; - local_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -#else - PyErr_Fetch(&local_type, &local_value, &local_tb); -#endif - PyErr_NormalizeException(&local_type, &local_value, &local_tb); -#if CYTHON_FAST_THREAD_STATE - if (unlikely(tstate->curexc_type)) -#else - if (unlikely(PyErr_Occurred())) -#endif - goto bad; - #if PY_MAJOR_VERSION >= 3 - if (local_tb) { - if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) - goto bad; - } - #endif - Py_XINCREF(local_tb); - Py_XINCREF(local_type); - Py_XINCREF(local_value); - *type = local_type; - *value = local_value; - *tb = local_tb; -#if CYTHON_FAST_THREAD_STATE - #if CYTHON_USE_EXC_INFO_STACK - { - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = local_type; - exc_info->exc_value = local_value; - exc_info->exc_traceback = local_tb; - } - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = local_type; - tstate->exc_value = local_value; - tstate->exc_traceback = local_tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#else - PyErr_SetExcInfo(local_type, local_value, local_tb); -#endif - return 0; -bad: - *type = 0; - *value = 0; - *tb = 0; - Py_XDECREF(local_type); - Py_XDECREF(local_value); - Py_XDECREF(local_tb); - return -1; -} - -/* PyCFunctionFastCall */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { - PyCFunctionObject *func = (PyCFunctionObject*)func_obj; - PyCFunction meth = PyCFunction_GET_FUNCTION(func); - PyObject *self = 
PyCFunction_GET_SELF(func); - int flags = PyCFunction_GET_FLAGS(func); - assert(PyCFunction_Check(func)); - assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); - assert(nargs >= 0); - assert(nargs == 0 || args != NULL); - /* _PyCFunction_FastCallDict() must not be called with an exception set, - because it may clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!PyErr_Occurred()); - if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { - return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); - } else { - return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); - } -} -#endif - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject *f; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. - */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? 
PyDict_Size(kwargs) : 0; - if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { - return NULL; - } - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif -#endif - -/* PyObjectCall2Args */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { - PyObject *args, *result = NULL; - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyFunction_FastCall(function, args, 2); - } - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyCFunction_FastCall(function, args, 2); - } - #endif - args = PyTuple_New(2); - if (unlikely(!args)) goto done; - Py_INCREF(arg1); - PyTuple_SET_ITEM(args, 0, arg1); - Py_INCREF(arg2); - PyTuple_SET_ITEM(args, 1, arg2); - Py_INCREF(function); - result = __Pyx_PyObject_Call(function, args, NULL); - Py_DECREF(args); - Py_DECREF(function); -done: - return result; -} - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = PyCFunction_GET_FUNCTION(func); - self = PyCFunction_GET_SELF(func); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallOneArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_New(1); - if (unlikely(!args)) return NULL; - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 0, arg); - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return 
result; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, &arg, 1); - } -#endif - if (likely(PyCFunction_Check(func))) { - if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { - return __Pyx_PyObject_CallMethO(func, arg); -#if CYTHON_FAST_PYCCALL - } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { - return __Pyx_PyCFunction_FastCall(func, &arg, 1); -#endif - } - } - return __Pyx__PyObject_CallOneArg(func, arg); -} -#else -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_Pack(1, arg); - if (unlikely(!args)) return NULL; - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -#endif - -/* SwapException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = *type; - exc_info->exc_value = *value; - exc_info->exc_traceback = *tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = *type; - tstate->exc_value = *value; - tstate->exc_traceback = *tb; - #endif - *type = tmp_type; - *value = tmp_value; - *tb = tmp_tb; -} -#else -static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); - PyErr_SetExcInfo(*type, *value, *tb); - *type = tmp_type; - *value = tmp_value; - *tb = tmp_tb; -} -#endif - -/* IterFinish */ -static CYTHON_INLINE int __Pyx_IterFinish(void) { -#if CYTHON_FAST_THREAD_STATE - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* exc_type = tstate->curexc_type; - if (unlikely(exc_type)) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { - PyObject *exc_value, *exc_tb; - exc_value = tstate->curexc_value; - exc_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; - Py_DECREF(exc_type); - Py_XDECREF(exc_value); - Py_XDECREF(exc_tb); - return 0; - } else { - return -1; - } - } - return 0; -#else - if (unlikely(PyErr_Occurred())) { - if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { - PyErr_Clear(); - return 0; - } else { - return -1; - } - } - return 0; -#endif -} - -/* PyObjectCallNoArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, NULL, 0); - } -#endif -#ifdef __Pyx_CyFunction_USED - if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) -#else - if (likely(PyCFunction_Check(func))) -#endif - { - if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { - return __Pyx_PyObject_CallMethO(func, NULL); - } - } - return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); -} -#endif - -/* PyObjectGetMethod */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { - PyObject *attr; -#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && 
CYTHON_USE_PYTYPE_LOOKUP - PyTypeObject *tp = Py_TYPE(obj); - PyObject *descr; - descrgetfunc f = NULL; - PyObject **dictptr, *dict; - int meth_found = 0; - assert (*method == NULL); - if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; - } - if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { - return 0; - } - descr = _PyType_Lookup(tp, name); - if (likely(descr != NULL)) { - Py_INCREF(descr); -#if PY_MAJOR_VERSION >= 3 - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type))) - #endif -#else - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr))) - #endif -#endif - { - meth_found = 1; - } else { - f = Py_TYPE(descr)->tp_descr_get; - if (f != NULL && PyDescr_IsData(descr)) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - } - } - dictptr = _PyObject_GetDictPtr(obj); - if (dictptr != NULL && (dict = *dictptr) != NULL) { - Py_INCREF(dict); - attr = __Pyx_PyDict_GetItemStr(dict, name); - if (attr != NULL) { - Py_INCREF(attr); - Py_DECREF(dict); - Py_XDECREF(descr); - goto try_unpack; - } - Py_DECREF(dict); - } - if (meth_found) { - *method = descr; - return 1; - } - if (f != NULL) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - if (descr != NULL) { - *method = descr; - return 0; - } - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'%.50s' object has no attribute '%U'", - tp->tp_name, name); -#else - "'%.50s' object has no attribute '%.400s'", - tp->tp_name, PyString_AS_STRING(name)); -#endif - return 0; -#else - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; -#endif -try_unpack: -#if CYTHON_UNPACK_METHODS - if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { - PyObject *function = PyMethod_GET_FUNCTION(attr); - Py_INCREF(function); - Py_DECREF(attr); - *method = function; - return 1; - } -#endif - *method = attr; - return 0; -} - -/* PyObjectCallMethod0 */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { - PyObject *method = NULL, *result = NULL; - int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); - if (likely(is_method)) { - result = __Pyx_PyObject_CallOneArg(method, obj); - Py_DECREF(method); - return result; - } - if (unlikely(!method)) goto bad; - result = __Pyx_PyObject_CallNoArg(method); - Py_DECREF(method); -bad: - return result; -} - -/* RaiseNeedMoreValuesToUnpack */ -static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { - PyErr_Format(PyExc_ValueError, - "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", - index, (index == 1) ? 
"" : "s"); -} - -/* RaiseTooManyValuesToUnpack */ -static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { - PyErr_Format(PyExc_ValueError, - "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); -} - -/* UnpackItemEndCheck */ -static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { - if (unlikely(retval)) { - Py_DECREF(retval); - __Pyx_RaiseTooManyValuesError(expected); - return -1; - } else { - return __Pyx_IterFinish(); - } - return 0; -} - -/* RaiseNoneIterError */ -static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); -} - -/* UnpackTupleError */ -static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) { - if (t == Py_None) { - __Pyx_RaiseNoneNotIterableError(); - } else if (PyTuple_GET_SIZE(t) < index) { - __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t)); - } else { - __Pyx_RaiseTooManyValuesError(index); - } -} - -/* UnpackTuple2 */ -static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( - PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int decref_tuple) { - PyObject *value1 = NULL, *value2 = NULL; -#if CYTHON_COMPILING_IN_PYPY - value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad; - value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad; -#else - value1 = PyTuple_GET_ITEM(tuple, 0); Py_INCREF(value1); - value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value2); -#endif - if (decref_tuple) { - Py_DECREF(tuple); - } - *pvalue1 = value1; - *pvalue2 = value2; - return 0; -#if CYTHON_COMPILING_IN_PYPY -bad: - Py_XDECREF(value1); - Py_XDECREF(value2); - if (decref_tuple) { Py_XDECREF(tuple); } - return -1; -#endif -} -static int __Pyx_unpack_tuple2_generic(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, - int has_known_size, int decref_tuple) { - Py_ssize_t index; - PyObject *value1 = NULL, *value2 = NULL, *iter = NULL; - iternextfunc iternext; - iter = PyObject_GetIter(tuple); - if (unlikely(!iter)) goto bad; - if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; } - iternext = Py_TYPE(iter)->tp_iternext; - value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; } - value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; } - if (!has_known_size && unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad; - Py_DECREF(iter); - *pvalue1 = value1; - *pvalue2 = value2; - return 0; -unpacking_failed: - if (!has_known_size && __Pyx_IterFinish() == 0) - __Pyx_RaiseNeedMoreValuesError(index); -bad: - Py_XDECREF(iter); - Py_XDECREF(value1); - Py_XDECREF(value2); - if (decref_tuple) { Py_XDECREF(tuple); } - return -1; -} - -/* dict_iter */ -static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_dict, PyObject* method_name, - Py_ssize_t* p_orig_length, int* p_source_is_dict) { - is_dict = is_dict || likely(PyDict_CheckExact(iterable)); - *p_source_is_dict = is_dict; - if (is_dict) { -#if !CYTHON_COMPILING_IN_PYPY - *p_orig_length = PyDict_Size(iterable); - Py_INCREF(iterable); - return iterable; -#elif PY_MAJOR_VERSION >= 3 - static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL; - PyObject **pp = NULL; - if (method_name) { - const char *name = PyUnicode_AsUTF8(method_name); - if (strcmp(name, "iteritems") == 0) pp = &py_items; - else if (strcmp(name, "iterkeys") == 0) pp = &py_keys; - else if (strcmp(name, "itervalues") == 0) pp = &py_values; - if (pp) { - if (!*pp) { - *pp = 
PyUnicode_FromString(name + 4); - if (!*pp) - return NULL; - } - method_name = *pp; - } - } -#endif - } - *p_orig_length = 0; - if (method_name) { - PyObject* iter; - iterable = __Pyx_PyObject_CallMethod0(iterable, method_name); - if (!iterable) - return NULL; -#if !CYTHON_COMPILING_IN_PYPY - if (PyTuple_CheckExact(iterable) || PyList_CheckExact(iterable)) - return iterable; -#endif - iter = PyObject_GetIter(iterable); - Py_DECREF(iterable); - return iter; - } - return PyObject_GetIter(iterable); -} -static CYTHON_INLINE int __Pyx_dict_iter_next( - PyObject* iter_obj, CYTHON_NCP_UNUSED Py_ssize_t orig_length, CYTHON_NCP_UNUSED Py_ssize_t* ppos, - PyObject** pkey, PyObject** pvalue, PyObject** pitem, int source_is_dict) { - PyObject* next_item; -#if !CYTHON_COMPILING_IN_PYPY - if (source_is_dict) { - PyObject *key, *value; - if (unlikely(orig_length != PyDict_Size(iter_obj))) { - PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); - return -1; - } - if (unlikely(!PyDict_Next(iter_obj, ppos, &key, &value))) { - return 0; - } - if (pitem) { - PyObject* tuple = PyTuple_New(2); - if (unlikely(!tuple)) { - return -1; - } - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(tuple, 0, key); - PyTuple_SET_ITEM(tuple, 1, value); - *pitem = tuple; - } else { - if (pkey) { - Py_INCREF(key); - *pkey = key; - } - if (pvalue) { - Py_INCREF(value); - *pvalue = value; - } - } - return 1; - } else if (PyTuple_CheckExact(iter_obj)) { - Py_ssize_t pos = *ppos; - if (unlikely(pos >= PyTuple_GET_SIZE(iter_obj))) return 0; - *ppos = pos + 1; - next_item = PyTuple_GET_ITEM(iter_obj, pos); - Py_INCREF(next_item); - } else if (PyList_CheckExact(iter_obj)) { - Py_ssize_t pos = *ppos; - if (unlikely(pos >= PyList_GET_SIZE(iter_obj))) return 0; - *ppos = pos + 1; - next_item = PyList_GET_ITEM(iter_obj, pos); - Py_INCREF(next_item); - } else -#endif - { - next_item = PyIter_Next(iter_obj); - if (unlikely(!next_item)) { - return __Pyx_IterFinish(); - } - } - if (pitem) { - *pitem = next_item; - } else if (pkey && pvalue) { - if (__Pyx_unpack_tuple2(next_item, pkey, pvalue, source_is_dict, source_is_dict, 1)) - return -1; - } else if (pkey) { - *pkey = next_item; - } else { - *pvalue = next_item; - } - return 1; -} - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? 
__PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* GetModuleGlobalName */ -#if CYTHON_USE_DICT_VERSIONS -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) -#else -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) -#endif -{ - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } else if (unlikely(PyErr_Occurred())) { - return NULL; - } -#else - result = PyDict_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } - PyErr_Clear(); -#endif - return __Pyx_GetBuiltinName(name); -} - -/* ReRaiseException */ -static CYTHON_INLINE void __Pyx_ReraiseException(void) { - PyObject *type = NULL, *value = NULL, *tb = NULL; -#if CYTHON_FAST_THREAD_STATE - PyThreadState *tstate = PyThreadState_GET(); - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - type = exc_info->exc_type; - value = exc_info->exc_value; - tb = exc_info->exc_traceback; - #else - type = tstate->exc_type; - value = tstate->exc_value; - tb = tstate->exc_traceback; - #endif -#else - PyErr_GetExcInfo(&type, &value, &tb); -#endif - if (!type || type == Py_None) { -#if !CYTHON_FAST_THREAD_STATE - Py_XDECREF(type); - Py_XDECREF(value); - Py_XDECREF(tb); -#endif - PyErr_SetString(PyExc_RuntimeError, - "No active exception to reraise"); - } else { -#if CYTHON_FAST_THREAD_STATE - Py_INCREF(type); - Py_XINCREF(value); - Py_XINCREF(tb); -#endif - PyErr_Restore(type, value, tb); - } -} - -/* None */ -static CYTHON_INLINE Py_ssize_t __Pyx_div_Py_ssize_t(Py_ssize_t a, Py_ssize_t b) { - Py_ssize_t q = a / b; - Py_ssize_t r = a - q*b; - q -= ((r != 0) & ((r ^ b) < 0)); - return q; -} - -/* CIntToDigits */ -static const char DIGIT_PAIRS_10[2*10*10+1] = { - "00010203040506070809" - "10111213141516171819" - "20212223242526272829" - "30313233343536373839" - "40414243444546474849" - "50515253545556575859" - "60616263646566676869" - "70717273747576777879" - "80818283848586878889" - "90919293949596979899" -}; -static const char DIGIT_PAIRS_8[2*8*8+1] = { - "0001020304050607" - "1011121314151617" - "2021222324252627" - "3031323334353637" - "4041424344454647" - "5051525354555657" - "6061626364656667" - "7071727374757677" -}; -static const char DIGITS_HEX[2*16+1] = { - "0123456789abcdef" - "0123456789ABCDEF" -}; - -/* BuildPyUnicode */ -static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, int clength, - int prepend_sign, char padding_char) { - PyObject *uval; - Py_ssize_t uoffset = ulength - clength; -#if CYTHON_USE_UNICODE_INTERNALS - Py_ssize_t i; -#if CYTHON_PEP393_ENABLED 
- void *udata; - uval = PyUnicode_New(ulength, 127); - if (unlikely(!uval)) return NULL; - udata = PyUnicode_DATA(uval); -#else - Py_UNICODE *udata; - uval = PyUnicode_FromUnicode(NULL, ulength); - if (unlikely(!uval)) return NULL; - udata = PyUnicode_AS_UNICODE(uval); -#endif - if (uoffset > 0) { - i = 0; - if (prepend_sign) { - __Pyx_PyUnicode_WRITE(PyUnicode_1BYTE_KIND, udata, 0, '-'); - i++; - } - for (; i < uoffset; i++) { - __Pyx_PyUnicode_WRITE(PyUnicode_1BYTE_KIND, udata, i, padding_char); - } - } - for (i=0; i < clength; i++) { - __Pyx_PyUnicode_WRITE(PyUnicode_1BYTE_KIND, udata, uoffset+i, chars[i]); - } -#else - { - PyObject *sign = NULL, *padding = NULL; - uval = NULL; - if (uoffset > 0) { - prepend_sign = !!prepend_sign; - if (uoffset > prepend_sign) { - padding = PyUnicode_FromOrdinal(padding_char); - if (likely(padding) && uoffset > prepend_sign + 1) { - PyObject *tmp; - PyObject *repeat = PyInt_FromSize_t(uoffset - prepend_sign); - if (unlikely(!repeat)) goto done_or_error; - tmp = PyNumber_Multiply(padding, repeat); - Py_DECREF(repeat); - Py_DECREF(padding); - padding = tmp; - } - if (unlikely(!padding)) goto done_or_error; - } - if (prepend_sign) { - sign = PyUnicode_FromOrdinal('-'); - if (unlikely(!sign)) goto done_or_error; - } - } - uval = PyUnicode_DecodeASCII(chars, clength, NULL); - if (likely(uval) && padding) { - PyObject *tmp = PyNumber_Add(padding, uval); - Py_DECREF(uval); - uval = tmp; - } - if (likely(uval) && sign) { - PyObject *tmp = PyNumber_Add(sign, uval); - Py_DECREF(uval); - uval = tmp; - } -done_or_error: - Py_XDECREF(padding); - Py_XDECREF(sign); - } -#endif - return uval; -} - -/* CIntToPyUnicode */ -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned short uint16_t; - #else - typedef unsigned __int16 uint16_t; - #endif - #endif -#else - #include -#endif -#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) -#define GCC_DIAGNOSTIC -#endif -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_From_int(int value, Py_ssize_t width, char padding_char, char format_char) { - char digits[sizeof(int)*3+2]; - char *dpos, *end = digits + sizeof(int)*3+2; - const char *hex_digits = DIGITS_HEX; - Py_ssize_t length, ulength; - int prepend_sign, last_one_off; - int remaining; -#ifdef GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const int neg_one = (int) -1, const_zero = (int) 0; -#ifdef GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (format_char == 'X') { - hex_digits += 16; - format_char = 'x'; - } - remaining = value; - last_one_off = 0; - dpos = end; - do { - int digit_pos; - switch (format_char) { - case 'o': - digit_pos = abs((int)(remaining % (8*8))); - remaining = (int) (remaining / (8*8)); - dpos -= 2; - *(uint16_t*)dpos = ((const uint16_t*)DIGIT_PAIRS_8)[digit_pos]; - last_one_off = (digit_pos < 8); - break; - case 'd': - digit_pos = abs((int)(remaining % (10*10))); - remaining = (int) (remaining / (10*10)); - dpos -= 2; - *(uint16_t*)dpos = ((const uint16_t*)DIGIT_PAIRS_10)[digit_pos]; - last_one_off = (digit_pos < 10); - break; - case 'x': - *(--dpos) = hex_digits[abs((int)(remaining % 16))]; - remaining = (int) (remaining / 16); - break; - default: - assert(0); - break; - } - } while (unlikely(remaining != 0)); - if (last_one_off) { - assert(*dpos == '0'); - dpos++; - } - length = end - dpos; - ulength = length; - prepend_sign = 0; - if (!is_unsigned && value <= neg_one) { - if 
(padding_char == ' ' || width <= length + 1) { - *(--dpos) = '-'; - ++length; - } else { - prepend_sign = 1; - } - ++ulength; - } - if (width > ulength) { - ulength = width; - } - if (ulength == 1) { - return PyUnicode_FromOrdinal(*dpos); - } - return __Pyx_PyUnicode_BuildFromAscii(ulength, dpos, (int) length, prepend_sign, padding_char); -} - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'%.50s' object has no attribute '%U'", - tp->tp_name, attr_name); -#else - "'%.50s' object has no attribute '%.400s'", - tp->tp_name, PyString_AS_STRING(attr_name)); -#endif - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if (unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* SetVTable */ -static int __Pyx_SetVtable(PyObject *dict, void *vtable) { -#if PY_VERSION_HEX >= 0x02070000 - PyObject *ob = PyCapsule_New(vtable, 0, 0); -#else - PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); -#endif - if (!ob) - goto bad; - if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) - goto bad; - Py_DECREF(ob); - return 0; -bad: - Py_XDECREF(ob); - return -1; -} - -/* SetupReduce */ -static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { - int ret; - PyObject *name_attr; - name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); - if (likely(name_attr)) { - ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); - } else { - ret = -1; - } - if (unlikely(ret < 0)) { - PyErr_Clear(); - ret = 0; - } - Py_XDECREF(name_attr); - return ret; -} -static int __Pyx_setup_reduce(PyObject* type_obj) { - int ret = 0; - PyObject *object_reduce = NULL; - PyObject *object_reduce_ex = NULL; - PyObject *reduce = NULL; - PyObject *reduce_ex = NULL; - PyObject *reduce_cython = NULL; - PyObject *setstate = NULL; - PyObject *setstate_cython = NULL; -#if CYTHON_USE_PYTYPE_LOOKUP - if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD; -#else - if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD; -#endif -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; -#else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; -#endif - reduce_ex = 
__Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD; - if (reduce_ex == object_reduce_ex) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; -#else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; -#endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD; - if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { - reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD; - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD; - setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); - if (!setstate) PyErr_Clear(); - if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { - setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD; - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD; - } - PyType_Modified((PyTypeObject*)type_obj); - } - } - goto GOOD; -BAD: - if (!PyErr_Occurred()) - PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); - ret = -1; -GOOD: -#if !CYTHON_USE_PYTYPE_LOOKUP - Py_XDECREF(object_reduce); - Py_XDECREF(object_reduce_ex); -#endif - Py_XDECREF(reduce); - Py_XDECREF(reduce_ex); - Py_XDECREF(reduce_cython); - Py_XDECREF(setstate); - Py_XDECREF(setstate_cython); - return ret; -} - -/* TypeImport */ -#ifndef __PYX_HAVE_RT_ImportType -#define __PYX_HAVE_RT_ImportType -static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, - size_t size, enum __Pyx_ImportType_CheckSize check_size) -{ - PyObject *result = 0; - char warning[200]; - Py_ssize_t basicsize; -#ifdef Py_LIMITED_API - PyObject *py_basicsize; -#endif - result = PyObject_GetAttrString(module, class_name); - if (!result) - goto bad; - if (!PyType_Check(result)) { - PyErr_Format(PyExc_TypeError, - "%.200s.%.200s is not a type object", - module_name, class_name); - goto bad; - } -#ifndef Py_LIMITED_API - basicsize = ((PyTypeObject *)result)->tp_basicsize; -#else - py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); - if (!py_basicsize) - goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) - goto bad; -#endif - if ((size_t)basicsize < size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. 
" - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { - PyOS_snprintf(warning, sizeof(warning), - "%s.%s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; - } - return (PyTypeObject *)result; -bad: - Py_XDECREF(result); - return NULL; -} -#endif - -/* Import */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { - PyObject *empty_list = 0; - PyObject *module = 0; - PyObject *global_dict = 0; - PyObject *empty_dict = 0; - PyObject *list; - #if PY_MAJOR_VERSION < 3 - PyObject *py_import; - py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); - if (!py_import) - goto bad; - #endif - if (from_list) - list = from_list; - else { - empty_list = PyList_New(0); - if (!empty_list) - goto bad; - list = empty_list; - } - global_dict = PyModule_GetDict(__pyx_m); - if (!global_dict) - goto bad; - empty_dict = PyDict_New(); - if (!empty_dict) - goto bad; - { - #if PY_MAJOR_VERSION >= 3 - if (level == -1) { - if (strchr(__Pyx_MODULE_NAME, '.')) { - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, 1); - if (!module) { - if (!PyErr_ExceptionMatches(PyExc_ImportError)) - goto bad; - PyErr_Clear(); - } - } - level = 0; - } - #endif - if (!module) { - #if PY_MAJOR_VERSION < 3 - PyObject *py_level = PyInt_FromLong(level); - if (!py_level) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); - Py_DECREF(py_level); - #else - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, level); - #endif - } - } -bad: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_import); - #endif - Py_XDECREF(empty_list); - Py_XDECREF(empty_dict); - return module; -} - -/* ImportFrom */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { - PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); - if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Format(PyExc_ImportError, - #if PY_MAJOR_VERSION < 3 - "cannot import name %.230s", PyString_AS_STRING(name)); - #else - "cannot import name %S", name); - #endif - } - return value; -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(filename); - #else - py_srcfile = PyUnicode_FromString(filename); - #endif - if (!py_srcfile) goto bad; - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = 
PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - Py_DECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) goto bad; - __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value) { - const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG) ((unsigned PY_LONG_LONG) 0 - (unsigned PY_LONG_LONG) 1), const_zero = (unsigned PY_LONG_LONG) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(unsigned PY_LONG_LONG) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(unsigned PY_LONG_LONG) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned PY_LONG_LONG) 
<= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(unsigned PY_LONG_LONG), - little, !is_unsigned); - } -} - -/* CIntFromPy */ -static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_As_PY_LONG_LONG(PyObject *x) { - const PY_LONG_LONG neg_one = (PY_LONG_LONG) ((PY_LONG_LONG) 0 - (PY_LONG_LONG) 1), const_zero = (PY_LONG_LONG) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(PY_LONG_LONG) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (PY_LONG_LONG) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (PY_LONG_LONG) 0; - case 1: __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, digit, digits[0]) - case 2: - if (8 * sizeof(PY_LONG_LONG) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(PY_LONG_LONG) >= 2 * PyLong_SHIFT) { - return (PY_LONG_LONG) (((((PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(PY_LONG_LONG) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(PY_LONG_LONG) >= 3 * PyLong_SHIFT) { - return (PY_LONG_LONG) (((((((PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(PY_LONG_LONG) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(PY_LONG_LONG) >= 4 * PyLong_SHIFT) { - return (PY_LONG_LONG) (((((((((PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (PY_LONG_LONG) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(PY_LONG_LONG) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(PY_LONG_LONG, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(PY_LONG_LONG) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(PY_LONG_LONG, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (PY_LONG_LONG) 0; - case -1: 
__PYX_VERIFY_RETURN_INT(PY_LONG_LONG, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, digit, +digits[0]) - case -2: - if (8 * sizeof(PY_LONG_LONG) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { - return (PY_LONG_LONG) (((PY_LONG_LONG)-1)*(((((PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(PY_LONG_LONG) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { - return (PY_LONG_LONG) ((((((PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { - return (PY_LONG_LONG) (((PY_LONG_LONG)-1)*(((((((PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(PY_LONG_LONG) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { - return (PY_LONG_LONG) ((((((((PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { - return (PY_LONG_LONG) (((PY_LONG_LONG)-1)*(((((((((PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(PY_LONG_LONG) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { - return (PY_LONG_LONG) ((((((((((PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); - } - } - break; - } -#endif - if (sizeof(PY_LONG_LONG) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(PY_LONG_LONG, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(PY_LONG_LONG) <= 
sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(PY_LONG_LONG, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - PY_LONG_LONG val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (PY_LONG_LONG) -1; - } - } else { - PY_LONG_LONG val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (PY_LONG_LONG) -1; - val = __Pyx_PyInt_As_PY_LONG_LONG(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to PY_LONG_LONG"); - return (PY_LONG_LONG) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to PY_LONG_LONG"); - return (PY_LONG_LONG) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_As_unsigned_PY_LONG_LONG(PyObject *x) { - const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG) ((unsigned PY_LONG_LONG) 0 - (unsigned PY_LONG_LONG) 1), const_zero = (unsigned PY_LONG_LONG) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(unsigned PY_LONG_LONG) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (unsigned PY_LONG_LONG) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (unsigned PY_LONG_LONG) 0; - case 1: __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, digit, digits[0]) - case 2: - if (8 * sizeof(unsigned PY_LONG_LONG) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(unsigned PY_LONG_LONG) >= 2 * PyLong_SHIFT) { - return (unsigned PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(unsigned PY_LONG_LONG) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(unsigned PY_LONG_LONG) >= 3 * PyLong_SHIFT) { - return (unsigned PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(unsigned PY_LONG_LONG) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, unsigned long, (((((((((unsigned 
long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(unsigned PY_LONG_LONG) >= 4 * PyLong_SHIFT) { - return (unsigned PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (unsigned PY_LONG_LONG) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(unsigned PY_LONG_LONG, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (unsigned PY_LONG_LONG) 0; - case -1: __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, digit, +digits[0]) - case -2: - if (8 * sizeof(unsigned PY_LONG_LONG) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(unsigned PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { - return (unsigned PY_LONG_LONG) (((unsigned PY_LONG_LONG)-1)*(((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(unsigned PY_LONG_LONG) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(unsigned PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { - return (unsigned PY_LONG_LONG) ((((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(unsigned PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(unsigned PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { - return (unsigned PY_LONG_LONG) (((unsigned PY_LONG_LONG)-1)*(((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(unsigned PY_LONG_LONG) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(unsigned PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { - return (unsigned PY_LONG_LONG) 
((((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(unsigned PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(unsigned PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { - return (unsigned PY_LONG_LONG) (((unsigned PY_LONG_LONG)-1)*(((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(unsigned PY_LONG_LONG) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(unsigned PY_LONG_LONG, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(unsigned PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { - return (unsigned PY_LONG_LONG) ((((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]))); - } - } - break; - } -#endif - if (sizeof(unsigned PY_LONG_LONG) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(unsigned PY_LONG_LONG, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(unsigned PY_LONG_LONG, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - unsigned PY_LONG_LONG val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (unsigned PY_LONG_LONG) -1; - } - } else { - unsigned PY_LONG_LONG val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (unsigned PY_LONG_LONG) -1; - val = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to unsigned PY_LONG_LONG"); - return (unsigned PY_LONG_LONG) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned PY_LONG_LONG"); - return (unsigned PY_LONG_LONG) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(long) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } 
else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(long) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) - case -2: - if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned 
long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } -#endif - if (sizeof(long) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - long val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (long) -1; - } - } else { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE char __Pyx_PyInt_As_char(PyObject *x) { - const char neg_one = (char) ((char) 0 - (char) 1), const_zero = (char) 0; - const int 
is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(char) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(char, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (char) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (char) 0; - case 1: __PYX_VERIFY_RETURN_INT(char, digit, digits[0]) - case 2: - if (8 * sizeof(char) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(char) >= 2 * PyLong_SHIFT) { - return (char) (((((char)digits[1]) << PyLong_SHIFT) | (char)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(char) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(char) >= 3 * PyLong_SHIFT) { - return (char) (((((((char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(char) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(char) >= 4 * PyLong_SHIFT) { - return (char) (((((((((char)digits[3]) << PyLong_SHIFT) | (char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (char) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(char) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(char, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(char) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(char, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (char) 0; - case -1: __PYX_VERIFY_RETURN_INT(char, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(char, digit, +digits[0]) - case -2: - if (8 * sizeof(char) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(char, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(char) - 1 > 2 * PyLong_SHIFT) { - return (char) (((char)-1)*(((((char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(char) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(char) - 1 > 2 * 
PyLong_SHIFT) { - return (char) ((((((char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(char) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(char, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(char) - 1 > 3 * PyLong_SHIFT) { - return (char) (((char)-1)*(((((((char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(char) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(char) - 1 > 3 * PyLong_SHIFT) { - return (char) ((((((((char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(char) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(char, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(char) - 1 > 4 * PyLong_SHIFT) { - return (char) (((char)-1)*(((((((((char)digits[3]) << PyLong_SHIFT) | (char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(char) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(char) - 1 > 4 * PyLong_SHIFT) { - return (char) ((((((((((char)digits[3]) << PyLong_SHIFT) | (char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]))); - } - } - break; - } -#endif - if (sizeof(char) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(char, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(char) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(char, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - char val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (char) -1; - } - } else { - char val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (char) -1; - val = __Pyx_PyInt_As_char(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to char"); - return (char) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to char"); - 
return (char) -1; -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); - } -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(int) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(int) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, 
PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) - case -2: - if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } -#endif - if (sizeof(int) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, 
PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - int val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (int) -1; - } - } else { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = a->tp_base; - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; - if (!res) { - res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } - return res; -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; ip) { - #if PY_MAJOR_VERSION < 3 - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - #else - if (t->is_unicode | t->is_str) { - if (t->intern) { - *t->p = PyUnicode_InternFromString(t->s); - } else if (t->encoding) { - *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); - } else { - *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); - } - } else { - *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); - } - #endif - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - ++t; - } - return 0; -} - -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int 
__Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type %.200s). " - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) { - Py_DECREF(result); - return NULL; - } - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type %.200s)", - type_name, type_name, Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)b)->ob_digit; - const Py_ssize_t size = Py_SIZE(b); - if (likely(__Pyx_sst_abs(size) <= 1)) { - ival = likely(size) ? 
digits[0] : 0; - if (size == -1) ival = -ival; - return ival; - } else { - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -#endif /* Py_PYTHON_H */ diff --git a/ddtrace/vendor/msgpack/_cmsgpack.pyx b/ddtrace/vendor/msgpack/_cmsgpack.pyx deleted file mode 100644 index 8ebdbf58..00000000 --- a/ddtrace/vendor/msgpack/_cmsgpack.pyx +++ /dev/null @@ -1,4 +0,0 @@ -# coding: utf-8 -#cython: embedsignature=True, c_string_encoding=ascii, language_level=3 -include "_packer.pyx" -include "_unpacker.pyx" diff --git a/ddtrace/vendor/msgpack/_packer.pyx b/ddtrace/vendor/msgpack/_packer.pyx deleted file mode 100644 index c0e5a5c4..00000000 --- a/ddtrace/vendor/msgpack/_packer.pyx +++ /dev/null @@ -1,362 +0,0 @@ -# coding: utf-8 - -from cpython cimport * -from cpython.bytearray cimport PyByteArray_Check, PyByteArray_CheckExact - -from ddtrace.vendor.msgpack import ExtType - - -cdef extern from "Python.h": - - int PyMemoryView_Check(object obj) - char* PyUnicode_AsUTF8AndSize(object obj, Py_ssize_t *l) except NULL - - -cdef extern from "pack.h": - struct msgpack_packer: - char* buf - size_t length - size_t buf_size - bint use_bin_type - - int msgpack_pack_int(msgpack_packer* pk, int d) - int msgpack_pack_nil(msgpack_packer* pk) - int msgpack_pack_true(msgpack_packer* pk) - int msgpack_pack_false(msgpack_packer* pk) - int msgpack_pack_long(msgpack_packer* pk, long d) - int msgpack_pack_long_long(msgpack_packer* pk, long long d) - int msgpack_pack_unsigned_long_long(msgpack_packer* pk, unsigned long long d) - int msgpack_pack_float(msgpack_packer* pk, float d) - int msgpack_pack_double(msgpack_packer* pk, double d) - int msgpack_pack_array(msgpack_packer* pk, size_t l) - int msgpack_pack_map(msgpack_packer* pk, size_t l) - int msgpack_pack_raw(msgpack_packer* pk, size_t l) - int msgpack_pack_bin(msgpack_packer* pk, size_t l) - int msgpack_pack_raw_body(msgpack_packer* pk, char* body, size_t l) - int msgpack_pack_ext(msgpack_packer* pk, char typecode, size_t l) - int 
msgpack_pack_unicode(msgpack_packer* pk, object o, long long limit) - -cdef extern from "buff_converter.h": - object buff_to_buff(char *, Py_ssize_t) - -cdef int DEFAULT_RECURSE_LIMIT=511 -cdef long long ITEM_LIMIT = (2**32)-1 - - -cdef inline int PyBytesLike_Check(object o): - return PyBytes_Check(o) or PyByteArray_Check(o) - - -cdef inline int PyBytesLike_CheckExact(object o): - return PyBytes_CheckExact(o) or PyByteArray_CheckExact(o) - - -cdef class Packer(object): - """ - MessagePack Packer - - usage:: - - packer = Packer() - astream.write(packer.pack(a)) - astream.write(packer.pack(b)) - - Packer's constructor has some keyword arguments: - - :param callable default: - Convert user type to builtin type that Packer supports. - See also simplejson's document. - - :param bool use_single_float: - Use single precision float type for float. (default: False) - - :param bool autoreset: - Reset buffer after each pack and return its content as `bytes`. (default: True). - If set this to false, use `bytes()` to get content and `.reset()` to clear buffer. - - :param bool use_bin_type: - Use bin type introduced in msgpack spec 2.0 for bytes. - It also enables str8 type for unicode. - Current default value is false, but it will be changed to true - in future version. You should specify it explicitly. - - :param bool strict_types: - If set to true, types will be checked to be exact. Derived classes - from serializeable types will not be serialized and will be - treated as unsupported type and forwarded to default. - Additionally tuples will not be serialized as lists. - This is useful when trying to implement accurate serialization - for python types. - - :param str unicode_errors: - Error handler for encoding unicode. (default: 'strict') - - :param str encoding: - (deprecated) Convert unicode to bytes with this encoding. 
(default: 'utf-8') - """ - cdef msgpack_packer pk - cdef object _default - cdef object _bencoding - cdef object _berrors - cdef const char *encoding - cdef const char *unicode_errors - cdef bint strict_types - cdef bool use_float - cdef bint autoreset - - def __cinit__(self): - cdef int buf_size = 1024*1024 - self.pk.buf = PyMem_Malloc(buf_size) - if self.pk.buf == NULL: - raise MemoryError("Unable to allocate internal buffer.") - self.pk.buf_size = buf_size - self.pk.length = 0 - - def __init__(self, default=None, encoding=None, unicode_errors=None, - bint use_single_float=False, bint autoreset=True, bint use_bin_type=False, - bint strict_types=False): - if encoding is not None: - PyErr_WarnEx(DeprecationWarning, "encoding is deprecated.", 1) - self.use_float = use_single_float - self.strict_types = strict_types - self.autoreset = autoreset - self.pk.use_bin_type = use_bin_type - if default is not None: - if not PyCallable_Check(default): - raise TypeError("default must be a callable.") - self._default = default - - self._bencoding = encoding - if encoding is None: - if PY_MAJOR_VERSION < 3: - self.encoding = 'utf-8' - else: - self.encoding = NULL - else: - self.encoding = self._bencoding - - self._berrors = unicode_errors - if unicode_errors is None: - self.unicode_errors = NULL - else: - self.unicode_errors = self._berrors - - def __dealloc__(self): - PyMem_Free(self.pk.buf) - self.pk.buf = NULL - - cdef int _pack(self, object o, int nest_limit=DEFAULT_RECURSE_LIMIT) except -1: - cdef long long llval - cdef unsigned long long ullval - cdef long longval - cdef float fval - cdef double dval - cdef char* rawval - cdef int ret - cdef dict d - cdef Py_ssize_t L - cdef int default_used = 0 - cdef bint strict_types = self.strict_types - cdef Py_buffer view - - if nest_limit < 0: - raise ValueError("recursion limit exceeded.") - - while True: - if o is None: - ret = msgpack_pack_nil(&self.pk) - elif PyBool_Check(o) if strict_types else isinstance(o, bool): - if o: - ret = msgpack_pack_true(&self.pk) - else: - ret = msgpack_pack_false(&self.pk) - elif PyLong_CheckExact(o) if strict_types else PyLong_Check(o): - # PyInt_Check(long) is True for Python 3. - # So we should test long before int. 
- try: - if o > 0: - ullval = o - ret = msgpack_pack_unsigned_long_long(&self.pk, ullval) - else: - llval = o - ret = msgpack_pack_long_long(&self.pk, llval) - except OverflowError as oe: - if not default_used and self._default is not None: - o = self._default(o) - default_used = True - continue - else: - raise OverflowError("Integer value out of range") - elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o): - longval = o - ret = msgpack_pack_long(&self.pk, longval) - elif PyFloat_CheckExact(o) if strict_types else PyFloat_Check(o): - if self.use_float: - fval = o - ret = msgpack_pack_float(&self.pk, fval) - else: - dval = o - ret = msgpack_pack_double(&self.pk, dval) - elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o): - L = len(o) - if L > ITEM_LIMIT: - PyErr_Format(ValueError, b"%.200s object is too large", Py_TYPE(o).tp_name) - rawval = o - ret = msgpack_pack_bin(&self.pk, L) - if ret == 0: - ret = msgpack_pack_raw_body(&self.pk, rawval, L) - elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o): - if self.encoding == NULL and self.unicode_errors == NULL: - ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT); - if ret == -2: - raise ValueError("unicode string is too large") - else: - o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors) - L = len(o) - if L > ITEM_LIMIT: - raise ValueError("unicode string is too large") - ret = msgpack_pack_raw(&self.pk, L) - if ret == 0: - rawval = o - ret = msgpack_pack_raw_body(&self.pk, rawval, L) - elif PyDict_CheckExact(o): - d = o - L = len(d) - if L > ITEM_LIMIT: - raise ValueError("dict is too large") - ret = msgpack_pack_map(&self.pk, L) - if ret == 0: - for k, v in d.items(): - ret = self._pack(k, nest_limit-1) - if ret != 0: break - ret = self._pack(v, nest_limit-1) - if ret != 0: break - elif not strict_types and PyDict_Check(o): - L = len(o) - if L > ITEM_LIMIT: - raise ValueError("dict is too large") - ret = msgpack_pack_map(&self.pk, L) - if ret == 0: - for k, v in o.items(): - ret = self._pack(k, nest_limit-1) - if ret != 0: break - ret = self._pack(v, nest_limit-1) - if ret != 0: break - elif type(o) is ExtType if strict_types else isinstance(o, ExtType): - # This should be before Tuple because ExtType is namedtuple. - longval = o.code - rawval = o.data - L = len(o.data) - if L > ITEM_LIMIT: - raise ValueError("EXT data is too large") - ret = msgpack_pack_ext(&self.pk, longval, L) - ret = msgpack_pack_raw_body(&self.pk, rawval, L) - elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)): - L = len(o) - if L > ITEM_LIMIT: - raise ValueError("list is too large") - ret = msgpack_pack_array(&self.pk, L) - if ret == 0: - for v in o: - ret = self._pack(v, nest_limit-1) - if ret != 0: break - elif PyMemoryView_Check(o): - if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0: - raise ValueError("could not get buffer for memoryview") - L = view.len - if L > ITEM_LIMIT: - PyBuffer_Release(&view); - raise ValueError("memoryview is too large") - ret = msgpack_pack_bin(&self.pk, L) - if ret == 0: - ret = msgpack_pack_raw_body(&self.pk, view.buf, L) - PyBuffer_Release(&view); - elif not default_used and self._default: - o = self._default(o) - default_used = 1 - continue - else: - PyErr_Format(TypeError, b"can not serialize '%.200s' object", Py_TYPE(o).tp_name) - return ret - - cpdef pack(self, object obj): - cdef int ret - try: - ret = self._pack(obj, DEFAULT_RECURSE_LIMIT) - except: - self.pk.length = 0 - raise - if ret: # should not happen. 
- raise RuntimeError("internal error") - if self.autoreset: - buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - self.pk.length = 0 - return buf - - def pack_ext_type(self, typecode, data): - msgpack_pack_ext(&self.pk, typecode, len(data)) - msgpack_pack_raw_body(&self.pk, data, len(data)) - - def pack_array_header(self, long long size): - if size > ITEM_LIMIT: - raise ValueError - cdef int ret = msgpack_pack_array(&self.pk, size) - if ret == -1: - raise MemoryError - elif ret: # should not happen - raise TypeError - if self.autoreset: - buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - self.pk.length = 0 - return buf - - def pack_map_header(self, long long size): - if size > ITEM_LIMIT: - raise ValueError - cdef int ret = msgpack_pack_map(&self.pk, size) - if ret == -1: - raise MemoryError - elif ret: # should not happen - raise TypeError - if self.autoreset: - buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - self.pk.length = 0 - return buf - - def pack_map_pairs(self, object pairs): - """ - Pack *pairs* as msgpack map type. - - *pairs* should be a sequence of pairs. - (`len(pairs)` and `for k, v in pairs:` should be supported.) - """ - cdef int ret = msgpack_pack_map(&self.pk, len(pairs)) - if ret == 0: - for k, v in pairs: - ret = self._pack(k) - if ret != 0: break - ret = self._pack(v) - if ret != 0: break - if ret == -1: - raise MemoryError - elif ret: # should not happen - raise TypeError - if self.autoreset: - buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - self.pk.length = 0 - return buf - - def reset(self): - """Reset internal buffer. - - This method is usaful only when autoreset=False. - """ - self.pk.length = 0 - - def bytes(self): - """Return internal buffer contents as bytes object""" - return PyBytes_FromStringAndSize(self.pk.buf, self.pk.length) - - def getbuffer(self): - """Return view of internal buffer.""" - return buff_to_buff(self.pk.buf, self.pk.length) diff --git a/ddtrace/vendor/msgpack/_unpacker.pyx b/ddtrace/vendor/msgpack/_unpacker.pyx deleted file mode 100644 index 5239ba7b..00000000 --- a/ddtrace/vendor/msgpack/_unpacker.pyx +++ /dev/null @@ -1,569 +0,0 @@ -# coding: utf-8 - -from cpython cimport * - -cdef extern from "Python.h": - ctypedef struct PyObject - cdef int PyObject_AsReadBuffer(object o, const void** buff, Py_ssize_t* buf_len) except -1 - object PyMemoryView_GetContiguous(object obj, int buffertype, char order) - -from libc.stdlib cimport * -from libc.string cimport * -from libc.limits cimport * -ctypedef unsigned long long uint64_t - -from ddtrace.vendor.msgpack.exceptions import ( - BufferFull, - OutOfData, - ExtraData, - FormatError, - StackError, -) -from ddtrace.vendor.msgpack import ExtType - - -cdef extern from "unpack.h": - ctypedef struct msgpack_user: - bint use_list - bint raw - bint has_pairs_hook # call object_hook with k-v pairs - bint strict_map_key - PyObject* object_hook - PyObject* list_hook - PyObject* ext_hook - char *encoding - char *unicode_errors - Py_ssize_t max_str_len - Py_ssize_t max_bin_len - Py_ssize_t max_array_len - Py_ssize_t max_map_len - Py_ssize_t max_ext_len - - ctypedef struct unpack_context: - msgpack_user user - PyObject* obj - Py_ssize_t count - - ctypedef int (*execute_fn)(unpack_context* ctx, const char* data, - Py_ssize_t len, Py_ssize_t* off) except? 
-1 - execute_fn unpack_construct - execute_fn unpack_skip - execute_fn read_array_header - execute_fn read_map_header - void unpack_init(unpack_context* ctx) - object unpack_data(unpack_context* ctx) - void unpack_clear(unpack_context* ctx) - -cdef inline init_ctx(unpack_context *ctx, - object object_hook, object object_pairs_hook, - object list_hook, object ext_hook, - bint use_list, bint raw, bint strict_map_key, - const char* encoding, const char* unicode_errors, - Py_ssize_t max_str_len, Py_ssize_t max_bin_len, - Py_ssize_t max_array_len, Py_ssize_t max_map_len, - Py_ssize_t max_ext_len): - unpack_init(ctx) - ctx.user.use_list = use_list - ctx.user.raw = raw - ctx.user.strict_map_key = strict_map_key - ctx.user.object_hook = ctx.user.list_hook = NULL - ctx.user.max_str_len = max_str_len - ctx.user.max_bin_len = max_bin_len - ctx.user.max_array_len = max_array_len - ctx.user.max_map_len = max_map_len - ctx.user.max_ext_len = max_ext_len - - if object_hook is not None and object_pairs_hook is not None: - raise TypeError("object_pairs_hook and object_hook are mutually exclusive.") - - if object_hook is not None: - if not PyCallable_Check(object_hook): - raise TypeError("object_hook must be a callable.") - ctx.user.object_hook = object_hook - - if object_pairs_hook is None: - ctx.user.has_pairs_hook = False - else: - if not PyCallable_Check(object_pairs_hook): - raise TypeError("object_pairs_hook must be a callable.") - ctx.user.object_hook = object_pairs_hook - ctx.user.has_pairs_hook = True - - if list_hook is not None: - if not PyCallable_Check(list_hook): - raise TypeError("list_hook must be a callable.") - ctx.user.list_hook = list_hook - - if ext_hook is not None: - if not PyCallable_Check(ext_hook): - raise TypeError("ext_hook must be a callable.") - ctx.user.ext_hook = ext_hook - - ctx.user.encoding = encoding - ctx.user.unicode_errors = unicode_errors - -def default_read_extended_type(typecode, data): - raise NotImplementedError("Cannot decode extended type with typecode=%d" % typecode) - -cdef inline int get_data_from_buffer(object obj, - Py_buffer *view, - char **buf, - Py_ssize_t *buffer_len, - int *new_protocol) except 0: - cdef object contiguous - cdef Py_buffer tmp - if PyObject_CheckBuffer(obj): - new_protocol[0] = 1 - if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1: - raise - if view.itemsize != 1: - PyBuffer_Release(view) - raise BufferError("cannot unpack from multi-byte object") - if PyBuffer_IsContiguous(view, b'A') == 0: - PyBuffer_Release(view) - # create a contiguous copy and get buffer - contiguous = PyMemoryView_GetContiguous(obj, PyBUF_READ, b'C') - PyObject_GetBuffer(contiguous, view, PyBUF_SIMPLE) - # view must hold the only reference to contiguous, - # so memory is freed when view is released - Py_DECREF(contiguous) - buffer_len[0] = view.len - buf[0] = view.buf - return 1 - else: - new_protocol[0] = 0 - if PyObject_AsReadBuffer(obj, buf, buffer_len) == -1: - raise BufferError("could not get memoryview") - PyErr_WarnEx(RuntimeWarning, - "using old buffer interface to unpack %s; " - "this leads to unpacking errors if slicing is used and " - "will be removed in a future version" % type(obj), - 1) - return 1 - -def unpackb(object packed, object object_hook=None, object list_hook=None, - bint use_list=True, bint raw=True, bint strict_map_key=False, - encoding=None, unicode_errors=None, - object_pairs_hook=None, ext_hook=ExtType, - Py_ssize_t max_str_len=-1, - Py_ssize_t max_bin_len=-1, - Py_ssize_t max_array_len=-1, - Py_ssize_t max_map_len=-1, - Py_ssize_t 
max_ext_len=-1): - """ - Unpack packed_bytes to object. Returns an unpacked object. - - Raises ``ExtraData`` when *packed* contains extra bytes. - Raises ``ValueError`` when *packed* is incomplete. - Raises ``FormatError`` when *packed* is not valid msgpack. - Raises ``StackError`` when *packed* contains too nested. - Other exceptions can be raised during unpacking. - - See :class:`Unpacker` for options. - - *max_xxx_len* options are configured automatically from ``len(packed)``. - """ - cdef unpack_context ctx - cdef Py_ssize_t off = 0 - cdef int ret - - cdef Py_buffer view - cdef char* buf = NULL - cdef Py_ssize_t buf_len - cdef const char* cenc = NULL - cdef const char* cerr = NULL - cdef int new_protocol = 0 - - if encoding is not None: - PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) - cenc = encoding - - if unicode_errors is not None: - cerr = unicode_errors - - get_data_from_buffer(packed, &view, &buf, &buf_len, &new_protocol) - - if max_str_len == -1: - max_str_len = buf_len - if max_bin_len == -1: - max_bin_len = buf_len - if max_array_len == -1: - max_array_len = buf_len - if max_map_len == -1: - max_map_len = buf_len//2 - if max_ext_len == -1: - max_ext_len = buf_len - - try: - init_ctx(&ctx, object_hook, object_pairs_hook, list_hook, ext_hook, - use_list, raw, strict_map_key, cenc, cerr, - max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len) - ret = unpack_construct(&ctx, buf, buf_len, &off) - finally: - if new_protocol: - PyBuffer_Release(&view); - - if ret == 1: - obj = unpack_data(&ctx) - if off < buf_len: - raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off)) - return obj - unpack_clear(&ctx) - if ret == 0: - raise ValueError("Unpack failed: incomplete input") - elif ret == -2: - raise FormatError - elif ret == -3: - raise StackError - raise ValueError("Unpack failed: error = %d" % (ret,)) - - -def unpack(object stream, **kwargs): - PyErr_WarnEx( - DeprecationWarning, - "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", 1) - data = stream.read() - return unpackb(data, **kwargs) - - -cdef class Unpacker(object): - """Streaming unpacker. - - Arguments: - - :param file_like: - File-like object having `.read(n)` method. - If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable. - - :param int read_size: - Used as `file_like.read(read_size)`. (default: `min(1024**2, max_buffer_size)`) - - :param bool use_list: - If true, unpack msgpack array to Python list. - Otherwise, unpack to Python tuple. (default: True) - - :param bool raw: - If true, unpack msgpack raw to Python bytes (default). - Otherwise, unpack to Python str (or unicode on Python 2) by decoding - with UTF-8 encoding (recommended). - Currently, the default is true, but it will be changed to false in - near future. So you must specify it explicitly for keeping backward - compatibility. - - *encoding* option which is deprecated overrides this option. - - :param bool strict_map_key: - If true, only str or bytes are accepted for map (dict) keys. - It's False by default for backward-compatibility. - But it will be True from msgpack 1.0. - - :param callable object_hook: - When specified, it should be callable. - Unpacker calls it with a dict argument after unpacking msgpack map. - (See also simplejson) - - :param callable object_pairs_hook: - When specified, it should be callable. - Unpacker calls it with a list of key-value pairs after unpacking msgpack map. 
- (See also simplejson) - - :param int max_buffer_size: - Limits size of data waiting unpacked. 0 means system's INT_MAX (default). - Raises `BufferFull` exception when it is insufficient. - You should set this parameter when unpacking data from untrusted source. - - :param int max_str_len: - Deprecated, use *max_buffer_size* instead. - Limits max length of str. (default: max_buffer_size or 1024*1024) - - :param int max_bin_len: - Deprecated, use *max_buffer_size* instead. - Limits max length of bin. (default: max_buffer_size or 1024*1024) - - :param int max_array_len: - Limits max length of array. (default: max_buffer_size or 128*1024) - - :param int max_map_len: - Limits max length of map. (default: max_buffer_size//2 or 32*1024) - - :param int max_ext_len: - Deprecated, use *max_buffer_size* instead. - Limits max size of ext type. (default: max_buffer_size or 1024*1024) - - :param str encoding: - Deprecated, use ``raw=False`` instead. - Encoding used for decoding msgpack raw. - If it is None (default), msgpack raw is deserialized to Python bytes. - - :param str unicode_errors: - Error handler used for decoding str type. (default: `'strict'`) - - - Example of streaming deserialize from file-like object:: - - unpacker = Unpacker(file_like, raw=False, max_buffer_size=10*1024*1024) - for o in unpacker: - process(o) - - Example of streaming deserialize from socket:: - - unpacker = Unpacker(raw=False, max_buffer_size=10*1024*1024) - while True: - buf = sock.recv(1024**2) - if not buf: - break - unpacker.feed(buf) - for o in unpacker: - process(o) - - Raises ``ExtraData`` when *packed* contains extra bytes. - Raises ``OutOfData`` when *packed* is incomplete. - Raises ``FormatError`` when *packed* is not valid msgpack. - Raises ``StackError`` when *packed* contains too nested. - Other exceptions can be raised during unpacking. - """ - cdef unpack_context ctx - cdef char* buf - cdef Py_ssize_t buf_size, buf_head, buf_tail - cdef object file_like - cdef object file_like_read - cdef Py_ssize_t read_size - # To maintain refcnt. 
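The `object_hook` and `object_pairs_hook` options described in the removed Unpacker docstring above have no usage example there. The following sketch shows the typical pattern, assuming the upstream msgpack package; the two hooks are mutually exclusive, so they are shown in separate calls.

    from collections import OrderedDict
    import msgpack

    packed = msgpack.packb({"a": 1, "b": 2}, use_bin_type=True)

    # object_hook is called with the fully built dict; its return value
    # replaces the map in the unpacked result.
    print(msgpack.unpackb(packed, raw=False, object_hook=lambda d: sorted(d.items())))
    # [('a', 1), ('b', 2)]

    # object_pairs_hook is called with the key/value pairs instead,
    # here preserving insertion order in an OrderedDict.
    print(msgpack.unpackb(packed, raw=False, object_pairs_hook=OrderedDict))
    # OrderedDict([('a', 1), ('b', 2)])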
- cdef object object_hook, object_pairs_hook, list_hook, ext_hook - cdef object encoding, unicode_errors - cdef Py_ssize_t max_buffer_size - cdef uint64_t stream_offset - - def __cinit__(self): - self.buf = NULL - - def __dealloc__(self): - PyMem_Free(self.buf) - self.buf = NULL - - def __init__(self, file_like=None, Py_ssize_t read_size=0, - bint use_list=True, bint raw=True, bint strict_map_key=False, - object object_hook=None, object object_pairs_hook=None, object list_hook=None, - encoding=None, unicode_errors=None, Py_ssize_t max_buffer_size=0, - object ext_hook=ExtType, - Py_ssize_t max_str_len=-1, - Py_ssize_t max_bin_len=-1, - Py_ssize_t max_array_len=-1, - Py_ssize_t max_map_len=-1, - Py_ssize_t max_ext_len=-1): - cdef const char *cenc=NULL, - cdef const char *cerr=NULL - - self.object_hook = object_hook - self.object_pairs_hook = object_pairs_hook - self.list_hook = list_hook - self.ext_hook = ext_hook - - self.file_like = file_like - if file_like: - self.file_like_read = file_like.read - if not PyCallable_Check(self.file_like_read): - raise TypeError("`file_like.read` must be a callable.") - - if max_str_len == -1: - max_str_len = max_buffer_size or 1024*1024 - if max_bin_len == -1: - max_bin_len = max_buffer_size or 1024*1024 - if max_array_len == -1: - max_array_len = max_buffer_size or 128*1024 - if max_map_len == -1: - max_map_len = max_buffer_size//2 or 32*1024 - if max_ext_len == -1: - max_ext_len = max_buffer_size or 1024*1024 - - if not max_buffer_size: - max_buffer_size = INT_MAX - if read_size > max_buffer_size: - raise ValueError("read_size should be less or equal to max_buffer_size") - if not read_size: - read_size = min(max_buffer_size, 1024**2) - self.max_buffer_size = max_buffer_size - self.read_size = read_size - self.buf = PyMem_Malloc(read_size) - if self.buf == NULL: - raise MemoryError("Unable to allocate internal buffer.") - self.buf_size = read_size - self.buf_head = 0 - self.buf_tail = 0 - self.stream_offset = 0 - - if encoding is not None: - PyErr_WarnEx(DeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1) - self.encoding = encoding - cenc = encoding - - if unicode_errors is not None: - self.unicode_errors = unicode_errors - cerr = unicode_errors - - init_ctx(&self.ctx, object_hook, object_pairs_hook, list_hook, - ext_hook, use_list, raw, strict_map_key, cenc, cerr, - max_str_len, max_bin_len, max_array_len, - max_map_len, max_ext_len) - - def feed(self, object next_bytes): - """Append `next_bytes` to internal buffer.""" - cdef Py_buffer pybuff - cdef int new_protocol = 0 - cdef char* buf - cdef Py_ssize_t buf_len - - if self.file_like is not None: - raise AssertionError( - "unpacker.feed() is not be able to use with `file_like`.") - - get_data_from_buffer(next_bytes, &pybuff, &buf, &buf_len, &new_protocol) - try: - self.append_buffer(buf, buf_len) - finally: - if new_protocol: - PyBuffer_Release(&pybuff) - - cdef append_buffer(self, void* _buf, Py_ssize_t _buf_len): - cdef: - char* buf = self.buf - char* new_buf - Py_ssize_t head = self.buf_head - Py_ssize_t tail = self.buf_tail - Py_ssize_t buf_size = self.buf_size - Py_ssize_t new_size - - if tail + _buf_len > buf_size: - if ((tail - head) + _buf_len) <= buf_size: - # move to front. - memmove(buf, buf + head, tail - head) - tail -= head - head = 0 - else: - # expand buffer. 
- new_size = (tail-head) + _buf_len - if new_size > self.max_buffer_size: - raise BufferFull - new_size = min(new_size*2, self.max_buffer_size) - new_buf = PyMem_Malloc(new_size) - if new_buf == NULL: - # self.buf still holds old buffer and will be freed during - # obj destruction - raise MemoryError("Unable to enlarge internal buffer.") - memcpy(new_buf, buf + head, tail - head) - PyMem_Free(buf) - - buf = new_buf - buf_size = new_size - tail -= head - head = 0 - - memcpy(buf + tail, (_buf), _buf_len) - self.buf = buf - self.buf_head = head - self.buf_size = buf_size - self.buf_tail = tail + _buf_len - - cdef read_from_file(self): - next_bytes = self.file_like_read( - min(self.read_size, - self.max_buffer_size - (self.buf_tail - self.buf_head) - )) - if next_bytes: - self.append_buffer(PyBytes_AsString(next_bytes), PyBytes_Size(next_bytes)) - else: - self.file_like = None - - cdef object _unpack(self, execute_fn execute, bint iter=0): - cdef int ret - cdef object obj - cdef Py_ssize_t prev_head - - if self.buf_head >= self.buf_tail and self.file_like is not None: - self.read_from_file() - - while 1: - prev_head = self.buf_head - if prev_head >= self.buf_tail: - if iter: - raise StopIteration("No more data to unpack.") - else: - raise OutOfData("No more data to unpack.") - - ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head) - self.stream_offset += self.buf_head - prev_head - - if ret == 1: - obj = unpack_data(&self.ctx) - unpack_init(&self.ctx) - return obj - elif ret == 0: - if self.file_like is not None: - self.read_from_file() - continue - if iter: - raise StopIteration("No more data to unpack.") - else: - raise OutOfData("No more data to unpack.") - elif ret == -2: - raise FormatError - elif ret == -3: - raise StackError - else: - raise ValueError("Unpack failed: error = %d" % (ret,)) - - def read_bytes(self, Py_ssize_t nbytes): - """Read a specified number of raw bytes from the stream""" - cdef Py_ssize_t nread - nread = min(self.buf_tail - self.buf_head, nbytes) - ret = PyBytes_FromStringAndSize(self.buf + self.buf_head, nread) - self.buf_head += nread - if len(ret) < nbytes and self.file_like is not None: - ret += self.file_like.read(nbytes - len(ret)) - return ret - - def unpack(self): - """Unpack one object - - Raises `OutOfData` when there are no more bytes to unpack. - """ - return self._unpack(unpack_construct) - - def skip(self): - """Read and ignore one object, returning None - - Raises `OutOfData` when there are no more bytes to unpack. - """ - return self._unpack(unpack_skip) - - def read_array_header(self): - """assuming the next object is an array, return its size n, such that - the next n unpack() calls will iterate over its contents. - - Raises `OutOfData` when there are no more bytes to unpack. - """ - return self._unpack(read_array_header) - - def read_map_header(self): - """assuming the next object is a map, return its size n, such that the - next n * 2 unpack() calls will iterate over its key-value pairs. - - Raises `OutOfData` when there are no more bytes to unpack. - """ - return self._unpack(read_map_header) - - def tell(self): - return self.stream_offset - - def __iter__(self): - return self - - def __next__(self): - return self._unpack(unpack_construct, 1) - - # for debug. 
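The `read_array_header()` / `read_map_header()` contract documented above (return the size n, then the next n `unpack()` calls iterate over the contents) is easiest to see with a small example. This sketch assumes the upstream msgpack.Unpacker, whose behaviour the removed Cython implementation is expected to mirror.

    import msgpack

    unpacker = msgpack.Unpacker(raw=False)
    unpacker.feed(msgpack.packb([1, 2, 3], use_bin_type=True))

    n = unpacker.read_array_header()          # -> 3
    items = [unpacker.unpack() for _ in range(n)]
    print(items)                              # [1, 2, 3]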
- #def _buf(self): - # return PyString_FromStringAndSize(self.buf, self.buf_tail) - - #def _off(self): - # return self.buf_head diff --git a/ddtrace/vendor/msgpack/_version.py b/ddtrace/vendor/msgpack/_version.py deleted file mode 100644 index 926c5e7b..00000000 --- a/ddtrace/vendor/msgpack/_version.py +++ /dev/null @@ -1 +0,0 @@ -version = (0, 6, 1) diff --git a/ddtrace/vendor/msgpack/buff_converter.h b/ddtrace/vendor/msgpack/buff_converter.h deleted file mode 100644 index bc7227ae..00000000 --- a/ddtrace/vendor/msgpack/buff_converter.h +++ /dev/null @@ -1,28 +0,0 @@ -#include "Python.h" - -/* cython does not support this preprocessor check => write it in raw C */ -#if PY_MAJOR_VERSION == 2 -static PyObject * -buff_to_buff(char *buff, Py_ssize_t size) -{ - return PyBuffer_FromMemory(buff, size); -} - -#elif (PY_MAJOR_VERSION == 3) && (PY_MINOR_VERSION >= 3) -static PyObject * -buff_to_buff(char *buff, Py_ssize_t size) -{ - return PyMemoryView_FromMemory(buff, size, PyBUF_READ); -} -#else -static PyObject * -buff_to_buff(char *buff, Py_ssize_t size) -{ - Py_buffer pybuf; - if (PyBuffer_FillInfo(&pybuf, NULL, buff, size, 1, PyBUF_FULL_RO) == -1) { - return NULL; - } - - return PyMemoryView_FromBuffer(&pybuf); -} -#endif diff --git a/ddtrace/vendor/msgpack/exceptions.py b/ddtrace/vendor/msgpack/exceptions.py deleted file mode 100644 index d6d2615c..00000000 --- a/ddtrace/vendor/msgpack/exceptions.py +++ /dev/null @@ -1,48 +0,0 @@ -class UnpackException(Exception): - """Base class for some exceptions raised while unpacking. - - NOTE: unpack may raise exception other than subclass of - UnpackException. If you want to catch all error, catch - Exception instead. - """ - - -class BufferFull(UnpackException): - pass - - -class OutOfData(UnpackException): - pass - - -class FormatError(ValueError, UnpackException): - """Invalid msgpack format""" - - -class StackError(ValueError, UnpackException): - """Too nested""" - - -# Deprecated. Use ValueError instead -UnpackValueError = ValueError - - -class ExtraData(UnpackValueError): - """ExtraData is raised when there is trailing data. - - This exception is raised while only one-shot (not streaming) - unpack. - """ - - def __init__(self, unpacked, extra): - self.unpacked = unpacked - self.extra = extra - - def __str__(self): - return "unpack(b) received extra data." - - -# Deprecated. Use Exception instead to catch all exception during packing. -PackException = Exception -PackValueError = ValueError -PackOverflowError = OverflowError diff --git a/ddtrace/vendor/msgpack/fallback.py b/ddtrace/vendor/msgpack/fallback.py deleted file mode 100644 index 3836e830..00000000 --- a/ddtrace/vendor/msgpack/fallback.py +++ /dev/null @@ -1,1027 +0,0 @@ -"""Fallback pure Python implementation of msgpack""" - -import sys -import struct -import warnings - - -if sys.version_info[0] == 2: - PY2 = True - int_types = (int, long) - def dict_iteritems(d): - return d.iteritems() -else: - PY2 = False - int_types = int - unicode = str - xrange = range - def dict_iteritems(d): - return d.items() - -if sys.version_info < (3, 5): - # Ugly hack... - RecursionError = RuntimeError - - def _is_recursionerror(e): - return len(e.args) == 1 and isinstance(e.args[0], str) and \ - e.args[0].startswith('maximum recursion depth exceeded') -else: - def _is_recursionerror(e): - return True - -if hasattr(sys, 'pypy_version_info'): - # cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own - # StringBuilder is fastest. 
- from __pypy__ import newlist_hint - try: - from __pypy__.builders import BytesBuilder as StringBuilder - except ImportError: - from __pypy__.builders import StringBuilder - USING_STRINGBUILDER = True - class StringIO(object): - def __init__(self, s=b''): - if s: - self.builder = StringBuilder(len(s)) - self.builder.append(s) - else: - self.builder = StringBuilder() - def write(self, s): - if isinstance(s, memoryview): - s = s.tobytes() - elif isinstance(s, bytearray): - s = bytes(s) - self.builder.append(s) - def getvalue(self): - return self.builder.build() -else: - USING_STRINGBUILDER = False - from io import BytesIO as StringIO - newlist_hint = lambda size: [] - - -from .exceptions import ( - BufferFull, - OutOfData, - ExtraData, - FormatError, - StackError, -) - -from . import ExtType - - -EX_SKIP = 0 -EX_CONSTRUCT = 1 -EX_READ_ARRAY_HEADER = 2 -EX_READ_MAP_HEADER = 3 - -TYPE_IMMEDIATE = 0 -TYPE_ARRAY = 1 -TYPE_MAP = 2 -TYPE_RAW = 3 -TYPE_BIN = 4 -TYPE_EXT = 5 - -DEFAULT_RECURSE_LIMIT = 511 - - -def _check_type_strict(obj, t, type=type, tuple=tuple): - if type(t) is tuple: - return type(obj) in t - else: - return type(obj) is t - - -def _get_data_from_buffer(obj): - try: - view = memoryview(obj) - except TypeError: - # try to use legacy buffer protocol if 2.7, otherwise re-raise - if PY2: - view = memoryview(buffer(obj)) - warnings.warn("using old buffer interface to unpack %s; " - "this leads to unpacking errors if slicing is used and " - "will be removed in a future version" % type(obj), - RuntimeWarning, stacklevel=3) - else: - raise - if view.itemsize != 1: - raise ValueError("cannot unpack from multi-byte object") - return view - - -def unpack(stream, **kwargs): - warnings.warn( - "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", - DeprecationWarning, stacklevel=2) - data = stream.read() - return unpackb(data, **kwargs) - - -def unpackb(packed, **kwargs): - """ - Unpack an object from `packed`. - - Raises ``ExtraData`` when *packed* contains extra bytes. - Raises ``ValueError`` when *packed* is incomplete. - Raises ``FormatError`` when *packed* is not valid msgpack. - Raises ``StackError`` when *packed* contains too nested. - Other exceptions can be raised during unpacking. - - See :class:`Unpacker` for options. - """ - unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs) - unpacker.feed(packed) - try: - ret = unpacker._unpack() - except OutOfData: - raise ValueError("Unpack failed: incomplete input") - except RecursionError as e: - if _is_recursionerror(e): - raise StackError - raise - if unpacker._got_extradata(): - raise ExtraData(ret, unpacker._get_extradata()) - return ret - - -if sys.version_info < (2, 7, 6): - def _unpack_from(f, b, o=0): - """Explicit typcast for legacy struct.unpack_from""" - return struct.unpack_from(f, bytes(b), o) -else: - _unpack_from = struct.unpack_from - - -class Unpacker(object): - """Streaming unpacker. - - arguments: - - :param file_like: - File-like object having `.read(n)` method. - If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable. - - :param int read_size: - Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`) - - :param bool use_list: - If true, unpack msgpack array to Python list. - Otherwise, unpack to Python tuple. (default: True) - - :param bool raw: - If true, unpack msgpack raw to Python bytes (default). 
- Otherwise, unpack to Python str (or unicode on Python 2) by decoding - with UTF-8 encoding (recommended). - Currently, the default is true, but it will be changed to false in - near future. So you must specify it explicitly for keeping backward - compatibility. - - *encoding* option which is deprecated overrides this option. - - :param bool strict_map_key: - If true, only str or bytes are accepted for map (dict) keys. - It's False by default for backward-compatibility. - But it will be True from msgpack 1.0. - - :param callable object_hook: - When specified, it should be callable. - Unpacker calls it with a dict argument after unpacking msgpack map. - (See also simplejson) - - :param callable object_pairs_hook: - When specified, it should be callable. - Unpacker calls it with a list of key-value pairs after unpacking msgpack map. - (See also simplejson) - - :param str encoding: - Encoding used for decoding msgpack raw. - If it is None (default), msgpack raw is deserialized to Python bytes. - - :param str unicode_errors: - (deprecated) Used for decoding msgpack raw with *encoding*. - (default: `'strict'`) - - :param int max_buffer_size: - Limits size of data waiting unpacked. 0 means system's INT_MAX (default). - Raises `BufferFull` exception when it is insufficient. - You should set this parameter when unpacking data from untrusted source. - - :param int max_str_len: - Deprecated, use *max_buffer_size* instead. - Limits max length of str. (default: max_buffer_size or 1024*1024) - - :param int max_bin_len: - Deprecated, use *max_buffer_size* instead. - Limits max length of bin. (default: max_buffer_size or 1024*1024) - - :param int max_array_len: - Limits max length of array. - (default: max_buffer_size or 128*1024) - - :param int max_map_len: - Limits max length of map. - (default: max_buffer_size//2 or 32*1024) - - :param int max_ext_len: - Deprecated, use *max_buffer_size* instead. - Limits max size of ext type. (default: max_buffer_size or 1024*1024) - - Example of streaming deserialize from file-like object:: - - unpacker = Unpacker(file_like, raw=False, max_buffer_size=10*1024*1024) - for o in unpacker: - process(o) - - Example of streaming deserialize from socket:: - - unpacker = Unpacker(raw=False, max_buffer_size=10*1024*1024) - while True: - buf = sock.recv(1024**2) - if not buf: - break - unpacker.feed(buf) - for o in unpacker: - process(o) - - Raises ``ExtraData`` when *packed* contains extra bytes. - Raises ``OutOfData`` when *packed* is incomplete. - Raises ``FormatError`` when *packed* is not valid msgpack. - Raises ``StackError`` when *packed* contains too nested. - Other exceptions can be raised during unpacking. - """ - - def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, strict_map_key=False, - object_hook=None, object_pairs_hook=None, list_hook=None, - encoding=None, unicode_errors=None, max_buffer_size=0, - ext_hook=ExtType, - max_str_len=-1, - max_bin_len=-1, - max_array_len=-1, - max_map_len=-1, - max_ext_len=-1): - if encoding is not None: - warnings.warn( - "encoding is deprecated, Use raw=False instead.", - DeprecationWarning, stacklevel=2) - - if unicode_errors is None: - unicode_errors = 'strict' - - if file_like is None: - self._feeding = True - else: - if not callable(file_like.read): - raise TypeError("`file_like.read` must be callable") - self.file_like = file_like - self._feeding = False - - #: array of bytes fed. 
- self._buffer = bytearray() - #: Which position we currently reads - self._buff_i = 0 - - # When Unpacker is used as an iterable, between the calls to next(), - # the buffer is not "consumed" completely, for efficiency sake. - # Instead, it is done sloppily. To make sure we raise BufferFull at - # the correct moments, we have to keep track of how sloppy we were. - # Furthermore, when the buffer is incomplete (that is: in the case - # we raise an OutOfData) we need to rollback the buffer to the correct - # state, which _buf_checkpoint records. - self._buf_checkpoint = 0 - - if max_str_len == -1: - max_str_len = max_buffer_size or 1024*1024 - if max_bin_len == -1: - max_bin_len = max_buffer_size or 1024*1024 - if max_array_len == -1: - max_array_len = max_buffer_size or 128*1024 - if max_map_len == -1: - max_map_len = max_buffer_size//2 or 32*1024 - if max_ext_len == -1: - max_ext_len = max_buffer_size or 1024*1024 - - self._max_buffer_size = max_buffer_size or 2**31-1 - if read_size > self._max_buffer_size: - raise ValueError("read_size must be smaller than max_buffer_size") - self._read_size = read_size or min(self._max_buffer_size, 16*1024) - self._raw = bool(raw) - self._strict_map_key = bool(strict_map_key) - self._encoding = encoding - self._unicode_errors = unicode_errors - self._use_list = use_list - self._list_hook = list_hook - self._object_hook = object_hook - self._object_pairs_hook = object_pairs_hook - self._ext_hook = ext_hook - self._max_str_len = max_str_len - self._max_bin_len = max_bin_len - self._max_array_len = max_array_len - self._max_map_len = max_map_len - self._max_ext_len = max_ext_len - self._stream_offset = 0 - - if list_hook is not None and not callable(list_hook): - raise TypeError('`list_hook` is not callable') - if object_hook is not None and not callable(object_hook): - raise TypeError('`object_hook` is not callable') - if object_pairs_hook is not None and not callable(object_pairs_hook): - raise TypeError('`object_pairs_hook` is not callable') - if object_hook is not None and object_pairs_hook is not None: - raise TypeError("object_pairs_hook and object_hook are mutually " - "exclusive") - if not callable(ext_hook): - raise TypeError("`ext_hook` is not callable") - - def feed(self, next_bytes): - assert self._feeding - view = _get_data_from_buffer(next_bytes) - if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): - raise BufferFull - - # Strip buffer before checkpoint before reading file. - if self._buf_checkpoint > 0: - del self._buffer[:self._buf_checkpoint] - self._buff_i -= self._buf_checkpoint - self._buf_checkpoint = 0 - - # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython - self._buffer.extend(view) - - def _consume(self): - """ Gets rid of the used parts of the buffer. """ - self._stream_offset += self._buff_i - self._buf_checkpoint - self._buf_checkpoint = self._buff_i - - def _got_extradata(self): - return self._buff_i < len(self._buffer) - - def _get_extradata(self): - return self._buffer[self._buff_i:] - - def read_bytes(self, n): - return self._read(n) - - def _read(self, n): - # (int) -> bytearray - self._reserve(n) - i = self._buff_i - self._buff_i = i+n - return self._buffer[i:i+n] - - def _reserve(self, n): - remain_bytes = len(self._buffer) - self._buff_i - n - - # Fast path: buffer has n bytes already - if remain_bytes >= 0: - return - - if self._feeding: - self._buff_i = self._buf_checkpoint - raise OutOfData - - # Strip buffer before checkpoint before reading file. 
- if self._buf_checkpoint > 0: - del self._buffer[:self._buf_checkpoint] - self._buff_i -= self._buf_checkpoint - self._buf_checkpoint = 0 - - # Read from file - remain_bytes = -remain_bytes - while remain_bytes > 0: - to_read_bytes = max(self._read_size, remain_bytes) - read_data = self.file_like.read(to_read_bytes) - if not read_data: - break - assert isinstance(read_data, bytes) - self._buffer += read_data - remain_bytes -= len(read_data) - - if len(self._buffer) < n + self._buff_i: - self._buff_i = 0 # rollback - raise OutOfData - - def _read_header(self, execute=EX_CONSTRUCT): - typ = TYPE_IMMEDIATE - n = 0 - obj = None - self._reserve(1) - b = self._buffer[self._buff_i] - self._buff_i += 1 - if b & 0b10000000 == 0: - obj = b - elif b & 0b11100000 == 0b11100000: - obj = -1 - (b ^ 0xff) - elif b & 0b11100000 == 0b10100000: - n = b & 0b00011111 - typ = TYPE_RAW - if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) - obj = self._read(n) - elif b & 0b11110000 == 0b10010000: - n = b & 0b00001111 - typ = TYPE_ARRAY - if n > self._max_array_len: - raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b & 0b11110000 == 0b10000000: - n = b & 0b00001111 - typ = TYPE_MAP - if n > self._max_map_len: - raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) - elif b == 0xc0: - obj = None - elif b == 0xc2: - obj = False - elif b == 0xc3: - obj = True - elif b == 0xc4: - typ = TYPE_BIN - self._reserve(1) - n = self._buffer[self._buff_i] - self._buff_i += 1 - if n > self._max_bin_len: - raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) - obj = self._read(n) - elif b == 0xc5: - typ = TYPE_BIN - self._reserve(2) - n = _unpack_from(">H", self._buffer, self._buff_i)[0] - self._buff_i += 2 - if n > self._max_bin_len: - raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) - obj = self._read(n) - elif b == 0xc6: - typ = TYPE_BIN - self._reserve(4) - n = _unpack_from(">I", self._buffer, self._buff_i)[0] - self._buff_i += 4 - if n > self._max_bin_len: - raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) - obj = self._read(n) - elif b == 0xc7: # ext 8 - typ = TYPE_EXT - self._reserve(2) - L, n = _unpack_from('Bb', self._buffer, self._buff_i) - self._buff_i += 2 - if L > self._max_ext_len: - raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) - obj = self._read(L) - elif b == 0xc8: # ext 16 - typ = TYPE_EXT - self._reserve(3) - L, n = _unpack_from('>Hb', self._buffer, self._buff_i) - self._buff_i += 3 - if L > self._max_ext_len: - raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) - obj = self._read(L) - elif b == 0xc9: # ext 32 - typ = TYPE_EXT - self._reserve(5) - L, n = _unpack_from('>Ib', self._buffer, self._buff_i) - self._buff_i += 5 - if L > self._max_ext_len: - raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) - obj = self._read(L) - elif b == 0xca: - self._reserve(4) - obj = _unpack_from(">f", self._buffer, self._buff_i)[0] - self._buff_i += 4 - elif b == 0xcb: - self._reserve(8) - obj = _unpack_from(">d", self._buffer, self._buff_i)[0] - self._buff_i += 8 - elif b == 0xcc: - self._reserve(1) - obj = self._buffer[self._buff_i] - self._buff_i += 1 - elif b == 0xcd: - self._reserve(2) - obj = _unpack_from(">H", self._buffer, self._buff_i)[0] - self._buff_i += 2 - elif b == 0xce: - self._reserve(4) - obj = _unpack_from(">I", self._buffer, self._buff_i)[0] - self._buff_i += 4 - elif b == 0xcf: - 
self._reserve(8) - obj = _unpack_from(">Q", self._buffer, self._buff_i)[0] - self._buff_i += 8 - elif b == 0xd0: - self._reserve(1) - obj = _unpack_from("b", self._buffer, self._buff_i)[0] - self._buff_i += 1 - elif b == 0xd1: - self._reserve(2) - obj = _unpack_from(">h", self._buffer, self._buff_i)[0] - self._buff_i += 2 - elif b == 0xd2: - self._reserve(4) - obj = _unpack_from(">i", self._buffer, self._buff_i)[0] - self._buff_i += 4 - elif b == 0xd3: - self._reserve(8) - obj = _unpack_from(">q", self._buffer, self._buff_i)[0] - self._buff_i += 8 - elif b == 0xd4: # fixext 1 - typ = TYPE_EXT - if self._max_ext_len < 1: - raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) - self._reserve(2) - n, obj = _unpack_from("b1s", self._buffer, self._buff_i) - self._buff_i += 2 - elif b == 0xd5: # fixext 2 - typ = TYPE_EXT - if self._max_ext_len < 2: - raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) - self._reserve(3) - n, obj = _unpack_from("b2s", self._buffer, self._buff_i) - self._buff_i += 3 - elif b == 0xd6: # fixext 4 - typ = TYPE_EXT - if self._max_ext_len < 4: - raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) - self._reserve(5) - n, obj = _unpack_from("b4s", self._buffer, self._buff_i) - self._buff_i += 5 - elif b == 0xd7: # fixext 8 - typ = TYPE_EXT - if self._max_ext_len < 8: - raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) - self._reserve(9) - n, obj = _unpack_from("b8s", self._buffer, self._buff_i) - self._buff_i += 9 - elif b == 0xd8: # fixext 16 - typ = TYPE_EXT - if self._max_ext_len < 16: - raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) - self._reserve(17) - n, obj = _unpack_from("b16s", self._buffer, self._buff_i) - self._buff_i += 17 - elif b == 0xd9: - typ = TYPE_RAW - self._reserve(1) - n = self._buffer[self._buff_i] - self._buff_i += 1 - if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) - obj = self._read(n) - elif b == 0xda: - typ = TYPE_RAW - self._reserve(2) - n, = _unpack_from(">H", self._buffer, self._buff_i) - self._buff_i += 2 - if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) - obj = self._read(n) - elif b == 0xdb: - typ = TYPE_RAW - self._reserve(4) - n, = _unpack_from(">I", self._buffer, self._buff_i) - self._buff_i += 4 - if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) - obj = self._read(n) - elif b == 0xdc: - typ = TYPE_ARRAY - self._reserve(2) - n, = _unpack_from(">H", self._buffer, self._buff_i) - self._buff_i += 2 - if n > self._max_array_len: - raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b == 0xdd: - typ = TYPE_ARRAY - self._reserve(4) - n, = _unpack_from(">I", self._buffer, self._buff_i) - self._buff_i += 4 - if n > self._max_array_len: - raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b == 0xde: - self._reserve(2) - n, = _unpack_from(">H", self._buffer, self._buff_i) - self._buff_i += 2 - if n > self._max_map_len: - raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) - typ = TYPE_MAP - elif b == 0xdf: - self._reserve(4) - n, = _unpack_from(">I", self._buffer, self._buff_i) - self._buff_i += 4 - if n > self._max_map_len: - raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) - typ = TYPE_MAP - else: - raise FormatError("Unknown header: 0x%x" % b) - return typ, n, obj - - def _unpack(self, 
execute=EX_CONSTRUCT): - typ, n, obj = self._read_header(execute) - - if execute == EX_READ_ARRAY_HEADER: - if typ != TYPE_ARRAY: - raise ValueError("Expected array") - return n - if execute == EX_READ_MAP_HEADER: - if typ != TYPE_MAP: - raise ValueError("Expected map") - return n - # TODO should we eliminate the recursion? - if typ == TYPE_ARRAY: - if execute == EX_SKIP: - for i in xrange(n): - # TODO check whether we need to call `list_hook` - self._unpack(EX_SKIP) - return - ret = newlist_hint(n) - for i in xrange(n): - ret.append(self._unpack(EX_CONSTRUCT)) - if self._list_hook is not None: - ret = self._list_hook(ret) - # TODO is the interaction between `list_hook` and `use_list` ok? - return ret if self._use_list else tuple(ret) - if typ == TYPE_MAP: - if execute == EX_SKIP: - for i in xrange(n): - # TODO check whether we need to call hooks - self._unpack(EX_SKIP) - self._unpack(EX_SKIP) - return - if self._object_pairs_hook is not None: - ret = self._object_pairs_hook( - (self._unpack(EX_CONSTRUCT), - self._unpack(EX_CONSTRUCT)) - for _ in xrange(n)) - else: - ret = {} - for _ in xrange(n): - key = self._unpack(EX_CONSTRUCT) - if self._strict_map_key and type(key) not in (unicode, bytes): - raise ValueError("%s is not allowed for map key" % str(type(key))) - ret[key] = self._unpack(EX_CONSTRUCT) - if self._object_hook is not None: - ret = self._object_hook(ret) - return ret - if execute == EX_SKIP: - return - if typ == TYPE_RAW: - if self._encoding is not None: - obj = obj.decode(self._encoding, self._unicode_errors) - elif self._raw: - obj = bytes(obj) - else: - obj = obj.decode('utf_8') - return obj - if typ == TYPE_EXT: - return self._ext_hook(n, bytes(obj)) - if typ == TYPE_BIN: - return bytes(obj) - assert typ == TYPE_IMMEDIATE - return obj - - def __iter__(self): - return self - - def __next__(self): - try: - ret = self._unpack(EX_CONSTRUCT) - self._consume() - return ret - except OutOfData: - self._consume() - raise StopIteration - except RecursionError: - raise StackError - - next = __next__ - - def skip(self): - self._unpack(EX_SKIP) - self._consume() - - def unpack(self): - try: - ret = self._unpack(EX_CONSTRUCT) - except RecursionError: - raise StackError - self._consume() - return ret - - def read_array_header(self): - ret = self._unpack(EX_READ_ARRAY_HEADER) - self._consume() - return ret - - def read_map_header(self): - ret = self._unpack(EX_READ_MAP_HEADER) - self._consume() - return ret - - def tell(self): - return self._stream_offset - - -class Packer(object): - """ - MessagePack Packer - - usage: - - packer = Packer() - astream.write(packer.pack(a)) - astream.write(packer.pack(b)) - - Packer's constructor has some keyword arguments: - - :param callable default: - Convert user type to builtin type that Packer supports. - See also simplejson's document. - - :param bool use_single_float: - Use single precision float type for float. (default: False) - - :param bool autoreset: - Reset buffer after each pack and return its content as `bytes`. (default: True). - If set this to false, use `bytes()` to get content and `.reset()` to clear buffer. - - :param bool use_bin_type: - Use bin type introduced in msgpack spec 2.0 for bytes. - It also enables str8 type for unicode. - - :param bool strict_types: - If set to true, types will be checked to be exact. Derived classes - from serializeable types will not be serialized and will be - treated as unsupported type and forwarded to default. - Additionally tuples will not be serialized as lists. 
- This is useful when trying to implement accurate serialization - for python types. - - :param str encoding: - (deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8') - - :param str unicode_errors: - Error handler for encoding unicode. (default: 'strict') - """ - def __init__(self, default=None, encoding=None, unicode_errors=None, - use_single_float=False, autoreset=True, use_bin_type=False, - strict_types=False): - if encoding is None: - encoding = 'utf_8' - else: - warnings.warn( - "encoding is deprecated, Use raw=False instead.", - DeprecationWarning, stacklevel=2) - - if unicode_errors is None: - unicode_errors = 'strict' - - self._strict_types = strict_types - self._use_float = use_single_float - self._autoreset = autoreset - self._use_bin_type = use_bin_type - self._encoding = encoding - self._unicode_errors = unicode_errors - self._buffer = StringIO() - if default is not None: - if not callable(default): - raise TypeError("default must be callable") - self._default = default - - def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT, - check=isinstance, check_type_strict=_check_type_strict): - default_used = False - if self._strict_types: - check = check_type_strict - list_types = list - else: - list_types = (list, tuple) - while True: - if nest_limit < 0: - raise ValueError("recursion limit exceeded") - if obj is None: - return self._buffer.write(b"\xc0") - if check(obj, bool): - if obj: - return self._buffer.write(b"\xc3") - return self._buffer.write(b"\xc2") - if check(obj, int_types): - if 0 <= obj < 0x80: - return self._buffer.write(struct.pack("B", obj)) - if -0x20 <= obj < 0: - return self._buffer.write(struct.pack("b", obj)) - if 0x80 <= obj <= 0xff: - return self._buffer.write(struct.pack("BB", 0xcc, obj)) - if -0x80 <= obj < 0: - return self._buffer.write(struct.pack(">Bb", 0xd0, obj)) - if 0xff < obj <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xcd, obj)) - if -0x8000 <= obj < -0x80: - return self._buffer.write(struct.pack(">Bh", 0xd1, obj)) - if 0xffff < obj <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xce, obj)) - if -0x80000000 <= obj < -0x8000: - return self._buffer.write(struct.pack(">Bi", 0xd2, obj)) - if 0xffffffff < obj <= 0xffffffffffffffff: - return self._buffer.write(struct.pack(">BQ", 0xcf, obj)) - if -0x8000000000000000 <= obj < -0x80000000: - return self._buffer.write(struct.pack(">Bq", 0xd3, obj)) - if not default_used and self._default is not None: - obj = self._default(obj) - default_used = True - continue - raise OverflowError("Integer value out of range") - if check(obj, (bytes, bytearray)): - n = len(obj) - if n >= 2**32: - raise ValueError("%s is too large" % type(obj).__name__) - self._pack_bin_header(n) - return self._buffer.write(obj) - if check(obj, unicode): - if self._encoding is None: - raise TypeError( - "Can't encode unicode string: " - "no encoding is specified") - obj = obj.encode(self._encoding, self._unicode_errors) - n = len(obj) - if n >= 2**32: - raise ValueError("String is too large") - self._pack_raw_header(n) - return self._buffer.write(obj) - if check(obj, memoryview): - n = len(obj) * obj.itemsize - if n >= 2**32: - raise ValueError("Memoryview is too large") - self._pack_bin_header(n) - return self._buffer.write(obj) - if check(obj, float): - if self._use_float: - return self._buffer.write(struct.pack(">Bf", 0xca, obj)) - return self._buffer.write(struct.pack(">Bd", 0xcb, obj)) - if check(obj, ExtType): - code = obj.code - data = obj.data - assert isinstance(code, int) - assert 
isinstance(data, bytes) - L = len(data) - if L == 1: - self._buffer.write(b'\xd4') - elif L == 2: - self._buffer.write(b'\xd5') - elif L == 4: - self._buffer.write(b'\xd6') - elif L == 8: - self._buffer.write(b'\xd7') - elif L == 16: - self._buffer.write(b'\xd8') - elif L <= 0xff: - self._buffer.write(struct.pack(">BB", 0xc7, L)) - elif L <= 0xffff: - self._buffer.write(struct.pack(">BH", 0xc8, L)) - else: - self._buffer.write(struct.pack(">BI", 0xc9, L)) - self._buffer.write(struct.pack("b", code)) - self._buffer.write(data) - return - if check(obj, list_types): - n = len(obj) - self._pack_array_header(n) - for i in xrange(n): - self._pack(obj[i], nest_limit - 1) - return - if check(obj, dict): - return self._pack_map_pairs(len(obj), dict_iteritems(obj), - nest_limit - 1) - if not default_used and self._default is not None: - obj = self._default(obj) - default_used = 1 - continue - raise TypeError("Cannot serialize %r" % (obj, )) - - def pack(self, obj): - try: - self._pack(obj) - except: - self._buffer = StringIO() # force reset - raise - if self._autoreset: - ret = self._buffer.getvalue() - self._buffer = StringIO() - return ret - - def pack_map_pairs(self, pairs): - self._pack_map_pairs(len(pairs), pairs) - if self._autoreset: - ret = self._buffer.getvalue() - self._buffer = StringIO() - return ret - - def pack_array_header(self, n): - if n >= 2**32: - raise ValueError - self._pack_array_header(n) - if self._autoreset: - ret = self._buffer.getvalue() - self._buffer = StringIO() - return ret - - def pack_map_header(self, n): - if n >= 2**32: - raise ValueError - self._pack_map_header(n) - if self._autoreset: - ret = self._buffer.getvalue() - self._buffer = StringIO() - return ret - - def pack_ext_type(self, typecode, data): - if not isinstance(typecode, int): - raise TypeError("typecode must have int type.") - if not 0 <= typecode <= 127: - raise ValueError("typecode should be 0-127") - if not isinstance(data, bytes): - raise TypeError("data must have bytes type") - L = len(data) - if L > 0xffffffff: - raise ValueError("Too large data") - if L == 1: - self._buffer.write(b'\xd4') - elif L == 2: - self._buffer.write(b'\xd5') - elif L == 4: - self._buffer.write(b'\xd6') - elif L == 8: - self._buffer.write(b'\xd7') - elif L == 16: - self._buffer.write(b'\xd8') - elif L <= 0xff: - self._buffer.write(b'\xc7' + struct.pack('B', L)) - elif L <= 0xffff: - self._buffer.write(b'\xc8' + struct.pack('>H', L)) - else: - self._buffer.write(b'\xc9' + struct.pack('>I', L)) - self._buffer.write(struct.pack('B', typecode)) - self._buffer.write(data) - - def _pack_array_header(self, n): - if n <= 0x0f: - return self._buffer.write(struct.pack('B', 0x90 + n)) - if n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xdc, n)) - if n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xdd, n)) - raise ValueError("Array is too large") - - def _pack_map_header(self, n): - if n <= 0x0f: - return self._buffer.write(struct.pack('B', 0x80 + n)) - if n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xde, n)) - if n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xdf, n)) - raise ValueError("Dict is too large") - - def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): - self._pack_map_header(n) - for (k, v) in pairs: - self._pack(k, nest_limit - 1) - self._pack(v, nest_limit - 1) - - def _pack_raw_header(self, n): - if n <= 0x1f: - self._buffer.write(struct.pack('B', 0xa0 + n)) - elif self._use_bin_type and n <= 0xff: - self._buffer.write(struct.pack('>BB', 
0xd9, n)) - elif n <= 0xffff: - self._buffer.write(struct.pack(">BH", 0xda, n)) - elif n <= 0xffffffff: - self._buffer.write(struct.pack(">BI", 0xdb, n)) - else: - raise ValueError('Raw is too large') - - def _pack_bin_header(self, n): - if not self._use_bin_type: - return self._pack_raw_header(n) - elif n <= 0xff: - return self._buffer.write(struct.pack('>BB', 0xc4, n)) - elif n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xc5, n)) - elif n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xc6, n)) - else: - raise ValueError('Bin is too large') - - def bytes(self): - """Return internal buffer contents as bytes object""" - return self._buffer.getvalue() - - def reset(self): - """Reset internal buffer. - - This method is usaful only when autoreset=False. - """ - self._buffer = StringIO() - - def getbuffer(self): - """Return view of internal buffer.""" - if USING_STRINGBUILDER or PY2: - return memoryview(self.bytes()) - else: - return self._buffer.getbuffer() diff --git a/ddtrace/vendor/msgpack/pack.h b/ddtrace/vendor/msgpack/pack.h deleted file mode 100644 index 4f3ce1d9..00000000 --- a/ddtrace/vendor/msgpack/pack.h +++ /dev/null @@ -1,119 +0,0 @@ -/* - * MessagePack for Python packing routine - * - * Copyright (C) 2009 Naoki INADA - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include -#include -#include "sysdep.h" -#include -#include - -#ifdef __cplusplus -extern "C" { -#endif - -#ifdef _MSC_VER -#define inline __inline -#endif - -typedef struct msgpack_packer { - char *buf; - size_t length; - size_t buf_size; - bool use_bin_type; -} msgpack_packer; - -typedef struct Packer Packer; - -static inline int msgpack_pack_write(msgpack_packer* pk, const char *data, size_t l) -{ - char* buf = pk->buf; - size_t bs = pk->buf_size; - size_t len = pk->length; - - if (len + l > bs) { - bs = (len + l) * 2; - buf = (char*)PyMem_Realloc(buf, bs); - if (!buf) { - PyErr_NoMemory(); - return -1; - } - } - memcpy(buf + len, data, l); - len += l; - - pk->buf = buf; - pk->buf_size = bs; - pk->length = len; - return 0; -} - -#define msgpack_pack_append_buffer(user, buf, len) \ - return msgpack_pack_write(user, (const char*)buf, len) - -#include "pack_template.h" - -// return -2 when o is too long -static inline int -msgpack_pack_unicode(msgpack_packer *pk, PyObject *o, long long limit) -{ -#if PY_MAJOR_VERSION >= 3 - assert(PyUnicode_Check(o)); - - Py_ssize_t len; - const char* buf = PyUnicode_AsUTF8AndSize(o, &len); - if (buf == NULL) - return -1; - - if (len > limit) { - return -2; - } - - int ret = msgpack_pack_raw(pk, len); - if (ret) return ret; - - return msgpack_pack_raw_body(pk, buf, len); -#else - PyObject *bytes; - Py_ssize_t len; - int ret; - - // py2 - bytes = PyUnicode_AsUTF8String(o); - if (bytes == NULL) - return -1; - - len = PyString_GET_SIZE(bytes); - if (len > limit) { - Py_DECREF(bytes); - return -2; - } - - ret = msgpack_pack_raw(pk, len); - if (ret) { - Py_DECREF(bytes); - return -1; - } - ret = msgpack_pack_raw_body(pk, PyString_AS_STRING(bytes), len); - Py_DECREF(bytes); - return ret; -#endif -} - -#ifdef __cplusplus -} -#endif diff --git a/ddtrace/vendor/msgpack/pack_template.h b/ddtrace/vendor/msgpack/pack_template.h deleted file mode 100644 index 69982f4d..00000000 --- a/ddtrace/vendor/msgpack/pack_template.h +++ /dev/null @@ -1,778 +0,0 @@ -/* - * MessagePack packing routine template - * - * Copyright (C) 2008-2010 FURUHASHI Sadayuki - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#if defined(__LITTLE_ENDIAN__) -#define TAKE8_8(d) ((uint8_t*)&d)[0] -#define TAKE8_16(d) ((uint8_t*)&d)[0] -#define TAKE8_32(d) ((uint8_t*)&d)[0] -#define TAKE8_64(d) ((uint8_t*)&d)[0] -#elif defined(__BIG_ENDIAN__) -#define TAKE8_8(d) ((uint8_t*)&d)[0] -#define TAKE8_16(d) ((uint8_t*)&d)[1] -#define TAKE8_32(d) ((uint8_t*)&d)[3] -#define TAKE8_64(d) ((uint8_t*)&d)[7] -#endif - -#ifndef msgpack_pack_append_buffer -#error msgpack_pack_append_buffer callback is not defined -#endif - - -/* - * Integer - */ - -#define msgpack_pack_real_uint8(x, d) \ -do { \ - if(d < (1<<7)) { \ - /* fixnum */ \ - msgpack_pack_append_buffer(x, &TAKE8_8(d), 1); \ - } else { \ - /* unsigned 8 */ \ - unsigned char buf[2] = {0xcc, TAKE8_8(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } \ -} while(0) - -#define msgpack_pack_real_uint16(x, d) \ -do { \ - if(d < (1<<7)) { \ - /* fixnum */ \ - msgpack_pack_append_buffer(x, &TAKE8_16(d), 1); \ - } else if(d < (1<<8)) { \ - /* unsigned 8 */ \ - unsigned char buf[2] = {0xcc, TAKE8_16(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } else { \ - /* unsigned 16 */ \ - unsigned char buf[3]; \ - buf[0] = 0xcd; _msgpack_store16(&buf[1], (uint16_t)d); \ - msgpack_pack_append_buffer(x, buf, 3); \ - } \ -} while(0) - -#define msgpack_pack_real_uint32(x, d) \ -do { \ - if(d < (1<<8)) { \ - if(d < (1<<7)) { \ - /* fixnum */ \ - msgpack_pack_append_buffer(x, &TAKE8_32(d), 1); \ - } else { \ - /* unsigned 8 */ \ - unsigned char buf[2] = {0xcc, TAKE8_32(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } \ - } else { \ - if(d < (1<<16)) { \ - /* unsigned 16 */ \ - unsigned char buf[3]; \ - buf[0] = 0xcd; _msgpack_store16(&buf[1], (uint16_t)d); \ - msgpack_pack_append_buffer(x, buf, 3); \ - } else { \ - /* unsigned 32 */ \ - unsigned char buf[5]; \ - buf[0] = 0xce; _msgpack_store32(&buf[1], (uint32_t)d); \ - msgpack_pack_append_buffer(x, buf, 5); \ - } \ - } \ -} while(0) - -#define msgpack_pack_real_uint64(x, d) \ -do { \ - if(d < (1ULL<<8)) { \ - if(d < (1ULL<<7)) { \ - /* fixnum */ \ - msgpack_pack_append_buffer(x, &TAKE8_64(d), 1); \ - } else { \ - /* unsigned 8 */ \ - unsigned char buf[2] = {0xcc, TAKE8_64(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } \ - } else { \ - if(d < (1ULL<<16)) { \ - /* unsigned 16 */ \ - unsigned char buf[3]; \ - buf[0] = 0xcd; _msgpack_store16(&buf[1], (uint16_t)d); \ - msgpack_pack_append_buffer(x, buf, 3); \ - } else if(d < (1ULL<<32)) { \ - /* unsigned 32 */ \ - unsigned char buf[5]; \ - buf[0] = 0xce; _msgpack_store32(&buf[1], (uint32_t)d); \ - msgpack_pack_append_buffer(x, buf, 5); \ - } else { \ - /* unsigned 64 */ \ - unsigned char buf[9]; \ - buf[0] = 0xcf; _msgpack_store64(&buf[1], d); \ - msgpack_pack_append_buffer(x, buf, 9); \ - } \ - } \ -} while(0) - -#define msgpack_pack_real_int8(x, d) \ -do { \ - if(d < -(1<<5)) { \ - /* signed 8 */ \ - unsigned char buf[2] = {0xd0, TAKE8_8(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } else { \ - /* fixnum */ \ - msgpack_pack_append_buffer(x, &TAKE8_8(d), 1); \ - } \ -} while(0) - -#define msgpack_pack_real_int16(x, d) \ -do { \ - if(d < -(1<<5)) { \ - if(d < -(1<<7)) { \ - /* signed 16 */ \ - unsigned char buf[3]; \ - buf[0] = 0xd1; _msgpack_store16(&buf[1], (int16_t)d); \ - msgpack_pack_append_buffer(x, buf, 3); \ - } else { \ - /* signed 8 */ \ - unsigned char buf[2] = {0xd0, TAKE8_16(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } \ - } else if(d < (1<<7)) { \ - /* fixnum */ \ - msgpack_pack_append_buffer(x, &TAKE8_16(d), 1); \ - } else { \ - if(d < (1<<8)) 
{ \ - /* unsigned 8 */ \ - unsigned char buf[2] = {0xcc, TAKE8_16(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } else { \ - /* unsigned 16 */ \ - unsigned char buf[3]; \ - buf[0] = 0xcd; _msgpack_store16(&buf[1], (uint16_t)d); \ - msgpack_pack_append_buffer(x, buf, 3); \ - } \ - } \ -} while(0) - -#define msgpack_pack_real_int32(x, d) \ -do { \ - if(d < -(1<<5)) { \ - if(d < -(1<<15)) { \ - /* signed 32 */ \ - unsigned char buf[5]; \ - buf[0] = 0xd2; _msgpack_store32(&buf[1], (int32_t)d); \ - msgpack_pack_append_buffer(x, buf, 5); \ - } else if(d < -(1<<7)) { \ - /* signed 16 */ \ - unsigned char buf[3]; \ - buf[0] = 0xd1; _msgpack_store16(&buf[1], (int16_t)d); \ - msgpack_pack_append_buffer(x, buf, 3); \ - } else { \ - /* signed 8 */ \ - unsigned char buf[2] = {0xd0, TAKE8_32(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } \ - } else if(d < (1<<7)) { \ - /* fixnum */ \ - msgpack_pack_append_buffer(x, &TAKE8_32(d), 1); \ - } else { \ - if(d < (1<<8)) { \ - /* unsigned 8 */ \ - unsigned char buf[2] = {0xcc, TAKE8_32(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } else if(d < (1<<16)) { \ - /* unsigned 16 */ \ - unsigned char buf[3]; \ - buf[0] = 0xcd; _msgpack_store16(&buf[1], (uint16_t)d); \ - msgpack_pack_append_buffer(x, buf, 3); \ - } else { \ - /* unsigned 32 */ \ - unsigned char buf[5]; \ - buf[0] = 0xce; _msgpack_store32(&buf[1], (uint32_t)d); \ - msgpack_pack_append_buffer(x, buf, 5); \ - } \ - } \ -} while(0) - -#define msgpack_pack_real_int64(x, d) \ -do { \ - if(d < -(1LL<<5)) { \ - if(d < -(1LL<<15)) { \ - if(d < -(1LL<<31)) { \ - /* signed 64 */ \ - unsigned char buf[9]; \ - buf[0] = 0xd3; _msgpack_store64(&buf[1], d); \ - msgpack_pack_append_buffer(x, buf, 9); \ - } else { \ - /* signed 32 */ \ - unsigned char buf[5]; \ - buf[0] = 0xd2; _msgpack_store32(&buf[1], (int32_t)d); \ - msgpack_pack_append_buffer(x, buf, 5); \ - } \ - } else { \ - if(d < -(1<<7)) { \ - /* signed 16 */ \ - unsigned char buf[3]; \ - buf[0] = 0xd1; _msgpack_store16(&buf[1], (int16_t)d); \ - msgpack_pack_append_buffer(x, buf, 3); \ - } else { \ - /* signed 8 */ \ - unsigned char buf[2] = {0xd0, TAKE8_64(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } \ - } \ - } else if(d < (1<<7)) { \ - /* fixnum */ \ - msgpack_pack_append_buffer(x, &TAKE8_64(d), 1); \ - } else { \ - if(d < (1LL<<16)) { \ - if(d < (1<<8)) { \ - /* unsigned 8 */ \ - unsigned char buf[2] = {0xcc, TAKE8_64(d)}; \ - msgpack_pack_append_buffer(x, buf, 2); \ - } else { \ - /* unsigned 16 */ \ - unsigned char buf[3]; \ - buf[0] = 0xcd; _msgpack_store16(&buf[1], (uint16_t)d); \ - msgpack_pack_append_buffer(x, buf, 3); \ - } \ - } else { \ - if(d < (1LL<<32)) { \ - /* unsigned 32 */ \ - unsigned char buf[5]; \ - buf[0] = 0xce; _msgpack_store32(&buf[1], (uint32_t)d); \ - msgpack_pack_append_buffer(x, buf, 5); \ - } else { \ - /* unsigned 64 */ \ - unsigned char buf[9]; \ - buf[0] = 0xcf; _msgpack_store64(&buf[1], d); \ - msgpack_pack_append_buffer(x, buf, 9); \ - } \ - } \ - } \ -} while(0) - - -static inline int msgpack_pack_uint8(msgpack_packer* x, uint8_t d) -{ - msgpack_pack_real_uint8(x, d); -} - -static inline int msgpack_pack_uint16(msgpack_packer* x, uint16_t d) -{ - msgpack_pack_real_uint16(x, d); -} - -static inline int msgpack_pack_uint32(msgpack_packer* x, uint32_t d) -{ - msgpack_pack_real_uint32(x, d); -} - -static inline int msgpack_pack_uint64(msgpack_packer* x, uint64_t d) -{ - msgpack_pack_real_uint64(x, d); -} - -static inline int msgpack_pack_int8(msgpack_packer* x, int8_t d) -{ - 
msgpack_pack_real_int8(x, d); -} - -static inline int msgpack_pack_int16(msgpack_packer* x, int16_t d) -{ - msgpack_pack_real_int16(x, d); -} - -static inline int msgpack_pack_int32(msgpack_packer* x, int32_t d) -{ - msgpack_pack_real_int32(x, d); -} - -static inline int msgpack_pack_int64(msgpack_packer* x, int64_t d) -{ - msgpack_pack_real_int64(x, d); -} - - -//#ifdef msgpack_pack_inline_func_cint - -static inline int msgpack_pack_short(msgpack_packer* x, short d) -{ -#if defined(SIZEOF_SHORT) -#if SIZEOF_SHORT == 2 - msgpack_pack_real_int16(x, d); -#elif SIZEOF_SHORT == 4 - msgpack_pack_real_int32(x, d); -#else - msgpack_pack_real_int64(x, d); -#endif - -#elif defined(SHRT_MAX) -#if SHRT_MAX == 0x7fff - msgpack_pack_real_int16(x, d); -#elif SHRT_MAX == 0x7fffffff - msgpack_pack_real_int32(x, d); -#else - msgpack_pack_real_int64(x, d); -#endif - -#else -if(sizeof(short) == 2) { - msgpack_pack_real_int16(x, d); -} else if(sizeof(short) == 4) { - msgpack_pack_real_int32(x, d); -} else { - msgpack_pack_real_int64(x, d); -} -#endif -} - -static inline int msgpack_pack_int(msgpack_packer* x, int d) -{ -#if defined(SIZEOF_INT) -#if SIZEOF_INT == 2 - msgpack_pack_real_int16(x, d); -#elif SIZEOF_INT == 4 - msgpack_pack_real_int32(x, d); -#else - msgpack_pack_real_int64(x, d); -#endif - -#elif defined(INT_MAX) -#if INT_MAX == 0x7fff - msgpack_pack_real_int16(x, d); -#elif INT_MAX == 0x7fffffff - msgpack_pack_real_int32(x, d); -#else - msgpack_pack_real_int64(x, d); -#endif - -#else -if(sizeof(int) == 2) { - msgpack_pack_real_int16(x, d); -} else if(sizeof(int) == 4) { - msgpack_pack_real_int32(x, d); -} else { - msgpack_pack_real_int64(x, d); -} -#endif -} - -static inline int msgpack_pack_long(msgpack_packer* x, long d) -{ -#if defined(SIZEOF_LONG) -#if SIZEOF_LONG == 2 - msgpack_pack_real_int16(x, d); -#elif SIZEOF_LONG == 4 - msgpack_pack_real_int32(x, d); -#else - msgpack_pack_real_int64(x, d); -#endif - -#elif defined(LONG_MAX) -#if LONG_MAX == 0x7fffL - msgpack_pack_real_int16(x, d); -#elif LONG_MAX == 0x7fffffffL - msgpack_pack_real_int32(x, d); -#else - msgpack_pack_real_int64(x, d); -#endif - -#else -if(sizeof(long) == 2) { - msgpack_pack_real_int16(x, d); -} else if(sizeof(long) == 4) { - msgpack_pack_real_int32(x, d); -} else { - msgpack_pack_real_int64(x, d); -} -#endif -} - -static inline int msgpack_pack_long_long(msgpack_packer* x, long long d) -{ -#if defined(SIZEOF_LONG_LONG) -#if SIZEOF_LONG_LONG == 2 - msgpack_pack_real_int16(x, d); -#elif SIZEOF_LONG_LONG == 4 - msgpack_pack_real_int32(x, d); -#else - msgpack_pack_real_int64(x, d); -#endif - -#elif defined(LLONG_MAX) -#if LLONG_MAX == 0x7fffL - msgpack_pack_real_int16(x, d); -#elif LLONG_MAX == 0x7fffffffL - msgpack_pack_real_int32(x, d); -#else - msgpack_pack_real_int64(x, d); -#endif - -#else -if(sizeof(long long) == 2) { - msgpack_pack_real_int16(x, d); -} else if(sizeof(long long) == 4) { - msgpack_pack_real_int32(x, d); -} else { - msgpack_pack_real_int64(x, d); -} -#endif -} - -static inline int msgpack_pack_unsigned_short(msgpack_packer* x, unsigned short d) -{ -#if defined(SIZEOF_SHORT) -#if SIZEOF_SHORT == 2 - msgpack_pack_real_uint16(x, d); -#elif SIZEOF_SHORT == 4 - msgpack_pack_real_uint32(x, d); -#else - msgpack_pack_real_uint64(x, d); -#endif - -#elif defined(USHRT_MAX) -#if USHRT_MAX == 0xffffU - msgpack_pack_real_uint16(x, d); -#elif USHRT_MAX == 0xffffffffU - msgpack_pack_real_uint32(x, d); -#else - msgpack_pack_real_uint64(x, d); -#endif - -#else -if(sizeof(unsigned short) == 2) { - 
msgpack_pack_real_uint16(x, d); -} else if(sizeof(unsigned short) == 4) { - msgpack_pack_real_uint32(x, d); -} else { - msgpack_pack_real_uint64(x, d); -} -#endif -} - -static inline int msgpack_pack_unsigned_int(msgpack_packer* x, unsigned int d) -{ -#if defined(SIZEOF_INT) -#if SIZEOF_INT == 2 - msgpack_pack_real_uint16(x, d); -#elif SIZEOF_INT == 4 - msgpack_pack_real_uint32(x, d); -#else - msgpack_pack_real_uint64(x, d); -#endif - -#elif defined(UINT_MAX) -#if UINT_MAX == 0xffffU - msgpack_pack_real_uint16(x, d); -#elif UINT_MAX == 0xffffffffU - msgpack_pack_real_uint32(x, d); -#else - msgpack_pack_real_uint64(x, d); -#endif - -#else -if(sizeof(unsigned int) == 2) { - msgpack_pack_real_uint16(x, d); -} else if(sizeof(unsigned int) == 4) { - msgpack_pack_real_uint32(x, d); -} else { - msgpack_pack_real_uint64(x, d); -} -#endif -} - -static inline int msgpack_pack_unsigned_long(msgpack_packer* x, unsigned long d) -{ -#if defined(SIZEOF_LONG) -#if SIZEOF_LONG == 2 - msgpack_pack_real_uint16(x, d); -#elif SIZEOF_LONG == 4 - msgpack_pack_real_uint32(x, d); -#else - msgpack_pack_real_uint64(x, d); -#endif - -#elif defined(ULONG_MAX) -#if ULONG_MAX == 0xffffUL - msgpack_pack_real_uint16(x, d); -#elif ULONG_MAX == 0xffffffffUL - msgpack_pack_real_uint32(x, d); -#else - msgpack_pack_real_uint64(x, d); -#endif - -#else -if(sizeof(unsigned long) == 2) { - msgpack_pack_real_uint16(x, d); -} else if(sizeof(unsigned long) == 4) { - msgpack_pack_real_uint32(x, d); -} else { - msgpack_pack_real_uint64(x, d); -} -#endif -} - -static inline int msgpack_pack_unsigned_long_long(msgpack_packer* x, unsigned long long d) -{ -#if defined(SIZEOF_LONG_LONG) -#if SIZEOF_LONG_LONG == 2 - msgpack_pack_real_uint16(x, d); -#elif SIZEOF_LONG_LONG == 4 - msgpack_pack_real_uint32(x, d); -#else - msgpack_pack_real_uint64(x, d); -#endif - -#elif defined(ULLONG_MAX) -#if ULLONG_MAX == 0xffffUL - msgpack_pack_real_uint16(x, d); -#elif ULLONG_MAX == 0xffffffffUL - msgpack_pack_real_uint32(x, d); -#else - msgpack_pack_real_uint64(x, d); -#endif - -#else -if(sizeof(unsigned long long) == 2) { - msgpack_pack_real_uint16(x, d); -} else if(sizeof(unsigned long long) == 4) { - msgpack_pack_real_uint32(x, d); -} else { - msgpack_pack_real_uint64(x, d); -} -#endif -} - -//#undef msgpack_pack_inline_func_cint -//#endif - - - -/* - * Float - */ - -static inline int msgpack_pack_float(msgpack_packer* x, float d) -{ - unsigned char buf[5]; - buf[0] = 0xca; - _PyFloat_Pack4(d, &buf[1], 0); - msgpack_pack_append_buffer(x, buf, 5); -} - -static inline int msgpack_pack_double(msgpack_packer* x, double d) -{ - unsigned char buf[9]; - buf[0] = 0xcb; - _PyFloat_Pack8(d, &buf[1], 0); - msgpack_pack_append_buffer(x, buf, 9); -} - - -/* - * Nil - */ - -static inline int msgpack_pack_nil(msgpack_packer* x) -{ - static const unsigned char d = 0xc0; - msgpack_pack_append_buffer(x, &d, 1); -} - - -/* - * Boolean - */ - -static inline int msgpack_pack_true(msgpack_packer* x) -{ - static const unsigned char d = 0xc3; - msgpack_pack_append_buffer(x, &d, 1); -} - -static inline int msgpack_pack_false(msgpack_packer* x) -{ - static const unsigned char d = 0xc2; - msgpack_pack_append_buffer(x, &d, 1); -} - - -/* - * Array - */ - -static inline int msgpack_pack_array(msgpack_packer* x, unsigned int n) -{ - if(n < 16) { - unsigned char d = 0x90 | n; - msgpack_pack_append_buffer(x, &d, 1); - } else if(n < 65536) { - unsigned char buf[3]; - buf[0] = 0xdc; _msgpack_store16(&buf[1], (uint16_t)n); - msgpack_pack_append_buffer(x, buf, 3); - } else { - unsigned 
char buf[5]; - buf[0] = 0xdd; _msgpack_store32(&buf[1], (uint32_t)n); - msgpack_pack_append_buffer(x, buf, 5); - } -} - - -/* - * Map - */ - -static inline int msgpack_pack_map(msgpack_packer* x, unsigned int n) -{ - if(n < 16) { - unsigned char d = 0x80 | n; - msgpack_pack_append_buffer(x, &TAKE8_8(d), 1); - } else if(n < 65536) { - unsigned char buf[3]; - buf[0] = 0xde; _msgpack_store16(&buf[1], (uint16_t)n); - msgpack_pack_append_buffer(x, buf, 3); - } else { - unsigned char buf[5]; - buf[0] = 0xdf; _msgpack_store32(&buf[1], (uint32_t)n); - msgpack_pack_append_buffer(x, buf, 5); - } -} - - -/* - * Raw - */ - -static inline int msgpack_pack_raw(msgpack_packer* x, size_t l) -{ - if (l < 32) { - unsigned char d = 0xa0 | (uint8_t)l; - msgpack_pack_append_buffer(x, &TAKE8_8(d), 1); - } else if (x->use_bin_type && l < 256) { // str8 is new format introduced with bin. - unsigned char buf[2] = {0xd9, (uint8_t)l}; - msgpack_pack_append_buffer(x, buf, 2); - } else if (l < 65536) { - unsigned char buf[3]; - buf[0] = 0xda; _msgpack_store16(&buf[1], (uint16_t)l); - msgpack_pack_append_buffer(x, buf, 3); - } else { - unsigned char buf[5]; - buf[0] = 0xdb; _msgpack_store32(&buf[1], (uint32_t)l); - msgpack_pack_append_buffer(x, buf, 5); - } -} - -/* - * bin - */ -static inline int msgpack_pack_bin(msgpack_packer *x, size_t l) -{ - if (!x->use_bin_type) { - return msgpack_pack_raw(x, l); - } - if (l < 256) { - unsigned char buf[2] = {0xc4, (unsigned char)l}; - msgpack_pack_append_buffer(x, buf, 2); - } else if (l < 65536) { - unsigned char buf[3] = {0xc5}; - _msgpack_store16(&buf[1], (uint16_t)l); - msgpack_pack_append_buffer(x, buf, 3); - } else { - unsigned char buf[5] = {0xc6}; - _msgpack_store32(&buf[1], (uint32_t)l); - msgpack_pack_append_buffer(x, buf, 5); - } -} - -static inline int msgpack_pack_raw_body(msgpack_packer* x, const void* b, size_t l) -{ - if (l > 0) msgpack_pack_append_buffer(x, (const unsigned char*)b, l); - return 0; -} - -/* - * Ext - */ -static inline int msgpack_pack_ext(msgpack_packer* x, char typecode, size_t l) -{ - if (l == 1) { - unsigned char buf[2]; - buf[0] = 0xd4; - buf[1] = (unsigned char)typecode; - msgpack_pack_append_buffer(x, buf, 2); - } - else if(l == 2) { - unsigned char buf[2]; - buf[0] = 0xd5; - buf[1] = (unsigned char)typecode; - msgpack_pack_append_buffer(x, buf, 2); - } - else if(l == 4) { - unsigned char buf[2]; - buf[0] = 0xd6; - buf[1] = (unsigned char)typecode; - msgpack_pack_append_buffer(x, buf, 2); - } - else if(l == 8) { - unsigned char buf[2]; - buf[0] = 0xd7; - buf[1] = (unsigned char)typecode; - msgpack_pack_append_buffer(x, buf, 2); - } - else if(l == 16) { - unsigned char buf[2]; - buf[0] = 0xd8; - buf[1] = (unsigned char)typecode; - msgpack_pack_append_buffer(x, buf, 2); - } - else if(l < 256) { - unsigned char buf[3]; - buf[0] = 0xc7; - buf[1] = l; - buf[2] = (unsigned char)typecode; - msgpack_pack_append_buffer(x, buf, 3); - } else if(l < 65536) { - unsigned char buf[4]; - buf[0] = 0xc8; - _msgpack_store16(&buf[1], (uint16_t)l); - buf[3] = (unsigned char)typecode; - msgpack_pack_append_buffer(x, buf, 4); - } else { - unsigned char buf[6]; - buf[0] = 0xc9; - _msgpack_store32(&buf[1], (uint32_t)l); - buf[5] = (unsigned char)typecode; - msgpack_pack_append_buffer(x, buf, 6); - } - -} - - - -#undef msgpack_pack_append_buffer - -#undef TAKE8_8 -#undef TAKE8_16 -#undef TAKE8_32 -#undef TAKE8_64 - -#undef msgpack_pack_real_uint8 -#undef msgpack_pack_real_uint16 -#undef msgpack_pack_real_uint32 -#undef msgpack_pack_real_uint64 -#undef 
msgpack_pack_real_int8 -#undef msgpack_pack_real_int16 -#undef msgpack_pack_real_int32 -#undef msgpack_pack_real_int64 diff --git a/ddtrace/vendor/msgpack/sysdep.h b/ddtrace/vendor/msgpack/sysdep.h deleted file mode 100644 index ed9c1bc0..00000000 --- a/ddtrace/vendor/msgpack/sysdep.h +++ /dev/null @@ -1,194 +0,0 @@ -/* - * MessagePack system dependencies - * - * Copyright (C) 2008-2010 FURUHASHI Sadayuki - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -#ifndef MSGPACK_SYSDEP_H__ -#define MSGPACK_SYSDEP_H__ - -#include -#include -#if defined(_MSC_VER) && _MSC_VER < 1600 -typedef __int8 int8_t; -typedef unsigned __int8 uint8_t; -typedef __int16 int16_t; -typedef unsigned __int16 uint16_t; -typedef __int32 int32_t; -typedef unsigned __int32 uint32_t; -typedef __int64 int64_t; -typedef unsigned __int64 uint64_t; -#elif defined(_MSC_VER) // && _MSC_VER >= 1600 -#include -#else -#include -#include -#endif - -#ifdef _WIN32 -#define _msgpack_atomic_counter_header -typedef long _msgpack_atomic_counter_t; -#define _msgpack_sync_decr_and_fetch(ptr) InterlockedDecrement(ptr) -#define _msgpack_sync_incr_and_fetch(ptr) InterlockedIncrement(ptr) -#elif defined(__GNUC__) && ((__GNUC__*10 + __GNUC_MINOR__) < 41) -#define _msgpack_atomic_counter_header "gcc_atomic.h" -#else -typedef unsigned int _msgpack_atomic_counter_t; -#define _msgpack_sync_decr_and_fetch(ptr) __sync_sub_and_fetch(ptr, 1) -#define _msgpack_sync_incr_and_fetch(ptr) __sync_add_and_fetch(ptr, 1) -#endif - -#ifdef _WIN32 - -#ifdef __cplusplus -/* numeric_limits::min,max */ -#ifdef max -#undef max -#endif -#ifdef min -#undef min -#endif -#endif - -#else -#include /* __BYTE_ORDER */ -#endif - -#if !defined(__LITTLE_ENDIAN__) && !defined(__BIG_ENDIAN__) -#if __BYTE_ORDER == __LITTLE_ENDIAN -#define __LITTLE_ENDIAN__ -#elif __BYTE_ORDER == __BIG_ENDIAN -#define __BIG_ENDIAN__ -#elif _WIN32 -#define __LITTLE_ENDIAN__ -#endif -#endif - - -#ifdef __LITTLE_ENDIAN__ - -#ifdef _WIN32 -# if defined(ntohs) -# define _msgpack_be16(x) ntohs(x) -# elif defined(_byteswap_ushort) || (defined(_MSC_VER) && _MSC_VER >= 1400) -# define _msgpack_be16(x) ((uint16_t)_byteswap_ushort((unsigned short)x)) -# else -# define _msgpack_be16(x) ( \ - ((((uint16_t)x) << 8) ) | \ - ((((uint16_t)x) >> 8) ) ) -# endif -#else -# define _msgpack_be16(x) ntohs(x) -#endif - -#ifdef _WIN32 -# if defined(ntohl) -# define _msgpack_be32(x) ntohl(x) -# elif defined(_byteswap_ulong) || (defined(_MSC_VER) && _MSC_VER >= 1400) -# define _msgpack_be32(x) ((uint32_t)_byteswap_ulong((unsigned long)x)) -# else -# define _msgpack_be32(x) \ - ( ((((uint32_t)x) << 24) ) | \ - ((((uint32_t)x) << 8) & 0x00ff0000U ) | \ - ((((uint32_t)x) >> 8) & 0x0000ff00U ) | \ - ((((uint32_t)x) >> 24) ) ) -# endif -#else -# define _msgpack_be32(x) ntohl(x) -#endif - -#if defined(_byteswap_uint64) || (defined(_MSC_VER) && _MSC_VER >= 1400) -# define _msgpack_be64(x) (_byteswap_uint64(x)) -#elif defined(bswap_64) -# define _msgpack_be64(x) bswap_64(x) -#elif defined(__DARWIN_OSSwapInt64) -# define 
_msgpack_be64(x) __DARWIN_OSSwapInt64(x) -#else -#define _msgpack_be64(x) \ - ( ((((uint64_t)x) << 56) ) | \ - ((((uint64_t)x) << 40) & 0x00ff000000000000ULL ) | \ - ((((uint64_t)x) << 24) & 0x0000ff0000000000ULL ) | \ - ((((uint64_t)x) << 8) & 0x000000ff00000000ULL ) | \ - ((((uint64_t)x) >> 8) & 0x00000000ff000000ULL ) | \ - ((((uint64_t)x) >> 24) & 0x0000000000ff0000ULL ) | \ - ((((uint64_t)x) >> 40) & 0x000000000000ff00ULL ) | \ - ((((uint64_t)x) >> 56) ) ) -#endif - -#define _msgpack_load16(cast, from) ((cast)( \ - (((uint16_t)((uint8_t*)(from))[0]) << 8) | \ - (((uint16_t)((uint8_t*)(from))[1]) ) )) - -#define _msgpack_load32(cast, from) ((cast)( \ - (((uint32_t)((uint8_t*)(from))[0]) << 24) | \ - (((uint32_t)((uint8_t*)(from))[1]) << 16) | \ - (((uint32_t)((uint8_t*)(from))[2]) << 8) | \ - (((uint32_t)((uint8_t*)(from))[3]) ) )) - -#define _msgpack_load64(cast, from) ((cast)( \ - (((uint64_t)((uint8_t*)(from))[0]) << 56) | \ - (((uint64_t)((uint8_t*)(from))[1]) << 48) | \ - (((uint64_t)((uint8_t*)(from))[2]) << 40) | \ - (((uint64_t)((uint8_t*)(from))[3]) << 32) | \ - (((uint64_t)((uint8_t*)(from))[4]) << 24) | \ - (((uint64_t)((uint8_t*)(from))[5]) << 16) | \ - (((uint64_t)((uint8_t*)(from))[6]) << 8) | \ - (((uint64_t)((uint8_t*)(from))[7]) ) )) - -#else - -#define _msgpack_be16(x) (x) -#define _msgpack_be32(x) (x) -#define _msgpack_be64(x) (x) - -#define _msgpack_load16(cast, from) ((cast)( \ - (((uint16_t)((uint8_t*)from)[0]) << 8) | \ - (((uint16_t)((uint8_t*)from)[1]) ) )) - -#define _msgpack_load32(cast, from) ((cast)( \ - (((uint32_t)((uint8_t*)from)[0]) << 24) | \ - (((uint32_t)((uint8_t*)from)[1]) << 16) | \ - (((uint32_t)((uint8_t*)from)[2]) << 8) | \ - (((uint32_t)((uint8_t*)from)[3]) ) )) - -#define _msgpack_load64(cast, from) ((cast)( \ - (((uint64_t)((uint8_t*)from)[0]) << 56) | \ - (((uint64_t)((uint8_t*)from)[1]) << 48) | \ - (((uint64_t)((uint8_t*)from)[2]) << 40) | \ - (((uint64_t)((uint8_t*)from)[3]) << 32) | \ - (((uint64_t)((uint8_t*)from)[4]) << 24) | \ - (((uint64_t)((uint8_t*)from)[5]) << 16) | \ - (((uint64_t)((uint8_t*)from)[6]) << 8) | \ - (((uint64_t)((uint8_t*)from)[7]) ) )) -#endif - - -#define _msgpack_store16(to, num) \ - do { uint16_t val = _msgpack_be16(num); memcpy(to, &val, 2); } while(0) -#define _msgpack_store32(to, num) \ - do { uint32_t val = _msgpack_be32(num); memcpy(to, &val, 4); } while(0) -#define _msgpack_store64(to, num) \ - do { uint64_t val = _msgpack_be64(num); memcpy(to, &val, 8); } while(0) - -/* -#define _msgpack_load16(cast, from) \ - ({ cast val; memcpy(&val, (char*)from, 2); _msgpack_be16(val); }) -#define _msgpack_load32(cast, from) \ - ({ cast val; memcpy(&val, (char*)from, 4); _msgpack_be32(val); }) -#define _msgpack_load64(cast, from) \ - ({ cast val; memcpy(&val, (char*)from, 8); _msgpack_be64(val); }) -*/ - - -#endif /* msgpack/sysdep.h */ diff --git a/ddtrace/vendor/msgpack/unpack.h b/ddtrace/vendor/msgpack/unpack.h deleted file mode 100644 index 85dbbed5..00000000 --- a/ddtrace/vendor/msgpack/unpack.h +++ /dev/null @@ -1,287 +0,0 @@ -/* - * MessagePack for Python unpacking routine - * - * Copyright (C) 2009 Naoki INADA - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define MSGPACK_EMBED_STACK_SIZE (1024) -#include "unpack_define.h" - -typedef struct unpack_user { - bool use_list; - bool raw; - bool has_pairs_hook; - bool strict_map_key; - PyObject *object_hook; - PyObject *list_hook; - PyObject *ext_hook; - const char *encoding; - const char *unicode_errors; - Py_ssize_t max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len; -} unpack_user; - -typedef PyObject* msgpack_unpack_object; -struct unpack_context; -typedef struct unpack_context unpack_context; -typedef int (*execute_fn)(unpack_context *ctx, const char* data, Py_ssize_t len, Py_ssize_t* off); - -static inline msgpack_unpack_object unpack_callback_root(unpack_user* u) -{ - return NULL; -} - -static inline int unpack_callback_uint16(unpack_user* u, uint16_t d, msgpack_unpack_object* o) -{ - PyObject *p = PyInt_FromLong((long)d); - if (!p) - return -1; - *o = p; - return 0; -} -static inline int unpack_callback_uint8(unpack_user* u, uint8_t d, msgpack_unpack_object* o) -{ - return unpack_callback_uint16(u, d, o); -} - - -static inline int unpack_callback_uint32(unpack_user* u, uint32_t d, msgpack_unpack_object* o) -{ - PyObject *p = PyInt_FromSize_t((size_t)d); - if (!p) - return -1; - *o = p; - return 0; -} - -static inline int unpack_callback_uint64(unpack_user* u, uint64_t d, msgpack_unpack_object* o) -{ - PyObject *p; - if (d > LONG_MAX) { - p = PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG)d); - } else { - p = PyInt_FromLong((long)d); - } - if (!p) - return -1; - *o = p; - return 0; -} - -static inline int unpack_callback_int32(unpack_user* u, int32_t d, msgpack_unpack_object* o) -{ - PyObject *p = PyInt_FromLong(d); - if (!p) - return -1; - *o = p; - return 0; -} - -static inline int unpack_callback_int16(unpack_user* u, int16_t d, msgpack_unpack_object* o) -{ - return unpack_callback_int32(u, d, o); -} - -static inline int unpack_callback_int8(unpack_user* u, int8_t d, msgpack_unpack_object* o) -{ - return unpack_callback_int32(u, d, o); -} - -static inline int unpack_callback_int64(unpack_user* u, int64_t d, msgpack_unpack_object* o) -{ - PyObject *p; - if (d > LONG_MAX || d < LONG_MIN) { - p = PyLong_FromLongLong((PY_LONG_LONG)d); - } else { - p = PyInt_FromLong((long)d); - } - *o = p; - return 0; -} - -static inline int unpack_callback_double(unpack_user* u, double d, msgpack_unpack_object* o) -{ - PyObject *p = PyFloat_FromDouble(d); - if (!p) - return -1; - *o = p; - return 0; -} - -static inline int unpack_callback_float(unpack_user* u, float d, msgpack_unpack_object* o) -{ - return unpack_callback_double(u, d, o); -} - -static inline int unpack_callback_nil(unpack_user* u, msgpack_unpack_object* o) -{ Py_INCREF(Py_None); *o = Py_None; return 0; } - -static inline int unpack_callback_true(unpack_user* u, msgpack_unpack_object* o) -{ Py_INCREF(Py_True); *o = Py_True; return 0; } - -static inline int unpack_callback_false(unpack_user* u, msgpack_unpack_object* o) -{ Py_INCREF(Py_False); *o = Py_False; return 0; } - -static inline int unpack_callback_array(unpack_user* u, unsigned int n, msgpack_unpack_object* o) -{ - if (n > u->max_array_len) 
{ - PyErr_Format(PyExc_ValueError, "%u exceeds max_array_len(%zd)", n, u->max_array_len); - return -1; - } - PyObject *p = u->use_list ? PyList_New(n) : PyTuple_New(n); - - if (!p) - return -1; - *o = p; - return 0; -} - -static inline int unpack_callback_array_item(unpack_user* u, unsigned int current, msgpack_unpack_object* c, msgpack_unpack_object o) -{ - if (u->use_list) - PyList_SET_ITEM(*c, current, o); - else - PyTuple_SET_ITEM(*c, current, o); - return 0; -} - -static inline int unpack_callback_array_end(unpack_user* u, msgpack_unpack_object* c) -{ - if (u->list_hook) { - PyObject *new_c = PyObject_CallFunctionObjArgs(u->list_hook, *c, NULL); - if (!new_c) - return -1; - Py_DECREF(*c); - *c = new_c; - } - return 0; -} - -static inline int unpack_callback_map(unpack_user* u, unsigned int n, msgpack_unpack_object* o) -{ - if (n > u->max_map_len) { - PyErr_Format(PyExc_ValueError, "%u exceeds max_map_len(%zd)", n, u->max_map_len); - return -1; - } - PyObject *p; - if (u->has_pairs_hook) { - p = PyList_New(n); // Or use tuple? - } - else { - p = PyDict_New(); - } - if (!p) - return -1; - *o = p; - return 0; -} - -static inline int unpack_callback_map_item(unpack_user* u, unsigned int current, msgpack_unpack_object* c, msgpack_unpack_object k, msgpack_unpack_object v) -{ - if (u->strict_map_key && !PyUnicode_CheckExact(k) && !PyBytes_CheckExact(k)) { - PyErr_Format(PyExc_ValueError, "%.100s is not allowed for map key", Py_TYPE(k)->tp_name); - return -1; - } - if (u->has_pairs_hook) { - msgpack_unpack_object item = PyTuple_Pack(2, k, v); - if (!item) - return -1; - Py_DECREF(k); - Py_DECREF(v); - PyList_SET_ITEM(*c, current, item); - return 0; - } - else if (PyDict_SetItem(*c, k, v) == 0) { - Py_DECREF(k); - Py_DECREF(v); - return 0; - } - return -1; -} - -static inline int unpack_callback_map_end(unpack_user* u, msgpack_unpack_object* c) -{ - if (u->object_hook) { - PyObject *new_c = PyObject_CallFunctionObjArgs(u->object_hook, *c, NULL); - if (!new_c) - return -1; - - Py_DECREF(*c); - *c = new_c; - } - return 0; -} - -static inline int unpack_callback_raw(unpack_user* u, const char* b, const char* p, unsigned int l, msgpack_unpack_object* o) -{ - if (l > u->max_str_len) { - PyErr_Format(PyExc_ValueError, "%u exceeds max_str_len(%zd)", l, u->max_str_len); - return -1; - } - - PyObject *py; - - if (u->encoding) { - py = PyUnicode_Decode(p, l, u->encoding, u->unicode_errors); - } else if (u->raw) { - py = PyBytes_FromStringAndSize(p, l); - } else { - py = PyUnicode_DecodeUTF8(p, l, u->unicode_errors); - } - if (!py) - return -1; - *o = py; - return 0; -} - -static inline int unpack_callback_bin(unpack_user* u, const char* b, const char* p, unsigned int l, msgpack_unpack_object* o) -{ - if (l > u->max_bin_len) { - PyErr_Format(PyExc_ValueError, "%u exceeds max_bin_len(%zd)", l, u->max_bin_len); - return -1; - } - - PyObject *py = PyBytes_FromStringAndSize(p, l); - if (!py) - return -1; - *o = py; - return 0; -} - -static inline int unpack_callback_ext(unpack_user* u, const char* base, const char* pos, - unsigned int length, msgpack_unpack_object* o) -{ - PyObject *py; - int8_t typecode = (int8_t)*pos++; - if (!u->ext_hook) { - PyErr_SetString(PyExc_AssertionError, "u->ext_hook cannot be NULL"); - return -1; - } - if (length-1 > u->max_ext_len) { - PyErr_Format(PyExc_ValueError, "%u exceeds max_ext_len(%zd)", length, u->max_ext_len); - return -1; - } - // length also includes the typecode, so the actual data is length-1 -#if PY_MAJOR_VERSION == 2 - py = PyObject_CallFunction(u->ext_hook, 
"(is#)", (int)typecode, pos, (Py_ssize_t)length-1); -#else - py = PyObject_CallFunction(u->ext_hook, "(iy#)", (int)typecode, pos, (Py_ssize_t)length-1); -#endif - if (!py) - return -1; - *o = py; - return 0; -} - -#include "unpack_template.h" diff --git a/ddtrace/vendor/msgpack/unpack_define.h b/ddtrace/vendor/msgpack/unpack_define.h deleted file mode 100644 index 0dd708d1..00000000 --- a/ddtrace/vendor/msgpack/unpack_define.h +++ /dev/null @@ -1,95 +0,0 @@ -/* - * MessagePack unpacking routine template - * - * Copyright (C) 2008-2010 FURUHASHI Sadayuki - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -#ifndef MSGPACK_UNPACK_DEFINE_H__ -#define MSGPACK_UNPACK_DEFINE_H__ - -#include "msgpack/sysdep.h" -#include -#include -#include -#include - -#ifdef __cplusplus -extern "C" { -#endif - - -#ifndef MSGPACK_EMBED_STACK_SIZE -#define MSGPACK_EMBED_STACK_SIZE 32 -#endif - - -// CS is first byte & 0x1f -typedef enum { - CS_HEADER = 0x00, // nil - - //CS_ = 0x01, - //CS_ = 0x02, // false - //CS_ = 0x03, // true - - CS_BIN_8 = 0x04, - CS_BIN_16 = 0x05, - CS_BIN_32 = 0x06, - - CS_EXT_8 = 0x07, - CS_EXT_16 = 0x08, - CS_EXT_32 = 0x09, - - CS_FLOAT = 0x0a, - CS_DOUBLE = 0x0b, - CS_UINT_8 = 0x0c, - CS_UINT_16 = 0x0d, - CS_UINT_32 = 0x0e, - CS_UINT_64 = 0x0f, - CS_INT_8 = 0x10, - CS_INT_16 = 0x11, - CS_INT_32 = 0x12, - CS_INT_64 = 0x13, - - //CS_FIXEXT1 = 0x14, - //CS_FIXEXT2 = 0x15, - //CS_FIXEXT4 = 0x16, - //CS_FIXEXT8 = 0x17, - //CS_FIXEXT16 = 0x18, - - CS_RAW_8 = 0x19, - CS_RAW_16 = 0x1a, - CS_RAW_32 = 0x1b, - CS_ARRAY_16 = 0x1c, - CS_ARRAY_32 = 0x1d, - CS_MAP_16 = 0x1e, - CS_MAP_32 = 0x1f, - - ACS_RAW_VALUE, - ACS_BIN_VALUE, - ACS_EXT_VALUE, -} msgpack_unpack_state; - - -typedef enum { - CT_ARRAY_ITEM, - CT_MAP_KEY, - CT_MAP_VALUE, -} msgpack_container_type; - - -#ifdef __cplusplus -} -#endif - -#endif /* msgpack/unpack_define.h */ diff --git a/ddtrace/vendor/msgpack/unpack_template.h b/ddtrace/vendor/msgpack/unpack_template.h deleted file mode 100644 index 9924b9c6..00000000 --- a/ddtrace/vendor/msgpack/unpack_template.h +++ /dev/null @@ -1,454 +0,0 @@ -/* - * MessagePack unpacking routine template - * - * Copyright (C) 2008-2010 FURUHASHI Sadayuki - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef USE_CASE_RANGE -#if !defined(_MSC_VER) -#define USE_CASE_RANGE -#endif -#endif - -typedef struct unpack_stack { - PyObject* obj; - Py_ssize_t size; - Py_ssize_t count; - unsigned int ct; - PyObject* map_key; -} unpack_stack; - -struct unpack_context { - unpack_user user; - unsigned int cs; - unsigned int trail; - unsigned int top; - /* - unpack_stack* stack; - unsigned int stack_size; - unpack_stack embed_stack[MSGPACK_EMBED_STACK_SIZE]; - */ - unpack_stack stack[MSGPACK_EMBED_STACK_SIZE]; -}; - - -static inline void unpack_init(unpack_context* ctx) -{ - ctx->cs = CS_HEADER; - ctx->trail = 0; - ctx->top = 0; - /* - ctx->stack = ctx->embed_stack; - ctx->stack_size = MSGPACK_EMBED_STACK_SIZE; - */ - ctx->stack[0].obj = unpack_callback_root(&ctx->user); -} - -/* -static inline void unpack_destroy(unpack_context* ctx) -{ - if(ctx->stack_size != MSGPACK_EMBED_STACK_SIZE) { - free(ctx->stack); - } -} -*/ - -static inline PyObject* unpack_data(unpack_context* ctx) -{ - return (ctx)->stack[0].obj; -} - -static inline void unpack_clear(unpack_context *ctx) -{ - Py_CLEAR(ctx->stack[0].obj); -} - -template -static inline int unpack_execute(unpack_context* ctx, const char* data, Py_ssize_t len, Py_ssize_t* off) -{ - assert(len >= *off); - - const unsigned char* p = (unsigned char*)data + *off; - const unsigned char* const pe = (unsigned char*)data + len; - const void* n = p; - - unsigned int trail = ctx->trail; - unsigned int cs = ctx->cs; - unsigned int top = ctx->top; - unpack_stack* stack = ctx->stack; - /* - unsigned int stack_size = ctx->stack_size; - */ - unpack_user* user = &ctx->user; - - PyObject* obj = NULL; - unpack_stack* c = NULL; - - int ret; - -#define construct_cb(name) \ - construct && unpack_callback ## name - -#define push_simple_value(func) \ - if(construct_cb(func)(user, &obj) < 0) { goto _failed; } \ - goto _push -#define push_fixed_value(func, arg) \ - if(construct_cb(func)(user, arg, &obj) < 0) { goto _failed; } \ - goto _push -#define push_variable_value(func, base, pos, len) \ - if(construct_cb(func)(user, \ - (const char*)base, (const char*)pos, len, &obj) < 0) { goto _failed; } \ - goto _push - -#define again_fixed_trail(_cs, trail_len) \ - trail = trail_len; \ - cs = _cs; \ - goto _fixed_trail_again -#define again_fixed_trail_if_zero(_cs, trail_len, ifzero) \ - trail = trail_len; \ - if(trail == 0) { goto ifzero; } \ - cs = _cs; \ - goto _fixed_trail_again - -#define start_container(func, count_, ct_) \ - if(top >= MSGPACK_EMBED_STACK_SIZE) { ret = -3; goto _end; } \ - if(construct_cb(func)(user, count_, &stack[top].obj) < 0) { goto _failed; } \ - if((count_) == 0) { obj = stack[top].obj; \ - if (construct_cb(func##_end)(user, &obj) < 0) { goto _failed; } \ - goto _push; } \ - stack[top].ct = ct_; \ - stack[top].size = count_; \ - stack[top].count = 0; \ - ++top; \ - goto _header_again - -#define NEXT_CS(p) ((unsigned int)*p & 0x1f) - -#ifdef USE_CASE_RANGE -#define SWITCH_RANGE_BEGIN switch(*p) { -#define SWITCH_RANGE(FROM, TO) case FROM ... 
TO: -#define SWITCH_RANGE_DEFAULT default: -#define SWITCH_RANGE_END } -#else -#define SWITCH_RANGE_BEGIN { if(0) { -#define SWITCH_RANGE(FROM, TO) } else if(FROM <= *p && *p <= TO) { -#define SWITCH_RANGE_DEFAULT } else { -#define SWITCH_RANGE_END } } -#endif - - if(p == pe) { goto _out; } - do { - switch(cs) { - case CS_HEADER: - SWITCH_RANGE_BEGIN - SWITCH_RANGE(0x00, 0x7f) // Positive Fixnum - push_fixed_value(_uint8, *(uint8_t*)p); - SWITCH_RANGE(0xe0, 0xff) // Negative Fixnum - push_fixed_value(_int8, *(int8_t*)p); - SWITCH_RANGE(0xc0, 0xdf) // Variable - switch(*p) { - case 0xc0: // nil - push_simple_value(_nil); - //case 0xc1: // never used - case 0xc2: // false - push_simple_value(_false); - case 0xc3: // true - push_simple_value(_true); - case 0xc4: // bin 8 - again_fixed_trail(NEXT_CS(p), 1); - case 0xc5: // bin 16 - again_fixed_trail(NEXT_CS(p), 2); - case 0xc6: // bin 32 - again_fixed_trail(NEXT_CS(p), 4); - case 0xc7: // ext 8 - again_fixed_trail(NEXT_CS(p), 1); - case 0xc8: // ext 16 - again_fixed_trail(NEXT_CS(p), 2); - case 0xc9: // ext 32 - again_fixed_trail(NEXT_CS(p), 4); - case 0xca: // float - case 0xcb: // double - case 0xcc: // unsigned int 8 - case 0xcd: // unsigned int 16 - case 0xce: // unsigned int 32 - case 0xcf: // unsigned int 64 - case 0xd0: // signed int 8 - case 0xd1: // signed int 16 - case 0xd2: // signed int 32 - case 0xd3: // signed int 64 - again_fixed_trail(NEXT_CS(p), 1 << (((unsigned int)*p) & 0x03)); - case 0xd4: // fixext 1 - case 0xd5: // fixext 2 - case 0xd6: // fixext 4 - case 0xd7: // fixext 8 - again_fixed_trail_if_zero(ACS_EXT_VALUE, - (1 << (((unsigned int)*p) & 0x03))+1, - _ext_zero); - case 0xd8: // fixext 16 - again_fixed_trail_if_zero(ACS_EXT_VALUE, 16+1, _ext_zero); - case 0xd9: // str 8 - again_fixed_trail(NEXT_CS(p), 1); - case 0xda: // raw 16 - case 0xdb: // raw 32 - case 0xdc: // array 16 - case 0xdd: // array 32 - case 0xde: // map 16 - case 0xdf: // map 32 - again_fixed_trail(NEXT_CS(p), 2 << (((unsigned int)*p) & 0x01)); - default: - ret = -2; - goto _end; - } - SWITCH_RANGE(0xa0, 0xbf) // FixRaw - again_fixed_trail_if_zero(ACS_RAW_VALUE, ((unsigned int)*p & 0x1f), _raw_zero); - SWITCH_RANGE(0x90, 0x9f) // FixArray - start_container(_array, ((unsigned int)*p) & 0x0f, CT_ARRAY_ITEM); - SWITCH_RANGE(0x80, 0x8f) // FixMap - start_container(_map, ((unsigned int)*p) & 0x0f, CT_MAP_KEY); - - SWITCH_RANGE_DEFAULT - ret = -2; - goto _end; - SWITCH_RANGE_END - // end CS_HEADER - - - _fixed_trail_again: - ++p; - - default: - if((size_t)(pe - p) < trail) { goto _out; } - n = p; p += trail - 1; - switch(cs) { - case CS_EXT_8: - again_fixed_trail_if_zero(ACS_EXT_VALUE, *(uint8_t*)n+1, _ext_zero); - case CS_EXT_16: - again_fixed_trail_if_zero(ACS_EXT_VALUE, - _msgpack_load16(uint16_t,n)+1, - _ext_zero); - case CS_EXT_32: - again_fixed_trail_if_zero(ACS_EXT_VALUE, - _msgpack_load32(uint32_t,n)+1, - _ext_zero); - case CS_FLOAT: { - double f = _PyFloat_Unpack4((unsigned char*)n, 0); - push_fixed_value(_float, f); } - case CS_DOUBLE: { - double f = _PyFloat_Unpack8((unsigned char*)n, 0); - push_fixed_value(_double, f); } - case CS_UINT_8: - push_fixed_value(_uint8, *(uint8_t*)n); - case CS_UINT_16: - push_fixed_value(_uint16, _msgpack_load16(uint16_t,n)); - case CS_UINT_32: - push_fixed_value(_uint32, _msgpack_load32(uint32_t,n)); - case CS_UINT_64: - push_fixed_value(_uint64, _msgpack_load64(uint64_t,n)); - - case CS_INT_8: - push_fixed_value(_int8, *(int8_t*)n); - case CS_INT_16: - push_fixed_value(_int16, _msgpack_load16(int16_t,n)); - case 
CS_INT_32: - push_fixed_value(_int32, _msgpack_load32(int32_t,n)); - case CS_INT_64: - push_fixed_value(_int64, _msgpack_load64(int64_t,n)); - - case CS_BIN_8: - again_fixed_trail_if_zero(ACS_BIN_VALUE, *(uint8_t*)n, _bin_zero); - case CS_BIN_16: - again_fixed_trail_if_zero(ACS_BIN_VALUE, _msgpack_load16(uint16_t,n), _bin_zero); - case CS_BIN_32: - again_fixed_trail_if_zero(ACS_BIN_VALUE, _msgpack_load32(uint32_t,n), _bin_zero); - case ACS_BIN_VALUE: - _bin_zero: - push_variable_value(_bin, data, n, trail); - - case CS_RAW_8: - again_fixed_trail_if_zero(ACS_RAW_VALUE, *(uint8_t*)n, _raw_zero); - case CS_RAW_16: - again_fixed_trail_if_zero(ACS_RAW_VALUE, _msgpack_load16(uint16_t,n), _raw_zero); - case CS_RAW_32: - again_fixed_trail_if_zero(ACS_RAW_VALUE, _msgpack_load32(uint32_t,n), _raw_zero); - case ACS_RAW_VALUE: - _raw_zero: - push_variable_value(_raw, data, n, trail); - - case ACS_EXT_VALUE: - _ext_zero: - push_variable_value(_ext, data, n, trail); - - case CS_ARRAY_16: - start_container(_array, _msgpack_load16(uint16_t,n), CT_ARRAY_ITEM); - case CS_ARRAY_32: - /* FIXME security guard */ - start_container(_array, _msgpack_load32(uint32_t,n), CT_ARRAY_ITEM); - - case CS_MAP_16: - start_container(_map, _msgpack_load16(uint16_t,n), CT_MAP_KEY); - case CS_MAP_32: - /* FIXME security guard */ - start_container(_map, _msgpack_load32(uint32_t,n), CT_MAP_KEY); - - default: - goto _failed; - } - } - -_push: - if(top == 0) { goto _finish; } - c = &stack[top-1]; - switch(c->ct) { - case CT_ARRAY_ITEM: - if(construct_cb(_array_item)(user, c->count, &c->obj, obj) < 0) { goto _failed; } - if(++c->count == c->size) { - obj = c->obj; - if (construct_cb(_array_end)(user, &obj) < 0) { goto _failed; } - --top; - /*printf("stack pop %d\n", top);*/ - goto _push; - } - goto _header_again; - case CT_MAP_KEY: - c->map_key = obj; - c->ct = CT_MAP_VALUE; - goto _header_again; - case CT_MAP_VALUE: - if(construct_cb(_map_item)(user, c->count, &c->obj, c->map_key, obj) < 0) { goto _failed; } - if(++c->count == c->size) { - obj = c->obj; - if (construct_cb(_map_end)(user, &obj) < 0) { goto _failed; } - --top; - /*printf("stack pop %d\n", top);*/ - goto _push; - } - c->ct = CT_MAP_KEY; - goto _header_again; - - default: - goto _failed; - } - -_header_again: - cs = CS_HEADER; - ++p; - } while(p != pe); - goto _out; - - -_finish: - if (!construct) - unpack_callback_nil(user, &obj); - stack[0].obj = obj; - ++p; - ret = 1; - /*printf("-- finish --\n"); */ - goto _end; - -_failed: - /*printf("** FAILED **\n"); */ - ret = -1; - goto _end; - -_out: - ret = 0; - goto _end; - -_end: - ctx->cs = cs; - ctx->trail = trail; - ctx->top = top; - *off = p - (const unsigned char*)data; - - return ret; -#undef construct_cb -} - -#undef SWITCH_RANGE_BEGIN -#undef SWITCH_RANGE -#undef SWITCH_RANGE_DEFAULT -#undef SWITCH_RANGE_END -#undef push_simple_value -#undef push_fixed_value -#undef push_variable_value -#undef again_fixed_trail -#undef again_fixed_trail_if_zero -#undef start_container - -template -static inline int unpack_container_header(unpack_context* ctx, const char* data, Py_ssize_t len, Py_ssize_t* off) -{ - assert(len >= *off); - uint32_t size; - const unsigned char *const p = (unsigned char*)data + *off; - -#define inc_offset(inc) \ - if (len - *off < inc) \ - return 0; \ - *off += inc; - - switch (*p) { - case var_offset: - inc_offset(3); - size = _msgpack_load16(uint16_t, p + 1); - break; - case var_offset + 1: - inc_offset(5); - size = _msgpack_load32(uint32_t, p + 1); - break; -#ifdef USE_CASE_RANGE - case fixed_offset 
+ 0x0 ... fixed_offset + 0xf: -#else - case fixed_offset + 0x0: - case fixed_offset + 0x1: - case fixed_offset + 0x2: - case fixed_offset + 0x3: - case fixed_offset + 0x4: - case fixed_offset + 0x5: - case fixed_offset + 0x6: - case fixed_offset + 0x7: - case fixed_offset + 0x8: - case fixed_offset + 0x9: - case fixed_offset + 0xa: - case fixed_offset + 0xb: - case fixed_offset + 0xc: - case fixed_offset + 0xd: - case fixed_offset + 0xe: - case fixed_offset + 0xf: -#endif - ++*off; - size = ((unsigned int)*p) & 0x0f; - break; - default: - PyErr_SetString(PyExc_ValueError, "Unexpected type header on stream"); - return -1; - } - unpack_callback_uint32(&ctx->user, size, &ctx->stack[0].obj); - return 1; -} - -#undef SWITCH_RANGE_BEGIN -#undef SWITCH_RANGE -#undef SWITCH_RANGE_DEFAULT -#undef SWITCH_RANGE_END - -static const execute_fn unpack_construct = &unpack_execute; -static const execute_fn unpack_skip = &unpack_execute; -static const execute_fn read_array_header = &unpack_container_header<0x90, 0xdc>; -static const execute_fn read_map_header = &unpack_container_header<0x80, 0xde>; - -#undef NEXT_CS - -/* vim: set ts=4 sw=4 sts=4 expandtab */ diff --git a/docker-compose.yml b/docker-compose.yml index a33dbe66..e93b8abc 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -76,9 +76,9 @@ services: ddagent: image: datadog/agent-dev:gbbr-apm-build environment: - - DD_BIND_HOST=0.0.0.0 - - DD_API_KEY=invalid_key_but_this_is_fine - - DD_APM_RECEIVER_SOCKET=/tmp/ddagent/trace.sock + - OTEL_BIND_HOST=0.0.0.0 + - OTEL_API_KEY=invalid_key_but_this_is_fine + - OTEL_APM_RECEIVER_SOCKET=/tmp/ddagent/trace.sock ports: - "127.0.0.1:8126:8126" volumes: @@ -97,15 +97,15 @@ services: image: datadog/docker-library:ddtrace_py environment: - TOX_SKIP_DIST=True - - TEST_DATADOG_INTEGRATION=1 - - TEST_DATADOG_INTEGRATION_UDS=1 + - TEST_OPENTELEMETRY_INTEGRATION=1 + - TEST_OPENTELEMETRY_INTEGRATION_UDS=1 network_mode: host working_dir: /src volumes: - ddagent:/tmp/ddagent - - ./ddtrace:/src/ddtrace:ro - # DEV: Make ddtrace/vendor rw so Tox can build C-extensions - - ./ddtrace/vendor:/src/ddtrace/vendor:rw + - ./oteltrace:/src/oteltrace:ro + # DEV: Make oteltrace/vendor rw so Tox can build C-extensions + - ./oteltrace/vendor:/src/oteltrace/vendor:rw - ./tests:/src/tests:ro - ./setup.cfg:/src/setup.cfg:ro - ./setup.py:/src/setup.py:ro diff --git a/docs/Makefile b/docs/Makefile index 7b1ce33c..43969dc7 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -91,9 +91,9 @@ qthelp: @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ddtrace.qhcp" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/oteltrace.qhcp" @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ddtrace.qhc" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/oteltrace.qhc" .PHONY: applehelp applehelp: @@ -110,8 +110,8 @@ devhelp: @echo @echo "Build finished." 
@echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/ddtrace" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ddtrace" + @echo "# mkdir -p $$HOME/.local/share/devhelp/oteltrace" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/oteltrace" @echo "# devhelp" .PHONY: epub diff --git a/docs/advanced_usage.rst b/docs/advanced_usage.rst index 10671be5..335f8b0c 100644 --- a/docs/advanced_usage.rst +++ b/docs/advanced_usage.rst @@ -1,24 +1,21 @@ Advanced Usage ============== -Agent Configuration -------------------- - -If the Datadog Agent is on a separate host from your application, you can modify -the default ``ddtrace.tracer`` object to utilize another hostname and port. Here -is a small example showcasing this:: - - from ddtrace import tracer +Sending traces to different backends +------------------------------------ - tracer.configure(hostname=, port=, https=) +`oteltrace` can export traces to different backends by using different api implementations, it +can be controlled by passing an `api` object to :func:`oteltrace.Tracer.configure`. -By default, these will be set to ``localhost``, ``8126``, and ``False`` respectively. +The :class:`oteltrace.api_otel_exporter.APIOtel` class uses an OpenTelemetry exporters as backend. -You can also use a Unix Domain Socket to connect to the agent:: +Sending traces to different backends +------------------------------------ - from ddtrace import tracer +`oteltrace` can export traces to different backends by using different api implementations, it +can be controlled by passing an `api` object to :func:`oteltrace.Tracer.configure`. - tracer.configure(uds_path="/path/to/socket") +The :class:`oteltrace.api_otel_exporter.APIOtel` class uses an OpenTelemetry exporters as backend. Distributed Tracing @@ -29,7 +26,7 @@ To trace requests across hosts, the spans on the secondary hosts must be linked - On the server side, it means to read propagated attributes and set them to the active tracing context. - On the client side, it means to propagate the attributes, commonly as a header/metadata. -`ddtrace` already provides default propagators but you can also implement your own. +`oteltrace` already provides default propagators (``w3c``, ``b3`` and ``datadog``) but you can also implement your own. Web Frameworks ^^^^^^^^^^^^^^ @@ -72,8 +69,8 @@ on the other side, the metadata is retrieved and the trace can continue. To propagate the tracing information, HTTP headers are used to transmit the required metadata to piece together the trace. -.. autoclass:: ddtrace.propagation.http.HTTPPropagator - :members: +:func:`oteltrace.propagation.http.HTTPPropagator` returns an instance of the configured +propagator. Custom ^^^^^^ @@ -159,7 +156,7 @@ stored and remain incomplete. If you change the priority, we recommend you do it as soon as possible, when the root span has just been created:: - from ddtrace.ext.priority import USER_REJECT, USER_KEEP + from oteltrace.ext.priority import USER_REJECT, USER_KEEP context = tracer.context_provider.active() @@ -176,7 +173,7 @@ dropped in the client. The ``RateSampler`` randomly samples a percentage of traces:: - from ddtrace.sampler import RateSampler + from oteltrace.sampler import RateSampler # Sample rate is between 0 (nothing sampled) to 1 (everything sampled). # Keep 20% of the traces. @@ -189,7 +186,7 @@ Trace Search & Analytics Use `Trace Search & Analytics `_ to filter application performance metrics and APM Events by user-defined tags. 
An APM event is generated every time a trace is generated. -Enabling APM events for all web frameworks can be accomplished by setting the environment variable ``DD_TRACE_ANALYTICS_ENABLED=true``: +Enabling APM events for all web frameworks can be accomplished by setting the environment variable ``OTEL_TRACE_ANALYTICS_ENABLED=true``: * :ref:`aiohttp` * :ref:`bottle` @@ -203,54 +200,54 @@ Enabling APM events for all web frameworks can be accomplished by setting the en * :ref:`tornado` -For most libraries, APM events can be enabled with the environment variable ``DD_{INTEGRATION}_ANALYTICS_ENABLED=true``: +For most libraries, APM events can be enabled with the environment variable ``OTEL_{INTEGRATION}_ANALYTICS_ENABLED=true``: +----------------------+----------------------------------------+ | Library | Environment Variable | +======================+========================================+ -| :ref:`aiobotocore` | ``DD_AIOBOTOCORE_ANALYTICS_ENABLED`` | +| :ref:`aiobotocore` | ``OTEL_AIOBOTOCORE_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`aiopg` | ``DD_AIOPG_ANALYTICS_ENABLED`` | +| :ref:`aiopg` | ``OTEL_AIOPG_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`boto` | ``DD_BOTO_ANALYTICS_ENABLED`` | +| :ref:`boto` | ``OTEL_BOTO_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`botocore` | ``DD_BOTOCORE_ANALYTICS_ENABLED`` | +| :ref:`botocore` | ``OTEL_BOTOCORE_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`bottle` | ``DD_BOTTLE_ANALYTICS_ENABLED`` | +| :ref:`bottle` | ``OTEL_BOTTLE_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`cassandra` | ``DD_CASSANDRA_ANALYTICS_ENABLED`` | +| :ref:`cassandra` | ``OTEL_CASSANDRA_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`elasticsearch` | ``DD_ELASTICSEARCH_ANALYTICS_ENABLED`` | +| :ref:`elasticsearch` | ``OTEL_ELASTICSEARCH_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`falcon` | ``DD_FALCON_ANALYTICS_ENABLED`` | +| :ref:`falcon` | ``OTEL_FALCON_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`flask` | ``DD_FLASK_ANALYTICS_ENABLED`` | +| :ref:`flask` | ``OTEL_FLASK_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`flask_cache` | ``DD_FLASK_CACHE_ANALYTICS_ENABLED`` | +| :ref:`flask_cache` | ``OTEL_FLASK_CACHE_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`grpc` | ``DD_GRPC_ANALYTICS_ENABLED`` | +| :ref:`grpc` | ``OTEL_GRPC_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`httplib` | ``DD_HTTPLIB_ANALYTICS_ENABLED`` | +| :ref:`httplib` | ``OTEL_HTTPLIB_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`kombu` | ``DD_KOMBU_ANALYTICS_ENABLED`` | +| :ref:`kombu` | ``OTEL_KOMBU_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`molten` | ``DD_MOLTEN_ANALYTICS_ENABLED`` | +| :ref:`molten` | ``OTEL_MOLTEN_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`pylibmc` | ``DD_PYLIBMC_ANALYTICS_ENABLED`` | +| :ref:`pylibmc` | ``OTEL_PYLIBMC_ANALYTICS_ENABLED`` | 
+----------------------+----------------------------------------+ -| :ref:`pylons` | ``DD_PYLONS_ANALYTICS_ENABLED`` | +| :ref:`pylons` | ``OTEL_PYLONS_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`pymemcache` | ``DD_PYMEMCACHE_ANALYTICS_ENABLED`` | +| :ref:`pymemcache` | ``OTEL_PYMEMCACHE_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`pymongo` | ``DD_PYMONGO_ANALYTICS_ENABLED`` | +| :ref:`pymongo` | ``OTEL_PYMONGO_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`redis` | ``DD_REDIS_ANALYTICS_ENABLED`` | +| :ref:`redis` | ``OTEL_REDIS_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`rediscluster` | ``DD_REDISCLUSTER_ANALYTICS_ENABLED`` | +| :ref:`rediscluster` | ``OTEL_REDISCLUSTER_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`sqlalchemy` | ``DD_SQLALCHEMY_ANALYTICS_ENABLED`` | +| :ref:`sqlalchemy` | ``OTEL_SQLALCHEMY_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ -| :ref:`vertica` | ``DD_VERTICA_ANALYTICS_ENABLED`` | +| :ref:`vertica` | ``OTEL_VERTICA_ANALYTICS_ENABLED`` | +----------------------+----------------------------------------+ For datastore libraries that extend another, use the setting for the underlying library: @@ -258,17 +255,17 @@ For datastore libraries that extend another, use the setting for the underlying +------------------------+----------------------------------+ | Library | Environment Variable | +========================+==================================+ -| :ref:`mongoengine` | ``DD_PYMONGO_ANALYTICS_ENABLED`` | +| :ref:`mongoengine` | ``OTEL_PYMONGO_ANALYTICS_ENABLED`` | +------------------------+----------------------------------+ -| :ref:`mysql-connector` | ``DD_DBAPI2_ANALYTICS_ENABLED`` | +| :ref:`mysql-connector` | ``OTEL_DBAPI2_ANALYTICS_ENABLED`` | +------------------------+----------------------------------+ -| :ref:`mysqldb` | ``DD_DBAPI2_ANALYTICS_ENABLED`` | +| :ref:`mysqldb` | ``OTEL_DBAPI2_ANALYTICS_ENABLED`` | +------------------------+----------------------------------+ -| :ref:`psycopg2` | ``DD_DBAPI2_ANALYTICS_ENABLED`` | +| :ref:`psycopg2` | ``OTEL_DBAPI2_ANALYTICS_ENABLED`` | +------------------------+----------------------------------+ -| :ref:`pymysql` | ``DD_DBAPI2_ANALYTICS_ENABLED`` | +| :ref:`pymysql` | ``OTEL_DBAPI2_ANALYTICS_ENABLED`` | +------------------------+----------------------------------+ -| :ref:`sqllite` | ``DD_DBAPI2_ANALYTICS_ENABLED`` | +| :ref:`sqllite` | ``OTEL_DBAPI2_ANALYTICS_ENABLED`` | +------------------------+----------------------------------+ Where environment variables are not used for configuring the tracer, the instructions for configuring trace analytics is provided in the library documentation: @@ -282,7 +279,7 @@ Where environment variables are not used for configuring the tracer, the instruc Resolving deprecation warnings ------------------------------ Before upgrading, it’s a good idea to resolve any deprecation warnings raised by your project. -These warnings must be fixed before upgrading, otherwise the ``ddtrace`` library +These warnings must be fixed before upgrading, otherwise the ``oteltrace`` library will not work as expected. Our deprecation messages include the version where the behavior is altered or removed. @@ -318,7 +315,7 @@ discarded depending on the output. 
The library comes with a ``FilterRequestsOnUrl`` filter that can be used to filter out incoming requests to specific urls: -.. autoclass:: ddtrace.filters.FilterRequestsOnUrl +.. autoclass:: oteltrace.filters.FilterRequestsOnUrl :members: **Write a custom filter** @@ -345,7 +342,7 @@ next step of the pipeline or ``None`` if the trace should be discarded:: Logs Injection -------------- -.. automodule:: ddtrace.contrib.logging +.. automodule:: oteltrace.contrib.logging HTTP layer ---------- @@ -360,7 +357,7 @@ Configuration can be provided both at the global level and at the integration le Examples:: - from ddtrace import config + from oteltrace import config # Global config config.http.trace_query_string = True @@ -380,7 +377,7 @@ Configuration can be provided both at the global level and at the integration le Examples:: - from ddtrace import config + from oteltrace import config # Global config config.trace_headers([ @@ -422,180 +419,89 @@ structure like in the following example:: } -.. _adv_opentracing: - -OpenTracing ------------ - - -The Datadog opentracer can be configured via the ``config`` dictionary -parameter to the tracer which accepts the following described fields. See below -for usage. - -+---------------------+----------------------------------------+---------------+ -| Configuration Key | Description | Default Value | -+=====================+========================================+===============+ -| `enabled` | enable or disable the tracer | `True` | -+---------------------+----------------------------------------+---------------+ -| `debug` | enable debug logging | `False` | -+---------------------+----------------------------------------+---------------+ -| `agent_hostname` | hostname of the Datadog agent to use | `localhost` | -+---------------------+----------------------------------------+---------------+ -| `agent_https` | use https to connect to the agent | `False` | -+---------------------+----------------------------------------+---------------+ -| `agent_port` | port the Datadog agent is listening on | `8126` | -+---------------------+----------------------------------------+---------------+ -| `global_tags` | tags that will be applied to each span | `{}` | -+---------------------+----------------------------------------+---------------+ -| `sampler` | see `Sampling`_ | `AllSampler` | -+---------------------+----------------------------------------+---------------+ -| `priority_sampling` | see `Priority Sampling`_ | `True` | -+---------------------+----------------------------------------+---------------+ -| `settings` | see `Advanced Usage`_ | `{}` | -+---------------------+----------------------------------------+---------------+ - - -Usage -^^^^^ - -**Manual tracing** - -To explicitly trace:: - - import time - import opentracing - from ddtrace.opentracer import Tracer, set_global_tracer - - def init_tracer(service_name): - config = { - 'agent_hostname': 'localhost', - 'agent_port': 8126, - } - tracer = Tracer(service_name, config=config) - set_global_tracer(tracer) - return tracer - - def my_operation(): - span = opentracing.tracer.start_span('my_operation_name') - span.set_tag('my_interesting_tag', 'my_interesting_value') - time.sleep(0.05) - span.finish() - - init_tracer('my_service_name') - my_operation() - -**Context Manager Tracing** - -To trace a function using the span context manager:: - - import time - import opentracing - from ddtrace.opentracer import Tracer, set_global_tracer - - def init_tracer(service_name): - config = { - 'agent_hostname': 
'localhost', - 'agent_port': 8126, - } - tracer = Tracer(service_name, config=config) - set_global_tracer(tracer) - return tracer - - def my_operation(): - with opentracing.tracer.start_span('my_operation_name') as span: - span.set_tag('my_interesting_tag', 'my_interesting_value') - time.sleep(0.05) - - init_tracer('my_service_name') - my_operation() +.. _oteltracerun: -See our tracing trace-examples_ repository for concrete, runnable examples of -the Datadog opentracer. - -.. _trace-examples: https://github.com/DataDog/trace-examples/tree/master/python - -See also the `Python OpenTracing`_ repository for usage of the tracer. - -.. _Python OpenTracing: https://github.com/opentracing/opentracing-python - - -**Alongside Datadog tracer** - -The Datadog OpenTracing tracer can be used alongside the Datadog tracer. This -provides the advantage of providing tracing information collected by -``ddtrace`` in addition to OpenTracing. The simplest way to do this is to use -the :ref:`ddtrace-run` command to invoke your OpenTraced -application. - - -**Opentracer API** - -.. autoclass:: ddtrace.opentracer.Tracer - :members: - :special-members: __init__ - - -.. _ddtracerun: - -``ddtrace-run`` +``oteltrace-run`` --------------- -``ddtrace-run`` will trace :ref:`supported` web frameworks +``oteltrace-run`` will trace :ref:`supported` web frameworks and database modules without the need for changing your code:: - $ ddtrace-run -h + $ oteltrace-run -h Execute the given Python program, after configuring it - to emit Datadog traces. + to emit OpenTelemetry traces. Append command line arguments to your program as usual. - Usage: [ENV_VARS] ddtrace-run + Usage: [ENV_VARS] oteltrace-run -The available environment variables for ``ddtrace-run`` are: +The available environment variables for ``oteltrace-run`` are: -* ``DATADOG_TRACE_ENABLED=true|false`` (default: true): Enable web framework and +* ``OPENTELEMETRY_TRACE_ENABLED=true|false`` (default: true): Enable web framework and library instrumentation. When false, your application code will not generate any traces. -* ``DATADOG_ENV`` (no default): Set an application's environment e.g. ``prod``, +* ``OPENTELEMETRY_ENV`` (no default): Set an application's environment e.g. ``prod``, ``pre-prod``, ``stage`` -* ``DATADOG_TRACE_DEBUG=true|false`` (default: false): Enable debug logging in +* ``OPENTELEMETRY_TRACE_DEBUG=true|false`` (default: false): Enable debug logging in the tracer -* ``DATADOG_SERVICE_NAME`` (no default): override the service name to be used +* ``OPENTELEMETRY_SERVICE_NAME`` (no default): override the service name to be used for this program. This value is passed through when setting up middleware for web framework integrations (e.g. pylons, flask, django). For tracing without a web integration, prefer setting the service name in code. -* ``DATADOG_PATCH_MODULES=module:patch,module:patch...`` e.g. +* ``OPENTELEMETRY_PATCH_MODULES=module:patch,module:patch...`` e.g. 
``boto:true,redis:false``: override the modules patched for this execution of the program (default: none) -* ``DATADOG_TRACE_AGENT_HOSTNAME=localhost``: override the address of the trace +* ``OPENTELEMETRY_TRACE_AGENT_HOSTNAME=localhost``: override the address of the trace agent host that the default tracer will attempt to submit to (default: ``localhost``) -* ``DATADOG_TRACE_AGENT_PORT=8126``: override the port that the default tracer +* ``OPENTELEMETRY_TRACE_AGENT_PORT=8126``: override the port that the default tracer will submit to (default: 8126) -* ``DATADOG_PRIORITY_SAMPLING`` (default: true): enables :ref:`Priority +* ``OPENTELEMETRY_PRIORITY_SAMPLING`` (default: true): enables :ref:`Priority Sampling` -* ``DD_LOGS_INJECTION`` (default: false): enables :ref:`Logs Injection` +* ``OTEL_LOGS_INJECTION`` (default: false): enables :ref:`Logs Injection` + +Exporter Configuration +^^^^^^^^^^^^^^^^^^^^^^ + +``oteltrace-run`` uses OpenTelemetry SDK exporters to send the traces to +different backends. +The exporter to be used is configured using the following env variables: + +* ``OTEL_EXPORTER_MODULE`` specifies the python module where the exporter is implemented. +* ``OTEL_EXPORTER_FACTORY`` defines a function to be called to get an instance of the exporter. + +The specific configuration for each type of exporter is defined by using the +``OTEL_EXPORTER_OPTIONS_*`` env variables. +The text after ``OTEL_EXPORTER_OPTIONS_`` will be passed to +``OTEL_EXPORTER_FACTORY`` as kwargs. -``ddtrace-run`` respects a variety of common entrypoints for web applications: +Propagator Configuration +^^^^^^^^^^^^^^^^^^^^^^^^ -- ``ddtrace-run python my_app.py`` -- ``ddtrace-run python manage.py runserver`` -- ``ddtrace-run gunicorn myapp.wsgi:application`` -- ``ddtrace-run uwsgi --http :9090 --wsgi-file my_app.py`` +``oteltrace-run`` supports different formats to distribute the trace context. +The propagator used is defined by the ``OTEL_TRACER_PROPAGATOR`` env variable. +Currently ``w3c`` (default), ``b3`` and ``datadog`` are supported. + +``oteltrace-run`` respects a variety of common entrypoints for web applications: + +- ``oteltrace-run python my_app.py`` +- ``oteltrace-run python manage.py runserver`` +- ``oteltrace-run gunicorn myapp.wsgi:application`` +- ``oteltrace-run uwsgi --http :9090 --wsgi-file my_app.py`` Pass along command-line arguments as your program would normally expect them:: -$ ddtrace-run gunicorn myapp.wsgi:application --max-requests 1000 --statsd-host localhost:8125 +$ oteltrace-run gunicorn myapp.wsgi:application --max-requests 1000 --statsd-host localhost:8125 If you're running in a Kubernetes cluster and still don't see your traces, make sure your application has a route to the tracing Agent. An easy way to test this is with a:: $ pip install ipython -$ DATADOG_TRACE_DEBUG=true ddtrace-run ipython +$ OPENTELEMETRY_TRACE_DEBUG=true oteltrace-run ipython Because iPython uses SQLite, it will be automatically instrumented and your traces should be sent off. If an error occurs, a message will be displayed in @@ -607,20 +513,26 @@ API ``Tracer`` ^^^^^^^^^^ -.. autoclass:: ddtrace.Tracer +.. autoclass:: oteltrace.Tracer :members: :special-members: __init__ ``Span`` ^^^^^^^^ -.. autoclass:: ddtrace.Span +.. autoclass:: oteltrace.Span :members: :special-members: __init__ ``Pin`` ^^^^^^^ -.. autoclass:: ddtrace.Pin +.. autoclass:: oteltrace.Pin + :members: + :special-members: __init__ + +``APIOtel`` +^^^^^^^^^^^ +.. 
autoclass:: oteltrace.api_otel_exporter.APIOtel :members: :special-members: __init__ @@ -629,11 +541,11 @@ API ``patch_all`` ^^^^^^^^^^^^^ -.. autofunction:: ddtrace.monkey.patch_all +.. autofunction:: oteltrace.monkey.patch_all ``patch`` ^^^^^^^^^ -.. autofunction:: ddtrace.monkey.patch +.. autofunction:: oteltrace.monkey.patch .. toctree:: :maxdepth: 2 diff --git a/docs/async_integrations.rst b/docs/async_integrations.rst index 6be816c1..156b9529 100644 --- a/docs/async_integrations.rst +++ b/docs/async_integrations.rst @@ -6,7 +6,7 @@ Asynchronous Libraries asyncio ^^^^^^^ -.. automodule:: ddtrace.contrib.asyncio +.. automodule:: oteltrace.contrib.asyncio .. _gevent: @@ -14,5 +14,5 @@ asyncio gevent ^^^^^^ -.. automodule:: ddtrace.contrib.gevent +.. automodule:: oteltrace.contrib.gevent diff --git a/docs/basic_usage.rst b/docs/basic_usage.rst index 069e95c8..2d00a06d 100644 --- a/docs/basic_usage.rst +++ b/docs/basic_usage.rst @@ -3,41 +3,41 @@ Basic Usage =========== -With ``ddtrace`` installed, the application can be instrumented. +With ``oteltrace`` installed, the application can be instrumented. Auto Instrumentation -------------------- -``ddtrace-run`` +``oteltrace-run`` ^^^^^^^^^^^^^^^ -Python applications can easily be instrumented with ``ddtrace`` by using the -included ``ddtrace-run`` command. Simply prefix your Python execution command -with ``ddtrace-run`` in order to auto-instrument the libraries in your +Python applications can easily be instrumented with ``oteltrace`` by using the +included ``oteltrace-run`` command. Simply prefix your Python execution command +with ``oteltrace-run`` in order to auto-instrument the libraries in your application. For example, if the command to run your application is:: $ python app.py -then to auto-instrument using Datadog, the corresponding command is:: +then to auto-instrument using OpenTelemetry, the corresponding command is:: -$ ddtrace-run python app.py +$ oteltrace-run python app.py -For more advanced usage of ``ddtrace-run`` refer to the documentation :ref:`here`. +For more advanced usage of ``oteltrace-run``, like using an OpenTelemetry exporter, refer to the documentation :ref:`here`. ``patch_all`` ^^^^^^^^^^^^^ To manually invoke the automatic instrumentation use ``patch_all``:: - from ddtrace import patch_all + from oteltrace import patch_all patch_all() To toggle instrumentation for a particular module:: - from ddtrace import patch_all + from oteltrace import patch_all patch_all(redis=False, cassandra=False) By default all supported libraries will be patched when @@ -54,14 +54,14 @@ documentation. Manual Instrumentation ---------------------- -If you would like to extend the functionality of the ``ddtrace`` library or gain +If you would like to extend the functionality of the ``oteltrace`` library or gain finer control over instrumenting your application, several techniques are provided by the library. Decorator ^^^^^^^^^ -``ddtrace`` provides a decorator that can be used to trace a particular method +``oteltrace`` provides a decorator that can be used to trace a particular method in your application:: @tracer.wrap() @@ -70,12 +70,12 @@ in your application:: # ... # ... -API details of the decorator can be found here :py:meth:`ddtrace.Tracer.wrap`. +API details of the decorator can be found here :py:meth:`oteltrace.Tracer.wrap`. 
Context Manager ^^^^^^^^^^^^^^^ -To trace an arbitrary block of code, you can use the :py:mod:`ddtrace.Span` +To trace an arbitrary block of code, you can use the :py:mod:`oteltrace.Span` context manager:: # trace some interesting operation @@ -84,7 +84,7 @@ context manager:: # ... # ... -Further API details can be found here :py:meth:`ddtrace.Tracer`. +Further API details can be found here :py:meth:`oteltrace.Tracer`. Using the API ^^^^^^^^^^^^^ @@ -98,10 +98,10 @@ you may require:: # do some operation(s) of interest in between # NOTE: make sure to call span.finish() or the entire trace will not be sent - # to Datadog + # to OpenTelemetry span.finish() API details of the decorator can be found here: -- :py:meth:`ddtrace.Tracer.trace` -- :py:meth:`ddtrace.Span.finish`. +- :py:meth:`oteltrace.Tracer.trace` +- :py:meth:`oteltrace.Span.finish`. diff --git a/docs/conf.py b/docs/conf.py index 2659074b..9cd3a36f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# ddtrace documentation build configuration file, created by +# oteltrace documentation build configuration file, created by # sphinx-quickstart on Thu Jul 7 17:25:05 2016. # # This file is execfile()d with the current directory set to its @@ -22,7 +22,7 @@ from datetime import datetime -# append the ddtrace path to syspath +# append the oteltrace path to syspath sys.path.insert(0, os.path.abspath('..')) @@ -58,9 +58,9 @@ # General information about the project. year = datetime.now().year -project = u'ddtrace' -copyright = u'2016-{}, Datadog, Inc.'.format(year) -author = u'Datadog, Inc.' +project = u'oteltrace' +copyright = u'2016-{}, DataDog, Inc.'.format(year) +author = u'DataDog, Inc.' # document in order of source autodoc_member_order = 'bysource' @@ -144,7 +144,7 @@ # documentation. # html_theme_options = { - 'description': 'Datadog\'s Python tracing client', + 'description': 'OpenTelemetry\'s Python tracing client', } # Add any paths that contain custom themes here, relative to this directory. @@ -153,7 +153,7 @@ # The name for this set of Sphinx documents. # " v documentation" by default. # -# html_title = u'ddtrace v0.2' +# html_title = u'oteltrace v0.2' # A shorter title for the navigation bar. Default is the same as html_title. # @@ -260,7 +260,7 @@ # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'ddtracedoc' +htmlhelp_basename = 'oteltracedoc' # -- Options for LaTeX output --------------------------------------------- @@ -286,8 +286,8 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'ddtrace.tex', u'ddtrace Documentation', - u'Datadog, Inc', 'manual'), + (master_doc, 'oteltrace.tex', u'oteltrace Documentation', + u'DataDog, Inc', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -322,7 +322,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - (master_doc, 'ddtrace', u'ddtrace Documentation', + (master_doc, 'oteltrace', u'oteltrace Documentation', [author], 1) ] @@ -337,8 +337,8 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'ddtrace', u'ddtrace Documentation', - author, 'ddtrace', 'One line description of project.', + (master_doc, 'oteltrace', u'oteltrace Documentation', + author, 'oteltrace', 'One line description of project.', 'Miscellaneous'), ] diff --git a/docs/contributing.rst b/docs/contributing.rst index d7e8af61..fe9d4d92 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -3,7 +3,7 @@ ============== When contributing to this repository, we advise you to discuss the change you -wish to make via an `issue `_. +wish to make via an `issue `_. Branches ======== @@ -48,8 +48,8 @@ That ensures that: Internal API ============ -The `ddtrace.internal` module contains code that must only be used inside -`ddtrace` itself. Relying on the API of this module is dangerous and can break +The `oteltrace.internal` module contains code that must only be used inside +`oteltrace` itself. Relying on the API of this module is dangerous and can break at anytime. Don't do it. Python Versions and Implementations Support diff --git a/docs/db_integrations.rst b/docs/db_integrations.rst index a5c5ddc2..49a8eda3 100644 --- a/docs/db_integrations.rst +++ b/docs/db_integrations.rst @@ -6,7 +6,7 @@ Datastore Libraries Algoliasearch ------------- -.. automodule:: ddtrace.contrib.algoliasearch +.. automodule:: oteltrace.contrib.algoliasearch .. _cassandra: @@ -14,7 +14,7 @@ Algoliasearch Cassandra --------- -.. automodule:: ddtrace.contrib.cassandra +.. automodule:: oteltrace.contrib.cassandra .. _consul: @@ -22,7 +22,7 @@ Cassandra Consul ------ -.. automodule:: ddtrace.contrib.consul +.. automodule:: oteltrace.contrib.consul .. _elasticsearch: @@ -30,7 +30,7 @@ Consul Elasticsearch ------------- -.. automodule:: ddtrace.contrib.elasticsearch +.. automodule:: oteltrace.contrib.elasticsearch .. _flask_cache: @@ -38,7 +38,7 @@ Elasticsearch Flask Cache ----------- -.. automodule:: ddtrace.contrib.flask_cache +.. automodule:: oteltrace.contrib.flask_cache .. _mongodb: @@ -51,7 +51,7 @@ MongoDB Mongoengine ^^^^^^^^^^^ -.. automodule:: ddtrace.contrib.mongoengine +.. automodule:: oteltrace.contrib.mongoengine .. _pymongo: @@ -59,7 +59,7 @@ Mongoengine Pymongo ^^^^^^^ -.. automodule:: ddtrace.contrib.pymongo +.. automodule:: oteltrace.contrib.pymongo Memcached @@ -70,14 +70,14 @@ Memcached pylibmc ^^^^^^^ -.. automodule:: ddtrace.contrib.pylibmc +.. automodule:: oteltrace.contrib.pylibmc .. _pymemcache: pymemcache ^^^^^^^^^^ -.. automodule:: ddtrace.contrib.pymemcache +.. automodule:: oteltrace.contrib.pymemcache MySQL ----- @@ -87,7 +87,7 @@ MySQL mysql-connector ^^^^^^^^^^^^^^^ -.. automodule:: ddtrace.contrib.mysql +.. automodule:: oteltrace.contrib.mysql .. _mysqlclient: @@ -97,14 +97,14 @@ mysql-connector mysqlclient/MySQL-python ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. automodule:: ddtrace.contrib.mysqldb +.. automodule:: oteltrace.contrib.mysqldb .. _pymysql: pymysql ^^^^^^^ -.. automodule:: ddtrace.contrib.pymysql +.. automodule:: oteltrace.contrib.pymysql Postgres @@ -115,7 +115,7 @@ Postgres aiopg ^^^^^ -.. automodule:: ddtrace.contrib.aiopg +.. automodule:: oteltrace.contrib.aiopg .. _psycopg2: @@ -123,7 +123,7 @@ aiopg psycopg ^^^^^^^ -.. automodule:: ddtrace.contrib.psycopg +.. 
automodule:: oteltrace.contrib.psycopg @@ -135,7 +135,7 @@ Redis redis ^^^^^ -.. automodule:: ddtrace.contrib.redis +.. automodule:: oteltrace.contrib.redis .. _rediscluster: @@ -143,7 +143,7 @@ redis redis-py-cluster ^^^^^^^^^^^^^^^^ -.. automodule:: ddtrace.contrib.rediscluster +.. automodule:: oteltrace.contrib.rediscluster .. _sqlalchemy: @@ -151,7 +151,7 @@ redis-py-cluster SQLAlchemy ---------- -.. automodule:: ddtrace.contrib.sqlalchemy +.. automodule:: oteltrace.contrib.sqlalchemy .. _sqllite: @@ -159,11 +159,11 @@ SQLAlchemy SQLite ------ -.. automodule:: ddtrace.contrib.sqlite3 +.. automodule:: oteltrace.contrib.sqlite3 .. _vertica: Vertica ------- -.. automodule:: ddtrace.contrib.vertica +.. automodule:: oteltrace.contrib.vertica diff --git a/docs/index.rst b/docs/index.rst index e3166228..83ae7b4c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,9 +1,9 @@ .. include:: ./shared.rst -Datadog Python Trace Client +OpenTelemetry Python Trace Client =========================== -``ddtrace`` is Datadog's Python tracing client. It is used to trace requests as +``oteltrace`` is OpenTelemetry's Python tracing client. It is used to trace requests as they flow across web servers, databases and microservices. This enables developers to have greater visibility into bottlenecks and troublesome requests in their application. @@ -16,7 +16,7 @@ For a basic product overview: check out the `setup documentation`_. For details about developing and contributing: refer to the `development guide`_. -For descriptions of the terminology of Datadog APM: take a look at the `official +For descriptions of the terminology of OpenTelemetry APM: take a look at the `official documentation`_. @@ -27,7 +27,7 @@ Supported Libraries We officially support Python 2.7, 3.4 and above. -The versions listed are the versions that we have tested, but ``ddtrace`` can +The versions listed are the versions that we have tested, but ``oteltrace`` can still be compatible with other versions of these libraries. If a version of a library you use is unsupported, feel free to contribute or request it by contacting support. @@ -119,7 +119,7 @@ contacting support. .. [1] Libraries that are automatically instrumented when the - :ref:`ddtrace-run` command is used or the ``patch_all()`` method + :ref:`oteltrace-run` command is used or the ``patch_all()`` method is called. Always use ``patch()`` and ``patch_all()`` as soon as possible in your Python entrypoint. diff --git a/docs/installation_quickstart.rst b/docs/installation_quickstart.rst index ec19ddc2..5bacc61c 100644 --- a/docs/installation_quickstart.rst +++ b/docs/installation_quickstart.rst @@ -15,71 +15,20 @@ Installation Install with :code:`pip`:: -$ pip install ddtrace +$ pip install oteltrace We strongly suggest pinning the version of the library you deploy. Quickstart ---------- -Getting started with ``ddtrace`` is as easy as prefixing your python -entry-point command with ``ddtrace-run``. +Getting started with ``oteltrace`` is as easy as prefixing your python +entry-point command with ``oteltrace-run``. For example if you start your application with ``python app.py`` then run:: - $ ddtrace-run python app.py + $ oteltrace-run python app.py -For more advanced usage of ``ddtrace-run`` refer to the documentation :ref:`here`. +For more advanced usage of ``oteltrace-run`` refer to the documentation :ref:`here`. To find out how to trace your own code manually refer to the documentation :ref:`here`. 
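As a concrete illustration, a minimal ``app.py`` that ``oteltrace-run`` could
auto-instrument might look like the sketch below; Flask and the port number are only
assumptions here, any supported framework or library works the same way::

    # app.py -- started with `oteltrace-run python app.py`, so incoming
    # requests (and any instrumented libraries it calls) are traced
    # automatically, without touching the application code
    from flask import Flask

    app = Flask(__name__)


    @app.route('/')
    def index():
        return 'hello, traces'


    app.run(host='127.0.0.1', port=8050)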
- - -OpenTracing ------------ - -``ddtrace`` also provides an OpenTracing API to the Datadog tracer so -that you can use the Datadog tracer in your OpenTracing-compatible -applications. - -Installation -^^^^^^^^^^^^ - -Include OpenTracing with ``ddtrace``:: - - $ pip install ddtrace[opentracing] - -To include the OpenTracing dependency in your project with ``ddtrace``, ensure -you have the following in ``setup.py``:: - - install_requires=[ - "ddtrace[opentracing]", - ], - -Configuration -^^^^^^^^^^^^^ - -The OpenTracing convention for initializing a tracer is to define an -initialization method that will configure and instantiate a new tracer and -overwrite the global ``opentracing.tracer`` reference. - -Typically this method looks something like:: - - from ddtrace.opentracer import Tracer, set_global_tracer - - def init_tracer(service_name): - """ - Initialize a new Datadog opentracer and set it as the - global tracer. - - This overwrites the opentracing.tracer reference. - """ - config = { - 'agent_hostname': 'localhost', - 'agent_port': 8126, - } - tracer = Tracer(service_name, config=config) - set_global_tracer(tracer) - return tracer - -For more advanced usage of OpenTracing in ``ddtrace`` refer to the -documentation :ref:`here`. diff --git a/docs/other_integrations.rst b/docs/other_integrations.rst index 66875311..31e004d4 100644 --- a/docs/other_integrations.rst +++ b/docs/other_integrations.rst @@ -11,7 +11,7 @@ Boto aiobotocore ^^^^^^^^^^^ -.. automodule:: ddtrace.contrib.aiobotocore +.. automodule:: oteltrace.contrib.aiobotocore .. _boto2: @@ -19,7 +19,7 @@ aiobotocore Boto2 ^^^^^ -.. automodule:: ddtrace.contrib.boto +.. automodule:: oteltrace.contrib.boto .. _botocore: @@ -27,7 +27,7 @@ Boto2 Botocore ^^^^^^^^ -.. automodule:: ddtrace.contrib.botocore +.. automodule:: oteltrace.contrib.botocore @@ -36,7 +36,7 @@ Botocore Futures ------- -.. automodule:: ddtrace.contrib.futures +.. automodule:: oteltrace.contrib.futures .. _celery: @@ -44,7 +44,7 @@ Futures Celery ------ -.. automodule:: ddtrace.contrib.celery +.. automodule:: oteltrace.contrib.celery .. _kombu: @@ -52,7 +52,7 @@ Celery Kombu ------ -.. automodule:: ddtrace.contrib.kombu +.. automodule:: oteltrace.contrib.kombu .. _httplib: @@ -60,32 +60,32 @@ Kombu httplib ------- -.. automodule:: ddtrace.contrib.httplib +.. automodule:: oteltrace.contrib.httplib .. _requests: Requests -------- -.. automodule:: ddtrace.contrib.requests +.. automodule:: oteltrace.contrib.requests .. _grpc: Grpc ---- -.. automodule:: ddtrace.contrib.grpc +.. automodule:: oteltrace.contrib.grpc .. _jinja2: Jinja2 ------ -.. automodule:: ddtrace.contrib.jinja2 +.. automodule:: oteltrace.contrib.jinja2 .. _mako: Mako ------ -.. automodule:: ddtrace.contrib.mako +.. automodule:: oteltrace.contrib.mako diff --git a/docs/shared.rst b/docs/shared.rst index b5d591ee..7c59005f 100644 --- a/docs/shared.rst +++ b/docs/shared.rst @@ -2,4 +2,4 @@ .. _official documentation: https://docs.datadoghq.com/tracing/visualization/ -.. _development guide: https://github.com/datadog/dd-trace-py#development +.. _development guide: https://github.com/datadog/otel-trace-py#development diff --git a/docs/web_integrations.rst b/docs/web_integrations.rst index 4145558c..635d7011 100644 --- a/docs/web_integrations.rst +++ b/docs/web_integrations.rst @@ -1,8 +1,8 @@ Web Frameworks -------------- -``ddtrace`` provides tracing support for many Python web frameworks. For each -framework ``ddtrace`` supports: +``oteltrace`` provides tracing support for many Python web frameworks. 
For each +framework ``oteltrace`` supports: - tracing of requests [*]_: trace requests through middleware and back - distributed tracing [*]_: trace requests across application boundaries @@ -17,7 +17,7 @@ framework ``ddtrace`` supports: aiohttp ^^^^^^^ -.. automodule:: ddtrace.contrib.aiohttp +.. automodule:: oteltrace.contrib.aiohttp .. _bottle: @@ -25,7 +25,7 @@ aiohttp Bottle ^^^^^^ -.. automodule:: ddtrace.contrib.bottle +.. automodule:: oteltrace.contrib.bottle .. _djangorestframework: .. _django: @@ -33,7 +33,7 @@ Bottle Django ^^^^^^ -.. automodule:: ddtrace.contrib.django +.. automodule:: oteltrace.contrib.django .. _falcon: @@ -41,7 +41,7 @@ Django Falcon ^^^^^^ -.. automodule:: ddtrace.contrib.falcon +.. automodule:: oteltrace.contrib.falcon .. _flask: @@ -50,21 +50,21 @@ Flask ^^^^^ -.. automodule:: ddtrace.contrib.flask +.. automodule:: oteltrace.contrib.flask .. _molten: Molten ^^^^^^ -.. automodule:: ddtrace.contrib.molten +.. automodule:: oteltrace.contrib.molten .. _pylons: Pylons ^^^^^^ -.. automodule:: ddtrace.contrib.pylons +.. automodule:: oteltrace.contrib.pylons .. _pyramid: @@ -72,7 +72,7 @@ Pylons Pyramid ^^^^^^^ -.. automodule:: ddtrace.contrib.pyramid +.. automodule:: oteltrace.contrib.pyramid .. _tornado: @@ -80,5 +80,5 @@ Pyramid Tornado ^^^^^^^ -.. automodule:: ddtrace.contrib.tornado +.. automodule:: oteltrace.contrib.tornado diff --git a/examples/Readme.rst b/examples/Readme.rst new file mode 100644 index 00000000..66f65bfe --- /dev/null +++ b/examples/Readme.rst @@ -0,0 +1,8 @@ +Examples +======== + +This folder contains two examples that shows the usage of ``oteltrace`` with +OpenTelemetry exporters and also how to change the HTTP trace context propagator. + +- `example 1 <./example1>`_ shows how to autointrument an application by using ``oteltrace-run``. +- `example 2 <./example2>`_ shows the usage of the ``oteltrace`` API and ``patch_all()``. \ No newline at end of file diff --git a/examples/example1/Readme.rst b/examples/example1/Readme.rst new file mode 100644 index 00000000..f638ee35 --- /dev/null +++ b/examples/example1/Readme.rst @@ -0,0 +1,126 @@ +Example 1: Using the OpenTelemetry Jaeger Exporter with ``oteltrace-run`` +========================================================================== + +This example shows how to configure the OpenTelemetry Jaeger exporter with +``oteltrace-run``. + +.. _jaeger_exporter_install: + +Installation +------------ + +This example requires ``oteltrace-py``, ``opentelemetry-api``, ``opentelemetry-sdk`` and the Jaeger exporter. + +We recommend to use virtualenv to create a new environment to run these examples. + +:: + + # create the virtual environment + virtualenv oteltracepy + # activate it + source oteltracepy/bin/activate + +Installing oteltrace-py and dependencies +**************************************** + +:: + + # install oteltrace-py (will install opentelemetry as well) + cd oteltrace-py + pip install -e . + + # install dependencies for the examples + pip install Flask requests + +Installing the OpenTelemetry Jaeger exporter +******************************************** + +The OpenTelemetry Jaeger exporter doesn't have a PyPI package yet, follow +these instructions to install it from source: + +:: + + git clone https://github.com/open-telemetry/opentelemetry-python + cd opentelemetry-python + pip install -e ./ext/opentelemetry-ext-jaeger + +.. _run_jaeger_agent: + +Running the Jaeger agent +------------------------ + +The easiest way to run the Jaeger agent is by using the ``all-in-one`` docker image. 
+For more details please visit ``_.
+
+::
+
+    docker run --rm \
+      -p 6831:6831/udp \
+      -p 6832:6832/udp \
+      -p 16686:16686 \
+      jaegertracing/all-in-one:1.13
+
+Configuring the exporter
+------------------------
+
+To use an OpenTelemetry SDK exporter with ``oteltrace-run``, a few
+environment variables have to be set before executing.
+
+The ``OTEL_EXPORTER_MODULE`` variable defines the Python module where the
+exporter is implemented and ``OTEL_EXPORTER_FACTORY`` is a function to be
+called inside that module to get an instance of the exporter.
+When ``oteltrace-run`` is executed, it loads ``OTEL_EXPORTER_MODULE`` and then
+calls ``OTEL_EXPORTER_FACTORY`` to get an instance of the exporter.
+
+::
+
+    # module where the opentelemetry SDK exporter is implemented
+    export OTEL_EXPORTER_MODULE=opentelemetry.ext.jaeger
+    # factory function that returns an instance of the exporter (constructor in this case)
+    export OTEL_EXPORTER_FACTORY=JaegerSpanExporter
+
+
+Specific configuration options for the exporter can be set by using the
+``OTEL_EXPORTER_OPTIONS_*`` env variables.
+All options set this way are collected and passed to the factory function as
+keyword arguments.
+
+For this example only the ``service_name`` parameter needs to be configured,
+as the other defaults work out of the box.
+
+::
+
+    # parameters to be passed to the factory function
+    export OTEL_EXPORTER_OPTIONS_service_name="example1"
+
+Optional: Choosing the HTTP trace context propagator
+----------------------------------------------------
+
+``oteltrace`` implements the ``b3``, ``w3c`` and ``DataDog`` HTTP trace
+context propagators.
+
+The ``OTEL_TRACER_PROPAGATOR`` env variable determines the propagator to be
+used.
+
+::
+
+    # use w3c trace propagator
+    export OTEL_TRACER_PROPAGATOR=w3c
+
+
+Running the example
+-------------------
+
+Finally, ``oteltrace-run`` can be executed.
+
+::
+
+    oteltrace-run python examples/example1/example1.py
+
+Using another terminal, perform a request to the example service:
+
+::
+
+    curl http://127.0.0.1:8055/
+
+Now you can open the Jaeger UI in your browser http://localhost:16686/ to see the traces.
diff --git a/examples/example1/example1.py b/examples/example1/example1.py
new file mode 100644
index 00000000..33cf5147
--- /dev/null
+++ b/examples/example1/example1.py
@@ -0,0 +1,30 @@
+from flask import Flask
+import requests
+
+app = Flask(__name__)
+
+
+@app.route('/')
+def hello_world():
+    r1 = requests.get(url='http://localhost:8055/word1')
+    r2 = requests.get(url='http://localhost:8055/word2')
+    return r1.text + ' ' + r2.text
+
+
+@app.route('/word1')
+def hello_word1():
+    r1 = requests.get(url='http://localhost:8055/word1/details')
+    return 'Welcome ' + r1.text
+
+
+@app.route('/word1/details')
+def hello_word1_details():
+    return 'to'
+
+
+@app.route('/word2')
+def hello_word2():
+    return 'OpenTelemetry'
+
+
+app.run(host='127.0.0.1', port=8055)
diff --git a/examples/example2/Readme.rst b/examples/example2/Readme.rst
new file mode 100644
index 00000000..7afe9ed1
--- /dev/null
+++ b/examples/example2/Readme.rst
@@ -0,0 +1,71 @@
+Example 2: Using ``patch_all``
+==============================
+
+This example shows how to configure the OpenTelemetry Jaeger exporter and the
+HTTP propagator using the oteltrace API.
+
+Please be sure that you have installed the Jaeger OpenTelemetry exporter and
+that the Jaeger Agent is running as stated in `example1 <../example1/Readme.rst>`_.
+
+
+Configuring the exporter
+------------------------
+
+To use an OpenTelemetry exporter, create an instance of it and pass it to
+``tracer.configure()`` wrapped in an ``APIOtel`` object.
+The following code snippet shows how to do this with Jaeger:
+
+::
+
+    import oteltrace
+
+    from oteltrace.api_otel_exporter import APIOtel
+    from opentelemetry.ext.jaeger import JaegerSpanExporter
+
+
+    # create Jaeger Exporter
+    jaeger_exporter = JaegerSpanExporter(
+        service_name='example2',
+    )
+
+    oteltrace.tracer.configure(
+        api=APIOtel(exporter=jaeger_exporter),
+    )
+
+Optional: Choosing the HTTP trace context propagator
+----------------------------------------------------
+
+``oteltrace`` lets you specify the HTTP propagator used by the
+different integrations.
+``w3c``, ``b3`` and ``DataDog`` propagators are shipped with ``oteltrace``,
+but a custom implementation can also be passed to ``tracer.configure()``.
+
+The following code snippet shows how to set ``w3c`` as the HTTP propagator:
+
+::
+
+    import oteltrace
+
+    from oteltrace.propagation.w3c import W3CHTTPPropagator
+
+    oteltrace.tracer.configure(
+        http_propagator=W3CHTTPPropagator,
+    )
+
+Running the example
+-------------------
+
+Finally, run the example directly with Python, since the tracer is configured
+in code.
+
+::
+
+    python examples/example2/example2.py
+
+Perform a request to the example service:
+
+::
+
+    curl http://127.0.0.1:8055/
+
+Now you can open the Jaeger UI in your browser http://localhost:16686/ to see the traces.
diff --git a/examples/example2/example2.py b/examples/example2/example2.py
new file mode 100644
index 00000000..f0b67234
--- /dev/null
+++ b/examples/example2/example2.py
@@ -0,0 +1,50 @@
+from oteltrace import patch_all
+patch_all()
+
+from oteltrace import tracer  # noqa: E402
+from oteltrace.api_otel_exporter import APIOtel  # noqa: E402
+from opentelemetry.ext.jaeger import JaegerSpanExporter  # noqa: E402
+from oteltrace.propagation.w3c import W3CHTTPPropagator  # noqa: E402
+
+# create Jaeger Exporter
+jaeger_exporter = JaegerSpanExporter(
+    service_name='example2',
+)
+
+# configure tracer with Jaeger exporter and w3c as http propagator
+tracer.configure(
+    api=APIOtel(exporter=jaeger_exporter),
+    http_propagator=W3CHTTPPropagator,
+)
+
+# same code as example1
+from flask import Flask  # noqa: E402
+import requests  # noqa: E402
+
+app = Flask(__name__)
+
+
+@app.route('/')
+def hello_world():
+    r1 = requests.get(url='http://localhost:8055/word1')
+    r2 = requests.get(url='http://localhost:8055/word2')
+    return r1.text + ' ' + r2.text
+
+
+@app.route('/word1')
+def hello_word1():
+    r1 = requests.get(url='http://localhost:8055/word1/details')
+    return 'Welcome ' + r1.text
+
+
+@app.route('/word1/details')
+def hello_word1_details():
+    return 'to'
+
+
+@app.route('/word2')
+def hello_word2():
+    return 'OpenTelemetry'
+
+
+app.run(host='127.0.0.1', port=8055)
diff --git a/ddtrace/__init__.py b/oteltrace/__init__.py
similarity index 100%
rename from ddtrace/__init__.py
rename to oteltrace/__init__.py
diff --git a/ddtrace/_worker.py b/oteltrace/_worker.py
similarity index 100%
rename from ddtrace/_worker.py
rename to oteltrace/_worker.py
diff --git a/oteltrace/api_otel_exporter.py b/oteltrace/api_otel_exporter.py new file mode 100644 index
00000000..ce08fcfb --- /dev/null +++ b/oteltrace/api_otel_exporter.py @@ -0,0 +1,106 @@ +# project +from .internal.logger import get_logger + +# opentelemetry +from opentelemetry import trace as trace_api +from opentelemetry.sdk import trace + +log = get_logger(__name__) + + +class Response(object): + """ + Custom API Response object to represent a response from calling the API. + + We do this to ensure we know expected properties will exist, and so we + can call `resp.read()` and load the body once into an instance before we + close the HTTPConnection used for the request. + """ + + def get_json(self): + """Helper to parse the body of this request as JSON""" + return None + + +class APIOtel(object): + """ + Export data to OpenTelemetry using an SDK exporter + """ + + def __init__(self, exporter): + self._exporter = exporter + + def send_traces(self, traces): + """Send traces to the API. + + :param traces: A list of traces. + :return: The list of API HTTP responses. + """ + responses = [] + + spans = [] + for tr in traces: + for span in tr: + spans.append(self._span_to_otel_span(span)) + self._exporter.export(spans) + return responses + + def _span_to_otel_span(self, span): + # create out-of-band span (it would be better to create a SpanView) + + # context for current span + context = trace_api.SpanContext( + trace_id=span.trace_id, span_id=span.span_id + ) + + # parent (if one is set) + parent = None + if span.parent_id is not None: + parent = trace_api.SpanContext( + trace_id=span.trace_id, span_id=span.parent_id + ) + + # attributes + attributes = {} + # ddog members to opentelemetry attributes. + # https://github.com/DataDog/dd-trace-py/blob/1f04d0fcfb3974611967004a22882b55db77433e/oteltrace/opentracer/span.py#L113 + # TODO: use constants + if span.span_type is not None: + # TODO(Mauricio): OpenTracing maps to 'span.type', I think + # component is the right one for OpenTelemetry + attributes['component'] = span.span_type + + if span.service is not None: + attributes['service.name'] = span.service + + if span.resource is not None: + attributes['resource.name'] = span.resource + + if span.context.sampling_priority is not None: + attributes['sampling.priority'] = span.context.sampling_priority + + # tags + for tag in span.meta.items(): + key = tag[0] + value = tag[1] + + if key == 'out.host': + key = 'peer.hostname' + elif key == 'out.port': + key = 'peer.port' + + attributes[key] = value + + # build span with all that info + otel_span = trace.Span( + name=span.name, + context=context, + parent=parent, + attributes=attributes + # TODO: attributes, events? links? 
+ ) + + otel_span.start_time = int(span.start * 10**9) + otel_span.end_time = int((span.start + span.duration) * 10**9) + + return otel_span diff --git a/ddtrace/bootstrap/__init__.py b/oteltrace/bootstrap/__init__.py similarity index 100% rename from ddtrace/bootstrap/__init__.py rename to oteltrace/bootstrap/__init__.py diff --git a/oteltrace/bootstrap/sitecustomize.py b/oteltrace/bootstrap/sitecustomize.py new file mode 100644 index 00000000..e3cad4e2 --- /dev/null +++ b/oteltrace/bootstrap/sitecustomize.py @@ -0,0 +1,207 @@ +""" +Bootstrapping code that is run when using the `oteltrace-run` Python entrypoint +Add all monkey-patching that needs to run by default here +""" + +import os +import imp +import sys +import logging +import importlib + +from oteltrace.propagation.datadog import DatadogHTTPPropagator +from oteltrace.propagation.w3c import W3CHTTPPropagator +from oteltrace.propagation.b3 import B3HTTPPropagator + +from oteltrace import api_otel_exporter + +from oteltrace.utils.formats import asbool, get_env +from oteltrace.internal.logger import get_logger +from oteltrace import constants + +logs_injection = asbool(get_env('logs', 'injection')) +OTEL_LOG_FORMAT = '%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] {}- %(message)s'.format( + '[otel.trace_id=%(otel.trace_id)s otel.span_id=%(otel.span_id)s] ' if logs_injection else '' +) + +if logs_injection: + # immediately patch logging if trace id injected + from oteltrace import patch + patch(logging=True) + +debug = os.environ.get('OPENTELEMETRY_TRACE_DEBUG') + +# Set here a default logging format for basicConfig + +# DEV: Once basicConfig is called here, future calls to it cannot be used to +# change the formatter since it applies the formatter to the root handler only +# upon initializing it the first time. 
+# See https://github.com/python/cpython/blob/112e4afd582515fcdcc0cde5012a4866e5cfda12/Lib/logging/__init__.py#L1550 +if debug and debug.lower() == 'true': + logging.basicConfig(level=logging.DEBUG, format=OTEL_LOG_FORMAT) +else: + logging.basicConfig(format=OTEL_LOG_FORMAT) + +log = get_logger(__name__) + +EXTRA_PATCHED_MODULES = { + 'bottle': True, + 'django': True, + 'falcon': True, + 'flask': True, + 'pylons': True, + 'pyramid': True, +} + + +def update_patched_modules(): + modules_to_patch = os.environ.get('OPENTELEMETRY_PATCH_MODULES') + if not modules_to_patch: + return + for patch in modules_to_patch.split(','): + if len(patch.split(':')) != 2: + log.debug('skipping malformed patch instruction') + continue + + module, should_patch = patch.split(':') + if should_patch.lower() not in ['true', 'false']: + log.debug('skipping malformed patch instruction for %s', module) + continue + + EXTRA_PATCHED_MODULES.update({module: should_patch.lower() == 'true'}) + + +def add_global_tags(tracer): + tags = {} + for tag in os.environ.get('OTEL_TRACE_GLOBAL_TAGS', '').split(','): + tag_name, _, tag_value = tag.partition(':') + if not tag_name or not tag_value: + log.debug('skipping malformed tracer tag') + continue + + tags[tag_name] = tag_value + tracer.set_tags(tags) + + +OTEL_MODULE = 'OTEL_EXPORTER_MODULE' +OTEL_FACTORY = 'OTEL_EXPORTER_FACTORY' +OTEL_OPT_PREFIX = 'OTEL_EXPORTER_OPTIONS_' + + +def get_otel_exporter_options(): + ops = {} + + for var in os.environ: + if var.startswith(OTEL_OPT_PREFIX): + opt_name = var[len(OTEL_OPT_PREFIX):] + ops[opt_name] = os.environ.get(var) + + return ops + + +def load_otel_exporter(): + exporter_module = os.environ.get(OTEL_MODULE) + if exporter_module is None: + log.error('%s is not defined.', OTEL_MODULE) + return None + + exporter_type = os.environ.get(OTEL_FACTORY) + if exporter_type is None: + log.error('%s is not defined.', OTEL_FACTORY) + return None + + try: + otel_module = importlib.import_module(exporter_module) + otel_callback = getattr(otel_module, exporter_type) + opt = get_otel_exporter_options() + return otel_callback(**opt) + except (ImportError, SyntaxError, AttributeError): + log.exception('Error creating exporter instance.') + return None + + +OTEL_TRACER_PROPAGATOR = 'OTEL_TRACER_PROPAGATOR' +OTEL_TRACER_PROPAGATOR_W3C = 'w3c' +OTEL_TRACER_PROPAGATOR_B3 = 'b3' +OTEL_TRACER_PROPAGATOR_DATADOG = 'datadog' +OTEL_TRACER_PROPAGATOR_DEFAULT = OTEL_TRACER_PROPAGATOR_W3C + +OTEL_TRACER_PROPAGATOR_MAP = { + OTEL_TRACER_PROPAGATOR_W3C: W3CHTTPPropagator, + OTEL_TRACER_PROPAGATOR_B3: B3HTTPPropagator, + OTEL_TRACER_PROPAGATOR_DATADOG: DatadogHTTPPropagator, +} + + +def get_http_propagator_factory(): + """Returns an http propagator factory based on set env variables""" + prop = os.getenv(OTEL_TRACER_PROPAGATOR, OTEL_TRACER_PROPAGATOR_DEFAULT) + return OTEL_TRACER_PROPAGATOR_MAP[prop.lower()] + + +try: + from oteltrace import tracer + patch = True + + # Respect OPENTELEMETRY_* environment variables in global tracer configuration + # TODO: these variables are deprecated; use utils method and update our documentation + # correct prefix should be OTEL_* + enabled = os.environ.get('OPENTELEMETRY_TRACE_ENABLED') + priority_sampling = os.environ.get('OPENTELEMETRY_PRIORITY_SAMPLING') + opts = {} + + if enabled and enabled.lower() == 'false': + opts['enabled'] = False + patch = False + if priority_sampling: + opts['priority_sampling'] = asbool(priority_sampling) + + opts['collect_metrics'] = asbool(get_env('runtime_metrics', 'enabled')) + + opts['api'] = 
api_otel_exporter.APIOtel(exporter=load_otel_exporter()) + + opts['http_propagator'] = get_http_propagator_factory() + + if opts: + tracer.configure(**opts) + + if logs_injection: + EXTRA_PATCHED_MODULES.update({'logging': True}) + + if patch: + update_patched_modules() + from oteltrace import patch_all + patch_all(**EXTRA_PATCHED_MODULES) + + if 'OPENTELEMETRY_ENV' in os.environ: + tracer.set_tags({constants.ENV_KEY: os.environ['OPENTELEMETRY_ENV']}) + + if 'OTEL_TRACE_GLOBAL_TAGS' in os.environ: + add_global_tags(tracer) + + # Ensure sitecustomize.py is properly called if available in application directories: + # * exclude `bootstrap_dir` from the search + # * find a user `sitecustomize.py` module + # * import that module via `imp` + bootstrap_dir = os.path.dirname(__file__) + path = list(sys.path) + + if bootstrap_dir in path: + path.remove(bootstrap_dir) + + try: + (f, path, description) = imp.find_module('sitecustomize', path) + except ImportError: + pass + else: + # `sitecustomize.py` found, load it + log.debug('sitecustomize from user found in: %s', path) + imp.load_module('sitecustomize', f, path, description) + + # Loading status used in tests to detect if the `sitecustomize` has been + # properly loaded without exceptions. This must be the last action in the module + # when the execution ends with a success. + loaded = True +except Exception: + loaded = False + log.warning('error configuring OpenTelemetry tracing', exc_info=True) diff --git a/ddtrace/commands/__init__.py b/oteltrace/commands/__init__.py similarity index 100% rename from ddtrace/commands/__init__.py rename to oteltrace/commands/__init__.py diff --git a/ddtrace/commands/ddtrace_run.py b/oteltrace/commands/oteltrace_run.py similarity index 55% rename from ddtrace/commands/ddtrace_run.py rename to oteltrace/commands/oteltrace_run.py index a13bdec3..01c2c8b0 100755 --- a/ddtrace/commands/ddtrace_run.py +++ b/oteltrace/commands/oteltrace_run.py @@ -4,41 +4,39 @@ import sys import logging -debug = os.environ.get('DATADOG_TRACE_DEBUG') +debug = os.environ.get('OPENTELEMETRY_TRACE_DEBUG') if debug and debug.lower() == 'true': logging.basicConfig(level=logging.DEBUG) -# Do not use `ddtrace.internal.logger.get_logger` here -# DEV: It isn't really necessary to use `DDLogger` here so we want to -# defer importing `ddtrace` until we actually need it. +# Do not use `oteltrace.internal.logger.get_logger` here +# DEV: It isn't really necessary to use `OtelLogger` here so we want to +# defer importing `oteltrace` until we actually need it. # As well, no actual rate limiting would apply here since we only # have a few logged lines log = logging.getLogger(__name__) USAGE = """ -Execute the given Python program after configuring it to emit Datadog traces. +Execute the given Python program after configuring it to emit OpenTelemetry traces. Append command line arguments to your program as usual. -Usage: [ENV_VARS] ddtrace-run +Usage: [ENV_VARS] oteltrace-run Available environment variables: - DATADOG_ENV : override an application's environment (no default) - DATADOG_TRACE_ENABLED=true|false : override the value of tracer.enabled (default: true) - DATADOG_TRACE_DEBUG=true|false : enabled debug logging (default: false) - DATADOG_PATCH_MODULES=module:patch,module:patch... e.g. 
boto:true,redis:false : override the modules patched for this execution of the program (default: none) - DATADOG_TRACE_AGENT_HOSTNAME=localhost: override the address of the trace agent host that the default tracer will attempt to submit to (default: localhost) - DATADOG_TRACE_AGENT_PORT=8126: override the port that the default tracer will submit to (default: 8126) - DATADOG_SERVICE_NAME : override the service name to be used for this program (no default) + OPENTELEMETRY_ENV : override an application's environment (no default) + OPENTELEMETRY_TRACE_ENABLED=true|false : override the value of tracer.enabled (default: true) + OPENTELEMETRY_TRACE_DEBUG=true|false : enabled debug logging (default: false) + OPENTELEMETRY_PATCH_MODULES=module:patch,module:patch... e.g. boto:true,redis:false : override the modules patched for this execution of the program (default: none) + OPENTELEMETRY_SERVICE_NAME : override the service name to be used for this program (no default) This value is passed through when setting up middleware for web framework integrations. (e.g. pylons, flask, django) For tracing without a web integration, prefer setting the service name in code. - DATADOG_PRIORITY_SAMPLING=true|false : (default: false): enables Priority Sampling. + OPENTELEMETRY_PRIORITY_SAMPLING=true|false : (default: false): enables Priority Sampling. """ # noqa: E501 -def _ddtrace_root(): - from ddtrace import __file__ +def _oteltrace_root(): + from oteltrace import __file__ return os.path.dirname(__file__) @@ -63,11 +61,11 @@ def main(): log.debug('sys.argv: %s', sys.argv) - root_dir = _ddtrace_root() - log.debug('ddtrace root: %s', root_dir) + root_dir = _oteltrace_root() + log.debug('oteltrace root: %s', root_dir) bootstrap_dir = os.path.join(root_dir, 'bootstrap') - log.debug('ddtrace bootstrap: %s', bootstrap_dir) + log.debug('oteltrace bootstrap: %s', bootstrap_dir) _add_bootstrap_to_pythonpath(bootstrap_dir) log.debug('PYTHONPATH: %s', os.environ['PYTHONPATH']) diff --git a/ddtrace/compat.py b/oteltrace/compat.py similarity index 99% rename from ddtrace/compat.py rename to oteltrace/compat.py index 907c41fc..914133f2 100644 --- a/ddtrace/compat.py +++ b/oteltrace/compat.py @@ -3,7 +3,7 @@ import sys import textwrap -from ddtrace.vendor import six +from oteltrace.vendor import six __all__ = [ 'httplib', diff --git a/ddtrace/constants.py b/oteltrace/constants.py similarity index 50% rename from ddtrace/constants.py rename to oteltrace/constants.py index 803e98a5..79834ef5 100644 --- a/ddtrace/constants.py +++ b/oteltrace/constants.py @@ -1,12 +1,12 @@ FILTERS_KEY = 'FILTERS' SAMPLE_RATE_METRIC_KEY = '_sample_rate' SAMPLING_PRIORITY_KEY = '_sampling_priority_v1' -ANALYTICS_SAMPLE_RATE_KEY = '_dd1.sr.eausr' -SAMPLING_AGENT_DECISION = '_dd.agent_psr' -SAMPLING_RULE_DECISION = '_dd.rule_psr' -SAMPLING_LIMIT_DECISION = '_dd.limit_psr' -ORIGIN_KEY = '_dd.origin' -HOSTNAME_KEY = '_dd.hostname' +ANALYTICS_SAMPLE_RATE_KEY = '_otel1.sr.eausr' +SAMPLING_AGENT_DECISION = '_otel.agent_psr' +SAMPLING_RULE_DECISION = '_otel.rule_psr' +SAMPLING_LIMIT_DECISION = '_otel.limit_psr' +ORIGIN_KEY = '_otel.origin' +HOSTNAME_KEY = '_otel.hostname' ENV_KEY = 'env' NUMERIC_TAGS = (ANALYTICS_SAMPLE_RATE_KEY, ) diff --git a/ddtrace/context.py b/oteltrace/context.py similarity index 98% rename from ddtrace/context.py rename to oteltrace/context.py index 7feff0c7..c949ce1b 100644 --- a/ddtrace/context.py +++ b/oteltrace/context.py @@ -28,7 +28,7 @@ class Context(object): _partial_flush_enabled = asbool(get_env('tracer', 
'partial_flush_enabled', 'false')) _partial_flush_min_spans = int(get_env('tracer', 'partial_flush_min_spans', 500)) - def __init__(self, trace_id=None, span_id=None, sampling_priority=None, _dd_origin=None): + def __init__(self, trace_id=None, span_id=None, sampling_priority=None, _otel_origin=None): """ Initialize a new thread-safe ``Context``. @@ -43,7 +43,7 @@ def __init__(self, trace_id=None, span_id=None, sampling_priority=None, _dd_orig self._parent_trace_id = trace_id self._parent_span_id = span_id self._sampling_priority = sampling_priority - self._dd_origin = _dd_origin + self._otel_origin = _otel_origin @property def trace_id(self): @@ -168,7 +168,7 @@ def get(self): # attach the sampling priority to the context root span if sampled and sampling_priority is not None and trace: trace[0].set_metric(SAMPLING_PRIORITY_KEY, sampling_priority) - origin = self._dd_origin + origin = self._otel_origin # attach the origin to the root span tag if sampled and origin is not None and trace: trace[0].set_tag(ORIGIN_KEY, origin) @@ -196,7 +196,7 @@ def get(self): # attach the sampling priority to the context root span if sampled and sampling_priority is not None and trace: trace[0].set_metric(SAMPLING_PRIORITY_KEY, sampling_priority) - origin = self._dd_origin + origin = self._otel_origin # attach the origin to the root span tag if sampled and origin is not None and trace: trace[0].set_tag(ORIGIN_KEY, origin) diff --git a/ddtrace/contrib/__init__.py b/oteltrace/contrib/__init__.py similarity index 100% rename from ddtrace/contrib/__init__.py rename to oteltrace/contrib/__init__.py diff --git a/ddtrace/contrib/aiobotocore/__init__.py b/oteltrace/contrib/aiobotocore/__init__.py similarity index 96% rename from ddtrace/contrib/aiobotocore/__init__.py rename to oteltrace/contrib/aiobotocore/__init__.py index 0f5a5a19..6f050cf5 100644 --- a/ddtrace/contrib/aiobotocore/__init__.py +++ b/oteltrace/contrib/aiobotocore/__init__.py @@ -6,7 +6,7 @@ :: import aiobotocore.session - from ddtrace import patch + from oteltrace import patch # If not patched yet, you can patch botocore specifically patch(aiobotocore=True) diff --git a/ddtrace/contrib/aiobotocore/patch.py b/oteltrace/contrib/aiobotocore/patch.py similarity index 92% rename from ddtrace/contrib/aiobotocore/patch.py rename to oteltrace/contrib/aiobotocore/patch.py index 9e5905e1..58a7e0b8 100644 --- a/ddtrace/contrib/aiobotocore/patch.py +++ b/oteltrace/contrib/aiobotocore/patch.py @@ -1,6 +1,6 @@ import asyncio -from ddtrace.vendor import wrapt -from ddtrace import config +from oteltrace.vendor import wrapt +from oteltrace import config import aiobotocore.client from aiobotocore.endpoint import ClientResponseContentProxy @@ -18,17 +18,17 @@ def patch(): - if getattr(aiobotocore.client, '_datadog_patch', False): + if getattr(aiobotocore.client, '_opentelemetry_patch', False): return - setattr(aiobotocore.client, '_datadog_patch', True) + setattr(aiobotocore.client, '_opentelemetry_patch', True) wrapt.wrap_function_wrapper('aiobotocore.client', 'AioBaseClient._make_api_call', _wrapped_api_call) Pin(service='aws', app='aws', app_type='web').onto(aiobotocore.client.AioBaseClient) def unpatch(): - if getattr(aiobotocore.client, '_datadog_patch', False): - setattr(aiobotocore.client, '_datadog_patch', False) + if getattr(aiobotocore.client, '_opentelemetry_patch', False): + setattr(aiobotocore.client, '_opentelemetry_patch', False) unwrap(aiobotocore.client.AioBaseClient, '_make_api_call') diff --git a/ddtrace/contrib/aiohttp/__init__.py 
b/oteltrace/contrib/aiohttp/__init__.py similarity index 82% rename from ddtrace/contrib/aiohttp/__init__.py rename to oteltrace/contrib/aiohttp/__init__.py index 881634c5..d3f4768a 100644 --- a/ddtrace/contrib/aiohttp/__init__.py +++ b/oteltrace/contrib/aiohttp/__init__.py @@ -3,8 +3,8 @@ Auto instrumentation is available using the ``trace_app`` function:: from aiohttp import web - from ddtrace import tracer, patch - from ddtrace.contrib.aiohttp import trace_app + from oteltrace import tracer, patch + from oteltrace.contrib.aiohttp import trace_app # patch third-party modules like aiohttp_jinja2 patch(aiohttp=True) @@ -17,16 +17,16 @@ trace_app(app, tracer, service='async-api') web.run_app(app, port=8000) -Integration settings are attached to your application under the ``datadog_trace`` +Integration settings are attached to your application under the ``opentelemetry_trace`` namespace. You can read or update them as follows:: # disables distributed tracing for all received requests - app['datadog_trace']['distributed_tracing_enabled'] = False + app['opentelemetry_trace']['distributed_tracing_enabled'] = False Available settings are: -* ``tracer`` (default: ``ddtrace.tracer``): set the default tracer instance that is used to - trace `aiohttp` internals. By default the `ddtrace` tracer is used. +* ``tracer`` (default: ``oteltrace.tracer``): set the default tracer instance that is used to + trace `aiohttp` internals. By default the `oteltrace` tracer is used. * ``service`` (default: ``aiohttp-web``): set the service name used by the tracer. Usually this configuration must be updated with a meaningful name. * ``distributed_tracing_enabled`` (default: ``True``): enable distributed tracing during @@ -42,7 +42,7 @@ to the ``request`` object, so that it can be used in the application code:: async def home_handler(request): - ctx = request['datadog_context'] + ctx = request['opentelemetry_context'] # do something with the tracing Context """ from ...utils.importlib import require_modules diff --git a/ddtrace/contrib/aiohttp/middlewares.py b/oteltrace/contrib/aiohttp/middlewares.py similarity index 93% rename from ddtrace/contrib/aiohttp/middlewares.py rename to oteltrace/contrib/aiohttp/middlewares.py index 8bd3128b..ba2bfc23 100644 --- a/ddtrace/contrib/aiohttp/middlewares.py +++ b/oteltrace/contrib/aiohttp/middlewares.py @@ -8,10 +8,10 @@ from ...settings import config -CONFIG_KEY = 'datadog_trace' -REQUEST_CONTEXT_KEY = 'datadog_context' -REQUEST_CONFIG_KEY = '__datadog_trace_config' -REQUEST_SPAN_KEY = '__datadog_request_span' +CONFIG_KEY = 'opentelemetry_trace' +REQUEST_CONTEXT_KEY = 'opentelemetry_context' +REQUEST_CONFIG_KEY = '__opentelemetry_trace_config' +REQUEST_SPAN_KEY = '__opentelemetry_request_span' @asyncio.coroutine @@ -123,11 +123,11 @@ def trace_app(app, tracer, service='aiohttp-web'): """ # safe-guard: don't trace an application twice - if getattr(app, '__datadog_trace', False): + if getattr(app, '__opentelemetry_trace', False): return - setattr(app, '__datadog_trace', True) + setattr(app, '__opentelemetry_trace', True) - # configure datadog settings + # configure opentelemetry settings app[CONFIG_KEY] = { 'tracer': tracer, 'service': service, diff --git a/ddtrace/contrib/aiohttp/patch.py b/oteltrace/contrib/aiohttp/patch.py similarity index 71% rename from ddtrace/contrib/aiohttp/patch.py rename to oteltrace/contrib/aiohttp/patch.py index 0a5f0c15..3971436a 100644 --- a/ddtrace/contrib/aiohttp/patch.py +++ b/oteltrace/contrib/aiohttp/patch.py @@ -1,4 +1,4 @@ -from 
ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt from ...pin import Pin from ...utils.wrappers import unwrap @@ -20,9 +20,9 @@ def patch(): * aiohttp_jinja2 """ if template_module: - if getattr(aiohttp_jinja2, '__datadog_patch', False): + if getattr(aiohttp_jinja2, '__opentelemetry_patch', False): return - setattr(aiohttp_jinja2, '__datadog_patch', True) + setattr(aiohttp_jinja2, '__opentelemetry_patch', True) _w = wrapt.wrap_function_wrapper _w('aiohttp_jinja2', 'render_template', _trace_render_template) @@ -34,6 +34,6 @@ def unpatch(): Remove tracing from patched modules. """ if template_module: - if getattr(aiohttp_jinja2, '__datadog_patch', False): - setattr(aiohttp_jinja2, '__datadog_patch', False) + if getattr(aiohttp_jinja2, '__opentelemetry_patch', False): + setattr(aiohttp_jinja2, '__opentelemetry_patch', False) unwrap(aiohttp_jinja2, 'render_template') diff --git a/ddtrace/contrib/aiohttp/template.py b/oteltrace/contrib/aiohttp/template.py similarity index 97% rename from ddtrace/contrib/aiohttp/template.py rename to oteltrace/contrib/aiohttp/template.py index 8dcbef55..eec562c1 100644 --- a/ddtrace/contrib/aiohttp/template.py +++ b/oteltrace/contrib/aiohttp/template.py @@ -1,6 +1,6 @@ import aiohttp_jinja2 -from ddtrace import Pin +from oteltrace import Pin from ...ext import http diff --git a/ddtrace/contrib/aiopg/__init__.py b/oteltrace/contrib/aiopg/__init__.py similarity index 95% rename from ddtrace/contrib/aiopg/__init__.py rename to oteltrace/contrib/aiopg/__init__.py index df1580e1..ab843fda 100644 --- a/ddtrace/contrib/aiopg/__init__.py +++ b/oteltrace/contrib/aiopg/__init__.py @@ -1,7 +1,7 @@ """ Instrument aiopg to report a span for each executed Postgres queries:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch import aiopg # If not patched yet, you can patch aiopg specifically diff --git a/ddtrace/contrib/aiopg/connection.py b/oteltrace/contrib/aiopg/connection.py similarity index 95% rename from ddtrace/contrib/aiopg/connection.py rename to oteltrace/contrib/aiopg/connection.py index da21fd94..88a69b9b 100644 --- a/ddtrace/contrib/aiopg/connection.py +++ b/oteltrace/contrib/aiopg/connection.py @@ -1,5 +1,5 @@ import asyncio -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt from aiopg.utils import _ContextManager @@ -17,7 +17,7 @@ def __init__(self, cursor, pin): super(AIOTracedCursor, self).__init__(cursor) pin.onto(self) name = pin.app or 'sql' - self._datadog_name = '%s.query' % name + self._opentelemetry_name = '%s.query' % name @asyncio.coroutine def _trace_method(self, method, resource, extra_tags, *args, **kwargs): @@ -27,7 +27,7 @@ def _trace_method(self, method, resource, extra_tags, *args, **kwargs): return result service = pin.service - with pin.tracer.trace(self._datadog_name, service=service, + with pin.tracer.trace(self._opentelemetry_name, service=service, resource=resource) as s: s.span_type = sql.TYPE s.set_tag(sql.QUERY, resource) diff --git a/ddtrace/contrib/aiopg/patch.py b/oteltrace/contrib/aiopg/patch.py similarity index 85% rename from ddtrace/contrib/aiopg/patch.py rename to oteltrace/contrib/aiopg/patch.py index 62160cf9..9e090790 100644 --- a/ddtrace/contrib/aiopg/patch.py +++ b/oteltrace/contrib/aiopg/patch.py @@ -3,7 +3,7 @@ import aiopg.connection import psycopg2.extensions -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt from .connection import AIOTracedConnection from ..psycopg.patch import _patch_extensions, \ @@ -15,17 +15,17 @@ def patch(): """ Patch monkey 
patches psycopg's connection function so that the connection's functions are traced. """ - if getattr(aiopg, '_datadog_patch', False): + if getattr(aiopg, '_opentelemetry_patch', False): return - setattr(aiopg, '_datadog_patch', True) + setattr(aiopg, '_opentelemetry_patch', True) wrapt.wrap_function_wrapper(aiopg.connection, '_connect', patched_connect) _patch_extensions(_aiopg_extensions) # do this early just in case def unpatch(): - if getattr(aiopg, '_datadog_patch', False): - setattr(aiopg, '_datadog_patch', False) + if getattr(aiopg, '_opentelemetry_patch', False): + setattr(aiopg, '_opentelemetry_patch', False) _u(aiopg.connection, '_connect') _unpatch_extensions(_aiopg_extensions) diff --git a/ddtrace/contrib/algoliasearch/__init__.py b/oteltrace/contrib/algoliasearch/__init__.py similarity index 74% rename from ddtrace/contrib/algoliasearch/__init__.py rename to oteltrace/contrib/algoliasearch/__init__.py index ff5cc604..d04dcb77 100644 --- a/ddtrace/contrib/algoliasearch/__init__.py +++ b/oteltrace/contrib/algoliasearch/__init__.py @@ -3,7 +3,7 @@ :: - from ddtrace import patch_all + from oteltrace import patch_all patch_all() from algoliasearch import algoliasearch @@ -14,9 +14,9 @@ Configuration ~~~~~~~~~~~~~ -.. py:data:: ddtrace.config.algoliasearch['collect_query_text'] +.. py:data:: oteltrace.config.algoliasearch['collect_query_text'] - Whether to pass the text of your query onto Datadog. Since this may contain sensitive data it's off by default + Whether to pass the text of your query onto OpenTelemetry. Since this may contain sensitive data it's off by default Default: ``False`` diff --git a/ddtrace/contrib/algoliasearch/patch.py b/oteltrace/contrib/algoliasearch/patch.py similarity index 88% rename from ddtrace/contrib/algoliasearch/patch.py rename to oteltrace/contrib/algoliasearch/patch.py index 633e48d1..ff41b4fa 100644 --- a/ddtrace/contrib/algoliasearch/patch.py +++ b/oteltrace/contrib/algoliasearch/patch.py @@ -1,10 +1,10 @@ -from ddtrace.ext import AppTypes -from ddtrace.pin import Pin -from ddtrace.settings import config -from ddtrace.utils.wrappers import unwrap as _u -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.ext import AppTypes +from oteltrace.pin import Pin +from oteltrace.settings import config +from oteltrace.utils.wrappers import unwrap as _u +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w -DD_PATCH_ATTR = '_datadog_patch' +OTEL_PATCH_ATTR = '_opentelemetry_patch' SERVICE_NAME = 'algoliasearch' APP_NAME = 'algoliasearch' @@ -28,10 +28,10 @@ def patch(): if algoliasearch_version == (0, 0): return - if getattr(algoliasearch, DD_PATCH_ATTR, False): + if getattr(algoliasearch, OTEL_PATCH_ATTR, False): return - setattr(algoliasearch, '_datadog_patch', True) + setattr(algoliasearch, '_opentelemetry_patch', True) pin = Pin( service=config.algoliasearch.service_name, app=APP_NAME, @@ -53,8 +53,8 @@ def unpatch(): if algoliasearch_version == (0, 0): return - if getattr(algoliasearch, DD_PATCH_ATTR, False): - setattr(algoliasearch, DD_PATCH_ATTR, False) + if getattr(algoliasearch, OTEL_PATCH_ATTR, False): + setattr(algoliasearch, OTEL_PATCH_ATTR, False) if algoliasearch_version < (2, 0) and algoliasearch_version >= (1, 0): _u(algoliasearch.index.Index, 'search') @@ -68,7 +68,7 @@ def unpatch(): # DEV: this maps serves the dual purpose of enumerating the algoliasearch.search() query_args that # will be sent along as tags, as well as converting arguments names into tag names compliant with # tag naming recommendations 
set out here: https://docs.datadoghq.com/tagging/ -QUERY_ARGS_DD_TAG_MAP = { +QUERY_ARGS_OTEL_TAG_MAP = { 'page': 'page', 'hitsPerPage': 'hits_per_page', 'attributesToRetrieve': 'attributes_to_retrieve', @@ -113,7 +113,7 @@ def _patched_search(func, instance, wrapt_args, wrapt_kwargs): query_args = wrapt_kwargs.get(function_query_arg_name, wrapt_args[1] if len(wrapt_args) > 1 else None) if query_args and isinstance(query_args, dict): - for query_arg, tag_name in QUERY_ARGS_DD_TAG_MAP.items(): + for query_arg, tag_name in QUERY_ARGS_OTEL_TAG_MAP.items(): value = query_args.get(query_arg) if value is not None: span.set_tag('query.args.{}'.format(tag_name), value) diff --git a/ddtrace/contrib/asyncio/__init__.py b/oteltrace/contrib/asyncio/__init__.py similarity index 91% rename from ddtrace/contrib/asyncio/__init__.py rename to oteltrace/contrib/asyncio/__init__.py index 57f7e99e..14ba6936 100644 --- a/ddtrace/contrib/asyncio/__init__.py +++ b/oteltrace/contrib/asyncio/__init__.py @@ -4,8 +4,8 @@ of ``asyncio``. To trace asynchronous execution, you must:: import asyncio - from ddtrace import tracer - from ddtrace.contrib.asyncio import context_provider + from oteltrace import tracer + from oteltrace.contrib.asyncio import context_provider # enable asyncio support tracer.configure(context_provider=context_provider) @@ -20,8 +20,8 @@ async def some_work(): loop.close() If ``contextvars`` is available, we use the -:class:`ddtrace.provider.DefaultContextProvider`, otherwise we use the legacy -:class:`ddtrace.contrib.asyncio.provider.AsyncioContextProvider`. +:class:`oteltrace.provider.DefaultContextProvider`, otherwise we use the legacy +:class:`oteltrace.contrib.asyncio.provider.AsyncioContextProvider`. In addition, helpers are provided to simplify how the tracing ``Context`` is handled between scheduled coroutines and ``Future`` invoked in separated diff --git a/ddtrace/contrib/asyncio/compat.py b/oteltrace/contrib/asyncio/compat.py similarity index 100% rename from ddtrace/contrib/asyncio/compat.py rename to oteltrace/contrib/asyncio/compat.py diff --git a/ddtrace/contrib/asyncio/helpers.py b/oteltrace/contrib/asyncio/helpers.py similarity index 97% rename from ddtrace/contrib/asyncio/helpers.py rename to oteltrace/contrib/asyncio/helpers.py index 2a3d0f40..8c36e833 100644 --- a/ddtrace/contrib/asyncio/helpers.py +++ b/oteltrace/contrib/asyncio/helpers.py @@ -4,7 +4,7 @@ Context and Spans in instrumented ``asyncio`` code. """ import asyncio -import ddtrace +import oteltrace from .provider import CONTEXT_ATTR from .wrappers import wrapped_create_task @@ -29,7 +29,7 @@ def ensure_future(coro_or_future, *, loop=None, tracer=None): task already has a Context, it will be attached to the new Task so the Trace list will be preserved. """ - tracer = tracer or ddtrace.tracer + tracer = tracer or oteltrace.tracer current_ctx = tracer.get_call_context() task = asyncio.ensure_future(coro_or_future, loop=loop) set_call_context(task, current_ctx) @@ -54,7 +54,7 @@ def run_in_executor(loop, executor, func, *args, tracer=None): the latest active Span when the new thread was created. In this new thread, we fallback to the thread-local ``Context`` storage. 
""" - tracer = tracer or ddtrace.tracer + tracer = tracer or oteltrace.tracer ctx = Context() current_ctx = tracer.get_call_context() ctx._current_span = current_ctx._current_span diff --git a/ddtrace/contrib/asyncio/patch.py b/oteltrace/contrib/asyncio/patch.py similarity index 70% rename from ddtrace/contrib/asyncio/patch.py rename to oteltrace/contrib/asyncio/patch.py index 4d38f0fa..014c855b 100644 --- a/ddtrace/contrib/asyncio/patch.py +++ b/oteltrace/contrib/asyncio/patch.py @@ -1,6 +1,6 @@ import asyncio -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w from ...internal.context_manager import CONTEXTVARS_IS_AVAILABLE from .wrappers import wrapped_create_task, wrapped_create_task_contextvars @@ -11,9 +11,9 @@ def patch(): """Patches current loop `create_task()` method to enable spawned tasks to parent to the base task context. """ - if getattr(asyncio, '_datadog_patch', False): + if getattr(asyncio, '_opentelemetry_patch', False): return - setattr(asyncio, '_datadog_patch', True) + setattr(asyncio, '_opentelemetry_patch', True) loop = asyncio.get_event_loop() if CONTEXTVARS_IS_AVAILABLE: @@ -25,8 +25,8 @@ def patch(): def unpatch(): """Remove tracing from patched modules.""" - if getattr(asyncio, '_datadog_patch', False): - setattr(asyncio, '_datadog_patch', False) + if getattr(asyncio, '_opentelemetry_patch', False): + setattr(asyncio, '_opentelemetry_patch', False) loop = asyncio.get_event_loop() _u(loop, 'create_task') diff --git a/ddtrace/contrib/asyncio/provider.py b/oteltrace/contrib/asyncio/provider.py similarity index 98% rename from ddtrace/contrib/asyncio/provider.py rename to oteltrace/contrib/asyncio/provider.py index 6748e2ed..d9a9eb7d 100644 --- a/ddtrace/contrib/asyncio/provider.py +++ b/oteltrace/contrib/asyncio/provider.py @@ -4,7 +4,7 @@ from ...provider import DefaultContextProvider # Task attribute used to set/get the Context instance -CONTEXT_ATTR = '__datadog_context' +CONTEXT_ATTR = '__opentelemetry_context' class AsyncioContextProvider(DefaultContextProvider): diff --git a/ddtrace/contrib/asyncio/wrappers.py b/oteltrace/contrib/asyncio/wrappers.py similarity index 90% rename from ddtrace/contrib/asyncio/wrappers.py rename to oteltrace/contrib/asyncio/wrappers.py index 00d7d8db..67c849b9 100644 --- a/ddtrace/contrib/asyncio/wrappers.py +++ b/oteltrace/contrib/asyncio/wrappers.py @@ -1,4 +1,4 @@ -import ddtrace +import oteltrace from .compat import asyncio_current_task from .provider import CONTEXT_ATTR @@ -39,7 +39,7 @@ def wrapped_create_task_contextvars(wrapped, instance, args, kwargs): ``Context`` to the new ``Task``. This function is useful to connect traces of detached executions. Uses contextvars for task-local storage. 
""" - current_task_ctx = ddtrace.tracer.get_call_context() + current_task_ctx = oteltrace.tracer.get_call_context() if not current_task_ctx: # no current context exists so nothing special to be done in handling @@ -49,10 +49,10 @@ def wrapped_create_task_contextvars(wrapped, instance, args, kwargs): # clone and activate current task's context for new task to support # detached executions new_task_ctx = current_task_ctx.clone() - ddtrace.tracer.context_provider.activate(new_task_ctx) + oteltrace.tracer.context_provider.activate(new_task_ctx) try: # activated context will now be copied to new task return wrapped(*args, **kwargs) finally: # reactivate current task context - ddtrace.tracer.context_provider.activate(current_task_ctx) + oteltrace.tracer.context_provider.activate(current_task_ctx) diff --git a/ddtrace/contrib/boto/__init__.py b/oteltrace/contrib/boto/__init__.py similarity index 95% rename from ddtrace/contrib/boto/__init__.py rename to oteltrace/contrib/boto/__init__.py index f5b2b1fd..73c57cd3 100644 --- a/ddtrace/contrib/boto/__init__.py +++ b/oteltrace/contrib/boto/__init__.py @@ -3,7 +3,7 @@ This integration is automatically patched when using ``patch_all()``:: import boto.ec2 - from ddtrace import patch + from oteltrace import patch # If not patched yet, you can patch boto specifically patch(boto=True) diff --git a/ddtrace/contrib/boto/patch.py b/oteltrace/contrib/boto/patch.py similarity index 91% rename from ddtrace/contrib/boto/patch.py rename to oteltrace/contrib/boto/patch.py index 62911238..ad03b649 100644 --- a/ddtrace/contrib/boto/patch.py +++ b/oteltrace/contrib/boto/patch.py @@ -1,8 +1,8 @@ import boto.connection -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt import inspect -from ddtrace import config +from oteltrace import config from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...pin import Pin from ...ext import http, aws @@ -33,9 +33,9 @@ def patch(): different services for connection. 
For exemple EC2 uses AWSQueryConnection and S3 uses AWSAuthConnection """ - if getattr(boto.connection, '_datadog_patch', False): + if getattr(boto.connection, '_opentelemetry_patch', False): return - setattr(boto.connection, '_datadog_patch', True) + setattr(boto.connection, '_opentelemetry_patch', True) wrapt.wrap_function_wrapper( 'boto.connection', 'AWSQueryConnection.make_request', patched_query_request @@ -52,8 +52,8 @@ def patch(): def unpatch(): - if getattr(boto.connection, '_datadog_patch', False): - setattr(boto.connection, '_datadog_patch', False) + if getattr(boto.connection, '_opentelemetry_patch', False): + setattr(boto.connection, '_opentelemetry_patch', False) unwrap(boto.connection.AWSQueryConnection, 'make_request') unwrap(boto.connection.AWSAuthConnection, 'make_request') @@ -114,10 +114,10 @@ def patched_auth_request(original_func, instance, args, kwargs): # Catching the name of the operation that called make_request() operation_name = None - # Go up the stack until we get the first non-ddtrace module + # Go up the stack until we get the first non-oteltrace module # DEV: For `lambda.list_functions()` this should be: - # - ddtrace.contrib.boto.patch - # - ddtrace.vendor.wrapt.wrappers + # - oteltrace.contrib.boto.patch + # - oteltrace.vendor.wrapt.wrappers # - boto.awslambda.layer1 (make_request) # - boto.awslambda.layer1 (list_functions) # But can vary depending on Python versions; that's why we use an heuristic diff --git a/ddtrace/contrib/botocore/__init__.py b/oteltrace/contrib/botocore/__init__.py similarity index 96% rename from ddtrace/contrib/botocore/__init__.py rename to oteltrace/contrib/botocore/__init__.py index adba2c01..c873970b 100644 --- a/ddtrace/contrib/botocore/__init__.py +++ b/oteltrace/contrib/botocore/__init__.py @@ -5,7 +5,7 @@ This integration is automatically patched when using ``patch_all()``:: import botocore.session - from ddtrace import patch + from oteltrace import patch # If not patched yet, you can patch botocore specifically patch(botocore=True) diff --git a/ddtrace/contrib/botocore/patch.py b/oteltrace/contrib/botocore/patch.py similarity index 87% rename from ddtrace/contrib/botocore/patch.py rename to oteltrace/contrib/botocore/patch.py index 066ba021..ae70f2dc 100644 --- a/ddtrace/contrib/botocore/patch.py +++ b/oteltrace/contrib/botocore/patch.py @@ -2,8 +2,8 @@ Trace queries to aws api done via botocore client """ # 3p -from ddtrace.vendor import wrapt -from ddtrace import config +from oteltrace.vendor import wrapt +from oteltrace import config import botocore.client # project @@ -23,17 +23,17 @@ def patch(): - if getattr(botocore.client, '_datadog_patch', False): + if getattr(botocore.client, '_opentelemetry_patch', False): return - setattr(botocore.client, '_datadog_patch', True) + setattr(botocore.client, '_opentelemetry_patch', True) wrapt.wrap_function_wrapper('botocore.client', 'BaseClient._make_api_call', patched_api_call) Pin(service='aws', app='aws', app_type='web').onto(botocore.client.BaseClient) def unpatch(): - if getattr(botocore.client, '_datadog_patch', False): - setattr(botocore.client, '_datadog_patch', False) + if getattr(botocore.client, '_opentelemetry_patch', False): + setattr(botocore.client, '_opentelemetry_patch', False) unwrap(botocore.client.BaseClient, '_make_api_call') diff --git a/ddtrace/contrib/bottle/__init__.py b/oteltrace/contrib/bottle/__init__.py similarity index 85% rename from ddtrace/contrib/bottle/__init__.py rename to oteltrace/contrib/bottle/__init__.py index bcf3a571..4982a565 100644 
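The patch()/unpatch() helpers renamed above (aiobotocore, aiopg, asyncio, boto, botocore) all share the same idempotency guard. A self-contained sketch of that pattern, using the stdlib ``json`` module as a stand-in for the instrumented library (illustrative only, not part of this patch)::

    import json

    from oteltrace.utils.wrappers import unwrap
    from oteltrace.vendor import wrapt

    def _traced_dumps(wrapped, instance, args, kwargs):
        # a real integration would open a span around the wrapped call here
        return wrapped(*args, **kwargs)

    def patch():
        # the module-level flag keeps patch() idempotent, mirroring the
        # '_opentelemetry_patch' checks in the integrations above
        if getattr(json, '_opentelemetry_patch', False):
            return
        setattr(json, '_opentelemetry_patch', True)
        wrapt.wrap_function_wrapper('json', 'dumps', _traced_dumps)

    def unpatch():
        if getattr(json, '_opentelemetry_patch', False):
            setattr(json, '_opentelemetry_patch', False)
            unwrap(json, 'dumps')

    patch()
    patch()   # no-op: the guard prevents double instrumentation
    assert json.dumps({'ok': True}) == '{"ok": true}'
    unpatch()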
--- a/ddtrace/contrib/bottle/__init__.py +++ b/oteltrace/contrib/bottle/__init__.py @@ -3,8 +3,8 @@ plugin to your app:: import bottle - from ddtrace import tracer - from ddtrace.contrib.bottle import TracePlugin + from oteltrace import tracer + from oteltrace.contrib.bottle import TracePlugin app = bottle.Bottle() plugin = TracePlugin(service="my-web-app") diff --git a/ddtrace/contrib/bottle/patch.py b/oteltrace/contrib/bottle/patch.py similarity index 63% rename from ddtrace/contrib/bottle/patch.py rename to oteltrace/contrib/bottle/patch.py index 802b4e07..ae617642 100644 --- a/ddtrace/contrib/bottle/patch.py +++ b/oteltrace/contrib/bottle/patch.py @@ -4,23 +4,23 @@ import bottle -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt def patch(): """Patch the bottle.Bottle class """ - if getattr(bottle, '_datadog_patch', False): + if getattr(bottle, '_opentelemetry_patch', False): return - setattr(bottle, '_datadog_patch', True) + setattr(bottle, '_opentelemetry_patch', True) wrapt.wrap_function_wrapper('bottle', 'Bottle.__init__', traced_init) def traced_init(wrapped, instance, args, kwargs): wrapped(*args, **kwargs) - service = os.environ.get('DATADOG_SERVICE_NAME') or 'bottle' + service = os.environ.get('OPENTELEMETRY_SERVICE_NAME') or 'bottle' plugin = TracePlugin(service=service) instance.install(plugin) diff --git a/ddtrace/contrib/bottle/trace.py b/oteltrace/contrib/bottle/trace.py similarity index 97% rename from ddtrace/contrib/bottle/trace.py rename to oteltrace/contrib/bottle/trace.py index 544dacc9..054649fb 100644 --- a/ddtrace/contrib/bottle/trace.py +++ b/oteltrace/contrib/bottle/trace.py @@ -2,7 +2,7 @@ from bottle import response, request, HTTPError # stdlib -import ddtrace +import oteltrace # project from ...constants import ANALYTICS_SAMPLE_RATE_KEY @@ -19,7 +19,7 @@ class TracePlugin(object): def __init__(self, service='bottle', tracer=None, distributed_tracing=True): self.service = service - self.tracer = tracer or ddtrace.tracer + self.tracer = tracer or oteltrace.tracer self.distributed_tracing = distributed_tracing def apply(self, callback, route): diff --git a/ddtrace/contrib/cassandra/__init__.py b/oteltrace/contrib/cassandra/__init__.py similarity index 96% rename from ddtrace/contrib/cassandra/__init__.py rename to oteltrace/contrib/cassandra/__init__.py index bc2bf638..c774d561 100644 --- a/ddtrace/contrib/cassandra/__init__.py +++ b/oteltrace/contrib/cassandra/__init__.py @@ -3,7 +3,7 @@ ``patch_all`` will automatically patch your Cluster instance to make it work. 
:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch from cassandra.cluster import Cluster # If not patched yet, you can patch cassandra specifically diff --git a/ddtrace/contrib/cassandra/patch.py b/oteltrace/contrib/cassandra/patch.py similarity index 100% rename from ddtrace/contrib/cassandra/patch.py rename to oteltrace/contrib/cassandra/patch.py diff --git a/ddtrace/contrib/cassandra/session.py b/oteltrace/contrib/cassandra/session.py similarity index 99% rename from ddtrace/contrib/cassandra/session.py rename to oteltrace/contrib/cassandra/session.py index e6a788be..26d04e6f 100644 --- a/ddtrace/contrib/cassandra/session.py +++ b/oteltrace/contrib/cassandra/session.py @@ -21,8 +21,8 @@ RESOURCE_MAX_LENGTH = 5000 SERVICE = 'cassandra' -CURRENT_SPAN = '_ddtrace_current_span' -PAGE_NUMBER = '_ddtrace_page_number' +CURRENT_SPAN = '_oteltrace_current_span' +PAGE_NUMBER = '_oteltrace_page_number' # Original connect connect function _connect = cassandra.cluster.Cluster.connect diff --git a/ddtrace/contrib/celery/__init__.py b/oteltrace/contrib/celery/__init__.py similarity index 95% rename from ddtrace/contrib/celery/__init__.py rename to oteltrace/contrib/celery/__init__.py index 1acd7fb7..13cb6067 100644 --- a/ddtrace/contrib/celery/__init__.py +++ b/oteltrace/contrib/celery/__init__.py @@ -6,7 +6,7 @@ will produce tracing data. To trace your Celery application, call the patch method:: import celery - from ddtrace import patch + from oteltrace import patch patch(celery=True) app = celery.Celery() @@ -22,7 +22,7 @@ def run(self): To change Celery service name, you can use the ``Config`` API as follows:: - from ddtrace import config + from oteltrace import config # change service names for producers and workers config.celery['producer_service_name'] = 'task-queue' diff --git a/ddtrace/contrib/celery/app.py b/oteltrace/contrib/celery/app.py similarity index 80% rename from ddtrace/contrib/celery/app.py rename to oteltrace/contrib/celery/app.py index abe1b20a..39782e30 100644 --- a/ddtrace/contrib/celery/app.py +++ b/oteltrace/contrib/celery/app.py @@ -1,8 +1,8 @@ from celery import signals -from ddtrace import Pin, config -from ddtrace.pin import _DD_PIN_NAME -from ddtrace.ext import AppTypes +from oteltrace import Pin, config +from oteltrace.pin import _OTEL_PIN_NAME +from oteltrace.ext import AppTypes from .constants import APP from .signals import ( @@ -19,9 +19,9 @@ def patch_app(app, pin=None): """Attach the Pin class to the application and connect our handlers to Celery signals. """ - if getattr(app, '__datadog_patch', False): + if getattr(app, '__opentelemetry_patch', False): return - setattr(app, '__datadog_patch', True) + setattr(app, '__opentelemetry_patch', True) # attach the PIN object pin = pin or Pin( @@ -45,13 +45,13 @@ def unpatch_app(app): """Remove the Pin instance from the application and disconnect our handlers from Celery signal framework. 
""" - if not getattr(app, '__datadog_patch', False): + if not getattr(app, '__opentelemetry_patch', False): return - setattr(app, '__datadog_patch', False) + setattr(app, '__opentelemetry_patch', False) pin = Pin.get_from(app) if pin is not None: - delattr(app, _DD_PIN_NAME) + delattr(app, _OTEL_PIN_NAME) signals.task_prerun.disconnect(trace_prerun) signals.task_postrun.disconnect(trace_postrun) diff --git a/ddtrace/contrib/celery/constants.py b/oteltrace/contrib/celery/constants.py similarity index 71% rename from ddtrace/contrib/celery/constants.py rename to oteltrace/contrib/celery/constants.py index 407c2125..6d219c92 100644 --- a/ddtrace/contrib/celery/constants.py +++ b/oteltrace/contrib/celery/constants.py @@ -1,7 +1,7 @@ from os import getenv # Celery Context key -CTX_KEY = '__dd_task_span' +CTX_KEY = '__otel_task_span' # Span names PRODUCER_ROOT_SPAN = 'celery.apply' @@ -18,5 +18,5 @@ APP = 'celery' # `getenv()` call must be kept for backward compatibility; we may remove it # later when we do a full migration to the `Config` class -PRODUCER_SERVICE = getenv('DATADOG_SERVICE_NAME') or 'celery-producer' -WORKER_SERVICE = getenv('DATADOG_SERVICE_NAME') or 'celery-worker' +PRODUCER_SERVICE = getenv('OPENTELEMETRY_SERVICE_NAME') or 'celery-producer' +WORKER_SERVICE = getenv('OPENTELEMETRY_SERVICE_NAME') or 'celery-worker' diff --git a/ddtrace/contrib/celery/patch.py b/oteltrace/contrib/celery/patch.py similarity index 96% rename from ddtrace/contrib/celery/patch.py rename to oteltrace/contrib/celery/patch.py index b6e97938..11769b18 100644 --- a/ddtrace/contrib/celery/patch.py +++ b/oteltrace/contrib/celery/patch.py @@ -1,6 +1,6 @@ import celery -from ddtrace import config +from oteltrace import config from .app import patch_app, unpatch_app from .constants import PRODUCER_SERVICE, WORKER_SERVICE diff --git a/ddtrace/contrib/celery/signals.py b/oteltrace/contrib/celery/signals.py similarity index 99% rename from ddtrace/contrib/celery/signals.py rename to oteltrace/contrib/celery/signals.py index 06fdac6d..66940350 100644 --- a/ddtrace/contrib/celery/signals.py +++ b/oteltrace/contrib/celery/signals.py @@ -1,4 +1,4 @@ -from ddtrace import Pin, config +from oteltrace import Pin, config from celery import registry diff --git a/ddtrace/contrib/celery/task.py b/oteltrace/contrib/celery/task.py similarity index 72% rename from ddtrace/contrib/celery/task.py rename to oteltrace/contrib/celery/task.py index be6c1dd1..b0e157c5 100644 --- a/ddtrace/contrib/celery/task.py +++ b/oteltrace/contrib/celery/task.py @@ -5,12 +5,12 @@ def patch_task(task, pin=None): """Deprecated API. The new API uses signals that can be activated via - patch(celery=True) or through `ddtrace-run` script. Using this API + patch(celery=True) or through `oteltrace-run` script. Using this API enables instrumentation on all tasks. """ deprecation( - name='ddtrace.contrib.celery.patch_task', - message='Use `patch(celery=True)` or `ddtrace-run` script instead', + name='oteltrace.contrib.celery.patch_task', + message='Use `patch(celery=True)` or `oteltrace-run` script instead', version='1.0.0', ) @@ -25,7 +25,7 @@ def unpatch_task(task): affect instrumented tasks. 
""" deprecation( - name='ddtrace.contrib.celery.patch_task', + name='oteltrace.contrib.celery.patch_task', message='Use `unpatch()` instead', version='1.0.0', ) diff --git a/ddtrace/contrib/celery/utils.py b/oteltrace/contrib/celery/utils.py similarity index 97% rename from ddtrace/contrib/celery/utils.py rename to oteltrace/contrib/celery/utils.py index 71fda21d..38fc8dbf 100644 --- a/ddtrace/contrib/celery/utils.py +++ b/oteltrace/contrib/celery/utils.py @@ -41,7 +41,7 @@ def tags_from_context(context): def attach_span(task, task_id, span, is_publish=False): """Helper to propagate a `Span` for the given `Task` instance. This - function uses a `WeakValueDictionary` that stores a Datadog Span using + function uses a `WeakValueDictionary` that stores a OpenTelemetry Span using the `(task_id, is_publish)` as a key. This is useful when information must be propagated from one Celery signal to another. diff --git a/ddtrace/contrib/consul/__init__.py b/oteltrace/contrib/consul/__init__.py similarity index 95% rename from ddtrace/contrib/consul/__init__.py rename to oteltrace/contrib/consul/__init__.py index a90eaf38..421e0b76 100644 --- a/ddtrace/contrib/consul/__init__.py +++ b/oteltrace/contrib/consul/__init__.py @@ -5,7 +5,7 @@ ``patch_all`` will automatically patch your Consul client to make it work. :: - from ddtrace import Pin, patch + from oteltrace import Pin, patch import consul # If not patched yet, you can patch consul specifically diff --git a/ddtrace/contrib/consul/patch.py b/oteltrace/contrib/consul/patch.py similarity index 81% rename from ddtrace/contrib/consul/patch.py rename to oteltrace/contrib/consul/patch.py index 646357c3..5950864a 100644 --- a/ddtrace/contrib/consul/patch.py +++ b/oteltrace/contrib/consul/patch.py @@ -1,8 +1,8 @@ import consul -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w -from ddtrace import config +from oteltrace import config from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...ext import consul as consulx from ...pin import Pin @@ -13,9 +13,9 @@ def patch(): - if getattr(consul, '__datadog_patch', False): + if getattr(consul, '__opentelemetry_patch', False): return - setattr(consul, '__datadog_patch', True) + setattr(consul, '__opentelemetry_patch', True) pin = Pin(service=consulx.SERVICE, app=consulx.APP, app_type=consulx.APP_TYPE) pin.onto(consul.Consul.KV) @@ -25,9 +25,9 @@ def patch(): def unpatch(): - if not getattr(consul, '__datadog_patch', False): + if not getattr(consul, '__opentelemetry_patch', False): return - setattr(consul, '__datadog_patch', False) + setattr(consul, '__opentelemetry_patch', False) for f_name in _KV_FUNCS: _u(consul.Consul.KV, f_name) diff --git a/ddtrace/contrib/dbapi/__init__.py b/oteltrace/contrib/dbapi/__init__.py similarity index 89% rename from ddtrace/contrib/dbapi/__init__.py rename to oteltrace/contrib/dbapi/__init__.py index 3d3d2059..bbc74e7a 100644 --- a/ddtrace/contrib/dbapi/__init__.py +++ b/oteltrace/contrib/dbapi/__init__.py @@ -25,7 +25,7 @@ def __init__(self, cursor, pin): super(TracedCursor, self).__init__(cursor) pin.onto(self) name = pin.app or 'sql' - self._self_datadog_name = '{}.query'.format(name) + self._self_opentelemetry_name = '{}.query'.format(name) self._self_last_execute_operation = None def _trace_method(self, method, name, resource, extra_tags, *args, **kwargs): @@ -46,7 +46,7 @@ def _trace_method(self, method, name, resource, extra_tags, *args, **kwargs): with pin.tracer.trace(name, service=service, 
resource=resource) as s: s.span_type = sql.TYPE # No reason to tag the query since it is set as the resource by the agent. See: - # https://github.com/DataDog/datadog-trace-agent/blob/bda1ebbf170dd8c5879be993bdd4dbae70d10fda/obfuscate/sql.go#L232 + # https://github.com/opentelemetry/datadog-trace-agent/blob/bda1ebbf170dd8c5879be993bdd4dbae70d10fda/obfuscate/sql.go#L232 s.set_tags(pin.tags) s.set_tags(extra_tags) @@ -78,7 +78,7 @@ def executemany(self, query, *args, **kwargs): # FIXME[matt] properly handle kwargs here. arg names can be different # with different libs. return self._trace_method( - self.__wrapped__.executemany, self._self_datadog_name, query, {'sql.executemany': 'true'}, + self.__wrapped__.executemany, self._self_opentelemetry_name, query, {'sql.executemany': 'true'}, query, *args, **kwargs) def execute(self, query, *args, **kwargs): @@ -88,12 +88,13 @@ def execute(self, query, *args, **kwargs): # Always return the result as-is # DEV: Some libraries return `None`, others `int`, and others the cursor objects # These differences should be overriden at the integration specific layer (e.g. in `sqlite3/patch.py`) - return self._trace_method(self.__wrapped__.execute, self._self_datadog_name, query, {}, query, *args, **kwargs) + return self._trace_method(self.__wrapped__.execute, self._self_opentelemetry_name, query, {}, query, *args, + **kwargs) def callproc(self, proc, args): """ Wraps the cursor.callproc method""" self._self_last_execute_operation = proc - return self._trace_method(self.__wrapped__.callproc, self._self_datadog_name, proc, {}, proc, args) + return self._trace_method(self.__wrapped__.callproc, self._self_opentelemetry_name, proc, {}, proc, args) def __enter__(self): # previous versions of the dbapi didn't support context managers. let's @@ -113,19 +114,19 @@ class FetchTracedCursor(TracedCursor): """ def fetchone(self, *args, **kwargs): """ Wraps the cursor.fetchone method""" - span_name = '{}.{}'.format(self._self_datadog_name, 'fetchone') + span_name = '{}.{}'.format(self._self_opentelemetry_name, 'fetchone') return self._trace_method(self.__wrapped__.fetchone, span_name, self._self_last_execute_operation, {}, *args, **kwargs) def fetchall(self, *args, **kwargs): """ Wraps the cursor.fetchall method""" - span_name = '{}.{}'.format(self._self_datadog_name, 'fetchall') + span_name = '{}.{}'.format(self._self_opentelemetry_name, 'fetchall') return self._trace_method(self.__wrapped__.fetchall, span_name, self._self_last_execute_operation, {}, *args, **kwargs) def fetchmany(self, *args, **kwargs): """ Wraps the cursor.fetchmany method""" - span_name = '{}.{}'.format(self._self_datadog_name, 'fetchmany') + span_name = '{}.{}'.format(self._self_opentelemetry_name, 'fetchmany') # We want to trace the information about how many rows were requested. Note that this number may be larger # the number of rows actually returned if less then requested are available from the query. 
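An illustrative way to exercise the renamed cursor wrappers above together with the ``TracedConnection`` defined just below (not part of this patch; ``sqlite3`` is only a stand-in DB-API driver)::

    import sqlite3

    from oteltrace.contrib.dbapi import TracedConnection

    conn = TracedConnection(sqlite3.connect(':memory:'))
    cursor = conn.cursor()                         # returns a traced cursor wrapper
    cursor.execute('CREATE TABLE t (value TEXT)')  # traced under '<vendor>.query'
    conn.commit()                                  # traced under '<vendor>.connection.commit'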
size_tag_key = 'db.fetch.size' @@ -154,7 +155,7 @@ def __init__(self, conn, pin=None, cursor_cls=None): super(TracedConnection, self).__init__(conn) name = _get_vendor(conn) - self._self_datadog_name = '{}.connection'.format(name) + self._self_opentelemetry_name = '{}.connection'.format(name) db_pin = pin or Pin(service=name, app=name, app_type=AppTypes.db) db_pin.onto(self) # wrapt requires prefix of `_self` for attributes that are only in the @@ -181,11 +182,11 @@ def cursor(self, *args, **kwargs): return self._self_cursor_cls(cursor, pin) def commit(self, *args, **kwargs): - span_name = '{}.{}'.format(self._self_datadog_name, 'commit') + span_name = '{}.{}'.format(self._self_opentelemetry_name, 'commit') return self._trace_method(self.__wrapped__.commit, span_name, {}, *args, **kwargs) def rollback(self, *args, **kwargs): - span_name = '{}.{}'.format(self._self_datadog_name, 'rollback') + span_name = '{}.{}'.format(self._self_opentelemetry_name, 'rollback') return self._trace_method(self.__wrapped__.rollback, span_name, {}, *args, **kwargs) diff --git a/ddtrace/contrib/django/__init__.py b/oteltrace/contrib/django/__init__.py similarity index 89% rename from ddtrace/contrib/django/__init__.py rename to oteltrace/contrib/django/__init__.py index f3405dd4..3656d841 100644 --- a/ddtrace/contrib/django/__init__.py +++ b/oteltrace/contrib/django/__init__.py @@ -4,7 +4,7 @@ **Note:** by default the tracer is **disabled** (will not send spans) when the Django setting ``DEBUG`` is ``True``. This can be overridden by explicitly enabling -the tracer with ``DATADOG_TRACE['ENABLED'] = True``, as described below. +the tracer with ``OPENTELEMETRY_TRACE['ENABLED'] = True``, as described below. To enable the Django integration, add the application to your installed apps, as follows:: @@ -13,20 +13,20 @@ # your Django apps... # the order is not important - 'ddtrace.contrib.django', + 'oteltrace.contrib.django', ] -The configuration for this integration is namespaced under the ``DATADOG_TRACE`` +The configuration for this integration is namespaced under the ``OPENTELEMETRY_TRACE`` Django setting. For example, your ``settings.py`` may contain:: - DATADOG_TRACE = { + OPENTELEMETRY_TRACE = { 'DEFAULT_SERVICE': 'my-django-app', 'TAGS': {'env': 'production'}, } -If you need to access to Datadog settings, you can:: +If you need to access to OpenTelemetry settings, you can:: - from ddtrace.contrib.django.conf import settings + from oteltrace.contrib.django.conf import settings tracer = settings.TRACER tracer.trace("something") @@ -37,7 +37,7 @@ LOGGING = { 'loggers': { - 'ddtrace': { + 'oteltrace': { 'handlers': ['console'], 'level': 'WARNING', }, @@ -55,8 +55,8 @@ by the tracer. Change this name if you want to see django cache spans as a cache application. * ``TAGS`` (default: ``{}``): set global tags that should be applied to all spans. -* ``TRACER`` (default: ``ddtrace.tracer``): set the default tracer - instance that is used to trace Django internals. By default the ``ddtrace`` +* ``TRACER`` (default: ``oteltrace.tracer``): set the default tracer + instance that is used to trace Django internals. By default the ``oteltrace`` tracer is used. * ``ENABLED`` (default: ``not django_settings.DEBUG``): defines if the tracer is enabled or not. 
If set to false, the code is still instrumented but no spans @@ -98,4 +98,4 @@ # define the Django app configuration -default_app_config = 'ddtrace.contrib.django.apps.TracerConfig' +default_app_config = 'oteltrace.contrib.django.apps.TracerConfig' diff --git a/ddtrace/contrib/django/apps.py b/oteltrace/contrib/django/apps.py similarity index 87% rename from ddtrace/contrib/django/apps.py rename to oteltrace/contrib/django/apps.py index 7cec2eed..c7c8ea2c 100644 --- a/ddtrace/contrib/django/apps.py +++ b/oteltrace/contrib/django/apps.py @@ -6,8 +6,8 @@ class TracerConfig(AppConfig): - name = 'ddtrace.contrib.django' - label = 'datadog_django' + name = 'oteltrace.contrib.django' + label = 'opentelemetry_django' def ready(self): """ diff --git a/ddtrace/contrib/django/cache.py b/oteltrace/contrib/django/cache.py similarity index 88% rename from ddtrace/contrib/django/cache.py rename to oteltrace/contrib/django/cache.py index 5bb35677..f5fce939 100644 --- a/ddtrace/contrib/django/cache.py +++ b/oteltrace/contrib/django/cache.py @@ -10,7 +10,7 @@ log = get_logger(__name__) # code instrumentation -DATADOG_NAMESPACE = '__datadog_original_{method}' +OPENTELEMETRY_NAMESPACE = '__opentelemetry_original_{method}' TRACED_METHODS = [ 'get', 'set', @@ -52,7 +52,7 @@ def _trace_operation(fn, method_name): @wraps(fn) def wrapped(self, *args, **kwargs): # get the original function method - method = getattr(self, DATADOG_NAMESPACE.format(method=method_name)) + method = getattr(self, OPENTELEMETRY_NAMESPACE.format(method=method_name)) with tracer.trace('django.cache', span_type=TYPE, service=cache_service_name) as span: # update the resource name and tag the cache backend span.resource = _resource_from_cache_prefix(method_name, self) @@ -77,11 +77,11 @@ def _wrap_method(cls, method_name): return # prevent patching each backend's method more than once - if hasattr(cls, DATADOG_NAMESPACE.format(method=method_name)): + if hasattr(cls, OPENTELEMETRY_NAMESPACE.format(method=method_name)): log.debug('{} already traced'.format(method_name)) else: method = getattr(cls, method_name) - setattr(cls, DATADOG_NAMESPACE.format(method=method_name), method) + setattr(cls, OPENTELEMETRY_NAMESPACE.format(method=method_name), method) setattr(cls, method_name, _trace_operation(method, method_name)) # trace all backends @@ -93,12 +93,12 @@ def _wrap_method(cls, method_name): def unpatch_method(cls, method_name): - method = getattr(cls, DATADOG_NAMESPACE.format(method=method_name), None) + method = getattr(cls, OPENTELEMETRY_NAMESPACE.format(method=method_name), None) if method is None: log.debug('nothing to do, the class is not patched') return setattr(cls, method_name, method) - delattr(cls, DATADOG_NAMESPACE.format(method=method_name)) + delattr(cls, OPENTELEMETRY_NAMESPACE.format(method=method_name)) def unpatch_cache(): diff --git a/ddtrace/contrib/django/compat.py b/oteltrace/contrib/django/compat.py similarity index 100% rename from ddtrace/contrib/django/compat.py rename to oteltrace/contrib/django/compat.py diff --git a/ddtrace/contrib/django/conf.py b/oteltrace/contrib/django/conf.py similarity index 77% rename from ddtrace/contrib/django/conf.py rename to oteltrace/contrib/django/conf.py index 33d69ea8..e4ba100c 100644 --- a/ddtrace/contrib/django/conf.py +++ b/oteltrace/contrib/django/conf.py @@ -1,13 +1,13 @@ """ -Settings for Datadog tracer are all namespaced in the DATADOG_TRACE setting. +Settings for OpenTelemetry tracer are all namespaced in the OPENTELEMETRY_TRACE setting. 
For example your project's `settings.py` file might look like this: -DATADOG_TRACE = { +OPENTELEMETRY_TRACE = { 'TRACER': 'myapp.tracer', } This module provides the `setting` object, that is used to access -Datadog settings, checking for user settings first, then falling +OpenTelemetry settings, checking for user settings first, then falling back to the defaults. """ from __future__ import unicode_literals @@ -39,7 +39,7 @@ 'ANALYTICS_SAMPLE_RATE': True, 'TRACE_QUERY_STRING': None, 'TAGS': {}, - 'TRACER': 'ddtrace.tracer', + 'TRACER': 'oteltrace.tracer', } # List of settings that may be in string import notation. @@ -72,12 +72,12 @@ def import_from_string(val, setting_name): raise ImportError(msg) -class DatadogSettings(object): +class OpenTelemetrySettings(object): """ - A settings object, that allows Datadog settings to be accessed as properties. + A settings object, that allows OpenTelemetry settings to be accessed as properties. For example: - from ddtrace.contrib.django.conf import settings + from oteltrace.contrib.django.conf import settings tracer = settings.TRACER @@ -89,23 +89,23 @@ def __init__(self, user_settings=None, defaults=None, import_strings=None): self._user_settings = self.__check_user_settings(user_settings) self.defaults = defaults or DEFAULTS - if os.environ.get('DATADOG_ENV'): - self.defaults['TAGS'].update({'env': os.environ.get('DATADOG_ENV')}) - if os.environ.get('DATADOG_SERVICE_NAME'): - self.defaults['DEFAULT_SERVICE'] = os.environ.get('DATADOG_SERVICE_NAME') + if os.environ.get('OPENTELEMETRY_ENV'): + self.defaults['TAGS'].update({'env': os.environ.get('OPENTELEMETRY_ENV')}) + if os.environ.get('OPENTELEMETRY_SERVICE_NAME'): + self.defaults['DEFAULT_SERVICE'] = os.environ.get('OPENTELEMETRY_SERVICE_NAME') - host = os.environ.get('DD_AGENT_HOST', os.environ.get('DATADOG_TRACE_AGENT_HOSTNAME')) + host = os.environ.get('OTEL_AGENT_HOST', os.environ.get('OPENTELEMETRY_TRACE_AGENT_HOSTNAME')) if host: self.defaults['AGENT_HOSTNAME'] = host - port = os.environ.get('DD_TRACE_AGENT_PORT', os.environ.get('DATADOG_TRACE_AGENT_PORT')) + port = os.environ.get('OTEL_TRACE_AGENT_PORT', os.environ.get('OPENTELEMETRY_TRACE_AGENT_PORT')) if port: # if the agent port is a string, the underlying library that creates the socket # stops working try: port = int(port) except ValueError: - log.warning('DD_TRACE_AGENT_PORT is not an integer value; default to 8126') + log.warning('OTEL_TRACE_AGENT_PORT is not an integer value; default to 8126') else: self.defaults['AGENT_PORT'] = port @@ -114,7 +114,7 @@ def __init__(self, user_settings=None, defaults=None, import_strings=None): @property def user_settings(self): if not hasattr(self, '_user_settings'): - self._user_settings = getattr(django_settings, 'DATADOG_TRACE', {}) + self._user_settings = getattr(django_settings, 'OPENTELEMETRY_TRACE', {}) # TODO[manu]: prevents docs import errors; provide a better implementation if 'ENABLED' not in self._user_settings: @@ -141,7 +141,7 @@ def __getattr__(self, attr): return val def __check_user_settings(self, user_settings): - SETTINGS_DOC = 'http://pypi.datadoghq.com/trace/docs/#module-ddtrace.contrib.django' + SETTINGS_DOC = 'http://pypi.datadoghq.com/trace/docs/#module-oteltrace.contrib.django' for setting in REMOVED_SETTINGS: if setting in user_settings: raise RuntimeError( @@ -150,7 +150,7 @@ def __check_user_settings(self, user_settings): return user_settings -settings = DatadogSettings(None, DEFAULTS, IMPORT_STRINGS) +settings = OpenTelemetrySettings(None, DEFAULTS, IMPORT_STRINGS) def 
reload_settings(*args, **kwargs): @@ -159,5 +159,5 @@ def reload_settings(*args, **kwargs): """ global settings setting, value = kwargs['setting'], kwargs['value'] - if setting == 'DATADOG_TRACE': - settings = DatadogSettings(value, DEFAULTS, IMPORT_STRINGS) + if setting == 'OPENTELEMETRY_TRACE': + settings = OpenTelemetrySettings(value, DEFAULTS, IMPORT_STRINGS) diff --git a/ddtrace/contrib/django/db.py b/oteltrace/contrib/django/db.py similarity index 91% rename from ddtrace/contrib/django/db.py rename to oteltrace/contrib/django/db.py index f5b6c804..cead47e8 100644 --- a/ddtrace/contrib/django/db.py +++ b/oteltrace/contrib/django/db.py @@ -10,8 +10,8 @@ log = get_logger(__name__) -CURSOR_ATTR = '_datadog_original_cursor' -ALL_CONNS_ATTR = '_datadog_original_connections_all' +CURSOR_ATTR = '_opentelemetry_original_cursor' +ALL_CONNS_ATTR = '_opentelemetry_original_connections_all' def patch_db(tracer): @@ -62,7 +62,7 @@ def cursor(): } pin = Pin(service, tags=tags, tracer=tracer, app=prefix) - return DbApiTracedCursor(conn._datadog_original_cursor(), pin) + return DbApiTracedCursor(conn._opentelemetry_original_cursor(), pin) conn.cursor = cursor diff --git a/ddtrace/contrib/django/middleware.py b/oteltrace/contrib/django/middleware.py similarity index 95% rename from ddtrace/contrib/django/middleware.py rename to oteltrace/contrib/django/middleware.py index 678a2ba4..a1a7dbd0 100644 --- a/ddtrace/contrib/django/middleware.py +++ b/oteltrace/contrib/django/middleware.py @@ -23,8 +23,8 @@ log = get_logger(__name__) -EXCEPTION_MIDDLEWARE = 'ddtrace.contrib.django.TraceExceptionMiddleware' -TRACE_MIDDLEWARE = 'ddtrace.contrib.django.TraceMiddleware' +EXCEPTION_MIDDLEWARE = 'oteltrace.contrib.django.TraceExceptionMiddleware' +TRACE_MIDDLEWARE = 'oteltrace.contrib.django.TraceMiddleware' MIDDLEWARE = 'MIDDLEWARE' MIDDLEWARE_CLASSES = 'MIDDLEWARE_CLASSES' @@ -197,13 +197,13 @@ def process_response(self, request, response): def _get_req_span(request): - """ Return the datadog span from the given request. """ - return getattr(request, '_datadog_request_span', None) + """ Return the opentelemetry span from the given request. """ + return getattr(request, '_opentelemetry_request_span', None) def _set_req_span(request, span): - """ Set the datadog span on the given request. """ - return setattr(request, '_datadog_request_span', span) + """ Set the opentelemetry span on the given request. 
""" + return setattr(request, '_opentelemetry_request_span', span) def _set_auth_tags(span, request): diff --git a/ddtrace/contrib/django/patch.py b/oteltrace/contrib/django/patch.py similarity index 87% rename from ddtrace/contrib/django/patch.py rename to oteltrace/contrib/django/patch.py index 9dd6a54d..07a787f8 100644 --- a/ddtrace/contrib/django/patch.py +++ b/oteltrace/contrib/django/patch.py @@ -1,5 +1,5 @@ # 3rd party -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt import django from django.db import connections @@ -18,9 +18,9 @@ def patch(): """Patch the instrumented methods """ - if getattr(django, '_datadog_patch', False): + if getattr(django, '_opentelemetry_patch', False): return - setattr(django, '_datadog_patch', True) + setattr(django, '_opentelemetry_patch', True) _w = wrapt.wrap_function_wrapper _w('django', 'setup', traced_setup) @@ -29,12 +29,12 @@ def patch(): def traced_setup(wrapped, instance, args, kwargs): from django.conf import settings - if 'ddtrace.contrib.django' not in settings.INSTALLED_APPS: + if 'oteltrace.contrib.django' not in settings.INSTALLED_APPS: if isinstance(settings.INSTALLED_APPS, tuple): # INSTALLED_APPS is a tuple < 1.9 - settings.INSTALLED_APPS = settings.INSTALLED_APPS + ('ddtrace.contrib.django', ) + settings.INSTALLED_APPS = settings.INSTALLED_APPS + ('oteltrace.contrib.django', ) else: - settings.INSTALLED_APPS.append('ddtrace.contrib.django') + settings.INSTALLED_APPS.append('oteltrace.contrib.django') wrapped(*args, **kwargs) @@ -55,8 +55,6 @@ def apply_django_patches(patch_rest_framework): # AgentWriter, it breaks all tests. The configure() behavior must # be changed to use it in this integration tracer.enabled = settings.ENABLED - tracer.writer.api.hostname = settings.AGENT_HOSTNAME - tracer.writer.api.port = settings.AGENT_PORT if settings.AUTO_INSTRUMENT: # trace Django internals diff --git a/ddtrace/contrib/django/restframework.py b/oteltrace/contrib/django/restframework.py similarity index 80% rename from ddtrace/contrib/django/restframework.py rename to oteltrace/contrib/django/restframework.py index 1970111e..4b11c1f2 100644 --- a/ddtrace/contrib/django/restframework.py +++ b/oteltrace/contrib/django/restframework.py @@ -1,4 +1,4 @@ -from ddtrace.vendor.wrapt import wrap_function_wrapper as wrap +from oteltrace.vendor.wrapt import wrap_function_wrapper as wrap from rest_framework.views import APIView @@ -26,10 +26,10 @@ def _traced_handle_exception(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) # do not patch if already patched - if getattr(APIView, '_datadog_patch', False): + if getattr(APIView, '_opentelemetry_patch', False): return else: - setattr(APIView, '_datadog_patch', True) + setattr(APIView, '_opentelemetry_patch', True) # trace the handle_exception method wrap('rest_framework.views', 'APIView.handle_exception', _traced_handle_exception) @@ -37,6 +37,6 @@ def _traced_handle_exception(wrapped, instance, args, kwargs): def unpatch_restframework(): """ Unpatches rest_framework app.""" - if getattr(APIView, '_datadog_patch', False): - setattr(APIView, '_datadog_patch', False) + if getattr(APIView, '_opentelemetry_patch', False): + setattr(APIView, '_opentelemetry_patch', False) unwrap(APIView, 'handle_exception') diff --git a/ddtrace/contrib/django/templates.py b/oteltrace/contrib/django/templates.py similarity index 91% rename from ddtrace/contrib/django/templates.py rename to oteltrace/contrib/django/templates.py index c3717e5e..5d88471c 100644 --- 
a/ddtrace/contrib/django/templates.py +++ b/oteltrace/contrib/django/templates.py @@ -10,7 +10,7 @@ log = get_logger(__name__) -RENDER_ATTR = '_datadog_original_render' +RENDER_ATTR = '_opentelemetry_original_render' def patch_template(tracer): @@ -30,7 +30,7 @@ def patch_template(tracer): def traced_render(self, context): with tracer.trace('django.template', span_type=http.TEMPLATE) as span: try: - return Template._datadog_original_render(self, context) + return Template._opentelemetry_original_render(self, context) finally: template_name = self.name or getattr(context, 'template_name', None) or 'unknown' span.resource = template_name diff --git a/ddtrace/contrib/django/utils.py b/oteltrace/contrib/django/utils.py similarity index 100% rename from ddtrace/contrib/django/utils.py rename to oteltrace/contrib/django/utils.py diff --git a/ddtrace/contrib/elasticsearch/__init__.py b/oteltrace/contrib/elasticsearch/__init__.py similarity index 97% rename from ddtrace/contrib/elasticsearch/__init__.py rename to oteltrace/contrib/elasticsearch/__init__.py index 2d710ee4..55e9cd1f 100644 --- a/ddtrace/contrib/elasticsearch/__init__.py +++ b/oteltrace/contrib/elasticsearch/__init__.py @@ -3,7 +3,7 @@ ``patch_all`` will automatically patch your Elasticsearch instance to make it work. :: - from ddtrace import Pin, patch + from oteltrace import Pin, patch from elasticsearch import Elasticsearch # If not patched yet, you can patch elasticsearch specifically diff --git a/ddtrace/contrib/elasticsearch/elasticsearch.py b/oteltrace/contrib/elasticsearch/elasticsearch.py similarity index 100% rename from ddtrace/contrib/elasticsearch/elasticsearch.py rename to oteltrace/contrib/elasticsearch/elasticsearch.py diff --git a/ddtrace/contrib/elasticsearch/patch.py b/oteltrace/contrib/elasticsearch/patch.py similarity index 89% rename from ddtrace/contrib/elasticsearch/patch.py rename to oteltrace/contrib/elasticsearch/patch.py index 65d5689b..b2a90ebc 100644 --- a/ddtrace/contrib/elasticsearch/patch.py +++ b/oteltrace/contrib/elasticsearch/patch.py @@ -1,6 +1,6 @@ from importlib import import_module -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w from .quantize import quantize @@ -28,9 +28,9 @@ def patch(): def _patch(elasticsearch): - if getattr(elasticsearch, '_datadog_patch', False): + if getattr(elasticsearch, '_opentelemetry_patch', False): return - setattr(elasticsearch, '_datadog_patch', True) + setattr(elasticsearch, '_opentelemetry_patch', True) _w(elasticsearch.transport, 'Transport.perform_request', _get_perform_request(elasticsearch)) Pin(service=metadata.SERVICE, app=metadata.APP, app_type=AppTypes.db).onto(elasticsearch.transport.Transport) @@ -41,8 +41,8 @@ def unpatch(): def _unpatch(elasticsearch): - if getattr(elasticsearch, '_datadog_patch', False): - setattr(elasticsearch, '_datadog_patch', False) + if getattr(elasticsearch, '_opentelemetry_patch', False): + setattr(elasticsearch, '_opentelemetry_patch', False) _u(elasticsearch.transport.Transport, 'perform_request') @@ -109,7 +109,7 @@ def _perform_request(func, instance, args, kwargs): return _perform_request -# Backwards compatibility for anyone who decided to import `ddtrace.contrib.elasticsearch.patch._perform_request` +# Backwards compatibility for anyone who decided to import `oteltrace.contrib.elasticsearch.patch._perform_request` # DEV: `_perform_request` is a `wrapt.FunctionWrapper` try: # DEV: Import as `es` to not shadow loop variables above diff --git 
a/ddtrace/contrib/elasticsearch/quantize.py b/oteltrace/contrib/elasticsearch/quantize.py similarity index 100% rename from ddtrace/contrib/elasticsearch/quantize.py rename to oteltrace/contrib/elasticsearch/quantize.py diff --git a/ddtrace/contrib/elasticsearch/transport.py b/oteltrace/contrib/elasticsearch/transport.py similarity index 85% rename from ddtrace/contrib/elasticsearch/transport.py rename to oteltrace/contrib/elasticsearch/transport.py index ca556f17..6556efd1 100644 --- a/ddtrace/contrib/elasticsearch/transport.py +++ b/oteltrace/contrib/elasticsearch/transport.py @@ -14,24 +14,24 @@ @deprecated(message='Use patching instead (see the docs).', version='1.0.0') -def get_traced_transport(datadog_tracer, datadog_service=DEFAULT_SERVICE): +def get_traced_transport(opentelemetry_tracer, opentelemetry_service=DEFAULT_SERVICE): class TracedTransport(elasticsearch.Transport): - """ Extend elasticseach transport layer to allow Datadog + """ Extend elasticseach transport layer to allow OpenTelemetry tracer to catch any performed request. """ - _datadog_tracer = datadog_tracer - _datadog_service = datadog_service + _opentelemetry_tracer = opentelemetry_tracer + _opentelemetry_service = opentelemetry_service def perform_request(self, method, url, params=None, body=None): - with self._datadog_tracer.trace('elasticsearch.query') as s: + with self._opentelemetry_tracer.trace('elasticsearch.query') as s: # Don't instrument if the trace is not sampled if not s.sampled: return super(TracedTransport, self).perform_request( method, url, params=params, body=body) - s.service = self._datadog_service + s.service = self._opentelemetry_service s.span_type = SPAN_TYPE s.set_tag(metadata.METHOD, method) s.set_tag(metadata.URL, url) diff --git a/ddtrace/contrib/falcon/__init__.py b/oteltrace/contrib/falcon/__init__.py similarity index 79% rename from ddtrace/contrib/falcon/__init__.py rename to oteltrace/contrib/falcon/__init__.py index ca69d3f0..dc014b6b 100644 --- a/ddtrace/contrib/falcon/__init__.py +++ b/oteltrace/contrib/falcon/__init__.py @@ -2,8 +2,8 @@ To trace the falcon web framework, install the trace middleware:: import falcon - from ddtrace import tracer - from ddtrace.contrib.falcon import TraceMiddleware + from oteltrace import tracer + from oteltrace.contrib.falcon import TraceMiddleware mw = TraceMiddleware(tracer, 'my-falcon-app') falcon.API(middleware=[mw]) @@ -11,17 +11,17 @@ You can also use the autopatching functionality:: import falcon - from ddtrace import tracer, patch + from oteltrace import tracer, patch patch(falcon=True) app = falcon.API() To disable distributed tracing when using autopatching, set the -``DATADOG_FALCON_DISTRIBUTED_TRACING`` environment variable to ``False``. +``OPENTELEMETRY_FALCON_DISTRIBUTED_TRACING`` environment variable to ``False``. To enable generating APM events for Trace Search & Analytics, set the -``DD_FALCON_ANALYTICS_ENABLED`` environment variable to ``True``. +``OTEL_FALCON_ANALYTICS_ENABLED`` environment variable to ``True``. 
**Supported span hooks** @@ -36,7 +36,7 @@ Example:: import falcon - from ddtrace import config, patch_all + from oteltrace import config, patch_all patch_all() app = falcon.API() diff --git a/ddtrace/contrib/falcon/middleware.py b/oteltrace/contrib/falcon/middleware.py similarity index 96% rename from ddtrace/contrib/falcon/middleware.py rename to oteltrace/contrib/falcon/middleware.py index 81a07a7f..27e2a9ba 100644 --- a/ddtrace/contrib/falcon/middleware.py +++ b/oteltrace/contrib/falcon/middleware.py @@ -1,8 +1,8 @@ import sys -from ddtrace.ext import http as httpx -from ddtrace.http import store_request_headers, store_response_headers -from ddtrace.propagation.http import HTTPPropagator +from oteltrace.ext import http as httpx +from oteltrace.http import store_request_headers, store_response_headers +from oteltrace.propagation.http import HTTPPropagator from ...compat import iteritems from ...constants import ANALYTICS_SAMPLE_RATE_KEY diff --git a/ddtrace/contrib/falcon/patch.py b/oteltrace/contrib/falcon/patch.py similarity index 71% rename from ddtrace/contrib/falcon/patch.py rename to oteltrace/contrib/falcon/patch.py index 5eef31f6..ee79204e 100644 --- a/ddtrace/contrib/falcon/patch.py +++ b/oteltrace/contrib/falcon/patch.py @@ -1,8 +1,8 @@ import os -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt import falcon -from ddtrace import tracer +from oteltrace import tracer from .middleware import TraceMiddleware from ...utils.formats import asbool, get_env @@ -13,16 +13,16 @@ def patch(): Patch falcon.API to include contrib.falcon.TraceMiddleware by default """ - if getattr(falcon, '_datadog_patch', False): + if getattr(falcon, '_opentelemetry_patch', False): return - setattr(falcon, '_datadog_patch', True) + setattr(falcon, '_opentelemetry_patch', True) wrapt.wrap_function_wrapper('falcon', 'API.__init__', traced_init) def traced_init(wrapped, instance, args, kwargs): mw = kwargs.pop('middleware', []) - service = os.environ.get('DATADOG_SERVICE_NAME') or 'falcon' + service = os.environ.get('OPENTELEMETRY_SERVICE_NAME') or 'falcon' distributed_tracing = asbool(get_env('falcon', 'distributed_tracing', True)) mw.insert(0, TraceMiddleware(tracer, service, distributed_tracing)) diff --git a/ddtrace/contrib/flask/__init__.py b/oteltrace/contrib/flask/__init__.py similarity index 68% rename from ddtrace/contrib/flask/__init__.py rename to oteltrace/contrib/flask/__init__.py index 369dcf83..bdfd7e11 100644 --- a/ddtrace/contrib/flask/__init__.py +++ b/oteltrace/contrib/flask/__init__.py @@ -6,7 +6,7 @@ To configure tracing manually:: - from ddtrace import patch_all + from oteltrace import patch_all patch_all() from flask import Flask @@ -23,55 +23,55 @@ def index(): app.run() -You may also enable Flask tracing automatically via ddtrace-run:: +You may also enable Flask tracing automatically via oteltrace-run:: - ddtrace-run python app.py + oteltrace-run python app.py Configuration ~~~~~~~~~~~~~ -.. py:data:: ddtrace.config.flask['distributed_tracing_enabled'] +.. py:data:: oteltrace.config.flask['distributed_tracing_enabled'] Whether to parse distributed tracing headers from requests received by your Flask app. Default: ``True`` -.. py:data:: ddtrace.config.flask['analytics_enabled'] +.. py:data:: oteltrace.config.flask['analytics_enabled'] Whether to generate APM events for Flask in Trace Search & Analytics. - Can also be enabled with the ``DD_FLASK_ANALYTICS_ENABLED`` environment variable. 
+ Can also be enabled with the ``OTEL_FLASK_ANALYTICS_ENABLED`` environment variable. Default: ``None`` -.. py:data:: ddtrace.config.flask['service_name'] +.. py:data:: oteltrace.config.flask['service_name'] The service name reported for your Flask app. - Can also be configured via the ``DATADOG_SERVICE_NAME`` environment variable. + Can also be configured via the ``OPENTELEMETRY_SERVICE_NAME`` environment variable. Default: ``'flask'`` -.. py:data:: ddtrace.config.flask['collect_view_args'] +.. py:data:: oteltrace.config.flask['collect_view_args'] Whether to add request tags for view function argument values. Default: ``True`` -.. py:data:: ddtrace.config.flask['template_default_name'] +.. py:data:: oteltrace.config.flask['template_default_name'] The default template name to use when one does not exist. Default: ```` -.. py:data:: ddtrace.config.flask['trace_signals'] +.. py:data:: oteltrace.config.flask['trace_signals'] Whether to trace Flask signals (``before_request``, ``after_request``, etc). Default: ``True`` -.. py:data:: ddtrace.config.flask['extra_error_codes'] +.. py:data:: oteltrace.config.flask['extra_error_codes'] A list of response codes that should get marked as errors. @@ -82,7 +82,7 @@ def index(): Example:: - from ddtrace import config + from oteltrace import config # Enable distributed tracing config.flask['distributed_tracing_enabled'] = True @@ -103,7 +103,7 @@ def index(): with require_modules(required_modules) as missing_modules: if not missing_modules: - # DEV: We do this so we can `@mock.patch('ddtrace.contrib.flask._patch.')` in tests + # DEV: We do this so we can `@mock.patch('oteltrace.contrib.flask._patch.')` in tests from . import patch as _patch from .middleware import TraceMiddleware diff --git a/ddtrace/contrib/flask/helpers.py b/oteltrace/contrib/flask/helpers.py similarity index 98% rename from ddtrace/contrib/flask/helpers.py rename to oteltrace/contrib/flask/helpers.py index 38dcd250..7d153b62 100644 --- a/ddtrace/contrib/flask/helpers.py +++ b/oteltrace/contrib/flask/helpers.py @@ -1,4 +1,4 @@ -from ddtrace import Pin +from oteltrace import Pin import flask diff --git a/ddtrace/contrib/flask/middleware.py b/oteltrace/contrib/flask/middleware.py similarity index 90% rename from ddtrace/contrib/flask/middleware.py rename to oteltrace/contrib/flask/middleware.py index 9c46f428..e937e5a8 100644 --- a/ddtrace/contrib/flask/middleware.py +++ b/oteltrace/contrib/flask/middleware.py @@ -22,8 +22,8 @@ def __init__(self, app, tracer, service='flask', use_signals=True, distributed_t log.debug('flask: initializing trace middleware') # Attach settings to the inner application middleware. This is required if double - # instrumentation happens (i.e. `ddtrace-run` with `TraceMiddleware`). In that - # case, `ddtrace-run` instruments the application, but then users code is unable + # instrumentation happens (i.e. `oteltrace-run` with `TraceMiddleware`). In that + # case, `oteltrace-run` instruments the application, but then users code is unable # to update settings such as `distributed_tracing` flag. 
This step can be removed # when the `Config` object is used self.app._tracer = tracer @@ -32,9 +32,9 @@ def __init__(self, app, tracer, service='flask', use_signals=True, distributed_t self.use_signals = use_signals # safe-guard to avoid double instrumentation - if getattr(app, '__dd_instrumentation', False): + if getattr(app, '__otel_instrumentation', False): return - setattr(app, '__dd_instrumentation', True) + setattr(app, '__otel_instrumentation', True) # Install hooks which time requests. self.app.before_request(self._before_request) @@ -43,7 +43,7 @@ def __init__(self, app, tracer, service='flask', use_signals=True, distributed_t # Add exception handling signals. This will annotate exceptions that # are caught and handled in custom user code. - # See https://github.com/DataDog/dd-trace-py/issues/390 + # See https://github.com/opentelemetry/otel-trace-py/issues/390 if use_signals and not signals.signals_available: log.debug(_blinker_not_installed_msg) self.use_signals = use_signals and signals.signals_available @@ -91,8 +91,8 @@ def _teardown_request(self, exception): will be passed in. """ # when we teardown the span, ensure we have a clean slate. - span = getattr(g, 'flask_datadog_span', None) - setattr(g, 'flask_datadog_span', None) + span = getattr(g, 'flask_opentelemetry_span', None) + setattr(g, 'flask_opentelemetry_span', None) if not span: return @@ -109,7 +109,7 @@ def _start_span(self): if context.trace_id: self.app._tracer.context_provider.activate(context) try: - g.flask_datadog_span = self.app._tracer.trace( + g.flask_opentelemetry_span = self.app._tracer.trace( SPAN_NAME, service=self.app._service, span_type=http.TYPE, @@ -118,7 +118,7 @@ def _start_span(self): log.debug('flask: error tracing request', exc_info=True) def _process_response(self, response): - span = getattr(g, 'flask_datadog_span', None) + span = getattr(g, 'flask_opentelemetry_span', None) if not (span and span.sampled): return @@ -127,7 +127,7 @@ def _process_response(self, response): def _request_exception(self, *args, **kwargs): exception = kwargs.get('exception', None) - span = getattr(g, 'flask_datadog_span', None) + span = getattr(g, 'flask_opentelemetry_span', None) if span and exception: _set_error_on_span(span, exception) @@ -160,7 +160,7 @@ def _finish_span(self, span, exception=None): url = request.base_url or '' # Let users specify their own resource in middleware if they so desire. 
- # See case https://github.com/DataDog/dd-trace-py/issues/353 + # See case https://github.com/opentelemetry/otel-trace-py/issues/353 if span.resource == SPAN_NAME: resource = endpoint or code span.resource = compat.to_unicode(resource).lower() diff --git a/ddtrace/contrib/flask/patch.py b/oteltrace/contrib/flask/patch.py similarity index 97% rename from ddtrace/contrib/flask/patch.py rename to oteltrace/contrib/flask/patch.py index 3d28e0cb..f87b535d 100644 --- a/ddtrace/contrib/flask/patch.py +++ b/oteltrace/contrib/flask/patch.py @@ -2,10 +2,10 @@ import flask import werkzeug -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w -from ddtrace import compat -from ddtrace import config, Pin +from oteltrace import compat +from oteltrace import config, Pin from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...ext import AppTypes @@ -26,8 +26,8 @@ # Configure default configuration config._add('flask', dict( # Flask service configuration - # DEV: Environment variable 'DATADOG_SERVICE_NAME' used for backwards compatibility - service_name=os.environ.get('DATADOG_SERVICE_NAME') or 'flask', + # DEV: Environment variable 'OPENTELEMETRY_SERVICE_NAME' used for backwards compatibility + service_name=os.environ.get('OPENTELEMETRY_SERVICE_NAME') or 'flask', app='flask', app_type=AppTypes.web, @@ -63,9 +63,9 @@ def patch(): Patch `flask` module for tracing """ # Check to see if we have patched Flask yet or not - if getattr(flask, '_datadog_patch', False): + if getattr(flask, '_opentelemetry_patch', False): return - setattr(flask, '_datadog_patch', True) + setattr(flask, '_opentelemetry_patch', True) # Attach service pin to `flask.app.Flask` Pin( @@ -171,9 +171,9 @@ def patch(): def unpatch(): - if not getattr(flask, '_datadog_patch', False): + if not getattr(flask, '_opentelemetry_patch', False): return - setattr(flask, '_datadog_patch', False) + setattr(flask, '_opentelemetry_patch', False) props = [ # Flask diff --git a/ddtrace/contrib/flask/wrappers.py b/oteltrace/contrib/flask/wrappers.py similarity index 96% rename from ddtrace/contrib/flask/wrappers.py rename to oteltrace/contrib/flask/wrappers.py index e4942622..6ca6108e 100644 --- a/ddtrace/contrib/flask/wrappers.py +++ b/oteltrace/contrib/flask/wrappers.py @@ -1,4 +1,4 @@ -from ddtrace.vendor.wrapt import function_wrapper +from oteltrace.vendor.wrapt import function_wrapper from ...pin import Pin from ...utils.importlib import func_name diff --git a/ddtrace/contrib/flask_cache/__init__.py b/oteltrace/contrib/flask_cache/__init__.py similarity index 77% rename from ddtrace/contrib/flask_cache/__init__.py rename to oteltrace/contrib/flask_cache/__init__.py index d8cfe303..fc8c7622 100644 --- a/ddtrace/contrib/flask_cache/__init__.py +++ b/oteltrace/contrib/flask_cache/__init__.py @@ -2,10 +2,10 @@ The flask cache tracer will track any access to a cache backend. You can use this tracer together with the Flask tracer middleware. 
-To install the tracer, ``from ddtrace import tracer`` needs to be added:: +To install the tracer, ``from oteltrace import tracer`` needs to be added:: - from ddtrace import tracer - from ddtrace.contrib.flask_cache import get_traced_cache + from oteltrace import tracer + from oteltrace.contrib.flask_cache import get_traced_cache and the tracer needs to be initialized:: @@ -15,8 +15,8 @@ from flask import Flask - from ddtrace import tracer - from ddtrace.contrib.flask_cache import get_traced_cache + from oteltrace import tracer + from oteltrace.contrib.flask_cache import get_traced_cache app = Flask(__name__) diff --git a/ddtrace/contrib/flask_cache/tracers.py b/oteltrace/contrib/flask_cache/tracers.py similarity index 92% rename from ddtrace/contrib/flask_cache/tracers.py rename to oteltrace/contrib/flask_cache/tracers.py index a83e33c3..54e6f62a 100644 --- a/ddtrace/contrib/flask_cache/tracers.py +++ b/oteltrace/contrib/flask_cache/tracers.py @@ -1,5 +1,5 @@ """ -Datadog trace code for flask_cache +OpenTelemetry trace code for flask_cache """ # stdlib @@ -25,7 +25,7 @@ CONTACT_POINTS = 'flask_cache.contact_points' -def get_traced_cache(ddtracer, service=DEFAULT_SERVICE, meta=None): +def get_traced_cache(oteltracer, service=DEFAULT_SERVICE, meta=None): """ Return a traced Cache object that behaves exactly as the ``flask.ext.cache.Cache class`` """ @@ -36,23 +36,23 @@ class TracedCache(Cache): * get, set, add, delete, clear * all many_ operations """ - _datadog_tracer = ddtracer - _datadog_service = service - _datadog_meta = meta + _opentelemetry_tracer = oteltracer + _opentelemetry_service = service + _opentelemetry_meta = meta def __trace(self, cmd): """ Start a tracing with default attributes and tags """ # create a new span - s = self._datadog_tracer.trace( + s = self._opentelemetry_tracer.trace( cmd, span_type=TYPE, - service=self._datadog_service + service=self._opentelemetry_service ) # set span tags s.set_tag(CACHE_BACKEND, self.config.get('CACHE_TYPE')) - s.set_tags(self._datadog_meta) + s.set_tags(self._opentelemetry_meta) # set analytics sample rate s.set_tag( ANALYTICS_SAMPLE_RATE_KEY, diff --git a/ddtrace/contrib/flask_cache/utils.py b/oteltrace/contrib/flask_cache/utils.py similarity index 100% rename from ddtrace/contrib/flask_cache/utils.py rename to oteltrace/contrib/flask_cache/utils.py diff --git a/ddtrace/contrib/futures/__init__.py b/oteltrace/contrib/futures/__init__.py similarity index 95% rename from ddtrace/contrib/futures/__init__.py rename to oteltrace/contrib/futures/__init__.py index 126c9e46..31aa8c8b 100644 --- a/ddtrace/contrib/futures/__init__.py +++ b/oteltrace/contrib/futures/__init__.py @@ -8,7 +8,7 @@ is not enabled by default with the `patch_all()` method and must be activated as follows:: - from ddtrace import patch, patch_all + from oteltrace import patch, patch_all patch(futures=True) # or, when instrumenting all libraries diff --git a/ddtrace/contrib/futures/patch.py b/oteltrace/contrib/futures/patch.py similarity index 58% rename from ddtrace/contrib/futures/patch.py rename to oteltrace/contrib/futures/patch.py index dd9e5d8b..0ea89123 100644 --- a/ddtrace/contrib/futures/patch.py +++ b/oteltrace/contrib/futures/patch.py @@ -1,6 +1,6 @@ from concurrent import futures -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w from .threading import _wrap_submit from ...utils.wrappers import unwrap as _u @@ -8,17 +8,17 @@ def patch(): """Enables Context Propagation between threads""" 
- if getattr(futures, '__datadog_patch', False): + if getattr(futures, '__opentelemetry_patch', False): return - setattr(futures, '__datadog_patch', True) + setattr(futures, '__opentelemetry_patch', True) _w('concurrent.futures', 'ThreadPoolExecutor.submit', _wrap_submit) def unpatch(): """Disables Context Propagation between threads""" - if not getattr(futures, '__datadog_patch', False): + if not getattr(futures, '__opentelemetry_patch', False): return - setattr(futures, '__datadog_patch', False) + setattr(futures, '__opentelemetry_patch', False) _u(futures.ThreadPoolExecutor, 'submit') diff --git a/ddtrace/contrib/futures/threading.py b/oteltrace/contrib/futures/threading.py similarity index 89% rename from ddtrace/contrib/futures/threading.py rename to oteltrace/contrib/futures/threading.py index dcaef6ca..04fd7035 100644 --- a/ddtrace/contrib/futures/threading.py +++ b/oteltrace/contrib/futures/threading.py @@ -1,4 +1,4 @@ -import ddtrace +import oteltrace def _wrap_submit(func, instance, args, kwargs): @@ -18,8 +18,8 @@ def _wrap_submit(func, instance, args, kwargs): # The resolution is to not create/propagate a new context if one does not exist, but let the # future's thread create the context instead. current_ctx = None - if ddtrace.tracer.context_provider._has_active_context(): - current_ctx = ddtrace.tracer.context_provider.active() + if oteltrace.tracer.context_provider._has_active_context(): + current_ctx = oteltrace.tracer.context_provider.active() # If we have a context then make sure we clone it # DEV: We don't know if the future will finish executing before the parent span finishes @@ -42,5 +42,5 @@ def _wrap_execution(ctx, fn, args, kwargs): variable because it's outside the asynchronous loop. """ if ctx is not None: - ddtrace.tracer.context_provider.activate(ctx) + oteltrace.tracer.context_provider.activate(ctx) return fn(*args, **kwargs) diff --git a/ddtrace/contrib/gevent/__init__.py b/oteltrace/contrib/gevent/__init__.py similarity index 97% rename from ddtrace/contrib/gevent/__init__.py rename to oteltrace/contrib/gevent/__init__.py index 5628e737..9fd9475c 100644 --- a/ddtrace/contrib/gevent/__init__.py +++ b/oteltrace/contrib/gevent/__init__.py @@ -9,7 +9,7 @@ patch ``gevent`` **before importing** the library:: # patch before importing gevent - from ddtrace import patch, tracer + from oteltrace import patch, tracer patch(gevent=True) # use gevent as usual with or without the monkey module diff --git a/ddtrace/contrib/gevent/greenlet.py b/oteltrace/contrib/gevent/greenlet.py similarity index 100% rename from ddtrace/contrib/gevent/greenlet.py rename to oteltrace/contrib/gevent/greenlet.py diff --git a/ddtrace/contrib/gevent/patch.py b/oteltrace/contrib/gevent/patch.py similarity index 85% rename from ddtrace/contrib/gevent/patch.py rename to oteltrace/contrib/gevent/patch.py index d4dc985c..6bb8cbca 100644 --- a/ddtrace/contrib/gevent/patch.py +++ b/oteltrace/contrib/gevent/patch.py @@ -1,6 +1,6 @@ import gevent import gevent.pool -import ddtrace +import oteltrace from .greenlet import TracedGreenlet, TracedIMap, TracedIMapUnordered, GEVENT_VERSION from .provider import GeventContextProvider @@ -15,24 +15,24 @@ def patch(): """ Patch the gevent module so that all references to the - internal ``Greenlet`` class points to the ``DatadogGreenlet`` + internal ``Greenlet`` class point to the ``OpenTelemetryGreenlet`` class. This action ensures that if a user extends the ``Greenlet`` class, the ``TracedGreenlet`` is used as a parent class.
""" _replace(TracedGreenlet, TracedIMap, TracedIMapUnordered) - ddtrace.tracer.configure(context_provider=GeventContextProvider()) + oteltrace.tracer.configure(context_provider=GeventContextProvider()) def unpatch(): """ Restore the original ``Greenlet``. This function must be invoked - before executing application code, otherwise the ``DatadogGreenlet`` + before executing application code, otherwise the ``OpenTelemetryGreenlet`` class may be used during initialization. """ _replace(__Greenlet, __IMap, __IMapUnordered) - ddtrace.tracer.configure(context_provider=DefaultContextProvider()) + oteltrace.tracer.configure(context_provider=DefaultContextProvider()) def _replace(g_class, imap_class, imap_unordered_class): diff --git a/ddtrace/contrib/gevent/provider.py b/oteltrace/contrib/gevent/provider.py similarity index 97% rename from ddtrace/contrib/gevent/provider.py rename to oteltrace/contrib/gevent/provider.py index ebae325e..43152480 100644 --- a/ddtrace/contrib/gevent/provider.py +++ b/oteltrace/contrib/gevent/provider.py @@ -5,7 +5,7 @@ # Greenlet attribute used to set/get the Context instance -CONTEXT_ATTR = '__datadog_context' +CONTEXT_ATTR = '__opentelemetry_context' class GeventContextProvider(BaseContextProvider): diff --git a/ddtrace/contrib/grpc/__init__.py b/oteltrace/contrib/grpc/__init__.py similarity index 90% rename from ddtrace/contrib/grpc/__init__.py rename to oteltrace/contrib/grpc/__init__.py index 3d6876e1..5183d6c8 100644 --- a/ddtrace/contrib/grpc/__init__.py +++ b/oteltrace/contrib/grpc/__init__.py @@ -2,12 +2,12 @@ The gRPC integration traces the client and server using interceptor pattern. gRPC will be automatically instrumented with ``patch_all``, or when using -the ``ddtrace-run`` command. +the ``oteltrace-run`` command. gRPC is instrumented on import. 
To instrument gRPC manually use the ``patch`` function.:: import grpc - from ddtrace import patch + from oteltrace import patch patch(grpc=True) # use grpc like usual @@ -16,7 +16,7 @@ ``Pin`` API:: import grpc - from ddtrace import Pin, patch, Tracer + from oteltrace import Pin, patch, Tracer patch(grpc=True) custom_tracer = Tracer() @@ -32,7 +32,7 @@ import grpc from grpc.framework.foundation import logging_pool - from ddtrace import Pin, patch, Tracer + from oteltrace import Pin, patch, Tracer patch(grpc=True) custom_tracer = Tracer() diff --git a/ddtrace/contrib/grpc/client_interceptor.py b/oteltrace/contrib/grpc/client_interceptor.py similarity index 96% rename from ddtrace/contrib/grpc/client_interceptor.py rename to oteltrace/contrib/grpc/client_interceptor.py index 647ed01e..8defc9f6 100644 --- a/ddtrace/contrib/grpc/client_interceptor.py +++ b/oteltrace/contrib/grpc/client_interceptor.py @@ -1,10 +1,10 @@ import collections import grpc -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt -from ddtrace import config -from ddtrace.compat import to_unicode -from ddtrace.ext import errors +from oteltrace import config +from oteltrace.compat import to_unicode +from oteltrace.ext import errors from ...internal.logger import get_logger from ...propagation.http import HTTPPropagator from ...constants import ANALYTICS_SAMPLE_RATE_KEY @@ -29,11 +29,11 @@ def intercept_channel(wrapped, instance, args, kwargs): channel = args[0] interceptors = args[1:] if isinstance(getattr(channel, '_interceptor', None), _ClientInterceptor): - dd_interceptor = channel._interceptor + otel_interceptor = channel._interceptor base_channel = getattr(channel, '_channel', None) if base_channel: new_channel = wrapped(channel._channel, *interceptors) - return grpc.intercept_channel(new_channel, dd_interceptor) + return grpc.intercept_channel(new_channel, otel_interceptor) return wrapped(*args, **kwargs) diff --git a/ddtrace/contrib/grpc/constants.py b/oteltrace/contrib/grpc/constants.py similarity index 100% rename from ddtrace/contrib/grpc/constants.py rename to oteltrace/contrib/grpc/constants.py diff --git a/ddtrace/contrib/grpc/patch.py b/oteltrace/contrib/grpc/patch.py similarity index 73% rename from ddtrace/contrib/grpc/patch.py rename to oteltrace/contrib/grpc/patch.py index 0c00c77f..9b41624f 100644 --- a/ddtrace/contrib/grpc/patch.py +++ b/oteltrace/contrib/grpc/patch.py @@ -1,8 +1,8 @@ import grpc import os -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w -from ddtrace import config, Pin +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace import config, Pin from ...utils.wrappers import unwrap as _u @@ -12,7 +12,7 @@ config._add('grpc_server', dict( - service_name=os.environ.get('DATADOG_SERVICE_NAME', constants.GRPC_SERVICE_SERVER), + service_name=os.environ.get('OPENTELEMETRY_SERVICE_NAME', constants.GRPC_SERVICE_SERVER), distributed_tracing_enabled=True, )) @@ -20,8 +20,8 @@ # compatibility but should change in future config._add('grpc', dict( service_name='{}-{}'.format( - os.environ.get('DATADOG_SERVICE_NAME'), constants.GRPC_SERVICE_CLIENT - ) if os.environ.get('DATADOG_SERVICE_NAME') else constants.GRPC_SERVICE_CLIENT, + os.environ.get('OPENTELEMETRY_SERVICE_NAME'), constants.GRPC_SERVICE_CLIENT + ) if os.environ.get('OPENTELEMETRY_SERVICE_NAME') else constants.GRPC_SERVICE_CLIENT, distributed_tracing_enabled=True, )) @@ -37,9 +37,9 @@ def unpatch(): def _patch_client(): - if getattr(constants.GRPC_PIN_MODULE_CLIENT, '__datadog_patch', 
False): + if getattr(constants.GRPC_PIN_MODULE_CLIENT, '__opentelemetry_patch', False): return - setattr(constants.GRPC_PIN_MODULE_CLIENT, '__datadog_patch', True) + setattr(constants.GRPC_PIN_MODULE_CLIENT, '__opentelemetry_patch', True) Pin(service=config.grpc.service_name).onto(constants.GRPC_PIN_MODULE_CLIENT) @@ -49,9 +49,9 @@ def _patch_client(): def _unpatch_client(): - if not getattr(constants.GRPC_PIN_MODULE_CLIENT, '__datadog_patch', False): + if not getattr(constants.GRPC_PIN_MODULE_CLIENT, '__opentelemetry_patch', False): return - setattr(constants.GRPC_PIN_MODULE_CLIENT, '__datadog_patch', False) + setattr(constants.GRPC_PIN_MODULE_CLIENT, '__opentelemetry_patch', False) pin = Pin.get_from(constants.GRPC_PIN_MODULE_CLIENT) if pin: @@ -62,9 +62,9 @@ def _unpatch_client(): def _patch_server(): - if getattr(constants.GRPC_PIN_MODULE_SERVER, '__datadog_patch', False): + if getattr(constants.GRPC_PIN_MODULE_SERVER, '__opentelemetry_patch', False): return - setattr(constants.GRPC_PIN_MODULE_SERVER, '__datadog_patch', True) + setattr(constants.GRPC_PIN_MODULE_SERVER, '__opentelemetry_patch', True) Pin(service=config.grpc_server.service_name).onto(constants.GRPC_PIN_MODULE_SERVER) @@ -72,9 +72,9 @@ def _patch_server(): def _unpatch_server(): - if not getattr(constants.GRPC_PIN_MODULE_SERVER, '__datadog_patch', False): + if not getattr(constants.GRPC_PIN_MODULE_SERVER, '__opentelemetry_patch', False): return - setattr(constants.GRPC_PIN_MODULE_SERVER, '__datadog_patch', False) + setattr(constants.GRPC_PIN_MODULE_SERVER, '__opentelemetry_patch', False) pin = Pin.get_from(constants.GRPC_PIN_MODULE_SERVER) if pin: diff --git a/ddtrace/contrib/grpc/server_interceptor.py b/oteltrace/contrib/grpc/server_interceptor.py similarity index 97% rename from ddtrace/contrib/grpc/server_interceptor.py rename to oteltrace/contrib/grpc/server_interceptor.py index b6b1e07d..508cb1c6 100644 --- a/ddtrace/contrib/grpc/server_interceptor.py +++ b/oteltrace/contrib/grpc/server_interceptor.py @@ -1,9 +1,9 @@ import grpc -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt -from ddtrace import config -from ddtrace.ext import errors -from ddtrace.compat import to_unicode +from oteltrace import config +from oteltrace.ext import errors +from oteltrace.compat import to_unicode from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...propagation.http import HTTPPropagator diff --git a/ddtrace/contrib/grpc/utils.py b/oteltrace/contrib/grpc/utils.py similarity index 100% rename from ddtrace/contrib/grpc/utils.py rename to oteltrace/contrib/grpc/utils.py diff --git a/ddtrace/contrib/httplib/__init__.py b/oteltrace/contrib/httplib/__init__.py similarity index 76% rename from ddtrace/contrib/httplib/__init__.py rename to oteltrace/contrib/httplib/__init__.py index 0d7883f6..e59bb63f 100644 --- a/ddtrace/contrib/httplib/__init__.py +++ b/oteltrace/contrib/httplib/__init__.py @@ -5,27 +5,27 @@ Usage:: # Patch all supported modules/functions - from ddtrace import patch + from oteltrace import patch patch(httplib=True) # Python 2 import httplib import urllib - resp = urllib.urlopen('http://www.datadog.com/') + resp = urllib.urlopen('http://opentelemetry.io/') # Python 3 import http.client import urllib.request - resp = urllib.request.urlopen('http://www.datadog.com/') + resp = urllib.request.urlopen('http://opentelemetry.io/') ``httplib`` spans do not include a default service name. 
Before HTTP calls are made, ensure a parent span has been started with a service name to be used for spans generated from those calls:: with tracer.trace('main', service='my-httplib-operation'): - resp = urllib.request.urlopen('http://www.datadog.com/') + resp = urllib.request.urlopen('http://opentelemetry.io/') :ref:`Headers tracing ` is supported for this integration. """ diff --git a/ddtrace/contrib/httplib/patch.py b/oteltrace/contrib/httplib/patch.py similarity index 88% rename from ddtrace/contrib/httplib/patch.py rename to oteltrace/contrib/httplib/patch.py index 65c40af2..3ee25178 100644 --- a/ddtrace/contrib/httplib/patch.py +++ b/oteltrace/contrib/httplib/patch.py @@ -1,5 +1,5 @@ # Third party -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # Project from ...compat import PY2, httplib, parse @@ -34,7 +34,7 @@ def _wrap_getresponse(func, instance, args, kwargs): finally: try: # Get the span attached to this instance, if available - span = getattr(instance, '_datadog_span', None) + span = getattr(instance, '_opentelemetry_span', None) if span: if resp: span.set_tag(ext_http.STATUS_CODE, resp.status) @@ -42,7 +42,7 @@ def _wrap_getresponse(func, instance, args, kwargs): store_response_headers(dict(resp.getheaders()), span, config.httplib) span.finish() - delattr(instance, '_datadog_span') + delattr(instance, '_opentelemetry_span') except Exception: log.debug('error applying request tags', exc_info=True) @@ -56,7 +56,7 @@ def _wrap_putrequest(func, instance, args, kwargs): try: # Create a new span and attach to this instance (so we can retrieve/update/close later on the response) span = pin.tracer.trace(span_name, span_type=ext_http.TYPE) - setattr(instance, '_datadog_span', span) + setattr(instance, '_opentelemetry_span', span) method, path = args[:2] scheme = 'https' if isinstance(instance, httplib.HTTPSConnection) else 'http' @@ -93,7 +93,7 @@ def _wrap_putrequest(func, instance, args, kwargs): def _wrap_putheader(func, instance, args, kwargs): - span = getattr(instance, '_datadog_span', None) + span = getattr(instance, '_opentelemetry_span', None) if span: store_request_headers({args[0]: args[1]}, span, config.httplib) @@ -105,15 +105,15 @@ def should_skip_request(pin, request): if not pin or not pin.enabled(): return True - api = pin.tracer.writer.api - return request.host == api.hostname and request.port == api.port + # TODO(Mauricio): what logic should be used here? 
+ return False def patch(): """ patch the built-in urllib/httplib/httplib.client methods for tracing""" - if getattr(httplib, '__datadog_patch', False): + if getattr(httplib, '__opentelemetry_patch', False): return - setattr(httplib, '__datadog_patch', True) + setattr(httplib, '__opentelemetry_patch', True) # Patch the desired methods setattr(httplib.HTTPConnection, '__init__', @@ -128,9 +128,9 @@ def patch(): def unpatch(): """ unpatch any previously patched modules """ - if not getattr(httplib, '__datadog_patch', False): + if not getattr(httplib, '__opentelemetry_patch', False): return - setattr(httplib, '__datadog_patch', False) + setattr(httplib, '__opentelemetry_patch', False) _u(httplib.HTTPConnection, '__init__') _u(httplib.HTTPConnection, 'getresponse') diff --git a/ddtrace/contrib/jinja2/__init__.py b/oteltrace/contrib/jinja2/__init__.py similarity index 94% rename from ddtrace/contrib/jinja2/__init__.py rename to oteltrace/contrib/jinja2/__init__.py index 5a39d248..602aba25 100644 --- a/ddtrace/contrib/jinja2/__init__.py +++ b/oteltrace/contrib/jinja2/__init__.py @@ -2,7 +2,7 @@ The ``jinja2`` integration traces templates loading, compilation and rendering. Auto instrumentation is available using the ``patch``. The following is an example:: - from ddtrace import patch + from oteltrace import patch from jinja2 import Environment, FileSystemLoader patch(jinja2=True) @@ -15,7 +15,7 @@ The library can be configured globally and per instance, using the Configuration API:: - from ddtrace import config + from oteltrace import config # Change service name globally config.jinja2['service_name'] = 'jinja-templates' diff --git a/ddtrace/contrib/jinja2/constants.py b/oteltrace/contrib/jinja2/constants.py similarity index 100% rename from ddtrace/contrib/jinja2/constants.py rename to oteltrace/contrib/jinja2/constants.py diff --git a/ddtrace/contrib/jinja2/patch.py b/oteltrace/contrib/jinja2/patch.py similarity index 89% rename from ddtrace/contrib/jinja2/patch.py rename to oteltrace/contrib/jinja2/patch.py index 38a2cc13..7a7c47ef 100644 --- a/ddtrace/contrib/jinja2/patch.py +++ b/oteltrace/contrib/jinja2/patch.py @@ -1,7 +1,7 @@ import jinja2 -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w -from ddtrace import config +from oteltrace import config from ...ext import http from ...utils.formats import get_env @@ -17,10 +17,10 @@ def patch(): - if getattr(jinja2, '__datadog_patch', False): + if getattr(jinja2, '__opentelemetry_patch', False): # already patched return - setattr(jinja2, '__datadog_patch', True) + setattr(jinja2, '__opentelemetry_patch', True) Pin( service=config.jinja2['service_name'], _config=config.jinja2, @@ -32,9 +32,9 @@ def patch(): def unpatch(): - if not getattr(jinja2, '__datadog_patch', False): + if not getattr(jinja2, '__opentelemetry_patch', False): return - setattr(jinja2, '__datadog_patch', False) + setattr(jinja2, '__opentelemetry_patch', False) _u(jinja2.Template, 'render') _u(jinja2.Template, 'generate') _u(jinja2.Environment, 'compile') diff --git a/ddtrace/contrib/kombu/__init__.py b/oteltrace/contrib/kombu/__init__.py similarity index 97% rename from ddtrace/contrib/kombu/__init__.py rename to oteltrace/contrib/kombu/__init__.py index a29639b9..43f15ec8 100644 --- a/ddtrace/contrib/kombu/__init__.py +++ b/oteltrace/contrib/kombu/__init__.py @@ -11,7 +11,7 @@ without the whole trace being dropped. 
:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch import kombu # If not patched yet, you can patch kombu specifically diff --git a/ddtrace/contrib/kombu/constants.py b/oteltrace/contrib/kombu/constants.py similarity index 100% rename from ddtrace/contrib/kombu/constants.py rename to oteltrace/contrib/kombu/constants.py diff --git a/ddtrace/contrib/kombu/patch.py b/oteltrace/contrib/kombu/patch.py similarity index 93% rename from ddtrace/contrib/kombu/patch.py rename to oteltrace/contrib/kombu/patch.py index a4a2062a..23028328 100644 --- a/ddtrace/contrib/kombu/patch.py +++ b/oteltrace/contrib/kombu/patch.py @@ -1,6 +1,6 @@ # 3p import kombu -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # project from ...constants import ANALYTICS_SAMPLE_RATE_KEY @@ -35,9 +35,9 @@ def patch(): This duplicated doesn't look nice. The nicer alternative is to use an ObjectProxy on top of Kombu. However, it means that any "import kombu.Connection" won't be instrumented. """ - if getattr(kombu, '_datadog_patch', False): + if getattr(kombu, '_opentelemetry_patch', False): return - setattr(kombu, '_datadog_patch', True) + setattr(kombu, '_opentelemetry_patch', True) _w = wrapt.wrap_function_wrapper # We wrap the _publish method because the publish method: @@ -60,8 +60,8 @@ def patch(): def unpatch(): - if getattr(kombu, '_datadog_patch', False): - setattr(kombu, '_datadog_patch', False) + if getattr(kombu, '_opentelemetry_patch', False): + setattr(kombu, '_opentelemetry_patch', False) unwrap(kombu.Producer, '_publish') unwrap(kombu.Consumer, 'receive') diff --git a/ddtrace/contrib/kombu/utils.py b/oteltrace/contrib/kombu/utils.py similarity index 100% rename from ddtrace/contrib/kombu/utils.py rename to oteltrace/contrib/kombu/utils.py diff --git a/ddtrace/contrib/logging/__init__.py b/oteltrace/contrib/logging/__init__.py similarity index 67% rename from ddtrace/contrib/logging/__init__.py rename to oteltrace/contrib/logging/__init__.py index 1882463d..d48be0f8 100644 --- a/ddtrace/contrib/logging/__init__.py +++ b/oteltrace/contrib/logging/__init__.py @@ -1,23 +1,23 @@ """ -Datadog APM traces can be integrated with Logs by first having the tracing +OpenTelemetry APM traces can be integrated with Logs by first having the tracing library patch the standard library ``logging`` module and updating the log formatter used by an application. This feature enables you to inject the current trace information into a log entry. Before the trace information can be injected into logs, the formatter has to be -updated to include ``dd.trace_id`` and ``dd.span_id`` attributes from the log +updated to include ``otel.trace_id`` and ``otel.span_id`` attributes from the log record. The integration with Logs occurs as long as the log entry includes -``dd.trace_id=%(dd.trace_id)s`` and ``dd.span_id=%(dd.span_id)s``. +``otel.trace_id=%(otel.trace_id)s`` and ``otel.span_id=%(otel.span_id)s``. -ddtrace-run +oteltrace-run ----------- -When using ``ddtrace-run``, enable patching by setting the environment variable -``DD_LOGS_INJECTION=true``. The logger by default will have a format that +When using ``oteltrace-run``, enable patching by setting the environment variable +``OTEL_LOGS_INJECTION=true``. 
The logger by default will have a format that includes trace information:: import logging - from ddtrace import tracer + from oteltrace import tracer log = logging.getLogger() log.level = logging.INFO @@ -35,12 +35,12 @@ def hello(): If you prefer to instrument manually, patch the logging library then update the log formatter as in the following example:: - from ddtrace import patch_all; patch_all(logging=True) + from oteltrace import patch_all; patch_all(logging=True) import logging - from ddtrace import tracer + from oteltrace import tracer FORMAT = ('%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] ' - '[dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s] ' + '[otel.trace_id=%(otel.trace_id)s otel.span_id=%(otel.span_id)s] ' '- %(message)s') logging.basicConfig(format=FORMAT) log = logging.getLogger() diff --git a/ddtrace/contrib/logging/patch.py b/oteltrace/contrib/logging/patch.py similarity index 77% rename from ddtrace/contrib/logging/patch.py rename to oteltrace/contrib/logging/patch.py index 84b6fdc3..6159c169 100644 --- a/ddtrace/contrib/logging/patch.py +++ b/oteltrace/contrib/logging/patch.py @@ -1,13 +1,13 @@ import logging -from ddtrace import config +from oteltrace import config from ...helpers import get_correlation_ids from ...utils.wrappers import unwrap as _u from ...vendor.wrapt import wrap_function_wrapper as _w -RECORD_ATTR_TRACE_ID = 'dd.trace_id' -RECORD_ATTR_SPAN_ID = 'dd.span_id' +RECORD_ATTR_TRACE_ID = 'otel.trace_id' +RECORD_ATTR_SPAN_ID = 'otel.span_id' RECORD_ATTR_VALUE_NULL = 0 config._add('logging', dict( @@ -35,15 +35,15 @@ def patch(): Patch ``logging`` module in the Python Standard Library for injection of tracer information by wrapping the base factory method ``Logger.makeRecord`` """ - if getattr(logging, '_datadog_patch', False): + if getattr(logging, '_opentelemetry_patch', False): return - setattr(logging, '_datadog_patch', True) + setattr(logging, '_opentelemetry_patch', True) _w(logging.Logger, 'makeRecord', _w_makeRecord) def unpatch(): - if getattr(logging, '_datadog_patch', False): - setattr(logging, '_datadog_patch', False) + if getattr(logging, '_opentelemetry_patch', False): + setattr(logging, '_opentelemetry_patch', False) _u(logging.Logger, 'makeRecord') diff --git a/ddtrace/contrib/mako/__init__.py b/oteltrace/contrib/mako/__init__.py similarity index 94% rename from ddtrace/contrib/mako/__init__.py rename to oteltrace/contrib/mako/__init__.py index d26d44ba..1a55c41a 100644 --- a/ddtrace/contrib/mako/__init__.py +++ b/oteltrace/contrib/mako/__init__.py @@ -2,7 +2,7 @@ The ``mako`` integration traces templates rendering. Auto instrumentation is available using the ``patch``. 
The following is an example:: - from ddtrace import patch + from oteltrace import patch from mako.template import Template patch(mako=True) diff --git a/ddtrace/contrib/mako/constants.py b/oteltrace/contrib/mako/constants.py similarity index 100% rename from ddtrace/contrib/mako/constants.py rename to oteltrace/contrib/mako/constants.py diff --git a/ddtrace/contrib/mako/patch.py b/oteltrace/contrib/mako/patch.py similarity index 85% rename from ddtrace/contrib/mako/patch.py rename to oteltrace/contrib/mako/patch.py index ebc179e2..166a6f4f 100644 --- a/ddtrace/contrib/mako/patch.py +++ b/oteltrace/contrib/mako/patch.py @@ -10,10 +10,10 @@ def patch(): - if getattr(mako, '__datadog_patch', False): + if getattr(mako, '__opentelemetry_patch', False): # already patched return - setattr(mako, '__datadog_patch', True) + setattr(mako, '__opentelemetry_patch', True) Pin(service='mako', app='mako', app_type=http.TEMPLATE).onto(Template) @@ -23,9 +23,9 @@ def patch(): def unpatch(): - if not getattr(mako, '__datadog_patch', False): + if not getattr(mako, '__opentelemetry_patch', False): return - setattr(mako, '__datadog_patch', False) + setattr(mako, '__opentelemetry_patch', False) _u(mako.template.Template, 'render') _u(mako.template.Template, 'render_unicode') diff --git a/ddtrace/contrib/molten/__init__.py b/oteltrace/contrib/molten/__init__.py similarity index 57% rename from ddtrace/contrib/molten/__init__.py rename to oteltrace/contrib/molten/__init__.py index e8cd134b..f543b1b0 100644 --- a/ddtrace/contrib/molten/__init__.py +++ b/oteltrace/contrib/molten/__init__.py @@ -1,41 +1,41 @@ """ -The molten web framework is automatically traced by ``ddtrace`` when calling ``patch``:: +The molten web framework is automatically traced by ``oteltrace`` when calling ``patch``:: from molten import App, Route - from ddtrace import patch_all; patch_all(molten=True) + from oteltrace import patch_all; patch_all(molten=True) def hello(name: str, age: int) -> str: return f'Hello {age} year old named {name}!' app = App(routes=[Route('/hello/{name}/{age}', hello)]) -You may also enable molten tracing automatically via ``ddtrace-run``:: +You may also enable molten tracing automatically via ``oteltrace-run``:: - ddtrace-run python app.py + oteltrace-run python app.py Configuration ~~~~~~~~~~~~~ -.. py:data:: ddtrace.config.molten['distributed_tracing'] +.. py:data:: oteltrace.config.molten['distributed_tracing'] Whether to parse distributed tracing headers from requests received by your Molten app. Default: ``True`` -.. py:data:: ddtrace.config.molten['analytics_enabled'] +.. py:data:: oteltrace.config.molten['analytics_enabled'] Whether to generate APM events in Trace Search & Analytics. - Can also be enabled with the ``DD_MOLTEN_ANALYTICS_ENABLED`` environment variable. + Can also be enabled with the ``OTEL_MOLTEN_ANALYTICS_ENABLED`` environment variable. Default: ``None`` -.. py:data:: ddtrace.config.molten['service_name'] +.. py:data:: oteltrace.config.molten['service_name'] The service name reported for your Molten app. - Can also be configured via the ``DD_MOLTEN_SERVICE_NAME`` environment variable. + Can also be configured via the ``OTEL_MOLTEN_SERVICE_NAME`` environment variable. 
Default: ``'molten'`` """ diff --git a/ddtrace/contrib/molten/patch.py b/oteltrace/contrib/molten/patch.py similarity index 94% rename from ddtrace/contrib/molten/patch.py rename to oteltrace/contrib/molten/patch.py index be059694..45dab429 100644 --- a/ddtrace/contrib/molten/patch.py +++ b/oteltrace/contrib/molten/patch.py @@ -1,5 +1,5 @@ -from ddtrace.vendor import wrapt -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor import wrapt +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w import molten @@ -27,9 +27,9 @@ def patch(): """Patch the instrumented methods """ - if getattr(molten, '_datadog_patch', False): + if getattr(molten, '_opentelemetry_patch', False): return - setattr(molten, '_datadog_patch', True) + setattr(molten, '_opentelemetry_patch', True) pin = Pin( service=config.molten['service_name'], @@ -47,8 +47,8 @@ def patch(): def unpatch(): """Remove instrumentation """ - if getattr(molten, '_datadog_patch', False): - setattr(molten, '_datadog_patch', False) + if getattr(molten, '_opentelemetry_patch', False): + setattr(molten, '_opentelemetry_patch', False) # remove pin pin = Pin.get_from(molten) diff --git a/ddtrace/contrib/molten/wrappers.py b/oteltrace/contrib/molten/wrappers.py similarity index 98% rename from ddtrace/contrib/molten/wrappers.py rename to oteltrace/contrib/molten/wrappers.py index db135565..0dfa69c1 100644 --- a/ddtrace/contrib/molten/wrappers.py +++ b/oteltrace/contrib/molten/wrappers.py @@ -1,4 +1,4 @@ -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt import molten from ... import Pin diff --git a/ddtrace/contrib/mongoengine/__init__.py b/oteltrace/contrib/mongoengine/__init__.py similarity index 95% rename from ddtrace/contrib/mongoengine/__init__.py rename to oteltrace/contrib/mongoengine/__init__.py index 384c04ab..f03ec4fa 100644 --- a/ddtrace/contrib/mongoengine/__init__.py +++ b/oteltrace/contrib/mongoengine/__init__.py @@ -3,7 +3,7 @@ ``patch_all`` will automatically patch your mongoengine connect method to make it work. 
:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch import mongoengine # If not patched yet, you can patch mongoengine specifically diff --git a/ddtrace/contrib/mongoengine/patch.py b/oteltrace/contrib/mongoengine/patch.py similarity index 100% rename from ddtrace/contrib/mongoengine/patch.py rename to oteltrace/contrib/mongoengine/patch.py diff --git a/ddtrace/contrib/mongoengine/trace.py b/oteltrace/contrib/mongoengine/trace.py similarity index 69% rename from ddtrace/contrib/mongoengine/trace.py rename to oteltrace/contrib/mongoengine/trace.py index 78667789..82b09d8b 100644 --- a/ddtrace/contrib/mongoengine/trace.py +++ b/oteltrace/contrib/mongoengine/trace.py @@ -1,11 +1,11 @@ # 3p -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # project -import ddtrace -from ddtrace.ext import mongo as mongox -from ddtrace.contrib.pymongo.client import TracedMongoClient +import oteltrace +from oteltrace.ext import mongo as mongox +from oteltrace.contrib.pymongo.client import TracedMongoClient # TODO(Benjamin): we should instrument register_connection instead, because more generic @@ -17,16 +17,16 @@ class WrappedConnect(wrapt.ObjectProxy): def __init__(self, connect): super(WrappedConnect, self).__init__(connect) - ddtrace.Pin(service=mongox.TYPE, tracer=ddtrace.tracer).onto(self) + oteltrace.Pin(service=mongox.TYPE, tracer=oteltrace.tracer).onto(self) def __call__(self, *args, **kwargs): client = self.__wrapped__(*args, **kwargs) - pin = ddtrace.Pin.get_from(self) + pin = oteltrace.Pin.get_from(self) if pin: # mongoengine uses pymongo internally, so we can just piggyback on the # existing pymongo integration and make sure that the connections it # uses internally are traced. client = TracedMongoClient(client) - ddtrace.Pin(service=pin.service, tracer=pin.tracer).onto(client) + oteltrace.Pin(service=pin.service, tracer=pin.tracer).onto(client) return client diff --git a/ddtrace/contrib/mysql/__init__.py b/oteltrace/contrib/mysql/__init__.py similarity index 97% rename from ddtrace/contrib/mysql/__init__.py rename to oteltrace/contrib/mysql/__init__.py index 586e67ca..780b6fde 100644 --- a/ddtrace/contrib/mysql/__init__.py +++ b/oteltrace/contrib/mysql/__init__.py @@ -4,7 +4,7 @@ :: # Make sure to import mysql.connector and not the 'connect' function, # otherwise you won't have access to the patched version - from ddtrace import Pin, patch + from oteltrace import Pin, patch import mysql.connector # If not patched yet, you can patch mysql specifically diff --git a/ddtrace/contrib/mysql/patch.py b/oteltrace/contrib/mysql/patch.py similarity index 91% rename from ddtrace/contrib/mysql/patch.py rename to oteltrace/contrib/mysql/patch.py index 30b49e8b..4c1d72ef 100644 --- a/ddtrace/contrib/mysql/patch.py +++ b/oteltrace/contrib/mysql/patch.py @@ -1,10 +1,10 @@ # 3p -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt import mysql.connector # project -from ddtrace import Pin -from ddtrace.contrib.dbapi import TracedConnection +from oteltrace import Pin +from oteltrace.contrib.dbapi import TracedConnection from ...ext import net, db, AppTypes diff --git a/ddtrace/contrib/mysql/tracers.py b/oteltrace/contrib/mysql/tracers.py similarity index 100% rename from ddtrace/contrib/mysql/tracers.py rename to oteltrace/contrib/mysql/tracers.py diff --git a/ddtrace/contrib/mysqldb/__init__.py b/oteltrace/contrib/mysqldb/__init__.py similarity index 96% rename from ddtrace/contrib/mysqldb/__init__.py rename to oteltrace/contrib/mysqldb/__init__.py index 
5050cd1f..35623aa8 100644 --- a/ddtrace/contrib/mysqldb/__init__.py +++ b/oteltrace/contrib/mysqldb/__init__.py @@ -4,7 +4,7 @@ :: # Make sure to import MySQLdb and not the 'connect' function, # otherwise you won't have access to the patched version - from ddtrace import Pin, patch + from oteltrace import Pin, patch import MySQLdb # If not patched yet, you can patch mysqldb specifically diff --git a/ddtrace/contrib/mysqldb/patch.py b/oteltrace/contrib/mysqldb/patch.py similarity index 78% rename from ddtrace/contrib/mysqldb/patch.py rename to oteltrace/contrib/mysqldb/patch.py index ddc72dc7..6fe28f0b 100644 --- a/ddtrace/contrib/mysqldb/patch.py +++ b/oteltrace/contrib/mysqldb/patch.py @@ -1,11 +1,11 @@ # 3p import MySQLdb -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w # project -from ddtrace import Pin -from ddtrace.contrib.dbapi import TracedConnection +from oteltrace import Pin +from oteltrace.contrib.dbapi import TracedConnection from ...ext import net, db, AppTypes from ...utils.wrappers import unwrap as _u @@ -19,9 +19,9 @@ def patch(): # patch only once - if getattr(MySQLdb, '__datadog_patch', False): + if getattr(MySQLdb, '__opentelemetry_patch', False): return - setattr(MySQLdb, '__datadog_patch', True) + setattr(MySQLdb, '__opentelemetry_patch', True) # `Connection` and `connect` are aliases for # `Connect`; patch them too @@ -33,9 +33,9 @@ def patch(): def unpatch(): - if not getattr(MySQLdb, '__datadog_patch', False): + if not getattr(MySQLdb, '__opentelemetry_patch', False): return - setattr(MySQLdb, '__datadog_patch', False) + setattr(MySQLdb, '__opentelemetry_patch', False) # unpatch MySQLdb _u(MySQLdb, 'Connect') diff --git a/ddtrace/contrib/psycopg/__init__.py b/oteltrace/contrib/psycopg/__init__.py similarity index 95% rename from ddtrace/contrib/psycopg/__init__.py rename to oteltrace/contrib/psycopg/__init__.py index 7ff69963..ee61d37c 100644 --- a/ddtrace/contrib/psycopg/__init__.py +++ b/oteltrace/contrib/psycopg/__init__.py @@ -3,7 +3,7 @@ ``patch_all`` will automatically patch your psycopg2 connection to make it work. 
:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch import psycopg2 # If not patched yet, you can patch psycopg2 specifically diff --git a/ddtrace/contrib/psycopg/connection.py b/oteltrace/contrib/psycopg/connection.py similarity index 68% rename from ddtrace/contrib/psycopg/connection.py rename to oteltrace/contrib/psycopg/connection.py index 703387f9..cdad1ce5 100644 --- a/ddtrace/contrib/psycopg/connection.py +++ b/oteltrace/contrib/psycopg/connection.py @@ -25,8 +25,8 @@ def connection_factory(tracer, service='postgres'): return functools.partial( TracedConnection, - datadog_tracer=tracer, - datadog_service=service, + opentelemetry_tracer=tracer, + opentelemetry_service=service, ) @@ -34,23 +34,23 @@ class TracedCursor(cursor): """Wrapper around cursor creating one span per query""" def __init__(self, *args, **kwargs): - self._datadog_tracer = kwargs.pop('datadog_tracer', None) - self._datadog_service = kwargs.pop('datadog_service', None) - self._datadog_tags = kwargs.pop('datadog_tags', None) + self._opentelemetry_tracer = kwargs.pop('opentelemetry_tracer', None) + self._opentelemetry_service = kwargs.pop('opentelemetry_service', None) + self._opentelemetry_tags = kwargs.pop('opentelemetry_tags', None) super(TracedCursor, self).__init__(*args, **kwargs) def execute(self, query, vars=None): """ just wrap the cursor execution in a span """ - if not self._datadog_tracer: + if not self._opentelemetry_tracer: return cursor.execute(self, query, vars) - with self._datadog_tracer.trace('postgres.query', service=self._datadog_service) as s: + with self._opentelemetry_tracer.trace('postgres.query', service=self._opentelemetry_service) as s: if not s.sampled: return super(TracedCursor, self).execute(query, vars) s.resource = query s.span_type = sql.TYPE - s.set_tags(self._datadog_tags) + s.set_tags(self._opentelemetry_tags) try: return super(TracedCursor, self).execute(query, vars) finally: @@ -66,14 +66,14 @@ class TracedConnection(connection): def __init__(self, *args, **kwargs): - self._datadog_tracer = kwargs.pop('datadog_tracer', None) - self._datadog_service = kwargs.pop('datadog_service', None) + self._opentelemetry_tracer = kwargs.pop('opentelemetry_tracer', None) + self._opentelemetry_service = kwargs.pop('opentelemetry_service', None) super(TracedConnection, self).__init__(*args, **kwargs) # add metadata (from the connection, string, etc) dsn = sql.parse_pg_dsn(self.dsn) - self._datadog_tags = { + self._opentelemetry_tags = { net.TARGET_HOST: dsn.get('host'), net.TARGET_PORT: dsn.get('port'), db.NAME: dsn.get('dbname'), @@ -81,14 +81,14 @@ def __init__(self, *args, **kwargs): 'db.application': dsn.get('application_name'), } - self._datadog_cursor_class = functools.partial( + self._opentelemetry_cursor_class = functools.partial( TracedCursor, - datadog_tracer=self._datadog_tracer, - datadog_service=self._datadog_service, - datadog_tags=self._datadog_tags, + opentelemetry_tracer=self._opentelemetry_tracer, + opentelemetry_service=self._opentelemetry_service, + opentelemetry_tags=self._opentelemetry_tags, ) def cursor(self, *args, **kwargs): """ register our custom cursor factory """ - kwargs.setdefault('cursor_factory', self._datadog_cursor_class) + kwargs.setdefault('cursor_factory', self._opentelemetry_cursor_class) return super(TracedConnection, self).cursor(*args, **kwargs) diff --git a/ddtrace/contrib/psycopg/patch.py b/oteltrace/contrib/psycopg/patch.py similarity index 94% rename from ddtrace/contrib/psycopg/patch.py rename to oteltrace/contrib/psycopg/patch.py 
index 913a49fd..18a6e175 100644 --- a/ddtrace/contrib/psycopg/patch.py +++ b/oteltrace/contrib/psycopg/patch.py @@ -1,11 +1,11 @@ # 3p import psycopg2 -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # project -from ddtrace import Pin, config -from ddtrace.contrib import dbapi -from ddtrace.ext import sql, net, db +from oteltrace import Pin, config +from oteltrace.contrib import dbapi +from oteltrace.ext import sql, net, db # Original connect method _connect = psycopg2.connect @@ -27,17 +27,17 @@ def patch(): """ Patch monkey patches psycopg's connection function so that the connection's functions are traced. """ - if getattr(psycopg2, '_datadog_patch', False): + if getattr(psycopg2, '_opentelemetry_patch', False): return - setattr(psycopg2, '_datadog_patch', True) + setattr(psycopg2, '_opentelemetry_patch', True) wrapt.wrap_function_wrapper(psycopg2, 'connect', patched_connect) _patch_extensions(_psycopg2_extensions) # do this early just in case def unpatch(): - if getattr(psycopg2, '_datadog_patch', False): - setattr(psycopg2, '_datadog_patch', False) + if getattr(psycopg2, '_opentelemetry_patch', False): + setattr(psycopg2, '_opentelemetry_patch', False) psycopg2.connect = _connect diff --git a/ddtrace/contrib/pylibmc/__init__.py b/oteltrace/contrib/pylibmc/__init__.py similarity index 96% rename from ddtrace/contrib/pylibmc/__init__.py rename to oteltrace/contrib/pylibmc/__init__.py index 798faffb..3bbae92a 100644 --- a/ddtrace/contrib/pylibmc/__init__.py +++ b/oteltrace/contrib/pylibmc/__init__.py @@ -5,7 +5,7 @@ # Be sure to import pylibmc and not pylibmc.Client directly, # otherwise you won't have access to the patched version - from ddtrace import Pin, patch + from oteltrace import Pin, patch import pylibmc # If not patched yet, you can patch pylibmc specifically diff --git a/ddtrace/contrib/pylibmc/addrs.py b/oteltrace/contrib/pylibmc/addrs.py similarity index 100% rename from ddtrace/contrib/pylibmc/addrs.py rename to oteltrace/contrib/pylibmc/addrs.py diff --git a/ddtrace/contrib/pylibmc/client.py b/oteltrace/contrib/pylibmc/client.py similarity index 96% rename from ddtrace/contrib/pylibmc/client.py rename to oteltrace/contrib/pylibmc/client.py index 04cc0f7a..602fc808 100644 --- a/ddtrace/contrib/pylibmc/client.py +++ b/oteltrace/contrib/pylibmc/client.py @@ -2,11 +2,11 @@ import random # 3p -from ddtrace.vendor.wrapt import ObjectProxy +from oteltrace.vendor.wrapt import ObjectProxy import pylibmc # project -import ddtrace +import oteltrace from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...ext import memcached from ...ext import net @@ -42,7 +42,7 @@ def __init__(self, client=None, service=memcached.SERVICE, tracer=None, *args, * super(TracedClient, self).__init__(client) - pin = ddtrace.Pin(service=service, tracer=tracer) + pin = oteltrace.Pin(service=service, tracer=tracer) pin.onto(self) # attempt to collect the pool of urls this client talks to @@ -55,7 +55,7 @@ def clone(self, *args, **kwargs): # rewrap new connections. cloned = self.__wrapped__.clone(*args, **kwargs) traced_client = TracedClient(cloned) - pin = ddtrace.Pin.get_from(self) + pin = oteltrace.Pin.get_from(self) if pin: pin.clone().onto(traced_client) return traced_client @@ -128,7 +128,7 @@ def _no_span(self): def _span(self, cmd_name): """ Return a span timing the given command. 
""" - pin = ddtrace.Pin.get_from(self) + pin = oteltrace.Pin.get_from(self) if not pin or not pin.enabled(): return self._no_span() diff --git a/ddtrace/contrib/pylibmc/patch.py b/oteltrace/contrib/pylibmc/patch.py similarity index 100% rename from ddtrace/contrib/pylibmc/patch.py rename to oteltrace/contrib/pylibmc/patch.py diff --git a/ddtrace/contrib/pylons/__init__.py b/oteltrace/contrib/pylons/__init__.py similarity index 88% rename from ddtrace/contrib/pylons/__init__.py rename to oteltrace/contrib/pylons/__init__.py index 88339224..470733f6 100644 --- a/ddtrace/contrib/pylons/__init__.py +++ b/oteltrace/contrib/pylons/__init__.py @@ -5,8 +5,8 @@ from pylons.wsgiapp import PylonsApp - from ddtrace import tracer - from ddtrace.contrib.pylons import PylonsTraceMiddleware + from oteltrace import tracer + from oteltrace.contrib.pylons import PylonsTraceMiddleware app = PylonsApp(...) diff --git a/ddtrace/contrib/pylons/compat.py b/oteltrace/contrib/pylons/compat.py similarity index 100% rename from ddtrace/contrib/pylons/compat.py rename to oteltrace/contrib/pylons/compat.py diff --git a/oteltrace/contrib/pylons/constants.py b/oteltrace/contrib/pylons/constants.py new file mode 100644 index 00000000..2c4502f0 --- /dev/null +++ b/oteltrace/contrib/pylons/constants.py @@ -0,0 +1 @@ +CONFIG_MIDDLEWARE = '__opentelemetry_middleware' diff --git a/ddtrace/contrib/pylons/middleware.py b/oteltrace/contrib/pylons/middleware.py similarity index 100% rename from ddtrace/contrib/pylons/middleware.py rename to oteltrace/contrib/pylons/middleware.py diff --git a/ddtrace/contrib/pylons/patch.py b/oteltrace/contrib/pylons/patch.py similarity index 71% rename from ddtrace/contrib/pylons/patch.py rename to oteltrace/contrib/pylons/patch.py index ad437d8c..f8ac84c4 100644 --- a/ddtrace/contrib/pylons/patch.py +++ b/oteltrace/contrib/pylons/patch.py @@ -1,8 +1,8 @@ import os -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt import pylons.wsgiapp -from ddtrace import tracer, Pin +from oteltrace import tracer, Pin from .middleware import PylonsTraceMiddleware from ...utils.formats import asbool, get_env @@ -11,18 +11,18 @@ def patch(): """Instrument Pylons applications""" - if getattr(pylons.wsgiapp, '_datadog_patch', False): + if getattr(pylons.wsgiapp, '_opentelemetry_patch', False): return - setattr(pylons.wsgiapp, '_datadog_patch', True) + setattr(pylons.wsgiapp, '_opentelemetry_patch', True) wrapt.wrap_function_wrapper('pylons.wsgiapp', 'PylonsApp.__init__', traced_init) def unpatch(): """Disable Pylons tracing""" - if not getattr(pylons.wsgiapp, '__datadog_patch', False): + if not getattr(pylons.wsgiapp, '__opentelemetry_patch', False): return - setattr(pylons.wsgiapp, '__datadog_patch', False) + setattr(pylons.wsgiapp, '__opentelemetry_patch', False) _u(pylons.wsgiapp.PylonsApp, '__init__') @@ -31,7 +31,7 @@ def traced_init(wrapped, instance, args, kwargs): wrapped(*args, **kwargs) # set tracing options and create the TraceMiddleware - service = os.environ.get('DATADOG_SERVICE_NAME', 'pylons') + service = os.environ.get('OPENTELEMETRY_SERVICE_NAME', 'pylons') distributed_tracing = asbool(get_env('pylons', 'distributed_tracing', True)) Pin(service=service, tracer=tracer).onto(instance) traced_app = PylonsTraceMiddleware(instance, tracer, service=service, distributed_tracing=distributed_tracing) diff --git a/ddtrace/contrib/pylons/renderer.py b/oteltrace/contrib/pylons/renderer.py similarity index 83% rename from ddtrace/contrib/pylons/renderer.py rename to 
oteltrace/contrib/pylons/renderer.py index 45ae49c8..7bd41197 100644 --- a/ddtrace/contrib/pylons/renderer.py +++ b/oteltrace/contrib/pylons/renderer.py @@ -2,7 +2,7 @@ from pylons import config -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w from .compat import legacy_pylons from .constants import CONFIG_MIDDLEWARE @@ -13,9 +13,9 @@ def trace_rendering(): of Pylons and multiple renderers. """ # patch only once - if getattr(pylons.templating, '__datadog_patch', False): + if getattr(pylons.templating, '__opentelemetry_patch', False): return - setattr(pylons.templating, '__datadog_patch', True) + setattr(pylons.templating, '__opentelemetry_patch', True) if legacy_pylons: # Pylons <= 0.9.7 diff --git a/ddtrace/contrib/pymemcache/__init__.py b/oteltrace/contrib/pymemcache/__init__.py similarity index 96% rename from ddtrace/contrib/pymemcache/__init__.py rename to oteltrace/contrib/pymemcache/__init__.py index ad8607a7..2ed76a3a 100644 --- a/ddtrace/contrib/pymemcache/__init__.py +++ b/oteltrace/contrib/pymemcache/__init__.py @@ -2,7 +2,7 @@ ``patch_all`` will automatically patch the pymemcache ``Client``:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch # If not patched yet, patch pymemcache specifically patch(pymemcache=True) diff --git a/ddtrace/contrib/pymemcache/client.py b/oteltrace/contrib/pymemcache/client.py similarity index 99% rename from ddtrace/contrib/pymemcache/client.py rename to oteltrace/contrib/pymemcache/client.py index fded1450..37cb0a1d 100644 --- a/ddtrace/contrib/pymemcache/client.py +++ b/oteltrace/contrib/pymemcache/client.py @@ -1,7 +1,7 @@ import sys # 3p -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt import pymemcache from pymemcache.client.base import Client from pymemcache.exceptions import ( diff --git a/ddtrace/contrib/pymemcache/patch.py b/oteltrace/contrib/pymemcache/patch.py similarity index 54% rename from ddtrace/contrib/pymemcache/patch.py rename to oteltrace/contrib/pymemcache/patch.py index 5fdad8e1..3eb75507 100644 --- a/ddtrace/contrib/pymemcache/patch.py +++ b/oteltrace/contrib/pymemcache/patch.py @@ -1,17 +1,17 @@ import pymemcache -from ddtrace.ext import memcached as memcachedx -from ddtrace.pin import Pin, _DD_PIN_NAME, _DD_PIN_PROXY_NAME +from oteltrace.ext import memcached as memcachedx +from oteltrace.pin import Pin, _OTEL_PIN_NAME, _OTEL_PIN_PROXY_NAME from .client import WrappedClient _Client = pymemcache.client.base.Client def patch(): - if getattr(pymemcache.client, '_datadog_patch', False): + if getattr(pymemcache.client, '_opentelemetry_patch', False): return - setattr(pymemcache.client, '_datadog_patch', True) + setattr(pymemcache.client, '_opentelemetry_patch', True) setattr(pymemcache.client.base, 'Client', WrappedClient) # Create a global pin with default configuration for our pymemcache clients @@ -22,11 +22,11 @@ def patch(): def unpatch(): """Remove pymemcache tracing""" - if not getattr(pymemcache.client, '_datadog_patch', False): + if not getattr(pymemcache.client, '_opentelemetry_patch', False): return - setattr(pymemcache.client, '_datadog_patch', False) + setattr(pymemcache.client, '_opentelemetry_patch', False) setattr(pymemcache.client.base, 'Client', _Client) # Remove any pins that may exist on the pymemcache reference - setattr(pymemcache, _DD_PIN_NAME, None) - setattr(pymemcache, _DD_PIN_PROXY_NAME, None) + setattr(pymemcache, _OTEL_PIN_NAME, None) + setattr(pymemcache, _OTEL_PIN_PROXY_NAME, None) diff 
--git a/ddtrace/contrib/pymongo/__init__.py b/oteltrace/contrib/pymongo/__init__.py similarity index 96% rename from ddtrace/contrib/pymongo/__init__.py rename to oteltrace/contrib/pymongo/__init__.py index 6e1745dc..c39aa611 100644 --- a/ddtrace/contrib/pymongo/__init__.py +++ b/oteltrace/contrib/pymongo/__init__.py @@ -6,7 +6,7 @@ :: # Be sure to import pymongo and not pymongo.MongoClient directly, # otherwise you won't have access to the patched version - from ddtrace import Pin, patch + from oteltrace import Pin, patch import pymongo # If not patched yet, you can patch pymongo specifically diff --git a/ddtrace/contrib/pymongo/client.py b/oteltrace/contrib/pymongo/client.py similarity index 92% rename from ddtrace/contrib/pymongo/client.py rename to oteltrace/contrib/pymongo/client.py index 59271901..f796b789 100644 --- a/ddtrace/contrib/pymongo/client.py +++ b/oteltrace/contrib/pymongo/client.py @@ -4,10 +4,10 @@ # 3p import pymongo -from ddtrace.vendor.wrapt import ObjectProxy +from oteltrace.vendor.wrapt import ObjectProxy # project -import ddtrace +import oteltrace from ...compat import iteritems from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...ext import AppTypes @@ -27,7 +27,7 @@ @deprecated(message='Use patching instead (see the docs).', version='1.0.0') def trace_mongo_client(client, tracer, service=mongox.TYPE): traced_client = TracedMongoClient(client) - ddtrace.Pin(service=service, tracer=tracer).onto(traced_client) + oteltrace.Pin(service=service, tracer=tracer).onto(traced_client) return traced_client @@ -62,13 +62,13 @@ def __init__(self, client=None, *args, **kwargs): client._topology = TracedTopology(client._topology) # Default Pin - ddtrace.Pin(service=mongox.TYPE, app=mongox.TYPE, app_type=AppTypes.db).onto(self) + oteltrace.Pin(service=mongox.TYPE, app=mongox.TYPE, app_type=AppTypes.db).onto(self) def __setddpin__(self, pin): pin.onto(self._topology) def __getddpin__(self): - return ddtrace.Pin.get_from(self._topology) + return oteltrace.Pin.get_from(self._topology) class TracedTopology(ObjectProxy): @@ -81,7 +81,7 @@ def select_server(self, *args, **kwargs): if not isinstance(s, TracedServer): s = TracedServer(s) # Reattach the pin every time in case it changed since the initial patching - ddtrace.Pin.get_from(self).onto(s) + oteltrace.Pin.get_from(self).onto(s) return s @@ -90,7 +90,7 @@ class TracedServer(ObjectProxy): def __init__(self, server): super(TracedServer, self).__init__(server) - def _datadog_trace_operation(self, operation): + def _opentelemetry_trace_operation(self, operation): cmd = None # Only try to parse something we think is a query. if self._is_query(operation): @@ -99,7 +99,7 @@ def _datadog_trace_operation(self, operation): except Exception: log.exception('error parsing query') - pin = ddtrace.Pin.get_from(self) + pin = oteltrace.Pin.get_from(self) # if we couldn't parse or shouldn't trace the message, just go. 
if not cmd or not pin or not pin.enabled(): return None @@ -120,7 +120,7 @@ def _datadog_trace_operation(self, operation): # Pymongo >= 3.9 def run_operation_with_response(self, sock_info, operation, *args, **kwargs): - span = self._datadog_trace_operation(operation) + span = self._opentelemetry_trace_operation(operation) if not span: return self.__wrapped__.run_operation_with_response( sock_info, @@ -145,7 +145,7 @@ def run_operation_with_response(self, sock_info, operation, *args, **kwargs): # Pymongo < 3.9 def send_message_with_response(self, operation, *args, **kwargs): - span = self._datadog_trace_operation(operation) + span = self._opentelemetry_trace_operation(operation) if not span: return self.__wrapped__.send_message_with_response( operation, @@ -171,7 +171,7 @@ def get_socket(self, *args, **kwargs): with self.__wrapped__.get_socket(*args, **kwargs) as s: if not isinstance(s, TracedSocket): s = TracedSocket(s) - ddtrace.Pin.get_from(self).onto(s) + oteltrace.Pin.get_from(self).onto(s) yield s @staticmethod @@ -192,7 +192,7 @@ def command(self, dbname, spec, *args, **kwargs): except Exception: log.exception('error parsing spec. skipping trace') - pin = ddtrace.Pin.get_from(self) + pin = oteltrace.Pin.get_from(self) # skip tracing if we don't have a piece of data we need if not dbname or not cmd or not pin or not pin.enabled(): return self.__wrapped__.command(dbname, spec, *args, **kwargs) @@ -208,7 +208,7 @@ def write_command(self, request_id, msg): except Exception: log.exception('error parsing msg') - pin = ddtrace.Pin.get_from(self) + pin = oteltrace.Pin.get_from(self) # if we couldn't parse it, don't try to trace it. if not cmd or not pin or not pin.enabled(): return self.__wrapped__.write_command(request_id, msg) @@ -220,7 +220,7 @@ def write_command(self, request_id, msg): return result def __trace(self, cmd): - pin = ddtrace.Pin.get_from(self) + pin = oteltrace.Pin.get_from(self) s = pin.tracer.trace( 'pymongo.cmd', span_type=mongox.TYPE, diff --git a/ddtrace/contrib/pymongo/parse.py b/oteltrace/contrib/pymongo/parse.py similarity index 100% rename from ddtrace/contrib/pymongo/parse.py rename to oteltrace/contrib/pymongo/parse.py diff --git a/ddtrace/contrib/pymongo/patch.py b/oteltrace/contrib/pymongo/patch.py similarity index 100% rename from ddtrace/contrib/pymongo/patch.py rename to oteltrace/contrib/pymongo/patch.py diff --git a/ddtrace/contrib/pymysql/__init__.py b/oteltrace/contrib/pymysql/__init__.py similarity index 96% rename from ddtrace/contrib/pymysql/__init__.py rename to oteltrace/contrib/pymysql/__init__.py index a471ea18..a947ddf8 100644 --- a/ddtrace/contrib/pymysql/__init__.py +++ b/oteltrace/contrib/pymysql/__init__.py @@ -3,7 +3,7 @@ ``patch_all`` will automatically patch your pymysql connection to make it work. 
:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch from pymysql import connect # If not patched yet, you can patch pymysql specifically diff --git a/ddtrace/contrib/pymysql/patch.py b/oteltrace/contrib/pymysql/patch.py similarity index 87% rename from ddtrace/contrib/pymysql/patch.py rename to oteltrace/contrib/pymysql/patch.py index 8ea8cf4c..3c266ffe 100644 --- a/ddtrace/contrib/pymysql/patch.py +++ b/oteltrace/contrib/pymysql/patch.py @@ -1,10 +1,10 @@ # 3p -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt import pymysql # project -from ddtrace import Pin -from ddtrace.contrib.dbapi import TracedConnection +from oteltrace import Pin +from oteltrace.contrib.dbapi import TracedConnection from ...ext import net, db, AppTypes CONN_ATTR_BY_TAG = { diff --git a/ddtrace/contrib/pymysql/tracers.py b/oteltrace/contrib/pymysql/tracers.py similarity index 100% rename from ddtrace/contrib/pymysql/tracers.py rename to oteltrace/contrib/pymysql/tracers.py diff --git a/ddtrace/contrib/pyramid/__init__.py b/oteltrace/contrib/pyramid/__init__.py similarity index 58% rename from ddtrace/contrib/pyramid/__init__.py rename to oteltrace/contrib/pyramid/__init__.py index cc61eacf..ecb9b468 100644 --- a/ddtrace/contrib/pyramid/__init__.py +++ b/oteltrace/contrib/pyramid/__init__.py @@ -3,10 +3,10 @@ from pyramid.config import Configurator - from ddtrace.contrib.pyramid import trace_pyramid + from oteltrace.contrib.pyramid import trace_pyramid settings = { - 'datadog_trace_service' : 'my-web-app-name', + 'opentelemetry_trace_service' : 'my-web-app-name', } config = Configurator(settings=settings) @@ -17,18 +17,18 @@ Available settings are: -* ``datadog_trace_service``: change the `pyramid` service name -* ``datadog_trace_enabled``: sets if the Tracer is enabled or not -* ``datadog_distributed_tracing``: set it to ``False`` to disable Distributed Tracing -* ``datadog_analytics_enabled``: set it to ``True`` to enable generating APM events for Trace Search & Analytics +* ``opentelemetry_trace_service``: change the `pyramid` service name +* ``opentelemetry_trace_enabled``: sets if the Tracer is enabled or not +* ``opentelemetry_distributed_tracing``: set it to ``False`` to disable Distributed Tracing +* ``opentelemetry_analytics_enabled``: set it to ``True`` to enable generating APM events for Trace Search & Analytics If you use the ``pyramid.tweens`` settings value to set the tweens for your -application, you need to add ``ddtrace.contrib.pyramid:trace_tween_factory`` +application, you need to add ``oteltrace.contrib.pyramid:trace_tween_factory`` explicitly to the list. 
For example:: settings = { - 'datadog_trace_service' : 'my-web-app-name', - 'pyramid.tweens', 'your_tween_no_1\\nyour_tween_no_2\\nddtrace.contrib.pyramid:trace_tween_factory', + 'opentelemetry_trace_service' : 'my-web-app-name', + 'pyramid.tweens': 'your_tween_no_1\\nyour_tween_no_2\\noteltrace.contrib.pyramid:trace_tween_factory', } config = Configurator(settings=settings) diff --git a/oteltrace/contrib/pyramid/constants.py b/oteltrace/contrib/pyramid/constants.py new file mode 100644 index 00000000..99a2d23e --- /dev/null +++ b/oteltrace/contrib/pyramid/constants.py @@ -0,0 +1,6 @@ +SETTINGS_SERVICE = 'opentelemetry_trace_service' +SETTINGS_TRACER = 'opentelemetry_tracer' +SETTINGS_TRACE_ENABLED = 'opentelemetry_trace_enabled' +SETTINGS_DISTRIBUTED_TRACING = 'opentelemetry_distributed_tracing' +SETTINGS_ANALYTICS_ENABLED = 'opentelemetry_analytics_enabled' +SETTINGS_ANALYTICS_SAMPLE_RATE = 'opentelemetry_analytics_sample_rate' diff --git a/ddtrace/contrib/pyramid/patch.py b/oteltrace/contrib/pyramid/patch.py similarity index 81% rename from ddtrace/contrib/pyramid/patch.py rename to oteltrace/contrib/pyramid/patch.py index 8224d183..82a37fac 100644 --- a/ddtrace/contrib/pyramid/patch.py +++ b/oteltrace/contrib/pyramid/patch.py @@ -1,6 +1,6 @@ import os -from .trace import trace_pyramid, DD_TWEEN_NAME +from .trace import trace_pyramid, OTEL_TWEEN_NAME from .constants import ( SETTINGS_SERVICE, SETTINGS_DISTRIBUTED_TRACING, SETTINGS_ANALYTICS_ENABLED, SETTINGS_ANALYTICS_SAMPLE_RATE, @@ -10,26 +10,26 @@ import pyramid.config from pyramid.path import caller_package -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt -DD_PATCH = '_datadog_patch' +OTEL_PATCH = '_opentelemetry_patch' def patch(): """ Patch pyramid.config.Configurator """ - if getattr(pyramid.config, DD_PATCH, False): + if getattr(pyramid.config, OTEL_PATCH, False): return - setattr(pyramid.config, DD_PATCH, True) + setattr(pyramid.config, OTEL_PATCH, True) _w = wrapt.wrap_function_wrapper _w('pyramid.config', 'Configurator.__init__', traced_init) def traced_init(wrapped, instance, args, kwargs): settings = kwargs.pop('settings', {}) - service = os.environ.get('DATADOG_SERVICE_NAME') or 'pyramid' + service = os.environ.get('OPENTELEMETRY_SERVICE_NAME') or 'pyramid' distributed_tracing = asbool(get_env('pyramid', 'distributed_tracing', True)) # DEV: integration-specific analytics flag can be not set but still enabled # globally for web frameworks @@ -57,8 +57,8 @@ def traced_init(wrapped, instance, args, kwargs): # function call it, our wrapper will mess things up. if not kwargs.get('package', None): # Get the packge for the third frame up from this one. - # - ddtrace.contrib.pyramid.path - # - ddtrace.vendor.wrapt + # - oteltrace.contrib.pyramid.path + # - oteltrace.vendor.wrapt # - (this is the frame we want) # DEV: Default is `level=2` which will give us the package from `wrapt` kwargs['package'] = caller_package(level=3) @@ -72,13 +72,13 @@ def insert_tween_if_needed(settings): # If the list is empty, pyramid does not consider the tweens have been # set explicitly. # And if our tween is already there, nothing to do - if not tweens or not tweens.strip() or DD_TWEEN_NAME in tweens: + if not tweens or not tweens.strip() or OTEL_TWEEN_NAME in tweens: return # pyramid.tweens.EXCVIEW is the name of built-in exception view provided by # pyramid. We need our tween to be before it, otherwise unhandled # exceptions will be caught before they reach our tween.
idx = tweens.find(pyramid.tweens.EXCVIEW) if idx == -1: - settings['pyramid.tweens'] = tweens + '\n' + DD_TWEEN_NAME + settings['pyramid.tweens'] = tweens + '\n' + OTEL_TWEEN_NAME else: - settings['pyramid.tweens'] = tweens[:idx] + DD_TWEEN_NAME + '\n' + tweens[idx:] + settings['pyramid.tweens'] = tweens[:idx] + OTEL_TWEEN_NAME + '\n' + tweens[idx:] diff --git a/ddtrace/contrib/pyramid/trace.py b/oteltrace/contrib/pyramid/trace.py similarity index 90% rename from ddtrace/contrib/pyramid/trace.py rename to oteltrace/contrib/pyramid/trace.py index c6cf08b5..449475a6 100644 --- a/ddtrace/contrib/pyramid/trace.py +++ b/oteltrace/contrib/pyramid/trace.py @@ -1,10 +1,10 @@ import pyramid.renderers from pyramid.settings import asbool from pyramid.httpexceptions import HTTPException -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # project -import ddtrace +import oteltrace from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...ext import http from ...internal.logger import get_logger @@ -22,17 +22,17 @@ log = get_logger(__name__) -DD_TWEEN_NAME = 'ddtrace.contrib.pyramid:trace_tween_factory' -DD_SPAN = '_datadog_span' +OTEL_TWEEN_NAME = 'oteltrace.contrib.pyramid:trace_tween_factory' +OTEL_SPAN = '_opentelemetry_span' def trace_pyramid(config): - config.include('ddtrace.contrib.pyramid') + config.include('oteltrace.contrib.pyramid') def includeme(config): # Add our tween just before the default exception handler - config.add_tween(DD_TWEEN_NAME, over=pyramid.tweens.EXCVIEW) + config.add_tween(OTEL_TWEEN_NAME, over=pyramid.tweens.EXCVIEW) # ensure we only patch the renderer once. if not isinstance(pyramid.renderers.RendererHelper.render, wrapt.ObjectProxy): wrapt.wrap_function_wrapper('pyramid.renderers', 'RendererHelper.render', trace_render) @@ -44,7 +44,7 @@ def trace_render(func, instance, args, kwargs): if not request: log.debug('No request passed to render, will not be traced') return func(*args, **kwargs) - span = getattr(request, DD_SPAN, None) + span = getattr(request, OTEL_SPAN, None) if not span: log.debug('No span found in request, will not be traced') return func(*args, **kwargs) @@ -58,7 +58,7 @@ def trace_tween_factory(handler, registry): # configuration settings = registry.settings service = settings.get(SETTINGS_SERVICE) or 'pyramid' - tracer = settings.get(SETTINGS_TRACER) or ddtrace.tracer + tracer = settings.get(SETTINGS_TRACER) or oteltrace.tracer enabled = asbool(settings.get(SETTINGS_TRACE_ENABLED, tracer.enabled)) distributed_tracing = asbool(settings.get(SETTINGS_DISTRIBUTED_TRACING, True)) @@ -84,7 +84,7 @@ def trace_tween(request): settings.get(SETTINGS_ANALYTICS_SAMPLE_RATE, True) ) - setattr(request, DD_SPAN, span) # used to find the tracer in templates + setattr(request, OTEL_SPAN, span) # used to find the tracer in templates response = None try: response = handler(request) diff --git a/ddtrace/contrib/redis/__init__.py b/oteltrace/contrib/redis/__init__.py similarity index 95% rename from ddtrace/contrib/redis/__init__.py rename to oteltrace/contrib/redis/__init__.py index 50622016..b2ac9c81 100644 --- a/ddtrace/contrib/redis/__init__.py +++ b/oteltrace/contrib/redis/__init__.py @@ -3,7 +3,7 @@ ``patch_all`` will automatically patch your Redis client to make it work. 
:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch import redis # If not patched yet, you can patch redis specifically diff --git a/ddtrace/contrib/redis/patch.py b/oteltrace/contrib/redis/patch.py similarity index 93% rename from ddtrace/contrib/redis/patch.py rename to oteltrace/contrib/redis/patch.py index 66fe8deb..52716dba 100644 --- a/ddtrace/contrib/redis/patch.py +++ b/oteltrace/contrib/redis/patch.py @@ -1,9 +1,9 @@ # 3p import redis -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # project -from ddtrace import config +from oteltrace import config from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...pin import Pin from ...ext import AppTypes, redis as redisx @@ -17,9 +17,9 @@ def patch(): This duplicated doesn't look nice. The nicer alternative is to use an ObjectProxy on top of Redis and StrictRedis. However, it means that any "import redis.Redis" won't be instrumented. """ - if getattr(redis, '_datadog_patch', False): + if getattr(redis, '_opentelemetry_patch', False): return - setattr(redis, '_datadog_patch', True) + setattr(redis, '_opentelemetry_patch', True) _w = wrapt.wrap_function_wrapper @@ -38,8 +38,8 @@ def patch(): def unpatch(): - if getattr(redis, '_datadog_patch', False): - setattr(redis, '_datadog_patch', False) + if getattr(redis, '_opentelemetry_patch', False): + setattr(redis, '_opentelemetry_patch', False) if redis.VERSION < (3, 0, 0): unwrap(redis.StrictRedis, 'execute_command') diff --git a/oteltrace/contrib/redis/tracers.py b/oteltrace/contrib/redis/tracers.py new file mode 100644 index 00000000..7c09e092 --- /dev/null +++ b/oteltrace/contrib/redis/tracers.py @@ -0,0 +1,20 @@ +from redis import StrictRedis + +from ...utils.deprecation import deprecated + + +DEFAULT_SERVICE = 'redis' + + +@deprecated(message='Use patching instead (see the docs).', version='1.0.0') +def get_traced_redis(oteltracer, service=DEFAULT_SERVICE, meta=None): + return _get_traced_redis(oteltracer, StrictRedis, service, meta) + + +@deprecated(message='Use patching instead (see the docs).', version='1.0.0') +def get_traced_redis_from(oteltracer, baseclass, service=DEFAULT_SERVICE, meta=None): + return _get_traced_redis(oteltracer, baseclass, service, meta) + + +def _get_traced_redis(oteltracer, baseclass, service, meta): + return baseclass diff --git a/ddtrace/contrib/redis/util.py b/oteltrace/contrib/redis/util.py similarity index 100% rename from ddtrace/contrib/redis/util.py rename to oteltrace/contrib/redis/util.py diff --git a/ddtrace/contrib/rediscluster/__init__.py b/oteltrace/contrib/rediscluster/__init__.py similarity index 95% rename from ddtrace/contrib/rediscluster/__init__.py rename to oteltrace/contrib/rediscluster/__init__.py index 86ad02f4..26f10e8f 100644 --- a/ddtrace/contrib/rediscluster/__init__.py +++ b/oteltrace/contrib/rediscluster/__init__.py @@ -3,7 +3,7 @@ ``patch_all`` will automatically patch your Redis Cluster client to make it work. 
:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch import rediscluster # If not patched yet, you can patch redis specifically diff --git a/ddtrace/contrib/rediscluster/patch.py b/oteltrace/contrib/rediscluster/patch.py similarity index 85% rename from ddtrace/contrib/rediscluster/patch.py rename to oteltrace/contrib/rediscluster/patch.py index d73466a1..1fab14b0 100644 --- a/ddtrace/contrib/rediscluster/patch.py +++ b/oteltrace/contrib/rediscluster/patch.py @@ -1,9 +1,9 @@ # 3p import rediscluster -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # project -from ddtrace import config +from oteltrace import config from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...pin import Pin from ...ext import AppTypes, redis as redisx @@ -15,9 +15,9 @@ def patch(): """Patch the instrumented methods """ - if getattr(rediscluster, '_datadog_patch', False): + if getattr(rediscluster, '_opentelemetry_patch', False): return - setattr(rediscluster, '_datadog_patch', True) + setattr(rediscluster, '_opentelemetry_patch', True) _w = wrapt.wrap_function_wrapper _w('rediscluster', 'StrictRedisCluster.execute_command', traced_execute_command) @@ -27,8 +27,8 @@ def patch(): def unpatch(): - if getattr(rediscluster, '_datadog_patch', False): - setattr(rediscluster, '_datadog_patch', False) + if getattr(rediscluster, '_opentelemetry_patch', False): + setattr(rediscluster, '_opentelemetry_patch', False) unwrap(rediscluster.StrictRedisCluster, 'execute_command') unwrap(rediscluster.StrictRedisCluster, 'pipeline') unwrap(rediscluster.StrictClusterPipeline, 'execute') diff --git a/ddtrace/contrib/requests/__init__.py b/oteltrace/contrib/requests/__init__.py similarity index 92% rename from ddtrace/contrib/requests/__init__.py rename to oteltrace/contrib/requests/__init__.py index c0dc6616..f7941476 100644 --- a/ddtrace/contrib/requests/__init__.py +++ b/oteltrace/contrib/requests/__init__.py @@ -3,7 +3,7 @@ Auto instrumentation is available using the ``patch`` function that **must be called before** importing the ``requests`` library. 
The following is an example:: - from ddtrace import patch + from oteltrace import patch patch(requests=True) import requests @@ -12,14 +12,14 @@ If you would prefer finer grained control, use a ``TracedSession`` object as you would a ``requests.Session``:: - from ddtrace.contrib.requests import TracedSession + from oteltrace.contrib.requests import TracedSession session = TracedSession() session.get("https://www.datadoghq.com") The library can be configured globally and per instance, using the Configuration API:: - from ddtrace import config + from oteltrace import config # disable distributed tracing globally config.requests['distributed_tracing'] = False diff --git a/ddtrace/contrib/requests/connection.py b/oteltrace/contrib/requests/connection.py similarity index 94% rename from ddtrace/contrib/requests/connection.py rename to oteltrace/contrib/requests/connection.py index 56e49905..6f7b31e2 100644 --- a/ddtrace/contrib/requests/connection.py +++ b/oteltrace/contrib/requests/connection.py @@ -1,6 +1,6 @@ -import ddtrace -from ddtrace import config -from ddtrace.http import store_request_headers, store_response_headers +import oteltrace +from oteltrace import config +from oteltrace.http import store_request_headers, store_response_headers from ...compat import parse from ...constants import ANALYTICS_SAMPLE_RATE_KEY @@ -41,10 +41,10 @@ def _extract_service_name(session, span, hostname=None): def _wrap_send(func, instance, args, kwargs): """Trace the `Session.send` instance method""" # TODO[manu]: we already offer a way to provide the Global Tracer - # and is ddtrace.tracer; it's used only inside our tests and can + # and is oteltrace.tracer; it's used only inside our tests and can # be easily changed by providing a TracingTestCase that sets common # tracing functionalities. - tracer = getattr(instance, 'datadog_tracer', ddtrace.tracer) + tracer = getattr(instance, 'opentelemetry_tracer', oteltrace.tracer) # skip if tracing is not enabled if not tracer.enabled: diff --git a/ddtrace/contrib/requests/constants.py b/oteltrace/contrib/requests/constants.py similarity index 100% rename from ddtrace/contrib/requests/constants.py rename to oteltrace/contrib/requests/constants.py diff --git a/ddtrace/contrib/requests/legacy.py b/oteltrace/contrib/requests/legacy.py similarity index 97% rename from ddtrace/contrib/requests/legacy.py rename to oteltrace/contrib/requests/legacy.py index a71ebbbc..08c1eadb 100644 --- a/ddtrace/contrib/requests/legacy.py +++ b/oteltrace/contrib/requests/legacy.py @@ -1,6 +1,6 @@ # [Deprecation]: this module contains deprecated functions # that will be removed in newer versions of the Tracer. 
-from ddtrace import config +from oteltrace import config from ...utils.deprecation import deprecation diff --git a/ddtrace/contrib/requests/patch.py b/oteltrace/contrib/requests/patch.py similarity index 80% rename from ddtrace/contrib/requests/patch.py rename to oteltrace/contrib/requests/patch.py index 7317ee13..e6106623 100644 --- a/ddtrace/contrib/requests/patch.py +++ b/oteltrace/contrib/requests/patch.py @@ -1,8 +1,8 @@ import requests -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w -from ddtrace import config +from oteltrace import config from ...pin import Pin from ...utils.formats import asbool, get_env @@ -22,9 +22,9 @@ def patch(): """Activate http calls tracing""" - if getattr(requests, '__datadog_patch', False): + if getattr(requests, '__opentelemetry_patch', False): return - setattr(requests, '__datadog_patch', True) + setattr(requests, '__opentelemetry_patch', True) _w('requests', 'Session.send', _wrap_send) Pin( @@ -44,8 +44,8 @@ def patch(): def unpatch(): """Disable traced sessions""" - if not getattr(requests, '__datadog_patch', False): + if not getattr(requests, '__opentelemetry_patch', False): return - setattr(requests, '__datadog_patch', False) + setattr(requests, '__opentelemetry_patch', False) _u(requests.Session, 'send') diff --git a/ddtrace/contrib/requests/session.py b/oteltrace/contrib/requests/session.py similarity index 84% rename from ddtrace/contrib/requests/session.py rename to oteltrace/contrib/requests/session.py index 85f21a80..6287c564 100644 --- a/ddtrace/contrib/requests/session.py +++ b/oteltrace/contrib/requests/session.py @@ -1,6 +1,6 @@ import requests -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w from .connection import _wrap_send diff --git a/ddtrace/contrib/sqlalchemy/__init__.py b/oteltrace/contrib/sqlalchemy/__init__.py similarity index 95% rename from ddtrace/contrib/sqlalchemy/__init__.py rename to oteltrace/contrib/sqlalchemy/__init__.py index b4758604..374f9f9a 100644 --- a/ddtrace/contrib/sqlalchemy/__init__.py +++ b/oteltrace/contrib/sqlalchemy/__init__.py @@ -3,7 +3,7 @@ using the patch method that **must be called before** importing sqlalchemy:: # patch before importing `create_engine` - from ddtrace import Pin, patch + from oteltrace import Pin, patch patch(sqlalchemy=True) # use SQLAlchemy as usual diff --git a/ddtrace/contrib/sqlalchemy/engine.py b/oteltrace/contrib/sqlalchemy/engine.py similarity index 93% rename from ddtrace/contrib/sqlalchemy/engine.py rename to oteltrace/contrib/sqlalchemy/engine.py index a3fb1af9..0bd9be74 100644 --- a/ddtrace/contrib/sqlalchemy/engine.py +++ b/oteltrace/contrib/sqlalchemy/engine.py @@ -2,8 +2,8 @@ To trace sqlalchemy queries, add instrumentation to the engine class or instance you are using:: - from ddtrace import tracer - from ddtrace.contrib.sqlalchemy import trace_engine + from oteltrace import tracer + from oteltrace.contrib.sqlalchemy import trace_engine from sqlalchemy import create_engine engine = create_engine('sqlite:///:memory:') @@ -15,7 +15,7 @@ from sqlalchemy.event import listen # project -import ddtrace +import oteltrace from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...ext import sql as sqlx @@ -29,10 +29,10 @@ def trace_engine(engine, tracer=None, service=None): Add tracing instrumentation to the given sqlalchemy engine or instance. 
:param sqlalchemy.Engine engine: a SQLAlchemy engine class or instance - :param ddtrace.Tracer tracer: a tracer instance. will default to the global + :param oteltrace.Tracer tracer: a tracer instance. will default to the global :param str service: the name of the service to trace. """ - tracer = tracer or ddtrace.tracer # by default use global + tracer = tracer or oteltrace.tracer # by default use global EngineTracer(tracer, service, engine) @@ -46,7 +46,7 @@ def _wrap_create_engine(func, module, args, kwargs): # name is used by default; users can update this setting # using the PIN object engine = func(*args, **kwargs) - EngineTracer(ddtrace.tracer, None, engine) + EngineTracer(oteltrace.tracer, None, engine) return engine diff --git a/ddtrace/contrib/sqlalchemy/patch.py b/oteltrace/contrib/sqlalchemy/patch.py similarity index 56% rename from ddtrace/contrib/sqlalchemy/patch.py rename to oteltrace/contrib/sqlalchemy/patch.py index ca3a3552..5f4d4698 100644 --- a/ddtrace/contrib/sqlalchemy/patch.py +++ b/oteltrace/contrib/sqlalchemy/patch.py @@ -1,15 +1,15 @@ import sqlalchemy -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w from .engine import _wrap_create_engine from ...utils.wrappers import unwrap def patch(): - if getattr(sqlalchemy.engine, '__datadog_patch', False): + if getattr(sqlalchemy.engine, '__opentelemetry_patch', False): return - setattr(sqlalchemy.engine, '__datadog_patch', True) + setattr(sqlalchemy.engine, '__opentelemetry_patch', True) # patch the engine creation function _w('sqlalchemy', 'create_engine', _wrap_create_engine) @@ -18,7 +18,7 @@ def patch(): def unpatch(): # unpatch sqlalchemy - if getattr(sqlalchemy.engine, '__datadog_patch', False): - setattr(sqlalchemy.engine, '__datadog_patch', False) + if getattr(sqlalchemy.engine, '__opentelemetry_patch', False): + setattr(sqlalchemy.engine, '__opentelemetry_patch', False) unwrap(sqlalchemy, 'create_engine') unwrap(sqlalchemy.engine, 'create_engine') diff --git a/ddtrace/contrib/sqlite3/__init__.py b/oteltrace/contrib/sqlite3/__init__.py similarity index 94% rename from ddtrace/contrib/sqlite3/__init__.py rename to oteltrace/contrib/sqlite3/__init__.py index ec45ebe7..2d668ad2 100644 --- a/ddtrace/contrib/sqlite3/__init__.py +++ b/oteltrace/contrib/sqlite3/__init__.py @@ -3,7 +3,7 @@ ``patch_all`` will automatically patch your sqlite3 connection to make it work. 
:: - from ddtrace import Pin, patch + from oteltrace import Pin, patch import sqlite3 # If not patched yet, you can patch sqlite3 specifically diff --git a/ddtrace/contrib/sqlite3/connection.py b/oteltrace/contrib/sqlite3/connection.py similarity index 100% rename from ddtrace/contrib/sqlite3/connection.py rename to oteltrace/contrib/sqlite3/connection.py diff --git a/ddtrace/contrib/sqlite3/patch.py b/oteltrace/contrib/sqlite3/patch.py similarity index 98% rename from ddtrace/contrib/sqlite3/patch.py rename to oteltrace/contrib/sqlite3/patch.py index c60f8322..bb43e1ac 100644 --- a/ddtrace/contrib/sqlite3/patch.py +++ b/oteltrace/contrib/sqlite3/patch.py @@ -1,7 +1,7 @@ # 3p import sqlite3 import sqlite3.dbapi2 -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # project from ...contrib.dbapi import TracedConnection, TracedCursor, FetchTracedCursor diff --git a/ddtrace/contrib/tornado/__init__.py b/oteltrace/contrib/tornado/__init__.py similarity index 98% rename from ddtrace/contrib/tornado/__init__.py rename to oteltrace/contrib/tornado/__init__.py index e19ba599..5b62fa00 100644 --- a/ddtrace/contrib/tornado/__init__.py +++ b/oteltrace/contrib/tornado/__init__.py @@ -8,7 +8,7 @@ The following is an example:: # patch before importing tornado and concurrent.futures - from ddtrace import tracer, patch + from oteltrace import tracer, patch patch(tornado=True) import tornado.web @@ -72,7 +72,7 @@ def log_exception(self, typ, value, tb): Tornado settings can be used to change some tracing configuration, like:: settings = { - 'datadog_trace': { + 'opentelemetry_trace': { 'default_service': 'my-tornado-app', 'tags': {'env': 'production'}, 'distributed_tracing': False, diff --git a/ddtrace/contrib/tornado/application.py b/oteltrace/contrib/tornado/application.py similarity index 85% rename from ddtrace/contrib/tornado/application.py rename to oteltrace/contrib/tornado/application.py index 079c32d6..188b7da2 100644 --- a/ddtrace/contrib/tornado/application.py +++ b/oteltrace/contrib/tornado/application.py @@ -1,4 +1,4 @@ -import ddtrace +import oteltrace from tornado import template @@ -16,7 +16,7 @@ def tracer_config(__init__, app, args, kwargs): # default settings settings = { - 'tracer': ddtrace.tracer, + 'tracer': oteltrace.tracer, 'default_service': 'tornado-web', 'distributed_tracing': True, 'analytics_enabled': None @@ -42,8 +42,6 @@ def tracer_config(__init__, app, args, kwargs): context_provider=context_provider, wrap_executor=decorators.wrap_executor, enabled=settings.get('enabled', None), - hostname=settings.get('agent_hostname', None), - port=settings.get('agent_port', None), settings=extra_settings, ) @@ -53,4 +51,4 @@ def tracer_config(__init__, app, args, kwargs): tracer.set_tags(tags) # configure the PIN object for template rendering - ddtrace.Pin(app='tornado', service=service, app_type='web', tracer=tracer).onto(template) + oteltrace.Pin(app='tornado', service=service, app_type='web', tracer=tracer).onto(template) diff --git a/ddtrace/contrib/tornado/compat.py b/oteltrace/contrib/tornado/compat.py similarity index 100% rename from ddtrace/contrib/tornado/compat.py rename to oteltrace/contrib/tornado/compat.py diff --git a/oteltrace/contrib/tornado/constants.py b/oteltrace/contrib/tornado/constants.py new file mode 100644 index 00000000..be0ef89d --- /dev/null +++ b/oteltrace/contrib/tornado/constants.py @@ -0,0 +1,9 @@ +""" +This module defines Tornado settings that are shared between +integration modules. 
+""" +CONFIG_KEY = 'opentelemetry_trace' +REQUEST_CONTEXT_KEY = 'opentelemetry_context' +REQUEST_SPAN_KEY = '__opentelemetry_request_span' +FUTURE_SPAN_KEY = '__opentelemetry_future_span' +PARENT_SPAN_KEY = '__opentelemetry_parent_span' diff --git a/ddtrace/contrib/tornado/decorators.py b/oteltrace/contrib/tornado/decorators.py similarity index 98% rename from ddtrace/contrib/tornado/decorators.py rename to oteltrace/contrib/tornado/decorators.py index eecb465b..28e4f0fd 100644 --- a/ddtrace/contrib/tornado/decorators.py +++ b/oteltrace/contrib/tornado/decorators.py @@ -1,4 +1,4 @@ -import ddtrace +import oteltrace import sys from functools import wraps @@ -70,7 +70,7 @@ def pass_context_decorator(fn): @wraps(fn) def wrapper(*args, **kwargs): # from the current context, retrive the active span - current_ctx = ddtrace.tracer.get_call_context() + current_ctx = oteltrace.tracer.get_call_context() parent_span = getattr(current_ctx, '_current_span', None) # pass the current parent span in the Future call so that @@ -115,7 +115,7 @@ def run_executor_stack_context(fn, args, kwargs, parent_span): the context here will always bring to an empty `Context`. """ with TracerStackContext(): - ctx = ddtrace.tracer.get_call_context() + ctx = oteltrace.tracer.get_call_context() ctx._current_span = parent_span return fn(*args, **kwargs) diff --git a/ddtrace/contrib/tornado/handlers.py b/oteltrace/contrib/tornado/handlers.py similarity index 100% rename from ddtrace/contrib/tornado/handlers.py rename to oteltrace/contrib/tornado/handlers.py diff --git a/ddtrace/contrib/tornado/patch.py b/oteltrace/contrib/tornado/patch.py similarity index 82% rename from ddtrace/contrib/tornado/patch.py rename to oteltrace/contrib/tornado/patch.py index 81a671ba..dae409c8 100644 --- a/ddtrace/contrib/tornado/patch.py +++ b/oteltrace/contrib/tornado/patch.py @@ -1,7 +1,7 @@ -import ddtrace +import oteltrace import tornado -from ddtrace.vendor.wrapt import wrap_function_wrapper as _w +from oteltrace.vendor.wrapt import wrap_function_wrapper as _w from . import handlers, application, decorators, template, compat, context_provider from ...utils.wrappers import unwrap as _u @@ -13,9 +13,9 @@ def patch(): traced using the given ``tracer``. """ # patch only once - if getattr(tornado, '__datadog_patch', False): + if getattr(tornado, '__opentelemetry_patch', False): return - setattr(tornado, '__datadog_patch', True) + setattr(tornado, '__opentelemetry_patch', True) # patch Application to initialize properly our settings and tracer _w('tornado.web', 'Application.__init__', application.tracer_config) @@ -32,7 +32,7 @@ def patch(): compat.wrap_futures() # configure the global tracer - ddtrace.tracer.configure( + oteltrace.tracer.configure( context_provider=context_provider, wrap_executor=decorators.wrap_executor, ) @@ -42,9 +42,9 @@ def unpatch(): """ Remove all tracing functions in a Tornado web application. 
""" - if not getattr(tornado, '__datadog_patch', False): + if not getattr(tornado, '__opentelemetry_patch', False): return - setattr(tornado, '__datadog_patch', False) + setattr(tornado, '__opentelemetry_patch', False) # unpatch Tornado _u(tornado.web.RequestHandler, '_execute') diff --git a/ddtrace/contrib/tornado/stack_context.py b/oteltrace/contrib/tornado/stack_context.py similarity index 100% rename from ddtrace/contrib/tornado/stack_context.py rename to oteltrace/contrib/tornado/stack_context.py diff --git a/ddtrace/contrib/tornado/template.py b/oteltrace/contrib/tornado/template.py similarity index 97% rename from ddtrace/contrib/tornado/template.py rename to oteltrace/contrib/tornado/template.py index 885bbf1b..c1d37183 100644 --- a/ddtrace/contrib/tornado/template.py +++ b/oteltrace/contrib/tornado/template.py @@ -1,6 +1,6 @@ from tornado import template -from ddtrace import Pin +from oteltrace import Pin from ...ext import http diff --git a/ddtrace/contrib/util.py b/oteltrace/contrib/util.py similarity index 74% rename from ddtrace/contrib/util.py rename to oteltrace/contrib/util.py index ae96cc4c..f7c04548 100644 --- a/ddtrace/contrib/util.py +++ b/oteltrace/contrib/util.py @@ -4,8 +4,8 @@ deprecation( - name='ddtrace.contrib.util', - message='Use `ddtrace.utils.importlib` module instead', + name='oteltrace.contrib.util', + message='Use `oteltrace.utils.importlib` module instead', version='1.0.0', ) diff --git a/ddtrace/contrib/vertica/__init__.py b/oteltrace/contrib/vertica/__init__.py similarity index 88% rename from ddtrace/contrib/vertica/__init__.py rename to oteltrace/contrib/vertica/__init__.py index 763c6685..adc70875 100644 --- a/ddtrace/contrib/vertica/__init__.py +++ b/oteltrace/contrib/vertica/__init__.py @@ -3,12 +3,12 @@ library. Vertica will be automatically instrumented with ``patch_all``, or when using -the ``ddtrace-run`` command. +the ``oteltrace-run`` command. Vertica is instrumented on import. To instrument Vertica manually use the ``patch`` function. 
Note the ordering of the following statements:: - from ddtrace import patch + from oteltrace import patch patch(vertica=True) import vertica_python @@ -18,7 +18,7 @@ To configure the Vertica integration globally you can use the ``Config`` API:: - from ddtrace import config, patch + from oteltrace import config, patch patch(vertica=True) config.vertica['service_name'] = 'my-vertica-database' @@ -27,7 +27,7 @@ To configure the Vertica integration on an instance-per-instance basis use the ``Pin`` API:: - from ddtrace import Pin, patch, Tracer + from oteltrace import Pin, patch, Tracer patch(vertica=True) import vertica_python diff --git a/ddtrace/contrib/vertica/constants.py b/oteltrace/contrib/vertica/constants.py similarity index 100% rename from ddtrace/contrib/vertica/constants.py rename to oteltrace/contrib/vertica/constants.py diff --git a/ddtrace/contrib/vertica/patch.py b/oteltrace/contrib/vertica/patch.py similarity index 98% rename from ddtrace/contrib/vertica/patch.py rename to oteltrace/contrib/vertica/patch.py index dfe3aecc..902e5ac2 100644 --- a/ddtrace/contrib/vertica/patch.py +++ b/oteltrace/contrib/vertica/patch.py @@ -1,8 +1,8 @@ import importlib -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt -import ddtrace +import oteltrace from ...constants import ANALYTICS_SAMPLE_RATE_KEY from ...ext import db as dbx, sql from ...ext import net, AppTypes @@ -170,7 +170,7 @@ def init_wrapper(wrapped, instance, args, kwargs): app=config['app'], app_type=config['app_type'], tags=config.get('tags', {}), - tracer=config.get('tracer', ddtrace.tracer), + tracer=config.get('tracer', oteltrace.tracer), _config=config['patch'][patch_item], ).onto(instance) return r diff --git a/ddtrace/ext/__init__.py b/oteltrace/ext/__init__.py similarity index 100% rename from ddtrace/ext/__init__.py rename to oteltrace/ext/__init__.py diff --git a/ddtrace/ext/apps.py b/oteltrace/ext/apps.py similarity index 100% rename from ddtrace/ext/apps.py rename to oteltrace/ext/apps.py diff --git a/ddtrace/ext/aws.py b/oteltrace/ext/aws.py similarity index 100% rename from ddtrace/ext/aws.py rename to oteltrace/ext/aws.py diff --git a/ddtrace/ext/cassandra.py b/oteltrace/ext/cassandra.py similarity index 100% rename from ddtrace/ext/cassandra.py rename to oteltrace/ext/cassandra.py diff --git a/ddtrace/ext/consul.py b/oteltrace/ext/consul.py similarity index 100% rename from ddtrace/ext/consul.py rename to oteltrace/ext/consul.py diff --git a/ddtrace/ext/db.py b/oteltrace/ext/db.py similarity index 100% rename from ddtrace/ext/db.py rename to oteltrace/ext/db.py diff --git a/ddtrace/ext/elasticsearch.py b/oteltrace/ext/elasticsearch.py similarity index 100% rename from ddtrace/ext/elasticsearch.py rename to oteltrace/ext/elasticsearch.py diff --git a/ddtrace/ext/errors.py b/oteltrace/ext/errors.py similarity index 100% rename from ddtrace/ext/errors.py rename to oteltrace/ext/errors.py diff --git a/ddtrace/ext/http.py b/oteltrace/ext/http.py similarity index 100% rename from ddtrace/ext/http.py rename to oteltrace/ext/http.py diff --git a/ddtrace/ext/kombu.py b/oteltrace/ext/kombu.py similarity index 100% rename from ddtrace/ext/kombu.py rename to oteltrace/ext/kombu.py diff --git a/ddtrace/ext/memcached.py b/oteltrace/ext/memcached.py similarity index 100% rename from ddtrace/ext/memcached.py rename to oteltrace/ext/memcached.py diff --git a/ddtrace/ext/mongo.py b/oteltrace/ext/mongo.py similarity index 100% rename from ddtrace/ext/mongo.py rename to oteltrace/ext/mongo.py diff --git 
a/ddtrace/ext/net.py b/oteltrace/ext/net.py similarity index 100% rename from ddtrace/ext/net.py rename to oteltrace/ext/net.py diff --git a/ddtrace/ext/priority.py b/oteltrace/ext/priority.py similarity index 93% rename from ddtrace/ext/priority.py rename to oteltrace/ext/priority.py index d7cd27b9..17b78070 100644 --- a/ddtrace/ext/priority.py +++ b/oteltrace/ext/priority.py @@ -4,7 +4,7 @@ For example: -from ddtrace.ext.priority import USER_REJECT, USER_KEEP +from oteltrace.ext.priority import USER_REJECT, USER_KEEP context = tracer.context_provider.active() # Indicate to not keep the trace diff --git a/ddtrace/ext/redis.py b/oteltrace/ext/redis.py similarity index 100% rename from ddtrace/ext/redis.py rename to oteltrace/ext/redis.py diff --git a/ddtrace/ext/sql.py b/oteltrace/ext/sql.py similarity index 96% rename from ddtrace/ext/sql.py rename to oteltrace/ext/sql.py index b9b93c37..99f84125 100644 --- a/ddtrace/ext/sql.py +++ b/oteltrace/ext/sql.py @@ -1,4 +1,4 @@ -from ddtrace.ext import AppTypes +from oteltrace.ext import AppTypes # the type of the spans diff --git a/ddtrace/ext/system.py b/oteltrace/ext/system.py similarity index 100% rename from ddtrace/ext/system.py rename to oteltrace/ext/system.py diff --git a/ddtrace/filters.py b/oteltrace/filters.py similarity index 100% rename from ddtrace/filters.py rename to oteltrace/filters.py diff --git a/ddtrace/helpers.py b/oteltrace/helpers.py similarity index 78% rename from ddtrace/helpers.py rename to oteltrace/helpers.py index 3374b415..66710dc0 100644 --- a/ddtrace/helpers.py +++ b/oteltrace/helpers.py @@ -1,4 +1,4 @@ -import ddtrace +import oteltrace def get_correlation_ids(tracer=None): @@ -6,12 +6,12 @@ def get_correlation_ids(tracer=None): This helper method can be achieved manually and should be considered only a shortcut. The main reason is to abstract the current ``Tracer`` implementation so that these identifiers can be extracted either the - tracer is an OpenTracing tracer or a Datadog tracer. + tracer is an OpenTracing tracer or an OpenTelemetry tracer. OpenTracing users can still extract these values using the ``ScopeManager`` API, though this shortcut is a simple one-liner. The usage is: - from ddtrace import helpers + from oteltrace import helpers trace_id, span_id = helpers.get_correlation_ids() @@ -19,13 +19,13 @@ def get_correlation_ids(tracer=None): """ # Consideration: currently we don't have another way to "define" a # GlobalTracer. In the case of OpenTracing, ``opentracing.tracer`` is exposed - # and we're doing the same here for ``ddtrace.tracer``. Because this helper + # and we're doing the same here for ``oteltrace.tracer``. Because this helper # must work also with OpenTracing, we should take the right used ``Tracer``. - # At the time of writing, it's enough to support our Datadog Tracer. + # At the time of writing, it's enough to support our OpenTelemetry Tracer.
# If no tracer passed in, use global tracer if not tracer: - tracer = ddtrace.tracer + tracer = oteltrace.tracer # If tracer is disabled, skip if not tracer.enabled: diff --git a/ddtrace/http/__init__.py b/oteltrace/http/__init__.py similarity index 100% rename from ddtrace/http/__init__.py rename to oteltrace/http/__init__.py diff --git a/ddtrace/http/headers.py b/oteltrace/http/headers.py similarity index 91% rename from ddtrace/http/headers.py rename to oteltrace/http/headers.py index cd3f0712..e6cc38f2 100644 --- a/ddtrace/http/headers.py +++ b/oteltrace/http/headers.py @@ -20,9 +20,9 @@ def store_request_headers(headers, span, integration_config): :param headers: All the request's http headers, will be filtered through the whitelist :type headers: dict or list :param span: The Span instance where tags will be stored - :type span: ddtrace.Span + :type span: oteltrace.Span :param integration_config: An integration specific config object. - :type integration_config: ddtrace.settings.IntegrationConfig + :type integration_config: oteltrace.settings.IntegrationConfig """ _store_headers(headers, span, integration_config, REQUEST) @@ -33,9 +33,9 @@ def store_response_headers(headers, span, integration_config): :param headers: All the response's http headers, will be filtered through the whitelist :type headers: dict or list :param span: The Span instance where tags will be stored - :type span: ddtrace.Span + :type span: oteltrace.Span :param integration_config: An integration specific config object. - :type integration_config: ddtrace.settings.IntegrationConfig + :type integration_config: oteltrace.settings.IntegrationConfig """ _store_headers(headers, span, integration_config, RESPONSE) @@ -45,9 +45,9 @@ def _store_headers(headers, span, integration_config, request_or_response): :param headers: A dict of http headers to be stored in the span :type headers: dict or list :param span: The Span instance where tags will be stored - :type span: ddtrace.span.Span + :type span: oteltrace.span.Span :param integration_config: An integration specific config object. - :type integration_config: ddtrace.settings.IntegrationConfig + :type integration_config: oteltrace.settings.IntegrationConfig """ if not isinstance(headers, dict): try: diff --git a/oteltrace/internal/README.md b/oteltrace/internal/README.md new file mode 100644 index 00000000..a4127d59 --- /dev/null +++ b/oteltrace/internal/README.md @@ -0,0 +1,7 @@ +# Internal +This internal module is used to define and document an internal only API for `oteltrace`. + +These modules are not intended to be used outside of `oteltrace`. + +The APIs found within `oteltrace.internal` are subject to breaking changes at any time +and do not follow the semver versioning scheme of the `oteltrace` package. 
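The contrib hunks above all preserve the same idempotent patch/unpatch convention while renaming its markers: a module-level `_opentelemetry_patch` flag guards against double-patching, `wrapt` function wrappers perform the interception, and a `Pin` carries the service and tracer configuration that the wrappers consult at call time. A minimal sketch of that convention, assuming a hypothetical third-party module `mylib` exposing a module-level function `call` (neither is part of this patch; the real integrations live under `oteltrace/contrib/`):

import mylib

from oteltrace import Pin
from oteltrace.vendor import wrapt
from oteltrace.utils.wrappers import unwrap


def _traced_call(wrapped, instance, args, kwargs):
    # skip tracing when no Pin is attached or the Pin is disabled
    pin = Pin.get_from(mylib)
    if not pin or not pin.enabled():
        return wrapped(*args, **kwargs)
    with pin.tracer.trace('mylib.call', service=pin.service):
        return wrapped(*args, **kwargs)


def patch():
    # the module-level flag keeps patch() idempotent, mirroring the
    # _opentelemetry_patch attribute used throughout this change
    if getattr(mylib, '_opentelemetry_patch', False):
        return
    setattr(mylib, '_opentelemetry_patch', True)
    wrapt.wrap_function_wrapper('mylib', 'call', _traced_call)
    Pin(service='mylib').onto(mylib)


def unpatch():
    if not getattr(mylib, '_opentelemetry_patch', False):
        return
    setattr(mylib, '_opentelemetry_patch', False)
    unwrap(mylib, 'call')

Calling patch() a second time is a no-op, and unpatch() restores the original function through `unwrap`, so the rename only has to touch the flag and pin names, not the control flow.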
diff --git a/ddtrace/internal/__init__.py b/oteltrace/internal/__init__.py similarity index 100% rename from ddtrace/internal/__init__.py rename to oteltrace/internal/__init__.py diff --git a/ddtrace/internal/context_manager.py b/oteltrace/internal/context_manager.py similarity index 90% rename from ddtrace/internal/context_manager.py rename to oteltrace/internal/context_manager.py index 73b49128..3a93b5e3 100644 --- a/ddtrace/internal/context_manager.py +++ b/oteltrace/internal/context_manager.py @@ -1,6 +1,6 @@ import abc import threading -from ddtrace.vendor import six +from oteltrace.vendor import six from .logger import get_logger from ..context import Context @@ -9,7 +9,7 @@ try: from contextvars import ContextVar - _DD_CONTEXTVAR = ContextVar('datadog_contextvar', default=None) + _OTEL_CONTEXTVAR = ContextVar('opentelemetry_contextvar', default=None) CONTEXTVARS_IS_AVAILABLE = True except ImportError: CONTEXTVARS_IS_AVAILABLE = False @@ -80,14 +80,14 @@ class ContextVarContextManager(BaseContextManager): async task. """ def _has_active_context(self): - ctx = _DD_CONTEXTVAR.get() + ctx = _OTEL_CONTEXTVAR.get() return ctx is not None def set(self, ctx): - _DD_CONTEXTVAR.set(ctx) + _OTEL_CONTEXTVAR.set(ctx) def get(self): - ctx = _DD_CONTEXTVAR.get() + ctx = _OTEL_CONTEXTVAR.get() if not ctx: ctx = Context() self.set(ctx) @@ -95,7 +95,7 @@ def get(self): return ctx def reset(self): - _DD_CONTEXTVAR.set(None) + _OTEL_CONTEXTVAR.set(None) if CONTEXTVARS_IS_AVAILABLE: diff --git a/ddtrace/internal/hostname.py b/oteltrace/internal/hostname.py similarity index 100% rename from ddtrace/internal/hostname.py rename to oteltrace/internal/hostname.py diff --git a/ddtrace/internal/logger.py b/oteltrace/internal/logger.py similarity index 79% rename from ddtrace/internal/logger.py rename to oteltrace/internal/logger.py index f14037cc..f8274141 100644 --- a/ddtrace/internal/logger.py +++ b/oteltrace/internal/logger.py @@ -6,7 +6,7 @@ def get_logger(name): """ - Retrieve or create a ``DDLogger`` instance. + Retrieve or create a ``OtelLogger`` instance. This function mirrors the behavior of `logging.getLogger`. @@ -17,12 +17,12 @@ def get_logger(name): DEV: We do not want to mess with `logging.setLoggerClass()` That will totally mess with the user's loggers, we want - just our own, selective loggers to be DDLoggers + just our own, selective loggers to be OtelLoggers :param name: The name of the logger to fetch or create :type name: str :return: The logger instance - :rtype: ``DDLogger`` + :rtype: ``OtelLogger`` """ # DEV: `logging.Logger.manager` refers to the single root `logging.Manager` instance # https://github.com/python/cpython/blob/48769a28ad6ef4183508951fa6a378531ace26a4/Lib/logging/__init__.py#L1824-L1826 # noqa @@ -33,7 +33,7 @@ def get_logger(name): # DEV: This is a simplified version of `logging.Manager.getLogger` # https://github.com/python/cpython/blob/48769a28ad6ef4183508951fa6a378531ace26a4/Lib/logging/__init__.py#L1221-L1253 # noqa if name not in manager.loggerDict: - manager.loggerDict[name] = DDLogger(name=name) + manager.loggerDict[name] = OtelLogger(name=name) # Get our logger logger = manager.loggerDict[name] @@ -50,12 +50,12 @@ def get_logger(name): return logger -class DDLogger(logging.Logger): +class OtelLogger(logging.Logger): """ - Custom rate limited logger used by ``ddtrace`` + Custom rate limited logger used by ``oteltrace`` This logger class is used to rate limit the output of - log messages from within the ``ddtrace`` package. 
+ log messages from within the ``oteltrace`` package. """ __slots__ = ('buckets', 'rate_limit') @@ -63,15 +63,15 @@ class DDLogger(logging.Logger): LoggingBucket = collections.namedtuple('LoggingBucket', ('bucket', 'skipped')) def __init__(self, *args, **kwargs): - """Constructor for ``DDLogger``""" - super(DDLogger, self).__init__(*args, **kwargs) + """Constructor for ``OtelLogger``""" + super(OtelLogger, self).__init__(*args, **kwargs) # Dict to keep track of the current time bucket per name/level/pathname/lineno - self.buckets = collections.defaultdict(lambda: DDLogger.LoggingBucket(0, 0)) + self.buckets = collections.defaultdict(lambda: OtelLogger.LoggingBucket(0, 0)) # Allow 1 log record per name/level/pathname/lineno every 60 seconds by default - # Allow configuring via `DD_LOGGING_RATE_LIMIT` - # DEV: `DD_LOGGING_RATE_LIMIT=0` means to disable all rate limiting + # Allow configuring via `OTEL_LOGGING_RATE_LIMIT` + # DEV: `OTEL_LOGGING_RATE_LIMIT=0` means to disable all rate limiting self.rate_limit = int(get_env('logging', 'rate_limit', default=60)) def handle(self, record): @@ -87,9 +87,9 @@ def handle(self, record): :param record: The log record being logged :type record: ``logging.LogRecord`` """ - # If rate limiting has been disabled (`DD_LOGGING_RATE_LIMIT=0`) then apply no rate limit + # If rate limiting has been disabled (`OTEL_LOGGING_RATE_LIMIT=0`) then apply no rate limit if not self.rate_limit: - super(DDLogger, self).handle(record) + super(OtelLogger, self).handle(record) return # Allow 1 log record by name/level/pathname/lineno every X seconds @@ -100,7 +100,7 @@ def handle(self, record): current_bucket = int(record.created / self.rate_limit) # Limit based on logger name, record level, filename, and line number - # ('ddtrace.writer', 'DEBUG', '../site-packages/ddtrace/writer.py', 137) + # ('oteltrace.writer', 'DEBUG', '../site-packages/oteltrace/writer.py', 137) # This way each unique log message can get logged at least once per time period # DEV: LogRecord has `levelname` and `levelno`, we want `levelno` e.g. 
`logging.DEBUG = 10` key = (record.name, record.levelno, record.pathname, record.lineno) @@ -113,11 +113,11 @@ def handle(self, record): record.msg = '{}, {} additional messages skipped'.format(record.msg, logging_bucket.skipped) # Reset our bucket - self.buckets[key] = DDLogger.LoggingBucket(current_bucket, 0) + self.buckets[key] = OtelLogger.LoggingBucket(current_bucket, 0) # Call the base handle to actually log this record - super(DDLogger, self).handle(record) + super(OtelLogger, self).handle(record) else: # Increment the count of records we have skipped # DEV: `self.buckets[key]` is a tuple which is immutable so recreate instead - self.buckets[key] = DDLogger.LoggingBucket(logging_bucket.bucket, logging_bucket.skipped + 1) + self.buckets[key] = OtelLogger.LoggingBucket(logging_bucket.bucket, logging_bucket.skipped + 1) diff --git a/ddtrace/internal/rate_limiter.py b/oteltrace/internal/rate_limiter.py similarity index 100% rename from ddtrace/internal/rate_limiter.py rename to oteltrace/internal/rate_limiter.py diff --git a/ddtrace/internal/runtime/__init__.py b/oteltrace/internal/runtime/__init__.py similarity index 100% rename from ddtrace/internal/runtime/__init__.py rename to oteltrace/internal/runtime/__init__.py diff --git a/ddtrace/internal/runtime/collector.py b/oteltrace/internal/runtime/collector.py similarity index 100% rename from ddtrace/internal/runtime/collector.py rename to oteltrace/internal/runtime/collector.py diff --git a/ddtrace/internal/runtime/constants.py b/oteltrace/internal/runtime/constants.py similarity index 100% rename from ddtrace/internal/runtime/constants.py rename to oteltrace/internal/runtime/constants.py diff --git a/ddtrace/internal/runtime/container.py b/oteltrace/internal/runtime/container.py similarity index 100% rename from ddtrace/internal/runtime/container.py rename to oteltrace/internal/runtime/container.py diff --git a/ddtrace/internal/runtime/metric_collectors.py b/oteltrace/internal/runtime/metric_collectors.py similarity index 100% rename from ddtrace/internal/runtime/metric_collectors.py rename to oteltrace/internal/runtime/metric_collectors.py diff --git a/ddtrace/internal/runtime/runtime_metrics.py b/oteltrace/internal/runtime/runtime_metrics.py similarity index 100% rename from ddtrace/internal/runtime/runtime_metrics.py rename to oteltrace/internal/runtime/runtime_metrics.py diff --git a/ddtrace/internal/runtime/tag_collectors.py b/oteltrace/internal/runtime/tag_collectors.py similarity index 68% rename from ddtrace/internal/runtime/tag_collectors.py rename to oteltrace/internal/runtime/tag_collectors.py index 12d34417..6381d95d 100644 --- a/ddtrace/internal/runtime/tag_collectors.py +++ b/oteltrace/internal/runtime/tag_collectors.py @@ -15,15 +15,15 @@ class RuntimeTagCollector(ValueCollector): class TracerTagCollector(RuntimeTagCollector): - """ Tag collector for the ddtrace Tracer + """ Tag collector for the oteltrace Tracer """ - required_modules = ['ddtrace'] + required_modules = ['oteltrace'] def collect_fn(self, keys): - ddtrace = self.modules.get('ddtrace') - tags = [(SERVICE, service) for service in ddtrace.tracer._services] - if ENV_KEY in ddtrace.tracer.tags: - tags.append((ENV_KEY, ddtrace.tracer.tags[ENV_KEY])) + oteltrace = self.modules.get('oteltrace') + tags = [(SERVICE, service) for service in oteltrace.tracer._services] + if ENV_KEY in oteltrace.tracer.tags: + tags.append((ENV_KEY, oteltrace.tracer.tags[ENV_KEY])) return tags @@ -42,15 +42,15 @@ class PlatformTagCollector(RuntimeTagCollector): - tracer_version: - 
e.g. '0.29.0' """ - required_modules = ('platform', 'ddtrace') + required_modules = ('platform', 'oteltrace') def collect_fn(self, keys): platform = self.modules.get('platform') - ddtrace = self.modules.get('ddtrace') + oteltrace = self.modules.get('oteltrace') tags = [ (LANG, 'python'), (LANG_INTERPRETER, platform.python_implementation()), (LANG_VERSION, platform.python_version()), - (TRACER_VERSION, ddtrace.__version__), + (TRACER_VERSION, oteltrace.__version__), ] return tags diff --git a/ddtrace/internal/writer.py b/oteltrace/internal/writer.py similarity index 75% rename from ddtrace/internal/writer.py rename to oteltrace/internal/writer.py index 64dbe15d..906fc77b 100644 --- a/ddtrace/internal/writer.py +++ b/oteltrace/internal/writer.py @@ -4,11 +4,10 @@ import os import time -from .. import api from .. import _worker from ..utils import sizeof from ..internal.logger import get_logger -from ddtrace.vendor.six.moves.queue import Queue, Full, Empty +from oteltrace.vendor.six.moves.queue import Queue, Full, Empty log = get_logger(__name__) @@ -26,10 +25,8 @@ class AgentWriter(_worker.PeriodicWorkerThread): _ENABLE_STATS = False _STATS_EVERY_INTERVAL = 10 - def __init__(self, hostname='localhost', port=8126, uds_path=None, https=False, - shutdown_timeout=DEFAULT_TIMEOUT, - filters=None, priority_sampler=None, - dogstatsd=None): + def __init__(self, shutdown_timeout=DEFAULT_TIMEOUT, filters=None, + priority_sampler=None, metrics_client=None, api=None): super(AgentWriter, self).__init__(interval=self.QUEUE_PROCESSING_INTERVAL, exit_timeout=shutdown_timeout, name=self.__class__.__name__) @@ -37,9 +34,8 @@ def __init__(self, hostname='localhost', port=8126, uds_path=None, https=False, self._filters = filters self._priority_sampler = priority_sampler self._last_error_ts = 0 - self.dogstatsd = dogstatsd - self.api = api.API(hostname, port, uds_path=uds_path, https=https, - priority_sampling=priority_sampler is not None) + self.metrics_client = metrics_client + self.api = api self._stats_rate_counter = 0 self.start() @@ -51,7 +47,7 @@ def _send_stats(self): if not self._ENABLE_STATS: return False - if not self.dogstatsd: + if not self.metrics_client: return False self._stats_rate_counter += 1 @@ -112,41 +108,41 @@ def flush_queue(self): self._priority_sampler.set_sample_rate_by_service(result_traces_json['rate_by_service']) # Dump statistics - # NOTE: Do not use the buffering of dogstatsd as it's not thread-safe - # https://github.com/DataDog/datadogpy/issues/439 + # NOTE: Do not use the buffering of metrics_client as it's not thread-safe + # https://github.com/opentelemetry/datadogpy/issues/439 if send_stats: # Statistics about the queue length, size and number of spans - self.dogstatsd.gauge('datadog.tracer.queue.max_length', self._trace_queue.maxsize) - self.dogstatsd.gauge('datadog.tracer.queue.length', traces_queue_length) - self.dogstatsd.gauge('datadog.tracer.queue.size', traces_queue_size) - self.dogstatsd.gauge('datadog.tracer.queue.spans', traces_queue_spans) + self.metrics_client.gauge('opentelemetry.tracer.queue.max_length', self._trace_queue.maxsize) + self.metrics_client.gauge('opentelemetry.tracer.queue.length', traces_queue_length) + self.metrics_client.gauge('opentelemetry.tracer.queue.size', traces_queue_size) + self.metrics_client.gauge('opentelemetry.tracer.queue.spans', traces_queue_spans) # Statistics about the rate at which spans are inserted in the queue dropped, enqueued, enqueued_lengths, enqueued_size = self._trace_queue.reset_stats() - 
self.dogstatsd.increment('datadog.tracer.queue.dropped', dropped) - self.dogstatsd.increment('datadog.tracer.queue.accepted', enqueued) - self.dogstatsd.increment('datadog.tracer.queue.accepted_lengths', enqueued_lengths) - self.dogstatsd.increment('datadog.tracer.queue.accepted_size', enqueued_size) + self.metrics_client.increment('opentelemetry.tracer.queue.dropped', dropped) + self.metrics_client.increment('opentelemetry.tracer.queue.accepted', enqueued) + self.metrics_client.increment('opentelemetry.tracer.queue.accepted_lengths', enqueued_lengths) + self.metrics_client.increment('opentelemetry.tracer.queue.accepted_size', enqueued_size) # Statistics about the filtering - self.dogstatsd.increment('datadog.tracer.traces.filtered', traces_filtered) + self.metrics_client.increment('opentelemetry.tracer.traces.filtered', traces_filtered) # Statistics about API - self.dogstatsd.increment('datadog.tracer.api.requests', len(traces_responses)) - self.dogstatsd.increment('datadog.tracer.api.errors', - len(list(t for t in traces_responses + self.metrics_client.increment('opentelemetry.tracer.api.requests', len(traces_responses)) + self.metrics_client.increment('opentelemetry.tracer.api.errors', + len(list(t for t in traces_responses if isinstance(t, Exception)))) for status, grouped_responses in itertools.groupby( sorted((t for t in traces_responses if not isinstance(t, Exception)), key=lambda r: r.status), key=lambda r: r.status): - self.dogstatsd.increment('datadog.tracer.api.responses', - len(list(grouped_responses)), - tags=['status:%d' % status]) + self.metrics_client.increment('opentelemetry.tracer.api.responses', + len(list(grouped_responses)), + tags=['status:%d' % status]) # Statistics about the writer thread if hasattr(time, 'thread_time_ns'): - self.dogstatsd.increment('datadog.tracer.writer.cpu_time', time.thread_time_ns()) + self.metrics_client.increment('opentelemetry.tracer.writer.cpu_time', time.thread_time_ns()) run_periodic = flush_queue on_shutdown = flush_queue @@ -157,21 +153,12 @@ def _log_error_status(self, response): if now > self._last_error_ts + LOG_ERR_INTERVAL: log_level = log.error self._last_error_ts = now - prefix = 'Failed to send traces to Datadog Agent at %s: ' - if isinstance(response, api.Response): - log_level( - prefix + 'HTTP error status %s, reason %s, message %s', - self.api, - response.status, - response.reason, - response.msg, - ) - else: - log_level( - prefix + '%s', - self.api, - response, - ) + prefix = 'Failed to send traces to OpenTelemetry Agent at %s: ' + log_level( + prefix + '%s', + self.api, + response, + ) def _apply_filters(self, traces): """ diff --git a/ddtrace/monkey.py b/oteltrace/monkey.py similarity index 96% rename from ddtrace/monkey.py rename to oteltrace/monkey.py index fa734f08..f28bb9d7 100644 --- a/ddtrace/monkey.py +++ b/oteltrace/monkey.py @@ -10,7 +10,7 @@ import sys import threading -from ddtrace.vendor.wrapt.importer import when_imported +from oteltrace.vendor.wrapt.importer import when_imported from .internal.logger import get_logger @@ -69,7 +69,7 @@ # Modules which are patched on first use # DEV: These modules are patched when the user first imports them, rather than -# explicitly importing and patching them on application startup `ddtrace.patch_all(module=True)` +# explicitly importing and patching them on application startup `oteltrace.patch_all(module=True)` # DEV: This ensures we do not patch a module until it is needed # DEV: => _PATCH_ON_IMPORT = { @@ -89,7 +89,7 @@ def _on_import_factory(module, 
raise_errors=True): """Factory to create an import hook for the provided module name""" def on_import(hook): # Import and patch module - path = 'ddtrace.contrib.%s' % module + path = 'oteltrace.contrib.%s' % module imported_module = importlib.import_module(path) imported_module.patch() @@ -169,7 +169,7 @@ def _patch_module(module): Returns if the module got patched. Can also raise errors if it fails. """ - path = 'ddtrace.contrib.%s' % module + path = 'oteltrace.contrib.%s' % module with _LOCK: if module in _PATCHED_MODULES and module not in _PATCH_ON_IMPORT: log.debug('already patched: %s', path) diff --git a/ddtrace/pin.py b/oteltrace/pin.py similarity index 84% rename from ddtrace/pin.py rename to oteltrace/pin.py index fe708698..b1411848 100644 --- a/ddtrace/pin.py +++ b/oteltrace/pin.py @@ -1,4 +1,4 @@ -import ddtrace +import oteltrace from .internal.logger import get_logger from .vendor import wrapt @@ -9,8 +9,8 @@ # To set attributes on wrapt proxy objects use this prefix: # http://wrapt.readthedocs.io/en/latest/wrappers.html -_DD_PIN_NAME = '_datadog_pin' -_DD_PIN_PROXY_NAME = '_self_' + _DD_PIN_NAME +_OTEL_PIN_NAME = '_opentelemetry_pin' +_OTEL_PIN_PROXY_NAME = '_self_' + _OTEL_PIN_NAME class Pin(object): @@ -27,7 +27,7 @@ class Pin(object): __slots__ = ['app', 'app_type', 'tags', 'tracer', '_target', '_config', '_initialized'] def __init__(self, service, app=None, app_type=None, tags=None, tracer=None, _config=None): - tracer = tracer or ddtrace.tracer + tracer = tracer or oteltrace.tracer self.app = app self.app_type = app_type self.tags = tags @@ -59,15 +59,15 @@ def __repr__(self): @staticmethod def _find(*objs): """ - Return the first :class:`ddtrace.pin.Pin` found on any of the provided objects or `None` if none were found + Return the first :class:`oteltrace.pin.Pin` found on any of the provided objects or `None` if none were found >>> pin = Pin._find(wrapper, instance, conn, app) - :param *objs: The objects to search for a :class:`ddtrace.pin.Pin` on + :param *objs: The objects to search for a :class:`oteltrace.pin.Pin` on :type objs: List of objects - :rtype: :class:`ddtrace.pin.Pin`, None - :returns: The first found :class:`ddtrace.pin.Pin` or `None` is none was found + :rtype: :class:`oteltrace.pin.Pin`, None + :returns: The first found :class:`oteltrace.pin.Pin` or `None` is none was found """ for obj in objs: pin = Pin.get_from(obj) @@ -84,15 +84,15 @@ def get_from(obj): >>> pin = Pin.get_from(conn) - :param obj: The object to look for a :class:`ddtrace.pin.Pin` on + :param obj: The object to look for a :class:`oteltrace.pin.Pin` on :type obj: object - :rtype: :class:`ddtrace.pin.Pin`, None - :returns: :class:`ddtrace.pin.Pin` associated with the object, or None if none was found + :rtype: :class:`oteltrace.pin.Pin`, None + :returns: :class:`oteltrace.pin.Pin` associated with the object, or None if none was found """ if hasattr(obj, '__getddpin__'): return obj.__getddpin__() - pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + pin_name = _OTEL_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _OTEL_PIN_NAME pin = getattr(obj, pin_name, None) # detect if the PIN has been inherited from a class if pin is not None and pin._target != id(obj): @@ -139,7 +139,7 @@ def onto(self, obj, send=True): if hasattr(obj, '__setddpin__'): return obj.__setddpin__(self) - pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + pin_name = _OTEL_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else 
_OTEL_PIN_NAME # set the target reference; any get_from, clones and retarget the new PIN self._target = id(obj) @@ -150,7 +150,7 @@ def onto(self, obj, send=True): def remove_from(self, obj): # Remove pin from the object. try: - pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + pin_name = _OTEL_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _OTEL_PIN_NAME pin = Pin.get_from(obj) if pin is not None: diff --git a/ddtrace/propagation/__init__.py b/oteltrace/propagation/__init__.py similarity index 100% rename from ddtrace/propagation/__init__.py rename to oteltrace/propagation/__init__.py diff --git a/oteltrace/propagation/b3.py b/oteltrace/propagation/b3.py new file mode 100644 index 00000000..881eaecc --- /dev/null +++ b/oteltrace/propagation/b3.py @@ -0,0 +1,82 @@ +from ..context import Context +from ..ext import priority + + +class B3HTTPPropagator: + """b3 compatible propagator""" + + SINGLE_HEADER_KEY = 'b3' + TRACE_ID_KEY = 'x-b3-traceid' + SPAN_ID_KEY = 'x-b3-spanid' + SAMPLED_KEY = 'x-b3-sampled' + FLAGS_KEY = 'x-b3-flags' + _SAMPLE_PROPAGATE_VALUES = set(['1', 'True', 'true', 'd']) + + _SAMPLING_PRIORITY_MAP = { + priority.USER_REJECT: '0', + priority.AUTO_REJECT: '0', + priority.AUTO_KEEP: '1', + priority.USER_KEEP: '1', + } + + def inject(self, span_context, headers): + # TODO: what should be a default value? + sampled = '0' + if span_context.sampling_priority is not None: + sampled = self._SAMPLING_PRIORITY_MAP[span_context.sampling_priority] + + headers[self.TRACE_ID_KEY] = format_trace_id(span_context.trace_id) + headers[self.SPAN_ID_KEY] = format_span_id(span_context.span_id) + headers[self.SAMPLED_KEY] = sampled + + def extract(self, headers): + trace_id = '0' + span_id = '0' + sampled = '0' + flags = None + + single_header = headers.get(self.SINGLE_HEADER_KEY) + + if single_header: + # The b3 spec calls for the sampling state to be + # "deferred", which is unspecified. This concept does not + # translate to SpanContext, so we set it as recorded. 
+ sampled = '1' + fields = single_header.split('-', 4) + + if len(fields) == 1: + sampled = fields[0] + elif len(fields) == 2: + trace_id, span_id = fields + elif len(fields) == 3: + trace_id, span_id, sampled = fields + elif len(fields) == 4: + trace_id, span_id, sampled, _parent_span_id = fields + else: + return Context() + else: + trace_id = headers.get(self.TRACE_ID_KEY) or trace_id + span_id = headers.get(self.SPAN_ID_KEY) or span_id + sampled = headers.get(self.SAMPLED_KEY) or sampled + flags = headers.get(self.FLAGS_KEY) or flags + + if sampled in self._SAMPLE_PROPAGATE_VALUES or flags == '1': + sampling_priority = priority.AUTO_KEEP + else: + sampling_priority = priority.AUTO_REJECT + + return Context( + trace_id=int(trace_id, 16), + span_id=int(span_id, 16), + sampling_priority=sampling_priority, + ) + + +def format_trace_id(trace_id: int) -> str: + """Format the trace id according to b3 specification.""" + return format(trace_id, '032x') + + +def format_span_id(span_id: int) -> str: + """Format the span id according to b3 specification.""" + return format(span_id, '016x') diff --git a/ddtrace/propagation/http.py b/oteltrace/propagation/datadog.py similarity index 82% rename from ddtrace/propagation/http.py rename to oteltrace/propagation/datadog.py index 0bbe4463..7a53d1cd 100644 --- a/ddtrace/propagation/http.py +++ b/oteltrace/propagation/datadog.py @@ -29,7 +29,7 @@ ) -class HTTPPropagator(object): +class DatadogHTTPPropagator(object): """A HTTP Propagator using HTTP headers as carrier.""" def inject(self, span_context, headers): @@ -38,12 +38,12 @@ def inject(self, span_context, headers): Here is an example using `requests`:: import requests - from ddtrace.propagation.http import HTTPPropagator + from oteltrace.propagation.http import DatadogHTTPPropagator def parent_call(): with tracer.trace('parent_span') as span: headers = {} - propagator = HTTPPropagator() + propagator = DatadogHTTPPropagator() propagator.inject(span.context, headers) url = '' r = requests.get(url, headers=headers) @@ -58,8 +58,8 @@ def parent_call(): if sampling_priority is not None: headers[HTTP_HEADER_SAMPLING_PRIORITY] = str(span_context.sampling_priority) # Propagate origin only if defined - if span_context._dd_origin is not None: - headers[HTTP_HEADER_ORIGIN] = str(span_context._dd_origin) + if span_context._otel_origin is not None: + headers[HTTP_HEADER_ORIGIN] = str(span_context._otel_origin) @staticmethod def extract_header_value(possible_header_names, headers, default=None): @@ -73,7 +73,7 @@ def extract_header_value(possible_header_names, headers, default=None): @staticmethod def extract_trace_id(headers): return int( - HTTPPropagator.extract_header_value( + DatadogHTTPPropagator.extract_header_value( POSSIBLE_HTTP_HEADER_TRACE_IDS, headers, default=0, ) ) @@ -81,20 +81,20 @@ def extract_trace_id(headers): @staticmethod def extract_parent_span_id(headers): return int( - HTTPPropagator.extract_header_value( + DatadogHTTPPropagator.extract_header_value( POSSIBLE_HTTP_HEADER_PARENT_IDS, headers, default=0, ) ) @staticmethod def extract_sampling_priority(headers): - return HTTPPropagator.extract_header_value( + return DatadogHTTPPropagator.extract_header_value( POSSIBLE_HTTP_HEADER_SAMPLING_PRIORITIES, headers, ) @staticmethod def extract_origin(headers): - return HTTPPropagator.extract_header_value( + return DatadogHTTPPropagator.extract_header_value( POSSIBLE_HTTP_HEADER_ORIGIN, headers, ) @@ -103,10 +103,10 @@ def extract(self, headers): Here is an example from a web endpoint:: - from 
ddtrace.propagation.http import HTTPPropagator + from oteltrace.propagation.http import DatadogHTTPPropagator def my_controller(url, headers): - propagator = HTTPPropagator() + propagator = DatadogHTTPPropagator() context = propagator.extract(headers) tracer.context_provider.activate(context) @@ -120,10 +120,10 @@ def my_controller(url, headers): return Context() try: - trace_id = HTTPPropagator.extract_trace_id(headers) - parent_span_id = HTTPPropagator.extract_parent_span_id(headers) - sampling_priority = HTTPPropagator.extract_sampling_priority(headers) - origin = HTTPPropagator.extract_origin(headers) + trace_id = DatadogHTTPPropagator.extract_trace_id(headers) + parent_span_id = DatadogHTTPPropagator.extract_parent_span_id(headers) + sampling_priority = DatadogHTTPPropagator.extract_sampling_priority(headers) + origin = DatadogHTTPPropagator.extract_origin(headers) if sampling_priority is not None: sampling_priority = int(sampling_priority) @@ -132,7 +132,7 @@ def my_controller(url, headers): trace_id=trace_id, span_id=parent_span_id, sampling_priority=sampling_priority, - _dd_origin=origin, + _otel_origin=origin, ) # If headers are invalid and cannot be parsed, return a new context and log the issue. except Exception as error: diff --git a/oteltrace/propagation/http.py b/oteltrace/propagation/http.py new file mode 100644 index 00000000..8fd0f89f --- /dev/null +++ b/oteltrace/propagation/http.py @@ -0,0 +1,14 @@ +from .datadog import DatadogHTTPPropagator + +_PROGPAGATOR_FACTORY = DatadogHTTPPropagator + + +def set_http_propagator_factory(factory): + """Sets the propagator factory to be used globally""" + global _PROGPAGATOR_FACTORY + _PROGPAGATOR_FACTORY = factory + + +def HTTPPropagator(): + """Returns and instance of the configured propagator""" + return _PROGPAGATOR_FACTORY() diff --git a/ddtrace/propagation/utils.py b/oteltrace/propagation/utils.py similarity index 100% rename from ddtrace/propagation/utils.py rename to oteltrace/propagation/utils.py diff --git a/oteltrace/propagation/w3c.py b/oteltrace/propagation/w3c.py new file mode 100644 index 00000000..12b9e576 --- /dev/null +++ b/oteltrace/propagation/w3c.py @@ -0,0 +1,75 @@ +import re + +from ..context import Context +from ..ext import priority + + +class W3CHTTPPropagator: + """w3c compatible propagator""" + _TRACEPARENT_HEADER_NAME = 'traceparent' + _TRACEPARENT_HEADER_FORMAT = ( + '^[ \t]*([0-9a-f]{2})-([0-9a-f]{32})-([0-9a-f]{16})-([0-9a-f]{2})(-.*)?[ \t]*$' + ) + _TRACEPARENT_HEADER_FORMAT_RE = re.compile(_TRACEPARENT_HEADER_FORMAT) + _SAMPLING_PRIORITY_MAP = { + priority.USER_REJECT: 0, + priority.AUTO_REJECT: 0, + priority.AUTO_KEEP: 1, + priority.USER_KEEP: 1, + } + + def inject(self, span_context, headers): + # TODO: what should be a default value? 
+ sampled = 0 + if span_context.sampling_priority is not None: + sampled = self._SAMPLING_PRIORITY_MAP[span_context.sampling_priority] + + traceparent_string = '00-{:032x}-{:016x}-{:02x}'.format( + span_context.trace_id, span_context.span_id, sampled + ) + + headers[self._TRACEPARENT_HEADER_NAME] = traceparent_string + + def extract(self, headers): + if not headers: + return Context() + + # TODO: lookup ignoring case + header = headers.get(self._TRACEPARENT_HEADER_NAME) + if not header: + return Context() + + match = re.search(self._TRACEPARENT_HEADER_FORMAT_RE, header) + if not match: + return Context() + + version = match.group(1) + trace_id_str = match.group(2) + span_id_str = match.group(3) + trace_options_str = match.group(4) + + if version == '00': + if match.group(5): + return Context() + if version == 'ff': + return Context() + + trace_id = int(trace_id_str, 16) + span_id = int(span_id_str, 16) + trace_options = int(trace_options_str, 16) + # TODO: probably not needed + if trace_id == 0 or span_id == 0: + return Context() + + # There is not a way to identify between USER and AUTO at this point, + # just use AUTO. + if trace_options & 0x1: + sampling_priority = priority.AUTO_KEEP + else: + sampling_priority = priority.AUTO_REJECT + + return Context( + trace_id=trace_id, + span_id=span_id, + sampling_priority=sampling_priority, + ) diff --git a/ddtrace/provider.py b/oteltrace/provider.py similarity index 98% rename from ddtrace/provider.py rename to oteltrace/provider.py index 246fbcec..aa95ea3c 100644 --- a/ddtrace/provider.py +++ b/oteltrace/provider.py @@ -1,5 +1,5 @@ import abc -from ddtrace.vendor import six +from oteltrace.vendor import six from .internal.context_manager import DefaultContextManager diff --git a/ddtrace/sampler.py b/oteltrace/sampler.py similarity index 95% rename from ddtrace/sampler.py rename to oteltrace/sampler.py index 3878edbe..edbec46b 100644 --- a/ddtrace/sampler.py +++ b/oteltrace/sampler.py @@ -68,7 +68,7 @@ class RateByServiceSampler(BaseSampler): @staticmethod def _key(service=None, env=None): - """Compute a key with the same format used by the Datadog agent API.""" + """Compute a key with the same format used by the OpenTelemetry agent API.""" service = service or '' env = env or '' return 'service:' + service + ',env:' + env @@ -108,7 +108,7 @@ def set_sample_rate_by_service(self, rate_by_service): RateByServiceSampler._default_key = RateByServiceSampler._key() -class DatadogSampler(BaseSampler): +class OpenTelemetrySampler(BaseSampler): """ This sampler is currently in ALPHA and it's API may change at any time, use at your own risk. 
""" @@ -121,7 +121,7 @@ class DatadogSampler(BaseSampler): # TODO: Remove _priority_sampler=None when we no longer use the fallback def __init__(self, rules=None, default_sample_rate=1.0, rate_limit=DEFAULT_RATE_LIMIT, _priority_sampler=None): """ - Constructor for DatadogSampler sampler + Constructor for OpenTelemetrySampler sampler :param rules: List of :class:`SamplingRule` rules to apply to the root span of every trace, default no rules :type rules: :obj:`list` of :class:`SamplingRule` @@ -138,7 +138,7 @@ def __init__(self, rules=None, default_sample_rate=1.0, rate_limit=DEFAULT_RATE_ # Validate that the rules is a list of SampleRules for rule in rules: if not isinstance(rule, SamplingRule): - raise TypeError('Rule {!r} must be a sub-class of type ddtrace.sampler.SamplingRules'.format(rule)) + raise TypeError('Rule {!r} must be a sub-class of type oteltrace.sampler.SamplingRules'.format(rule)) self.rules = rules # Configure rate limiter @@ -160,7 +160,7 @@ def sample(self, span): The span provided should be the root span in the trace. :param span: The root span of a trace - :type span: :class:`ddtrace.span.Span` + :type span: :class:`oteltrace.span.Span` :returns: Whether the span was sampled or not :rtype: :obj:`bool` """ @@ -207,7 +207,7 @@ def sample(self, span): class SamplingRule(object): """ - Definition of a sampling rule used by :class:`DatadogSampler` for applying a sample rate on a span + Definition of a sampling rule used by :class:`OpenTelemetrySampler` for applying a sample rate on a span """ __slots__ = ('_sample_rate', '_sampling_id_threshold', 'service', 'name') @@ -219,7 +219,7 @@ def __init__(self, sample_rate, service=NO_RULE, name=NO_RULE): .. code:: python - DatadogSampler([ + OpenTelemetrySampler([ # Sample 100% of any trace SamplingRule(sample_rate=1.0), @@ -293,7 +293,7 @@ def matches(self, span): Return if this span matches this rule :param span: The span to match against - :type span: :class:`ddtrace.span.Span` + :type span: :class:`oteltrace.span.Span` :returns: Whether this span matches or not :rtype: :obj:`bool` """ @@ -310,7 +310,7 @@ def sample(self, span): Return if this rule chooses to sample the span :param span: The span to sample against - :type span: :class:`ddtrace.span.Span` + :type span: :class:`oteltrace.span.Span` :returns: Whether this span was sampled :rtype: :obj:`bool` """ diff --git a/ddtrace/settings/__init__.py b/oteltrace/settings/__init__.py similarity index 100% rename from ddtrace/settings/__init__.py rename to oteltrace/settings/__init__.py diff --git a/ddtrace/settings/config.py b/oteltrace/settings/config.py similarity index 99% rename from ddtrace/settings/config.py rename to oteltrace/settings/config.py index 88ac02ae..53a25e54 100644 --- a/ddtrace/settings/config.py +++ b/oteltrace/settings/config.py @@ -22,7 +22,7 @@ def __init__(self): self._config = {} self._http = HttpConfig() # Master switch for turning on and off trace search by default - # this weird invocation of get_env is meant to read the DD_ANALYTICS_ENABLED + # this weird invocation of get_env is meant to read the OTEL_ANALYTICS_ENABLED # legacy environment variable. 
It should be removed in the future legacy_config_value = get_env('analytics', 'enabled', default=False) diff --git a/ddtrace/settings/exceptions.py b/oteltrace/settings/exceptions.py similarity index 100% rename from ddtrace/settings/exceptions.py rename to oteltrace/settings/exceptions.py diff --git a/ddtrace/settings/hooks.py b/oteltrace/settings/hooks.py similarity index 98% rename from ddtrace/settings/hooks.py rename to oteltrace/settings/hooks.py index 81b9eeb9..22342b37 100644 --- a/ddtrace/settings/hooks.py +++ b/oteltrace/settings/hooks.py @@ -93,7 +93,7 @@ def _emit(self, hook, span, *args, **kwargs): :param hook: The hook to call functions for :type hook: str :param span: The span to call the hook with - :type span: :class:`ddtrace.span.Span` + :type span: :class:`oteltrace.span.Span` :param *args: Positional arguments to pass to the hook functions :type args: list :param **kwargs: Keyword arguments to pass to the hook functions diff --git a/ddtrace/settings/http.py b/oteltrace/settings/http.py similarity index 100% rename from ddtrace/settings/http.py rename to oteltrace/settings/http.py diff --git a/ddtrace/settings/integration.py b/oteltrace/settings/integration.py similarity index 99% rename from ddtrace/settings/integration.py rename to oteltrace/settings/integration.py index 3d8b2288..8b999379 100644 --- a/ddtrace/settings/integration.py +++ b/oteltrace/settings/integration.py @@ -12,7 +12,7 @@ class IntegrationConfig(AttrDict): This is what you will get when you do:: - from ddtrace import config + from oteltrace import config # This is an `IntegrationConfig` config.flask diff --git a/ddtrace/span.py b/oteltrace/span.py similarity index 99% rename from ddtrace/span.py rename to oteltrace/span.py index 8ce79498..c474fef1 100644 --- a/ddtrace/span.py +++ b/oteltrace/span.py @@ -62,7 +62,7 @@ def __init__( """ Create a new span. Call `finish` once the traced operation is over. - :param ddtrace.Tracer tracer: the tracer that will submit this span when + :param oteltrace.Tracer tracer: the tracer that will submit this span when finished. :param str name: the name of the traced operation. diff --git a/ddtrace/tracer.py b/oteltrace/tracer.py similarity index 83% rename from ddtrace/tracer.py rename to oteltrace/tracer.py index bc238ca5..872025ad 100644 --- a/ddtrace/tracer.py +++ b/oteltrace/tracer.py @@ -1,21 +1,18 @@ import functools import logging -from os import environ, getpid - +from os import getpid from .constants import FILTERS_KEY, SAMPLE_RATE_METRIC_KEY from .ext import system from .ext.priority import AUTO_REJECT, AUTO_KEEP from .internal.logger import get_logger -from .internal.runtime import RuntimeTags, RuntimeWorker from .internal.writer import AgentWriter from .provider import DefaultContextProvider from .context import Context -from .sampler import AllSampler, DatadogSampler, RateSampler, RateByServiceSampler +from .sampler import AllSampler, OpenTelemetrySampler, RateSampler, RateByServiceSampler from .span import Span -from .utils.formats import get_env from .utils.deprecation import deprecated -from .vendor.dogstatsd import DogStatsd +from .propagation import http as http_propagator_module from . 
import compat @@ -30,15 +27,11 @@ class Tracer(object): If you're running an application that will serve a single trace per thread, you can use the global tracer instance:: - from ddtrace import tracer + from oteltrace import tracer trace = tracer.trace('app.request', 'web-server').finish() """ _RUNTIME_METRICS_INTERVAL = 10 - DEFAULT_HOSTNAME = environ.get('DD_AGENT_HOST', environ.get('DATADOG_TRACE_AGENT_HOSTNAME', 'localhost')) - DEFAULT_PORT = int(environ.get('DD_TRACE_AGENT_PORT', 8126)) - DEFAULT_DOGSTATSD_PORT = int(get_env('dogstatsd', 'port', 8125)) - def __init__(self): """ Create a new ``Tracer`` instance. A global tracer is already initialized @@ -49,14 +42,10 @@ def __init__(self): self.priority_sampler = None self._runtime_worker = None - self._dogstatsd_host = self.DEFAULT_HOSTNAME - self._dogstatsd_port = self.DEFAULT_DOGSTATSD_PORT # Apply the default configuration self.configure( enabled=True, - hostname=self.DEFAULT_HOSTNAME, - port=self.DEFAULT_PORT, sampler=AllSampler(), context_provider=DefaultContextProvider(), ) @@ -86,8 +75,7 @@ def __call__(self): def global_excepthook(self, type, value, traceback): """The global tracer except hook.""" - self._dogstatsd_client.increment('datadog.tracer.uncaught_exceptions', 1, - tags=['class:%s' % type.__name__]) + self.log.warning('global_excepthook not implemented') def get_call_context(self, *args, **kwargs): """ @@ -95,7 +83,7 @@ def get_call_context(self, *args, **kwargs): automatically called in the ``tracer.trace()``, but it can be used in the application code during manual instrumentation like:: - from ddtrace import import tracer + from oteltrace import import tracer async def web_handler(request): context = tracer.get_call_context() @@ -112,20 +100,15 @@ def context_provider(self): """Returns the current Tracer Context Provider""" return self._context_provider - def configure(self, enabled=None, hostname=None, port=None, uds_path=None, https=None, - dogstatsd_host=None, dogstatsd_port=None, sampler=None, context_provider=None, - wrap_executor=None, priority_sampling=None, settings=None, collect_metrics=None): + def configure(self, enabled=None, sampler=None, context_provider=None, + wrap_executor=None, priority_sampling=None, settings=None, collect_metrics=None, + api=None, http_propagator=None): """ Configure an existing Tracer the easy way. Allow to configure or reconfigure a Tracer instance. :param bool enabled: If True, finished traces will be submitted to the API. Otherwise they'll be dropped. - :param str hostname: Hostname running the Trace Agent - :param int port: Port of the Trace Agent - :param str uds_path: The Unix Domain Socket path of the agent. - :param bool https: Whether to use HTTPS or HTTP. - :param int metric_port: Port of DogStatsd :param object sampler: A custom Sampler instance, locally deciding to totally drop the trace or not. :param object context_provider: The ``ContextProvider`` that will be used to retrieve automatically the current call context. This is an advanced option that usually @@ -136,6 +119,8 @@ def configure(self, enabled=None, hostname=None, port=None, uds_path=None, https :param priority_sampling: enable priority sampling, this is required for complete distributed tracing support. Enabled by default. :param collect_metrics: Whether to enable runtime metrics collection. + :param object api: object to export the traces to a backend. + :param class http_propagator: type of propagator to be used to distribute the tracing context. 
""" if enabled is not None: self.enabled = enabled @@ -155,50 +140,23 @@ def configure(self, enabled=None, hostname=None, port=None, uds_path=None, https self.sampler = sampler # TODO: Remove when we remove the fallback to priority sampling - if isinstance(self.sampler, DatadogSampler): + if isinstance(self.sampler, OpenTelemetrySampler): self.sampler._priority_sampler = self.priority_sampler - self._dogstatsd_host = dogstatsd_host or self._dogstatsd_host - self._dogstatsd_port = dogstatsd_port or self._dogstatsd_port - self.log.debug('Connecting to DogStatsd on {}:{}'.format( - self._dogstatsd_host, - self._dogstatsd_port, - )) - self._dogstatsd_client = DogStatsd( - host=self._dogstatsd_host, - port=self._dogstatsd_port, - ) - - if hostname is not None or port is not None or uds_path is not None or https is not None or \ - filters is not None or priority_sampling is not None: - # Preserve hostname and port when overriding filters or priority sampling - default_hostname = self.DEFAULT_HOSTNAME - default_port = self.DEFAULT_PORT - if hasattr(self, 'writer') and hasattr(self.writer, 'api'): - default_hostname = self.writer.api.hostname - default_port = self.writer.api.port - if https is None: - https = self.writer.api.https + if filters is not None or priority_sampling is not None or api is not None: self.writer = AgentWriter( - hostname or default_hostname, - port or default_port, - uds_path=uds_path, - https=https, filters=filters, priority_sampler=self.priority_sampler, - dogstatsd=self._dogstatsd_client, + api=api, ) - # HACK: since we recreated our dogstatsd agent, replace the old write one - self.writer.dogstatsd = self._dogstatsd_client - if context_provider is not None: self._context_provider = context_provider if wrap_executor is not None: self._wrap_executor = wrap_executor - # Since we've recreated our dogstatsd agent, we need to restart metric collection with that new agent + # Since we've recreated our metrics_client agent, we need to restart metric collection with that new agent if self._runtime_worker: runtime_metrics_was_running = True self._runtime_worker.stop() @@ -210,6 +168,9 @@ def configure(self, enabled=None, hostname=None, port=None, uds_path=None, https if (collect_metrics is None and runtime_metrics_was_running) or collect_metrics: self._start_runtime_worker() + if http_propagator is not None: + http_propagator_module.set_http_propagator_factory(http_propagator) + def start_span(self, name, child_of=None, service=None, resource=None, span_type=None): """ Return a span that will trace an operation called `name`. This method allows @@ -287,7 +248,7 @@ def start_span(self, name, child_of=None, service=None, resource=None, span_type span.sampled = self.sampler.sample(span) # Old behavior # DEV: The new sampler sets metrics and priority sampling on the span for us - if not isinstance(self.sampler, DatadogSampler): + if not isinstance(self.sampler, OpenTelemetrySampler): if span.sampled: # When doing client sampling in the client, keep the sample rate so that we can # scale up statistics in the next steps of the pipeline. @@ -330,26 +291,20 @@ def start_span(self, name, child_of=None, service=None, resource=None, span_type if service and service not in self._services: self._services.add(service) - # The constant tags for the dogstatsd client needs to updated with any new + # The constant tags for the metrics_client client needs to updated with any new # service(s) that may have been added. 
- self._update_dogstatsd_constant_tags() + self._update_metrics_client_constant_tags() return span - def _update_dogstatsd_constant_tags(self): + def _update_metrics_client_constant_tags(self): """ Prepare runtime tags for ddstatsd. """ + log.warning('_update_metrics_client_constant_tags() not implemented.') # DEV: ddstatsd expects tags in the form ['key1:value1', 'key2:value2', ...] - tags = [ - '{}:{}'.format(k, v) - for k, v in RuntimeTags() - ] - self.log.debug('Updating constant tags {}'.format(tags)) - self._dogstatsd_client.constant_tags = tags def _start_runtime_worker(self): - self._runtime_worker = RuntimeWorker(self._dogstatsd_client, self._RUNTIME_METRICS_INTERVAL) - self._runtime_worker.start() + log.warning('RuntimeWorker() not implemented.') def _check_new_process(self): """ Checks if the tracer is in a new process (was forked) and performs @@ -370,7 +325,7 @@ def _check_new_process(self): # force an immediate update constant tags since we have reset services # and generated a new runtime id - self._update_dogstatsd_constant_tags() + self._update_metrics_client_constant_tags() def trace(self, name, service=None, resource=None, span_type=None): """ @@ -477,7 +432,7 @@ def write(self, spans): def set_service_info(self, *args, **kwargs): """Set the information about the given service. - :param str service: the internal name of the service (e.g. acme_search, datadog_web) + :param str service: the internal name of the service (e.g. acme_search, opentelemetry_web) :param str app: the off the shelf name of the application (e.g. rails, postgres, custom-app) :param str app_type: the type of the application (e.g. db, web) """ diff --git a/ddtrace/util.py b/oteltrace/util.py similarity index 82% rename from ddtrace/util.py rename to oteltrace/util.py index 51517698..0c3d1e86 100644 --- a/ddtrace/util.py +++ b/oteltrace/util.py @@ -5,8 +5,8 @@ deprecation( - name='ddtrace.util', - message='Use `ddtrace.utils` package instead', + name='oteltrace.util', + message='Use `oteltrace.utils` package instead', version='1.0.0', ) diff --git a/ddtrace/utils/__init__.py b/oteltrace/utils/__init__.py similarity index 100% rename from ddtrace/utils/__init__.py rename to oteltrace/utils/__init__.py diff --git a/ddtrace/utils/attrdict.py b/oteltrace/utils/attrdict.py similarity index 100% rename from ddtrace/utils/attrdict.py rename to oteltrace/utils/attrdict.py diff --git a/ddtrace/utils/config.py b/oteltrace/utils/config.py similarity index 100% rename from ddtrace/utils/config.py rename to oteltrace/utils/config.py diff --git a/ddtrace/utils/deprecation.py b/oteltrace/utils/deprecation.py similarity index 93% rename from ddtrace/utils/deprecation.py rename to oteltrace/utils/deprecation.py index ea852cc7..78a0a451 100644 --- a/ddtrace/utils/deprecation.py +++ b/oteltrace/utils/deprecation.py @@ -3,7 +3,7 @@ from functools import wraps -class RemovedInDDTrace10Warning(DeprecationWarning): +class RemovedInOtelTrace10Warning(DeprecationWarning): pass @@ -22,7 +22,7 @@ def format_message(name, message, version): def warn(message, stacklevel=2): """Helper function used as a ``DeprecationWarning``.""" - warnings.warn(message, RemovedInDDTrace10Warning, stacklevel=stacklevel) + warnings.warn(message, RemovedInOtelTrace10Warning, stacklevel=stacklevel) def deprecation(name='', message='', version=None): diff --git a/ddtrace/utils/formats.py b/oteltrace/utils/formats.py similarity index 84% rename from ddtrace/utils/formats.py rename to oteltrace/utils/formats.py index 363c9c11..ad04c53a 100644 --- 
a/ddtrace/utils/formats.py +++ b/oteltrace/utils/formats.py @@ -7,23 +7,23 @@ def get_env(integration, variable, default=None): """Retrieves environment variables value for the given integration. It must be used for consistency between integrations. The implementation is backward compatible with legacy nomenclature: - * `DATADOG_` is a legacy prefix with lower priority - * `DD_` environment variables have the highest priority + * `OPENTELEMETRY_` is a legacy prefix with lower priority + * `OTEL_` environment variables have the highest priority * the environment variable is built concatenating `integration` and `variable` arguments * return `default` otherwise """ key = '{}_{}'.format(integration, variable).upper() - legacy_env = 'DATADOG_{}'.format(key) - env = 'DD_{}'.format(key) + legacy_env = 'OPENTELEMETRY_{}'.format(key) + env = 'OTEL_{}'.format(key) value = os.getenv(env) legacy = os.getenv(legacy_env) if legacy: - # Deprecation: `DATADOG_` variables are deprecated + # Deprecation: `OPENTELEMETRY_` variables are deprecated deprecation( - name='DATADOG_', - message='Use `DD_` prefix instead', + name='OPENTELEMETRY_', + message='Use `OTEL_` prefix instead', version='1.0.0', ) diff --git a/ddtrace/utils/hook.py b/oteltrace/utils/hook.py similarity index 100% rename from ddtrace/utils/hook.py rename to oteltrace/utils/hook.py diff --git a/ddtrace/utils/http.py b/oteltrace/utils/http.py similarity index 100% rename from ddtrace/utils/http.py rename to oteltrace/utils/http.py diff --git a/ddtrace/utils/importlib.py b/oteltrace/utils/importlib.py similarity index 100% rename from ddtrace/utils/importlib.py rename to oteltrace/utils/importlib.py diff --git a/ddtrace/utils/merge.py b/oteltrace/utils/merge.py similarity index 100% rename from ddtrace/utils/merge.py rename to oteltrace/utils/merge.py diff --git a/ddtrace/utils/sizeof.py b/oteltrace/utils/sizeof.py similarity index 100% rename from ddtrace/utils/sizeof.py rename to oteltrace/utils/sizeof.py diff --git a/ddtrace/utils/wrappers.py b/oteltrace/utils/wrappers.py similarity index 89% rename from ddtrace/utils/wrappers.py rename to oteltrace/utils/wrappers.py index a8369c86..988c77f8 100644 --- a/ddtrace/utils/wrappers.py +++ b/oteltrace/utils/wrappers.py @@ -1,4 +1,4 @@ -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt import inspect from .deprecation import deprecated @@ -27,7 +27,7 @@ def safe_patch(patchable, key, patch_func, service, meta, tracer): then patchable[key] contains an already patched command! To workaround this, check if patchable or patchable.__class__ are _dogtraced If is isn't, nothing to worry about, patch the key as usual - But if it is, search for a '__dd_orig_{key}' method on the class, which is + But if it is, search for a '__otel_orig_{key}' method on the class, which is the original unpatched method we wish to trace. 
""" @@ -35,11 +35,11 @@ def _get_original_method(thing, key): orig = None if hasattr(thing, '_dogtraced'): # Search for original method - orig = getattr(thing, '__dd_orig_{}'.format(key), None) + orig = getattr(thing, '__otel_orig_{}'.format(key), None) else: orig = getattr(thing, key) # Set it for the next time we attempt to patch `thing` - setattr(thing, '__dd_orig_{}'.format(key), orig) + setattr(thing, '__otel_orig_{}'.format(key), orig) return orig diff --git a/ddtrace/vendor/__init__.py b/oteltrace/vendor/__init__.py similarity index 53% rename from ddtrace/vendor/__init__.py rename to oteltrace/vendor/__init__.py index a46013f8..ce862dfd 100644 --- a/ddtrace/vendor/__init__.py +++ b/oteltrace/vendor/__init__.py @@ -1,26 +1,13 @@ """ -ddtrace.vendor +oteltrace.vendor ============== -Install vendored dependencies under a different top level package to avoid importing `ddtrace/__init__.py` +Install vendored dependencies under a different top level package to avoid importing `oteltrace/__init__.py` whenever a dependency is imported. Doing this allows us to have a little more control over import order. Dependencies ============ -msgpack -------- - -Website: https://msgpack.org/ -Source: https://github.com/msgpack/msgpack-python -Version: 0.6.1 -License: Apache License, Version 2.0 - -Notes: - If you need to update any `*.pyx` files, be sure to run `cython --cplus msgpack/_cmsgpack.pyx` to regenerate `_cmsgpack.cpp` - - `_packer.pyx` and `_unpacker.pyx` were updated to import from `ddtrace.vendor.msgpack` - six --- @@ -31,7 +18,7 @@ Notes: `six/__init__.py` is just the source code's `six.py` - `curl https://raw.githubusercontent.com/benjaminp/six/1.11.0/six.py > ddtrace/vendor/six/__init__.py` + `curl https://raw.githubusercontent.com/benjaminp/six/1.11.0/six.py > oteltrace/vendor/six/__init__.py` wrapt @@ -47,18 +34,6 @@ `setup.py` will attempt to build the `wrapt/_wrappers.c` C module -dogstatsd ---------- - -Website: https://datadogpy.readthedocs.io/en/latest/ -Source: https://github.com/DataDog/datadogpy -Version: 0.28.0 -License: Copyright (c) 2015, Datadog - -Notes: - `dogstatsd/__init__.py` was updated to include a copy of the `datadogpy` license: https://github.com/DataDog/datadogpy/blob/master/LICENSE - Only `datadog.dogstatsd` module was vendored to avoid unnecessary dependencies - `datadog/util/compat.py` was copied to `dogstatsd/compat.py` monotonic --------- diff --git a/ddtrace/vendor/monotonic/__init__.py b/oteltrace/vendor/monotonic/__init__.py similarity index 100% rename from ddtrace/vendor/monotonic/__init__.py rename to oteltrace/vendor/monotonic/__init__.py diff --git a/ddtrace/vendor/six/__init__.py b/oteltrace/vendor/six/__init__.py similarity index 100% rename from ddtrace/vendor/six/__init__.py rename to oteltrace/vendor/six/__init__.py diff --git a/ddtrace/vendor/wrapt/__init__.py b/oteltrace/vendor/wrapt/__init__.py similarity index 100% rename from ddtrace/vendor/wrapt/__init__.py rename to oteltrace/vendor/wrapt/__init__.py diff --git a/ddtrace/vendor/wrapt/_wrappers.c b/oteltrace/vendor/wrapt/_wrappers.c similarity index 100% rename from ddtrace/vendor/wrapt/_wrappers.c rename to oteltrace/vendor/wrapt/_wrappers.c diff --git a/ddtrace/vendor/wrapt/decorators.py b/oteltrace/vendor/wrapt/decorators.py similarity index 100% rename from ddtrace/vendor/wrapt/decorators.py rename to oteltrace/vendor/wrapt/decorators.py diff --git a/ddtrace/vendor/wrapt/importer.py b/oteltrace/vendor/wrapt/importer.py similarity index 100% rename from 
ddtrace/vendor/wrapt/importer.py rename to oteltrace/vendor/wrapt/importer.py diff --git a/ddtrace/vendor/wrapt/wrappers.py b/oteltrace/vendor/wrapt/wrappers.py similarity index 100% rename from ddtrace/vendor/wrapt/wrappers.py rename to oteltrace/vendor/wrapt/wrappers.py diff --git a/setup.py b/setup.py index cc6ba0e8..a3f7b084 100644 --- a/setup.py +++ b/setup.py @@ -33,9 +33,9 @@ def run_tests(self): long_description = """ -# dd-trace-py +# otel-trace-py -`ddtrace` is Datadog's tracing library for Python. It is used to trace requests +`oteltrace` is OpenTelemetry's tracing library for Python. It is used to trace requests as they flow across web servers, databases and microservices so that developers have great visiblity into bottlenecks and troublesome requests. @@ -57,10 +57,10 @@ def run_tests(self): # Base `setup()` kwargs without any C-extension registering setup_kwargs = dict( - name='ddtrace', - description='Datadog tracing code', - url='https://github.com/DataDog/dd-trace-py', - author='Datadog, Inc.', + name='oteltrace', + description='OpenTelemetry tracing code', + url='https://github.com/opentelemetry/otel-trace-py', + author='DataDog, Inc.', author_email='dev@datadoghq.com', long_description=long_description, long_description_content_type='text/markdown', @@ -68,18 +68,15 @@ def run_tests(self): packages=find_packages(exclude=['tests*']), install_requires=[ 'psutil>=5.0.0', + 'opentelemetry-api', + 'opentelemetry-sdk', ], - extras_require={ - # users can include opentracing by having: - # install_requires=['ddtrace[opentracing]', ...] - 'opentracing': ['opentracing>=2.0.0'], - }, # plugin tox tests_require=['tox', 'flake8'], cmdclass={'test': Tox}, entry_points={ 'console_scripts': [ - 'ddtrace-run = ddtrace.commands.ddtrace_run:main' + 'oteltrace-run = oteltrace.commands.oteltrace_run:main' ] }, classifiers=[ @@ -91,15 +88,10 @@ def run_tests(self): 'Programming Language :: Python :: 3.7', ], use_scm_version=True, - setup_requires=['setuptools_scm'], + setup_requires=['setuptools_scm', 'opentelemetry-api', 'opentelemetry-sdk'], ) -# The following from here to the end of the file is borrowed from wrapt's and msgpack's `setup.py`: -# https://github.com/GrahamDumpleton/wrapt/blob/4ee35415a4b0d570ee6a9b3a14a6931441aeab4b/setup.py -# https://github.com/msgpack/msgpack-python/blob/381c2eff5f8ee0b8669fd6daf1fd1ecaffe7c931/setup.py -# These helpers are useful for attempting build a C-extension and then retrying without it if it fails - libraries = [] if sys.platform == 'win32': libraries.append('ws2_32') @@ -139,15 +131,8 @@ def build_extension(self, ext): kwargs = copy.deepcopy(setup_kwargs) kwargs['ext_modules'] = [ Extension( - 'ddtrace.vendor.wrapt._wrappers', - sources=['ddtrace/vendor/wrapt/_wrappers.c'], - ), - Extension( - 'ddtrace.vendor.msgpack._cmsgpack', - sources=['ddtrace/vendor/msgpack/_cmsgpack.cpp'], - libraries=libraries, - include_dirs=['ddtrace/vendor/'], - define_macros=macros, + 'oteltrace.vendor.wrapt._wrappers', + sources=['oteltrace/vendor/wrapt/_wrappers.c'], ), ] # DEV: Make sure `cmdclass` exists diff --git a/tests/__init__.py b/tests/__init__.py index 5874e1c3..7a786358 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ -# Do *NOT* `import ddtrace` in here +# Do *NOT* `import oteltrace` in here # DEV: Some tests rely on import order of modules -# in order to properly function. Importing `ddtrace` +# in order to properly function. 
Importing `oteltrace` # here would mess with those tests since everyone # will load this file by default diff --git a/tests/base/__init__.py b/tests/base/__init__.py index 7c933f11..9da67d12 100644 --- a/tests/base/__init__.py +++ b/tests/base/__init__.py @@ -3,7 +3,7 @@ import sys import unittest -import ddtrace +import oteltrace from ..utils.tracer import DummyTracer from ..utils.span import TestSpanContainer, TestSpan, NO_CHILDREN @@ -30,7 +30,7 @@ def test_case(self): def override_env(env): """ Temporarily override ``os.environ`` with provided values - >>> with self.override_env(dict(DATADOG_TRACE_DEBUG=True)): + >>> with self.override_env(dict(OPENTELEMETRY_TRACE_DEBUG=True)): # Your test """ # Copy the full original environment @@ -54,16 +54,16 @@ def override_global_config(values): # Your test """ # DEV: Uses dict as interface but internally handled as attributes on Config instance - analytics_enabled_original = ddtrace.config.analytics_enabled - report_hostname_original = ddtrace.config.report_hostname + analytics_enabled_original = oteltrace.config.analytics_enabled + report_hostname_original = oteltrace.config.report_hostname - ddtrace.config.analytics_enabled = values.get('analytics_enabled', analytics_enabled_original) - ddtrace.config.report_hostname = values.get('report_hostname', report_hostname_original) + oteltrace.config.analytics_enabled = values.get('analytics_enabled', analytics_enabled_original) + oteltrace.config.report_hostname = values.get('report_hostname', report_hostname_original) try: yield finally: - ddtrace.config.analytics_enabled = analytics_enabled_original - ddtrace.config.report_hostname = report_hostname_original + oteltrace.config.analytics_enabled = analytics_enabled_original + oteltrace.config.report_hostname = report_hostname_original @staticmethod @contextlib.contextmanager @@ -73,7 +73,7 @@ def override_config(integration, values): >>> with self.override_config('flask', dict(service_name='test-service')): # Your test """ - options = getattr(ddtrace.config, integration) + options = getattr(oteltrace.config, integration) original = dict( (key, options.get(key)) @@ -94,7 +94,7 @@ def override_http_config(integration, values): >>> with self.override_http_config('flask', dict(trace_query_string=True)): # Your test """ - options = getattr(ddtrace.config, integration).http + options = getattr(oteltrace.config, integration).http original = {} for key, value in values.items(): @@ -171,10 +171,10 @@ def assert_structure(self, root, children=NO_CHILDREN): @contextlib.contextmanager def override_global_tracer(self, tracer=None): - original = ddtrace.tracer + original = oteltrace.tracer tracer = tracer or self.tracer - setattr(ddtrace, 'tracer', tracer) + setattr(oteltrace, 'tracer', tracer) try: yield finally: - setattr(ddtrace, 'tracer', original) + setattr(oteltrace, 'tracer', original) diff --git a/tests/benchmark.py b/tests/benchmark.py index 8558addc..f11ff7f8 100644 --- a/tests/benchmark.py +++ b/tests/benchmark.py @@ -1,4 +1,4 @@ -from ddtrace import Tracer +from oteltrace import Tracer import pytest from .test_tracer import DummyWriter diff --git a/tests/commands/ddtrace_run_app_name.py b/tests/commands/ddtrace_run_app_name.py deleted file mode 100644 index b48266b7..00000000 --- a/tests/commands/ddtrace_run_app_name.py +++ /dev/null @@ -1,5 +0,0 @@ -from ddtrace.opentracer import Tracer - -if __name__ == '__main__': - tracer = Tracer() - print(tracer._service_name) diff --git a/tests/commands/ddtrace_run_dogstatsd.py 
b/tests/commands/ddtrace_run_dogstatsd.py deleted file mode 100644 index b39dfaa4..00000000 --- a/tests/commands/ddtrace_run_dogstatsd.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import print_function - -from ddtrace import tracer - -if __name__ == '__main__': - assert tracer._dogstatsd_client.host == '172.10.0.1' - assert tracer._dogstatsd_client.port == 8120 - print('Test success') diff --git a/tests/commands/ddtrace_run_hostname.py b/tests/commands/ddtrace_run_hostname.py deleted file mode 100644 index c2f084c5..00000000 --- a/tests/commands/ddtrace_run_hostname.py +++ /dev/null @@ -1,6 +0,0 @@ -from ddtrace import tracer - -if __name__ == '__main__': - assert tracer.writer.api.hostname == '172.10.0.1' - assert tracer.writer.api.port == 8120 - print('Test success') diff --git a/tests/commands/ddtrace_run_service.py b/tests/commands/ddtrace_run_service.py deleted file mode 100644 index 7062006d..00000000 --- a/tests/commands/ddtrace_run_service.py +++ /dev/null @@ -1,5 +0,0 @@ -import os - -if __name__ == '__main__': - assert os.environ['DATADOG_SERVICE_NAME'] == 'my_test_service' - print('Test success') diff --git a/tests/commands/module_mock.py b/tests/commands/module_mock.py new file mode 100644 index 00000000..5cd80907 --- /dev/null +++ b/tests/commands/module_mock.py @@ -0,0 +1,9 @@ +from unittest import mock + +_EXPORTER = mock.Mock() + + +def build_exporter(**kwargs): + for k, v in kwargs.items(): + setattr(_EXPORTER, k, v) + return _EXPORTER diff --git a/tests/commands/ddtrace_minimal.py b/tests/commands/oteltrace_minimal.py similarity index 51% rename from tests/commands/ddtrace_minimal.py rename to tests/commands/oteltrace_minimal.py index 2f5caf50..ef649d7d 100644 --- a/tests/commands/ddtrace_minimal.py +++ b/tests/commands/oteltrace_minimal.py @@ -1,4 +1,4 @@ -import ddtrace.bootstrap.sitecustomize as module +import oteltrace.bootstrap.sitecustomize as module if __name__ == '__main__': diff --git a/tests/commands/ddtrace_run_argv.py b/tests/commands/oteltrace_run_argv.py similarity index 100% rename from tests/commands/ddtrace_run_argv.py rename to tests/commands/oteltrace_run_argv.py diff --git a/tests/commands/ddtrace_run_debug.py b/tests/commands/oteltrace_run_debug.py similarity index 80% rename from tests/commands/ddtrace_run_debug.py rename to tests/commands/oteltrace_run_debug.py index 543a8585..6e61861c 100644 --- a/tests/commands/ddtrace_run_debug.py +++ b/tests/commands/oteltrace_run_debug.py @@ -1,6 +1,6 @@ import logging -from ddtrace import tracer +from oteltrace import tracer if __name__ == '__main__': assert tracer.log.isEnabledFor(logging.DEBUG) diff --git a/tests/commands/ddtrace_run_disabled.py b/tests/commands/oteltrace_run_disabled.py similarity index 78% rename from tests/commands/ddtrace_run_disabled.py rename to tests/commands/oteltrace_run_disabled.py index 95d13a52..ab995d15 100644 --- a/tests/commands/ddtrace_run_disabled.py +++ b/tests/commands/oteltrace_run_disabled.py @@ -1,4 +1,4 @@ -from ddtrace import tracer, monkey +from oteltrace import tracer, monkey if __name__ == '__main__': assert not tracer.enabled diff --git a/tests/commands/ddtrace_run_enabled.py b/tests/commands/oteltrace_run_enabled.py similarity index 73% rename from tests/commands/ddtrace_run_enabled.py rename to tests/commands/oteltrace_run_enabled.py index cbe4673a..7f8654e2 100644 --- a/tests/commands/ddtrace_run_enabled.py +++ b/tests/commands/oteltrace_run_enabled.py @@ -1,4 +1,4 @@ -from ddtrace import tracer +from oteltrace import tracer if __name__ == '__main__': 
assert tracer.enabled diff --git a/tests/commands/ddtrace_run_env.py b/tests/commands/oteltrace_run_env.py similarity index 76% rename from tests/commands/ddtrace_run_env.py rename to tests/commands/oteltrace_run_env.py index 45db8cca..fafd4c39 100644 --- a/tests/commands/ddtrace_run_env.py +++ b/tests/commands/oteltrace_run_env.py @@ -1,4 +1,4 @@ -from ddtrace import tracer +from oteltrace import tracer if __name__ == '__main__': assert tracer.tags['env'] == 'test' diff --git a/tests/commands/ddtrace_run_global_tags.py b/tests/commands/oteltrace_run_global_tags.py similarity index 84% rename from tests/commands/ddtrace_run_global_tags.py rename to tests/commands/oteltrace_run_global_tags.py index 5f62e14d..e8a443f3 100644 --- a/tests/commands/ddtrace_run_global_tags.py +++ b/tests/commands/oteltrace_run_global_tags.py @@ -1,4 +1,4 @@ -from ddtrace import tracer +from oteltrace import tracer if __name__ == '__main__': assert tracer.tags['a'] == 'True' diff --git a/tests/commands/ddtrace_run_integration.py b/tests/commands/oteltrace_run_integration.py similarity index 93% rename from tests/commands/ddtrace_run_integration.py rename to tests/commands/oteltrace_run_integration.py index b1eef829..ea58b8c7 100644 --- a/tests/commands/ddtrace_run_integration.py +++ b/tests/commands/oteltrace_run_integration.py @@ -1,11 +1,11 @@ """ An integration test that uses a real Redis client -that we expect to be implicitly traced via `ddtrace-run` +that we expect to be implicitly traced via `oteltrace-run` """ import redis -from ddtrace import Pin +from oteltrace import Pin from tests.contrib.config import REDIS_CONFIG from tests.test_tracer import DummyWriter diff --git a/tests/commands/ddtrace_run_logs_injection.py b/tests/commands/oteltrace_run_logs_injection.py similarity index 53% rename from tests/commands/ddtrace_run_logs_injection.py rename to tests/commands/oteltrace_run_logs_injection.py index 06d5cb6e..1f6fb5c2 100644 --- a/tests/commands/ddtrace_run_logs_injection.py +++ b/tests/commands/oteltrace_run_logs_injection.py @@ -2,10 +2,10 @@ if __name__ == '__main__': # Ensure if module is patched then default log formatter is set up for logs - if getattr(logging, '_datadog_patch'): - assert '[dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s]' in \ + if getattr(logging, '_opentelemetry_patch'): + assert '[otel.trace_id=%(otel.trace_id)s otel.span_id=%(otel.span_id)s]' in \ logging.root.handlers[0].formatter._fmt else: - assert '[dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s]' not in \ + assert '[otel.trace_id=%(otel.trace_id)s otel.span_id=%(otel.span_id)s]' not in \ logging.root.handlers[0].formatter._fmt print('Test success') diff --git a/tests/commands/ddtrace_run_no_debug.py b/tests/commands/oteltrace_run_no_debug.py similarity index 81% rename from tests/commands/ddtrace_run_no_debug.py rename to tests/commands/oteltrace_run_no_debug.py index 3e19f9d1..fc3e7654 100644 --- a/tests/commands/ddtrace_run_no_debug.py +++ b/tests/commands/oteltrace_run_no_debug.py @@ -1,6 +1,6 @@ import logging -from ddtrace import tracer +from oteltrace import tracer if __name__ == '__main__': assert not tracer.log.isEnabledFor(logging.DEBUG) diff --git a/tests/commands/oteltrace_run_otelexporter.py b/tests/commands/oteltrace_run_otelexporter.py new file mode 100644 index 00000000..71793cc0 --- /dev/null +++ b/tests/commands/oteltrace_run_otelexporter.py @@ -0,0 +1,6 @@ +from oteltrace import tracer + +if __name__ == '__main__': + assert tracer.writer.api._exporter.key == '0x9812892467541' + assert 
tracer.writer.api._exporter.url == 'opentelemetry.io' + print('Test success') diff --git a/tests/commands/ddtrace_run_patched_modules.py b/tests/commands/oteltrace_run_patched_modules.py similarity index 78% rename from tests/commands/ddtrace_run_patched_modules.py rename to tests/commands/oteltrace_run_patched_modules.py index bcddba07..631753ca 100644 --- a/tests/commands/ddtrace_run_patched_modules.py +++ b/tests/commands/oteltrace_run_patched_modules.py @@ -1,4 +1,4 @@ -from ddtrace import monkey +from oteltrace import monkey if __name__ == '__main__': assert 'redis' in monkey.get_patched_modules() diff --git a/tests/commands/ddtrace_run_priority_sampling.py b/tests/commands/oteltrace_run_priority_sampling.py similarity index 77% rename from tests/commands/ddtrace_run_priority_sampling.py rename to tests/commands/oteltrace_run_priority_sampling.py index d4a32ed7..e6215221 100644 --- a/tests/commands/ddtrace_run_priority_sampling.py +++ b/tests/commands/oteltrace_run_priority_sampling.py @@ -1,4 +1,4 @@ -from ddtrace import tracer +from oteltrace import tracer if __name__ == '__main__': assert tracer.priority_sampler is not None diff --git a/tests/commands/oteltrace_run_propagator.py b/tests/commands/oteltrace_run_propagator.py new file mode 100644 index 00000000..5c08d043 --- /dev/null +++ b/tests/commands/oteltrace_run_propagator.py @@ -0,0 +1,6 @@ +from oteltrace.propagation.http import HTTPPropagator +from oteltrace.propagation.b3 import B3HTTPPropagator + +if __name__ == '__main__': + assert isinstance(HTTPPropagator(), B3HTTPPropagator) + print('Test success') diff --git a/tests/commands/oteltrace_run_service.py b/tests/commands/oteltrace_run_service.py new file mode 100644 index 00000000..d5c292c5 --- /dev/null +++ b/tests/commands/oteltrace_run_service.py @@ -0,0 +1,5 @@ +import os + +if __name__ == '__main__': + assert os.environ['OPENTELEMETRY_SERVICE_NAME'] == 'my_test_service' + print('Test success') diff --git a/tests/commands/ddtrace_run_sitecustomize.py b/tests/commands/oteltrace_run_sitecustomize.py similarity index 100% rename from tests/commands/ddtrace_run_sitecustomize.py rename to tests/commands/oteltrace_run_sitecustomize.py diff --git a/tests/commands/test_runner.py b/tests/commands/test_runner.py index 9320ca95..0cf7f5f3 100644 --- a/tests/commands/test_runner.py +++ b/tests/commands/test_runner.py @@ -10,14 +10,14 @@ def inject_sitecustomize(path): the current PYTHONPATH. :param path: package path containing ``sitecustomize.py`` module, starting - from the ddtrace root folder + from the oteltrace root folder :returns: a cloned environment that includes an altered PYTHONPATH with the given `sitecustomize.py` """ - from ddtrace import __file__ as root_file + from oteltrace import __file__ as root_file root_folder = os.path.dirname(root_file) # Copy the current environment and replace the PYTHONPATH. 
This is - # required otherwise `ddtrace` scripts are not found when `env` kwarg is + # required otherwise `oteltrace` scripts are not found when `env` kwarg is # passed env = os.environ.copy() sitecustomize = os.path.join(root_folder, '..', path) @@ -29,178 +29,135 @@ def inject_sitecustomize(path): return env -class DdtraceRunTest(BaseTestCase): +class OteltraceRunTest(BaseTestCase): def test_service_name_passthrough(self): """ - $DATADOG_SERVICE_NAME gets passed through to the program + $OPENTELEMETRY_SERVICE_NAME gets passed through to the program """ - with self.override_env(dict(DATADOG_SERVICE_NAME='my_test_service')): + with self.override_env(dict(OPENTELEMETRY_SERVICE_NAME='my_test_service')): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_service.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_service.py'] ) assert out.startswith(b'Test success') def test_env_name_passthrough(self): """ - $DATADOG_ENV gets passed through to the global tracer as an 'env' tag + $OPENTELEMETRY_ENV gets passed through to the global tracer as an 'env' tag """ - with self.override_env(dict(DATADOG_ENV='test')): + with self.override_env(dict(OPENTELEMETRY_ENV='test')): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_env.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_env.py'] ) assert out.startswith(b'Test success') def test_env_enabling(self): """ - DATADOG_TRACE_ENABLED=false allows disabling of the global tracer + OPENTELEMETRY_TRACE_ENABLED=false allows disabling of the global tracer """ - with self.override_env(dict(DATADOG_TRACE_ENABLED='false')): + with self.override_env(dict(OPENTELEMETRY_TRACE_ENABLED='false')): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_disabled.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_disabled.py'] ) assert out.startswith(b'Test success') - with self.override_env(dict(DATADOG_TRACE_ENABLED='true')): + with self.override_env(dict(OPENTELEMETRY_TRACE_ENABLED='true')): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_enabled.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_enabled.py'] ) assert out.startswith(b'Test success') def test_patched_modules(self): """ - Using `ddtrace-run` registers some generic patched modules + Using `oteltrace-run` registers some generic patched modules """ out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_patched_modules.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_patched_modules.py'] ) assert out.startswith(b'Test success') def test_integration(self): out = subprocess.check_output( - ['ddtrace-run', 'python', '-m', 'tests.commands.ddtrace_run_integration'] + ['oteltrace-run', 'python', '-m', 'tests.commands.oteltrace_run_integration'] ) assert out.startswith(b'Test success') def test_debug_enabling(self): """ - DATADOG_TRACE_DEBUG=true allows setting debug logging of the global tracer + OPENTELEMETRY_TRACE_DEBUG=true allows setting debug logging of the global tracer """ - with self.override_env(dict(DATADOG_TRACE_DEBUG='false')): + with self.override_env(dict(OPENTELEMETRY_TRACE_DEBUG='false')): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_no_debug.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_no_debug.py'] ) assert out.startswith(b'Test success') - with 
self.override_env(dict(DATADOG_TRACE_DEBUG='true')): + with self.override_env(dict(OPENTELEMETRY_TRACE_DEBUG='true')): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_debug.py'] - ) - assert out.startswith(b'Test success') - - def test_host_port_from_env(self): - """ - DATADOG_TRACE_AGENT_HOSTNAME|PORT point to the tracer - to the correct host/port for submission - """ - with self.override_env(dict(DATADOG_TRACE_AGENT_HOSTNAME='172.10.0.1', - DATADOG_TRACE_AGENT_PORT='8120')): - out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_hostname.py'] - ) - assert out.startswith(b'Test success') - - def test_host_port_from_env_dd(self): - """ - DD_AGENT_HOST|DD_TRACE_AGENT_PORT point to the tracer - to the correct host/port for submission - """ - with self.override_env(dict(DD_AGENT_HOST='172.10.0.1', - DD_TRACE_AGENT_PORT='8120')): - out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_hostname.py'] - ) - assert out.startswith(b'Test success') - - # Do we get the same results without `ddtrace-run`? - out = subprocess.check_output( - ['python', 'tests/commands/ddtrace_run_hostname.py'] - ) - assert out.startswith(b'Test success') - - def test_runtime_metrics(self): - """ - DD_AGENT_HOST|DD_DOGSTATSD_PORT point to the tracer - to the correct host/port for submission - """ - with self.override_env(dict(DD_RUNTIME_METRICS_ENABLED='True', - DD_AGENT_HOST='172.10.0.1', - DD_DOGSTATSD_PORT='8120')): - out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_dogstatsd.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_debug.py'] ) assert out.startswith(b'Test success') def test_priority_sampling_from_env(self): """ - DATADOG_PRIORITY_SAMPLING enables Distributed Sampling + OPENTELEMETRY_PRIORITY_SAMPLING enables Distributed Sampling """ - with self.override_env(dict(DATADOG_PRIORITY_SAMPLING='True')): + with self.override_env(dict(OPENTELEMETRY_PRIORITY_SAMPLING='True')): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_priority_sampling.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_priority_sampling.py'] ) assert out.startswith(b'Test success') def test_patch_modules_from_env(self): """ - DATADOG_PATCH_MODULES overrides the defaults for patch_all() + OPENTELEMETRY_PATCH_MODULES overrides the defaults for patch_all() """ - from ddtrace.bootstrap.sitecustomize import EXTRA_PATCHED_MODULES, update_patched_modules + from oteltrace.bootstrap.sitecustomize import EXTRA_PATCHED_MODULES, update_patched_modules orig = EXTRA_PATCHED_MODULES.copy() # empty / malformed strings are no-ops - with self.override_env(dict(DATADOG_PATCH_MODULES='')): + with self.override_env(dict(OPENTELEMETRY_PATCH_MODULES='')): update_patched_modules() assert orig == EXTRA_PATCHED_MODULES - with self.override_env(dict(DATADOG_PATCH_MODULES=':')): + with self.override_env(dict(OPENTELEMETRY_PATCH_MODULES=':')): update_patched_modules() assert orig == EXTRA_PATCHED_MODULES - with self.override_env(dict(DATADOG_PATCH_MODULES=',')): + with self.override_env(dict(OPENTELEMETRY_PATCH_MODULES=',')): update_patched_modules() assert orig == EXTRA_PATCHED_MODULES - with self.override_env(dict(DATADOG_PATCH_MODULES=',:')): + with self.override_env(dict(OPENTELEMETRY_PATCH_MODULES=',:')): update_patched_modules() assert orig == EXTRA_PATCHED_MODULES # overrides work in either direction - with 
self.override_env(dict(DATADOG_PATCH_MODULES='django:false')): + with self.override_env(dict(OPENTELEMETRY_PATCH_MODULES='django:false')): update_patched_modules() assert EXTRA_PATCHED_MODULES['django'] is False - with self.override_env(dict(DATADOG_PATCH_MODULES='boto:true')): + with self.override_env(dict(OPENTELEMETRY_PATCH_MODULES='boto:true')): update_patched_modules() assert EXTRA_PATCHED_MODULES['boto'] is True - with self.override_env(dict(DATADOG_PATCH_MODULES='django:true,boto:false')): + with self.override_env(dict(OPENTELEMETRY_PATCH_MODULES='django:true,boto:false')): update_patched_modules() assert EXTRA_PATCHED_MODULES['boto'] is False assert EXTRA_PATCHED_MODULES['django'] is True - with self.override_env(dict(DATADOG_PATCH_MODULES='django:false,boto:true')): + with self.override_env(dict(OPENTELEMETRY_PATCH_MODULES='django:false,boto:true')): update_patched_modules() assert EXTRA_PATCHED_MODULES['boto'] is True assert EXTRA_PATCHED_MODULES['django'] is False - def test_sitecustomize_without_ddtrace_run_command(self): + def test_sitecustomize_without_oteltrace_run_command(self): # [Regression test]: ensure `sitecustomize` path is removed only if it's # present otherwise it will cause: # ValueError: list.remove(x): x not in list - # as mentioned here: https://github.com/DataDog/dd-trace-py/pull/516 + # as mentioned here: https://github.com/opentelemetry/otel-trace-py/pull/516 env = inject_sitecustomize('') out = subprocess.check_output( - ['python', 'tests/commands/ddtrace_minimal.py'], + ['python', 'tests/commands/oteltrace_minimal.py'], env=env, ) # `out` contains the `loaded` status of the module @@ -213,7 +170,7 @@ def test_sitecustomize_run(self): # defined in users' PYTHONPATH. env = inject_sitecustomize('tests/commands/bootstrap') out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_sitecustomize.py'], + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_sitecustomize.py'], env=env, ) assert out.startswith(b'Test success') @@ -222,40 +179,58 @@ def test_sitecustomize_run_suppressed(self): # ensure `sitecustomize.py` is not loaded if `-S` is used env = inject_sitecustomize('tests/commands/bootstrap') out = subprocess.check_output( - ['ddtrace-run', 'python', '-S', 'tests/commands/ddtrace_run_sitecustomize.py', '-S'], + ['oteltrace-run', 'python', '-S', 'tests/commands/oteltrace_run_sitecustomize.py', '-S'], env=env, ) assert out.startswith(b'Test success') def test_argv_passed(self): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_argv.py', 'foo', 'bar'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_argv.py', 'foo', 'bar'] ) assert out.startswith(b'Test success') - def test_got_app_name(self): - """ - apps run with ddtrace-run have a proper app name - """ - out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_app_name.py'] - ) - assert out.startswith(b'ddtrace_run_app_name.py') - def test_global_trace_tags(self): """ Ensure global tags are passed in from environment """ - with self.override_env(dict(DD_TRACE_GLOBAL_TAGS='a:True,b:0,c:C')): + with self.override_env(dict(OTEL_TRACE_GLOBAL_TAGS='a:True,b:0,c:C')): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_global_tags.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_global_tags.py'] ) assert out.startswith(b'Test success') def test_logs_injection(self): """ Ensure logs injection works """ - with 
self.override_env(dict(DD_LOGS_INJECTION='true')): + with self.override_env(dict(OTEL_LOGS_INJECTION='true')): + out = subprocess.check_output( + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_logs_injection.py'] + ) + assert out.startswith(b'Test success') + + def test_otel_exporter(self): + """ Ensure exporter is properly loaded + """ + oteltrace_run_conf = { + 'OTEL_EXPORTER_MODULE': 'tests.commands.module_mock', + 'OTEL_EXPORTER_FACTORY': 'build_exporter', + 'OTEL_EXPORTER_OPTIONS_key': '0x9812892467541', + 'OTEL_EXPORTER_OPTIONS_url': 'opentelemetry.io', + } + with self.override_env(oteltrace_run_conf): + out = subprocess.check_output( + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_otelexporter.py'] + ) + assert out.startswith(b'Test success') + + def test_propagator(self): + """ Ensure correct propagator is configured + """ + oteltrace_run_conf = { + 'OTEL_TRACER_PROPAGATOR': 'b3', + } + with self.override_env(oteltrace_run_conf): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/commands/ddtrace_run_logs_injection.py'] + ['oteltrace-run', 'python', 'tests/commands/oteltrace_run_propagator.py'] ) assert out.startswith(b'Test success') diff --git a/tests/contrib/__init__.py b/tests/contrib/__init__.py index 5874e1c3..7a786358 100644 --- a/tests/contrib/__init__.py +++ b/tests/contrib/__init__.py @@ -1,5 +1,5 @@ -# Do *NOT* `import ddtrace` in here +# Do *NOT* `import oteltrace` in here # DEV: Some tests rely on import order of modules -# in order to properly function. Importing `ddtrace` +# in order to properly function. Importing `oteltrace` # here would mess with those tests since everyone # will load this file by default diff --git a/tests/contrib/aiobotocore/py35/test.py b/tests/contrib/aiobotocore/py35/test.py index e597b5f3..c9d889ba 100644 --- a/tests/contrib/aiobotocore/py35/test.py +++ b/tests/contrib/aiobotocore/py35/test.py @@ -1,6 +1,6 @@ import aiobotocore -from ddtrace.contrib.aiobotocore.patch import patch, unpatch +from oteltrace.contrib.aiobotocore.patch import patch, unpatch from ..utils import aiobotocore_client from ...asyncio.utils import AsyncioTestCase, mark_asyncio diff --git a/tests/contrib/aiobotocore/test.py b/tests/contrib/aiobotocore/test.py index 12d65344..1fd73c0c 100644 --- a/tests/contrib/aiobotocore/test.py +++ b/tests/contrib/aiobotocore/test.py @@ -1,10 +1,10 @@ import aiobotocore from botocore.errorfactory import ClientError -from ddtrace.contrib.aiobotocore.patch import patch, unpatch -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.ext import http -from ddtrace.compat import stringify +from oteltrace.contrib.aiobotocore.patch import patch, unpatch +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.ext import http +from oteltrace.compat import stringify from .utils import aiobotocore_client from ..asyncio.utils import AsyncioTestCase, mark_asyncio @@ -236,82 +236,3 @@ def test_double_patch(self): traces = self.tracer.writer.pop_traces() self.assertEqual(len(traces), 1) self.assertEqual(len(traces[0]), 1) - - @mark_asyncio - def test_opentraced_client(self): - from tests.opentracer.utils import init_tracer - - ot_tracer = init_tracer('my_svc', self.tracer) - - with ot_tracer.start_active_span('ot_outer_span'): - with aiobotocore_client('ec2', self.tracer) as ec2: - yield from ec2.describe_instances() - - traces = self.tracer.writer.pop_traces() - print(traces) - self.assertEqual(len(traces), 1) - self.assertEqual(len(traces[0]), 2) - ot_span = traces[0][0] - 
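
The new `test_propagator` above selects the propagation format through `OTEL_TRACER_PROPAGATOR=b3`, and `oteltrace_run_propagator.py` asserts that instantiating `HTTPPropagator` then yields a `B3HTTPPropagator`. The mechanism performing that selection is not shown in this diff; below is a minimal sketch of one way to map the variable to a propagator class, assuming a simple name-to-class registry (`_PROPAGATORS` and `get_propagator` are illustrative names, not the actual `oteltrace` API).

```python
# Hedged sketch (not part of this diff): map OTEL_TRACER_PROPAGATOR to a
# propagator class. Only the oteltrace.propagation.b3 import is confirmed by
# the diff itself; the registry and helper are illustrative.
import os

from oteltrace.propagation.b3 import B3HTTPPropagator

_PROPAGATORS = {
    'b3': B3HTTPPropagator,
}


def get_propagator(default=None):
    """Return the propagator class selected via OTEL_TRACER_PROPAGATOR."""
    name = os.environ.get('OTEL_TRACER_PROPAGATOR', '').lower()
    return _PROPAGATORS.get(name, default)
```

With `OTEL_TRACER_PROPAGATOR=b3`, `get_propagator()` returns `B3HTTPPropagator`, so an `HTTPPropagator()` built from that registry would satisfy the `isinstance` check in the new test script.
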
dd_span = traces[0][1] - - self.assertEqual(ot_span.resource, 'ot_outer_span') - self.assertEqual(ot_span.service, 'my_svc') - - # confirm the parenting - self.assertEqual(ot_span.parent_id, None) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(dd_span.get_tag('aws.agent'), 'aiobotocore') - self.assertEqual(dd_span.get_tag('aws.region'), 'us-west-2') - self.assertEqual(dd_span.get_tag('aws.operation'), 'DescribeInstances') - self.assertEqual(dd_span.get_tag('http.status_code'), '200') - self.assertEqual(dd_span.get_tag('retry_attempts'), '0') - self.assertEqual(dd_span.service, 'aws.ec2') - self.assertEqual(dd_span.resource, 'ec2.describeinstances') - self.assertEqual(dd_span.name, 'ec2.command') - - @mark_asyncio - def test_opentraced_s3_client(self): - from tests.opentracer.utils import init_tracer - - ot_tracer = init_tracer('my_svc', self.tracer) - - with ot_tracer.start_active_span('ot_outer_span'): - with aiobotocore_client('s3', self.tracer) as s3: - yield from s3.list_buckets() - with ot_tracer.start_active_span('ot_inner_span1'): - yield from s3.list_buckets() - with ot_tracer.start_active_span('ot_inner_span2'): - pass - - traces = self.tracer.writer.pop_traces() - self.assertEqual(len(traces), 1) - self.assertEqual(len(traces[0]), 5) - ot_outer_span = traces[0][0] - dd_span = traces[0][1] - ot_inner_span = traces[0][2] - dd_span2 = traces[0][3] - ot_inner_span2 = traces[0][4] - - self.assertEqual(ot_outer_span.resource, 'ot_outer_span') - self.assertEqual(ot_inner_span.resource, 'ot_inner_span1') - self.assertEqual(ot_inner_span2.resource, 'ot_inner_span2') - - # confirm the parenting - self.assertEqual(ot_outer_span.parent_id, None) - self.assertEqual(dd_span.parent_id, ot_outer_span.span_id) - self.assertEqual(ot_inner_span.parent_id, ot_outer_span.span_id) - self.assertEqual(dd_span2.parent_id, ot_inner_span.span_id) - self.assertEqual(ot_inner_span2.parent_id, ot_outer_span.span_id) - - self.assertEqual(dd_span.get_tag('aws.operation'), 'ListBuckets') - self.assertEqual(dd_span.get_tag('http.status_code'), '200') - self.assertEqual(dd_span.service, 'aws.s3') - self.assertEqual(dd_span.resource, 's3.listbuckets') - self.assertEqual(dd_span.name, 's3.command') - - self.assertEqual(dd_span2.get_tag('aws.operation'), 'ListBuckets') - self.assertEqual(dd_span2.get_tag('http.status_code'), '200') - self.assertEqual(dd_span2.service, 'aws.s3') - self.assertEqual(dd_span2.resource, 's3.listbuckets') - self.assertEqual(dd_span2.name, 's3.command') diff --git a/tests/contrib/aiobotocore/utils.py b/tests/contrib/aiobotocore/utils.py index a57d5545..ba80167e 100644 --- a/tests/contrib/aiobotocore/utils.py +++ b/tests/contrib/aiobotocore/utils.py @@ -1,6 +1,6 @@ import aiobotocore.session -from ddtrace import Pin +from oteltrace import Pin from contextlib import contextmanager diff --git a/tests/contrib/aiohttp/app/web.py b/tests/contrib/aiohttp/app/web.py index 1436aeba..d28b628d 100644 --- a/tests/contrib/aiohttp/app/web.py +++ b/tests/contrib/aiohttp/app/web.py @@ -165,4 +165,4 @@ def get_tracer(request): Utility function to retrieve the tracer from the given ``request``. It is meant to be used only for testing purposes. 
""" - return request['__datadog_request_span'].tracer + return request['__opentelemetry_request_span'].tracer diff --git a/tests/contrib/aiohttp/test_middleware.py b/tests/contrib/aiohttp/test_middleware.py index b4f3b10b..f7f8ee4c 100644 --- a/tests/contrib/aiohttp/test_middleware.py +++ b/tests/contrib/aiohttp/test_middleware.py @@ -2,13 +2,11 @@ from aiohttp.test_utils import unittest_run_loop -from ddtrace.contrib.aiohttp.middlewares import trace_app, trace_middleware, CONFIG_KEY -from ddtrace.ext import http -from ddtrace.sampler import RateSampler -from ddtrace.constants import SAMPLING_PRIORITY_KEY, ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.aiohttp.middlewares import trace_app, trace_middleware, CONFIG_KEY +from oteltrace.ext import http +from oteltrace.sampler import RateSampler +from oteltrace.constants import SAMPLING_PRIORITY_KEY, ANALYTICS_SAMPLE_RATE_KEY -from opentracing.scope_managers.asyncio import AsyncioScopeManager -from tests.opentracer.utils import init_tracer from .utils import TraceTestCase from .app.web import setup_app, noop_middleware @@ -345,7 +343,7 @@ def test_distributed_tracing_with_sampling_false(self): @asyncio.coroutine def test_distributed_tracing_disabled(self): # pass headers for distributed tracing - self.app['datadog_trace']['distributed_tracing_enabled'] = False + self.app['opentelemetry_trace']['distributed_tracing_enabled'] = False tracing_headers = { 'x-datadog-trace-id': '100', 'x-datadog-parent-id': '42', @@ -397,7 +395,7 @@ def test_distributed_tracing_sub_span(self): def _assert_200_parenting(self, traces): """Helper to assert parenting when handling aiohttp requests. - This is used to ensure that parenting is consistent between Datadog + This is used to ensure that parenting is consistent between OpenTelemetry and OpenTracing implementations of tracing. """ assert 2 == len(traces) @@ -426,7 +424,7 @@ def _assert_200_parenting(self, traces): @unittest_run_loop @asyncio.coroutine - def test_parenting_200_dd(self): + def test_parenting_200_otel(self): with self.tracer.trace('aiohttp_op'): request = yield from self.client.request('GET', '/') assert 200 == request.status @@ -436,27 +434,12 @@ def test_parenting_200_dd(self): traces = self.tracer.writer.pop_traces() self._assert_200_parenting(traces) - @unittest_run_loop - @asyncio.coroutine - def test_parenting_200_ot(self): - """OpenTracing version of test_handler.""" - ot_tracer = init_tracer('aiohttp_svc', self.tracer, scope_manager=AsyncioScopeManager()) - - with ot_tracer.start_active_span('aiohttp_op'): - request = yield from self.client.request('GET', '/') - assert 200 == request.status - text = yield from request.text() - - assert "What's tracing?" 
== text - traces = self.tracer.writer.pop_traces() - self._assert_200_parenting(traces) - @unittest_run_loop @asyncio.coroutine def test_analytics_integration_enabled(self): """ Check trace has analytics sample rate set """ - self.app['datadog_trace']['analytics_enabled'] = True - self.app['datadog_trace']['analytics_sample_rate'] = 0.5 + self.app['opentelemetry_trace']['analytics_enabled'] = True + self.app['opentelemetry_trace']['analytics_sample_rate'] = 0.5 request = yield from self.client.request('GET', '/template/') yield from request.text() @@ -480,7 +463,7 @@ def test_analytics_integration_default(self): @asyncio.coroutine def test_analytics_integration_disabled(self): """ Check trace has analytics sample rate set """ - self.app['datadog_trace']['analytics_enabled'] = False + self.app['opentelemetry_trace']['analytics_enabled'] = False request = yield from self.client.request('GET', '/template/') yield from request.text() diff --git a/tests/contrib/aiohttp/test_request.py b/tests/contrib/aiohttp/test_request.py index b13e848f..cca2232b 100644 --- a/tests/contrib/aiohttp/test_request.py +++ b/tests/contrib/aiohttp/test_request.py @@ -5,9 +5,9 @@ from urllib import request from aiohttp.test_utils import unittest_run_loop -from ddtrace.pin import Pin -from ddtrace.contrib.aiohttp.patch import patch, unpatch -from ddtrace.contrib.aiohttp.middlewares import trace_app +from oteltrace.pin import Pin +from oteltrace.contrib.aiohttp.patch import patch, unpatch +from oteltrace.contrib.aiohttp.middlewares import trace_app from .utils import TraceTestCase diff --git a/tests/contrib/aiohttp/test_request_safety.py b/tests/contrib/aiohttp/test_request_safety.py index 6f4c9394..b2b85426 100644 --- a/tests/contrib/aiohttp/test_request_safety.py +++ b/tests/contrib/aiohttp/test_request_safety.py @@ -5,10 +5,10 @@ from urllib import request from aiohttp.test_utils import unittest_run_loop -from ddtrace.pin import Pin -from ddtrace.provider import DefaultContextProvider -from ddtrace.contrib.aiohttp.patch import patch, unpatch -from ddtrace.contrib.aiohttp.middlewares import trace_app +from oteltrace.pin import Pin +from oteltrace.provider import DefaultContextProvider +from oteltrace.contrib.aiohttp.patch import patch, unpatch +from oteltrace.contrib.aiohttp.middlewares import trace_app from .utils import TraceTestCase diff --git a/tests/contrib/aiohttp/test_templates.py b/tests/contrib/aiohttp/test_templates.py index bf2c1f0a..2a1dd1be 100644 --- a/tests/contrib/aiohttp/test_templates.py +++ b/tests/contrib/aiohttp/test_templates.py @@ -3,8 +3,8 @@ from aiohttp.test_utils import unittest_run_loop -from ddtrace.pin import Pin -from ddtrace.contrib.aiohttp.patch import patch, unpatch +from oteltrace.pin import Pin +from oteltrace.contrib.aiohttp.patch import patch, unpatch from .utils import TraceTestCase from .app.web import set_filesystem_loader, set_package_loader diff --git a/tests/contrib/aiopg/py35/test.py b/tests/contrib/aiopg/py35/test.py index 8c32de75..747773e3 100644 --- a/tests/contrib/aiopg/py35/test.py +++ b/tests/contrib/aiopg/py35/test.py @@ -5,8 +5,8 @@ import aiopg # project -from ddtrace.contrib.aiopg.patch import patch, unpatch -from ddtrace import Pin +from oteltrace.contrib.aiopg.patch import patch, unpatch +from oteltrace import Pin # testing from tests.contrib.config import POSTGRES_CONFIG @@ -59,5 +59,5 @@ async def _test_cursor_ctx_manager(self): @mark_asyncio def test_cursor_ctx_manager(self): # ensure cursors work with context managers - # 
https://github.com/DataDog/dd-trace-py/issues/228 + # https://github.com/opentelemetry/otel-trace-py/issues/228 yield from self._test_cursor_ctx_manager() diff --git a/tests/contrib/aiopg/py37/test.py b/tests/contrib/aiopg/py37/test.py index 493786b9..3629b0b7 100644 --- a/tests/contrib/aiopg/py37/test.py +++ b/tests/contrib/aiopg/py37/test.py @@ -2,8 +2,8 @@ import aiopg # project -from ddtrace.contrib.aiopg.patch import patch, unpatch -from ddtrace import Pin +from oteltrace.contrib.aiopg.patch import patch, unpatch +from oteltrace import Pin # testing from tests.contrib.config import POSTGRES_CONFIG diff --git a/tests/contrib/aiopg/test.py b/tests/contrib/aiopg/test.py index 62ffa12b..a05ba5a3 100644 --- a/tests/contrib/aiopg/test.py +++ b/tests/contrib/aiopg/test.py @@ -7,12 +7,11 @@ from psycopg2 import extras # project -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.aiopg.patch import patch, unpatch -from ddtrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.aiopg.patch import patch, unpatch +from oteltrace import Pin # testing -from tests.opentracer.utils import init_tracer from tests.contrib.config import POSTGRES_CONFIG from tests.test_tracer import get_dummy_tracer from tests.contrib.asyncio.utils import AsyncioTestCase, mark_asyncio @@ -77,28 +76,6 @@ def assert_conn_is_traced(self, tracer, db, service): assert start <= span.start <= end assert span.duration <= end - start - # Ensure OpenTracing compatibility - ot_tracer = init_tracer('aiopg_svc', tracer) - with ot_tracer.start_active_span('aiopg_op'): - cursor = yield from db.cursor() - yield from cursor.execute(q) - rows = yield from cursor.fetchall() - assert rows == [('foobarblah',)] - spans = writer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - assert ot_span.name == 'aiopg_op' - assert ot_span.service == 'aiopg_svc' - assert dd_span.name == 'postgres.query' - assert dd_span.resource == q - assert dd_span.service == service - assert dd_span.meta['sql.query'] == q - assert dd_span.error == 0 - assert dd_span.span_type == 'sql' - # run a query with an error and ensure all is well q = 'select * from some_non_existant_table' cur = yield from db.cursor() diff --git a/tests/contrib/algoliasearch/test.py b/tests/contrib/algoliasearch/test.py index c717b4e6..643663e0 100644 --- a/tests/contrib/algoliasearch/test.py +++ b/tests/contrib/algoliasearch/test.py @@ -1,7 +1,7 @@ -from ddtrace import config, patch_all -from ddtrace.contrib.algoliasearch.patch import (SEARCH_SPAN_TYPE, patch, - unpatch, algoliasearch_version) -from ddtrace.pin import Pin +from oteltrace import config, patch_all +from oteltrace.contrib.algoliasearch.patch import (SEARCH_SPAN_TYPE, patch, + unpatch, algoliasearch_version) +from oteltrace.pin import Pin from tests.base import BaseTracerTestCase diff --git a/tests/contrib/asyncio/test_helpers.py b/tests/contrib/asyncio/test_helpers.py index 2fc6eb7c..10a41d34 100644 --- a/tests/contrib/asyncio/test_helpers.py +++ b/tests/contrib/asyncio/test_helpers.py @@ -1,9 +1,9 @@ import asyncio import pytest -from ddtrace.context import Context -from ddtrace.internal.context_manager import CONTEXTVARS_IS_AVAILABLE -from ddtrace.contrib.asyncio import helpers +from oteltrace.context import Context +from oteltrace.internal.context_manager import CONTEXTVARS_IS_AVAILABLE +from oteltrace.contrib.asyncio import helpers from .utils 
import AsyncioTestCase, mark_asyncio diff --git a/tests/contrib/asyncio/test_tracer.py b/tests/contrib/asyncio/test_tracer.py index 9d847832..e187753b 100644 --- a/tests/contrib/asyncio/test_tracer.py +++ b/tests/contrib/asyncio/test_tracer.py @@ -3,13 +3,12 @@ import time -from ddtrace.context import Context -from ddtrace.internal.context_manager import CONTEXTVARS_IS_AVAILABLE -from ddtrace.provider import DefaultContextProvider -from ddtrace.contrib.asyncio.patch import patch, unpatch -from ddtrace.contrib.asyncio.helpers import set_call_context +from oteltrace.context import Context +from oteltrace.internal.context_manager import CONTEXTVARS_IS_AVAILABLE +from oteltrace.provider import DefaultContextProvider +from oteltrace.contrib.asyncio.patch import patch, unpatch +from oteltrace.contrib.asyncio.helpers import set_call_context -from tests.opentracer.utils import init_tracer from .utils import AsyncioTestCase, mark_asyncio @@ -29,12 +28,12 @@ def test_get_call_context(self): # it should return the context attached to the current Task # or create a new one task = asyncio.Task.current_task() - ctx = getattr(task, '__datadog_context', None) + ctx = getattr(task, '__opentelemetry_context', None) assert ctx is None # get the context from the loop creates a new one that # is attached to the Task object ctx = self.tracer.get_call_context() - assert ctx == getattr(task, '__datadog_context', None) + assert ctx == getattr(task, '__opentelemetry_context', None) @mark_asyncio def test_get_call_context_twice(self): @@ -336,57 +335,3 @@ def test_event_loop_double_patch(self): # the event loop patch() self.test_tasks_chaining() - - @mark_asyncio - def test_trace_multiple_coroutines_ot_outer(self): - """OpenTracing version of test_trace_multiple_coroutines.""" - # if multiple coroutines have nested tracing, they must belong - # to the same trace - @asyncio.coroutine - def coro(): - # another traced coroutine - with self.tracer.trace('coroutine_2'): - return 42 - - ot_tracer = init_tracer('asyncio_svc', self.tracer) - with ot_tracer.start_active_span('coroutine_1'): - value = yield from coro() - - # the coroutine has been called correctly - assert 42 == value - # a single trace has been properly reported - traces = self.tracer.writer.pop_traces() - assert 1 == len(traces) - assert 2 == len(traces[0]) - assert 'coroutine_1' == traces[0][0].name - assert 'coroutine_2' == traces[0][1].name - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id - - @mark_asyncio - def test_trace_multiple_coroutines_ot_inner(self): - """OpenTracing version of test_trace_multiple_coroutines.""" - # if multiple coroutines have nested tracing, they must belong - # to the same trace - ot_tracer = init_tracer('asyncio_svc', self.tracer) - @asyncio.coroutine - def coro(): - # another traced coroutine - with ot_tracer.start_active_span('coroutine_2'): - return 42 - - with self.tracer.trace('coroutine_1'): - value = yield from coro() - - # the coroutine has been called correctly - assert 42 == value - # a single trace has been properly reported - traces = self.tracer.writer.pop_traces() - assert 1 == len(traces) - assert 2 == len(traces[0]) - assert 'coroutine_1' == traces[0][0].name - assert 'coroutine_2' == traces[0][1].name - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id diff --git a/tests/contrib/asyncio/test_tracer_safety.py 
b/tests/contrib/asyncio/test_tracer_safety.py index 54acebdf..bd802e4f 100644 --- a/tests/contrib/asyncio/test_tracer_safety.py +++ b/tests/contrib/asyncio/test_tracer_safety.py @@ -1,6 +1,6 @@ import asyncio -from ddtrace.provider import DefaultContextProvider +from oteltrace.provider import DefaultContextProvider from .utils import AsyncioTestCase, mark_asyncio @@ -22,7 +22,7 @@ def test_get_call_context(self): assert ctx is not None # test that it behaves the wrong way task = asyncio.Task.current_task() - task_ctx = getattr(task, '__datadog_context', None) + task_ctx = getattr(task, '__opentelemetry_context', None) assert task_ctx is None @mark_asyncio diff --git a/tests/contrib/asyncio/utils.py b/tests/contrib/asyncio/utils.py index 3ffe81aa..9551d6f8 100644 --- a/tests/contrib/asyncio/utils.py +++ b/tests/contrib/asyncio/utils.py @@ -2,7 +2,7 @@ from functools import wraps -from ddtrace.contrib.asyncio import context_provider +from oteltrace.contrib.asyncio import context_provider from ...base import BaseTracerTestCase diff --git a/tests/contrib/boto/test.py b/tests/contrib/boto/test.py index 57ae25d3..cd6d8ce2 100644 --- a/tests/contrib/boto/test.py +++ b/tests/contrib/boto/test.py @@ -9,14 +9,13 @@ from moto import mock_s3, mock_ec2, mock_lambda, mock_sts # project -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.boto.patch import patch, unpatch -from ddtrace.ext import http +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.boto.patch import patch, unpatch +from oteltrace.ext import http # testing from unittest import skipUnless -from tests.opentracer.utils import init_tracer from ...base import BaseTracerTestCase @@ -261,49 +260,3 @@ def test_elasticache_client(self): self.assertEqual(span.get_tag('aws.region'), 'us-west-2') self.assertEqual(span.service, 'test-boto-tracing.elasticache') self.assertEqual(span.resource, 'elasticache') - - @mock_ec2 - def test_ec2_client_ot(self): - """OpenTracing compatibility check of the test_ec2_client test.""" - - ec2 = boto.ec2.connect_to_region('us-west-2') - - ot_tracer = init_tracer('my_svc', self.tracer) - writer = self.tracer.writer - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ec2) - - with ot_tracer.start_active_span('ot_span'): - ec2.get_all_instances() - spans = writer.pop() - assert spans - self.assertEqual(len(spans), 2) - ot_span, dd_span = spans - - # confirm the parenting - self.assertIsNone(ot_span.parent_id) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(ot_span.resource, 'ot_span') - self.assertEqual(dd_span.get_tag('aws.operation'), 'DescribeInstances') - self.assertEqual(dd_span.get_tag(http.STATUS_CODE), '200') - self.assertEqual(dd_span.get_tag(http.METHOD), 'POST') - self.assertEqual(dd_span.get_tag('aws.region'), 'us-west-2') - - with ot_tracer.start_active_span('ot_span'): - ec2.run_instances(21) - spans = writer.pop() - assert spans - self.assertEqual(len(spans), 2) - ot_span, dd_span = spans - - # confirm the parenting - self.assertIsNone(ot_span.parent_id) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(dd_span.get_tag('aws.operation'), 'RunInstances') - self.assertEqual(dd_span.get_tag(http.STATUS_CODE), '200') - self.assertEqual(dd_span.get_tag(http.METHOD), 'POST') - self.assertEqual(dd_span.get_tag('aws.region'), 'us-west-2') - self.assertEqual(dd_span.service, 'test-boto-tracing.ec2') - self.assertEqual(dd_span.resource, 
'ec2.runinstances') - self.assertEqual(dd_span.name, 'ec2.command') diff --git a/tests/contrib/botocore/test.py b/tests/contrib/botocore/test.py index fd6b0833..645833d4 100644 --- a/tests/contrib/botocore/test.py +++ b/tests/contrib/botocore/test.py @@ -3,14 +3,13 @@ from moto import mock_s3, mock_ec2, mock_lambda, mock_sqs, mock_kinesis, mock_kms # project -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.botocore.patch import patch, unpatch -from ddtrace.ext import http -from ddtrace.compat import stringify +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.botocore.patch import patch, unpatch +from oteltrace.ext import http +from oteltrace.compat import stringify # testing -from tests.opentracer.utils import init_tracer from ...base import BaseTracerTestCase @@ -215,35 +214,3 @@ def test_kms_client(self): # checking for protection on sts against security leak self.assertIsNone(span.get_tag('params')) - - @mock_ec2 - def test_traced_client_ot(self): - """OpenTracing version of test_traced_client.""" - ot_tracer = init_tracer('ec2_svc', self.tracer) - - with ot_tracer.start_active_span('ec2_op'): - ec2 = self.session.create_client('ec2', region_name='us-west-2') - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ec2) - ec2.describe_instances() - - spans = self.get_spans() - assert spans - self.assertEqual(len(spans), 2) - - ot_span, dd_span = spans - - # confirm the parenting - self.assertIsNone(ot_span.parent_id) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(ot_span.name, 'ec2_op') - self.assertEqual(ot_span.service, 'ec2_svc') - - self.assertEqual(dd_span.get_tag('aws.agent'), 'botocore') - self.assertEqual(dd_span.get_tag('aws.region'), 'us-west-2') - self.assertEqual(dd_span.get_tag('aws.operation'), 'DescribeInstances') - self.assertEqual(dd_span.get_tag(http.STATUS_CODE), '200') - self.assertEqual(dd_span.get_tag('retry_attempts'), '0') - self.assertEqual(dd_span.service, 'test-botocore-tracing.ec2') - self.assertEqual(dd_span.resource, 'ec2.describeinstances') - self.assertEqual(dd_span.name, 'ec2.command') diff --git a/tests/contrib/bottle/test.py b/tests/contrib/bottle/test.py index 0d085a8b..74a9f61e 100644 --- a/tests/contrib/bottle/test.py +++ b/tests/contrib/bottle/test.py @@ -1,14 +1,13 @@ import bottle -import ddtrace +import oteltrace import webtest -from tests.opentracer.utils import init_tracer from ...base import BaseTracerTestCase -from ddtrace import compat -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.bottle import TracePlugin -from ddtrace.ext import http +from oteltrace import compat +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.bottle import TracePlugin +from oteltrace.ext import http SERVICE = 'bottle-app' @@ -21,14 +20,14 @@ def setUp(self): super(TraceBottleTest, self).setUp() # provide a dummy tracer - self._original_tracer = ddtrace.tracer - ddtrace.tracer = self.tracer + self._original_tracer = oteltrace.tracer + oteltrace.tracer = self.tracer # provide a Bottle app self.app = bottle.Bottle() def tearDown(self): # restore the tracer - ddtrace.tracer = self._original_tracer + oteltrace.tracer = self._original_tracer def _trace_app(self, tracer=None): self.app.install(TracePlugin(service=SERVICE, tracer=tracer)) @@ -61,7 +60,7 @@ def hi(name): assert s.get_tag('http.status_code') == '200' assert s.get_tag('http.method') == 'GET' assert 
s.get_tag(http.URL) == 'http://localhost:80/hi/dougie' - if ddtrace.config.bottle.trace_query_string: + if oteltrace.config.bottle.trace_query_string: assert s.get_tag(http.QUERY_STRING) == query_string else: assert http.QUERY_STRING not in s.meta @@ -309,39 +308,3 @@ def hi(name): if span == root: continue self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) - - def test_200_ot(self): - ot_tracer = init_tracer('my_svc', self.tracer) - - # setup our test app - @self.app.route('/hi/') - def hi(name): - return 'hi %s' % name - self._trace_app(self.tracer) - - # make a request - with ot_tracer.start_active_span('ot_span'): - resp = self.app.get('/hi/dougie') - - assert resp.status_int == 200 - assert compat.to_unicode(resp.body) == u'hi dougie' - # validate it's traced - spans = self.tracer.writer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.resource == 'ot_span' - - assert dd_span.name == 'bottle.request' - assert dd_span.service == 'bottle-app' - assert dd_span.resource == 'GET /hi/' - assert dd_span.get_tag('http.status_code') == '200' - assert dd_span.get_tag('http.method') == 'GET' - assert dd_span.get_tag(http.URL) == 'http://localhost:80/hi/dougie' - - services = self.tracer.writer.pop_services() - assert services == {} diff --git a/tests/contrib/bottle/test_autopatch.py b/tests/contrib/bottle/test_autopatch.py index 8bd9c48b..cf0035c2 100644 --- a/tests/contrib/bottle/test_autopatch.py +++ b/tests/contrib/bottle/test_autopatch.py @@ -1,11 +1,11 @@ import bottle -import ddtrace +import oteltrace import webtest from unittest import TestCase from tests.test_tracer import get_dummy_tracer -from ddtrace import compat +from oteltrace import compat SERVICE = 'bottle-app' @@ -18,14 +18,14 @@ class TraceBottleTest(TestCase): def setUp(self): # provide a dummy tracer self.tracer = get_dummy_tracer() - self._original_tracer = ddtrace.tracer - ddtrace.tracer = self.tracer + self._original_tracer = oteltrace.tracer + oteltrace.tracer = self.tracer # provide a Bottle app self.app = bottle.Bottle() def tearDown(self): # restore the tracer - ddtrace.tracer = self._original_tracer + oteltrace.tracer = self._original_tracer def _trace_app(self, tracer=None): self.app = webtest.TestApp(self.app) diff --git a/tests/contrib/bottle/test_distributed.py b/tests/contrib/bottle/test_distributed.py index 86a47af7..e72af3f4 100644 --- a/tests/contrib/bottle/test_distributed.py +++ b/tests/contrib/bottle/test_distributed.py @@ -1,9 +1,9 @@ import bottle import webtest -import ddtrace -from ddtrace import compat -from ddtrace.contrib.bottle import TracePlugin +import oteltrace +from oteltrace import compat +from oteltrace.contrib.bottle import TracePlugin from ...base import BaseTracerTestCase @@ -18,14 +18,14 @@ def setUp(self): super(TraceBottleDistributedTest, self).setUp() # provide a dummy tracer - self._original_tracer = ddtrace.tracer - ddtrace.tracer = self.tracer + self._original_tracer = oteltrace.tracer + oteltrace.tracer = self.tracer # provide a Bottle app self.app = bottle.Bottle() def tearDown(self): # restore the tracer - ddtrace.tracer = self._original_tracer + oteltrace.tracer = self._original_tracer def _trace_app_distributed(self, tracer=None): self.app.install(TracePlugin(service=SERVICE, tracer=tracer)) diff --git a/tests/contrib/cassandra/test.py b/tests/contrib/cassandra/test.py index f21ee1ae..bea4f774 100644 --- 
a/tests/contrib/cassandra/test.py +++ b/tests/contrib/cassandra/test.py @@ -9,15 +9,14 @@ from cassandra.query import BatchStatement, SimpleStatement # project -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.cassandra.patch import patch, unpatch -from ddtrace.contrib.cassandra.session import get_traced_cassandra, SERVICE -from ddtrace.ext import net, cassandra as cassx, errors -from ddtrace import config, Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.cassandra.patch import patch, unpatch +from oteltrace.contrib.cassandra.session import get_traced_cassandra, SERVICE +from oteltrace.ext import net, cassandra as cassx, errors +from oteltrace import config, Pin # testing from tests.contrib.config import CASSANDRA_CONFIG -from tests.opentracer.utils import init_tracer from tests.test_tracer import get_dummy_tracer # Oftentimes our tests fails because Cassandra connection timeouts during keyspace drop. Slowness in keyspace drop @@ -172,44 +171,6 @@ def test_query_analytics_without_rate(self): # confirm no analytics sample rate set by default assert query.get_metric(ANALYTICS_SAMPLE_RATE_KEY) == 1.0 - def test_query_ot(self): - """Ensure that cassandra works with the opentracer.""" - def execute_fn(session, query): - return session.execute(query) - - session, tracer = self._traced_session() - ot_tracer = init_tracer('cass_svc', tracer) - writer = tracer.writer - - with ot_tracer.start_active_span('cass_op'): - result = execute_fn(session, self.TEST_QUERY) - self._assert_result_correct(result) - - spans = writer.pop() - assert spans, spans - - # another for the actual query - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == 'cass_op' - assert ot_span.service == 'cass_svc' - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.resource == self.TEST_QUERY - assert dd_span.span_type == cassx.TYPE - - assert dd_span.get_tag(cassx.KEYSPACE) == self.TEST_KEYSPACE - assert dd_span.get_tag(net.TARGET_PORT) == self.TEST_PORT - assert dd_span.get_tag(cassx.ROW_COUNT) == '1' - assert dd_span.get_tag(cassx.PAGE_NUMBER) is None - assert dd_span.get_tag(cassx.PAGINATED) == 'False' - assert dd_span.get_tag(net.TARGET_HOST) == '127.0.0.1' - def test_query_async(self): def execute_fn(session, query): event = Event() @@ -236,7 +197,7 @@ def test_span_is_removed_from_future(self): session, tracer = self._traced_session() future = session.execute_async(self.TEST_QUERY) future.result() - span = getattr(future, '_ddtrace_current_span', None) + span = getattr(future, '_oteltrace_current_span', None) assert span is None def test_paginated_query(self): diff --git a/tests/contrib/celery/autopatch.py b/tests/contrib/celery/autopatch.py index b66dfa2a..d57e9130 100644 --- a/tests/contrib/celery/autopatch.py +++ b/tests/contrib/celery/autopatch.py @@ -1,4 +1,4 @@ -from ddtrace import Pin +from oteltrace import Pin if __name__ == '__main__': # have to import celery in order to have the post-import hooks run diff --git a/tests/contrib/celery/base.py b/tests/contrib/celery/base.py index 8c12977f..2bcab11d 100644 --- a/tests/contrib/celery/base.py +++ b/tests/contrib/celery/base.py @@ -1,8 +1,8 @@ from celery import Celery -from ddtrace import Pin -from ddtrace.compat import PY2 -from ddtrace.contrib.celery import patch, unpatch +from oteltrace import Pin +from oteltrace.compat import PY2 +from oteltrace.contrib.celery 
import patch, unpatch from ..config import REDIS_CONFIG from ...base import BaseTracerTestCase diff --git a/tests/contrib/celery/test_app.py b/tests/contrib/celery/test_app.py index ed26f008..fc5457c7 100644 --- a/tests/contrib/celery/test_app.py +++ b/tests/contrib/celery/test_app.py @@ -1,7 +1,7 @@ import celery -from ddtrace import Pin -from ddtrace.contrib.celery import unpatch_app +from oteltrace import Pin +from oteltrace.contrib.celery import unpatch_app from .base import CeleryBaseTestCase diff --git a/tests/contrib/celery/test_autopatch.py b/tests/contrib/celery/test_autopatch.py index de31352f..b2113bf1 100644 --- a/tests/contrib/celery/test_autopatch.py +++ b/tests/contrib/celery/test_autopatch.py @@ -4,10 +4,10 @@ class DdtraceRunTest(unittest.TestCase): - """Test that celery is patched successfully if run with ddtrace-run.""" + """Test that celery is patched successfully if run with oteltrace-run.""" def test_autopatch(self): out = subprocess.check_output( - ['ddtrace-run', 'python', 'tests/contrib/celery/autopatch.py'] + ['oteltrace-run', 'python', 'tests/contrib/celery/autopatch.py'] ) assert out.startswith(b'Test success') diff --git a/tests/contrib/celery/test_integration.py b/tests/contrib/celery/test_integration.py index a93ec0f9..dd3ce072 100644 --- a/tests/contrib/celery/test_integration.py +++ b/tests/contrib/celery/test_integration.py @@ -1,12 +1,10 @@ import celery from celery.exceptions import Retry -from ddtrace.contrib.celery import patch, unpatch +from oteltrace.contrib.celery import patch, unpatch from .base import CeleryBaseTestCase -from tests.opentracer.utils import init_tracer - class MyException(Exception): pass @@ -393,36 +391,3 @@ def fn_task(): self.assertEqual(1, len(traces[0])) span = traces[0][0] self.assertEqual(span.service, 'task-queue') - - def test_fn_task_apply_async_ot(self): - """OpenTracing version of test_fn_task_apply_async.""" - ot_tracer = init_tracer('celery_svc', self.tracer) - - # it should execute a traced async task that has parameters - @self.app.task - def fn_task_parameters(user, force_logout=False): - return (user, force_logout) - - with ot_tracer.start_active_span('celery_op'): - t = fn_task_parameters.apply_async(args=['user'], kwargs={'force_logout': True}) - assert 'PENDING' == t.status - - traces = self.tracer.writer.pop_traces() - assert 1 == len(traces) - assert 2 == len(traces[0]) - ot_span, dd_span = traces[0] - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == 'celery_op' - assert ot_span.service == 'celery_svc' - - assert dd_span.error == 0 - assert dd_span.name == 'celery.apply' - assert dd_span.resource == 'tests.contrib.celery.test_integration.fn_task_parameters' - assert dd_span.service == 'celery-producer' - assert dd_span.get_tag('celery.id') == t.task_id - assert dd_span.get_tag('celery.action') == 'apply_async' - assert dd_span.get_tag('celery.routing_key') == 'celery' diff --git a/tests/contrib/celery/test_old_style_task.py b/tests/contrib/celery/test_old_style_task.py index 74c646d0..9c745588 100644 --- a/tests/contrib/celery/test_old_style_task.py +++ b/tests/contrib/celery/test_old_style_task.py @@ -10,7 +10,7 @@ def test_apply_async_previous_style_tasks(self): # ensures apply_async is properly patched if Celery 1.0 style tasks # are used even in newer versions. This should extend support to # previous versions of Celery. 
- # Regression test: https://github.com/DataDog/dd-trace-py/pull/449 + # Regression test: https://github.com/opentelemetry/otel-trace-py/pull/449 class CelerySuperClass(celery.task.Task): abstract = True diff --git a/tests/contrib/celery/test_patch.py b/tests/contrib/celery/test_patch.py index 1fd676c0..e19a1972 100644 --- a/tests/contrib/celery/test_patch.py +++ b/tests/contrib/celery/test_patch.py @@ -1,18 +1,18 @@ import unittest -from ddtrace import Pin +from oteltrace import Pin class CeleryPatchTest(unittest.TestCase): def test_patch_after_import(self): import celery - from ddtrace import patch + from oteltrace import patch patch(celery=True) app = celery.Celery() assert Pin.get_from(app) is not None def test_patch_before_import(self): - from ddtrace import patch + from oteltrace import patch patch(celery=True) import celery diff --git a/tests/contrib/celery/test_task_deprecation.py b/tests/contrib/celery/test_task_deprecation.py index 89daf3b2..2cf4170d 100644 --- a/tests/contrib/celery/test_task_deprecation.py +++ b/tests/contrib/celery/test_task_deprecation.py @@ -3,7 +3,7 @@ from celery import Celery -from ddtrace.contrib.celery import patch_task, unpatch_task, unpatch +from oteltrace.contrib.celery import patch_task, unpatch_task, unpatch class CeleryDeprecatedTaskPatch(unittest.TestCase): diff --git a/tests/contrib/celery/test_utils.py b/tests/contrib/celery/test_utils.py index 88456087..07907d41 100644 --- a/tests/contrib/celery/test_utils.py +++ b/tests/contrib/celery/test_utils.py @@ -1,6 +1,6 @@ import gc -from ddtrace.contrib.celery.utils import ( +from oteltrace.contrib.celery.utils import ( tags_from_context, retrieve_task_id, attach_span, @@ -85,7 +85,7 @@ def fn_task(): span = self.tracer.trace('celery.run') attach_span(fn_task, task_id, span) # delete the Span - weak_dict = getattr(fn_task, '__dd_task_span') + weak_dict = getattr(fn_task, '__otel_task_span') detach_span(fn_task, task_id) assert weak_dict.get((task_id, False)) is None @@ -116,7 +116,7 @@ def fn_task(): # propagate and finish a Span for `fn_task` task_id = '7c6731af-9533-40c3-83a9-25b58f0d837f' attach_span(fn_task, task_id, self.tracer.trace('celery.run')) - weak_dict = getattr(fn_task, '__dd_task_span') + weak_dict = getattr(fn_task, '__otel_task_span') key = (task_id, False) assert weak_dict.get(key) # flush data and force the GC diff --git a/tests/contrib/consul/test.py b/tests/contrib/consul/test.py index 28313ea7..b47a3cc0 100644 --- a/tests/contrib/consul/test.py +++ b/tests/contrib/consul/test.py @@ -1,9 +1,9 @@ import consul -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.ext import consul as consulx -from ddtrace.vendor.wrapt import BoundFunctionWrapper -from ddtrace.contrib.consul.patch import patch, unpatch +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.ext import consul as consulx +from oteltrace.vendor.wrapt import BoundFunctionWrapper +from oteltrace.contrib.consul.patch import patch, unpatch from ..config import CONSUL_CONFIG from ...base import BaseTracerTestCase diff --git a/tests/contrib/dbapi/test_unit.py b/tests/contrib/dbapi/test_unit.py index 57de3731..6cbef8a2 100644 --- a/tests/contrib/dbapi/test_unit.py +++ b/tests/contrib/dbapi/test_unit.py @@ -1,9 +1,9 @@ import mock -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.dbapi import FetchTracedCursor, TracedCursor, TracedConnection -from ddtrace.span import Span +from 
oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.dbapi import FetchTracedCursor, TracedCursor, TracedConnection +from oteltrace.span import Span from ...base import BaseTracerTestCase diff --git a/tests/contrib/django/app/settings.py b/tests/contrib/django/app/settings.py index b17e604e..8fd8b37d 100644 --- a/tests/contrib/django/app/settings.py +++ b/tests/contrib/django/app/settings.py @@ -119,10 +119,10 @@ 'django.contrib.sessions', # tracer app - 'ddtrace.contrib.django', + 'oteltrace.contrib.django', ] -DATADOG_TRACE = { +OPENTELEMETRY_TRACE = { # tracer with a DummyWriter 'TRACER': 'tests.contrib.django.utils.tracer', 'ENABLED': True, diff --git a/tests/contrib/django/app/settings_untraced.py b/tests/contrib/django/app/settings_untraced.py index eb9f878b..61ff605d 100644 --- a/tests/contrib/django/app/settings_untraced.py +++ b/tests/contrib/django/app/settings_untraced.py @@ -99,7 +99,7 @@ 'django.contrib.sessions', ] -DATADOG_TRACE = { +OPENTELEMETRY_TRACE = { # tracer with a DummyWriter 'TRACER': 'tests.contrib.django.utils.tracer', 'ENABLED': True, diff --git a/tests/contrib/django/test_autopatching.py b/tests/contrib/django/test_autopatching.py index 270bdd25..809bb58f 100644 --- a/tests/contrib/django/test_autopatching.py +++ b/tests/contrib/django/test_autopatching.py @@ -1,6 +1,6 @@ import django -from ddtrace.monkey import patch +from oteltrace.monkey import patch from .utils import DjangoTraceTestCase from django.conf import settings from unittest import skipIf @@ -14,67 +14,67 @@ def setUp(self): @skipIf(django.VERSION >= (1, 10), 'skip if version above 1.10') def test_autopatching_middleware_classes(self): - assert django._datadog_patch - assert 'ddtrace.contrib.django' in settings.INSTALLED_APPS - assert settings.MIDDLEWARE_CLASSES[0] == 'ddtrace.contrib.django.TraceMiddleware' - assert settings.MIDDLEWARE_CLASSES[-1] == 'ddtrace.contrib.django.TraceExceptionMiddleware' + assert django._opentelemetry_patch + assert 'oteltrace.contrib.django' in settings.INSTALLED_APPS + assert settings.MIDDLEWARE_CLASSES[0] == 'oteltrace.contrib.django.TraceMiddleware' + assert settings.MIDDLEWARE_CLASSES[-1] == 'oteltrace.contrib.django.TraceExceptionMiddleware' @skipIf(django.VERSION >= (1, 10), 'skip if version above 1.10') def test_autopatching_twice_middleware_classes(self): - assert django._datadog_patch + assert django._opentelemetry_patch # Call django.setup() twice and ensure we don't add a duplicate tracer django.setup() - found_app = settings.INSTALLED_APPS.count('ddtrace.contrib.django') + found_app = settings.INSTALLED_APPS.count('oteltrace.contrib.django') assert found_app == 1 - assert settings.MIDDLEWARE_CLASSES[0] == 'ddtrace.contrib.django.TraceMiddleware' - assert settings.MIDDLEWARE_CLASSES[-1] == 'ddtrace.contrib.django.TraceExceptionMiddleware' + assert settings.MIDDLEWARE_CLASSES[0] == 'oteltrace.contrib.django.TraceMiddleware' + assert settings.MIDDLEWARE_CLASSES[-1] == 'oteltrace.contrib.django.TraceExceptionMiddleware' - found_mw = settings.MIDDLEWARE_CLASSES.count('ddtrace.contrib.django.TraceMiddleware') + found_mw = settings.MIDDLEWARE_CLASSES.count('oteltrace.contrib.django.TraceMiddleware') assert found_mw == 1 - found_mw = settings.MIDDLEWARE_CLASSES.count('ddtrace.contrib.django.TraceExceptionMiddleware') + found_mw = settings.MIDDLEWARE_CLASSES.count('oteltrace.contrib.django.TraceExceptionMiddleware') assert found_mw == 1 @skipIf(django.VERSION < (1, 10), 'skip if version is below 1.10') def 
test_autopatching_middleware(self): - assert django._datadog_patch - assert 'ddtrace.contrib.django' in settings.INSTALLED_APPS - assert settings.MIDDLEWARE[0] == 'ddtrace.contrib.django.TraceMiddleware' + assert django._opentelemetry_patch + assert 'oteltrace.contrib.django' in settings.INSTALLED_APPS + assert settings.MIDDLEWARE[0] == 'oteltrace.contrib.django.TraceMiddleware' # MIDDLEWARE_CLASSES gets created internally in django 1.10 & 1.11 but doesn't # exist at all in 2.0. assert not getattr(settings, 'MIDDLEWARE_CLASSES', None) or \ - 'ddtrace.contrib.django.TraceMiddleware' \ + 'oteltrace.contrib.django.TraceMiddleware' \ not in settings.MIDDLEWARE_CLASSES - assert settings.MIDDLEWARE[-1] == 'ddtrace.contrib.django.TraceExceptionMiddleware' + assert settings.MIDDLEWARE[-1] == 'oteltrace.contrib.django.TraceExceptionMiddleware' assert not getattr(settings, 'MIDDLEWARE_CLASSES', None) or \ - 'ddtrace.contrib.django.TraceExceptionMiddleware' \ + 'oteltrace.contrib.django.TraceExceptionMiddleware' \ not in settings.MIDDLEWARE_CLASSES @skipIf(django.VERSION < (1, 10), 'skip if version is below 1.10') def test_autopatching_twice_middleware(self): - assert django._datadog_patch + assert django._opentelemetry_patch # Call django.setup() twice and ensure we don't add a duplicate tracer django.setup() - found_app = settings.INSTALLED_APPS.count('ddtrace.contrib.django') + found_app = settings.INSTALLED_APPS.count('oteltrace.contrib.django') assert found_app == 1 - assert settings.MIDDLEWARE[0] == 'ddtrace.contrib.django.TraceMiddleware' + assert settings.MIDDLEWARE[0] == 'oteltrace.contrib.django.TraceMiddleware' # MIDDLEWARE_CLASSES gets created internally in django 1.10 & 1.11 but doesn't # exist at all in 2.0. assert not getattr(settings, 'MIDDLEWARE_CLASSES', None) or \ - 'ddtrace.contrib.django.TraceMiddleware' \ + 'oteltrace.contrib.django.TraceMiddleware' \ not in settings.MIDDLEWARE_CLASSES - assert settings.MIDDLEWARE[-1] == 'ddtrace.contrib.django.TraceExceptionMiddleware' + assert settings.MIDDLEWARE[-1] == 'oteltrace.contrib.django.TraceExceptionMiddleware' assert not getattr(settings, 'MIDDLEWARE_CLASSES', None) or \ - 'ddtrace.contrib.django.TraceExceptionMiddleware' \ + 'oteltrace.contrib.django.TraceExceptionMiddleware' \ not in settings.MIDDLEWARE_CLASSES - found_mw = settings.MIDDLEWARE.count('ddtrace.contrib.django.TraceMiddleware') + found_mw = settings.MIDDLEWARE.count('oteltrace.contrib.django.TraceMiddleware') assert found_mw == 1 - found_mw = settings.MIDDLEWARE.count('ddtrace.contrib.django.TraceExceptionMiddleware') + found_mw = settings.MIDDLEWARE.count('oteltrace.contrib.django.TraceExceptionMiddleware') assert found_mw == 1 @@ -84,15 +84,15 @@ def test_autopatching_empty_middleware(self): with self.settings(MIDDLEWARE=[]): patch(django=True) django.setup() - assert django._datadog_patch - assert 'ddtrace.contrib.django' in settings.INSTALLED_APPS - assert settings.MIDDLEWARE[0] == 'ddtrace.contrib.django.TraceMiddleware' + assert django._opentelemetry_patch + assert 'oteltrace.contrib.django' in settings.INSTALLED_APPS + assert settings.MIDDLEWARE[0] == 'oteltrace.contrib.django.TraceMiddleware' # MIDDLEWARE_CLASSES gets created internally in django 1.10 & 1.11 but doesn't # exist at all in 2.0. 
assert not getattr(settings, 'MIDDLEWARE_CLASSES', None) or \ - 'ddtrace.contrib.django.TraceMiddleware' \ + 'oteltrace.contrib.django.TraceMiddleware' \ not in settings.MIDDLEWARE_CLASSES - assert settings.MIDDLEWARE[-1] == 'ddtrace.contrib.django.TraceExceptionMiddleware' + assert settings.MIDDLEWARE[-1] == 'oteltrace.contrib.django.TraceExceptionMiddleware' assert not getattr(settings, 'MIDDLEWARE_CLASSES', None) or \ - 'ddtrace.contrib.django.TraceExceptionMiddleware' \ + 'oteltrace.contrib.django.TraceExceptionMiddleware' \ not in settings.MIDDLEWARE_CLASSES diff --git a/tests/contrib/django/test_cache_client.py b/tests/contrib/django/test_cache_client.py index 89725073..952ba69b 100644 --- a/tests/contrib/django/test_cache_client.py +++ b/tests/contrib/django/test_cache_client.py @@ -4,7 +4,7 @@ from django.core.cache import caches # testing -from .utils import DjangoTraceTestCase, override_ddtrace_settings +from .utils import DjangoTraceTestCase, override_oteltrace_settings from ...util import assert_dict_issuperset @@ -41,7 +41,7 @@ def test_cache_get(self): assert_dict_issuperset(span.meta, expected_meta) assert start < span.start < span.start + span.duration < end - @override_ddtrace_settings(DEFAULT_CACHE_SERVICE='foo') + @override_oteltrace_settings(DEFAULT_CACHE_SERVICE='foo') def test_cache_service_can_be_overriden(self): # get the default cache cache = caches['default'] @@ -56,7 +56,7 @@ def test_cache_service_can_be_overriden(self): span = spans[0] assert span.service == 'foo' - @override_ddtrace_settings(INSTRUMENT_CACHE=False) + @override_oteltrace_settings(INSTRUMENT_CACHE=False) def test_cache_disabled(self): # get the default cache cache = caches['default'] diff --git a/tests/contrib/django/test_connection.py b/tests/contrib/django/test_connection.py index a18ffc22..19c27409 100644 --- a/tests/contrib/django/test_connection.py +++ b/tests/contrib/django/test_connection.py @@ -4,11 +4,11 @@ # 3rd party from django.contrib.auth.models import User -from ddtrace.contrib.django.conf import settings -from ddtrace.contrib.django.patch import apply_django_patches, connections +from oteltrace.contrib.django.conf import settings +from oteltrace.contrib.django.patch import apply_django_patches, connections # testing -from .utils import DjangoTraceTestCase, override_ddtrace_settings +from .utils import DjangoTraceTestCase, override_oteltrace_settings class DjangoConnectionTest(DjangoTraceTestCase): @@ -42,7 +42,7 @@ def test_django_db_query_in_resource_not_in_tags(self): assert spans[0].resource == 'SELECT COUNT(*) AS "__count" FROM "auth_user"' assert spans[0].get_tag('sql.query') is None - @override_ddtrace_settings(INSTRUMENT_DATABASE=False) + @override_oteltrace_settings(INSTRUMENT_DATABASE=False) def test_connection_disabled(self): # trace a simple query users = User.objects.count() diff --git a/tests/contrib/django/test_instrumentation.py b/tests/contrib/django/test_instrumentation.py index a8960efc..bebd03ed 100644 --- a/tests/contrib/django/test_instrumentation.py +++ b/tests/contrib/django/test_instrumentation.py @@ -1,5 +1,5 @@ # project -from ddtrace.contrib.django.conf import DatadogSettings +from oteltrace.contrib.django.conf import OpenTelemetrySettings # testing from .utils import DjangoTraceTestCase @@ -12,24 +12,22 @@ class DjangoInstrumentationTest(DjangoTraceTestCase): """ def test_tracer_flags(self): assert self.tracer.enabled - assert self.tracer.writer.api.hostname == 'localhost' - assert self.tracer.writer.api.port == 8126 assert self.tracer.tags == {'env': 
'test'} def test_environment_vars(self): # Django defaults can be overridden by env vars, ensuring that # environment strings are properly converted with self.override_env(dict( - DATADOG_TRACE_AGENT_HOSTNAME='agent.consul.local', - DATADOG_TRACE_AGENT_PORT='58126' + OPENTELEMETRY_TRACE_AGENT_HOSTNAME='agent.consul.local', + OPENTELEMETRY_TRACE_AGENT_PORT='58126' )): - settings = DatadogSettings() + settings = OpenTelemetrySettings() assert settings.AGENT_HOSTNAME == 'agent.consul.local' assert settings.AGENT_PORT == 58126 def test_environment_var_wrong_port(self): # ensures that a wrong Agent Port doesn't crash the system # and defaults to 8126 - with self.override_env(dict(DATADOG_TRACE_AGENT_PORT='something')): - settings = DatadogSettings() + with self.override_env(dict(OPENTELEMETRY_TRACE_AGENT_PORT='something')): + settings = OpenTelemetrySettings() assert settings.AGENT_PORT == 8126 diff --git a/tests/contrib/django/test_middleware.py b/tests/contrib/django/test_middleware.py index 545aba06..b8109fc6 100644 --- a/tests/contrib/django/test_middleware.py +++ b/tests/contrib/django/test_middleware.py @@ -3,15 +3,14 @@ from django.db import connections # project -from ddtrace import config -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY, SAMPLING_PRIORITY_KEY -from ddtrace.contrib.django.db import unpatch_conn -from ddtrace.ext import errors, http +from oteltrace import config +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY, SAMPLING_PRIORITY_KEY +from oteltrace.contrib.django.db import unpatch_conn +from oteltrace.ext import errors, http # testing -from tests.opentracer.utils import init_tracer from .compat import reverse -from .utils import DjangoTraceTestCase, override_ddtrace_settings +from .utils import DjangoTraceTestCase, override_oteltrace_settings class DjangoMiddlewareTest(DjangoTraceTestCase): @@ -86,7 +85,7 @@ def test_analytics_global_on_integration_default(self): self.assertIsNone(sp_template.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) self.assertIsNone(sp_database.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) - @override_ddtrace_settings(ANALYTICS_ENABLED=True, ANALYTICS_SAMPLE_RATE=0.5) + @override_oteltrace_settings(ANALYTICS_ENABLED=True, ANALYTICS_SAMPLE_RATE=0.5) def test_analytics_global_on_integration_on(self): """ When making a request @@ -129,7 +128,7 @@ def test_analytics_global_off_integration_default(self): self.assertIsNone(sp_template.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) self.assertIsNone(sp_database.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) - @override_ddtrace_settings(ANALYTICS_ENABLED=True, ANALYTICS_SAMPLE_RATE=0.5) + @override_oteltrace_settings(ANALYTICS_ENABLED=True, ANALYTICS_SAMPLE_RATE=0.5) def test_analytics_global_off_integration_on(self): """ When making a request @@ -151,7 +150,7 @@ def test_analytics_global_off_integration_on(self): self.assertIsNone(sp_template.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) self.assertIsNone(sp_database.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) - @override_ddtrace_settings(ANALYTICS_ENABLED=True, ANALYTICS_SAMPLE_RATE=None) + @override_oteltrace_settings(ANALYTICS_ENABLED=True, ANALYTICS_SAMPLE_RATE=None) def test_analytics_global_off_integration_on_and_none(self): """ When making a request @@ -324,7 +323,7 @@ def test_middleware_propagation(self): assert sp_request.parent_id == 42 assert sp_request.get_metric(SAMPLING_PRIORITY_KEY) == 2 - @override_ddtrace_settings(DISTRIBUTED_TRACING=False) + @override_oteltrace_settings(DISTRIBUTED_TRACING=False) def test_middleware_no_propagation(self): # ensures that we 
properly propagate http context url = reverse('users-list') @@ -398,38 +397,6 @@ def test_middleware_handled_view_exception_client_error(self): assert sp_request.get_tag(errors.ERROR_MSG) is None assert sp_request.get_tag(errors.ERROR_TYPE) is None - def test_middleware_trace_request_ot(self): - """OpenTracing version of test_middleware_trace_request.""" - ot_tracer = init_tracer('my_svc', self.tracer) - - # ensures that the internals are properly traced - url = reverse('users-list') - with ot_tracer.start_active_span('ot_span'): - response = self.client.get(url) - assert response.status_code == 200 - - # check for spans - spans = self.tracer.writer.pop() - assert len(spans) == 4 - ot_span = spans[0] - sp_request = spans[1] - sp_template = spans[2] - sp_database = spans[3] - - # confirm parenting - assert ot_span.parent_id is None - assert sp_request.parent_id == ot_span.span_id - - assert ot_span.resource == 'ot_span' - assert ot_span.service == 'my_svc' - - assert sp_database.get_tag('django.db.vendor') == 'sqlite' - assert sp_template.get_tag('django.template_name') == 'users_list.html' - assert sp_request.get_tag('http.status_code') == '200' - assert sp_request.get_tag(http.URL) == 'http://testserver/users/' - assert sp_request.get_tag('django.user.is_authenticated') == 'False' - assert sp_request.get_tag('http.method') == 'GET' - def test_middleware_trace_request_404(self): """ When making a request to an unknown url in django diff --git a/tests/contrib/django/test_templates.py b/tests/contrib/django/test_templates.py index 8e8b6ea0..4ddce15b 100644 --- a/tests/contrib/django/test_templates.py +++ b/tests/contrib/django/test_templates.py @@ -4,7 +4,7 @@ from django.template import Context, Template # testing -from .utils import DjangoTraceTestCase, override_ddtrace_settings +from .utils import DjangoTraceTestCase, override_oteltrace_settings class DjangoTemplateTest(DjangoTraceTestCase): @@ -32,7 +32,7 @@ def test_template(self): assert span.get_tag('django.template_name') == 'unknown' assert start < span.start < span.start + span.duration < end - @override_ddtrace_settings(INSTRUMENT_TEMPLATE=False) + @override_oteltrace_settings(INSTRUMENT_TEMPLATE=False) def test_template_disabled(self): # prepare a base template using the default engine template = Template('Hello {{name}}!') diff --git a/tests/contrib/django/test_tracing_disabled.py b/tests/contrib/django/test_tracing_disabled.py index 61605f2d..695ab783 100644 --- a/tests/contrib/django/test_tracing_disabled.py +++ b/tests/contrib/django/test_tracing_disabled.py @@ -3,8 +3,8 @@ from django.test import TestCase # project -from ddtrace.tracer import Tracer -from ddtrace.contrib.django.conf import settings +from oteltrace.tracer import Tracer +from oteltrace.contrib.django.conf import settings # testing from ...test_tracer import DummyWriter @@ -23,7 +23,7 @@ def setUp(self): # Restart app with tracing disabled settings.ENABLED = False - self.app = apps.get_app_config('datadog_django') + self.app = apps.get_app_config('opentelemetry_django') self.app.ready() def tearDown(self): diff --git a/tests/contrib/django/test_utils.py b/tests/contrib/django/test_utils.py index 2d7cf074..ed20119b 100644 --- a/tests/contrib/django/test_utils.py +++ b/tests/contrib/django/test_utils.py @@ -2,7 +2,7 @@ from django.test import TestCase # project -from ddtrace.contrib.django.utils import quantize_key_values +from oteltrace.contrib.django.utils import quantize_key_values class DjangoUtilsTest(TestCase): diff --git a/tests/contrib/django/utils.py 
b/tests/contrib/django/utils.py index 9cd2c420..a09e0938 100644 --- a/tests/contrib/django/utils.py +++ b/tests/contrib/django/utils.py @@ -5,12 +5,12 @@ from django.test import TestCase # project -from ddtrace.tracer import Tracer -from ddtrace.contrib.django.conf import settings -from ddtrace.contrib.django.db import patch_db, unpatch_db -from ddtrace.contrib.django.cache import unpatch_cache -from ddtrace.contrib.django.templates import unpatch_template -from ddtrace.contrib.django.middleware import remove_exception_middleware, remove_trace_middleware +from oteltrace.tracer import Tracer +from oteltrace.contrib.django.conf import settings +from oteltrace.contrib.django.db import patch_db, unpatch_db +from oteltrace.contrib.django.cache import unpatch_cache +from oteltrace.contrib.django.templates import unpatch_template +from oteltrace.contrib.django.middleware import remove_exception_middleware, remove_trace_middleware # testing from ...base import BaseTestCase @@ -25,7 +25,7 @@ class DjangoTraceTestCase(BaseTestCase, TestCase): """ Base class that provides an internal tracer according to given - Datadog settings. This class ensures that the tracer spans are + OpenTelemetry settings. This class ensures that the tracer spans are properly reset after each run. The tracer is available in the ``self.tracer`` attribute. """ @@ -45,7 +45,7 @@ def tearDown(self): self.tracer.writer.pop_traces() -class override_ddtrace_settings(object): +class override_oteltrace_settings(object): def __init__(self, *args, **kwargs): self.items = list(kwargs.items()) @@ -68,7 +68,7 @@ def enable(self): self.backup[name] = getattr(settings, name) setattr(settings, name, value) self.unpatch_all() - app = apps.get_app_config('datadog_django') + app = apps.get_app_config('opentelemetry_django') app.ready() def disable(self): @@ -76,7 +76,7 @@ def disable(self): setattr(settings, name, self.backup[name]) self.unpatch_all() remove_exception_middleware() - app = apps.get_app_config('datadog_django') + app = apps.get_app_config('opentelemetry_django') app.ready() def __call__(self, func): diff --git a/tests/contrib/djangorestframework/app/settings.py b/tests/contrib/djangorestframework/app/settings.py index ac24bd45..b44e362f 100644 --- a/tests/contrib/djangorestframework/app/settings.py +++ b/tests/contrib/djangorestframework/app/settings.py @@ -91,13 +91,13 @@ 'django.contrib.sessions', # tracer app - 'ddtrace.contrib.django', + 'oteltrace.contrib.django', # djangorestframework 'rest_framework' ] -DATADOG_TRACE = { +OPENTELEMETRY_TRACE = { # tracer with a DummyWriter 'TRACER': 'tests.contrib.django.utils.tracer', 'ENABLED': True, diff --git a/tests/contrib/djangorestframework/test_djangorestframework.py b/tests/contrib/djangorestframework/test_djangorestframework.py index 021a4c13..156c0b58 100644 --- a/tests/contrib/djangorestframework/test_djangorestframework.py +++ b/tests/contrib/djangorestframework/test_djangorestframework.py @@ -12,18 +12,18 @@ def setUp(self): # would raise an exception from rest_framework.views import APIView - from ddtrace.contrib.django.restframework import unpatch_restframework + from oteltrace.contrib.django.restframework import unpatch_restframework self.APIView = APIView self.unpatch_restframework = unpatch_restframework def test_setup(self): assert apps.is_installed('rest_framework') - assert hasattr(self.APIView, '_datadog_patch') + assert hasattr(self.APIView, '_opentelemetry_patch') def test_unpatch(self): self.unpatch_restframework() - assert not getattr(self.APIView, 
'_datadog_patch') + assert not getattr(self.APIView, '_opentelemetry_patch') response = self.client.get('/users/') diff --git a/tests/contrib/elasticsearch/test.py b/tests/contrib/elasticsearch/test.py index aafb704f..55077671 100644 --- a/tests/contrib/elasticsearch/test.py +++ b/tests/contrib/elasticsearch/test.py @@ -2,15 +2,14 @@ import unittest # project -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.ext import http -from ddtrace.contrib.elasticsearch import get_traced_transport -from ddtrace.contrib.elasticsearch.elasticsearch import elasticsearch -from ddtrace.contrib.elasticsearch.patch import patch, unpatch +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.ext import http +from oteltrace.contrib.elasticsearch import get_traced_transport +from oteltrace.contrib.elasticsearch.elasticsearch import elasticsearch +from oteltrace.contrib.elasticsearch.patch import patch, unpatch # testing -from tests.opentracer.utils import init_tracer from ..config import ELASTICSEARCH_CONFIG from ...test_tracer import get_dummy_tracer from ...base import BaseTracerTestCase @@ -21,8 +20,8 @@ class ElasticsearchTest(unittest.TestCase): Elasticsearch integration test suite. Need a running ElasticSearch """ - ES_INDEX = 'ddtrace_index' - ES_TYPE = 'ddtrace_type' + ES_INDEX = 'oteltrace_index' + ES_TYPE = 'oteltrace_type' TEST_SERVICE = 'test' TEST_PORT = str(ELASTICSEARCH_CONFIG['port']) @@ -45,8 +44,8 @@ def test_elasticsearch(self): tracer = get_dummy_tracer() writer = tracer.writer transport_class = get_traced_transport( - datadog_tracer=tracer, - datadog_service=self.TEST_SERVICE) + opentelemetry_tracer=tracer, + opentelemetry_service=self.TEST_SERVICE) es = elasticsearch.Elasticsearch(transport_class=transport_class, port=ELASTICSEARCH_CONFIG['port']) @@ -147,44 +146,6 @@ def test_elasticsearch(self): es.indices.delete(index=self.ES_INDEX, ignore=[400, 404]) es.indices.delete(index=self.ES_INDEX, ignore=[400, 404]) - def test_elasticsearch_ot(self): - """Shortened OpenTracing version of test_elasticsearch.""" - tracer = get_dummy_tracer() - writer = tracer.writer - ot_tracer = init_tracer('my_svc', tracer) - - transport_class = get_traced_transport( - datadog_tracer=tracer, - datadog_service=self.TEST_SERVICE) - - es = elasticsearch.Elasticsearch(transport_class=transport_class, port=ELASTICSEARCH_CONFIG['port']) - - # Test index creation - mapping = {'mapping': {'properties': {'created': {'type': 'date', 'format': 'yyyy-MM-dd'}}}} - - with ot_tracer.start_active_span('ot_span'): - es.indices.create(index=self.ES_INDEX, ignore=400, body=mapping) - - spans = writer.pop() - assert spans - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == 'my_svc' - assert ot_span.resource == 'ot_span' - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.name == 'elasticsearch.query' - assert dd_span.span_type == 'elasticsearch' - assert dd_span.error == 0 - assert dd_span.get_tag('elasticsearch.method') == 'PUT' - assert dd_span.get_tag('elasticsearch.url') == '/%s' % self.ES_INDEX - assert dd_span.resource == 'PUT /%s' % self.ES_INDEX - class ElasticsearchPatchTest(BaseTracerTestCase): """ @@ -193,8 +154,8 @@ class ElasticsearchPatchTest(BaseTracerTestCase): Test cases with patching. Will merge when patching will be the default/only way. 
""" - ES_INDEX = 'ddtrace_index' - ES_TYPE = 'ddtrace_type' + ES_INDEX = 'oteltrace_index' + ES_TYPE = 'oteltrace_type' TEST_SERVICE = 'test' TEST_PORT = str(ELASTICSEARCH_CONFIG['port']) diff --git a/tests/contrib/falcon/app/app.py b/tests/contrib/falcon/app/app.py index 4f1cb65d..2bda3328 100644 --- a/tests/contrib/falcon/app/app.py +++ b/tests/contrib/falcon/app/app.py @@ -1,6 +1,6 @@ import falcon -from ddtrace.contrib.falcon import TraceMiddleware +from oteltrace.contrib.falcon import TraceMiddleware from . import resources diff --git a/tests/contrib/falcon/test_autopatch.py b/tests/contrib/falcon/test_autopatch.py index 4dd9e0b9..d4f5cdf2 100644 --- a/tests/contrib/falcon/test_autopatch.py +++ b/tests/contrib/falcon/test_autopatch.py @@ -1,6 +1,6 @@ from falcon import testing -import ddtrace +import oteltrace from ...base import BaseTracerTestCase from .app import get_app @@ -20,10 +20,10 @@ def setUp(self): self._service = 'my-falcon' - # Since most integrations do `from ddtrace import tracer` we cannot update do `ddtrace.tracer = self.tracer` - self.original_writer = ddtrace.tracer.writer - ddtrace.tracer.writer = self.tracer.writer - self.tracer = ddtrace.tracer + # Since most integrations do `from oteltrace import tracer` we cannot update do `oteltrace.tracer = self.tracer` + self.original_writer = oteltrace.tracer.writer + oteltrace.tracer.writer = self.tracer.writer + self.tracer = oteltrace.tracer # build a test app without adding a tracer middleware; # reconfigure the global tracer since the autopatch mode @@ -33,4 +33,4 @@ def setUp(self): def tearDown(self): super(AutoPatchTestCase, self).tearDown() - ddtrace.tracer.writer = self.original_writer + oteltrace.tracer.writer = self.original_writer diff --git a/tests/contrib/falcon/test_suite.py b/tests/contrib/falcon/test_suite.py index 1e07ef51..8d88fac4 100644 --- a/tests/contrib/falcon/test_suite.py +++ b/tests/contrib/falcon/test_suite.py @@ -1,8 +1,6 @@ -from ddtrace import config -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.ext import errors as errx, http as httpx - -from tests.opentracer.utils import init_tracer +from oteltrace import config +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.ext import errors as errx, http as httpx class FalconTestCase(object): @@ -204,34 +202,6 @@ def test_404_exception_no_stacktracer(self): assert span.get_tag(errx.ERROR_TYPE) is None assert span.parent_id is None - def test_200_ot(self): - """OpenTracing version of test_200.""" - ot_tracer = init_tracer('my_svc', self.tracer) - - with ot_tracer.start_active_span('ot_span'): - out = self.simulate_get('/200') - - assert out.status_code == 200 - assert out.content.decode('utf-8') == 'Success' - - traces = self.tracer.writer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - ot_span, dd_span = traces[0] - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == 'my_svc' - assert ot_span.resource == 'ot_span' - - assert dd_span.name == 'falcon.request' - assert dd_span.service == self._service - assert dd_span.resource == 'GET tests.contrib.falcon.app.resources.Resource200' - assert dd_span.get_tag(httpx.STATUS_CODE) == '200' - assert dd_span.get_tag(httpx.URL) == 'http://falconframework.org/200' - def test_falcon_request_hook(self): @config.falcon.hooks.on('request') def on_falcon_request(span, request, response): diff --git a/tests/contrib/flask/__init__.py b/tests/contrib/flask/__init__.py 
index 8c9a1524..beb75cd7 100644 --- a/tests/contrib/flask/__init__.py +++ b/tests/contrib/flask/__init__.py @@ -1,7 +1,7 @@ -from ddtrace import Pin -from ddtrace.contrib.flask import patch, unpatch +from oteltrace import Pin +from oteltrace.contrib.flask import patch, unpatch import flask -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt from ...base import BaseTracerTestCase diff --git a/tests/contrib/flask/test_blueprint.py b/tests/contrib/flask/test_blueprint.py index c0ed7bc2..56d9860c 100644 --- a/tests/contrib/flask/test_blueprint.py +++ b/tests/contrib/flask/test_blueprint.py @@ -1,7 +1,7 @@ import flask -from ddtrace import Pin -from ddtrace.contrib.flask import unpatch +from oteltrace import Pin +from oteltrace.contrib.flask import unpatch from . import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_flask_helpers.py b/tests/contrib/flask/test_flask_helpers.py index 976e7f16..02317c4e 100644 --- a/tests/contrib/flask/test_flask_helpers.py +++ b/tests/contrib/flask/test_flask_helpers.py @@ -1,8 +1,8 @@ import flask -from ddtrace import Pin -from ddtrace.contrib.flask import unpatch -from ddtrace.compat import StringIO +from oteltrace import Pin +from oteltrace.contrib.flask import unpatch +from oteltrace.compat import StringIO from . import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_hooks.py b/tests/contrib/flask/test_hooks.py index beda6c6d..a3a8db82 100644 --- a/tests/contrib/flask/test_hooks.py +++ b/tests/contrib/flask/test_hooks.py @@ -1,6 +1,6 @@ from flask import Blueprint -from ddtrace.ext import http +from oteltrace.ext import http from . import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_idempotency.py b/tests/contrib/flask/test_idempotency.py index b9c21ffb..315a31c7 100644 --- a/tests/contrib/flask/test_idempotency.py +++ b/tests/contrib/flask/test_idempotency.py @@ -2,10 +2,10 @@ import unittest import flask -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt -from ddtrace.contrib.flask import patch, unpatch -from ddtrace.contrib.flask.patch import _w, _u +from oteltrace.contrib.flask import patch, unpatch +from oteltrace.contrib.flask.patch import _w, _u class FlaskIdempotencyTestCase(unittest.TestCase): @@ -14,29 +14,29 @@ def tearDown(self): unpatch() def assert_is_patched(self): - self.assertTrue(flask._datadog_patch) + self.assertTrue(flask._opentelemetry_patch) self.assertTrue(isinstance(flask.render_template, wrapt.ObjectProxy)) def assert_is_not_patched(self): - self.assertFalse(flask._datadog_patch) + self.assertFalse(flask._opentelemetry_patch) self.assertFalse(isinstance(flask.render_template, wrapt.ObjectProxy)) - def test_datadog_patch(self): + def test_opentelemetry_patch(self): # If we have been patching/testing in other files, # at least make sure this is where we want it - if hasattr(flask, '_datadog_patch'): - self.assertFalse(flask._datadog_patch) + if hasattr(flask, '_opentelemetry_patch'): + self.assertFalse(flask._opentelemetry_patch) - # Patching sets `_datadog_patch` to `True` + # Patching sets `_opentelemetry_patch` to `True` patch() self.assert_is_patched() - # Unpatching sets `_datadog_patch` to `False` + # Unpatching sets `_opentelemetry_patch` to `False` unpatch() self.assert_is_not_patched() # DEV: Use `side_effect` so the original function still gets called - @mock.patch('ddtrace.contrib.flask._patch._w', side_effect=_w) + @mock.patch('oteltrace.contrib.flask._patch._w', side_effect=_w) def test_patch_idempotency(self, _w): # Ensure we didn't do any patching 
automatically _w.assert_not_called() @@ -56,8 +56,8 @@ def test_patch_idempotency(self, _w): self.assert_is_patched() # DEV: Use `side_effect` so the original function still gets called - @mock.patch('ddtrace.contrib.flask._patch._w', side_effect=_w) - @mock.patch('ddtrace.contrib.flask._patch._u', side_effect=_u) + @mock.patch('oteltrace.contrib.flask._patch._w', side_effect=_w) + @mock.patch('oteltrace.contrib.flask._patch._u', side_effect=_u) def test_unpatch_idempotency(self, _u, _w): # We need to patch in order to unpatch patch() diff --git a/tests/contrib/flask/test_middleware.py b/tests/contrib/flask/test_middleware.py index 783e8040..72cd479f 100644 --- a/tests/contrib/flask/test_middleware.py +++ b/tests/contrib/flask/test_middleware.py @@ -4,11 +4,10 @@ from unittest import TestCase -from ddtrace.contrib.flask import TraceMiddleware -from ddtrace.constants import SAMPLING_PRIORITY_KEY -from ddtrace.ext import http, errors +from oteltrace.contrib.flask import TraceMiddleware +from oteltrace.constants import SAMPLING_PRIORITY_KEY +from oteltrace.ext import http, errors -from tests.opentracer.utils import init_tracer from .web import create_app from ...test_tracer import get_dummy_tracer @@ -31,7 +30,7 @@ def setUp(self): self.app = self.flask_app.test_client() def test_double_instrumentation(self): - # ensure Flask is never instrumented twice when `ddtrace-run` + # ensure Flask is never instrumented twice when `oteltrace-run` # and `TraceMiddleware` are used together. `traced_app` MUST # be assigned otherwise it's not possible to reproduce the # problem (the test scope must keep a strong reference) @@ -43,7 +42,7 @@ def test_double_instrumentation(self): def test_double_instrumentation_config(self): # ensure Flask uses the last set configuration to be sure - # there are no breaking changes for who uses `ddtrace-run` + # there are no breaking changes for who uses `oteltrace-run` # with the `TraceMiddleware` TraceMiddleware( self.flask_app, @@ -350,37 +349,3 @@ def test_custom_span(self): assert s.error == 0 assert s.meta.get(http.STATUS_CODE) == '200' assert s.meta.get(http.METHOD) == 'GET' - - def test_success_200_ot(self): - """OpenTracing version of test_success_200.""" - ot_tracer = init_tracer('my_svc', self.tracer) - writer = self.tracer.writer - - with ot_tracer.start_active_span('ot_span'): - start = time.time() - rv = self.app.get('/') - end = time.time() - - # ensure request worked - assert rv.status_code == 200 - assert rv.data == b'hello' - - # ensure trace worked - assert not self.tracer.current_span(), self.tracer.current_span().pprint() - spans = writer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.resource == 'ot_span' - assert ot_span.service == 'my_svc' - - assert dd_span.resource == 'index' - assert dd_span.start >= start - assert dd_span.duration <= end - start - assert dd_span.error == 0 - assert dd_span.meta.get(http.STATUS_CODE) == '200' - assert dd_span.meta.get(http.METHOD) == 'GET' diff --git a/tests/contrib/flask/test_request.py b/tests/contrib/flask/test_request.py index aacfd6c4..c3b78886 100644 --- a/tests/contrib/flask/test_request.py +++ b/tests/contrib/flask/test_request.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- -from ddtrace.compat import PY2 -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.flask.patch import flask_version -from ddtrace.ext import http -from 
ddtrace.propagation.http import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID +from oteltrace.compat import PY2 +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.flask.patch import flask_version +from oteltrace.ext import http +from oteltrace.propagation.datadog import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID from flask import abort from . import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_signals.py b/tests/contrib/flask/test_signals.py index d2c12906..c5553a42 100644 --- a/tests/contrib/flask/test_signals.py +++ b/tests/contrib/flask/test_signals.py @@ -2,9 +2,9 @@ import flask -from ddtrace import Pin -from ddtrace.contrib.flask import unpatch -from ddtrace.contrib.flask.patch import flask_version +from oteltrace import Pin +from oteltrace.contrib.flask import unpatch +from oteltrace.contrib.flask.patch import flask_version from . import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_static.py b/tests/contrib/flask/test_static.py index 9841fcfd..e7b0f87e 100644 --- a/tests/contrib/flask/test_static.py +++ b/tests/contrib/flask/test_static.py @@ -1,4 +1,4 @@ -from ddtrace.ext import http +from oteltrace.ext import http from . import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_template.py b/tests/contrib/flask/test_template.py index fa878143..098c3b04 100644 --- a/tests/contrib/flask/test_template.py +++ b/tests/contrib/flask/test_template.py @@ -1,7 +1,7 @@ import flask -from ddtrace import Pin -from ddtrace.contrib.flask import unpatch +from oteltrace import Pin +from oteltrace.contrib.flask import unpatch from . import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_views.py b/tests/contrib/flask/test_views.py index 8b551af6..46fc3afe 100644 --- a/tests/contrib/flask/test_views.py +++ b/tests/contrib/flask/test_views.py @@ -1,7 +1,7 @@ from flask.views import MethodView, View -from ddtrace.compat import PY2 -from ddtrace.ext import http +from oteltrace.compat import PY2 +from oteltrace.ext import http from . 
import BaseFlaskTestCase diff --git a/tests/contrib/flask_autopatch/test_flask_autopatch.py b/tests/contrib/flask_autopatch/test_flask_autopatch.py index 23bca594..e132df10 100644 --- a/tests/contrib/flask_autopatch/test_flask_autopatch.py +++ b/tests/contrib/flask_autopatch/test_flask_autopatch.py @@ -3,9 +3,9 @@ import flask -from ddtrace.vendor import wrapt -from ddtrace.ext import http -from ddtrace import Pin +from oteltrace.vendor import wrapt +from oteltrace.ext import http +from oteltrace import Pin from ...test_tracer import get_dummy_tracer @@ -19,14 +19,14 @@ def setUp(self): def test_patched(self): """ - When using ddtrace-run + When using oteltrace-run Then the `flask` module is patched """ # DEV: We have great test coverage in tests.contrib.flask, - # we only need basic tests here to assert `ddtrace-run` patched thingsa + # we only need basic tests here to assert `oteltrace-run` patched thingsa # Assert module is marked as patched - self.assertTrue(flask._datadog_patch) + self.assertTrue(flask._opentelemetry_patch) # Assert our instance of flask.app.Flask is patched self.assertTrue(isinstance(self.app.add_url_rule, wrapt.ObjectProxy)) @@ -38,7 +38,7 @@ def test_patched(self): def test_request(self): """ - When using ddtrace-run + When using oteltrace-run When making a request to flask app We generate the expected spans """ diff --git a/tests/contrib/flask_cache/test.py b/tests/contrib/flask_cache/test.py index a41bf21e..f78aed3c 100644 --- a/tests/contrib/flask_cache/test.py +++ b/tests/contrib/flask_cache/test.py @@ -1,16 +1,15 @@ # -*- coding: utf-8 -*- # project -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.ext import net -from ddtrace.contrib.flask_cache import get_traced_cache -from ddtrace.contrib.flask_cache.tracers import TYPE, CACHE_BACKEND +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.ext import net +from oteltrace.contrib.flask_cache import get_traced_cache +from oteltrace.contrib.flask_cache.tracers import TYPE, CACHE_BACKEND # 3rd party from flask import Flask # testing -from tests.opentracer.utils import init_tracer from ..config import REDIS_CONFIG, MEMCACHED_CONFIG from ...base import BaseTracerTestCase from ...util import assert_dict_issuperset @@ -215,42 +214,6 @@ def test_default_span_tags_memcached(self): self.assertEqual(span.meta[net.TARGET_HOST], '127.0.0.1') self.assertEqual(span.meta[net.TARGET_PORT], self.TEST_MEMCACHED_PORT) - def test_simple_cache_get_ot(self): - """OpenTracing version of test_simple_cache_get.""" - ot_tracer = init_tracer('my_svc', self.tracer) - - # create the TracedCache instance for a Flask app - Cache = get_traced_cache(self.tracer, service=self.SERVICE) - app = Flask(__name__) - cache = Cache(app, config={'CACHE_TYPE': 'simple'}) - - with ot_tracer.start_active_span('ot_span'): - cache.get(u'á_complex_operation') - - spans = self.get_spans() - self.assertEqual(len(spans), 2) - ot_span, dd_span = spans - - # confirm the parenting - self.assertIsNone(ot_span.parent_id) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(ot_span.resource, 'ot_span') - self.assertEqual(ot_span.service, 'my_svc') - - self.assertEqual(dd_span.service, self.SERVICE) - self.assertEqual(dd_span.resource, 'get') - self.assertEqual(dd_span.name, 'flask_cache.cmd') - self.assertEqual(dd_span.span_type, 'cache') - self.assertEqual(dd_span.error, 0) - - expected_meta = { - 'flask_cache.key': u'á_complex_operation', - 'flask_cache.backend': 'simple', - } - - 
assert_dict_issuperset(dd_span.meta, expected_meta) - def test_analytics_default(self): self.cache.get(u'á_complex_operation') spans = self.get_spans() diff --git a/tests/contrib/flask_cache/test_utils.py b/tests/contrib/flask_cache/test_utils.py index db487d06..787bb4c1 100644 --- a/tests/contrib/flask_cache/test_utils.py +++ b/tests/contrib/flask_cache/test_utils.py @@ -1,9 +1,9 @@ import unittest # project -from ddtrace.tracer import Tracer -from ddtrace.contrib.flask_cache import get_traced_cache -from ddtrace.contrib.flask_cache.utils import _extract_conn_tags, _resource_from_cache_prefix +from oteltrace.tracer import Tracer +from oteltrace.contrib.flask_cache import get_traced_cache +from oteltrace.contrib.flask_cache.utils import _extract_conn_tags, _resource_from_cache_prefix # 3rd party from flask import Flask diff --git a/tests/contrib/flask_cache/test_wrapper_safety.py b/tests/contrib/flask_cache/test_wrapper_safety.py index d9e72417..ed5f413b 100644 --- a/tests/contrib/flask_cache/test_wrapper_safety.py +++ b/tests/contrib/flask_cache/test_wrapper_safety.py @@ -2,10 +2,10 @@ import unittest # project -from ddtrace.ext import net -from ddtrace.tracer import Tracer -from ddtrace.contrib.flask_cache import get_traced_cache -from ddtrace.contrib.flask_cache.tracers import CACHE_BACKEND +from oteltrace.ext import net +from oteltrace.tracer import Tracer +from oteltrace.contrib.flask_cache import get_traced_cache +from oteltrace.contrib.flask_cache.tracers import CACHE_BACKEND # 3rd party from flask import Flask diff --git a/tests/contrib/futures/test_propagation.py b/tests/contrib/futures/test_propagation.py index 530d7b76..b1abbdd5 100644 --- a/tests/contrib/futures/test_propagation.py +++ b/tests/contrib/futures/test_propagation.py @@ -1,9 +1,8 @@ import time import concurrent -from ddtrace.contrib.futures import patch, unpatch +from oteltrace.contrib.futures import patch, unpatch -from tests.opentracer.utils import init_tracer from ...base import BaseTracerTestCase @@ -327,30 +326,3 @@ def fn(): dict(name='executor.thread'), ), ) - - def test_propagation_ot(self): - """OpenTracing version of test_propagation.""" - # it must propagate the tracing context if available - ot_tracer = init_tracer('my_svc', self.tracer) - - def fn(): - # an active context must be available - self.assertTrue(self.tracer.context_provider.active() is not None) - with self.tracer.trace('executor.thread'): - return 42 - - with self.override_global_tracer(): - with ot_tracer.start_active_span('main.thread'): - with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: - future = executor.submit(fn) - result = future.result() - # assert the right result - self.assertEqual(result, 42) - - # the trace must be completed - self.assert_structure( - dict(name='main.thread'), - ( - dict(name='executor.thread'), - ), - ) diff --git a/tests/contrib/gevent/test_tracer.py b/tests/contrib/gevent/test_tracer.py index 3f2ac54a..51650283 100644 --- a/tests/contrib/gevent/test_tracer.py +++ b/tests/contrib/gevent/test_tracer.py @@ -1,15 +1,13 @@ import gevent import gevent.pool -import ddtrace +import oteltrace -from ddtrace.constants import SAMPLING_PRIORITY_KEY -from ddtrace.context import Context -from ddtrace.contrib.gevent import patch, unpatch -from ddtrace.ext.priority import USER_KEEP +from oteltrace.constants import SAMPLING_PRIORITY_KEY +from oteltrace.context import Context +from oteltrace.contrib.gevent import patch, unpatch +from oteltrace.ext.priority import USER_KEEP from unittest import TestCase 
-from opentracing.scope_managers.gevent import GeventScopeManager -from tests.opentracer.utils import init_tracer from tests.test_tracer import get_dummy_tracer from .utils import silence_errors @@ -23,8 +21,8 @@ class TestGeventTracer(TestCase): def setUp(self): # use a dummy tracer self.tracer = get_dummy_tracer() - self._original_tracer = ddtrace.tracer - ddtrace.tracer = self.tracer + self._original_tracer = oteltrace.tracer + oteltrace.tracer = self.tracer # trace gevent patch() @@ -32,21 +30,21 @@ def tearDown(self): # clean the active Context self.tracer.context_provider.activate(None) # restore the original tracer - ddtrace.tracer = self._original_tracer + oteltrace.tracer = self._original_tracer # untrace gevent unpatch() def test_main_greenlet(self): # the main greenlet must not be affected by the tracer main_greenlet = gevent.getcurrent() - ctx = getattr(main_greenlet, '__datadog_context', None) + ctx = getattr(main_greenlet, '__opentelemetry_context', None) assert ctx is None def test_main_greenlet_context(self): # the main greenlet must have a ``Context`` if called ctx_tracer = self.tracer.get_call_context() main_greenlet = gevent.getcurrent() - ctx_greenlet = getattr(main_greenlet, '__datadog_context', None) + ctx_greenlet = getattr(main_greenlet, '__opentelemetry_context', None) assert ctx_tracer is ctx_greenlet assert len(ctx_tracer._trace) == 0 @@ -58,7 +56,7 @@ def greenlet(): g = gevent.spawn(greenlet) g.join() ctx = g.value - stored_ctx = getattr(g, '__datadog_context', None) + stored_ctx = getattr(g, '__opentelemetry_context', None) assert stored_ctx is not None assert ctx == stored_ctx @@ -79,7 +77,7 @@ def greenlet(): g = gevent.spawn(greenlet) g.join() - ctx = getattr(g, '__datadog_context', None) + ctx = getattr(g, '__opentelemetry_context', None) assert ctx is None def test_spawn_greenlet(self): @@ -89,7 +87,7 @@ def greenlet(): g = gevent.spawn(greenlet) g.join() - ctx = getattr(g, '__datadog_context', None) + ctx = getattr(g, '__opentelemetry_context', None) assert ctx is not None assert 0 == len(ctx._trace) @@ -101,7 +99,7 @@ def greenlet(): g = gevent.spawn_later(0.01, greenlet) g.join() - ctx = getattr(g, '__datadog_context', None) + ctx = getattr(g, '__opentelemetry_context', None) assert ctx is not None assert 0 == len(ctx._trace) @@ -348,7 +346,7 @@ def _assert_spawn_multiple_greenlets(self, spans): """A helper to assert the parenting of a trace when greenlets are spawned within another greenlet. - This is meant to help maintain compatibility between the Datadog and + This is meant to help maintain compatibility between the OpenTelemetry and OpenTracing tracer implementations. 
Note that for gevent there is differing behaviour between the context @@ -385,8 +383,8 @@ def _assert_spawn_multiple_greenlets(self, spans): assert worker_2.name == 'greenlet.worker2' assert worker_2.resource == 'greenlet.worker2' - def test_trace_spawn_multiple_greenlets_multiple_traces_dd(self): - """Datadog version of the same test.""" + def test_trace_spawn_multiple_greenlets_multiple_traces_otel(self): + """OpenTelemetry version of the same test.""" def entrypoint(): with self.tracer.trace('greenlet.main') as span: span.resource = 'base' @@ -408,31 +406,3 @@ def green_2(): gevent.spawn(entrypoint).join() spans = self.tracer.writer.pop() self._assert_spawn_multiple_greenlets(spans) - - def test_trace_spawn_multiple_greenlets_multiple_traces_ot(self): - """OpenTracing version of the same test.""" - - ot_tracer = init_tracer('my_svc', self.tracer, scope_manager=GeventScopeManager()) - - def entrypoint(): - with ot_tracer.start_active_span('greenlet.main') as span: - span.resource = 'base' - jobs = [gevent.spawn(green_1), gevent.spawn(green_2)] - gevent.joinall(jobs) - - def green_1(): - with self.tracer.trace('greenlet.worker1') as span: - span.set_tag('worker_id', '1') - gevent.sleep(0.01) - - # note that replacing the `tracer.trace` call here with the - # OpenTracing equivalent will cause the checks to fail - def green_2(): - with ot_tracer.start_active_span('greenlet.worker2') as scope: - scope.span.set_tag('worker_id', '2') - gevent.sleep(0.01) - - gevent.spawn(entrypoint).join() - - spans = self.tracer.writer.pop() - self._assert_spawn_multiple_greenlets(spans) diff --git a/tests/contrib/grpc/test_grpc.py b/tests/contrib/grpc/test_grpc.py index 2554b15c..9ab39da6 100644 --- a/tests/contrib/grpc/test_grpc.py +++ b/tests/contrib/grpc/test_grpc.py @@ -1,10 +1,10 @@ import grpc from grpc.framework.foundation import logging_pool -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.grpc import patch, unpatch -from ddtrace.contrib.grpc import constants -from ddtrace.ext import errors -from ddtrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.grpc import patch, unpatch +from oteltrace.contrib.grpc import constants +from oteltrace.ext import errors +from oteltrace import Pin from ...base import BaseTracerTestCase diff --git a/tests/contrib/grpc/test_grpc_utils.py b/tests/contrib/grpc/test_grpc_utils.py index 5cd07888..514a276d 100644 --- a/tests/contrib/grpc/test_grpc_utils.py +++ b/tests/contrib/grpc/test_grpc_utils.py @@ -1,4 +1,4 @@ -from ddtrace.contrib.grpc.utils import parse_method_path +from oteltrace.contrib.grpc.utils import parse_method_path def test_parse_method_path_with_package(): diff --git a/tests/contrib/httplib/test_httplib.py b/tests/contrib/httplib/test_httplib.py index 9c75ab60..38400a95 100644 --- a/tests/contrib/httplib/test_httplib.py +++ b/tests/contrib/httplib/test_httplib.py @@ -3,18 +3,16 @@ import sys # Third party -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # Project -from ddtrace import config -from ddtrace.compat import httplib, PY2 -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.httplib import patch, unpatch -from ddtrace.contrib.httplib.patch import should_skip_request -from ddtrace.ext import http -from ddtrace.pin import Pin - -from tests.opentracer.utils import init_tracer +from oteltrace import config +from oteltrace.compat import httplib, PY2 +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from 
oteltrace.contrib.httplib import patch, unpatch +from oteltrace.contrib.httplib.patch import should_skip_request +from oteltrace.ext import http +from oteltrace.pin import Pin from ...base import BaseTracerTestCase from ...util import assert_dict_issuperset, override_global_tracer @@ -116,18 +114,6 @@ def test_should_skip_request(self): pin = Pin.get_from(request) self.assertTrue(should_skip_request(pin, request)) - # Enabled Pin and internal request - self.tracer.enabled = True - request = self.get_http_connection(self.tracer.writer.api.hostname, self.tracer.writer.api.port) - pin = Pin.get_from(request) - self.assertTrue(should_skip_request(pin, request)) - - # Disabled Pin and internal request - self.tracer.enabled = False - request = self.get_http_connection(self.tracer.writer.api.hostname, self.tracer.writer.api.port) - pin = Pin.get_from(request) - self.assertTrue(should_skip_request(pin, request)) - def test_httplib_request_get_request(self, query_string=''): """ When making a GET request via httplib.HTTPConnection.request @@ -368,7 +354,7 @@ def test_httplib_request_and_response_headers(self): # Enabled when configured with self.override_config('hhtplib', {}): - from ddtrace.settings import IntegrationConfig + from oteltrace.settings import IntegrationConfig integration_config = config.httplib # type: IntegrationConfig integration_config.http.trace_headers(['my-header', 'access-control-allow-origin']) conn = self.get_http_connection(SOCKET) @@ -476,41 +462,6 @@ def test_urllib_request_opener(self): self.assertEqual(span.get_tag('http.status_code'), '200') self.assertEqual(span.get_tag('http.url'), URL_200) - def test_httplib_request_get_request_ot(self): - """ OpenTracing version of test with same name. """ - ot_tracer = init_tracer('my_svc', self.tracer) - - with ot_tracer.start_active_span('ot_span'): - conn = self.get_http_connection(SOCKET) - with contextlib.closing(conn): - conn.request('GET', '/status/200') - resp = conn.getresponse() - self.assertEqual(self.to_str(resp.read()), '') - self.assertEqual(resp.status, 200) - - spans = self.tracer.writer.pop() - self.assertEqual(len(spans), 2) - ot_span, dd_span = spans - - # confirm the parenting - self.assertEqual(ot_span.parent_id, None) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(ot_span.service, 'my_svc') - self.assertEqual(ot_span.name, 'ot_span') - - self.assertEqual(dd_span.span_type, 'http') - self.assertEqual(dd_span.name, self.SPAN_NAME) - self.assertEqual(dd_span.error, 0) - assert_dict_issuperset( - dd_span.meta, - { - 'http.method': 'GET', - 'http.status_code': '200', - 'http.url': URL_200, - } - ) - def test_analytics_default(self): conn = self.get_http_connection(SOCKET) with contextlib.closing(conn): diff --git a/tests/contrib/jinja2/test_jinja2.py b/tests/contrib/jinja2/test_jinja2.py index f7d1d0ee..05741d52 100644 --- a/tests/contrib/jinja2/test_jinja2.py +++ b/tests/contrib/jinja2/test_jinja2.py @@ -4,8 +4,8 @@ # 3rd party import jinja2 -from ddtrace import Pin, config -from ddtrace.contrib.jinja2 import patch, unpatch +from oteltrace import Pin, config +from oteltrace.contrib.jinja2 import patch, unpatch from tests.test_tracer import get_dummy_tracer TEST_DIR = os.path.dirname(os.path.realpath(__file__)) diff --git a/tests/contrib/kombu/test.py b/tests/contrib/kombu/test.py index 94d8487a..a6a453bc 100644 --- a/tests/contrib/kombu/test.py +++ b/tests/contrib/kombu/test.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- import kombu -from ddtrace import Pin -from ddtrace.constants 
import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.kombu.patch import patch, unpatch -from ddtrace.contrib.kombu import utils -from ddtrace.ext import kombu as kombux +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.kombu.patch import patch, unpatch +from oteltrace.contrib.kombu import utils +from oteltrace.ext import kombu as kombux from ..config import RABBITMQ_CONFIG from ...base import BaseTracerTestCase diff --git a/tests/contrib/logging/test_logging.py b/tests/contrib/logging/test_logging.py index b236b2bc..a490a6c1 100644 --- a/tests/contrib/logging/test_logging.py +++ b/tests/contrib/logging/test_logging.py @@ -1,9 +1,9 @@ import logging -from ddtrace.helpers import get_correlation_ids -from ddtrace.compat import StringIO -from ddtrace.contrib.logging import patch, unpatch -from ddtrace.vendor import wrapt +from oteltrace.helpers import get_correlation_ids +from oteltrace.compat import StringIO +from oteltrace.contrib.logging import patch, unpatch +from oteltrace.vendor import wrapt from ...base import BaseTracerTestCase @@ -58,11 +58,11 @@ def func(): # with format string for trace info output, result = capture_function_log( func, - fmt='%(message)s - dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s', + fmt='%(message)s - otel.trace_id=%(otel.trace_id)s otel.span_id=%(otel.span_id)s', ) self.assertEqual( output, - 'Hello! - dd.trace_id={} dd.span_id={}'.format(*result), + 'Hello! - otel.trace_id={} otel.span_id={}'.format(*result), ) # without format string @@ -87,9 +87,9 @@ def func(): # with format string for trace info output, _ = capture_function_log( func, - fmt='%(message)s - dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s', + fmt='%(message)s - otel.trace_id=%(otel.trace_id)s otel.span_id=%(otel.span_id)s', ) self.assertEqual( output, - 'Hello! - dd.trace_id=0 dd.span_id=0', + 'Hello! 
- otel.trace_id=0 otel.span_id=0', ) diff --git a/tests/contrib/mako/test_mako.py b/tests/contrib/mako/test_mako.py index b663fbcb..88bee6ce 100644 --- a/tests/contrib/mako/test_mako.py +++ b/tests/contrib/mako/test_mako.py @@ -6,9 +6,9 @@ from mako.lookup import TemplateLookup from mako.runtime import Context -from ddtrace import Pin -from ddtrace.contrib.mako import patch, unpatch -from ddtrace.compat import StringIO, to_unicode +from oteltrace import Pin +from oteltrace.contrib.mako import patch, unpatch +from oteltrace.compat import StringIO, to_unicode from tests.test_tracer import get_dummy_tracer TEST_DIR = os.path.dirname(os.path.realpath(__file__)) diff --git a/tests/contrib/molten/test_molten.py b/tests/contrib/molten/test_molten.py index 47b87c4d..642ee03e 100644 --- a/tests/contrib/molten/test_molten.py +++ b/tests/contrib/molten/test_molten.py @@ -1,12 +1,12 @@ import molten from molten.testing import TestClient -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.ext import errors, http -from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID -from ddtrace.contrib.molten import patch, unpatch -from ddtrace.contrib.molten.patch import MOLTEN_VERSION +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.ext import errors, http +from oteltrace.propagation.datadog import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID +from oteltrace.contrib.molten import patch, unpatch +from oteltrace.contrib.molten.patch import MOLTEN_VERSION from ...base import BaseTracerTestCase diff --git a/tests/contrib/molten/test_molten_di.py b/tests/contrib/molten/test_molten_di.py index 238f1bcc..3888027a 100644 --- a/tests/contrib/molten/test_molten_di.py +++ b/tests/contrib/molten/test_molten_di.py @@ -7,8 +7,8 @@ import molten from molten import DependencyInjector -from ddtrace import Pin -from ddtrace.contrib.molten import patch, unpatch +from oteltrace import Pin +from oteltrace.contrib.molten import patch, unpatch from ...test_tracer import get_dummy_tracer diff --git a/tests/contrib/mongoengine/test.py b/tests/contrib/mongoengine/test.py index 11bc7bf4..9c528350 100644 --- a/tests/contrib/mongoengine/test.py +++ b/tests/contrib/mongoengine/test.py @@ -7,13 +7,12 @@ import pymongo # project -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.mongoengine.patch import patch, unpatch -from ddtrace.ext import mongo as mongox +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.mongoengine.patch import patch, unpatch +from oteltrace.ext import mongo as mongox # testing -from tests.opentracer.utils import init_tracer from ..config import MONGO_CONFIG from ...base import override_config from ...test_tracer import get_dummy_tracer @@ -129,33 +128,6 @@ def test_insert_update_delete_query(self): assert span.service == self.TEST_SERVICE _assert_timing(span, start, end) - def test_opentracing(self): - """Ensure the opentracer works with mongoengine.""" - tracer = self.get_tracer_and_connect() - ot_tracer = init_tracer('my_svc', tracer) - - with ot_tracer.start_active_span('ot_span'): - start = time.time() - Artist.drop_collection() - end = time.time() - - # ensure we get a drop collection span - spans = tracer.writer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert 
ot_span.name == 'ot_span' - assert ot_span.service == 'my_svc' - - assert dd_span.resource == 'drop artist' - assert dd_span.span_type == 'mongodb' - assert dd_span.service == self.TEST_SERVICE - _assert_timing(dd_span, start, end) - def test_analytics_default(self): tracer = self.get_tracer_and_connect() Artist.drop_collection() diff --git a/tests/contrib/mongoengine/test_backwards.py b/tests/contrib/mongoengine/test_backwards.py index ff396ec3..1a289246 100644 --- a/tests/contrib/mongoengine/test_backwards.py +++ b/tests/contrib/mongoengine/test_backwards.py @@ -14,7 +14,7 @@ class Singer(mongoengine.Document): def test_less_than_v04(): # interface from < v0.4 - from ddtrace.contrib.mongoengine import trace_mongoengine + from oteltrace.contrib.mongoengine import trace_mongoengine tracer = get_dummy_tracer() connect = trace_mongoengine(tracer, service='my-mongo-db', patch=False) diff --git a/tests/contrib/mysql/test_backwards_compatibility.py b/tests/contrib/mysql/test_backwards_compatibility.py index 302cb0fd..409b4161 100644 --- a/tests/contrib/mysql/test_backwards_compatibility.py +++ b/tests/contrib/mysql/test_backwards_compatibility.py @@ -1,5 +1,5 @@ -from ddtrace.contrib.mysql import get_traced_mysql_connection +from oteltrace.contrib.mysql import get_traced_mysql_connection from tests.test_tracer import get_dummy_tracer from tests.contrib import config diff --git a/tests/contrib/mysql/test_mysql.py b/tests/contrib/mysql/test_mysql.py index 34588f90..9423a48f 100644 --- a/tests/contrib/mysql/test_mysql.py +++ b/tests/contrib/mysql/test_mysql.py @@ -2,13 +2,12 @@ import mysql # project -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.mysql.patch import patch, unpatch +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.mysql.patch import patch, unpatch # tests from tests.contrib.config import MYSQL_CONFIG -from tests.opentracer.utils import init_tracer from ...base import BaseTracerTestCase from ...util import assert_dict_issuperset @@ -221,81 +220,6 @@ def test_query_proc(self): }) assert span.get_tag('sql.query') is None - def test_simple_query_ot(self): - """OpenTracing version of test_simple_query.""" - conn, tracer = self._get_conn_tracer() - writer = tracer.writer - - ot_tracer = init_tracer('mysql_svc', tracer) - - with ot_tracer.start_active_span('mysql_op'): - cursor = conn.cursor() - cursor.execute('SELECT 1') - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = writer.pop() - assert len(spans) == 2 - - ot_span, dd_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == 'mysql_svc' - assert ot_span.name == 'mysql_op' - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.name == 'mysql.query' - assert dd_span.span_type == 'sql' - assert dd_span.error == 0 - assert_dict_issuperset(dd_span.meta, { - 'out.host': u'127.0.0.1', - 'out.port': u'3306', - 'db.name': u'test', - 'db.user': u'test', - }) - - def test_simple_query_ot_fetchall(self): - """OpenTracing version of test_simple_query.""" - with self.override_config('dbapi2', dict(trace_fetch_methods=True)): - conn, tracer = self._get_conn_tracer() - writer = tracer.writer - - ot_tracer = init_tracer('mysql_svc', tracer) - - with ot_tracer.start_active_span('mysql_op'): - cursor = conn.cursor() - cursor.execute('SELECT 1') - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = writer.pop() - 
assert len(spans) == 3 - - ot_span, dd_span, fetch_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == 'mysql_svc' - assert ot_span.name == 'mysql_op' - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.name == 'mysql.query' - assert dd_span.span_type == 'sql' - assert dd_span.error == 0 - assert_dict_issuperset(dd_span.meta, { - 'out.host': u'127.0.0.1', - 'out.port': u'3306', - 'db.name': u'test', - 'db.user': u'test', - }) - - assert fetch_span.name == 'mysql.query.fetchall' - def test_commit(self): conn, tracer = self._get_conn_tracer() writer = tracer.writer diff --git a/tests/contrib/mysqldb/test_mysql.py b/tests/contrib/mysqldb/test_mysql.py index 590e6b92..08f60365 100644 --- a/tests/contrib/mysqldb/test_mysql.py +++ b/tests/contrib/mysqldb/test_mysql.py @@ -1,10 +1,9 @@ import MySQLdb -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.mysqldb.patch import patch, unpatch +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.mysqldb.patch import patch, unpatch -from tests.opentracer.utils import init_tracer from ..config import MYSQL_CONFIG from ...base import BaseTracerTestCase from ...util import assert_dict_issuperset @@ -275,75 +274,6 @@ def test_query_proc(self): }) assert span.get_tag('sql.query') is None - def test_simple_query_ot(self): - """OpenTracing version of test_simple_query.""" - conn, tracer = self._get_conn_tracer() - writer = tracer.writer - ot_tracer = init_tracer('mysql_svc', tracer) - with ot_tracer.start_active_span('mysql_op'): - cursor = conn.cursor() - cursor.execute('SELECT 1') - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = writer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == 'mysql_svc' - assert ot_span.name == 'mysql_op' - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.name == 'mysql.query' - assert dd_span.span_type == 'sql' - assert dd_span.error == 0 - assert_dict_issuperset(dd_span.meta, { - 'out.host': u'127.0.0.1', - 'out.port': u'3306', - 'db.name': u'test', - 'db.user': u'test', - }) - - def test_simple_query_ot_fetchall(self): - """OpenTracing version of test_simple_query.""" - with self.override_config('dbapi2', dict(trace_fetch_methods=True)): - conn, tracer = self._get_conn_tracer() - writer = tracer.writer - ot_tracer = init_tracer('mysql_svc', tracer) - with ot_tracer.start_active_span('mysql_op'): - cursor = conn.cursor() - cursor.execute('SELECT 1') - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = writer.pop() - assert len(spans) == 3 - ot_span, dd_span, fetch_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == 'mysql_svc' - assert ot_span.name == 'mysql_op' - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.name == 'mysql.query' - assert dd_span.span_type == 'sql' - assert dd_span.error == 0 - assert_dict_issuperset(dd_span.meta, { - 'out.host': u'127.0.0.1', - 'out.port': u'3306', - 'db.name': u'test', - 'db.user': u'test', - }) - - assert fetch_span.name == 'mysql.query.fetchall' - def test_commit(self): conn, tracer = self._get_conn_tracer() writer = tracer.writer diff --git a/tests/contrib/patch.py b/tests/contrib/patch.py 
index 5ae648f2..917834d3 100644 --- a/tests/contrib/patch.py +++ b/tests/contrib/patch.py @@ -3,7 +3,7 @@ import sys import unittest -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt from tests.subprocesstest import SubprocessTestCase, run_in_subprocess @@ -102,7 +102,7 @@ class Base(SubprocessTestCase, PatchMixin): A simple implementation inheriting this TestCase looks like:: - from ddtrace.contrib.redis import unpatch + from oteltrace.contrib.redis import unpatch class RedisPatchTestCase(PatchTestCase.Base): __integration_name__ = 'redis' @@ -146,7 +146,7 @@ def unpatch(): super(PatchTestCase.Base, self).__init__(*args, **kwargs) def patch(self, *args, **kwargs): - from ddtrace import patch + from oteltrace import patch return patch(*args, **kwargs) def _gen_test_attrs(self, ops): @@ -270,7 +270,7 @@ def assert_not_module_double_patched(self, redis): def test_import_patch(self): """ The integration should test that each class, method or function that - is to be patched is in fact done so when ddtrace.patch() is called + is to be patched is in fact done so when oteltrace.patch() is called before the module is imported. For example: @@ -278,7 +278,7 @@ def test_import_patch(self): an appropriate ``test_patch_import`` would be:: import redis - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) self.assert_module_patched(redis) """ self.assert_not_module_imported(self.__module_name__) @@ -291,13 +291,13 @@ def test_import_patch(self): def test_patch_import(self): """ The integration should test that each class, method or function that - is to be patched is in fact done so when ddtrace.patch() is called + is to be patched is in fact done so when oteltrace.patch() is called after the module is imported. an appropriate ``test_patch_import`` would be:: import redis - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) self.assert_module_patched(redis) """ self.assert_not_module_imported(self.__module_name__) @@ -315,9 +315,9 @@ def test_import_patch_patch(self): An example for what this might look like for the redis integration:: import redis - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) self.assert_module_patched(redis) - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) self.assert_not_module_double_patched(redis) """ self.assert_not_module_imported(self.__module_name__) @@ -336,10 +336,10 @@ def test_patch_import_patch(self): An example for what this might look like for the redis integration:: - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) import redis self.assert_module_patched(redis) - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) self.assert_not_module_double_patched(redis) """ self.assert_not_module_imported(self.__module_name__) @@ -358,8 +358,8 @@ def test_patch_patch_import(self): An example for what this might look like for the redis integration:: - ddtrace.patch(redis=True) - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) + oteltrace.patch(redis=True) import redis self.assert_not_double_wrapped(redis.StrictRedis.execute_command) """ @@ -380,11 +380,11 @@ def test_import_patch_unpatch_patch(self): For example:: import redis - from ddtrace.contrib.redis import unpatch + from oteltrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) unpatch() - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) self.assert_module_patched(redis) """ self.assert_not_module_imported(self.__module_name__) @@ -405,12 +405,12 @@ def test_patch_import_unpatch_patch(self): For example:: - 
from ddtrace.contrib.redis import unpatch + from oteltrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) import redis unpatch() - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) self.assert_module_patched(redis) """ self.assert_not_module_imported(self.__module_name__) @@ -431,12 +431,12 @@ def test_patch_unpatch_import_patch(self): For example:: - from ddtrace.contrib.redis import unpatch + from oteltrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) import redis unpatch() - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) self.assert_module_patched(redis) """ self.assert_not_module_imported(self.__module_name__) @@ -456,11 +456,11 @@ def test_patch_unpatch_patch_import(self): For example:: - from ddtrace.contrib.redis import unpatch + from oteltrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) unpatch() - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) import redis self.assert_module_patched(redis) """ @@ -478,9 +478,9 @@ def test_unpatch_patch_import(self): For example:: - from ddtrace.contrib.redis import unpatch + from oteltrace.contrib.redis import unpatch unpatch() - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) import redis self.assert_not_module_patched(redis) """ @@ -499,8 +499,8 @@ def test_patch_unpatch_import(self): For example:: - ddtrace.patch(redis=True) - from ddtrace.contrib.redis import unpatch + oteltrace.patch(redis=True) + from oteltrace.contrib.redis import unpatch unpatch() import redis self.assert_not_module_patched(redis) @@ -520,8 +520,8 @@ def test_import_unpatch_patch(self): For example:: import redis - from ddtrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + from oteltrace.contrib.redis import unpatch + oteltrace.patch(redis=True) unpatch() self.assert_not_module_patched(redis) """ @@ -541,8 +541,8 @@ def test_import_patch_unpatch(self): For example:: import redis - from ddtrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + from oteltrace.contrib.redis import unpatch + oteltrace.patch(redis=True) unpatch() self.assert_not_module_patched(redis) """ @@ -562,8 +562,8 @@ def test_patch_import_unpatch(self): For example:: - from ddtrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + from oteltrace.contrib.redis import unpatch + oteltrace.patch(redis=True) import redis unpatch() self.assert_not_module_patched(redis) @@ -583,9 +583,9 @@ def test_import_patch_unpatch_unpatch(self): For example:: import redis - from ddtrace.contrib.redis import unpatch + from oteltrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) self.assert_module_patched(redis) unpatch() self.assert_not_module_patched(redis) @@ -608,9 +608,9 @@ def test_patch_unpatch_import_unpatch(self): For example:: - from ddtrace.contrib.redis import unpatch + from oteltrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) unpatch() import redis self.assert_not_module_patched(redis) @@ -632,9 +632,9 @@ def test_patch_unpatch_unpatch_import(self): For example:: - from ddtrace.contrib.redis import unpatch + from oteltrace.contrib.redis import unpatch - ddtrace.patch(redis=True) + oteltrace.patch(redis=True) unpatch() unpatch() import redis diff --git a/tests/contrib/psycopg/test_psycopg.py b/tests/contrib/psycopg/test_psycopg.py index a8e7e699..d1b41c6a 100644 --- a/tests/contrib/psycopg/test_psycopg.py +++ 
b/tests/contrib/psycopg/test_psycopg.py @@ -9,13 +9,12 @@ from unittest import skipIf # project -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.psycopg import connection_factory -from ddtrace.contrib.psycopg.patch import patch, unpatch, PSYCOPG2_VERSION -from ddtrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.psycopg import connection_factory +from oteltrace.contrib.psycopg.patch import patch, unpatch, PSYCOPG2_VERSION +from oteltrace import Pin # testing -from tests.opentracer.utils import init_tracer from tests.contrib.config import POSTGRES_CONFIG from ...base import BaseTracerTestCase from ...utils.tracer import DummyTracer @@ -134,51 +133,10 @@ def assert_conn_is_traced(self, db, service): self.assertIsNone(root.get_tag('sql.query')) self.reset() - def test_opentracing_propagation(self): - # ensure OpenTracing plays well with our integration - query = """SELECT 'tracing'""" - - db = self._get_conn() - ot_tracer = init_tracer('psycopg-svc', self.tracer) - - with ot_tracer.start_active_span('db.access'): - cursor = db.cursor() - cursor.execute(query) - rows = cursor.fetchall() - - self.assertEquals(rows, [('tracing',)]) - - self.assert_structure( - dict(name='db.access', service='psycopg-svc'), - ( - dict(name='postgres.query', resource=query, service='postgres', error=0, span_type='sql'), - ), - ) - self.reset() - - with self.override_config('dbapi2', dict(trace_fetch_methods=True)): - db = self._get_conn() - ot_tracer = init_tracer('psycopg-svc', self.tracer) - - with ot_tracer.start_active_span('db.access'): - cursor = db.cursor() - cursor.execute(query) - rows = cursor.fetchall() - - self.assertEquals(rows, [('tracing',)]) - - self.assert_structure( - dict(name='db.access', service='psycopg-svc'), - ( - dict(name='postgres.query', resource=query, service='postgres', error=0, span_type='sql'), - dict(name='postgres.query.fetchall', resource=query, service='postgres', error=0, span_type='sql'), - ), - ) - @skipIf(PSYCOPG2_VERSION < (2, 5), 'context manager not available in psycopg2==2.4') def test_cursor_ctx_manager(self): # ensure cursors work with context managers - # https://github.com/DataDog/dd-trace-py/issues/228 + # https://github.com/opentelemetry/otel-trace-py/issues/228 conn = self._get_conn() t = type(conn.cursor()) with conn.cursor() as cur: @@ -231,7 +189,7 @@ def test_manual_wrap_extension_adapt(self): @skipIf(PSYCOPG2_VERSION < (2, 7), 'quote_ident not available in psycopg2<2.7') def test_manual_wrap_extension_quote_ident(self): - from ddtrace import patch_all + from oteltrace import patch_all patch_all() from psycopg2.extensions import quote_ident @@ -331,7 +289,7 @@ def test_backwards_compatibilty_v3(): @skipIf(PSYCOPG2_VERSION < (2, 7), 'quote_ident not available in psycopg2<2.7') def test_manual_wrap_extension_quote_ident_standalone(): - from ddtrace import patch_all + from oteltrace import patch_all patch_all() from psycopg2.extensions import quote_ident diff --git a/tests/contrib/pylibmc/test.py b/tests/contrib/pylibmc/test.py index 3d0759cd..023ce5c7 100644 --- a/tests/contrib/pylibmc/test.py +++ b/tests/contrib/pylibmc/test.py @@ -6,14 +6,13 @@ import pylibmc # project -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.pylibmc import TracedClient -from ddtrace.contrib.pylibmc.patch import patch, unpatch -from ddtrace.ext import memcached +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from 
oteltrace.contrib.pylibmc import TracedClient +from oteltrace.contrib.pylibmc.patch import patch, unpatch +from oteltrace.ext import memcached # testing -from ...opentracer.utils import init_tracer from ...contrib.config import MEMCACHED_CONFIG as cfg from ...base import BaseTracerTestCase @@ -79,33 +78,6 @@ def test_incr_decr(self): resources = sorted(s.resource for s in spans) assert expected_resources == resources - def test_incr_decr_ot(self): - """OpenTracing version of test_incr_decr.""" - client, tracer = self.get_client() - ot_tracer = init_tracer('memcached', tracer) - - start = time.time() - with ot_tracer.start_active_span('mc_ops'): - client.set('a', 1) - client.incr('a', 2) - client.decr('a', 1) - v = client.get('a') - assert v == 2 - end = time.time() - - # verify spans - spans = tracer.writer.pop() - ot_span = spans[0] - - assert ot_span.name == 'mc_ops' - - for s in spans[1:]: - assert s.parent_id == ot_span.span_id - self._verify_cache_span(s, start, end) - expected_resources = sorted(['get', 'set', 'incr', 'decr']) - resources = sorted(s.resource for s in spans[1:]) - assert expected_resources == resources - def test_clone(self): # ensure cloned connections are traced as well. client, tracer = self.get_client() diff --git a/tests/contrib/pylons/test_pylons.py b/tests/contrib/pylons/test_pylons.py index 1410ea28..0c1ab562 100644 --- a/tests/contrib/pylons/test_pylons.py +++ b/tests/contrib/pylons/test_pylons.py @@ -5,12 +5,11 @@ from paste.deploy import loadapp import pytest -from ddtrace import config -from ddtrace.ext import http, errors -from ddtrace.constants import SAMPLING_PRIORITY_KEY, ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.pylons import PylonsTraceMiddleware +from oteltrace import config +from oteltrace.ext import http, errors +from oteltrace.constants import SAMPLING_PRIORITY_KEY, ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.pylons import PylonsTraceMiddleware -from tests.opentracer.utils import init_tracer from ...base import BaseTracerTestCase @@ -400,29 +399,3 @@ def test_distributed_tracing_disabled(self): assert span.trace_id != 100 assert span.parent_id != 42 assert span.get_metric(SAMPLING_PRIORITY_KEY) != 2 - - def test_success_200_ot(self): - """OpenTracing version of test_success_200.""" - ot_tracer = init_tracer('pylons_svc', self.tracer) - - with ot_tracer.start_active_span('pylons_get'): - res = self.app.get(url_for(controller='root', action='index')) - assert res.status == 200 - - spans = self.tracer.writer.pop() - assert spans, spans - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == 'pylons_get' - assert ot_span.service == 'pylons_svc' - - assert dd_span.service == 'web' - assert dd_span.resource == 'root.index' - assert dd_span.meta.get(http.STATUS_CODE) == '200' - assert dd_span.meta.get(http.URL) == 'http://localhost:80/' - assert dd_span.error == 0 diff --git a/tests/contrib/pymemcache/autopatch/test.py b/tests/contrib/pymemcache/autopatch/test.py index 68f55f30..a2f09cd5 100644 --- a/tests/contrib/pymemcache/autopatch/test.py +++ b/tests/contrib/pymemcache/autopatch/test.py @@ -1,18 +1,18 @@ import pymemcache import unittest -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt class AutoPatchTestCase(unittest.TestCase): - """Test ensuring that ddtrace-run patches pymemcache. + """Test ensuring that oteltrace-run patches pymemcache. 
This ensures that things like the patch functions are properly exported from the module and used to patch the library. - Note: you may get cryptic errors due to ddtrace-run failing, such as + Note: you may get cryptic errors due to oteltrace-run failing, such as Traceback (most recent call last): - File ".../dev/dd-trace-py/tests/contrib/pymemcache/test_autopatch.py", line 8, in test_patch + File ".../dev/otel-trace-py/tests/contrib/pymemcache/test_autopatch.py", line 8, in test_patch assert issubclass(pymemcache.client.base.Client, wrapt.ObjectProxy) AttributeError: 'module' object has no attribute 'client' diff --git a/tests/contrib/pymemcache/test_client.py b/tests/contrib/pymemcache/test_client.py index 72467e47..3d14e88f 100644 --- a/tests/contrib/pymemcache/test_client.py +++ b/tests/contrib/pymemcache/test_client.py @@ -9,11 +9,11 @@ ) import pytest import unittest -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # project -from ddtrace import Pin -from ddtrace.contrib.pymemcache.patch import patch, unpatch +from oteltrace import Pin +from oteltrace.contrib.pymemcache.patch import patch, unpatch from .utils import MockSocket, _str from .test_client_mixin import PymemcacheClientTestCaseMixin, TEST_HOST, TEST_PORT diff --git a/tests/contrib/pymemcache/test_client_mixin.py b/tests/contrib/pymemcache/test_client_mixin.py index 4205da24..bf718a54 100644 --- a/tests/contrib/pymemcache/test_client_mixin.py +++ b/tests/contrib/pymemcache/test_client_mixin.py @@ -3,11 +3,11 @@ import pymemcache # project -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.pymemcache.patch import patch, unpatch -from ddtrace.ext import memcached as memcachedx -from ddtrace.ext import net +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.pymemcache.patch import patch, unpatch +from oteltrace.ext import memcached as memcachedx +from oteltrace.ext import net from .utils import MockSocket from tests.test_tracer import get_dummy_tracer diff --git a/tests/contrib/pymongo/test.py b/tests/contrib/pymongo/test.py index a16f14b6..3c7a59a6 100644 --- a/tests/contrib/pymongo/test.py +++ b/tests/contrib/pymongo/test.py @@ -6,14 +6,13 @@ import pymongo # project -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.ext import mongo as mongox -from ddtrace.contrib.pymongo.client import trace_mongo_client, normalize_filter -from ddtrace.contrib.pymongo.patch import patch, unpatch +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.ext import mongo as mongox +from oteltrace.contrib.pymongo.client import trace_mongo_client, normalize_filter +from oteltrace.contrib.pymongo.patch import patch, unpatch # testing -from tests.opentracer.utils import init_tracer from ..config import MONGO_CONFIG from ...base import override_config from ...test_tracer import get_dummy_tracer @@ -249,59 +248,6 @@ def test_insert_find(self): # confirm query tag find with query criteria on name assert spans[-1].get_tag('mongodb.query') == '{\'name\': \'?\'}' - def test_update_ot(self): - """OpenTracing version of test_update.""" - tracer, client = self.get_tracer_and_client() - ot_tracer = init_tracer('mongo_svc', tracer) - - writer = tracer.writer - with ot_tracer.start_active_span('mongo_op'): - db = client['testdb'] - db.drop_collection('songs') - input_songs = [ - {'name': 'Powderfinger', 'artist': 'Neil'}, - {'name': 'Harvest', 
'artist': 'Neil'}, - {'name': 'Suzanne', 'artist': 'Leonard'}, - {'name': 'Partisan', 'artist': 'Leonard'}, - ] - db.songs.insert_many(input_songs) - result = db.songs.update_many( - {'artist': 'Neil'}, - {'$set': {'artist': 'Shakey'}}, - ) - - assert result.matched_count == 2 - assert result.modified_count == 2 - - # ensure all is traced. - spans = writer.pop() - assert spans, spans - assert len(spans) == 4 - - ot_span = spans[0] - assert ot_span.parent_id is None - assert ot_span.name == 'mongo_op' - assert ot_span.service == 'mongo_svc' - - for span in spans[1:]: - # ensure the parenting - assert span.parent_id == ot_span.span_id - # ensure all the of the common metadata is set - assert span.service == self.TEST_SERVICE - assert span.span_type == 'mongodb' - assert span.meta.get('mongodb.collection') == 'songs' - assert span.meta.get('mongodb.db') == 'testdb' - assert span.meta.get('out.host') - assert span.meta.get('out.port') - - expected_resources = set([ - 'drop songs', - 'update songs {"artist": "?"}', - 'insert songs', - ]) - - assert expected_resources == {s.resource for s in spans[1:]} - def test_analytics_default(self): tracer, client = self.get_tracer_and_client() db = client['testdb'] diff --git a/tests/contrib/pymongo/test_spec.py b/tests/contrib/pymongo/test_spec.py index 0709a64b..f3ec3378 100644 --- a/tests/contrib/pymongo/test_spec.py +++ b/tests/contrib/pymongo/test_spec.py @@ -4,7 +4,7 @@ from bson.son import SON -from ddtrace.contrib.pymongo.parse import parse_spec +from oteltrace.contrib.pymongo.parse import parse_spec def test_empty(): diff --git a/tests/contrib/pymysql/test_backwards_compatibility.py b/tests/contrib/pymysql/test_backwards_compatibility.py index 46cc4cc2..62f6afe5 100644 --- a/tests/contrib/pymysql/test_backwards_compatibility.py +++ b/tests/contrib/pymysql/test_backwards_compatibility.py @@ -1,4 +1,4 @@ -from ddtrace.contrib.pymysql import get_traced_pymysql_connection +from oteltrace.contrib.pymysql import get_traced_pymysql_connection from tests.test_tracer import get_dummy_tracer from tests.contrib import config diff --git a/tests/contrib/pymysql/test_pymysql.py b/tests/contrib/pymysql/test_pymysql.py index 57637a4c..58b33c93 100644 --- a/tests/contrib/pymysql/test_pymysql.py +++ b/tests/contrib/pymysql/test_pymysql.py @@ -2,14 +2,13 @@ import pymysql # project -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.compat import PY2 -from ddtrace.compat import stringify -from ddtrace.contrib.pymysql.patch import patch, unpatch +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.compat import PY2 +from oteltrace.compat import stringify +from oteltrace.contrib.pymysql.patch import patch, unpatch # testing -from tests.opentracer.utils import init_tracer from ...base import BaseTracerTestCase from ...util import assert_dict_issuperset from ...contrib.config import MYSQL_CONFIG @@ -234,69 +233,6 @@ def test_query_proc(self): meta.update(self.DB_INFO) assert_dict_issuperset(span.meta, meta) - def test_simple_query_ot(self): - """OpenTracing version of test_simple_query.""" - conn, tracer = self._get_conn_tracer() - writer = tracer.writer - ot_tracer = init_tracer('mysql_svc', tracer) - with ot_tracer.start_active_span('mysql_op'): - cursor = conn.cursor() - cursor.execute('SELECT 1') - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = writer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm parenting - assert ot_span.parent_id is 
None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == 'mysql_svc' - assert ot_span.name == 'mysql_op' - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.name == 'pymysql.query' - assert dd_span.span_type == 'sql' - assert dd_span.error == 0 - meta = {} - meta.update(self.DB_INFO) - assert_dict_issuperset(dd_span.meta, meta) - - def test_simple_query_ot_fetchall(self): - """OpenTracing version of test_simple_query.""" - with self.override_config('dbapi2', dict(trace_fetch_methods=True)): - conn, tracer = self._get_conn_tracer() - writer = tracer.writer - ot_tracer = init_tracer('mysql_svc', tracer) - with ot_tracer.start_active_span('mysql_op'): - cursor = conn.cursor() - cursor.execute('SELECT 1') - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = writer.pop() - assert len(spans) == 3 - ot_span, dd_span, fetch_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == 'mysql_svc' - assert ot_span.name == 'mysql_op' - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.name == 'pymysql.query' - assert dd_span.span_type == 'sql' - assert dd_span.error == 0 - meta = {} - meta.update(self.DB_INFO) - assert_dict_issuperset(dd_span.meta, meta) - - assert fetch_span.name == 'pymysql.query.fetchall' - def test_commit(self): conn, tracer = self._get_conn_tracer() writer = tracer.writer diff --git a/tests/contrib/pyramid/app/web.py b/tests/contrib/pyramid/app/web.py index 849e57c6..6b9e1655 100644 --- a/tests/contrib/pyramid/app/web.py +++ b/tests/contrib/pyramid/app/web.py @@ -1,4 +1,4 @@ -from ddtrace.contrib.pyramid import trace_pyramid +from oteltrace.contrib.pyramid import trace_pyramid from pyramid.response import Response from pyramid.config import Configurator diff --git a/tests/contrib/pyramid/test_pyramid.py b/tests/contrib/pyramid/test_pyramid.py index b2429d8a..0739d903 100644 --- a/tests/contrib/pyramid/test_pyramid.py +++ b/tests/contrib/pyramid/test_pyramid.py @@ -1,4 +1,4 @@ -from ddtrace.constants import SAMPLING_PRIORITY_KEY, ORIGIN_KEY +from oteltrace.constants import SAMPLING_PRIORITY_KEY, ORIGIN_KEY from .utils import PyramidTestCase, PyramidBase @@ -51,7 +51,7 @@ class TestPyramidDistributedTracingDisabled(PyramidBase): def get_settings(self): return { - 'datadog_distributed_tracing': False, + 'opentelemetry_distributed_tracing': False, } def test_distributed_tracing_disabled(self): diff --git a/tests/contrib/pyramid/test_pyramid_autopatch.py b/tests/contrib/pyramid/test_pyramid_autopatch.py index 73c2a5f0..be1a0df6 100644 --- a/tests/contrib/pyramid/test_pyramid_autopatch.py +++ b/tests/contrib/pyramid/test_pyramid_autopatch.py @@ -45,6 +45,6 @@ def _include_me(config): def test_config_include(): """ This test makes sure that relative imports still work when the - application is run with ddtrace-run """ + application is run with oteltrace-run """ config = Configurator() config.include('tests.contrib.pyramid._include_me') diff --git a/tests/contrib/pyramid/utils.py b/tests/contrib/pyramid/utils.py index 9dd156bc..a83a9157 100644 --- a/tests/contrib/pyramid/utils.py +++ b/tests/contrib/pyramid/utils.py @@ -4,15 +4,14 @@ import pytest import webtest -from ddtrace import compat -from ddtrace import config -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.pyramid.patch import insert_tween_if_needed -from ddtrace.ext import http +from oteltrace import compat +from oteltrace import config +from 
oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.pyramid.patch import insert_tween_if_needed +from oteltrace.ext import http from .app import create_app -from ...opentracer.utils import init_tracer from ...base import BaseTracerTestCase @@ -26,7 +25,7 @@ def create_app(self, settings=None): # get default settings or use what is provided settings = settings or self.get_settings() # always set the dummy tracer as a default tracer - settings.update({'datadog_tracer': self.tracer}) + settings.update({'opentelemetry_tracer': self.tracer}) app, renderer = create_app(settings, self.instrument) self.app = webtest.TestApp(app) @@ -45,7 +44,7 @@ class PyramidTestCase(PyramidBase): def get_settings(self): return { - 'datadog_trace_service': 'foobar', + 'opentelemetry_trace_service': 'foobar', } def test_200(self, query_string=''): @@ -106,7 +105,7 @@ def test_analytics_global_on_integration_on(self): We expect the root span to have the appropriate tag """ with self.override_global_config(dict(analytics_enabled=True)): - self.override_settings(dict(datadog_analytics_enabled=True, datadog_analytics_sample_rate=0.5)) + self.override_settings(dict(opentelemetry_analytics_enabled=True, opentelemetry_analytics_sample_rate=0.5)) res = self.app.get('/', status=200) assert b'idx' in res.body @@ -134,7 +133,7 @@ def test_analytics_global_off_integration_on(self): We expect the root span to have the appropriate tag """ with self.override_global_config(dict(analytics_enabled=False)): - self.override_settings(dict(datadog_analytics_enabled=True, datadog_analytics_sample_rate=0.5)) + self.override_settings(dict(opentelemetry_analytics_enabled=True, opentelemetry_analytics_sample_rate=0.5)) res = self.app.get('/', status=200) assert b'idx' in res.body @@ -288,9 +287,9 @@ def test_http_exception_response(self): assert s.meta.get(http.URL) == 'http://localhost/404/raise_exception' def test_insert_tween_if_needed_already_set(self): - settings = {'pyramid.tweens': 'ddtrace.contrib.pyramid:trace_tween_factory'} + settings = {'pyramid.tweens': 'oteltrace.contrib.pyramid:trace_tween_factory'} insert_tween_if_needed(settings) - assert settings['pyramid.tweens'] == 'ddtrace.contrib.pyramid:trace_tween_factory' + assert settings['pyramid.tweens'] == 'oteltrace.contrib.pyramid:trace_tween_factory' def test_insert_tween_if_needed_none(self): settings = {'pyramid.tweens': ''} @@ -302,7 +301,7 @@ def test_insert_tween_if_needed_excview(self): insert_tween_if_needed(settings) assert ( settings['pyramid.tweens'] == - 'ddtrace.contrib.pyramid:trace_tween_factory\npyramid.tweens.excview_tween_factory' + 'oteltrace.contrib.pyramid:trace_tween_factory\npyramid.tweens.excview_tween_factory' ) def test_insert_tween_if_needed_excview_and_other(self): @@ -311,7 +310,7 @@ def test_insert_tween_if_needed_excview_and_other(self): assert ( settings['pyramid.tweens'] == 'a.first.tween\n' - 'ddtrace.contrib.pyramid:trace_tween_factory\n' + 'oteltrace.contrib.pyramid:trace_tween_factory\n' 'pyramid.tweens.excview_tween_factory\n' 'a.last.tween\n') @@ -320,7 +319,7 @@ def test_insert_tween_if_needed_others(self): insert_tween_if_needed(settings) assert ( settings['pyramid.tweens'] == - 'a.random.tween\nand.another.one\nddtrace.contrib.pyramid:trace_tween_factory' + 'a.random.tween\nand.another.one\noteltrace.contrib.pyramid:trace_tween_factory' ) def test_include_conflicts(self): @@ -329,33 +328,3 @@ def test_include_conflicts(self): self.app.get('/404', status=404) spans = self.tracer.writer.pop() assert len(spans) == 
1 - - def test_200_ot(self): - """OpenTracing version of test_200.""" - ot_tracer = init_tracer('pyramid_svc', self.tracer) - - with ot_tracer.start_active_span('pyramid_get'): - res = self.app.get('/', status=200) - assert b'idx' in res.body - - writer = self.tracer.writer - spans = writer.pop() - assert len(spans) == 2 - - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == 'pyramid_get' - assert ot_span.service == 'pyramid_svc' - - assert dd_span.service == 'foobar' - assert dd_span.resource == 'GET index' - assert dd_span.error == 0 - assert dd_span.span_type == 'http' - assert dd_span.meta.get('http.method') == 'GET' - assert dd_span.meta.get('http.status_code') == '200' - assert dd_span.meta.get(http.URL) == 'http://localhost/' - assert dd_span.meta.get('pyramid.route.name') == 'index' diff --git a/tests/contrib/redis/test.py b/tests/contrib/redis/test.py index 637bf7f0..72ecb216 100644 --- a/tests/contrib/redis/test.py +++ b/tests/contrib/redis/test.py @@ -1,12 +1,11 @@ # -*- coding: utf-8 -*- import redis -from ddtrace import Pin, compat -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.redis import get_traced_redis -from ddtrace.contrib.redis.patch import patch, unpatch +from oteltrace import Pin, compat +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.redis import get_traced_redis +from oteltrace.contrib.redis.patch import patch, unpatch -from tests.opentracer.utils import init_tracer from ..config import REDIS_CONFIG from ...test_tracer import get_dummy_tracer from ...base import BaseTracerTestCase @@ -189,32 +188,3 @@ def test_patch_unpatch(self): spans = writer.pop() assert spans, spans assert len(spans) == 1 - - def test_opentracing(self): - """Ensure OpenTracing works with redis.""" - ot_tracer = init_tracer('redis_svc', self.tracer) - - with ot_tracer.start_active_span('redis_get'): - us = self.r.get('cheese') - assert us is None - - spans = self.get_spans() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == 'redis_get' - assert ot_span.service == 'redis_svc' - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.name == 'redis.command' - assert dd_span.span_type == 'redis' - assert dd_span.error == 0 - assert dd_span.get_tag('out.redis_db') == '0' - assert dd_span.get_tag('out.host') == 'localhost' - assert dd_span.get_tag('redis.raw_command') == u'GET cheese' - assert dd_span.get_metric('redis.args_length') == 2 - assert dd_span.resource == 'GET cheese' diff --git a/tests/contrib/rediscluster/test.py b/tests/contrib/rediscluster/test.py index f2224cbc..e099c181 100644 --- a/tests/contrib/rediscluster/test.py +++ b/tests/contrib/rediscluster/test.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- import rediscluster -from ddtrace import Pin -from ddtrace.contrib.rediscluster.patch import patch, unpatch +from oteltrace import Pin +from oteltrace.contrib.rediscluster.patch import patch, unpatch from ..config import REDISCLUSTER_CONFIG from ...test_tracer import get_dummy_tracer from ...base import BaseTracerTestCase diff --git a/tests/contrib/requests/test_requests.py b/tests/contrib/requests/test_requests.py index d4455f5d..61f01d3f 100644 --- a/tests/contrib/requests/test_requests.py +++ b/tests/contrib/requests/test_requests.py @@ -3,13 +3,11 @@ from requests import 
Session from requests.exceptions import MissingSchema -from ddtrace import config -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.requests import patch, unpatch -from ddtrace.ext import errors, http - -from tests.opentracer.utils import init_tracer +from oteltrace import config +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.requests import patch, unpatch +from oteltrace.ext import errors, http from ...base import BaseTracerTestCase from ...util import override_global_tracer @@ -29,7 +27,7 @@ def setUp(self): patch() self.session = Session() - setattr(self.session, 'datadog_tracer', self.tracer) + setattr(self.session, 'opentelemetry_tracer', self.tracer) def tearDown(self): unpatch() @@ -90,7 +88,7 @@ def test_double_patch(self): # ensure that double patch doesn't duplicate instrumentation patch() session = Session() - setattr(session, 'datadog_tracer', self.tracer) + setattr(session, 'opentelemetry_tracer', self.tracer) out = session.get(URL_200) assert out.status_code == 200 @@ -345,33 +343,6 @@ def test_split_by_domain_includes_port_path(self): assert s.service == 'httpbin.org:80' - def test_200_ot(self): - """OpenTracing version of test_200.""" - - ot_tracer = init_tracer('requests_svc', self.tracer) - - with ot_tracer.start_active_span('requests_get'): - out = self.session.get(URL_200) - assert out.status_code == 200 - - # validation - spans = self.tracer.writer.pop() - assert len(spans) == 2 - - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == 'requests_get' - assert ot_span.service == 'requests_svc' - - assert dd_span.get_tag(http.METHOD) == 'GET' - assert dd_span.get_tag(http.STATUS_CODE) == '200' - assert dd_span.error == 0 - assert dd_span.span_type == http.TYPE - def test_request_and_response_headers(self): # Disabled when not configured self.session.get(URL_200, headers={'my-header': 'my_value'}) diff --git a/tests/contrib/requests/test_requests_distributed.py b/tests/contrib/requests/test_requests_distributed.py index 6b4ea528..0c5b28cf 100644 --- a/tests/contrib/requests/test_requests_distributed.py +++ b/tests/contrib/requests/test_requests_distributed.py @@ -1,6 +1,6 @@ from requests_mock import Adapter -from ddtrace import config +from oteltrace import config from ...base import BaseTracerTestCase from .test_requests import BaseRequestTestCase @@ -34,8 +34,8 @@ def test_propagation_default(self): with self.tracer.trace('root') as root: def matcher(request): return self.headers_here(self.tracer, request, root) - adapter.register_uri('GET', 'mock://datadog/foo', additional_matcher=matcher, text='bar') - resp = self.session.get('mock://datadog/foo') + adapter.register_uri('GET', 'mock://opentelemetry/foo', additional_matcher=matcher, text='bar') + resp = self.session.get('mock://opentelemetry/foo') assert 200 == resp.status_code assert 'bar' == resp.text @@ -48,8 +48,8 @@ def test_propagation_true_global(self): with self.tracer.trace('root') as root: def matcher(request): return self.headers_here(self.tracer, request, root) - adapter.register_uri('GET', 'mock://datadog/foo', additional_matcher=matcher, text='bar') - resp = self.session.get('mock://datadog/foo') + adapter.register_uri('GET', 'mock://opentelemetry/foo', additional_matcher=matcher, text='bar') + resp = self.session.get('mock://opentelemetry/foo') assert 200 == resp.status_code assert 
'bar' == resp.text @@ -62,8 +62,8 @@ def test_propagation_false_global(self): with self.tracer.trace('root'): def matcher(request): return self.headers_not_here(self.tracer, request) - adapter.register_uri('GET', 'mock://datadog/foo', additional_matcher=matcher, text='bar') - resp = self.session.get('mock://datadog/foo') + adapter.register_uri('GET', 'mock://opentelemetry/foo', additional_matcher=matcher, text='bar') + resp = self.session.get('mock://opentelemetry/foo') assert 200 == resp.status_code assert 'bar' == resp.text @@ -77,8 +77,8 @@ def test_propagation_true(self): with self.tracer.trace('root') as root: def matcher(request): return self.headers_here(self.tracer, request, root) - adapter.register_uri('GET', 'mock://datadog/foo', additional_matcher=matcher, text='bar') - resp = self.session.get('mock://datadog/foo') + adapter.register_uri('GET', 'mock://opentelemetry/foo', additional_matcher=matcher, text='bar') + resp = self.session.get('mock://opentelemetry/foo') assert 200 == resp.status_code assert 'bar' == resp.text @@ -99,8 +99,8 @@ def test_propagation_false(self): with self.tracer.trace('root'): def matcher(request): return self.headers_not_here(self.tracer, request) - adapter.register_uri('GET', 'mock://datadog/foo', additional_matcher=matcher, text='bar') - resp = self.session.get('mock://datadog/foo') + adapter.register_uri('GET', 'mock://opentelemetry/foo', additional_matcher=matcher, text='bar') + resp = self.session.get('mock://opentelemetry/foo') assert 200 == resp.status_code assert 'bar' == resp.text @@ -113,8 +113,8 @@ def test_propagation_true_legacy_default(self): with self.tracer.trace('root') as root: def matcher(request): return self.headers_here(self.tracer, request, root) - adapter.register_uri('GET', 'mock://datadog/foo', additional_matcher=matcher, text='bar') - resp = self.session.get('mock://datadog/foo') + adapter.register_uri('GET', 'mock://opentelemetry/foo', additional_matcher=matcher, text='bar') + resp = self.session.get('mock://opentelemetry/foo') assert 200 == resp.status_code assert 'bar' == resp.text @@ -135,8 +135,8 @@ def test_propagation_true_legacy(self): with self.tracer.trace('root') as root: def matcher(request): return self.headers_here(self.tracer, request, root) - adapter.register_uri('GET', 'mock://datadog/foo', additional_matcher=matcher, text='bar') - resp = self.session.get('mock://datadog/foo') + adapter.register_uri('GET', 'mock://opentelemetry/foo', additional_matcher=matcher, text='bar') + resp = self.session.get('mock://opentelemetry/foo') assert 200 == resp.status_code assert 'bar' == resp.text @@ -157,7 +157,7 @@ def test_propagation_false_legacy(self): with self.tracer.trace('root'): def matcher(request): return self.headers_not_here(self.tracer, request) - adapter.register_uri('GET', 'mock://datadog/foo', additional_matcher=matcher, text='bar') - resp = self.session.get('mock://datadog/foo') + adapter.register_uri('GET', 'mock://opentelemetry/foo', additional_matcher=matcher, text='bar') + resp = self.session.get('mock://opentelemetry/foo') assert 200 == resp.status_code assert 'bar' == resp.text diff --git a/tests/contrib/requests_gevent/test_requests_gevent.py b/tests/contrib/requests_gevent/test_requests_gevent.py index aa576b7c..31be9ee8 100644 --- a/tests/contrib/requests_gevent/test_requests_gevent.py +++ b/tests/contrib/requests_gevent/test_requests_gevent.py @@ -7,9 +7,9 @@ def test_patch(self): """ Patching `requests` before `gevent` monkeypatching - This is a regression test for 
https://github.com/DataDog/dd-trace-py/issues/506 + This is a regression test for https://github.com/opentelemetry/otel-trace-py/issues/506 - When using `ddtrace-run` along with `requests` and `gevent` our patching causes + When using `oteltrace-run` along with `requests` and `gevent` our patching causes `requests` and `urllib3` to get loaded before `gevent` has a chance to monkey patch. This causes `gevent` to show a warning and under certain versions cause @@ -18,15 +18,15 @@ def test_patch(self): # Assert none of our modules have been imported yet # DEV: This regression test depends on being able to control import order of these modules # DEV: This is not entirely necessary but is a nice safe guard - self.assertNotIn('ddtrace', sys.modules) + self.assertNotIn('oteltrace', sys.modules) self.assertNotIn('gevent', sys.modules) self.assertNotIn('requests', sys.modules) self.assertNotIn('urllib3', sys.modules) try: - # Import ddtrace and patch only `requests` + # Import oteltrace and patch only `requests` # DEV: We do not need to patch `gevent` for the exception to occur - from ddtrace import patch + from oteltrace import patch patch(requests=True) # Import gevent and monkeypatch @@ -43,5 +43,5 @@ def test_patch(self): finally: # Ensure we always unpatch `requests` when we are done - from ddtrace.contrib.requests import unpatch + from oteltrace.contrib.requests import unpatch unpatch() diff --git a/tests/contrib/sqlalchemy/mixins.py b/tests/contrib/sqlalchemy/mixins.py index 42c94b51..ce71dd1e 100644 --- a/tests/contrib/sqlalchemy/mixins.py +++ b/tests/contrib/sqlalchemy/mixins.py @@ -12,13 +12,10 @@ ) # project -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.sqlalchemy import trace_engine +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.sqlalchemy import trace_engine # testing -from tests.opentracer.utils import init_tracer - - Base = declarative_base() @@ -169,37 +166,6 @@ def test_traced_service(self): expected = {} assert services == expected - def test_opentracing(self): - """Ensure that sqlalchemy works with the opentracer.""" - ot_tracer = init_tracer('sqlalch_svc', self.tracer) - - with ot_tracer.start_active_span('sqlalch_op'): - with self.connection() as conn: - rows = conn.execute('SELECT * FROM players').fetchall() - assert len(rows) == 0 - - traces = self.tracer.writer.pop_traces() - # trace composition - assert len(traces) == 1 - assert len(traces[0]) == 2 - ot_span, dd_span = traces[0] - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == 'sqlalch_op' - assert ot_span.service == 'sqlalch_svc' - - # span fields - assert dd_span.name == '{}.query'.format(self.VENDOR) - assert dd_span.service == self.SERVICE - assert dd_span.resource == 'SELECT * FROM players' - assert dd_span.get_tag('sql.db') == self.SQL_DB - assert dd_span.span_type == 'sql' - assert dd_span.error == 0 - assert dd_span.duration > 0 - def test_analytics_default(self): # ensures that the ORM session is traced wayne = Player(id=1, name='wayne') diff --git a/tests/contrib/sqlalchemy/test_patch.py b/tests/contrib/sqlalchemy/test_patch.py index 05a9a0e2..918cf671 100644 --- a/tests/contrib/sqlalchemy/test_patch.py +++ b/tests/contrib/sqlalchemy/test_patch.py @@ -1,8 +1,8 @@ import sqlalchemy -from ddtrace import Pin -from ddtrace.contrib.sqlalchemy import patch, unpatch -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace import Pin +from 
oteltrace.contrib.sqlalchemy import patch, unpatch +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY from ..config import POSTGRES_CONFIG from ...base import BaseTracerTestCase diff --git a/tests/contrib/sqlite3/test_sqlite3.py b/tests/contrib/sqlite3/test_sqlite3.py index 2a069124..c74505f9 100644 --- a/tests/contrib/sqlite3/test_sqlite3.py +++ b/tests/contrib/sqlite3/test_sqlite3.py @@ -3,15 +3,14 @@ import time # project -import ddtrace -from ddtrace import Pin -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.sqlite3 import connection_factory -from ddtrace.contrib.sqlite3.patch import patch, unpatch, TracedSQLiteCursor -from ddtrace.ext import errors +import oteltrace +from oteltrace import Pin +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.sqlite3 import connection_factory +from oteltrace.contrib.sqlite3.patch import patch, unpatch, TracedSQLiteCursor +from oteltrace.ext import errors # testing -from tests.opentracer.utils import init_tracer from ...base import BaseTracerTestCase @@ -36,15 +35,15 @@ def test_backwards_compat(self): assert not self.spans def test_service_info(self): - backup_tracer = ddtrace.tracer - ddtrace.tracer = self.tracer + backup_tracer = oteltrace.tracer + oteltrace.tracer = self.tracer sqlite3.connect(':memory:') services = self.tracer.writer.pop_services() self.assertEqual(services, {}) - ddtrace.tracer = backup_tracer + oteltrace.tracer = backup_tracer def test_sqlite(self): # ensure we can trace multiple services without stomping @@ -184,49 +183,6 @@ def test_sqlite_fetchmany_is_traced(self): ) self.assertIsNone(fetchmany_span.get_tag('sql.query')) - def test_sqlite_ot(self): - """Ensure sqlite works with the opentracer.""" - ot_tracer = init_tracer('sqlite_svc', self.tracer) - - # Ensure we can run a query and it's correctly traced - q = 'select * from sqlite_master' - with ot_tracer.start_active_span('sqlite_op'): - db = sqlite3.connect(':memory:') - pin = Pin.get_from(db) - assert pin - self.assertEqual('db', pin.app_type) - pin.clone(tracer=self.tracer).onto(db) - cursor = db.execute(q) - rows = cursor.fetchall() - assert not rows - - self.assert_structure( - dict(name='sqlite_op', service='sqlite_svc'), - ( - dict(name='sqlite.query', span_type='sql', resource=q, error=0), - ) - ) - self.reset() - - with self.override_config('dbapi2', dict(trace_fetch_methods=True)): - with ot_tracer.start_active_span('sqlite_op'): - db = sqlite3.connect(':memory:') - pin = Pin.get_from(db) - assert pin - self.assertEqual('db', pin.app_type) - pin.clone(tracer=self.tracer).onto(db) - cursor = db.execute(q) - rows = cursor.fetchall() - assert not rows - - self.assert_structure( - dict(name='sqlite_op', service='sqlite_svc'), - ( - dict(name='sqlite.query', span_type='sql', resource=q, error=0), - dict(name='sqlite.query.fetchall', span_type='sql', resource=q, error=0), - ), - ) - def test_commit(self): connection = self._given_a_traced_connection(self.tracer) connection.commit() diff --git a/tests/contrib/test_utils.py b/tests/contrib/test_utils.py index cdbb53af..27839a4f 100644 --- a/tests/contrib/test_utils.py +++ b/tests/contrib/test_utils.py @@ -1,5 +1,5 @@ from functools import partial -from ddtrace.utils.importlib import func_name +from oteltrace.utils.importlib import func_name class SomethingCallable(object): diff --git a/tests/contrib/tornado/test_config.py b/tests/contrib/tornado/test_config.py index 2a57751a..042b1651 100644 --- a/tests/contrib/tornado/test_config.py +++ 
b/tests/contrib/tornado/test_config.py @@ -1,4 +1,4 @@ -from ddtrace.filters import FilterRequestsOnUrl +from oteltrace.filters import FilterRequestsOnUrl from .utils import TornadoTestCase @@ -11,12 +11,10 @@ class TestTornadoSettings(TornadoTestCase): def get_settings(self): # update tracer settings return { - 'datadog_trace': { + 'opentelemetry_trace': { 'default_service': 'custom-tornado', 'tags': {'env': 'production', 'debug': 'false'}, 'enabled': False, - 'agent_hostname': 'dd-agent.service.consul', - 'agent_port': 8126, 'settings': { 'FILTERS': [ FilterRequestsOnUrl(r'http://test\.example\.com'), @@ -29,8 +27,6 @@ def test_tracer_is_properly_configured(self): # the tracer must be properly configured assert self.tracer.tags == {'env': 'production', 'debug': 'false'} assert self.tracer.enabled is False - assert self.tracer.writer.api.hostname == 'dd-agent.service.consul' - assert self.tracer.writer.api.port == 8126 # settings are properly passed assert self.tracer.writer._filters is not None assert len(self.tracer.writer._filters) == 1 diff --git a/tests/contrib/tornado/test_executor_decorator.py b/tests/contrib/tornado/test_executor_decorator.py index 70caf270..56c7e84c 100644 --- a/tests/contrib/tornado/test_executor_decorator.py +++ b/tests/contrib/tornado/test_executor_decorator.py @@ -1,7 +1,7 @@ import unittest -from ddtrace.contrib.tornado.compat import futures_available -from ddtrace.ext import http +from oteltrace.contrib.tornado.compat import futures_available +from oteltrace.ext import http from tornado import version_info @@ -171,10 +171,10 @@ def test_on_executor_custom_args_kwarg(self): def test_futures_double_instrumentation(self): # it should not double wrap `ThreadpPoolExecutor.submit` method if # `futures` is already instrumented - from ddtrace import patch + from oteltrace import patch patch(futures=True) from concurrent.futures import ThreadPoolExecutor - from ddtrace.vendor.wrapt import BoundFunctionWrapper + from oteltrace.vendor.wrapt import BoundFunctionWrapper fn_wrapper = getattr(ThreadPoolExecutor.submit, '__wrapped__', None) assert not isinstance(fn_wrapper, BoundFunctionWrapper) diff --git a/tests/contrib/tornado/test_safety.py b/tests/contrib/tornado/test_safety.py index 7ebcfbab..630fcdae 100644 --- a/tests/contrib/tornado/test_safety.py +++ b/tests/contrib/tornado/test_safety.py @@ -3,8 +3,8 @@ from tornado import httpclient from tornado.testing import gen_test -from ddtrace.contrib.tornado import patch, unpatch -from ddtrace.ext import http +from oteltrace.contrib.tornado import patch, unpatch +from oteltrace.ext import http from . 
import web from .web.app import CustomDefaultHandler diff --git a/tests/contrib/tornado/test_stack_context.py b/tests/contrib/tornado/test_stack_context.py index a3727dfe..0189b7df 100644 --- a/tests/contrib/tornado/test_stack_context.py +++ b/tests/contrib/tornado/test_stack_context.py @@ -1,8 +1,8 @@ import pytest import tornado -from ddtrace.context import Context -from ddtrace.contrib.tornado import TracerStackContext +from oteltrace.context import Context +from oteltrace.contrib.tornado import TracerStackContext from .utils import TornadoTestCase from .web.compat import sleep diff --git a/tests/contrib/tornado/test_tornado_template.py b/tests/contrib/tornado/test_tornado_template.py index 87c184a6..f12acd61 100644 --- a/tests/contrib/tornado/test_tornado_template.py +++ b/tests/contrib/tornado/test_tornado_template.py @@ -4,7 +4,7 @@ from .utils import TornadoTestCase -from ddtrace.ext import http +from oteltrace.ext import http class TestTornadoTemplate(TornadoTestCase): diff --git a/tests/contrib/tornado/test_tornado_web.py b/tests/contrib/tornado/test_tornado_web.py index ad9ead48..dac1c284 100644 --- a/tests/contrib/tornado/test_tornado_web.py +++ b/tests/contrib/tornado/test_tornado_web.py @@ -1,13 +1,9 @@ from .web.app import CustomDefaultHandler from .utils import TornadoTestCase -from ddtrace import config -from ddtrace.constants import SAMPLING_PRIORITY_KEY, ORIGIN_KEY, ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.ext import http -import pytest -import tornado - -from tests.opentracer.utils import init_tracer +from oteltrace import config +from oteltrace.constants import SAMPLING_PRIORITY_KEY, ORIGIN_KEY, ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.ext import http class TestTornadoWeb(TornadoTestCase): @@ -275,41 +271,6 @@ def test_propagation(self): assert 4567 == request_span.parent_id assert 2 == request_span.get_metric(SAMPLING_PRIORITY_KEY) - # Opentracing support depends on new AsyncioScopeManager - # See: https://github.com/opentracing/opentracing-python/pull/118 - @pytest.mark.skipif(tornado.version_info >= (5, 0), - reason='Opentracing ScopeManager not available for Tornado >= 5') - def test_success_handler_ot(self): - """OpenTracing version of test_success_handler.""" - from opentracing.scope_managers.tornado import TornadoScopeManager - ot_tracer = init_tracer('tornado_svc', self.tracer, scope_manager=TornadoScopeManager()) - - with ot_tracer.start_active_span('tornado_op'): - response = self.fetch('/success/') - assert 200 == response.code - - traces = self.tracer.writer.pop_traces() - assert 1 == len(traces) - assert 2 == len(traces[0]) - # dd_span will start and stop before the ot_span finishes - ot_span, dd_span = traces[0] - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == 'tornado_op' - assert ot_span.service == 'tornado_svc' - - assert 'tornado-web' == dd_span.service - assert 'tornado.request' == dd_span.name - assert 'http' == dd_span.span_type - assert 'tests.contrib.tornado.web.app.SuccessHandler' == dd_span.resource - assert 'GET' == dd_span.get_tag('http.method') - assert '200' == dd_span.get_tag('http.status_code') - assert self.get_url('/success/') == dd_span.get_tag(http.URL) - assert 0 == dd_span.error - class TestTornadoWebAnalyticsDefault(TornadoTestCase): """ @@ -352,7 +313,7 @@ class TestTornadoWebAnalyticsOn(TornadoTestCase): def get_settings(self): # distributed_tracing needs to be disabled manually return { - 'datadog_trace': { + 'opentelemetry_trace': { 
'analytics_enabled': True, 'analytics_sample_rate': 0.5, }, @@ -396,7 +357,7 @@ class TestTornadoWebAnalyticsNoRate(TornadoTestCase): def get_settings(self): # distributed_tracing needs to be disabled manually return { - 'datadog_trace': { + 'opentelemetry_trace': { 'analytics_enabled': True, }, } @@ -424,7 +385,7 @@ class TestNoPropagationTornadoWeb(TornadoTestCase): def get_settings(self): # distributed_tracing needs to be disabled manually return { - 'datadog_trace': { + 'opentelemetry_trace': { 'distributed_tracing': False, }, } diff --git a/tests/contrib/tornado/test_wrap_decorator.py b/tests/contrib/tornado/test_wrap_decorator.py index 7ee18954..04a2bbcb 100644 --- a/tests/contrib/tornado/test_wrap_decorator.py +++ b/tests/contrib/tornado/test_wrap_decorator.py @@ -1,4 +1,4 @@ -from ddtrace.ext import http +from oteltrace.ext import http from .utils import TornadoTestCase diff --git a/tests/contrib/tornado/utils.py b/tests/contrib/tornado/utils.py index 45803ffa..42421270 100644 --- a/tests/contrib/tornado/utils.py +++ b/tests/contrib/tornado/utils.py @@ -1,7 +1,7 @@ from tornado.testing import AsyncHTTPTestCase -from ddtrace.contrib.tornado import patch, unpatch -from ddtrace.compat import reload_module +from oteltrace.contrib.tornado import patch, unpatch +from oteltrace.compat import reload_module from .web import app, compat from ...base import BaseTracerTestCase @@ -20,8 +20,8 @@ def get_app(self): reload_module(app) settings = self.get_settings() - trace_settings = settings.get('datadog_trace', {}) - settings['datadog_trace'] = trace_settings + trace_settings = settings.get('opentelemetry_trace', {}) + settings['opentelemetry_trace'] = trace_settings trace_settings['tracer'] = self.tracer self.app = app.make_app(settings=settings) return self.app diff --git a/tests/contrib/tornado/web/app.py b/tests/contrib/tornado/web/app.py index bb7114d9..64fb217a 100644 --- a/tests/contrib/tornado/web/app.py +++ b/tests/contrib/tornado/web/app.py @@ -21,7 +21,7 @@ def get(self): class NestedHandler(tornado.web.RequestHandler): @tornado.gen.coroutine def get(self): - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] with tracer.trace('tornado.sleep'): yield sleep(0.05) self.write('OK') @@ -30,7 +30,7 @@ def get(self): class NestedWrapHandler(tornado.web.RequestHandler): @tornado.gen.coroutine def get(self): - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] # define a wrapped coroutine: having an inner coroutine # is only for easy testing @@ -46,7 +46,7 @@ def coro(): class NestedExceptionWrapHandler(tornado.web.RequestHandler): @tornado.gen.coroutine def get(self): - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] # define a wrapped coroutine: having an inner coroutine # is only for easy testing @@ -108,7 +108,7 @@ def get(self): class SyncNestedWrapHandler(tornado.web.RequestHandler): def get(self): - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] # define a wrapped coroutine: having an inner coroutine # is only for easy testing @@ -122,7 +122,7 @@ def func(): class SyncNestedExceptionWrapHandler(tornado.web.RequestHandler): def get(self): - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] # define a wrapped coroutine: having an inner coroutine # is only for easy testing @@ -149,7 +149,7 @@ class 
ExecutorHandler(tornado.web.RequestHandler): @tornado.concurrent.run_on_executor def outer_executor(self): - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] with tracer.trace('tornado.executor.with'): time.sleep(0.05) @@ -163,7 +163,7 @@ class ExecutorSubmitHandler(tornado.web.RequestHandler): executor = ThreadPoolExecutor(max_workers=3) def query(self): - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] with tracer.trace('tornado.executor.query'): time.sleep(0.05) @@ -184,7 +184,7 @@ def outer_executor(self): # waiting here means expecting that the `get()` flushes # the request trace time.sleep(0.01) - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] with tracer.trace('tornado.executor.with'): time.sleep(0.05) @@ -207,7 +207,7 @@ def outer_executor(self): # wait before creating a trace so that we're sure # the `tornado.executor.with` span has the right # parent - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] with tracer.trace('tornado.executor.with'): time.sleep(0.05) @@ -246,7 +246,7 @@ def outer_executor(self): # the `tornado.executor.with` span has the right # parent time.sleep(0.05) - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] with tracer.trace('tornado.executor.with'): raise Exception('Ouch!') @@ -262,7 +262,7 @@ class ExecutorWrapHandler(tornado.web.RequestHandler): @tornado.gen.coroutine def get(self): - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] @tracer.wrap('tornado.executor.wrap') @tornado.concurrent.run_on_executor @@ -279,7 +279,7 @@ class ExecutorExceptionWrapHandler(tornado.web.RequestHandler): @tornado.gen.coroutine def get(self): - tracer = self.settings['datadog_trace']['tracer'] + tracer = self.settings['opentelemetry_trace']['tracer'] @tracer.wrap('tornado.executor.wrap') @tornado.concurrent.run_on_executor diff --git a/tests/contrib/vertica/test_vertica.py b/tests/contrib/vertica/test_vertica.py index 8e1e5da3..108edadb 100644 --- a/tests/contrib/vertica/test_vertica.py +++ b/tests/contrib/vertica/test_vertica.py @@ -1,19 +1,18 @@ # 3p import pytest -from ddtrace.vendor import wrapt +from oteltrace.vendor import wrapt # project -import ddtrace -from ddtrace import Pin, config -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.contrib.vertica.patch import patch, unpatch -from ddtrace.ext import errors -from ddtrace.utils.merge import deepmerge +import oteltrace +from oteltrace import Pin, config +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.contrib.vertica.patch import patch, unpatch +from oteltrace.ext import errors +from oteltrace.utils.merge import deepmerge # testing from tests.base import BaseTracerTestCase from tests.contrib.config import VERTICA_CONFIG -from tests.opentracer.utils import init_tracer from tests.test_tracer import get_dummy_tracer TEST_TABLE = 'test_table' @@ -27,7 +26,7 @@ def test_tracer(request): @pytest.fixture(scope='function') def test_conn(request, test_tracer): - ddtrace.tracer = test_tracer + oteltrace.tracer = test_tracer patch() import vertica_python # must happen AFTER installing with patch() @@ -355,33 +354,6 @@ def test_copy(self): assert spans[1].name == 'vertica.query' assert spans[1].resource == 'COMMIT;' - def 
test_opentracing(self): - """Ensure OpenTracing works with vertica.""" - conn, cur = self.test_conn - - ot_tracer = init_tracer('vertica_svc', self.test_tracer) - - with ot_tracer.start_active_span('vertica_execute'): - cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) - conn.close() - - spans = self.test_tracer.writer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert dd_span.service == 'vertica' - assert dd_span.span_type == 'sql' - assert dd_span.name == 'vertica.query' - assert dd_span.get_metric('db.rowcount') == -1 - query = "INSERT INTO test_table (a, b) VALUES (1, 'aa');" - assert dd_span.resource == query - assert dd_span.get_tag('out.host') == '127.0.0.1' - assert dd_span.get_tag('out.port') == '5433' - def test_analytics_default(self): conn, cur = self.test_conn diff --git a/tests/ddtrace_run.py b/tests/ddtrace_run.py deleted file mode 100644 index 89d9cbb8..00000000 --- a/tests/ddtrace_run.py +++ /dev/null @@ -1,9 +0,0 @@ -import os -import sys - -# DEV: We must append to sys path before importing ddtrace_run -sys.path.append('.') -from ddtrace.commands import ddtrace_run # noqa - -os.environ['PYTHONPATH'] = '{}:{}'.format(os.getenv('PYTHONPATH'), os.path.abspath('.')) -ddtrace_run.main() diff --git a/tests/internal/runtime/test_container.py b/tests/internal/runtime/test_container.py index 63c2af11..a66e358c 100644 --- a/tests/internal/runtime/test_container.py +++ b/tests/internal/runtime/test_container.py @@ -2,8 +2,8 @@ import pytest -from ddtrace.compat import PY2 -from ddtrace.internal.runtime.container import CGroupInfo, get_container_info +from oteltrace.compat import PY2 +from oteltrace.internal.runtime.container import CGroupInfo, get_container_info from .utils import cgroup_line_valid_test_cases @@ -14,7 +14,7 @@ def get_mock_open(read_data=None): mock_open = mock.mock_open(read_data=read_data) - return mock.patch('ddtrace.internal.runtime.container.open', mock_open) + return mock.patch('oteltrace.internal.runtime.container.open', mock_open) def test_cgroup_info_init(): @@ -283,8 +283,8 @@ def test_get_container_info_with_pid(pid, file_name): mock_open.assert_called_once_with(file_name, mode='r') -@mock.patch('ddtrace.internal.runtime.container.CGroupInfo.from_line') -@mock.patch('ddtrace.internal.runtime.container.log') +@mock.patch('oteltrace.internal.runtime.container.CGroupInfo.from_line') +@mock.patch('oteltrace.internal.runtime.container.log') def test_get_container_info_exception(mock_log, mock_from_line): exception = Exception() mock_from_line.side_effect = exception diff --git a/tests/internal/runtime/test_metric_collectors.py b/tests/internal/runtime/test_metric_collectors.py index 1fd3e705..cb18c973 100644 --- a/tests/internal/runtime/test_metric_collectors.py +++ b/tests/internal/runtime/test_metric_collectors.py @@ -1,10 +1,10 @@ -from ddtrace.internal.runtime.metric_collectors import ( +from oteltrace.internal.runtime.metric_collectors import ( RuntimeMetricCollector, GCRuntimeMetricCollector, PSUtilRuntimeMetricCollector, ) -from ddtrace.internal.runtime.constants import ( +from oteltrace.internal.runtime.constants import ( GC_COUNT_GEN0, GC_RUNTIME_METRICS, PSUTIL_RUNTIME_METRICS, diff --git a/tests/internal/runtime/test_metrics.py b/tests/internal/runtime/test_metrics.py index 227713d8..b3264419 100644 --- a/tests/internal/runtime/test_metrics.py +++ b/tests/internal/runtime/test_metrics.py @@ -1,5 +1,5 
@@ import mock -from ddtrace.internal.runtime.collector import ValueCollector +from oteltrace.internal.runtime.collector import ValueCollector from ...base import BaseTestCase @@ -73,7 +73,7 @@ def collect_fn(self, keys): def test_required_module_not_installed(self): collect = mock.MagicMock() - with mock.patch('ddtrace.internal.runtime.collector.log') as log_mock: + with mock.patch('oteltrace.internal.runtime.collector.log') as log_mock: # Should log a warning (tested below) vc = mocked_collector(collect, required_modules=['moduleshouldnotexist']) diff --git a/tests/internal/runtime/test_runtime_metrics.py b/tests/internal/runtime/test_runtime_metrics.py index 6ede5146..39a1cd87 100644 --- a/tests/internal/runtime/test_runtime_metrics.py +++ b/tests/internal/runtime/test_runtime_metrics.py @@ -1,12 +1,8 @@ -import time - -import mock - -from ddtrace.internal.runtime.runtime_metrics import ( +from oteltrace.internal.runtime.runtime_metrics import ( RuntimeTags, RuntimeMetrics, ) -from ddtrace.internal.runtime.constants import ( +from oteltrace.internal.runtime.constants import ( DEFAULT_RUNTIME_METRICS, GC_COUNT_GEN0, SERVICE, @@ -69,51 +65,3 @@ def test_all_metrics(self): def test_one_metric(self): metrics = [k for (k, v) in RuntimeMetrics(enabled=[GC_COUNT_GEN0])] self.assertEqual(metrics, [GC_COUNT_GEN0]) - - -class TestRuntimeWorker(BaseTracerTestCase): - def test_tracer_metrics(self): - # Mock socket.socket to hijack the dogstatsd socket - with mock.patch('socket.socket'): - # configure tracer for runtime metrics - self.tracer._RUNTIME_METRICS_INTERVAL = 1./4 - self.tracer.configure(collect_metrics=True) - self.tracer.set_tags({'env': 'tests.dog'}) - - with self.override_global_tracer(self.tracer): - root = self.start_span('parent', service='parent') - context = root.context - self.start_span('child', service='child', child_of=context) - - time.sleep(self.tracer._RUNTIME_METRICS_INTERVAL * 2) - - # Get the socket before it disappears - statsd_socket = self.tracer._dogstatsd_client.socket - # now stop collection - self.tracer.configure(collect_metrics=False) - - received = [ - s.args[0].decode('utf-8') for s in statsd_socket.send.mock_calls - ] - - # we expect more than one flush since it is also called on shutdown - assert len(received) > 1 - - # expect all metrics in default set are received - # DEV: dogstatsd gauges in form "{metric_name}:{metric_value}|g#t{tag_name}:{tag_value},..." 
- self.assertSetEqual( - set([gauge.split(':')[0] - for packet in received - for gauge in packet.split('\n')]), - DEFAULT_RUNTIME_METRICS - ) - - # check to last set of metrics returned to confirm tags were set - for gauge in received[-len(DEFAULT_RUNTIME_METRICS):]: - self.assertRegexpMatches(gauge, 'service:parent') - self.assertRegexpMatches(gauge, 'service:child') - self.assertRegexpMatches(gauge, 'env:tests.dog') - self.assertRegexpMatches(gauge, 'lang_interpreter:') - self.assertRegexpMatches(gauge, 'lang_version:') - self.assertRegexpMatches(gauge, 'lang:') - self.assertRegexpMatches(gauge, 'tracer_version:') diff --git a/tests/internal/runtime/test_tag_collectors.py b/tests/internal/runtime/test_tag_collectors.py index 2f6ab33d..8d1c12b5 100644 --- a/tests/internal/runtime/test_tag_collectors.py +++ b/tests/internal/runtime/test_tag_collectors.py @@ -1,5 +1,5 @@ -from ddtrace.internal.runtime import constants -from ddtrace.internal.runtime import tag_collectors +from oteltrace.internal.runtime import constants +from oteltrace.internal.runtime import tag_collectors def test_values(): diff --git a/tests/internal/runtime/utils.py b/tests/internal/runtime/utils.py index 9f70e6c0..8dec2dd3 100644 --- a/tests/internal/runtime/utils.py +++ b/tests/internal/runtime/utils.py @@ -1,6 +1,6 @@ import itertools -from ddtrace.internal.runtime.container import CGroupInfo +from oteltrace.internal.runtime.container import CGroupInfo def cgroup_line_valid_test_cases(): diff --git a/tests/internal/test_context_manager.py b/tests/internal/test_context_manager.py index 01f5c130..10482e82 100644 --- a/tests/internal/test_context_manager.py +++ b/tests/internal/test_context_manager.py @@ -1,8 +1,8 @@ import threading -from ddtrace.context import Context -from ddtrace.internal.context_manager import DefaultContextManager -from ddtrace.span import Span +from oteltrace.context import Context +from oteltrace.internal.context_manager import DefaultContextManager +from oteltrace.span import Span from ..base import BaseTestCase diff --git a/tests/internal/test_hostname.py b/tests/internal/test_hostname.py index 6ef048e1..1c6c0737 100644 --- a/tests/internal/test_hostname.py +++ b/tests/internal/test_hostname.py @@ -1,6 +1,6 @@ import mock -from ddtrace.internal.hostname import get_hostname +from oteltrace.internal.hostname import get_hostname @mock.patch('socket.gethostname') diff --git a/tests/internal/test_logger.py b/tests/internal/test_logger.py index 2148da32..431aa86f 100644 --- a/tests/internal/test_logger.py +++ b/tests/internal/test_logger.py @@ -1,16 +1,16 @@ import logging import mock -from ddtrace.internal.logger import DDLogger, get_logger +from oteltrace.internal.logger import OtelLogger, get_logger from ..base import BaseTestCase ALL_LEVEL_NAMES = ('debug', 'info', 'warn', 'warning', 'error', 'exception', 'critical', 'fatal') -class DDLoggerTestCase(BaseTestCase): +class OtelLoggerTestCase(BaseTestCase): def setUp(self): - super(DDLoggerTestCase, self).setUp() + super(OtelLoggerTestCase, self).setUp() self.root = logging.root self.manager = self.root.manager @@ -23,7 +23,7 @@ def tearDown(self): self.root = None self.manager = None - super(DDLoggerTestCase, self).tearDown() + super(OtelLoggerTestCase, self).tearDown() def _make_record( self, logger, msg='test', args=(), level=logging.INFO, @@ -31,7 +31,7 @@ def _make_record( ): return logger.makeRecord(logger.name, level, fn, lno, msg, args, exc_info, func, extra) - @mock.patch('ddtrace.internal.logger.DDLogger.handle') + 
@mock.patch('oteltrace.internal.logger.OtelLogger.handle') def assert_log_records(self, log, expected_levels, handle): for name in ALL_LEVEL_NAMES: method = getattr(log, name) @@ -50,11 +50,11 @@ def test_get_logger(self): """ When using `get_logger` to get a logger When the logger does not exist - We create a new DDLogger + We create a new OtelLogger When the logger exists We return the expected logger When a different logger is requested - We return a new DDLogger + We return a new OtelLogger """ # Assert the logger doesn't already exist self.assertNotIn('test.logger', self.manager.loggerDict) @@ -64,8 +64,8 @@ def test_get_logger(self): self.assertEqual(log.name, 'test.logger') self.assertEqual(log.level, logging.NOTSET) - # Ensure it is a DDLogger - self.assertIsInstance(log, DDLogger) + # Ensure it is a OtelLogger + self.assertIsInstance(log, OtelLogger) # Make sure it is stored in all the places we expect self.assertEqual(self.manager.getLogger('test.logger'), log) self.assertEqual(self.manager.loggerDict['test.logger'], log) @@ -102,19 +102,19 @@ def test_get_logger_parents(self): def test_logger_init(self): """ - When creating a new DDLogger + When creating a new OtelLogger Has the same interface as logging.Logger Configures a defaultdict for buckets Properly configures the rate limit """ # Create a logger - log = DDLogger('test.logger') + log = OtelLogger('test.logger') # Ensure we set the name and use default log level self.assertEqual(log.name, 'test.logger') self.assertEqual(log.level, logging.NOTSET) - # Assert DDLogger default properties + # Assert OtelLogger default properties self.assertIsInstance(log.buckets, dict) self.assertEqual(log.rate_limit, 60) @@ -125,18 +125,18 @@ def test_logger_init(self): self.assertIsNone(log.parent) # Override rate limit from environment variable - with self.override_env(dict(DD_LOGGING_RATE_LIMIT='10')): - log = DDLogger('test.logger') + with self.override_env(dict(OTEL_LOGGING_RATE_LIMIT='10')): + log = OtelLogger('test.logger') self.assertEqual(log.rate_limit, 10) # Set specific log level - log = DDLogger('test.logger', level=logging.DEBUG) + log = OtelLogger('test.logger', level=logging.DEBUG) self.assertEqual(log.level, logging.DEBUG) def test_logger_log(self): """ - When calling `DDLogger` log methods - We call `DDLogger.handle` with the expected log record + When calling `OtelLogger` log methods + We call `OtelLogger.handle` with the expected log record """ log = get_logger('test.logger') @@ -174,7 +174,7 @@ def test_logger_log(self): @mock.patch('logging.Logger.handle') def test_logger_handle_no_limit(self, base_handle): """ - Calling `DDLogger.handle` + Calling `OtelLogger.handle` When no rate limit is set Always calls the base `Logger.handle` """ @@ -196,7 +196,7 @@ def test_logger_handle_no_limit(self, base_handle): @mock.patch('logging.Logger.handle') def test_logger_handle_bucket(self, base_handle): """ - When calling `DDLogger.handle` + When calling `OtelLogger.handle` With a record We pass it to the base `Logger.handle` We create a bucket for tracking @@ -213,7 +213,7 @@ def test_logger_handle_bucket(self, base_handle): # We added an bucket entry for this record key = (record.name, record.levelno, record.pathname, record.lineno) logging_bucket = log.buckets.get(key) - self.assertIsInstance(logging_bucket, DDLogger.LoggingBucket) + self.assertIsInstance(logging_bucket, OtelLogger.LoggingBucket) # The bucket entry is correct expected_bucket = int(record.created / log.rate_limit) @@ -223,7 +223,7 @@ def 
test_logger_handle_bucket(self, base_handle): @mock.patch('logging.Logger.handle') def test_logger_handle_bucket_limited(self, base_handle): """ - When calling `DDLogger.handle` + When calling `OtelLogger.handle` With multiple records in a single time frame We pass only the first to the base `Logger.handle` We keep track of the number skipped @@ -255,7 +255,7 @@ def test_logger_handle_bucket_limited(self, base_handle): @mock.patch('logging.Logger.handle') def test_logger_handle_bucket_skipped_msg(self, base_handle): """ - When calling `DDLogger.handle` + When calling `OtelLogger.handle` When a bucket exists for a previous time frame We pass only the record to the base `Logger.handle` We update the record message to include the number of skipped messages @@ -269,7 +269,7 @@ def test_logger_handle_bucket_skipped_msg(self, base_handle): key = (record.name, record.levelno, record.pathname, record.lineno) bucket = int(record.created / log.rate_limit) # We want the time bucket to be for an older bucket - log.buckets[key] = DDLogger.LoggingBucket(bucket=bucket - 1, skipped=20) + log.buckets[key] = OtelLogger.LoggingBucket(bucket=bucket - 1, skipped=20) # Handle our record log.handle(record) @@ -281,7 +281,7 @@ def test_logger_handle_bucket_skipped_msg(self, base_handle): def test_logger_handle_bucket_key(self): """ - When calling `DDLogger.handle` + When calling `OtelLogger.handle` With different log messages We use different buckets to limit them """ diff --git a/tests/internal/test_rate_limiter.py b/tests/internal/test_rate_limiter.py index 3479025b..9d15249a 100644 --- a/tests/internal/test_rate_limiter.py +++ b/tests/internal/test_rate_limiter.py @@ -3,8 +3,8 @@ import pytest -from ddtrace.internal.rate_limiter import RateLimiter -from ddtrace.vendor import monotonic +from oteltrace.internal.rate_limiter import RateLimiter +from oteltrace.vendor import monotonic def test_rate_limiter_init(): @@ -22,7 +22,7 @@ def test_rate_limiter_rate_limit_0(): assert limiter.max_tokens == 0 now = monotonic.monotonic() - with mock.patch('ddtrace.vendor.monotonic.monotonic') as mock_time: + with mock.patch('oteltrace.vendor.monotonic.monotonic') as mock_time: for i in range(10000): # Make sure the time is different for every check mock_time.return_value = now + i @@ -36,7 +36,7 @@ def test_rate_limiter_rate_limit_negative(): assert limiter.max_tokens == -1 now = monotonic.monotonic() - with mock.patch('ddtrace.vendor.monotonic.monotonic') as mock_time: + with mock.patch('oteltrace.vendor.monotonic.monotonic') as mock_time: for i in range(10000): # Make sure the time is different for every check mock_time.return_value = now + i @@ -61,7 +61,7 @@ def check_limit(): # Check the limit for 5 time frames for i in range(5): - with mock.patch('ddtrace.vendor.monotonic.monotonic') as mock_time: + with mock.patch('oteltrace.vendor.monotonic.monotonic') as mock_time: # Keep the same timeframe mock_time.return_value = now + i @@ -73,7 +73,7 @@ def test_rate_limiter_is_allowed_large_gap(): # Start time now = monotonic.monotonic() - with mock.patch('ddtrace.vendor.monotonic.monotonic') as mock_time: + with mock.patch('oteltrace.vendor.monotonic.monotonic') as mock_time: # Keep the same timeframe mock_time.return_value = now @@ -81,7 +81,7 @@ def test_rate_limiter_is_allowed_large_gap(): assert limiter.is_allowed() is True # Large gap before next call to `is_allowed()` - with mock.patch('ddtrace.vendor.monotonic.monotonic') as mock_time: + with mock.patch('oteltrace.vendor.monotonic.monotonic') as mock_time: 
mock_time.return_value = now + 100 for _ in range(100): @@ -95,7 +95,7 @@ def test_rate_limiter_is_allowed_small_gaps(): now = monotonic.monotonic() gap = 1.0 / 100.0 # Keep incrementing by a gap to keep us at our rate limit - with mock.patch('ddtrace.vendor.monotonic.monotonic') as mock_time: + with mock.patch('oteltrace.vendor.monotonic.monotonic') as mock_time: for i in range(10000): # Keep the same timeframe mock_time.return_value = now + (gap * i) @@ -108,7 +108,7 @@ def test_rate_liimter_effective_rate_rates(): # Static rate limit window starting_window = monotonic.monotonic() - with mock.patch('ddtrace.vendor.monotonic.monotonic') as mock_time: + with mock.patch('oteltrace.vendor.monotonic.monotonic') as mock_time: mock_time.return_value = starting_window for _ in range(100): @@ -123,7 +123,7 @@ def test_rate_liimter_effective_rate_rates(): assert limiter.current_window == starting_window prev_rate = 0.5 - with mock.patch('ddtrace.vendor.monotonic.monotonic') as mock_time: + with mock.patch('oteltrace.vendor.monotonic.monotonic') as mock_time: window = starting_window + 1.0 mock_time.return_value = window @@ -143,7 +143,7 @@ def test_rate_liimter_effective_rate_starting_rate(): limiter = RateLimiter(rate_limit=1) now = monotonic.monotonic() - with mock.patch('ddtrace.vendor.monotonic.monotonic') as mock_time: + with mock.patch('oteltrace.vendor.monotonic.monotonic') as mock_time: mock_time.return_value = now # Default values diff --git a/tests/internal/test_writer.py b/tests/internal/test_writer.py index d1e3a08d..8f7e7327 100644 --- a/tests/internal/test_writer.py +++ b/tests/internal/test_writer.py @@ -5,8 +5,8 @@ import mock -from ddtrace.span import Span -from ddtrace.internal.writer import AgentWriter, Q, Empty +from oteltrace.span import Span +from oteltrace.internal.writer import AgentWriter, Q, Empty class RemoveAllFilter(): @@ -64,8 +64,8 @@ class AgentWriterTests(TestCase): N_TRACES = 11 def create_worker(self, filters=None, api_class=DummyAPI, enable_stats=False): - self.dogstatsd = mock.Mock() - worker = AgentWriter(dogstatsd=self.dogstatsd, filters=filters) + self.metrics_client = mock.Mock() + worker = AgentWriter(metrics_client=self.metrics_client, filters=filters) worker._ENABLE_STATS = enable_stats worker._STATS_EVERY_INTERVAL = 1 self.api = api_class() @@ -112,50 +112,50 @@ def test_no_dogstats(self): worker = self.create_worker() assert worker._ENABLE_STATS is False assert [ - ] == self.dogstatsd.gauge.mock_calls + ] == self.metrics_client.gauge.mock_calls - def test_dogstatsd(self): + def test_metrics_client(self): self.create_worker(enable_stats=True) assert [ - mock.call('datadog.tracer.queue.max_length', 1000), - mock.call('datadog.tracer.queue.length', 11), - mock.call('datadog.tracer.queue.size', mock.ANY), - mock.call('datadog.tracer.queue.spans', 77), - ] == self.dogstatsd.gauge.mock_calls + mock.call('opentelemetry.tracer.queue.max_length', 1000), + mock.call('opentelemetry.tracer.queue.length', 11), + mock.call('opentelemetry.tracer.queue.size', mock.ANY), + mock.call('opentelemetry.tracer.queue.spans', 77), + ] == self.metrics_client.gauge.mock_calls increment_calls = [ - mock.call('datadog.tracer.queue.dropped', 0), - mock.call('datadog.tracer.queue.accepted', 11), - mock.call('datadog.tracer.queue.accepted_lengths', 77), - mock.call('datadog.tracer.queue.accepted_size', mock.ANY), - mock.call('datadog.tracer.traces.filtered', 0), - mock.call('datadog.tracer.api.requests', 11), - mock.call('datadog.tracer.api.errors', 0), - 
mock.call('datadog.tracer.api.responses', 11, tags=['status:200']), + mock.call('opentelemetry.tracer.queue.dropped', 0), + mock.call('opentelemetry.tracer.queue.accepted', 11), + mock.call('opentelemetry.tracer.queue.accepted_lengths', 77), + mock.call('opentelemetry.tracer.queue.accepted_size', mock.ANY), + mock.call('opentelemetry.tracer.traces.filtered', 0), + mock.call('opentelemetry.tracer.api.requests', 11), + mock.call('opentelemetry.tracer.api.errors', 0), + mock.call('opentelemetry.tracer.api.responses', 11, tags=['status:200']), ] if hasattr(time, 'thread_time_ns'): - increment_calls.append(mock.call('datadog.tracer.writer.cpu_time', mock.ANY)) - assert increment_calls == self.dogstatsd.increment.mock_calls + increment_calls.append(mock.call('opentelemetry.tracer.writer.cpu_time', mock.ANY)) + assert increment_calls == self.metrics_client.increment.mock_calls - def test_dogstatsd_failing_api(self): + def test_metrics_client_failing_api(self): self.create_worker(api_class=FailingAPI, enable_stats=True) assert [ - mock.call('datadog.tracer.queue.max_length', 1000), - mock.call('datadog.tracer.queue.length', 11), - mock.call('datadog.tracer.queue.size', mock.ANY), - mock.call('datadog.tracer.queue.spans', 77), - ] == self.dogstatsd.gauge.mock_calls + mock.call('opentelemetry.tracer.queue.max_length', 1000), + mock.call('opentelemetry.tracer.queue.length', 11), + mock.call('opentelemetry.tracer.queue.size', mock.ANY), + mock.call('opentelemetry.tracer.queue.spans', 77), + ] == self.metrics_client.gauge.mock_calls increment_calls = [ - mock.call('datadog.tracer.queue.dropped', 0), - mock.call('datadog.tracer.queue.accepted', 11), - mock.call('datadog.tracer.queue.accepted_lengths', 77), - mock.call('datadog.tracer.queue.accepted_size', mock.ANY), - mock.call('datadog.tracer.traces.filtered', 0), - mock.call('datadog.tracer.api.requests', 1), - mock.call('datadog.tracer.api.errors', 1), + mock.call('opentelemetry.tracer.queue.dropped', 0), + mock.call('opentelemetry.tracer.queue.accepted', 11), + mock.call('opentelemetry.tracer.queue.accepted_lengths', 77), + mock.call('opentelemetry.tracer.queue.accepted_size', mock.ANY), + mock.call('opentelemetry.tracer.traces.filtered', 0), + mock.call('opentelemetry.tracer.api.requests', 1), + mock.call('opentelemetry.tracer.api.errors', 1), ] if hasattr(time, 'thread_time_ns'): - increment_calls.append(mock.call('datadog.tracer.writer.cpu_time', mock.ANY)) - assert increment_calls == self.dogstatsd.increment.mock_calls + increment_calls.append(mock.call('opentelemetry.tracer.writer.cpu_time', mock.ANY)) + assert increment_calls == self.metrics_client.increment.mock_calls def test_queue_full(): diff --git a/tests/memory.py b/tests/memory.py index 6ec70e38..fd5b56e3 100644 --- a/tests/memory.py +++ b/tests/memory.py @@ -18,15 +18,15 @@ # project -import ddtrace +import oteltrace from tests.contrib import config # verbosity logging.basicConfig(stream=sys.stderr, level=logging.INFO) -ddtrace.patch_all() -ddtrace.tracer.writer = None +oteltrace.patch_all() +oteltrace.tracer.writer = None class KitchenSink(object): diff --git a/tests/opentracer/__init__.py b/tests/opentracer/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/opentracer/conftest.py b/tests/opentracer/conftest.py deleted file mode 100644 index f264d17a..00000000 --- a/tests/opentracer/conftest.py +++ /dev/null @@ -1,61 +0,0 @@ -""" -pytest local plugin used to automatically make the following fixtures -available for all tests in this directory - 
-https://docs.pytest.org/en/latest/writing_plugins.html#testing-plugins -""" -import pytest - -from ddtrace.opentracer import Tracer, set_global_tracer - -from tests.test_tracer import get_dummy_tracer - - -@pytest.fixture() -def ot_tracer_factory(): - """Fixture which returns an opentracer ready to use for testing.""" - - def make_ot_tracer( - service_name='my_svc', config=None, scope_manager=None, context_provider=None - ): - config = config or {} - tracer = Tracer( - service_name=service_name, config=config, scope_manager=scope_manager - ) - - # similar to how we test the ddtracer, use a dummy tracer - dd_tracer = get_dummy_tracer() - if context_provider: - dd_tracer.configure(context_provider=context_provider) - - # attach the dummy tracer to the opentracer - tracer._dd_tracer = dd_tracer - return tracer - - return make_ot_tracer - - -@pytest.fixture() -def ot_tracer(ot_tracer_factory): - """Fixture for a default opentracer.""" - return ot_tracer_factory() - - -@pytest.fixture() -def global_tracer(ot_tracer): - """A function similar to one OpenTracing users would write to initialize - their OpenTracing tracer. - """ - set_global_tracer(ot_tracer) - - return ot_tracer - - -@pytest.fixture() -def writer(ot_tracer): - return ot_tracer._dd_tracer.writer - - -@pytest.fixture() -def dd_tracer(ot_tracer): - return ot_tracer._dd_tracer diff --git a/tests/opentracer/test_dd_compatibility.py b/tests/opentracer/test_dd_compatibility.py deleted file mode 100644 index 2bb4c090..00000000 --- a/tests/opentracer/test_dd_compatibility.py +++ /dev/null @@ -1,188 +0,0 @@ -import ddtrace -import opentracing -from opentracing import Format - -from ddtrace.opentracer.span_context import SpanContext - - -class TestTracerCompatibility(object): - """Ensure that our opentracer produces results in the underlying ddtracer.""" - - def test_ottracer_uses_global_ddtracer(self): - """Ensure that the opentracer will by default use the global ddtracer - as its underlying Datadog tracer. - """ - tracer = ddtrace.opentracer.Tracer() - assert tracer._dd_tracer is ddtrace.tracer - - def test_custom_ddtracer(self): - """A user should be able to specify their own Datadog tracer instance if - they wish. 
- """ - custom_dd_tracer = ddtrace.Tracer() - tracer = ddtrace.opentracer.Tracer(dd_tracer=custom_dd_tracer) - assert tracer._dd_tracer is custom_dd_tracer - - def test_ot_dd_global_tracers(self, global_tracer): - """Ensure our test function opentracer_init() prep""" - ot_tracer = global_tracer - dd_tracer = global_tracer._dd_tracer - - # check all the global references - assert ot_tracer is opentracing.tracer - assert ot_tracer._dd_tracer is dd_tracer - assert dd_tracer is ddtrace.tracer - - def test_ot_dd_nested_trace(self, ot_tracer, dd_tracer, writer): - """Ensure intertwined usage of the opentracer and ddtracer.""" - - with ot_tracer.start_span('my_ot_span') as ot_span: - with dd_tracer.trace('my_dd_span') as dd_span: - pass - spans = writer.pop() - assert len(spans) == 2 - - # confirm the ordering - assert spans[0] is ot_span._dd_span - assert spans[1] is dd_span - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id == spans[0].span_id - - def test_dd_ot_nested_trace(self, ot_tracer, dd_tracer, writer): - """Ensure intertwined usage of the opentracer and ddtracer.""" - with dd_tracer.trace('my_dd_span') as dd_span: - with ot_tracer.start_span('my_ot_span') as ot_span: - pass - spans = writer.pop() - assert len(spans) == 2 - - # confirm the ordering - assert spans[0] is dd_span - assert spans[1] is ot_span._dd_span - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id is spans[0].span_id - - def test_ot_dd_ot_dd_nested_trace(self, ot_tracer, dd_tracer, writer): - """Ensure intertwined usage of the opentracer and ddtracer.""" - with ot_tracer.start_span('my_ot_span') as ot_span: - with dd_tracer.trace('my_dd_span') as dd_span: - with ot_tracer.start_span('my_ot_span') as ot_span2: - with dd_tracer.trace('my_dd_span') as dd_span2: - pass - - spans = writer.pop() - assert len(spans) == 4 - - # confirm the ordering - assert spans[0] is ot_span._dd_span - assert spans[1] is dd_span - assert spans[2] is ot_span2._dd_span - assert spans[3] is dd_span2 - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[1].span_id - assert spans[3].parent_id is spans[2].span_id - - def test_ot_ot_dd_ot_dd_nested_trace_active(self, ot_tracer, dd_tracer, writer): - """Ensure intertwined usage of the opentracer and ddtracer.""" - with ot_tracer.start_active_span('my_ot_span') as ot_scope: - with ot_tracer.start_active_span('my_ot_span') as ot_scope2: - with dd_tracer.trace('my_dd_span') as dd_span: - with ot_tracer.start_active_span('my_ot_span') as ot_scope3: - with dd_tracer.trace('my_dd_span') as dd_span2: - pass - - spans = writer.pop() - assert len(spans) == 5 - - # confirm the ordering - assert spans[0] is ot_scope.span._dd_span - assert spans[1] is ot_scope2.span._dd_span - assert spans[2] is dd_span - assert spans[3] is ot_scope3.span._dd_span - assert spans[4] is dd_span2 - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id == spans[0].span_id - assert spans[2].parent_id == spans[1].span_id - assert spans[3].parent_id == spans[2].span_id - assert spans[4].parent_id == spans[3].span_id - - def test_consecutive_trace(self, ot_tracer, dd_tracer, writer): - """Ensure consecutive usage of the opentracer and ddtracer.""" - with ot_tracer.start_active_span('my_ot_span') as ot_scope: - pass - - with dd_tracer.trace('my_dd_span') as dd_span: - pass - - with ot_tracer.start_active_span('my_ot_span') as ot_scope2: 
- pass - - with dd_tracer.trace('my_dd_span') as dd_span2: - pass - - spans = writer.pop() - assert len(spans) == 4 - - # confirm the ordering - assert spans[0] is ot_scope.span._dd_span - assert spans[1] is dd_span - assert spans[2] is ot_scope2.span._dd_span - assert spans[3] is dd_span2 - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id is None - assert spans[2].parent_id is None - assert spans[3].parent_id is None - - def test_ddtrace_wrapped_fn(self, ot_tracer, dd_tracer, writer): - """Ensure ddtrace wrapped functions work with the opentracer""" - - @dd_tracer.wrap() - def fn(): - with ot_tracer.start_span('ot_span_inner'): - pass - - with ot_tracer.start_active_span('ot_span_outer'): - fn() - - spans = writer.pop() - assert len(spans) == 3 - - # confirm the ordering - assert spans[0].name == 'ot_span_outer' - assert spans[1].name == 'tests.opentracer.test_dd_compatibility.fn' - assert spans[2].name == 'ot_span_inner' - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[1].span_id - - def test_distributed_trace_propagation(self, ot_tracer, dd_tracer, writer): - """Ensure that a propagated span context is properly activated.""" - span_ctx = SpanContext(trace_id=123, span_id=456) - carrier = {} - ot_tracer.inject(span_ctx, Format.HTTP_HEADERS, carrier) - - # extract should activate the span so that a subsequent start_span - # will inherit from the propagated span context - ot_tracer.extract(Format.HTTP_HEADERS, carrier) - - with dd_tracer.trace('test') as span: - pass - - assert span.parent_id == 456 - assert span.trace_id == 123 - - spans = writer.pop() - assert len(spans) == 1 diff --git a/tests/opentracer/test_span.py b/tests/opentracer/test_span.py deleted file mode 100644 index fe07f40f..00000000 --- a/tests/opentracer/test_span.py +++ /dev/null @@ -1,153 +0,0 @@ -import pytest -from ddtrace.opentracer.span import Span -from ..test_tracer import get_dummy_tracer - - -@pytest.fixture -def nop_tracer(): - from ddtrace.opentracer import Tracer - tracer = Tracer(service_name='mysvc', config={}) - # use the same test tracer used by the primary tests - tracer._tracer = get_dummy_tracer() - return tracer - - -@pytest.fixture -def nop_span_ctx(): - from ddtrace.ext.priority import AUTO_KEEP - from ddtrace.opentracer.span_context import SpanContext - return SpanContext(sampling_priority=AUTO_KEEP) - - -@pytest.fixture -def nop_span(nop_tracer, nop_span_ctx): - return Span(nop_tracer, nop_span_ctx, 'my_op_name') - - -class TestSpan(object): - """Test the Datadog OpenTracing Span implementation.""" - - def test_init(self, nop_tracer, nop_span_ctx): - """Very basic test for skeleton code""" - span = Span(nop_tracer, nop_span_ctx, 'my_op_name') - assert not span.finished - - def test_tags(self, nop_span): - """Set a tag and get it back.""" - nop_span.set_tag('test', 23) - assert int(nop_span._get_tag('test')) == 23 - - def test_set_baggage(self, nop_span): - """Test setting baggage.""" - r = nop_span.set_baggage_item('test', 23) - assert r is nop_span - - r = nop_span.set_baggage_item('1', 1).set_baggage_item('2', 2) - assert r is nop_span - - def test_get_baggage(self, nop_span): - """Test setting and getting baggage.""" - # test a single item - nop_span.set_baggage_item('test', 23) - assert int(nop_span.get_baggage_item('test')) == 23 - - # test multiple items - nop_span.set_baggage_item('1', '1').set_baggage_item('2', 2) - assert 
int(nop_span.get_baggage_item('test')) == 23 - assert nop_span.get_baggage_item('1') == '1' - assert int(nop_span.get_baggage_item('2')) == 2 - - def test_log_kv(self, nop_span): - """Ensure logging values doesn't break anything.""" - # just log a bunch of values - nop_span.log_kv({'myval': 2}) - nop_span.log_kv({'myval2': 3}) - nop_span.log_kv({'myval3': 5}) - nop_span.log_kv({'myval': 2}) - - def test_log_dd_kv(self, nop_span): - """Ensure keys that can be handled by our impl. are indeed handled. """ - import traceback - from ddtrace.ext import errors - - stack_trace = str(traceback.format_stack()) - nop_span.log_kv({ - 'event': 'error', - 'error': 3, - 'message': 'my error message', - 'stack': stack_trace, - }) - - # Ensure error flag is set... - assert nop_span._dd_span.error - # ...and that error tags are set with the correct key - assert nop_span._get_tag(errors.ERROR_STACK) == stack_trace - assert nop_span._get_tag(errors.ERROR_MSG) == 'my error message' - assert nop_span._get_tag(errors.ERROR_TYPE) == '3' - - def test_operation_name(self, nop_span): - """Sanity check for setting the operation name.""" - # just try setting the operation name - nop_span.set_operation_name('new_op_name') - assert nop_span._dd_span.name == 'new_op_name' - - def test_context_manager(self, nop_span): - """Test the span context manager.""" - import time - - assert not nop_span.finished - # run the context manager but since the span has not been added - # to the span context, we will not get any traces - with nop_span: - time.sleep(0.005) - - # span should be finished when the context manager exits - assert nop_span.finished - - # there should be no traces (see above comment) - spans = nop_span.tracer._tracer.writer.pop() - assert len(spans) == 0 - - def test_immutable_span_context(self, nop_span): - """Ensure span contexts are immutable.""" - before_ctx = nop_span._context - nop_span.set_baggage_item('key', 'value') - after_ctx = nop_span._context - # should be different contexts - assert before_ctx is not after_ctx - - -class TestSpanCompatibility(object): - """Ensure our opentracer spans features correspond to datadog span features. 
- """ - def test_set_tag(self, nop_span): - nop_span.set_tag('test', 2) - assert nop_span._dd_span.get_tag('test') == str(2) - - def test_tag_resource_name(self, nop_span): - nop_span.set_tag('resource.name', 'myresource') - assert nop_span._dd_span.resource == 'myresource' - - def test_tag_span_type(self, nop_span): - nop_span.set_tag('span.type', 'db') - assert nop_span._dd_span.span_type == 'db' - - def test_tag_service_name(self, nop_span): - nop_span.set_tag('service.name', 'mysvc234') - assert nop_span._dd_span.service == 'mysvc234' - - def test_tag_db_statement(self, nop_span): - nop_span.set_tag('db.statement', 'SELECT * FROM USERS') - assert nop_span._dd_span.resource == 'SELECT * FROM USERS' - - def test_tag_peer_hostname(self, nop_span): - nop_span.set_tag('peer.hostname', 'peername') - assert nop_span._dd_span.get_tag('out.host') == 'peername' - - def test_tag_peer_port(self, nop_span): - nop_span.set_tag('peer.port', '55555') - assert nop_span._dd_span.get_tag('out.port') == '55555' - - def test_tag_sampling_priority(self, nop_span): - nop_span.set_tag('sampling.priority', '2') - assert nop_span._dd_span.context._sampling_priority == '2' diff --git a/tests/opentracer/test_span_context.py b/tests/opentracer/test_span_context.py deleted file mode 100644 index a8d1b2f5..00000000 --- a/tests/opentracer/test_span_context.py +++ /dev/null @@ -1,39 +0,0 @@ -from ddtrace.opentracer.span_context import SpanContext - - -class TestSpanContext(object): - - def test_init(self): - """Make sure span context creation is fine.""" - span_ctx = SpanContext() - assert span_ctx - - def test_baggage(self): - """Ensure baggage passed is the resulting baggage of the span context.""" - baggage = { - 'some': 'stuff', - } - - span_ctx = SpanContext(baggage=baggage) - - assert span_ctx.baggage == baggage - - def test_with_baggage_item(self): - """Should allow immutable extension of new span contexts.""" - baggage = { - '1': 1, - } - - first_ctx = SpanContext(baggage=baggage) - - second_ctx = first_ctx.with_baggage_item('2', 2) - - assert '2' not in first_ctx.baggage - assert second_ctx.baggage is not first_ctx.baggage - - def test_span_context_immutable_baggage(self): - """Ensure that two different span contexts do not share baggage.""" - ctx1 = SpanContext() - ctx1.set_baggage_item('test', 3) - ctx2 = SpanContext() - assert 'test' not in ctx2._baggage diff --git a/tests/opentracer/test_tracer.py b/tests/opentracer/test_tracer.py deleted file mode 100644 index e259e132..00000000 --- a/tests/opentracer/test_tracer.py +++ /dev/null @@ -1,574 +0,0 @@ -import time - -import opentracing -from opentracing import ( - child_of, - Format, - InvalidCarrierException, - UnsupportedFormatException, - SpanContextCorruptedException, -) - -import ddtrace -from ddtrace.ext.priority import AUTO_KEEP -from ddtrace.opentracer import Tracer, set_global_tracer -from ddtrace.opentracer.span_context import SpanContext -from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID -from ddtrace.settings import ConfigException - -import mock -import pytest - - -class TestTracerConfig(object): - def test_config(self): - """Test the configuration of the tracer""" - config = {'enabled': True} - tracer = Tracer(service_name='myservice', config=config) - - assert tracer._service_name == 'myservice' - assert tracer._enabled is True - - def test_no_service_name(self): - """A service_name should be generated if one is not provided.""" - tracer = Tracer() - assert tracer._service_name == 'pytest' - - def test_multiple_tracer_configs(self): - 
"""Ensure that a tracer config is a copy of the passed config.""" - config = {'enabled': True} - - tracer1 = Tracer(service_name='serv1', config=config) - assert tracer1._service_name == 'serv1' - - config['enabled'] = False - tracer2 = Tracer(service_name='serv2', config=config) - - # Ensure tracer1's config was not mutated - assert tracer1._service_name == 'serv1' - assert tracer1._enabled is True - - assert tracer2._service_name == 'serv2' - assert tracer2._enabled is False - - def test_invalid_config_key(self): - """A config with an invalid key should raise a ConfigException.""" - - config = {'enabeld': False} - - # No debug flag should not raise an error - tracer = Tracer(service_name='mysvc', config=config) - - # With debug flag should raise an error - config['debug'] = True - with pytest.raises(ConfigException) as ce_info: - tracer = Tracer(config=config) - assert 'enabeld' in str(ce_info) - assert tracer is not None - - # Test with multiple incorrect keys - config['setttings'] = {} - with pytest.raises(ConfigException) as ce_info: - tracer = Tracer(service_name='mysvc', config=config) - assert ['enabeld', 'setttings'] in str(ce_info) - assert tracer is not None - - def test_global_tags(self): - """Global tags should be passed from the opentracer to the tracer.""" - config = { - 'global_tags': { - 'tag1': 'value1', - 'tag2': 2, - }, - } - - tracer = Tracer(service_name='mysvc', config=config) - with tracer.start_span('myop') as span: - # global tags should be attached to generated all datadog spans - assert span._dd_span.get_tag('tag1') == 'value1' - assert span._dd_span.get_tag('tag2') == '2' - - with tracer.start_span('myop2') as span2: - assert span2._dd_span.get_tag('tag1') == 'value1' - assert span2._dd_span.get_tag('tag2') == '2' - - -class TestTracer(object): - def test_start_span(self, ot_tracer, writer): - """Start and finish a span.""" - with ot_tracer.start_span('myop') as span: - pass - - # span should be finished when the context manager exits - assert span.finished - - spans = writer.pop() - assert len(spans) == 1 - - def test_start_span_references(self, ot_tracer, writer): - """Start a span using references.""" - - with ot_tracer.start_span('one', references=[child_of()]): - pass - - spans = writer.pop() - assert spans[0].parent_id is None - - root = ot_tracer.start_active_span('root') - # create a child using a parent reference that is not the context parent - with ot_tracer.start_active_span('one'): - with ot_tracer.start_active_span('two', references=[child_of(root.span)]): - pass - root.close() - - spans = writer.pop() - assert spans[2].parent_id is spans[0].span_id - - def test_start_span_custom_start_time(self, ot_tracer): - """Start a span with a custom start time.""" - t = 100 - with mock.patch('time.time') as time: - time.return_value = 102 - with ot_tracer.start_span('myop', start_time=t) as span: - pass - - assert span._dd_span.start == t - assert span._dd_span.duration == 2 - - def test_start_span_with_spancontext(self, ot_tracer, writer): - """Start and finish a span using a span context as the child_of - reference. 
- """ - with ot_tracer.start_span('myop') as span: - with ot_tracer.start_span('myop', child_of=span.context) as span2: - pass - - # span should be finished when the context manager exits - assert span.finished - assert span2.finished - - spans = writer.pop() - assert len(spans) == 2 - - # ensure proper parenting - assert spans[1].parent_id is spans[0].span_id - - def test_start_span_with_tags(self, ot_tracer): - """Create a span with initial tags.""" - tags = {'key': 'value', 'key2': 'value2'} - with ot_tracer.start_span('myop', tags=tags) as span: - pass - - assert span._dd_span.get_tag('key') == 'value' - assert span._dd_span.get_tag('key2') == 'value2' - - def test_start_active_span_multi_child(self, ot_tracer, writer): - """Start and finish multiple child spans. - This should ensure that child spans can be created 2 levels deep. - """ - with ot_tracer.start_active_span('myfirstop') as scope1: - time.sleep(0.009) - with ot_tracer.start_active_span('mysecondop') as scope2: - time.sleep(0.007) - with ot_tracer.start_active_span('mythirdop') as scope3: - time.sleep(0.005) - - # spans should be finished when the context manager exits - assert scope1.span.finished - assert scope2.span.finished - assert scope3.span.finished - - spans = writer.pop() - - # check spans are captured in the trace - assert scope1.span._dd_span is spans[0] - assert scope2.span._dd_span is spans[1] - assert scope3.span._dd_span is spans[2] - - # ensure proper parenting - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[1].span_id - - # sanity check a lower bound on the durations - assert spans[0].duration >= 0.009 + 0.007 + 0.005 - assert spans[1].duration >= 0.007 + 0.005 - assert spans[2].duration >= 0.005 - - def test_start_active_span_multi_child_siblings(self, ot_tracer, writer): - """Start and finish multiple span at the same level. - This should test to ensure a parent can have multiple child spans at the - same level. - """ - with ot_tracer.start_active_span('myfirstop') as scope1: - time.sleep(0.009) - with ot_tracer.start_active_span('mysecondop') as scope2: - time.sleep(0.007) - with ot_tracer.start_active_span('mythirdop') as scope3: - time.sleep(0.005) - - # spans should be finished when the context manager exits - assert scope1.span.finished - assert scope2.span.finished - assert scope3.span.finished - - spans = writer.pop() - - # check spans are captured in the trace - assert scope1.span._dd_span is spans[0] - assert scope2.span._dd_span is spans[1] - assert scope3.span._dd_span is spans[2] - - # ensure proper parenting - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[0].span_id - - # sanity check a lower bound on the durations - assert spans[0].duration >= 0.009 + 0.007 + 0.005 - assert spans[1].duration >= 0.007 - assert spans[2].duration >= 0.005 - - def test_start_span_manual_child_of(self, ot_tracer, writer): - """Start spans without using a scope manager. - Spans should be created without parents since there will be no call - for the active span. 
- """ - root = ot_tracer.start_span('zero') - - with ot_tracer.start_span('one', child_of=root): - with ot_tracer.start_span('two', child_of=root): - with ot_tracer.start_span('three', child_of=root): - pass - root.finish() - - spans = writer.pop() - - assert spans[0].parent_id is None - # ensure each child span is a child of root - assert spans[1].parent_id is root._dd_span.span_id - assert spans[2].parent_id is root._dd_span.span_id - assert spans[3].parent_id is root._dd_span.span_id - assert ( - spans[0].trace_id == spans[1].trace_id - and spans[1].trace_id == spans[2].trace_id - ) - - def test_start_span_no_active_span(self, ot_tracer, writer): - """Start spans without using a scope manager. - Spans should be created without parents since there will be no call - for the active span. - """ - with ot_tracer.start_span('one', ignore_active_span=True): - with ot_tracer.start_span('two', ignore_active_span=True): - pass - with ot_tracer.start_span('three', ignore_active_span=True): - pass - - spans = writer.pop() - - # ensure each span does not have a parent - assert spans[0].parent_id is None - assert spans[1].parent_id is None - assert spans[2].parent_id is None - # and that each span is a new trace - assert ( - spans[0].trace_id != spans[1].trace_id - and spans[1].trace_id != spans[2].trace_id - and spans[0].trace_id != spans[2].trace_id - ) - - def test_start_active_span_child_finish_after_parent(self, ot_tracer, writer): - """Start a child span and finish it after its parent.""" - span1 = ot_tracer.start_active_span('one').span - span2 = ot_tracer.start_active_span('two').span - span1.finish() - time.sleep(0.005) - span2.finish() - - spans = writer.pop() - assert len(spans) == 2 - assert spans[0].parent_id is None - assert spans[1].parent_id is span1._dd_span.span_id - assert spans[1].duration > spans[0].duration - - def test_start_span_multi_intertwined(self, ot_tracer, writer): - """Start multiple spans at the top level intertwined. - Alternate calling between two traces. 
- """ - import threading - - # synchronize threads with a threading event object - event = threading.Event() - - def trace_one(): - id = 11 - with ot_tracer.start_active_span(str(id)): - id += 1 - with ot_tracer.start_active_span(str(id)): - id += 1 - with ot_tracer.start_active_span(str(id)): - event.set() - - def trace_two(): - id = 21 - event.wait() - with ot_tracer.start_active_span(str(id)): - id += 1 - with ot_tracer.start_active_span(str(id)): - id += 1 - with ot_tracer.start_active_span(str(id)): - pass - - # the ordering should be - # t1.span1/t2.span1, t2.span2, t1.span2, t1.span3, t2.span3 - t1 = threading.Thread(target=trace_one) - t2 = threading.Thread(target=trace_two) - - t1.start() - t2.start() - # wait for threads to finish - t1.join() - t2.join() - - spans = writer.pop() - - # trace_one will finish before trace_two so its spans should be written - # before the spans from trace_two, let's confirm this - assert spans[0].name == '11' - assert spans[1].name == '12' - assert spans[2].name == '13' - assert spans[3].name == '21' - assert spans[4].name == '22' - assert spans[5].name == '23' - - # next let's ensure that each span has the correct parent: - # trace_one - assert spans[0].parent_id is None - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[1].span_id - # trace_two - assert spans[3].parent_id is None - assert spans[4].parent_id is spans[3].span_id - assert spans[5].parent_id is spans[3].span_id - - # finally we should ensure that the trace_ids are reasonable - # trace_one - assert ( - spans[0].trace_id == spans[1].trace_id - and spans[1].trace_id == spans[2].trace_id - ) - # traces should be independent - assert spans[2].trace_id != spans[3].trace_id - # trace_two - assert ( - spans[3].trace_id == spans[4].trace_id - and spans[4].trace_id == spans[5].trace_id - ) - - def test_start_active_span(self, ot_tracer, writer): - with ot_tracer.start_active_span('one') as scope: - pass - - assert scope.span._dd_span.name == 'one' - assert scope.span.finished - spans = writer.pop() - assert spans - - def test_start_active_span_finish_on_close(self, ot_tracer, writer): - with ot_tracer.start_active_span('one', finish_on_close=False) as scope: - pass - - assert scope.span._dd_span.name == 'one' - assert not scope.span.finished - spans = writer.pop() - assert not spans - - def test_start_active_span_nested(self, ot_tracer): - """Test the active span of multiple nested calls of start_active_span.""" - with ot_tracer.start_active_span('one') as outer_scope: - assert ot_tracer.active_span == outer_scope.span - with ot_tracer.start_active_span('two') as inner_scope: - assert ot_tracer.active_span == inner_scope.span - with ot_tracer.start_active_span( - 'three' - ) as innest_scope: # why isn't it innest? 
innermost so verbose - assert ot_tracer.active_span == innest_scope.span - with ot_tracer.start_active_span('two') as inner_scope: - assert ot_tracer.active_span == inner_scope.span - assert ot_tracer.active_span == outer_scope.span - assert ot_tracer.active_span is None - - def test_start_active_span_trace(self, ot_tracer, writer): - """Test the active span of multiple nested calls of start_active_span.""" - with ot_tracer.start_active_span('one') as outer_scope: - outer_scope.span.set_tag('outer', 2) - with ot_tracer.start_active_span('two') as inner_scope: - inner_scope.span.set_tag('inner', 3) - with ot_tracer.start_active_span('two') as inner_scope: - inner_scope.span.set_tag('inner', 3) - with ot_tracer.start_active_span('three') as innest_scope: - innest_scope.span.set_tag('innerest', 4) - - spans = writer.pop() - - assert spans[0].parent_id is None - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[0].span_id - assert spans[3].parent_id is spans[2].span_id - - -@pytest.fixture -def nop_span_ctx(): - - return SpanContext(sampling_priority=AUTO_KEEP) - - -class TestTracerSpanContextPropagation(object): - """Test the injection and extration of a span context from a tracer.""" - - def test_invalid_format(self, ot_tracer, nop_span_ctx): - """An invalid format should raise an UnsupportedFormatException.""" - # test inject - with pytest.raises(UnsupportedFormatException): - ot_tracer.inject(nop_span_ctx, None, {}) - - # test extract - with pytest.raises(UnsupportedFormatException): - ot_tracer.extract(None, {}) - - def test_inject_invalid_carrier(self, ot_tracer, nop_span_ctx): - """Only dicts should be supported as a carrier.""" - with pytest.raises(InvalidCarrierException): - ot_tracer.inject(nop_span_ctx, Format.HTTP_HEADERS, None) - - def test_extract_invalid_carrier(self, ot_tracer): - """Only dicts should be supported as a carrier.""" - with pytest.raises(InvalidCarrierException): - ot_tracer.extract(Format.HTTP_HEADERS, None) - - def test_http_headers_base(self, ot_tracer): - """extract should undo inject for http headers.""" - - span_ctx = SpanContext(trace_id=123, span_id=456) - carrier = {} - - ot_tracer.inject(span_ctx, Format.HTTP_HEADERS, carrier) - assert len(carrier.keys()) > 0 - - ext_span_ctx = ot_tracer.extract(Format.HTTP_HEADERS, carrier) - assert ext_span_ctx._dd_context.trace_id == 123 - assert ext_span_ctx._dd_context.span_id == 456 - - def test_http_headers_baggage(self, ot_tracer): - """extract should undo inject for http headers.""" - span_ctx = SpanContext( - trace_id=123, span_id=456, baggage={'test': 4, 'test2': 'string'} - ) - carrier = {} - - ot_tracer.inject(span_ctx, Format.HTTP_HEADERS, carrier) - assert len(carrier.keys()) > 0 - - ext_span_ctx = ot_tracer.extract(Format.HTTP_HEADERS, carrier) - assert ext_span_ctx._dd_context.trace_id == 123 - assert ext_span_ctx._dd_context.span_id == 456 - assert ext_span_ctx.baggage == span_ctx.baggage - - def test_empty_propagated_context(self, ot_tracer): - """An empty propagated context should raise a - SpanContextCorruptedException when extracted. 
- """ - carrier = {} - with pytest.raises(SpanContextCorruptedException): - ot_tracer.extract(Format.HTTP_HEADERS, carrier) - - def test_text(self, ot_tracer): - """extract should undo inject for http headers""" - span_ctx = SpanContext( - trace_id=123, span_id=456, baggage={'test': 4, 'test2': 'string'} - ) - carrier = {} - - ot_tracer.inject(span_ctx, Format.TEXT_MAP, carrier) - assert len(carrier.keys()) > 0 - - ext_span_ctx = ot_tracer.extract(Format.TEXT_MAP, carrier) - assert ext_span_ctx._dd_context.trace_id == 123 - assert ext_span_ctx._dd_context.span_id == 456 - assert ext_span_ctx.baggage == span_ctx.baggage - - def test_corrupted_propagated_context(self, ot_tracer): - """Corrupted context should raise a SpanContextCorruptedException.""" - span_ctx = SpanContext( - trace_id=123, span_id=456, baggage={'test': 4, 'test2': 'string'} - ) - carrier = {} - - ot_tracer.inject(span_ctx, Format.TEXT_MAP, carrier) - assert len(carrier.keys()) > 0 - - # manually alter a key in the carrier baggage - del carrier[HTTP_HEADER_TRACE_ID] - corrupted_key = HTTP_HEADER_TRACE_ID[2:] - carrier[corrupted_key] = 123 - - with pytest.raises(SpanContextCorruptedException): - ot_tracer.extract(Format.TEXT_MAP, carrier) - - def test_immutable_span_context(self, ot_tracer): - """Span contexts should be immutable.""" - with ot_tracer.start_span('root') as root: - ctx_before = root.context - root.set_baggage_item('test', 2) - assert ctx_before is not root.context - with ot_tracer.start_span('child') as level1: - with ot_tracer.start_span('child') as level2: - pass - assert root.context is not level1.context - assert level2.context is not level1.context - assert level2.context is not root.context - - def test_inherited_baggage(self, ot_tracer): - """Baggage should be inherited by child spans.""" - with ot_tracer.start_active_span('root') as root: - # this should be passed down to the child - root.span.set_baggage_item('root', 1) - root.span.set_baggage_item('root2', 1) - with ot_tracer.start_active_span('child') as level1: - level1.span.set_baggage_item('level1', 1) - with ot_tracer.start_active_span('child') as level2: - level2.span.set_baggage_item('level2', 1) - # ensure immutability - assert level1.span.context is not root.span.context - assert level2.span.context is not level1.span.context - - # level1 should have inherited the baggage of root - assert level1.span.get_baggage_item('root') - assert level1.span.get_baggage_item('root2') - - # level2 should have inherited the baggage of both level1 and level2 - assert level2.span.get_baggage_item('root') - assert level2.span.get_baggage_item('root2') - assert level2.span.get_baggage_item('level1') - assert level2.span.get_baggage_item('level2') - - -class TestTracerCompatibility(object): - """Ensure that our opentracer produces results in the underlying datadog tracer.""" - - def test_required_dd_fields(self): - """Ensure required fields needed for successful tracing are possessed - by the underlying datadog tracer. 
- """ - # a service name is required - tracer = Tracer('service') - with tracer.start_span('my_span') as span: - assert span._dd_span.service - - -def test_set_global_tracer(): - """Sanity check for set_global_tracer""" - my_tracer = Tracer('service') - set_global_tracer(my_tracer) - - assert opentracing.tracer is my_tracer - assert ddtrace.tracer is my_tracer._dd_tracer diff --git a/tests/opentracer/test_tracer_asyncio.py b/tests/opentracer/test_tracer_asyncio.py deleted file mode 100644 index 3e1e6c0e..00000000 --- a/tests/opentracer/test_tracer_asyncio.py +++ /dev/null @@ -1,192 +0,0 @@ -import asyncio -import pytest -from opentracing.scope_managers.asyncio import AsyncioScopeManager - -import ddtrace -from ddtrace.opentracer.utils import get_context_provider_for_scope_manager - -from tests.contrib.asyncio.utils import AsyncioTestCase, mark_asyncio -from .conftest import ot_tracer_factory # noqa: F401 - - -@pytest.fixture() -def ot_tracer(request, ot_tracer_factory): # noqa: F811 - # use the dummy asyncio ot tracer - request.instance.ot_tracer = ot_tracer_factory( - 'asyncio_svc', - config={}, - scope_manager=AsyncioScopeManager(), - context_provider=ddtrace.contrib.asyncio.context_provider, - ) - request.instance.ot_writer = request.instance.ot_tracer._dd_tracer.writer - request.instance.dd_tracer = request.instance.ot_tracer._dd_tracer - - -@pytest.mark.usefixtures('ot_tracer') -class TestTracerAsyncio(AsyncioTestCase): - - def reset(self): - self.ot_writer.pop_traces() - - @mark_asyncio - def test_trace_coroutine(self): - # it should use the task context when invoked in a coroutine - with self.ot_tracer.start_span('coroutine'): - pass - - traces = self.ot_writer.pop_traces() - - assert len(traces) == 1 - assert len(traces[0]) == 1 - assert traces[0][0].name == 'coroutine' - - @mark_asyncio - def test_trace_multiple_coroutines(self): - # if multiple coroutines have nested tracing, they must belong - # to the same trace - @asyncio.coroutine - def coro(): - # another traced coroutine - with self.ot_tracer.start_active_span('coroutine_2'): - return 42 - - with self.ot_tracer.start_active_span('coroutine_1'): - value = yield from coro() - - # the coroutine has been called correctly - assert value == 42 - # a single trace has been properly reported - traces = self.ot_writer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - assert traces[0][0].name == 'coroutine_1' - assert traces[0][1].name == 'coroutine_2' - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id - - @mark_asyncio - def test_exception(self): - @asyncio.coroutine - def f1(): - with self.ot_tracer.start_span('f1'): - raise Exception('f1 error') - - with pytest.raises(Exception): - yield from f1() - - traces = self.ot_writer.pop_traces() - assert len(traces) == 1 - spans = traces[0] - assert len(spans) == 1 - span = spans[0] - assert span.error == 1 - assert span.get_tag('error.msg') == 'f1 error' - assert 'Exception: f1 error' in span.get_tag('error.stack') - - @mark_asyncio - def test_trace_multiple_calls(self): - # create multiple futures so that we expect multiple - # traces instead of a single one (helper not used) - @asyncio.coroutine - def coro(): - # another traced coroutine - with self.ot_tracer.start_span('coroutine'): - yield from asyncio.sleep(0.01) - - futures = [asyncio.ensure_future(coro()) for x in range(10)] - for future in futures: - yield from future - - traces = self.ot_writer.pop_traces() - - assert len(traces) 
== 10 - assert len(traces[0]) == 1 - assert traces[0][0].name == 'coroutine' - - -@pytest.mark.usefixtures('ot_tracer') -class TestTracerAsyncioCompatibility(AsyncioTestCase): - """Ensure the opentracer works in tandem with the ddtracer and asyncio.""" - - @mark_asyncio - def test_trace_multiple_coroutines_ot_dd(self): - """ - Ensure we can trace from opentracer to ddtracer across asyncio - context switches. - """ - # if multiple coroutines have nested tracing, they must belong - # to the same trace - @asyncio.coroutine - def coro(): - # another traced coroutine - with self.dd_tracer.trace('coroutine_2'): - return 42 - - with self.ot_tracer.start_active_span('coroutine_1'): - value = yield from coro() - - # the coroutine has been called correctly - assert value == 42 - # a single trace has been properly reported - traces = self.ot_tracer._dd_tracer.writer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - assert traces[0][0].name == 'coroutine_1' - assert traces[0][1].name == 'coroutine_2' - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id - - @mark_asyncio - def test_trace_multiple_coroutines_dd_ot(self): - """ - Ensure we can trace from ddtracer to opentracer across asyncio - context switches. - """ - # if multiple coroutines have nested tracing, they must belong - # to the same trace - @asyncio.coroutine - def coro(): - # another traced coroutine - with self.ot_tracer.start_span('coroutine_2'): - return 42 - - with self.dd_tracer.trace('coroutine_1'): - value = yield from coro() - - # the coroutine has been called correctly - assert value == 42 - # a single trace has been properly reported - traces = self.ot_tracer._dd_tracer.writer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - assert traces[0][0].name == 'coroutine_1' - assert traces[0][1].name == 'coroutine_2' - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id - - -@pytest.mark.skipif( - ddtrace.internal.context_manager.CONTEXTVARS_IS_AVAILABLE, - reason='only applicable to legacy asyncio provider' -) -class TestUtilsAsyncio(object): - """Test the util routines of the opentracer with asyncio specific - configuration. 
- """ - - def test_get_context_provider_for_scope_manager_asyncio(self): - scope_manager = AsyncioScopeManager() - ctx_prov = get_context_provider_for_scope_manager(scope_manager) - assert isinstance( - ctx_prov, ddtrace.contrib.asyncio.provider.AsyncioContextProvider - ) - - def test_tracer_context_provider_config(self): - tracer = ddtrace.opentracer.Tracer('mysvc', scope_manager=AsyncioScopeManager()) - assert isinstance( - tracer._dd_tracer.context_provider, - ddtrace.contrib.asyncio.provider.AsyncioContextProvider, - ) diff --git a/tests/opentracer/test_tracer_gevent.py b/tests/opentracer/test_tracer_gevent.py deleted file mode 100644 index 65f0491e..00000000 --- a/tests/opentracer/test_tracer_gevent.py +++ /dev/null @@ -1,217 +0,0 @@ -import gevent -import pytest -from opentracing.scope_managers.gevent import GeventScopeManager - -import ddtrace -from ddtrace.contrib.gevent import patch, unpatch -from ddtrace.opentracer.utils import get_context_provider_for_scope_manager - - -@pytest.fixture() -def ot_tracer(ot_tracer_factory): - """Fixture providing an opentracer configured for gevent usage.""" - # patch gevent - patch() - yield ot_tracer_factory( - 'gevent_svc', {}, GeventScopeManager(), ddtrace.contrib.gevent.context_provider - ) - # unpatch gevent - unpatch() - - -class TestTracerGevent(object): - """Converted Gevent tests for the regular tracer. - - Ensures that greenlets are properly traced when using - the opentracer. - """ - - def test_no_threading(self, ot_tracer): - with ot_tracer.start_span('span') as span: - span.set_tag('tag', 'value') - - assert span.finished - - def test_greenlets(self, ot_tracer, writer): - def f(): - with ot_tracer.start_span('f') as span: - gevent.sleep(0.04) - span.set_tag('f', 'yes') - - def g(): - with ot_tracer.start_span('g') as span: - gevent.sleep(0.03) - span.set_tag('g', 'yes') - - with ot_tracer.start_span('root'): - gevent.joinall([gevent.spawn(f), gevent.spawn(g)]) - - traces = writer.pop_traces() - assert len(traces) == 3 - - def test_trace_greenlet(self, ot_tracer, writer): - # a greenlet can be traced using the trace API - def greenlet(): - with ot_tracer.start_span('greenlet'): - pass - - gevent.spawn(greenlet).join() - traces = writer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 1 - assert traces[0][0].name == 'greenlet' - - def test_trace_later_greenlet(self, ot_tracer, writer): - # a greenlet can be traced using the trace API - def greenlet(): - with ot_tracer.start_span('greenlet'): - pass - - gevent.spawn_later(0.01, greenlet).join() - traces = writer.pop_traces() - - assert len(traces) == 1 - assert len(traces[0]) == 1 - assert traces[0][0].name == 'greenlet' - - def test_trace_concurrent_calls(self, ot_tracer, writer): - # create multiple futures so that we expect multiple - # traces instead of a single one - def greenlet(): - with ot_tracer.start_span('greenlet'): - gevent.sleep(0.01) - - jobs = [gevent.spawn(greenlet) for x in range(100)] - gevent.joinall(jobs) - - traces = writer.pop_traces() - - assert len(traces) == 100 - assert len(traces[0]) == 1 - assert traces[0][0].name == 'greenlet' - - def test_trace_concurrent_spawn_later_calls(self, ot_tracer, writer): - # create multiple futures so that we expect multiple - # traces instead of a single one, even if greenlets - # are delayed - def greenlet(): - with ot_tracer.start_span('greenlet'): - gevent.sleep(0.01) - - jobs = [gevent.spawn_later(0.01, greenlet) for x in range(100)] - gevent.joinall(jobs) - - traces = writer.pop_traces() - assert 
len(traces) == 100 - assert len(traces[0]) == 1 - assert traces[0][0].name == 'greenlet' - - -class TestTracerGeventCompatibility(object): - """Ensure the opentracer works in tandem with the ddtracer and gevent.""" - - def test_trace_spawn_multiple_greenlets_multiple_traces_ot_parent( - self, ot_tracer, dd_tracer, writer - ): - """ - Copy of gevent test with the same name but testing with mixed usage of - the opentracer and datadog tracers. - - Uses an opentracer span as the parent span. - """ - # multiple greenlets must be part of the same trace - def entrypoint(): - with ot_tracer.start_active_span('greenlet.main'): - jobs = [gevent.spawn(green_1), gevent.spawn(green_2)] - gevent.joinall(jobs) - - def green_1(): - with dd_tracer.trace('greenlet.worker') as span: - span.set_tag('worker_id', '1') - gevent.sleep(0.01) - - def green_2(): - with ot_tracer.start_span('greenlet.worker') as span: - span.set_tag('worker_id', '2') - gevent.sleep(0.01) - - gevent.spawn(entrypoint).join() - traces = writer.pop_traces() - assert len(traces) == 3 - assert len(traces[0]) == 1 - parent_span = traces[2][0] - worker_1 = traces[0][0] - worker_2 = traces[1][0] - # check spans data and hierarchy - assert parent_span.name == 'greenlet.main' - assert worker_1.get_tag('worker_id') == '1' - assert worker_1.name == 'greenlet.worker' - assert worker_1.resource == 'greenlet.worker' - assert worker_1.parent_id == parent_span.span_id - assert worker_2.get_tag('worker_id') == '2' - assert worker_2.name == 'greenlet.worker' - assert worker_2.resource == 'greenlet.worker' - assert worker_2.parent_id == parent_span.span_id - - def test_trace_spawn_multiple_greenlets_multiple_traces_dd_parent( - self, ot_tracer, dd_tracer, writer - ): - """ - Copy of gevent test with the same name but testing with mixed usage of - the opentracer and datadog tracers. - - Uses an opentracer span as the parent span. - """ - # multiple greenlets must be part of the same trace - def entrypoint(): - with dd_tracer.trace('greenlet.main'): - jobs = [gevent.spawn(green_1), gevent.spawn(green_2)] - gevent.joinall(jobs) - - def green_1(): - with ot_tracer.start_span('greenlet.worker') as span: - span.set_tag('worker_id', '1') - gevent.sleep(0.01) - - def green_2(): - with dd_tracer.trace('greenlet.worker') as span: - span.set_tag('worker_id', '2') - gevent.sleep(0.01) - - gevent.spawn(entrypoint).join() - traces = writer.pop_traces() - assert len(traces) == 3 - assert len(traces[0]) == 1 - parent_span = traces[2][0] - worker_1 = traces[0][0] - worker_2 = traces[1][0] - # check spans data and hierarchy - assert parent_span.name == 'greenlet.main' - assert worker_1.get_tag('worker_id') == '1' - assert worker_1.name == 'greenlet.worker' - assert worker_1.resource == 'greenlet.worker' - assert worker_1.parent_id == parent_span.span_id - assert worker_2.get_tag('worker_id') == '2' - assert worker_2.name == 'greenlet.worker' - assert worker_2.resource == 'greenlet.worker' - assert worker_2.parent_id == parent_span.span_id - - -class TestUtilsGevent(object): - """Test the util routines of the opentracer with gevent specific - configuration. 
- """ - - def test_get_context_provider_for_scope_manager_asyncio(self): - scope_manager = GeventScopeManager() - ctx_prov = get_context_provider_for_scope_manager(scope_manager) - assert isinstance( - ctx_prov, ddtrace.contrib.gevent.provider.GeventContextProvider - ) - - def test_tracer_context_provider_config(self): - tracer = ddtrace.opentracer.Tracer('mysvc', scope_manager=GeventScopeManager()) - assert isinstance( - tracer._dd_tracer.context_provider, - ddtrace.contrib.gevent.provider.GeventContextProvider, - ) diff --git a/tests/opentracer/test_tracer_tornado.py b/tests/opentracer/test_tracer_tornado.py deleted file mode 100644 index 86f59bac..00000000 --- a/tests/opentracer/test_tracer_tornado.py +++ /dev/null @@ -1,30 +0,0 @@ -import pytest -from opentracing.scope_managers.tornado import TornadoScopeManager - - -@pytest.fixture() -def ot_tracer(ot_tracer_factory): - """Fixture providing an opentracer configured for tornado usage.""" - yield ot_tracer_factory('tornado_svc', {}, TornadoScopeManager()) - - -class TestTracerTornado(object): - """ - Since the ScopeManager is provided by OpenTracing we should simply test - whether it exists and works for a very simple use-case. - """ - - def test_sanity(self, ot_tracer, writer): - with ot_tracer.start_active_span('one'): - with ot_tracer.start_active_span('two'): - pass - - traces = writer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - assert traces[0][0].name == 'one' - assert traces[0][1].name == 'two' - - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id diff --git a/tests/opentracer/test_utils.py b/tests/opentracer/test_utils.py deleted file mode 100644 index d38c0e55..00000000 --- a/tests/opentracer/test_utils.py +++ /dev/null @@ -1,13 +0,0 @@ -from opentracing.scope_managers import ThreadLocalScopeManager - -import ddtrace -from ddtrace.opentracer.utils import ( - get_context_provider_for_scope_manager, -) - - -class TestOpentracerUtils(object): - def test_get_context_provider_for_scope_manager_thread(self): - scope_manager = ThreadLocalScopeManager() - ctx_prov = get_context_provider_for_scope_manager(scope_manager) - assert isinstance(ctx_prov, ddtrace.provider.DefaultContextProvider) diff --git a/tests/opentracer/utils.py b/tests/opentracer/utils.py deleted file mode 100644 index 884f2406..00000000 --- a/tests/opentracer/utils.py +++ /dev/null @@ -1,14 +0,0 @@ -from ddtrace.opentracer import Tracer - - -def init_tracer(service_name, dd_tracer, scope_manager=None): - """A method that emulates what a user of OpenTracing would call to - initialize a Datadog opentracer. - - It accepts a Datadog tracer that should be the same one used for testing. 
- """ - writer = dd_tracer.writer - ot_tracer = Tracer(service_name, dd_tracer=dd_tracer, scope_manager=scope_manager) - dd_tracer.writer = writer - ot_tracer._dd_tracer = dd_tracer - return ot_tracer diff --git a/tests/oteltrace_run.py b/tests/oteltrace_run.py new file mode 100644 index 00000000..447387cc --- /dev/null +++ b/tests/oteltrace_run.py @@ -0,0 +1,9 @@ +import os +import sys + +# DEV: We must append to sys path before importing oteltrace_run +sys.path.append('.') +from oteltrace.commands import oteltrace_run # noqa + +os.environ['PYTHONPATH'] = '{}:{}'.format(os.getenv('PYTHONPATH'), os.path.abspath('.')) +oteltrace_run.main() diff --git a/tests/propagation/test_b3_format.py b/tests/propagation/test_b3_format.py new file mode 100644 index 00000000..edf2cd23 --- /dev/null +++ b/tests/propagation/test_b3_format.py @@ -0,0 +1,179 @@ +# Copyright 2019, OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import oteltrace.propagation.b3 as b3_format +from oteltrace.span import _new_id + +FORMAT = b3_format.B3HTTPPropagator() + + +class TestB3Format(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.serialized_trace_id = b3_format.format_trace_id( + _new_id() + ) + cls.serialized_span_id = b3_format.format_span_id( + _new_id() + ) + + def test_extract_multi_header(self): + """Test the extraction of B3 headers.""" + carrier = { + FORMAT.TRACE_ID_KEY: self.serialized_trace_id, + FORMAT.SPAN_ID_KEY: self.serialized_span_id, + FORMAT.SAMPLED_KEY: '1', + } + span_context = FORMAT.extract(carrier) + new_carrier = {} + FORMAT.inject(span_context, new_carrier) + self.assertEqual( + new_carrier[FORMAT.TRACE_ID_KEY], self.serialized_trace_id + ) + self.assertEqual( + new_carrier[FORMAT.SPAN_ID_KEY], self.serialized_span_id + ) + self.assertEqual(new_carrier[FORMAT.SAMPLED_KEY], '1') + + def test_extract_single_header(self): + """Test the extraction from a single b3 header.""" + carrier = { + FORMAT.SINGLE_HEADER_KEY: '{}-{}'.format( + self.serialized_trace_id, self.serialized_span_id + ) + } + span_context = FORMAT.extract(carrier) + new_carrier = {} + FORMAT.inject(span_context, new_carrier) + self.assertEqual( + new_carrier[FORMAT.TRACE_ID_KEY], self.serialized_trace_id + ) + self.assertEqual( + new_carrier[FORMAT.SPAN_ID_KEY], self.serialized_span_id + ) + self.assertEqual(new_carrier[FORMAT.SAMPLED_KEY], '1') + + def test_extract_header_precedence(self): + """A single b3 header should take precedence over multiple + headers. 
+ """ + single_header_trace_id = self.serialized_trace_id[:-3] + '123' + carrier = { + FORMAT.SINGLE_HEADER_KEY: '{}-{}'.format( + single_header_trace_id, self.serialized_span_id + ), + FORMAT.TRACE_ID_KEY: self.serialized_trace_id, + FORMAT.SPAN_ID_KEY: self.serialized_span_id, + FORMAT.SAMPLED_KEY: '1', + } + span_context = FORMAT.extract(carrier) + new_carrier = {} + FORMAT.inject(span_context, new_carrier) + self.assertEqual( + new_carrier[FORMAT.TRACE_ID_KEY], single_header_trace_id + ) + + def test_enabled_sampling(self): + """Test b3 sample key variants that turn on sampling.""" + for variant in ['1', 'True', 'true', 'd']: + carrier = { + FORMAT.TRACE_ID_KEY: self.serialized_trace_id, + FORMAT.SPAN_ID_KEY: self.serialized_span_id, + FORMAT.SAMPLED_KEY: variant, + } + span_context = FORMAT.extract(carrier) + new_carrier = {} + FORMAT.inject(span_context, new_carrier) + self.assertEqual(new_carrier[FORMAT.SAMPLED_KEY], '1') + + def test_disabled_sampling(self): + """Test b3 sample key variants that turn off sampling.""" + for variant in ['0', 'False', 'false', None]: + carrier = { + FORMAT.TRACE_ID_KEY: self.serialized_trace_id, + FORMAT.SPAN_ID_KEY: self.serialized_span_id, + FORMAT.SAMPLED_KEY: variant, + } + span_context = FORMAT.extract(carrier) + new_carrier = {} + FORMAT.inject(span_context, new_carrier) + self.assertEqual(new_carrier[FORMAT.SAMPLED_KEY], '0') + + def test_flags(self): + """x-b3-flags set to '1' should result in propagation.""" + carrier = { + FORMAT.TRACE_ID_KEY: self.serialized_trace_id, + FORMAT.SPAN_ID_KEY: self.serialized_span_id, + FORMAT.FLAGS_KEY: '1', + } + span_context = FORMAT.extract(carrier) + new_carrier = {} + FORMAT.inject(span_context, new_carrier) + self.assertEqual(new_carrier[FORMAT.SAMPLED_KEY], '1') + + def test_flags_and_sampling(self): + """Propagate if b3 flags and sampling are set.""" + carrier = { + FORMAT.TRACE_ID_KEY: self.serialized_trace_id, + FORMAT.SPAN_ID_KEY: self.serialized_span_id, + FORMAT.FLAGS_KEY: '1', + } + span_context = FORMAT.extract(carrier) + new_carrier = {} + FORMAT.inject(span_context, new_carrier) + self.assertEqual(new_carrier[FORMAT.SAMPLED_KEY], '1') + + def test_64bit_trace_id(self): + """64 bit trace ids should be padded to 128 bit trace ids.""" + trace_id_64_bit = self.serialized_trace_id[:16] + carrier = { + FORMAT.TRACE_ID_KEY: trace_id_64_bit, + FORMAT.SPAN_ID_KEY: self.serialized_span_id, + FORMAT.FLAGS_KEY: '1', + } + span_context = FORMAT.extract(carrier) + new_carrier = {} + FORMAT.inject(span_context, new_carrier) + self.assertEqual( + new_carrier[FORMAT.TRACE_ID_KEY], '0' * 16 + trace_id_64_bit + ) + + def test_invalid_single_header(self): + """If an invalid single header is passed, return an + invalid SpanContext. 
+ """ + carrier = {FORMAT.SINGLE_HEADER_KEY: '0-1-2-3-4-5-6-7'} + span_context = FORMAT.extract(carrier) + self.assertIsNone(span_context.trace_id) + self.assertIsNone(span_context.span_id) + + def test_missing_trace_id(self): + """If a trace id is missing, populate an invalid trace id.""" + carrier = { + FORMAT.SPAN_ID_KEY: self.serialized_span_id, + FORMAT.FLAGS_KEY: '1', + } + span_context = FORMAT.extract(carrier) + self.assertEqual(span_context.trace_id, 0) + + def test_missing_span_id(self): + """If a trace id is missing, populate an invalid trace id.""" + carrier = { + FORMAT.TRACE_ID_KEY: self.serialized_trace_id, + FORMAT.FLAGS_KEY: '1', + } + span_context = FORMAT.extract(carrier) + self.assertEqual(span_context.span_id, 0) diff --git a/tests/propagation/test_datadog.py b/tests/propagation/test_datadog.py new file mode 100644 index 00000000..29c1a0fd --- /dev/null +++ b/tests/propagation/test_datadog.py @@ -0,0 +1,79 @@ +from unittest import TestCase +from tests.test_tracer import get_dummy_tracer + +from oteltrace.propagation.datadog import ( + DatadogHTTPPropagator, + HTTP_HEADER_TRACE_ID, + HTTP_HEADER_PARENT_ID, + HTTP_HEADER_SAMPLING_PRIORITY, + HTTP_HEADER_ORIGIN, +) + + +class TestDatadogHttpPropagation(TestCase): + """ + Tests related to the ``Context`` class that hosts the trace for the + current execution flow. + """ + + def test_inject(self): + tracer = get_dummy_tracer() + + with tracer.trace('global_root_span') as span: + span.context.sampling_priority = 2 + span.context._otel_origin = 'synthetics' + headers = {} + propagator = DatadogHTTPPropagator() + propagator.inject(span.context, headers) + + assert int(headers[HTTP_HEADER_TRACE_ID]) == span.trace_id + assert int(headers[HTTP_HEADER_PARENT_ID]) == span.span_id + assert ( + int(headers[HTTP_HEADER_SAMPLING_PRIORITY]) == + span.context.sampling_priority + ) + assert ( + headers[HTTP_HEADER_ORIGIN] == + span.context._otel_origin + ) + + def test_extract(self): + tracer = get_dummy_tracer() + + headers = { + 'x-datadog-trace-id': '1234', + 'x-datadog-parent-id': '5678', + 'x-datadog-sampling-priority': '1', + 'x-datadog-origin': 'synthetics', + } + + propagator = DatadogHTTPPropagator() + context = propagator.extract(headers) + tracer.context_provider.activate(context) + + with tracer.trace('local_root_span') as span: + assert span.trace_id == 1234 + assert span.parent_id == 5678 + assert span.context.sampling_priority == 1 + assert span.context._otel_origin == 'synthetics' + + def test_WSGI_extract(self): + """Ensure we support the WSGI formatted headers as well.""" + tracer = get_dummy_tracer() + + headers = { + 'HTTP_X_DATADOG_TRACE_ID': '1234', + 'HTTP_X_DATADOG_PARENT_ID': '5678', + 'HTTP_X_DATADOG_SAMPLING_PRIORITY': '1', + 'HTTP_X_DATADOG_ORIGIN': 'synthetics', + } + + propagator = DatadogHTTPPropagator() + context = propagator.extract(headers) + tracer.context_provider.activate(context) + + with tracer.trace('local_root_span') as span: + assert span.trace_id == 1234 + assert span.parent_id == 5678 + assert span.context.sampling_priority == 1 + assert span.context._otel_origin == 'synthetics' diff --git a/tests/propagation/test_http.py b/tests/propagation/test_http.py index 6249c037..1c711805 100644 --- a/tests/propagation/test_http.py +++ b/tests/propagation/test_http.py @@ -1,79 +1,32 @@ -from unittest import TestCase -from tests.test_tracer import get_dummy_tracer +from unittest import TestCase, mock -from ddtrace.propagation.http import ( - HTTPPropagator, - HTTP_HEADER_TRACE_ID, - HTTP_HEADER_PARENT_ID, 
- HTTP_HEADER_SAMPLING_PRIORITY, - HTTP_HEADER_ORIGIN, -) +from oteltrace.propagation import http as http_propagator_module +from oteltrace.propagation.http import DatadogHTTPPropagator +from oteltrace import tracer -class TestHttpPropagation(TestCase): - """ - Tests related to the ``Context`` class that hosts the trace for the - current execution flow. - """ - def test_inject(self): - tracer = get_dummy_tracer() +class TestHTTPPropagator(TestCase): + def test_default(self): + prop = http_propagator_module.HTTPPropagator() + self.assertIsInstance(prop, DatadogHTTPPropagator) - with tracer.trace('global_root_span') as span: - span.context.sampling_priority = 2 - span.context._dd_origin = 'synthetics' - headers = {} - propagator = HTTPPropagator() - propagator.inject(span.context, headers) + def test_set_http_propagator_factory(self): + mock_propagator = mock.Mock() - assert int(headers[HTTP_HEADER_TRACE_ID]) == span.trace_id - assert int(headers[HTTP_HEADER_PARENT_ID]) == span.span_id - assert ( - int(headers[HTTP_HEADER_SAMPLING_PRIORITY]) == - span.context.sampling_priority - ) - assert ( - headers[HTTP_HEADER_ORIGIN] == - span.context._dd_origin - ) + def get_propagator(): + return mock_propagator - def test_extract(self): - tracer = get_dummy_tracer() + http_propagator_module.set_http_propagator_factory(get_propagator) - headers = { - 'x-datadog-trace-id': '1234', - 'x-datadog-parent-id': '5678', - 'x-datadog-sampling-priority': '1', - 'x-datadog-origin': 'synthetics', - } + self.assertIs(http_propagator_module.HTTPPropagator(), mock_propagator) - propagator = HTTPPropagator() - context = propagator.extract(headers) - tracer.context_provider.activate(context) + def test_tracer_configure_http_propagator(self): + mock_propagator = mock.Mock() - with tracer.trace('local_root_span') as span: - assert span.trace_id == 1234 - assert span.parent_id == 5678 - assert span.context.sampling_priority == 1 - assert span.context._dd_origin == 'synthetics' + def get_propagator(): + return mock_propagator - def test_WSGI_extract(self): - """Ensure we support the WSGI formatted headers as well.""" - tracer = get_dummy_tracer() + tracer.configure(http_propagator=get_propagator) - headers = { - 'HTTP_X_DATADOG_TRACE_ID': '1234', - 'HTTP_X_DATADOG_PARENT_ID': '5678', - 'HTTP_X_DATADOG_SAMPLING_PRIORITY': '1', - 'HTTP_X_DATADOG_ORIGIN': 'synthetics', - } - - propagator = HTTPPropagator() - context = propagator.extract(headers) - tracer.context_provider.activate(context) - - with tracer.trace('local_root_span') as span: - assert span.trace_id == 1234 - assert span.parent_id == 5678 - assert span.context.sampling_priority == 1 - assert span.context._dd_origin == 'synthetics' + self.assertIs(http_propagator_module.HTTPPropagator(), mock_propagator) diff --git a/tests/propagation/test_utils.py b/tests/propagation/test_utils.py index 8b80e5a5..de3a5a30 100644 --- a/tests/propagation/test_utils.py +++ b/tests/propagation/test_utils.py @@ -1,4 +1,4 @@ -from ddtrace.propagation.utils import get_wsgi_header +from oteltrace.propagation.utils import get_wsgi_header class TestPropagationUtils(object): diff --git a/tests/test_api.py b/tests/test_api.py deleted file mode 100644 index a4734a05..00000000 --- a/tests/test_api.py +++ /dev/null @@ -1,286 +0,0 @@ -import mock -import re -import socket -import threading -import time -import warnings - -from unittest import TestCase - -import pytest - -from ddtrace.api import API, Response -from ddtrace.compat import iteritems, httplib, PY3 -from 
ddtrace.internal.runtime.container import CGroupInfo -from ddtrace.vendor.six.moves import BaseHTTPServer, socketserver - - -class _BaseHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): - error_message_format = '%(message)s\n' - error_content_type = 'text/plain' - - @staticmethod - def log_message(format, *args): # noqa: A002 - pass - - -class _APIEndpointRequestHandlerTest(_BaseHTTPRequestHandler): - - def do_PUT(self): - self.send_error(200, 'OK') - - -class _TimeoutAPIEndpointRequestHandlerTest(_BaseHTTPRequestHandler): - def do_PUT(self): - # This server sleeps longer than our timeout - time.sleep(5) - - -class _ResetAPIEndpointRequestHandlerTest(_BaseHTTPRequestHandler): - - def do_PUT(self): - return - - -_HOST = '0.0.0.0' -_TIMEOUT_PORT = 8743 -_RESET_PORT = _TIMEOUT_PORT + 1 - - -class UDSHTTPServer(socketserver.UnixStreamServer, BaseHTTPServer.HTTPServer): - def server_bind(self): - BaseHTTPServer.HTTPServer.server_bind(self) - - -def _make_uds_server(path, request_handler): - server = UDSHTTPServer(path, request_handler) - t = threading.Thread(target=server.serve_forever) - # Set daemon just in case something fails - t.daemon = True - t.start() - return server, t - - -@pytest.fixture -def endpoint_uds_server(tmp_path): - server, thread = _make_uds_server(str(tmp_path / 'uds_server_socket'), _APIEndpointRequestHandlerTest) - try: - yield server - finally: - server.shutdown() - thread.join() - - -def _make_server(port, request_handler): - server = BaseHTTPServer.HTTPServer((_HOST, port), request_handler) - t = threading.Thread(target=server.serve_forever) - # Set daemon just in case something fails - t.daemon = True - t.start() - return server, t - - -@pytest.fixture(scope='module') -def endpoint_test_timeout_server(): - server, thread = _make_server(_TIMEOUT_PORT, _TimeoutAPIEndpointRequestHandlerTest) - try: - yield thread - finally: - server.shutdown() - thread.join() - - -@pytest.fixture(scope='module') -def endpoint_test_reset_server(): - server, thread = _make_server(_RESET_PORT, _ResetAPIEndpointRequestHandlerTest) - try: - yield thread - finally: - server.shutdown() - thread.join() - - -class ResponseMock: - def __init__(self, content, status=200): - self.status = status - self.content = content - - def read(self): - return self.content - - -def test_api_str(): - api = API('localhost', 8126, https=True) - assert str(api) == 'https://localhost:8126' - api = API('localhost', 8126, '/path/to/uds') - assert str(api) == 'unix:///path/to/uds' - - -class APITests(TestCase): - - def setUp(self): - # DEV: Mock here instead of in tests, before we have patched `httplib.HTTPConnection` - self.conn = mock.MagicMock(spec=httplib.HTTPConnection) - self.api = API('localhost', 8126) - - def tearDown(self): - del self.api - del self.conn - - def test_typecast_port(self): - api = API('localhost', u'8126') - self.assertEqual(api.port, 8126) - - @mock.patch('logging.Logger.debug') - def test_parse_response_json(self, log): - test_cases = { - 'OK': dict( - js=None, - log='Cannot parse Datadog Agent response, please make sure your Datadog Agent is up to date', - ), - 'OK\n': dict( - js=None, - log='Cannot parse Datadog Agent response, please make sure your Datadog Agent is up to date', - ), - 'error:unsupported-endpoint': dict( - js=None, - log='Unable to parse Datadog Agent JSON response: .*? \'error:unsupported-endpoint\'', - ), - 42: dict( # int as key to trigger TypeError - js=None, - log='Unable to parse Datadog Agent JSON response: .*? 
42', - ), - '{}': dict(js={}), - '[]': dict(js=[]), - - # Priority sampling "rate_by_service" response - ('{"rate_by_service": ' - '{"service:,env:":0.5, "service:mcnulty,env:test":0.9, "service:postgres,env:test":0.6}}'): dict( - js=dict( - rate_by_service={ - 'service:,env:': 0.5, - 'service:mcnulty,env:test': 0.9, - 'service:postgres,env:test': 0.6, - }, - ), - ), - ' [4,2,1] ': dict(js=[4, 2, 1]), - } - - for k, v in iteritems(test_cases): - log.reset_mock() - - r = Response.from_http_response(ResponseMock(k)) - js = r.get_json() - assert v['js'] == js - if 'log' in v: - log.assert_called_once() - msg = log.call_args[0][0] % log.call_args[0][1:] - assert re.match(v['log'], msg), msg - - @mock.patch('ddtrace.compat.httplib.HTTPConnection') - def test_put_connection_close(self, HTTPConnection): - """ - When calling API._put - we close the HTTPConnection we create - """ - HTTPConnection.return_value = self.conn - - with warnings.catch_warnings(record=True) as w: - self.api._put('/test', '', 1) - - self.assertEqual(len(w), 0, 'Test raised unexpected warnings: {0!r}'.format(w)) - - self.conn.request.assert_called_once() - self.conn.close.assert_called_once() - - @mock.patch('ddtrace.compat.httplib.HTTPConnection') - def test_put_connection_close_exception(self, HTTPConnection): - """ - When calling API._put raises an exception - we close the HTTPConnection we create - """ - HTTPConnection.return_value = self.conn - # Ensure calling `request` raises an exception - self.conn.request.side_effect = Exception - - with warnings.catch_warnings(record=True) as w: - with self.assertRaises(Exception): - self.api._put('/test', '', 1) - - self.assertEqual(len(w), 0, 'Test raised unexpected warnings: {0!r}'.format(w)) - - self.conn.request.assert_called_once() - self.conn.close.assert_called_once() - - -def test_https(): - conn = mock.MagicMock(spec=httplib.HTTPSConnection) - api = API('localhost', 8126, https=True) - with mock.patch('ddtrace.compat.httplib.HTTPSConnection') as HTTPSConnection: - HTTPSConnection.return_value = conn - api._put('/test', '', 1) - conn.request.assert_called_once() - conn.close.assert_called_once() - - -def test_flush_connection_timeout_connect(): - payload = mock.Mock() - payload.get_payload.return_value = 'foobar' - payload.length = 12 - api = API(_HOST, 2019) - response = api._flush(payload) - if PY3: - assert isinstance(response, (OSError, ConnectionRefusedError)) # noqa: F821 - else: - assert isinstance(response, socket.error) - assert response.errno in (99, 111) - - -def test_flush_connection_timeout(endpoint_test_timeout_server): - payload = mock.Mock() - payload.get_payload.return_value = 'foobar' - payload.length = 12 - api = API(_HOST, _TIMEOUT_PORT) - response = api._flush(payload) - assert isinstance(response, socket.timeout) - - -def test_flush_connection_reset(endpoint_test_reset_server): - payload = mock.Mock() - payload.get_payload.return_value = 'foobar' - payload.length = 12 - api = API(_HOST, _RESET_PORT) - response = api._flush(payload) - if PY3: - assert isinstance(response, (httplib.BadStatusLine, ConnectionResetError)) # noqa: F821 - else: - assert isinstance(response, httplib.BadStatusLine) - - -def test_flush_connection_uds(endpoint_uds_server): - payload = mock.Mock() - payload.get_payload.return_value = 'foobar' - payload.length = 12 - api = API(_HOST, 2019, uds_path=endpoint_uds_server.server_address) - response = api._flush(payload) - assert response.status == 200 - - -@mock.patch('ddtrace.internal.runtime.container.get_container_info') -def 
test_api_container_info(get_container_info): - # When we have container information - # DEV: `get_container_info` will return a `CGroupInfo` with a `container_id` or `None` - info = CGroupInfo(container_id='test-container-id') - get_container_info.return_value = info - - api = API(_HOST, 8126) - assert api._container_info is info - assert api._headers['Datadog-Container-Id'] == 'test-container-id' - - # When we do not have container information - get_container_info.return_value = None - - api = API(_HOST, 8126) - assert api._container_info is None - assert 'Datadog-Container-Id' not in api._headers diff --git a/tests/test_api_otel_exporter.py b/tests/test_api_otel_exporter.py new file mode 100644 index 00000000..1171af42 --- /dev/null +++ b/tests/test_api_otel_exporter.py @@ -0,0 +1,113 @@ +import mock +from unittest import TestCase + +from oteltrace.span import Span +from oteltrace.context import Context +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from oteltrace.api_otel_exporter import APIOtel + + +class MockSpan(mock.Mock): + pass + + +class TestAPIOtelExporter(TestCase): + def test_init(self): + exporter = mock.Mock() + api = APIOtel(exporter=exporter) + self.assertIs(api._exporter, exporter) + + def test_send_traces(self): + def return_same(span): + return span + + exporter = InMemorySpanExporter() + api = APIOtel(exporter=exporter) + # we don't care about _span_to_otel_span() here + api._span_to_otel_span = return_same + + traces = ( + (MockSpan(),), + (MockSpan(), MockSpan()), + (MockSpan(), MockSpan(), MockSpan()), + ) + + api.send_traces(traces) + + # flat traces to 1 dimension tuple + traces_tuple = tuple([item for sublist in traces for item in sublist]) + + self.assertEqual(traces_tuple, exporter.get_finished_spans()) + + def test__span_to_otel_span(self): + api = APIOtel(exporter=None) + + trace_id = 0x32452526 + span_id = 0x29025326 + parent_id = 0x592153109 + + start_time = 683647322 + end_time = start_time + 50 + + ctx = Context() + span = Span( + tracer=None, + name='test_span', + trace_id=trace_id, + span_id=span_id, + parent_id=parent_id, + context=ctx, + start=start_time, + resource='foo_resource', + service='foo_service', + span_type='foo_span', + ) + + span.finish(finish_time=end_time) + + span.set_tag('out.host', 'opentelemetry.io') + span.set_tag('out.port', 443) + span.set_tag('creator_name', 'mauricio.vasquez') + + otel_span = api._span_to_otel_span(span) + + self.assertEqual(span.name, otel_span.name) + + self.assertEqual(span.trace_id, otel_span.context.trace_id) + self.assertEqual(span.span_id, otel_span.context.span_id) + + self.assertEqual(span.parent_id, otel_span.parent.span_id) + self.assertEqual(span.trace_id, otel_span.parent.trace_id) + + self.assertEqual(start_time * 10 ** 9, otel_span.start_time) + self.assertEqual(end_time * 10 ** 9, otel_span.end_time) + + self.assertEqual(span.service, otel_span.attributes['service.name']) + self.assertEqual(span.resource, otel_span.attributes['resource.name']) + self.assertEqual(span.span_type, otel_span.attributes['component']) + + self.assertEqual(span.get_tag('out.host'), + otel_span.attributes['peer.hostname']) + self.assertEqual(span.get_tag('out.port'), + otel_span.attributes['peer.port']) + + self.assertEqual(span.get_tag('creator_name'), + otel_span.attributes['creator_name']) + + # test parent None + + span = Span( + tracer=None, + name='test_span', + trace_id=trace_id, + span_id=span_id, + parent_id=None, + context=ctx, + start=start_time, + ) + + 
span.finish(finish_time=end_time) + + otel_span = api._span_to_otel_span(span) + + self.assertIsNone(otel_span.parent) diff --git a/tests/test_compat.py b/tests/test_compat.py index cc1bc09c..04a98017 100644 --- a/tests/test_compat.py +++ b/tests/test_compat.py @@ -6,7 +6,7 @@ import pytest # Project -from ddtrace.compat import to_unicode, PY3, reraise, get_connection_response +from oteltrace.compat import to_unicode, PY3, reraise, get_connection_response if PY3: diff --git a/tests/test_context.py b/tests/test_context.py index 468c0496..c0e69848 100644 --- a/tests/test_context.py +++ b/tests/test_context.py @@ -8,10 +8,10 @@ import pytest -from ddtrace.span import Span -from ddtrace.context import Context -from ddtrace.constants import HOSTNAME_KEY -from ddtrace.ext.priority import USER_REJECT, AUTO_REJECT, AUTO_KEEP, USER_KEEP +from oteltrace.span import Span +from oteltrace.context import Context +from oteltrace.constants import HOSTNAME_KEY +from oteltrace.ext.priority import USER_REJECT, AUTO_REJECT, AUTO_KEEP, USER_KEEP @pytest.fixture @@ -157,7 +157,7 @@ def test_get_trace_empty(self): assert trace is None assert sampled is None - @mock.patch('ddtrace.internal.hostname.get_hostname') + @mock.patch('oteltrace.internal.hostname.get_hostname') def test_get_report_hostname_enabled(self, get_hostname): get_hostname.return_value = 'test-hostname' @@ -176,7 +176,7 @@ def test_get_report_hostname_enabled(self, get_hostname): assert trace[0].get_tag(HOSTNAME_KEY) == 'test-hostname' assert span.get_tag(HOSTNAME_KEY) == 'test-hostname' - @mock.patch('ddtrace.internal.hostname.get_hostname') + @mock.patch('oteltrace.internal.hostname.get_hostname') def test_get_report_hostname_disabled(self, get_hostname): get_hostname.return_value = 'test-hostname' @@ -195,7 +195,7 @@ def test_get_report_hostname_disabled(self, get_hostname): assert trace[0].get_tag(HOSTNAME_KEY) is None assert span.get_tag(HOSTNAME_KEY) is None - @mock.patch('ddtrace.internal.hostname.get_hostname') + @mock.patch('oteltrace.internal.hostname.get_hostname') def test_get_report_hostname_default(self, get_hostname): get_hostname.return_value = 'test-hostname' @@ -444,6 +444,6 @@ def test_clone(self): assert cloned_ctx._parent_trace_id == ctx._parent_trace_id assert cloned_ctx._parent_span_id == ctx._parent_span_id assert cloned_ctx._sampling_priority == ctx._sampling_priority - assert cloned_ctx._dd_origin == ctx._dd_origin + assert cloned_ctx._otel_origin == ctx._otel_origin assert cloned_ctx._current_span == ctx._current_span assert cloned_ctx._trace == [] diff --git a/tests/test_encoders.py b/tests/test_encoders.py deleted file mode 100644 index 682a5507..00000000 --- a/tests/test_encoders.py +++ /dev/null @@ -1,136 +0,0 @@ -import json - -from unittest import TestCase - -from ddtrace.span import Span -from ddtrace.compat import msgpack_type, string_type -from ddtrace.encoding import JSONEncoder, MsgpackEncoder -from ddtrace.vendor import msgpack - - -class TestEncoders(TestCase): - """ - Ensures that Encoders serialize the payload as expected. 
- """ - def test_encode_traces_json(self): - # test encoding for JSON format - traces = [] - traces.append([ - Span(name='client.testing', tracer=None), - Span(name='client.testing', tracer=None), - ]) - traces.append([ - Span(name='client.testing', tracer=None), - Span(name='client.testing', tracer=None), - ]) - - encoder = JSONEncoder() - spans = encoder.encode_traces(traces) - items = json.loads(spans) - - # test the encoded output that should be a string - # and the output must be flatten - assert isinstance(spans, string_type) - assert len(items) == 2 - assert len(items[0]) == 2 - assert len(items[1]) == 2 - for i in range(2): - for j in range(2): - assert 'client.testing' == items[i][j]['name'] - - def test_join_encoded_json(self): - # test encoding for JSON format - traces = [] - traces.append([ - Span(name='client.testing', tracer=None), - Span(name='client.testing', tracer=None), - ]) - traces.append([ - Span(name='client.testing', tracer=None), - Span(name='client.testing', tracer=None), - ]) - - encoder = JSONEncoder() - - # Encode each trace on it's own - encoded_traces = [ - encoder.encode_trace(trace) - for trace in traces - ] - - # Join the encoded traces together - data = encoder.join_encoded(encoded_traces) - - # Parse the resulting data - items = json.loads(data) - - # test the encoded output that should be a string - # and the output must be flatten - assert isinstance(data, string_type) - assert len(items) == 2 - assert len(items[0]) == 2 - assert len(items[1]) == 2 - for i in range(2): - for j in range(2): - assert 'client.testing' == items[i][j]['name'] - - def test_encode_traces_msgpack(self): - # test encoding for MsgPack format - traces = [] - traces.append([ - Span(name='client.testing', tracer=None), - Span(name='client.testing', tracer=None), - ]) - traces.append([ - Span(name='client.testing', tracer=None), - Span(name='client.testing', tracer=None), - ]) - - encoder = MsgpackEncoder() - spans = encoder.encode_traces(traces) - items = msgpack.unpackb(spans) - - # test the encoded output that should be a string - # and the output must be flatten - assert isinstance(spans, msgpack_type) - assert len(items) == 2 - assert len(items[0]) == 2 - assert len(items[1]) == 2 - for i in range(2): - for j in range(2): - assert b'client.testing' == items[i][j][b'name'] - - def test_join_encoded_msgpack(self): - # test encoding for MsgPack format - traces = [] - traces.append([ - Span(name='client.testing', tracer=None), - Span(name='client.testing', tracer=None), - ]) - traces.append([ - Span(name='client.testing', tracer=None), - Span(name='client.testing', tracer=None), - ]) - - encoder = MsgpackEncoder() - - # Encode each individual trace on it's own - encoded_traces = [ - encoder.encode_trace(trace) - for trace in traces - ] - # Join the encoded traces together - data = encoder.join_encoded(encoded_traces) - - # Parse the encoded data - items = msgpack.unpackb(data) - - # test the encoded output that should be a string - # and the output must be flatten - assert isinstance(data, msgpack_type) - assert len(items) == 2 - assert len(items[0]) == 2 - assert len(items[1]) == 2 - for i in range(2): - for j in range(2): - assert b'client.testing' == items[i][j][b'name'] diff --git a/tests/test_filters.py b/tests/test_filters.py index d4baacc9..b3c2a8d7 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -1,8 +1,8 @@ from unittest import TestCase -from ddtrace.filters import FilterRequestsOnUrl -from ddtrace.span import Span -from ddtrace.ext.http import URL +from 
oteltrace.filters import FilterRequestsOnUrl +from oteltrace.span import Span +from oteltrace.ext.http import URL class FilterRequestOnUrlTests(TestCase): diff --git a/tests/test_global_config.py b/tests/test_global_config.py index 6bfcb1b3..8ab1f854 100644 --- a/tests/test_global_config.py +++ b/tests/test_global_config.py @@ -3,8 +3,8 @@ import pytest -from ddtrace import config as global_config -from ddtrace.settings import Config +from oteltrace import config as global_config +from oteltrace.settings import Config from .test_tracer import get_dummy_tracer @@ -50,7 +50,7 @@ def test_missing_integration_key(self): assert isinstance(e.value, KeyError) def test_global_configuration(self): - # ensure a global configuration is available in the `ddtrace` module + # ensure a global configuration is available in the `oteltrace` module assert isinstance(global_config, Config) def test_settings_merge(self): diff --git a/tests/test_helpers.py b/tests/test_helpers.py index a09b055c..75278af0 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -1,13 +1,13 @@ import mock -from ddtrace import helpers +from oteltrace import helpers from .base import BaseTracerTestCase from .util import override_global_tracer class HelpersTestCase(BaseTracerTestCase): - """Test suite for ``ddtrace`` helpers""" + """Test suite for ``oteltrace`` helpers""" def test_correlation_identifiers(self): # ensures the right correlation identifiers are # returned when a Trace is active diff --git a/tests/test_hook.py b/tests/test_hook.py index e9da85b6..91196187 100644 --- a/tests/test_hook.py +++ b/tests/test_hook.py @@ -1,7 +1,7 @@ import mock -from ddtrace.compat import reload_module -from ddtrace.utils.hook import ( +from oteltrace.compat import reload_module +from oteltrace.utils.hook import ( register_post_import_hook, deregister_post_import_hook, ) @@ -26,7 +26,7 @@ def test_register_post_import_hook_after_import(self): appropriate log debug message. """ test_hook = mock.MagicMock() - with mock.patch('ddtrace.utils.hook.log') as log_mock: + with mock.patch('oteltrace.utils.hook.log') as log_mock: import tests.utils.test_module # noqa register_post_import_hook('tests.utils.test_module', test_hook) test_hook.assert_called_once() @@ -64,9 +64,9 @@ def test_register_post_import_hook_different_modules(self): test_hook = mock.MagicMock() test_hook_redis = mock.MagicMock() register_post_import_hook('tests.utils.test_module', test_hook) - register_post_import_hook('ddtrace.contrib.redis', test_hook_redis) + register_post_import_hook('oteltrace.contrib.redis', test_hook_redis) import tests.utils.test_module # noqa - import ddtrace.contrib.redis # noqa + import oteltrace.contrib.redis # noqa test_hook.assert_called_once() test_hook_redis.assert_called_once() @@ -75,7 +75,7 @@ def test_register_post_import_hook_duplicate_register(self): Test that a function can be registered as a hook twice. 
""" test_hook = mock.MagicMock() - with mock.patch('ddtrace.utils.hook.log') as log_mock: + with mock.patch('oteltrace.utils.hook.log') as log_mock: register_post_import_hook('tests.utils.test_module', test_hook) register_post_import_hook('tests.utils.test_module', test_hook) import tests.utils.test_module # noqa @@ -171,7 +171,7 @@ def test_hook(module): raise Exception('test_hook_failed') register_post_import_hook('tests.utils.test_module', test_hook) - with mock.patch('ddtrace.utils.hook.log') as log_mock: + with mock.patch('oteltrace.utils.hook.log') as log_mock: import tests.utils.test_module # noqa calls = [ mock.call('hook "{}" for module "tests.utils.test_module" failed: test_hook_failed'.format(test_hook)) diff --git a/tests/test_instance_config.py b/tests/test_instance_config.py index 871906f1..ad94cfa9 100644 --- a/tests/test_instance_config.py +++ b/tests/test_instance_config.py @@ -1,8 +1,8 @@ from unittest import TestCase -from ddtrace import config -from ddtrace.pin import Pin -from ddtrace.settings import IntegrationConfig +from oteltrace import config +from oteltrace.pin import Pin +from oteltrace.settings import IntegrationConfig class InstanceConfigTestCase(TestCase): diff --git a/tests/test_integration.py b/tests/test_integration.py index 5fd0ad65..ea948b9b 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -1,77 +1,32 @@ -import os -import json -import time -import logging -import mock -import ddtrace +from unittest import TestCase -from unittest import TestCase, skip, skipUnless +from oteltrace.tracer import Tracer +from oteltrace.constants import FILTERS_KEY +from oteltrace.ext import http +from oteltrace.filters import FilterRequestsOnUrl +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from oteltrace.api_otel_exporter import APIOtel -from ddtrace.api import API, Response -from ddtrace.ext import http -from ddtrace.filters import FilterRequestsOnUrl -from ddtrace.constants import FILTERS_KEY -from ddtrace.tracer import Tracer -from ddtrace.encoding import JSONEncoder, MsgpackEncoder, get_encoder -from ddtrace.compat import httplib, PYTHON_INTERPRETER, PYTHON_VERSION -from ddtrace.internal.runtime.container import CGroupInfo -from ddtrace.vendor import msgpack -from tests.test_tracer import get_dummy_tracer - -class MockedLogHandler(logging.Handler): - """Record log messages to verify error logging logic""" - - def __init__(self, *args, **kwargs): - self.messages = {'debug': [], 'info': [], 'warning': [], 'error': [], 'critical': []} - super(MockedLogHandler, self).__init__(*args, **kwargs) - - def emit(self, record): - self.acquire() - try: - self.messages[record.levelname.lower()].append(record.getMessage()) - finally: - self.release() - - -class FlawedAPI(API): - """ - Deliberately report data with an incorrect method to trigger a 4xx response - """ - def _put(self, endpoint, data, count=0): - conn = httplib.HTTPConnection(self.hostname, self.port) - conn.request('HEAD', endpoint, data, self._headers) - return Response.from_http_response(conn.getresponse()) - - -@skipUnless( - os.environ.get('TEST_DATADOG_INTEGRATION', False), - 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' -) class TestWorkers(TestCase): """ Ensures that a workers interacts correctly with the main thread. These are part of integration tests so real calls are triggered. """ - def _decode(self, payload): - """ - Helper function that decodes data based on the given Encoder. 
- """ - if isinstance(self.api._encoder, JSONEncoder): - return json.loads(payload) - elif isinstance(self.api._encoder, MsgpackEncoder): - return msgpack.unpackb(payload, encoding='utf-8') def setUp(self): """ Create a tracer with running workers, while spying the ``_put()`` method to keep trace of triggered API calls. """ + self.exporter = InMemorySpanExporter() + api = APIOtel(exporter=self.exporter) + # create a new tracer self.tracer = Tracer() + self.tracer.configure(api=api) # spy the send() method self.api = self.tracer.writer.api - self.api._put = mock.Mock(self.api._put, wraps=self.api._put) def tearDown(self): """ @@ -86,51 +41,6 @@ def _wait_thread_flush(self): self.tracer.writer.stop() self.tracer.writer.join(None) - def _get_endpoint_payload(self, calls, endpoint): - """ - Helper to retrieve the endpoint call from a concurrent - trace or service call. - """ - for call, _ in calls: - if endpoint in call[0]: - return call[0], self._decode(call[1]) - - return None, None - - @skipUnless( - os.environ.get('TEST_DATADOG_INTEGRATION_UDS', False), - 'You should have a running trace agent on a socket and set TEST_DATADOG_INTEGRATION_UDS=1 env variable' - ) - def test_worker_single_trace_uds(self): - self.tracer.configure(uds_path='/tmp/ddagent/trace.sock') - # Write a first trace so we get a _worker - self.tracer.trace('client.testing').finish() - worker = self.tracer.writer - worker._log_error_status = mock.Mock( - worker._log_error_status, wraps=worker._log_error_status, - ) - self.tracer.trace('client.testing').finish() - - # one send is expected - self._wait_thread_flush() - # Check that no error was logged - assert worker._log_error_status.call_count == 0 - - def test_worker_single_trace_uds_wrong_socket_path(self): - self.tracer.configure(uds_path='/tmp/ddagent/nosockethere') - # Write a first trace so we get a _worker - self.tracer.trace('client.testing').finish() - worker = self.tracer.writer - worker._log_error_status = mock.Mock( - worker._log_error_status, wraps=worker._log_error_status, - ) - self.tracer.trace('client.testing').finish() - - # one send is expected - self._wait_thread_flush() - # Check that no error was logged - assert worker._log_error_status.call_count == 1 - def test_worker_single_trace(self): # create a trace block and send it using the transport system tracer = self.tracer @@ -138,76 +48,33 @@ def test_worker_single_trace(self): # one send is expected self._wait_thread_flush() - assert self.api._put.call_count == 1 - # check and retrieve the right call - endpoint, payload = self._get_endpoint_payload(self.api._put.call_args_list, '/v0.4/traces') - assert endpoint == '/v0.4/traces' - assert len(payload) == 1 - assert len(payload[0]) == 1 - assert payload[0][0]['name'] == 'client.testing' - # DEV: If we can make the writer flushing deterministic for the case of tests, then we can re-enable this - @skip('Writer flush intervals are impossible to time correctly to make this test not flaky') - def test_worker_multiple_traces(self): - # make a single send() if multiple traces are created before the flush interval - tracer = self.tracer - tracer.trace('client.testing').finish() - tracer.trace('client.testing').finish() + spans = self.exporter.get_finished_spans() - # one send is expected - self._wait_thread_flush() - assert self.api._put.call_count == 1 - # check and retrieve the right call - endpoint, payload = self._get_endpoint_payload(self.api._put.call_args_list, '/v0.4/traces') - assert endpoint == '/v0.4/traces' - assert len(payload) == 2 - assert 
len(payload[0]) == 1 - assert len(payload[1]) == 1 - assert payload[0][0]['name'] == 'client.testing' - assert payload[1][0]['name'] == 'client.testing' + self.assertEqual(len(spans), 1) + self.assertEqual(spans[0].name, 'client.testing') def test_worker_single_trace_multiple_spans(self): # make a single send() if a single trace with multiple spans is created before the flush tracer = self.tracer - parent = tracer.trace('client.testing') - tracer.trace('client.testing').finish() + parent = tracer.trace('client.testing.parent') + tracer.trace('client.testing.child').finish() parent.finish() # one send is expected self._wait_thread_flush() - assert self.api._put.call_count == 1 - # check and retrieve the right call - endpoint, payload = self._get_endpoint_payload(self.api._put.call_args_list, '/v0.4/traces') - assert endpoint == '/v0.4/traces' - assert len(payload) == 1 - assert len(payload[0]) == 2 - assert payload[0][0]['name'] == 'client.testing' - assert payload[0][1]['name'] == 'client.testing' - - def test_worker_http_error_logging(self): - # Tests the logging http error logic - tracer = self.tracer - self.tracer.writer.api = FlawedAPI(Tracer.DEFAULT_HOSTNAME, Tracer.DEFAULT_PORT) - tracer.trace('client.testing').finish() - - log = logging.getLogger('ddtrace.internal.writer') - log_handler = MockedLogHandler(level='DEBUG') - log.addHandler(log_handler) - self._wait_thread_flush() - assert tracer.writer._last_error_ts < time.time() + spans = self.exporter.get_finished_spans() - logged_errors = log_handler.messages['error'] - assert len(logged_errors) == 1 - assert 'Failed to send traces to Datadog Agent at http://localhost:8126: ' \ - 'HTTP error status 400, reason Bad Request, message Content-Type:' \ - in logged_errors[0] + self.assertEqual(len(spans), 2) + self.assertEqual(spans[0].name, 'client.testing.parent') + self.assertEqual(spans[1].name, 'client.testing.child') def test_worker_filter_request(self): - self.tracer.configure(settings={FILTERS_KEY: [FilterRequestsOnUrl(r'http://example\.com/health')]}) - # spy the send() method - self.api = self.tracer.writer.api - self.api._put = mock.Mock(self.api._put, wraps=self.api._put) + self.tracer.configure( + settings={FILTERS_KEY: [FilterRequestsOnUrl(r'http://example\.com/health')]}, + api=self.api, + ) span = self.tracer.trace('testing.filteredurl') span.set_tag(http.URL, 'http://example.com/health') @@ -217,296 +84,7 @@ def test_worker_filter_request(self): span.finish() self._wait_thread_flush() - # Only the second trace should have been sent - assert self.api._put.call_count == 1 - # check and retrieve the right call - endpoint, payload = self._get_endpoint_payload(self.api._put.call_args_list, '/v0.4/traces') - assert endpoint == '/v0.4/traces' - assert len(payload) == 1 - assert payload[0][0]['name'] == 'testing.nonfilteredurl' - - -@skipUnless( - os.environ.get('TEST_DATADOG_INTEGRATION', False), - 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' -) -class TestAPITransport(TestCase): - """ - Ensures that traces are properly sent to a local agent. These are part - of integration tests so real calls are triggered and you have to execute - a real trace-agent to let them pass. 
- """ - @mock.patch('ddtrace.internal.runtime.container.get_container_info') - def setUp(self, get_container_info): - """ - Create a tracer without workers, while spying the ``send()`` method - """ - # Mock the container id we use for making requests - get_container_info.return_value = CGroupInfo(container_id='test-container-id') - - # create a new API object to test the transport using synchronous calls - self.tracer = get_dummy_tracer() - self.api_json = API('localhost', 8126, encoder=JSONEncoder()) - self.api_msgpack = API('localhost', 8126, encoder=MsgpackEncoder()) - - @mock.patch('ddtrace.api.httplib.HTTPConnection') - def test_send_presampler_headers(self, mocked_http): - # register a single trace with a span and send them to the trace agent - self.tracer.trace('client.testing').finish() - trace = self.tracer.writer.pop() - traces = [trace] - - # make a call and retrieve the `conn` Mock object - self.api_msgpack.send_traces(traces) - request_call = mocked_http.return_value.request - assert request_call.call_count == 1 + spans = self.exporter.get_finished_spans() - # retrieve the headers from the mocked request call - expected_headers = { - 'Datadog-Container-Id': 'test-container-id', # mocked in setUp() - 'Datadog-Meta-Lang': 'python', - 'Datadog-Meta-Lang-Interpreter': PYTHON_INTERPRETER, - 'Datadog-Meta-Lang-Version': PYTHON_VERSION, - 'Datadog-Meta-Tracer-Version': ddtrace.__version__, - 'X-Datadog-Trace-Count': '1', - 'Content-Type': 'application/msgpack', - } - params, _ = request_call.call_args_list[0] - headers = params[3] - assert len(expected_headers) == len(headers) - for k, v in expected_headers.items(): - assert v == headers[k] - - @mock.patch('ddtrace.api.httplib.HTTPConnection') - def test_send_presampler_headers_not_in_services(self, mocked_http): - # register some services and send them to the trace agent - services = [{ - 'client.service': { - 'app': 'django', - 'app_type': 'web', - }, - }] - - # make a call and retrieve the `conn` Mock object - self.api_msgpack.send_services(services) - request_call = mocked_http.return_value.request - assert request_call.call_count == 0 - - def _send_traces_and_check(self, traces, nresponses=1): - # test JSON encoder - responses = self.api_json.send_traces(traces) - assert len(responses) == nresponses - for response in responses: - assert response.status == 200 - - # test Msgpack encoder - responses = self.api_msgpack.send_traces(traces) - assert len(responses) == nresponses - for response in responses: - assert response.status == 200 - - def test_send_single_trace(self): - # register a single trace with a span and send them to the trace agent - self.tracer.trace('client.testing').finish() - trace = self.tracer.writer.pop() - traces = [trace] - - self._send_traces_and_check(traces) - - def test_send_many_traces(self): - # register a single trace with a span and send them to the trace agent - self.tracer.trace('client.testing').finish() - trace = self.tracer.writer.pop() - # 30k is a right number to have both json and msgpack send 2 payload :) - traces = [trace] * 30000 - - self._send_traces_and_check(traces, 2) - - def test_send_single_with_wrong_errors(self): - # if the error field is set to True, it must be cast as int so - # that the agent decoder handles that properly without providing - # a decoding error - span = self.tracer.trace('client.testing') - span.error = True - span.finish() - trace = self.tracer.writer.pop() - traces = [trace] - - self._send_traces_and_check(traces) - - def test_send_multiple_traces(self): - # 
register some traces and send them to the trace agent - self.tracer.trace('client.testing').finish() - trace_1 = self.tracer.writer.pop() - self.tracer.trace('client.testing').finish() - trace_2 = self.tracer.writer.pop() - traces = [trace_1, trace_2] - - self._send_traces_and_check(traces) - - def test_send_single_trace_multiple_spans(self): - # register some traces and send them to the trace agent - with self.tracer.trace('client.testing'): - self.tracer.trace('client.testing').finish() - trace = self.tracer.writer.pop() - traces = [trace] - - self._send_traces_and_check(traces) - - def test_send_multiple_traces_multiple_spans(self): - # register some traces and send them to the trace agent - with self.tracer.trace('client.testing'): - self.tracer.trace('client.testing').finish() - trace_1 = self.tracer.writer.pop() - - with self.tracer.trace('client.testing'): - self.tracer.trace('client.testing').finish() - trace_2 = self.tracer.writer.pop() - - traces = [trace_1, trace_2] - - self._send_traces_and_check(traces) - - def test_send_single_service(self): - # register some services and send them to the trace agent - services = [{ - 'client.service': { - 'app': 'django', - 'app_type': 'web', - }, - }] - - # test JSON encoder - response = self.api_json.send_services(services) - assert response is None - - # test Msgpack encoder - response = self.api_msgpack.send_services(services) - assert response is None - - def test_send_service_called_multiple_times(self): - # register some services and send them to the trace agent - services = [{ - 'backend': { - 'app': 'django', - 'app_type': 'web', - }, - 'database': { - 'app': 'postgres', - 'app_type': 'db', - }, - }] - - # test JSON encoder - response = self.api_json.send_services(services) - assert response is None - - # test Msgpack encoder - response = self.api_msgpack.send_services(services) - assert response is None - - -@skipUnless( - os.environ.get('TEST_DATADOG_INTEGRATION', False), - 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' -) -class TestAPIDowngrade(TestCase): - """ - Ensures that if the tracing client found an earlier trace agent, - it will downgrade the current connection to a stable API version - """ - @skip('msgpack package split breaks this test; it works for newer version of msgpack') - def test_get_encoder_default(self): - # get_encoder should return MsgpackEncoder instance if - # msgpack and the CPP implementaiton are available - encoder = get_encoder() - assert isinstance(encoder, MsgpackEncoder) - - @mock.patch('ddtrace.encoding.MSGPACK_ENCODING', False) - def test_get_encoder_fallback(self): - # get_encoder should return JSONEncoder instance if - # msgpack or the CPP implementaiton, are not available - encoder = get_encoder() - assert isinstance(encoder, JSONEncoder) - - @skip('msgpack package split breaks this test; it works for newer version of msgpack') - def test_downgrade_api(self): - # make a call to a not existing endpoint, downgrades - # the current API to a stable one - tracer = get_dummy_tracer() - tracer.trace('client.testing').finish() - trace = tracer.writer.pop() - - # the encoder is right but we're targeting an API - # endpoint that is not available - api = API('localhost', 8126) - api._traces = '/v0.0/traces' - assert isinstance(api._encoder, MsgpackEncoder) - - # after the call, we downgrade to a working endpoint - response = api.send_traces([trace]) - assert response - assert response.status == 200 - assert isinstance(api._encoder, JSONEncoder) - - -@skipUnless( - 
os.environ.get('TEST_DATADOG_INTEGRATION', False), - 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' -) -class TestRateByService(TestCase): - """ - Check we get feedback from the agent and we're able to process it. - """ - def setUp(self): - """ - Create a tracer without workers, while spying the ``send()`` method - """ - # create a new API object to test the transport using synchronous calls - self.tracer = get_dummy_tracer() - self.api_json = API('localhost', 8126, encoder=JSONEncoder(), priority_sampling=True) - self.api_msgpack = API('localhost', 8126, encoder=MsgpackEncoder(), priority_sampling=True) - - def test_send_single_trace(self): - # register a single trace with a span and send them to the trace agent - self.tracer.trace('client.testing').finish() - trace = self.tracer.writer.pop() - traces = [trace] - - # [TODO:christian] when CI has an agent that is able to process the v0.4 - # endpoint, add a check to: - # - make sure the output is a valid JSON - # - make sure the priority sampler (if enabled) is updated - - # test JSON encoder - responses = self.api_json.send_traces(traces) - assert len(responses) == 1 - assert responses[0].status == 200 - assert responses[0].get_json() == dict(rate_by_service={'service:,env:': 1}) - - # test Msgpack encoder - responses = self.api_msgpack.send_traces(traces) - assert len(responses) == 1 - assert responses[0].status == 200 - assert responses[0].get_json() == dict(rate_by_service={'service:,env:': 1}) - - -@skipUnless( - os.environ.get('TEST_DATADOG_INTEGRATION', False), - 'You should have a running trace agent and set TEST_DATADOG_INTEGRATION=1 env variable' -) -class TestConfigure(TestCase): - """ - Ensures that when calling configure without specifying hostname and port, - previous overrides have been kept. 
- """ - def test_configure_keeps_api_hostname_and_port(self): - tracer = Tracer() # use real tracer with real api - assert 'localhost' == tracer.writer.api.hostname - assert 8126 == tracer.writer.api.port - tracer.configure(hostname='127.0.0.1', port=8127) - assert '127.0.0.1' == tracer.writer.api.hostname - assert 8127 == tracer.writer.api.port - tracer.configure(priority_sampling=True) - assert '127.0.0.1' == tracer.writer.api.hostname - assert 8127 == tracer.writer.api.port + self.assertEqual(len(spans), 1) + self.assertEqual(spans[0].name, 'testing.nonfilteredurl') diff --git a/tests/test_payload.py b/tests/test_payload.py deleted file mode 100644 index fc2cd25e..00000000 --- a/tests/test_payload.py +++ /dev/null @@ -1,111 +0,0 @@ -import math - -from ddtrace.encoding import get_encoder, JSONEncoder -from ddtrace.payload import Payload, PayloadFull -from ddtrace.span import Span - -from .base import BaseTracerTestCase - -import pytest - - -class PayloadTestCase(BaseTracerTestCase): - def test_init(self): - """ - When calling `Payload.init` - With an encoder - We use that encoder - With no encoder - We use the default encoder - """ - default_encoder_type = type(get_encoder()) - - payload = Payload() - self.assertIsInstance(payload.encoder, default_encoder_type) - - json_encoder = JSONEncoder() - payload = Payload(encoder=json_encoder) - self.assertEqual(payload.encoder, json_encoder) - - def test_add_trace(self): - """ - When calling `Payload.add_trace` - With a falsey value - Nothing is added to the payload - With a trace - We encode and add the trace to the payload - We increment the payload size by the expected amount - """ - payload = Payload() - - # Add falsey traces - for val in (False, None, 0, '', [], dict()): - payload.add_trace(val) - self.assertEqual(payload.length, 0) - self.assertTrue(payload.empty) - - # Add a single trace to the payload - trace = [Span(self.tracer, name='root.span'), Span(self.tracer, name='child.span')] - payload.add_trace(trace) - - self.assertEqual(payload.length, 1) - self.assertFalse(payload.empty) - - def test_get_payload(self): - """ - When calling `Payload.get_payload` - With no traces - We return the appropriate data - With traces - We return the appropriate data - """ - payload = Payload() - - # No traces - self.assertTrue(payload.empty) - encoded_data = payload.get_payload() - decoded_data = payload.encoder.decode(encoded_data) - self.assertEqual(decoded_data, []) - - # Add traces to the payload - for _ in range(5): - trace = [Span(self.tracer, name='root.span'), Span(self.tracer, name='child.span')] - payload.add_trace(trace) - - self.assertEqual(payload.length, 5) - self.assertFalse(payload.empty) - - # Assert the payload generated from Payload - encoded_data = payload.get_payload() - decoded_data = payload.encoder.decode(encoded_data) - self.assertEqual(len(decoded_data), 5) - for trace in decoded_data: - self.assertEqual(len(trace), 2) - self.assertEqual(trace[0][b'name'], b'root.span') - self.assertEqual(trace[1][b'name'], b'child.span') - - def test_full(self): - payload = Payload() - - # Empty - self.assertTrue(payload.empty) - - # Trace and it's size in bytes - trace = [Span(self.tracer, 'root.span'), Span(self.tracer, 'child.span')] - trace_size = len(payload.encoder.encode_trace(trace)) - - # Number of traces before we hit the max size limit and are considered full - num_traces = int(math.floor(payload.max_payload_size / trace_size)) - - # Add the traces - for _ in range(num_traces): - payload.add_trace(trace) - - # Just confirm - 
self.assertEqual(payload.length, num_traces) - - with pytest.raises(PayloadFull): - payload.add_trace(trace) - - # Just confirm again - self.assertEqual(payload.length, num_traces) diff --git a/tests/test_pin.py b/tests/test_pin.py index b8ccdc0e..f143b605 100644 --- a/tests/test_pin.py +++ b/tests/test_pin.py @@ -2,7 +2,7 @@ import pytest -from ddtrace import Pin +from oteltrace import Pin class PinTestCase(TestCase): diff --git a/tests/test_sampler.py b/tests/test_sampler.py index 53ea398a..7df69427 100644 --- a/tests/test_sampler.py +++ b/tests/test_sampler.py @@ -6,14 +6,14 @@ import pytest -from ddtrace.compat import iteritems -from ddtrace.constants import SAMPLING_PRIORITY_KEY, SAMPLE_RATE_METRIC_KEY -from ddtrace.constants import SAMPLING_AGENT_DECISION, SAMPLING_RULE_DECISION, SAMPLING_LIMIT_DECISION -from ddtrace.ext.priority import AUTO_KEEP, AUTO_REJECT -from ddtrace.internal.rate_limiter import RateLimiter -from ddtrace.sampler import DatadogSampler, SamplingRule -from ddtrace.sampler import RateSampler, AllSampler, RateByServiceSampler -from ddtrace.span import Span +from oteltrace.compat import iteritems +from oteltrace.constants import SAMPLING_PRIORITY_KEY, SAMPLE_RATE_METRIC_KEY +from oteltrace.constants import SAMPLING_AGENT_DECISION, SAMPLING_RULE_DECISION, SAMPLING_LIMIT_DECISION +from oteltrace.ext.priority import AUTO_KEEP, AUTO_REJECT +from oteltrace.internal.rate_limiter import RateLimiter +from oteltrace.sampler import OpenTelemetrySampler, SamplingRule +from oteltrace.sampler import RateSampler, AllSampler, RateByServiceSampler +from oteltrace.span import Span from .test_tracer import get_dummy_tracer @@ -381,7 +381,7 @@ def pattern(prop): rule = SamplingRule(sample_rate=1.0, name=pattern) span = create_span(name='test.span') - with mock.patch('ddtrace.sampler.log') as mock_log: + with mock.patch('oteltrace.sampler.log') as mock_log: assert rule.matches(span) is False mock_log.warning.assert_called_once_with( '%r pattern %r failed with %r: %s', @@ -432,39 +432,39 @@ def test_sampling_rule_sample_rate_0(): ) == 0 -def test_datadog_sampler_init(): +def test_opentelemetry_sampler_init(): # No args - sampler = DatadogSampler() + sampler = OpenTelemetrySampler() assert sampler.rules == [] assert isinstance(sampler.limiter, RateLimiter) - assert sampler.limiter.rate_limit == DatadogSampler.DEFAULT_RATE_LIMIT + assert sampler.limiter.rate_limit == OpenTelemetrySampler.DEFAULT_RATE_LIMIT # With rules rule = SamplingRule(sample_rate=1) - sampler = DatadogSampler(rules=[rule]) + sampler = OpenTelemetrySampler(rules=[rule]) assert sampler.rules == [rule] - assert sampler.limiter.rate_limit == DatadogSampler.DEFAULT_RATE_LIMIT + assert sampler.limiter.rate_limit == OpenTelemetrySampler.DEFAULT_RATE_LIMIT # With rate limit - sampler = DatadogSampler(rate_limit=10) + sampler = OpenTelemetrySampler(rate_limit=10) assert sampler.limiter.rate_limit == 10 # Invalid rules for val in (None, True, False, object(), 1, Exception()): with pytest.raises(TypeError): - DatadogSampler(rules=[val]) + OpenTelemetrySampler(rules=[val]) # Ensure rule order rule_1 = SamplingRule(sample_rate=1) rule_2 = SamplingRule(sample_rate=0.5, service='test') rule_3 = SamplingRule(sample_rate=0.25, name='flask.request') - sampler = DatadogSampler(rules=[rule_1, rule_2, rule_3]) + sampler = OpenTelemetrySampler(rules=[rule_1, rule_2, rule_3]) assert sampler.rules == [rule_1, rule_2, rule_3] -@mock.patch('ddtrace.internal.rate_limiter.RateLimiter.is_allowed') -def 
test_datadog_sampler_sample_no_rules(mock_is_allowed, dummy_tracer): - sampler = DatadogSampler() +@mock.patch('oteltrace.internal.rate_limiter.RateLimiter.is_allowed') +def test_opentelemetry_sampler_sample_no_rules(mock_is_allowed, dummy_tracer): + sampler = OpenTelemetrySampler() span = create_span(tracer=dummy_tracer) # Default SamplingRule(sample_rate=1.0) is applied @@ -494,8 +494,8 @@ def test_datadog_sampler_sample_no_rules(mock_is_allowed, dummy_tracer): mock_is_allowed.assert_called_once_with() -@mock.patch('ddtrace.internal.rate_limiter.RateLimiter.is_allowed') -def test_datadog_sampler_sample_rules(mock_is_allowed, dummy_tracer): +@mock.patch('oteltrace.internal.rate_limiter.RateLimiter.is_allowed') +def test_opentelemetry_sampler_sample_rules(mock_is_allowed, dummy_tracer): # Do not let the limiter get in the way of our test mock_is_allowed.return_value = True @@ -504,7 +504,7 @@ def test_datadog_sampler_sample_rules(mock_is_allowed, dummy_tracer): mock.Mock(spec=SamplingRule), mock.Mock(spec=SamplingRule), ] - sampler = DatadogSampler(rules=rules) + sampler = OpenTelemetrySampler(rules=rules) sampler.default_sampler = mock.Mock(spec=SamplingRule) sampler.default_sampler.return_value = True @@ -696,12 +696,12 @@ def reset(): sampler._priority_sampler = None -def test_datadog_sampler_tracer(dummy_tracer): +def test_opentelemetry_sampler_tracer(dummy_tracer): rule = SamplingRule(sample_rate=1.0, name='test.span') rule_spy = mock.Mock(spec=rule, wraps=rule) rule_spy.sample_rate = rule.sample_rate - sampler = DatadogSampler(rules=[rule_spy]) + sampler = OpenTelemetrySampler(rules=[rule_spy]) limiter_spy = mock.Mock(spec=sampler.limiter, wraps=sampler.limiter) sampler.limiter = limiter_spy sampler_spy = mock.Mock(spec=sampler, wraps=sampler) @@ -724,12 +724,12 @@ def test_datadog_sampler_tracer(dummy_tracer): assert_sampling_decision_tags(span, rule=1.0) -def test_datadog_sampler_tracer_rate_limited(dummy_tracer): +def test_opentelemetry_sampler_tracer_rate_limited(dummy_tracer): rule = SamplingRule(sample_rate=1.0, name='test.span') rule_spy = mock.Mock(spec=rule, wraps=rule) rule_spy.sample_rate = rule.sample_rate - sampler = DatadogSampler(rules=[rule_spy]) + sampler = OpenTelemetrySampler(rules=[rule_spy]) limiter_spy = mock.Mock(spec=sampler.limiter, wraps=sampler.limiter) limiter_spy.is_allowed.return_value = False # Have the limiter deny the span sampler.limiter = limiter_spy @@ -753,12 +753,12 @@ def test_datadog_sampler_tracer_rate_limited(dummy_tracer): assert_sampling_decision_tags(span, rule=1.0, limit=None) -def test_datadog_sampler_tracer_rate_0(dummy_tracer): +def test_opentelemetry_sampler_tracer_rate_0(dummy_tracer): rule = SamplingRule(sample_rate=0, name='test.span') # Sample rate of 0 means never sample rule_spy = mock.Mock(spec=rule, wraps=rule) rule_spy.sample_rate = rule.sample_rate - sampler = DatadogSampler(rules=[rule_spy]) + sampler = OpenTelemetrySampler(rules=[rule_spy]) limiter_spy = mock.Mock(spec=sampler.limiter, wraps=sampler.limiter) sampler.limiter = limiter_spy sampler_spy = mock.Mock(spec=sampler, wraps=sampler) @@ -781,12 +781,12 @@ def test_datadog_sampler_tracer_rate_0(dummy_tracer): assert_sampling_decision_tags(span, rule=0) -def test_datadog_sampler_tracer_child(dummy_tracer): +def test_opentelemetry_sampler_tracer_child(dummy_tracer): rule = SamplingRule(sample_rate=1.0) # No rules means it gets applied to every span rule_spy = mock.Mock(spec=rule, wraps=rule) rule_spy.sample_rate = rule.sample_rate - sampler = 
DatadogSampler(rules=[rule_spy]) + sampler = OpenTelemetrySampler(rules=[rule_spy]) limiter_spy = mock.Mock(spec=sampler.limiter, wraps=sampler.limiter) sampler.limiter = limiter_spy sampler_spy = mock.Mock(spec=sampler, wraps=sampler) @@ -815,12 +815,12 @@ def test_datadog_sampler_tracer_child(dummy_tracer): assert child._context.sampling_priority is AUTO_KEEP -def test_datadog_sampler_tracer_start_span(dummy_tracer): +def test_opentelemetry_sampler_tracer_start_span(dummy_tracer): rule = SamplingRule(sample_rate=1.0) # No rules means it gets applied to every span rule_spy = mock.Mock(spec=rule, wraps=rule) rule_spy.sample_rate = rule.sample_rate - sampler = DatadogSampler(rules=[rule_spy]) + sampler = OpenTelemetrySampler(rules=[rule_spy]) limiter_spy = mock.Mock(spec=sampler.limiter, wraps=sampler.limiter) sampler.limiter = limiter_spy sampler_spy = mock.Mock(spec=sampler, wraps=sampler) diff --git a/tests/test_span.py b/tests/test_span.py index f2fbbe1d..4e4ca41a 100644 --- a/tests/test_span.py +++ b/tests/test_span.py @@ -2,10 +2,10 @@ from unittest.case import SkipTest -from ddtrace.context import Context -from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.span import Span -from ddtrace.ext import errors, priority +from oteltrace.context import Context +from oteltrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from oteltrace.span import Span +from oteltrace.ext import errors, priority from .base import BaseTracerTestCase diff --git a/tests/test_tracer.py b/tests/test_tracer.py index fc8145ec..c2293a9c 100644 --- a/tests/test_tracer.py +++ b/tests/test_tracer.py @@ -7,14 +7,11 @@ from unittest.case import SkipTest -import mock - -import ddtrace -from ddtrace.ext import system -from ddtrace.context import Context +import oteltrace +from oteltrace.ext import system +from oteltrace.context import Context from .base import BaseTracerTestCase -from .util import override_global_tracer from .utils.tracer import DummyTracer from .utils.tracer import DummyWriter # noqa @@ -437,7 +434,7 @@ def test_adding_services(self): self.start_span('child', service='two', child_of=context) self.assertSetEqual(self.tracer._services, set(['one', 'two'])) - def test_configure_runtime_worker(self): + def _test_configure_runtime_worker(self): # by default runtime worker not started though runtime id is set self.assertIsNone(self.tracer._runtime_worker) @@ -456,7 +453,7 @@ def test_span_no_runtime_tags(self): self.assertIsNone(child.get_tag('language')) - def test_only_root_span_runtime(self): + def _test_only_root_span_runtime(self): self.tracer.configure(collect_metrics=True) root = self.start_span('root') @@ -469,36 +466,9 @@ def test_only_root_span_runtime(self): def test_installed_excepthook(): - ddtrace.install_excepthook() - assert sys.excepthook is ddtrace._excepthook - ddtrace.uninstall_excepthook() - assert sys.excepthook is not ddtrace._excepthook - ddtrace.install_excepthook() - assert sys.excepthook is ddtrace._excepthook - - -def test_excepthook(): - ddtrace.install_excepthook() - - class Foobar(Exception): - pass - - called = {} - - def original(type, value, traceback): - called['yes'] = True - - sys.excepthook = original - ddtrace.install_excepthook() - - e = Foobar() - - tracer = ddtrace.Tracer() - tracer._dogstatsd_client = mock.Mock() - with override_global_tracer(tracer): - sys.excepthook(e.__class__, e, None) - - tracer._dogstatsd_client.increment.assert_has_calls(( - mock.call('datadog.tracer.uncaught_exceptions', 1, tags=['class:Foobar']), - )) - assert called + 
oteltrace.install_excepthook() + assert sys.excepthook is oteltrace._excepthook + oteltrace.uninstall_excepthook() + assert sys.excepthook is not oteltrace._excepthook + oteltrace.install_excepthook() + assert sys.excepthook is oteltrace._excepthook diff --git a/tests/test_utils.py b/tests/test_utils.py index c0a36f51..daea6e60 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -2,9 +2,9 @@ import unittest import warnings -from ddtrace.utils.deprecation import deprecation, deprecated, format_message -from ddtrace.utils.formats import asbool, get_env, flatten_dict -from ddtrace.utils import sizeof +from oteltrace.utils.deprecation import deprecation, deprecated, format_message +from oteltrace.utils.formats import asbool, get_env, flatten_dict +from oteltrace.utils import sizeof class TestUtils(unittest.TestCase): @@ -30,7 +30,7 @@ def test_get_env(self): def test_get_env_found(self): # ensure `get_env` returns a value if the environment variable is set - os.environ['DD_REQUESTS_DISTRIBUTED_TRACING'] = '1' + os.environ['OTEL_REQUESTS_DISTRIBUTED_TRACING'] = '1' value = get_env('requests', 'distributed_tracing') self.assertEqual(value, '1') @@ -39,17 +39,17 @@ def test_get_env_found_legacy(self): # are used, raising a Deprecation warning with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') - os.environ['DATADOG_REQUESTS_DISTRIBUTED_TRACING'] = '1' + os.environ['OPENTELEMETRY_REQUESTS_DISTRIBUTED_TRACING'] = '1' value = get_env('requests', 'distributed_tracing') self.assertEqual(value, '1') self.assertEqual(len(w), 1) self.assertTrue(issubclass(w[-1].category, DeprecationWarning)) - self.assertTrue('Use `DD_` prefix instead' in str(w[-1].message)) + self.assertTrue('Use `OTEL_` prefix instead' in str(w[-1].message)) def test_get_env_key_priority(self): - # ensure `get_env` use `DD_` with highest priority - os.environ['DD_REQUESTS_DISTRIBUTED_TRACING'] = 'highest' - os.environ['DATADOG_REQUESTS_DISTRIBUTED_TRACING'] = 'lowest' + # ensure `get_env` use `OTEL_` with highest priority + os.environ['OTEL_REQUESTS_DISTRIBUTED_TRACING'] = 'highest' + os.environ['OPENTELEMETRY_REQUESTS_DISTRIBUTED_TRACING'] = 'lowest' value = get_env('requests', 'distributed_tracing') self.assertEqual(value, 'highest') @@ -126,7 +126,7 @@ class BrokenSlots(object): def test_sizeof_broken_slots(): - """https://github.com/DataDog/dd-trace-py/issues/1079""" + """https://github.com/opentelemetry/otel-trace-py/issues/1079""" assert sizeof.sizeof(BrokenSlots()) >= 1 diff --git a/tests/test_worker.py b/tests/test_worker.py index a08d2a2a..f8b2ac59 100644 --- a/tests/test_worker.py +++ b/tests/test_worker.py @@ -1,6 +1,6 @@ import pytest -from ddtrace import _worker +from oteltrace import _worker def test_start(): diff --git a/tests/unit/http/test_headers.py b/tests/unit/http/test_headers.py index d0e6b692..83396c64 100644 --- a/tests/unit/http/test_headers.py +++ b/tests/unit/http/test_headers.py @@ -1,8 +1,8 @@ import pytest -from ddtrace import tracer, Span -from ddtrace.http import store_request_headers, store_response_headers -from ddtrace.settings import Config, IntegrationConfig +from oteltrace import tracer, Span +from oteltrace.http import store_request_headers, store_response_headers +from oteltrace.settings import Config, IntegrationConfig class TestHeaders(object): diff --git a/tests/unit/test_settings.py b/tests/unit/test_settings.py index e2243227..bfe07836 100644 --- a/tests/unit/test_settings.py +++ b/tests/unit/test_settings.py @@ -1,40 +1,40 @@ -from ddtrace.settings 
import Config, HttpConfig, IntegrationConfig +from oteltrace.settings import Config, HttpConfig, IntegrationConfig from ..base import BaseTestCase class TestConfig(BaseTestCase): def test_environment_analytics_enabled(self): - with self.override_env(dict(DD_ANALYTICS_ENABLED='True')): + with self.override_env(dict(OTEL_ANALYTICS_ENABLED='True')): config = Config() self.assertTrue(config.analytics_enabled) - with self.override_env(dict(DD_ANALYTICS_ENABLED='False')): + with self.override_env(dict(OTEL_ANALYTICS_ENABLED='False')): config = Config() self.assertFalse(config.analytics_enabled) - with self.override_env(dict(DD_TRACE_ANALYTICS_ENABLED='True')): + with self.override_env(dict(OTEL_TRACE_ANALYTICS_ENABLED='True')): config = Config() self.assertTrue(config.analytics_enabled) - with self.override_env(dict(DD_TRACE_ANALYTICS_ENABLED='False')): + with self.override_env(dict(OTEL_TRACE_ANALYTICS_ENABLED='False')): config = Config() self.assertFalse(config.analytics_enabled) def test_environment_analytics_overrides(self): - with self.override_env(dict(DD_ANALYTICS_ENABLED='False', DD_TRACE_ANALYTICS_ENABLED='True')): + with self.override_env(dict(OTEL_ANALYTICS_ENABLED='False', OTEL_TRACE_ANALYTICS_ENABLED='True')): config = Config() self.assertTrue(config.analytics_enabled) - with self.override_env(dict(DD_ANALYTICS_ENABLED='False', DD_TRACE_ANALYTICS_ENABLED='False')): + with self.override_env(dict(OTEL_ANALYTICS_ENABLED='False', OTEL_TRACE_ANALYTICS_ENABLED='False')): config = Config() self.assertFalse(config.analytics_enabled) - with self.override_env(dict(DD_ANALYTICS_ENABLED='True', DD_TRACE_ANALYTICS_ENABLED='True')): + with self.override_env(dict(OTEL_ANALYTICS_ENABLED='True', OTEL_TRACE_ANALYTICS_ENABLED='True')): config = Config() self.assertTrue(config.analytics_enabled) - with self.override_env(dict(DD_ANALYTICS_ENABLED='True', DD_TRACE_ANALYTICS_ENABLED='False')): + with self.override_env(dict(OTEL_ANALYTICS_ENABLED='True', OTEL_TRACE_ANALYTICS_ENABLED='False')): config = Config() self.assertFalse(config.analytics_enabled) @@ -144,21 +144,21 @@ def test_environment_analytics_enabled(self): self.assertFalse(self.config.analytics_enabled) self.assertIsNone(self.config.foo.analytics_enabled) - with self.override_env(dict(DD_ANALYTICS_ENABLED='True')): + with self.override_env(dict(OTEL_ANALYTICS_ENABLED='True')): config = Config() self.assertTrue(config.analytics_enabled) self.assertIsNone(config.foo.analytics_enabled) - with self.override_env(dict(DD_FOO_ANALYTICS_ENABLED='True')): + with self.override_env(dict(OTEL_FOO_ANALYTICS_ENABLED='True')): config = Config() self.assertTrue(config.foo.analytics_enabled) self.assertEqual(config.foo.analytics_sample_rate, 1.0) - with self.override_env(dict(DD_FOO_ANALYTICS_ENABLED='False')): + with self.override_env(dict(OTEL_FOO_ANALYTICS_ENABLED='False')): config = Config() self.assertFalse(config.foo.analytics_enabled) - with self.override_env(dict(DD_FOO_ANALYTICS_ENABLED='True', DD_FOO_ANALYTICS_SAMPLE_RATE='0.5')): + with self.override_env(dict(OTEL_FOO_ANALYTICS_ENABLED='True', OTEL_FOO_ANALYTICS_SAMPLE_RATE='0.5')): config = Config() self.assertTrue(config.foo.analytics_enabled) self.assertEqual(config.foo.analytics_sample_rate, 0.5) @@ -171,7 +171,7 @@ def test_analytics_enabled_attribute(self): ic = IntegrationConfig(self.config, 'foo', analytics_enabled=False) self.assertFalse(ic.analytics_enabled) - with self.override_env(dict(DD_FOO_ANALYTICS_ENABLED='True')): + with self.override_env(dict(OTEL_FOO_ANALYTICS_ENABLED='True')): 
ic = IntegrationConfig(self.config, 'foo', analytics_enabled=False) self.assertFalse(ic.analytics_enabled) @@ -186,12 +186,12 @@ def test_get_analytics_sample_rate(self): ic = IntegrationConfig(self.config, 'foo', analytics_enabled=False) self.assertIsNone(ic.get_analytics_sample_rate()) - with self.override_env(dict(DD_ANALYTICS_ENABLED='True')): + with self.override_env(dict(OTEL_ANALYTICS_ENABLED='True')): config = Config() ic = IntegrationConfig(config, 'foo') self.assertEqual(ic.get_analytics_sample_rate(use_global_config=True), 1.0) - with self.override_env(dict(DD_ANALYTICS_ENABLED='False')): + with self.override_env(dict(OTEL_ANALYTICS_ENABLED='False')): config = Config() ic = IntegrationConfig(config, 'foo') self.assertIsNone(ic.get_analytics_sample_rate(use_global_config=True)) diff --git a/tests/unit/utils/test_http.py b/tests/unit/utils/test_http.py index 4a4409cb..9e75e0fb 100644 --- a/tests/unit/utils/test_http.py +++ b/tests/unit/utils/test_http.py @@ -1,4 +1,4 @@ -from ddtrace.utils.http import normalize_header_name +from oteltrace.utils.http import normalize_header_name class TestHeaderNameNormalization(object): diff --git a/tests/util.py b/tests/util.py index 6d1204a3..678c3f70 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,4 +1,4 @@ -import ddtrace +import oteltrace from contextlib import contextmanager @@ -10,11 +10,11 @@ def assert_dict_issuperset(a, b): @contextmanager def override_global_tracer(tracer): """Helper functions that overrides the global tracer available in the - `ddtrace` package. This is required because in some `httplib` tests we + `oteltrace` package. This is required because in some `httplib` tests we can't get easily the PIN object attached to the `HTTPConnection` to replace the used tracer with a dummy tracer. 
""" - original_tracer = ddtrace.tracer - ddtrace.tracer = tracer + original_tracer = oteltrace.tracer + oteltrace.tracer = tracer yield - ddtrace.tracer = original_tracer + oteltrace.tracer = original_tracer diff --git a/tests/utils/span.py b/tests/utils/span.py index af11fe1a..a9fc416d 100644 --- a/tests/utils/span.py +++ b/tests/utils/span.py @@ -1,11 +1,11 @@ -from ddtrace.span import Span +from oteltrace.span import Span NO_CHILDREN = object() class TestSpan(Span): """ - Test wrapper for a :class:`ddtrace.span.Span` that provides additional functions and assertions + Test wrapper for a :class:`oteltrace.span.Span` that provides additional functions and assertions Example:: @@ -22,8 +22,8 @@ def __init__(self, span): """ Constructor for TestSpan - :param span: The :class:`ddtrace.span.Span` to wrap - :type span: :class:`ddtrace.span.Span` + :param span: The :class:`oteltrace.span.Span` to wrap + :type span: :class:`oteltrace.span.Span` """ if isinstance(span, TestSpan): span = span._span @@ -33,7 +33,7 @@ def __init__(self, span): def __getattr__(self, key): """ - First look for property on the base :class:`ddtrace.span.Span` otherwise return this object's attribute + First look for property on the base :class:`oteltrace.span.Span` otherwise return this object's attribute """ if hasattr(self._span, key): return getattr(self._span, key) @@ -41,12 +41,12 @@ def __getattr__(self, key): return self.__getattribute__(key) def __setattr__(self, key, value): - """Pass through all assignment to the base :class:`ddtrace.span.Span`""" + """Pass through all assignment to the base :class:`oteltrace.span.Span`""" return setattr(self._span, key, value) def __eq__(self, other): """ - Custom equality code to ensure we are using the base :class:`ddtrace.span.Span.__eq__` + Custom equality code to ensure we are using the base :class:`oteltrace.span.Span.__eq__` :param other: The object to check equality with :type other: object @@ -172,7 +172,7 @@ def assert_metrics(self, metrics, exact=False): Example:: span = TestSpan(span) - span.assert_metrics({'_dd1.sr.eausr': 1}) + span.assert_metrics({'_otel1.sr.eausr': 1}) :param metrics: Property/Value pairs to evaluate on this span :type metrics: dict @@ -217,7 +217,7 @@ def _ensure_test_spans(self, spans): """ internal helper to ensure the list of spans are all :class:`tests.utils.span.TestSpan` - :param spans: List of :class:`ddtrace.span.Span` or :class:`tests.utils.span.TestSpan` + :param spans: List of :class:`oteltrace.span.Span` or :class:`tests.utils.span.TestSpan` :type spans: list :returns: A list og :class:`tests.utils.span.TestSpan` :rtype: list @@ -362,7 +362,7 @@ class TestSpanNode(TestSpan, TestSpanContainer): """ A :class:`tests.utils.span.TestSpan` which is used as part of a span tree. - Each :class:`tests.utils.span.TestSpanNode` represents the current :class:`ddtrace.span.Span` + Each :class:`tests.utils.span.TestSpanNode` represents the current :class:`oteltrace.span.Span` along with any children who have that span as it's parent. This class can be used to assert on the parent/child relationships between spans. 
diff --git a/tests/utils/tracer.py b/tests/utils/tracer.py index 180d700c..9f4d2bd8 100644 --- a/tests/utils/tracer.py +++ b/tests/utils/tracer.py @@ -1,6 +1,5 @@ -from ddtrace.encoding import JSONEncoder, MsgpackEncoder -from ddtrace.internal.writer import AgentWriter -from ddtrace.tracer import Tracer +from oteltrace.internal.writer import AgentWriter +from oteltrace.tracer import Tracer class DummyWriter(AgentWriter): @@ -14,8 +13,6 @@ def __init__(self, *args, **kwargs): self.spans = [] self.traces = [] self.services = {} - self.json_encoder = JSONEncoder() - self.msgpack_encoder = MsgpackEncoder() def write(self, spans=None, services=None): if spans: @@ -23,14 +20,10 @@ def write(self, spans=None, services=None): # put spans in a list like we do in the real execution path # with both encoders trace = [spans] - self.json_encoder.encode_traces(trace) - self.msgpack_encoder.encode_traces(trace) self.spans += spans self.traces += trace if services: - self.json_encoder.encode_services(services) - self.msgpack_encoder.encode_services(services) self.services.update(services) def pop(self): @@ -64,12 +57,13 @@ def __init__(self): self._update_writer() def _update_writer(self): - self.writer = DummyWriter( - hostname=self.writer.api.hostname, - port=self.writer.api.port, - filters=self.writer._filters, - priority_sampler=self.writer._priority_sampler, - ) + if hasattr(self, 'writer'): + self.writer = DummyWriter( + filters=self.writer._filters, + priority_sampler=self.writer._priority_sampler, + ) + else: + self.writer = DummyWriter() def configure(self, *args, **kwargs): super(DummyTracer, self).configure(*args, **kwargs) diff --git a/tox.ini b/tox.ini index ef86b418..fbfa7525 100644 --- a/tox.ini +++ b/tox.ini @@ -19,12 +19,11 @@ envlist = flake8 wait - {py27,py34,py35,py36,py37}-tracer - {py27,py34,py35,py36,py37}-internal - {py27,py34,py35,py36,py37}-integration - {py27,py34,py35,py36,py37}-ddtracerun - {py27,py34,py35,py36,py37}-test_utils - {py27,py34,py35,py36,py37}-test_logging + {py34,py35,py36,py37}-tracer + {py34,py35,py36,py37}-internal + {py34,py35,py36,py37}-oteltracerun + {py34,py35,py36,py37}-test_utils + {py34,py35,py36,py37}-test_logging # Integrations environments aiobotocore_contrib-py34-aiobotocore{02,03,04} aiobotocore_contrib-{py35,py36}-aiobotocore{02,03,04,05,07,08,09,010} @@ -36,68 +35,62 @@ envlist = aiohttp_contrib-{py35,py36,py37}-aiohttp{30,31,32,33,34,35}-aiohttp_jinja{015}-yarl10 aiopg_contrib-{py34,py35,py36}-aiopg{012,015} aiopg_contrib-py37-aiopg015 - algoliasearch_contrib-{py27,py34,py35,py36,py37}-algoliasearch{1,2} + algoliasearch_contrib-{py34,py35,py36,py37}-algoliasearch{1,2} asyncio_contrib-{py34,py35,py36,py37} # boto needs moto<1 and moto<1 does not support Python >= 3.7 - boto_contrib-{py27,py34,py35,py36}-boto - botocore_contrib-{py27,py34,py35,py36,py37}-botocore - bottle_contrib{,_autopatch}-{py27,py34,py35,py36,py37}-bottle{11,12}-webtest - cassandra_contrib-{py27,py34,py35,py36,py37}-cassandra{35,36,37,38,315} + boto_contrib-{py34,py35,py36}-boto + botocore_contrib-{py34,py35,py36,py37}-botocore + bottle_contrib{,_autopatch}-{py34,py35,py36,py37}-bottle{11,12}-webtest + cassandra_contrib-{py34,py35,py36,py37}-cassandra{35,36,37,38,315} # Non-4.x celery should be able to use the older redis lib, since it locks to an older kombu - celery_contrib-{py27,py34,py35,py36}-celery{31}-redis{210} + celery_contrib-{py34,py35,py36}-celery{31}-redis{210} # 4.x celery bumps kombu to 4.4+, which requires redis 3.2 or later, this tests against # older redis 
with an older kombu, and newer kombu/newer redis. # https://github.com/celery/kombu/blob/3e60e6503a77b9b1a987cf7954659929abac9bac/Changelog#L35 - celery_contrib-{py27,py34,py35,py36}-celery{40,41}-{redis210-kombu43,redis320-kombu44} + celery_contrib-{py34,py35,py36}-celery{40,41}-{redis210-kombu43,redis320-kombu44} # Celery 4.2 is now limited to Kombu 4.3 # https://github.com/celery/celery/commit/1571d414461f01ae55be63a03e2adaa94dbcb15d - celery_contrib-{py27,py34,py35,py36}-celery42-redis210-kombu43 + celery_contrib-{py34,py35,py36}-celery42-redis210-kombu43 # Celery 4.3 wants Kombu >= 4.4 and Redis >= 3.2 # Python 3.7 needs Celery 4.3 - celery_contrib-{py27,py34,py35,py36,py37}-celery43-redis320-kombu44 - consul_contrib-py{27,34,35,36,37}-consul{07,10,11} - dbapi_contrib-{py27,py34,py35,py36} - django_contrib{,_autopatch}-{py27,py34,py35,py36}-django{18,111}-djangopylibmc06-djangoredis45-pylibmc-redis{210}-memcached + celery_contrib-{py34,py35,py36,py37}-celery43-redis320-kombu44 + consul_contrib-py{34,35,36,37}-consul{07,10,11} + dbapi_contrib-{py34,py35,py36} + django_contrib{,_autopatch}-{py34,py35,py36}-django{18,111}-djangopylibmc06-djangoredis45-pylibmc-redis{210}-memcached django_contrib{,_autopatch}-{py34,py35,py36}-django{200}-djangopylibmc06-djangoredis45-pylibmc-redis{210}-memcached - django_drf_contrib-{py27,py34,py35,py36}-django{111}-djangorestframework{34,37,38} + django_drf_contrib-{py34,py35,py36}-django{111}-djangorestframework{34,37,38} django_drf_contrib-{py34,py35,py36}-django{200}-djangorestframework{37,38} - elasticsearch_contrib-{py27,py34,py35,py36}-elasticsearch{16,17,18,23,24,51,52,53,54,63} - elasticsearch_contrib-{py27,py34,py35,py36}-elasticsearch1{100} - elasticsearch_contrib-{py27,py34,py35,py36}-elasticsearch2{50} - elasticsearch_contrib-{py27,py34,py35,py36}-elasticsearch5{50} - falcon_contrib{,_autopatch}-{py27,py34,py35,py36}-falcon{10,11,12,13,14} - flask_contrib{,_autopatch}-{py27,py34,py35,py36}-flask{010,011,012,10}-blinker + elasticsearch_contrib-{py34,py35,py36}-elasticsearch{16,17,18,23,24,51,52,53,54,63} + elasticsearch_contrib-{py34,py35,py36}-elasticsearch1{100} + elasticsearch_contrib-{py34,py35,py36}-elasticsearch2{50} + elasticsearch_contrib-{py34,py35,py36}-elasticsearch5{50} + falcon_contrib{,_autopatch}-{py34,py35,py36}-falcon{10,11,12,13,14} + flask_contrib{,_autopatch}-{py34,py35,py36}-flask{010,011,012,10}-blinker # Flask <=0.9 does not support Python 3 - flask_contrib{,_autopatch}-{py27}-flask{09}-blinker - flask_cache_contrib{,_autopatch}-{py27,py34,py35,py36,py37}-flask{010,011,012}-flaskcache{013}-memcached-redis{210}-blinker - flask_cache_contrib{,_autopatch}-{py27}-flask{010,011}-flaskcache{012}-memcached-redis{210}-blinker - futures_contrib-{py27}-futures{30,31,32} + flask_cache_contrib{,_autopatch}-{py34,py35,py36,py37}-flask{010,011,012}-flaskcache{013}-memcached-redis{210}-blinker futures_contrib-{py34,py35,py36,py37} - gevent_contrib-{py27,py34,py35,py36}-gevent{11,12,13} + gevent_contrib-{py34,py35,py36}-gevent{11,12,13} gevent_contrib-py37-gevent{13,14} # gevent 1.0 is not python 3 compatible - gevent_contrib-{py27}-gevent{10} - grpc_contrib-{py27,py34,py35,py36,py37}-grpc{112,113,114,115,116,117,118,119,120,121,122} - httplib_contrib-{py27,py34,py35,py36,py37} - jinja2_contrib-{py27,py34,py35,py36,py37}-jinja{27,28,29,210} - mako_contrib-{py27,py34,py35,py36,py37}-mako{010,100} + grpc_contrib-{py34,py35,py36,py37}-grpc{112,113,114,115,116,117,118,119,120,121,122} + httplib_contrib-{py34,py35,py36,py37} + 
jinja2_contrib-{py34,py35,py36,py37}-jinja{27,28,29,210} + mako_contrib-{py34,py35,py36,py37}-mako{010,100} molten_contrib-py{36,37}-molten{070,072} - mongoengine_contrib-{py27,py34,py35,py36,py37}-mongoengine{015,016,017,018,latest}-pymongo{latest} - mysql_contrib-{py27,py34,py35,py36,py37}-mysqlconnector - mysqldb_contrib-{py27}-mysqldb{12} - mysqldb_contrib-{py27,py34,py35,py36,py37}-mysqlclient{13} - psycopg_contrib-{py27,py34,py35,py36}-psycopg2{24,25,26,27,28} + mongoengine_contrib-{py34,py35,py36,py37}-mongoengine{015,016,017,018,latest}-pymongo{latest} + mysql_contrib-{py34,py35,py36,py37}-mysqlconnector + mysqldb_contrib-{py34,py35,py36,py37}-mysqlclient{13} + psycopg_contrib-{py34,py35,py36}-psycopg2{24,25,26,27,28} psycopg_contrib-py37-psycopg2{27,28} - pylibmc_contrib-{py27,py34,py35,py36,py37}-pylibmc{140,150} - pylons_contrib-{py27}-pylons{096,097,010,10} - pymemcache_contrib{,_autopatch}-{py27,py34,py35,py36,py37}-pymemcache{130,140} - pymongo_contrib-{py27,py34,py35,py36,py37}-pymongo{30,31,32,33,34,35,36,37,38,39,latest}-mongoengine{latest} - pymysql_contrib-{py27,py34,py35,py36,py37}-pymysql{07,08,09} - pyramid_contrib{,_autopatch}-{py27,py34,py35,py36,py37}-pyramid{17,18,19}-webtest - redis_contrib-{py27,py34,py35,py36,py37}-redis{26,27,28,29,210,300} - rediscluster_contrib-{py27,py34,py35,py36,py37}-rediscluster{135,136}-redis210 - requests_contrib{,_autopatch}-{py27,py34,py35,py36,py37}-requests{208,209,210,211,212,213,219} - kombu_contrib-{py27,py34,py35,py36}-kombu{40,41,42} + pylibmc_contrib-{py34,py35,py36,py37}-pylibmc{140,150} + pymemcache_contrib{,_autopatch}-{py34,py35,py36,py37}-pymemcache{130,140} + pymongo_contrib-{py34,py35,py36,py37}-pymongo{30,31,32,33,34,35,36,37,38,39,latest}-mongoengine{latest} + pymysql_contrib-{py34,py35,py36,py37}-pymysql{07,08,09} + pyramid_contrib{,_autopatch}-{py34,py35,py36,py37}-pyramid{17,18,19}-webtest + redis_contrib-{py34,py35,py36,py37}-redis{26,27,28,29,210,300} + rediscluster_contrib-{py34,py35,py36,py37}-rediscluster{135,136}-redis210 + requests_contrib{,_autopatch}-{py34,py35,py36,py37}-requests{208,209,210,211,212,213,219} + kombu_contrib-{py34,py35,py36}-kombu{40,41,42} # Python 3.7 needs Kombu >= 4.2 kombu_contrib-py37-kombu42 # python 3.6 requests + gevent regression test @@ -105,44 +98,35 @@ envlist = # https://github.com/gevent/gevent/issues/903 requests_gevent_contrib-{py36}-requests{208,209,210,211,212,213,219}-gevent{12,13} requests_gevent_contrib-py37-requests{208,209,210,211,212,213,219}-gevent13 - sqlalchemy_contrib-{py27,py34,py35,py36,py37}-sqlalchemy{10,11,12}-psycopg228-mysqlconnector - sqlite3_contrib-{py27,py34,py35,py36,py37}-sqlite3 - tornado_contrib-{py27,py34,py35,py36,py37}-tornado{40,41,42,43,44,45} + sqlalchemy_contrib-{py34,py35,py36,py37}-sqlalchemy{10,11,12}-psycopg228-mysqlconnector + sqlite3_contrib-{py34,py35,py36,py37}-sqlite3 + tornado_contrib-{py34,py35,py36,py37}-tornado{40,41,42,43,44,45} tornado_contrib-{py37}-tornado{50,51,60} - tornado_contrib-{py27}-tornado{40,41,42,43,44,45}-futures{30,31,32} - vertica_contrib-{py27,py34,py35,py36,py37}-vertica{060,070} -# Opentracer - {py27,py34,py35,py36,py37}-opentracer - {py34,py35,py36,py37}-opentracer_asyncio - {py34,py35,py36,py37}-opentracer_tornado-tornado{40,41,42,43,44} - {py27}-opentracer_gevent-gevent{10} - {py27,py34,py35,py36}-opentracer_gevent-gevent{11,12} - py37-opentracer_gevent-gevent{13,14} + vertica_contrib-{py34,py35,py36,py37}-vertica{060,070} # Unit tests: pytest based test suite that do not require any additional 
dependency - unit_tests-{py27,py34,py35,py36,py37} - benchmarks-{py27,py34,py35,py36,py37} + unit_tests-{py34,py35,py36,py37} + benchmarks-{py34,py35,py36,py37} [testenv] usedevelop = True basepython = - py27: python2.7 py34: python3.4 py35: python3.5 py36: python3.6 py37: python3.7 deps = -# Avoid installing wrapt and msgpack-python, our only packages declared, dependencies, when we are testing the real +# Avoid installing wrapt, our only packages declared, dependencies, when we are testing the real # distribution build. - !ddtracerun: wrapt - !msgpack03-!msgpack04-!msgpack05-!ddtracerun: msgpack-python + !oteltracerun: wrapt pdbpp pytest>=3 pytest-benchmark pytest-cov pytest-django - opentracing psutil + opentelemetry-api + opentelemetry-sdk # test dependencies installed in all envs mock # force the downgrade as a workaround @@ -200,7 +184,7 @@ deps = consul07: python-consul>=0.7,<1.0 consul10: python-consul>=1.0,<1.1 consul11: python-consul>=1.1,<1.2 - ddtracerun: redis + oteltracerun: redis django18: django>=1.8,<1.9 django111: django>=1.11,<1.12 django200: django>=2.0,<2.1 @@ -285,14 +269,6 @@ deps = mysqlconnector: mysql-connector-python mysqldb12: mysql-python>=1.2,<1.3 mysqlclient13: mysqlclient>=1.3,<1.4 -# webob is required for Pylons < 1.0 - pylons096: pylons>=0.9.6,<0.9.7 - pylons096: webob<1.1 - pylons097: pylons>=0.9.7,<0.9.8 - pylons097: webob<1.1 - pylons010: pylons>=0.10,<0.11 - pylons010: webob<1.1 - pylons10: pylons>=1.0,<1.1 pylibmc: pylibmc pylibmc140: pylibmc>=1.4.0,<1.5.0 pylibmc150: pylibmc>=1.5.0,<1.6.0 @@ -364,16 +340,9 @@ passenv=TEST_* commands = # run only essential tests related to the tracing client - tracer: pytest {posargs} --ignore="tests/contrib" --ignore="tests/test_integration.py" --ignore="tests/commands" --ignore="tests/opentracer" --ignore="tests/unit" --ignore="tests/internal" tests -# run only the `ddtrace.internal` tests + tracer: pytest {posargs} --ignore="tests/contrib" --ignore="tests/commands" --ignore="tests/unit" --ignore="tests/internal" tests +# run only the `oteltrace.internal` tests internal: pytest {posargs} tests/internal -# run only the opentrace tests - opentracer: pytest {posargs} tests/opentracer/test_tracer.py tests/opentracer/test_span.py tests/opentracer/test_span_context.py tests/opentracer/test_dd_compatibility.py tests/opentracer/test_utils.py - opentracer_asyncio: pytest {posargs} tests/opentracer/test_tracer_asyncio.py - opentracer_tornado-tornado{40,41,42,43,44}: pytest {posargs} tests/opentracer/test_tracer_tornado.py - opentracer_gevent: pytest {posargs} tests/opentracer/test_tracer_gevent.py -# integration tests - integration: pytest {posargs} tests/test_integration.py # Contribs aiobotocore_contrib-{py34,py35,py36,py37}: pytest {posargs} tests/contrib/aiobotocore aiopg_contrib-{py34,py35,py36,py37}: pytest {posargs} tests/contrib/aiopg @@ -383,19 +352,19 @@ commands = boto_contrib: pytest {posargs} tests/contrib/boto botocore_contrib: pytest {posargs} tests/contrib/botocore bottle_contrib: pytest {posargs} --ignore="tests/contrib/bottle/test_autopatch.py" tests/contrib/bottle/ - bottle_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} tests/contrib/bottle/test_autopatch.py + bottle_contrib_autopatch: python tests/oteltrace_run.py pytest {posargs} tests/contrib/bottle/test_autopatch.py cassandra_contrib: pytest {posargs} tests/contrib/cassandra celery_contrib: pytest {posargs} tests/contrib/celery consul_contrib: pytest {posargs} tests/contrib/consul dbapi_contrib: pytest {posargs} tests/contrib/dbapi 
django_contrib: pytest {posargs} tests/contrib/django - django_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} tests/contrib/django + django_contrib_autopatch: python tests/oteltrace_run.py pytest {posargs} tests/contrib/django django_drf_contrib: pytest {posargs} tests/contrib/djangorestframework elasticsearch_contrib: pytest {posargs} tests/contrib/elasticsearch falcon_contrib: pytest {posargs} tests/contrib/falcon/test_middleware.py tests/contrib/falcon/test_distributed_tracing.py - falcon_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} tests/contrib/falcon/test_autopatch.py + falcon_contrib_autopatch: python tests/oteltrace_run.py pytest {posargs} tests/contrib/falcon/test_autopatch.py flask_contrib: pytest {posargs} tests/contrib/flask - flask_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} tests/contrib/flask_autopatch + flask_contrib_autopatch: python tests/oteltrace_run.py pytest {posargs} tests/contrib/flask_autopatch flask_cache_contrib: pytest {posargs} tests/contrib/flask_cache futures_contrib: pytest {posargs} tests/contrib/futures gevent_contrib: pytest {posargs} tests/contrib/gevent @@ -409,13 +378,12 @@ commands = mysqldb_contrib: pytest {posargs} tests/contrib/mysqldb psycopg_contrib: pytest {posargs} tests/contrib/psycopg pylibmc_contrib: pytest {posargs} tests/contrib/pylibmc - pylons_contrib: pytest {posargs} tests/contrib/pylons pymemcache_contrib: pytest {posargs} --ignore="tests/contrib/pymemcache/autopatch" tests/contrib/pymemcache/ - pymemcache_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} tests/contrib/pymemcache/autopatch/ + pymemcache_contrib_autopatch: python tests/oteltrace_run.py pytest {posargs} tests/contrib/pymemcache/autopatch/ pymongo_contrib: pytest {posargs} tests/contrib/pymongo pymysql_contrib: pytest {posargs} tests/contrib/pymysql pyramid_contrib: pytest {posargs} tests/contrib/pyramid/test_pyramid.py - pyramid_contrib_autopatch: python tests/ddtrace_run.py pytest {posargs} tests/contrib/pyramid/test_pyramid_autopatch.py + pyramid_contrib_autopatch: python tests/oteltrace_run.py pytest {posargs} tests/contrib/pyramid/test_pyramid_autopatch.py redis_contrib: pytest {posargs} tests/contrib/redis rediscluster_contrib: pytest {posargs} tests/contrib/rediscluster requests_contrib: pytest {posargs} tests/contrib/requests @@ -426,7 +394,7 @@ commands = tornado_contrib: pytest {posargs} tests/contrib/tornado vertica_contrib: pytest {posargs} tests/contrib/vertica/ # run subsets of the tests for particular library versions - ddtracerun: pytest {posargs} tests/commands/test_runner.py + oteltracerun: pytest {posargs} tests/commands/test_runner.py test_utils: pytest {posargs} tests/contrib/test_utils.py test_logging: pytest {posargs} tests/contrib/logging/ # Unit tests: pytest based test suite that do not require any additional dependency. 
@@ -435,7 +403,7 @@ commands = [testenv:wait] commands=python tests/wait-for-services.py {posargs} -basepython=python +basepython=python3 deps= cassandra-driver psycopg2 @@ -458,22 +426,7 @@ inline-quotes = ' [falcon_autopatch] setenv = - DATADOG_SERVICE_NAME=my-falcon -[testenv:falcon_contrib_autopatch-py27-falcon10] -setenv = - {[falcon_autopatch]setenv} -[testenv:falcon_contrib_autopatch-py27-falcon11] -setenv = - {[falcon_autopatch]setenv} -[testenv:falcon_contrib_autopatch-py27-falcon12] -setenv = - {[falcon_autopatch]setenv} -[testenv:falcon_contrib_autopatch-py27-falcon13] -setenv = - {[falcon_autopatch]setenv} -[testenv:falcon_contrib_autopatch-py27-falcon14] -setenv = - {[falcon_autopatch]setenv} + OPENTELEMETRY_SERVICE_NAME=my-falcon [testenv:falcon_contrib_autopatch-py34-falcon10] setenv = {[falcon_autopatch]setenv} @@ -538,18 +491,10 @@ setenv = [pyramid_autopatch] setenv = - DATADOG_SERVICE_NAME = foobar - DATADOG_PYRAMID_DISTRIBUTED_TRACING = True -[testenv:pyramid_contrib_autopatch-py27-pyramid17-webtest] -setenv = - {[pyramid_autopatch]setenv} + OPENTELEMETRY_SERVICE_NAME = foobar + OPENTELEMETRY_PYRAMID_DISTRIBUTED_TRACING = True + OTEL_TRACER_PROPAGATOR = datadog -[testenv:pyramid_contrib_autopatch-py27-pyramid18-webtest] -setenv = - {[pyramid_autopatch]setenv} -[testenv:pyramid_contrib_autopatch-py27-pyramid19-webtest] -setenv = - {[pyramid_autopatch]setenv} [testenv:pyramid_contrib_autopatch-py34-pyramid17-webtest] setenv = {[pyramid_autopatch]setenv} @@ -590,20 +535,8 @@ setenv = [flask_autopatch] setenv = - DATADOG_SERVICE_NAME = test.flask.service - DATADOG_PATCH_MODULES = jinja2:false -[testenv:flask_contrib_autopatch-py27-flask010-blinker] -setenv = - {[flask_autopatch]setenv} -[testenv:flask_contrib_autopatch-py27-flask011-blinker] -setenv = - {[flask_autopatch]setenv} -[testenv:flask_contrib_autopatch-py27-flask012-blinker] -setenv = - {[flask_autopatch]setenv} -[testenv:flask_contrib_autopatch-py27-flask10-blinker] -setenv = - {[flask_autopatch]setenv} + OPENTELEMETRY_SERVICE_NAME = test.flask.service + OPENTELEMETRY_PATCH_MODULES = jinja2:false [testenv:flask_contrib_autopatch-py34-flask010-blinker] setenv = {[flask_autopatch]setenv} @@ -650,15 +583,6 @@ setenv = setenv = {[flask_autopatch]setenv} [testenv:flask_contrib_autopatch-py37-flask10-blinker] -setenv = - {[flask_autopatch]setenv} -[testenv:flask_contrib_autopatch-py27-flask010-flaskcache013-memcached-redis210-blinker] -setenv = - {[flask_autopatch]setenv} -[testenv:flask_contrib_autopatch-py27-flask011-flaskcache013-memcached-redis210-blinker] -setenv = - {[flask_autopatch]setenv} -[testenv:flask_contrib_autopatch-py27-flask012-flaskcache013-memcached-redis210-blinker] setenv = {[flask_autopatch]setenv} [testenv:flask_contrib_autopatch-py34-flask010-flaskcache013-memcached-redis210-blinker] @@ -697,20 +621,11 @@ setenv = [testenv:flask_contrib_autopatch-py37-flask012-flaskcache013-memcached-redis210-blinker] setenv = {[flask_autopatch]setenv} -[testenv:flask_contrib_autopatch-py27-flask010-flaskcache012-memcached-redis210-blinker] -setenv = - {[flask_autopatch]setenv} -[testenv:flask_contrib_autopatch-py27-flask011-flaskcache012-memcached-redis210-blinker] -setenv = - {[flask_autopatch]setenv} [bottle_autopatch] setenv = - DATADOG_SERVICE_NAME = bottle-app -[testenv:bottle_contrib_autopatch-py27-bottle11-webtest] -setenv = - {[bottle_autopatch]setenv} + OPENTELEMETRY_SERVICE_NAME = bottle-app [testenv:bottle_contrib_autopatch-py34-bottle11-webtest] setenv = {[bottle_autopatch]setenv} @@ -721,9 +636,6 
@@ setenv = setenv = {[bottle_autopatch]setenv} [testenv:bottle_contrib_autopatch-py37-bottle11-webtest] -setenv = - {[bottle_autopatch]setenv} -[testenv:bottle_contrib_autopatch-py27-bottle12-webtest] setenv = {[bottle_autopatch]setenv} [testenv:bottle_contrib_autopatch-py34-bottle12-webtest] @@ -740,6 +652,38 @@ setenv = {[bottle_autopatch]setenv} +[django_autopatch] +setenv = + OTEL_TRACER_PROPAGATOR = datadog +[testenv:django_contrib_autopatch-py34-django18-djangopylibmc06-djangoredis45-pylibmc-redis210-memcached] +setenv = + {[django_autopatch]setenv} +[testenv:django_contrib_autopatch-py34-django111-djangopylibmc06-djangoredis45-pylibmc-redis210-memcached] +setenv = + {[django_autopatch]setenv} +[testenv:django_contrib_autopatch-py35-django18-djangopylibmc06-djangoredis45-pylibmc-redis210-memcached] +setenv = + {[django_autopatch]setenv} +[testenv:django_contrib_autopatch-py35-django111-djangopylibmc06-djangoredis45-pylibmc-redis210-memcached] +setenv = + {[django_autopatch]setenv} +[testenv:django_contrib_autopatch-py36-django18-djangopylibmc06-djangoredis45-pylibmc-redis210-memcached] +setenv = + {[django_autopatch]setenv} +[testenv:django_contrib_autopatch-py36-django111-djangopylibmc06-djangoredis45-pylibmc-redis210-memcached] +setenv = + {[django_autopatch]setenv} +[testenv:django_contrib_autopatch-py34-django200-djangopylibmc06-djangoredis45-pylibmc-redis210-memcached] +setenv = + {[django_autopatch]setenv} +[testenv:django_contrib_autopatch-py35-django200-djangopylibmc06-djangoredis45-pylibmc-redis210-memcached] +setenv = + {[django_autopatch]setenv} +[testenv:django_contrib_autopatch-py36-django200-djangopylibmc06-djangoredis45-pylibmc-redis210-memcached] +setenv = + {[django_autopatch]setenv} + + # DEV: We use `conftest.py` as a local pytest plugin to configure hooks for collection [pytest] # Common directories to ignore @@ -754,4 +698,4 @@ exclude= .git,__pycache__, .eggs,*.egg, # We shouldn't lint our vendored dependencies - ddtrace/vendor/ + oteltrace/vendor/
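For reference, the sampler renames in tests/test_sampler.py reduce to the construction pattern sketched below; it is limited to the oteltrace.sampler names and assertions those tests already exercise.

    from oteltrace.sampler import OpenTelemetrySampler, SamplingRule

    # Rules keep the order they are declared in; a rule with no name/service
    # constraint matches every span.
    rules = [
        SamplingRule(sample_rate=0.25, name='flask.request'),
        SamplingRule(sample_rate=0.5, service='test'),
        SamplingRule(sample_rate=1.0),
    ]

    sampler = OpenTelemetrySampler(rules=rules)
    assert sampler.rules == rules
    assert sampler.limiter.rate_limit == OpenTelemetrySampler.DEFAULT_RATE_LIMIT

    # An explicit rate limit is handed to the underlying RateLimiter.
    limited = OpenTelemetrySampler(rate_limit=10)
    assert limited.limiter.rate_limit == 10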