diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 8cb43804d..fa15cb546 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -14,3 +14,4 @@ docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
   digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6
+
diff --git a/UPGRADING.md b/UPGRADING.md
index af7461dda..e882a497b 100644
--- a/UPGRADING.md
+++ b/UPGRADING.md
@@ -1,3 +1,131 @@
+# 3.0.0 Migration Guide
+
+The v3.0.0 release of `google-cloud-logging` improves the usability of the library,
+particularly in serverless environments.
+
+If you experience technical issues or have questions, please file an [issue](https://github.com/googleapis/python-logging/issues).
+
+## Primary Changes
+
+### Handler deprecations ([#310](https://github.com/googleapis/python-logging/pull/310))
+
+> **WARNING**: Breaking change
+
+We have changed our design policy to support more generic `Handler` classes instead of product-specific classes:
+
+- [`CloudLoggingHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/handlers.py)
+  - Sends logs over the network (using gRPC or HTTP API calls)
+  - Replaces `AppEngineHandler`
+- [`StructuredLogHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/structured_log.py)
+  - Exports logs in JSON format through standard out, to be parsed by an agent
+  - Replaces `ContainerEngineHandler`
+
+As of v3.0.0, [`AppEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/app_engine.py)
+and [`ContainerEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/container_engine.py)
+are deprecated and won't be updated. These handlers might be removed from the library in a future update.
+
+### Full JSON log support in standard library integration ([#316](https://github.com/googleapis/python-logging/pull/316), [#339](https://github.com/googleapis/python-logging/pull/339), [#447](https://github.com/googleapis/python-logging/pull/447))
+
+You can now log JSON data using the Python `logging` standard library integration.
+To log JSON data, do one of the following:
+
+1. Use the `json_fields` `extra` argument:
+
+```py
+import logging
+
+data_dict = {"hello": "world"}
+logging.info("message field", extra={"json_fields": data_dict})
+```
+
+2. Log a JSON-parsable string:
+
+```py
+import logging
+import json
+
+data_dict = {"hello": "world"}
+logging.info(json.dumps(data_dict))
+```
+
+### Metadata autodetection ([#315](https://github.com/googleapis/python-logging/pull/315))
+
+> **WARNING**: Breaking change
+
+Logs emitted by the library must be associated with a [monitored resource type](https://cloud.google.com/monitoring/api/resources)
+that indicates the compute environment the log originated from.
+- Prior to 3.0.0, when a log didn't specify a monitored resource, that field was set to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global).
+- With 3.0.0, when a log doesn't specify a monitored resource, the library attempts to identify the resource. If a resource can't be detected, the field will still default to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global).
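+
+If your environment is detected incorrectly, you can still pin the resource
+yourself. A short sketch (using the `logger()` keyword arguments added in this
+release; the log name and resource values are illustrative):
+
+```py
+from google.cloud import logging_v2
+from google.cloud.logging_v2.resource import Resource
+
+client = logging_v2.Client()
+# attach an explicit monitored resource instead of relying on detection
+logger = client.logger("my-log", resource=Resource(type="global", labels={}))
+```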
+
+### New `Logger.log` method ([#316](https://github.com/googleapis/python-logging/pull/316))
+
+In v3.0.0, the library adds a generic `log()` method that will attempt to infer and log any type:
+
+```py
+logger.log("hello world")
+```
+
+v3.0.0 continues to support the `Logger` class methods from previous releases:
+
+```py
+logger.log_text("hello world")
+logger.log_struct({"hello": "world"})
+logger.log_proto(proto_message)
+logger.log_empty()
+```
+
+### More permissive arguments ([#422](https://github.com/googleapis/python-logging/pull/422))
+
+> **WARNING**: Breaking change
+
+In v3.0.0, the library supports a wider variety of input formats:
+
+```py
+# lowercase severity strings will be accepted
+logger.log("hello world", severity="warning")
+```
+
+```py
+# a severity will be pulled out of the JSON payload if not otherwise set
+logger.log({"hello": "world", "severity": "warning"})
+```
+
+```py
+# resource data can be passed as a dict instead of a Resource object
+logger.log("hello world", resource={"type": "global", "labels": {}})
+```
+
+### Allow reading from non-project resources ([#444](https://github.com/googleapis/python-logging/pull/444))
+
+Prior to v3.0.0, attempting to read logs from non-project resources caused a crash:
+
+- `organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`
+- `billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`
+- `folders/[FOLDER_ID]/logs/[LOG_ID]`
+
+The v3.0.0 update fixes this issue.
+
+### Internal GAPIC and HTTP implementation changes ([#375](https://github.com/googleapis/python-logging/pull/375))
+
+> **WARNING**: Breaking change
+
+The library supports sending logs using two network protocols: gRPC and HTTP. Prior to v3.0.0, there was an
+inconsistency in the implementations, resulting in unexpected behavior when in HTTP mode.
+
+### `max_results` argument when listing entries ([#375](https://github.com/googleapis/python-logging/pull/375))
+
+v3.0.0 introduces a new `max_results` argument to `list_entries` calls, which can be used to specify an upper bound
+on how many logs should be returned:
+
+```py
+from google.cloud import logging_v2
+
+client = logging_v2.Client()
+client.list_entries(max_results=5)
+```
+
+---
+
 # 2.0.0 Migration Guide
 
 The 2.0 release of the `google-cloud-logging` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
@@ -334,4 +462,4 @@ The following resource name helpers have been renamed.
 
 **`ConfigServiceV2Client`**
 
 * `sink_path` -> `log_sink_path`
-* `exclusion_path` -> `log_exclusion_path`
\ No newline at end of file
+* `exclusion_path` -> `log_exclusion_path`
diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst
new file mode 100644
index 000000000..11cf39e9c
--- /dev/null
+++ b/docs/direct-lib-usage.rst
@@ -0,0 +1,330 @@
+Direct Library Usage
+====================
+
+We recommend that you use the :mod:`google-cloud-logging` library
+by integrating it with the :doc:`Python logging standard library`.
+However, you can also use the library to interact with the Google Cloud Logging API
+directly.
+
+In addition to writing logs, you can use the library to manage
+:doc:`logs`, :doc:`sinks`, :doc:`metrics`, and other resources.
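+
+A minimal end-to-end sketch of direct usage (assuming Application Default
+Credentials are configured; the log name is illustrative):
+
+.. code-block:: python
+
+    import google.cloud.logging
+
+    client = google.cloud.logging.Client()
+    logger = client.logger(name="example-log")
+    logger.log_text("hello world")
+
+    # read the newest entries back, most recent first
+    for entry in client.list_entries(
+        order_by=google.cloud.logging.DESCENDING, max_results=5
+    ):
+        print(entry.payload)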
+
+Setup
+----------------------------
+
+Create a Client
+~~~~~~~~~~~~~~~~~
+
+.. _Creating Client:
+
+You must set up a :doc:`Client` to use the library:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START usage_client_setup]
+    :end-before: [END usage_client_setup]
+    :dedent: 4
+
+To use HTTP, :doc:`disable gRPC` when you set up the :doc:`Client`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START usage_http_client_setup]
+    :end-before: [END usage_http_client_setup]
+    :dedent: 4
+
+Create a Logger
+~~~~~~~~~~~~~~~~~
+
+Loggers read, write, and delete logs in Google Cloud.
+
+You use your :doc:`Client` to create a :doc:`Logger`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_create]
+    :end-before: [END logger_create]
+    :dedent: 4
+
+You can attach custom labels when you initialize a :doc:`Logger`.
+These labels are added to each
+:doc:`LogEntry` written by the :doc:`Logger`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_custom_labels]
+    :end-before: [END logger_custom_labels]
+    :dedent: 4
+
+By default, the library adds a `Monitored Resource field `_
+associated with the environment the code is run on. For example, code run on
+App Engine will have a `gae_app `_
+resource, while code run locally will have a `global `_ resource field.
+
+To manually set the resource field, do so when you initialize the :doc:`Logger`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_custom_resource]
+    :end-before: [END logger_custom_resource]
+    :dedent: 4
+
+
+Write Log Entries
+-------------------
+
+You write logs by using :meth:`Logger.log `:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_log_basic]
+    :end-before: [END logger_log_basic]
+    :dedent: 4
+
+You can add `LogEntry fields `_
+by passing them as keyword arguments:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_log_fields]
+    :end-before: [END logger_log_fields]
+    :dedent: 4
+
+:meth:`Logger.log ` chooses the appropriate :doc:`LogEntry ` type
+based on input type. To specify the type explicitly, you can use the following Logger methods:
+
+- :meth:`Logger.log_text ` creates a :class:`~google.cloud.logging_v2.entries.TextEntry`
+- :meth:`Logger.log_struct ` creates a :class:`~google.cloud.logging_v2.entries.StructEntry`
+- :meth:`Logger.log_proto ` creates a :class:`~google.cloud.logging_v2.entries.ProtobufEntry`
+- :meth:`Logger.log_empty ` creates an empty :class:`~google.cloud.logging_v2.entries.LogEntry`
+
+Batch Write Logs
+------------------
+
+By default, each log write takes place in an individual network request, which may be inefficient at scale.
+
+The :class:`~google.cloud.logging_v2.logger.Batch` class batches logs together, and sends them out
+only when :func:`batch.commit ` is called:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_log_batch]
+    :end-before: [END logger_log_batch]
+    :dedent: 4
+
+To simplify things, you can also use :class:`~google.cloud.logging_v2.logger.Batch` as a context manager:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_log_batch_context]
+    :end-before: [END logger_log_batch_context]
+    :dedent: 4
+
+In the previous example, the logs are automatically committed when the code exits the ``with`` block.
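+
+For illustration, a sketch of the context-manager form (the payloads are
+illustrative; ``logger`` is a :doc:`Logger` created as above):
+
+.. code-block:: python
+
+    batch = logger.batch()
+    with batch:
+        batch.log_text("first entry")
+        batch.log_struct({"type": "second entry"})
+    # both entries are committed in a single API call on exiting the block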
+
+Retrieve Log Entries
+---------------------
+
+You retrieve log entries for the default project using
+:meth:`list_entries() `
+on a :doc:`Client` or :doc:`Logger` object:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START client_list_entries_default]
+    :end-before: [END client_list_entries_default]
+    :dedent: 4
+
+Entries returned by
+:meth:`Client.list_entries() `
+or
+:meth:`Logger.list_entries() `
+are instances of one of the following classes:
+
+- :class:`~google.cloud.logging_v2.entries.TextEntry`
+- :class:`~google.cloud.logging_v2.entries.StructEntry`
+- :class:`~google.cloud.logging_v2.entries.ProtobufEntry`
+
+You can filter the entries you retrieve by using the `Advanced Logs Filters`_ syntax.
+
+.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters
+
+To fetch entries that match a filter for the default project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START client_list_entries_filter]
+    :end-before: [END client_list_entries_filter]
+    :dedent: 4
+
+To sort entries in descending timestamp order:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START client_list_entries_order_by]
+    :end-before: [END client_list_entries_order_by]
+    :dedent: 4
+
+To retrieve entries for a single logger, sorting in descending timestamp order:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_list_entries]
+    :end-before: [END logger_list_entries]
+    :dedent: 4
+
+For example, to retrieve all `GKE Admin Activity audit logs`_
+from the past 24 hours:
+
+.. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logging_list_gke_audit_logs]
+    :end-before: [END logging_list_gke_audit_logs]
+    :dedent: 4
+
+
+Delete Log Entries
+--------------------
+
+To delete all logs associated with a logger, use the following call:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_delete]
+    :end-before: [END logger_delete]
+    :dedent: 8
+
+
+Manage Log Metrics
+--------------------
+
+Logs-based metrics are counters of entries which match a given filter.
+They can be used within Cloud Monitoring to create charts and alerts.
+
+To list all logs-based metrics for a project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START client_list_metrics]
+    :end-before: [END client_list_metrics]
+    :dedent: 4
+
+To create a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START metric_create]
+    :end-before: [END metric_create]
+    :dedent: 4
+
+To refresh local information about a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START metric_reload]
+    :end-before: [END metric_reload]
+    :dedent: 4
+
+To update a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START metric_update]
+    :end-before: [END metric_update]
+    :dedent: 4
+
+To delete a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START metric_delete]
+    :end-before: [END metric_delete]
+    :dedent: 4
+
+Log Sinks
+---------------
+
+Sinks allow exporting log entries which match a given filter to
+Cloud Storage buckets, BigQuery datasets, or Cloud Pub/Sub topics.
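+
+The creation pattern is the same for every destination, as the sections below
+show. A sketch (the sink name, filter, and destination URI are illustrative):
+
+.. code-block:: python
+
+    destination = "storage.googleapis.com/my-example-bucket"
+    sink = client.sink("my-sink", filter_="severity>=ERROR", destination=destination)
+
+    if not sink.exists():
+        sink.create()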
+
+Cloud Storage Sink
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Ensure the storage bucket that you want to export logs to has
+``cloud-logs@google.com`` as an owner. See
+`Setting permissions for Cloud Storage`_.
+
+.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage
+
+Ensure that ``cloud-logs@google.com`` is an owner of the bucket:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_bucket_permissions]
+    :end-before: [END sink_bucket_permissions]
+    :dedent: 4
+
+To create a Cloud Storage sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_storage_create]
+    :end-before: [END sink_storage_create]
+    :dedent: 4
+
+
+BigQuery Sink
+~~~~~~~~~~~~~~~~~~
+
+To export logs to BigQuery, you must log into the Cloud Console
+and add ``cloud-logs@google.com`` to a dataset.
+
+See: `Setting permissions for BigQuery`_
+
+.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_dataset_permissions]
+    :end-before: [END sink_dataset_permissions]
+    :dedent: 4
+
+To create a BigQuery sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_bigquery_create]
+    :end-before: [END sink_bigquery_create]
+    :dedent: 4
+
+
+Pub/Sub Sink
+~~~~~~~~~~~~~~~~~
+
+To export logs to Pub/Sub, you must log into the Cloud Console
+and add ``cloud-logs@google.com`` to a topic.
+
+See: `Setting permissions for Pub/Sub`_
+
+.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_topic_permissions]
+    :end-before: [END sink_topic_permissions]
+    :dedent: 4
+
+To create a Cloud Pub/Sub sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_pubsub_create]
+    :end-before: [END sink_pubsub_create]
+    :dedent: 4
+
+Manage Sinks
+~~~~~~~~~~~~~~
+
+To list all sinks for a project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START client_list_sinks]
+    :end-before: [END client_list_sinks]
+    :dedent: 4
+
+To refresh local information about a sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_reload]
+    :end-before: [END sink_reload]
+    :dedent: 4
+
+To update a sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_update]
+    :end-before: [END sink_update]
+    :dedent: 4
+
+To delete a sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_delete]
+    :end-before: [END sink_delete]
+    :dedent: 4
diff --git a/docs/entries.rst b/docs/entries.rst
index 9d473f3c1..dc257e4c9 100644
--- a/docs/entries.rst
+++ b/docs/entries.rst
@@ -1,5 +1,5 @@
-Entries
-=======
+Log Entries
+===========
 
 .. automodule:: google.cloud.logging_v2.entries
     :members:
diff --git a/docs/grpc-vs-http.rst b/docs/grpc-vs-http.rst
new file mode 100644
index 000000000..e6891420c
--- /dev/null
+++ b/docs/grpc-vs-http.rst
@@ -0,0 +1,14 @@
+gRPC vs HTTP
+====================
+
+:mod:`google-cloud-logging` supports two different protocols for sending logs over the network:
+gRPC and HTTP. Both implementations conform to the same API, and the difference
+should be invisible to the end user.
+
+gRPC is enabled by default. You can switch to HTTP mode by either:
+
+- setting the `DISABLE_GRPC` environment variable to `TRUE`, or
+- passing `_use_grpc=False` when :ref:`initializing a Client`
+
+We recommend using gRPC whenever possible, but you may want to try the HTTP
+implementation if you have network issues when using gRPC.
diff --git a/docs/handlers-app-engine.rst b/docs/handlers-app-engine.rst
index f25223a20..9f8a6c8db 100644
--- a/docs/handlers-app-engine.rst
+++ b/docs/handlers-app-engine.rst
@@ -1,5 +1,8 @@
-Google App Engine flexible Log Handler
-======================================
+[DEPRECATED] App Engine Handler
+===================================================
+
+.. deprecated:: 3.0.0
+    Use :class:`CloudLoggingHandler` instead.
 
 .. automodule:: google.cloud.logging_v2.handlers.app_engine
     :members:
diff --git a/docs/handlers-cloud-logging.rst b/docs/handlers-cloud-logging.rst
new file mode 100644
index 000000000..5ebaa51ff
--- /dev/null
+++ b/docs/handlers-cloud-logging.rst
@@ -0,0 +1,6 @@
+Cloud Logging Handler
+==============================
+
+.. automodule:: google.cloud.logging_v2.handlers.handlers
+    :members:
+    :show-inheritance:
diff --git a/docs/handlers-container-engine.rst b/docs/handlers-container-engine.rst
index 981b41dcb..0c074eb19 100644
--- a/docs/handlers-container-engine.rst
+++ b/docs/handlers-container-engine.rst
@@ -1,5 +1,8 @@
-Google Kubernetes Engine Log Handler
-====================================
+[DEPRECATED] Kubernetes Engine Handler
+=================================================
+
+.. deprecated:: 3.0.0
+    Use :class:`StructuredLogHandler` instead.
 
 .. automodule:: google.cloud.logging_v2.handlers.container_engine
     :members:
diff --git a/docs/handlers-structured-log.rst b/docs/handlers-structured-log.rst
new file mode 100644
index 000000000..337ad591d
--- /dev/null
+++ b/docs/handlers-structured-log.rst
@@ -0,0 +1,6 @@
+Structured Log Handler
+==============================
+
+.. automodule:: google.cloud.logging_v2.handlers.structured_log
+    :members:
+    :show-inheritance:
diff --git a/docs/handlers.rst b/docs/handlers.rst
index 9089170fb..914757834 100644
--- a/docs/handlers.rst
+++ b/docs/handlers.rst
@@ -1,6 +1,9 @@
-Python Logging Module Handler
-==============================
+Handlers
+----------------
+.. toctree::
+    :maxdepth: 2
 
-.. automodule:: google.cloud.logging_v2.handlers.handlers
-    :members:
-    :show-inheritance:
+    handlers-cloud-logging
+    handlers-structured-log
+    handlers-app-engine
+    handlers-container-engine
diff --git a/docs/index.rst b/docs/index.rst
index 64c2dcd1e..01d8e4eee 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,17 +1,31 @@
 .. include:: README.rst
 
+Usage Guide
+-------------------
+.. toctree::
+    :maxdepth: 2
+
+    usage
+
 Documentation
 -------------------
 .. toctree::
     :maxdepth: 3
 
-    v2
+    client
+    logger
+    entries
+    metric
+    resource
+    sink
+    handlers
+    transport
 
-Migration Guide
----------------
+Migration Guides
+----------------
 
-See the guide below for instructions on migrating to the 2.x release of this library.
+See the guide below for instructions on migrating between major releases of this library.
 
 .. toctree::
     :maxdepth: 2
diff --git a/docs/logger.rst b/docs/logger.rst
index 8aca18199..13f8e0d7e 100644
--- a/docs/logger.rst
+++ b/docs/logger.rst
@@ -3,4 +3,5 @@ Logger
 
 .. automodule:: google.cloud.logging_v2.logger
     :members:
+    :undoc-members:
     :show-inheritance:
diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst
new file mode 100644
index 000000000..a485fce6d
--- /dev/null
+++ b/docs/std-lib-integration.rst
@@ -0,0 +1,146 @@
+Integration with `logging` Standard Library
+===========================================
+
+We recommend that you use :mod:`google-cloud-logging` to integrate with
+the Python :mod:`logging` standard library. This way, you can write logs using the
+standard Python idioms, and still have your logs appear in Google Cloud Logging.
+
+Automatic Configuration
+-----------------------
+
+To integrate :mod:`google-cloud-logging` with the standard :mod:`logging` module,
+call :meth:`~google.cloud.logging_v2.client.Client.setup_logging` on a :class:`~google.cloud.logging_v2.client.Client` instance.
+
+.. literalinclude:: ../samples/snippets/handler.py
+    :start-after: [START logging_handler_setup]
+    :end-before: [END logging_handler_setup]
+    :dedent: 4
+
+The :meth:`~google.cloud.logging_v2.client.Client.setup_logging` function chooses the best configuration for the environment your
+code is running on. For more information, see the `Google Cloud Logging documentation `_.
+
+Manual Handler Configuration
+-----------------------------
+
+.. _Manual Handler:
+
+Automatic configuration determines the appropriate handler for the environment.
+To specify the handler yourself, construct an instance manually and pass it in
+as an argument to :meth:`~google.cloud.logging_v2.handlers.setup_logging`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START create_cloud_handler]
+    :end-before: [END create_cloud_handler]
+    :dedent: 4
+
+There are two supported handler classes to choose from:
+
+- :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler`:
+  - Sends logs directly to Cloud Logging over the network (:doc:`gRPC or HTTP`)
+  - Logs are transmitted according to a :ref:`Transport ` class
+  - This is the default handler on most environments, including local development
+- :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler`:
+  - Outputs logs as `structured JSON `_
+    to standard out, to be read and parsed by a GCP logging agent
+  - This is the default handler on Kubernetes Engine, Cloud Functions, and Cloud Run
+
+Standard Library
+---------------------------
+
+After you set up the Google Cloud Logging library with the Python :mod:`logging` standard library,
+you can send logs with the standard logging library as you normally would:
+
+.. literalinclude:: ../samples/snippets/handler.py
+    :start-after: [START logging_handler_usage]
+    :end-before: [END logging_handler_usage]
+    :dedent: 4
+
+For more information on using the Python :mod:`logging` standard library, see the `logging documentation `_.
+
+Logging JSON Payloads
+----------------------
+
+.. _JSON:
+
+Although the Python :mod:`logging` standard library `expects all logs to be strings `_,
+Google Cloud Logging allows `JSON payload data `_.
+
+To write JSON logs using the standard library integration, do one of the following:
+
+1. Use the `json_fields` `extra` argument:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logging_extra_json_fields]
+    :end-before: [END logging_extra_json_fields]
+    :dedent: 4
+
+2. Log a JSON-parsable string:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logging_json_dumps]
+    :end-before: [END logging_json_dumps]
+    :dedent: 4
+
+
+Automatic Metadata Detection
+----------------------------
+
+.. _Autodetection:
+
+The Google Cloud Logging library attempts to detect and attach additional
+`LogEntry fields `_.
+The following fields are currently supported:
+
+- labels
+- trace*
+- span_id*
+- trace_sampled*
+- http_request*
+- source_location
+- resource
+- :ref:`json_fields`
+
+.. note::
+    Fields marked with "*" require a supported Python web framework. The Google Cloud Logging
+    library currently supports `flask `_ and `django `_.
+
+Manual Metadata Using the `extra` Argument
+--------------------------------------------
+
+The Python :mod:`logging` standard library accepts `an "extra" argument `_ when
+writing logs. You can use this argument to populate LogRecord objects with user-defined
+key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in additional
+metadata to populate `LogEntry fields `_.
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logging_extras]
+    :end-before: [END logging_extras]
+    :dedent: 4
+
+All of the `LogEntry fields `_
+that can be :ref:`autodetected` can also be set manually through the `extra`
+argument. Fields sent explicitly through the `extra`
+argument override any :ref:`automatically detected` fields.
+
+CloudLoggingHandler Transports
+------------------------------
+
+.. _Transports:
+
+:doc:`Transport` classes define how the :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler`
+transports logs over the network to Google Cloud. There are two Transport implementations
+(defined as subclasses of :class:`transports.base.Transport `):
+
+- :class:`~google.cloud.logging_v2.handlers.transports.background_thread.BackgroundThreadTransport`:
+  - sends logs in batches, using a background thread
+  - the default Transport class
+- :class:`~google.cloud.logging_v2.handlers.transports.sync.SyncTransport`:
+  - sends each log synchronously in a single API call
+
+You can set a Transport class by passing it as an argument when
+:ref:`initializing CloudLoggingHandler manually`.
+
+You can use both transport options over :doc:`gRPC or HTTP`.
+
+.. note::
+    :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler`
+    prints logs as formatted JSON to standard output, and does not use a Transport class.
diff --git a/docs/stdlib-usage.rst b/docs/stdlib-usage.rst
deleted file mode 100644
index 375b41ddf..000000000
--- a/docs/stdlib-usage.rst
+++ /dev/null
@@ -1,70 +0,0 @@
-Integration with Python logging module
---------------------------------------
-
-
-It's possible to tie the Python :mod:`logging` module directly into Google Cloud Logging. To use it,
-create a :class:`CloudLoggingHandler ` instance from your
-Logging client.
-
-.. code-block:: python
-
-    >>> import logging
-    >>> import google.cloud.logging # Don't conflict with standard logging
-    >>> from google.cloud.logging.handlers import CloudLoggingHandler
-    >>> client = google.cloud.logging.Client()
-    >>> handler = CloudLoggingHandler(client)
-    >>> cloud_logger = logging.getLogger('cloudLogger')
-    >>> cloud_logger.setLevel(logging.INFO) # defaults to WARN
-    >>> cloud_logger.addHandler(handler)
-    >>> cloud_logger.error('bad news')
-
-.. note::
-
-    This handler by default uses an asynchronous transport that sends log entries on a background
-    thread.
However, the API call will still be made in the same process. For other transport - options, see the transports section. - -All logs will go to a single custom log, which defaults to "python". The name of the Python -logger will be included in the structured log entry under the "python_logger" field. You can -change it by providing a name to the handler: - -.. code-block:: python - - >>> handler = CloudLoggingHandler(client, name="mycustomlog") - -It is also possible to attach the handler to the root Python logger, so that for example a plain -`logging.warn` call would be sent to Cloud Logging, as well as any other loggers created. However, -you must avoid infinite recursion from the logging calls the client itself makes. A helper -method :meth:`setup_logging ` is provided to configure -this automatically: - -.. code-block:: python - - >>> import logging - >>> import google.cloud.logging # Don't conflict with standard logging - >>> from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging - >>> client = google.cloud.logging.Client() - >>> handler = CloudLoggingHandler(client) - >>> logging.getLogger().setLevel(logging.INFO) # defaults to WARN - >>> setup_logging(handler) - >>> logging.error('bad news') - -You can also exclude certain loggers: - -.. code-block:: python - - >>> setup_logging(handler, excluded_loggers=('werkzeug',)) - - - -Python logging handler transports -================================== - -The Python logging handler can use different transports. The default is -:class:`google.cloud.logging_v2.handlers.BackgroundThreadTransport`. - - 1. :class:`google.cloud.logging_V2.handlers.BackgroundThreadTransport` this is the default. It writes - entries on a background :class:`python.threading.Thread`. - - 1. :class:`google.cloud.logging_V2.handlers.SyncTransport` this handler does a direct API call on each - logging statement to write the entry. diff --git a/docs/transport.rst b/docs/transport.rst new file mode 100644 index 000000000..9f4430103 --- /dev/null +++ b/docs/transport.rst @@ -0,0 +1,25 @@ +Transports +---------------- + +These classes define how the :class:`CloudLoggingHandler ` +transport logs into GCP. More information in the :ref:`User Guide` + +Base Transport +~~~~~~~~~~~~~~ +.. automodule:: google.cloud.logging_v2.handlers.transports.base + :members: + :show-inheritance: + +Background Thread Transport +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.logging_v2.handlers.transports.background_thread + :members: + :show-inheritance: + +Synchronous Transport +~~~~~~~~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.logging_v2.handlers.transports.sync + :members: + :show-inheritance: diff --git a/docs/transports-base.rst b/docs/transports-base.rst deleted file mode 100644 index b28fb5ba6..000000000 --- a/docs/transports-base.rst +++ /dev/null @@ -1,6 +0,0 @@ -Python Logging Handler Sync Transport -====================================== - -.. automodule:: google.cloud.logging_v2.handlers.transports.base - :members: - :show-inheritance: diff --git a/docs/transports-sync.rst b/docs/transports-sync.rst deleted file mode 100644 index 32e6401cb..000000000 --- a/docs/transports-sync.rst +++ /dev/null @@ -1,6 +0,0 @@ -Python Logging Handler Sync Transport -====================================== - -.. 
automodule:: google.cloud.logging_v2.handlers.transports.sync - :members: - :show-inheritance: diff --git a/docs/transports-thread.rst b/docs/transports-thread.rst deleted file mode 100644 index 2899e6c48..000000000 --- a/docs/transports-thread.rst +++ /dev/null @@ -1,7 +0,0 @@ -Python Logging Handler Threaded Transport -========================================= - - -.. automodule:: google.cloud.logging_v2.handlers.transports.background_thread - :members: - :show-inheritance: diff --git a/docs/usage.rst b/docs/usage.rst index 1fde3d8ea..929ee9cef 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -1,356 +1,9 @@ Usage Guide -=========== +------------- +.. toctree:: + :maxdepth: 2 -Writing log entries -------------------- + std-lib-integration + direct-lib-usage + grpc-vs-http -To write log entries, first create a -:class:`~google.cloud.logging.logger.Logger`, passing the "log name" with -which to associate the entries: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_create] - :end-before: [END logger_create] - :dedent: 4 - -Write a simple text entry to the logger. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_text] - :end-before: [END logger_log_text] - :dedent: 4 - -Write a dictionary entry to the logger. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_struct] - :end-before: [END logger_log_struct] - :dedent: 4 - -Write a simple text entry and resource to the logger. - -Supported Resource values are listed at `Monitored Resource Types`_ - -.. _Monitored Resource Types: https://cloud.google.com/logging/docs/api/v2/resource-list - - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_resource_text] - :end-before: [END logger_log_resource_text] - :dedent: 4 - -Retrieving log entries ----------------------- - -Fetch entries for the default project. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_default] - :end-before: [END client_list_entries_default] - :dedent: 4 - -Entries returned by -:meth:`Client.list_entries ` -or -:meth:`Logger.list_entries ` -will be instances of one of the following classes: - -- :class:`~google.cloud.logging.entries.TextEntry` -- :class:`~google.cloud.logging.entries.StructEntry` -- :class:`~google.cloud.logging.entries.ProtobufEntry` - -Filter entries retrieved using the `Advanced Logs Filters`_ syntax - -.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters - -Fetch entries for the default project. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_filter] - :end-before: [END client_list_entries_filter] - :dedent: 4 - -Sort entries in descending timestamp order. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_order_by] - :end-before: [END client_list_entries_order_by] - :dedent: 4 - -Retrieve entries for a single logger, sorting in descending timestamp order: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_list_entries] - :end-before: [END logger_list_entries] - :dedent: 4 - -And as a practical example, retrieve all `GKE Admin Activity audit logs`_ -from the past 24 hours: - -.. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logging_list_gke_audit_logs] - :end-before: [END logging_list_gke_audit_logs] - :dedent: 4 - -Delete all entries for a logger -------------------------------- - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_delete] - :end-before: [END logger_delete] - :dedent: 8 - - -Manage log metrics ------------------- - -Metrics are counters of entries which match a given filter. They can be -used within Cloud Monitoring to create charts and alerts. - -List all metrics for a project: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_metrics] - :end-before: [END client_list_metrics] - :dedent: 4 - -Create a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_create] - :end-before: [END metric_create] - :dedent: 4 - -Refresh local information about a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_reload] - :end-before: [END metric_reload] - :dedent: 4 - -Update a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_update] - :end-before: [END metric_update] - :dedent: 4 - -Delete a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_delete] - :end-before: [END metric_delete] - :dedent: 4 - -Export log entries using sinks ------------------------------- - -Sinks allow exporting entries which match a given filter to Cloud Storage -buckets, BigQuery datasets, or Cloud Pub/Sub topics. - -Export to Cloud Storage -~~~~~~~~~~~~~~~~~~~~~~~ - -Make sure that the storage bucket you want to export logs too has -``cloud-logs@google.com`` as the owner. See -`Setting permissions for Cloud Storage`_. - -.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage - -Add ``cloud-logs@google.com`` as the owner of the bucket: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_bucket_permissions] - :end-before: [END sink_bucket_permissions] - :dedent: 4 - -Create a Cloud Storage sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_storage_create] - :end-before: [END sink_storage_create] - :dedent: 4 - - -Export to BigQuery -~~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a dataset. - -See: `Setting permissions for BigQuery`_ - -.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_dataset_permissions] - :end-before: [END sink_dataset_permissions] - :dedent: 4 - -Create a BigQuery sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_bigquery_create] - :end-before: [END sink_bigquery_create] - :dedent: 4 - - -Export to Pub/Sub -~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a topic. - -See: `Setting permissions for Pub/Sub`_ - -.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_topic_permissions] - :end-before: [END sink_topic_permissions] - :dedent: 4 - -Create a Cloud Pub/Sub sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_pubsub_create] - :end-before: [END sink_pubsub_create] - :dedent: 4 - -Manage Sinks -~~~~~~~~~~~~ - -List all sinks for a project: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_sinks] - :end-before: [END client_list_sinks] - :dedent: 4 - -Refresh local information about a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_reload] - :end-before: [END sink_reload] - :dedent: 4 - -Update a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_update] - :end-before: [END sink_update] - :dedent: 4 - -Delete a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_delete] - :end-before: [END sink_delete] - :dedent: 4 - -Integration with Python logging module --------------------------------------- - -It's possible to tie the Python :mod:`logging` module directly into Google -Cloud Logging. There are different handler options to accomplish this. -To automatically pick the default for your current environment, use -:meth:`~google.cloud.logging.client.Client.get_default_handler`. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_default_handler] - :end-before: [END create_default_handler] - :dedent: 4 - -It is also possible to attach the handler to the root Python logger, so that -for example a plain ``logging.warn`` call would be sent to Cloud Logging, -as well as any other loggers created. A helper method -:meth:`~google.cloud.logging.client.Client.setup_logging` is provided -to configure this automatically. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START setup_logging] - :end-before: [END setup_logging] - :dedent: 4 - -.. note:: - - To reduce cost and quota usage, do not enable Cloud Logging - handlers while testing locally. - -You can also exclude certain loggers: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START setup_logging_excludes] - :end-before: [END setup_logging_excludes] - :dedent: 4 - -Cloud Logging Handler -~~~~~~~~~~~~~~~~~~~~~ - -If you prefer not to use -:meth:`~google.cloud.logging.client.Client.get_default_handler`, you can -directly create a -:class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` instance -which will write directly to the API. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_cloud_handler] - :end-before: [END create_cloud_handler] - :dedent: 4 - -.. note:: - - This handler by default uses an asynchronous transport that sends log - entries on a background thread. However, the API call will still be made - in the same process. For other transport options, see the transports - section. - -All logs will go to a single custom log, which defaults to "python". The name -of the Python logger will be included in the structured log entry under the -"python_logger" field. You can change it by providing a name to the handler: - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_named_handler] - :end-before: [END create_named_handler] - :dedent: 4 - -Cloud Logging Handler transports -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` -logging handler can use different transports. The default is -:class:`~google.cloud.logging.handlers.BackgroundThreadTransport`. - - 1. :class:`~google.cloud.logging.handlers.BackgroundThreadTransport` this is - the default. It writes entries on a background - :class:`python.threading.Thread`. - - 1. :class:`~google.cloud.logging.handlers.SyncTransport` this handler does a - direct API call on each logging statement to write the entry. - - -.. _Google Kubernetes Engine: https://cloud.google.com/kubernetes-engine - -fluentd logging handlers -~~~~~~~~~~~~~~~~~~~~~~~~ - -Besides :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler`, -which writes directly to the API, two other handlers are provided. -:class:`~google.cloud.logging.handlers.app_engine.AppEngineHandler`, which is -recommended when running on the Google App Engine Flexible vanilla runtimes -(i.e. your app.yaml contains ``runtime: python``), and -:class:`~google.cloud.logging.handlers.container_engine.ContainerEngineHandler` -, which is recommended when running on `Google Kubernetes Engine`_ with the -Cloud Logging plugin enabled. - -:meth:`~google.cloud.logging.client.Client.get_default_handler` and -:meth:`~google.cloud.logging.client.Client.setup_logging` will attempt to use -the environment to automatically detect whether the code is running in -these platforms and use the appropriate handler. - -In both cases, the fluentd agent is configured to automatically parse log files -in an expected format and forward them to Cloud Logging. The handlers -provided help set the correct metadata such as log level so that logs can be -filtered accordingly. diff --git a/docs/v2.rst b/docs/v2.rst deleted file mode 100644 index 823097bd7..000000000 --- a/docs/v2.rst +++ /dev/null @@ -1,19 +0,0 @@ -v2 ----------------- -.. toctree:: - :maxdepth: 2 - - usage - client - logger - entries - metric - resource - sink - stdlib-usage - handlers - handlers-app-engine - handlers-container-engine - transports-sync - transports-thread - transports-base diff --git a/google/cloud/logging_v2/_gapic.py b/google/cloud/logging_v2/_gapic.py index 7a6d70650..3661d3d09 100644 --- a/google/cloud/logging_v2/_gapic.py +++ b/google/cloud/logging_v2/_gapic.py @@ -49,10 +49,11 @@ def list_entries( *, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): - """Return a page of log entry resources. + """Return a generator of log entry resources. Args: resource_names (Sequence[str]): Names of one or more parent resources @@ -69,14 +70,16 @@ def list_entries( https://cloud.google.com/logging/docs/view/advanced_filters order_by (str) One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (int): maximum number of entries to return, If not passed, - defaults to a value set by the API. - page_token (str): opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. 
Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.LogEntry] + Generator[~logging_v2.LogEntry] """ # full resource names are expected by the API resource_names = resource_names @@ -89,19 +92,27 @@ def list_entries( ) response = self._gapic_api.list_log_entries(request=request) - page_iter = iter(response) + log_iter = iter(response) # We attach a mutable loggers dictionary so that as Logger # objects are created by entry_from_resource, they can be # re-used by other log entries from the same logger. loggers = {} - def log_entries_pager(page_iter): - for page in page_iter: - log_entry_dict = _parse_log_entry(LogEntryPB.pb(page)) + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") + + # create generator + def log_entries_pager(log_iter): + i = 0 + for entry in log_iter: + if max_results is not None and i >= max_results: + break + log_entry_dict = _parse_log_entry(LogEntryPB.pb(entry)) yield entry_from_resource(log_entry_dict, self._client, loggers=loggers) + i += 1 - return log_entries_pager(page_iter) + return log_entries_pager(log_iter) def write_entries( self, @@ -175,7 +186,7 @@ def __init__(self, gapic_api, client): self._gapic_api = gapic_api self._client = client - def list_sinks(self, parent, *, page_size=0, page_token=None): + def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None): """List sinks for the parent resource. Args: @@ -187,27 +198,37 @@ def list_sinks(self, parent, *, page_size=0, page_token=None): "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". - page_size (Optional[int]): Maximum number of sinks to return, If not passed, - defaults to a value set by the API. - page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. 
Returns: - Iterator[~logging_v2.Sink] + Generator[~logging_v2.Sink] """ request = ListSinksRequest( parent=parent, page_size=page_size, page_token=page_token ) response = self._gapic_api.list_sinks(request) - page_iter = iter(response) + sink_iter = iter(response) + + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") - def sinks_pager(page_iter): - for page in page_iter: + def sinks_pager(sink_iter): + i = 0 + for entry in sink_iter: + if max_results is not None and i >= max_results: + break # Convert the GAPIC sink type into the handwritten `Sink` type - yield Sink.from_api_repr(LogSink.to_dict(page), client=self._client) + yield Sink.from_api_repr(LogSink.to_dict(entry), client=self._client) + i += 1 - return sinks_pager(page_iter) + return sinks_pager(sink_iter) def sink_create( self, parent, sink_name, filter_, destination, *, unique_writer_identity=False @@ -347,33 +368,47 @@ def __init__(self, gapic_api, client): self._gapic_api = gapic_api self._client = client - def list_metrics(self, project, *, page_size=0, page_token=None): + def list_metrics( + self, project, *, max_results=None, page_size=None, page_token=None + ): """List metrics for the project associated with this client. Args: project (str): ID of the project whose metrics are to be listed. - page_size (int): Maximum number of metrics to return, If not passed, - defaults to a value set by the API. - page_token (str): Opaque marker for the next "page" of metrics. If not - passed, the API will return the first page of - sinks. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterable[logging_v2.Metric]: Iterable of metrics. + Generator[logging_v2.Metric] """ path = f"projects/{project}" request = ListLogMetricsRequest( parent=path, page_size=page_size, page_token=page_token, ) response = self._gapic_api.list_log_metrics(request=request) - page_iter = iter(response) + metric_iter = iter(response) + + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") - def metrics_pager(page_iter): - for page in page_iter: + def metrics_pager(metric_iter): + i = 0 + for entry in metric_iter: + if max_results is not None and i >= max_results: + break # Convert GAPIC metrics type into handwritten `Metric` type - yield Metric.from_api_repr(LogMetric.to_dict(page), client=self._client) + yield Metric.from_api_repr( + LogMetric.to_dict(entry), client=self._client + ) + i += 1 - return metrics_pager(page_iter) + return metrics_pager(metric_iter) def metric_create(self, project, metric_name, filter_, description): """Create a metric resource. 
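
As a usage sketch of the new `max_results` pager added above (not part of the patched files; client construction is assumed):

```py
from google.cloud import logging_v2

client = logging_v2.Client()

# entries are yielded one at a time; at most 3 are consumed here,
# even though up to 100 are fetched per underlying API call
for entry in client.list_entries(max_results=3, page_size=100):
    print(entry.payload)

# a negative max_results raises ValueError in the new pagers
```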
diff --git a/google/cloud/logging_v2/_http.py b/google/cloud/logging_v2/_http.py index 68bde346a..21fb38606 100644 --- a/google/cloud/logging_v2/_http.py +++ b/google/cloud/logging_v2/_http.py @@ -74,6 +74,7 @@ def list_entries( *, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): @@ -94,14 +95,16 @@ def list_entries( https://cloud.google.com/logging/docs/view/advanced_filters order_by (str) One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (int): maximum number of entries to return, If not passed, - defaults to a value set by the API. - page_token (str): opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.LogEntry] + Generator[~logging_v2.LogEntry] """ extra_params = {"resourceNames": resource_names} @@ -131,7 +134,8 @@ def list_entries( ) # This method uses POST to make a read-only request. iterator._HTTP_METHOD = "POST" - return iterator + + return _entries_pager(iterator, max_results) def write_entries( self, @@ -219,7 +223,7 @@ def __init__(self, client): self._client = client self.api_request = client._connection.api_request - def list_sinks(self, parent, *, page_size=None, page_token=None): + def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None): """List sinks for the parent resource. See @@ -234,14 +238,17 @@ def list_sinks(self, parent, *, page_size=None, page_token=None): "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". - page_size (Optional[int]): Maximum number of sinks to return, If not passed, - defaults to a value set by the API. - page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. 
Returns: - Iterator[~logging_v2.Sink] + Generator[~logging_v2.Sink] """ extra_params = {} @@ -249,7 +256,7 @@ def list_sinks(self, parent, *, page_size=None, page_token=None): extra_params["pageSize"] = page_size path = f"/{parent}/sinks" - return page_iterator.HTTPIterator( + iterator = page_iterator.HTTPIterator( client=self._client, api_request=self._client._connection.api_request, path=path, @@ -259,6 +266,8 @@ def list_sinks(self, parent, *, page_size=None, page_token=None): extra_params=extra_params, ) + return _entries_pager(iterator, max_results) + def sink_create( self, parent, sink_name, filter_, destination, *, unique_writer_identity=False ): @@ -373,24 +382,27 @@ def __init__(self, client): self._client = client self.api_request = client._connection.api_request - def list_metrics(self, project, *, page_size=None, page_token=None): + def list_metrics( + self, project, *, max_results=None, page_size=None, page_token=None + ): """List metrics for the project associated with this client. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list Args: - page_size (Optional[int]): The maximum number of sinks in each - page of results from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - page_token (Optional[str]): If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property ofthe returned iterator instead of manually passing the - token. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[google.cloud.logging_v2.metric.Metric] + Generator[logging_v2.Metric] + """ extra_params = {} @@ -398,7 +410,7 @@ def list_metrics(self, project, *, page_size=None, page_token=None): extra_params["pageSize"] = page_size path = f"/projects/{project}/metrics" - return page_iterator.HTTPIterator( + iterator = page_iterator.HTTPIterator( client=self._client, api_request=self._client._connection.api_request, path=path, @@ -407,6 +419,7 @@ def list_metrics(self, project, *, page_size=None, page_token=None): page_token=page_token, extra_params=extra_params, ) + return _entries_pager(iterator, max_results) def metric_create(self, project, metric_name, filter_, description): """Create a metric resource. @@ -469,6 +482,18 @@ def metric_delete(self, project, metric_name): self.api_request(method="DELETE", path=target) +def _entries_pager(page_iter, max_results=None): + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") + + i = 0 + for page in page_iter: + if max_results is not None and i >= max_results: + break + yield page + i += 1 + + def _item_to_entry(iterator, resource, loggers): """Convert a log entry resource to the native object. 
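
A sketch of the capping logic that the `_entries_pager` helper above implements (an equivalent standard-library formulation, for illustration only; it is not part of the patched files):

```py
import itertools

def capped(iterable, max_results=None):
    # yield at most max_results items, mirroring _entries_pager
    if max_results is not None and max_results < 0:
        raise ValueError("max_results must be positive")
    if max_results is None:
        return iterable
    return itertools.islice(iterable, max_results)
```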
diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py index e9b432eb2..3d5ea24fc 100644 --- a/google/cloud/logging_v2/client.py +++ b/google/cloud/logging_v2/client.py @@ -18,13 +18,6 @@ import os import sys -try: - from google.cloud.logging_v2 import _gapic -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False - _gapic = None -else: - _HAVE_GRPC = True import google.api_core.client_options from google.cloud.client import ClientWithProject @@ -35,8 +28,6 @@ from google.cloud.logging_v2._http import _MetricsAPI as JSONMetricsAPI from google.cloud.logging_v2._http import _SinksAPI as JSONSinksAPI from google.cloud.logging_v2.handlers import CloudLoggingHandler -from google.cloud.logging_v2.handlers import AppEngineHandler -from google.cloud.logging_v2.handlers import ContainerEngineHandler from google.cloud.logging_v2.handlers import StructuredLogHandler from google.cloud.logging_v2.handlers import setup_logging from google.cloud.logging_v2.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS @@ -50,6 +41,19 @@ _DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) +_HAVE_GRPC = False + +try: + if not _DISABLE_GRPC: + # only import if DISABLE_GRPC is not set + from google.cloud.logging_v2 import _gapic + + _HAVE_GRPC = True +except ImportError: # pragma: NO COVER + # could not import gapic library. Fall back to HTTP mode + _HAVE_GRPC = False + _gapic = None + _USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC _GAE_RESOURCE_TYPE = "gae_app" @@ -184,16 +188,21 @@ def metrics_api(self): self._metrics_api = JSONMetricsAPI(self) return self._metrics_api - def logger(self, name): + def logger(self, name, *, labels=None, resource=None): """Creates a logger bound to the current client. Args: name (str): The name of the logger to be constructed. + resource (Optional[~logging_v2.Resource]): a monitored resource object + representing the resource the code was run on. If not given, will + be inferred from the environment. + labels (Optional[dict]): Mapping of default labels for entries written + via this logger. Returns: ~logging_v2.logger.Logger: Logger created with the current client. """ - return Logger(name, client=self) + return Logger(name, client=self, labels=labels, resource=resource) def list_entries( self, @@ -201,10 +210,11 @@ def list_entries( resource_names=None, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): - """Return a page of log entry resources. + """Return a generator of log entry resources. Args: resource_names (Sequence[str]): Names of one or more parent resources @@ -223,14 +233,17 @@ def list_entries( https://cloud.google.com/logging/docs/view/advanced_filters order_by (str) One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (int): maximum number of entries to return, If not passed, - defaults to a value set by the API. - page_token (str): opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. 
Returns: - Iterator[~logging_v2.LogEntry] + Generator[~logging_v2.LogEntry] """ if resource_names is None: resource_names = [f"projects/{self.project}"] @@ -240,6 +253,7 @@ def list_entries( resource_names=resource_names, filter_=filter_, order_by=order_by, + max_results=max_results, page_size=page_size, page_token=page_token, ) @@ -263,7 +277,9 @@ def sink(self, name, *, filter_=None, destination=None): """ return Sink(name, filter_=filter_, destination=destination, client=self) - def list_sinks(self, *, parent=None, page_size=None, page_token=None): + def list_sinks( + self, *, parent=None, max_results=None, page_size=None, page_token=None + ): """List sinks for a parent resource. See @@ -280,22 +296,25 @@ def list_sinks(self, *, parent=None, page_size=None, page_token=None): "folders/[FOLDER_ID]". If not passed, defaults to the project bound to the API's client. - page_size (Optional[int]): The maximum number of sinks in each - page of results from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - page_token (Optional[str]): If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property ofthe returned iterator instead of manually passing the - token. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.sink.Sink] + Generator[~logging_v2.Sink] """ if parent is None: parent = f"projects/{self.project}" return self.sinks_api.list_sinks( - parent=parent, page_size=page_size, page_token=page_token + parent=parent, + max_results=max_results, + page_size=page_size, + page_token=page_token, ) def metric(self, name, *, filter_=None, description=""): @@ -316,27 +335,30 @@ def metric(self, name, *, filter_=None, description=""): """ return Metric(name, filter_=filter_, client=self, description=description) - def list_metrics(self, *, page_size=None, page_token=None): + def list_metrics(self, *, max_results=None, page_size=None, page_token=None): """List metrics for the project associated with this client. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list Args: - page_size (Optional[int]): The maximum number of sinks in each - page of results from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - page_token (Optional[str]): If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property ofthe returned iterator instead of manually passing the - token. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API.
+ page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.metric.Metric] + Generator[logging_v2.Metric] """ return self.metrics_api.list_metrics( - self.project, page_size=page_size, page_token=page_token + self.project, + max_results=max_results, + page_size=page_size, + page_token=page_token, ) def get_default_handler(self, **kw): @@ -352,9 +374,9 @@ def get_default_handler(self, **kw): if isinstance(monitored_resource, Resource): if monitored_resource.type == _GAE_RESOURCE_TYPE: - return AppEngineHandler(self, **kw) + return CloudLoggingHandler(self, resource=monitored_resource, **kw) elif monitored_resource.type == _GKE_RESOURCE_TYPE: - return ContainerEngineHandler(**kw) + return StructuredLogHandler(**kw, project_id=self.project) elif monitored_resource.type == _GCF_RESOURCE_TYPE: # __stdout__ stream required to support structured logging on Python 3.7 kw["stream"] = kw.get("stream", sys.__stdout__) diff --git a/google/cloud/logging_v2/entries.py b/google/cloud/logging_v2/entries.py index 0af5a46f7..cb485da61 100644 --- a/google/cloud/logging_v2/entries.py +++ b/google/cloud/logging_v2/entries.py @@ -46,11 +46,12 @@ ) -def logger_name_from_path(path): +def logger_name_from_path(path, project=None): """Validate a logger URI path and get the logger name. Args: path (str): URI path for a logger API request + project (Optional[str]): The project the path is expected to belong to. Returns: str: Logger name parsed from ``path``. @@ -59,7 +60,7 @@ def logger_name_from_path(path): ValueError: If the ``path`` is ill-formed or if the project from ``path`` does not agree with the ``project`` passed in. """ - return _name_from_project_path(path, None, _LOGGER_TEMPLATE) + return _name_from_project_path(path, project, _LOGGER_TEMPLATE) def _int_or_none(value): @@ -155,7 +156,8 @@ def from_api_repr(cls, resource, client, *, loggers=None): Client which holds credentials and project configuration. loggers (Optional[dict]): A mapping of logger fullnames -> loggers. If not - passed, the entry will have a newly-created logger. + passed, the entry will have a newly-created logger if possible, + or an empty logger field if not. Returns: google.cloud.logging.entries.LogEntry: Log entry parsed from ``resource``. @@ -165,8 +167,13 @@ logger_fullname = resource["logName"] logger = loggers.get(logger_fullname) if logger is None: - logger_name = logger_name_from_path(logger_fullname) - logger = loggers[logger_fullname] = client.logger(logger_name) + # attempt to create a logger if possible + try: + logger_name = logger_name_from_path(logger_fullname, client.project) + logger = loggers[logger_fullname] = client.logger(logger_name) + except ValueError: + # log name is not scoped to a project.
Leave logger as None + pass payload = cls._extract_payload(resource) insert_id = resource.get("insertId") timestamp = resource.get("timestamp") diff --git a/google/cloud/logging_v2/handlers/_helpers.py b/google/cloud/logging_v2/handlers/_helpers.py index 931b7a2f5..32e70dfdd 100644 --- a/google/cloud/logging_v2/handlers/_helpers.py +++ b/google/cloud/logging_v2/handlers/_helpers.py @@ -17,6 +17,7 @@ import math import json import re +import warnings try: import flask @@ -26,11 +27,13 @@ from google.cloud.logging_v2.handlers.middleware.request import _get_django_request _DJANGO_CONTENT_LENGTH = "CONTENT_LENGTH" -_DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" +_DJANGO_XCLOUD_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" +_DJANGO_TRACEPARENT = "HTTP_TRACEPARENT" _DJANGO_USERAGENT_HEADER = "HTTP_USER_AGENT" _DJANGO_REMOTE_ADDR_HEADER = "REMOTE_ADDR" _DJANGO_REFERER_HEADER = "HTTP_REFERER" -_FLASK_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT" +_FLASK_XCLOUD_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT" +_FLASK_TRACEPARENT = "TRACEPARENT" _PROTOCOL_HEADER = "SERVER_PROTOCOL" @@ -39,6 +42,8 @@ def format_stackdriver_json(record, message): Returns: str: JSON str to be written to the log file. + + DEPRECATED: use StructuredLogHandler to write formatted logs to standard out instead. """ subsecond, second = math.modf(record.created) @@ -48,7 +53,10 @@ def format_stackdriver_json(record, message): "thread": record.thread, "severity": record.levelname, } - + warnings.warn( + "format_stackdriver_json is deprecated. Use StructuredLogHandler instead.", + DeprecationWarning, + ) return json.dumps(payload, ensure_ascii=False) @@ -56,93 +64,124 @@ def get_request_data_from_flask(): """Get http_request and trace data from flask request headers. Returns: - Tuple[Optional[dict], Optional[str], Optional[str]]: - Data related to the current http request, trace_id, and span_id for - the request. All fields will be None if a django request isn't - found. + Tuple[Optional[dict], Optional[str], Optional[str], bool]: + Data related to the current http request, trace_id, span_id and trace_sampled + for the request. All fields will be None if a flask request isn't found. """ if flask is None or not flask.request: - return None, None, None + return None, None, None, False # build http_request http_request = { "requestMethod": flask.request.method, "requestUrl": flask.request.url, - "requestSize": flask.request.content_length, "userAgent": flask.request.user_agent.string, - "remoteIp": flask.request.remote_addr, - "referer": flask.request.referrer, "protocol": flask.request.environ.get(_PROTOCOL_HEADER), } # find trace id and span id - header = flask.request.headers.get(_FLASK_TRACE_HEADER) - trace_id, span_id = _parse_trace_span(header) + # first check for w3c traceparent header + header = flask.request.headers.get(_FLASK_TRACEPARENT) + trace_id, span_id, trace_sampled = _parse_trace_parent(header) + if trace_id is None: + # traceparent not found. look for xcloud_trace_context header + header = flask.request.headers.get(_FLASK_XCLOUD_TRACE_HEADER) + trace_id, span_id, trace_sampled = _parse_xcloud_trace(header) - return http_request, trace_id, span_id + return http_request, trace_id, span_id, trace_sampled def get_request_data_from_django(): """Get http_request and trace data from django request headers. Returns: - Tuple[Optional[dict], Optional[str], Optional[str]]: - Data related to the current http request, trace_id, and span_id for - the request. All fields will be None if a django request isn't - found.
+ Tuple[Optional[dict], Optional[str], Optional[str], bool]: + Data related to the current http request, trace_id, span_id, and trace_sampled + for the request. All fields will be None if a django request isn't found. """ request = _get_django_request() if request is None: - return None, None, None - - # convert content_length to int if it exists - content_length = None - try: - content_length = int(request.META.get(_DJANGO_CONTENT_LENGTH)) - except (ValueError, TypeError): - content_length = None + return None, None, None, False # build http_request http_request = { "requestMethod": request.method, "requestUrl": request.build_absolute_uri(), - "requestSize": content_length, "userAgent": request.META.get(_DJANGO_USERAGENT_HEADER), - "remoteIp": request.META.get(_DJANGO_REMOTE_ADDR_HEADER), - "referer": request.META.get(_DJANGO_REFERER_HEADER), "protocol": request.META.get(_PROTOCOL_HEADER), } # find trace id and span id - header = request.META.get(_DJANGO_TRACE_HEADER) - trace_id, span_id = _parse_trace_span(header) + # first check for w3c traceparent header + header = request.META.get(_DJANGO_TRACEPARENT) + trace_id, span_id, trace_sampled = _parse_trace_parent(header) + if trace_id is None: + # traceparent not found. look for xcloud_trace_context header + header = request.META.get(_DJANGO_XCLOUD_TRACE_HEADER) + trace_id, span_id, trace_sampled = _parse_xcloud_trace(header) + + return http_request, trace_id, span_id, trace_sampled + + +def _parse_trace_parent(header): + """Given a W3C traceparent header, extract the trace and span ids. + For more information see https://www.w3.org/TR/trace-context/ - return http_request, trace_id, span_id + Args: + header (str): the string extracted from the traceparent header + example: 00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01 + Returns: + Tuple[Optional[str], Optional[str], bool]: + The trace_id, span_id and trace_sampled extracted from the header + Each field will be None if the header can't be parsed in the expected format. + """ + trace_id = span_id = None + trace_sampled = False + # see https://www.w3.org/TR/trace-context/ for W3C traceparent format + if header: + try: + VERSION_PART = r"(?!ff)[a-f\d]{2}" + TRACE_ID_PART = r"(?![0]{32})[a-f\d]{32}" + PARENT_ID_PART = r"(?![0]{16})[a-f\d]{16}" + FLAGS_PART = r"[a-f\d]{2}" + regex = f"^\\s?({VERSION_PART})-({TRACE_ID_PART})-({PARENT_ID_PART})-({FLAGS_PART})(-.*)?\\s?$" + match = re.match(regex, header) + trace_id = match.group(2) + span_id = match.group(3) + # trace-flag component is an 8-bit bit field. Read as an int + int_flag = int(match.group(4), 16) + # trace sampled is set if the right-most bit in flag component is set + trace_sampled = bool(int_flag & 1) + except (IndexError, AttributeError): + # could not parse header as expected. Return None + pass + return trace_id, span_id, trace_sampled -def _parse_trace_span(header): +def _parse_xcloud_trace(header): """Given an X_CLOUD_TRACE header, extract the trace and span ids. Args: header (str): the string extracted from the X_CLOUD_TRACE header Returns: - Tuple[Optional[dict], Optional[str]]: - The trace_id and span_id extracted from the header + Tuple[Optional[str], Optional[str], bool]: + The trace_id, span_id and trace_sampled extracted from the header Each field will be None if not found.
""" - trace_id = None - span_id = None + trace_id = span_id = None + trace_sampled = False + # see https://cloud.google.com/trace/docs/setup for X-Cloud-Trace_Context format if header: try: - split_header = header.split("/", 1) - trace_id = split_header[0] - header_suffix = split_header[1] - # the span is the set of alphanumeric characters after the / - span_id = re.findall(r"^\w+", header_suffix)[0] + regex = r"([\w-]+)?(\/?([\w-]+))?(;?o=(\d))?" + match = re.match(regex, header) + trace_id = match.group(1) + span_id = match.group(3) + trace_sampled = match.group(5) == "1" except IndexError: pass - return trace_id, span_id + return trace_id, span_id, trace_sampled def get_request_data(): @@ -150,10 +189,9 @@ def get_request_data(): frameworks (currently supported: Flask and Django). Returns: - Tuple[Optional[dict], Optional[str], Optional[str]]: - Data related to the current http request, trace_id, and span_id for - the request. All fields will be None if a django request isn't - found. + Tuple[Optional[dict], Optional[str], Optional[str], bool]: + Data related to the current http request, trace_id, span_id, and trace_sampled + for the request. All fields will be None if a http request isn't found. """ checkers = ( get_request_data_from_django, @@ -161,8 +199,8 @@ def get_request_data(): ) for checker in checkers: - http_request, trace_id, span_id = checker() + http_request, trace_id, span_id, trace_sampled = checker() if http_request is not None: - return http_request, trace_id, span_id + return http_request, trace_id, span_id, trace_sampled - return None, None, None + return None, None, None, False diff --git a/google/cloud/logging_v2/handlers/_monitored_resources.py b/google/cloud/logging_v2/handlers/_monitored_resources.py index e257f08e4..144258749 100644 --- a/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -169,6 +169,8 @@ def _create_global_resource(project): def detect_resource(project=""): """Return the default monitored resource based on the local environment. + If GCP resource not found, defaults to `global`. + Args: project (str): The project ID to pass on to the resource (if needed) Returns: diff --git a/google/cloud/logging_v2/handlers/app_engine.py b/google/cloud/logging_v2/handlers/app_engine.py index 874a9d608..a65d16a0e 100644 --- a/google/cloud/logging_v2/handlers/app_engine.py +++ b/google/cloud/logging_v2/handlers/app_engine.py @@ -20,6 +20,7 @@ import logging import os +import warnings from google.cloud.logging_v2.handlers._helpers import get_request_data from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -36,9 +37,14 @@ _TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" +_DEPRECATION_MSG = "AppEngineHandler is deprecated. Use CloudLoggingHandler instead." + class AppEngineHandler(logging.StreamHandler): - """A logging handler that sends App Engine-formatted logs to Stackdriver.""" + """A logging handler that sends App Engine-formatted logs to Stackdriver. + + DEPRECATED: use CloudLoggingHandler instead. + """ def __init__( self, @@ -71,6 +77,8 @@ def __init__( self.version_id = os.environ.get(_GAE_VERSION_ENV, "") self.resource = self.get_gae_resource() + warnings.warn(_DEPRECATION_MSG, DeprecationWarning) + def get_gae_resource(self): """Return the GAE resource using the environment variables. 
@@ -90,7 +98,7 @@ def get_gae_labels(self): """ gae_labels = {} - _, trace_id, _ = get_request_data() + _, trace_id, _, _ = get_request_data() if trace_id is not None: gae_labels[_TRACE_ID_LABEL] = trace_id @@ -107,7 +115,7 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. """ message = super(AppEngineHandler, self).format(record) - inferred_http, inferred_trace, _ = get_request_data() + inferred_http, inferred_trace, _, _ = get_request_data() if inferred_trace is not None: inferred_trace = f"projects/{self.project_id}/traces/{inferred_trace}" # allow user overrides diff --git a/google/cloud/logging_v2/handlers/container_engine.py b/google/cloud/logging_v2/handlers/container_engine.py index a4bd0f848..3842111b4 100644 --- a/google/cloud/logging_v2/handlers/container_engine.py +++ b/google/cloud/logging_v2/handlers/container_engine.py @@ -20,15 +20,22 @@ """ import logging.handlers +import warnings from google.cloud.logging_v2.handlers._helpers import format_stackdriver_json +_DEPRECATION_MSG = ( + "ContainerEngineHandler is deprecated. Use StructuredLogHandler instead." +) + class ContainerEngineHandler(logging.StreamHandler): """Handler to format log messages the format expected by GKE fluent. This handler is written to format messages for the Google Container Engine (GKE) fluentd plugin, so that metadata such as log level are properly set. + + DEPRECATED: use StructuredLogHandler to write formatted logs to standard out instead. """ def __init__(self, *, name=None, stream=None): @@ -40,6 +47,7 @@ def __init__(self, *, name=None, stream=None): """ super(ContainerEngineHandler, self).__init__(stream=stream) self.name = name + warnings.warn(_DEPRECATION_MSG, DeprecationWarning) def format(self, record): """Format the message into JSON expected by fluentd. diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index b3b787fe2..769146007 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -14,10 +14,10 @@ """Python :mod:`logging` handlers for Cloud Logging.""" +import collections import json import logging -from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport from google.cloud.logging_v2.handlers._monitored_resources import detect_resource from google.cloud.logging_v2.handlers._helpers import get_request_data @@ -33,8 +33,15 @@ "werkzeug", ) +"""These environments require us to remove extra handlers on setup""" _CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function") +"""Extra trace label to be added on App Engine environments""" +_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" + +"""Resource name for App Engine environments""" +_GAE_RESOURCE_TYPE = "gae_app" + class CloudLoggingFilter(logging.Filter): """Python standard ``logging`` Filter class to add Cloud Logging @@ -45,10 +52,6 @@ class CloudLoggingFilter(logging.Filter): overwritten using the `extras` argument when writing logs. 
""" - # The subset of http_request fields have been tested to work consistently across GCP environments - # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#httprequest - _supported_http_fields = ("requestMethod", "requestUrl", "userAgent", "protocol") - def __init__(self, project=None, default_labels=None): self.project = project self.default_labels = default_labels if default_labels else {} @@ -79,14 +82,12 @@ def filter(self, record): """ user_labels = getattr(record, "labels", {}) # infer request data from the environment - inferred_http, inferred_trace, inferred_span = get_request_data() - if inferred_http is not None: - # filter inferred_http to include only well-supported fields - inferred_http = { - k: v - for (k, v) in inferred_http.items() - if k in self._supported_http_fields and v is not None - } + ( + inferred_http, + inferred_trace, + inferred_span, + inferred_sampled, + ) = get_request_data() if inferred_trace is not None and self.project is not None: # add full path for detected trace inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" @@ -94,17 +95,23 @@ def filter(self, record): record._resource = getattr(record, "resource", None) record._trace = getattr(record, "trace", inferred_trace) or None record._span_id = getattr(record, "span_id", inferred_span) or None + record._trace_sampled = bool(getattr(record, "trace_sampled", inferred_sampled)) record._http_request = getattr(record, "http_request", inferred_http) record._source_location = CloudLoggingFilter._infer_source_location(record) - record._labels = {**self.default_labels, **user_labels} or None + # add logger name as a label if possible + logger_label = {"python_logger": record.name} if record.name else {} + record._labels = {**logger_label, **self.default_labels, **user_labels} or None # create string representations for structured logging record._trace_str = record._trace or "" record._span_id_str = record._span_id or "" - record._http_request_str = json.dumps(record._http_request or {}) - record._source_location_str = json.dumps(record._source_location or {}) - record._labels_str = json.dumps(record._labels or {}) - # break quotes for parsing through structured logging - record._msg_str = str(record.msg).replace('"', '\\"') if record.msg else "" + record._trace_sampled_str = "true" if record._trace_sampled else "false" + record._http_request_str = json.dumps( + record._http_request or {}, ensure_ascii=False + ) + record._source_location_str = json.dumps( + record._source_location or {}, ensure_ascii=False + ) + record._labels_str = json.dumps(record._labels or {}, ensure_ascii=False) return True @@ -143,7 +150,7 @@ def __init__( *, name=DEFAULT_LOGGER_NAME, transport=BackgroundThreadTransport, - resource=_GLOBAL_RESOURCE, + resource=None, labels=None, stream=None, ): @@ -162,11 +169,14 @@ def __init__( :class:`.BackgroundThreadTransport`. The other option is :class:`.SyncTransport`. resource (~logging_v2.resource.Resource): - Resource for this Handler. Defaults to ``global``. + Resource for this Handler. If not given, will be inferred from the environment. labels (Optional[dict]): Additional labels to attach to logs. stream (Optional[IO]): Stream to be used by the handler. 
""" super(CloudLoggingHandler, self).__init__(stream) + if not resource: + # infer the correct monitored resource from the local environment + resource = detect_resource(client.project) self.name = name self.client = client self.transport = transport(client, name) @@ -187,20 +197,68 @@ def emit(self, record): Args: record (logging.LogRecord): The record to be logged. """ - message = super(CloudLoggingHandler, self).format(record) + resource = record._resource or self.resource + labels = record._labels + message = _format_and_parse_message(record, self) + + if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: + # add GAE-specific label + labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})} # send off request self.transport.send( record, message, - resource=(record._resource or self.resource), - labels=record._labels, + resource=resource, + labels=labels, trace=record._trace, span_id=record._span_id, + trace_sampled=record._trace_sampled, http_request=record._http_request, source_location=record._source_location, ) +def _format_and_parse_message(record, formatter_handler): + """ + Helper function to apply formatting to a LogRecord message, + and attempt to parse encoded JSON into a dictionary object. + + Resulting output will be of type (str | dict | None) + + Args: + record (logging.LogRecord): The record object representing the log + formatter_handler (logging.Handler): The handler used to format the log + """ + passed_json_fields = getattr(record, "json_fields", {}) + # if message is a dictionary, use dictionary directly + if isinstance(record.msg, collections.abc.Mapping): + payload = record.msg + # attach any extra json fields if present + if passed_json_fields and isinstance( + passed_json_fields, collections.abc.Mapping + ): + payload = {**payload, **passed_json_fields} + return payload + # format message string based on superclass + message = formatter_handler.format(record) + try: + # attempt to parse encoded json into dictionary + if message[0] == "{": + json_message = json.loads(message) + if isinstance(json_message, collections.abc.Mapping): + message = json_message + except (json.decoder.JSONDecodeError, IndexError): + # log string is not valid json + pass + # if json_fields was set, create a dictionary using that + if passed_json_fields and isinstance(passed_json_fields, collections.abc.Mapping): + if message != "None": + passed_json_fields["message"] = message + return passed_json_fields + # if formatted message contains no content, return None + return message if message != "None" else None + + def setup_logging( handler, *, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO ): diff --git a/google/cloud/logging_v2/handlers/structured_log.py b/google/cloud/logging_v2/handlers/structured_log.py index 43e1250a3..2d7c5e078 100644 --- a/google/cloud/logging_v2/handlers/structured_log.py +++ b/google/cloud/logging_v2/handlers/structured_log.py @@ -14,19 +14,23 @@ """Logging handler for printing formatted structured logs to standard output. 
""" +import collections import json import logging.handlers from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter +from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message GCP_FORMAT = ( - '{"message": %(_formatted_msg)s, ' + "{%(_payload_str)s" '"severity": "%(levelname)s", ' '"logging.googleapis.com/labels": %(_labels_str)s, ' '"logging.googleapis.com/trace": "%(_trace_str)s", ' '"logging.googleapis.com/spanId": "%(_span_id_str)s", ' + '"logging.googleapis.com/trace_sampled": %(_trace_sampled_str)s, ' '"logging.googleapis.com/sourceLocation": %(_source_location_str)s, ' - '"httpRequest": %(_http_request_str)s }' + '"httpRequest": %(_http_request_str)s ' + "}" ) @@ -57,15 +61,22 @@ def format(self, record): Args: record (logging.LogRecord): The log record. Returns: - str: A JSON string formatted for GKE fluentd. + str: A JSON string formatted for GCP structured logging. """ - # let other formatters alter the message - super_payload = None - if record.msg: - # format the message using default handler behaviors - super_payload = super(StructuredLogHandler, self).format(record) - # properly break any formatting in string to make it json safe - record._formatted_msg = json.dumps(super_payload or "") + payload = None + message = _format_and_parse_message(record, super(StructuredLogHandler, self)) + + if isinstance(message, collections.abc.Mapping): + # if input is a dictionary, encode it as a json string + encoded_msg = json.dumps(message, ensure_ascii=False) + # strip out open and close parentheses + payload = encoded_msg.lstrip("{").rstrip("}") + "," + elif message: + # properly break any formatting in string to make it json safe + encoded_message = json.dumps(message, ensure_ascii=False) + payload = '"message": {},'.format(encoded_message) + + record._payload_str = payload or "" # remove exception info to avoid duplicating it # https://github.com/googleapis/python-logging/issues/382 record.exc_info = None diff --git a/google/cloud/logging_v2/handlers/transports/background_thread.py b/google/cloud/logging_v2/handlers/transports/background_thread.py index 60828a117..1097830a8 100644 --- a/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -137,7 +137,7 @@ def _thread_main(self): if item is _WORKER_TERMINATOR: done = True # Continue processing items. else: - batch.log_struct(**item) + batch.log(**item) self._safely_commit_batch(batch) @@ -226,12 +226,18 @@ def enqueue(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. - message (str): The message from the ``LogRecord`` after being + message (str or dict): The message from the ``LogRecord`` after being formatted by the associated log formatters. kwargs: Additional optional arguments for the logger """ + # set python logger name as label if missing + labels = kwargs.pop("labels", {}) + if record.name: + labels["python_logger"] = labels.get("python_logger", record.name) + kwargs["labels"] = labels + # enqueue new entry queue_entry = { - "info": {"message": message, "python_logger": record.name}, + "message": message, "severity": _helpers._normalize_severity(record.levelno), "timestamp": datetime.datetime.utcfromtimestamp(record.created), } @@ -285,7 +291,7 @@ def send(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. 
- message (str): The message from the ``LogRecord`` after being + message (str or dict): The message from the ``LogRecord`` after being formatted by the associated log formatters. kwargs: Additional optional arguments for the logger """ diff --git a/google/cloud/logging_v2/handlers/transports/base.py b/google/cloud/logging_v2/handlers/transports/base.py index d60a5a070..bd52b4e75 100644 --- a/google/cloud/logging_v2/handlers/transports/base.py +++ b/google/cloud/logging_v2/handlers/transports/base.py @@ -27,7 +27,7 @@ def send(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. - message (str): The message from the ``LogRecord`` after being + message (str or dict): The message from the ``LogRecord`` after being formatted by the associated log formatters. kwargs: Additional optional arguments for the logger """ diff --git a/google/cloud/logging_v2/handlers/transports/sync.py b/google/cloud/logging_v2/handlers/transports/sync.py index 35ee73daa..796f0d2ff 100644 --- a/google/cloud/logging_v2/handlers/transports/sync.py +++ b/google/cloud/logging_v2/handlers/transports/sync.py @@ -16,7 +16,6 @@ Logs directly to the Cloud Logging API with a synchronous call. """ - from google.cloud.logging_v2 import _helpers from google.cloud.logging_v2.handlers.transports.base import Transport @@ -36,11 +35,18 @@ def send(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. - message (str): The message from the ``LogRecord`` after being + message (str or dict): The message from the ``LogRecord`` after being formatted by the associated log formatters. kwargs: Additional optional arguments for the logger """ - info = {"message": message, "python_logger": record.name} - self.logger.log_struct( - info, severity=_helpers._normalize_severity(record.levelno), **kwargs, + # set python logger name as label if missing + labels = kwargs.pop("labels", {}) + if record.name: + labels["python_logger"] = labels.get("python_logger", record.name) + # send log synchronously + self.logger.log( + message, + severity=_helpers._normalize_severity(record.levelno), + labels=labels, + **kwargs, ) diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py index fafb70629..02ecb6905 100644 --- a/google/cloud/logging_v2/logger.py +++ b/google/cloud/logging_v2/logger.py @@ -14,13 +14,17 @@ """Define API Loggers.""" +import collections + from google.cloud.logging_v2._helpers import _add_defaults_to_filter from google.cloud.logging_v2.entries import LogEntry from google.cloud.logging_v2.entries import ProtobufEntry from google.cloud.logging_v2.entries import StructEntry from google.cloud.logging_v2.entries import TextEntry from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.handlers._monitored_resources import detect_resource +import google.protobuf.message _GLOBAL_RESOURCE = Resource(type="global", labels={}) @@ -41,6 +45,8 @@ ("source_location", None), ) +_STRUCT_EXTRACTABLE_FIELDS = ["severity", "trace", "span_id"] + class Logger(object): """Loggers represent named targets for log entries. @@ -48,19 +54,23 @@ See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs """ - def __init__(self, name, client, *, labels=None, resource=_GLOBAL_RESOURCE): + def __init__(self, name, client, *, labels=None, resource=None): """ Args: name (str): The name of the logger.
client (~logging_v2.client.Client): A client which holds credentials and project configuration for the logger (which requires a project). - resource (~logging_v2.Resource): a monitored resource object - representing the resource the code was run on. + resource (Optional[~logging_v2.Resource]): a monitored resource object + representing the resource the code was run on. If not given, will + be inferred from the environment. labels (Optional[dict]): Mapping of default labels for entries written via this logger. """ + if not resource: + # infer the correct monitored resource from the local environment + resource = detect_resource(client.project) self.name = name self._client = client self.labels = labels @@ -125,6 +135,20 @@ def _do_log(self, client, _entry_class, payload=None, **kw): kw["labels"] = kw.pop("labels", self.labels) kw["resource"] = kw.pop("resource", self.default_resource) + severity = kw.get("severity", None) + if isinstance(severity, str) and not severity.isupper(): + # convert severity to upper case, as expected by enum definition + kw["severity"] = severity.upper() + + if isinstance(kw["resource"], collections.abc.Mapping): + # if resource was passed as a dict, attempt to parse it into a + # Resource object + try: + kw["resource"] = Resource(**kw["resource"]) + except TypeError as e: + # dict couldn't be parsed as a Resource + raise TypeError("invalid resource dict") from e + if payload is not None: entry = _entry_class(payload=payload, **kw) else: @@ -134,7 +158,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw): client.logging_api.write_entries([api_repr]) def log_empty(self, *, client=None, **kw): - """Log an empty message via a POST request + """Log an empty message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -149,7 +173,7 @@ def log_empty(self, *, client=None, **kw): self._do_log(client, LogEntry, **kw) def log_text(self, text, *, client=None, **kw): - """Log a text message via a POST request + """Log a text message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -165,7 +189,7 @@ def log_text(self, text, *, client=None, **kw): self._do_log(client, TextEntry, text, **kw) def log_struct(self, info, *, client=None, **kw): - """Log a structured message via a POST request + """Log a dictionary message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -178,10 +202,14 @@ def log_struct(self, info, *, client=None, **kw): kw (Optional[dict]): additional keyword arguments for the entry. See :class:`~logging_v2.entries.LogEntry`. """ + for field in _STRUCT_EXTRACTABLE_FIELDS: + # attempt to copy relevant fields from the payload into the LogEntry body + if field in info and field not in kw: + kw[field] = info[field] self._do_log(client, StructEntry, info, **kw) def log_proto(self, message, *, client=None, **kw): - """Log a protobuf message via a POST request + """Log a protobuf message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list @@ -197,6 +225,29 @@ """ self._do_log(client, ProtobufEntry, message, **kw) + def log(self, message=None, *, client=None, **kw): + """Log an arbitrary message. Type will be inferred based on the input. + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write + + Args: + message (Optional[str or dict or google.protobuf.Message]): The message to log. + client (Optional[~logging_v2.client.Client]): + The client to use.
If not passed, falls back to the + ``client`` stored on the current logger. + kw (Optional[dict]): additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + if isinstance(message, google.protobuf.message.Message): + self.log_proto(message, client=client, **kw) + elif isinstance(message, collections.abc.Mapping): + self.log_struct(message, client=client, **kw) + elif isinstance(message, str): + self.log_text(message, client=client, **kw) + else: + self._do_log(client, LogEntry, message, **kw) + def delete(self, logger_name=None, *, client=None): """Delete all entries in a logger via a DELETE request @@ -232,10 +283,11 @@ resource_names=None, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): - """Return a page of log entries. + """Return a generator of log entry resources. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list @@ -257,19 +309,16 @@ By default, a 24 hour filter is applied. order_by (Optional[str]): One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (Optional[int]): - Optional. The maximum number of entries in each page of results - from this request. Non-positive values are ignored. Defaults - to a sensible value set by the API. - page_token (Optional[str]): - Optional. If present, return the next batch of entries, using - the value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing - the token. - + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.entries.LogEntry] + Generator[~logging_v2.LogEntry] """ if resource_names is None: @@ -285,6 +334,7 @@ resource_names=resource_names, filter_=filter_, order_by=order_by, + max_results=max_results, page_size=page_size, page_token=page_token, ) @@ -361,6 +411,24 @@ def log_proto(self, message, **kw): """ self.entries.append(ProtobufEntry(payload=message, **kw)) + def log(self, message=None, **kw): + """Add an arbitrary message to be logged during :meth:`commit`. + Type will be inferred based on the input message. + + Args: + message (Optional[str or dict or google.protobuf.Message]): The message to log. + kw (Optional[dict]): Additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + entry_type = LogEntry + if isinstance(message, google.protobuf.message.Message): + entry_type = ProtobufEntry + elif isinstance(message, collections.abc.Mapping): + entry_type = StructEntry + elif isinstance(message, str): + entry_type = TextEntry + self.entries.append(entry_type(payload=message, **kw)) + def commit(self, *, client=None): """Send saved log entries as a single API call.
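Given `_STRUCT_EXTRACTABLE_FIELDS` above, a short usage sketch (the logger name is illustrative, and `client` is assumed to be a constructed `Client`) of how recognized keys in a dict payload are promoted onto the `LogEntry` when not passed explicitly:

```py
# Sketch: severity/trace/span_id found in a dict payload are copied onto
# the LogEntry itself; the payload dict is still logged as-is.
logger = client.logger("my-log")  # illustrative name
logger.log_struct(
    {
        "message": "payload with LogEntry fields",
        "severity": "warning",  # becomes entry.severity == "WARNING"
        "trace": "123",         # becomes entry.trace
        "span_id": "456",       # becomes entry.span_id
    }
)
```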
diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index e519c75c1..fdbbe1211 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -100,15 +100,65 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument @snippet -def logger_usage(client, to_delete): +def client_setup(client2, to_delete): + """Client setup.""" + + # [START usage_client_setup] + import google.cloud.logging + + # if project not given, it will be inferred from the environment + client = google.cloud.logging.Client(project="my-project") + # [END usage_client_setup] + to_delete.append(client) + + # [START usage_http_client_setup] + http_client = google.cloud.logging.Client(_use_grpc=False) + # [END usage_http_client_setup] + to_delete.append(http_client) + + +@snippet +def logger_usage(client_true, to_delete): """Logger usage.""" - log_name = "logger_usage_%d" % (_millis()) + import google.cloud.logging # [START logger_create] - logger = client.logger(log_name) + client = google.cloud.logging.Client(project="my-project") + logger = client.logger(name="log_id") + # logger will bind to logName "projects/my-project/logs/log_id" # [END logger_create] + client = client_true + + log_id = "logger_usage_%d" % (_millis()) + # [START logger_custom_labels] + custom_labels = {"my-key": "my-value"} + label_logger = client.logger(log_id, labels=custom_labels) + # [END logger_custom_labels] + to_delete.append(label_logger) + # [START logger_custom_resource] + from google.cloud.logging_v2.resource import Resource + + resource = Resource(type="global", labels={}) + global_logger = client.logger(log_id, resource=resource) + # [END logger_custom_resource] + to_delete.append(global_logger) + + logger = client_true.logger(log_id) to_delete.append(logger) + # [START logger_log_basic] + logger.log("A simple entry") # API call + # [END logger_log_basic] + + # [START logger_log_fields] + logger.log( + "an entry with fields set", + severity="ERROR", + insert_id="0123", + labels={"my-label": "my-value"}, + ) # API call + # [END logger_log_fields] + # [START logger_log_text] logger.log_text("A simple entry") # API call # [END logger_log_text] @@ -135,6 +185,20 @@ def logger_usage(client, to_delete): ) # [END logger_log_resource_text] + # [START logger_log_batch] + batch = logger.batch() + batch.log("first log") + batch.log("second log") + batch.commit() + # [END logger_log_batch] + + # [START logger_log_batch_context] + with logger.batch() as batch: + batch.log("first log") + # do work + batch.log("last log") + # [END logger_log_batch_context] + # [START logger_list_entries] from google.cloud.logging import DESCENDING @@ -357,12 +421,10 @@ def logging_handler(client): # [START create_cloud_handler] from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging_v2.handlers import setup_logging handler = CloudLoggingHandler(client) - cloud_logger = logging.getLogger("cloudLogger") - cloud_logger.setLevel(logging.INFO) - cloud_logger.addHandler(handler) - cloud_logger.error("bad news") + setup_logging(handler) # [END create_cloud_handler] # [START create_named_handler] @@ -370,6 +432,39 @@ def logging_handler(client): # [END create_named_handler] +@snippet +def logging_json(client): + # [START logging_json_dumps] + import logging + import json + + data_dict = {"hello": "world"} + logging.info(json.dumps(data_dict)) + # [END logging_json_dumps] + + # [START logging_extra_json_fields] + import logging + + data_dict = {"hello": "world"} + 
logging.info("message field", extra={"json_fields": data_dict}) + # [END logging_extra_json_fields] + + +@snippet +def using_extras(client): + import logging + + # [START logging_extras] + my_labels = {"foo": "bar"} + my_http = {"requestUrl": "localhost"} + my_trace = "01234" + + logging.info( + "hello", extra={"labels": my_labels, "http_request": my_http, "trace": my_trace} + ) + # [END logging_extras] + + @snippet def setup_logging(client): import logging diff --git a/tests/environment b/tests/environment index dc8506605..41c32ce34 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit dc85066053b8dc2246c8b72f93a5b97f92885eb2 +Subproject commit 41c32ce3425529680e32701549d3f682f9c82b63 diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 532eea96b..90b4059d6 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -32,11 +32,11 @@ from google.api_core.exceptions import ServiceUnavailable import google.cloud.logging from google.cloud._helpers import UTC -from google.cloud.logging_v2.handlers import AppEngineHandler from google.cloud.logging_v2.handlers import CloudLoggingHandler from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.entries import TextEntry from google.protobuf.struct_pb2 import Struct, Value, ListValue, NullValue @@ -98,11 +98,13 @@ class Config(object): """ CLIENT = None + HTTP_CLIENT = None use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "never") def setUpModule(): Config.CLIENT = client.Client() + Config.HTTP_CLIENT = client.Client(_use_grpc=False) # Skip the test cases using bigquery, storage and pubsub clients for mTLS testing. 
@@ -187,34 +189,34 @@ def test_list_entry_with_auditlog(self): audit_dict = { "@type": type_url, "methodName": "test", - "requestMetadata": {"callerIp": "::1", "callerSuppliedUserAgent": "test"}, "resourceName": "test", "serviceName": "test", - "status": {"code": 0}, } audit_struct = self._dict_to_struct(audit_dict) - logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") - logger.log_proto(audit_struct) - - # retrieve log - retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) - protobuf_entry = retry(lambda: next(logger.list_entries()))() - - self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - self.assertIsNone(protobuf_entry.payload_pb) - self.assertIsInstance(protobuf_entry.payload_json, dict) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) - self.assertEqual( - protobuf_entry.payload_json["methodName"], audit_dict["methodName"] - ) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url - ) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["methodName"], - audit_dict["methodName"], - ) + gapic_logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") + http_logger = Config.HTTP_CLIENT.logger(f"audit-proto-{uuid.uuid1()}-http") + for logger in [gapic_logger, http_logger]: + logger.log_proto(audit_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() + + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.payload_json["methodName"], audit_dict["methodName"] + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["methodName"], + audit_dict["methodName"], + ) def test_list_entry_with_requestlog(self): """ @@ -245,20 +247,22 @@ } req_struct = self._dict_to_struct(req_dict) - logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") - logger.log_proto(req_struct) - - # retrieve log - retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) - protobuf_entry = retry(lambda: next(logger.list_entries()))() - - self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - self.assertIsNone(protobuf_entry.payload_pb) - self.assertIsInstance(protobuf_entry.payload_json, dict) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url - ) + gapic_logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") + http_logger = Config.HTTP_CLIENT.logger(f"req-proto-{uuid.uuid1()}-http") + for logger in [gapic_logger, http_logger]: + logger.log_proto(req_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() + + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) def test_list_entry_with_auditdata(self): """ @@ -295,46 +299,51 @@ def
test_log_text(self): TEXT_PAYLOAD = "System test: test_log_text" - logger = Config.CLIENT.logger(self._logger_name("log_text")) - self.to_delete.append(logger) - logger.log_text(TEXT_PAYLOAD) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log_text(TEXT_PAYLOAD) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + self.assertTrue(isinstance(entries[0], TextEntry)) def test_log_text_with_timestamp(self): text_payload = "System test: test_log_text_with_timestamp" - logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http")) now = datetime.utcnow() - - self.to_delete.append(logger) - - logger.log_text(text_payload, timestamp=now) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, text_payload) - self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) - self.assertIsInstance(entries[0].received_timestamp, datetime) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log_text(text_payload, timestamp=now) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, text_payload) + self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) + self.assertIsInstance(entries[0].received_timestamp, datetime) def test_log_text_with_resource(self): text_payload = "System test: test_log_text_with_timestamp" - logger = Config.CLIENT.logger(self._logger_name("log_text_res")) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http")) now = datetime.utcnow() - resource = Resource( - type="gae_app", - labels={"module_id": "default", "version_id": "test", "zone": ""}, - ) + for logger in [gapic_logger, http_logger]: + resource = Resource( + type="gae_app", + labels={"module_id": "default", "version_id": "test", "zone": ""}, + ) - self.to_delete.append(logger) + self.to_delete.append(logger) - logger.log_text(text_payload, timestamp=now, resource=resource) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, text_payload) - # project_id is output only so we don't want it in assertion - del entries[0].resource.labels["project_id"] - self.assertEqual(entries[0].resource, resource) + logger.log_text(text_payload, timestamp=now, resource=resource) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, text_payload) + # project_id is output only so we don't want it in assertion + del entries[0].resource.labels["project_id"] + self.assertEqual(entries[0].resource, resource) def test_log_text_w_metadata(self): TEXT_PAYLOAD = "System test: test_log_text" @@ -344,35 +353,42 @@ def test_log_text_w_metadata(self): URI = "https://api.example.com/endpoint" STATUS = 500 REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_text_md")) - self.to_delete.append(logger) + 
gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_md")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_md_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - logger.log_text( - TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, http_request=REQUEST - ) - entries = _list_entries(logger) + logger.log_text( + TEXT_PAYLOAD, + insert_id=INSERT_ID, + severity=SEVERITY, + http_request=REQUEST, + ) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) + self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertEqual(entry.payload, TEXT_PAYLOAD) - self.assertEqual(entry.insert_id, INSERT_ID) - self.assertEqual(entry.severity, SEVERITY) + entry = entries[0] + self.assertEqual(entry.payload, TEXT_PAYLOAD) + self.assertEqual(entry.insert_id, INSERT_ID) + self.assertEqual(entry.severity, SEVERITY) - request = entry.http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) + request = entry.http_request + self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) def test_log_struct(self): - logger = Config.CLIENT.logger(self._logger_name("log_struct")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - logger.log_struct(self.JSON_PAYLOAD) - entries = _list_entries(logger) + logger.log_struct(self.JSON_PAYLOAD) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) def test_log_struct_w_metadata(self): INSERT_ID = "INSERTID" @@ -381,25 +397,84 @@ def test_log_struct_w_metadata(self): URI = "https://api.example.com/endpoint" STATUS = 500 REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_md_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + + logger.log_struct( + self.JSON_PAYLOAD, + insert_id=INSERT_ID, + severity=SEVERITY, + http_request=REQUEST, + ) + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(entries[0].insert_id, INSERT_ID) + self.assertEqual(entries[0].severity, SEVERITY) + request = entries[0].http_request + self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) + + def test_log_w_text(self): + TEXT_PAYLOAD = "System test: test_log_w_text" + gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_text")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_text_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log(TEXT_PAYLOAD) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + + def test_log_w_struct(self): + gapic_logger = 
Config.CLIENT.logger(self._logger_name("log_w_struct")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_struct_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + + logger.log(self.JSON_PAYLOAD) + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + + def test_log_empty(self): + gapic_logger = Config.CLIENT.logger(self._logger_name("log_empty")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_empty_http")) + + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + + logger.log() + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertIsNone(entries[0].payload) + self.assertFalse(entries[0].trace_sampled) + + def test_log_struct_logentry_data(self): + logger = Config.CLIENT.logger(self._logger_name("log_struct_logentry_data")) self.to_delete.append(logger) - logger.log_struct( - self.JSON_PAYLOAD, - insert_id=INSERT_ID, - severity=SEVERITY, - http_request=REQUEST, - ) + JSON_PAYLOAD = { + "message": "System test: test_log_struct_logentry_data", + "severity": "warning", + "trace": "123", + "span_id": "456", + } + logger.log(JSON_PAYLOAD) entries = _list_entries(logger) self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) - self.assertEqual(entries[0].insert_id, INSERT_ID) - self.assertEqual(entries[0].severity, SEVERITY) - request = entries[0].http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) + self.assertEqual(entries[0].payload, JSON_PAYLOAD) + self.assertEqual(entries[0].severity, "WARNING") + self.assertEqual(entries[0].trace, JSON_PAYLOAD["trace"]) + self.assertEqual(entries[0].span_id, JSON_PAYLOAD["span_id"]) + self.assertFalse(entries[0].trace_sampled) def test_log_handler_async(self): LOG_MESSAGE = "It was the worst of times" @@ -415,7 +490,7 @@ def test_log_handler_async(self): cloud_logger.warning(LOG_MESSAGE) handler.flush() entries = _list_entries(logger) - expected_payload = {"message": LOG_MESSAGE, "python_logger": handler.name} + expected_payload = LOG_MESSAGE self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) @@ -437,44 +512,73 @@ def test_log_handler_sync(self): cloud_logger.warning(LOG_MESSAGE) entries = _list_entries(logger) - expected_payload = {"message": LOG_MESSAGE, "python_logger": LOGGER_NAME} + expected_payload = LOG_MESSAGE self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) def test_handlers_w_extras(self): LOG_MESSAGE = "Testing with injected extras."
+ LOGGER_NAME = "handler_extras" + handler_name = self._logger_name(LOGGER_NAME) - for cls in [CloudLoggingHandler, AppEngineHandler]: - LOGGER_NAME = f"{cls.__name__}-handler_extras" - handler_name = self._logger_name(LOGGER_NAME) + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=SyncTransport + ) - handler = cls(Config.CLIENT, name=handler_name, transport=SyncTransport) + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) - # only create the logger to delete, hidden otherwise - logger = Config.CLIENT.logger(handler.name) - self.to_delete.append(logger) + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + expected_request = {"requestUrl": "localhost"} + expected_source = {"file": "test.py"} + extra = { + "trace": "123", + "span_id": "456", + "trace_sampled": True, + "http_request": expected_request, + "source_location": expected_source, + "resource": Resource(type="cloudiot_device", labels={}), + "labels": {"test-label": "manual"}, + } + cloud_logger.warn(LOG_MESSAGE, extra=extra) - cloud_logger = logging.getLogger(LOGGER_NAME) - cloud_logger.addHandler(handler) - expected_request = {"requestUrl": "localhost"} - expected_source = {"file": "test.py"} - extra = { - "trace": "123", - "span_id": "456", - "http_request": expected_request, - "source_location": expected_source, - "resource": Resource(type="cloudiot_device", labels={}), - "labels": {"test-label": "manual"}, - } - cloud_logger.warning(LOG_MESSAGE, extra=extra) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].trace, extra["trace"]) + self.assertEqual(entries[0].span_id, extra["span_id"]) + self.assertTrue(entries[0].trace_sampled) + self.assertEqual(entries[0].http_request, expected_request) + self.assertEqual( + entries[0].labels, {**extra["labels"], "python_logger": LOGGER_NAME} + ) + self.assertEqual(entries[0].resource.type, extra["resource"].type) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].trace, extra["trace"]) - self.assertEqual(entries[0].span_id, extra["span_id"]) - self.assertEqual(entries[0].http_request, expected_request) - self.assertEqual(entries[0].labels, extra["labels"]) - self.assertEqual(entries[0].resource.type, extra["resource"].type) + def test_handlers_w_json_fields(self): + LOG_MESSAGE = "Testing with json_field extras." + LOGGER_NAME = "json_field_extras" + handler_name = self._logger_name(LOGGER_NAME) + + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=SyncTransport + ) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + extra = {"json_fields": {"hello": "world", "two": 2}} + cloud_logger.warn(LOG_MESSAGE, extra=extra) + + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + payload = entries[0].payload + self.assertEqual(payload["message"], LOG_MESSAGE) + self.assertEqual(payload["hello"], "world") + self.assertEqual(payload["two"], 2) def test_log_root_handler(self): LOG_MESSAGE = "It was the best of times." 
@@ -490,7 +594,7 @@ def test_log_root_handler(self): logging.warning(LOG_MESSAGE) entries = _list_entries(logger) - expected_payload = {"message": LOG_MESSAGE, "python_logger": "root"} + expected_payload = LOG_MESSAGE self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) @@ -725,6 +829,51 @@ def test_update_sink(self): self.assertEqual(sink.filter_, UPDATED_FILTER) self.assertEqual(sink.destination, dataset_uri) + def test_api_equality_list_logs(self): + unique_id = uuid.uuid1() + gapic_logger = Config.CLIENT.logger(f"api-list-{unique_id}") + http_logger = Config.HTTP_CLIENT.logger(f"api-list-{unique_id}") + # write logs + log_count = 5 + for i in range(log_count): + gapic_logger.log_text(f"test {i}") + + def retryable(): + max_results = 3 + gapic_generator = gapic_logger.list_entries(max_results=max_results) + http_generator = http_logger.list_entries(max_results=max_results) + # returned objects should be consistent + self.assertEqual(type(gapic_generator), type(http_generator)) + gapic_list, http_list = list(gapic_generator), list(http_generator) + # max_results should limit the number of logs returned + self.assertEqual(len(gapic_list), max_results) + self.assertEqual(len(http_list), max_results) + # returned logs should be the same + self.assertEqual(gapic_list[0].insert_id, http_list[0].insert_id) + # should return in ascending order + self.assertEqual(gapic_list[0].payload, "test 0") + # test reverse ordering + gapic_generator = gapic_logger.list_entries( + max_results=max_results, order_by=google.cloud.logging_v2.DESCENDING + ) + http_generator = http_logger.list_entries( + max_results=max_results, order_by=google.cloud.logging_v2.DESCENDING + ) + gapic_list, http_list = list(gapic_generator), list(http_generator) + self.assertEqual(len(gapic_list), max_results) + self.assertEqual(len(http_list), max_results) + # http and gapic results should be consistent + self.assertEqual(gapic_list[0].insert_id, http_list[0].insert_id) + # returned logs should be in descending order + self.assertEqual(gapic_list[0].payload, f"test {log_count-1}") + + RetryErrors( + (ServiceUnavailable, InternalServerError, AssertionError), + delay=2, + backoff=2, + max_tries=3, + )(retryable)() + class _DeleteWrapper(object): def __init__(self, publisher, topic_path): diff --git a/tests/unit/handlers/test__helpers.py b/tests/unit/handlers/test__helpers.py index d26e700e8..9946c8eb5 100644 --- a/tests/unit/handlers/test__helpers.py +++ b/tests/unit/handlers/test__helpers.py @@ -16,10 +16,10 @@ import mock -_FLASK_TRACE_ID = "flask-id" +_FLASK_TRACE_ID = "flask0id" _FLASK_SPAN_ID = "span0flask" _FLASK_HTTP_REQUEST = {"requestUrl": "https://flask.palletsprojects.com/en/1.1.x/"} -_DJANGO_TRACE_ID = "django-id" +_DJANGO_TRACE_ID = "django0id" _DJANGO_SPAN_ID = "span0django" _DJANGO_HTTP_REQUEST = {"requestUrl": "https://www.djangoproject.com/"} @@ -29,7 +29,8 @@ class Test_get_request_data_from_flask(unittest.TestCase): def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_request_data_from_flask() + http, trace, span, sampled = _helpers.get_request_data_from_flask() + return http, trace, span, sampled @staticmethod def create_app(): @@ -46,17 +47,18 @@ def index(): def test_no_context_header(self): app = self.create_app() with app.test_request_context(path="/", headers={}): - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() self.assertIsNone(trace_id) self.assertIsNone(span_id) + 
self.assertEqual(sampled, False) self.assertEqual(http_request["requestMethod"], "GET") - def test_valid_context_header(self): + def test_xcloud_header(self): flask_trace_header = "X_CLOUD_TRACE_CONTEXT" expected_trace_id = _FLASK_TRACE_ID expected_span_id = _FLASK_SPAN_ID - flask_trace_id = f"{expected_trace_id}/{expected_span_id}" + flask_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" app = self.create_app() context = app.test_request_context( @@ -64,10 +66,30 @@ def test_valid_context_header(self): ) with context: - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(trace_id, expected_trace_id) self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) + self.assertEqual(http_request["requestMethod"], "GET") + + def test_traceparent_header(self): + flask_trace_header = "TRACEPARENT" + expected_trace_id = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span_id = "00f067aa0ba902b7" + flask_trace_id = f"00-{expected_trace_id}-{expected_span_id}-01" + + app = self.create_app() + context = app.test_request_context( + path="/", headers={flask_trace_header: flask_trace_id} + ) + + with context: + http_request, trace_id, span_id, sampled = self._call_fut() + + self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): @@ -106,7 +128,8 @@ class Test_get_request_data_from_django(unittest.TestCase): def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_request_data_from_django() + http, trace, span, sampled = _helpers.get_request_data_from_django() + return http, trace, span, sampled def setUp(self): from django.conf import settings @@ -131,20 +154,21 @@ def test_no_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(http_request["requestMethod"], "GET") self.assertIsNone(trace_id) self.assertIsNone(span_id) + self.assertEqual(sampled, False) - def test_valid_context_header(self): + def test_xcloud_header(self): from django.test import RequestFactory from google.cloud.logging_v2.handlers.middleware import request django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT" expected_span_id = _DJANGO_SPAN_ID expected_trace_id = _DJANGO_TRACE_ID - django_trace_id = f"{expected_trace_id}/{expected_span_id}" + django_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" django_request = RequestFactory().get( "/", **{django_trace_header: django_trace_id} @@ -152,10 +176,31 @@ def test_valid_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() + + self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) + self.assertEqual(http_request["requestMethod"], "GET") + + def test_traceparent_header(self): + from django.test import RequestFactory + from google.cloud.logging_v2.handlers.middleware import request + + django_trace_header = "HTTP_TRACEPARENT" + expected_trace_id = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span_id = "00f067aa0ba902b7" + header = 
f"00-{expected_trace_id}-{expected_span_id}-01" + + django_request = RequestFactory().get("/", **{django_trace_header: header}) + + middleware = request.RequestMiddleware(None) + middleware.process_request(django_request) + http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(trace_id, expected_trace_id) self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): @@ -203,7 +248,8 @@ class Test_get_request_data(unittest.TestCase): def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_request_data() + http, trace, span, sampled = _helpers.get_request_data() + return http, trace, span, sampled def _helper(self, django_return, flask_return): django_patch = mock.patch( @@ -222,8 +268,13 @@ def _helper(self, django_return, flask_return): return django_mock, flask_mock, result def test_from_django(self): - django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) - flask_expected = (None, None, None) + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (None, None, None, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) self.assertEqual(output, django_expected) @@ -231,8 +282,8 @@ def test_from_django(self): flask_mock.assert_not_called() def test_from_flask(self): - django_expected = (None, None, None) - flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID) + django_expected = (None, None, None, False) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) self.assertEqual(output, flask_expected) @@ -241,8 +292,13 @@ def test_from_flask(self): flask_mock.assert_called_once_with() def test_from_django_and_flask(self): - django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) - flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID) + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) @@ -253,19 +309,19 @@ def test_from_django_and_flask(self): flask_mock.assert_not_called() def test_missing_http_request(self): - flask_expected = (None, _FLASK_TRACE_ID, _FLASK_SPAN_ID) - django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_TRACE_ID) + flask_expected = (None, _FLASK_TRACE_ID, _FLASK_SPAN_ID, True) + django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_TRACE_ID, True) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) # function only returns trace if http_request data is present - self.assertEqual(output, (None, None, None)) + self.assertEqual(output, (None, None, None, False)) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() def test_missing_trace_id(self): - flask_expected = (_FLASK_HTTP_REQUEST, None, None) - django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) + flask_expected = (_FLASK_HTTP_REQUEST, None, None, False) + django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID, True) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) # trace_id is optional @@ -275,77 +331,136 @@ def test_missing_trace_id(self): flask_mock.assert_called_once_with() def 
test_missing_both(self): - flask_expected = (None, None, None) - django_expected = (None, None, None) + flask_expected = (None, None, None, False) + django_expected = (None, None, None, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) - self.assertEqual(output, (None, None, None)) + self.assertEqual(output, (None, None, None, False)) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() def test_wo_libraries(self): output = self._call_fut() - self.assertEqual(output, (None, None, None)) + self.assertEqual(output, (None, None, None, False)) -class Test__parse_trace_span(unittest.TestCase): +class Test__parse_xcloud_trace(unittest.TestCase): @staticmethod def _call_fut(header): from google.cloud.logging_v2.handlers import _helpers - return _helpers._parse_trace_span(header) + trace, span, sampled = _helpers._parse_xcloud_trace(header) + return trace, span, sampled def test_empty_header(self): header = "" - trace_id, span_id = self._call_fut(header) - self.assertEqual(trace_id, None) - self.assertEqual(span_id, None) + trace_id, span_id, sampled = self._call_fut(header) + self.assertIsNone(trace_id) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) def test_no_span(self): header = "12345" - trace_id, span_id = self._call_fut(header) + trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, header) - self.assertEqual(span_id, None) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) def test_no_trace(self): header = "/12345" - trace_id, span_id = self._call_fut(header) - self.assertEqual(trace_id, "") + trace_id, span_id, sampled = self._call_fut(header) + self.assertIsNone(trace_id) self.assertEqual(span_id, "12345") + self.assertEqual(sampled, False) def test_with_span(self): expected_trace = "12345" expected_span = "67890" header = f"{expected_trace}/{expected_span}" - trace_id, span_id = self._call_fut(header) + trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) + self.assertEqual(sampled, False) def test_with_extra_characters(self): expected_trace = "12345" expected_span = "67890" - header = f"{expected_trace}/{expected_span};o=0" - trace_id, span_id = self._call_fut(header) + header = f"{expected_trace}/{expected_span};abc" + trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) + self.assertEqual(sampled, False) - def test_with_unicode_span(self): - """ - Spans are expected to be alphanumeric - """ + def test_with_explicit_no_sampled(self): expected_trace = "12345" - header = f"{expected_trace}/😀123" - trace_id, span_id = self._call_fut(header) + expected_span = "67890" + header = f"{expected_trace}/{expected_span};o=0" + trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) - self.assertEqual(span_id, None) + self.assertEqual(span_id, expected_span) + self.assertEqual(sampled, False) - def test_with_unicode_trace(self): - """ - Spans are expected to be alphanumeric - """ - expected_trace = "12😀345" + def test_with_sampled(self): + expected_trace = "12345" expected_span = "67890" - header = f"{expected_trace}/{expected_span}" - trace_id, span_id = self._call_fut(header) + header = f"{expected_trace}/{expected_span};o=1" + trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) +
self.assertEqual(sampled, True) + + +class Test__parse_trace_parent(unittest.TestCase): + @staticmethod + def _call_fut(header): + from google.cloud.logging_v2.handlers import _helpers + + trace, span, sampled = _helpers._parse_trace_parent(header) + return trace, span, sampled + + def test_empty_header(self): + header = "" + trace_id, span_id, sampled = self._call_fut(header) + self.assertIsNone(trace_id) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) + + def test_valid_header(self): + header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01" + trace_id, span_id, sampled = self._call_fut(header) + self.assertEqual(trace_id, "0af7651916cd43dd8448eb211c80319c") + self.assertEqual(span_id, "b7ad6b7169203331") + self.assertEqual(sampled, True) + + def test_not_sampled(self): + header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-00" + trace_id, span_id, sampled = self._call_fut(header) + self.assertEqual(trace_id, "0af7651916cd43dd8448eb211c80319c") + self.assertEqual(span_id, "b7ad6b7169203331") + self.assertEqual(sampled, False) + + def test_sampled_w_other_flags(self): + header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-09" + trace_id, span_id, sampled = self._call_fut(header) + self.assertEqual(trace_id, "0af7651916cd43dd8448eb211c80319c") + self.assertEqual(span_id, "b7ad6b7169203331") + self.assertEqual(sampled, True) + + def test_invalid_headers(self): + invalid_headers = [ + "", + "test", # not a traceparent header at all + "ff-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01", # invalid version + "00-00000000000000000000000000000000-b7ad6b7169203331-01", # invalid trace + "00-0af7651916cd43dd8448eb211c80319c-0000000000000000-01", # invalid span + "00-af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-00", # trace too short + "00-0af7651916cd43dd8448eb211c80319c-bad6b7169203331-00", # span too short + "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-0", # flags too short + "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-", # missing flags + "00-0af7651916cd43dd8448eb211c80319c-00", # missing span field + ] + for header in invalid_headers: + trace_id, span_id, sampled = self._call_fut(header) + self.assertIsNone(trace_id) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) diff --git a/tests/unit/handlers/test_app_engine.py b/tests/unit/handlers/test_app_engine.py index c726c8496..8eedfad9b 100644 --- a/tests/unit/handlers/test_app_engine.py +++ b/tests/unit/handlers/test_app_engine.py @@ -97,7 +97,7 @@ def test_emit(self): expected_trace_id = f"projects/{self.PROJECT}/traces/{trace_id}" get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(expected_http_request, trace_id, None), + return_value=(expected_http_request, trace_id, None, None), ) with get_request_patch: # library integrations mocked to return test data @@ -135,7 +135,7 @@ def test_emit_manual_field_override(self): inferred_trace_id = "trace-test" get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(inferred_http_request, inferred_trace_id, None), + return_value=(inferred_http_request, inferred_trace_id, None, None), ) with get_request_patch: # library integrations mocked to return test data @@ -180,7 +180,7 @@ def _get_gae_labels_helper(self, trace_id): get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(None, trace_id, None, None), ) client = mock.Mock(project=self.PROJECT, spec=["project"]) diff --git
a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index b7fef1b9e..bbfacf59f 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -63,6 +63,7 @@ def test_filter_record(self): "file": "testpath", "function": "test-function", } + expected_label = {"python_logger": logname} record = logging.LogRecord( logname, logging.INFO, @@ -78,18 +79,19 @@ def test_filter_record(self): self.assertTrue(success) self.assertEqual(record.msg, message) - self.assertEqual(record._msg_str, message) self.assertEqual(record._source_location, expected_location) self.assertEqual(record._source_location_str, json.dumps(expected_location)) self.assertIsNone(record._resource) self.assertIsNone(record._trace) self.assertEqual(record._trace_str, "") + self.assertFalse(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "false") self.assertIsNone(record._span_id) self.assertEqual(record._span_id_str, "") self.assertIsNone(record._http_request) self.assertEqual(record._http_request_str, "{}") - self.assertIsNone(record._labels) - self.assertEqual(record._labels_str, "{}") + self.assertEqual(record._labels, expected_label) + self.assertEqual(record._labels_str, json.dumps(expected_label)) def test_minimal_record(self): """ @@ -105,7 +107,6 @@ def test_minimal_record(self): self.assertTrue(success) self.assertIsNone(record.msg) - self.assertEqual(record._msg_str, "") self.assertIsNone(record._source_location) self.assertEqual(record._source_location_str, "{}") self.assertIsNone(record._resource) @@ -113,6 +114,8 @@ def test_minimal_record(self): self.assertEqual(record._trace_str, "") self.assertIsNone(record._span_id) self.assertEqual(record._span_id_str, "") + self.assertFalse(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "false") self.assertIsNone(record._http_request) self.assertEqual(record._http_request_str, "{}") self.assertIsNone(record._labels) @@ -132,7 +135,7 @@ def test_record_with_request(self): expected_agent = "Mozilla/5.0" expected_trace = "123" expected_span = "456" - combined_trace = f"{expected_trace}/{expected_span}" + combined_trace = f"{expected_trace}/{expected_span};o=1" expected_request = { "requestMethod": "GET", "requestUrl": expected_path, @@ -155,6 +158,47 @@ def test_record_with_request(self): self.assertEqual(record._trace_str, expected_trace) self.assertEqual(record._span_id, expected_span) self.assertEqual(record._span_id_str, expected_span) + self.assertTrue(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "true") + self.assertEqual(record._http_request, expected_request) + self.assertEqual(record._http_request_str, json.dumps(expected_request)) + + def test_record_with_traceparent_request(self): + """ + test filter adds http request data when available + """ + import logging + + filter_obj = self._make_one() + record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record.created = None + + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_trace = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span = "00f067aa0ba902b7" + combined_trace = f"00-{expected_trace}-{expected_span}-03" + expected_request = { + "requestMethod": "GET", + "requestUrl": expected_path, + "userAgent": expected_agent, + "protocol": "HTTP/1.1", + } + + app = self.create_app() + with app.test_request_context( + expected_path, + headers={"User-Agent": expected_agent, "TRACEPARENT": combined_trace}, + ): + success = 
filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record._trace, expected_trace) + self.assertEqual(record._trace_str, expected_trace) + self.assertEqual(record._span_id, expected_span) + self.assertEqual(record._span_id_str, expected_span) + self.assertTrue(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "true") self.assertEqual(record._http_request, expected_request) self.assertEqual(record._http_request_str, json.dumps(expected_request)) @@ -237,7 +281,9 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): import sys - from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_global_resource, + ) from google.cloud.logging_v2.handlers.handlers import DEFAULT_LOGGER_NAME patch = mock.patch( @@ -252,7 +298,8 @@ def test_ctor_defaults(self): self.assertIsInstance(handler.transport, _Transport) self.assertIs(handler.transport.client, client) self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME) - self.assertEqual(handler.resource, _GLOBAL_RESOURCE) + global_resource = _create_global_resource(self.PROJECT) + self.assertEqual(handler.resource, global_resource) self.assertIsNone(handler.labels) self.assertIs(handler.stream, sys.stderr) @@ -297,7 +344,31 @@ def test_emit(self): handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, message, _GLOBAL_RESOURCE, None, None, None, None, None), + ( + record, + message, + _GLOBAL_RESOURCE, + {"python_logger": logname}, + None, + None, + False, + None, + None, + ), + ) + + def test_emit_minimal(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE + ) + record = logging.LogRecord(None, logging.INFO, None, None, None, None, None) + handler.handle(record) + self.assertEqual( + handler.transport.send_called_with, + (record, None, _GLOBAL_RESOURCE, None, None, None, False, None, None,), ) def test_emit_manual_field_override(self): @@ -325,6 +396,8 @@ def test_emit_manual_field_override(self): setattr(record, "trace", expected_trace) expected_span = "456" setattr(record, "span_id", expected_span) + expected_sampled = True + setattr(record, "trace_sampled", expected_sampled) expected_http = {"reuqest_url": "manual"} setattr(record, "http_request", expected_http) expected_source = {"file": "test-file"} @@ -336,6 +409,7 @@ def test_emit_manual_field_override(self): "default_key": "default-value", "overwritten_key": "new_value", "added_key": "added_value", + "python_logger": logname, } setattr(record, "labels", added_labels) handler.handle(record) @@ -349,6 +423,7 @@ def test_emit_manual_field_override(self): expected_labels, expected_trace, expected_span, + expected_sampled, expected_http, expected_source, ), @@ -368,14 +443,127 @@ def test_emit_with_custom_formatter(self): handler.setFormatter(logFormatter) message = "test" expected_result = "logname :: INFO :: test" + logname = "logname" + expected_label = {"python_logger": logname} + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + expected_result, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + False, + None, + None, + ), + ) + + def test_emit_dict(self): + """ + Handler should support logging dictionaries + """ + from google.cloud.logging_v2.logger 
import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + message = {"x": "test"} + logname = "logname" + expected_label = {"python_logger": logname} + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + False, + None, + None, + ), + ) + + def test_emit_w_json_extras(self): + """ + User can add json_fields to the record, which should populate the payload + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + message = "message" + json_fields = {"hello": "world"} + logname = "logname" + expected_label = {"python_logger": logname} record = logging.LogRecord( - "logname", logging.INFO, None, None, message, None, None + logname, logging.INFO, None, None, message, None, None ) + setattr(record, "json_fields", json_fields) handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,), + ( + record, + {"message": "message", "hello": "world"}, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + False, + None, + None, + ), + ) + + def test_emit_with_encoded_json(self): + """ + Handler should parse json encoded as a string + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + logFormatter = logging.Formatter(fmt='{ "x" : "%(name)s" }') + handler.setFormatter(logFormatter) + logname = "logname" + expected_result = {"x": logname} + expected_label = {"python_logger": logname} + record = logging.LogRecord(logname, logging.INFO, None, None, None, None, None) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + expected_result, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + False, + None, + None, + ), ) def test_format_with_arguments(self): @@ -398,10 +586,182 @@ def test_format_with_arguments(self): self.assertEqual( handler.transport.send_called_with, - (record, expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,), + ( + record, + expected_result, + _GLOBAL_RESOURCE, + None, + None, + None, + False, + None, + None, + ), ) +class TestFormatAndParseMessage(unittest.TestCase): + def test_none(self): + """ + None messages with no special formatting should return + None after formatting + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = None + record = logging.LogRecord(None, None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, None) + + def test_none_formatted(self): + """ + None messages with formatting rules should return formatted string + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = None + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + formatter = logging.Formatter("name: %(name)s") + handler.setFormatter(formatter) + result = _format_and_parse_message(record, handler) + self.assertEqual(result, 
"name: logname") + + def test_unformatted_string(self): + """ + Unformated strings should be returned unchanged + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = '"test"' + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + def test_empty_string(self): + """ + Empty strings should be returned unchanged + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = "" + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + def test_string_formatted_with_args(self): + """ + string messages should properly apply formatting and arguments + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = "argument: %s" + arg = "test" + record = logging.LogRecord("logname", None, None, None, message, arg, None) + handler = logging.StreamHandler() + formatter = logging.Formatter("name: %(name)s :: message: %(message)s") + handler.setFormatter(formatter) + result = _format_and_parse_message(record, handler) + self.assertEqual(result, "name: logname :: message: argument: test") + + def test_dict(self): + """ + dict messages should be unchanged + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = {"a": "b"} + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + formatter = logging.Formatter("name: %(name)s") + handler.setFormatter(formatter) + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + def test_string_encoded_dict(self): + """ + dicts should be extracted from string messages + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = '{ "x": { "y" : "z" } }' + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, {"x": {"y": "z"}}) + + def test_broken_encoded_dict(self): + """ + unparseable encoded dicts should be kept as strings + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = '{ "x": { "y" : ' + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + def test_json_fields(self): + """ + record.json_fields should populate the json payload + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = "hello" + json_fields = {"key": "val"} + record = logging.LogRecord("logname", None, None, None, message, None, None) + setattr(record, "json_fields", json_fields) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, {"message": message, "key": "val"}) + + def test_empty_json_fields(self): + """ + empty jsond_field dictionaries should result in a string output + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = "hello" + record = logging.LogRecord("logname", None, None, None, message, None, 
None) + setattr(record, "json_fields", {}) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + def test_json_fields_empty_message(self): + """ + empty message fields should not be added to json_fields dictionaries + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = None + json_fields = {"key": "val"} + record = logging.LogRecord("logname", None, None, None, message, None, None) + setattr(record, "json_fields", json_fields) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, json_fields) + + def test_json_fields_with_json_message(self): + """ + if json_fields and message are both dicts, they should be combined + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = {"key_m": "val_m"} + json_fields = {"key_j": "val_j"} + record = logging.LogRecord("logname", None, None, None, message, None, None) + setattr(record, "json_fields", json_fields) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result["key_m"], message["key_m"]) + self.assertEqual(result["key_j"], json_fields["key_j"]) + + class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): from google.cloud.logging.handlers import setup_logging @@ -512,6 +872,7 @@ def send( labels=None, trace=None, span_id=None, + trace_sampled=None, http_request=None, source_location=None, ): @@ -522,6 +883,7 @@ def send( labels, trace, span_id, + trace_sampled, http_request, source_location, ) diff --git a/tests/unit/handlers/test_structured_log.py b/tests/unit/handlers/test_structured_log.py index 271a68189..5db098c29 100644 --- a/tests/unit/handlers/test_structured_log.py +++ b/tests/unit/handlers/test_structured_log.py @@ -60,18 +60,20 @@ def test_format(self): record = logging.LogRecord( logname, logging.INFO, pathname, lineno, message, None, None, func=func ) + expected_labels = {**labels, "python_logger": logname} expected_payload = { "message": message, "severity": record.levelname, "logging.googleapis.com/trace": "", "logging.googleapis.com/spanId": "", + "logging.googleapis.com/trace_sampled": False, "logging.googleapis.com/sourceLocation": { "file": pathname, "line": lineno, "function": func, }, "httpRequest": {}, - "logging.googleapis.com/labels": labels, + "logging.googleapis.com/labels": expected_labels, } handler.filter(record) result = json.loads(handler.format(record)) @@ -91,14 +93,17 @@ def test_format_minimal(self): record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) record.created = None expected_payload = { - "message": "", + "severity": "INFO", "logging.googleapis.com/trace": "", + "logging.googleapis.com/spanId": "", + "logging.googleapis.com/trace_sampled": False, "logging.googleapis.com/sourceLocation": {}, "httpRequest": {}, "logging.googleapis.com/labels": {}, } handler.filter(record) result = json.loads(handler.format(record)) + self.assertEqual(set(expected_payload.keys()), set(result.keys())) for (key, value) in expected_payload.items(): self.assertEqual( value, result[key], f"expected_payload[{key}] != result[{key}]" @@ -170,6 +175,44 @@ def test_format_with_custom_formatter(self): handler.filter(record) result = handler.format(record) self.assertIn(expected_result, result) + self.assertIn("message", result) + + def test_dict(self): + """ + Handler should format dict messages as JSON, without a separate message field
+ """ + import logging + + handler = self._make_one() + message = {"x": "test"} + expected_result = '"x": "test"' + record = logging.LogRecord( + "logname", logging.INFO, None, None, message, None, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + self.assertNotIn("message", result) + + def test_encoded_json(self): + """ + Handler should parse json encoded as a string + """ + import logging + + handler = self._make_one() + logFormatter = logging.Formatter(fmt='{ "name" : "%(name)s" }') + handler.setFormatter(logFormatter) + expected_result = '"name": "logname"' + record = logging.LogRecord( + "logname", logging.INFO, None, None, None, None, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + self.assertNotIn("message", result) def test_format_with_arguments(self): """ @@ -201,10 +244,11 @@ def test_format_with_request(self): expected_agent = "Mozilla/5.0" expected_trace = "123" expected_span = "456" - trace_header = f"{expected_trace}/{expected_span};o=0" + trace_header = f"{expected_trace}/{expected_span};o=1" expected_payload = { "logging.googleapis.com/trace": expected_trace, "logging.googleapis.com/spanId": expected_span, + "logging.googleapis.com/trace_sampled": True, "httpRequest": { "requestMethod": "GET", "requestUrl": expected_path, @@ -226,6 +270,41 @@ def test_format_with_request(self): for (key, value) in expected_payload.items(): self.assertEqual(value, result[key]) + def test_format_with_traceparent(self): + import logging + import json + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_trace = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span = "00f067aa0ba902b7" + trace_header = f"00-{expected_trace}-{expected_span}-09" + expected_payload = { + "logging.googleapis.com/trace": expected_trace, + "logging.googleapis.com/spanId": expected_span, + "logging.googleapis.com/trace_sampled": True, + "httpRequest": { + "requestMethod": "GET", + "requestUrl": expected_path, + "userAgent": expected_agent, + "protocol": "HTTP/1.1", + }, + } + + app = self.create_app() + with app.test_request_context( + expected_path, + headers={"User-Agent": expected_agent, "TRACEPARENT": trace_header}, + ): + handler.filter(record) + result = json.loads(handler.format(record)) + for (key, value) in expected_payload.items(): + self.assertEqual(value, result[key]) + def test_format_overrides(self): """ Allow users to override log fields using `logging.info("", extra={})` @@ -248,23 +327,26 @@ def test_format_overrides(self): inferred_path = "http://testserver/123" overwrite_trace = "abc" overwrite_span = "def" - inferred_trace_span = "123/456;" + inferred_trace_span = "123/456;o=1" overwrite_file = "test-file" record.http_request = {"requestUrl": overwrite_path} record.source_location = {"file": overwrite_file} record.trace = overwrite_trace record.span_id = overwrite_span + record.trace_sampled = False added_labels = {"added_key": "added_value", "overwritten_key": "new_value"} record.labels = added_labels expected_payload = { "logging.googleapis.com/trace": overwrite_trace, "logging.googleapis.com/spanId": overwrite_span, + "logging.googleapis.com/trace_sampled": False, "logging.googleapis.com/sourceLocation": {"file":
overwrite_file}, "httpRequest": {"requestUrl": overwrite_path}, "logging.googleapis.com/labels": { "default_key": "default-value", "overwritten_key": "new_value", "added_key": "added_value", + "python_logger": logname, }, } @@ -279,3 +361,26 @@ def test_format_overrides(self): result = json.loads(handler.format(record)) for (key, value) in expected_payload.items(): self.assertEqual(value, result[key]) + + def test_format_with_json_fields(self): + """ + User can add json_fields to the record, which should populate the payload + """ + import logging + import json + + handler = self._make_one() + message = "name: %s" + name_arg = "Daniel" + expected_result = "name: Daniel" + json_fields = {"hello": "world", "number": 12} + record = logging.LogRecord( + None, logging.INFO, None, None, message, name_arg, None, + ) + record.created = None + setattr(record, "json_fields", json_fields) + handler.filter(record) + result = json.loads(handler.format(record)) + self.assertEqual(result["message"], expected_result) + self.assertEqual(result["hello"], "world") + self.assertEqual(result["number"], 12) diff --git a/tests/unit/handlers/transports/test_background_thread.py b/tests/unit/handlers/transports/test_background_thread.py index 1666cd74b..f408de476 100644 --- a/tests/unit/handlers/transports/test_background_thread.py +++ b/tests/unit/handlers/transports/test_background_thread.py @@ -279,15 +279,14 @@ def test_enqueue_defaults(self): self._enqueue_record(worker, message) entry = worker._queue.get_nowait() - expected_info = {"message": message, "python_logger": "testing"} - self.assertEqual(entry["info"], expected_info) + self.assertEqual(entry["message"], message) self.assertEqual(entry["severity"], LogSeverity.INFO) self.assertIsInstance(entry["timestamp"], datetime.datetime) self.assertNotIn("resource", entry.keys()) - self.assertNotIn("labels", entry.keys()) self.assertNotIn("trace", entry.keys()) self.assertNotIn("span_id", entry.keys()) self.assertNotIn("http_request", entry.keys()) + self.assertEqual(entry["labels"], {"python_logger": "testing"}) def test_enqueue_explicit(self): import datetime @@ -313,11 +312,10 @@ def test_enqueue_explicit(self): entry = worker._queue.get_nowait() - expected_info = {"message": message, "python_logger": "testing"} - self.assertEqual(entry["info"], expected_info) + self.assertEqual(entry["message"], message) self.assertEqual(entry["severity"], LogSeverity.ERROR) self.assertIs(entry["resource"], resource) - self.assertIs(entry["labels"], labels) + self.assertEqual(entry["labels"], {**labels, "python_logger": "testing"}) self.assertIs(entry["trace"], trace) self.assertIs(entry["span_id"], span_id) self.assertIsInstance(entry["timestamp"], datetime.datetime) @@ -388,9 +386,9 @@ def test__thread_main_max_latency(self, time): worker._queue = mock.create_autospec(queue.Queue, instance=True) worker._queue.get.side_effect = [ - {"info": {"message": "1"}}, # Single record. + {"message": 1}, # Single record. queue.Empty(), # Emulate a queue.get() timeout. - {"info": {"message": "1"}}, # Second record. + {"message": "2"}, # Second record. background_thread._WORKER_TERMINATOR, # Stop the thread. queue.Empty(), # Emulate a queue.get() timeout. 
] @@ -479,9 +477,9 @@ def __init__(self): self.commit_called = False self.commit_count = None - def log_struct( + def log( self, - info, + message, severity=logging.INFO, resource=None, labels=None, @@ -495,8 +493,8 @@ def log_struct( assert resource is None resource = _GLOBAL_RESOURCE - self.log_struct_called_with = (info, severity, resource, labels, trace, span_id) - self.entries.append(info) + self.log_called_with = (message, severity, resource, labels, trace, span_id) + self.entries.append(message) def commit(self): self.commit_called = True diff --git a/tests/unit/handlers/transports/test_sync.py b/tests/unit/handlers/transports/test_sync.py index 9f0642757..cc8ffe284 100644 --- a/tests/unit/handlers/transports/test_sync.py +++ b/tests/unit/handlers/transports/test_sync.py @@ -41,26 +41,51 @@ def test_send(self): client = _Client(self.PROJECT) - stackdriver_logger_name = "python" + client_name = "python" python_logger_name = "mylogger" - transport = self._make_one(client, stackdriver_logger_name) + transport = self._make_one(client, client_name) message = "hello world" record = logging.LogRecord( python_logger_name, logging.INFO, None, None, message, None, None ) transport.send(record, message, resource=_GLOBAL_RESOURCE) - EXPECTED_STRUCT = {"message": message, "python_logger": python_logger_name} EXPECTED_SENT = ( - EXPECTED_STRUCT, + message, LogSeverity.INFO, _GLOBAL_RESOURCE, + {"python_logger": python_logger_name}, + None, + None, None, + ) + self.assertEqual(transport.logger.log_called_with, EXPECTED_SENT) + + def test_send_struct(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2._helpers import LogSeverity + + client = _Client(self.PROJECT) + + client_name = "python" + python_logger_name = "mylogger" + transport = self._make_one(client, client_name) + message = {"message": "hello world", "extra": "test"} + record = logging.LogRecord( + python_logger_name, logging.INFO, None, None, message, None, None + ) + + transport.send(record, message, resource=_GLOBAL_RESOURCE) + EXPECTED_SENT = ( + message, + LogSeverity.INFO, + _GLOBAL_RESOURCE, + {"python_logger": python_logger_name}, None, None, None, ) - self.assertEqual(transport.logger.log_struct_called_with, EXPECTED_SENT) + self.assertEqual(transport.logger.log_called_with, EXPECTED_SENT) class _Logger(object): @@ -69,7 +94,7 @@ class _Logger(object): def __init__(self, name): self.name = name - def log_struct( + def log( self, message, severity=None, @@ -79,7 +104,7 @@ def log_struct( span_id=None, http_request=None, ): - self.log_struct_called_with = ( + self.log_called_with = ( message, severity, resource, diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py index 5da1c7122..d8c4bf57e 100644 --- a/tests/unit/test__gapic.py +++ b/tests/unit/test__gapic.py @@ -32,7 +32,7 @@ FILTER = "logName:syslog AND severity>=ERROR" -class Test_LoggingAPI(object): +class Test_LoggingAPI(unittest.TestCase): LOG_NAME = "log_name" LOG_PATH = f"projects/{PROJECT}/logs/{LOG_NAME}" @@ -107,6 +107,49 @@ def test_list_entries_with_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_logs_with_max_results(self): + client = self.make_logging_api() + log_entry_msg = LogEntryPB(log_name=self.LOG_PATH, text_payload="text") + + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse( + entries=[log_entry_msg, log_entry_msg] + ) + result = 
client.list_entries( + [PROJECT_PATH], + filter_=FILTER, + order_by=google.cloud.logging.ASCENDING, + page_size=42, + page_token="token", + max_results=1, + ) + + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_logs_negative_max_results(self): + client = self.make_logging_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse(entries=[]) + result = client.list_entries( + [PROJECT_PATH], + filter_=FILTER, + order_by=google.cloud.logging.ASCENDING, + page_size=42, + page_token="token", + max_results=-1, + ) + # Check the request + list(result) + call.assert_called_once() + def test_write_entries_single(self): client = self.make_logging_api() @@ -141,7 +184,7 @@ def test_logger_delete(self): assert call.call_args.args[0].log_name == self.LOG_PATH -class Test_SinksAPI(object): +class Test_SinksAPI(unittest.TestCase): SINK_NAME = "sink_name" PARENT_PATH = f"projects/{PROJECT}" SINK_PATH = f"projects/{PROJECT}/sinks/{SINK_NAME}" @@ -208,6 +251,40 @@ def test_list_sinks_with_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_sinks_with_max_results(self): + client = self.make_sinks_api() + sink_msg = LogSink( + name=self.SINK_NAME, destination=self.DESTINATION_URI, filter=FILTER + ) + + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse( + sinks=[sink_msg, sink_msg] + ) + result = client.list_sinks( + self.PARENT_PATH, page_size=42, page_token="token", max_results=1 + ) + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_sinks_negative_max_results(self): + client = self.make_sinks_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse(sinks=[]) + result = client.list_sinks( + self.PARENT_PATH, page_size=42, page_token="token", max_results=-1 + ) + # Check the request + list(result) + call.assert_called_once() + def test_sink_create(self): client = self.make_sinks_api() with mock.patch.object( @@ -315,7 +392,7 @@ def test_sink_delete(self): assert request.sink_name == self.SINK_PATH -class Test_MetricsAPI(object): +class Test_MetricsAPI(unittest.TestCase): METRIC_NAME = "metric_name" METRIC_PATH = f"projects/{PROJECT}/metrics/{METRIC_NAME}" DESCRIPTION = "Description" @@ -379,6 +456,39 @@ def test_list_metrics_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_metrics_with_max_results(self): + client = self.make_metrics_api() + metric = logging_v2.types.LogMetric( + name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER + ) + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse( + metrics=[metric, metric] + ) + result = client.list_metrics( + PROJECT, page_size=42, page_token="token", max_results=1 + ) + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_metrics_negative_max_results(self): + client = self.make_metrics_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), 
"__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse(metrics=[]) + result = client.list_metrics( + PROJECT, page_size=42, page_token="token", max_results=-1 + ) + # Check the request + list(result) + call.assert_called_once() + def test_metric_create(self): client = self.make_metrics_api() diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index e927f6c15..2154b6f57 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -129,16 +129,20 @@ def _make_timestamp(): NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) return NOW, _datetime_to_rfc3339_w_nanos(NOW) - def test_list_entries_no_paging(self): + def test_list_entries_with_limits(self): from google.cloud.logging import Client from google.cloud.logging import TextEntry from google.cloud.logging import Logger NOW, TIMESTAMP = self._make_timestamp() IID = "IID" + IID1 = "IID1" + IID2 = "IID2" TEXT = "TEXT" SENT = {"resourceNames": [self.PROJECT_PATH]} - TOKEN = "TOKEN" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" RETURNED = { "entries": [ { @@ -147,24 +151,42 @@ def test_list_entries_no_paging(self): "resource": {"type": "global"}, "timestamp": TIMESTAMP, "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}", - } + }, + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, ], - "nextPageToken": TOKEN, } client = Client( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) + # try with negative max_results + with self.assertRaises(ValueError): + client._connection = _Connection(RETURNED) + api = self._make_one(client) + empty = list(api.list_entries([self.PROJECT_PATH], max_results=-1)) + # try with max_results of 0 client._connection = _Connection(RETURNED) api = self._make_one(client) - - iterator = api.list_entries([self.PROJECT_PATH]) - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the entries returned. + empty = list(api.list_entries([self.PROJECT_PATH], max_results=0)) + self.assertEqual(empty, []) + # try with single result + client._connection = _Connection(RETURNED) + api = self._make_one(client) + iterator = api.list_entries([self.PROJECT_PATH], max_results=1) + entries = list(iterator) + # check the entries returned. self.assertEqual(len(entries), 1) entry = entries[0] self.assertIsInstance(entry, TextEntry) @@ -183,7 +205,7 @@ def test_list_entries_no_paging(self): called_with, {"method": "POST", "path": expected_path, "data": SENT} ) - def test_list_entries_w_paging(self): + def test_list_entries(self): from google.cloud.logging import DESCENDING from google.cloud.logging import Client from google.cloud.logging import Logger @@ -241,11 +263,8 @@ def test_list_entries_w_paging(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token - # First check the token. - self.assertIsNone(token) - # Then check the entries returned. + # Check the entries returned. 
self.assertEqual(len(entries), 2) entry1 = entries[0] self.assertIsInstance(entry1, StructEntry) @@ -361,32 +380,38 @@ def test_ctor(self): self.assertIs(api._client, client) self.assertEqual(api.api_request, connection.api_request) - def test_list_sinks_no_paging(self): + def test_list_sinks_max_returned(self): from google.cloud.logging import Sink - TOKEN = "TOKEN" RETURNED = { "sinks": [ { "name": self.SINK_PATH, "filter": self.FILTER, "destination": self.DESTINATION_URI, - } + }, + {"name": "test", "filter": "test", "destination": "test"}, ], - "nextPageToken": TOKEN, } + # try with negative max_results + with self.assertRaises(ValueError): + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_sinks(self.PROJECT_PATH, max_results=-1)) + # try with max_results of 0 conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - - iterator = api.list_sinks(self.PROJECT_PATH) - page = next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. + empty = list(api.list_sinks(self.PROJECT_PATH, max_results=0)) + self.assertEqual(empty, []) + # try with single result + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + iterator = api.list_sinks(self.PROJECT_PATH, max_results=1) + sinks = list(iterator) + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -401,7 +426,7 @@ def test_list_sinks_no_paging(self): called_with, {"method": "GET", "path": path, "query_params": {}} ) - def test_list_sinks_w_paging(self): + def test_list_sinks(self): from google.cloud.logging import Sink TOKEN = "TOKEN" @@ -423,11 +448,7 @@ def test_list_sinks_w_paging(self): self.PROJECT_PATH, page_size=PAGE_SIZE, page_token=TOKEN ) sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -632,26 +653,35 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_list_metrics_no_paging(self): + def test_list_metrics_max_results(self): from google.cloud.logging import Metric - TOKEN = "TOKEN" RETURNED = { - "metrics": [{"name": self.METRIC_PATH, "filter": self.FILTER}], - "nextPageToken": TOKEN, + "metrics": [ + {"name": self.METRIC_PATH, "filter": self.FILTER}, + {"name": "test", "filter": "test"}, + ], } + # try with negative max_results + with self.assertRaises(ValueError): + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_metrics(self.PROJECT, max_results=-1)) + # try with max_results of 0 + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_metrics(self.PROJECT, max_results=0)) + self.assertEqual(empty, []) + # try with single result conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - iterator = api.list_metrics(self.PROJECT) - page = next(iterator.pages) - metrics = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the metrics returned. + iterator = api.list_metrics(self.PROJECT, max_results=1) + metrics = list(iterator) + # Check the metrics returned. 
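Sinks and metrics get the same `max_results` handling. Assuming the client-level methods forward the argument the way these API-level tests exercise it, usage would look like this:

```py
from google.cloud import logging_v2

client = logging_v2.Client()
# Caps the total number of items yielded, independent of page size.
for sink in client.list_sinks(max_results=3):
    print(sink.name)
for metric in client.list_metrics(max_results=3):
    print(metric.name)
```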
         self.assertEqual(len(metrics), 1)
         metric = metrics[0]
         self.assertIsInstance(metric, Metric)
@@ -666,7 +696,7 @@ def test_list_metrics_no_paging(self):
             called_with, {"method": "GET", "path": path, "query_params": {}}
         )

-    def test_list_metrics_w_paging(self):
+    def test_list_metrics(self):
         from google.cloud.logging import Metric

         TOKEN = "TOKEN"
@@ -678,11 +708,7 @@
         iterator = api.list_metrics(self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN)
         metrics = list(iterator)
-        token = iterator.next_page_token
-
-        # First check the token.
-        self.assertIsNone(token)
-        # Then check the metrics returned.
+        # Check the metrics returned.
         self.assertEqual(len(metrics), 1)
         metric = metrics[0]
         self.assertIsInstance(metric, Metric)
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index 9dbfa87fd..1a31e9c0c 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -239,21 +239,26 @@ def make_api(client_obj):

     def test_logger(self):
         from google.cloud.logging import Logger
+        from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE

         creds = _make_credentials()
         client = self._make_one(project=self.PROJECT, credentials=creds)
-        logger = client.logger(self.LOGGER_NAME)
+        labels = {"test": "true"}
+        logger = client.logger(
+            self.LOGGER_NAME, resource=_GLOBAL_RESOURCE, labels=labels
+        )
         self.assertIsInstance(logger, Logger)
         self.assertEqual(logger.name, self.LOGGER_NAME)
         self.assertIs(logger.client, client)
         self.assertEqual(logger.project, self.PROJECT)
+        self.assertEqual(logger.default_resource, _GLOBAL_RESOURCE)
+        self.assertEqual(logger.labels, labels)

     def test_list_entries_defaults(self):
         from google.cloud.logging import TextEntry

         IID = "IID"
         TEXT = "TEXT"
-        TOKEN = "TOKEN"
         ENTRIES = [
             {
                 "textPayload": TEXT,
@@ -266,13 +271,11 @@
         client = self._make_one(
             project=self.PROJECT, credentials=creds, _use_grpc=False
         )
-        returned = {"entries": ENTRIES, "nextPageToken": TOKEN}
+        returned = {"entries": ENTRIES}
         client._connection = _Connection(returned)

         iterator = client.list_entries()
-        page = next(iterator.pages)
-        entries = list(page)
-        token = iterator.next_page_token
+        entries = list(iterator)

         self.assertEqual(len(entries), 1)
         entry = entries[0]
@@ -283,7 +286,6 @@
         self.assertEqual(logger.name, self.LOGGER_NAME)
         self.assertIs(logger.client, client)
         self.assertEqual(logger.project, self.PROJECT)
-        self.assertEqual(token, TOKEN)

         # check call payload
         call_payload_no_filter = deepcopy(client._connection._called_with)
@@ -336,6 +338,12 @@ def test_list_entries_explicit(self):
                 "resource": {"type": "global"},
                 "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
             },
+            {
+                "protoPayload": "ignored",
+                "insertId": "ignored",
+                "resource": {"type": "global"},
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+            },
         ]
         client = self._make_one(
             project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
@@ -349,13 +357,10 @@
             order_by=DESCENDING,
             page_size=PAGE_SIZE,
             page_token=TOKEN,
+            max_results=2,
         )
         entries = list(iterator)
-        token = iterator.next_page_token
-
-        # First, check the token.
-        self.assertIsNone(token)
-        # Then check the entries.
+        # Check the entries.
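+        # max_results=2 should exclude the third ("ignored") entry above.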
         self.assertEqual(len(entries), 2)
         entry = entries[0]
         self.assertIsInstance(entry, StructEntry)
@@ -417,7 +422,6 @@ def test_list_entries_explicit_timestamp(self):
         PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"}
         PROTO_PAYLOAD = PAYLOAD.copy()
         PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example"
-        TOKEN = "TOKEN"
         PAGE_SIZE = 42
         ENTRIES = [
             {
@@ -444,14 +448,9 @@
             filter_=INPUT_FILTER,
             order_by=DESCENDING,
             page_size=PAGE_SIZE,
-            page_token=TOKEN,
         )
         entries = list(iterator)
-        token = iterator.next_page_token
-
-        # First, check the token.
-        self.assertIsNone(token)
-        # Then check the entries.
+        # Check the entries.
         self.assertEqual(len(entries), 2)
         entry = entries[0]
         self.assertIsInstance(entry, StructEntry)
@@ -485,7 +484,6 @@
                     "filter": INPUT_FILTER,
                     "orderBy": DESCENDING,
                     "pageSize": PAGE_SIZE,
-                    "pageToken": TOKEN,
                     "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
                 },
             },
@@ -523,7 +521,6 @@ def test_list_sinks_no_paging(self):
         from google.cloud.logging import Sink

         PROJECT = "PROJECT"
-        TOKEN = "TOKEN"
         SINK_NAME = "sink_name"
         FILTER = "logName:syslog AND severity>=ERROR"
         SINKS = [
@@ -532,17 +529,13 @@
         client = self._make_one(
             project=PROJECT, credentials=_make_credentials(), _use_grpc=False
         )
-        returned = {"sinks": SINKS, "nextPageToken": TOKEN}
+        returned = {"sinks": SINKS}
         client._connection = _Connection(returned)

         iterator = client.list_sinks()
-        page = next(iterator.pages)
-        sinks = list(page)
-        token = iterator.next_page_token
+        sinks = list(iterator)

-        # First check the token.
-        self.assertEqual(token, TOKEN)
-        # Then check the sinks returned.
+        # Check the sinks returned.
         self.assertEqual(len(sinks), 1)
         sink = sinks[0]
         self.assertIsInstance(sink, Sink)
@@ -567,7 +560,8 @@ def test_list_sinks_with_paging(self):
         TOKEN = "TOKEN"
         PAGE_SIZE = 42
         SINKS = [
-            {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI}
+            {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI},
+            {"name": "test", "filter": "test", "destination": "test"},
         ]
         client = self._make_one(
             project=PROJECT, credentials=_make_credentials(), _use_grpc=False
@@ -575,13 +569,11 @@
         returned = {"sinks": SINKS}
         client._connection = _Connection(returned)

-        iterator = client.list_sinks(page_size=PAGE_SIZE, page_token=TOKEN)
+        iterator = client.list_sinks(
+            page_size=PAGE_SIZE, page_token=TOKEN, max_results=1
+        )
         sinks = list(iterator)
-        token = iterator.next_page_token
-
-        # First check the token.
-        self.assertIsNone(token)
-        # Then check the sinks returned.
+        # Check the sinks returned.
         self.assertEqual(len(sinks), 1)
         sink = sinks[0]
         self.assertIsInstance(sink, Sink)
@@ -672,29 +664,27 @@ def test_list_metrics_with_paging(self):
         from google.cloud.logging import Metric

         token = "TOKEN"
-        next_token = "T00KEN"
         page_size = 42
         metrics = [
             {
                 "name": self.METRIC_NAME,
                 "filter": self.FILTER,
                 "description": self.DESCRIPTION,
-            }
+            },
+            {"name": "test", "filter": "test", "description": "test"},
         ]
         client = self._make_one(
             project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
         )
-        returned = {"metrics": metrics, "nextPageToken": next_token}
+        returned = {"metrics": metrics}
         client._connection = _Connection(returned)

         # Execute request.
-        iterator = client.list_metrics(page_size=page_size, page_token=token)
-        page = next(iterator.pages)
-        metrics = list(page)
-
-        # First check the token.
-        self.assertEqual(iterator.next_page_token, next_token)
-        # Then check the metrics returned.
+        iterator = client.list_metrics(
+            page_size=page_size, page_token=token, max_results=1
+        )
+        metrics = list(iterator)
+        # Check the metrics returned.
         self.assertEqual(len(metrics), 1)
         metric = metrics[0]
         self.assertIsInstance(metric, Metric)
@@ -719,7 +709,7 @@ def test_get_default_handler_app_engine(self):
         import os
         from google.cloud._testing import _Monkey
         from google.cloud.logging_v2.handlers._monitored_resources import _GAE_ENV_VARS
-        from google.cloud.logging.handlers import AppEngineHandler
+        from google.cloud.logging.handlers import CloudLoggingHandler

         credentials = _make_credentials()
         client = self._make_one(
@@ -733,10 +723,10 @@

         handler.transport.worker.stop()

-        self.assertIsInstance(handler, AppEngineHandler)
+        self.assertIsInstance(handler, CloudLoggingHandler)

     def test_get_default_handler_container_engine(self):
-        from google.cloud.logging.handlers import ContainerEngineHandler
+        from google.cloud.logging.handlers import StructuredLogHandler

         credentials = _make_credentials()
         client = self._make_one(
@@ -751,7 +741,7 @@
         with patch:
             handler = client.get_default_handler()

-        self.assertIsInstance(handler, ContainerEngineHandler)
+        self.assertIsInstance(handler, StructuredLogHandler)

     def test_get_default_handler_general(self):
         import io
diff --git a/tests/unit/test_entries.py b/tests/unit/test_entries.py
index b8795b8ce..6f3af684f 100644
--- a/tests/unit/test_entries.py
+++ b/tests/unit/test_entries.py
@@ -18,10 +18,10 @@


 class Test_logger_name_from_path(unittest.TestCase):
-    def _call_fut(self, path):
+    def _call_fut(self, path, project=None):
         from google.cloud.logging_v2.entries import logger_name_from_path

-        return logger_name_from_path(path)
+        return logger_name_from_path(path, project)

     def test_w_simple_name(self):
         LOGGER_NAME = "LOGGER_NAME"
@@ -37,6 +37,30 @@ def test_w_name_w_all_extras(self):
         logger_name = self._call_fut(PATH)
         self.assertEqual(logger_name, LOGGER_NAME)

+    def test_w_wrong_project(self):
+        LOGGER_NAME = "LOGGER_NAME"
+        IN_PROJECT = "in-project"
+        PATH_PROJECT = "path-project"
+        PATH = "projects/%s/logs/%s" % (PATH_PROJECT, LOGGER_NAME)
+        with self.assertRaises(ValueError):
+            self._call_fut(PATH, IN_PROJECT)
+
+    def test_invalid_inputs(self):
+        invalid_list = [
+            "",
+            "abc/123/logs/456",
+            "projects//logs/",
+            "projects/123/logs",
+            "projects/123logs/",
+            "projects123/logs",
+            "project/123",
+            "projects123logs456",
+            "/logs/123",
+        ]
+        for path in invalid_list:
+            with self.assertRaises(ValueError):
+                self._call_fut(path)
+

 class Test__int_or_none(unittest.TestCase):
     def _call_fut(self, value):
@@ -315,6 +339,62 @@ def test_from_api_repr_w_loggers_w_logger_match(self):
         self.assertEqual(entry.operation, OPERATION)
         self.assertIsNone(entry.payload)

+    def test_from_api_repr_w_folder_path(self):
+        from datetime import datetime
+        from datetime import timedelta
+        from google.cloud._helpers import UTC
+
+        client = _Client(self.PROJECT)
+        IID = "IID"
+        NOW = datetime.utcnow().replace(tzinfo=UTC)
+        LATER = NOW + timedelta(seconds=1)
+        TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
+        RECEIVED = _datetime_to_rfc3339_w_nanos(LATER)
+        LOG_NAME = "folders/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
+        LABELS = {"foo": "bar", "baz": "qux"}
+        TRACE = "12345678-1234-5678-1234-567812345678"
+        SPANID = "000000000000004a"
+        FILE = "my_file.py"
+        LINE_NO = 123
+        FUNCTION = "my_function"
+        SOURCE_LOCATION = {"file": FILE, "line": str(LINE_NO), "function": FUNCTION}
+        OP_ID = "OP_ID"
+        PRODUCER = "PRODUCER"
+        OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False}
+        API_REPR = {
+            "logName": LOG_NAME,
+            "insertId": IID,
+            "timestamp": TIMESTAMP,
+            "receiveTimestamp": RECEIVED,
+            "labels": LABELS,
+            "trace": TRACE,
+            "spanId": SPANID,
+            "traceSampled": True,
+            "sourceLocation": SOURCE_LOCATION,
+            "operation": OPERATION,
+        }
+        klass = self._get_target_class()
+
+        entry = klass.from_api_repr(API_REPR, client)
+
+        self.assertEqual(entry.log_name, LOG_NAME)
+        self.assertIsNone(entry.logger)
+        self.assertEqual(entry.insert_id, IID)
+        self.assertEqual(entry.timestamp, NOW)
+        self.assertEqual(entry.received_timestamp, LATER)
+        self.assertEqual(entry.labels, LABELS)
+        self.assertEqual(entry.trace, TRACE)
+        self.assertEqual(entry.span_id, SPANID)
+        self.assertTrue(entry.trace_sampled)
+
+        source_location = entry.source_location
+        self.assertEqual(source_location["file"], FILE)
+        self.assertEqual(source_location["line"], LINE_NO)
+        self.assertEqual(source_location["function"], FUNCTION)
+
+        self.assertEqual(entry.operation, OPERATION)
+        self.assertIsNone(entry.payload)
+
     def test_to_api_repr_w_source_location_no_line(self):
         from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py
index 5ad486178..1eae1cda6 100644
--- a/tests/unit/test_logger.py
+++ b/tests/unit/test_logger.py
@@ -99,11 +99,15 @@ def test_batch_w_alternate_client(self):
         self.assertIs(batch.client, client2)

     def test_log_empty_defaults_w_default_labels(self):
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
         DEFAULT_LABELS = {"foo": "spam"}
         ENTRIES = [
             {
                 "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
-                "resource": {"type": "global", "labels": {}},
+                "resource": detect_resource(self.PROJECT)._to_dict(),
                 "labels": DEFAULT_LABELS,
             }
         ]
@@ -170,7 +174,11 @@ def test_log_empty_w_explicit(self):
         self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))

     def test_log_text_defaults(self):
-        RESOURCE = {"type": "global", "labels": {}}
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
+        RESOURCE = detect_resource(self.PROJECT)._to_dict()
         TEXT = "TEXT"
         ENTRIES = [
             {
@@ -188,8 +196,12 @@
         self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))

     def test_log_text_w_unicode_and_default_labels(self):
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
         TEXT = "TEXT"
-        RESOURCE = {"type": "global", "labels": {}}
+        RESOURCE = detect_resource(self.PROJECT)._to_dict()
         DEFAULT_LABELS = {"foo": "spam"}
         ENTRIES = [
             {
@@ -265,8 +277,12 @@ def test_log_text_explicit(self):
         self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))

     def test_log_struct_defaults(self):
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
         STRUCT = {"message": "MESSAGE", "weather": "cloudy"}
-        RESOURCE = {"type": "global", "labels": {}}
+        RESOURCE = detect_resource(self.PROJECT)._to_dict()
         ENTRIES = [
             {
                 "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
@@ -283,8 +299,12 @@
         self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))

     def test_log_struct_w_default_labels(self):
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
         STRUCT = {"message": "MESSAGE", "weather": "cloudy"}
-        RESOURCE = {"type": "global", "labels": {}}
+        RESOURCE = detect_resource(self.PROJECT)._to_dict()
         DEFAULT_LABELS = {"foo": "spam"}
         ENTRIES = [
             {
@@ -359,7 +379,111 @@ def test_log_struct_w_explicit(self):

         self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))

+    def test_log_struct_inference(self):
+        """
+        LogEntry fields in _STRUCT_EXTRACTABLE_FIELDS should be inferred from
+        the payload data if not passed as a parameter
+        """
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
+        STRUCT = {
+            "message": "System test: test_log_struct_logentry_data",
+            "severity": "warning",
+            "trace": "123",
+            "span_id": "456",
+        }
+        RESOURCE = detect_resource(self.PROJECT)._to_dict()
+        ENTRIES = [
+            {
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                "jsonPayload": STRUCT,
+                "severity": "WARNING",
+                "trace": "123",
+                "spanId": "456",
+                "resource": RESOURCE,
+            }
+        ]
+        client = _Client(self.PROJECT)
+        api = client.logging_api = _DummyLoggingAPI()
+        logger = self._make_one(self.LOGGER_NAME, client=client)
+
+        logger.log_struct(STRUCT, resource=RESOURCE)
+
+        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
+
+    def test_log_w_dict_resource(self):
+        """
+        Users should be able to input a dictionary with type and labels instead
+        of a Resource object
+        """
+        import pytest
+
+        MESSAGE = "hello world"
+        client = _Client(self.PROJECT)
+        api = client.logging_api = _DummyLoggingAPI()
+        logger = self._make_one(self.LOGGER_NAME, client=client)
+        broken_resource_dicts = [{}, {"type": ""}, {"labels": ""}]
+        for resource in broken_resource_dicts:
+            # ensure bad inputs result in a helpful error
+            with pytest.raises(TypeError):
+                logger.log(MESSAGE, resource=resource)
+        # ensure well-formed dict is converted to a resource
+        resource = {"type": "gae_app", "labels": []}
+        ENTRIES = [
+            {
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                "textPayload": MESSAGE,
+                "resource": resource,
+            }
+        ]
+        logger.log(MESSAGE, resource=resource)
+        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
+
+    def test_log_lowercase_severity(self):
+        """
+        lower case severity strings should be accepted
+        """
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
+        for lower_severity in [
+            "default",
+            "debug",
+            "info",
+            "notice",
+            "warning",
+            "error",
+            "critical",
+            "alert",
+            "emergency",
+        ]:
+            MESSAGE = "hello world"
+            RESOURCE = detect_resource(self.PROJECT)._to_dict()
+            ENTRIES = [
+                {
+                    "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                    "textPayload": MESSAGE,
+                    "resource": RESOURCE,
+                    "severity": lower_severity.upper(),
+                }
+            ]
+            client = _Client(self.PROJECT)
+            api = client.logging_api = _DummyLoggingAPI()
+            logger = self._make_one(self.LOGGER_NAME, client=client)
+
+            logger.log(MESSAGE, severity=lower_severity)
+
+            self.assertEqual(
+                api._write_entries_called_with, (ENTRIES, None, None, None)
+            )
+
     def test_log_proto_defaults(self):
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
         import json
         from google.protobuf.json_format import MessageToJson
         from google.protobuf.struct_pb2 import Struct, Value
@@ -369,7 +493,7 @@ def test_log_proto_defaults(self):
             {
                 "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
                 "protoPayload": json.loads(MessageToJson(message)),
-                "resource": {"type": "global", "labels": {}},
+                "resource": detect_resource(self.PROJECT)._to_dict(),
             }
         ]
         client = _Client(self.PROJECT)
@@ -381,6 +505,9 @@
         self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))

     def test_log_proto_w_default_labels(self):
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
         import json
         from google.protobuf.json_format import MessageToJson
         from google.protobuf.struct_pb2 import Struct, Value
@@ -391,7 +518,7 @@ def test_log_proto_w_default_labels(self):
             {
                 "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
                 "protoPayload": json.loads(MessageToJson(message)),
-                "resource": {"type": "global", "labels": {}},
+                "resource": detect_resource(self.PROJECT)._to_dict(),
                 "labels": DEFAULT_LABELS,
             }
         ]
@@ -464,6 +591,93 @@ def test_log_proto_w_explicit(self):

         self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))

+    def test_log_inference_empty(self):
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
+        DEFAULT_LABELS = {"foo": "spam"}
+        ENTRIES = [
+            {
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                "resource": detect_resource(self.PROJECT)._to_dict(),
+                "labels": DEFAULT_LABELS,
+            }
+        ]
+        client = _Client(self.PROJECT)
+        api = client.logging_api = _DummyLoggingAPI()
+        logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS)
+
+        logger.log()
+
+        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
+
+    def test_log_inference_text(self):
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
+        TEXT = "TEXT"
+        ENTRIES = [
+            {
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                "textPayload": TEXT,
+                "resource": detect_resource(self.PROJECT)._to_dict(),
+            }
+        ]
+        client = _Client(self.PROJECT)
+        api = client.logging_api = _DummyLoggingAPI()
+        logger = self._make_one(self.LOGGER_NAME, client=client)
+
+        logger.log(TEXT)
+
+        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
+
+    def test_log_inference_struct(self):
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
+        STRUCT = {"message": "MESSAGE", "weather": "cloudy"}
+        ENTRIES = [
+            {
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                "jsonPayload": STRUCT,
+                "resource": detect_resource(self.PROJECT)._to_dict(),
+            }
+        ]
+        client = _Client(self.PROJECT)
+        api = client.logging_api = _DummyLoggingAPI()
+        logger = self._make_one(self.LOGGER_NAME, client=client)
+
+        logger.log(STRUCT)
+
+        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
+
+    def test_log_inference_proto(self):
+        import json
+        from google.protobuf.json_format import MessageToJson
+        from google.protobuf.struct_pb2 import Struct, Value
+        from google.cloud.logging_v2.handlers._monitored_resources import (
+            detect_resource,
+        )
+
+        message = Struct(fields={"foo": Value(bool_value=True)})
+        ENTRIES = [
+            {
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                "protoPayload": json.loads(MessageToJson(message)),
+                "resource": detect_resource(self.PROJECT)._to_dict(),
+            }
+        ]
+        client = _Client(self.PROJECT)
+        api = client.logging_api = _DummyLoggingAPI()
+        logger = self._make_one(self.LOGGER_NAME, client=client)
+
+        logger.log(message)
+
+        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
+
     def test_delete_w_bound_client(self):
         client = _Client(project=self.PROJECT)
         api = client.logging_api = _DummyLoggingAPI()
@@ -492,23 +706,18 @@ def test_delete_w_alternate_client(self):

     def test_list_entries_defaults(self):
         from google.cloud.logging import Client

-        TOKEN = "TOKEN"
-
         client = Client(
             project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
         )
-        returned = {"nextPageToken": TOKEN}
+        returned = {}
         client._connection = _Connection(returned)

         logger = self._make_one(self.LOGGER_NAME, client=client)

         iterator = logger.list_entries()
-        page = next(iterator.pages)
-        entries = list(page)
-        token = iterator.next_page_token
+        entries = list(iterator)

         self.assertEqual(len(entries), 0)
-        self.assertEqual(token, TOKEN)

         LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)

         # check call payload
@@ -555,10 +764,8 @@ def test_list_entries_explicit(self):
             page_token=TOKEN,
         )
         entries = list(iterator)
-        token = iterator.next_page_token

         self.assertEqual(len(entries), 0)
-        self.assertIsNone(token)
         # self.assertEqual(client._listed, LISTED)
         # check call payload
         call_payload_no_filter = deepcopy(client._connection._called_with)
@@ -615,10 +822,8 @@ def test_list_entries_explicit_timestamp(self):
             page_token=TOKEN,
         )
         entries = list(iterator)
-        token = iterator.next_page_token

         self.assertEqual(len(entries), 0)
-        self.assertIsNone(token)
         # self.assertEqual(client._listed, LISTED)
         # check call payload
         LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,)
@@ -638,6 +843,130 @@
             },
         )

+    def test_list_entries_limit(self):
+        from google.cloud.logging import DESCENDING
+        from google.cloud.logging import ProtobufEntry
+        from google.cloud.logging import StructEntry
+        from google.cloud.logging import Logger
+        from google.cloud.logging import Client
+
+        PROJECT1 = "PROJECT1"
+        PROJECT2 = "PROJECT2"
+        INPUT_FILTER = "logName:LOGNAME"
+        IID1 = "IID1"
+        IID2 = "IID2"
+        PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"}
+        PROTO_PAYLOAD = PAYLOAD.copy()
+        PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example"
+        TOKEN = "TOKEN"
+        PAGE_SIZE = 42
+        ENTRIES = [
+            {
+                "jsonPayload": PAYLOAD,
+                "insertId": IID1,
+                "resource": {"type": "global"},
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+            },
+            {
+                "protoPayload": PROTO_PAYLOAD,
+                "insertId": IID2,
+                "resource": {"type": "global"},
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+            },
+            {
+                "protoPayload": "ignored",
+                "insertId": "ignored",
+                "resource": {"type": "global"},
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+            },
+        ]
+        client = Client(
+            project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
+        )
+        returned = {"entries": ENTRIES}
+        client._connection = _Connection(returned)
+        logger = self._make_one(self.LOGGER_NAME, client=client)
+
+        iterator = logger.list_entries(
+            resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
+            filter_=INPUT_FILTER,
+            order_by=DESCENDING,
+            page_size=PAGE_SIZE,
+            page_token=TOKEN,
+            max_results=2,
+        )
+        entries = list(iterator)
+        # Check the entries.
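+        # max_results=2 should drop the third ("ignored") entry.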
+        self.assertEqual(len(entries), 2)
+        entry = entries[0]
+        self.assertIsInstance(entry, StructEntry)
+        self.assertEqual(entry.insert_id, IID1)
+        self.assertEqual(entry.payload, PAYLOAD)
+        logger = entry.logger
+        self.assertIsInstance(logger, Logger)
+        self.assertEqual(logger.name, self.LOGGER_NAME)
+        self.assertIs(logger.client, client)
+        self.assertEqual(logger.project, self.PROJECT)
+
+        entry = entries[1]
+        self.assertIsInstance(entry, ProtobufEntry)
+        self.assertEqual(entry.insert_id, IID2)
+        self.assertEqual(entry.payload, PROTO_PAYLOAD)
+        logger = entry.logger
+        self.assertEqual(logger.name, self.LOGGER_NAME)
+        self.assertIs(logger.client, client)
+        self.assertEqual(logger.project, self.PROJECT)
+
+        self.assertIs(entries[0].logger, entries[1].logger)
+
+        # check call payload
+        call_payload_no_filter = deepcopy(client._connection._called_with)
+        call_payload_no_filter["data"]["filter"] = "removed"
+        self.assertEqual(
+            call_payload_no_filter,
+            {
+                "path": "/entries:list",
+                "method": "POST",
+                "data": {
+                    "filter": "removed",
+                    "orderBy": DESCENDING,
+                    "pageSize": PAGE_SIZE,
+                    "pageToken": TOKEN,
+                    "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
+                },
+            },
+        )
+
+    def test_list_entries_folder(self):
+        from google.cloud.logging import TextEntry
+        from google.cloud.logging import Client
+
+        client = Client(
+            project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
+        )
+        FOLDER_ID = "123"
+        LOG_NAME = f"folders/{FOLDER_ID}/logs/cloudaudit.googleapis.com%2Fdata_access"
+
+        ENTRIES = [
+            {
+                "textPayload": "hello world",
+                "insertId": "1",
+                "resource": {"type": "global"},
+                "logName": LOG_NAME,
+            },
+        ]
+        returned = {"entries": ENTRIES}
+        client._connection = _Connection(returned)
+
+        iterator = client.list_entries(resource_names=[f"folders/{FOLDER_ID}"])
+        entries = list(iterator)
+        # Check the entries.
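+        # The folder-scoped entry should parse as a TextEntry with no project logger attached.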
+        self.assertEqual(len(entries), 1)
+        entry = entries[0]
+        self.assertIsInstance(entry, TextEntry)
+        self.assertIsNone(entry.logger)
+        self.assertEqual(entry.log_name, LOG_NAME)
+

 class TestBatch(unittest.TestCase):
@@ -902,6 +1231,123 @@ def test_log_proto_explicit(self):
         )
         self.assertEqual(batch.entries, [ENTRY])

+    def test_log_inference_empty(self):
+        """
+        When calling batch.log with empty input, it should
+        call batch.log_empty
+        """
+        from google.cloud.logging import LogEntry
+
+        ENTRY = LogEntry()
+        client = _Client(project=self.PROJECT, connection=_make_credentials())
+        logger = _Logger()
+        batch = self._make_one(logger, client=client)
+        batch.log()
+        self.assertEqual(batch.entries, [ENTRY])
+
+    def test_log_inference_text(self):
+        """
+        When calling batch.log with text input, it should
+        call batch.log_text
+        """
+        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
+        from google.cloud.logging import TextEntry
+
+        TEXT = "This is the entry text"
+        ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE)
+        client = _Client(project=self.PROJECT, connection=_make_credentials())
+        logger = _Logger()
+        batch = self._make_one(logger, client=client)
+        batch.log(TEXT)
+        self.assertEqual(batch.entries, [ENTRY])
+
+    def test_log_inference_struct(self):
+        """
+        When calling batch.log with struct input, it should
+        call batch.log_struct
+        """
+        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
+        from google.cloud.logging import StructEntry
+
+        STRUCT = {"message": "Message text", "weather": "partly cloudy"}
+        ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE)
+        client = _Client(project=self.PROJECT, connection=_make_credentials())
+        logger = _Logger()
+        batch = self._make_one(logger, client=client)
+        batch.log(STRUCT)
+        self.assertEqual(batch.entries, [ENTRY])
+
+    def test_log_inference_proto(self):
+        """
+        When calling batch.log with proto input, it should
+        call batch.log_proto
+        """
+        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
+        from google.cloud.logging import ProtobufEntry
+        from google.protobuf.struct_pb2 import Struct
+        from google.protobuf.struct_pb2 import Value
+
+        message = Struct(fields={"foo": Value(bool_value=True)})
+        ENTRY = ProtobufEntry(payload=message, resource=_GLOBAL_RESOURCE)
+        client = _Client(project=self.PROJECT, connection=_make_credentials())
+        logger = _Logger()
+        batch = self._make_one(logger, client=client)
+        batch.log(message)
+        self.assertEqual(batch.entries, [ENTRY])
+
+    def test_log_inference_struct_explicit(self):
+        """
+        When calling batch.log with struct input, it should
+        call batch.log_struct, along with input arguments
+        """
+        import datetime
+        from google.cloud.logging import Resource
+        from google.cloud.logging import StructEntry
+
+        STRUCT = {"message": "Message text", "weather": "partly cloudy"}
+        LABELS = {"foo": "bar", "baz": "qux"}
+        IID = "IID"
+        SEVERITY = "CRITICAL"
+        METHOD = "POST"
+        URI = "https://api.example.com/endpoint"
+        STATUS = "500"
+        TRACE = "12345678-1234-5678-1234-567812345678"
+        SPANID = "000000000000004a"
+        REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
+        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
+        RESOURCE = Resource(
+            type="gae_app", labels={"module_id": "default", "version_id": "test"}
+        )
+        ENTRY = StructEntry(
+            payload=STRUCT,
+            labels=LABELS,
+            insert_id=IID,
+            severity=SEVERITY,
+            http_request=REQUEST,
+            timestamp=TIMESTAMP,
+            resource=RESOURCE,
+            trace=TRACE,
+            span_id=SPANID,
+            trace_sampled=True,
+        )
+
+        client = _Client(project=self.PROJECT, connection=_make_credentials())
+        logger = _Logger()
+        batch = self._make_one(logger, client=client)
+        batch.log(
+            STRUCT,
+            labels=LABELS,
+            insert_id=IID,
+            severity=SEVERITY,
+            http_request=REQUEST,
+            timestamp=TIMESTAMP,
+            resource=RESOURCE,
+            trace=TRACE,
+            span_id=SPANID,
+            trace_sampled=True,
+        )
+        self.assertEqual(batch.entries, [ENTRY])
+
     def test_commit_w_unknown_entry_type(self):
         from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
         from google.cloud.logging import LogEntry