From a5c36d732526ffbdd5cc2c7aad809c0ca88b49c1 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 7 Dec 2021 15:44:30 -0800 Subject: [PATCH 01/28] added empty v3.0.0 section to UPGRADING.md --- UPGRADING.md | 3 +++ docs/index.rst | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/UPGRADING.md b/UPGRADING.md index af7461dda..249336d80 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -1,3 +1,6 @@ +# 3.0.0 Migration Guide + + # 2.0.0 Migration Guide The 2.0 release of the `google-cloud-logging` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage. diff --git a/docs/index.rst b/docs/index.rst index 64c2dcd1e..87854686f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -8,10 +8,10 @@ Documentation v2 -Migration Guide ---------------- +Migration Guides +---------------- -See the guide below for instructions on migrating to the 2.x release of this library. +See the guide below for instructions on migrating between major releases of this library. .. toctree:: :maxdepth: 2 From 0ff70480e81925ca632653abab52987b094e63a0 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 7 Dec 2021 15:45:42 -0800 Subject: [PATCH 02/28] reorganized table of contents --- docs/handlers-cloud-logging.rst | 6 ++++++ docs/handlers-structured-log.rst | 6 ++++++ docs/handlers.rst | 13 ++++++++----- docs/index.rst | 11 ++++++++++- docs/transport.rst | 8 ++++++++ docs/v2.rst | 19 ------------------- 6 files changed, 38 insertions(+), 25 deletions(-) create mode 100644 docs/handlers-cloud-logging.rst create mode 100644 docs/handlers-structured-log.rst create mode 100644 docs/transport.rst delete mode 100644 docs/v2.rst diff --git a/docs/handlers-cloud-logging.rst b/docs/handlers-cloud-logging.rst new file mode 100644 index 000000000..5ebaa51ff --- /dev/null +++ b/docs/handlers-cloud-logging.rst @@ -0,0 +1,6 @@ +Cloud Logging Handler +============================== + +.. automodule:: google.cloud.logging_v2.handlers.handlers + :members: + :show-inheritance: diff --git a/docs/handlers-structured-log.rst b/docs/handlers-structured-log.rst new file mode 100644 index 000000000..337ad591d --- /dev/null +++ b/docs/handlers-structured-log.rst @@ -0,0 +1,6 @@ +Structured Log Handler +============================== + +.. automodule:: google.cloud.logging_v2.handlers.structured_log + :members: + :show-inheritance: diff --git a/docs/handlers.rst b/docs/handlers.rst index 9089170fb..914757834 100644 --- a/docs/handlers.rst +++ b/docs/handlers.rst @@ -1,6 +1,9 @@ -Python Logging Module Handler -============================== +Handlers +---------------- +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.logging_v2.handlers.handlers - :members: - :show-inheritance: + handlers-cloud-logging + handlers-structured-log + handlers-app-engine + handlers-container-engine diff --git a/docs/index.rst b/docs/index.rst index 87854686f..2e8016d47 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -5,7 +5,16 @@ Documentation .. 
toctree:: :maxdepth: 3 - v2 + usage + stdlib-usage + client + logger + entries + metric + resource + sink + handlers + transport Migration Guides diff --git a/docs/transport.rst b/docs/transport.rst new file mode 100644 index 000000000..2a1e1a308 --- /dev/null +++ b/docs/transport.rst @@ -0,0 +1,8 @@ +Transport +---------------- +.. toctree:: + :maxdepth: 2 + + transports-sync + transports-thread + transports-base diff --git a/docs/v2.rst b/docs/v2.rst deleted file mode 100644 index 823097bd7..000000000 --- a/docs/v2.rst +++ /dev/null @@ -1,19 +0,0 @@ -v2 ----------------- -.. toctree:: - :maxdepth: 2 - - usage - client - logger - entries - metric - resource - sink - stdlib-usage - handlers - handlers-app-engine - handlers-container-engine - transports-sync - transports-thread - transports-base From 68e7f9d4e5efb211ee0e1e5c4987083a5546ee84 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 7 Dec 2021 15:56:37 -0800 Subject: [PATCH 03/28] marked old handlers as deprecated --- docs/handlers-app-engine.rst | 7 +++++-- docs/handlers-container-engine.rst | 7 +++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/docs/handlers-app-engine.rst b/docs/handlers-app-engine.rst index f25223a20..9f8a6c8db 100644 --- a/docs/handlers-app-engine.rst +++ b/docs/handlers-app-engine.rst @@ -1,5 +1,8 @@ -Google App Engine flexible Log Handler -====================================== +[DEPRECATED] App Engine Handler +=================================================== + +.. deprecated:: 3.0.0 + Use :class:`CloudLoggingHandler` instead. .. automodule:: google.cloud.logging_v2.handlers.app_engine :members: diff --git a/docs/handlers-container-engine.rst b/docs/handlers-container-engine.rst index 981b41dcb..0c074eb19 100644 --- a/docs/handlers-container-engine.rst +++ b/docs/handlers-container-engine.rst @@ -1,5 +1,8 @@ -Google Kubernetes Engine Log Handler -==================================== +[DEPRECATED] Kubernetes Engine Handler +================================================= + +.. deprecated:: 3.0.0 + Use :class:`StructuredLogHandler` instead. .. automodule:: google.cloud.logging_v2.handlers.container_engine :members: From ff046f5ca6cafaa49972b9ac4e9a4754e85ecfc0 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 7 Dec 2021 16:13:52 -0800 Subject: [PATCH 04/28] removed duplicated stdlib doc --- docs/index.rst | 9 ++++-- docs/stdlib-usage.rst | 70 ------------------------------------------- docs/usage.rst | 12 ++++---- 3 files changed, 13 insertions(+), 78 deletions(-) delete mode 100644 docs/stdlib-usage.rst diff --git a/docs/index.rst b/docs/index.rst index 2e8016d47..01d8e4eee 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,12 +1,17 @@ .. include:: README.rst +Usage Guide +------------------- +.. toctree:: + :maxdepth: 2 + + usage + Documentation ------------------- .. toctree:: :maxdepth: 3 - usage - stdlib-usage client logger entries diff --git a/docs/stdlib-usage.rst b/docs/stdlib-usage.rst deleted file mode 100644 index 375b41ddf..000000000 --- a/docs/stdlib-usage.rst +++ /dev/null @@ -1,70 +0,0 @@ -Integration with Python logging module --------------------------------------- - - -It's possible to tie the Python :mod:`logging` module directly into Google Cloud Logging. To use it, -create a :class:`CloudLoggingHandler ` instance from your -Logging client. - -.. 
code-block:: python - - >>> import logging - >>> import google.cloud.logging # Don't conflict with standard logging - >>> from google.cloud.logging.handlers import CloudLoggingHandler - >>> client = google.cloud.logging.Client() - >>> handler = CloudLoggingHandler(client) - >>> cloud_logger = logging.getLogger('cloudLogger') - >>> cloud_logger.setLevel(logging.INFO) # defaults to WARN - >>> cloud_logger.addHandler(handler) - >>> cloud_logger.error('bad news') - -.. note:: - - This handler by default uses an asynchronous transport that sends log entries on a background - thread. However, the API call will still be made in the same process. For other transport - options, see the transports section. - -All logs will go to a single custom log, which defaults to "python". The name of the Python -logger will be included in the structured log entry under the "python_logger" field. You can -change it by providing a name to the handler: - -.. code-block:: python - - >>> handler = CloudLoggingHandler(client, name="mycustomlog") - -It is also possible to attach the handler to the root Python logger, so that for example a plain -`logging.warn` call would be sent to Cloud Logging, as well as any other loggers created. However, -you must avoid infinite recursion from the logging calls the client itself makes. A helper -method :meth:`setup_logging ` is provided to configure -this automatically: - -.. code-block:: python - - >>> import logging - >>> import google.cloud.logging # Don't conflict with standard logging - >>> from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging - >>> client = google.cloud.logging.Client() - >>> handler = CloudLoggingHandler(client) - >>> logging.getLogger().setLevel(logging.INFO) # defaults to WARN - >>> setup_logging(handler) - >>> logging.error('bad news') - -You can also exclude certain loggers: - -.. code-block:: python - - >>> setup_logging(handler, excluded_loggers=('werkzeug',)) - - - -Python logging handler transports -================================== - -The Python logging handler can use different transports. The default is -:class:`google.cloud.logging_v2.handlers.BackgroundThreadTransport`. - - 1. :class:`google.cloud.logging_V2.handlers.BackgroundThreadTransport` this is the default. It writes - entries on a background :class:`python.threading.Thread`. - - 1. :class:`google.cloud.logging_V2.handlers.SyncTransport` this handler does a direct API call on each - logging statement to write the entry. diff --git a/docs/usage.rst b/docs/usage.rst index 1fde3d8ea..736ae9372 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -250,8 +250,8 @@ Delete a sink: :end-before: [END sink_delete] :dedent: 4 -Integration with Python logging module --------------------------------------- +Integration with `logging` Standard Library +=========================================== It's possible to tie the Python :mod:`logging` module directly into Google Cloud Logging. There are different handler options to accomplish this. 
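In practice, the integration described in this section reduces to a short sketch like the following (a minimal example assuming default application credentials; the warning message is a placeholder):

.. code-block:: python

    import logging

    import google.cloud.logging  # imported separately to avoid shadowing the stdlib module

    # Create a client and attach the handler chosen for this environment
    # to the root logger (this is what setup_logging does).
    client = google.cloud.logging.Client()
    client.setup_logging()

    # Standard-library calls are now forwarded to Cloud Logging.
    logging.warning("example warning written through the attached handler")

Calls made through the root logger after ``setup_logging`` are forwarded to Cloud Logging by the attached handler.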
@@ -286,8 +286,8 @@ You can also exclude certain loggers: :end-before: [END setup_logging_excludes] :dedent: 4 -Cloud Logging Handler -~~~~~~~~~~~~~~~~~~~~~ +Manual Handler +--------------------- If you prefer not to use :meth:`~google.cloud.logging.client.Client.get_default_handler`, you can @@ -317,7 +317,7 @@ of the Python logger will be included in the structured log entry under the :dedent: 4 Cloud Logging Handler transports -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-------------------------------- The :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` logging handler can use different transports. The default is @@ -334,7 +334,7 @@ logging handler can use different transports. The default is .. _Google Kubernetes Engine: https://cloud.google.com/kubernetes-engine fluentd logging handlers -~~~~~~~~~~~~~~~~~~~~~~~~ +------------------------ Besides :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler`, which writes directly to the API, two other handlers are provided. From 2e5fd1881ec75f5e8c21583aaa916f1ce8220860 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 8 Dec 2021 14:48:10 -0800 Subject: [PATCH 05/28] made some progress on standard library integration doc --- docs/std-lib-integration.rst | 70 +++++++ docs/usage.rst | 357 +---------------------------------- 2 files changed, 74 insertions(+), 353 deletions(-) create mode 100644 docs/std-lib-integration.rst diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst new file mode 100644 index 000000000..fc811be29 --- /dev/null +++ b/docs/std-lib-integration.rst @@ -0,0 +1,70 @@ +Integration with `logging` Standard Library +=========================================== + +The recommended way to use :mod:`google-cloud-logging` is to allow it to integrate with +the Python :mod:`logging` standard library. This way, you can write logs using Python +standards, and still have your logs appear in Google Cloud Logging using custom handlers +behind the scenes. + +Automatic Configuration +----------------------- + +To integrate :mod:`google-cloud-logging` with the standard :mod:`logging` module, +simply call :meth:`setup_logging` on a :class:`~google.cloud.logging.client.Client` instance. + +.. literalinclude:: ../samples/snippets/handler.py + :start-after: [START logging_handler_setup] + :end-before: [END logging_handler_setup] + :dedent: 4 + +This function will automatically choose the best configurations for the environment your +code is running on. After running :meth:`setup_logging`, you can write logs using the +standard :mod:`logging` module as normal: + +.. literalinclude:: ../samples/snippets/handler.py + :start-after: [START logging_handler_usage] + :end-before: [END logging_handler_usage] + :dedent: 4 + +For more information on the library, see the `Google Cloud Logging documentation `_. +For more information on the Python :mod:`logging` standard library, see the `logging documentation `_ +Manual Handler Configuration +----------------------------- + +Automatic Configuration will automatically determine the appropriate handler for the environment. +If you would rather choose the handler yourself, you can construct an instance manually and pass it in +as an argument to :meth:`setup_logging`: + +.. 
literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START setup_logging] + :end-before: [END setup_logging] + :dedent: 4 + +There are two supported handler classes to choose from: + +- :class:`~google.cloud.logging.handlers.CloudLoggingHandler`: + - Sends logs directly to Cloud Logging over the network (gRPC or HTTP) + - This is the default handler on most environments, including local development +- :class:`~google.cloud.logging.handlers.StructuredLogHandler`: + - Outputs logs as `structured JSON `_ + to standard out, to be read and parsed by a GCP logging agent + - This is the default handler on Kubernetes Engine, Cloud Functions and Cloud Run + +logging JSON poayloads +---------------------- + +Although the Python :mod:`logging` standard library `expects all logs to be strings `_ + + +- *logging dictionaries* + + +:mod:`google-cloud-logging` will also attempt to parse stringified JSON objects logged using the library. + +Using `extras` +-------------- + +Automatic Metadata Detection +---------------------------- + + diff --git a/docs/usage.rst b/docs/usage.rst index 736ae9372..4a14a186c 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -1,356 +1,7 @@ Usage Guide -=========== +------------- +.. toctree:: + :maxdepth: 2 -Writing log entries -------------------- + std-lib-integration -To write log entries, first create a -:class:`~google.cloud.logging.logger.Logger`, passing the "log name" with -which to associate the entries: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_create] - :end-before: [END logger_create] - :dedent: 4 - -Write a simple text entry to the logger. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_text] - :end-before: [END logger_log_text] - :dedent: 4 - -Write a dictionary entry to the logger. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_struct] - :end-before: [END logger_log_struct] - :dedent: 4 - -Write a simple text entry and resource to the logger. - -Supported Resource values are listed at `Monitored Resource Types`_ - -.. _Monitored Resource Types: https://cloud.google.com/logging/docs/api/v2/resource-list - - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_resource_text] - :end-before: [END logger_log_resource_text] - :dedent: 4 - -Retrieving log entries ----------------------- - -Fetch entries for the default project. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_default] - :end-before: [END client_list_entries_default] - :dedent: 4 - -Entries returned by -:meth:`Client.list_entries ` -or -:meth:`Logger.list_entries ` -will be instances of one of the following classes: - -- :class:`~google.cloud.logging.entries.TextEntry` -- :class:`~google.cloud.logging.entries.StructEntry` -- :class:`~google.cloud.logging.entries.ProtobufEntry` - -Filter entries retrieved using the `Advanced Logs Filters`_ syntax - -.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters - -Fetch entries for the default project. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_filter] - :end-before: [END client_list_entries_filter] - :dedent: 4 - -Sort entries in descending timestamp order. - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_order_by] - :end-before: [END client_list_entries_order_by] - :dedent: 4 - -Retrieve entries for a single logger, sorting in descending timestamp order: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_list_entries] - :end-before: [END logger_list_entries] - :dedent: 4 - -And as a practical example, retrieve all `GKE Admin Activity audit logs`_ -from the past 24 hours: - -.. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logging_list_gke_audit_logs] - :end-before: [END logging_list_gke_audit_logs] - :dedent: 4 - -Delete all entries for a logger -------------------------------- - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_delete] - :end-before: [END logger_delete] - :dedent: 8 - - -Manage log metrics ------------------- - -Metrics are counters of entries which match a given filter. They can be -used within Cloud Monitoring to create charts and alerts. - -List all metrics for a project: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_metrics] - :end-before: [END client_list_metrics] - :dedent: 4 - -Create a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_create] - :end-before: [END metric_create] - :dedent: 4 - -Refresh local information about a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_reload] - :end-before: [END metric_reload] - :dedent: 4 - -Update a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_update] - :end-before: [END metric_update] - :dedent: 4 - -Delete a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_delete] - :end-before: [END metric_delete] - :dedent: 4 - -Export log entries using sinks ------------------------------- - -Sinks allow exporting entries which match a given filter to Cloud Storage -buckets, BigQuery datasets, or Cloud Pub/Sub topics. - -Export to Cloud Storage -~~~~~~~~~~~~~~~~~~~~~~~ - -Make sure that the storage bucket you want to export logs too has -``cloud-logs@google.com`` as the owner. See -`Setting permissions for Cloud Storage`_. - -.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage - -Add ``cloud-logs@google.com`` as the owner of the bucket: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_bucket_permissions] - :end-before: [END sink_bucket_permissions] - :dedent: 4 - -Create a Cloud Storage sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_storage_create] - :end-before: [END sink_storage_create] - :dedent: 4 - - -Export to BigQuery -~~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a dataset. - -See: `Setting permissions for BigQuery`_ - -.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_dataset_permissions] - :end-before: [END sink_dataset_permissions] - :dedent: 4 - -Create a BigQuery sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_bigquery_create] - :end-before: [END sink_bigquery_create] - :dedent: 4 - - -Export to Pub/Sub -~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a topic. - -See: `Setting permissions for Pub/Sub`_ - -.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_topic_permissions] - :end-before: [END sink_topic_permissions] - :dedent: 4 - -Create a Cloud Pub/Sub sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_pubsub_create] - :end-before: [END sink_pubsub_create] - :dedent: 4 - -Manage Sinks -~~~~~~~~~~~~ - -List all sinks for a project: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_sinks] - :end-before: [END client_list_sinks] - :dedent: 4 - -Refresh local information about a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_reload] - :end-before: [END sink_reload] - :dedent: 4 - -Update a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_update] - :end-before: [END sink_update] - :dedent: 4 - -Delete a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_delete] - :end-before: [END sink_delete] - :dedent: 4 - -Integration with `logging` Standard Library -=========================================== - -It's possible to tie the Python :mod:`logging` module directly into Google -Cloud Logging. There are different handler options to accomplish this. -To automatically pick the default for your current environment, use -:meth:`~google.cloud.logging.client.Client.get_default_handler`. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_default_handler] - :end-before: [END create_default_handler] - :dedent: 4 - -It is also possible to attach the handler to the root Python logger, so that -for example a plain ``logging.warn`` call would be sent to Cloud Logging, -as well as any other loggers created. A helper method -:meth:`~google.cloud.logging.client.Client.setup_logging` is provided -to configure this automatically. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START setup_logging] - :end-before: [END setup_logging] - :dedent: 4 - -.. note:: - - To reduce cost and quota usage, do not enable Cloud Logging - handlers while testing locally. - -You can also exclude certain loggers: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START setup_logging_excludes] - :end-before: [END setup_logging_excludes] - :dedent: 4 - -Manual Handler ---------------------- - -If you prefer not to use -:meth:`~google.cloud.logging.client.Client.get_default_handler`, you can -directly create a -:class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` instance -which will write directly to the API. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_cloud_handler] - :end-before: [END create_cloud_handler] - :dedent: 4 - -.. 
note:: - - This handler by default uses an asynchronous transport that sends log - entries on a background thread. However, the API call will still be made - in the same process. For other transport options, see the transports - section. - -All logs will go to a single custom log, which defaults to "python". The name -of the Python logger will be included in the structured log entry under the -"python_logger" field. You can change it by providing a name to the handler: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_named_handler] - :end-before: [END create_named_handler] - :dedent: 4 - -Cloud Logging Handler transports --------------------------------- - -The :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` -logging handler can use different transports. The default is -:class:`~google.cloud.logging.handlers.BackgroundThreadTransport`. - - 1. :class:`~google.cloud.logging.handlers.BackgroundThreadTransport` this is - the default. It writes entries on a background - :class:`python.threading.Thread`. - - 1. :class:`~google.cloud.logging.handlers.SyncTransport` this handler does a - direct API call on each logging statement to write the entry. - - -.. _Google Kubernetes Engine: https://cloud.google.com/kubernetes-engine - -fluentd logging handlers ------------------------- - -Besides :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler`, -which writes directly to the API, two other handlers are provided. -:class:`~google.cloud.logging.handlers.app_engine.AppEngineHandler`, which is -recommended when running on the Google App Engine Flexible vanilla runtimes -(i.e. your app.yaml contains ``runtime: python``), and -:class:`~google.cloud.logging.handlers.container_engine.ContainerEngineHandler` -, which is recommended when running on `Google Kubernetes Engine`_ with the -Cloud Logging plugin enabled. - -:meth:`~google.cloud.logging.client.Client.get_default_handler` and -:meth:`~google.cloud.logging.client.Client.setup_logging` will attempt to use -the environment to automatically detect whether the code is running in -these platforms and use the appropriate handler. - -In both cases, the fluentd agent is configured to automatically parse log files -in an expected format and forward them to Cloud Logging. The handlers -provided help set the correct metadata such as log level so that logs can be -filtered accordingly. From 51d2a9a9bef9eddd2ec7a5d233c08cc83e98aa55 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 8 Dec 2021 15:43:32 -0800 Subject: [PATCH 06/28] added more stdlib documentation --- docs/std-lib-integration.rst | 53 +++++++++++++++++++++++++++++---- samples/snippets/usage_guide.py | 31 +++++++++++++++++++ 2 files changed, 78 insertions(+), 6 deletions(-) diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst index fc811be29..fc2d87b08 100644 --- a/docs/std-lib-integration.rst +++ b/docs/std-lib-integration.rst @@ -28,6 +28,7 @@ standard :mod:`logging` module as normal: For more information on the library, see the `Google Cloud Logging documentation `_. For more information on the Python :mod:`logging` standard library, see the `logging documentation `_ + Manual Handler Configuration ----------------------------- @@ -50,19 +51,59 @@ There are two supported handler classes to choose from: to standard out, to be read and parsed by a GCP logging agent - This is the default handler on Kubernetes Engine, Cloud Functions and Cloud Run -logging JSON poayloads +.. 
_JSON: + +Logging JSON Payloads ---------------------- -Although the Python :mod:`logging` standard library `expects all logs to be strings `_ +Although the Python :mod:`logging` standard library `expects all logs to be strings `_, +Google Cloud Logging allows `JSON payload data `_. +You can write JSON logs using the standard library integration in one of the following ways: + +1. Using the `json_fields` `extra` argument: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_extra_json_fields] + :end-before: [END logging_extra_json_fields] + :dedent: 4 + +2. Logging a JSON-parsable string: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_json_dumps] + :end-before: [END logging_json_dumps] + :dedent: 4 + +Using the `extra` Argument +-------------------------- + +The Python :mod:`logging` standard library accepts `an "extra" argument `_ when +writing logs, which can be used to populate LogRecord objects with user-defined +key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in +metadata to populate `LogEntry fields `_. + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_extras] + :end-before: [END logging_extras] + :dedent: 4 + +The following `LogEntry fields `_ +can be set through the `extra` argument: -- *logging dictionaries* +- labels +- trace +- span_id +- trace_sampled +- http_request +- source_location +- resource +- :ref:`json_fields` -:mod:`google-cloud-logging` will also attempt to parse stringified JSON objects logged using the library. +Metadata sent explicitly through the `extra` argument will override any :ref:`automatically detected` fields. -Using `extras` --------------- +.. _Autodetection: Automatic Metadata Detection ---------------------------- diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index e519c75c1..b2a73d906 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -369,6 +369,37 @@ def logging_handler(client): handler = CloudLoggingHandler(client, name="mycustomlog") # [END create_named_handler] +@snippet +def logging_json(client): + # [START logging_json_dumps] + import logging + import json + + data_dict = {"hello": "world"} + logging.info(json.dumps(data_dict)) + # [END logging_json_dumps] + + # [START logging_extra_json_fields] + import logging + + data_dict = {"hello": "world"} + logging.info("message field", extra={"json_fields": data_dict}) + # [END logging_extra_json_fields] + +@snippet +def using_extras(client): + # [START logging_extras] + my_labels = {"foo": "bar"} + my_http = {"requestUrl": "localhost"} + my_trace = "01234" + + logging.info("hello", extra={ + "labels": my_labels, + "http_request": my_http, + "trace": my_trace + }) + # [END logging_extras] + @snippet def setup_logging(client): From fcc239bc29e815f6d2d3d71ee35e0d4d429e0965 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Dec 2021 13:51:31 -0800 Subject: [PATCH 07/28] finished up stdlib docs --- docs/std-lib-integration.rst | 73 ++++++++++++++++++++---------------- 1 file changed, 41 insertions(+), 32 deletions(-) diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst index fc2d87b08..d2007f240 100644 --- a/docs/std-lib-integration.rst +++ b/docs/std-lib-integration.rst @@ -18,16 +18,7 @@ simply call :meth:`setup_logging` on a :class:`~google.cloud.logging.client.Clie :dedent: 4 This function will automatically choose the best configurations for the environment your 
-code is running on. After running :meth:`setup_logging`, you can write logs using the -standard :mod:`logging` module as normal: - -.. literalinclude:: ../samples/snippets/handler.py - :start-after: [START logging_handler_usage] - :end-before: [END logging_handler_usage] - :dedent: 4 - -For more information on the library, see the `Google Cloud Logging documentation `_. -For more information on the Python :mod:`logging` standard library, see the `logging documentation `_ +code is running on. For more information, see the `Google Cloud Logging documentation `_. Manual Handler Configuration ----------------------------- @@ -53,6 +44,19 @@ There are two supported handler classes to choose from: .. _JSON: +Using the Standard Library +--------------------------- + +After the Google Cloug Logging library has been setup with the Python :mod:`logging` standard library, +you can begin to send logs with the standard logging library as you normally would: + +.. literalinclude:: ../samples/snippets/handler.py + :start-after: [START logging_handler_usage] + :end-before: [END logging_handler_usage] + :dedent: 4 + +For more information on using the Python :mod:`logging` standard library, see the `logging documentation `_ + Logging JSON Payloads ---------------------- @@ -74,38 +78,43 @@ You can write JSON logs using the standard library integration in one of the fol :end-before: [END logging_json_dumps] :dedent: 4 -Using the `extra` Argument --------------------------- - -The Python :mod:`logging` standard library accepts `an "extra" argument `_ when -writing logs, which can be used to populate LogRecord objects with user-defined -key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in -metadata to populate `LogEntry fields `_. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logging_extras] - :end-before: [END logging_extras] - :dedent: 4 +.. _Autodetection: +Automatic Metadata Detection +---------------------------- -The following `LogEntry fields `_ -can be set through the `extra` argument: +The Google Cloud Logging library will attempt to detect and attach additional +`LogEntry fields `_ +whenever possible. The following fields are currently supported: - labels -- trace -- span_id -- trace_sampled -- http_request +- trace* +- span_id* +- trace_sampled* +- http_request* - source_location - resource - :ref:`json_fields` +.. note:: + Fields marked with "*" can only be detected when using a supported Python web framework. The Google Cloud Logging + library currently supports `flask `_ and `django `_ -Metadata sent explicitly through the `extra` argument will override any :ref:`automatically detected` fields. +Manual Metadata Using the `extra` Argument +------------------------------------------ -.. _Autodetection: +The Python :mod:`logging` standard library accepts `an "extra" argument `_ when +writing logs, which can be used to populate LogRecord objects with user-defined +key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in additional +metadata to populate `LogEntry fields `_. -Automatic Metadata Detection ----------------------------- +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_extras] + :end-before: [END logging_extras] + :dedent: 4 + +All of the `LogEntry fields `_ +that can be :ref:`autodetected` can also be set manually through the `extra` argument. Fields sent explicitly through the `extra` +argument will override any :ref:`automatically detected` fields. 
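Taken together, the JSON payload and ``extra`` metadata options described above can be exercised with a short sketch like the following (the field values are placeholders taken from the snippets in this guide, and a Cloud Logging handler is assumed to already be attached via ``setup_logging``):

.. code-block:: python

    import logging

    # Attach a JSON payload through the reserved "json_fields" key.
    logging.info("message field", extra={"json_fields": {"hello": "world"}})

    # Populate LogEntry fields such as labels, http_request, and trace.
    logging.info(
        "hello",
        extra={
            "labels": {"foo": "bar"},
            "http_request": {"requestUrl": "localhost"},
            "trace": "01234",
        },
    )

Fields passed this way are attached to the resulting LogEntry by the handler, alongside anything it detects automatically.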
From 5826ce36280dc28969d982975db4797cb58520d6 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Dec 2021 14:06:53 -0800 Subject: [PATCH 08/28] added page for direct API usage --- docs/direct-lib-usage.rst | 30 ++++++++++++++++++++++++++++++ docs/entries.rst | 4 ++-- docs/usage.rst | 1 + 3 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 docs/direct-lib-usage.rst diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst new file mode 100644 index 000000000..ab9671776 --- /dev/null +++ b/docs/direct-lib-usage.rst @@ -0,0 +1,30 @@ +Direct Library Usage +==================== + +Although the recommended way of using the :mod:`google-cloud-logging` library +is to integrate it with the :doc:`Python logging standard library`, +you can also use the library to interact with the Googel Cloud Logging API +directly. In additoion to writing logs, using the library in this way allows you to read and delete +:doc:`logs`, :doc:`sinks`, :doc:`metrics`, and other resources. + +Creating a Client +----------------- +http vs grpc +clients will have associated resource + +Writing Log Entries +------------------- + +Retriving Log Entries +--------------------- + +Deleting Log Entries +-------------------- + +Managing Log Metrics +-------------------- + +Using Log Sinks +--------------- + + diff --git a/docs/entries.rst b/docs/entries.rst index 9d473f3c1..dc257e4c9 100644 --- a/docs/entries.rst +++ b/docs/entries.rst @@ -1,5 +1,5 @@ -Entries -======= +Log Entries +=========== .. automodule:: google.cloud.logging_v2.entries :members: diff --git a/docs/usage.rst b/docs/usage.rst index 4a14a186c..32c51a089 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -4,4 +4,5 @@ Usage Guide :maxdepth: 2 std-lib-integration + direct-lib-usage From 205d915c99cfcd56c2ae4af21f015e635c8cc256 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Dec 2021 14:21:04 -0800 Subject: [PATCH 09/28] copied old usage guide info --- docs/direct-lib-usage.rst | 210 +++++++++++++++++++++++++++++++++++++- 1 file changed, 208 insertions(+), 2 deletions(-) diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst index ab9671776..6fe341f4c 100644 --- a/docs/direct-lib-usage.rst +++ b/docs/direct-lib-usage.rst @@ -4,13 +4,17 @@ Direct Library Usage Although the recommended way of using the :mod:`google-cloud-logging` library is to integrate it with the :doc:`Python logging standard library`, you can also use the library to interact with the Googel Cloud Logging API -directly. In additoion to writing logs, using the library in this way allows you to read and delete +directly. + +In addition to writing logs, using the library in this way allows you to manage :doc:`logs`, :doc:`sinks`, :doc:`metrics`, and other resources. Creating a Client ----------------- http vs grpc -clients will have associated resource +create logger +loggers will have associated resource +can attach labels to logger Writing Log Entries ------------------- @@ -18,13 +22,215 @@ Writing Log Entries Retriving Log Entries --------------------- +Fetch entries for the default project. + +.. 
literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START client_list_entries_default] + :end-before: [END client_list_entries_default] + :dedent: 4 + +Entries returned by +:meth:`Client.list_entries ` +or +:meth:`Logger.list_entries ` +will be instances of one of the following classes: + +- :class:`~google.cloud.logging.entries.TextEntry` +- :class:`~google.cloud.logging.entries.StructEntry` +- :class:`~google.cloud.logging.entries.ProtobufEntry` + +Filter entries retrieved using the `Advanced Logs Filters`_ syntax + +.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters + +Fetch entries for the default project. + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START client_list_entries_filter] + :end-before: [END client_list_entries_filter] + :dedent: 4 + +Sort entries in descending timestamp order. + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START client_list_entries_order_by] + :end-before: [END client_list_entries_order_by] + :dedent: 4 + +Retrieve entries for a single logger, sorting in descending timestamp order: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_list_entries] + :end-before: [END logger_list_entries] + :dedent: 4 + +And as a practical example, retrieve all `GKE Admin Activity audit logs`_ +from the past 24 hours: + +.. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_list_gke_audit_logs] + :end-before: [END logging_list_gke_audit_logs] + :dedent: 4 + + Deleting Log Entries -------------------- +You can delete all logs associated with a logger using the following call: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_delete] + :end-before: [END logger_delete] + :dedent: 8 + + Managing Log Metrics -------------------- +Metrics are counters of entries which match a given filter. They can be +used within Cloud Monitoring to create charts and alerts. + +List all metrics for a project: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START client_list_metrics] + :end-before: [END client_list_metrics] + :dedent: 4 + +Create a metric: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START metric_create] + :end-before: [END metric_create] + :dedent: 4 + +Refresh local information about a metric: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START metric_reload] + :end-before: [END metric_reload] + :dedent: 4 + +Update a metric: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START metric_update] + :end-before: [END metric_update] + :dedent: 4 + +Delete a metric: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START metric_delete] + :end-before: [END metric_delete] + :dedent: 4 + Using Log Sinks --------------- +Sinks allow exporting entries which match a given filter to Cloud Storage +buckets, BigQuery datasets, or Cloud Pub/Sub topics. + +Cloud Storage Sink +~~~~~~~~~~~~~~~~~~~~~~~ + +Make sure that the storage bucket you want to export logs too has +``cloud-logs@google.com`` as the owner. See +`Setting permissions for Cloud Storage`_. + +.. 
_Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage + +Add ``cloud-logs@google.com`` as the owner of the bucket: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START sink_bucket_permissions] + :end-before: [END sink_bucket_permissions] + :dedent: 4 + +Create a Cloud Storage sink: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START sink_storage_create] + :end-before: [END sink_storage_create] + :dedent: 4 + + +BigQuery Sink +~~~~~~~~~~~~~~~~~~ + +To export logs to BigQuery you must log into the Cloud Platform Console +and add ``cloud-logs@google.com`` to a dataset. + +See: `Setting permissions for BigQuery`_ + +.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START sink_dataset_permissions] + :end-before: [END sink_dataset_permissions] + :dedent: 4 + +Create a BigQuery sink: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START sink_bigquery_create] + :end-before: [END sink_bigquery_create] + :dedent: 4 + + +Pub/Sub Sink +~~~~~~~~~~~~~~~~~ + +To export logs to BigQuery you must log into the Cloud Platform Console +and add ``cloud-logs@google.com`` to a topic. + +See: `Setting permissions for Pub/Sub`_ + +.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START sink_topic_permissions] + :end-before: [END sink_topic_permissions] + :dedent: 4 + +Create a Cloud Pub/Sub sink: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START sink_pubsub_create] + :end-before: [END sink_pubsub_create] + :dedent: 4 + +Managing Sinks +~~~~~~~~~~~~~~ + +List all sinks for a project: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START client_list_sinks] + :end-before: [END client_list_sinks] + :dedent: 4 + +Refresh local information about a sink: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START sink_reload] + :end-before: [END sink_reload] + :dedent: 4 + +Update a sink: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START sink_update] + :end-before: [END sink_update] + :dedent: 4 + +Delete a sink: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START sink_delete] + :end-before: [END sink_delete] + :dedent: 4 + From 98a0a8c70e017549336e91ccb80bde80d5a978c7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Dec 2021 16:10:50 -0800 Subject: [PATCH 10/28] working on setup section --- docs/direct-lib-usage.rst | 58 ++++++++++++++++++++++++++++++--- samples/snippets/usage_guide.py | 40 +++++++++++++++++++++-- 2 files changed, 90 insertions(+), 8 deletions(-) diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst index 6fe341f4c..4e67a4dc8 100644 --- a/docs/direct-lib-usage.rst +++ b/docs/direct-lib-usage.rst @@ -9,16 +9,64 @@ directly. In addition to writing logs, using the library in this way allows you to manage :doc:`logs`, :doc:`sinks`, :doc:`metrics`, and other resources. 
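A compact sketch of this direct usage pattern (the log name and payload are placeholders, and default application credentials are assumed):

.. code-block:: python

    import google.cloud.logging

    # Create a client and a logger bound to a single log name.
    client = google.cloud.logging.Client()
    logger = client.logger("example_log")

    # Write entries directly through the API.
    logger.log_text("A simple text entry")
    logger.log_struct({"event": "example", "status": "ok"})

    # Read back entries for this logger.
    for entry in logger.list_entries():
        print(entry.timestamp, entry.payload)

Each call here maps onto the Client and Logger methods documented in the sections that follow.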
+Setup +---------------------------- + Creating a Client ------------------ -http vs grpc -create logger -loggers will have associated resource -can attach labels to logger +~~~~~~~~~~~~~~~~~ + +Before using the library, you must first set up a :doc:`Client`: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START usage_client_setup] + :end-before: [END usage_client_setup] + :dedent: 4 + +When setting up the :doc:`Client`, you can also disable gRPC to put the library +into HTTP mode: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START usage_http_client_setup] + :end-before: [END usage_http_client_setup] + :dedent: 4 + +Creating a Logger +~~~~~~~~~~~~~~~~~ + +After creating a :doc:`Client`, you can use it to create a :doc:`Logger`, which can be used +to read, write, and delete logs from Google Cloud: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_create] + :end-before: [END logger_create] + :dedent: 4 + +You can add custom labels initializing a :doc:`Logger`, which will +be added on to each :doc:`LogEntry` created by it: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_custom_labels] + :end-before: [END logger_custom_labels] + :dedent: 4 + +By default, the library will attempt to add a `Monitored Resource field `_ +associated with the environment the code is run on. For example, code run on +App Engine will have a `gae_app `_ +resource, while code run locally will have a `global `_ resource field. +If you want to manually set the resource field, you can do so when initializing the :doc:`Logger`: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_custom_resource] + :end-before: [END logger_custom_resource] + :dedent: 4 + Writing Log Entries ------------------- +Batch Writing Logs +------------------ + Retriving Log Entries --------------------- diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index b2a73d906..6f8e0bf04 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -100,14 +100,48 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument @snippet -def logger_usage(client, to_delete): +def client_setup(client2, to_delete): + """Client setup.""" + log_name = "client_setup_%d" % (_millis()) + + # [START usage_client_setup] + import google.cloud.logging + # if project not given, it will be inferred from the environment + client = google.cloud.logging.Client(project="my-project") + # [END usage_client_setup] + to_delete.append(client) + + # [START usage_http_client_setup] + http_client = google.cloud.logging.Client(_use_grpc=False) + # [END usage_http_client_setup] + to_delete.append(http_client) + +@snippet +def logger_usage(client_true, to_delete): """Logger usage.""" - log_name = "logger_usage_%d" % (_millis()) # [START logger_create] - logger = client.logger(log_name) + client = google.cloud.logging.Client(project="my-project") + logger = client.logger(name="log_id") + # logger will bind to logName "projects/my_project/logs/log_id" # [END logger_create] to_delete.append(logger) + log_id = "logger_usage_sd" % (_millis()) + # [START logger_custom_labels] + custom_labels = {"my-key": "my-value"} + label_logger = client.logger(log_id, labels=custom_labels) + # [END logger_custom_labels] + to_delete.append(label_logger) + # [START logger_custom_resource] + from google.cloud.logging_v2.resource import Resource + resource = 
Resource(type="global", labels={}) + global_logger = client.logger(log_id, resource=resource) + # [END logger_custom_resource] + to_delete.append(global_logger) + + logger = client_true.logger(log_id) + to_delete.append(logger) + # [START logger_log_text] logger.log_text("A simple entry") # API call From 117e619874578db0f07755a4fa56c1da7a770db2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Dec 2021 16:45:19 -0800 Subject: [PATCH 11/28] added log writing docs --- docs/direct-lib-usage.rst | 24 ++++++++++++++++++++++++ samples/snippets/usage_guide.py | 11 +++++++++++ 2 files changed, 35 insertions(+) diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst index 4e67a4dc8..8b5c30372 100644 --- a/docs/direct-lib-usage.rst +++ b/docs/direct-lib-usage.rst @@ -64,6 +64,30 @@ If you want to manually set the resource field, you can do so when initializing Writing Log Entries ------------------- +You can write logs using :meth:`Logger.log `: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_log_basic] + :end-before: [END logger_log_basic] + :dedent: 4 + +Additional `LogEntry fields `_ +can be set by passing them as keyword arguments: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_log_fields] + :end-before: [END logger_log_fields] + :dedent: 4 + +:meth:`Logger.log ` will attempt to choose the appropriate :doc:`LogEntry ` type +based on input type. If you want to be more explicit about the type used, you can use the following +Logger methods: + +- :meth:`Logger.log_text ` creates a :class:`TextEntry ` +- :meth:`Logger.log_struct ` creates a :class:`StructEntry ` +- :meth:`Logger.log_proto ` creates a :class:`ProtobufEntry ` +- :meth:`Logger.log_empty ` creates an empty :class:`LogEntry ` + Batch Writing Logs ------------------ diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index 6f8e0bf04..fbc28be06 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -142,6 +142,17 @@ def logger_usage(client_true, to_delete): logger = client_true.logger(log_id) to_delete.append(logger) + # [START logger_log_basic] + logger.log("A simple entry") # API call + # [END logger_log_basic] + + # [START logger_log_fields] + logger.log("an entry with fields set", + severity="ERROR", + insert_id="0123", + labels={'my-label':'my-value'} + ) # API call + # [END logger_log_fields] # [START logger_log_text] logger.log_text("A simple entry") # API call From 913a08cc40cd2c4087c3666a5907a6473937b951 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Dec 2021 16:45:28 -0800 Subject: [PATCH 12/28] removed references to POST requests --- google/cloud/logging_v2/logger.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py index 542e4d629..02ecb6905 100644 --- a/google/cloud/logging_v2/logger.py +++ b/google/cloud/logging_v2/logger.py @@ -158,7 +158,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw): client.logging_api.write_entries([api_repr]) def log_empty(self, *, client=None, **kw): - """Log an empty message via a POST request + """Log an empty message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -173,7 +173,7 @@ def log_empty(self, *, client=None, **kw): self._do_log(client, LogEntry, **kw) def log_text(self, text, *, client=None, **kw): - """Log a text message via a POST request + """Log a text message See 
https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -189,7 +189,7 @@ def log_text(self, text, *, client=None, **kw): self._do_log(client, TextEntry, text, **kw) def log_struct(self, info, *, client=None, **kw): - """Log a structured message via a POST request + """Log a dictionary message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -209,7 +209,7 @@ def log_struct(self, info, *, client=None, **kw): self._do_log(client, StructEntry, info, **kw) def log_proto(self, message, *, client=None, **kw): - """Log a protobuf message via a POST request + """Log a protobuf message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list @@ -226,8 +226,7 @@ def log_proto(self, message, *, client=None, **kw): self._do_log(client, ProtobufEntry, message, **kw) def log(self, message=None, *, client=None, **kw): - """Log an arbitrary message via a POST request. - Type will be inferred based on the input message. + """Log an arbitrary message. Type will be inferred based on the input. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list From 29ea7d44be88b817d84c3f7c2c0faf1a559717a7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Dec 2021 13:12:10 -0800 Subject: [PATCH 13/28] added batching info --- docs/direct-lib-usage.rst | 28 +++++++++++++++++++++++----- docs/logger.rst | 1 + samples/snippets/usage_guide.py | 14 ++++++++++++++ 3 files changed, 38 insertions(+), 5 deletions(-) diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst index 8b5c30372..ab5f05ffe 100644 --- a/docs/direct-lib-usage.rst +++ b/docs/direct-lib-usage.rst @@ -91,6 +91,24 @@ Logger methods: Batch Writing Logs ------------------ +By default, each log write will take place in an individual network request, which may be inefficient at scale. +Instead, you can use a :class:`Batch `: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_log_batch] + :end-before: [END logger_log_batch] + :dedent: 4 + +In this case, logs are batched together, and only sent out when :func:`batch.commit ` is called. +To simplify things, you can also use :class:`Batch ` as a context manager: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_log_batch_context] + :end-before: [END logger_log_batch_context] + :dedent: 4 + +Here, the logs will be automatically committed once the code exits the "with" block. + Retriving Log Entries --------------------- @@ -102,14 +120,14 @@ Fetch entries for the default project. :dedent: 4 Entries returned by -:meth:`Client.list_entries ` +:meth:`Client.list_entries ` or -:meth:`Logger.list_entries ` +:meth:`Logger.list_entries ` will be instances of one of the following classes: -- :class:`~google.cloud.logging.entries.TextEntry` -- :class:`~google.cloud.logging.entries.StructEntry` -- :class:`~google.cloud.logging.entries.ProtobufEntry` +- :class:`~google.cloud.logging_v2.entries.TextEntry` +- :class:`~google.cloud.logging_v2.entries.StructEntry` +- :class:`~google.cloud.logging_v2.entries.ProtobufEntry` Filter entries retrieved using the `Advanced Logs Filters`_ syntax diff --git a/docs/logger.rst b/docs/logger.rst index 8aca18199..13f8e0d7e 100644 --- a/docs/logger.rst +++ b/docs/logger.rst @@ -3,4 +3,5 @@ Logger .. 
automodule:: google.cloud.logging_v2.logger :members: + :undoc-members: :show-inheritance: diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index fbc28be06..b974cb62c 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -180,6 +180,20 @@ def logger_usage(client_true, to_delete): ) # [END logger_log_resource_text] + # [START logger_log_batch] + batch = logger.batch() + batch.log("first log") + batch.log("second log") + batch.commit() + # [END logger_log_batch] + + # [START logger_log_batch_context] + with logger.batch() as batch: + batch.log("first log") + # do work + batch.log("last log") + # [END logger_log_batch_context] + # [START logger_list_entries] from google.cloud.logging import DESCENDING From e822bd088dfe5929ec38cf4bbbc5a23316b68266 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Dec 2021 14:27:30 -0800 Subject: [PATCH 14/28] added transport docs --- docs/std-lib-integration.rst | 38 +++++++++++++++++++++++++++++------- docs/transport.rst | 29 +++++++++++++++++++++------ docs/transports-base.rst | 6 ------ docs/transports-sync.rst | 6 ------ docs/transports-thread.rst | 7 ------- 5 files changed, 54 insertions(+), 32 deletions(-) delete mode 100644 docs/transports-base.rst delete mode 100644 docs/transports-sync.rst delete mode 100644 docs/transports-thread.rst diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst index d2007f240..69c84608d 100644 --- a/docs/std-lib-integration.rst +++ b/docs/std-lib-integration.rst @@ -10,7 +10,7 @@ Automatic Configuration ----------------------- To integrate :mod:`google-cloud-logging` with the standard :mod:`logging` module, -simply call :meth:`setup_logging` on a :class:`~google.cloud.logging.client.Client` instance. +simply call :meth:`~google.cloud.logging_v2.client.Client.setup_logging` on a :class:`~google.cloud.logging_v2.client.Client` instance. .. literalinclude:: ../samples/snippets/handler.py :start-after: [START logging_handler_setup] @@ -23,9 +23,11 @@ code is running on. For more information, see the `Google Cloud Logging document Manual Handler Configuration ----------------------------- +.. _Manual Handler: + Automatic Configuration will automatically determine the appropriate handler for the environment. If you would rather choose the handler yourself, you can construct an instance manually and pass it in -as an argument to :meth:`setup_logging`: +as an argument to :meth:`~google.cloud.logging_v2.client.Client.setup_logging`: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START setup_logging] @@ -34,16 +36,15 @@ as an argument to :meth:`setup_logging`: There are two supported handler classes to choose from: -- :class:`~google.cloud.logging.handlers.CloudLoggingHandler`: +- :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler`: - Sends logs directly to Cloud Logging over the network (gRPC or HTTP) + - Logs are transmitted according to a :ref:`Transport ` class - This is the default handler on most environments, including local development -- :class:`~google.cloud.logging.handlers.StructuredLogHandler`: +- :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler`: - Outputs logs as `structured JSON `_ to standard out, to be read and parsed by a GCP logging agent - This is the default handler on Kubernetes Engine, Cloud Functions and Cloud Run -.. 
_JSON: - Using the Standard Library --------------------------- @@ -60,6 +61,8 @@ For more information on using the Python :mod:`logging` standard library, see th Logging JSON Payloads ---------------------- +.. _JSON: + Although the Python :mod:`logging` standard library `expects all logs to be strings `_, Google Cloud Logging allows `JSON payload data `_. You can write JSON logs using the standard library integration in one of the following ways: @@ -78,11 +81,12 @@ You can write JSON logs using the standard library integration in one of the fol :end-before: [END logging_json_dumps] :dedent: 4 -.. _Autodetection: Automatic Metadata Detection ---------------------------- +.. _Autodetection: + The Google Cloud Logging library will attempt to detect and attach additional `LogEntry fields `_ whenever possible. The following fields are currently supported: @@ -117,4 +121,24 @@ All of the `LogEntry fields ` can also be set manually through the `extra` argument. Fields sent explicitly through the `extra` argument will override any :ref:`automatically detected` fields. +CloudLoggingHandler Transports +------------------------------ + +.. _Transports: + +:doc:`Transport` classes define how the :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler` +transports logs over the network to GCP. There are currently two Transport implementations +(defined as subclasses of :class:`transports.base.Transport `): +- :class:`~google.cloud.logging_v2.handlers.transports.background_thread.BackgroundThreadTransport`: + - sends logs in batches, using a background thread + - the default Transport class +- :class:`~google.cloud.logging_v2.handlers.transports.sync.SyncTransport`: + - sends each log synchronously in a single API call + +You can set a Transport class by passing it as an argument when +:ref:`initializing CloudLoggingHandler manually.` + +.. note:: + :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler` + prints logs as formatted JSON to standard output, and does not use a Transport class. diff --git a/docs/transport.rst b/docs/transport.rst index 2a1e1a308..9f4430103 100644 --- a/docs/transport.rst +++ b/docs/transport.rst @@ -1,8 +1,25 @@ -Transport +Transports ---------------- -.. toctree:: - :maxdepth: 2 - transports-sync - transports-thread - transports-base +These classes define how the :class:`CloudLoggingHandler ` +transport logs into GCP. More information in the :ref:`User Guide` + +Base Transport +~~~~~~~~~~~~~~ +.. automodule:: google.cloud.logging_v2.handlers.transports.base + :members: + :show-inheritance: + +Background Thread Transport +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.logging_v2.handlers.transports.background_thread + :members: + :show-inheritance: + +Synchronous Transport +~~~~~~~~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.logging_v2.handlers.transports.sync + :members: + :show-inheritance: diff --git a/docs/transports-base.rst b/docs/transports-base.rst deleted file mode 100644 index b28fb5ba6..000000000 --- a/docs/transports-base.rst +++ /dev/null @@ -1,6 +0,0 @@ -Python Logging Handler Sync Transport -====================================== - -.. automodule:: google.cloud.logging_v2.handlers.transports.base - :members: - :show-inheritance: diff --git a/docs/transports-sync.rst b/docs/transports-sync.rst deleted file mode 100644 index 32e6401cb..000000000 --- a/docs/transports-sync.rst +++ /dev/null @@ -1,6 +0,0 @@ -Python Logging Handler Sync Transport -====================================== - -.. 
automodule:: google.cloud.logging_v2.handlers.transports.sync - :members: - :show-inheritance: diff --git a/docs/transports-thread.rst b/docs/transports-thread.rst deleted file mode 100644 index 2899e6c48..000000000 --- a/docs/transports-thread.rst +++ /dev/null @@ -1,7 +0,0 @@ -Python Logging Handler Threaded Transport -========================================= - - -.. automodule:: google.cloud.logging_v2.handlers.transports.background_thread - :members: - :show-inheritance: From 8bce609769987031be81677b29a036a026ac4752 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Dec 2021 14:55:39 -0800 Subject: [PATCH 15/28] added grpc vs http docs --- docs/direct-lib-usage.rst | 18 ++++++++++-------- docs/grpc-vs-http.rst | 14 ++++++++++++++ docs/std-lib-integration.rst | 4 +++- docs/usage.rst | 1 + 4 files changed, 28 insertions(+), 9 deletions(-) create mode 100644 docs/grpc-vs-http.rst diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst index ab5f05ffe..15befd5b4 100644 --- a/docs/direct-lib-usage.rst +++ b/docs/direct-lib-usage.rst @@ -15,6 +15,8 @@ Setup Creating a Client ~~~~~~~~~~~~~~~~~ +.. _Creating Client: + Before using the library, you must first set up a :doc:`Client`: .. literalinclude:: ../samples/snippets/usage_guide.py @@ -22,8 +24,8 @@ Before using the library, you must first set up a :doc:`Client`: :end-before: [END usage_client_setup] :dedent: 4 -When setting up the :doc:`Client`, you can also disable gRPC to put the library -into HTTP mode: +When setting up the :doc:`Client`, you can also :doc:`disable gRPC` +to put the library into HTTP mode: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START usage_http_client_setup] @@ -83,16 +85,16 @@ can be set by passing them as keyword arguments: based on input type. If you want to be more explicit about the type used, you can use the following Logger methods: -- :meth:`Logger.log_text ` creates a :class:`TextEntry ` -- :meth:`Logger.log_struct ` creates a :class:`StructEntry ` -- :meth:`Logger.log_proto ` creates a :class:`ProtobufEntry ` -- :meth:`Logger.log_empty ` creates an empty :class:`LogEntry ` +- :meth:`Logger.log_text ` creates a :class:`~google.cloud.logging_v2.entries.TextEntry` +- :meth:`Logger.log_struct ` creates a :class:`~google.cloud.logging_v2.entries.StructEntry` +- :meth:`Logger.log_proto ` creates a :class:`~google.cloud.logging_v2.entries.ProtobufEntry` +- :meth:`Logger.log_empty ` creates an empty :class:`~google.cloud.logging_v2.entries.LogEntry` Batch Writing Logs ------------------ By default, each log write will take place in an individual network request, which may be inefficient at scale. -Instead, you can use a :class:`Batch `: +Instead, you can use a :class:`~google.cloud.logging_v2.logger.Batch`: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_batch] @@ -100,7 +102,7 @@ Instead, you can use a :class:`Batch `: :dedent: 4 In this case, logs are batched together, and only sent out when :func:`batch.commit ` is called. -To simplify things, you can also use :class:`Batch ` as a context manager: +To simplify things, you can also use :class:`~google.cloud.logging_v2.logger.Batch` as a context manager: .. 
literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_batch_context] diff --git a/docs/grpc-vs-http.rst b/docs/grpc-vs-http.rst new file mode 100644 index 000000000..e6891420c --- /dev/null +++ b/docs/grpc-vs-http.rst @@ -0,0 +1,14 @@ +gRPC vs HTTP +==================== + +:mod:`google-cloud-logging` supports two different protocols for sending logs over the network: +gRPC and HTTP. Both implementations conform to the same API, and should be +invisible to the end user. + +gRPC is enabled by default. You can switch to HTTP mode by either: + +- setting the `DISABLE_GRPC` environment variable to `TRUE` +- or, passing `_use_grpc=False` when :ref:`initializing a Client` + +We recommend using gRPC whenever possible, but you may want to try the HTTP +implementation if you have network issues when using gRPC. diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst index 69c84608d..feb6b3479 100644 --- a/docs/std-lib-integration.rst +++ b/docs/std-lib-integration.rst @@ -37,7 +37,7 @@ as an argument to :meth:`~google.cloud.logging_v2.client.Client.setup_logging`: There are two supported handler classes to choose from: - :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler`: - - Sends logs directly to Cloud Logging over the network (gRPC or HTTP) + - Sends logs directly to Cloud Logging over the network (:doc:`gRPC or HTTP`) - Logs are transmitted according to a :ref:`Transport ` class - This is the default handler on most environments, including local development - :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler`: @@ -139,6 +139,8 @@ transports logs over the network to GCP. There are currently two Transport imple You can set a Transport class by passing it as an argument when :ref:`initializing CloudLoggingHandler manually.` +Both options can be used over either :doc:`gRPC or HTTP`. + .. note:: :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler` prints logs as formatted JSON to standard output, and does not use a Transport class. diff --git a/docs/usage.rst b/docs/usage.rst index 32c51a089..929ee9cef 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -5,4 +5,5 @@ Usage Guide std-lib-integration direct-lib-usage + grpc-vs-http From eac5e2db83f83b24962524fd9e0d7afa09e2785b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 14 Dec 2021 15:26:53 -0800 Subject: [PATCH 16/28] added upgrading notes --- UPGRADING.md | 131 ++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 130 insertions(+), 1 deletion(-) diff --git a/UPGRADING.md b/UPGRADING.md index 249336d80..4d9f9e85f 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -1,5 +1,134 @@ # 3.0.0 Migration Guide +The v3.0.0 release of `google-cloud-logging` is focused on improving usability of the library, +particularly on newer serverless environments. + +If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-logging/issues). 
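As a quick orientation before the detailed changes below (a minimal sketch, not taken from the official samples; the log message is a placeholder): a typical v3 setup creates a `Client`, hooks it into the standard `logging` module, and then logs as usual.

```py
import logging
import google.cloud.logging

# gRPC is used by default; pass _use_grpc=False to send logs over HTTP instead
client = google.cloud.logging.Client()
client.setup_logging()

logging.info("hello from google-cloud-logging v3")
```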
+ +## Primary Changes + +### Handler deprecations ([#310](https://github.com/googleapis/python-logging/pull/310)) + +> **WARNING**: Breaking change + +Prior to v3.0.0, there were three `Handler` classes used for the Python logging standard library integration: + +- [`AppEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/app_engine.py) +- [`ContainerEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/container_engine.py) +- [`CloudLoggingHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/handlers.py) + +Google Cloud has grown, and adding a new handler class for each new product does not scale. +Recently, we have changed to support two more generic `Handler` classes instead: + +- [`CloudLoggingHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/handlers.py) + - sends logs over the network (using gRPC or HTTP API calls) + - replaces `AppEngineHandler` +- [`StructuredLogHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/structured_log.py) + - exports logs in JSON format through standard out, to be parsed by an agent + - replaces `ContainerEngineHandler` + +As of v3.0.0, `AppEngineHandler` and `ContainerEngineHandler` have been marked as deprecated and will not be updated. +They may be removed from the library in a future update. + +### Full JSON log support in standard library integration ([#316](https://github.com/googleapis/python-logging/pull/316), [#339](https://github.com/googleapis/python-logging/pull/339), [#447](https://github.com/googleapis/python-logging/pull/447)) + +You can now log JSON data using the Python `logging` standard library integration. +The library supports two different methods: + +1. Using the `json_fields` `extra` argument: + +```py +import logging + +data_dict = {"hello": "world"} +logging.info("message field", extra={"json_fields": data_dict}) +``` + +2. Logging a JSON-parsable string: + +```py +import logging +import json + +data_dict = {"hello": "world"} +logging.info(json.dumps(data_dict)) +``` + +### Metadata autodetection ([#315](https://github.com/googleapis/python-logging/pull/315)) + +> **WARNING**: Breaking change + +Logs emitted by the library must be associated with a [montored-resource type](https://cloud.google.com/monitoring/api/resources), +indicating the compute environment the log originated from. Previously, the logs would default to +["global"](https://cloud.google.com/monitoring/api/resources#tag_global) when left unspecified. +Going forward, the library will attempt to determine the monitored-resource automatically if not explicitly set. 
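If you need logs tied to a specific resource rather than the detected one, you can still set it explicitly. A minimal sketch, using the `Resource` type and `resource=` argument shown elsewhere in this library's docs (the log name and payload are placeholders):

```py
import google.cloud.logging
from google.cloud.logging_v2.resource import Resource

client = google.cloud.logging.Client()
logger = client.logger("my_log_id")

# override autodetection by attaching a monitored resource explicitly
logger.log_text("hello world", resource=Resource(type="global", labels={}))
```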
+ +### New `Logger.log` method ([#316](https://github.com/googleapis/python-logging/pull/316)) + +Previously, the Logger class had four methods for sending logs of different types: + +```py +logger.log_text("hello world") +logger.log_struct({"hello": "world"}) +logger.log_proto(proto_message) +logger.log_empty() +``` + +In v3.0.0, the library adds a generic `log()` method that will attempt to infer and log any type: + +```py +logger.log("hello world") +``` + +### More permissive arguments ([#422](https://github.com/googleapis/python-logging/pull/422)) + +> **WARNING**: Breaking change + +In v3.0.0, the library will be more forgiving if inputs are given in a different format than expected + +```py +# lowercase severity strings will be accepted +logger.log("hello world", severity="warning") +``` + +```py +# a severity will be pulled out of the JSON payload if not otherwise set +logger.log({"hello": "world", "severity":"warning"}) +``` + +```py +# resource data can be passed as a dict instead of a Resource object +logger.log("hello world", resource={"type":"global", "labels":[]}) +``` + +### Allow reading from non-project resources ([#444](https://github.com/googleapis/python-logging/pull/444)) + +Prior to v3.0.0, there was a crashing bug when attempting to read logs from non-project resources: + +- `organizations/[ORGANIZATION_ID]/logs/[LOG_ID]` +- `billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]` +- `folders/[FOLDER_ID]/logs/[LOG_ID]` + +The v3.0.0 update fixes this issue. + +### Internal Gapic and HTTP implementation changes ([#375](https://github.com/googleapis/python-logging/pull/375)) + +> **WARNING**: Breaking change + +The library supports sending logs using two different network protocols: gRPC and HTTP. Previously, there was an \ +inconsistency in the implementations, resulting in unexpected behaviour when in HTTP mode. + +As part of these changes, we introduced a new `max_size` argument to `list_entries` calls, which can be used to determine +how many results should be returned: + +```py +from google.cloud import logging_v2 + +client = logging_v2.Client() +client.list_entries(max_size=5) +``` + +--- # 2.0.0 Migration Guide @@ -337,4 +466,4 @@ The following resource name helpers have been renamed. **`ConfigServiceV2Client`** * `sink_path` -> `log_sink_path` -* `exclusion_path` -> `log_exclusion_path` \ No newline at end of file +* `exclusion_path` -> `log_exclusion_path` From 01a0e3351a2e9659435bf8d2410055c87a8fced2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Jan 2022 14:35:54 -0800 Subject: [PATCH 17/28] improved manual handler section --- docs/std-lib-integration.rst | 6 +++--- samples/snippets/usage_guide.py | 6 ++---- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst index feb6b3479..25b1e1105 100644 --- a/docs/std-lib-integration.rst +++ b/docs/std-lib-integration.rst @@ -27,11 +27,11 @@ Manual Handler Configuration Automatic Configuration will automatically determine the appropriate handler for the environment. If you would rather choose the handler yourself, you can construct an instance manually and pass it in -as an argument to :meth:`~google.cloud.logging_v2.client.Client.setup_logging`: +as an argument to :meth:`~google.cloud.logging_v2.handlers.setup_logging`: .. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START setup_logging] - :end-before: [END setup_logging] + :start-after: [START create_cloud_handler] + :end-before: [END create_cloud_handler] :dedent: 4 There are two supported handler classes to choose from: diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index b974cb62c..0775c2845 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -416,12 +416,10 @@ def logging_handler(client): # [START create_cloud_handler] from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging_v2.handlers import setup_logging handler = CloudLoggingHandler(client) - cloud_logger = logging.getLogger("cloudLogger") - cloud_logger.setLevel(logging.INFO) - cloud_logger.addHandler(handler) - cloud_logger.error("bad news") + setup_logging(handler) # [END create_cloud_handler] # [START create_named_handler] From 05269556111ffb7acb00e9c43e689c57104f3105 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Jan 2022 16:14:48 -0800 Subject: [PATCH 18/28] fixed lint issues --- samples/snippets/usage_guide.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index 0775c2845..d7020603b 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -102,10 +102,10 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument @snippet def client_setup(client2, to_delete): """Client setup.""" - log_name = "client_setup_%d" % (_millis()) # [START usage_client_setup] import google.cloud.logging + # if project not given, it will be inferred from the environment client = google.cloud.logging.Client(project="my-project") # [END usage_client_setup] @@ -116,9 +116,11 @@ def client_setup(client2, to_delete): # [END usage_http_client_setup] to_delete.append(http_client) + @snippet def logger_usage(client_true, to_delete): """Logger usage.""" + import google.cloud.logging # [START logger_create] client = google.cloud.logging.Client(project="my-project") @@ -134,6 +136,7 @@ def logger_usage(client_true, to_delete): to_delete.append(label_logger) # [START logger_custom_resource] from google.cloud.logging_v2.resource import Resource + resource = Resource(type="global", labels={}) global_logger = client.logger(log_id, resource=resource) # [END logger_custom_resource] @@ -147,10 +150,11 @@ def logger_usage(client_true, to_delete): # [END logger_log_basic] # [START logger_log_fields] - logger.log("an entry with fields set", + logger.log( + "an entry with fields set", severity="ERROR", insert_id="0123", - labels={'my-label':'my-value'} + labels={"my-label": "my-value"}, ) # API call # [END logger_log_fields] @@ -426,6 +430,7 @@ def logging_handler(client): handler = CloudLoggingHandler(client, name="mycustomlog") # [END create_named_handler] + @snippet def logging_json(client): # [START logging_json_dumps] @@ -443,18 +448,18 @@ def logging_json(client): logging.info("message field", extra={"json_fields": data_dict}) # [END logging_extra_json_fields] + @snippet def using_extras(client): + import logging # [START logging_extras] my_labels = {"foo": "bar"} my_http = {"requestUrl": "localhost"} my_trace = "01234" - logging.info("hello", extra={ - "labels": my_labels, - "http_request": my_http, - "trace": my_trace - }) + logging.info( + "hello", extra={"labels": my_labels, "http_request": my_http, "trace": my_trace} + ) # [END 
logging_extras] From a91b5f45c2838a727591da75f7a37c013945a03d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 24 Jan 2022 12:52:32 -0800 Subject: [PATCH 19/28] fixed snippet issue --- samples/snippets/usage_guide.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index d7020603b..30d7310c9 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -128,7 +128,7 @@ def logger_usage(client_true, to_delete): # logger will bind to logName "projects/my_project/logs/log_id" # [END logger_create] to_delete.append(logger) - log_id = "logger_usage_sd" % (_millis()) + log_id = "logger_usage_%d" % (_millis()) # [START logger_custom_labels] custom_labels = {"my-key": "my-value"} label_logger = client.logger(log_id, labels=custom_labels) @@ -452,6 +452,7 @@ def logging_json(client): @snippet def using_extras(client): import logging + # [START logging_extras] my_labels = {"foo": "bar"} my_http = {"requestUrl": "localhost"} From 0ad37c812d47a4049623cbf9820f71c82ab4ab66 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 24 Jan 2022 12:54:12 -0800 Subject: [PATCH 20/28] addressed Drew's comments --- docs/direct-lib-usage.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst index 15befd5b4..b466bd0ca 100644 --- a/docs/direct-lib-usage.rst +++ b/docs/direct-lib-usage.rst @@ -228,13 +228,13 @@ buckets, BigQuery datasets, or Cloud Pub/Sub topics. Cloud Storage Sink ~~~~~~~~~~~~~~~~~~~~~~~ -Make sure that the storage bucket you want to export logs too has -``cloud-logs@google.com`` as the owner. See +Make sure that the storage bucket you want to export logs to has +``cloud-logs@google.com`` as an owner. See `Setting permissions for Cloud Storage`_. .. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage -Add ``cloud-logs@google.com`` as the owner of the bucket: +Add ``cloud-logs@google.com`` as an owner of the bucket: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_bucket_permissions] From fd314b919ffc96bd7423e16d9b87d85c71eb2ffa Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 25 Jan 2022 15:58:54 -0800 Subject: [PATCH 21/28] incorporated Pamela's feedback to migration guide --- UPGRADING.md | 67 +++++++++++++++++++++++++--------------------------- 1 file changed, 32 insertions(+), 35 deletions(-) diff --git a/UPGRADING.md b/UPGRADING.md index 4d9f9e85f..64ecea1f8 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -1,9 +1,9 @@ # 3.0.0 Migration Guide -The v3.0.0 release of `google-cloud-logging` is focused on improving usability of the library, -particularly on newer serverless environments. +The v3.0.0 release of `google-cloud-logging` improves usability of the library, +particularly on serverless environments. -If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-logging/issues). +If you experience technical issues or have questions, please file an [issue](https://github.com/googleapis/python-logging/issues). 
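For readers migrating handler code, a hedged before/after sketch (the imports mirror the handler docs in this repo; your handler choice may differ depending on environment):

```py
import google.cloud.logging
from google.cloud.logging.handlers import CloudLoggingHandler
from google.cloud.logging_v2.handlers import setup_logging

client = google.cloud.logging.Client()

# CloudLoggingHandler replaces AppEngineHandler;
# StructuredLogHandler similarly replaces ContainerEngineHandler
handler = CloudLoggingHandler(client)
setup_logging(handler)
```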
## Primary Changes @@ -11,31 +11,25 @@ If you experience issues or have questions, please file an [issue](https://githu > **WARNING**: Breaking change -Prior to v3.0.0, there were three `Handler` classes used for the Python logging standard library integration: +We have changed our design policy to support more generic `Handler` classes instead of product-specific classes: -- [`AppEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/app_engine.py) -- [`ContainerEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/container_engine.py) - [`CloudLoggingHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/handlers.py) - -Google Cloud has grown, and adding a new handler class for each new product does not scale. -Recently, we have changed to support two more generic `Handler` classes instead: - -- [`CloudLoggingHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/handlers.py) - - sends logs over the network (using gRPC or HTTP API calls) - - replaces `AppEngineHandler` + - Sends logs over the network (using gRPC or HTTP API calls) + - Replaces `AppEngineHandler` - [`StructuredLogHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/structured_log.py) - - exports logs in JSON format through standard out, to be parsed by an agent - - replaces `ContainerEngineHandler` + - Exports logs in JSON format through standard out, to be parsed by an agent + - Replaces `ContainerEngineHandler` -As of v3.0.0, `AppEngineHandler` and `ContainerEngineHandler` have been marked as deprecated and will not be updated. -They may be removed from the library in a future update. +As of v3.0.0, [`AppEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/app_engine.py) +and [`ContainerEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/container_engine.py) +are deprecated and won't be updated. These handlers might be removed from the library in a future update. ### Full JSON log support in standard library integration ([#316](https://github.com/googleapis/python-logging/pull/316), [#339](https://github.com/googleapis/python-logging/pull/339), [#447](https://github.com/googleapis/python-logging/pull/447)) You can now log JSON data using the Python `logging` standard library integration. -The library supports two different methods: +To log JSON data, do one of the following: -1. Using the `json_fields` `extra` argument: +1. Use `json_fields` `extra` argument: ```py import logging @@ -44,7 +38,7 @@ data_dict = {"hello": "world"} logging.info("message field", extra={"json_fields": data_dict}) ``` -2. Logging a JSON-parsable string: +2. Log a JSON-parsable string: ```py import logging @@ -58,33 +52,34 @@ logging.info(json.dumps(data_dict)) > **WARNING**: Breaking change -Logs emitted by the library must be associated with a [montored-resource type](https://cloud.google.com/monitoring/api/resources), -indicating the compute environment the log originated from. Previously, the logs would default to +Logs emitted by the library must be associated with a [montored-resource type](https://cloud.google.com/monitoring/api/resources) +that indicates the compute environment the log originated from. 
Prior to v3.0.0, the logs would default to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global) when left unspecified. -Going forward, the library will attempt to determine the monitored-resource automatically if not explicitly set. +With v3.0.0, the library will attempt to determine the monitored-resource automatically if not explicitly set, +and only default to "global" when the environment can't be determined. ### New `Logger.log` method ([#316](https://github.com/googleapis/python-logging/pull/316)) -Previously, the Logger class had four methods for sending logs of different types: +In v3.0.0, the library adds a generic `log()` method that will attempt to infer and log any type: ```py -logger.log_text("hello world") -logger.log_struct({"hello": "world"}) -logger.log_proto(proto_message) -logger.log_empty() +logger.log("hello world") ``` -In v3.0.0, the library adds a generic `log()` method that will attempt to infer and log any type: +v3.0.0 also supports the Logging class methods from previous releases: ```py -logger.log("hello world") +logger.log_text("hello world") +logger.log_struct({"hello": "world"}) +logger.log_proto(proto_message) +logger.log_empty() ``` ### More permissive arguments ([#422](https://github.com/googleapis/python-logging/pull/422)) > **WARNING**: Breaking change -In v3.0.0, the library will be more forgiving if inputs are given in a different format than expected +In v3.0.0, the library supports a wider variety of input formats: ```py # lowercase severity strings will be accepted @@ -115,11 +110,13 @@ The v3.0.0 update fixes this issue. > **WARNING**: Breaking change -The library supports sending logs using two different network protocols: gRPC and HTTP. Previously, there was an \ -inconsistency in the implementations, resulting in unexpected behaviour when in HTTP mode. +The library supports sending logs using two network protocols: gRPC and HTTP. Prior to v3.0.0, there was an +inconsistency in the implementations, resulting in unexpected behavior when in HTTP mode. + +### Max_size argument when listing entries ([#375](https://github.com/googleapis/python-logging/pull/375)) -As part of these changes, we introduced a new `max_size` argument to `list_entries` calls, which can be used to determine -how many results should be returned: +v3.0.0 introduces a new `max_size` argument to `list_entries` calls, which can be used to specify an upper bound +on how many logs should be returned on the call: ```py from google.cloud import logging_v2 From 95e5890c04ca2f3172bf60ee2b7dd24e61958d99 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 25 Jan 2022 16:06:33 -0800 Subject: [PATCH 22/28] updated formatting --- UPGRADING.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/UPGRADING.md b/UPGRADING.md index 64ecea1f8..cbffc311d 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -53,10 +53,11 @@ logging.info(json.dumps(data_dict)) > **WARNING**: Breaking change Logs emitted by the library must be associated with a [montored-resource type](https://cloud.google.com/monitoring/api/resources) -that indicates the compute environment the log originated from. Prior to v3.0.0, the logs would default to +that indicates the compute environment the log originated from. +- Prior to v3.0.0, the logs would default to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global) when left unspecified. 
-With v3.0.0, the library will attempt to determine the monitored-resource automatically if not explicitly set, -and only default to "global" when the environment can't be determined. +- With v3.0.0, the library will attempt to determine the monitored-resource automatically if not explicitly set, +and will default to "global" only when the environment can't be determined. ### New `Logger.log` method ([#316](https://github.com/googleapis/python-logging/pull/316)) From 51a7cd5a1cc26eb24bfcaea6d03f3f0a673e59a1 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 25 Jan 2022 16:16:07 -0800 Subject: [PATCH 23/28] updated wording --- UPGRADING.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/UPGRADING.md b/UPGRADING.md index cbffc311d..bdc821662 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -54,10 +54,8 @@ logging.info(json.dumps(data_dict)) Logs emitted by the library must be associated with a [montored-resource type](https://cloud.google.com/monitoring/api/resources) that indicates the compute environment the log originated from. -- Prior to v3.0.0, the logs would default to -["global"](https://cloud.google.com/monitoring/api/resources#tag_global) when left unspecified. -- With v3.0.0, the library will attempt to determine the monitored-resource automatically if not explicitly set, -and will default to "global" only when the environment can't be determined. +- Prior to 3.0.0, when a log doesn't specify a monitored resource, that field is set to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global). +- With 3.0.0, when a log doesn't specify a monitored resource, the library attempts to identify the resource. If a resource can't be detected, the field will still default to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global). 
### New `Logger.log` method ([#316](https://github.com/googleapis/python-logging/pull/316)) From 69209b1b1e05690df03a87d755215a8e85dde812 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 25 Jan 2022 16:33:46 -0800 Subject: [PATCH 24/28] changed wording --- UPGRADING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/UPGRADING.md b/UPGRADING.md index bdc821662..e882a497b 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -115,7 +115,7 @@ inconsistency in the implementations, resulting in unexpected behavior when in H ### Max_size argument when listing entries ([#375](https://github.com/googleapis/python-logging/pull/375)) v3.0.0 introduces a new `max_size` argument to `list_entries` calls, which can be used to specify an upper bound -on how many logs should be returned on the call: +on how many logs should be returned: ```py from google.cloud import logging_v2 From 741dfac4a11ee48731a59c56f90f93171ec5aead Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 25 Jan 2022 16:51:28 -0800 Subject: [PATCH 25/28] addressed Pamela's comments on the direct library usage document --- docs/direct-lib-usage.rst | 130 +++++++++++++++++++------------------- 1 file changed, 66 insertions(+), 64 deletions(-) diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst index b466bd0ca..11cf39e9c 100644 --- a/docs/direct-lib-usage.rst +++ b/docs/direct-lib-usage.rst @@ -1,61 +1,63 @@ Direct Library Usage ==================== -Although the recommended way of using the :mod:`google-cloud-logging` library -is to integrate it with the :doc:`Python logging standard library`, -you can also use the library to interact with the Googel Cloud Logging API +We recommend that you use the :mod:`google-cloud-logging` library +by integrating it with the :doc:`Python logging standard library`; +However, you can also use the library to interact with the Google Cloud Logging API directly. -In addition to writing logs, using the library in this way allows you to manage +In addition to writing logs, you can use the library to manage :doc:`logs`, :doc:`sinks`, :doc:`metrics`, and other resources. Setup ---------------------------- -Creating a Client +Create a Client ~~~~~~~~~~~~~~~~~ .. _Creating Client: -Before using the library, you must first set up a :doc:`Client`: +You must set up a :doc:`Client` to use the library: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START usage_client_setup] :end-before: [END usage_client_setup] :dedent: 4 -When setting up the :doc:`Client`, you can also :doc:`disable gRPC` -to put the library into HTTP mode: +To use HTTP, :doc:`disable gRPC` when you set up the :doc:`Client`: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START usage_http_client_setup] :end-before: [END usage_http_client_setup] :dedent: 4 -Creating a Logger +Create a Logger ~~~~~~~~~~~~~~~~~ -After creating a :doc:`Client`, you can use it to create a :doc:`Logger`, which can be used -to read, write, and delete logs from Google Cloud: +Loggers read, write, and delete logs from Google Cloud. + +You use your :doc:`Client` to create a :doc:`Logger`. .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_create] :end-before: [END logger_create] :dedent: 4 -You can add custom labels initializing a :doc:`Logger`, which will -be added on to each :doc:`LogEntry` created by it: +To add custom labels, do so when you initialize a :doc:`Logger`. 
+When you add custom labels, these labels are added to each +:doc:`LogEntry` written by the :doc:`Logger`: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_custom_labels] :end-before: [END logger_custom_labels] :dedent: 4 -By default, the library will attempt to add a `Monitored Resource field `_ +By default, the library adds a `Monitored Resource field `_ associated with the environment the code is run on. For example, code run on App Engine will have a `gae_app `_ resource, while code run locally will have a `global `_ resource field. -If you want to manually set the resource field, you can do so when initializing the :doc:`Logger`: + +To manually set the resource field, do so when you initialize the :doc:`Logger`: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_custom_resource] @@ -63,45 +65,45 @@ If you want to manually set the resource field, you can do so when initializing :dedent: 4 -Writing Log Entries +Write Log Entries ------------------- -You can write logs using :meth:`Logger.log `: +You write logs by using :meth:`Logger.log `: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_basic] :end-before: [END logger_log_basic] :dedent: 4 -Additional `LogEntry fields `_ -can be set by passing them as keyword arguments: +You can add `LogEntry fields `_ +by passing them as keyword arguments: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_fields] :end-before: [END logger_log_fields] :dedent: 4 -:meth:`Logger.log ` will attempt to choose the appropriate :doc:`LogEntry ` type -based on input type. If you want to be more explicit about the type used, you can use the following -Logger methods: +:meth:`Logger.log ` chooses the appropriate :doc:`LogEntry ` type +based on input type. To specify type, you can use the following Logger methods: - :meth:`Logger.log_text ` creates a :class:`~google.cloud.logging_v2.entries.TextEntry` - :meth:`Logger.log_struct ` creates a :class:`~google.cloud.logging_v2.entries.StructEntry` - :meth:`Logger.log_proto ` creates a :class:`~google.cloud.logging_v2.entries.ProtobufEntry` - :meth:`Logger.log_empty ` creates an empty :class:`~google.cloud.logging_v2.entries.LogEntry` -Batch Writing Logs +Batch Write Logs ------------------ -By default, each log write will take place in an individual network request, which may be inefficient at scale. -Instead, you can use a :class:`~google.cloud.logging_v2.logger.Batch`: +By default, each log write takes place in an individual network request, which may be inefficient at scale. + +Using the :class:`~google.cloud.logging_v2.logger.Batch` class, logs are batched together, and only sent out +when :func:`batch.commit ` is called. .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_batch] :end-before: [END logger_log_batch] :dedent: 4 -In this case, logs are batched together, and only sent out when :func:`batch.commit ` is called. To simplify things, you can also use :class:`~google.cloud.logging_v2.logger.Batch` as a context manager: .. literalinclude:: ../samples/snippets/usage_guide.py @@ -109,12 +111,14 @@ To simplify things, you can also use :class:`~google.cloud.logging_v2.logger.Bat :end-before: [END logger_log_batch_context] :dedent: 4 -Here, the logs will be automatically committed once the code exits the "with" block. +In the previous example, the logs are automatically committed when the code exits the "with" block. 
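As an inline illustration of the same pattern, a minimal sketch mirroring the ``usage_guide`` snippets referenced above (it assumes ``logger`` is a :doc:`Logger` created as shown earlier):

.. code-block:: python

    # group several entries into one API call
    batch = logger.batch()
    batch.log_text("first entry")
    batch.log_struct({"event": "second entry"})
    batch.commit()  # both entries are sent in a single request

    # or let the context manager commit on exit
    with logger.batch() as batch:
        batch.log_text("another entry")  # committed automatically when the block exits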
-Retriving Log Entries +Retrieve Log Entries --------------------- -Fetch entries for the default project. +You retrieve log entries for the default project using +:meth:`list_entries() ` +on a :doc:`Client` or :doc:`Logger` object: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_entries_default] @@ -122,41 +126,41 @@ Fetch entries for the default project. :dedent: 4 Entries returned by -:meth:`Client.list_entries ` +:meth:`Client.list_entries() ` or -:meth:`Logger.list_entries ` -will be instances of one of the following classes: +:meth:`Logger.list_entries() ` +are instances of one of the following classes: - :class:`~google.cloud.logging_v2.entries.TextEntry` - :class:`~google.cloud.logging_v2.entries.StructEntry` - :class:`~google.cloud.logging_v2.entries.ProtobufEntry` -Filter entries retrieved using the `Advanced Logs Filters`_ syntax +To filter entries retrieved using the `Advanced Logs Filters`_ syntax .. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters -Fetch entries for the default project. +To fetch entries for the default project. .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_entries_filter] :end-before: [END client_list_entries_filter] :dedent: 4 -Sort entries in descending timestamp order. +To sort entries in descending timestamp order. .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_entries_order_by] :end-before: [END client_list_entries_order_by] :dedent: 4 -Retrieve entries for a single logger, sorting in descending timestamp order: +To retrieve entries for a single logger, sorting in descending timestamp order: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_list_entries] :end-before: [END logger_list_entries] :dedent: 4 -And as a practical example, retrieve all `GKE Admin Activity audit logs`_ +For example, to retrieve all `GKE Admin Activity audit logs`_ from the past 24 hours: .. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project @@ -167,10 +171,10 @@ from the past 24 hours: :dedent: 4 -Deleting Log Entries +Delete Log Entries -------------------- -You can delete all logs associated with a logger using the following call: +To delete all logs associated with a logger, use the following call: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_delete] @@ -178,70 +182,70 @@ You can delete all logs associated with a logger using the following call: :dedent: 8 -Managing Log Metrics +Manage Log Metrics -------------------- -Metrics are counters of entries which match a given filter. They can be -used within Cloud Monitoring to create charts and alerts. +Logs-based metrics are counters of entries which match a given filter. +They can be used within Cloud Monitoring to create charts and alerts. -List all metrics for a project: +To list all logs-based metrics for a project: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_metrics] :end-before: [END client_list_metrics] :dedent: 4 -Create a metric: +To create a logs-based metric: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_create] :end-before: [END metric_create] :dedent: 4 -Refresh local information about a metric: +To refresh local information about a logs-based metric: .. 
literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_reload] :end-before: [END metric_reload] :dedent: 4 -Update a metric: +To update a logs-based metric: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_update] :end-before: [END metric_update] :dedent: 4 -Delete a metric: +To delete a logs-based metric: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_delete] :end-before: [END metric_delete] :dedent: 4 -Using Log Sinks +Log Sinks --------------- -Sinks allow exporting entries which match a given filter to Cloud Storage -buckets, BigQuery datasets, or Cloud Pub/Sub topics. +Sinks allow exporting of log entries which match a given filter to +Cloud Storage buckets, BigQuery datasets, or Cloud Pub/Sub topics. Cloud Storage Sink ~~~~~~~~~~~~~~~~~~~~~~~ -Make sure that the storage bucket you want to export logs to has +Ensure the storage bucket that you want to export logs to has ``cloud-logs@google.com`` as an owner. See `Setting permissions for Cloud Storage`_. .. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage -Add ``cloud-logs@google.com`` as an owner of the bucket: +Ensure that ``cloud-logs@google.com`` is an owner of the bucket: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_bucket_permissions] :end-before: [END sink_bucket_permissions] :dedent: 4 -Create a Cloud Storage sink: +To create a Cloud Storage sink: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_storage_create] @@ -252,7 +256,7 @@ Create a Cloud Storage sink: BigQuery Sink ~~~~~~~~~~~~~~~~~~ -To export logs to BigQuery you must log into the Cloud Platform Console +To export logs to BigQuery, you must log into the Cloud Console and add ``cloud-logs@google.com`` to a dataset. See: `Setting permissions for BigQuery`_ @@ -264,7 +268,7 @@ See: `Setting permissions for BigQuery`_ :end-before: [END sink_dataset_permissions] :dedent: 4 -Create a BigQuery sink: +To create a BigQuery sink: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_bigquery_create] @@ -275,7 +279,7 @@ Create a BigQuery sink: Pub/Sub Sink ~~~~~~~~~~~~~~~~~ -To export logs to BigQuery you must log into the Cloud Platform Console +To export logs to BigQuery you must log into the Cloud Console and add ``cloud-logs@google.com`` to a topic. See: `Setting permissions for Pub/Sub`_ @@ -287,42 +291,40 @@ See: `Setting permissions for Pub/Sub`_ :end-before: [END sink_topic_permissions] :dedent: 4 -Create a Cloud Pub/Sub sink: +To create a Cloud Pub/Sub sink: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_pubsub_create] :end-before: [END sink_pubsub_create] :dedent: 4 -Managing Sinks +Manage Sinks ~~~~~~~~~~~~~~ -List all sinks for a project: +To list all sinks for a project: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_sinks] :end-before: [END client_list_sinks] :dedent: 4 -Refresh local information about a sink: +To refresh local information about a sink: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_reload] :end-before: [END sink_reload] :dedent: 4 -Update a sink: +To update a sink: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_update] :end-before: [END sink_update] :dedent: 4 -Delete a sink: +To delete a sink: .. 
literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_delete] :end-before: [END sink_delete] :dedent: 4 - - From 49dc3c304d1df7ac52209adaecfa6f267f2fb578 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 25 Jan 2022 16:59:52 -0800 Subject: [PATCH 26/28] incorporated Pamela's feedback in std lib integration doc --- docs/std-lib-integration.rst | 44 ++++++++++++++++++------------------ 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst index 25b1e1105..a485fce6d 100644 --- a/docs/std-lib-integration.rst +++ b/docs/std-lib-integration.rst @@ -1,23 +1,22 @@ Integration with `logging` Standard Library =========================================== -The recommended way to use :mod:`google-cloud-logging` is to allow it to integrate with +We recommend that you use :mod:`google-cloud-logging` to integrate with the Python :mod:`logging` standard library. This way, you can write logs using Python -standards, and still have your logs appear in Google Cloud Logging using custom handlers -behind the scenes. +standards, and still have your logs appear in Google Cloud Logging. Automatic Configuration ----------------------- To integrate :mod:`google-cloud-logging` with the standard :mod:`logging` module, -simply call :meth:`~google.cloud.logging_v2.client.Client.setup_logging` on a :class:`~google.cloud.logging_v2.client.Client` instance. +call :meth:`~google.cloud.logging_v2.client.Client.setup_logging` on a :class:`~google.cloud.logging_v2.client.Client` instance. .. literalinclude:: ../samples/snippets/handler.py :start-after: [START logging_handler_setup] :end-before: [END logging_handler_setup] :dedent: 4 -This function will automatically choose the best configurations for the environment your +This :meth:`~google.cloud.logging_v2.client.Client.setup_logging` function chooses the best configurations for the environment your code is running on. For more information, see the `Google Cloud Logging documentation `_. Manual Handler Configuration @@ -25,8 +24,8 @@ Manual Handler Configuration .. _Manual Handler: -Automatic Configuration will automatically determine the appropriate handler for the environment. -If you would rather choose the handler yourself, you can construct an instance manually and pass it in +Automatic Configuration automatically determines the appropriate handler for the environment. +To specify the handler yourself, construct an instance manually and pass it in as an argument to :meth:`~google.cloud.logging_v2.handlers.setup_logging`: .. literalinclude:: ../samples/snippets/usage_guide.py @@ -45,11 +44,11 @@ There are two supported handler classes to choose from: to standard out, to be read and parsed by a GCP logging agent - This is the default handler on Kubernetes Engine, Cloud Functions and Cloud Run -Using the Standard Library +Standard Library --------------------------- -After the Google Cloug Logging library has been setup with the Python :mod:`logging` standard library, -you can begin to send logs with the standard logging library as you normally would: +After you setup the Google Cloud Logging library with the Python :mod:`logging` standard library, +you can send logs with the standard logging library as you normally would: .. 
literalinclude:: ../samples/snippets/handler.py :start-after: [START logging_handler_usage] @@ -65,16 +64,17 @@ Logging JSON Payloads Although the Python :mod:`logging` standard library `expects all logs to be strings `_, Google Cloud Logging allows `JSON payload data `_. -You can write JSON logs using the standard library integration in one of the following ways: -1. Using the `json_fields` `extra` argument: +To write JSON logs using the standard library integration, do one of the following: + +1. Use the `json_fields` `extra` argument: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logging_extra_json_fields] :end-before: [END logging_extra_json_fields] :dedent: 4 -2. Logging a JSON-parsable string: +2. Log a JSON-parsable string: .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logging_json_dumps] @@ -87,9 +87,9 @@ Automatic Metadata Detection .. _Autodetection: -The Google Cloud Logging library will attempt to detect and attach additional -`LogEntry fields `_ -whenever possible. The following fields are currently supported: +The Google Cloud Logging library attempts to detect and attach additional +`LogEntry fields `_ . +The following fields are currently supported: - labels - trace* @@ -101,14 +101,14 @@ whenever possible. The following fields are currently supported: - :ref:`json_fields` .. note:: - Fields marked with "*" can only be detected when using a supported Python web framework. The Google Cloud Logging + Fields marked with "*" require a supported Python web framework. The Google Cloud Logging library currently supports `flask `_ and `django `_ Manual Metadata Using the `extra` Argument ------------------------------------------- +-------------------------------------------- The Python :mod:`logging` standard library accepts `an "extra" argument `_ when -writing logs, which can be used to populate LogRecord objects with user-defined +writing logs. You can use this argument to populate LogRecord objects with user-defined key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in additional metadata to populate `LogEntry fields `_. @@ -119,7 +119,7 @@ metadata to populate `LogEntry fields `_ that can be :ref:`autodetected` can also be set manually through the `extra` argument. Fields sent explicitly through the `extra` -argument will override any :ref:`automatically detected` fields. +argument override any :ref:`automatically detected` fields. CloudLoggingHandler Transports ------------------------------ @@ -127,7 +127,7 @@ CloudLoggingHandler Transports .. _Transports: :doc:`Transport` classes define how the :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler` -transports logs over the network to GCP. There are currently two Transport implementations +transports logs over the network to Google Cloud. There are two Transport implementations (defined as subclasses of :class:`transports.base.Transport `): - :class:`~google.cloud.logging_v2.handlers.transports.background_thread.BackgroundThreadTransport`: @@ -139,7 +139,7 @@ transports logs over the network to GCP. There are currently two Transport imple You can set a Transport class by passing it as an argument when :ref:`initializing CloudLoggingHandler manually.` -Both options can be used over either :doc:`gRPC or HTTP`. +You can use both transport options over :doc:`gRPC or HTTP`. .. 
note:: :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler` From ef947d320100ba63791e6a606cdb376f14aaea54 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 13:29:21 -0800 Subject: [PATCH 27/28] removed unneeded delete lines --- samples/snippets/usage_guide.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index 30d7310c9..15f20d4e5 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -109,12 +109,10 @@ def client_setup(client2, to_delete): # if project not given, it will be inferred from the environment client = google.cloud.logging.Client(project="my-project") # [END usage_client_setup] - to_delete.append(client) # [START usage_http_client_setup] http_client = google.cloud.logging.Client(_use_grpc=False) # [END usage_http_client_setup] - to_delete.append(http_client) @snippet From 1ff01aaddd0c35480b009b12ea74cf022f717c10 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 13:52:17 -0800 Subject: [PATCH 28/28] switched client for tests --- samples/snippets/usage_guide.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index 15f20d4e5..fdbbe1211 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -109,10 +109,12 @@ def client_setup(client2, to_delete): # if project not given, it will be inferred from the environment client = google.cloud.logging.Client(project="my-project") # [END usage_client_setup] + to_delete.append(client) # [START usage_http_client_setup] http_client = google.cloud.logging.Client(_use_grpc=False) # [END usage_http_client_setup] + to_delete.append(http_client) @snippet @@ -125,7 +127,8 @@ def logger_usage(client_true, to_delete): logger = client.logger(name="log_id") # logger will bind to logName "projects/my_project/logs/log_id" # [END logger_create] - to_delete.append(logger) + client = client_true + log_id = "logger_usage_%d" % (_millis()) # [START logger_custom_labels] custom_labels = {"my-key": "my-value"}