From 4bb2783e1a5052323dfde7fccf941c885240a4b2 Mon Sep 17 00:00:00 2001
From: Thomas Schultz
Date: Fri, 24 Jun 2016 20:32:47 -0400
Subject: [PATCH 1/3] Add docs on storage permissions for exporting logs.

---
 docs/logging-usage.rst | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/docs/logging-usage.rst b/docs/logging-usage.rst
index 9b23e2edd8ab..f59fe9b7948c 100644
--- a/docs/logging-usage.rst
+++ b/docs/logging-usage.rst
@@ -212,6 +212,22 @@ Export log entries using sinks
 Sinks allow exporting entries which match a given filter to Cloud Storage
 buckets, BigQuery datasets, or Cloud Pub/Sub topics.
 
+Make sure that the storage bucket you want to export logs to has
+`cloud-logs@google.com` as the owner. See `Set permission for writing exported logs`_.
+
+Add `cloud-logs@google.com` as the owner of `my-bucket-name`:
+
+.. doctest::
+
+    >>> from gcloud import storage
+    >>> client = storage.Client()
+    >>> bucket = client.get_bucket('my-bucket-name')
+    >>> acl = bucket.acl
+    >>> acl.user('cloud-logs@google.com').grant_owner()
+    >>> acl.save()
+
+.. _Set permission for writing exported logs: https://cloud.google.com/logging/docs/export/configure_export#setting_product_name_short_permissions_for_writing_exported_logs
+
 Create a Cloud Storage sink:
 
 .. doctest::

From c542ff8c1eaa4023daf38562a969c238983414e7 Mon Sep 17 00:00:00 2001
From: Thomas Schultz
Date: Mon, 27 Jun 2016 11:46:26 -0400
Subject: [PATCH 2/3] Add BigQuery and Pub/Sub export permissions docs.

---
 docs/logging-usage.rst | 44 ++++++++++++++++++++++++++++++++++++--------
 1 file changed, 36 insertions(+), 8 deletions(-)

diff --git a/docs/logging-usage.rst b/docs/logging-usage.rst
index f59fe9b7948c..dd4bd36196e9 100644
--- a/docs/logging-usage.rst
+++ b/docs/logging-usage.rst
@@ -206,11 +206,8 @@ Delete a metric:
     False
 
 
-Export log entries using sinks
-------------------------------
-
-Sinks allow exporting entries which match a given filter to Cloud Storage
-buckets, BigQuery datasets, or Cloud Pub/Sub topics.
+Export to Cloud Storage
+=======================
 
 Make sure that the storage bucket you want to export logs to has
 `cloud-logs@google.com` as the owner. See `Set permission for writing exported logs`_.
@@ -222,12 +219,43 @@ Add `cloud-logs@google.com` as the owner of `my-bucket-name`:
 
     >>> from gcloud import storage
     >>> client = storage.Client()
     >>> bucket = client.get_bucket('my-bucket-name')
-    >>> acl = bucket.acl
-    >>> acl.user('cloud-logs@google.com').grant_owner()
-    >>> acl.save()
+    >>> bucket.acl.reload()
+    >>> logs_group = bucket.acl.group('cloud-logs@google.com')
+    >>> logs_group.grant_owner()
+    >>> bucket.acl.add_entity(logs_group)
+    >>> bucket.acl.save()
 
 .. _Set permission for writing exported logs: https://cloud.google.com/logging/docs/export/configure_export#setting_product_name_short_permissions_for_writing_exported_logs
 
+
+Export to BigQuery
+==================
+
+To export logs to BigQuery you must log into the Cloud Platform Console
+and add `cloud-logs@google.com` to your project.
+
+See: `Setting permissions for BigQuery`_
+
+.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export#manual-access-bq
+
+
+Export to Pub/Sub
+=================
+
+To export logs to Pub/Sub you must log into the Cloud Platform Console
+and add `cloud-logs@google.com` to your project.
+
+See: `Setting permissions for Pub/Sub`_
+
+.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export#manual-access-pubsub
+
+
+Export log entries using sinks
+------------------------------
+
+Sinks allow exporting entries which match a given filter to Cloud Storage
+buckets, BigQuery datasets, or Cloud Pub/Sub topics.
+
 Create a Cloud Storage sink:
 
 .. doctest::

From 1e4405eff5c4cc34870d5c6e0082728091631af0 Mon Sep 17 00:00:00 2001
From: Thomas Schultz
Date: Mon, 27 Jun 2016 12:17:51 -0400
Subject: [PATCH 3/3] Add Pub/Sub and BigQuery code examples for logging.

---
 docs/logging-usage.rst | 49 +++++++++++++++++++++++++++++----------------
 1 file changed, 33 insertions(+), 16 deletions(-)

diff --git a/docs/logging-usage.rst b/docs/logging-usage.rst
index dd4bd36196e9..ec8f157cec5c 100644
--- a/docs/logging-usage.rst
+++ b/docs/logging-usage.rst
@@ -205,9 +205,14 @@ Delete a metric:
 
     >>> metric.exists()  # API call
     False
 
+Export log entries using sinks
+------------------------------
+
+Sinks allow exporting entries which match a given filter to Cloud Storage
+buckets, BigQuery datasets, or Cloud Pub/Sub topics.
 
 Export to Cloud Storage
-=======================
+~~~~~~~~~~~~~~~~~~~~~~~
 
 Make sure that the storage bucket you want to export logs to has
 `cloud-logs@google.com` as the owner. See `Set permission for writing exported logs`_.
@@ -227,34 +232,46 @@ Add `cloud-logs@google.com` as the owner of `my-bucket-name`:
 
 .. _Set permission for writing exported logs: https://cloud.google.com/logging/docs/export/configure_export#setting_product_name_short_permissions_for_writing_exported_logs
 
-
 Export to BigQuery
-==================
+~~~~~~~~~~~~~~~~~~
 
-To export logs to BigQuery you must log into the Cloud Platform Console
-and add `cloud-logs@google.com` to your project.
+To export logs to BigQuery you must grant `cloud-logs@google.com`
+write access to the dataset.
 
 See: `Setting permissions for BigQuery`_
 
-.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export#manual-access-bq
+.. doctest::
 
+    >>> from gcloud import bigquery
+    >>> from gcloud.bigquery.dataset import AccessGrant
+    >>> bigquery_client = bigquery.Client()
+    >>> dataset = bigquery_client.dataset('my-dataset-name')
+    >>> dataset.create()
+    >>> dataset.reload()
+    >>> grants = dataset.access_grants
+    >>> grants.append(AccessGrant(
+    ...     'WRITER', 'groupByEmail', 'cloud-logs@google.com'))
+    >>> dataset.access_grants = grants
+    >>> dataset.update()
 
+.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export#manual-access-bq
 
 Export to Pub/Sub
-=================
+~~~~~~~~~~~~~~~~~
 
-To export logs to Pub/Sub you must log into the Cloud Platform Console
-and add `cloud-logs@google.com` to your project.
+To export logs to Pub/Sub you must add `cloud-logs@google.com` as an
+owner of the topic.
 
 See: `Setting permissions for Pub/Sub`_
 
-.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export#manual-access-pubsub
-
-
-Export log entries using sinks
-------------------------------
+.. doctest::
 
+    >>> from gcloud import pubsub
+    >>> client = pubsub.Client()
+    >>> topic = client.topic('my-topic-name')
+    >>> policy = topic.get_iam_policy()
+    >>> policy.owners.add(policy.group('cloud-logs@google.com'))
+    >>> topic.set_iam_policy(policy)
 
-Sinks allow exporting entries which match a given filter to Cloud Storage
-buckets, BigQuery datasets, or Cloud Pub/Sub topics.
+.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export#manual-access-pubsub
 
 Create a Cloud Storage sink:
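
Note: the trailing context of the final hunk stops at the `Create a Cloud Storage sink:` line; the sink-creation doctest itself sits just below that line in `docs/logging-usage.rst` and is untouched by this series. For readers without the file at hand, here is a minimal sketch of that step, assuming the `Client.sink(name, filter_, destination)` signature of the gcloud logging API from this era; the sink name and filter are illustrative placeholders, and the destination reuses the bucket granted permissions in PATCH 1/3:

.. doctest::

    >>> from gcloud import logging
    >>> client = logging.Client()
    >>> sink = client.sink(
    ...     'my-sink-name',       # placeholder sink name
    ...     'severity>=ERROR',    # placeholder filter
    ...     'storage.googleapis.com/my-bucket-name')
    >>> sink.create()  # API call; requires the bucket ACL granted above

The `storage.googleapis.com/<bucket>` destination format follows the configure_export guide linked in the patches above.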