Skip to content

Commit 517104d

Browse files
Sita04, parthea, and gcf-owl-bot[bot]
authored
docs(samples): add bigquery export samples (#315)
* docs(samples): init add bigquery export samples * minor var name change * minor var name change * added projectid to create bigquery dataset * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * updated project id var * updated dataset id * fixture scope change * lint fix * minor path syntax fix * added capsys to delete * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * removed capsys check * updated requirements with specific version * updated comments Co-authored-by: Anthonios Partheniou <[email protected]> Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent 9f8100f commit 517104d

File tree

3 files changed

+288
-1
lines changed

3 files changed

+288
-1
lines changed
Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1,2 @@
1-
pytest
1+
pytest==6.2.5
2+
google-cloud-bigquery==2.34.2
Lines changed: 190 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,190 @@
1+
#!/usr/bin/env python
2+
#
3+
# Copyright 2022 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# https://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
17+
"""Snippets on exporting findings from Security Command Center to BigQuery."""
18+
19+
20+
# [START securitycenter_create_bigquery_export]
def create_bigquery_export(
    parent: str, export_filter: str, bigquery_dataset_id: str, bigquery_export_id: str
):
    """
    Create export configuration to export findings from a project to a BigQuery dataset.
    Optionally specify filter to export certain findings only.

    Args:
        parent: Use any one of the following resource paths:
            - organizations/{organization_id}
            - folders/{folder_id}
            - projects/{project_id}
        export_filter: Expression that defines the filter to apply across create/update events of findings.
        bigquery_dataset_id: The BigQuery dataset to write findings' updates to.
        bigquery_export_id: Unique identifier provided by the client.
            - example id: f"default-{str(uuid.uuid4()).split('-')[0]}"
    For more info, see:
    https://cloud.google.com/security-command-center/docs/how-to-analyze-findings-in-big-query#export_findings_from_to
    """
    # Import kept function-local per the samples convention.
    from google.cloud import securitycenter

    client = securitycenter.SecurityCenterClient()

    # Create the BigQuery export configuration.
    bigquery_export = securitycenter.BigQueryExport()
    bigquery_export.description = "Export low and medium findings if the compute resource has an IAM anomalous grant"
    bigquery_export.filter = export_filter
    bigquery_export.dataset = f"{parent}/datasets/{bigquery_dataset_id}"

    request = securitycenter.CreateBigQueryExportRequest()
    request.parent = parent
    request.big_query_export = bigquery_export
    request.big_query_export_id = bigquery_export_id

    # Create the export request.
    response = client.create_big_query_export(request)

    print(f"BigQuery export request created successfully: {response.name}\n")


# [END securitycenter_create_bigquery_export]
65+
66+
67+
# [START securitycenter_get_bigquery_export]
def get_bigquery_export(parent: str, bigquery_export_id: str):
    """
    Retrieve an existing BigQuery export.

    Args:
        parent: Use any one of the following resource paths:
            - organizations/{organization_id}
            - folders/{folder_id}
            - projects/{project_id}
        bigquery_export_id: Unique identifier that is used to identify the export.
    """
    # Import kept function-local per the samples convention.
    from google.cloud import securitycenter

    client = securitycenter.SecurityCenterClient()

    request = securitycenter.GetBigQueryExportRequest()
    request.name = f"{parent}/bigQueryExports/{bigquery_export_id}"

    response = client.get_big_query_export(request)
    print(f"Retrieved the BigQuery export: {response.name}")


# [END securitycenter_get_bigquery_export]
91+
92+
93+
# [START securitycenter_list_bigquery_export]
def list_bigquery_exports(parent: str):
    """
    List BigQuery exports in the given parent.

    Args:
        parent: The parent which owns the collection of BigQuery exports.
            Use any one of the following resource paths:
            - organizations/{organization_id}
            - folders/{folder_id}
            - projects/{project_id}
    """
    # Import kept function-local per the samples convention.
    from google.cloud import securitycenter

    client = securitycenter.SecurityCenterClient()

    request = securitycenter.ListBigQueryExportsRequest()
    request.parent = parent

    # The response pager yields each configured BigQueryExport in turn.
    response = client.list_big_query_exports(request)

    print("Listing BigQuery exports:")
    for bigquery_export in response:
        print(bigquery_export.name)


# [END securitycenter_list_bigquery_export]
120+
121+
122+
# [START securitycenter_update_bigquery_export]
def update_bigquery_export(parent: str, export_filter: str, bigquery_export_id: str):
    """
    Updates an existing BigQuery export.
    Args:
        parent: Use any one of the following resource paths:
            - organizations/{organization_id}
            - folders/{folder_id}
            - projects/{project_id}
        export_filter: Expression that defines the filter to apply across create/update events of findings.
        bigquery_export_id: Unique identifier provided by the client.
        For more info, see:
        https://cloud.google.com/security-command-center/docs/how-to-analyze-findings-in-big-query#export_findings_from_to
    """
    from google.cloud import securitycenter
    from google.protobuf import field_mask_pb2

    scc_client = securitycenter.SecurityCenterClient()

    # New export configuration: identify the export by its full resource
    # name and carry the replacement filter expression.
    export_config = securitycenter.BigQueryExport()
    export_config.name = f"{parent}/bigQueryExports/{bigquery_export_id}"
    export_config.filter = export_filter

    # Restrict the update to the "filter" field only; an empty mask would
    # update every mutable field. See:
    # https://googleapis.dev/python/protobuf/latest/google/protobuf/field_mask_pb2.html
    update_request = securitycenter.UpdateBigQueryExportRequest()
    update_request.big_query_export = export_config
    update_request.update_mask = field_mask_pb2.FieldMask(paths=["filter"])

    updated = scc_client.update_big_query_export(update_request)

    # Confirm the server accepted the new filter before reporting success.
    if updated.filter == export_filter:
        print("BigQueryExport updated successfully!")
    else:
        print("Failed to update BigQueryExport!")


# [END securitycenter_update_bigquery_export]
166+
167+
168+
# [START securitycenter_delete_bigquery_export]
def delete_bigquery_export(parent: str, bigquery_export_id: str):
    """
    Delete an existing BigQuery export.
    Args:
        parent: Use any one of the following resource paths:
            - organizations/{organization_id}
            - folders/{folder_id}
            - projects/{project_id}
        bigquery_export_id: Unique identifier that is used to identify the export.
    """
    from google.cloud import securitycenter

    scc_client = securitycenter.SecurityCenterClient()

    # The export is addressed by its full resource name.
    delete_request = securitycenter.DeleteBigQueryExportRequest()
    delete_request.name = f"{parent}/bigQueryExports/{bigquery_export_id}"

    scc_client.delete_big_query_export(delete_request)
    print(f"BigQuery export request deleted successfully: {bigquery_export_id}")


# [END securitycenter_delete_bigquery_export]
Lines changed: 96 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,96 @@
1+
#!/usr/bin/env python
2+
#
3+
# Copyright 2022 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# https://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
17+
18+
# TODO(developer): Replace these variables before running the sample.
import os
import re
import uuid

from _pytest.capture import CaptureFixture
import pytest

import snippets_bigquery_export

# Project id and credentials are read from the test environment; both
# raise KeyError if unset, failing fast before any test runs.
PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
GOOGLE_APPLICATION_CREDENTIALS = os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
# Random suffix keeps concurrent test runs from colliding on the dataset name.
BIGQUERY_DATASET_ID = f"sampledataset{str(uuid.uuid4()).split('-')[0]}"
31+
32+
33+
@pytest.fixture(scope="module")
def bigquery_export_id():
    """Module-scoped fixture: create a dataset + export, yield the export id, then clean up."""
    export_id = f"default-{str(uuid.uuid4()).split('-')[0]}"
    parent = f"projects/{PROJECT_ID}"

    # Set up: a dataset to export into, then the export configuration itself.
    create_bigquery_dataset(BIGQUERY_DATASET_ID)
    snippets_bigquery_export.create_bigquery_export(
        parent, 'severity="LOW" OR severity="MEDIUM"', BIGQUERY_DATASET_ID, export_id
    )

    yield export_id

    # Tear down in reverse order of creation.
    snippets_bigquery_export.delete_bigquery_export(parent, export_id)
    delete_bigquery_dataset(BIGQUERY_DATASET_ID)
49+
50+
51+
def create_bigquery_dataset(dataset_id: str):
    """Create the helper BigQuery dataset under the test project."""
    from google.cloud import bigquery

    client = bigquery.Client()

    # Dataset ids must be fully qualified as "<project>.<dataset>".
    dataset = bigquery.Dataset(f"{PROJECT_ID}.{dataset_id}")
    dataset = client.create_dataset(dataset)

    print("Dataset {} created.".format(dataset.dataset_id))
62+
63+
def delete_bigquery_dataset(dataset_id: str):
    """Delete the helper BigQuery dataset created for this test module."""
    from google.cloud import bigquery

    bigquery.Client().delete_dataset(dataset_id)
    print("Dataset {} deleted.".format(dataset_id))
69+
70+
71+
def test_get_bigquery_export(capsys: CaptureFixture, bigquery_export_id: str):
    """get_bigquery_export prints the retrieved export's resource name."""
    snippets_bigquery_export.get_bigquery_export(
        f"projects/{PROJECT_ID}", bigquery_export_id
    )

    output, _ = capsys.readouterr()
    assert re.search(
        "Retrieved the BigQuery export",
        output,
    )
    assert re.search(f"bigQueryExports/{bigquery_export_id}", output)
81+
82+
83+
def test_list_bigquery_exports(capsys: CaptureFixture, bigquery_export_id: str):
    """The listing output includes the export created by the fixture."""
    snippets_bigquery_export.list_bigquery_exports(f"projects/{PROJECT_ID}")

    output, _ = capsys.readouterr()
    assert re.search("Listing BigQuery exports:", output)
    assert re.search(bigquery_export_id, output)
88+
89+
90+
def test_update_bigquery_exports(capsys: CaptureFixture, bigquery_export_id: str):
    """Changing the export filter reports success."""
    new_filter = 'severity="MEDIUM"'
    snippets_bigquery_export.update_bigquery_export(
        f"projects/{PROJECT_ID}", new_filter, bigquery_export_id
    )

    output, _ = capsys.readouterr()
    assert re.search("BigQueryExport updated successfully!", output)

0 commit comments

Comments
 (0)