Skip to content

Commit 8360566

Browse files
authored
[Monitor Ingestion] Improve input validation/docs (#33976)
- Add another check to see if the user is passing in a string
- Include an example in the README about uploading JSON files/strings

Signed-off-by: Paul Van Eck <[email protected]>
1 parent 096cdf1 commit 8360566

File tree

6 files changed

+59
-15
lines changed

6 files changed

+59
-15
lines changed

sdk/monitor/azure-monitor-ingestion/CHANGELOG.md

+2-1
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,8 @@
1010

1111
### Other Changes
1212

13-
* Bumped minimum dependency on `azure-core` to `>=1.28.0`.
13+
- Bumped minimum dependency on `azure-core` to `>=1.28.0`.
14+
- Added additional type validation for the `logs` parameter in the `upload` method to ensure that a string hasn't been passed in. ([#33976](https://github.com/Azure/azure-sdk-for-python/pull/33976))
1415

1516
## 1.0.3 (2023-11-07)
1617

sdk/monitor/azure-monitor-ingestion/README.md

+43-4
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,7 @@ The logs that were uploaded using this library can be queried using the [Azure M
102102
## Examples
103103

104104
- [Upload custom logs](#upload-custom-logs)
105+
- [Upload data from JSON file or string](#upload-data-from-json-file-or-string)
105106
- [Upload with custom error handling](#upload-with-custom-error-handling)
106107

107108
### Upload custom logs
@@ -116,11 +117,12 @@ from azure.identity import DefaultAzureCredential
116117
from azure.monitor.ingestion import LogsIngestionClient
117118

118119
endpoint = os.environ['DATA_COLLECTION_ENDPOINT']
119-
credential = DefaultAzureCredential()
120+
rule_id = os.environ['LOGS_DCR_RULE_ID']
121+
stream_name = os.environ['LOGS_DCR_STREAM_NAME']
120122

123+
credential = DefaultAzureCredential()
121124
client = LogsIngestionClient(endpoint=endpoint, credential=credential, logging_enable=True)
122125

123-
rule_id = os.environ['LOGS_DCR_RULE_ID']
124126
body = [
125127
{
126128
"Time": "2021-12-08T23:51:14.1104269Z",
@@ -135,7 +137,44 @@ body = [
135137
]
136138

137139
try:
138-
client.upload(rule_id=rule_id, stream_name=os.environ['LOGS_DCR_STREAM_NAME'], logs=body)
140+
client.upload(rule_id=rule_id, stream_name=stream_name, logs=body)
141+
except HttpResponseError as e:
142+
print(f"Upload failed: {e}")
143+
```
144+
145+
### Upload data from JSON file or string
146+
147+
This example shows uploading when the data is in a JSON file or string.
148+
149+
```python
150+
import json
151+
import os
152+
153+
from azure.core.exceptions import HttpResponseError
154+
from azure.identity import DefaultAzureCredential
155+
from azure.monitor.ingestion import LogsIngestionClient
156+
157+
endpoint = os.environ["DATA_COLLECTION_ENDPOINT"]
158+
rule_id = os.environ['LOGS_DCR_RULE_ID']
159+
stream_name = os.environ["LOGS_DCR_STREAM_NAME"]
160+
161+
credential = DefaultAzureCredential()
162+
client = LogsIngestionClient(endpoint=endpoint, credential=credential, logging_enable=True)
163+
164+
# If you have a JSON file containing an array of JSON objects
165+
file_path = "./test-logs.json"
166+
with open(file_path, "r") as f:
167+
logs = json.load(f)
168+
try:
169+
client.upload(rule_id=rule_id, stream_name=stream_name, logs=logs)
170+
except HttpResponseError as e:
171+
print(f"Upload failed: {e}")
172+
173+
# If you have a JSON string representing an array of JSON objects
174+
string = '[{"Time": "2023-12-08T23:51:14.1104269Z", "Computer": "Computer1", "AdditionalContext": "context-2"}]'
175+
logs = json.loads(string)
176+
try:
177+
client.upload(rule_id=rule_id, stream_name=stream_name, logs=logs)
139178
except HttpResponseError as e:
140179
print(f"Upload failed: {e}")
141180
```
@@ -155,7 +194,7 @@ def on_error(error):
155194
def on_error_pass(error):
156195
pass
157196

158-
client.upload(rule_id=rule_id, stream_name=os.environ['LOGS_DCR_STREAM_NAME'], logs=body, on_error=on_error)
197+
client.upload(rule_id=rule_id, stream_name=stream_name, logs=body, on_error=on_error)
159198
```
160199

161200
## Troubleshooting

sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_patch.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -68,8 +68,10 @@ def upload(
6868
super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs)
6969
return
7070

71-
if not isinstance(logs, Sequence):
72-
raise ValueError("The 'logs' parameter must be a list of JSON objects or an I/O stream that is readable.")
71+
if not isinstance(logs, Sequence) or isinstance(logs, str):
72+
raise ValueError(
73+
"The 'logs' parameter must be a list of mappings/dictionaries or an I/O stream that is readable."
74+
)
7375

7476
for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)):
7577
try:

sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_patch.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -68,8 +68,10 @@ async def upload(
6868
await super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs)
6969
return
7070

71-
if not isinstance(logs, Sequence):
72-
raise ValueError("The 'logs' parameter must be a list of JSON objects or an I/O stream that is readable.")
71+
if not isinstance(logs, Sequence) or isinstance(logs, str):
72+
raise ValueError(
73+
"The 'logs' parameter must be a list of mappings/dictionaries or an I/O stream that is readable."
74+
)
7375

7476
for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)):
7577
try:

sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -140,10 +140,10 @@ def on_error(e):
140140

141141
assert on_error.called
142142

143-
def test_invalid_logs_format(self, monitor_info):
143+
@pytest.mark.parametrize("logs", ['[{"foo": "bar"}]', "foo", {"foo": "bar"}, None])
144+
def test_invalid_logs_format(self, monitor_info, logs):
144145
client = self.create_client_from_credential(
145146
LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce'])
146147

147-
body = {"foo": "bar"}
148148
with pytest.raises(ValueError):
149-
client.upload(rule_id="rule", stream_name="stream", logs=body)
149+
client.upload(rule_id="rule", stream_name="stream", logs=logs)

sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion_async.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -166,12 +166,12 @@ async def on_error(e):
166166
await credential.close()
167167

168168
@pytest.mark.asyncio
169-
async def test_invalid_logs_format(self, monitor_info):
169+
@pytest.mark.parametrize("logs", ['[{"foo": "bar"}]', "foo", {"foo": "bar"}, None])
170+
async def test_invalid_logs_format(self, monitor_info, logs):
170171
credential = self.get_credential(LogsIngestionClient, is_async=True)
171172
client = self.create_client_from_credential(LogsIngestionClient, credential, endpoint=monitor_info['dce'])
172173

173-
body = {"foo": "bar"}
174174
async with client:
175175
with pytest.raises(ValueError):
176-
await client.upload(rule_id="rule", stream_name="stream", logs=body)
176+
await client.upload(rule_id="rule", stream_name="stream", logs=logs)
177177
await credential.close()

0 commit comments

Comments
 (0)