diff --git a/sdk/__init__.py b/sdk/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_clear_messages.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_clear_messages.yaml
deleted file mode 100644
index e060cbc6da2e..000000000000
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_clear_messages.yaml
+++ /dev/null
@@ -1,279 +0,0 @@
-interactions:
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesyncbf740c50
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:14 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: '<?xml version="1.0" encoding="utf-8"?>
-
- <QueueMessage><MessageText>message1</MessageText></QueueMessage>'
- headers:
- Accept:
- - application/xml
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '103'
- Content-Type:
- - application/xml; charset=utf-8
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- x-ms-version:
- - '2018-03-28'
- method: POST
- uri: https://storagename.queue.core.windows.net/pyqueuesyncbf740c50/messages
- response:
- body:
- string: "\uFEFFe37a1af0-3478-44d6-a6d5-ceccea68f5cdWed,
- 30 Oct 2019 19:51:14 GMTWed, 06 Nov 2019 19:51:14
- GMTAgAAAAMAAAAAAAAAMhI6Y1uP1QE=Wed,
- 30 Oct 2019 19:51:14 GMT"
- headers:
- content-type:
- - application/xml
- date:
- - Wed, 30 Oct 2019 19:51:14 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- transfer-encoding:
- - chunked
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: '<?xml version="1.0" encoding="utf-8"?>
-
- <QueueMessage><MessageText>message2</MessageText></QueueMessage>'
- headers:
- Accept:
- - application/xml
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '103'
- Content-Type:
- - application/xml; charset=utf-8
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- x-ms-version:
- - '2018-03-28'
- method: POST
- uri: https://storagename.queue.core.windows.net/pyqueuesyncbf740c50/messages
- response:
- body:
- string: "\uFEFFe8c70bbf-1c7e-4de7-9390-8e322ac892c1Wed,
- 30 Oct 2019 19:51:14 GMTWed, 06 Nov 2019 19:51:14
- GMTAgAAAAMAAAAAAAAAsLRBY1uP1QE=Wed,
- 30 Oct 2019 19:51:14 GMT"
- headers:
- content-type:
- - application/xml
- date:
- - Wed, 30 Oct 2019 19:51:14 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- transfer-encoding:
- - chunked
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: '<?xml version="1.0" encoding="utf-8"?>
-
- <QueueMessage><MessageText>message3</MessageText></QueueMessage>'
- headers:
- Accept:
- - application/xml
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '103'
- Content-Type:
- - application/xml; charset=utf-8
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- x-ms-version:
- - '2018-03-28'
- method: POST
- uri: https://storagename.queue.core.windows.net/pyqueuesyncbf740c50/messages
- response:
- body:
- string: "\uFEFF2c6a6466-eff0-46ec-a343-f19faf4c5890Wed,
- 30 Oct 2019 19:51:14 GMTWed, 06 Nov 2019 19:51:14
- GMTAgAAAAMAAAAAAAAAt0FKY1uP1QE=Wed,
- 30 Oct 2019 19:51:14 GMT"
- headers:
- content-type:
- - application/xml
- date:
- - Wed, 30 Oct 2019 19:51:14 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- transfer-encoding:
- - chunked
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: '<?xml version="1.0" encoding="utf-8"?>
-
- <QueueMessage><MessageText>message4</MessageText></QueueMessage>'
- headers:
- Accept:
- - application/xml
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '103'
- Content-Type:
- - application/xml; charset=utf-8
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- x-ms-version:
- - '2018-03-28'
- method: POST
- uri: https://storagename.queue.core.windows.net/pyqueuesyncbf740c50/messages
- response:
- body:
- string: "\uFEFF95978a93-9183-49e3-bb2b-baf2a99bb71aWed,
- 30 Oct 2019 19:51:14 GMTWed, 06 Nov 2019 19:51:14
- GMTAgAAAAMAAAAAAAAANeRRY1uP1QE=Wed,
- 30 Oct 2019 19:51:14 GMT"
- headers:
- content-type:
- - application/xml
- date:
- - Wed, 30 Oct 2019 19:51:14 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- transfer-encoding:
- - chunked
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- x-ms-version:
- - '2018-03-28'
- method: DELETE
- uri: https://storagename.queue.core.windows.net/pyqueuesyncbf740c50/messages
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:14 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 204
- message: No Content
-- request:
- body: null
- headers:
- Accept:
- - application/xml
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- x-ms-version:
- - '2018-03-28'
- method: GET
- uri: https://storagename.queue.core.windows.net/pyqueuesyncbf740c50/messages?peekonly=true
- response:
- body:
- string: "\uFEFF"
- headers:
- cache-control:
- - no-cache
- content-type:
- - application/xml
- date:
- - Wed, 30 Oct 2019 19:51:14 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- transfer-encoding:
- - chunked
- x-ms-version:
- - '2018-03-28'
- status:
- code: 200
- message: OK
-version: 1
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue.yaml
deleted file mode 100644
index c2eaf63f0df5..000000000000
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue.yaml
+++ /dev/null
@@ -1,36 +0,0 @@
-interactions:
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesynca7af0b8a
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:14 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-version: 1
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue_fail_on_exist.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue_fail_on_exist.yaml
deleted file mode 100644
index 2947cae8f1ce..000000000000
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue_fail_on_exist.yaml
+++ /dev/null
@@ -1,70 +0,0 @@
-interactions:
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:16 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesync736a114d
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:14 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:16 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesync736a114d
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 204
- message: No Content
-version: 1
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue_fail_on_exist_different_metadata.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue_fail_on_exist_different_metadata.yaml
deleted file mode 100644
index 30f2c696fb08..000000000000
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue_fail_on_exist_different_metadata.yaml
+++ /dev/null
@@ -1,79 +0,0 @@
-interactions:
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:16 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesync9101903
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:16 GMT
- x-ms-meta:
- - '{''val'': ''value''}'
- x-ms-meta-val:
- - value
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesync9101903
- response:
- body:
- string: "\uFEFFQueueAlreadyExists
The
- specified queue already exists.\nRequestId:edb962eb-9003-0068-2d5b-8f545b000000\nTime:2019-10-30T19:51:15.7457762Z"
- headers:
- content-length:
- - '222'
- content-type:
- - application/xml
- date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-error-code:
- - QueueAlreadyExists
- x-ms-version:
- - '2018-03-28'
- status:
- code: 409
- message: The specified queue already exists.
-version: 1
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue_with_options.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue_with_options.yaml
deleted file mode 100644
index 069af593840b..000000000000
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_create_queue_with_options.yaml
+++ /dev/null
@@ -1,82 +0,0 @@
-interactions:
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:16 GMT
- x-ms-meta:
- - '{''val1'': ''test'', ''val2'': ''blah''}'
- x-ms-meta-val1:
- - test
- x-ms-meta-val2:
- - blah
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesync63ff1110
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:16 GMT
- x-ms-version:
- - '2018-03-28'
- method: GET
- uri: https://storagename.queue.core.windows.net/pyqueuesync63ff1110?comp=metadata
- response:
- body:
- string: ''
- headers:
- cache-control:
- - no-cache
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-approximate-messages-count:
- - '0'
- x-ms-meta-val1:
- - test
- x-ms-meta-val2:
- - blah
- x-ms-version:
- - '2018-03-28'
- status:
- code: 200
- message: OK
-version: 1
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_existing_queue_fail_not_exist.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_existing_queue_fail_not_exist.yaml
deleted file mode 100644
index 72186649bf7f..000000000000
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_existing_queue_fail_not_exist.yaml
+++ /dev/null
@@ -1,70 +0,0 @@
-interactions:
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:16 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesync38c6158a
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
- x-ms-version:
- - '2018-03-28'
- method: DELETE
- uri: https://storagename.queue.core.windows.net/pyqueuesync38c6158a
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:15 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 204
- message: No Content
-version: 1
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_message.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_message.yaml
index 05211c8b2ecc..187fce4eae4a 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_message.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_message.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:56 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:15 GMT
+ - Mon, 08 Jun 2020 17:04:50 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:57 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncbf910c49/messages
response:
body:
- string: "\uFEFF17e942ab-bc07-427b-837f-5b076dfbef14Wed,
- 30 Oct 2019 19:51:16 GMTWed, 06 Nov 2019 19:51:16
- GMTAgAAAAMAAAAAAAAAvMFiZFuP1QE=Wed,
- 30 Oct 2019 19:51:16 GMT"
+ string: "\uFEFF1c9be4f9-5022-456c-b25f-fdca898548b9Mon,
+ 08 Jun 2020 17:04:50 GMTMon, 15 Jun 2020 17:04:50
+ GMTAgAAAAMAAAAAAAAAaP4c7LY91gE=Mon,
+ 08 Jun 2020 17:04:50 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:15 GMT
+ - Mon, 08 Jun 2020 17:04:50 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -92,24 +92,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:57 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncbf910c49/messages
response:
body:
- string: "\uFEFFedcc05bf-a29d-4cb9-8bbb-649b5d80fe1dWed,
- 30 Oct 2019 19:51:16 GMTWed, 06 Nov 2019 19:51:16
- GMTAgAAAAMAAAAAAAAAX7JqZFuP1QE=Wed,
- 30 Oct 2019 19:51:16 GMT"
+ string: "\uFEFF320cb08e-675b-4b3b-a432-d70c42088a38Mon,
+ 08 Jun 2020 17:04:51 GMTMon, 15 Jun 2020 17:04:51
+ GMTAgAAAAMAAAAAAAAATqk07LY91gE=Mon,
+ 08 Jun 2020 17:04:51 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:15 GMT
+ - Mon, 08 Jun 2020 17:04:50 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -135,24 +135,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:57 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncbf910c49/messages
response:
body:
- string: "\uFEFFe776e4ef-5047-4f14-9df0-8d8ec2757314Wed,
- 30 Oct 2019 19:51:16 GMTWed, 06 Nov 2019 19:51:16
- GMTAgAAAAMAAAAAAAAAAFF0ZFuP1QE=Wed,
- 30 Oct 2019 19:51:16 GMT"
+ string: "\uFEFFff5f8c39-45b4-4709-92ec-89d83ad46604Mon,
+ 08 Jun 2020 17:04:51 GMTMon, 15 Jun 2020 17:04:51
+ GMTAgAAAAMAAAAAAAAAyjRI7LY91gE=Mon,
+ 08 Jun 2020 17:04:51 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:16 GMT
+ - Mon, 08 Jun 2020 17:04:51 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -178,24 +178,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:58 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncbf910c49/messages
response:
body:
- string: "\uFEFFf28c546c-010a-49a6-9199-8d6df09f580dWed,
- 30 Oct 2019 19:51:16 GMTWed, 06 Nov 2019 19:51:16
- GMTAgAAAAMAAAAAAAAAlxp8ZFuP1QE=Wed,
- 30 Oct 2019 19:51:16 GMT"
+ string: "\uFEFF3ee46b27-db9b-4c7a-aba6-d033dedfca09Mon,
+ 08 Jun 2020 17:04:51 GMTMon, 15 Jun 2020 17:04:51
+ GMTAgAAAAMAAAAAAAAAdA5c7LY91gE=Mon,
+ 08 Jun 2020 17:04:51 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:16 GMT
+ - Mon, 08 Jun 2020 17:04:51 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -215,26 +215,26 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:58 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesyncbf910c49/messages
response:
body:
- string: "\uFEFF17e942ab-bc07-427b-837f-5b076dfbef14Wed,
- 30 Oct 2019 19:51:16 GMTWed, 06 Nov 2019 19:51:16
- GMTAgAAAAMAAAAAAAAA/zhldluP1QE=Wed,
- 30 Oct 2019 19:51:46 GMT1message1"
+ string: "\uFEFF1c9be4f9-5022-456c-b25f-fdca898548b9Mon,
+ 08 Jun 2020 17:04:50 GMTMon, 15 Jun 2020 17:04:50
+ GMTAgAAAAMAAAAAAAAATNlR/rY91gE=Mon,
+ 08 Jun 2020 17:05:21 GMT1message1"
headers:
cache-control:
- no-cache
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:16 GMT
+ - Mon, 08 Jun 2020 17:04:51 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -256,13 +256,13 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:58 GMT
x-ms-version:
- '2018-03-28'
method: DELETE
- uri: https://storagename.queue.core.windows.net/pyqueuesyncbf910c49/messages/17e942ab-bc07-427b-837f-5b076dfbef14?popreceipt=AgAAAAMAAAAAAAAA%2FzhldluP1QE%3D
+ uri: https://storagename.queue.core.windows.net/pyqueuesyncbf910c49/messages/1c9be4f9-5022-456c-b25f-fdca898548b9?popreceipt=AgAAAAMAAAAAAAAATNlR%2FrY91gE%3D
response:
body:
string: ''
@@ -270,7 +270,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:16 GMT
+ - Mon, 08 Jun 2020 17:04:51 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -288,32 +288,32 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:58 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesyncbf910c49/messages?numofmessages=32
response:
body:
- string: "\uFEFFedcc05bf-a29d-4cb9-8bbb-649b5d80fe1dWed,
- 30 Oct 2019 19:51:16 GMTWed, 06 Nov 2019 19:51:16
- GMTAgAAAAMAAAAAAAAAbJNzdluP1QE=Wed,
- 30 Oct 2019 19:51:46 GMT1message2e776e4ef-5047-4f14-9df0-8d8ec2757314Wed,
- 30 Oct 2019 19:51:16 GMTWed, 06 Nov 2019 19:51:16
- GMTAgAAAAMAAAAAAAAAbJNzdluP1QE=Wed,
- 30 Oct 2019 19:51:46 GMT1message3f28c546c-010a-49a6-9199-8d6df09f580dWed,
- 30 Oct 2019 19:51:16 GMTWed, 06 Nov 2019 19:51:16
- GMTAgAAAAMAAAAAAAAAbJNzdluP1QE=Wed,
- 30 Oct 2019 19:51:46 GMT1message4"
+ string: "\uFEFF320cb08e-675b-4b3b-a432-d70c42088a38Mon,
+ 08 Jun 2020 17:04:51 GMTMon, 15 Jun 2020 17:04:51
+ GMTAgAAAAMAAAAAAAAAjxN7/rY91gE=Mon,
+ 08 Jun 2020 17:05:21 GMT1message2ff5f8c39-45b4-4709-92ec-89d83ad46604Mon,
+ 08 Jun 2020 17:04:51 GMTMon, 15 Jun 2020 17:04:51
+ GMTAgAAAAMAAAAAAAAAjxN7/rY91gE=Mon,
+ 08 Jun 2020 17:05:21 GMT1message33ee46b27-db9b-4c7a-aba6-d033dedfca09Mon,
+ 08 Jun 2020 17:04:51 GMTMon, 15 Jun 2020 17:04:51
+ GMTAgAAAAMAAAAAAAAAjxN7/rY91gE=Mon,
+ 08 Jun 2020 17:05:21 GMT1message4"
headers:
cache-control:
- no-cache
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:16 GMT
+ - Mon, 08 Jun 2020 17:04:51 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -333,9 +333,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:58 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -350,7 +350,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:16 GMT
+ - Mon, 08 Jun 2020 17:04:51 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_non_existing_queue.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_non_existing_queue.yaml
index 129cdac020b1..c63eb0b09a5c 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_non_existing_queue.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_delete_non_existing_queue.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:58 GMT
x-ms-version:
- '2018-03-28'
method: DELETE
@@ -21,14 +21,14 @@ interactions:
response:
body:
string: "\uFEFFQueueNotFound
The
- specified queue does not exist.\nRequestId:bfeb4a6d-0003-0022-485b-8f643c000000\nTime:2019-10-30T19:51:17.2062766Z"
+ specified queue does not exist.\nRequestId:0f370be3-f003-0062-50b6-3d99b9000000\nTime:2020-06-08T17:04:52.3842926Z"
headers:
content-length:
- '217'
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:16 GMT
+ - Mon, 08 Jun 2020 17:04:51 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-error-code:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_messages.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_messages.yaml
index 11245d2648df..b02ae58799dc 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_messages.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_messages.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:59 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:52 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:59 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesynca7c20b89/messages
response:
body:
- string: "\uFEFFfbe399c1-6b46-4ee5-844a-fae4739e75f4Wed,
- 30 Oct 2019 19:51:17 GMTWed, 06 Nov 2019 19:51:17
- GMTAgAAAAMAAAAAAAAAkxriZFuP1QE=Wed,
- 30 Oct 2019 19:51:17 GMT"
+ string: "\uFEFF6e8a4fff-c220-4d0b-a24e-d5fae0b31c3fMon,
+ 08 Jun 2020 17:04:53 GMTMon, 15 Jun 2020 17:04:53
+ GMTAgAAAAMAAAAAAAAAXjpt7bY91gE=Mon,
+ 08 Jun 2020 17:04:53 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:52 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -92,24 +92,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:59 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesynca7c20b89/messages
response:
body:
- string: "\uFEFFb0cbf279-e432-4b98-ad15-e742baa9fef3Wed,
- 30 Oct 2019 19:51:17 GMTWed, 06 Nov 2019 19:51:17
- GMTAgAAAAMAAAAAAAAAPwvqZFuP1QE=Wed,
- 30 Oct 2019 19:51:17 GMT"
+ string: "\uFEFF964d043e-bb66-4401-bb98-f8b1ecc062a5Mon,
+ 08 Jun 2020 17:04:53 GMTMon, 15 Jun 2020 17:04:53
+ GMTAgAAAAMAAAAAAAAAwJ6A7bY91gE=Mon,
+ 08 Jun 2020 17:04:53 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:52 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -135,24 +135,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:05:00 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesynca7c20b89/messages
response:
body:
- string: "\uFEFF3310e3e9-08c3-45fd-9f90-79e2f38d6ce9Wed,
- 30 Oct 2019 19:51:17 GMTWed, 06 Nov 2019 19:51:17
- GMTAgAAAAMAAAAAAAAAT2AFZVuP1QE=Wed,
- 30 Oct 2019 19:51:17 GMT"
+ string: "\uFEFF6ba5fad4-c804-48cf-91ad-7e5651fbd9a6Mon,
+ 08 Jun 2020 17:04:53 GMTMon, 15 Jun 2020 17:04:53
+ GMTAgAAAAMAAAAAAAAAq6Wb7bY91gE=Mon,
+ 08 Jun 2020 17:04:53 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:52 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -178,24 +178,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:05:00 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesynca7c20b89/messages
response:
body:
- string: "\uFEFFde80d5f2-4ef3-4937-9378-79df09dc01caWed,
- 30 Oct 2019 19:51:17 GMTWed, 06 Nov 2019 19:51:17
- GMTAgAAAAMAAAAAAAAARxgMZVuP1QE=Wed,
- 30 Oct 2019 19:51:17 GMT"
+ string: "\uFEFF057c70a4-4996-439b-b749-c3cb26eccd61Mon,
+ 08 Jun 2020 17:04:53 GMTMon, 15 Jun 2020 17:04:53
+ GMTAgAAAAMAAAAAAAAAmc2v7bY91gE=Mon,
+ 08 Jun 2020 17:04:53 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -215,26 +215,26 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:05:00 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesynca7c20b89/messages
response:
body:
- string: "\uFEFFfbe399c1-6b46-4ee5-844a-fae4739e75f4Wed,
- 30 Oct 2019 19:51:17 GMTWed, 06 Nov 2019 19:51:17
- GMTAgAAAAMAAAAAAAAAwF31dluP1QE=Wed,
- 30 Oct 2019 19:51:47 GMT1message1"
+ string: "\uFEFF6e8a4fff-c220-4d0b-a24e-d5fae0b31c3fMon,
+ 08 Jun 2020 17:04:53 GMTMon, 15 Jun 2020 17:04:53
+ GMTAgAAAAMAAAAAAAAAOfim/7Y91gE=Mon,
+ 08 Jun 2020 17:05:23 GMT1message1"
headers:
cache-control:
- no-cache
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_messages_with_options.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_messages_with_options.yaml
index 560678d35b46..27522516da33 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_messages_with_options.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_messages_with_options.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:05:00 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:05:01 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync6405110f/messages
response:
body:
- string: "\uFEFFfdb39e8d-fd73-4be0-a54b-ff5b473ecd48Wed,
- 30 Oct 2019 19:51:18 GMTWed, 06 Nov 2019 19:51:18
- GMTAgAAAAMAAAAAAAAAWpE9ZVuP1QE=Wed,
- 30 Oct 2019 19:51:18 GMT"
+ string: "\uFEFF1164f797-f0a5-4595-a130-f8c287689decMon,
+ 08 Jun 2020 17:04:54 GMTMon, 15 Jun 2020 17:04:54
+ GMTAgAAAAMAAAAAAAAAliIx7rY91gE=Mon,
+ 08 Jun 2020 17:04:54 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -92,24 +92,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:05:01 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync6405110f/messages
response:
body:
- string: "\uFEFF39216381-2a62-49d8-94ba-409cc777a033Wed,
- 30 Oct 2019 19:51:18 GMTWed, 06 Nov 2019 19:51:18
- GMTAgAAAAMAAAAAAAAAaHBEZVuP1QE=Wed,
- 30 Oct 2019 19:51:18 GMT"
+ string: "\uFEFFf626ecc9-b2b9-480e-873c-cc2fba610363Mon,
+ 08 Jun 2020 17:04:54 GMTMon, 15 Jun 2020 17:04:54
+ GMTAgAAAAMAAAAAAAAAl0ZH7rY91gE=Mon,
+ 08 Jun 2020 17:04:54 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -135,24 +135,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:05:01 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync6405110f/messages
response:
body:
- string: "\uFEFFeaa398b9-eda5-4f52-9372-524f0ea29282Wed,
- 30 Oct 2019 19:51:18 GMTWed, 06 Nov 2019 19:51:18
- GMTAgAAAAMAAAAAAAAAqktNZVuP1QE=Wed,
- 30 Oct 2019 19:51:18 GMT"
+ string: "\uFEFF7dc252f8-2980-43c8-b382-7a870590196dMon,
+ 08 Jun 2020 17:04:54 GMTMon, 15 Jun 2020 17:04:54
+ GMTAgAAAAMAAAAAAAAAB6ta7rY91gE=Mon,
+ 08 Jun 2020 17:04:54 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:54 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -178,24 +178,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:05:01 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync6405110f/messages
response:
body:
- string: "\uFEFF122258b5-c66e-4d13-ae04-9756bd99515cWed,
- 30 Oct 2019 19:51:18 GMTWed, 06 Nov 2019 19:51:18
- GMTAgAAAAMAAAAAAAAAwlFUZVuP1QE=Wed,
- 30 Oct 2019 19:51:18 GMT"
+ string: "\uFEFFb46befd1-3eaa-443d-a943-55b9e0a9a12aMon,
+ 08 Jun 2020 17:04:54 GMTMon, 15 Jun 2020 17:04:54
+ GMTAgAAAAMAAAAAAAAAPm9v7rY91gE=Mon,
+ 08 Jun 2020 17:04:54 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:54 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -215,35 +215,35 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:05:01 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesync6405110f/messages?numofmessages=4&visibilitytimeout=20
response:
body:
- string: "\uFEFFfdb39e8d-fd73-4be0-a54b-ff5b473ecd48Wed,
- 30 Oct 2019 19:51:18 GMTWed, 06 Nov 2019 19:51:18
- GMTAgAAAAMAAAAAAAAA9EBHcVuP1QE=Wed,
- 30 Oct 2019 19:51:38 GMT1message139216381-2a62-49d8-94ba-409cc777a033Wed,
- 30 Oct 2019 19:51:18 GMTWed, 06 Nov 2019 19:51:18
- GMTAgAAAAMAAAAAAAAA9EBHcVuP1QE=Wed,
- 30 Oct 2019 19:51:38 GMT1message2eaa398b9-eda5-4f52-9372-524f0ea29282Wed,
- 30 Oct 2019 19:51:18 GMTWed, 06 Nov 2019 19:51:18
- GMTAgAAAAMAAAAAAAAA9EBHcVuP1QE=Wed,
- 30 Oct 2019 19:51:38 GMT1message3122258b5-c66e-4d13-ae04-9756bd99515cWed,
- 30 Oct 2019 19:51:18 GMTWed, 06 Nov 2019 19:51:18
- GMTAgAAAAMAAAAAAAAA9EBHcVuP1QE=Wed,
- 30 Oct 2019 19:51:38 GMT1message4"
+ string: "\uFEFF1164f797-f0a5-4595-a130-f8c287689decMon,
+ 08 Jun 2020 17:04:54 GMTMon, 15 Jun 2020 17:04:54
+ GMTAgAAAAMAAAAAAAAAjxxw+rY91gE=Mon,
+ 08 Jun 2020 17:05:14 GMT1message1f626ecc9-b2b9-480e-873c-cc2fba610363Mon,
+ 08 Jun 2020 17:04:54 GMTMon, 15 Jun 2020 17:04:54
+ GMTAgAAAAMAAAAAAAAAjxxw+rY91gE=Mon,
+ 08 Jun 2020 17:05:14 GMT1message27dc252f8-2980-43c8-b382-7a870590196dMon,
+ 08 Jun 2020 17:04:54 GMTMon, 15 Jun 2020 17:04:54
+ GMTAgAAAAMAAAAAAAAAjxxw+rY91gE=Mon,
+ 08 Jun 2020 17:05:14 GMT1message3b46befd1-3eaa-443d-a943-55b9e0a9a12aMon,
+ 08 Jun 2020 17:04:54 GMTMon, 15 Jun 2020 17:04:54
+ GMTAgAAAAMAAAAAAAAAjxxw+rY91gE=Mon,
+ 08 Jun 2020 17:05:14 GMT1message4"
headers:
cache-control:
- no-cache
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:54 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -263,9 +263,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:05:01 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -280,7 +280,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:54 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl.yaml
index 756454a3e5f3..a773b0ef5b25 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:05:01 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:55 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -43,9 +43,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:05:02 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -60,7 +60,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:55 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl_iter.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl_iter.yaml
index 5b24804c643f..b67230f7d669 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl_iter.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl_iter.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:05:02 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:56 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -43,9 +43,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:05:03 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -60,7 +60,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:17 GMT
+ - Mon, 08 Jun 2020 17:04:56 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl_with_non_existing_queue.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl_with_non_existing_queue.yaml
index ce4c8c8ad2a8..7111912599be 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl_with_non_existing_queue.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_acl_with_non_existing_queue.yaml
@@ -9,9 +9,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:05:03 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -19,14 +19,14 @@ interactions:
response:
body:
string: "\uFEFFQueueNotFound
The
- specified queue does not exist.\nRequestId:baed32a0-f003-0015-1e5b-8fc893000000\nTime:2019-10-30T19:51:19.1587481Z"
+ specified queue does not exist.\nRequestId:7a015ec3-c003-0061-41b6-3d9abe000000\nTime:2020-06-08T17:04:57.0046890Z"
headers:
content-length:
- '217'
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:18 GMT
+ - Mon, 08 Jun 2020 17:04:56 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-error-code:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_metadata_message_count.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_metadata_message_count.yaml
index 08281eb9b07a..0c9722fa0b9a 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_metadata_message_count.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_get_queue_metadata_message_count.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:05:03 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:04:57 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:05:04 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesynce42613c2/messages
response:
body:
- string: "\uFEFF78a1568c-0c64-4e39-aca7-803372f5d20cWed,
- 30 Oct 2019 19:51:19 GMTWed, 06 Nov 2019 19:51:19
- GMTAgAAAAMAAAAAAAAAXHAQZluP1QE=Wed,
- 30 Oct 2019 19:51:19 GMT"
+ string: "\uFEFF4dd60b84-d499-4a8a-b09e-ef983c46e553Mon,
+ 08 Jun 2020 17:04:57 GMTMon, 15 Jun 2020 17:04:57
+ GMTAgAAAAMAAAAAAAAA4sop8LY91gE=Mon,
+ 08 Jun 2020 17:04:57 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:04:57 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -86,9 +86,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:05:04 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -102,7 +102,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:04:57 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-approximate-messages-count:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues.yaml
index 05c5e82b78c0..930fd871d41f 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:05:04 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:04:57 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -43,9 +43,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:05:05 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -53,7 +53,7 @@ interactions:
response:
body:
string: "\uFEFFpyqueuesync63ff1110pyqueuesync6405110fpyqueuesync736a114dpyqueuesync9101903pyqueuesync9d350b45pyqueuesynca7af0b8apyqueuesynca7c20b89pyqueuesyncb3cd0be5pyqueuesyncbf740c50pyqueuesyncbf910c49pyqueuesynce42613c2pyqueuesyncf55d0df8pyqueuesync63ff1110pyqueuesync6405110fpyqueuesync736a114dpyqueuesync9101903pyqueuesync9c250b25pyqueuesync9d350b45pyqueuesynca7af0b8apyqueuesynca7c20b89pyqueuesyncb3cd0be5pyqueuesyncbf740c50pyqueuesyncbf910c49pyqueuesynce42613c2pyqueuesyncf55d0df8"
headers:
cache-control:
@@ -61,7 +61,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:04:57 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues_with_metadata.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues_with_metadata.yaml
index 09d90d7326e5..8032ab62271b 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues_with_metadata.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues_with_metadata.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:05:05 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:04:58 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -45,9 +45,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:05:05 GMT
x-ms-meta-val1:
- test
x-ms-meta-val2:
@@ -63,7 +63,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:04:58 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -81,9 +81,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:05:06 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -99,7 +99,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:04:58 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues_with_options.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues_with_options.yaml
deleted file mode 100644
index 793d0ca495ae..000000000000
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_list_queues_with_options.yaml
+++ /dev/null
@@ -1,213 +0,0 @@
-interactions:
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/listqueue0560410cb
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:20 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/listqueue1560410cb
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:20 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/listqueue2560410cb
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:20 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: null
- headers:
- Accept:
- - '*/*'
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- Content-Length:
- - '0'
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
- x-ms-version:
- - '2018-03-28'
- method: PUT
- uri: https://storagename.queue.core.windows.net/listqueue3560410cb
- response:
- body:
- string: ''
- headers:
- content-length:
- - '0'
- date:
- - Wed, 30 Oct 2019 19:51:20 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- x-ms-version:
- - '2018-03-28'
- status:
- code: 201
- message: Created
-- request:
- body: null
- headers:
- Accept:
- - application/xml
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
- x-ms-version:
- - '2018-03-28'
- method: GET
- uri: https://storagename.queue.core.windows.net/?prefix=listqueue&maxresults=3&comp=list
- response:
- body:
- string: "\uFEFFlistqueue3listqueue0560410cblistqueue1560410cblistqueue2560410cb/storagename/listqueue3560410cb"
- headers:
- cache-control:
- - no-cache
- content-type:
- - application/xml
- date:
- - Wed, 30 Oct 2019 19:51:20 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- transfer-encoding:
- - chunked
- x-ms-version:
- - '2018-03-28'
- status:
- code: 200
- message: OK
-- request:
- body: null
- headers:
- Accept:
- - application/xml
- Accept-Encoding:
- - gzip, deflate
- Connection:
- - keep-alive
- User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
- x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
- x-ms-version:
- - '2018-03-28'
- method: GET
- uri: https://storagename.queue.core.windows.net/?prefix=listqueue&marker=%2Fstoragename%2Flistqueue3560410cb&include=metadata&comp=list
- response:
- body:
- string: "\uFEFFlistqueue/storagename/listqueue3560410cblistqueue3560410cb"
- headers:
- cache-control:
- - no-cache
- content-type:
- - application/xml
- date:
- - Wed, 30 Oct 2019 19:51:20 GMT
- server:
- - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
- transfer-encoding:
- - chunked
- x-ms-version:
- - '2018-03-28'
- status:
- code: 200
- message: OK
-version: 1
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_peek_messages.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_peek_messages.yaml
index 44ec542ac6ca..0f4ad1703b6f 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_peek_messages.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_peek_messages.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:04:00 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:19 GMT
+ - Mon, 08 Jun 2020 17:03:52 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:04:00 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncb3920bee/messages
response:
body:
- string: "\uFEFFd3e2c7c5-b6fe-4ff1-98de-d2f9a21bd606Wed,
- 30 Oct 2019 19:51:20 GMTWed, 06 Nov 2019 19:51:20
- GMTAgAAAAMAAAAAAAAA0+T7ZluP1QE=Wed,
- 30 Oct 2019 19:51:20 GMT"
+ string: "\uFEFF4ac2aaa1-19cd-4e63-aad7-6201dcd5bc4dMon,
+ 08 Jun 2020 17:03:53 GMTMon, 15 Jun 2020 17:03:53
+ GMTAgAAAAMAAAAAAAAAlgosyrY91gE=Mon,
+ 08 Jun 2020 17:03:53 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:03:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -92,24 +92,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:04:00 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncb3920bee/messages
response:
body:
- string: "\uFEFF1e5f207c-2717-45ce-9e4b-62e573642715Wed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMTAgAAAAMAAAAAAAAA53EEZ1uP1QE=Wed,
- 30 Oct 2019 19:51:21 GMT"
+ string: "\uFEFF3a9fd261-b454-4db3-b852-8dcc2a25d0dfMon,
+ 08 Jun 2020 17:03:54 GMTMon, 15 Jun 2020 17:03:54
+ GMTAgAAAAMAAAAAAAAAhDJAyrY91gE=Mon,
+ 08 Jun 2020 17:03:54 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:03:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -135,24 +135,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:04:00 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncb3920bee/messages
response:
body:
- string: "\uFEFF46bc96d4-f6d2-43cc-8463-91346560114aWed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMTAgAAAAMAAAAAAAAAcTsMZ1uP1QE=Wed,
- 30 Oct 2019 19:51:21 GMT"
+ string: "\uFEFF54bb7834-6c28-4adc-a643-9915e2f92307Mon,
+ 08 Jun 2020 17:03:54 GMTMon, 15 Jun 2020 17:03:54
+ GMTAgAAAAMAAAAAAAAAgahUyrY91gE=Mon,
+ 08 Jun 2020 17:03:54 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:03:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -178,24 +178,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:04:01 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncb3920bee/messages
response:
body:
- string: "\uFEFF93b81ec5-0137-4bb2-a5e5-1e282f028c1cWed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMTAgAAAAMAAAAAAAAAwI8TZ1uP1QE=Wed,
- 30 Oct 2019 19:51:21 GMT"
+ string: "\uFEFF88f3e32a-0ecd-4e07-8296-bb392ad3f5bfMon,
+ 08 Jun 2020 17:03:54 GMTMon, 15 Jun 2020 17:03:54
+ GMTAgAAAAMAAAAAAAAA1+VnyrY91gE=Mon,
+ 08 Jun 2020 17:03:54 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:03:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -215,17 +215,17 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:01 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesyncb3920bee/messages?peekonly=true
response:
body:
- string: "\uFEFFd3e2c7c5-b6fe-4ff1-98de-d2f9a21bd606Wed,
- 30 Oct 2019 19:51:20 GMTWed, 06 Nov 2019 19:51:20
+ string: "\uFEFF4ac2aaa1-19cd-4e63-aad7-6201dcd5bc4dMon,
+ 08 Jun 2020 17:03:53 GMTMon, 15 Jun 2020 17:03:53
GMT0message1"
headers:
cache-control:
@@ -233,7 +233,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:03:53 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_peek_messages_with_options.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_peek_messages_with_options.yaml
index 802c3eb9cff3..1dd2aa6b28e6 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_peek_messages_with_options.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_peek_messages_with_options.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:01 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:03:54 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:01 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync74f61174/messages
response:
body:
- string: "\uFEFFae5fb0e7-98e6-4556-b73d-59afb5b41525Wed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMTAgAAAAMAAAAAAAAAw+FEZ1uP1QE=Wed,
- 30 Oct 2019 19:51:21 GMT"
+ string: "\uFEFF06aa0e94-fc63-409e-a1bd-bc8366c01a44Mon,
+ 08 Jun 2020 17:03:55 GMTMon, 15 Jun 2020 17:03:55
+ GMTAgAAAAMAAAAAAAAA6LHeyrY91gE=Mon,
+ 08 Jun 2020 17:03:55 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:03:54 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -92,24 +92,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:01 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync74f61174/messages
response:
body:
- string: "\uFEFF7bb79078-d297-4a70-94cd-48c9c1473d75Wed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMTAgAAAAMAAAAAAAAAskdNZ1uP1QE=Wed,
- 30 Oct 2019 19:51:21 GMT"
+ string: "\uFEFF37bf4327-a181-40ea-abf9-3989e512ae58Mon,
+ 08 Jun 2020 17:03:55 GMTMon, 15 Jun 2020 17:03:55
+ GMTAgAAAAMAAAAAAAAALO/xyrY91gE=Mon,
+ 08 Jun 2020 17:03:55 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:03:54 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -135,24 +135,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:02 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync74f61174/messages
response:
body:
- string: "\uFEFF9a8e1140-3ab4-437a-b7fc-80f79ce0c9fcWed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMTAgAAAAMAAAAAAAAAYThVZ1uP1QE=Wed,
- 30 Oct 2019 19:51:21 GMT"
+ string: "\uFEFF09443a42-3ab0-4fbe-ba02-98e7878c7c5dMon,
+ 08 Jun 2020 17:03:55 GMTMon, 15 Jun 2020 17:03:55
+ GMTAgAAAAMAAAAAAAAAsCgHy7Y91gE=Mon,
+ 08 Jun 2020 17:03:55 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:20 GMT
+ - Mon, 08 Jun 2020 17:03:55 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -178,24 +178,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:02 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync74f61174/messages
response:
body:
- string: "\uFEFF555a9d2d-5a9c-441c-abf2-af8958b77b7cWed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMTAgAAAAMAAAAAAAAAxrNcZ1uP1QE=Wed,
- 30 Oct 2019 19:51:21 GMT"
+ string: "\uFEFF60a7e342-67af-4aa7-a7df-e0fc64307ca6Mon,
+ 08 Jun 2020 17:03:55 GMTMon, 15 Jun 2020 17:03:55
+ GMTAgAAAAMAAAAAAAAA2MUby7Y91gE=Mon,
+ 08 Jun 2020 17:03:55 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:03:55 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -215,23 +215,23 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:02 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesync74f61174/messages?numofmessages=4&peekonly=true
response:
body:
- string: "\uFEFFae5fb0e7-98e6-4556-b73d-59afb5b41525Wed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMT0message17bb79078-d297-4a70-94cd-48c9c1473d75Wed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMT0message29a8e1140-3ab4-437a-b7fc-80f79ce0c9fcWed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
- GMT0message3555a9d2d-5a9c-441c-abf2-af8958b77b7cWed,
- 30 Oct 2019 19:51:21 GMTWed, 06 Nov 2019 19:51:21
+ string: "\uFEFF06aa0e94-fc63-409e-a1bd-bc8366c01a44Mon,
+ 08 Jun 2020 17:03:55 GMTMon, 15 Jun 2020 17:03:55
+ GMT0message137bf4327-a181-40ea-abf9-3989e512ae58Mon,
+ 08 Jun 2020 17:03:55 GMTMon, 15 Jun 2020 17:03:55
+ GMT0message209443a42-3ab0-4fbe-ba02-98e7878c7c5dMon,
+ 08 Jun 2020 17:03:55 GMTMon, 15 Jun 2020 17:03:55
+ GMT0message360a7e342-67af-4aa7-a7df-e0fc64307ca6Mon,
+ 08 Jun 2020 17:03:55 GMTMon, 15 Jun 2020 17:03:55
GMT0message4"
headers:
cache-control:
@@ -239,7 +239,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:03:55 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
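Both peek recordings above follow the same shape: create a queue, post four messages, then peek without dequeuing, which is why `DequeueCount` stays 0 and no pop receipt is returned. A hedged sketch of the client side (queue name and key are placeholders, not the test's fixtures):

```python
from azure.storage.queue import QueueClient

queue = QueueClient(
    "https://storagename.queue.core.windows.net",
    queue_name="myqueue",                  # placeholder queue name
    credential="account-key-placeholder",  # placeholder key
)
queue.create_queue()
for i in range(1, 5):
    queue.send_message("message{}".format(i))

one = queue.peek_messages()                 # GET .../messages?peekonly=true
four = queue.peek_messages(max_messages=4)  # GET .../messages?numofmessages=4&peekonly=true
```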
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message.yaml
index 89dabf78db85..018b08be9a72 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:02 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:03:55 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:03 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync9d3c0b2f/messages
response:
body:
- string: "\uFEFFdb597d02-2706-4dee-91bd-497f83c5e4ebWed,
- 30 Oct 2019 19:51:22 GMTWed, 06 Nov 2019 19:51:22
- GMTAgAAAAMAAAAAAAAA/uCWZ1uP1QE=Wed,
- 30 Oct 2019 19:51:22 GMT"
+ string: "\uFEFF604cdbd8-9a86-438b-a7f5-d9797c4fd01aMon,
+ 08 Jun 2020 17:03:56 GMTMon, 15 Jun 2020 17:03:56
+ GMTAgAAAAMAAAAAAAAAHLGWy7Y91gE=Mon,
+ 08 Jun 2020 17:03:56 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:03:55 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -92,24 +92,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:03 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync9d3c0b2f/messages
response:
body:
- string: "\uFEFF289685d5-c1ea-45d5-a54c-f2ae603ccd7aWed,
- 30 Oct 2019 19:51:22 GMTWed, 06 Nov 2019 19:51:22
- GMTAgAAAAMAAAAAAAAAvvieZ1uP1QE=Wed,
- 30 Oct 2019 19:51:22 GMT"
+ string: "\uFEFF287c9f24-7f9e-4ddb-a13f-64d57582dfa9Mon,
+ 08 Jun 2020 17:03:56 GMTMon, 15 Jun 2020 17:03:56
+ GMTAgAAAAMAAAAAAAAAwTisy7Y91gE=Mon,
+ 08 Jun 2020 17:03:56 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:03:55 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -135,24 +135,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:03 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync9d3c0b2f/messages
response:
body:
- string: "\uFEFF2df29b67-253c-4aff-8c89-5b2f9bf4edc1Wed,
- 30 Oct 2019 19:51:22 GMTWed, 06 Nov 2019 19:51:22
- GMTAgAAAAMAAAAAAAAAtrClZ1uP1QE=Wed,
- 30 Oct 2019 19:51:22 GMT"
+ string: "\uFEFF9d0080b5-a9f8-456c-8551-85a3630c1c2cMon,
+ 08 Jun 2020 17:03:56 GMTMon, 15 Jun 2020 17:03:56
+ GMTAgAAAAMAAAAAAAAA4NXAy7Y91gE=Mon,
+ 08 Jun 2020 17:03:56 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:03:56 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -178,24 +178,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:04:03 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync9d3c0b2f/messages
response:
body:
- string: "\uFEFF86e29298-41ae-4763-8e96-96cedbbaeabbWed,
- 30 Oct 2019 19:51:22 GMTWed, 06 Nov 2019 19:51:22
- GMTAgAAAAMAAAAAAAAAoBauZ1uP1QE=Wed,
- 30 Oct 2019 19:51:22 GMT"
+ string: "\uFEFFcade9094-53de-47b0-8cb7-0bd760f70e2aMon,
+ 08 Jun 2020 17:03:56 GMTMon, 15 Jun 2020 17:03:56
+ GMTAgAAAAMAAAAAAAAA/CDXy7Y91gE=Mon,
+ 08 Jun 2020 17:03:56 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:03:56 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message_infinite_time_to_live.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message_infinite_time_to_live.yaml
index 3f3af1064344..0f0fd4a0cca1 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message_infinite_time_to_live.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message_infinite_time_to_live.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:03 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:03:56 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:04 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncfb421443/messages?messagettl=-1
response:
body:
- string: "\uFEFF552739d6-9508-400b-900d-34bf7148f519Wed,
- 30 Oct 2019 19:51:22 GMTFri, 31 Dec 9999 23:59:59
- GMTAgAAAAMAAAAAAAAAl4nYZ1uP1QE=Wed,
- 30 Oct 2019 19:51:22 GMT"
+ string: "\uFEFF25af4061-5261-4c1d-9f01-dd88579490eeMon,
+ 08 Jun 2020 17:03:57 GMTFri, 31 Dec 9999 23:59:59
+ GMTAgAAAAMAAAAAAAAAK/Y+zLY91gE=Mon,
+ 08 Jun 2020 17:03:57 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:03:56 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -86,17 +86,17 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:04 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesyncfb421443/messages?peekonly=true
response:
body:
- string: "\uFEFF552739d6-9508-400b-900d-34bf7148f519Wed,
- 30 Oct 2019 19:51:22 GMTFri, 31 Dec 9999 23:59:59
+ string: "\uFEFF25af4061-5261-4c1d-9f01-dd88579490eeMon,
+ 08 Jun 2020 17:03:57 GMTFri, 31 Dec 9999 23:59:59
GMT0message1"
headers:
cache-control:
@@ -104,7 +104,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:03:56 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message_large_time_to_live.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message_large_time_to_live.yaml
index 5600a099c3ba..afeb538794ee 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message_large_time_to_live.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_put_message_large_time_to_live.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:04 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:03:57 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:04 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncbef712f8/messages?messagettl=1073741824
response:
body:
- string: "\uFEFFb5933b27-90aa-42a0-ac03-b80881de690dWed,
- 30 Oct 2019 19:51:22 GMTSat, 08 Nov 2053 09:28:26
- GMTAgAAAAMAAAAAAAAAYekMaFuP1QE=Wed,
- 30 Oct 2019 19:51:22 GMT"
+ string: "\uFEFF004f7411-f417-4533-88a1-853bcd9ecec7Mon,
+ 08 Jun 2020 17:03:58 GMTThu, 18 Jun 2054 06:41:02
+ GMTAgAAAAMAAAAAAAAAkOu+zLY91gE=Mon,
+ 08 Jun 2020 17:03:58 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:21 GMT
+ - Mon, 08 Jun 2020 17:03:58 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -86,17 +86,17 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:05 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesyncbef712f8/messages?peekonly=true
response:
body:
- string: "\uFEFFb5933b27-90aa-42a0-ac03-b80881de690dWed,
- 30 Oct 2019 19:51:22 GMTSat, 08 Nov 2053 09:28:26
+ string: "\uFEFF004f7411-f417-4533-88a1-853bcd9ecec7Mon,
+ 08 Jun 2020 17:03:58 GMTThu, 18 Jun 2054 06:41:02
GMT0message1"
headers:
cache-control:
@@ -104,7 +104,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:03:58 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
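The two TTL recordings above differ only in the `messagettl` query parameter: `-1` requests an infinite time-to-live (the service pins `ExpirationTime` to 31 Dec 9999), while a large finite value simply lands decades out. A hedged sketch, reusing a key-authenticated `QueueClient` like the one in the earlier sketch:

```python
# `queue` is a QueueClient as in the earlier sketch (placeholder credentials).
queue.send_message("message1", time_to_live=-1)          # POST .../messages?messagettl=-1
queue.send_message("message1", time_to_live=1073741824)  # POST .../messages?messagettl=1073741824
```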
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_queue_exists.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_queue_exists.yaml
index d980139b0187..a855d56b4258 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_queue_exists.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_queue_exists.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:05 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:03:57 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -43,9 +43,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:05 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -59,7 +59,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:03:58 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-approximate-messages-count:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_queue_not_exists.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_queue_not_exists.yaml
index 87f708decb12..bd2fb23f438c 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_queue_not_exists.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_queue_not_exists.yaml
@@ -9,9 +9,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:05 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -19,14 +19,14 @@ interactions:
response:
body:
string: "\uFEFFQueueNotFound
The
- specified queue does not exist.\nRequestId:9d9e853d-1003-003d-2e5b-8fbf2c000000\nTime:2019-10-30T19:51:23.3193976Z"
+ specified queue does not exist.\nRequestId:a814d0ad-6003-0023-12b6-3db1aa000000\nTime:2020-06-08T17:03:59.5534106Z"
headers:
content-length:
- '217'
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:22 GMT
+ - Mon, 08 Jun 2020 17:03:59 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-error-code:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_add.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_add.yaml
new file mode 100644
index 000000000000..8176250fb415
--- /dev/null
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_add.yaml
@@ -0,0 +1,118 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:06 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: PUT
+ uri: https://storagename.queue.core.windows.net/pyqueuesync72410961
+ response:
+ body:
+ string: ''
+ headers:
+ content-length:
+ - '0'
+ date:
+ - Mon, 08 Jun 2020 17:03:59 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '<?xml version="1.0" encoding="utf-8"?>
+
+ <QueueMessage><MessageText>addedmessage</MessageText></QueueMessage>'
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '107'
+ Content-Type:
+ - application/xml; charset=utf-8
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:06 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: POST
+ uri: https://storagename.queue.core.windows.net/pyqueuesync72410961/messages?se=2020-06-08T18%3A04%3A06Z&sp=a&sv=2018-03-28&sig=746j%2BwI7Xg1WPD%2BwfTSktm/u1fkZWoy3PpDeIP3PX7w%3D
+ response:
+ body:
+ string: "\uFEFF216cb79c-aa68-4a75-86af-e49d344a1481Mon,
+ 08 Jun 2020 17:04:00 GMTMon, 15 Jun 2020 17:04:00
+ GMTAgAAAAMAAAAAAAAAEjYmzrY91gE=Mon,
+ 08 Jun 2020 17:04:00 GMT"
+ headers:
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:03:59 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:07 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: GET
+ uri: https://storagename.queue.core.windows.net/pyqueuesync72410961/messages
+ response:
+ body:
+ string: "\uFEFF216cb79c-aa68-4a75-86af-e49d344a1481Mon,
+ 08 Jun 2020 17:04:00 GMTMon, 15 Jun 2020 17:04:00
+ GMTAgAAAAMAAAAAAAAAF08c4LY91gE=Mon,
+ 08 Jun 2020 17:04:30 GMT1addedmessage"
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:00 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 200
+ message: OK
+version: 1
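The new test_sas_add recording authenticates the send with a queue-level SAS whose permission string is `sp=a` (add only). A hedged sketch of minting and using such a token (account key and queue name are placeholders, not the test's fixtures):

```python
from datetime import datetime, timedelta

from azure.storage.queue import QueueClient, QueueSasPermissions, generate_queue_sas

sas = generate_queue_sas(
    account_name="storagename",
    queue_name="myqueue",                   # placeholder
    account_key="account-key-placeholder",  # placeholder
    permission=QueueSasPermissions(add=True),
    expiry=datetime.utcnow() + timedelta(hours=1),
)
sas_queue = QueueClient(
    "https://storagename.queue.core.windows.net", queue_name="myqueue", credential=sas
)
sas_queue.send_message("addedmessage")  # POST .../messages?...sp=a&sig=...
```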
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_process.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_process.yaml
new file mode 100644
index 000000000000..51e7170a6ba4
--- /dev/null
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_process.yaml
@@ -0,0 +1,118 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:07 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: PUT
+ uri: https://storagename.queue.core.windows.net/pyqueuesync9ccd0b37
+ response:
+ body:
+ string: ''
+ headers:
+ content-length:
+ - '0'
+ date:
+ - Mon, 08 Jun 2020 17:04:00 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '<?xml version="1.0" encoding="utf-8"?>
+
+ <QueueMessage><MessageText>message1</MessageText></QueueMessage>'
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '103'
+ Content-Type:
+ - application/xml; charset=utf-8
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:08 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: POST
+ uri: https://storagename.queue.core.windows.net/pyqueuesync9ccd0b37/messages
+ response:
+ body:
+ string: "\uFEFFb0996dac-fe4c-41e5-8d7b-2b8d3377feedMon,
+ 08 Jun 2020 17:04:01 GMTMon, 15 Jun 2020 17:04:01
+ GMTAgAAAAMAAAAAAAAAgq6pzrY91gE=Mon,
+ 08 Jun 2020 17:04:01 GMT"
+ headers:
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:00 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:08 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: GET
+ uri: https://storagename.queue.core.windows.net/pyqueuesync9ccd0b37/messages?se=2020-06-08T18%3A04%3A08Z&sp=p&sv=2018-03-28&sig=vWh%2BrCFVfVCPesedK3URqwsmUYnODczF8vTQ21vRFjc%3D
+ response:
+ body:
+ string: "\uFEFFb0996dac-fe4c-41e5-8d7b-2b8d3377feedMon,
+ 08 Jun 2020 17:04:01 GMTMon, 15 Jun 2020 17:04:01
+ GMTAgAAAAMAAAAAAAAAI7LT4LY91gE=Mon,
+ 08 Jun 2020 17:04:31 GMT1message1"
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:01 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 200
+ message: OK
+version: 1
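test_sas_process is the receive-side counterpart: the SAS carries `sp=p` (process), and the authenticated GET actually dequeues the message, which is why the recorded response shows a `DequeueCount` of 1 and a fresh pop receipt. A hedged sketch (placeholder names and key):

```python
from datetime import datetime, timedelta

from azure.storage.queue import QueueClient, QueueSasPermissions, generate_queue_sas

sas = generate_queue_sas(
    account_name="storagename",
    queue_name="myqueue",                   # placeholder
    account_key="account-key-placeholder",  # placeholder
    permission=QueueSasPermissions(process=True),
    expiry=datetime.utcnow() + timedelta(hours=1),
)
sas_queue = QueueClient(
    "https://storagename.queue.core.windows.net", queue_name="myqueue", credential=sas
)
received = list(sas_queue.receive_messages())  # GET .../messages?...sp=p&sig=...
```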
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_read.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_read.yaml
new file mode 100644
index 000000000000..480db3d340c0
--- /dev/null
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_read.yaml
@@ -0,0 +1,117 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:08 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: PUT
+ uri: https://storagename.queue.core.windows.net/pyqueuesync7c4709d4
+ response:
+ body:
+ string: ''
+ headers:
+ content-length:
+ - '0'
+ date:
+ - Mon, 08 Jun 2020 17:04:01 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '<?xml version="1.0" encoding="utf-8"?>
+
+ <QueueMessage><MessageText>message1</MessageText></QueueMessage>'
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '103'
+ Content-Type:
+ - application/xml; charset=utf-8
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:09 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: POST
+ uri: https://storagename.queue.core.windows.net/pyqueuesync7c4709d4/messages
+ response:
+ body:
+ string: "\uFEFFf980ec03-8355-466e-bc1c-5fc4b38063d3Mon,
+ 08 Jun 2020 17:04:02 GMTMon, 15 Jun 2020 17:04:02
+ GMTAgAAAAMAAAAAAAAA+fVaz7Y91gE=Mon,
+ 08 Jun 2020 17:04:02 GMT"
+ headers:
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:01 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:09 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: GET
+ uri: https://storagename.queue.core.windows.net/pyqueuesync7c4709d4/messages?peekonly=true&st=2020-06-08T16%3A59%3A09Z&se=2020-06-08T18%3A04%3A09Z&sp=r&sv=2018-03-28&sig=S0yjP8DaGzN1J%2Bgh%2B9EpJttEt1vOBD2vSqTtt/QeC7s%3D
+ response:
+ body:
+ string: "\uFEFFf980ec03-8355-466e-bc1c-5fc4b38063d3Mon,
+ 08 Jun 2020 17:04:02 GMTMon, 15 Jun 2020 17:04:02
+ GMT0message1"
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:03 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 200
+ message: OK
+version: 1
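test_sas_read uses `sp=r` plus an explicit start time (`st=` in the recorded URI), and only peeks, so `DequeueCount` stays 0. A hedged sketch (placeholder names and key):

```python
from datetime import datetime, timedelta

from azure.storage.queue import QueueClient, QueueSasPermissions, generate_queue_sas

now = datetime.utcnow()
sas = generate_queue_sas(
    account_name="storagename",
    queue_name="myqueue",                   # placeholder
    account_key="account-key-placeholder",  # placeholder
    permission=QueueSasPermissions(read=True),
    start=now - timedelta(minutes=5),
    expiry=now + timedelta(hours=1),
)
sas_queue = QueueClient(
    "https://storagename.queue.core.windows.net", queue_name="myqueue", credential=sas
)
peeked = sas_queue.peek_messages()  # GET .../messages?peekonly=true&st=...&sp=r&sig=...
```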
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_signed_identifier.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_signed_identifier.yaml
new file mode 100644
index 000000000000..4d4cb97a1619
--- /dev/null
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_signed_identifier.yaml
@@ -0,0 +1,155 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:10 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: PUT
+ uri: https://storagename.queue.core.windows.net/pyqueuesync21d30f34
+ response:
+ body:
+ string: ''
+ headers:
+ content-length:
+ - '0'
+ date:
+ - Mon, 08 Jun 2020 17:04:03 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '<?xml version="1.0" encoding="utf-8"?>
+
+ <SignedIdentifiers><SignedIdentifier><Id>testid</Id><AccessPolicy><Start>2020-06-08T16:04:10Z</Start><Expiry>2020-06-08T18:04:10Z</Expiry><Permission>r</Permission></AccessPolicy></SignedIdentifier></SignedIdentifiers>'
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '257'
+ Content-Type:
+ - application/xml; charset=utf-8
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:10 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: PUT
+ uri: https://storagename.queue.core.windows.net/pyqueuesync21d30f34?comp=acl
+ response:
+ body:
+ string: ''
+ headers:
+ content-length:
+ - '0'
+ date:
+ - Mon, 08 Jun 2020 17:04:03 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 204
+ message: No Content
+- request:
+ body: '<?xml version="1.0" encoding="utf-8"?>
+
+ <QueueMessage><MessageText>message1</MessageText></QueueMessage>'
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '103'
+ Content-Type:
+ - application/xml; charset=utf-8
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:11 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: POST
+ uri: https://storagename.queue.core.windows.net/pyqueuesync21d30f34/messages
+ response:
+ body:
+ string: "\uFEFF0ccc654a-3747-4054-95aa-583ad998c07eMon,
+ 08 Jun 2020 17:04:04 GMTMon, 15 Jun 2020 17:04:04
+ GMTAgAAAAMAAAAAAAAAADFk0LY91gE=Mon,
+ 08 Jun 2020 17:04:04 GMT"
+ headers:
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:03 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:11 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: GET
+ uri: https://storagename.queue.core.windows.net/pyqueuesync21d30f34/messages?peekonly=true&sv=2018-03-28&si=testid&sig=FuU5921vP09dY/nAFGENsp%2BQKcN3xZ1JryZv5t8BpFk%3D
+ response:
+ body:
+ string: "\uFEFF0ccc654a-3747-4054-95aa-583ad998c07eMon,
+ 08 Jun 2020 17:04:04 GMTMon, 15 Jun 2020 17:04:04
+ GMT0message1"
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:04 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 200
+ message: OK
+version: 1
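test_sas_signed_identifier first PUTs an access policy (`comp=acl`) and then mints a SAS that references it by id (`si=testid` in the recorded URI), so the start, expiry, and permission live on the stored policy rather than in the token itself. A hedged sketch (placeholder names and key; `queue` is a key-authenticated QueueClient as in the earlier sketches):

```python
from datetime import datetime, timedelta

from azure.storage.queue import AccessPolicy, QueueSasPermissions, generate_queue_sas

policy = AccessPolicy(
    permission=QueueSasPermissions(read=True),
    start=datetime.utcnow() - timedelta(hours=1),
    expiry=datetime.utcnow() + timedelta(hours=1),
)
queue.set_queue_access_policy(signed_identifiers={"testid": policy})  # PUT ...?comp=acl

sas = generate_queue_sas(
    account_name="storagename",
    queue_name="myqueue",                   # placeholder
    account_key="account-key-placeholder",  # placeholder
    policy_id="testid",                     # -> si=testid in the SAS query string
)
```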
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_update.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_update.yaml
new file mode 100644
index 000000000000..e373b7071987
--- /dev/null
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_sas_update.yaml
@@ -0,0 +1,199 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:11 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: PUT
+ uri: https://storagename.queue.core.windows.net/pyqueuesync91880abb
+ response:
+ body:
+ string: ''
+ headers:
+ content-length:
+ - '0'
+ date:
+ - Mon, 08 Jun 2020 17:04:05 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '<?xml version="1.0" encoding="utf-8"?>
+
+ <QueueMessage><MessageText>message1</MessageText></QueueMessage>'
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '103'
+ Content-Type:
+ - application/xml; charset=utf-8
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:12 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: POST
+ uri: https://storagename.queue.core.windows.net/pyqueuesync91880abb/messages
+ response:
+ body:
+ string: "\uFEFF873252c5-dcf9-43da-8dfb-c18d22ebaf27Mon,
+ 08 Jun 2020 17:04:05 GMTMon, 15 Jun 2020 17:04:05
+ GMTAgAAAAMAAAAAAAAAVDc30bY91gE=Mon,
+ 08 Jun 2020 17:04:05 GMT"
+ headers:
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:05 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:12 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: GET
+ uri: https://storagename.queue.core.windows.net/pyqueuesync91880abb/messages
+ response:
+ body:
+ string: "\uFEFF873252c5-dcf9-43da-8dfb-c18d22ebaf27Mon,
+ 08 Jun 2020 17:04:05 GMTMon, 15 Jun 2020 17:04:05
+ GMTAgAAAAMAAAAAAAAAKAIt47Y91gE=Mon,
+ 08 Jun 2020 17:04:35 GMT1message1"
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:05 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 200
+ message: OK
+- request:
+ body: '<?xml version="1.0" encoding="utf-8"?>
+
+ <QueueMessage><MessageText>updatedmessage1</MessageText></QueueMessage>'
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '110'
+ Content-Type:
+ - application/xml; charset=utf-8
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:12 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: PUT
+ uri: https://storagename.queue.core.windows.net/pyqueuesync91880abb/messages/873252c5-dcf9-43da-8dfb-c18d22ebaf27?popreceipt=AgAAAAMAAAAAAAAAKAIt47Y91gE%3D&visibilitytimeout=0&se=2020-06-08T18%3A04%3A12Z&sp=u&sv=2018-03-28&sig=6nP8Pj1XWdEAn4LuDlqtRLiXS2zJ3M8nLL7rt0GHfUI%3D
+ response:
+ body:
+ string: ''
+ headers:
+ content-length:
+ - '0'
+ date:
+ - Mon, 08 Jun 2020 17:04:05 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ x-ms-popreceipt:
+ - AwAAAAMAAAAAAAAAt4em0bY91gEBAAAA
+ x-ms-time-next-visible:
+ - Mon, 08 Jun 2020 17:04:06 GMT
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 204
+ message: No Content
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:04:13 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: GET
+ uri: https://storagename.queue.core.windows.net/pyqueuesync91880abb/messages
+ response:
+ body:
+ string: "\uFEFF873252c5-dcf9-43da-8dfb-c18d22ebaf27Mon,
+ 08 Jun 2020 17:04:05 GMTMon, 15 Jun 2020 17:04:05
+ GMTAgAAAAMAAAAAAAAA1sec47Y91gE=Mon,
+ 08 Jun 2020 17:04:36 GMT2updatedmessage1"
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:04:06 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 200
+ message: OK
+version: 1
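test_sas_update first receives the message with the account key, then uses an `sp=u` SAS together with the pop receipt to rewrite the content; the final GET shows `DequeueCount` 2 and the updated text. A hedged sketch (placeholder names and key; `queue` is a key-authenticated QueueClient as in the earlier sketches):

```python
from datetime import datetime, timedelta

from azure.storage.queue import QueueClient, QueueSasPermissions, generate_queue_sas

msg = next(queue.receive_messages())  # GET .../messages, yields a pop receipt

sas = generate_queue_sas(
    account_name="storagename",
    queue_name="myqueue",                   # placeholder
    account_key="account-key-placeholder",  # placeholder
    permission=QueueSasPermissions(update=True),
    expiry=datetime.utcnow() + timedelta(hours=1),
)
sas_queue = QueueClient(
    "https://storagename.queue.core.windows.net", queue_name="myqueue", credential=sas
)
# PUT .../messages/<id>?popreceipt=...&visibilitytimeout=0&...sp=u&sig=...
sas_queue.update_message(
    msg, pop_receipt=msg.pop_receipt, content="updatedmessage1", visibility_timeout=0
)
```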
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl.yaml
index cbf7b547dbe1..0c0ce7c5d492 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:24 GMT
+ - Mon, 08 Jun 2020 17:04:13 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:07 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -47,9 +47,9 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:24 GMT
+ - Mon, 08 Jun 2020 17:04:14 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -61,7 +61,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:07 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -79,9 +79,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:24 GMT
+ - Mon, 08 Jun 2020 17:04:14 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -96,7 +96,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:07 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_too_many_ids.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_too_many_ids.yaml
index 6cff524d0670..56fbd770f1bb 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_too_many_ids.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_too_many_ids.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:24 GMT
+ - Mon, 08 Jun 2020 17:04:14 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:08 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_empty_signed_identifier.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_empty_signed_identifier.yaml
index 4e612dc805e9..10b24d6516e7 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_empty_signed_identifier.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_empty_signed_identifier.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:24 GMT
+ - Mon, 08 Jun 2020 17:04:15 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:09 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,9 +49,9 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:24 GMT
+ - Mon, 08 Jun 2020 17:04:16 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -63,7 +63,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:09 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -81,9 +81,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:16 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -97,7 +97,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:09 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_empty_signed_identifiers.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_empty_signed_identifiers.yaml
index d7b57e1680fe..a97e05d66a38 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_empty_signed_identifiers.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_empty_signed_identifiers.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:16 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:24 GMT
+ - Mon, 08 Jun 2020 17:04:09 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -47,9 +47,9 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:16 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -61,7 +61,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:24 GMT
+ - Mon, 08 Jun 2020 17:04:09 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -79,9 +79,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:17 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -96,7 +96,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:24 GMT
+ - Mon, 08 Jun 2020 17:04:09 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_non_existing_queue.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_non_existing_queue.yaml
index 6ac01b7ad98a..c1152acf1680 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_non_existing_queue.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_non_existing_queue.yaml
@@ -13,9 +13,9 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:17 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -23,14 +23,14 @@ interactions:
response:
body:
string: "\uFEFFQueueNotFound
The
- specified queue does not exist.\nRequestId:1cddac6e-6003-0010-015b-8f3cec000000\nTime:2019-10-30T19:51:24.8157732Z"
+ specified queue does not exist.\nRequestId:9817e02e-b003-0021-0db6-3db350000000\nTime:2020-06-08T17:04:11.0659611Z"
headers:
content-length:
- '217'
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:23 GMT
+ - Mon, 08 Jun 2020 17:04:10 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-error-code:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_signed_identifiers.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_signed_identifiers.yaml
index 0c07bbb33e43..4aa0c3dbfa0c 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_signed_identifiers.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_acl_with_signed_identifiers.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:18 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:11 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -36,7 +36,7 @@ interactions:
- request:
body: '<?xml version="1.0" encoding="utf-8"?>

- <SignedIdentifiers><SignedIdentifier><Id>testid</Id><AccessPolicy><Start>2019-10-30T19:46:25Z</Start><Expiry>2019-10-30T20:51:25Z</Expiry><Permission>r</Permission></AccessPolicy></SignedIdentifier></SignedIdentifiers>'
+ <SignedIdentifiers><SignedIdentifier><Id>testid</Id><AccessPolicy><Start>2020-06-08T16:59:18Z</Start><Expiry>2020-06-08T18:04:18Z</Expiry><Permission>r</Permission></AccessPolicy></SignedIdentifier></SignedIdentifiers>'
headers:
Accept:
- '*/*'
@@ -49,9 +49,9 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:18 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -63,7 +63,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:11 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -81,23 +81,23 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:04:18 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesync4ea015da?comp=acl
response:
body:
- string: "\uFEFFtestid2019-10-30T19:46:25.0000000Z2019-10-30T20:51:25.0000000Zr"
+ string: "\uFEFFtestid2020-06-08T16:59:18.0000000Z2020-06-08T18:04:18.0000000Zr"
headers:
cache-control:
- no-cache
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:11 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
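The ACL tests above all drive `comp=acl`. Setting an empty mapping clears the ACL, and the service caps stored access policies at five per queue; the SDK appears to enforce that limit client-side, which would explain why test_set_queue_acl_too_many_ids records only the queue-creation request. A hedged sketch of the round-trip (`queue` is a QueueClient as in the earlier sketches):

```python
# Clear the ACL, then read it back.
queue.set_queue_access_policy(signed_identifiers={})  # PUT ...?comp=acl
acl = queue.get_queue_access_policy()                 # GET ...?comp=acl
```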
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_metadata.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_metadata.yaml
index 86aebde1a7e4..043b2d7c6d8e 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_metadata.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_set_queue_metadata.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:04:18 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:11 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -45,9 +45,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:04:19 GMT
x-ms-meta-hello:
- world
x-ms-meta-number:
@@ -63,7 +63,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:12 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -81,9 +81,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:04:19 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -97,7 +97,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:04:12 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-approximate-messages-count:
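The metadata recording above sends each pair as an `x-ms-meta-*` header and reads them back through the queue properties. A hedged sketch (`queue` is a QueueClient as in the earlier sketches; the value for "number" is a placeholder, since the recorded value falls outside this hunk):

```python
queue.set_queue_metadata(metadata={"hello": "world", "number": "42"})  # "42" is a placeholder
props = queue.get_queue_properties()
metadata = props.metadata  # {"hello": "world", "number": ...}
```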
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_transport_closed_only_once.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_transport_closed_only_once.yaml
new file mode 100644
index 000000000000..2bb15b3dd4ec
--- /dev/null
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_transport_closed_only_once.yaml
@@ -0,0 +1,76 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:02:31 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: GET
+ uri: https://storagename.queue.core.windows.net/?restype=service&comp=properties
+ response:
+ body:
+ string: "\uFEFF1.0falsefalsefalsefalse1.0truetruetrue71.0falsefalse"
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:02:23 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 200
+ message: OK
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ User-Agent:
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Mon, 08 Jun 2020 17:02:31 GMT
+ x-ms-version:
+ - '2018-03-28'
+ method: GET
+ uri: https://storagename.queue.core.windows.net/?restype=service&comp=properties
+ response:
+ body:
+ string: "\uFEFF1.0falsefalsefalsefalse1.0truetruetrue71.0falsefalse"
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/xml
+ date:
+ - Mon, 08 Jun 2020 17:02:23 GMT
+ server:
+ - Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2018-03-28'
+ status:
+ code: 200
+ message: OK
+version: 1
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_unicode_create_queue_unicode_name.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_unicode_create_queue_unicode_name.yaml
index a8077e4d8b0c..1c1d85ae2938 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_unicode_create_queue_unicode_name.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_unicode_create_queue_unicode_name.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:02:31 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -21,14 +21,14 @@ interactions:
response:
body:
string: "\uFEFFInvalidResourceName
The
- specifed resource name contains invalid characters.\nRequestId:367f59b4-c003-0052-775b-8f17f8000000\nTime:2019-10-30T19:51:25.8477780Z"
+ specifed resource name contains invalid characters.\nRequestId:d644a252-0003-0015-7eb6-3dad31000000\nTime:2020-06-08T17:02:25.2173183Z"
headers:
content-length:
- '243'
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:02:24 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-error-code:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_update_message.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_update_message.yaml
index 1fe03f0d25e8..9dd76fa02e64 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_update_message.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_update_message.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:02:32 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:02:25 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:02:32 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesyncc0820c59/messages
response:
body:
- string: "\uFEFF0dfda078-1f38-4051-a68b-f95ba1c93996Wed,
- 30 Oct 2019 19:51:26 GMTWed, 06 Nov 2019 19:51:26
- GMTAgAAAAMAAAAAAAAA8foLaluP1QE=Wed,
- 30 Oct 2019 19:51:26 GMT"
+ string: "\uFEFFd65c53d5-9ba8-4c7d-a343-752ffd150f0aMon,
+ 08 Jun 2020 17:02:25 GMTMon, 15 Jun 2020 17:02:25
+ GMTAgAAAAMAAAAAAAAAPqyrlbY91gE=Mon,
+ 08 Jun 2020 17:02:25 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:02:25 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -86,26 +86,26 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:27 GMT
+ - Mon, 08 Jun 2020 17:02:32 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesyncc0820c59/messages
response:
body:
- string: "\uFEFF0dfda078-1f38-4051-a68b-f95ba1c93996Wed,
- 30 Oct 2019 19:51:26 GMTWed, 06 Nov 2019 19:51:26
- GMTAgAAAAMAAAAAAAAA+Xz0e1uP1QE=Wed,
- 30 Oct 2019 19:51:56 GMT1message1"
+ string: "\uFEFFd65c53d5-9ba8-4c7d-a343-752ffd150f0aMon,
+ 08 Jun 2020 17:02:25 GMTMon, 15 Jun 2020 17:02:25
+ GMTAgAAAAMAAAAAAAAA5aWdp7Y91gE=Mon,
+ 08 Jun 2020 17:02:55 GMT1message1"
headers:
cache-control:
- no-cache
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:02:25 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -129,13 +129,13 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:27 GMT
+ - Mon, 08 Jun 2020 17:02:32 GMT
x-ms-version:
- '2018-03-28'
method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesyncc0820c59/messages/0dfda078-1f38-4051-a68b-f95ba1c93996?popreceipt=AgAAAAMAAAAAAAAA%2BXz0e1uP1QE%3D&visibilitytimeout=0
+ uri: https://storagename.queue.core.windows.net/pyqueuesyncc0820c59/messages/d65c53d5-9ba8-4c7d-a343-752ffd150f0a?popreceipt=AgAAAAMAAAAAAAAA5aWdp7Y91gE%3D&visibilitytimeout=0
response:
body:
string: ''
@@ -143,13 +143,13 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:02:25 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-popreceipt:
- - AwAAAAMAAAAAAAAAPQcaaluP1QEBAAAA
+ - AwAAAAMAAAAAAAAAML3LlbY91gEBAAAA
x-ms-time-next-visible:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:02:26 GMT
x-ms-version:
- '2018-03-28'
status:
@@ -165,26 +165,26 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:27 GMT
+ - Mon, 08 Jun 2020 17:02:32 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesyncc0820c59/messages
response:
body:
- string: "\uFEFF0dfda078-1f38-4051-a68b-f95ba1c93996Wed,
- 30 Oct 2019 19:51:26 GMTWed, 06 Nov 2019 19:51:26
- GMTAgAAAAMAAAAAAAAAg/4CfFuP1QE=Wed,
- 30 Oct 2019 19:51:56 GMT2message1"
+ string: "\uFEFFd65c53d5-9ba8-4c7d-a343-752ffd150f0aMon,
+ 08 Jun 2020 17:02:25 GMTMon, 15 Jun 2020 17:02:25
+ GMTAgAAAAMAAAAAAAAAB1e8p7Y91gE=Mon,
+ 08 Jun 2020 17:02:56 GMT2message1"
headers:
cache-control:
- no-cache
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:02:25 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_update_message_content.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_update_message_content.yaml
index 43d5963749a8..9ff9288857eb 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_update_message_content.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue.test_update_message_content.yaml
@@ -11,9 +11,9 @@ interactions:
Content-Length:
- '0'
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:27 GMT
+ - Mon, 08 Jun 2020 17:02:33 GMT
x-ms-version:
- '2018-03-28'
method: PUT
@@ -25,7 +25,7 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:02:25 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-version:
@@ -49,24 +49,24 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:27 GMT
+ - Mon, 08 Jun 2020 17:02:33 GMT
x-ms-version:
- '2018-03-28'
method: POST
uri: https://storagename.queue.core.windows.net/pyqueuesync32150fb3/messages
response:
body:
- string: "\uFEFF8d1e1c77-d9fb-43c5-bcb0-d4a85b57e960Wed,
- 30 Oct 2019 19:51:26 GMTWed, 06 Nov 2019 19:51:26
- GMTAgAAAAMAAAAAAAAAvUNMaluP1QE=Wed,
- 30 Oct 2019 19:51:26 GMT"
+ string: "\uFEFF90473314-6b20-418a-85fe-f97a10d7f415Mon,
+ 08 Jun 2020 17:02:27 GMTMon, 15 Jun 2020 17:02:27
+ GMTAgAAAAMAAAAAAAAAEH1nlrY91gE=Mon,
+ 08 Jun 2020 17:02:27 GMT"
headers:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:02:26 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -86,26 +86,26 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:27 GMT
+ - Mon, 08 Jun 2020 17:02:33 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesync32150fb3/messages
response:
body:
- string: "\uFEFF8d1e1c77-d9fb-43c5-bcb0-d4a85b57e960Wed,
- 30 Oct 2019 19:51:26 GMTWed, 06 Nov 2019 19:51:26
- GMTAgAAAAMAAAAAAAAA9xM1fFuP1QE=Wed,
- 30 Oct 2019 19:51:56 GMT1message1"
+ string: "\uFEFF90473314-6b20-418a-85fe-f97a10d7f415Mon,
+ 08 Jun 2020 17:02:27 GMTMon, 15 Jun 2020 17:02:27
+ GMTAgAAAAMAAAAAAAAAtXZZqLY91gE=Mon,
+ 08 Jun 2020 17:02:57 GMT1message1"
headers:
cache-control:
- no-cache
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:02:26 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
@@ -131,13 +131,13 @@ interactions:
Content-Type:
- application/xml; charset=utf-8
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:27 GMT
+ - Mon, 08 Jun 2020 17:02:34 GMT
x-ms-version:
- '2018-03-28'
method: PUT
- uri: https://storagename.queue.core.windows.net/pyqueuesync32150fb3/messages/8d1e1c77-d9fb-43c5-bcb0-d4a85b57e960?popreceipt=AgAAAAMAAAAAAAAA9xM1fFuP1QE%3D&visibilitytimeout=0
+ uri: https://storagename.queue.core.windows.net/pyqueuesync32150fb3/messages/90473314-6b20-418a-85fe-f97a10d7f415?popreceipt=AgAAAAMAAAAAAAAAtXZZqLY91gE%3D&visibilitytimeout=0
response:
body:
string: ''
@@ -145,13 +145,13 @@ interactions:
content-length:
- '0'
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:02:26 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
x-ms-popreceipt:
- - AwAAAAMAAAAAAAAAo2FbaluP1QEBAAAA
+ - AwAAAAMAAAAAAAAAdlGIlrY91gEBAAAA
x-ms-time-next-visible:
- - Wed, 30 Oct 2019 19:51:26 GMT
+ - Mon, 08 Jun 2020 17:02:27 GMT
x-ms-version:
- '2018-03-28'
status:
@@ -167,19 +167,19 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:51:27 GMT
+ - Mon, 08 Jun 2020 17:02:34 GMT
x-ms-version:
- '2018-03-28'
method: GET
uri: https://storagename.queue.core.windows.net/pyqueuesync32150fb3/messages
response:
body:
- string: "\uFEFF8d1e1c77-d9fb-43c5-bcb0-d4a85b57e960Wed,
- 30 Oct 2019 19:51:26 GMTWed, 06 Nov 2019 19:51:26
- GMTAgAAAAMAAAAAAAAA+1hEfFuP1QE=Wed,
- 30 Oct 2019 19:51:56 GMT2new
+ string: "\uFEFF90473314-6b20-418a-85fe-f97a10d7f415Mon,
+ 08 Jun 2020 17:02:27 GMTMon, 15 Jun 2020 17:02:27
+ GMTAgAAAAMAAAAAAAAAUJl6qLY91gE=Mon,
+ 08 Jun 2020 17:02:57 GMT2new
text"
headers:
cache-control:
@@ -187,7 +187,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:51:25 GMT
+ - Mon, 08 Jun 2020 17:02:26 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue_service_stats.test_queue_service_stats_f.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue_service_stats.test_queue_service_stats_f.yaml
index 9b0c8f39fab4..3f5a76090ddd 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue_service_stats.test_queue_service_stats_f.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue_service_stats.test_queue_service_stats_f.yaml
@@ -9,9 +9,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:52:24 GMT
+ - Wed, 03 Jun 2020 20:46:26 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -25,7 +25,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:52:24 GMT
+ - Wed, 03 Jun 2020 20:46:25 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/storage/azure-storage-queue/tests/recordings/test_queue_service_stats.test_queue_service_stats_when_unavailable.yaml b/sdk/storage/azure-storage-queue/tests/recordings/test_queue_service_stats.test_queue_service_stats_when_unavailable.yaml
index 5fc02fbb92d1..77ac688342dd 100644
--- a/sdk/storage/azure-storage-queue/tests/recordings/test_queue_service_stats.test_queue_service_stats_when_unavailable.yaml
+++ b/sdk/storage/azure-storage-queue/tests/recordings/test_queue_service_stats.test_queue_service_stats_when_unavailable.yaml
@@ -9,9 +9,9 @@ interactions:
Connection:
- keep-alive
User-Agent:
- - azsdk-python-storage-queue/12.0.0b5 Python/3.6.3 (Windows-10-10.0.18362-SP0)
+ - azsdk-python-storage-queue/12.1.2 Python/3.8.3 (Windows-10-10.0.19041-SP0)
x-ms-date:
- - Wed, 30 Oct 2019 19:52:46 GMT
+ - Wed, 03 Jun 2020 20:46:52 GMT
x-ms-version:
- '2018-03-28'
method: GET
@@ -25,7 +25,7 @@ interactions:
content-type:
- application/xml
date:
- - Wed, 30 Oct 2019 19:52:45 GMT
+ - Wed, 03 Jun 2020 20:46:51 GMT
server:
- Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0
transfer-encoding:
diff --git a/sdk/table/__init__.py b/sdk/table/__init__.py
new file mode 100644
index 000000000000..0d1f7edf5dc6
--- /dev/null
+++ b/sdk/table/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
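
This one-liner makes the directory a pkgutil-style namespace package, so multiple distributions can contribute subpackages under the same import root. A minimal sketch of the observable effect (paths are hypothetical):

# With pkgutil-style namespace packages, __path__ can span several
# site-packages entries, so sibling distributions resolve under one root.
import azure
print(azure.__path__)
# e.g. ['.../site-packages/azure', '.../other/site-packages/azure']
# which is how `import azure.storage` and `import azure.azure_table`
# can come from different source trees.
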
diff --git a/sdk/table/azure/__init__.py b/sdk/table/azure/__init__.py
new file mode 100644
index 000000000000..5960c353a898
--- /dev/null
+++ b/sdk/table/azure/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
\ No newline at end of file
diff --git a/sdk/table/azure/azure_table/__init__.py b/sdk/table/azure/azure_table/__init__.py
new file mode 100644
index 000000000000..44caedd99661
--- /dev/null
+++ b/sdk/table/azure/azure_table/__init__.py
@@ -0,0 +1,9 @@
+__all__ = [
+    'generate_account_sas',
+    'TableServiceClient',
+    'TableClient',
+]
+
+from azure.azure_table._shared.shared_access_signature import generate_account_sas
+from azure.azure_table._table_service_client import TableServiceClient
+from azure.azure_table._table_client import TableClient
\ No newline at end of file
diff --git a/sdk/table/azure/azure_table/_deserialize.py b/sdk/table/azure/azure_table/_deserialize.py
new file mode 100644
index 000000000000..456fc1bace09
--- /dev/null
+++ b/sdk/table/azure/azure_table/_deserialize.py
@@ -0,0 +1,40 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+# pylint: disable=unused-argument
+from azure.azure_table._generated.models import TableProperties
+from azure.core.exceptions import ResourceExistsError
+
+from ._shared.models import StorageErrorCode
+
+
+def deserialize_metadata(response, obj, headers):
+ raw_metadata = {k: v for k, v in response.headers.items() if k.startswith("x-ms-meta-")}
+ return {k[10:]: v for k, v in raw_metadata.items()}
+
+
+def deserialize_table_properties(response, obj, headers):
+ metadata = deserialize_metadata(response, obj, headers)
+    table_properties = TableProperties(
+        metadata=metadata,
+        **headers
+    )
+    return table_properties
+
+
+def deserialize_table_creation(response, obj, headers):
+ if response.status_code == 204:
+ error_code = StorageErrorCode.queue_already_exists
+ error = ResourceExistsError(
+ message="Table already exists\nRequestId:{}\nTime:{}\nErrorCode:{}".format(
+ headers['x-ms-request-id'],
+ headers['Date'],
+ error_code
+ ),
+ response=response)
+ error.error_code = error_code
+ error.additional_info = {}
+ raise error
+ return headers
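
For reference, a tiny sketch of what the metadata deserializer above does with a response; `FakeResponse` is a hypothetical stand-in for the pipeline's HTTP response object:

# deserialize_metadata keeps only x-ms-meta-* headers and strips the
# 10-character "x-ms-meta-" prefix (the k[10:] slice above).
class FakeResponse:
    headers = {
        "x-ms-meta-hello": "world",    # kept, surfaced as {'hello': 'world'}
        "x-ms-version": "2019-02-02",  # not metadata, dropped
    }

assert deserialize_metadata(FakeResponse(), None, {}) == {"hello": "world"}
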
diff --git a/sdk/table/azure/azure_table/_generated/__init__.py b/sdk/table/azure/azure_table/_generated/__init__.py
new file mode 100644
index 000000000000..38dbb2a7267b
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/__init__.py
@@ -0,0 +1,14 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_table import AzureTable
+__all__ = ['AzureTable']
+
+try:
+ from ._patch import patch_sdk
+ patch_sdk()
+except ImportError:
+ pass
diff --git a/sdk/table/azure/azure_table/_generated/_azure_table.py b/sdk/table/azure/azure_table/_generated/_azure_table.py
new file mode 100644
index 000000000000..633aeafce1da
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/_azure_table.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core import PipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+from ._configuration import AzureTableConfiguration
+from .operations import TableOperations
+from .operations import ServiceOperations
+from azure.azure_table._generated import models
+
+
+class AzureTable(object):
+ """AzureTable.
+
+ :ivar table: TableOperations operations
+ :vartype table: azure_table.operations.TableOperations
+ :ivar service: ServiceOperations operations
+ :vartype service: azure_table.operations.ServiceOperations
+    :param url: The URL of the service account or table that is the target of the desired operation.
+ :type url: str
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ """
+
+ def __init__(
+ self,
+ url, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ base_url = '{url}'
+ self._config = AzureTableConfiguration(url, **kwargs)
+ self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+
+ self.table = TableOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.service = ServiceOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ def close(self):
+ # type: () -> None
+ self._client.close()
+
+ def __enter__(self):
+ # type: () -> AzureTable
+ self._client.__enter__()
+ return self
+
+ def __exit__(self, *exc_details):
+ # type: (Any) -> None
+ self._client.__exit__(*exc_details)
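
A minimal usage sketch for the generated synchronous client (the account URL is a placeholder, and the call shapes assume the sync operation groups mirror the async ones shown later in this diff):

from azure.azure_table._generated import AzureTable

# The context manager delegates __enter__/__exit__ to the underlying
# PipelineClient, so the transport is opened and closed exactly once.
with AzureTable("https://myaccount.table.core.windows.net") as client:
    props = client.service.get_properties()   # ServiceOperations
    tables = client.table.query()             # TableOperations
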
diff --git a/sdk/table/azure/azure_table/_generated/_configuration.py b/sdk/table/azure/azure_table/_generated/_configuration.py
new file mode 100644
index 000000000000..f822bc3d1ad0
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/_configuration.py
@@ -0,0 +1,55 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+VERSION = "unknown"
+
+class AzureTableConfiguration(Configuration):
+ """Configuration for AzureTable.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+    :param url: The URL of the service account or table that is the target of the desired operation.
+ :type url: str
+ """
+
+ def __init__(
+ self,
+ url, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ if url is None:
+ raise ValueError("Parameter 'url' must not be None.")
+ super(AzureTableConfiguration, self).__init__(**kwargs)
+
+ self.url = url
+ self.version = "2019-02-02"
+ kwargs.setdefault('sdk_moniker', 'azuretable/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
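
Every policy assigned in `_configure` can be replaced by passing the matching keyword argument; a sketch with a placeholder URL:

from azure.core.pipeline import policies
from azure.azure_table._generated._configuration import AzureTableConfiguration

# An explicitly supplied policy short-circuits the
# `kwargs.get(...) or policies.X(**kwargs)` fallback above.
retry = policies.RetryPolicy(retry_total=10, retry_backoff_factor=2)
config = AzureTableConfiguration(
    "https://myaccount.table.core.windows.net",
    retry_policy=retry,
)
assert config.retry_policy is retry
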
diff --git a/sdk/table/azure/azure_table/_generated/aio/__init__.py b/sdk/table/azure/azure_table/_generated/aio/__init__.py
new file mode 100644
index 000000000000..93c7e7cca18d
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/aio/__init__.py
@@ -0,0 +1,8 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_table_async import AzureTable
+__all__ = ['AzureTable']
diff --git a/sdk/table/azure/azure_table/_generated/aio/_azure_table_async.py b/sdk/table/azure/azure_table/_generated/aio/_azure_table_async.py
new file mode 100644
index 000000000000..1b3c4d87d19c
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/aio/_azure_table_async.py
@@ -0,0 +1,56 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any
+
+from azure.core import AsyncPipelineClient
+from msrest import Deserializer, Serializer
+
+from ._configuration_async import AzureTableConfiguration
+from .operations_async import TableOperations
+from .operations_async import ServiceOperations
+from .. import models
+
+
+class AzureTable(object):
+ """AzureTable.
+
+ :ivar table: TableOperations operations
+ :vartype table: azure_table.aio.operations_async.TableOperations
+ :ivar service: ServiceOperations operations
+ :vartype service: azure_table.aio.operations_async.ServiceOperations
+    :param url: The URL of the service account or table that is the target of the desired operation.
+ :type url: str
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ """
+
+ def __init__(
+ self,
+ url: str,
+ **kwargs: Any
+ ) -> None:
+ base_url = '{url}'
+ self._config = AzureTableConfiguration(url, **kwargs)
+ self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+
+ self.table = TableOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.service = ServiceOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ async def close(self) -> None:
+ await self._client.close()
+
+ async def __aenter__(self) -> "AzureTable":
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *exc_details) -> None:
+ await self._client.__aexit__(*exc_details)
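
The async client is used the same way but awaited; a minimal sketch (placeholder URL):

import asyncio
from azure.azure_table._generated.aio import AzureTable

async def main():
    # __aenter__/__aexit__ delegate to AsyncPipelineClient, so the async
    # transport is cleaned up deterministically.
    async with AzureTable("https://myaccount.table.core.windows.net") as client:
        props = await client.service.get_properties()

asyncio.run(main())
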
diff --git a/sdk/table/azure/azure_table/_generated/aio/_configuration_async.py b/sdk/table/azure/azure_table/_generated/aio/_configuration_async.py
new file mode 100644
index 000000000000..d2581e2463c9
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/aio/_configuration_async.py
@@ -0,0 +1,49 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+VERSION = "unknown"
+
+class AzureTableConfiguration(Configuration):
+ """Configuration for AzureTable.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+    :param url: The URL of the service account or table that is the target of the desired operation.
+ :type url: str
+ """
+
+ def __init__(
+ self,
+ url: str,
+ **kwargs: Any
+ ) -> None:
+ if url is None:
+ raise ValueError("Parameter 'url' must not be None.")
+ super(AzureTableConfiguration, self).__init__(**kwargs)
+
+ self.url = url
+ self.version = "2019-02-02"
+ kwargs.setdefault('sdk_moniker', 'azuretable/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs: Any
+ ) -> None:
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
diff --git a/sdk/table/azure/azure_table/_generated/aio/operations_async/__init__.py b/sdk/table/azure/azure_table/_generated/aio/operations_async/__init__.py
new file mode 100644
index 000000000000..90d9a5059444
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/aio/operations_async/__init__.py
@@ -0,0 +1,13 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._table_operations_async import TableOperations
+from ._service_operations_async import ServiceOperations
+
+__all__ = [
+ 'TableOperations',
+ 'ServiceOperations',
+]
diff --git a/sdk/table/azure/azure_table/_generated/aio/operations_async/_service_operations_async.py b/sdk/table/azure/azure_table/_generated/aio/operations_async/_service_operations_async.py
new file mode 100644
index 000000000000..47c4de404e95
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/aio/operations_async/_service_operations_async.py
@@ -0,0 +1,245 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Optional, TypeVar
+
+from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ServiceOperations:
+ """ServiceOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_table.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def set_properties(
+ self,
+ table_service_properties: "models.TableServiceProperties",
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs
+ ) -> None:
+ """Sets properties for an account's Table service endpoint, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param table_service_properties: The Table Service properties.
+ :type table_service_properties: ~azure_table.models.TableServiceProperties
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ restype = "service"
+ comp = "properties"
+ content_type = kwargs.pop("content_type", "application/xml")
+
+ # Construct URL
+ url = self.set_properties.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(table_service_properties, 'TableServiceProperties', is_xml=True)
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ set_properties.metadata = {'url': '/'} # type: ignore
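    # Example (sketch): callers reach this through the owning client; the
    # models.Metrics / models.RetentionPolicy names below are assumptions
    # about the generated model set, not confirmed by this diff.
    #
    #   props = models.TableServiceProperties(
    #       minute_metrics=models.Metrics(
    #           version="1.0", enabled=False,
    #           retention_policy=models.RetentionPolicy(enabled=False)))
    #   await client.service.set_properties(props)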
+
+ async def get_properties(
+ self,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs
+ ) -> "models.TableServiceProperties":
+ """Gets the properties of an account's Table service, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableServiceProperties, or the result of cls(response)
+ :rtype: ~azure_table.models.TableServiceProperties
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableServiceProperties"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ restype = "service"
+ comp = "properties"
+
+ # Construct URL
+ url = self.get_properties.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Accept'] = 'application/xml'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ deserialized = self._deserialize('TableServiceProperties', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ get_properties.metadata = {'url': '/'} # type: ignore
+
+ async def get_statistics(
+ self,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs
+ ) -> "models.TableServiceStats":
+ """Retrieves statistics related to replication for the Table service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the account.
+
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableServiceStats, or the result of cls(response)
+ :rtype: ~azure_table.models.TableServiceStats
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableServiceStats"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ restype = "service"
+ comp = "stats"
+
+ # Construct URL
+ url = self.get_statistics.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Accept'] = 'application/xml'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ deserialized = self._deserialize('TableServiceStats', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ get_statistics.metadata = {'url': '/'} # type: ignore
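
A sketch of calling the statistics operation; per the docstring it must target the secondary endpoint. The URL is a placeholder, and the geo_replication/status attribute names are assumed from the storage analytics schema:

import asyncio
from azure.azure_table._generated.aio import AzureTable

async def main():
    # RA-GRS accounts expose replication stats on the "-secondary" host only.
    async with AzureTable("https://myaccount-secondary.table.core.windows.net") as client:
        stats = await client.service.get_statistics()
        print(stats.geo_replication.status)  # e.g. "live"

asyncio.run(main())
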
diff --git a/sdk/table/azure/azure_table/_generated/aio/operations_async/_table_operations_async.py b/sdk/table/azure/azure_table/_generated/aio/operations_async/_table_operations_async.py
new file mode 100644
index 000000000000..fcf4f8014308
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/aio/operations_async/_table_operations_async.py
@@ -0,0 +1,1041 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, List, Optional, TypeVar, Union
+
+from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class TableOperations:
+ """TableOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_table.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def query(
+ self,
+ request_id_parameter: Optional[str] = None,
+ next_table_name: Optional[str] = None,
+ query_options: Optional["models.QueryOptions"] = None,
+ **kwargs
+ ) -> "models.TableQueryResponse":
+ """Queries tables under the given account.
+
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param next_table_name: A table query continuation token from a previous call.
+ :type next_table_name: str
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableQueryResponse, or the result of cls(response)
+ :rtype: ~azure_table.models.TableQueryResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableQueryResponse"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ _top = None
+ _select = None
+ _filter = None
+ if query_options is not None:
+ _format = query_options.format
+ _top = query_options.top
+ _select = query_options.select
+ _filter = query_options.filter
+ data_service_version = "3.0"
+
+ # Construct URL
+ url = self.query.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+ if _top is not None:
+ query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
+ if _select is not None:
+ query_parameters['$select'] = self._serialize.query("select", _select, 'str')
+ if _filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
+ if next_table_name is not None:
+ query_parameters['NextTableName'] = self._serialize.query("next_table_name", next_table_name, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['x-ms-continuation-NextTableName']=self._deserialize('str', response.headers.get('x-ms-continuation-NextTableName'))
+ deserialized = self._deserialize('TableQueryResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ query.metadata = {'url': '/Tables'} # type: ignore
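    # Example (sketch): the continuation token is returned only in the
    # x-ms-continuation-NextTableName response header, so a pager needs the
    # `cls` hook above to see it (QueryOptions(top=...) is an assumed
    # constructor form for the generated model):
    #
    #   def page_and_token(pipeline_response, deserialized, headers):
    #       return deserialized, headers.get('x-ms-continuation-NextTableName')
    #
    #   page, token = await client.table.query(
    #       query_options=models.QueryOptions(top=3), cls=page_and_token)
    #   while token:
    #       page, token = await client.table.query(
    #           next_table_name=token,
    #           query_options=models.QueryOptions(top=3), cls=page_and_token)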
+
+ async def create(
+ self,
+ table_properties: "models.TableProperties",
+ request_id_parameter: Optional[str] = None,
+ response_preference: Optional[Union[str, "models.ResponseFormat"]] = None,
+ query_options: Optional["models.QueryOptions"] = None,
+ **kwargs
+ ) -> "models.TableResponse":
+ """Creates a new table under the given account.
+
+ :param table_properties: The Table properties.
+ :type table_properties: ~azure_table.models.TableProperties
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param response_preference: Specifies whether the response should include the inserted entity
+ in the payload. Possible values are return-no-content and return-content.
+ :type response_preference: str or ~azure_table.models.ResponseFormat
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableResponse, or the result of cls(response)
+ :rtype: ~azure_table.models.TableResponse or None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableResponse"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+ content_type = kwargs.pop("content_type", "application/json;odata=nometadata")
+
+ # Construct URL
+ url = self.create.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ if response_preference is not None:
+ header_parameters['Prefer'] = self._serialize.header("response_preference", response_preference, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(table_properties, 'TableProperties')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 201:
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied'))
+ deserialized = self._deserialize('TableResponse', pipeline_response)
+
+ if response.status_code == 204:
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ create.metadata = {'url': '/Tables'} # type: ignore
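    # Example (sketch, TableProperties(table_name=...) assumed from the
    # generated model): create a table and skip the echo payload, which is
    # the 204 branch above where `deserialized` stays None.
    #
    #   await client.table.create(
    #       models.TableProperties(table_name="mytable"),
    #       response_preference="return-no-content")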
+
+ async def delete(
+ self,
+ table: str,
+ request_id_parameter: Optional[str] = None,
+ **kwargs
+ ) -> None:
+ """Operation permanently deletes the specified table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+
+ # Construct and send request
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ delete.metadata = {'url': '/Tables(\'{table}\')'} # type: ignore
+
+ async def query_entities(
+ self,
+ table: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ next_partition_key: Optional[str] = None,
+ next_row_key: Optional[str] = None,
+ query_options: Optional["models.QueryOptions"] = None,
+ **kwargs
+ ) -> "models.TableEntityQueryResponse":
+ """Queries entities in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param next_partition_key: An entity query continuation token from a previous call.
+ :type next_partition_key: str
+ :param next_row_key: An entity query continuation token from a previous call.
+ :type next_row_key: str
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableEntityQueryResponse, or the result of cls(response)
+ :rtype: ~azure_table.models.TableEntityQueryResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableEntityQueryResponse"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ _top = None
+ _select = None
+ _filter = None
+ if query_options is not None:
+ _format = query_options.format
+ _top = query_options.top
+ _select = query_options.select
+ _filter = query_options.filter
+ data_service_version = "3.0"
+
+ # Construct URL
+ url = self.query_entities.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+ if _top is not None:
+ query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
+ if _select is not None:
+ query_parameters['$select'] = self._serialize.query("select", _select, 'str')
+ if _filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
+ if next_partition_key is not None:
+ query_parameters['NextPartitionKey'] = self._serialize.query("next_partition_key", next_partition_key, 'str')
+ if next_row_key is not None:
+ query_parameters['NextRowKey'] = self._serialize.query("next_row_key", next_row_key, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['x-ms-continuation-NextPartitionKey'] = self._deserialize('str', response.headers.get('x-ms-continuation-NextPartitionKey'))
+ response_headers['x-ms-continuation-NextRowKey'] = self._deserialize('str', response.headers.get('x-ms-continuation-NextRowKey'))
+ deserialized = self._deserialize('TableEntityQueryResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ query_entities.metadata = {'url': '/{table}()'} # type: ignore
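+
+ # NOTE (editor sketch, not generated code): query_entities pages with the
+ # x-ms-continuation-NextPartitionKey / x-ms-continuation-NextRowKey response
+ # headers rather than a body-level next link, so callers need the headers as
+ # well as the body. A hypothetical paging loop ("ops" is an assumed handle to
+ # this operations class and "mytable" an assumed table name):
+ #
+ #     next_pk, next_rk = None, None
+ #     while True:
+ #         entities, headers = await ops.query_entities(
+ #             "mytable",
+ #             next_partition_key=next_pk,
+ #             next_row_key=next_rk,
+ #             query_options=models.QueryOptions(top=100),
+ #             cls=lambda pipeline_response, deserialized, hdrs: (deserialized, hdrs),
+ #         )
+ #         for entity in entities.value or []:
+ #             ...  # each entity is a dict[str, object]
+ #         next_pk = headers.get('x-ms-continuation-NextPartitionKey')
+ #         next_rk = headers.get('x-ms-continuation-NextRowKey')
+ #         if not (next_pk or next_rk):
+ #             break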
+
+ async def query_entities_with_partition_and_row_key(
+ self,
+ table: str,
+ partition_key: str,
+ row_key: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ query_options: Optional["models.QueryOptions"] = None,
+ **kwargs
+ ) -> "models.TableEntityQueryResponse":
+ """Queries entities in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param partition_key: The partition key of the entity.
+ :type partition_key: str
+ :param row_key: The row key of the entity.
+ :type row_key: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableEntityQueryResponse, or the result of cls(response)
+ :rtype: ~azure_table.models.TableEntityQueryResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableEntityQueryResponse"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ _select = None
+ _filter = None
+ if query_options is not None:
+ _format = query_options.format
+ _select = query_options.select
+ _filter = query_options.filter
+ data_service_version = "3.0"
+
+ # Construct URL
+ url = self.query_entities_with_partition_and_row_key.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'),
+ 'rowKey': self._serialize.url("row_key", row_key, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+ if _select is not None:
+ query_parameters['$select'] = self._serialize.query("select", _select, 'str')
+ if _filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
+ response_headers['x-ms-continuation-NextPartitionKey'] = self._deserialize('str', response.headers.get('x-ms-continuation-NextPartitionKey'))
+ response_headers['x-ms-continuation-NextRowKey'] = self._deserialize('str', response.headers.get('x-ms-continuation-NextRowKey'))
+ deserialized = self._deserialize('TableEntityQueryResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ query_entities_with_partition_and_row_key.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore
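+
+ # NOTE (editor sketch): this is the point-read form of query_entities; it
+ # addresses one entity by PartitionKey/RowKey, so $top and the continuation
+ # tokens do not apply. Hypothetical call (table and key values are assumptions):
+ #
+ #     page = await ops.query_entities_with_partition_and_row_key(
+ #         "mytable", "pk1", "rk1",
+ #         query_options=models.QueryOptions(select="RowKey,Value"),
+ #     )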
+
+ async def update_entity(
+ self,
+ table: str,
+ partition_key: str,
+ row_key: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ if_match: Optional[str] = None,
+ table_entity_properties: Optional[Dict[str, object]] = None,
+ query_options: Optional["models.QueryOptions"] = None,
+ **kwargs
+ ) -> None:
+ """Update entity in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param partition_key: The partition key of the entity.
+ :type partition_key: str
+ :param row_key: The row key of the entity.
+ :type row_key: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param if_match: Match condition for an entity to be updated. If specified and a matching
+ entity is not found, an error will be raised. To force an unconditional update, set to the
+ wildcard character (*). If not specified, an insert will be performed when no existing entity
+ is found to update and a replace will be performed if an existing entity is found.
+ :type if_match: str
+ :param table_entity_properties: The properties for the table entity.
+ :type table_entity_properties: dict[str, object]
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+ content_type = kwargs.pop("content_type", "application/json")
+
+ # Construct URL
+ url = self.update_entity.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'),
+ 'rowKey': self._serialize.url("row_key", row_key, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if table_entity_properties is not None:
+ body_content = self._serialize.body(table_entity_properties, '{object}')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ update_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore
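+
+ # NOTE (editor sketch): the If-Match header drives the semantics documented
+ # above: '*' forces an unconditional replace, a stored ETag makes the PUT
+ # conditional, and omitting it yields insert-or-replace. Hypothetical call
+ # (entity names and values are assumptions):
+ #
+ #     await ops.update_entity(
+ #         "mytable", "pk1", "rk1",
+ #         if_match=etag_from_earlier_read,  # HttpResponseError on an ETag mismatch
+ #         table_entity_properties={"PartitionKey": "pk1", "RowKey": "rk1", "Value": 42},
+ #     )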
+
+ async def merge_entity(
+ self,
+ table: str,
+ partition_key: str,
+ row_key: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ if_match: Optional[str] = None,
+ table_entity_properties: Optional[Dict[str, object]] = None,
+ query_options: Optional["models.QueryOptions"] = None,
+ **kwargs
+ ) -> None:
+ """Merge entity in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param partition_key: The partition key of the entity.
+ :type partition_key: str
+ :param row_key: The row key of the entity.
+ :type row_key: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param if_match: Match condition for an entity to be updated. If specified and a matching
+ entity is not found, an error will be raised. To force an unconditional update, set to the
+ wildcard character (*). If not specified, an insert will be performed when no existing entity
+ is found to update and a merge will be performed if an existing entity is found.
+ :type if_match: str
+ :param table_entity_properties: The properties for the table entity.
+ :type table_entity_properties: dict[str, object]
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+ content_type = kwargs.pop("content_type", "application/json")
+
+ # Construct URL
+ url = self.merge_entity.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'),
+ 'rowKey': self._serialize.url("row_key", row_key, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if table_entity_properties is not None:
+ body_content = self._serialize.body(table_entity_properties, '{object}')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ merge_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore
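+
+ # NOTE (editor sketch): merge_entity sends a PATCH, so properties omitted from
+ # table_entity_properties are preserved on the stored entity, whereas
+ # update_entity's PUT replaces it wholesale; the If-Match rules are the same.
+ # Hypothetical partial update (values are assumptions):
+ #
+ #     await ops.merge_entity(
+ #         "mytable", "pk1", "rk1",
+ #         if_match="*",
+ #         table_entity_properties={"Value": 43},  # other properties untouched
+ #     )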
+
+ async def delete_entity(
+ self,
+ table: str,
+ partition_key: str,
+ row_key: str,
+ if_match: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ query_options: Optional["models.QueryOptions"] = None,
+ **kwargs
+ ) -> None:
+ """Deletes the specified entity in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param partition_key: The partition key of the entity.
+ :type partition_key: str
+ :param row_key: The row key of the entity.
+ :type row_key: str
+ :param if_match: Match condition for an entity to be deleted. If specified and a matching
+ entity is not found, an error will be raised. To force an unconditional delete, set to the
+ wildcard character (*).
+ :type if_match: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+
+ # Construct URL
+ url = self.delete_entity.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'),
+ 'rowKey': self._serialize.url("row_key", row_key, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+
+ # Construct and send request
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ delete_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore
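+
+ # NOTE (editor sketch): unlike update/merge, if_match is required here; pass
+ # the wildcard for an unconditional delete. Hypothetical call:
+ #
+ #     await ops.delete_entity("mytable", "pk1", "rk1", if_match="*")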
+
+ async def insert_entity(
+ self,
+ table: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ response_preference: Optional[Union[str, "models.ResponseFormat"]] = None,
+ table_entity_properties: Optional[Dict[str, object]] = None,
+ query_options: Optional["models.QueryOptions"] = None,
+ **kwargs
+ ) -> Dict[str, object]:
+ """Insert entity in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param response_preference: Specifies whether the response should include the inserted entity
+ in the payload. Possible values are return-no-content and return-content.
+ :type response_preference: str or ~azure_table.models.ResponseFormat
+ :param table_entity_properties: The properties for the table entity.
+ :type table_entity_properties: dict[str, object]
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: dict mapping str to object, or the result of cls(response)
+ :rtype: dict[str, object] or None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+ content_type = kwargs.pop("content_type", "application/json;odata=nometadata")
+
+ # Construct URL
+ url = self.insert_entity.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ if response_preference is not None:
+ header_parameters['Prefer'] = self._serialize.header("response_preference", response_preference, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if table_entity_properties is not None:
+ body_content = self._serialize.body(table_entity_properties, '{object}')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 201:
+ response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
+ response_headers['Preference-Applied'] = self._deserialize('str', response.headers.get('Preference-Applied'))
+ response_headers['Content-Type'] = self._deserialize('str', response.headers.get('Content-Type'))
+ deserialized = self._deserialize('{object}', pipeline_response)
+
+ if response.status_code == 204:
+ response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
+ response_headers['Preference-Applied'] = self._deserialize('str', response.headers.get('Preference-Applied'))
+ response_headers['Content-Type'] = self._deserialize('str', response.headers.get('Content-Type'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ insert_entity.metadata = {'url': '/{table}'} # type: ignore
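+
+ # NOTE (editor sketch): response_preference is sent as the Prefer header and
+ # selects between the 201 and 204 branches above: return-content yields 201
+ # plus the inserted entity as a dict, return-no-content yields 204 and None.
+ # Hypothetical call (table and key values are assumptions):
+ #
+ #     entity = await ops.insert_entity(
+ #         "mytable",
+ #         response_preference=models.ResponseFormat.return_content,
+ #         table_entity_properties={"PartitionKey": "pk1", "RowKey": "rk1"},
+ #     )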
+
+ async def get_access_policy(
+ self,
+ table: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs
+ ) -> List["models.SignedIdentifier"]:
+ """Retrieves details about any stored access policies specified on the table that may be used with Shared Access Signatures.
+
+ :param table: The name of the table.
+ :type table: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: list of SignedIdentifier, or the result of cls(response)
+ :rtype: list[~azure_table.models.SignedIdentifier]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[List["models.SignedIdentifier"]]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ comp = "acl"
+
+ # Construct URL
+ url = self.get_access_policy.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Accept'] = 'application/xml'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+ deserialized = self._deserialize('[SignedIdentifier]', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ get_access_policy.metadata = {'url': '/{table}'} # type: ignore
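+
+ # NOTE (editor sketch): the XML <SignedIdentifiers> body deserializes into a
+ # list of SignedIdentifier models. Hypothetical read (table name assumed):
+ #
+ #     identifiers = await ops.get_access_policy("mytable")
+ #     for ident in identifiers:
+ #         print(ident.id, ident.access_policy.permission)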
+
+ async def set_access_policy(
+ self,
+ table: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ table_acl: Optional[List["models.SignedIdentifier"]] = None,
+ **kwargs
+ ) -> None:
+ """Sets stored access policies for the table that may be used with Shared Access Signatures.
+
+ :param table: The name of the table.
+ :type table: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param table_acl: The ACLs for the table.
+ :type table_acl: list[~azure_table.models.SignedIdentifier]
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ comp = "acl"
+ content_type = kwargs.pop("content_type", "application/xml")
+
+ # Construct URL
+ url = self.set_access_policy.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}}
+ if table_acl is not None:
+ body_content = self._serialize.body(table_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt)
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ set_access_policy.metadata = {'url': '/{table}'} # type: ignore
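+
+ # NOTE (editor sketch): table_acl round-trips as the wrapped <SignedIdentifiers>
+ # XML list named in serialization_ctxt above. A hypothetical stored policy
+ # ("raud" = read/add/update/delete; all values are assumptions):
+ #
+ #     import datetime
+ #     policy = models.AccessPolicy(
+ #         start=datetime.datetime(2020, 1, 1),
+ #         expiry=datetime.datetime(2020, 2, 1),
+ #         permission="raud",
+ #     )
+ #     await ops.set_access_policy(
+ #         "mytable",
+ #         table_acl=[models.SignedIdentifier(id="policy-1", access_policy=policy)],
+ #     )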
diff --git a/sdk/table/azure/azure_table/_generated/models/__init__.py b/sdk/table/azure/azure_table/_generated/models/__init__.py
new file mode 100644
index 000000000000..944581fccd98
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/models/__init__.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+ from ._models_py3 import AccessPolicy
+ from ._models_py3 import CorsRule
+ from ._models_py3 import GeoReplication
+ from ._models_py3 import Logging
+ from ._models_py3 import Metrics
+ from ._models_py3 import QueryOptions
+ from ._models_py3 import RetentionPolicy
+ from ._models_py3 import SignedIdentifier
+ from ._models_py3 import TableEntityQueryResponse
+ from ._models_py3 import TableProperties
+ from ._models_py3 import TableQueryResponse
+ from ._models_py3 import TableResponse
+ from ._models_py3 import TableResponseProperties
+ from ._models_py3 import TableServiceError
+ from ._models_py3 import TableServiceProperties
+ from ._models_py3 import TableServiceStats
+except (SyntaxError, ImportError):
+ from ._models import AccessPolicy # type: ignore
+ from ._models import CorsRule # type: ignore
+ from ._models import GeoReplication # type: ignore
+ from ._models import Logging # type: ignore
+ from ._models import Metrics # type: ignore
+ from ._models import QueryOptions # type: ignore
+ from ._models import RetentionPolicy # type: ignore
+ from ._models import SignedIdentifier # type: ignore
+ from ._models import TableEntityQueryResponse # type: ignore
+ from ._models import TableProperties # type: ignore
+ from ._models import TableQueryResponse # type: ignore
+ from ._models import TableResponse # type: ignore
+ from ._models import TableResponseProperties # type: ignore
+ from ._models import TableServiceError # type: ignore
+ from ._models import TableServiceProperties # type: ignore
+ from ._models import TableServiceStats # type: ignore
+
+from ._azure_table_enums import (
+ GeoReplicationStatusType,
+ OdataMetadataFormat,
+ ResponseFormat,
+)
+
+__all__ = [
+ 'AccessPolicy',
+ 'CorsRule',
+ 'GeoReplication',
+ 'Logging',
+ 'Metrics',
+ 'QueryOptions',
+ 'RetentionPolicy',
+ 'SignedIdentifier',
+ 'TableEntityQueryResponse',
+ 'TableProperties',
+ 'TableQueryResponse',
+ 'TableResponse',
+ 'TableResponseProperties',
+ 'TableServiceError',
+ 'TableServiceProperties',
+ 'TableServiceStats',
+ 'GeoReplicationStatusType',
+ 'OdataMetadataFormat',
+ 'ResponseFormat',
+]
diff --git a/sdk/table/azure/azure_table/_generated/models/_azure_table_enums.py b/sdk/table/azure/azure_table/_generated/models/_azure_table_enums.py
new file mode 100644
index 000000000000..ef8e90bd92c9
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/models/_azure_table_enums.py
@@ -0,0 +1,26 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+
+class GeoReplicationStatusType(str, Enum):
+ """The status of the secondary location.
+ """
+
+ live = "live"
+ bootstrap = "bootstrap"
+ unavailable = "unavailable"
+
+class OdataMetadataFormat(str, Enum):
+
+ application_json_odata_nometadata = "application/json;odata=nometadata"
+ application_json_odata_minimalmetadata = "application/json;odata=minimalmetadata"
+ application_json_odata_fullmetadata = "application/json;odata=fullmetadata"
+
+class ResponseFormat(str, Enum):
+
+ return_no_content = "return-no-content"
+ return_content = "return-content"
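+
+# NOTE (editor sketch): because these enums subclass str, members compare equal
+# to their wire values, so callers can pass either the enum or the raw string:
+#
+#     assert ResponseFormat.return_no_content == "return-no-content"
+#     assert OdataMetadataFormat.application_json_odata_fullmetadata == "application/json;odata=fullmetadata"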
diff --git a/sdk/table/azure/azure_table/_generated/models/_models.py b/sdk/table/azure/azure_table/_generated/models/_models.py
new file mode 100644
index 000000000000..40bc70abc678
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/models/_models.py
@@ -0,0 +1,536 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class AccessPolicy(msrest.serialization.Model):
+ """An Access policy.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param start: Required. The start datetime from which the policy is active.
+ :type start: ~datetime.datetime
+ :param expiry: Required. The datetime that the policy expires.
+ :type expiry: ~datetime.datetime
+ :param permission: Required. The permissions for the acl policy.
+ :type permission: str
+ """
+
+ _validation = {
+ 'start': {'required': True},
+ 'expiry': {'required': True},
+ 'permission': {'required': True},
+ }
+
+ _attribute_map = {
+ 'start': {'key': 'Start', 'type': 'iso-8601', 'xml': {'name': 'Start'}},
+ 'expiry': {'key': 'Expiry', 'type': 'iso-8601', 'xml': {'name': 'Expiry'}},
+ 'permission': {'key': 'Permission', 'type': 'str', 'xml': {'name': 'Permission'}},
+ }
+ _xml_map = {
+ 'name': 'AccessPolicy'
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AccessPolicy, self).__init__(**kwargs)
+ self.start = kwargs['start']
+ self.expiry = kwargs['expiry']
+ self.permission = kwargs['permission']
+
+
+class CorsRule(msrest.serialization.Model):
+ """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param allowed_origins: Required. The origin domains that are permitted to make a request
+ against the service via CORS. The origin domain is the domain from which the request
+ originates. Note that the origin must be an exact case-sensitive match with the origin that the
+ user age sends to the service. You can also use the wildcard character '*' to allow all origin
+ domains to make requests via CORS.
+ :type allowed_origins: str
+ :param allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
+ use for a CORS request. (comma separated).
+ :type allowed_methods: str
+ :param allowed_headers: Required. The request headers that the origin domain may specify on the
+ CORS request.
+ :type allowed_headers: str
+ :param exposed_headers: Required. The response headers that may be sent in the response to the
+ CORS request and exposed by the browser to the request issuer.
+ :type exposed_headers: str
+ :param max_age_in_seconds: Required. The maximum amount of time that a browser should cache the
+ preflight OPTIONS request.
+ :type max_age_in_seconds: int
+ """
+
+ _validation = {
+ 'allowed_origins': {'required': True},
+ 'allowed_methods': {'required': True},
+ 'allowed_headers': {'required': True},
+ 'exposed_headers': {'required': True},
+ 'max_age_in_seconds': {'required': True, 'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str', 'xml': {'name': 'AllowedOrigins'}},
+ 'allowed_methods': {'key': 'AllowedMethods', 'type': 'str', 'xml': {'name': 'AllowedMethods'}},
+ 'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str', 'xml': {'name': 'AllowedHeaders'}},
+ 'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str', 'xml': {'name': 'ExposedHeaders'}},
+ 'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int', 'xml': {'name': 'MaxAgeInSeconds'}},
+ }
+ _xml_map = {
+ 'name': 'CorsRule'
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CorsRule, self).__init__(**kwargs)
+ self.allowed_origins = kwargs['allowed_origins']
+ self.allowed_methods = kwargs['allowed_methods']
+ self.allowed_headers = kwargs['allowed_headers']
+ self.exposed_headers = kwargs['exposed_headers']
+ self.max_age_in_seconds = kwargs['max_age_in_seconds']
+
+
+class GeoReplication(msrest.serialization.Model):
+ """GeoReplication.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param status: Required. The status of the secondary location. Possible values include: "live",
+ "bootstrap", "unavailable".
+ :type status: str or ~azure_table.models.GeoReplicationStatusType
+ :param last_sync_time: Required. A GMT date/time value, to the second. All primary writes
+ preceding this value are guaranteed to be available for read operations at the secondary.
+ Primary writes after this point in time may or may not be available for reads.
+ :type last_sync_time: ~datetime.datetime
+ """
+
+ _validation = {
+ 'status': {'required': True},
+ 'last_sync_time': {'required': True},
+ }
+
+ _attribute_map = {
+ 'status': {'key': 'Status', 'type': 'str', 'xml': {'name': 'Status'}},
+ 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123', 'xml': {'name': 'LastSyncTime'}},
+ }
+ _xml_map = {
+ 'name': 'GeoReplication'
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(GeoReplication, self).__init__(**kwargs)
+ self.status = kwargs['status']
+ self.last_sync_time = kwargs['last_sync_time']
+
+
+class Logging(msrest.serialization.Model):
+ """Azure Analytics Logging settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param version: Required. The version of Analytics to configure.
+ :type version: str
+ :param delete: Required. Indicates whether all delete requests should be logged.
+ :type delete: bool
+ :param read: Required. Indicates whether all read requests should be logged.
+ :type read: bool
+ :param write: Required. Indicates whether all write requests should be logged.
+ :type write: bool
+ :param retention_policy: Required. The retention policy.
+ :type retention_policy: ~azure_table.models.RetentionPolicy
+ """
+
+ _validation = {
+ 'version': {'required': True},
+ 'delete': {'required': True},
+ 'read': {'required': True},
+ 'write': {'required': True},
+ 'retention_policy': {'required': True},
+ }
+
+ _attribute_map = {
+ 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
+ 'delete': {'key': 'Delete', 'type': 'bool', 'xml': {'name': 'Delete'}},
+ 'read': {'key': 'Read', 'type': 'bool', 'xml': {'name': 'Read'}},
+ 'write': {'key': 'Write', 'type': 'bool', 'xml': {'name': 'Write'}},
+ 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'},
+ }
+ _xml_map = {
+ 'name': 'Logging'
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Logging, self).__init__(**kwargs)
+ self.version = kwargs['version']
+ self.delete = kwargs['delete']
+ self.read = kwargs['read']
+ self.write = kwargs['write']
+ self.retention_policy = kwargs['retention_policy']
+
+
+class Metrics(msrest.serialization.Model):
+ """Metrics.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param version: The version of Analytics to configure.
+ :type version: str
+ :param enabled: Required. Indicates whether metrics are enabled for the Table service.
+ :type enabled: bool
+ :param include_apis: Indicates whether metrics should generate summary statistics for called
+ API operations.
+ :type include_apis: bool
+ :param retention_policy: The retention policy.
+ :type retention_policy: ~azure_table.models.RetentionPolicy
+ """
+
+ _validation = {
+ 'enabled': {'required': True},
+ }
+
+ _attribute_map = {
+ 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
+ 'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
+ 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool', 'xml': {'name': 'IncludeAPIs'}},
+ 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'},
+ }
+ _xml_map = {}
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Metrics, self).__init__(**kwargs)
+ self.version = kwargs.get('version', None)
+ self.enabled = kwargs['enabled']
+ self.include_apis = kwargs.get('include_apis', None)
+ self.retention_policy = kwargs.get('retention_policy', None)
+
+
+class QueryOptions(msrest.serialization.Model):
+ """Parameter group.
+
+ :param format: Specifies the media type for the response. Possible values include:
+ "application/json;odata=nometadata", "application/json;odata=minimalmetadata",
+ "application/json;odata=fullmetadata".
+ :type format: str or ~azure_table.models.OdataMetadataFormat
+ :param top: Maximum number of records to return.
+ :type top: int
+ :param select: Select expression using OData notation. Limits the columns on each record to
+ just those requested, e.g. "$select=PolicyAssignmentId, ResourceId".
+ :type select: str
+ :param filter: OData filter expression.
+ :type filter: str
+ """
+
+ _validation = {
+ 'top': {'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'format': {'key': 'Format', 'type': 'str'},
+ 'top': {'key': 'Top', 'type': 'int'},
+ 'select': {'key': 'Select', 'type': 'str'},
+ 'filter': {'key': 'Filter', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(QueryOptions, self).__init__(**kwargs)
+ self.format = kwargs.get('format', None)
+ self.top = kwargs.get('top', None)
+ self.select = kwargs.get('select', None)
+ self.filter = kwargs.get('filter', None)
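+
+ # NOTE (editor sketch): QueryOptions is the parameter group that the table and
+ # entity operations unpack into the $format/$top/$select/$filter query string
+ # parameters. Hypothetical construction (values are assumptions):
+ #
+ #     opts = QueryOptions(top=50, select="RowKey,Value",
+ #                         filter="PartitionKey eq 'pk1'")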
+
+
+class RetentionPolicy(msrest.serialization.Model):
+ """The retention policy.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param enabled: Required. Indicates whether a retention policy is enabled for the service.
+ :type enabled: bool
+ :param days: Indicates the number of days that metrics or logging or soft-deleted data should
+ be retained. All data older than this value will be deleted.
+ :type days: int
+ """
+
+ _validation = {
+ 'enabled': {'required': True},
+ 'days': {'minimum': 1},
+ }
+
+ _attribute_map = {
+ 'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
+ 'days': {'key': 'Days', 'type': 'int', 'xml': {'name': 'Days'}},
+ }
+ _xml_map = {
+ 'name': 'RetentionPolicy'
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RetentionPolicy, self).__init__(**kwargs)
+ self.enabled = kwargs['enabled']
+ self.days = kwargs.get('days', None)
+
+
+class SignedIdentifier(msrest.serialization.Model):
+ """A signed identifier.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. A unique id.
+ :type id: str
+ :param access_policy: Required. The access policy.
+ :type access_policy: ~azure_table.models.AccessPolicy
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ 'access_policy': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'Id', 'type': 'str', 'xml': {'name': 'Id'}},
+ 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'},
+ }
+ _xml_map = {
+ 'name': 'SignedIdentifier'
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SignedIdentifier, self).__init__(**kwargs)
+ self.id = kwargs['id']
+ self.access_policy = kwargs['access_policy']
+
+
+class TableEntityQueryResponse(msrest.serialization.Model):
+ """The properties for the table entity query response.
+
+ :param odata_metadata: The metadata response of the table.
+ :type odata_metadata: str
+ :param value: List of table entities.
+ :type value: list[dict[str, object]]
+ """
+
+ _attribute_map = {
+ 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[{object}]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TableEntityQueryResponse, self).__init__(**kwargs)
+ self.odata_metadata = kwargs.get('odata_metadata', None)
+ self.value = kwargs.get('value', None)
+
+
+class TableProperties(msrest.serialization.Model):
+ """The properties for creating a table.
+
+ :param table_name: The name of the table to create.
+ :type table_name: str
+ """
+
+ _attribute_map = {
+ 'table_name': {'key': 'TableName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TableProperties, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+
+
+class TableQueryResponse(msrest.serialization.Model):
+ """The properties for the table query response.
+
+ :param odata_metadata: The metadata response of the table.
+ :type odata_metadata: str
+ :param value: List of tables.
+ :type value: list[~azure_table.models.TableResponseProperties]
+ """
+
+ _attribute_map = {
+ 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[TableResponseProperties]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TableQueryResponse, self).__init__(**kwargs)
+ self.odata_metadata = kwargs.get('odata_metadata', None)
+ self.value = kwargs.get('value', None)
+
+
+class TableResponseProperties(msrest.serialization.Model):
+ """The properties for the table response.
+
+ :param table_name: The name of the table.
+ :type table_name: str
+ :param odata_type: The odata type of the table.
+ :type odata_type: str
+ :param odata_id: The id of the table.
+ :type odata_id: str
+ :param odata_edit_link: The edit link of the table.
+ :type odata_edit_link: str
+ """
+
+ _attribute_map = {
+ 'table_name': {'key': 'TableName', 'type': 'str'},
+ 'odata_type': {'key': 'odata\\.type', 'type': 'str'},
+ 'odata_id': {'key': 'odata\\.id', 'type': 'str'},
+ 'odata_edit_link': {'key': 'odata\\.editLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TableResponseProperties, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.odata_type = kwargs.get('odata_type', None)
+ self.odata_id = kwargs.get('odata_id', None)
+ self.odata_edit_link = kwargs.get('odata_edit_link', None)
+
+
+class TableResponse(TableResponseProperties):
+ """The response for a single table.
+
+ :param table_name: The name of the table.
+ :type table_name: str
+ :param odata_type: The odata type of the table.
+ :type odata_type: str
+ :param odata_id: The id of the table.
+ :type odata_id: str
+ :param odata_edit_link: The edit link of the table.
+ :type odata_edit_link: str
+ :param odata_metadata: The metadata response of the table.
+ :type odata_metadata: str
+ """
+
+ _attribute_map = {
+ 'table_name': {'key': 'TableName', 'type': 'str'},
+ 'odata_type': {'key': 'odata\\.type', 'type': 'str'},
+ 'odata_id': {'key': 'odata\\.id', 'type': 'str'},
+ 'odata_edit_link': {'key': 'odata\\.editLink', 'type': 'str'},
+ 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TableResponse, self).__init__(**kwargs)
+ self.odata_metadata = kwargs.get('odata_metadata', None)
+
+
+class TableServiceError(msrest.serialization.Model):
+ """Table Service error.
+
+ :param message: The error message.
+ :type message: str
+ """
+
+ _attribute_map = {
+ 'message': {'key': 'Message', 'type': 'str', 'xml': {'name': 'Message'}},
+ }
+ _xml_map = {}
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TableServiceError, self).__init__(**kwargs)
+ self.message = kwargs.get('message', None)
+
+
+class TableServiceProperties(msrest.serialization.Model):
+ """Table Service Properties.
+
+ :param logging: Azure Analytics Logging settings.
+ :type logging: ~azure_table.models.Logging
+ :param hour_metrics: A summary of request statistics grouped by API in hourly aggregates for
+ tables.
+ :type hour_metrics: ~azure_table.models.Metrics
+ :param minute_metrics: A summary of request statistics grouped by API in minute aggregates for
+ tables.
+ :type minute_metrics: ~azure_table.models.Metrics
+ :param cors: The set of CORS rules.
+ :type cors: list[~azure_table.models.CorsRule]
+ """
+
+ _attribute_map = {
+ 'logging': {'key': 'Logging', 'type': 'Logging'},
+ 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'},
+ 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'},
+ 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'name': 'Cors', 'wrapped': True, 'itemsName': 'CorsRule'}},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TableServiceProperties, self).__init__(**kwargs)
+ self.logging = kwargs.get('logging', None)
+ self.hour_metrics = kwargs.get('hour_metrics', None)
+ self.minute_metrics = kwargs.get('minute_metrics', None)
+ self.cors = kwargs.get('cors', None)
+
+
+class TableServiceStats(msrest.serialization.Model):
+ """Stats for the service.
+
+ :param geo_replication: Geo-Replication information for the Secondary Storage Service.
+ :type geo_replication: ~azure_table.models.GeoReplication
+ """
+
+ _attribute_map = {
+ 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'},
+ }
+ _xml_map = {}
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(TableServiceStats, self).__init__(**kwargs)
+ self.geo_replication = kwargs.get('geo_replication', None)
diff --git a/sdk/table/azure/azure_table/_generated/models/_models_py3.py b/sdk/table/azure/azure_table/_generated/models/_models_py3.py
new file mode 100644
index 000000000000..1a821005c9e9
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/models/_models_py3.py
@@ -0,0 +1,604 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_table_enums import *
+
+
+class AccessPolicy(msrest.serialization.Model):
+ """An Access policy.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param start: Required. The start datetime from which the policy is active.
+ :type start: ~datetime.datetime
+ :param expiry: Required. The datetime that the policy expires.
+ :type expiry: ~datetime.datetime
+ :param permission: Required. The permissions for the acl policy.
+ :type permission: str
+ """
+
+ _validation = {
+ 'start': {'required': True},
+ 'expiry': {'required': True},
+ 'permission': {'required': True},
+ }
+
+ _attribute_map = {
+ 'start': {'key': 'Start', 'type': 'iso-8601', 'xml': {'name': 'Start'}},
+ 'expiry': {'key': 'Expiry', 'type': 'iso-8601', 'xml': {'name': 'Expiry'}},
+ 'permission': {'key': 'Permission', 'type': 'str', 'xml': {'name': 'Permission'}},
+ }
+ _xml_map = {
+ 'name': 'AccessPolicy'
+ }
+
+ def __init__(
+ self,
+ *,
+ start: datetime.datetime,
+ expiry: datetime.datetime,
+ permission: str,
+ **kwargs
+ ):
+ super(AccessPolicy, self).__init__(**kwargs)
+ self.start = start
+ self.expiry = expiry
+ self.permission = permission
+
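All three AccessPolicy fields are required. A hedged sketch of a one-day policy; the permission string follows the Table SAS letters (r=read/query, a=add, u=update, d=delete), which is an assumption about how callers fill this field:

```python
import datetime

from azure.azure_table._generated import models  # import path assumed

start = datetime.datetime.now(datetime.timezone.utc)
policy = models.AccessPolicy(
    start=start,
    expiry=start + datetime.timedelta(days=1),
    permission="raud",  # read, add, update, delete (assumed SAS letters)
)
```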
+
+class CorsRule(msrest.serialization.Model):
+ """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param allowed_origins: Required. The origin domains that are permitted to make a request
+ against the service via CORS. The origin domain is the domain from which the request
+ originates. Note that the origin must be an exact case-sensitive match with the origin that the
+     user agent sends to the service. You can also use the wildcard character '*' to allow all origin
+ domains to make requests via CORS.
+ :type allowed_origins: str
+ :param allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
+ use for a CORS request. (comma separated).
+ :type allowed_methods: str
+ :param allowed_headers: Required. The request headers that the origin domain may specify on the
+ CORS request.
+ :type allowed_headers: str
+ :param exposed_headers: Required. The response headers that may be sent in the response to the
+ CORS request and exposed by the browser to the request issuer.
+ :type exposed_headers: str
+    :param max_age_in_seconds: Required. The maximum amount of time that a browser should cache the
+ preflight OPTIONS request.
+ :type max_age_in_seconds: int
+ """
+
+ _validation = {
+ 'allowed_origins': {'required': True},
+ 'allowed_methods': {'required': True},
+ 'allowed_headers': {'required': True},
+ 'exposed_headers': {'required': True},
+ 'max_age_in_seconds': {'required': True, 'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str', 'xml': {'name': 'AllowedOrigins'}},
+ 'allowed_methods': {'key': 'AllowedMethods', 'type': 'str', 'xml': {'name': 'AllowedMethods'}},
+ 'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str', 'xml': {'name': 'AllowedHeaders'}},
+ 'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str', 'xml': {'name': 'ExposedHeaders'}},
+ 'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int', 'xml': {'name': 'MaxAgeInSeconds'}},
+ }
+ _xml_map = {
+ 'name': 'CorsRule'
+ }
+
+ def __init__(
+ self,
+ *,
+ allowed_origins: str,
+ allowed_methods: str,
+ allowed_headers: str,
+ exposed_headers: str,
+ max_age_in_seconds: int,
+ **kwargs
+ ):
+ super(CorsRule, self).__init__(**kwargs)
+ self.allowed_origins = allowed_origins
+ self.allowed_methods = allowed_methods
+ self.allowed_headers = allowed_headers
+ self.exposed_headers = exposed_headers
+ self.max_age_in_seconds = max_age_in_seconds
+
+
+class GeoReplication(msrest.serialization.Model):
+ """GeoReplication.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param status: Required. The status of the secondary location. Possible values include: "live",
+ "bootstrap", "unavailable".
+ :type status: str or ~azure_table.models.GeoReplicationStatusType
+ :param last_sync_time: Required. A GMT date/time value, to the second. All primary writes
+ preceding this value are guaranteed to be available for read operations at the secondary.
+ Primary writes after this point in time may or may not be available for reads.
+ :type last_sync_time: ~datetime.datetime
+ """
+
+ _validation = {
+ 'status': {'required': True},
+ 'last_sync_time': {'required': True},
+ }
+
+ _attribute_map = {
+ 'status': {'key': 'Status', 'type': 'str', 'xml': {'name': 'Status'}},
+ 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123', 'xml': {'name': 'LastSyncTime'}},
+ }
+ _xml_map = {
+ 'name': 'GeoReplication'
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Union[str, "GeoReplicationStatusType"],
+ last_sync_time: datetime.datetime,
+ **kwargs
+ ):
+ super(GeoReplication, self).__init__(**kwargs)
+ self.status = status
+ self.last_sync_time = last_sync_time
+
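GeoReplication is normally produced by deserializing a service response rather than built by hand, but the annotation above shows that `status` accepts either the enum or its plain string value. A sketch with illustrative values:

```python
import datetime

from azure.azure_table._generated import models  # import path assumed

geo = models.GeoReplication(
    status="live",  # a plain string is accepted per the Union[str, enum] hint
    last_sync_time=datetime.datetime(2019, 10, 30, 19, 51, 14,
                                     tzinfo=datetime.timezone.utc),
)
```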
+
+class Logging(msrest.serialization.Model):
+ """Azure Analytics Logging settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param version: Required. The version of Analytics to configure.
+ :type version: str
+ :param delete: Required. Indicates whether all delete requests should be logged.
+ :type delete: bool
+ :param read: Required. Indicates whether all read requests should be logged.
+ :type read: bool
+ :param write: Required. Indicates whether all write requests should be logged.
+ :type write: bool
+ :param retention_policy: Required. The retention policy.
+ :type retention_policy: ~azure_table.models.RetentionPolicy
+ """
+
+ _validation = {
+ 'version': {'required': True},
+ 'delete': {'required': True},
+ 'read': {'required': True},
+ 'write': {'required': True},
+ 'retention_policy': {'required': True},
+ }
+
+ _attribute_map = {
+ 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
+ 'delete': {'key': 'Delete', 'type': 'bool', 'xml': {'name': 'Delete'}},
+ 'read': {'key': 'Read', 'type': 'bool', 'xml': {'name': 'Read'}},
+ 'write': {'key': 'Write', 'type': 'bool', 'xml': {'name': 'Write'}},
+ 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'},
+ }
+ _xml_map = {
+ 'name': 'Logging'
+ }
+
+ def __init__(
+ self,
+ *,
+ version: str,
+ delete: bool,
+ read: bool,
+ write: bool,
+ retention_policy: "RetentionPolicy",
+ **kwargs
+ ):
+ super(Logging, self).__init__(**kwargs)
+ self.version = version
+ self.delete = delete
+ self.read = read
+ self.write = write
+ self.retention_policy = retention_policy
+
+
+class Metrics(msrest.serialization.Model):
+ """Metrics.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param version: The version of Analytics to configure.
+ :type version: str
+ :param enabled: Required. Indicates whether metrics are enabled for the Table service.
+ :type enabled: bool
+ :param include_apis: Indicates whether metrics should generate summary statistics for called
+ API operations.
+ :type include_apis: bool
+ :param retention_policy: The retention policy.
+ :type retention_policy: ~azure_table.models.RetentionPolicy
+ """
+
+ _validation = {
+ 'enabled': {'required': True},
+ }
+
+ _attribute_map = {
+ 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
+ 'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
+ 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool', 'xml': {'name': 'IncludeAPIs'}},
+ 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'},
+ }
+    _xml_map = {}
+
+ def __init__(
+ self,
+ *,
+ enabled: bool,
+ version: Optional[str] = None,
+ include_apis: Optional[bool] = None,
+ retention_policy: Optional["RetentionPolicy"] = None,
+ **kwargs
+ ):
+ super(Metrics, self).__init__(**kwargs)
+ self.version = version
+ self.enabled = enabled
+ self.include_apis = include_apis
+ self.retention_policy = retention_policy
+
+
+class QueryOptions(msrest.serialization.Model):
+ """Parameter group.
+
+ :param format: Specifies the media type for the response. Possible values include:
+ "application/json;odata=nometadata", "application/json;odata=minimalmetadata",
+ "application/json;odata=fullmetadata".
+ :type format: str or ~azure_table.models.OdataMetadataFormat
+ :param top: Maximum number of records to return.
+ :type top: int
+ :param select: Select expression using OData notation. Limits the columns on each record to
+     just those requested, e.g. "$select=PartitionKey,RowKey".
+ :type select: str
+ :param filter: OData filter expression.
+ :type filter: str
+ """
+
+ _validation = {
+ 'top': {'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'format': {'key': 'Format', 'type': 'str'},
+ 'top': {'key': 'Top', 'type': 'int'},
+ 'select': {'key': 'Select', 'type': 'str'},
+ 'filter': {'key': 'Filter', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ format: Optional[Union[str, "OdataMetadataFormat"]] = None,
+ top: Optional[int] = None,
+ select: Optional[str] = None,
+ filter: Optional[str] = None,
+ **kwargs
+ ):
+ super(QueryOptions, self).__init__(**kwargs)
+ self.format = format
+ self.top = top
+ self.select = select
+ self.filter = filter
+
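QueryOptions is the parameter group consumed by the query operations later in this patch; each field maps onto an OData query parameter. A sketch with hypothetical property names:

```python
from azure.azure_table._generated import models  # import path assumed

options = models.QueryOptions(
    format="application/json;odata=minimalmetadata",
    top=10,                              # becomes $top
    select="PartitionKey,RowKey,Price",  # becomes $select
    filter="Price gt 20",                # becomes $filter
)
```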
+
+class RetentionPolicy(msrest.serialization.Model):
+ """The retention policy.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param enabled: Required. Indicates whether a retention policy is enabled for the service.
+ :type enabled: bool
+ :param days: Indicates the number of days that metrics or logging or soft-deleted data should
+ be retained. All data older than this value will be deleted.
+ :type days: int
+ """
+
+ _validation = {
+ 'enabled': {'required': True},
+ 'days': {'minimum': 1},
+ }
+
+ _attribute_map = {
+ 'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
+ 'days': {'key': 'Days', 'type': 'int', 'xml': {'name': 'Days'}},
+ }
+ _xml_map = {
+ 'name': 'RetentionPolicy'
+ }
+
+ def __init__(
+ self,
+ *,
+ enabled: bool,
+ days: Optional[int] = None,
+ **kwargs
+ ):
+ super(RetentionPolicy, self).__init__(**kwargs)
+ self.enabled = enabled
+ self.days = days
+
+
+class SignedIdentifier(msrest.serialization.Model):
+ """A signed identifier.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. A unique id.
+ :type id: str
+ :param access_policy: Required. The access policy.
+ :type access_policy: ~azure_table.models.AccessPolicy
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ 'access_policy': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'Id', 'type': 'str', 'xml': {'name': 'Id'}},
+ 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'},
+ }
+ _xml_map = {
+ 'name': 'SignedIdentifier'
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str,
+ access_policy: "AccessPolicy",
+ **kwargs
+ ):
+ super(SignedIdentifier, self).__init__(**kwargs)
+ self.id = id
+ self.access_policy = access_policy
+
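A SignedIdentifier pairs a caller-chosen unique id with the AccessPolicy defined earlier; a minimal sketch with an illustrative id:

```python
import datetime

from azure.azure_table._generated import models  # import path assumed

start = datetime.datetime.now(datetime.timezone.utc)
identifier = models.SignedIdentifier(
    id="readonly-4h",  # any unique id; this name is illustrative
    access_policy=models.AccessPolicy(
        start=start,
        expiry=start + datetime.timedelta(hours=4),
        permission="r",
    ),
)
```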
+
+class TableEntityQueryResponse(msrest.serialization.Model):
+ """The properties for the table entity query response.
+
+ :param odata_metadata: The metadata response of the table.
+ :type odata_metadata: str
+ :param value: List of table entities.
+ :type value: list[dict[str, object]]
+ """
+
+ _attribute_map = {
+ 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[{object}]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ odata_metadata: Optional[str] = None,
+ value: Optional[List[Dict[str, object]]] = None,
+ **kwargs
+ ):
+ super(TableEntityQueryResponse, self).__init__(**kwargs)
+ self.odata_metadata = odata_metadata
+ self.value = value
+
+
+class TableProperties(msrest.serialization.Model):
+ """The properties for creating a table.
+
+ :param table_name: The name of the table to create.
+ :type table_name: str
+ """
+
+ _attribute_map = {
+ 'table_name': {'key': 'TableName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ table_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(TableProperties, self).__init__(**kwargs)
+ self.table_name = table_name
+
+
+class TableQueryResponse(msrest.serialization.Model):
+ """The properties for the table query response.
+
+ :param odata_metadata: The metadata response of the table.
+ :type odata_metadata: str
+ :param value: List of tables.
+ :type value: list[~azure_table.models.TableResponseProperties]
+ """
+
+ _attribute_map = {
+ 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'},
+ 'value': {'key': 'value', 'type': '[TableResponseProperties]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ odata_metadata: Optional[str] = None,
+ value: Optional[List["TableResponseProperties"]] = None,
+ **kwargs
+ ):
+ super(TableQueryResponse, self).__init__(**kwargs)
+ self.odata_metadata = odata_metadata
+ self.value = value
+
+
+class TableResponseProperties(msrest.serialization.Model):
+ """The properties for the table response.
+
+ :param table_name: The name of the table.
+ :type table_name: str
+ :param odata_type: The odata type of the table.
+ :type odata_type: str
+ :param odata_id: The id of the table.
+ :type odata_id: str
+ :param odata_edit_link: The edit link of the table.
+ :type odata_edit_link: str
+ """
+
+ _attribute_map = {
+ 'table_name': {'key': 'TableName', 'type': 'str'},
+ 'odata_type': {'key': 'odata\\.type', 'type': 'str'},
+ 'odata_id': {'key': 'odata\\.id', 'type': 'str'},
+ 'odata_edit_link': {'key': 'odata\\.editLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ table_name: Optional[str] = None,
+ odata_type: Optional[str] = None,
+ odata_id: Optional[str] = None,
+ odata_edit_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(TableResponseProperties, self).__init__(**kwargs)
+ self.table_name = table_name
+ self.odata_type = odata_type
+ self.odata_id = odata_id
+ self.odata_edit_link = odata_edit_link
+
+
+class TableResponse(TableResponseProperties):
+ """The response for a single table.
+
+ :param table_name: The name of the table.
+ :type table_name: str
+ :param odata_type: The odata type of the table.
+ :type odata_type: str
+ :param odata_id: The id of the table.
+ :type odata_id: str
+ :param odata_edit_link: The edit link of the table.
+ :type odata_edit_link: str
+ :param odata_metadata: The metadata response of the table.
+ :type odata_metadata: str
+ """
+
+ _attribute_map = {
+ 'table_name': {'key': 'TableName', 'type': 'str'},
+ 'odata_type': {'key': 'odata\\.type', 'type': 'str'},
+ 'odata_id': {'key': 'odata\\.id', 'type': 'str'},
+ 'odata_edit_link': {'key': 'odata\\.editLink', 'type': 'str'},
+ 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ table_name: Optional[str] = None,
+ odata_type: Optional[str] = None,
+ odata_id: Optional[str] = None,
+ odata_edit_link: Optional[str] = None,
+ odata_metadata: Optional[str] = None,
+ **kwargs
+ ):
+ super(TableResponse, self).__init__(table_name=table_name, odata_type=odata_type, odata_id=odata_id, odata_edit_link=odata_edit_link, **kwargs)
+ self.odata_metadata = odata_metadata
+
+
+class TableServiceError(msrest.serialization.Model):
+ """Table Service error.
+
+ :param message: The error message.
+ :type message: str
+ """
+
+ _attribute_map = {
+ 'message': {'key': 'Message', 'type': 'str', 'xml': {'name': 'Message'}},
+ }
+    _xml_map = {}
+
+ def __init__(
+ self,
+ *,
+ message: Optional[str] = None,
+ **kwargs
+ ):
+ super(TableServiceError, self).__init__(**kwargs)
+ self.message = message
+
+
+class TableServiceProperties(msrest.serialization.Model):
+ """Table Service Properties.
+
+ :param logging: Azure Analytics Logging settings.
+ :type logging: ~azure_table.models.Logging
+ :param hour_metrics: A summary of request statistics grouped by API in hourly aggregates for
+ tables.
+ :type hour_metrics: ~azure_table.models.Metrics
+ :param minute_metrics: A summary of request statistics grouped by API in minute aggregates for
+ tables.
+ :type minute_metrics: ~azure_table.models.Metrics
+ :param cors: The set of CORS rules.
+ :type cors: list[~azure_table.models.CorsRule]
+ """
+
+ _attribute_map = {
+ 'logging': {'key': 'Logging', 'type': 'Logging'},
+ 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'},
+ 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'},
+ 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'name': 'Cors', 'wrapped': True, 'itemsName': 'CorsRule'}},
+ }
+
+ def __init__(
+ self,
+ *,
+ logging: Optional["Logging"] = None,
+ hour_metrics: Optional["Metrics"] = None,
+ minute_metrics: Optional["Metrics"] = None,
+ cors: Optional[List["CorsRule"]] = None,
+ **kwargs
+ ):
+ super(TableServiceProperties, self).__init__(**kwargs)
+ self.logging = logging
+ self.hour_metrics = hour_metrics
+ self.minute_metrics = minute_metrics
+ self.cors = cors
+
+
+class TableServiceStats(msrest.serialization.Model):
+ """Stats for the service.
+
+ :param geo_replication: Geo-Replication information for the Secondary Storage Service.
+ :type geo_replication: ~azure_table.models.GeoReplication
+ """
+
+ _attribute_map = {
+ 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'},
+ }
+    _xml_map = {}
+
+ def __init__(
+ self,
+ *,
+ geo_replication: Optional["GeoReplication"] = None,
+ **kwargs
+ ):
+ super(TableServiceStats, self).__init__(**kwargs)
+ self.geo_replication = geo_replication
diff --git a/sdk/table/azure/azure_table/_generated/operations/__init__.py b/sdk/table/azure/azure_table/_generated/operations/__init__.py
new file mode 100644
index 000000000000..1298fa41c6f8
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/operations/__init__.py
@@ -0,0 +1,13 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._table_operations import TableOperations
+from ._service_operations import ServiceOperations
+
+__all__ = [
+ 'TableOperations',
+ 'ServiceOperations',
+]
diff --git a/sdk/table/azure/azure_table/_generated/operations/_service_operations.py b/sdk/table/azure/azure_table/_generated/operations/_service_operations.py
new file mode 100644
index 000000000000..f95ed5b8670e
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/operations/_service_operations.py
@@ -0,0 +1,254 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from azure.azure_table._generated import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ServiceOperations(object):
+ """ServiceOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_table.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def set_properties(
+ self,
+ table_service_properties, # type: "models.TableServiceProperties"
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Sets properties for an account's Table service endpoint, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param table_service_properties: The Table Service properties.
+ :type table_service_properties: ~azure_table.models.TableServiceProperties
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ restype = "service"
+ comp = "properties"
+ content_type = kwargs.pop("content_type", "application/xml")
+
+ # Construct URL
+ url = self.set_properties.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(table_service_properties, 'TableServiceProperties', is_xml=True)
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ set_properties.metadata = {'url': '/'} # type: ignore
+
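A hedged usage sketch for set_properties. The operation group is reached through the generated client (defined outside this hunk), which attaches an instance as an attribute, assumed here to be `client.service`; sending a partial properties payload is likewise an assumption about what the service tolerates:

```python
from azure.azure_table._generated import models

# `client` stands for an already-constructed generated client (assumed).
client.service.set_properties(
    models.TableServiceProperties(
        minute_metrics=models.Metrics(enabled=False),
    ),
    timeout=30,
)
```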
+ def get_properties(
+ self,
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.TableServiceProperties"
+ """Gets the properties of an account's Table service, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableServiceProperties, or the result of cls(response)
+ :rtype: ~azure_table.models.TableServiceProperties
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableServiceProperties"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ restype = "service"
+ comp = "properties"
+
+ # Construct URL
+ url = self.get_properties.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Accept'] = 'application/xml'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ deserialized = self._deserialize('TableServiceProperties', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ get_properties.metadata = {'url': '/'} # type: ignore
+
+ def get_statistics(
+ self,
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.TableServiceStats"
+ """Retrieves statistics related to replication for the Table service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the account.
+
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableServiceStats, or the result of cls(response)
+ :rtype: ~azure_table.models.TableServiceStats
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableServiceStats"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ restype = "service"
+ comp = "stats"
+
+ # Construct URL
+ url = self.get_statistics.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Accept'] = 'application/xml'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ deserialized = self._deserialize('TableServiceStats', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ get_statistics.metadata = {'url': '/'} # type: ignore
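A hedged read-side sketch for the two GET operations above. As the get_statistics docstring notes, statistics are only served from the secondary location endpoint of a read-access geo-redundant account; `client` again stands for an assumed, already-constructed generated client:

```python
props = client.service.get_properties()
for rule in props.cors or []:
    print(rule.allowed_origins, rule.max_age_in_seconds)

stats = client.service.get_statistics()  # secondary endpoint only
print(stats.geo_replication.status, stats.geo_replication.last_sync_time)
```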
diff --git a/sdk/table/azure/azure_table/_generated/operations/_table_operations.py b/sdk/table/azure/azure_table/_generated/operations/_table_operations.py
new file mode 100644
index 000000000000..7377906998bc
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/operations/_table_operations.py
@@ -0,0 +1,1062 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6282, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from xml.etree import ElementTree
+from azure.azure_table._generated import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class TableOperations(object):
+ """TableOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_table.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def query(
+ self,
+ request_id_parameter=None, # type: Optional[str]
+ next_table_name=None, # type: Optional[str]
+ query_options=None, # type: Optional["models.QueryOptions"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.TableQueryResponse"
+ """Queries tables under the given account.
+
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param next_table_name: A table query continuation token from a previous call.
+ :type next_table_name: str
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableQueryResponse, or the result of cls(response)
+ :rtype: ~azure_table.models.TableQueryResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableQueryResponse"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ _top = None
+ _select = None
+ _filter = None
+ if query_options is not None:
+ _format = query_options.format
+ _top = query_options.top
+ _select = query_options.select
+ _filter = query_options.filter
+ data_service_version = "3.0"
+
+ # Construct URL
+ url = self.query.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+ if _top is not None:
+ query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
+ if _select is not None:
+ query_parameters['$select'] = self._serialize.query("select", _select, 'str')
+ if _filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
+ if next_table_name is not None:
+ query_parameters['NextTableName'] = self._serialize.query("next_table_name", next_table_name, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['x-ms-continuation-NextTableName']=self._deserialize('str', response.headers.get('x-ms-continuation-NextTableName'))
+ deserialized = self._deserialize('TableQueryResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ query.metadata = {'url': '/Tables'} # type: ignore
+
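A hedged paging sketch for query(). The continuation token is returned only in the x-ms-continuation-NextTableName response header, which the default return path discards, so the `cls` hook is used to keep the headers alongside the deserialized body:

```python
from azure.azure_table._generated import models

def body_and_headers(pipeline_response, deserialized, headers):
    # Matches the cls(pipeline_response, deserialized, response_headers) call above.
    return deserialized, headers

next_name = None
while True:
    page, headers = client.table.query(  # `client` assumed as before
        next_table_name=next_name,
        query_options=models.QueryOptions(top=100),
        cls=body_and_headers,
    )
    for table in page.value or []:
        print(table.table_name)
    next_name = headers.get('x-ms-continuation-NextTableName')
    if not next_name:
        break
```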
+ def create(
+ self,
+ table_properties, # type: "models.TableProperties"
+ request_id_parameter=None, # type: Optional[str]
+ response_preference=None, # type: Optional[Union[str, "models.ResponseFormat"]]
+ query_options=None, # type: Optional["models.QueryOptions"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.TableResponse"
+ """Creates a new table under the given account.
+
+ :param table_properties: The Table properties.
+ :type table_properties: ~azure_table.models.TableProperties
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param response_preference: Specifies whether the response should include the inserted entity
+ in the payload. Possible values are return-no-content and return-content.
+ :type response_preference: str or ~azure_table.models.ResponseFormat
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableResponse, or the result of cls(response)
+ :rtype: ~azure_table.models.TableResponse or None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableResponse"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+ content_type = kwargs.pop("content_type", "application/json;odata=nometadata")
+
+ # Construct URL
+ url = self.create.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ if response_preference is not None:
+ header_parameters['Prefer'] = self._serialize.header("response_preference", response_preference, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(table_properties, 'TableProperties')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 201:
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied'))
+ deserialized = self._deserialize('TableResponse', pipeline_response)
+
+ if response.status_code == 204:
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ create.metadata = {'url': '/Tables'} # type: ignore
+
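A hedged sketch for create(). With Prefer: return-content the service answers 201 with a TableResponse body; with return-no-content it answers 204 and the method returns None, which is why the rtype above reads `TableResponse or None`:

```python
from azure.azure_table._generated import models

created = client.table.create(  # `client` assumed as before
    models.TableProperties(table_name="mytable"),
    response_preference="return-content",
)
if created is not None:
    print(created.table_name, created.odata_edit_link)
```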
+ def delete(
+ self,
+ table, # type: str
+ request_id_parameter=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+        """Permanently deletes the specified table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+
+ # Construct and send request
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ delete.metadata = {'url': '/Tables(\'{table}\')'} # type: ignore
+
+ def query_entities(
+ self,
+ table, # type: str
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ next_partition_key=None, # type: Optional[str]
+ next_row_key=None, # type: Optional[str]
+ query_options=None, # type: Optional["models.QueryOptions"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.TableEntityQueryResponse"
+ """Queries entities in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param next_partition_key: An entity query continuation token from a previous call.
+ :type next_partition_key: str
+ :param next_row_key: An entity query continuation token from a previous call.
+ :type next_row_key: str
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableEntityQueryResponse, or the result of cls(response)
+ :rtype: ~azure_table.models.TableEntityQueryResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableEntityQueryResponse"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ _top = None
+ _select = None
+ _filter = None
+ if query_options is not None:
+ _format = query_options.format
+ _top = query_options.top
+ _select = query_options.select
+ _filter = query_options.filter
+ data_service_version = "3.0"
+
+ # Construct URL
+ url = self.query_entities.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+ if _top is not None:
+ query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
+ if _select is not None:
+ query_parameters['$select'] = self._serialize.query("select", _select, 'str')
+ if _filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
+ if next_partition_key is not None:
+ query_parameters['NextPartitionKey'] = self._serialize.query("next_partition_key", next_partition_key, 'str')
+ if next_row_key is not None:
+ query_parameters['NextRowKey'] = self._serialize.query("next_row_key", next_row_key, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['x-ms-continuation-NextPartitionKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextPartitionKey'))
+ response_headers['x-ms-continuation-NextRowKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextRowKey'))
+ deserialized = self._deserialize('TableEntityQueryResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ query_entities.metadata = {'url': '/{table}()'} # type: ignore
+
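A hedged sketch for query_entities(); each item in `value` is a plain dict of property names to values (plus odata annotations, depending on the requested $format), and the property names in the filter are hypothetical:

```python
from azure.azure_table._generated import models

result = client.table.query_entities(  # `client` assumed as before
    "mytable",
    query_options=models.QueryOptions(
        filter="PartitionKey eq 'sales'",
        top=50,
    ),
)
for entity in result.value or []:
    print(entity.get("RowKey"), entity)
```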
+ def query_entities_with_partition_and_row_key(
+ self,
+ table, # type: str
+ partition_key, # type: str
+ row_key, # type: str
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ query_options=None, # type: Optional["models.QueryOptions"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.TableEntityQueryResponse"
+ """Queries entities in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param partition_key: The partition key of the entity.
+ :type partition_key: str
+ :param row_key: The row key of the entity.
+ :type row_key: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TableEntityQueryResponse, or the result of cls(response)
+ :rtype: ~azure_table.models.TableEntityQueryResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.TableEntityQueryResponse"]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ _select = None
+ _filter = None
+ if query_options is not None:
+ _format = query_options.format
+ _select = query_options.select
+ _filter = query_options.filter
+ data_service_version = "3.0"
+
+ # Construct URL
+ url = self.query_entities_with_partition_and_row_key.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'),
+ 'rowKey': self._serialize.url("row_key", row_key, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+ if _select is not None:
+ query_parameters['$select'] = self._serialize.query("select", _select, 'str')
+ if _filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+ response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+ response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+ response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+ response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+ response_headers['x-ms-continuation-NextPartitionKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextPartitionKey'))
+ response_headers['x-ms-continuation-NextRowKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextRowKey'))
+ deserialized = self._deserialize('TableEntityQueryResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ query_entities_with_partition_and_row_key.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore
+
+ def update_entity(
+ self,
+ table, # type: str
+ partition_key, # type: str
+ row_key, # type: str
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ if_match=None, # type: Optional[str]
+ table_entity_properties=None, # type: Optional[Dict[str, object]]
+ query_options=None, # type: Optional["models.QueryOptions"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Update entity in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param partition_key: The partition key of the entity.
+ :type partition_key: str
+ :param row_key: The row key of the entity.
+ :type row_key: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param if_match: Match condition for an entity to be updated. If specified and a matching
+ entity is not found, an error will be raised. To force an unconditional update, set to the
+ wildcard character (*). If not specified, an insert will be performed when no existing entity
+ is found to update and a replace will be performed if an existing entity is found.
+ :type if_match: str
+ :param table_entity_properties: The properties for the table entity.
+ :type table_entity_properties: dict[str, object]
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+ content_type = kwargs.pop("content_type", "application/json")
+
+ # Construct URL
+ url = self.update_entity.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'),
+ 'rowKey': self._serialize.url("row_key", row_key, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if table_entity_properties is not None:
+ body_content = self._serialize.body(table_entity_properties, '{object}')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+        response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ update_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore
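Usage sketch (illustrative, not part of this change): the `if_match` contract above gives optimistic concurrency, forced replacement, and upsert from a single operation. `ops` stands for an instance of this generated operations class; the table name, keys, and ETag value are made up.

    entity = {'PartitionKey': 'pk1', 'RowKey': 'rk1', 'Age': 42}
    # Conditional replace: errors unless the stored entity still has this ETag.
    ops.update_entity('mytable', 'pk1', 'rk1',
                      if_match='W/"datetime\'2020-01-01T00%3A00%3A00Z\'"',
                      table_entity_properties=entity)
    # Forced replace, regardless of ETag.
    ops.update_entity('mytable', 'pk1', 'rk1', if_match='*', table_entity_properties=entity)
    # No if_match: insert-or-replace (upsert).
    ops.update_entity('mytable', 'pk1', 'rk1', table_entity_properties=entity)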
+
+ def merge_entity(
+ self,
+ table, # type: str
+ partition_key, # type: str
+ row_key, # type: str
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ if_match=None, # type: Optional[str]
+ table_entity_properties=None, # type: Optional[Dict[str, object]]
+ query_options=None, # type: Optional["models.QueryOptions"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Merge entity in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param partition_key: The partition key of the entity.
+ :type partition_key: str
+ :param row_key: The row key of the entity.
+ :type row_key: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param if_match: Match condition for an entity to be updated. If specified and a matching
+ entity is not found, an error will be raised. To force an unconditional update, set to the
+ wildcard character (*). If not specified, an insert will be performed when no existing entity
+ is found to update and a merge will be performed if an existing entity is found.
+ :type if_match: str
+ :param table_entity_properties: The properties for the table entity.
+ :type table_entity_properties: dict[str, object]
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+ content_type = kwargs.pop("content_type", "application/json")
+
+ # Construct URL
+ url = self.merge_entity.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'),
+ 'rowKey': self._serialize.url("row_key", row_key, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if table_entity_properties is not None:
+ body_content = self._serialize.body(table_entity_properties, '{object}')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+        response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ merge_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore
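The wire-level difference from `update_entity` is the verb: `merge_entity` sends PATCH, so properties omitted from the payload are preserved on the stored entity instead of being removed. With the same illustrative `ops` client as above:

    # Only 'Age' changes; other properties on the stored entity survive the merge.
    ops.merge_entity('mytable', 'pk1', 'rk1', if_match='*', table_entity_properties={'Age': 43})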
+
+ def delete_entity(
+ self,
+ table, # type: str
+ partition_key, # type: str
+ row_key, # type: str
+ if_match, # type: str
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ query_options=None, # type: Optional["models.QueryOptions"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Deletes the specified entity in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param partition_key: The partition key of the entity.
+ :type partition_key: str
+ :param row_key: The row key of the entity.
+ :type row_key: str
+ :param if_match: Match condition for an entity to be deleted. If specified and a matching
+ entity is not found, an error will be raised. To force an unconditional delete, set to the
+ wildcard character (*).
+ :type if_match: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+
+ # Construct URL
+ url = self.delete_entity.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'),
+ 'rowKey': self._serialize.url("row_key", row_key, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+
+ # Construct and send request
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+        response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ delete_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore
+
+ def insert_entity(
+ self,
+ table, # type: str
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ response_preference=None, # type: Optional[Union[str, "models.ResponseFormat"]]
+ table_entity_properties=None, # type: Optional[Dict[str, object]]
+ query_options=None, # type: Optional["models.QueryOptions"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Dict[str, object]
+ """Insert entity in a table.
+
+ :param table: The name of the table.
+ :type table: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param response_preference: Specifies whether the response should include the inserted entity
+ in the payload. Possible values are return-no-content and return-content.
+ :type response_preference: str or ~azure_table.models.ResponseFormat
+ :param table_entity_properties: The properties for the table entity.
+ :type table_entity_properties: dict[str, object]
+ :param query_options: Parameter group.
+ :type query_options: ~azure_table.models.QueryOptions
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: dict mapping str to object, or the result of cls(response)
+ :rtype: dict[str, object] or None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+
+ _format = None
+ if query_options is not None:
+ _format = query_options.format
+ data_service_version = "3.0"
+ content_type = kwargs.pop("content_type", "application/json;odata=nometadata")
+
+ # Construct URL
+ url = self.insert_entity.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if _format is not None:
+ query_parameters['$format'] = self._serialize.query("format", _format, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str')
+ if response_preference is not None:
+ header_parameters['Prefer'] = self._serialize.header("response_preference", response_preference, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = 'application/json;odata=minimalmetadata'
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if table_entity_properties is not None:
+ body_content = self._serialize.body(table_entity_properties, '{object}')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 201:
+            response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+            response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+            response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+            response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+            response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
+            response_headers['Preference-Applied'] = self._deserialize('str', response.headers.get('Preference-Applied'))
+            response_headers['Content-Type'] = self._deserialize('str', response.headers.get('Content-Type'))
+ deserialized = self._deserialize('{object}', pipeline_response)
+
+ if response.status_code == 204:
+            response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+            response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+            response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+            response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+            response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
+            response_headers['Preference-Applied'] = self._deserialize('str', response.headers.get('Preference-Applied'))
+            response_headers['Content-Type'] = self._deserialize('str', response.headers.get('Content-Type'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ insert_entity.metadata = {'url': '/{table}'} # type: ignore
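`response_preference` selects between the two success branches above: `return-content` produces a 201 with the inserted entity echoed back and deserialized into the returned dict, while `return-no-content` produces a 204 and a `None` result (the ETag still arrives in the response headers). A sketch with the same illustrative client:

    created = ops.insert_entity(
        'mytable',
        table_entity_properties={'PartitionKey': 'pk1', 'RowKey': 'rk1', 'Age': 42},
        response_preference='return-content')    # dict echoing the stored entity
    nothing = ops.insert_entity(
        'mytable',
        table_entity_properties={'PartitionKey': 'pk1', 'RowKey': 'rk2', 'Age': 7},
        response_preference='return-no-content') # None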
+
+ def get_access_policy(
+ self,
+ table, # type: str
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> List["models.SignedIdentifier"]
+ """Retrieves details about any stored access policies specified on the table that may be used with Shared Access Signatures.
+
+ :param table: The name of the table.
+ :type table: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: list of SignedIdentifier, or the result of cls(response)
+ :rtype: list[~azure_table.models.SignedIdentifier]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[List["models.SignedIdentifier"]]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ comp = "acl"
+
+ # Construct URL
+ url = self.get_access_policy.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Accept'] = 'application/xml'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+        response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+ deserialized = self._deserialize('[SignedIdentifier]', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ get_access_policy.metadata = {'url': '/{table}'} # type: ignore
+
+ def set_access_policy(
+ self,
+ table, # type: str
+ timeout=None, # type: Optional[int]
+ request_id_parameter=None, # type: Optional[str]
+ table_acl=None, # type: Optional[List["models.SignedIdentifier"]]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Sets stored access policies for the table that may be used with Shared Access Signatures.
+
+ :param table: The name of the table.
+ :type table: str
+ :param timeout: The timeout parameter is expressed in seconds.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when analytics logging is enabled.
+ :type request_id_parameter: str
+ :param table_acl: The acls for the table.
+ :type table_acl: list[~azure_table.models.SignedIdentifier]
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+ error_map.update(kwargs.pop('error_map', {}))
+ comp = "acl"
+ content_type = kwargs.pop("content_type", "application/xml")
+
+ # Construct URL
+ url = self.set_access_policy.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+ 'table': self._serialize.url("table", table, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id_parameter is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = 'application/xml'
+
+ # Construct and send request
+ body_content_kwargs = {} # type: Dict[str, Any]
+ serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}}
+ if table_acl is not None:
+ body_content = self._serialize.body(table_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt)
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.TableServiceError, response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+        response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ set_access_policy.metadata = {'url': '/{table}'} # type: ignore
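`get_access_policy` and `set_access_policy` round-trip the stored policies as a list of `SignedIdentifier` models serialized to XML. A sketch, assuming the generated `SignedIdentifier`/`AccessPolicy` models accept these keyword arguments; names and dates are illustrative:

    from datetime import datetime
    identifier = models.SignedIdentifier(
        id='read-only-1',
        access_policy=models.AccessPolicy(permission='r',
                                          start=datetime(2020, 1, 1),
                                          expiry=datetime(2020, 2, 1)))
    ops.set_access_policy('mytable', table_acl=[identifier])
    stored = ops.get_access_policy('mytable')   # -> [SignedIdentifier]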
diff --git a/sdk/table/azure/azure_table/_generated/py.typed b/sdk/table/azure/azure_table/_generated/py.typed
new file mode 100644
index 000000000000..e5aff4f83af8
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/sdk/table/azure/azure_table/_generated/version.py b/sdk/table/azure/azure_table/_generated/version.py
new file mode 100644
index 000000000000..629812170000
--- /dev/null
+++ b/sdk/table/azure/azure_table/_generated/version.py
@@ -0,0 +1,13 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+VERSION = "2019-07-07"
+
diff --git a/sdk/table/azure/azure_table/_message_encoding.py b/sdk/table/azure/azure_table/_message_encoding.py
new file mode 100644
index 000000000000..52b643905c08
--- /dev/null
+++ b/sdk/table/azure/azure_table/_message_encoding.py
@@ -0,0 +1,150 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+# pylint: disable=unused-argument
+
+from base64 import b64encode, b64decode
+
+import sys
+import six
+from azure.core.exceptions import DecodeError
+
+from ._shared.encryption import decrypt_queue_message, encrypt_queue_message
+
+
+class MessageEncodePolicy(object):
+
+ def __init__(self):
+ self.require_encryption = False
+ self.key_encryption_key = None
+ self.resolver = None
+
+ def __call__(self, content):
+ if content:
+ content = self.encode(content)
+ if self.key_encryption_key is not None:
+ content = encrypt_queue_message(content, self.key_encryption_key)
+ return content
+
+ def configure(self, require_encryption, key_encryption_key, resolver):
+ self.require_encryption = require_encryption
+ self.key_encryption_key = key_encryption_key
+ self.resolver = resolver
+ if self.require_encryption and not self.key_encryption_key:
+ raise ValueError("Encryption required but no key was provided.")
+
+ def encode(self, content):
+ raise NotImplementedError("Must be implemented by child class.")
+
+
+class MessageDecodePolicy(object):
+
+ def __init__(self):
+ self.require_encryption = False
+ self.key_encryption_key = None
+ self.resolver = None
+
+ def __call__(self, response, obj, headers):
+ for message in obj:
+ if message.message_text in [None, "", b""]:
+ continue
+ content = message.message_text
+ if (self.key_encryption_key is not None) or (self.resolver is not None):
+ content = decrypt_queue_message(
+ content, response,
+ self.require_encryption,
+ self.key_encryption_key,
+ self.resolver)
+ message.message_text = self.decode(content, response)
+ return obj
+
+ def configure(self, require_encryption, key_encryption_key, resolver):
+ self.require_encryption = require_encryption
+ self.key_encryption_key = key_encryption_key
+ self.resolver = resolver
+
+ def decode(self, content, response):
+ raise NotImplementedError("Must be implemented by child class.")
+
+
+class TextBase64EncodePolicy(MessageEncodePolicy):
+ """Base 64 message encoding policy for text messages.
+
+ Encodes text (unicode) messages to base 64. If the input content
+ is not text, a TypeError will be raised. Input text must support UTF-8.
+ """
+
+ def encode(self, content):
+ if not isinstance(content, six.text_type):
+ raise TypeError("Message content must be text for base 64 encoding.")
+ return b64encode(content.encode('utf-8')).decode('utf-8')
+
+
+class TextBase64DecodePolicy(MessageDecodePolicy):
+ """Message decoding policy for base 64-encoded messages into text.
+
+ Decodes base64-encoded messages to text (unicode). If the input content
+ is not valid base 64, a DecodeError will be raised. Message data must
+ support UTF-8.
+ """
+
+ def decode(self, content, response):
+ try:
+ return b64decode(content.encode('utf-8')).decode('utf-8')
+ except (ValueError, TypeError) as error:
+ # ValueError for Python 3, TypeError for Python 2
+ raise DecodeError(
+ message="Message content is not valid base 64.",
+ response=response,
+ error=error)
+
+
+class BinaryBase64EncodePolicy(MessageEncodePolicy):
+ """Base 64 message encoding policy for binary messages.
+
+ Encodes binary messages to base 64. If the input content
+ is not bytes, a TypeError will be raised.
+ """
+
+ def encode(self, content):
+ if not isinstance(content, six.binary_type):
+ raise TypeError("Message content must be bytes for base 64 encoding.")
+ return b64encode(content).decode('utf-8')
+
+
+class BinaryBase64DecodePolicy(MessageDecodePolicy):
+ """Message decoding policy for base 64-encoded messages into bytes.
+
+ Decodes base64-encoded messages to bytes. If the input content
+ is not valid base 64, a DecodeError will be raised.
+ """
+
+ def decode(self, content, response):
+ try:
+ return b64decode(content.encode('utf-8'))
+ except (ValueError, TypeError) as error:
+ # ValueError for Python 3, TypeError for Python 2
+ raise DecodeError(
+ message="Message content is not valid base 64.",
+ response=response,
+ error=error)
+
+
+class NoEncodePolicy(MessageEncodePolicy):
+ """Bypass any message content encoding."""
+
+ def encode(self, content):
+ if isinstance(content, six.binary_type) and sys.version_info > (3,):
+ raise TypeError(
+ "Message content must not be bytes. Use the BinaryBase64EncodePolicy to send bytes."
+ )
+ return content
+
+
+class NoDecodePolicy(MessageDecodePolicy):
+ """Bypass any message content decoding."""
+
+ def decode(self, content, response):
+ return content
diff --git a/sdk/table/azure/azure_table/_models.py b/sdk/table/azure/azure_table/_models.py
new file mode 100644
index 000000000000..5574d7698dad
--- /dev/null
+++ b/sdk/table/azure/azure_table/_models.py
@@ -0,0 +1,330 @@
+from azure.azure_table._generated.models import QueryOptions
+from azure.azure_table._shared.response_handlers import return_context_and_deserialized, process_storage_error
+from azure.core.exceptions import HttpResponseError
+from azure.core.paging import PageIterator
+from ._generated.models import AccessPolicy as GenAccessPolicy
+from ._generated.models import Logging as GeneratedLogging
+from ._generated.models import Metrics as GeneratedMetrics
+from ._generated.models import RetentionPolicy as GeneratedRetentionPolicy
+from ._generated.models import CorsRule as GeneratedCorsRule
+
+
+class AccessPolicy(GenAccessPolicy):
+ """Access Policy class used by the set and get access policy methods.
+
+ A stored access policy can specify the start time, expiry time, and
+ permissions for the Shared Access Signatures with which it's associated.
+ Depending on how you want to control access to your resource, you can
+ specify all of these parameters within the stored access policy, and omit
+ them from the URL for the Shared Access Signature. Doing so permits you to
+ modify the associated signature's behavior at any time, as well as to revoke
+ it. Or you can specify one or more of the access policy parameters within
+ the stored access policy, and the others on the URL. Finally, you can
+ specify all of the parameters on the URL. In this case, you can use the
+ stored access policy to revoke the signature, but not to modify its behavior.
+
+ Together the Shared Access Signature and the stored access policy must
+ include all fields required to authenticate the signature. If any required
+ fields are missing, the request will fail. Likewise, if a field is specified
+ both in the Shared Access Signature URL and in the stored access policy, the
+ request will fail with status code 400 (Bad Request).
+
+ :param str permission:
+ The permissions associated with the shared access signature. The
+ user is restricted to operations allowed by the permissions.
+ Required unless an id is given referencing a stored access policy
+ which contains this field. This field must be omitted if it has been
+ specified in an associated stored access policy.
+ :param expiry:
+ The time at which the shared access signature becomes invalid.
+ Required unless an id is given referencing a stored access policy
+ which contains this field. This field must be omitted if it has
+ been specified in an associated stored access policy. Azure will always
+ convert values to UTC. If a date is passed in without timezone info, it
+ is assumed to be UTC.
+ :type expiry: ~datetime.datetime or str
+ :param start:
+ The time at which the shared access signature becomes valid. If
+ omitted, start time for this call is assumed to be the time when the
+ storage service receives the request. Azure will always convert values
+ to UTC. If a date is passed in without timezone info, it is assumed to
+ be UTC.
+ :type start: ~datetime.datetime or str
+ """
+
+ def __init__(self, permission=None, expiry=None, start=None):
+ self.start = start
+ self.expiry = expiry
+ self.permission = permission
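For instance, a policy granting read access for a fixed window might be built like this (a sketch; per the docstring, datetimes without tzinfo are treated as UTC):

    from datetime import datetime, timedelta
    start = datetime(2020, 1, 1)
    policy = AccessPolicy(permission='r', start=start, expiry=start + timedelta(days=30))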
+
+
+class TableAnalyticsLogging(GeneratedLogging):
+ """Azure Analytics Logging settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :keyword str version: Required. The version of Storage Analytics to configure.
+ :keyword bool delete: Required. Indicates whether all delete requests should be logged.
+ :keyword bool read: Required. Indicates whether all read requests should be logged.
+ :keyword bool write: Required. Indicates whether all write requests should be logged.
+    :keyword ~azure.azure_table.RetentionPolicy retention_policy: Required.
+        The retention policy for the logs.
+ """
+
+ def __init__(self, **kwargs):
+ self.version = kwargs.get('version', u'1.0')
+ self.delete = kwargs.get('delete', False)
+ self.read = kwargs.get('read', False)
+ self.write = kwargs.get('write', False)
+ self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy()
+
+ @classmethod
+ def _from_generated(cls, generated):
+ if not generated:
+ return cls()
+ return cls(
+ version=generated.version,
+ delete=generated.delete,
+ read=generated.read,
+ write=generated.write,
+            retention_policy=RetentionPolicy._from_generated(generated.retention_policy)  # pylint: disable=protected-access
+        )
+
+
+class Metrics(GeneratedMetrics):
+ """A summary of request statistics grouped by API in hour or minute aggregates.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :keyword str version: The version of Storage Analytics to configure.
+ :keyword bool enabled: Required. Indicates whether metrics are enabled for the service.
+    :keyword bool include_apis: Indicates whether metrics should generate summary
+        statistics for called API operations.
+    :keyword ~azure.azure_table.RetentionPolicy retention_policy: Required.
+        The retention policy for the metrics.
+ """
+
+ def __init__(self, **kwargs):
+ self.version = kwargs.get('version', u'1.0')
+ self.enabled = kwargs.get('enabled', False)
+ self.include_apis = kwargs.get('include_apis')
+ self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy()
+
+ @classmethod
+ def _from_generated(cls, generated):
+ if not generated:
+ return cls()
+ return cls(
+ version=generated.version,
+ enabled=generated.enabled,
+ include_apis=generated.include_apis,
+            retention_policy=RetentionPolicy._from_generated(generated.retention_policy)  # pylint: disable=protected-access
+        )
+
+
+class RetentionPolicy(GeneratedRetentionPolicy):
+ """The retention policy which determines how long the associated data should
+ persist.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param bool enabled: Required. Indicates whether a retention policy is enabled
+ for the storage service.
+ :param int days: Indicates the number of days that metrics or logging or
+ soft-deleted data should be retained. All data older than this value will
+ be deleted.
+ """
+
+ def __init__(self, enabled=False, days=None):
+ self.enabled = enabled
+ self.days = days
+ if self.enabled and (self.days is None):
+ raise ValueError("If policy is enabled, 'days' must be specified.")
+
+ @classmethod
+ def _from_generated(cls, generated):
+ if not generated:
+ return cls()
+ return cls(
+ enabled=generated.enabled,
+ days=generated.days,
+ )
+
+
+class CorsRule(GeneratedCorsRule):
+ """CORS is an HTTP feature that enables a web application running under one
+ domain to access resources in another domain. Web browsers implement a
+ security restriction known as same-origin policy that prevents a web page
+ from calling APIs in a different domain; CORS provides a secure way to
+ allow one domain (the origin domain) to call APIs in another domain.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param list(str) allowed_origins:
+ A list of origin domains that will be allowed via CORS, or "*" to allow
+        all domains. The list must contain at least one entry. Limited to 64
+ origin domains. Each allowed origin can have up to 256 characters.
+ :param list(str) allowed_methods:
+ A list of HTTP methods that are allowed to be executed by the origin.
+        The list must contain at least one entry. For Azure Storage,
+ permitted methods are DELETE, GET, HEAD, MERGE, POST, OPTIONS or PUT.
+ :keyword int max_age_in_seconds:
+ The number of seconds that the client/browser should cache a
+ pre-flight response.
+ :keyword list(str) exposed_headers:
+ Defaults to an empty list. A list of response headers to expose to CORS
+ clients. Limited to 64 defined headers and two prefixed headers. Each
+ header can be up to 256 characters.
+ :keyword list(str) allowed_headers:
+ Defaults to an empty list. A list of headers allowed to be part of
+ the cross-origin request. Limited to 64 defined headers and 2 prefixed
+ headers. Each header can be up to 256 characters.
+ """
+
+ def __init__(self, allowed_origins, allowed_methods, **kwargs):
+ self.allowed_origins = ','.join(allowed_origins)
+ self.allowed_methods = ','.join(allowed_methods)
+ self.allowed_headers = ','.join(kwargs.get('allowed_headers', []))
+ self.exposed_headers = ','.join(kwargs.get('exposed_headers', []))
+ self.max_age_in_seconds = kwargs.get('max_age_in_seconds', 0)
+
+ @classmethod
+ def _from_generated(cls, generated):
+ return cls(
+ [generated.allowed_origins],
+ [generated.allowed_methods],
+ allowed_headers=[generated.allowed_headers],
+ exposed_headers=[generated.exposed_headers],
+ max_age_in_seconds=generated.max_age_in_seconds,
+ )
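Constructing a rule directly shows how the list arguments are flattened into the comma-separated strings the service expects; values here are illustrative:

    rule = CorsRule(
        ['https://www.contoso.com'], ['GET', 'PUT'],
        allowed_headers=['x-ms-meta-data*'],
        max_age_in_seconds=500)
    rule.allowed_methods   # 'GET,PUT'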
+
+
+class TablePropertiesPaged(PageIterator):
+ """An iterable of Table properties.
+
+ :ivar str service_endpoint: The service URL.
+    :ivar str prefix: A table name prefix being used to filter the list.
+ :ivar str marker: The continuation token of the current page of results.
+ :ivar int results_per_page: The maximum number of results retrieved per API call.
+ :ivar str next_marker: The continuation token to retrieve the next page of results.
+ :ivar str location_mode: The location mode being used to list results. The available
+ options include "primary" and "secondary".
+ :param callable command: Function to retrieve the next page of items.
+    :param str prefix: Filters the results to return only tables whose names
+        begin with the specified prefix.
+    :param int results_per_page: The maximum number of table names to retrieve per
+        call.
+ :param str continuation_token: An opaque continuation token.
+ """
+
+ def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None):
+ super(TablePropertiesPaged, self).__init__(
+ self._get_next_cb,
+ self._extract_data_cb,
+ continuation_token=continuation_token or ""
+ )
+ self._command = command
+ self.prefix = prefix
+ self.service_endpoint = None
+ self.next_table_name = None
+ self._headers = None
+ self.results_per_page = results_per_page
+ self.location_mode = None
+
+ def _get_next_cb(self, continuation_token):
+ try:
+ return self._command(
+ next_table_name=continuation_token or None,
+                query_options=QueryOptions(top=self.results_per_page) if self.results_per_page else None,
+ cls=return_context_and_deserialized,
+ use_location=self.location_mode
+ )
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ def _extract_data_cb(self, get_next_return):
+ self.location_mode, self._response, self._headers = get_next_return
+ props_list = [t for t in self._response.value]
+ return self._headers['x-ms-continuation-NextTableName'] or None, props_list
+
+
+class TableSasPermissions(object):
+ """QueueSasPermissions class to be used with the
+ :func:`~azure.storage.queue.generate_queue_sas` function and for the AccessPolicies used with
+ :func:`~azure.storage.queue.QueueClient.set_queue_access_policy`.
+
+ :param bool read:
+ Read metadata and properties, including message count. Peek at messages.
+ :param bool add:
+ Add messages to the queue.
+ :param bool update:
+ Update messages in the queue. Note: Use the Process permission with
+ Update so you can first get the message you want to update.
+ :param bool process:
+ Get and delete messages from the queue.
+ """
+ def __init__(self, read=False, add=False, update=False, process=False, query=False):
+ self.read = read
+ self.add = add
+ self.update = update
+ self.process = process
+ self.query = query
+ self._str = (('r' if self.read else '') +
+ ('a' if self.add else '') +
+ ('u' if self.update else '') +
+ ('q' if self.query else '') +
+ ('p' if self.process else ''))
+
+ def __str__(self):
+ return self._str
+
+ @classmethod
+ def from_string(cls, permission):
+ """Create a QueueSasPermissions from a string.
+
+ To specify read, add, update, or process permissions you need only to
+ include the first letter of the word in the string. E.g. For read and
+ update permissions, you would provide a string "ru".
+
+ :param str permission: The string which dictates the
+ read, add, update, or process permissions.
+ :return: A QueueSasPermissions object
+ :rtype: ~azure.storage.queue.QueueSasPermissions
+ """
+ p_read = 'r' in permission
+ p_add = 'a' in permission
+ p_update = 'u' in permission
+ p_process = 'p' in permission
+ p_query = 'q' in permission
+
+        parsed = cls(p_read, p_add, p_update, p_process, p_query)
+ parsed._str = permission # pylint: disable = protected-access
+ return parsed
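For example:

    perms = TableSasPermissions.from_string('ru')
    assert perms.read and perms.update and not perms.add
    str(perms)   # 'ru'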
+
+
+def service_stats_deserialize(generated):
+ """Deserialize a ServiceStats objects into a dict.
+ """
+ return {
+ 'geo_replication': {
+ 'status': generated.geo_replication.status,
+ 'last_sync_time': generated.geo_replication.last_sync_time,
+ }
+ }
+
+
+def service_properties_deserialize(generated):
+ """Deserialize a ServiceProperties objects into a dict.
+ """
+ return {
+        'analytics_logging': TableAnalyticsLogging._from_generated(generated.logging),  # pylint: disable=protected-access
+ 'hour_metrics': Metrics._from_generated(generated.hour_metrics), # pylint: disable=protected-access
+ 'minute_metrics': Metrics._from_generated(generated.minute_metrics), # pylint: disable=protected-access
+ 'cors': [CorsRule._from_generated(cors) for cors in generated.cors], # pylint: disable=protected-access
+ }
diff --git a/sdk/table/azure/azure_table/_shared/__init__.py b/sdk/table/azure/azure_table/_shared/__init__.py
new file mode 100644
index 000000000000..160f88223820
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/__init__.py
@@ -0,0 +1,56 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import base64
+import hashlib
+import hmac
+
+try:
+ from urllib.parse import quote, unquote
+except ImportError:
+    from urllib import quote, unquote  # type: ignore
+
+import six
+
+
+def url_quote(url):
+ return quote(url)
+
+
+def url_unquote(url):
+ return unquote(url)
+
+
+def encode_base64(data):
+ if isinstance(data, six.text_type):
+ data = data.encode('utf-8')
+ encoded = base64.b64encode(data)
+ return encoded.decode('utf-8')
+
+
+def decode_base64_to_bytes(data):
+ if isinstance(data, six.text_type):
+ data = data.encode('utf-8')
+ return base64.b64decode(data)
+
+
+def decode_base64_to_text(data):
+ decoded_bytes = decode_base64_to_bytes(data)
+ return decoded_bytes.decode('utf-8')
+
+
+def sign_string(key, string_to_sign, key_is_base64=True):
+ if key_is_base64:
+ key = decode_base64_to_bytes(key)
+ else:
+ if isinstance(key, six.text_type):
+ key = key.encode('utf-8')
+ if isinstance(string_to_sign, six.text_type):
+ string_to_sign = string_to_sign.encode('utf-8')
+ signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)
+ digest = signed_hmac_sha256.digest()
+ encoded_digest = encode_base64(digest)
+ return encoded_digest
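Because storage account keys are handed around base64-encoded, `sign_string` decodes the key before computing the HMAC-SHA256 and base64-encodes the resulting digest. A self-contained check of the helpers above, using a made-up key:

    fake_key = encode_base64(b'not-a-real-account-key')
    signature = sign_string(fake_key, 'string-to-sign')   # base64 text, deterministic for given inputs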
diff --git a/sdk/table/azure/azure_table/_shared/_common_conversion.py b/sdk/table/azure/azure_table/_shared/_common_conversion.py
new file mode 100644
index 000000000000..80c92353848f
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/_common_conversion.py
@@ -0,0 +1,127 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import base64
+import hashlib
+import hmac
+import sys
+from io import (SEEK_SET)
+
+from dateutil.tz import tzutc
+
+from ._error import (
+ _ERROR_VALUE_SHOULD_BE_BYTES_OR_STREAM,
+ _ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM,
+)
+from .models import (
+ _unicode_type,
+)
+
+if sys.version_info < (3,):
+ def _str(value):
+        if isinstance(value, _unicode_type):
+ return value.encode('utf-8')
+
+ return str(value)
+else:
+ _str = str
+
+
+def _to_str(value):
+ return _str(value) if value is not None else None
+
+
+def _int_to_str(value):
+ return str(int(value)) if value is not None else None
+
+
+def _bool_to_str(value):
+ if value is None:
+ return None
+
+ if isinstance(value, bool):
+ if value:
+ return 'true'
+ else:
+ return 'false'
+
+ return str(value)
+
+
+def _to_utc_datetime(value):
+ return value.strftime('%Y-%m-%dT%H:%M:%SZ')
+
+
+def _datetime_to_utc_string(value):
+ # Azure expects the date value passed in to be UTC.
+ # Azure will always return values as UTC.
+ # If a date is passed in without timezone info, it is assumed to be UTC.
+ if value is None:
+ return None
+
+ if value.tzinfo:
+ value = value.astimezone(tzutc())
+
+ return value.strftime('%a, %d %b %Y %H:%M:%S GMT')
+
+
+def _encode_base64(data):
+ if isinstance(data, _unicode_type):
+ data = data.encode('utf-8')
+ encoded = base64.b64encode(data)
+ return encoded.decode('utf-8')
+
+
+def _decode_base64_to_bytes(data):
+ if isinstance(data, _unicode_type):
+ data = data.encode('utf-8')
+ return base64.b64decode(data)
+
+
+def _decode_base64_to_text(data):
+ decoded_bytes = _decode_base64_to_bytes(data)
+ return decoded_bytes.decode('utf-8')
+
+
+def _sign_string(key, string_to_sign, key_is_base64=True):
+ if key_is_base64:
+ key = _decode_base64_to_bytes(key)
+ else:
+ if isinstance(key, _unicode_type):
+ key = key.encode('utf-8')
+ if isinstance(string_to_sign, _unicode_type):
+ string_to_sign = string_to_sign.encode('utf-8')
+ signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)
+ digest = signed_hmac_sha256.digest()
+ encoded_digest = _encode_base64(digest)
+ return encoded_digest
+
+
+def _get_content_md5(data):
+ md5 = hashlib.md5()
+ if isinstance(data, bytes):
+ md5.update(data)
+ elif hasattr(data, 'read'):
+ pos = 0
+ try:
+ pos = data.tell()
+        except (AttributeError, IOError):
+ pass
+ for chunk in iter(lambda: data.read(4096), b""):
+ md5.update(chunk)
+ try:
+ data.seek(pos, SEEK_SET)
+ except (AttributeError, IOError):
+ raise ValueError(_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM.format('data'))
+ else:
+ raise ValueError(_ERROR_VALUE_SHOULD_BE_BYTES_OR_STREAM.format('data'))
+
+ return base64.b64encode(md5.digest()).decode('utf-8')
+
+
+def _lower(text):
+ return text.lower()
diff --git a/sdk/table/azure/azure_table/_shared/_constants.py b/sdk/table/azure/azure_table/_shared/_constants.py
new file mode 100644
index 000000000000..858875b6af28
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/_constants.py
@@ -0,0 +1,51 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import platform
+import sys
+
+__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>'
+__version__ = '1.4.2'
+
+# UserAgent string sample: 'Azure-Storage/0.37.0-0.38.0 (Python CPython 3.4.2; Windows 8)'
+# First version(0.37.0) is the common package, and the second version(0.38.0) is the service package
+USER_AGENT_STRING_PREFIX = 'Azure-Storage/{}-'.format(__version__)
+USER_AGENT_STRING_SUFFIX = '(Python {} {}; {} {})'.format(platform.python_implementation(),
+ platform.python_version(), platform.system(),
+ platform.release())
+
+# default values for common package, in case it is used directly
+DEFAULT_X_MS_VERSION = '2018-03-28'
+DEFAULT_USER_AGENT_STRING = '{}None {}'.format(USER_AGENT_STRING_PREFIX, USER_AGENT_STRING_SUFFIX)
+
+# Live ServiceClient URLs
+SERVICE_HOST_BASE = 'core.windows.net'
+DEFAULT_PROTOCOL = 'https'
+
+# Development ServiceClient URLs
+DEV_BLOB_HOST = '127.0.0.1:10000'
+DEV_QUEUE_HOST = '127.0.0.1:10001'
+
+# Default credentials for Development Storage Service
+DEV_ACCOUNT_NAME = 'devstoreaccount1'
+DEV_ACCOUNT_SECONDARY_NAME = 'devstoreaccount1-secondary'
+DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='
+
+# Socket timeout in seconds
+DEFAULT_SOCKET_TIMEOUT = 20
+
+# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned)
+# The socket timeout is now the maximum total duration to send all data.
+if sys.version_info >= (3, 5):
+ # the timeout to connect is 20 seconds, and the read timeout is 2000 seconds
+ # the 2000 seconds was calculated with: 100MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed)
+ DEFAULT_SOCKET_TIMEOUT = (20, 2000)
+
+# Encryption constants
+_ENCRYPTION_PROTOCOL_V1 = '1.0'
+
+_AUTHORIZATION_HEADER_NAME = 'Authorization'
+_COPY_SOURCE_HEADER_NAME = 'x-ms-copy-source'
+_REDACTED_VALUE = 'REDACTED'
diff --git a/sdk/table/azure/azure_table/_shared/_error.py b/sdk/table/azure/azure_table/_shared/_error.py
new file mode 100644
index 000000000000..49e0fffeb68e
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/_error.py
@@ -0,0 +1,209 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+from sys import version_info
+
+import six
+
+from azure.common import (
+    AzureHttpError,
+    AzureConflictHttpError,
+    AzureMissingResourceHttpError,
+    AzureException,
+)
+from ._constants import (
+    _ENCRYPTION_PROTOCOL_V1,
+)
+
+if version_info < (3,):
+    def _str(value):
+        if isinstance(value, six.text_type):
+            return value.encode('utf-8')
+
+        return str(value)
+else:
+    _str = str
+
+
+def _to_str(value):
+    return _str(value) if value is not None else None
+
+_ERROR_CONFLICT = 'Conflict ({0})'
+_ERROR_NOT_FOUND = 'Not found ({0})'
+_ERROR_UNKNOWN = 'Unknown error ({0})'
+_ERROR_STORAGE_MISSING_INFO = \
+ 'You need to provide an account name and either an account_key or sas_token when creating a storage service.'
+_ERROR_EMULATOR_DOES_NOT_SUPPORT_FILES = \
+ 'The emulator does not support the file service.'
+_ERROR_ACCESS_POLICY = \
+ 'share_access_policy must be either SignedIdentifier or AccessPolicy ' + \
+ 'instance'
+_ERROR_PARALLEL_NOT_SEEKABLE = 'Parallel operations require a seekable stream.'
+_ERROR_VALUE_SHOULD_BE_BYTES = '{0} should be of type bytes.'
+_ERROR_VALUE_SHOULD_BE_BYTES_OR_STREAM = '{0} should be of type bytes or a readable file-like/io.IOBase stream object.'
+_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = '{0} should be a seekable file-like/io.IOBase type stream object.'
+_ERROR_VALUE_SHOULD_BE_STREAM = '{0} should be a file-like/io.IOBase type stream object with a read method.'
+_ERROR_VALUE_NONE = '{0} should not be None.'
+_ERROR_VALUE_NONE_OR_EMPTY = '{0} should not be None or empty.'
+_ERROR_VALUE_NEGATIVE = '{0} should not be negative.'
+_ERROR_START_END_NEEDED_FOR_MD5 = \
+ 'Both end_range and start_range need to be specified ' + \
+ 'for getting content MD5.'
+_ERROR_RANGE_TOO_LARGE_FOR_MD5 = \
+ 'Getting content MD5 for a range greater than 4MB ' + \
+ 'is not supported.'
+_ERROR_MD5_MISMATCH = \
+ 'MD5 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'
+_ERROR_TOO_MANY_ACCESS_POLICIES = \
+ 'Too many access policies provided. The server does not support setting more than 5 access policies on a single resource.'
+_ERROR_OBJECT_INVALID = \
+ '{0} does not define a complete interface. Value of {1} is either missing or invalid.'
+_ERROR_UNSUPPORTED_ENCRYPTION_VERSION = \
+ 'Encryption version is not supported.'
+_ERROR_DECRYPTION_FAILURE = \
+ 'Decryption failed'
+_ERROR_ENCRYPTION_REQUIRED = \
+ 'Encryption required but no key was provided.'
+_ERROR_DECRYPTION_REQUIRED = \
+ 'Decryption required but neither key nor resolver was provided.' + \
+    ' If you do not want to decrypt, please do not set the require encryption flag.'
+_ERROR_INVALID_KID = \
+ 'Provided or resolved key-encryption-key does not match the id of key used to encrypt.'
+_ERROR_UNSUPPORTED_ENCRYPTION_ALGORITHM = \
+ 'Specified encryption algorithm is not supported.'
+_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = 'The require_encryption flag is set, but encryption is not supported' + \
+ ' for this method.'
+_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM = 'Unknown key wrap algorithm.'
+_ERROR_DATA_NOT_ENCRYPTED = 'Encryption required, but received data does not contain appropriate metadata. ' + \
+                            'Data was either not encrypted or metadata has been lost.'
+
+
+def _dont_fail_on_exist(error):
+ ''' don't throw exception if the resource exists.
+ This is called by create_* APIs with fail_on_exist=False'''
+ if isinstance(error, AzureConflictHttpError):
+ return False
+ else:
+ raise error
+
+
+def _dont_fail_not_exist(error):
+ ''' don't throw exception if the resource doesn't exist.
+ This is called by create_* APIs with fail_on_exist=False'''
+ if isinstance(error, AzureMissingResourceHttpError):
+ return False
+ else:
+ raise error
+
+
+def _http_error_handler(http_error):
+ ''' Simple error handler for azure.'''
+ message = str(http_error)
+ error_code = None
+
+ if 'x-ms-error-code' in http_error.respheader:
+ error_code = http_error.respheader['x-ms-error-code']
+ message += ' ErrorCode: ' + error_code
+
+ if http_error.respbody is not None:
+ message += '\n' + http_error.respbody.decode('utf-8-sig')
+
+ ex = AzureHttpError(message, http_error.status)
+ ex.error_code = error_code
+
+ raise ex
+
+
+def _validate_type_bytes(param_name, param):
+ if not isinstance(param, bytes):
+ raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name))
+
+
+def _validate_type_bytes_or_stream(param_name, param):
+ if not (isinstance(param, bytes) or hasattr(param, 'read')):
+ raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES_OR_STREAM.format(param_name))
+
+
+def _validate_not_none(param_name, param):
+ if param is None:
+ raise ValueError(_ERROR_VALUE_NONE.format(param_name))
+
+
+def _validate_content_match(server_md5, computed_md5):
+ if server_md5 != computed_md5:
+ raise AzureException(_ERROR_MD5_MISMATCH.format(server_md5, computed_md5))
+
+
+def _validate_access_policies(identifiers):
+ if identifiers and len(identifiers) > 5:
+ raise AzureException(_ERROR_TOO_MANY_ACCESS_POLICIES)
+
+
+def _validate_key_encryption_key_wrap(kek):
+ # Note that None is not callable and so will fail the second clause of each check.
+ if not hasattr(kek, 'wrap_key') or not callable(kek.wrap_key):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'wrap_key'))
+ if not hasattr(kek, 'get_kid') or not callable(kek.get_kid):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid'))
+ if not hasattr(kek, 'get_key_wrap_algorithm') or not callable(kek.get_key_wrap_algorithm):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm'))
+
+
+def _validate_key_encryption_key_unwrap(kek):
+ if not hasattr(kek, 'get_kid') or not callable(kek.get_kid):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid'))
+ if not hasattr(kek, 'unwrap_key') or not callable(kek.unwrap_key):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'unwrap_key'))
+
+
+def _validate_encryption_required(require_encryption, kek):
+ if require_encryption and (kek is None):
+ raise ValueError(_ERROR_ENCRYPTION_REQUIRED)
+
+
+def _validate_decryption_required(require_encryption, kek, resolver):
+ if (require_encryption and (kek is None) and
+ (resolver is None)):
+ raise ValueError(_ERROR_DECRYPTION_REQUIRED)
+
+
+def _validate_encryption_protocol_version(encryption_protocol):
+ if not (_ENCRYPTION_PROTOCOL_V1 == encryption_protocol):
+ raise ValueError(_ERROR_UNSUPPORTED_ENCRYPTION_VERSION)
+
+
+def _validate_kek_id(kid, resolved_id):
+ if not (kid == resolved_id):
+ raise ValueError(_ERROR_INVALID_KID)
+
+
+def _validate_encryption_unsupported(require_encryption, key_encryption_key):
+ if require_encryption or (key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+
+
+# wraps a given exception with the desired exception type
+def _wrap_exception(ex, desired_type):
+ msg = ""
+ if len(ex.args) > 0:
+ msg = ex.args[0]
+ if version_info >= (3,):
+ # Automatic chaining in Python 3 means we keep the trace
+ return desired_type(msg)
+ else:
+ # There isn't a good solution in 2 for keeping the stack trace
+ # in general, or that will not result in an error in 3
+ # However, we can keep the previous error type and message
+ # TODO: In the future we will log the trace
+ return desired_type('{}: {}'.format(ex.__class__.__name__, msg))
+
+
+class AzureSigningError(AzureException):
+ """
+ Represents a fatal error when attempting to sign a request.
+ In general, the cause of this exception is user error. For example, the given account key is not valid.
+ Please visit https://docs.microsoft.com/en-us/azure/storage/common/storage-create-storage-account for more info.
+ """
+ pass
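A quick sketch of how _wrap_exception is intended to be used; the caught KeyError and its message are hypothetical, not part of this module:

    try:
        raise KeyError('account key not found')
    except Exception as ex:
        # On Python 3 the message is preserved, and re-raising inside the
        # except block keeps the original traceback via implicit chaining.
        raise _wrap_exception(ex, AzureSigningError)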
diff --git a/sdk/table/azure/azure_table/_shared/authentication.py b/sdk/table/azure/azure_table/_shared/authentication.py
new file mode 100644
index 000000000000..c9ddb438ea62
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/authentication.py
@@ -0,0 +1,149 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import logging
+import sys
+
+try:
+ from urllib.parse import urlparse, unquote, parse_qsl
+except ImportError:
+ from urlparse import urlparse # type: ignore
+ from urllib2 import unquote # type: ignore
+
+try:
+ from yarl import URL
+except ImportError:
+ pass
+
+try:
+ from azure.core.pipeline.transport import AioHttpTransport
+except ImportError:
+ AioHttpTransport = None
+
+from azure.core.exceptions import ClientAuthenticationError
+from azure.core.pipeline.policies import SansIOHTTPPolicy
+
+from ._common_conversion import (
+ _sign_string,
+)
+
+from azure.azure_table._shared._constants import (
+ DEV_ACCOUNT_NAME,
+ DEV_ACCOUNT_SECONDARY_NAME
+)
+
+
+logger = logging.getLogger(__name__)
+
+
+# wraps a given exception with the desired exception type
+def _wrap_exception(ex, desired_type):
+ msg = ""
+ if ex.args:
+ msg = ex.args[0]
+ if sys.version_info >= (3,):
+ # Automatic chaining in Python 3 means we keep the trace
+ return desired_type(msg)
+ # There isn't a good solution in 2 for keeping the stack trace
+ # in general, or that will not result in an error in 3
+ # However, we can keep the previous error type and message
+ # TODO: In the future we will log the trace
+ return desired_type('{}: {}'.format(ex.__class__.__name__, msg))
+
+
+class AzureSigningError(ClientAuthenticationError):
+ """
+ Represents a fatal error when attempting to sign a request.
+ In general, the cause of this exception is user error. For example, the given account key is not valid.
+ Please visit https://docs.microsoft.com/en-us/azure/storage/common/storage-create-storage-account for more info.
+ """
+
+
+# pylint: disable=no-self-use
+class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
+
+ def __init__(self, account_name, account_key, is_emulated=False):
+ self.account_name = account_name
+ self.account_key = account_key
+ self.is_emulated = is_emulated
+
+ def _get_headers(self, request, headers_to_sign):
+ headers = dict((name.lower(), value) for name, value in request.headers.items() if value)
+ if 'content-length' in headers and headers['content-length'] == '0':
+ del headers['content-length']
+ return '\n'.join(headers.get(x, '') for x in headers_to_sign) + '\n'
+
+ def _get_verb(self, request):
+ return request.method + '\n'
+
+ def _get_canonicalized_resource(self, request):
+ uri_path = urlparse(request.url).path
+
+ # for emulator, use the DEV_ACCOUNT_NAME instead of DEV_ACCOUNT_SECONDARY_NAME
+ # as this is how the emulator works
+ if self.is_emulated and uri_path.find(DEV_ACCOUNT_SECONDARY_NAME) == 1:
+ # only replace the first instance
+ uri_path = uri_path.replace(DEV_ACCOUNT_SECONDARY_NAME, DEV_ACCOUNT_NAME, 1)
+
+ return '/' + self.account_name + uri_path
+
+ def _get_canonicalized_headers(self, request):
+ string_to_sign = ''
+ x_ms_headers = []
+ for name, value in request.headers.items():
+ if name.startswith('x-ms-'):
+ x_ms_headers.append((name.lower(), value))
+ x_ms_headers.sort()
+ for name, value in x_ms_headers:
+ if value is not None:
+ string_to_sign += ''.join([name, ':', value, '\n'])
+ return string_to_sign
+
+ def _add_authorization_header(self, request, string_to_sign):
+ try:
+ signature = _sign_string(self.account_key, string_to_sign)
+ auth_string = 'SharedKey ' + self.account_name + ':' + signature
+ request.headers['Authorization'] = auth_string
+ except Exception as ex:
+ # Wrap any error that occurred as signing error
+ # Doing so will clarify/locate the source of problem
+ raise _wrap_exception(ex, AzureSigningError)
+
+ def on_request(self, request): # type: (PipelineRequest) -> Union[None, Awaitable[None]]
+ self.sign_request(request.http_request)
+
+ def sign_request(self, request):
+ string_to_sign = \
+ self._get_verb(request) + \
+ self._get_headers(
+ request,
+ ['content-md5', 'content-type', 'x-ms-date'],
+ ) + \
+ self._get_canonicalized_resource(request) + \
+ self._get_canonicalized_resource_query(request)
+ self._add_authorization_header(request, string_to_sign)
+ logger.debug("String_to_sign=%s", string_to_sign)
+
+ def _get_canonicalized_resource_query(self, request):
+ for name, value in request.query.items():
+ if name == 'comp':
+ return '?comp=' + value
+ return ''
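For orientation, a minimal sketch of the string-to-sign that sign_request assembles for a hypothetical GET against a table endpoint; the account name, date, and path are made up:

    # Pieces produced by _get_verb, _get_headers, _get_canonicalized_resource
    # and _get_canonicalized_resource_query for the hypothetical request.
    verb = 'GET\n'
    headers = '\napplication/json\nWed, 01 Jul 2020 00:00:00 GMT\n'  # content-md5, content-type, x-ms-date
    resource = '/myaccount/mytable'  # '/' + account name + URL path
    query = ''                       # '?comp=<value>' only when a comp parameter is present

    string_to_sign = verb + headers + resource + query
    # -> 'GET\n\napplication/json\nWed, 01 Jul 2020 00:00:00 GMT\n/myaccount/mytable'

The result is signed with the account key via _sign_string (HMAC-SHA256 in the storage SDKs) and sent as 'SharedKey <account>:<signature>' in the Authorization header.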
diff --git a/sdk/table/azure/azure_table/_shared/base_client.py b/sdk/table/azure/azure_table/_shared/base_client.py
new file mode 100644
index 000000000000..4ad388c685bd
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/base_client.py
@@ -0,0 +1,429 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from typing import ( # pylint: disable=unused-import
+ Union,
+ Optional,
+ Any,
+ Iterable,
+ Dict,
+ List,
+ Type,
+ Tuple,
+ TYPE_CHECKING,
+)
+import logging
+
+try:
+ from urllib.parse import parse_qs, quote
+except ImportError:
+ from urlparse import parse_qs # type: ignore
+ from urllib2 import quote # type: ignore
+
+import six
+
+from azure.core.configuration import Configuration
+from azure.core.exceptions import HttpResponseError
+from azure.core.pipeline import Pipeline
+from azure.core.pipeline.transport import RequestsTransport, HttpTransport
+from azure.core.pipeline.policies import (
+ RedirectPolicy,
+ ContentDecodePolicy,
+ BearerTokenCredentialPolicy,
+ ProxyPolicy,
+ DistributedTracingPolicy,
+ HttpLoggingPolicy,
+ UserAgentPolicy,
+)
+
+from .constants import STORAGE_OAUTH_SCOPE, SERVICE_HOST_BASE, CONNECTION_TIMEOUT, READ_TIMEOUT
+from .models import LocationMode
+from .authentication import SharedKeyCredentialPolicy
+from .shared_access_signature import QueryStringConstants
+from .policies import (
+ StorageHeadersPolicy,
+ StorageContentValidation,
+ StorageRequestHook,
+ StorageResponseHook,
+ StorageLoggingPolicy,
+ StorageHosts,
+ ExponentialRetry,
+)
+from .._version import VERSION
+# from .._generated.models import StorageErrorException
+from .response_handlers import process_storage_error, PartialBatchErrorException
+
+
+_LOGGER = logging.getLogger(__name__)
+_SERVICE_PARAMS = {
+ "blob": {"primary": "BlobEndpoint", "secondary": "BlobSecondaryEndpoint"},
+ "queue": {"primary": "QueueEndpoint", "secondary": "QueueSecondaryEndpoint"},
+ "file": {"primary": "FileEndpoint", "secondary": "FileSecondaryEndpoint"},
+ "table": {"primary": "TableEndpoint", "secondary": "TableSecondaryEndpoint"},
+ "dfs": {"primary": "BlobEndpoint", "secondary": "BlobEndpoint"},
+}
+
+
+class StorageAccountHostsMixin(object): # pylint: disable=too-many-instance-attributes
+ def __init__(
+ self,
+ parsed_url, # type: Any
+ service, # type: str
+ credential=None, # type: Optional[Any]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ self._location_mode = kwargs.get("_location_mode", LocationMode.PRIMARY)
+ self._hosts = kwargs.get("_hosts")
+ self.scheme = parsed_url.scheme
+
+ if service not in ["blob", "queue", "file-share", "dfs", "table"]:
+ raise ValueError("Invalid service: {}".format(service))
+ service_name = service.split('-')[0]
+ account = parsed_url.netloc.split(".{}.core.".format(service_name))
+ self.account_name = account[0] if len(account) > 1 else None
+ secondary_hostname = None
+
+ self.credential = format_shared_key_credential(account, credential)
+ if self.scheme.lower() != "https" and hasattr(self.credential, "get_token"):
+ raise ValueError("Token credential is only supported with HTTPS.")
+ if hasattr(self.credential, "account_name"):
+ self.account_name = self.credential.account_name
+ secondary_hostname = "{}-secondary.{}.{}".format(
+ self.credential.account_name, service_name, SERVICE_HOST_BASE)
+
+ if not self._hosts:
+ if len(account) > 1:
+ secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary")
+ if kwargs.get("secondary_hostname"):
+ secondary_hostname = kwargs["secondary_hostname"]
+ primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip('/')
+ self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname}
+
+ self.require_encryption = kwargs.get("require_encryption", False)
+ self.key_encryption_key = kwargs.get("key_encryption_key")
+ self.key_resolver_function = kwargs.get("key_resolver_function")
+ self._config, self._pipeline = self._create_pipeline(self.credential, storage_sdk=service, **kwargs)
+
+ def __enter__(self):
+ self._client.__enter__()
+ return self
+
+ def __exit__(self, *args):
+ self._client.__exit__(*args)
+
+ def close(self):
+ """ This method is to close the sockets opened by the client.
+ It need not be used when using with a context manager.
+ """
+ self._client.close()
+
+ @property
+ def url(self):
+ """The full endpoint URL to this entity, including SAS token if used.
+
+ This could be either the primary endpoint,
+ or the secondary endpoint depending on the current :func:`location_mode`.
+ """
+ return self._format_url(self._hosts[self._location_mode])
+
+ @property
+ def primary_endpoint(self):
+ """The full primary endpoint URL.
+
+ :type: str
+ """
+ return self._format_url(self._hosts[LocationMode.PRIMARY])
+
+ @property
+ def primary_hostname(self):
+ """The hostname of the primary endpoint.
+
+ :type: str
+ """
+ return self._hosts[LocationMode.PRIMARY]
+
+ @property
+ def secondary_endpoint(self):
+ """The full secondary endpoint URL if configured.
+
+ If not available a ValueError will be raised. To explicitly specify a secondary hostname, use the optional
+ `secondary_hostname` keyword argument on instantiation.
+
+ :type: str
+ :raise ValueError:
+ """
+ if not self._hosts[LocationMode.SECONDARY]:
+ raise ValueError("No secondary host configured.")
+ return self._format_url(self._hosts[LocationMode.SECONDARY])
+
+ @property
+ def secondary_hostname(self):
+ """The hostname of the secondary endpoint.
+
+ If not available this will be None. To explicitly specify a secondary hostname, use the optional
+ `secondary_hostname` keyword argument on instantiation.
+
+ :type: str or None
+ """
+ return self._hosts[LocationMode.SECONDARY]
+
+ @property
+ def location_mode(self):
+ """The location mode that the client is currently using.
+
+ By default this will be "primary". Options include "primary" and "secondary".
+
+ :type: str
+ """
+
+ return self._location_mode
+
+ @location_mode.setter
+ def location_mode(self, value):
+ if self._hosts.get(value):
+ self._location_mode = value
+ self._client._config.url = self.url # pylint: disable=protected-access
+ else:
+ raise ValueError("No host URL for location mode: {}".format(value))
+
+ @property
+ def api_version(self):
+ """The version of the Storage API used for requests.
+
+ :type: str
+ """
+ return self._client._config.version # pylint: disable=protected-access
+
+ def _format_query_string(self, sas_token, credential, snapshot=None, share_snapshot=None):
+ query_str = "?"
+ if snapshot:
+ query_str += "snapshot={}&".format(self.snapshot)
+ if share_snapshot:
+ query_str += "sharesnapshot={}&".format(self.snapshot)
+ if sas_token and not credential:
+ query_str += sas_token
+ elif is_credential_sastoken(credential):
+ query_str += credential.lstrip("?")
+ credential = None
+ return query_str.rstrip("?&"), credential
+
+ def _create_pipeline(self, credential, **kwargs):
+ # type: (Any, **Any) -> Tuple[Configuration, Pipeline]
+ self._credential_policy = None
+ if hasattr(credential, "get_token"):
+ self._credential_policy = BearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE)
+ elif isinstance(credential, SharedKeyCredentialPolicy):
+ self._credential_policy = credential
+ elif credential is not None:
+ raise TypeError("Unsupported credential: {}".format(credential))
+
+ config = kwargs.get("_configuration") or create_configuration(**kwargs)
+ if kwargs.get("_pipeline"):
+ return config, kwargs["_pipeline"]
+ config.transport = kwargs.get("transport") # type: ignore
+ kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT)
+ kwargs.setdefault("read_timeout", READ_TIMEOUT)
+ if not config.transport:
+ config.transport = RequestsTransport(**kwargs)
+ policies = [
+ config.headers_policy,
+ config.proxy_policy,
+ config.user_agent_policy,
+ StorageRequestHook(**kwargs),
+ self._credential_policy,
+ ContentDecodePolicy(response_encoding="utf-8"),
+ RedirectPolicy(**kwargs),
+ StorageHosts(hosts=self._hosts, **kwargs),
+ config.retry_policy,
+ config.logging_policy,
+ StorageResponseHook(**kwargs),
+ DistributedTracingPolicy(**kwargs),
+ HttpLoggingPolicy(**kwargs)
+ ]
+ return config, Pipeline(config.transport, policies=policies)
+
+ def _batch_send(
+ self, *reqs, # type: HttpRequest
+ **kwargs
+ ):
+ """Given a series of request, do a Storage batch call.
+ """
+ # Pop it here, so requests doesn't feel bad about additional kwarg
+ raise_on_any_failure = kwargs.pop("raise_on_any_failure", True)
+ request = self._client._client.post( # pylint: disable=protected-access
+ url='https://{}/?comp=batch'.format(self.primary_hostname),
+ headers={
+ 'x-ms-version': self.api_version
+ }
+ )
+
+ request.set_multipart_mixed(
+ *reqs,
+ policies=[
+ StorageHeadersPolicy(),
+ self._credential_policy
+ ],
+ enforce_https=False
+ )
+
+ pipeline_response = self._pipeline.run(
+ request, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ try:
+ if response.status_code not in [202]:
+ raise HttpResponseError(response=response)
+ parts = response.parts()
+ if raise_on_any_failure:
+ parts = list(response.parts())
+ if any(p for p in parts if not 200 <= p.status_code < 300):
+ error = PartialBatchErrorException(
+ message="There is a partial failure in the batch operation.",
+ response=response, parts=parts
+ )
+ raise error
+ return iter(parts)
+ return parts
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+
+class TransportWrapper(HttpTransport):
+ """Wrapper class that ensures that an inner client created
+ by a `get_client` method does not close the outer transport for the parent
+ when used in a context manager.
+ """
+ def __init__(self, transport):
+ self._transport = transport
+
+ def send(self, request, **kwargs):
+ return self._transport.send(request, **kwargs)
+
+ def open(self):
+ pass
+
+ def close(self):
+ pass
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, *args): # pylint: disable=arguments-differ
+ pass
+
+
+def format_shared_key_credential(account, credential):
+ if isinstance(credential, six.string_types):
+ if len(account) < 2:
+ raise ValueError("Unable to determine account name for shared key credential.")
+ credential = {"account_name": account[0], "account_key": credential}
+ if isinstance(credential, dict):
+ if "account_name" not in credential:
+ raise ValueError("Shared key credential missing 'account_name")
+ if "account_key" not in credential:
+ raise ValueError("Shared key credential missing 'account_key")
+ return SharedKeyCredentialPolicy(**credential)
+ return credential
+
+
+def parse_connection_str(conn_str, credential, service):
+ conn_str = conn_str.rstrip(";")
+ conn_settings = [s.split("=", 1) for s in conn_str.split(";")]
+ if any(len(tup) != 2 for tup in conn_settings):
+ raise ValueError("Connection string is either blank or malformed.")
+ conn_settings = dict(conn_settings)
+ endpoints = _SERVICE_PARAMS[service]
+ primary = None
+ secondary = None
+ if not credential:
+ try:
+ credential = {"account_name": conn_settings["AccountName"], "account_key": conn_settings["AccountKey"]}
+ except KeyError:
+ credential = conn_settings.get("SharedAccessSignature")
+ if endpoints["primary"] in conn_settings:
+ primary = conn_settings[endpoints["primary"]]
+ if endpoints["secondary"] in conn_settings:
+ secondary = conn_settings[endpoints["secondary"]]
+ else:
+ if endpoints["secondary"] in conn_settings:
+ raise ValueError("Connection string specifies only secondary endpoint.")
+ try:
+ primary = "{}://{}.{}.{}".format(
+ conn_settings["DefaultEndpointsProtocol"],
+ conn_settings["AccountName"],
+ service,
+ conn_settings["EndpointSuffix"],
+ )
+ secondary = "{}-secondary.{}.{}".format(
+ conn_settings["AccountName"], service, conn_settings["EndpointSuffix"]
+ )
+ except KeyError:
+ pass
+
+ if not primary:
+ try:
+ primary = "https://{}.{}.{}".format(
+ conn_settings["AccountName"], service, conn_settings.get("EndpointSuffix", SERVICE_HOST_BASE)
+ )
+ except KeyError:
+ raise ValueError("Connection string missing required connection details.")
+ return primary, secondary, credential
+
+
+def create_configuration(**kwargs):
+ # type: (**Any) -> Configuration
+ config = Configuration(**kwargs)
+ config.headers_policy = StorageHeadersPolicy(**kwargs)
+ config.user_agent_policy = UserAgentPolicy(
+ sdk_moniker="storage-{}/{}".format(kwargs.pop('storage_sdk'), VERSION), **kwargs)
+ config.retry_policy = kwargs.get("retry_policy") or ExponentialRetry(**kwargs)
+ config.logging_policy = StorageLoggingPolicy(**kwargs)
+ config.proxy_policy = ProxyPolicy(**kwargs)
+
+    # Storage settings (the values below are carried over from the storage SDKs and can be ignored for tables)
+ config.max_single_put_size = kwargs.get("max_single_put_size", 64 * 1024 * 1024)
+ config.copy_polling_interval = 15
+
+ # Block blob uploads
+ config.max_block_size = kwargs.get("max_block_size", 4 * 1024 * 1024)
+ config.min_large_block_upload_threshold = kwargs.get("min_large_block_upload_threshold", 4 * 1024 * 1024 + 1)
+ config.use_byte_buffer = kwargs.get("use_byte_buffer", False)
+
+ # Page blob uploads
+ config.max_page_size = kwargs.get("max_page_size", 4 * 1024 * 1024)
+
+ # Blob downloads
+ config.max_single_get_size = kwargs.get("max_single_get_size", 32 * 1024 * 1024)
+ config.max_chunk_get_size = kwargs.get("max_chunk_get_size", 4 * 1024 * 1024)
+
+ # File uploads
+ config.max_range_size = kwargs.get("max_range_size", 4 * 1024 * 1024)
+ return config
+
+
+def parse_query(query_str):
+ sas_values = QueryStringConstants.to_list()
+ parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()}
+ sas_params = ["{}={}".format(k, quote(v, safe='')) for k, v in parsed_query.items() if k in sas_values]
+ sas_token = None
+ if sas_params:
+ sas_token = "&".join(sas_params)
+
+ snapshot = parsed_query.get("snapshot") or parsed_query.get("sharesnapshot")
+ return snapshot, sas_token
+
+
+def is_credential_sastoken(credential):
+ if not credential or not isinstance(credential, six.string_types):
+ return False
+
+ sas_values = QueryStringConstants.to_list()
+ parsed_query = parse_qs(credential.lstrip("?"))
+ if parsed_query and all([k in sas_values for k in parsed_query.keys()]):
+ return True
+ return False
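A hypothetical round trip through parse_connection_str; all values are placeholders:

    conn_str = (
        'DefaultEndpointsProtocol=https;AccountName=myaccount;'
        'AccountKey=dGVzdA==;EndpointSuffix=core.windows.net'
    )
    primary, secondary, credential = parse_connection_str(conn_str, None, 'table')
    # primary    -> 'https://myaccount.table.core.windows.net'
    # secondary  -> 'myaccount-secondary.table.core.windows.net'
    # credential -> {'account_name': 'myaccount', 'account_key': 'dGVzdA=='}

format_shared_key_credential then turns that dict into a SharedKeyCredentialPolicy.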
diff --git a/sdk/table/azure/azure_table/_shared/base_client_async.py b/sdk/table/azure/azure_table/_shared/base_client_async.py
new file mode 100644
index 000000000000..5028051511ad
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/base_client_async.py
@@ -0,0 +1,176 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from typing import ( # pylint: disable=unused-import
+ Union, Optional, Any, Iterable, Dict, List, Type, Tuple,
+ TYPE_CHECKING
+)
+import logging
+from azure.core.pipeline import AsyncPipeline
+from azure.core.async_paging import AsyncList
+from azure.core.exceptions import HttpResponseError
+from azure.core.pipeline.policies import (
+ ContentDecodePolicy,
+ AsyncBearerTokenCredentialPolicy,
+ AsyncRedirectPolicy,
+ DistributedTracingPolicy,
+ HttpLoggingPolicy,
+)
+from azure.core.pipeline.transport import AsyncHttpTransport
+
+from .constants import STORAGE_OAUTH_SCOPE, CONNECTION_TIMEOUT, READ_TIMEOUT
+from .authentication import SharedKeyCredentialPolicy
+from .base_client import create_configuration
+from .policies import (
+ StorageContentValidation,
+ StorageRequestHook,
+ StorageHosts,
+ StorageHeadersPolicy,
+)
+from .policies_async import AsyncStorageResponseHook
+
+from .._generated.models import StorageErrorException
+from .response_handlers import process_storage_error, PartialBatchErrorException
+
+if TYPE_CHECKING:
+ from azure.core.pipeline import Pipeline
+ from azure.core.pipeline.transport import HttpRequest
+ from azure.core.configuration import Configuration
+_LOGGER = logging.getLogger(__name__)
+
+
+class AsyncStorageAccountHostsMixin(object):
+
+ def __enter__(self):
+ raise TypeError("Async client only supports 'async with'.")
+
+ def __exit__(self, *args):
+ pass
+
+ async def __aenter__(self):
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *args):
+ await self._client.__aexit__(*args)
+
+ async def close(self):
+ """ This method is to close the sockets opened by the client.
+ It need not be used when using with a context manager.
+ """
+ await self._client.close()
+
+ def _create_pipeline(self, credential, **kwargs):
+ # type: (Any, **Any) -> Tuple[Configuration, Pipeline]
+ self._credential_policy = None
+ if hasattr(credential, 'get_token'):
+ self._credential_policy = AsyncBearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE)
+ elif isinstance(credential, SharedKeyCredentialPolicy):
+ self._credential_policy = credential
+ elif credential is not None:
+ raise TypeError("Unsupported credential: {}".format(credential))
+ config = kwargs.get('_configuration') or create_configuration(**kwargs)
+ if kwargs.get('_pipeline'):
+ return config, kwargs['_pipeline']
+ config.transport = kwargs.get('transport') # type: ignore
+ kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT)
+ kwargs.setdefault("read_timeout", READ_TIMEOUT)
+ if not config.transport:
+ try:
+ from azure.core.pipeline.transport import AioHttpTransport
+ except ImportError:
+ raise ImportError("Unable to create async transport. Please check aiohttp is installed.")
+ config.transport = AioHttpTransport(**kwargs)
+ policies = [
+ config.headers_policy,
+ config.proxy_policy,
+ config.user_agent_policy,
+ StorageContentValidation(),
+ StorageRequestHook(**kwargs),
+ self._credential_policy,
+ ContentDecodePolicy(response_encoding="utf-8"),
+ AsyncRedirectPolicy(**kwargs),
+ StorageHosts(hosts=self._hosts, **kwargs), # type: ignore
+ config.retry_policy,
+ config.logging_policy,
+ AsyncStorageResponseHook(**kwargs),
+ DistributedTracingPolicy(**kwargs),
+ HttpLoggingPolicy(**kwargs),
+ ]
+ return config, AsyncPipeline(config.transport, policies=policies)
+
+ async def _batch_send(
+ self, *reqs: 'HttpRequest',
+ **kwargs
+ ):
+ """Given a series of request, do a Storage batch call.
+ """
+ # Pop it here, so requests doesn't feel bad about additional kwarg
+ raise_on_any_failure = kwargs.pop("raise_on_any_failure", True)
+ request = self._client._client.post( # pylint: disable=protected-access
+ url='https://{}/?comp=batch'.format(self.primary_hostname),
+ headers={
+ 'x-ms-version': self.api_version
+ }
+ )
+
+ request.set_multipart_mixed(
+ *reqs,
+ policies=[
+ StorageHeadersPolicy(),
+ self._credential_policy
+ ],
+ enforce_https=False
+ )
+
+ pipeline_response = await self._pipeline.run(
+ request, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ try:
+ if response.status_code not in [202]:
+ raise HttpResponseError(response=response)
+ parts = response.parts() # Return an AsyncIterator
+ if raise_on_any_failure:
+ parts_list = []
+ async for part in parts:
+ parts_list.append(part)
+ if any(p for p in parts_list if not 200 <= p.status_code < 300):
+ error = PartialBatchErrorException(
+ message="There is a partial failure in the batch operation.",
+ response=response, parts=parts_list
+ )
+ raise error
+ return AsyncList(parts_list)
+ return parts
+ except StorageErrorException as error:
+ process_storage_error(error)
+
+
+class AsyncTransportWrapper(AsyncHttpTransport):
+ """Wrapper class that ensures that an inner client created
+ by a `get_client` method does not close the outer transport for the parent
+ when used in a context manager.
+ """
+ def __init__(self, async_transport):
+ self._transport = async_transport
+
+ async def send(self, request, **kwargs):
+ return await self._transport.send(request, **kwargs)
+
+ async def open(self):
+ pass
+
+ async def close(self):
+ pass
+
+ async def __aenter__(self):
+ pass
+
+ async def __aexit__(self, *args): # pylint: disable=arguments-differ
+ pass
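The no-op open/close/__aexit__ methods are the point of these wrappers: a child client handed a wrapped transport can enter and exit its own context without shutting down the transport it shares with its parent. A sketch under that assumption; parent_transport and make_child_client are illustrative names, not APIs from this module:

    # Hypothetical: hand the parent's transport to a child client.
    shared = AsyncTransportWrapper(parent_transport)
    child = make_child_client(transport=shared)
    async with child:
        ...  # work with the child client
    # Exiting the child's context is a no-op on the wrapper, so the parent's
    # transport (and its connection pool) stays open.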
diff --git a/sdk/table/azure/azure_table/_shared/constants.py b/sdk/table/azure/azure_table/_shared/constants.py
new file mode 100644
index 000000000000..7fb05b559850
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/constants.py
@@ -0,0 +1,26 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import sys
+from .._generated.version import VERSION
+
+
+X_MS_VERSION = VERSION
+
+# Socket timeout in seconds
+CONNECTION_TIMEOUT = 20
+READ_TIMEOUT = 20
+
+# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned)
+# The socket timeout is now the maximum total duration to send all data.
+if sys.version_info >= (3, 5):
+ # the timeout to connect is 20 seconds, and the read timeout is 2000 seconds
+ # the 2000 seconds was calculated with: 100MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed)
+ READ_TIMEOUT = 2000
+
+STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default"
+
+SERVICE_HOST_BASE = 'core.windows.net'
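The 2000-second figure follows from the arithmetic in the comment, rounded down:

    # 100 MiB (max block size) at 50 KiB/s (an assumed minimum upload speed):
    assert (100 * 1024) // 50 == 2048  # seconds; rounded to 2000 for READ_TIMEOUT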
diff --git a/sdk/table/azure/azure_table/_shared/encryption.py b/sdk/table/azure/azure_table/_shared/encryption.py
new file mode 100644
index 000000000000..62607cc0cf85
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/encryption.py
@@ -0,0 +1,542 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import os
+from os import urandom
+from json import (
+ dumps,
+ loads,
+)
+from collections import OrderedDict
+
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.ciphers import Cipher
+from cryptography.hazmat.primitives.ciphers.algorithms import AES
+from cryptography.hazmat.primitives.ciphers.modes import CBC
+from cryptography.hazmat.primitives.padding import PKCS7
+
+from azure.core.exceptions import HttpResponseError
+
+from .._version import VERSION
+from . import encode_base64, decode_base64_to_bytes
+
+
+_ENCRYPTION_PROTOCOL_V1 = '1.0'
+_ERROR_OBJECT_INVALID = \
+ '{0} does not define a complete interface. Value of {1} is either missing or invalid.'
+
+
+def _validate_not_none(param_name, param):
+ if param is None:
+ raise ValueError('{0} should not be None.'.format(param_name))
+
+
+def _validate_key_encryption_key_wrap(kek):
+ # Note that None is not callable and so will fail the second clause of each check.
+ if not hasattr(kek, 'wrap_key') or not callable(kek.wrap_key):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'wrap_key'))
+ if not hasattr(kek, 'get_kid') or not callable(kek.get_kid):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid'))
+ if not hasattr(kek, 'get_key_wrap_algorithm') or not callable(kek.get_key_wrap_algorithm):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm'))
+
+
+class _EncryptionAlgorithm(object):
+ '''
+ Specifies which client encryption algorithm is used.
+ '''
+ AES_CBC_256 = 'AES_CBC_256'
+
+
+class _WrappedContentKey:
+ '''
+ Represents the envelope key details stored on the service.
+ '''
+
+ def __init__(self, algorithm, encrypted_key, key_id):
+ '''
+ :param str algorithm:
+ The algorithm used for wrapping.
+ :param bytes encrypted_key:
+ The encrypted content-encryption-key.
+ :param str key_id:
+ The key-encryption-key identifier string.
+ '''
+
+ _validate_not_none('algorithm', algorithm)
+ _validate_not_none('encrypted_key', encrypted_key)
+ _validate_not_none('key_id', key_id)
+
+ self.algorithm = algorithm
+ self.encrypted_key = encrypted_key
+ self.key_id = key_id
+
+
+class _EncryptionAgent:
+ '''
+ Represents the encryption agent stored on the service.
+ It consists of the encryption protocol version and encryption algorithm used.
+ '''
+
+ def __init__(self, encryption_algorithm, protocol):
+ '''
+ :param _EncryptionAlgorithm encryption_algorithm:
+ The algorithm used for encrypting the message contents.
+ :param str protocol:
+ The protocol version used for encryption.
+ '''
+
+ _validate_not_none('encryption_algorithm', encryption_algorithm)
+ _validate_not_none('protocol', protocol)
+
+ self.encryption_algorithm = str(encryption_algorithm)
+ self.protocol = protocol
+
+
+class _EncryptionData:
+ '''
+ Represents the encryption data that is stored on the service.
+ '''
+
+ def __init__(self, content_encryption_IV, encryption_agent, wrapped_content_key,
+ key_wrapping_metadata):
+ '''
+ :param bytes content_encryption_IV:
+ The content encryption initialization vector.
+ :param _EncryptionAgent encryption_agent:
+ The encryption agent.
+ :param _WrappedContentKey wrapped_content_key:
+ An object that stores the wrapping algorithm, the key identifier,
+ and the encrypted key bytes.
+ :param dict key_wrapping_metadata:
+ A dict containing metadata related to the key wrapping.
+ '''
+
+ _validate_not_none('content_encryption_IV', content_encryption_IV)
+ _validate_not_none('encryption_agent', encryption_agent)
+ _validate_not_none('wrapped_content_key', wrapped_content_key)
+
+ self.content_encryption_IV = content_encryption_IV
+ self.encryption_agent = encryption_agent
+ self.wrapped_content_key = wrapped_content_key
+ self.key_wrapping_metadata = key_wrapping_metadata
+
+
+def _generate_encryption_data_dict(kek, cek, iv):
+ '''
+ Generates and returns the encryption metadata as a dict.
+
+ :param object kek: The key encryption key. See calling functions for more information.
+ :param bytes cek: The content encryption key.
+ :param bytes iv: The initialization vector.
+ :return: A dict containing all the encryption metadata.
+ :rtype: dict
+ '''
+ # Encrypt the cek.
+ wrapped_cek = kek.wrap_key(cek)
+
+ # Build the encryption_data dict.
+ # Use OrderedDict to comply with Java's ordering requirement.
+ wrapped_content_key = OrderedDict()
+ wrapped_content_key['KeyId'] = kek.get_kid()
+ wrapped_content_key['EncryptedKey'] = encode_base64(wrapped_cek)
+ wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm()
+
+ encryption_agent = OrderedDict()
+ encryption_agent['Protocol'] = _ENCRYPTION_PROTOCOL_V1
+ encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_CBC_256
+
+ encryption_data_dict = OrderedDict()
+ encryption_data_dict['WrappedContentKey'] = wrapped_content_key
+ encryption_data_dict['EncryptionAgent'] = encryption_agent
+ encryption_data_dict['ContentEncryptionIV'] = encode_base64(iv)
+ encryption_data_dict['KeyWrappingMetadata'] = {'EncryptionLibrary': 'Python ' + VERSION}
+
+ return encryption_data_dict
+
+
+def _dict_to_encryption_data(encryption_data_dict):
+ '''
+ Converts the specified dictionary to an EncryptionData object for
+ eventual use in decryption.
+
+ :param dict encryption_data_dict:
+ The dictionary containing the encryption data.
+ :return: an _EncryptionData object built from the dictionary.
+ :rtype: _EncryptionData
+ '''
+ try:
+ if encryption_data_dict['EncryptionAgent']['Protocol'] != _ENCRYPTION_PROTOCOL_V1:
+ raise ValueError("Unsupported encryption version.")
+ except KeyError:
+ raise ValueError("Unsupported encryption version.")
+ wrapped_content_key = encryption_data_dict['WrappedContentKey']
+ wrapped_content_key = _WrappedContentKey(wrapped_content_key['Algorithm'],
+ decode_base64_to_bytes(wrapped_content_key['EncryptedKey']),
+ wrapped_content_key['KeyId'])
+
+ encryption_agent = encryption_data_dict['EncryptionAgent']
+ encryption_agent = _EncryptionAgent(encryption_agent['EncryptionAlgorithm'],
+ encryption_agent['Protocol'])
+
+ if 'KeyWrappingMetadata' in encryption_data_dict:
+ key_wrapping_metadata = encryption_data_dict['KeyWrappingMetadata']
+ else:
+ key_wrapping_metadata = None
+
+ encryption_data = _EncryptionData(decode_base64_to_bytes(encryption_data_dict['ContentEncryptionIV']),
+ encryption_agent,
+ wrapped_content_key,
+ key_wrapping_metadata)
+
+ return encryption_data
+
+
+def _generate_AES_CBC_cipher(cek, iv):
+ '''
+ Generates and returns an encryption cipher for AES CBC using the given cek and iv.
+
+ :param bytes[] cek: The content encryption key for the cipher.
+ :param bytes[] iv: The initialization vector for the cipher.
+ :return: A cipher for encrypting in AES256 CBC.
+ :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher
+ '''
+
+ backend = default_backend()
+ algorithm = AES(cek)
+ mode = CBC(iv)
+ return Cipher(algorithm, mode, backend)
+
+
+def _validate_and_unwrap_cek(encryption_data, key_encryption_key=None, key_resolver=None):
+ '''
+ Extracts and returns the content_encryption_key stored in the encryption_data object
+ and performs necessary validation on all parameters.
+ :param _EncryptionData encryption_data:
+ The encryption metadata of the retrieved value.
+ :param obj key_encryption_key:
+ The key_encryption_key used to unwrap the cek. Please refer to high-level service object
+ instance variables for more details.
+ :param func key_resolver:
+ A function used that, given a key_id, will return a key_encryption_key. Please refer
+ to high-level service object instance variables for more details.
+ :return: the content_encryption_key stored in the encryption_data object.
+ :rtype: bytes[]
+ '''
+
+ _validate_not_none('content_encryption_IV', encryption_data.content_encryption_IV)
+ _validate_not_none('encrypted_key', encryption_data.wrapped_content_key.encrypted_key)
+
+ if _ENCRYPTION_PROTOCOL_V1 != encryption_data.encryption_agent.protocol:
+ raise ValueError('Encryption version is not supported.')
+
+ content_encryption_key = None
+
+ # If the resolver exists, give priority to the key it finds.
+ if key_resolver is not None:
+ key_encryption_key = key_resolver(encryption_data.wrapped_content_key.key_id)
+
+ _validate_not_none('key_encryption_key', key_encryption_key)
+ if not hasattr(key_encryption_key, 'get_kid') or not callable(key_encryption_key.get_kid):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid'))
+ if not hasattr(key_encryption_key, 'unwrap_key') or not callable(key_encryption_key.unwrap_key):
+ raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'unwrap_key'))
+ if encryption_data.wrapped_content_key.key_id != key_encryption_key.get_kid():
+ raise ValueError('Provided or resolved key-encryption-key does not match the id of key used to encrypt.')
+ # Will throw an exception if the specified algorithm is not supported.
+ content_encryption_key = key_encryption_key.unwrap_key(encryption_data.wrapped_content_key.encrypted_key,
+ encryption_data.wrapped_content_key.algorithm)
+ _validate_not_none('content_encryption_key', content_encryption_key)
+
+ return content_encryption_key
+
+
+def _decrypt_message(message, encryption_data, key_encryption_key=None, resolver=None):
+ '''
+ Decrypts the given ciphertext using AES256 in CBC mode with 128 bit padding.
+ Unwraps the content-encryption-key using the user-provided or resolved key-encryption-key (kek).
+    Returns the original plaintext.
+
+ :param str message:
+ The ciphertext to be decrypted.
+ :param _EncryptionData encryption_data:
+ The metadata associated with this ciphertext.
+ :param object key_encryption_key:
+ The user-provided key-encryption-key. Must implement the following methods:
+ unwrap_key(key, algorithm)
+ - returns the unwrapped form of the specified symmetric key using the string-specified algorithm.
+ get_kid()
+ - returns a string key id for this key-encryption-key.
+ :param function resolver(kid):
+ The user-provided key resolver. Uses the kid string to return a key-encryption-key
+ implementing the interface defined above.
+ :return: The decrypted plaintext.
+ :rtype: str
+ '''
+ _validate_not_none('message', message)
+ content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, resolver)
+
+ if _EncryptionAlgorithm.AES_CBC_256 != encryption_data.encryption_agent.encryption_algorithm:
+ raise ValueError('Specified encryption algorithm is not supported.')
+
+ cipher = _generate_AES_CBC_cipher(content_encryption_key, encryption_data.content_encryption_IV)
+
+ # decrypt data
+ decrypted_data = message
+ decryptor = cipher.decryptor()
+ decrypted_data = (decryptor.update(decrypted_data) + decryptor.finalize())
+
+ # unpad data
+ unpadder = PKCS7(128).unpadder()
+ decrypted_data = (unpadder.update(decrypted_data) + unpadder.finalize())
+
+ return decrypted_data
+
+
+def encrypt_blob(blob, key_encryption_key):
+ '''
+ Encrypts the given blob using AES256 in CBC mode with 128 bit padding.
+ Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek).
+ Returns a json-formatted string containing the encryption metadata. This method should
+ only be used when a blob is small enough for single shot upload. Encrypting larger blobs
+ is done as a part of the upload_data_chunks method.
+
+ :param bytes blob:
+ The blob to be encrypted.
+ :param object key_encryption_key:
+ The user-provided key-encryption-key. Must implement the following methods:
+ wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
+ get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
+ get_kid()--returns a string key id for this key-encryption-key.
+ :return: A tuple of json-formatted string containing the encryption metadata and the encrypted blob data.
+ :rtype: (str, bytes)
+ '''
+
+ _validate_not_none('blob', blob)
+ _validate_not_none('key_encryption_key', key_encryption_key)
+ _validate_key_encryption_key_wrap(key_encryption_key)
+
+ # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks
+ content_encryption_key = urandom(32)
+ initialization_vector = urandom(16)
+
+ cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector)
+
+ # PKCS7 with 16 byte blocks ensures compatibility with AES.
+ padder = PKCS7(128).padder()
+ padded_data = padder.update(blob) + padder.finalize()
+
+ # Encrypt the data.
+ encryptor = cipher.encryptor()
+ encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
+ encryption_data = _generate_encryption_data_dict(key_encryption_key, content_encryption_key,
+ initialization_vector)
+ encryption_data['EncryptionMode'] = 'FullBlob'
+
+ return dumps(encryption_data), encrypted_data
+
+
+def generate_blob_encryption_data(key_encryption_key):
+ '''
+ Generates the encryption_metadata for the blob.
+
+ :param bytes key_encryption_key:
+ The key-encryption-key used to wrap the cek associate with this blob.
+ :return: A tuple containing the cek and iv for this blob as well as the
+ serialized encryption metadata for the blob.
+ :rtype: (bytes, bytes, str)
+ '''
+ encryption_data = None
+ content_encryption_key = None
+ initialization_vector = None
+ if key_encryption_key:
+ _validate_key_encryption_key_wrap(key_encryption_key)
+ content_encryption_key = urandom(32)
+ initialization_vector = urandom(16)
+ encryption_data = _generate_encryption_data_dict(key_encryption_key,
+ content_encryption_key,
+ initialization_vector)
+ encryption_data['EncryptionMode'] = 'FullBlob'
+ encryption_data = dumps(encryption_data)
+
+ return content_encryption_key, initialization_vector, encryption_data
+
+
+def decrypt_blob(require_encryption, key_encryption_key, key_resolver,
+ content, start_offset, end_offset, response_headers):
+ '''
+ Decrypts the given blob contents and returns only the requested range.
+
+ :param bool require_encryption:
+ Whether or not the calling blob service requires objects to be decrypted.
+ :param object key_encryption_key:
+ The user-provided key-encryption-key. Must implement the following methods:
+ wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
+ get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
+ get_kid()--returns a string key id for this key-encryption-key.
+ :param key_resolver(kid):
+ The user-provided key resolver. Uses the kid string to return a key-encryption-key
+ implementing the interface defined above.
+ :return: The decrypted blob content.
+ :rtype: bytes
+ '''
+ try:
+ encryption_data = _dict_to_encryption_data(loads(response_headers['x-ms-meta-encryptiondata']))
+ except: # pylint: disable=bare-except
+ if require_encryption:
+ raise ValueError(
+                'Encryption required, but received data does not contain appropriate metadata.' + \
+                ' Data was either not encrypted or metadata has been lost.')
+
+ return content
+
+ if encryption_data.encryption_agent.encryption_algorithm != _EncryptionAlgorithm.AES_CBC_256:
+ raise ValueError('Specified encryption algorithm is not supported.')
+
+ blob_type = response_headers['x-ms-blob-type']
+
+ iv = None
+ unpad = False
+ if 'content-range' in response_headers:
+ content_range = response_headers['content-range']
+ # Format: 'bytes x-y/size'
+
+ # Ignore the word 'bytes'
+ content_range = content_range.split(' ')
+
+ content_range = content_range[1].split('-')
+ content_range = content_range[1].split('/')
+ end_range = int(content_range[0])
+ blob_size = int(content_range[1])
+
+ if start_offset >= 16:
+ iv = content[:16]
+ content = content[16:]
+ start_offset -= 16
+ else:
+ iv = encryption_data.content_encryption_IV
+
+ if end_range == blob_size - 1:
+ unpad = True
+ else:
+ unpad = True
+ iv = encryption_data.content_encryption_IV
+
+ if blob_type == 'PageBlob':
+ unpad = False
+
+ content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, key_resolver)
+ cipher = _generate_AES_CBC_cipher(content_encryption_key, iv)
+ decryptor = cipher.decryptor()
+
+ content = decryptor.update(content) + decryptor.finalize()
+ if unpad:
+ unpadder = PKCS7(128).unpadder()
+ content = unpadder.update(content) + unpadder.finalize()
+
+ return content[start_offset: len(content) - end_offset]
+
+
+def get_blob_encryptor_and_padder(cek, iv, should_pad):
+ encryptor = None
+ padder = None
+
+ if cek is not None and iv is not None:
+ cipher = _generate_AES_CBC_cipher(cek, iv)
+ encryptor = cipher.encryptor()
+ padder = PKCS7(128).padder() if should_pad else None
+
+ return encryptor, padder
+
+
+def encrypt_queue_message(message, key_encryption_key):
+ '''
+ Encrypts the given plain text message using AES256 in CBC mode with 128 bit padding.
+ Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek).
+ Returns a json-formatted string containing the encrypted message and the encryption metadata.
+
+ :param object message:
+        The plain text message to be encrypted.
+ :param object key_encryption_key:
+ The user-provided key-encryption-key. Must implement the following methods:
+ wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
+ get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
+ get_kid()--returns a string key id for this key-encryption-key.
+ :return: A json-formatted string containing the encrypted message and the encryption metadata.
+ :rtype: str
+ '''
+
+ _validate_not_none('message', message)
+ _validate_not_none('key_encryption_key', key_encryption_key)
+ _validate_key_encryption_key_wrap(key_encryption_key)
+
+ # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks
+ content_encryption_key = os.urandom(32)
+ initialization_vector = os.urandom(16)
+
+ # Queue encoding functions all return unicode strings, and encryption should
+ # operate on binary strings.
+ message = message.encode('utf-8')
+
+ cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector)
+
+ # PKCS7 with 16 byte blocks ensures compatibility with AES.
+ padder = PKCS7(128).padder()
+ padded_data = padder.update(message) + padder.finalize()
+
+ # Encrypt the data.
+ encryptor = cipher.encryptor()
+ encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
+
+ # Build the dictionary structure.
+ queue_message = {'EncryptedMessageContents': encode_base64(encrypted_data),
+ 'EncryptionData': _generate_encryption_data_dict(key_encryption_key,
+ content_encryption_key,
+ initialization_vector)}
+
+ return dumps(queue_message)
+
+
+def decrypt_queue_message(message, response, require_encryption, key_encryption_key, resolver):
+ '''
+ Returns the decrypted message contents from an EncryptedQueueMessage.
+ If no encryption metadata is present, will return the unaltered message.
+ :param str message:
+ The JSON formatted QueueEncryptedMessage contents with all associated metadata.
+ :param bool require_encryption:
+ If set, will enforce that the retrieved messages are encrypted and decrypt them.
+ :param object key_encryption_key:
+ The user-provided key-encryption-key. Must implement the following methods:
+ unwrap_key(key, algorithm)
+        - returns the unwrapped form of the specified symmetric key using the string-specified algorithm.
+ get_kid()
+ - returns a string key id for this key-encryption-key.
+ :param function resolver(kid):
+ The user-provided key resolver. Uses the kid string to return a key-encryption-key
+ implementing the interface defined above.
+ :return: The plain text message from the queue message.
+ :rtype: str
+ '''
+
+ try:
+ message = loads(message)
+
+ encryption_data = _dict_to_encryption_data(message['EncryptionData'])
+ decoded_data = decode_base64_to_bytes(message['EncryptedMessageContents'])
+ except (KeyError, ValueError):
+ # Message was not json formatted and so was not encrypted
+ # or the user provided a json formatted message.
+ if require_encryption:
+ raise ValueError('Message was not encrypted.')
+
+ return message
+ try:
+ return _decrypt_message(decoded_data, encryption_data, key_encryption_key, resolver).decode('utf-8')
+ except Exception as error:
+ raise HttpResponseError(
+ message="Decryption failed.",
+ response=response,
+ error=error)
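A minimal, hypothetical key-encryption-key that satisfies the interface the validators above check for, shown round-tripping a queue message. The XOR "wrapping" is for illustration only and is not secure; a real KEK would use RSA-OAEP or AES key wrap:

    import os

    class DemoKEK:
        '''Toy KEK implementing wrap_key/unwrap_key/get_kid/get_key_wrap_algorithm.'''
        def __init__(self):
            self._kid = 'demo-kek-1'
            self._secret = os.urandom(32)

        def wrap_key(self, key):
            # NOT secure -- XOR stands in for a real key-wrap algorithm.
            return bytes(a ^ b for a, b in zip(key, self._secret))

        def unwrap_key(self, wrapped_key, algorithm):
            return bytes(a ^ b for a, b in zip(wrapped_key, self._secret))

        def get_kid(self):
            return self._kid

        def get_key_wrap_algorithm(self):
            return 'XOR-DEMO'

    kek = DemoKEK()
    payload = encrypt_queue_message('hello', kek)                  # JSON string with metadata
    plain = decrypt_queue_message(payload, None, True, kek, None)  # -> 'hello'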
diff --git a/sdk/table/azure/azure_table/_shared/models.py b/sdk/table/azure/azure_table/_shared/models.py
new file mode 100644
index 000000000000..8fcc6815dbdd
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/models.py
@@ -0,0 +1,459 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import sys
+from enum import Enum
+
+
+if sys.version_info < (3,):
+ from collections import Iterable
+
+ _unicode_type = unicode
+else:
+ from collections.abc import Iterable
+
+ _unicode_type = str
+
+
+def get_enum_value(value):
+ if value is None or value in ["None", ""]:
+ return None
+ try:
+ return value.value
+ except AttributeError:
+ return value
+
+
+class StorageErrorCode(str, Enum):
+
+ # Generic storage values
+ account_already_exists = "AccountAlreadyExists"
+ account_being_created = "AccountBeingCreated"
+ account_is_disabled = "AccountIsDisabled"
+ authentication_failed = "AuthenticationFailed"
+ authorization_failure = "AuthorizationFailure"
+ no_authentication_information = "NoAuthenticationInformation"
+ condition_headers_not_supported = "ConditionHeadersNotSupported"
+ condition_not_met = "ConditionNotMet"
+ empty_metadata_key = "EmptyMetadataKey"
+ insufficient_account_permissions = "InsufficientAccountPermissions"
+ internal_error = "InternalError"
+ invalid_authentication_info = "InvalidAuthenticationInfo"
+ invalid_header_value = "InvalidHeaderValue"
+ invalid_http_verb = "InvalidHttpVerb"
+ invalid_input = "InvalidInput"
+ invalid_md5 = "InvalidMd5"
+ invalid_metadata = "InvalidMetadata"
+ invalid_query_parameter_value = "InvalidQueryParameterValue"
+ invalid_range = "InvalidRange"
+ invalid_resource_name = "InvalidResourceName"
+ invalid_uri = "InvalidUri"
+ invalid_xml_document = "InvalidXmlDocument"
+ invalid_xml_node_value = "InvalidXmlNodeValue"
+ md5_mismatch = "Md5Mismatch"
+ metadata_too_large = "MetadataTooLarge"
+ missing_content_length_header = "MissingContentLengthHeader"
+ missing_required_query_parameter = "MissingRequiredQueryParameter"
+ missing_required_header = "MissingRequiredHeader"
+ missing_required_xml_node = "MissingRequiredXmlNode"
+ multiple_condition_headers_not_supported = "MultipleConditionHeadersNotSupported"
+ operation_timed_out = "OperationTimedOut"
+ out_of_range_input = "OutOfRangeInput"
+ out_of_range_query_parameter_value = "OutOfRangeQueryParameterValue"
+ request_body_too_large = "RequestBodyTooLarge"
+ resource_type_mismatch = "ResourceTypeMismatch"
+ request_url_failed_to_parse = "RequestUrlFailedToParse"
+ resource_already_exists = "ResourceAlreadyExists"
+ resource_not_found = "ResourceNotFound"
+ server_busy = "ServerBusy"
+ unsupported_header = "UnsupportedHeader"
+ unsupported_xml_node = "UnsupportedXmlNode"
+ unsupported_query_parameter = "UnsupportedQueryParameter"
+ unsupported_http_verb = "UnsupportedHttpVerb"
+
+ # Blob values
+ append_position_condition_not_met = "AppendPositionConditionNotMet"
+ blob_already_exists = "BlobAlreadyExists"
+ blob_not_found = "BlobNotFound"
+ blob_overwritten = "BlobOverwritten"
+ blob_tier_inadequate_for_content_length = "BlobTierInadequateForContentLength"
+ block_count_exceeds_limit = "BlockCountExceedsLimit"
+ block_list_too_long = "BlockListTooLong"
+ cannot_change_to_lower_tier = "CannotChangeToLowerTier"
+ cannot_verify_copy_source = "CannotVerifyCopySource"
+ container_already_exists = "ContainerAlreadyExists"
+ container_being_deleted = "ContainerBeingDeleted"
+ container_disabled = "ContainerDisabled"
+ container_not_found = "ContainerNotFound"
+ content_length_larger_than_tier_limit = "ContentLengthLargerThanTierLimit"
+ copy_across_accounts_not_supported = "CopyAcrossAccountsNotSupported"
+ copy_id_mismatch = "CopyIdMismatch"
+ feature_version_mismatch = "FeatureVersionMismatch"
+ incremental_copy_blob_mismatch = "IncrementalCopyBlobMismatch"
+ incremental_copy_of_eralier_version_snapshot_not_allowed = "IncrementalCopyOfEralierVersionSnapshotNotAllowed"
+ incremental_copy_source_must_be_snapshot = "IncrementalCopySourceMustBeSnapshot"
+ infinite_lease_duration_required = "InfiniteLeaseDurationRequired"
+ invalid_blob_or_block = "InvalidBlobOrBlock"
+ invalid_blob_tier = "InvalidBlobTier"
+ invalid_blob_type = "InvalidBlobType"
+ invalid_block_id = "InvalidBlockId"
+ invalid_block_list = "InvalidBlockList"
+ invalid_operation = "InvalidOperation"
+ invalid_page_range = "InvalidPageRange"
+ invalid_source_blob_type = "InvalidSourceBlobType"
+ invalid_source_blob_url = "InvalidSourceBlobUrl"
+ invalid_version_for_page_blob_operation = "InvalidVersionForPageBlobOperation"
+ lease_already_present = "LeaseAlreadyPresent"
+ lease_already_broken = "LeaseAlreadyBroken"
+ lease_id_mismatch_with_blob_operation = "LeaseIdMismatchWithBlobOperation"
+ lease_id_mismatch_with_container_operation = "LeaseIdMismatchWithContainerOperation"
+ lease_id_mismatch_with_lease_operation = "LeaseIdMismatchWithLeaseOperation"
+ lease_id_missing = "LeaseIdMissing"
+ lease_is_breaking_and_cannot_be_acquired = "LeaseIsBreakingAndCannotBeAcquired"
+ lease_is_breaking_and_cannot_be_changed = "LeaseIsBreakingAndCannotBeChanged"
+ lease_is_broken_and_cannot_be_renewed = "LeaseIsBrokenAndCannotBeRenewed"
+ lease_lost = "LeaseLost"
+ lease_not_present_with_blob_operation = "LeaseNotPresentWithBlobOperation"
+ lease_not_present_with_container_operation = "LeaseNotPresentWithContainerOperation"
+ lease_not_present_with_lease_operation = "LeaseNotPresentWithLeaseOperation"
+ max_blob_size_condition_not_met = "MaxBlobSizeConditionNotMet"
+ no_pending_copy_operation = "NoPendingCopyOperation"
+ operation_not_allowed_on_incremental_copy_blob = "OperationNotAllowedOnIncrementalCopyBlob"
+ pending_copy_operation = "PendingCopyOperation"
+ previous_snapshot_cannot_be_newer = "PreviousSnapshotCannotBeNewer"
+ previous_snapshot_not_found = "PreviousSnapshotNotFound"
+ previous_snapshot_operation_not_supported = "PreviousSnapshotOperationNotSupported"
+ sequence_number_condition_not_met = "SequenceNumberConditionNotMet"
+ sequence_number_increment_too_large = "SequenceNumberIncrementTooLarge"
+ snapshot_count_exceeded = "SnapshotCountExceeded"
+ snaphot_operation_rate_exceeded = "SnaphotOperationRateExceeded"
+ snapshots_present = "SnapshotsPresent"
+ source_condition_not_met = "SourceConditionNotMet"
+ system_in_use = "SystemInUse"
+ target_condition_not_met = "TargetConditionNotMet"
+ unauthorized_blob_overwrite = "UnauthorizedBlobOverwrite"
+ blob_being_rehydrated = "BlobBeingRehydrated"
+ blob_archived = "BlobArchived"
+ blob_not_archived = "BlobNotArchived"
+
+ # Queue values
+ invalid_marker = "InvalidMarker"
+ message_not_found = "MessageNotFound"
+ message_too_large = "MessageTooLarge"
+ pop_receipt_mismatch = "PopReceiptMismatch"
+ queue_already_exists = "QueueAlreadyExists"
+ queue_being_deleted = "QueueBeingDeleted"
+ queue_disabled = "QueueDisabled"
+ queue_not_empty = "QueueNotEmpty"
+ queue_not_found = "QueueNotFound"
+
+ # File values
+ cannot_delete_file_or_directory = "CannotDeleteFileOrDirectory"
+ client_cache_flush_delay = "ClientCacheFlushDelay"
+ delete_pending = "DeletePending"
+ directory_not_empty = "DirectoryNotEmpty"
+ file_lock_conflict = "FileLockConflict"
+ invalid_file_or_directory_path_name = "InvalidFileOrDirectoryPathName"
+ parent_not_found = "ParentNotFound"
+ read_only_attribute = "ReadOnlyAttribute"
+ share_already_exists = "ShareAlreadyExists"
+ share_being_deleted = "ShareBeingDeleted"
+ share_disabled = "ShareDisabled"
+ share_not_found = "ShareNotFound"
+ sharing_violation = "SharingViolation"
+ share_snapshot_in_progress = "ShareSnapshotInProgress"
+ share_snapshot_count_exceeded = "ShareSnapshotCountExceeded"
+ share_snapshot_operation_not_supported = "ShareSnapshotOperationNotSupported"
+ share_has_snapshots = "ShareHasSnapshots"
+ container_quota_downgrade_not_allowed = "ContainerQuotaDowngradeNotAllowed"
+
+ # DataLake values
+ content_length_must_be_zero = 'ContentLengthMustBeZero'
+ path_already_exists = 'PathAlreadyExists'
+ invalid_flush_position = 'InvalidFlushPosition'
+ invalid_property_name = 'InvalidPropertyName'
+ invalid_source_uri = 'InvalidSourceUri'
+ unsupported_rest_version = 'UnsupportedRestVersion'
+ file_system_not_found = 'FilesystemNotFound'
+ path_not_found = 'PathNotFound'
+ rename_destination_parent_path_not_found = 'RenameDestinationParentPathNotFound'
+ source_path_not_found = 'SourcePathNotFound'
+ destination_path_is_being_deleted = 'DestinationPathIsBeingDeleted'
+ file_system_already_exists = 'FilesystemAlreadyExists'
+ file_system_being_deleted = 'FilesystemBeingDeleted'
+ invalid_destination_path = 'InvalidDestinationPath'
+ invalid_rename_source_path = 'InvalidRenameSourcePath'
+ invalid_source_or_destination_resource_type = 'InvalidSourceOrDestinationResourceType'
+ lease_is_already_broken = 'LeaseIsAlreadyBroken'
+ lease_name_mismatch = 'LeaseNameMismatch'
+ path_conflict = 'PathConflict'
+ source_path_is_being_deleted = 'SourcePathIsBeingDeleted'
+
+
+class DictMixin(object):
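+    """Mixin giving model classes a dict-like interface over their attributes.
+
+    Keys map directly to instance attributes; attributes whose names start with
+    an underscore are treated as private and are excluded from keys(), values(),
+    items() and the string representation.
+    """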
+
+ def __setitem__(self, key, item):
+ self.__dict__[key] = item
+
+ def __getitem__(self, key):
+ return self.__dict__[key]
+
+ def __repr__(self):
+ return str(self)
+
+ def __len__(self):
+ return len(self.keys())
+
+    def __delitem__(self, key):
+        # Deleting a key resets the attribute to None rather than removing it,
+        # so the model attribute remains present (if empty) after deletion.
+        self.__dict__[key] = None
+
+ def __eq__(self, other):
+ """Compare objects by comparing all attributes."""
+ if isinstance(other, self.__class__):
+ return self.__dict__ == other.__dict__
+ return False
+
+ def __ne__(self, other):
+ """Compare objects by comparing all attributes."""
+ return not self.__eq__(other)
+
+ def __str__(self):
+ return str({k: v for k, v in self.__dict__.items() if not k.startswith('_')})
+
+ def has_key(self, k):
+ return k in self.__dict__
+
+ def update(self, *args, **kwargs):
+ return self.__dict__.update(*args, **kwargs)
+
+ def keys(self):
+ return [k for k in self.__dict__ if not k.startswith('_')]
+
+ def values(self):
+ return [v for k, v in self.__dict__.items() if not k.startswith('_')]
+
+ def items(self):
+ return [(k, v) for k, v in self.__dict__.items() if not k.startswith('_')]
+
+ def get(self, key, default=None):
+ if key in self.__dict__:
+ return self.__dict__[key]
+ return default
+
+
+class LocationMode(object):
+ """
+ Specifies the location the request should be sent to. This mode only applies
+ for RA-GRS accounts which allow secondary read access. All other account types
+ must use PRIMARY.
+ """
+
+ PRIMARY = 'primary' #: Requests should be sent to the primary location.
+ SECONDARY = 'secondary' #: Requests should be sent to the secondary location, if possible.
+
+
+class ResourceTypes(object):
+ """
+ Specifies the resource types that are accessible with the account SAS.
+
+ :param bool service:
+ Access to service-level APIs (e.g., Get/Set Service Properties,
+ Get Service Stats, List Containers/Queues/Shares)
+ :param bool container:
+ Access to container-level APIs (e.g., Create/Delete Container,
+ Create/Delete Queue, Create/Delete Share,
+ List Blobs/Files and Directories)
+ :param bool object:
+ Access to object-level APIs for blobs, queue messages, and
+        files (e.g. Put Blob, Query Entity, Get Messages, Create File, etc.)
+ """
+
+ def __init__(self, service=False, container=False, object=False): # pylint: disable=redefined-builtin
+ self.service = service
+ self.container = container
+ self.object = object
+ self._str = (('s' if self.service else '') +
+ ('c' if self.container else '') +
+ ('o' if self.object else ''))
+
+ def __str__(self):
+ return self._str
+
+ @classmethod
+ def from_string(cls, string):
+ """Create a ResourceTypes from a string.
+
+ To specify service, container, or object you need only to
+        include the first letter of the word in the string. E.g. for service and
+        container you would provide a string "sc".
+
+        :param str string: Specify service, container, or object in
+            the string with the first letter of the word.
+ :return: A ResourceTypes object
+        :rtype: ~azure.azure_table.ResourceTypes
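+
+        Example (illustrative):
+            >>> parsed = ResourceTypes.from_string("sc")
+            >>> parsed.service, parsed.container, parsed.object
+            (True, True, False)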
+ """
+ res_service = 's' in string
+ res_container = 'c' in string
+ res_object = 'o' in string
+
+ parsed = cls(res_service, res_container, res_object)
+ parsed._str = string # pylint: disable = protected-access
+ return parsed
+
+
+class AccountSasPermissions(object):
+ """
+    :class:`~AccountSasPermissions` class to be used with the generate_account_sas
+    function and for the AccessPolicies used with set_*_acl. There are two types of
+    SAS which may be used to grant resource access. One is to grant access to a
+    specific resource (resource-specific). Another is to grant access to the
+    entire service for a specific account and allow certain operations based on
+    the permissions found here.
+
+ :param bool read:
+ Valid for all signed resources types (Service, Container, and Object).
+ Permits read permissions to the specified resource type.
+ :param bool write:
+ Valid for all signed resources types (Service, Container, and Object).
+ Permits write permissions to the specified resource type.
+ :param bool delete:
+ Valid for Container and Object resource types, except for queue messages.
+ :param bool list:
+ Valid for Service and Container resource types only.
+ :param bool add:
+ Valid for the following Object resource types only: queue messages, and append blobs.
+ :param bool create:
+ Valid for the following Object resource types only: blobs and files.
+ Users can create new blobs or files, but may not overwrite existing
+ blobs or files.
+ :param bool update:
+ Valid for the following Object resource types only: queue messages.
+ :param bool process:
+ Valid for the following Object resource type only: queue messages.
+ """
+ def __init__(self, read=False, write=False, delete=False, list=False, # pylint: disable=redefined-builtin
+ add=False, create=False, update=False, process=False):
+ self.read = read
+ self.write = write
+ self.delete = delete
+ self.list = list
+ self.add = add
+ self.create = create
+ self.update = update
+ self.process = process
+ self._str = (('r' if self.read else '') +
+ ('w' if self.write else '') +
+ ('d' if self.delete else '') +
+ ('l' if self.list else '') +
+ ('a' if self.add else '') +
+ ('c' if self.create else '') +
+ ('u' if self.update else '') +
+ ('p' if self.process else ''))
+
+ def __str__(self):
+ return self._str
+
+ @classmethod
+ def from_string(cls, permission):
+ """Create AccountSasPermissions from a string.
+
+ To specify read, write, delete, etc. permissions you need only to
+ include the first letter of the word in the string. E.g. for read and write
+ permissions you would provide a string "rw".
+
+ :param str permission: Specify permissions in
+ the string with the first letter of the word.
+        :return: An AccountSasPermissions object
+        :rtype: ~azure.azure_table.AccountSasPermissions
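+
+        Example (illustrative):
+            >>> p = AccountSasPermissions.from_string("rwl")
+            >>> p.read, p.write, p.list, p.delete
+            (True, True, True, False)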
+ """
+ p_read = 'r' in permission
+ p_write = 'w' in permission
+ p_delete = 'd' in permission
+ p_list = 'l' in permission
+ p_add = 'a' in permission
+ p_create = 'c' in permission
+ p_update = 'u' in permission
+ p_process = 'p' in permission
+
+ parsed = cls(p_read, p_write, p_delete, p_list, p_add, p_create, p_update, p_process)
+ parsed._str = permission # pylint: disable = protected-access
+ return parsed
+
+
+class Services(object):
+ """Specifies the services accessible with the account SAS.
+
+    :param bool blob:
+        Access for the :class:`~azure.storage.blob.BlobServiceClient`
+    :param bool queue:
+        Access for the :class:`~azure.storage.queue.QueueServiceClient`
+    :param bool fileshare:
+        Access for the :class:`~azure.storage.fileshare.ShareServiceClient`
+ """
+
+ def __init__(self, blob=False, queue=False, fileshare=False):
+ self.blob = blob
+ self.queue = queue
+ self.fileshare = fileshare
+ self._str = (('b' if self.blob else '') +
+ ('q' if self.queue else '') +
+ ('f' if self.fileshare else ''))
+
+ def __str__(self):
+ return self._str
+
+ @classmethod
+ def from_string(cls, string):
+ """Create Services from a string.
+
+ To specify blob, queue, or file you need only to
+ include the first letter of the word in the string. E.g. for blob and queue
+ you would provide a string "bq".
+
+        :param str string: Specify blob, queue, or file in
+            the string with the first letter of the word.
+ :return: A Services object
+        :rtype: ~azure.azure_table.Services
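+
+        Example (illustrative):
+            >>> str(Services.from_string("bq"))
+            'bq'
+            >>> Services.from_string("bq").fileshare
+            False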
+ """
+ res_blob = 'b' in string
+ res_queue = 'q' in string
+ res_file = 'f' in string
+
+ parsed = cls(res_blob, res_queue, res_file)
+ parsed._str = string # pylint: disable = protected-access
+ return parsed
+
+
+class UserDelegationKey(object):
+ """
+ Represents a user delegation key, provided to the user by Azure Storage
+ based on their Azure Active Directory access token.
+
+ The fields are saved as simple strings since the user does not have to interact with this object;
+    to generate an identity SAS, the user can simply pass it to the right API.
+
+ :ivar str signed_oid:
+ Object ID of this token.
+ :ivar str signed_tid:
+ Tenant ID of the tenant that issued this token.
+ :ivar str signed_start:
+ The datetime this token becomes valid.
+ :ivar str signed_expiry:
+ The datetime this token expires.
+ :ivar str signed_service:
+ What service this key is valid for.
+ :ivar str signed_version:
+ The version identifier of the REST service that created this token.
+ :ivar str value:
+ The user delegation key.
+ """
+ def __init__(self):
+ self.signed_oid = None
+ self.signed_tid = None
+ self.signed_start = None
+ self.signed_expiry = None
+ self.signed_service = None
+ self.signed_version = None
+ self.value = None
diff --git a/sdk/table/azure/azure_table/_shared/parser.py b/sdk/table/azure/azure_table/_shared/parser.py
new file mode 100644
index 000000000000..c6feba8a6393
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/parser.py
@@ -0,0 +1,20 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import sys
+
+if sys.version_info < (3,):
+ def _str(value):
+ if isinstance(value, unicode): # pylint: disable=undefined-variable
+ return value.encode('utf-8')
+
+ return str(value)
+else:
+ _str = str
+
+
+def _to_utc_datetime(value):
+ return value.strftime('%Y-%m-%dT%H:%M:%SZ')
diff --git a/sdk/table/azure/azure_table/_shared/policies.py b/sdk/table/azure/azure_table/_shared/policies.py
new file mode 100644
index 000000000000..6ecb9baba938
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/policies.py
@@ -0,0 +1,599 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import base64
+import hashlib
+import re
+import random
+from time import time
+from io import SEEK_SET, UnsupportedOperation
+import logging
+import uuid
+import types
+from typing import Any, TYPE_CHECKING
+from wsgiref.handlers import format_date_time
+try:
+ from urllib.parse import (
+ urlparse,
+ parse_qsl,
+ urlunparse,
+ urlencode,
+ )
+except ImportError:
+ from urllib import urlencode # type: ignore
+ from urlparse import ( # type: ignore
+ urlparse,
+ parse_qsl,
+ urlunparse,
+ )
+
+from azure.core.pipeline.policies import (
+ HeadersPolicy,
+ SansIOHTTPPolicy,
+ NetworkTraceLoggingPolicy,
+ HTTPPolicy,
+ RequestHistory
+)
+from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError
+
+from .models import LocationMode
+
+try:
+ _unicode_type = unicode # type: ignore
+except NameError:
+ _unicode_type = str
+
+if TYPE_CHECKING:
+ from azure.core.pipeline import PipelineRequest, PipelineResponse
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def encode_base64(data):
+ if isinstance(data, _unicode_type):
+ data = data.encode('utf-8')
+ encoded = base64.b64encode(data)
+ return encoded.decode('utf-8')
+
+
+def is_exhausted(settings):
+ """Are we out of retries?"""
+ retry_counts = (settings['total'], settings['connect'], settings['read'], settings['status'])
+ retry_counts = list(filter(None, retry_counts))
+ if not retry_counts:
+ return False
+ return min(retry_counts) < 0
+
+
+def retry_hook(settings, **kwargs):
+ if settings['hook']:
+ settings['hook'](retry_count=settings['count'] - 1, location_mode=settings['mode'], **kwargs)
+
+
+def is_retry(response, mode):
+ """Is this method/status code retryable? (Based on whitelists and control
+ variables such as the number of total retries to allow, whether to
+ respect the Retry-After header, whether this header is present, and
+ whether the returned status code is on the list of status codes to
+    be retried upon the presence of the aforementioned header)
+ """
+ status = response.http_response.status_code
+ if 300 <= status < 500:
+        # An exception occurred, but in most cases it was expected. Examples could
+        # include a 409 Conflict or 412 Precondition Failed.
+ if status == 404 and mode == LocationMode.SECONDARY:
+ # Response code 404 should be retried if secondary was used.
+ return True
+ if status == 408:
+ # Response code 408 is a timeout and should be retried.
+ return True
+ return False
+ if status >= 500:
+ # Response codes above 500 with the exception of 501 Not Implemented and
+ # 505 Version Not Supported indicate a server issue and should be retried.
+ if status in [501, 505]:
+ return False
+ return True
+ return False
+
+
+def urljoin(base_url, stub_url):
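+    """Append stub_url to the path component of base_url, e.g. (illustrative)
+    urljoin('https://account.table.core.windows.net', 'Tables') returns
+    'https://account.table.core.windows.net/Tables'.
+    """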
+ parsed = urlparse(base_url)
+ parsed = parsed._replace(path=parsed.path + '/' + stub_url)
+ return parsed.geturl()
+
+
+class StorageHeadersPolicy(HeadersPolicy):
+ request_id_header_name = 'x-ms-client-request-id'
+
+ def on_request(self, request):
+ # type: (PipelineRequest, Any) -> None
+ super(StorageHeadersPolicy, self).on_request(request)
+ current_time = format_date_time(time())
+ request.http_request.headers['x-ms-date'] = current_time
+ request.http_request.headers['Date'] = current_time
+ custom_id = request.context.options.pop('client_request_id', None)
+ request.http_request.headers['x-ms-client-request-id'] = custom_id or str(uuid.uuid1())
+
+ # def on_response(self, request, response):
+ # # raise exception if the echoed client request id from the service is not identical to the one we sent
+ # if self.request_id_header_name in response.http_response.headers:
+
+ # client_request_id = request.http_request.headers.get(self.request_id_header_name)
+
+ # if response.http_response.headers[self.request_id_header_name] != client_request_id:
+ # raise AzureError(
+ # "Echoed client request ID: {} does not match sent client request ID: {}. "
+ # "Service request ID: {}".format(
+ # response.http_response.headers[self.request_id_header_name], client_request_id,
+ # response.http_response.headers['x-ms-request-id']),
+ # response=response.http_response
+ # )
+
+
+class StorageHosts(SansIOHTTPPolicy):
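+    """Pipeline policy that records the configured primary/secondary host
+    mapping on the request context and rewrites the request URL when a specific
+    location mode is requested via the 'use_location' option.
+    """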
+
+ def __init__(self, hosts=None, **kwargs): # pylint: disable=unused-argument
+ self.hosts = hosts
+ super(StorageHosts, self).__init__()
+
+ def on_request(self, request):
+ # type: (PipelineRequest, Any) -> None
+ request.context.options['hosts'] = self.hosts
+ parsed_url = urlparse(request.http_request.url)
+
+ # Detect what location mode we're currently requesting with
+ location_mode = LocationMode.PRIMARY
+ for key, value in self.hosts.items():
+ if parsed_url.netloc == value:
+ location_mode = key
+
+ # See if a specific location mode has been specified, and if so, redirect
+ use_location = request.context.options.pop('use_location', None)
+ if use_location:
+ # Lock retries to the specific location
+ request.context.options['retry_to_secondary'] = False
+ if use_location not in self.hosts:
+ raise ValueError("Attempting to use undefined host location {}".format(use_location))
+ if use_location != location_mode:
+ # Update request URL to use the specified location
+ updated = parsed_url._replace(netloc=self.hosts[use_location])
+ request.http_request.url = updated.geturl()
+ location_mode = use_location
+
+ request.context.options['location_mode'] = location_mode
+
+
+class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
+ """A policy that logs HTTP request and response to the DEBUG logger.
+
+ This accepts both global configuration, and per-request level with "enable_http_logger"
+ """
+
+ def on_request(self, request):
+ # type: (PipelineRequest, Any) -> None
+ http_request = request.http_request
+ options = request.context.options
+ if options.pop("logging_enable", self.enable_http_logger):
+ request.context["logging_enable"] = True
+ if not _LOGGER.isEnabledFor(logging.DEBUG):
+ return
+
+ try:
+ log_url = http_request.url
+ query_params = http_request.query
+ if 'sig' in query_params:
+ log_url = log_url.replace(query_params['sig'], "sig=*****")
+ _LOGGER.debug("Request URL: %r", log_url)
+ _LOGGER.debug("Request method: %r", http_request.method)
+ _LOGGER.debug("Request headers:")
+ for header, value in http_request.headers.items():
+ if header.lower() == 'authorization':
+ value = '*****'
+ elif header.lower() == 'x-ms-copy-source' and 'sig' in value:
+ # take the url apart and scrub away the signed signature
+ scheme, netloc, path, params, query, fragment = urlparse(value)
+ parsed_qs = dict(parse_qsl(query))
+ parsed_qs['sig'] = '*****'
+
+ # the SAS needs to be put back together
+ value = urlunparse((scheme, netloc, path, params, urlencode(parsed_qs), fragment))
+
+ _LOGGER.debug(" %r: %r", header, value)
+ _LOGGER.debug("Request body:")
+
+ # We don't want to log the binary data of a file upload.
+ if isinstance(http_request.body, types.GeneratorType):
+ _LOGGER.debug("File upload")
+ else:
+ _LOGGER.debug(str(http_request.body))
+ except Exception as err: # pylint: disable=broad-except
+ _LOGGER.debug("Failed to log request: %r", err)
+
+ def on_response(self, request, response):
+ # type: (PipelineRequest, PipelineResponse, Any) -> None
+ if response.context.pop("logging_enable", self.enable_http_logger):
+ if not _LOGGER.isEnabledFor(logging.DEBUG):
+ return
+
+ try:
+ _LOGGER.debug("Response status: %r", response.http_response.status_code)
+ _LOGGER.debug("Response headers:")
+ for res_header, value in response.http_response.headers.items():
+ _LOGGER.debug(" %r: %r", res_header, value)
+
+ # We don't want to log binary data if the response is a file.
+ _LOGGER.debug("Response content:")
+ pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE)
+ header = response.http_response.headers.get('content-disposition')
+
+ if header and pattern.match(header):
+ filename = header.partition('=')[2]
+ _LOGGER.debug("File attachments: %s", filename)
+ elif response.http_response.headers.get("content-type", "").endswith("octet-stream"):
+ _LOGGER.debug("Body contains binary data.")
+ elif response.http_response.headers.get("content-type", "").startswith("image"):
+ _LOGGER.debug("Body contains image data.")
+ else:
+ if response.context.options.get('stream', False):
+ _LOGGER.debug("Body is streamable")
+ else:
+ _LOGGER.debug(response.http_response.text())
+ except Exception as err: # pylint: disable=broad-except
+ _LOGGER.debug("Failed to log response: %s", repr(err))
+
+
+class StorageRequestHook(SansIOHTTPPolicy):
+
+ def __init__(self, **kwargs): # pylint: disable=unused-argument
+ self._request_callback = kwargs.get('raw_request_hook')
+ super(StorageRequestHook, self).__init__()
+
+ def on_request(self, request):
+ # type: (PipelineRequest, **Any) -> PipelineResponse
+ request_callback = request.context.options.pop('raw_request_hook', self._request_callback)
+ if request_callback:
+ request_callback(request)
+
+
+class StorageResponseHook(HTTPPolicy):
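+    """Pipeline policy that maintains upload/download progress counters on the
+    request context and invokes an optional 'raw_response_hook' callback with
+    each response.
+    """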
+
+ def __init__(self, **kwargs): # pylint: disable=unused-argument
+ self._response_callback = kwargs.get('raw_response_hook')
+ super(StorageResponseHook, self).__init__()
+
+ def send(self, request):
+ # type: (PipelineRequest) -> PipelineResponse
+ data_stream_total = request.context.get('data_stream_total') or \
+ request.context.options.pop('data_stream_total', None)
+ download_stream_current = request.context.get('download_stream_current') or \
+ request.context.options.pop('download_stream_current', None)
+ upload_stream_current = request.context.get('upload_stream_current') or \
+ request.context.options.pop('upload_stream_current', None)
+ response_callback = request.context.get('response_callback') or \
+ request.context.options.pop('raw_response_hook', self._response_callback)
+
+ response = self.next.send(request)
+ will_retry = is_retry(response, request.context.options.get('mode'))
+ if not will_retry and download_stream_current is not None:
+ download_stream_current += int(response.http_response.headers.get('Content-Length', 0))
+ if data_stream_total is None:
+ content_range = response.http_response.headers.get('Content-Range')
+ if content_range:
+ data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1])
+ else:
+ data_stream_total = download_stream_current
+ elif not will_retry and upload_stream_current is not None:
+ upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
+ for pipeline_obj in [request, response]:
+ pipeline_obj.context['data_stream_total'] = data_stream_total
+ pipeline_obj.context['download_stream_current'] = download_stream_current
+ pipeline_obj.context['upload_stream_current'] = upload_stream_current
+ if response_callback:
+ response_callback(response)
+ request.context['response_callback'] = response_callback
+ return response
+
+
+class StorageContentValidation(SansIOHTTPPolicy):
+ """A simple policy that sends the given headers
+ with the request.
+
+ This will overwrite any headers already defined in the request.
+ """
+ header_name = 'Content-MD5'
+
+ def __init__(self, **kwargs): # pylint: disable=unused-argument
+ super(StorageContentValidation, self).__init__()
+
+ @staticmethod
+ def get_content_md5(data):
+ md5 = hashlib.md5()
+ if isinstance(data, bytes):
+ md5.update(data)
+ elif hasattr(data, 'read'):
+ pos = 0
+ try:
+ pos = data.tell()
+ except: # pylint: disable=bare-except
+ pass
+ for chunk in iter(lambda: data.read(4096), b""):
+ md5.update(chunk)
+ try:
+ data.seek(pos, SEEK_SET)
+ except (AttributeError, IOError):
+ raise ValueError("Data should be bytes or a seekable file-like object.")
+ else:
+ raise ValueError("Data should be bytes or a seekable file-like object.")
+
+ return md5.digest()
+
+ def on_request(self, request):
+ # type: (PipelineRequest, Any) -> None
+ validate_content = request.context.options.pop('validate_content', False)
+ if validate_content and request.http_request.method != 'GET':
+ computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data))
+ request.http_request.headers[self.header_name] = computed_md5
+ request.context['validate_content_md5'] = computed_md5
+ request.context['validate_content'] = validate_content
+
+ def on_response(self, request, response):
+ if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
+ computed_md5 = request.context.get('validate_content_md5') or \
+ encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
+ if response.http_response.headers['content-md5'] != computed_md5:
+ raise AzureError(
+ 'MD5 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'.format(
+ response.http_response.headers['content-md5'], computed_md5),
+ response=response.http_response
+ )
+
+
+class StorageRetryPolicy(HTTPPolicy):
+ """
+ The base class for Exponential and Linear retries containing shared code.
+ """
+
+ def __init__(self, **kwargs):
+ self.total_retries = kwargs.pop('retry_total', 10)
+ self.connect_retries = kwargs.pop('retry_connect', 3)
+ self.read_retries = kwargs.pop('retry_read', 3)
+ self.status_retries = kwargs.pop('retry_status', 3)
+ self.retry_to_secondary = kwargs.pop('retry_to_secondary', False)
+ super(StorageRetryPolicy, self).__init__()
+
+ def _set_next_host_location(self, settings, request): # pylint: disable=no-self-use
+ """
+ A function which sets the next host location on the request, if applicable.
+
+        :param dict settings:
+            The retry settings, containing the current location mode and the
+            configured host locations.
+        :param request:
+            The request to evaluate and possibly modify.
+ """
+ if settings['hosts'] and all(settings['hosts'].values()):
+ url = urlparse(request.url)
+ # If there's more than one possible location, retry to the alternative
+ if settings['mode'] == LocationMode.PRIMARY:
+ settings['mode'] = LocationMode.SECONDARY
+ else:
+ settings['mode'] = LocationMode.PRIMARY
+ updated = url._replace(netloc=settings['hosts'].get(settings['mode']))
+ request.url = updated.geturl()
+
+ def configure_retries(self, request): # pylint: disable=no-self-use
+ body_position = None
+ if hasattr(request.http_request.body, 'read'):
+ try:
+ body_position = request.http_request.body.tell()
+ except (AttributeError, UnsupportedOperation):
+ # if body position cannot be obtained, then retries will not work
+ pass
+ options = request.context.options
+ return {
+ 'total': options.pop("retry_total", self.total_retries),
+ 'connect': options.pop("retry_connect", self.connect_retries),
+ 'read': options.pop("retry_read", self.read_retries),
+ 'status': options.pop("retry_status", self.status_retries),
+ 'retry_secondary': options.pop("retry_to_secondary", self.retry_to_secondary),
+ 'mode': options.pop("location_mode", LocationMode.PRIMARY),
+ 'hosts': options.pop("hosts", None),
+ 'hook': options.pop("retry_hook", None),
+ 'body_position': body_position,
+ 'count': 0,
+ 'history': []
+ }
+
+ def get_backoff_time(self, settings): # pylint: disable=unused-argument,no-self-use
+ """ Formula for computing the current backoff.
+ Should be calculated by child class.
+
+ :rtype: float
+ """
+ return 0
+
+ def sleep(self, settings, transport):
+ backoff = self.get_backoff_time(settings)
+ if not backoff or backoff < 0:
+ return
+ transport.sleep(backoff)
+
+ def increment(self, settings, request, response=None, error=None):
+ """Increment the retry counters.
+
+ :param response: A pipeline response object.
+ :param error: An error encountered during the request, or
+ None if the response was received successfully.
+
+ :return: Whether the retry attempts are exhausted.
+ """
+ settings['total'] -= 1
+
+ if error and isinstance(error, ServiceRequestError):
+ # Errors when we're fairly sure that the server did not receive the
+ # request, so it should be safe to retry.
+ settings['connect'] -= 1
+ settings['history'].append(RequestHistory(request, error=error))
+
+ elif error and isinstance(error, ServiceResponseError):
+ # Errors that occur after the request has been started, so we should
+ # assume that the server began processing it.
+ settings['read'] -= 1
+ settings['history'].append(RequestHistory(request, error=error))
+
+ else:
+            # Incrementing because of a server error like a 500 in
+            # status_forcelist when the given method is in the whitelist.
+ if response:
+ settings['status'] -= 1
+ settings['history'].append(RequestHistory(request, http_response=response))
+
+ if not is_exhausted(settings):
+ if request.method not in ['PUT'] and settings['retry_secondary']:
+ self._set_next_host_location(settings, request)
+
+ # rewind the request body if it is a stream
+ if request.body and hasattr(request.body, 'read'):
+                # if no position was saved, the body cannot be rewound, so retry cannot proceed
+ if settings['body_position'] is None:
+ return False
+ try:
+ # attempt to rewind the body to the initial position
+ request.body.seek(settings['body_position'], SEEK_SET)
+ except (UnsupportedOperation, ValueError):
+ # if body is not seekable, then retry would not work
+ return False
+ settings['count'] += 1
+ return True
+ return False
+
+ def send(self, request):
+ retries_remaining = True
+ response = None
+ retry_settings = self.configure_retries(request)
+ while retries_remaining:
+ try:
+ response = self.next.send(request)
+ if is_retry(response, retry_settings['mode']):
+ retries_remaining = self.increment(
+ retry_settings,
+ request=request.http_request,
+ response=response.http_response)
+ if retries_remaining:
+ retry_hook(
+ retry_settings,
+ request=request.http_request,
+ response=response.http_response,
+ error=None)
+ self.sleep(retry_settings, request.context.transport)
+ continue
+ break
+ except AzureError as err:
+ retries_remaining = self.increment(
+ retry_settings, request=request.http_request, error=err)
+ if retries_remaining:
+ retry_hook(
+ retry_settings,
+ request=request.http_request,
+ response=None,
+ error=err)
+ self.sleep(retry_settings, request.context.transport)
+ continue
+ raise err
+ if retry_settings['history']:
+ response.context['history'] = retry_settings['history']
+ response.http_response.location_mode = retry_settings['mode']
+ return response
+
+
+class ExponentialRetry(StorageRetryPolicy):
+ """Exponential retry."""
+
+ def __init__(self, initial_backoff=15, increment_base=3, retry_total=3,
+ retry_to_secondary=False, random_jitter_range=3, **kwargs):
+ '''
+ Constructs an Exponential retry object. The initial_backoff is used for
+ the first retry. Subsequent retries are retried after initial_backoff +
+        increment_base^retry_count seconds. For example, by default the first retry
+ occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the
+ third after (15+3^2) = 24 seconds.
+
+ :param int initial_backoff:
+ The initial backoff interval, in seconds, for the first retry.
+ :param int increment_base:
+ The base, in seconds, to increment the initial_backoff by after the
+ first retry.
+        :param int retry_total:
+ The maximum number of retry attempts.
+ :param bool retry_to_secondary:
+ Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+ can be handled.
+ :param int random_jitter_range:
+ A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in a back-off interval x that varies between x-3 and x+3.
+ '''
+ self.initial_backoff = initial_backoff
+ self.increment_base = increment_base
+ self.random_jitter_range = random_jitter_range
+ super(ExponentialRetry, self).__init__(
+ retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+ def get_backoff_time(self, settings):
+ """
+ Calculates how long to sleep before retrying.
+
+ :return:
+            A float indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: float or None
+ """
+ random_generator = random.Random()
+ backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+ random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
+ random_range_end = backoff + self.random_jitter_range
+ return random_generator.uniform(random_range_start, random_range_end)
+
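+# Illustrative usage of these retry policies (an assumption, not shipped code:
+# the table clients are expected to accept a 'retry_policy' keyword, as the
+# blob and queue clients do):
+#
+#   retry = ExponentialRetry(initial_backoff=5, increment_base=2, retry_total=4)
+#   service = TableServiceClient(account_url, credential=credential, retry_policy=retry)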
+
+class LinearRetry(StorageRetryPolicy):
+ """Linear retry."""
+
+ def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs):
+ """
+ Constructs a Linear retry object.
+
+ :param int backoff:
+ The backoff interval, in seconds, between retries.
+        :param int retry_total:
+ The maximum number of retry attempts.
+ :param bool retry_to_secondary:
+ Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+ can be handled.
+ :param int random_jitter_range:
+ A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in a back-off interval x that varies between x-3 and x+3.
+ """
+ self.backoff = backoff
+ self.random_jitter_range = random_jitter_range
+ super(LinearRetry, self).__init__(
+ retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+ def get_backoff_time(self, settings):
+ """
+ Calculates how long to sleep before retrying.
+
+ :return:
+            A float indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: float or None
+ """
+ random_generator = random.Random()
+ # the backoff interval normally does not change, however there is the possibility
+ # that it was modified by accessing the property directly after initializing the object
+ random_range_start = self.backoff - self.random_jitter_range \
+ if self.backoff > self.random_jitter_range else 0
+ random_range_end = self.backoff + self.random_jitter_range
+ return random_generator.uniform(random_range_start, random_range_end)
diff --git a/sdk/table/azure/azure_table/_shared/policies_async.py b/sdk/table/azure/azure_table/_shared/policies_async.py
new file mode 100644
index 000000000000..c0a44767b177
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/policies_async.py
@@ -0,0 +1,219 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import asyncio
+import random
+import logging
+from typing import Any, TYPE_CHECKING
+
+from azure.core.pipeline.policies import AsyncHTTPPolicy
+from azure.core.exceptions import AzureError
+
+from .policies import is_retry, StorageRetryPolicy
+
+if TYPE_CHECKING:
+ from azure.core.pipeline import PipelineRequest, PipelineResponse
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def retry_hook(settings, **kwargs):
+ if settings['hook']:
+        # iscoroutinefunction (not iscoroutine): the hook is a callable, and an
+        # async callable must be awaited after it is called.
+        if asyncio.iscoroutinefunction(settings['hook']):
+ await settings['hook'](
+ retry_count=settings['count'] - 1,
+ location_mode=settings['mode'],
+ **kwargs)
+ else:
+ settings['hook'](
+ retry_count=settings['count'] - 1,
+ location_mode=settings['mode'],
+ **kwargs)
+
+
+class AsyncStorageResponseHook(AsyncHTTPPolicy):
+
+ def __init__(self, **kwargs): # pylint: disable=unused-argument
+ self._response_callback = kwargs.get('raw_response_hook')
+ super(AsyncStorageResponseHook, self).__init__()
+
+ async def send(self, request):
+ # type: (PipelineRequest) -> PipelineResponse
+ data_stream_total = request.context.get('data_stream_total') or \
+ request.context.options.pop('data_stream_total', None)
+ download_stream_current = request.context.get('download_stream_current') or \
+ request.context.options.pop('download_stream_current', None)
+ upload_stream_current = request.context.get('upload_stream_current') or \
+ request.context.options.pop('upload_stream_current', None)
+ response_callback = request.context.get('response_callback') or \
+ request.context.options.pop('raw_response_hook', self._response_callback)
+
+ response = await self.next.send(request)
+ await response.http_response.load_body()
+
+ will_retry = is_retry(response, request.context.options.get('mode'))
+ if not will_retry and download_stream_current is not None:
+ download_stream_current += int(response.http_response.headers.get('Content-Length', 0))
+ if data_stream_total is None:
+ content_range = response.http_response.headers.get('Content-Range')
+ if content_range:
+ data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1])
+ else:
+ data_stream_total = download_stream_current
+ elif not will_retry and upload_stream_current is not None:
+ upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
+ for pipeline_obj in [request, response]:
+ pipeline_obj.context['data_stream_total'] = data_stream_total
+ pipeline_obj.context['download_stream_current'] = download_stream_current
+ pipeline_obj.context['upload_stream_current'] = upload_stream_current
+ if response_callback:
+            if asyncio.iscoroutinefunction(response_callback):
+ await response_callback(response)
+ else:
+ response_callback(response)
+ request.context['response_callback'] = response_callback
+ return response
+
+
+class AsyncStorageRetryPolicy(StorageRetryPolicy):
+ """
+ The base class for Exponential and Linear retries containing shared code.
+ """
+
+ async def sleep(self, settings, transport):
+ backoff = self.get_backoff_time(settings)
+ if not backoff or backoff < 0:
+ return
+ await transport.sleep(backoff)
+
+ async def send(self, request):
+ retries_remaining = True
+ response = None
+ retry_settings = self.configure_retries(request)
+ while retries_remaining:
+ try:
+ response = await self.next.send(request)
+ if is_retry(response, retry_settings['mode']):
+ retries_remaining = self.increment(
+ retry_settings,
+ request=request.http_request,
+ response=response.http_response)
+ if retries_remaining:
+ await retry_hook(
+ retry_settings,
+ request=request.http_request,
+ response=response.http_response,
+ error=None)
+ await self.sleep(retry_settings, request.context.transport)
+ continue
+ break
+ except AzureError as err:
+ retries_remaining = self.increment(
+ retry_settings, request=request.http_request, error=err)
+ if retries_remaining:
+ await retry_hook(
+ retry_settings,
+ request=request.http_request,
+ response=None,
+ error=err)
+ await self.sleep(retry_settings, request.context.transport)
+ continue
+ raise err
+ if retry_settings['history']:
+ response.context['history'] = retry_settings['history']
+ response.http_response.location_mode = retry_settings['mode']
+ return response
+
+
+class ExponentialRetry(AsyncStorageRetryPolicy):
+ """Exponential retry."""
+
+ def __init__(self, initial_backoff=15, increment_base=3, retry_total=3,
+ retry_to_secondary=False, random_jitter_range=3, **kwargs):
+ '''
+ Constructs an Exponential retry object. The initial_backoff is used for
+ the first retry. Subsequent retries are retried after initial_backoff +
+        increment_base^retry_count seconds. For example, by default the first retry
+ occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the
+ third after (15+3^2) = 24 seconds.
+
+ :param int initial_backoff:
+ The initial backoff interval, in seconds, for the first retry.
+ :param int increment_base:
+ The base, in seconds, to increment the initial_backoff by after the
+ first retry.
+        :param int retry_total:
+ The maximum number of retry attempts.
+ :param bool retry_to_secondary:
+ Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+ can be handled.
+ :param int random_jitter_range:
+ A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in a back-off interval x that varies between x-3 and x+3.
+ '''
+ self.initial_backoff = initial_backoff
+ self.increment_base = increment_base
+ self.random_jitter_range = random_jitter_range
+ super(ExponentialRetry, self).__init__(
+ retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+ def get_backoff_time(self, settings):
+ """
+ Calculates how long to sleep before retrying.
+
+ :return:
+            A float indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: float or None
+ """
+ random_generator = random.Random()
+ backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+ random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
+ random_range_end = backoff + self.random_jitter_range
+ return random_generator.uniform(random_range_start, random_range_end)
+
+
+class LinearRetry(AsyncStorageRetryPolicy):
+ """Linear retry."""
+
+ def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs):
+ """
+ Constructs a Linear retry object.
+
+ :param int backoff:
+ The backoff interval, in seconds, between retries.
+        :param int retry_total:
+ The maximum number of retry attempts.
+ :param bool retry_to_secondary:
+ Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+ can be handled.
+ :param int random_jitter_range:
+ A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in a back-off interval x that varies between x-3 and x+3.
+ """
+ self.backoff = backoff
+ self.random_jitter_range = random_jitter_range
+ super(LinearRetry, self).__init__(
+ retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+ def get_backoff_time(self, settings):
+ """
+ Calculates how long to sleep before retrying.
+
+ :return:
+            A float indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: float or None
+ """
+ random_generator = random.Random()
+ # the backoff interval normally does not change, however there is the possibility
+ # that it was modified by accessing the property directly after initializing the object
+ random_range_start = self.backoff - self.random_jitter_range \
+ if self.backoff > self.random_jitter_range else 0
+ random_range_end = self.backoff + self.random_jitter_range
+ return random_generator.uniform(random_range_start, random_range_end)
diff --git a/sdk/table/azure/azure_table/_shared/request_handlers.py b/sdk/table/azure/azure_table/_shared/request_handlers.py
new file mode 100644
index 000000000000..2ce74d43db21
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/request_handlers.py
@@ -0,0 +1,147 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from typing import ( # pylint: disable=unused-import
+ Union, Optional, Any, Iterable, Dict, List, Type, Tuple,
+ TYPE_CHECKING
+)
+
+import logging
+from os import fstat
+from io import (SEEK_END, SEEK_SET, UnsupportedOperation)
+
+import isodate
+
+from azure.core.exceptions import raise_with_traceback
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def serialize_iso(attr):
+ """Serialize Datetime object into ISO-8601 formatted string.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: str
+ :raises: ValueError if format invalid.
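+
+    Example (illustrative):
+        >>> import datetime
+        >>> serialize_iso(datetime.datetime(2020, 5, 1, 12, 30, 5))
+        '2020-05-01T12:30:05Z'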
+ """
+ if not attr:
+ return None
+ if isinstance(attr, str):
+ attr = isodate.parse_datetime(attr)
+ try:
+ utc = attr.utctimetuple()
+ if utc.tm_year > 9999 or utc.tm_year < 1:
+ raise OverflowError("Hit max or min date")
+
+ date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+ utc.tm_year, utc.tm_mon, utc.tm_mday,
+ utc.tm_hour, utc.tm_min, utc.tm_sec)
+ return date + 'Z'
+ except (ValueError, OverflowError) as err:
+ msg = "Unable to serialize datetime object."
+ raise_with_traceback(ValueError, msg, err)
+ except AttributeError as err:
+ msg = "ISO-8601 object must be valid Datetime object."
+ raise_with_traceback(TypeError, msg, err)
+
+
+def get_length(data):
+ length = None
+ # Check if object implements the __len__ method, covers most input cases such as bytearray.
+ try:
+ length = len(data)
+ except: # pylint: disable=bare-except
+ pass
+
+ if not length:
+ # Check if the stream is a file-like stream object.
+ # If so, calculate the size using the file descriptor.
+ try:
+ fileno = data.fileno()
+ except (AttributeError, UnsupportedOperation):
+ pass
+ else:
+ try:
+ return fstat(fileno).st_size
+ except OSError:
+                # Not a valid fileno; 'requests' may have returned a socket
+                # number instead.
+ pass
+
+ # If the stream is seekable and tell() is implemented, calculate the stream size.
+ try:
+ current_position = data.tell()
+ data.seek(0, SEEK_END)
+ length = data.tell() - current_position
+ data.seek(current_position, SEEK_SET)
+ except (AttributeError, UnsupportedOperation):
+ pass
+
+ return length
+
+
+def read_length(data):
+ try:
+ if hasattr(data, 'read'):
+ read_data = b''
+ for chunk in iter(lambda: data.read(4096), b""):
+ read_data += chunk
+ return len(read_data), read_data
+ if hasattr(data, '__iter__'):
+ read_data = b''
+ for chunk in data:
+ read_data += chunk
+ return len(read_data), read_data
+ except: # pylint: disable=bare-except
+ pass
+ raise ValueError("Unable to calculate content length, please specify.")
+
+
+def validate_and_format_range_headers(
+ start_range, end_range, start_range_required=True,
+ end_range_required=True, check_content_md5=False, align_to_page=False):
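+    """Build a 'Range' header value (and an optional MD5-validation flag) from
+    byte offsets, e.g. (illustrative) validate_and_format_range_headers(0, 511)
+    returns ('bytes=0-511', None).
+    """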
+ # If end range is provided, start range must be provided
+ if (start_range_required or end_range is not None) and start_range is None:
+ raise ValueError("start_range value cannot be None.")
+ if end_range_required and end_range is None:
+ raise ValueError("end_range value cannot be None.")
+
+ # Page ranges must be 512 aligned
+ if align_to_page:
+ if start_range is not None and start_range % 512 != 0:
+ raise ValueError("Invalid page blob start_range: {0}. "
+ "The size must be aligned to a 512-byte boundary.".format(start_range))
+ if end_range is not None and end_range % 512 != 511:
+ raise ValueError("Invalid page blob end_range: {0}. "
+ "The size must be aligned to a 512-byte boundary.".format(end_range))
+
+ # Format based on whether end_range is present
+ range_header = None
+ if end_range is not None:
+ range_header = 'bytes={0}-{1}'.format(start_range, end_range)
+ elif start_range is not None:
+ range_header = "bytes={0}-".format(start_range)
+
+ # Content MD5 can only be provided for a complete range less than 4MB in size
+ range_validation = None
+ if check_content_md5:
+ if start_range is None or end_range is None:
+ raise ValueError("Both start and end range requied for MD5 content validation.")
+ if end_range - start_range > 4 * 1024 * 1024:
+ raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.")
+ range_validation = 'true'
+
+ return range_header, range_validation
+
+
+def add_metadata_headers(metadata=None):
+ # type: (Optional[Dict[str, str]]) -> Dict[str, str]
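+    """Prefix each metadata key with 'x-ms-meta-' for use as a request header,
+    e.g. (illustrative) {'project': 'tables'} becomes {'x-ms-meta-project': 'tables'}.
+    """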
+ headers = {}
+ if metadata:
+ for key, value in metadata.items():
+ headers['x-ms-meta-{}'.format(key)] = value
+ return headers
diff --git a/sdk/table/azure/azure_table/_shared/response_handlers.py b/sdk/table/azure/azure_table/_shared/response_handlers.py
new file mode 100644
index 000000000000..58502402477e
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/response_handlers.py
@@ -0,0 +1,159 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from typing import ( # pylint: disable=unused-import
+ Union, Optional, Any, Iterable, Dict, List, Type, Tuple,
+ TYPE_CHECKING
+)
+import logging
+
+from azure.core.pipeline.policies import ContentDecodePolicy
+from azure.core.exceptions import (
+ HttpResponseError,
+ ResourceNotFoundError,
+ ResourceModifiedError,
+ ResourceExistsError,
+ ClientAuthenticationError,
+ DecodeError)
+
+from .parser import _to_utc_datetime
+from .models import StorageErrorCode, UserDelegationKey, get_enum_value
+
+
+if TYPE_CHECKING:
+ from datetime import datetime
+ from azure.core.exceptions import AzureError
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class PartialBatchErrorException(HttpResponseError):
+ """There is a partial failure in batch operations.
+
+ :param str message: The message of the exception.
+ :param response: Server response to be deserialized.
+ :param list parts: A list of the parts in multipart response.
+ """
+
+ def __init__(self, message, response, parts):
+ self.parts = parts
+ super(PartialBatchErrorException, self).__init__(message=message, response=response)
+
+
+def parse_length_from_content_range(content_range):
+ '''
+ Parses the blob length from the content range header: bytes 1-3/65537
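+
+    Example (illustrative):
+        >>> parse_length_from_content_range('bytes 1-3/65537')
+        65537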
+ '''
+ if content_range is None:
+ return None
+
+ # First, split in space and take the second half: '1-3/65537'
+ # Next, split on slash and take the second half: '65537'
+ # Finally, convert to an int: 65537
+ return int(content_range.split(' ', 1)[1].split('/', 1)[1])
+
+
+def normalize_headers(headers):
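+    """Strip any 'x-ms-' prefix and convert header names to snake_case,
+    e.g. (illustrative) {'x-ms-meta-name': 'foo'} becomes {'meta_name': 'foo'}.
+    """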
+ normalized = {}
+ for key, value in headers.items():
+ if key.startswith('x-ms-'):
+ key = key[5:]
+ normalized[key.lower().replace('-', '_')] = get_enum_value(value)
+ return normalized
+
+
+def deserialize_metadata(response, obj, headers): # pylint: disable=unused-argument
+ raw_metadata = {k: v for k, v in response.headers.items() if k.startswith("x-ms-meta-")}
+ return {k[10:]: v for k, v in raw_metadata.items()}
+
+
+def return_response_headers(response, deserialized, response_headers): # pylint: disable=unused-argument
+ return normalize_headers(response_headers)
+
+
+def return_headers_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument
+ return normalize_headers(response_headers), deserialized
+
+
+def return_context_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument
+ return response.http_response.location_mode, deserialized, response_headers
+
+
+def process_storage_error(storage_error):
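+    """Map a raw storage error to the most specific azure-core exception type,
+    using the 'x-ms-error-code' header and any XML error body, and raise it with
+    the error code and additional data attached.
+    """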
+ raise_error = HttpResponseError
+ error_code = storage_error.response.headers.get('x-ms-error-code')
+ error_message = storage_error.message
+ additional_data = {}
+ try:
+ error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response)
+ if error_body:
+ for info in error_body.iter():
+ if info.tag.lower() == 'code':
+ error_code = info.text
+ elif info.tag.lower() == 'message':
+ error_message = info.text
+ else:
+ additional_data[info.tag] = info.text
+ except DecodeError:
+ pass
+
+ try:
+ if error_code:
+ error_code = StorageErrorCode(error_code)
+ if error_code in [StorageErrorCode.condition_not_met,
+ StorageErrorCode.blob_overwritten]:
+ raise_error = ResourceModifiedError
+ if error_code in [StorageErrorCode.invalid_authentication_info,
+ StorageErrorCode.authentication_failed]:
+ raise_error = ClientAuthenticationError
+ if error_code in [StorageErrorCode.resource_not_found,
+ StorageErrorCode.cannot_verify_copy_source,
+ StorageErrorCode.blob_not_found,
+ StorageErrorCode.queue_not_found,
+ StorageErrorCode.container_not_found,
+ StorageErrorCode.parent_not_found,
+ StorageErrorCode.share_not_found]:
+ raise_error = ResourceNotFoundError
+ if error_code in [StorageErrorCode.account_already_exists,
+ StorageErrorCode.account_being_created,
+ StorageErrorCode.resource_already_exists,
+ StorageErrorCode.resource_type_mismatch,
+ StorageErrorCode.blob_already_exists,
+ StorageErrorCode.queue_already_exists,
+ StorageErrorCode.container_already_exists,
+ StorageErrorCode.container_being_deleted,
+ StorageErrorCode.queue_being_deleted,
+ StorageErrorCode.share_already_exists,
+ StorageErrorCode.share_being_deleted]:
+ raise_error = ResourceExistsError
+ except ValueError:
+ # Got an unknown error code
+ pass
+
+ try:
+ error_message += "\nErrorCode:{}".format(error_code.value)
+ except AttributeError:
+ error_message += "\nErrorCode:{}".format(error_code)
+ for name, info in additional_data.items():
+ error_message += "\n{}:{}".format(name, info)
+
+ error = raise_error(message=error_message, response=storage_error.response)
+ error.error_code = error_code
+ error.additional_info = additional_data
+ raise error
+
+
+def parse_to_internal_user_delegation_key(service_user_delegation_key):
+ internal_user_delegation_key = UserDelegationKey()
+ internal_user_delegation_key.signed_oid = service_user_delegation_key.signed_oid
+ internal_user_delegation_key.signed_tid = service_user_delegation_key.signed_tid
+ internal_user_delegation_key.signed_start = _to_utc_datetime(service_user_delegation_key.signed_start)
+ internal_user_delegation_key.signed_expiry = _to_utc_datetime(service_user_delegation_key.signed_expiry)
+ internal_user_delegation_key.signed_service = service_user_delegation_key.signed_service
+ internal_user_delegation_key.signed_version = service_user_delegation_key.signed_version
+ internal_user_delegation_key.value = service_user_delegation_key.value
+ return internal_user_delegation_key
diff --git a/sdk/table/azure/azure_table/_shared/shared_access_signature.py b/sdk/table/azure/azure_table/_shared/shared_access_signature.py
new file mode 100644
index 000000000000..f8344b0b657c
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/shared_access_signature.py
@@ -0,0 +1,271 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from datetime import date
+
+from .parser import _str, _to_utc_datetime
+from .constants import X_MS_VERSION
+from . import sign_string, url_quote
+
+
+def generate_account_sas(
+ account_name, # type: str
+ account_key, # type: str
+ resource_types, # type: Union[ResourceTypes, str]
+ permission, # type: Union[AccountSasPermissions, str]
+ expiry, # type: Optional[Union[datetime, str]]
+ **kwargs # type: Any
+ ): # type: (...) -> str
+ """Generates a shared access signature for the DataLake service.
+
+ Use the returned signature as the credential parameter of any DataLakeServiceClient,
+ FileSystemClient, DataLakeDirectoryClient or DataLakeFileClient.
+
+ :param str account_name:
+ The storage account name used to generate the shared access signature.
+ :param str account_key:
+ The access key to generate the shared access signature.
+ :param resource_types:
+ Specifies the resource types that are accessible with the account SAS.
+    :type resource_types: str or ~azure.azure_table.ResourceTypes
+ :param permission:
+ The permissions associated with the shared access signature. The
+ user is restricted to operations allowed by the permissions.
+ Required unless an id is given referencing a stored access policy
+ which contains this field. This field must be omitted if it has been
+ specified in an associated stored access policy.
+    :type permission: str or ~azure.azure_table.AccountSasPermissions
+ :param expiry:
+ The time at which the shared access signature becomes invalid.
+ Required unless an id is given referencing a stored access policy
+ which contains this field. This field must be omitted if it has
+ been specified in an associated stored access policy. Azure will always
+ convert values to UTC. If a date is passed in without timezone info, it
+ is assumed to be UTC.
+ :type expiry: ~datetime.datetime or str
+ :keyword start:
+ The time at which the shared access signature becomes valid. If
+ omitted, start time for this call is assumed to be the time when the
+ storage service receives the request. Azure will always convert values
+ to UTC. If a date is passed in without timezone info, it is assumed to
+ be UTC.
+ :paramtype start: ~datetime.datetime or str
+ :keyword str ip:
+ Specifies an IP address or a range of IP addresses from which to accept requests.
+ If the IP address from which the request originates does not match the IP address
+ or address range specified on the SAS token, the request is not authenticated.
+ For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS
+ restricts the request to those IP addresses.
+ :keyword str protocol:
+ Specifies the protocol permitted for a request made. The default value is https.
+ :return: A Shared Access Signature (sas) token.
+ :rtype: str
+ """
+    # Delegate to the SharedAccessSignature helper below; a call to
+    # generate_account_sas here would recurse forever.
+    sas = SharedAccessSignature(account_name, account_key)
+    return sas.generate_account(
+        services='t',  # assumed: 't' scopes an account SAS to the Table service
+        resource_types=resource_types,
+        permission=permission,
+        expiry=expiry,
+        start=kwargs.pop('start', None),
+        ip=kwargs.pop('ip', None),
+        protocol=kwargs.pop('protocol', None))
+
+class QueryStringConstants(object):
+ SIGNED_SIGNATURE = 'sig'
+ SIGNED_PERMISSION = 'sp'
+ SIGNED_START = 'st'
+ SIGNED_EXPIRY = 'se'
+ SIGNED_RESOURCE = 'sr'
+ SIGNED_IDENTIFIER = 'si'
+ SIGNED_IP = 'sip'
+ SIGNED_PROTOCOL = 'spr'
+ SIGNED_VERSION = 'sv'
+ SIGNED_CACHE_CONTROL = 'rscc'
+ SIGNED_CONTENT_DISPOSITION = 'rscd'
+ SIGNED_CONTENT_ENCODING = 'rsce'
+ SIGNED_CONTENT_LANGUAGE = 'rscl'
+ SIGNED_CONTENT_TYPE = 'rsct'
+ START_PK = 'spk'
+ START_RK = 'srk'
+ END_PK = 'epk'
+ END_RK = 'erk'
+ SIGNED_RESOURCE_TYPES = 'srt'
+ SIGNED_SERVICES = 'ss'
+ SIGNED_OID = 'skoid'
+ SIGNED_TID = 'sktid'
+ SIGNED_KEY_START = 'skt'
+ SIGNED_KEY_EXPIRY = 'ske'
+ SIGNED_KEY_SERVICE = 'sks'
+ SIGNED_KEY_VERSION = 'skv'
+
+ @staticmethod
+ def to_list():
+ return [
+ QueryStringConstants.SIGNED_SIGNATURE,
+ QueryStringConstants.SIGNED_PERMISSION,
+ QueryStringConstants.SIGNED_START,
+ QueryStringConstants.SIGNED_EXPIRY,
+ QueryStringConstants.SIGNED_RESOURCE,
+ QueryStringConstants.SIGNED_IDENTIFIER,
+ QueryStringConstants.SIGNED_IP,
+ QueryStringConstants.SIGNED_PROTOCOL,
+ QueryStringConstants.SIGNED_VERSION,
+ QueryStringConstants.SIGNED_CACHE_CONTROL,
+ QueryStringConstants.SIGNED_CONTENT_DISPOSITION,
+ QueryStringConstants.SIGNED_CONTENT_ENCODING,
+ QueryStringConstants.SIGNED_CONTENT_LANGUAGE,
+ QueryStringConstants.SIGNED_CONTENT_TYPE,
+ QueryStringConstants.START_PK,
+ QueryStringConstants.START_RK,
+ QueryStringConstants.END_PK,
+ QueryStringConstants.END_RK,
+ QueryStringConstants.SIGNED_RESOURCE_TYPES,
+ QueryStringConstants.SIGNED_SERVICES,
+ QueryStringConstants.SIGNED_OID,
+ QueryStringConstants.SIGNED_TID,
+ QueryStringConstants.SIGNED_KEY_START,
+ QueryStringConstants.SIGNED_KEY_EXPIRY,
+ QueryStringConstants.SIGNED_KEY_SERVICE,
+ QueryStringConstants.SIGNED_KEY_VERSION,
+ ]
+
+
+class SharedAccessSignature(object):
+ '''
+ Provides a factory for creating account access
+ signature tokens with an account name and account key. Users can either
+ use the factory or can construct the appropriate service and use the
+ generate_*_shared_access_signature method directly.
+ '''
+
+ def __init__(self, account_name, account_key, x_ms_version=X_MS_VERSION):
+ '''
+ :param str account_name:
+ The storage account name used to generate the shared access signatures.
+ :param str account_key:
+            The access key to generate the shared access signatures.
+ :param str x_ms_version:
+ The service version used to generate the shared access signatures.
+ '''
+ self.account_name = account_name
+ self.account_key = account_key
+ self.x_ms_version = x_ms_version
+
+ def generate_account(self, services, resource_types, permission, expiry, start=None,
+ ip=None, protocol=None):
+ '''
+ Generates a shared access signature for the account.
+ Use the returned signature with the sas_token parameter of the service
+ or to create a new account object.
+
+ :param ResourceTypes resource_types:
+ Specifies the resource types that are accessible with the account
+ SAS. You can combine values to provide access to more than one
+ resource type.
+ :param AccountSasPermissions permission:
+ The permissions associated with the shared access signature. The
+ user is restricted to operations allowed by the permissions.
+ Required unless an id is given referencing a stored access policy
+ which contains this field. This field must be omitted if it has been
+ specified in an associated stored access policy. You can combine
+ values to provide more than one permission.
+ :param expiry:
+ The time at which the shared access signature becomes invalid.
+ Required unless an id is given referencing a stored access policy
+ which contains this field. This field must be omitted if it has
+ been specified in an associated stored access policy. Azure will always
+ convert values to UTC. If a date is passed in without timezone info, it
+ is assumed to be UTC.
+ :type expiry: datetime or str
+ :param start:
+ The time at which the shared access signature becomes valid. If
+ omitted, start time for this call is assumed to be the time when the
+ storage service receives the request. Azure will always convert values
+ to UTC. If a date is passed in without timezone info, it is assumed to
+ be UTC.
+ :type start: datetime or str
+ :param str ip:
+ Specifies an IP address or a range of IP addresses from which to accept requests.
+ If the IP address from which the request originates does not match the IP address
+ or address range specified on the SAS token, the request is not authenticated.
+ For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS
+ restricts the request to those IP addresses.
+ :param str protocol:
+            Specifies the protocol permitted for a request made. The default value
+            is https,http.
+ '''
+ sas = _SharedAccessHelper()
+ sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version)
+ sas.add_account(services, resource_types)
+ sas.add_account_signature(self.account_name, self.account_key)
+
+ return sas.get_token()
+
+
+class _SharedAccessHelper(object):
+ def __init__(self):
+ self.query_dict = {}
+
+    def _add_query(self, name, val):
+        if val:
+            self.query_dict[name] = _str(val)
+
+ def add_base(self, permission, expiry, start, ip, protocol, x_ms_version):
+ if isinstance(start, date):
+ start = _to_utc_datetime(start)
+
+ if isinstance(expiry, date):
+ expiry = _to_utc_datetime(expiry)
+
+ self._add_query(QueryStringConstants.SIGNED_START, start)
+ self._add_query(QueryStringConstants.SIGNED_EXPIRY, expiry)
+ self._add_query(QueryStringConstants.SIGNED_PERMISSION, permission)
+ self._add_query(QueryStringConstants.SIGNED_IP, ip)
+ self._add_query(QueryStringConstants.SIGNED_PROTOCOL, protocol)
+ self._add_query(QueryStringConstants.SIGNED_VERSION, x_ms_version)
+
+ def add_resource(self, resource):
+ self._add_query(QueryStringConstants.SIGNED_RESOURCE, resource)
+
+ def add_id(self, policy_id):
+ self._add_query(QueryStringConstants.SIGNED_IDENTIFIER, policy_id)
+
+ def add_account(self, services, resource_types):
+ self._add_query(QueryStringConstants.SIGNED_SERVICES, services)
+ self._add_query(QueryStringConstants.SIGNED_RESOURCE_TYPES, resource_types)
+
+ def add_override_response_headers(self, cache_control,
+ content_disposition,
+ content_encoding,
+ content_language,
+ content_type):
+ self._add_query(QueryStringConstants.SIGNED_CACHE_CONTROL, cache_control)
+ self._add_query(QueryStringConstants.SIGNED_CONTENT_DISPOSITION, content_disposition)
+ self._add_query(QueryStringConstants.SIGNED_CONTENT_ENCODING, content_encoding)
+ self._add_query(QueryStringConstants.SIGNED_CONTENT_LANGUAGE, content_language)
+ self._add_query(QueryStringConstants.SIGNED_CONTENT_TYPE, content_type)
+
+ def add_account_signature(self, account_name, account_key):
+ def get_value_to_append(query):
+ return_value = self.query_dict.get(query) or ''
+ return return_value + '\n'
+
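+        # The field order in the string-to-sign is fixed by the storage
+        # service's account SAS specification; reordering any line below
+        # produces a signature the service will reject.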
+ string_to_sign = \
+ (account_name + '\n' +
+ get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) +
+ get_value_to_append(QueryStringConstants.SIGNED_SERVICES) +
+ get_value_to_append(QueryStringConstants.SIGNED_RESOURCE_TYPES) +
+ get_value_to_append(QueryStringConstants.SIGNED_START) +
+ get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) +
+ get_value_to_append(QueryStringConstants.SIGNED_IP) +
+ get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) +
+ get_value_to_append(QueryStringConstants.SIGNED_VERSION))
+
+ self._add_query(QueryStringConstants.SIGNED_SIGNATURE,
+ sign_string(account_key, string_to_sign))
+
+ def get_token(self):
+ return '&'.join(['{0}={1}'.format(n, url_quote(v)) for n, v in self.query_dict.items() if v is not None])
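
Reviewer note: a minimal sketch of how the pieces in this module compose. The import path follows the package layout above; the account name and key are placeholders, and the short permission/resource-type strings ('r', 'o') are assumed stand-ins for the corresponding model classes:

    from datetime import datetime, timedelta

    from azure.azure_table._shared.shared_access_signature import generate_account_sas

    sas_token = generate_account_sas(
        account_name="example",                                         # placeholder
        account_key="fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK==",  # placeholder
        resource_types="o",   # object-level resources (entities)
        permission="r",       # read-only
        expiry=datetime.utcnow() + timedelta(hours=1),
        protocol="https",
    )

    # The token is an ordinary URL-encoded query string, so its fields
    # (sv, ss, srt, sp, se, spr, sig) can be inspected directly:
    for pair in sas_token.split("&"):
        print(pair)
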
diff --git a/sdk/table/azure/azure_table/_shared/uploads.py b/sdk/table/azure/azure_table/_shared/uploads.py
new file mode 100644
index 000000000000..13b814e11040
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/uploads.py
@@ -0,0 +1,548 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+# pylint: disable=no-self-use
+
+from concurrent import futures
+from io import (BytesIO, IOBase, SEEK_CUR, SEEK_END, SEEK_SET, UnsupportedOperation)
+from threading import Lock
+from itertools import islice
+from math import ceil
+
+import six
+
+from azure.core.tracing.common import with_current_context
+
+from . import encode_base64, url_quote
+from .request_handlers import get_length
+from .response_handlers import return_response_headers
+from .encryption import get_blob_encryptor_and_padder
+
+
+_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024
+_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = "{0} should be a seekable file-like/io.IOBase type stream object."
+
+
+def _parallel_uploads(executor, uploader, pending, running):
+ range_ids = []
+ while True:
+        # Wait for an upload to finish before adding a new one
+ done, running = futures.wait(running, return_when=futures.FIRST_COMPLETED)
+ range_ids.extend([chunk.result() for chunk in done])
+ try:
+ next_chunk = next(pending)
+ except StopIteration:
+ break
+ else:
+ running.add(executor.submit(with_current_context(uploader), next_chunk))
+
+ # Wait for the remaining uploads to finish
+ done, _running = futures.wait(running)
+ range_ids.extend([chunk.result() for chunk in done])
+ return range_ids
+
+
+def upload_data_chunks(
+ service=None,
+ uploader_class=None,
+ total_size=None,
+ chunk_size=None,
+ max_concurrency=None,
+ stream=None,
+ validate_content=None,
+ encryption_options=None,
+ **kwargs):
+
+ if encryption_options:
+ encryptor, padder = get_blob_encryptor_and_padder(
+ encryption_options.get('cek'),
+ encryption_options.get('vector'),
+ uploader_class is not PageBlobChunkUploader)
+ kwargs['encryptor'] = encryptor
+ kwargs['padder'] = padder
+
+ parallel = max_concurrency > 1
+ if parallel and 'modified_access_conditions' in kwargs:
+ # Access conditions do not work with parallelism
+ kwargs['modified_access_conditions'] = None
+
+ uploader = uploader_class(
+ service=service,
+ total_size=total_size,
+ chunk_size=chunk_size,
+ stream=stream,
+ parallel=parallel,
+ validate_content=validate_content,
+ **kwargs)
+ if parallel:
+ executor = futures.ThreadPoolExecutor(max_concurrency)
+ upload_tasks = uploader.get_chunk_streams()
+ running_futures = [
+ executor.submit(with_current_context(uploader.process_chunk), u)
+ for u in islice(upload_tasks, 0, max_concurrency)
+ ]
+ range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures)
+ else:
+ range_ids = [uploader.process_chunk(result) for result in uploader.get_chunk_streams()]
+ if any(range_ids):
+ return [r[1] for r in sorted(range_ids, key=lambda r: r[0])]
+ return uploader.response_headers
+
+
+def upload_substream_blocks(
+ service=None,
+ uploader_class=None,
+ total_size=None,
+ chunk_size=None,
+ max_concurrency=None,
+ stream=None,
+ **kwargs):
+ parallel = max_concurrency > 1
+ if parallel and 'modified_access_conditions' in kwargs:
+ # Access conditions do not work with parallelism
+ kwargs['modified_access_conditions'] = None
+ uploader = uploader_class(
+ service=service,
+ total_size=total_size,
+ chunk_size=chunk_size,
+ stream=stream,
+ parallel=parallel,
+ **kwargs)
+
+ if parallel:
+ executor = futures.ThreadPoolExecutor(max_concurrency)
+ upload_tasks = uploader.get_substream_blocks()
+ running_futures = [
+ executor.submit(with_current_context(uploader.process_substream_block), u)
+ for u in islice(upload_tasks, 0, max_concurrency)
+ ]
+ range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures)
+ else:
+ range_ids = [uploader.process_substream_block(b) for b in uploader.get_substream_blocks()]
+ return sorted(range_ids)
+
+
+class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes
+
+ def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor=None, padder=None, **kwargs):
+ self.service = service
+ self.total_size = total_size
+ self.chunk_size = chunk_size
+ self.stream = stream
+ self.parallel = parallel
+
+ # Stream management
+ self.stream_start = stream.tell() if parallel else None
+ self.stream_lock = Lock() if parallel else None
+
+ # Progress feedback
+ self.progress_total = 0
+ self.progress_lock = Lock() if parallel else None
+
+ # Encryption
+ self.encryptor = encryptor
+ self.padder = padder
+ self.response_headers = None
+ self.etag = None
+ self.last_modified = None
+ self.request_options = kwargs
+
+ def get_chunk_streams(self):
+ index = 0
+ while True:
+ data = b""
+ read_size = self.chunk_size
+
+ # Buffer until we either reach the end of the stream or get a whole chunk.
+ while True:
+ if self.total_size:
+ read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data)))
+ temp = self.stream.read(read_size)
+ if not isinstance(temp, six.binary_type):
+ raise TypeError("Blob data should be of type bytes.")
+ data += temp or b""
+
+ # We have read an empty string and so are at the end
+ # of the buffer or we have read a full chunk.
+ if temp == b"" or len(data) == self.chunk_size:
+ break
+
+ if len(data) == self.chunk_size:
+ if self.padder:
+ data = self.padder.update(data)
+ if self.encryptor:
+ data = self.encryptor.update(data)
+ yield index, data
+ else:
+ if self.padder:
+ data = self.padder.update(data) + self.padder.finalize()
+ if self.encryptor:
+ data = self.encryptor.update(data) + self.encryptor.finalize()
+ if data:
+ yield index, data
+ break
+ index += len(data)
+
+ def process_chunk(self, chunk_data):
+ chunk_bytes = chunk_data[1]
+ chunk_offset = chunk_data[0]
+ return self._upload_chunk_with_progress(chunk_offset, chunk_bytes)
+
+ def _update_progress(self, length):
+ if self.progress_lock is not None:
+ with self.progress_lock:
+ self.progress_total += length
+ else:
+ self.progress_total += length
+
+ def _upload_chunk(self, chunk_offset, chunk_data):
+ raise NotImplementedError("Must be implemented by child class.")
+
+ def _upload_chunk_with_progress(self, chunk_offset, chunk_data):
+ range_id = self._upload_chunk(chunk_offset, chunk_data)
+ self._update_progress(len(chunk_data))
+ return range_id
+
+ def get_substream_blocks(self):
+ assert self.chunk_size is not None
+ lock = self.stream_lock
+ blob_length = self.total_size
+
+ if blob_length is None:
+ blob_length = get_length(self.stream)
+ if blob_length is None:
+ raise ValueError("Unable to determine content length of upload data.")
+
+ blocks = int(ceil(blob_length / (self.chunk_size * 1.0)))
+ last_block_size = self.chunk_size if blob_length % self.chunk_size == 0 else blob_length % self.chunk_size
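+        # Example: a 10 MiB stream with 4 MiB chunks yields three blocks of
+        # 4 MiB, 4 MiB and 2 MiB.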
+
+ for i in range(blocks):
+ index = i * self.chunk_size
+ length = last_block_size if i == blocks - 1 else self.chunk_size
+            yield ('BlockId{:05}'.format(i), SubStream(self.stream, index, length, lock))
+
+ def process_substream_block(self, block_data):
+ return self._upload_substream_block_with_progress(block_data[0], block_data[1])
+
+ def _upload_substream_block(self, block_id, block_stream):
+ raise NotImplementedError("Must be implemented by child class.")
+
+ def _upload_substream_block_with_progress(self, block_id, block_stream):
+ range_id = self._upload_substream_block(block_id, block_stream)
+ self._update_progress(len(block_stream))
+ return range_id
+
+ def set_response_properties(self, resp):
+ self.etag = resp.etag
+ self.last_modified = resp.last_modified
+
+
+class BlockBlobChunkUploader(_ChunkUploader):
+
+ def __init__(self, *args, **kwargs):
+ kwargs.pop("modified_access_conditions", None)
+ super(BlockBlobChunkUploader, self).__init__(*args, **kwargs)
+ self.current_length = None
+
+ def _upload_chunk(self, chunk_offset, chunk_data):
+ # TODO: This is incorrect, but works with recording.
+ index = '{0:032d}'.format(chunk_offset)
+ block_id = encode_base64(url_quote(encode_base64(index)))
+ self.service.stage_block(
+ block_id,
+ len(chunk_data),
+ chunk_data,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options
+ )
+ return index, block_id
+
+ def _upload_substream_block(self, block_id, block_stream):
+ try:
+ self.service.stage_block(
+ block_id,
+ len(block_stream),
+ block_stream,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options
+ )
+ finally:
+ block_stream.close()
+ return block_id
+
+
+class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method
+
+ def _is_chunk_empty(self, chunk_data):
+ # read until non-zero byte is encountered
+ # if reached the end without returning, then chunk_data is all 0's
+ return not any(bytearray(chunk_data))
+
+ def _upload_chunk(self, chunk_offset, chunk_data):
+ # avoid uploading the empty pages
+ if not self._is_chunk_empty(chunk_data):
+ chunk_end = chunk_offset + len(chunk_data) - 1
+ content_range = "bytes={0}-{1}".format(chunk_offset, chunk_end)
+ computed_md5 = None
+ self.response_headers = self.service.upload_pages(
+ chunk_data,
+ content_length=len(chunk_data),
+ transactional_content_md5=computed_md5,
+ range=content_range,
+ cls=return_response_headers,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options
+ )
+
+ if not self.parallel and self.request_options.get('modified_access_conditions'):
+ self.request_options['modified_access_conditions'].if_match = self.response_headers['etag']
+
+
+class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method
+
+ def __init__(self, *args, **kwargs):
+ super(AppendBlobChunkUploader, self).__init__(*args, **kwargs)
+ self.current_length = None
+
+ def _upload_chunk(self, chunk_offset, chunk_data):
+ if self.current_length is None:
+ self.response_headers = self.service.append_block(
+ chunk_data,
+ content_length=len(chunk_data),
+ cls=return_response_headers,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options
+ )
+ self.current_length = int(self.response_headers["blob_append_offset"])
+ else:
+ self.request_options['append_position_access_conditions'].append_position = \
+ self.current_length + chunk_offset
+ self.response_headers = self.service.append_block(
+ chunk_data,
+ content_length=len(chunk_data),
+ cls=return_response_headers,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options
+ )
+
+
+class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method
+
+ def _upload_chunk(self, chunk_offset, chunk_data):
+ length = len(chunk_data)
+ chunk_end = chunk_offset + length - 1
+ response = self.service.upload_range(
+ chunk_data,
+ chunk_offset,
+ length,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options
+ )
+ return 'bytes={0}-{1}'.format(chunk_offset, chunk_end), response
+
+
+class SubStream(IOBase):
+
+    def __init__(self, wrapped_stream, stream_begin_index, length, lock_obj):
+ # Python 2.7: file-like objects created with open() typically support seek(), but are not
+ # derivations of io.IOBase and thus do not implement seekable().
+ # Python > 3.0: file-like objects created with open() are derived from io.IOBase.
+ try:
+            # only the main thread runs this, so there's no need to grab the lock
+            wrapped_stream.seek(0, SEEK_CUR)
+        except Exception:
+            raise ValueError("Wrapped stream must support seek().")
+
+        self._lock = lock_obj
+ self._wrapped_stream = wrapped_stream
+ self._position = 0
+ self._stream_begin_index = stream_begin_index
+ self._length = length
+ self._buffer = BytesIO()
+
+ # we must avoid buffering more than necessary, and also not use up too much memory
+ # so the max buffer size is capped at 4MB
+ self._max_buffer_size = (
+ length if length < _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE else _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE
+ )
+ self._current_buffer_start = 0
+ self._current_buffer_size = 0
+ super(SubStream, self).__init__()
+
+ def __len__(self):
+ return self._length
+
+ def close(self):
+ if self._buffer:
+ self._buffer.close()
+ self._wrapped_stream = None
+ IOBase.close(self)
+
+ def fileno(self):
+ return self._wrapped_stream.fileno()
+
+ def flush(self):
+ pass
+
+ def read(self, size=None):
+ if self.closed: # pylint: disable=using-constant-test
+ raise ValueError("Stream is closed.")
+
+ if size is None:
+ size = self._length - self._position
+
+ # adjust if out of bounds
+ if size + self._position >= self._length:
+ size = self._length - self._position
+
+ # return fast
+ if size == 0 or self._buffer.closed:
+ return b""
+
+ # attempt first read from the read buffer and update position
+ read_buffer = self._buffer.read(size)
+ bytes_read = len(read_buffer)
+ bytes_remaining = size - bytes_read
+ self._position += bytes_read
+
+ # repopulate the read buffer from the underlying stream to fulfill the request
+ # ensure the seek and read operations are done atomically (only if a lock is provided)
+ if bytes_remaining > 0:
+ with self._buffer:
+ # either read in the max buffer size specified on the class
+ # or read in just enough data for the current block/sub stream
+ current_max_buffer_size = min(self._max_buffer_size, self._length - self._position)
+
+ # lock is only defined if max_concurrency > 1 (parallel uploads)
+ if self._lock:
+ with self._lock:
+ # reposition the underlying stream to match the start of the data to read
+ absolute_position = self._stream_begin_index + self._position
+ self._wrapped_stream.seek(absolute_position, SEEK_SET)
+ # If we can't seek to the right location, our read will be corrupted so fail fast.
+ if self._wrapped_stream.tell() != absolute_position:
+ raise IOError("Stream failed to seek to the desired location.")
+ buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size)
+ else:
+ buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size)
+
+ if buffer_from_stream:
+ # update the buffer with new data from the wrapped stream
+ # we need to note down the start position and size of the buffer, in case seek is performed later
+ self._buffer = BytesIO(buffer_from_stream)
+ self._current_buffer_start = self._position
+ self._current_buffer_size = len(buffer_from_stream)
+
+ # read the remaining bytes from the new buffer and update position
+ second_read_buffer = self._buffer.read(bytes_remaining)
+ read_buffer += second_read_buffer
+ self._position += len(second_read_buffer)
+
+ return read_buffer
+
+ def readable(self):
+ return True
+
+ def readinto(self, b):
+ raise UnsupportedOperation
+
+ def seek(self, offset, whence=0):
+ if whence is SEEK_SET:
+ start_index = 0
+ elif whence is SEEK_CUR:
+ start_index = self._position
+ elif whence is SEEK_END:
+ start_index = self._length
+ offset = -offset
+ else:
+ raise ValueError("Invalid argument for the 'whence' parameter.")
+
+ pos = start_index + offset
+
+ if pos > self._length:
+ pos = self._length
+ elif pos < 0:
+ pos = 0
+
+ # check if buffer is still valid
+ # if not, drop buffer
+ if pos < self._current_buffer_start or pos >= self._current_buffer_start + self._current_buffer_size:
+ self._buffer.close()
+ self._buffer = BytesIO()
+ else: # if yes seek to correct position
+ delta = pos - self._current_buffer_start
+ self._buffer.seek(delta, SEEK_SET)
+
+ self._position = pos
+ return pos
+
+ def seekable(self):
+ return True
+
+ def tell(self):
+ return self._position
+
+    def write(self, b):
+        raise UnsupportedOperation
+
+    def writelines(self, lines):
+        raise UnsupportedOperation
+
+    def writable(self):
+        # io.IOBase spells this method 'writable'; a misspelled 'writeable'
+        # would never be consulted by the io machinery.
+        return False
+
+
+class IterStreamer(object):
+ """
+ File-like streaming iterator.
+ """
+
+ def __init__(self, generator, encoding="UTF-8"):
+ self.generator = generator
+ self.iterator = iter(generator)
+ self.leftover = b""
+ self.encoding = encoding
+
+ def __len__(self):
+ return self.generator.__len__()
+
+ def __iter__(self):
+ return self.iterator
+
+ def seekable(self):
+ return False
+
+    def next(self):
+        return next(self.iterator)
+
+    __next__ = next  # Python 3 iterator protocol
+
+ def tell(self, *args, **kwargs):
+ raise UnsupportedOperation("Data generator does not support tell.")
+
+ def seek(self, *args, **kwargs):
+ raise UnsupportedOperation("Data generator is unseekable.")
+
+ def read(self, size):
+ data = self.leftover
+ count = len(self.leftover)
+ try:
+ while count < size:
+ chunk = self.next()
+ if isinstance(chunk, six.text_type):
+ chunk = chunk.encode(self.encoding)
+ data += chunk
+ count += len(chunk)
+        except StopIteration:
+            # Generator exhausted: everything buffered is returned below, so
+            # clear the leftover to avoid re-serving stale bytes on the next read.
+            self.leftover = b""
+
+        if count >= size:
+            self.leftover = data[size:]
+
+ return data[:size]
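
Reviewer note: the IterStreamer.read logic is easiest to sanity-check in isolation. A self-contained sketch (no service calls, import path per the layout above) showing that fixed-size reads re-chunk an uneven generator and that the leftover buffer carries bytes across calls:

    from azure.azure_table._shared.uploads import IterStreamer

    def chunks():
        yield b"abc"    # uneven chunk sizes exercise the leftover buffer
        yield b"defgh"
        yield b"i"

    stream = IterStreamer(chunks())
    assert stream.read(4) == b"abcd"   # b"efgh" is held over internally
    assert stream.read(4) == b"efgh"
    assert stream.read(4) == b"i"      # generator exhausted: short read
    assert stream.read(4) == b""
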
diff --git a/sdk/table/azure/azure_table/_shared/uploads_async.py b/sdk/table/azure/azure_table/_shared/uploads_async.py
new file mode 100644
index 000000000000..92fcab5ef5f0
--- /dev/null
+++ b/sdk/table/azure/azure_table/_shared/uploads_async.py
@@ -0,0 +1,350 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+# pylint: disable=no-self-use
+
+import asyncio
+from asyncio import Lock
+from itertools import islice
+import threading
+
+from math import ceil
+
+import six
+
+from . import encode_base64, url_quote
+from .request_handlers import get_length
+from .response_handlers import return_response_headers
+from .encryption import get_blob_encryptor_and_padder
+from .uploads import SubStream, IterStreamer # pylint: disable=unused-import
+
+
+_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024
+_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = '{0} should be a seekable file-like/io.IOBase type stream object.'
+
+
+async def _parallel_uploads(uploader, pending, running):
+ range_ids = []
+ while True:
+        # Wait for an upload to finish before adding a new one
+ done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED)
+ range_ids.extend([chunk.result() for chunk in done])
+ try:
+ next_chunk = next(pending)
+ except StopIteration:
+ break
+ else:
+ running.add(asyncio.ensure_future(uploader(next_chunk)))
+
+ # Wait for the remaining uploads to finish
+ if running:
+ done, _running = await asyncio.wait(running)
+ range_ids.extend([chunk.result() for chunk in done])
+ return range_ids
+
+
+async def upload_data_chunks(
+ service=None,
+ uploader_class=None,
+ total_size=None,
+ chunk_size=None,
+ max_concurrency=None,
+ stream=None,
+ encryption_options=None,
+ **kwargs):
+
+ if encryption_options:
+ encryptor, padder = get_blob_encryptor_and_padder(
+ encryption_options.get('cek'),
+ encryption_options.get('vector'),
+ uploader_class is not PageBlobChunkUploader)
+ kwargs['encryptor'] = encryptor
+ kwargs['padder'] = padder
+
+ parallel = max_concurrency > 1
+ if parallel and 'modified_access_conditions' in kwargs:
+ # Access conditions do not work with parallelism
+ kwargs['modified_access_conditions'] = None
+
+ uploader = uploader_class(
+ service=service,
+ total_size=total_size,
+ chunk_size=chunk_size,
+ stream=stream,
+ parallel=parallel,
+ **kwargs)
+
+ if parallel:
+ upload_tasks = uploader.get_chunk_streams()
+ running_futures = [
+ asyncio.ensure_future(uploader.process_chunk(u))
+ for u in islice(upload_tasks, 0, max_concurrency)
+ ]
+ range_ids = await _parallel_uploads(uploader.process_chunk, upload_tasks, running_futures)
+ else:
+ range_ids = []
+ for chunk in uploader.get_chunk_streams():
+ range_ids.append(await uploader.process_chunk(chunk))
+
+ if any(range_ids):
+ return [r[1] for r in sorted(range_ids, key=lambda r: r[0])]
+ return uploader.response_headers
+
+
+async def upload_substream_blocks(
+ service=None,
+ uploader_class=None,
+ total_size=None,
+ chunk_size=None,
+ max_concurrency=None,
+ stream=None,
+ **kwargs):
+ parallel = max_concurrency > 1
+ if parallel and 'modified_access_conditions' in kwargs:
+ # Access conditions do not work with parallelism
+ kwargs['modified_access_conditions'] = None
+ uploader = uploader_class(
+ service=service,
+ total_size=total_size,
+ chunk_size=chunk_size,
+ stream=stream,
+ parallel=parallel,
+ **kwargs)
+
+ if parallel:
+ upload_tasks = uploader.get_substream_blocks()
+ running_futures = [
+ asyncio.ensure_future(uploader.process_substream_block(u))
+ for u in islice(upload_tasks, 0, max_concurrency)
+ ]
+ range_ids = await _parallel_uploads(uploader.process_substream_block, upload_tasks, running_futures)
+ else:
+ range_ids = []
+ for block in uploader.get_substream_blocks():
+ range_ids.append(await uploader.process_substream_block(block))
+ return sorted(range_ids)
+
+
+class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes
+
+ def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor=None, padder=None, **kwargs):
+ self.service = service
+ self.total_size = total_size
+ self.chunk_size = chunk_size
+ self.stream = stream
+ self.parallel = parallel
+
+ # Stream management
+ self.stream_start = stream.tell() if parallel else None
+ self.stream_lock = threading.Lock() if parallel else None
+
+ # Progress feedback
+ self.progress_total = 0
+ self.progress_lock = Lock() if parallel else None
+
+ # Encryption
+ self.encryptor = encryptor
+ self.padder = padder
+ self.response_headers = None
+ self.etag = None
+ self.last_modified = None
+ self.request_options = kwargs
+
+ def get_chunk_streams(self):
+ index = 0
+ while True:
+ data = b''
+ read_size = self.chunk_size
+
+ # Buffer until we either reach the end of the stream or get a whole chunk.
+ while True:
+ if self.total_size:
+ read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data)))
+ temp = self.stream.read(read_size)
+ if not isinstance(temp, six.binary_type):
+ raise TypeError('Blob data should be of type bytes.')
+ data += temp or b""
+
+ # We have read an empty string and so are at the end
+ # of the buffer or we have read a full chunk.
+ if temp == b'' or len(data) == self.chunk_size:
+ break
+
+ if len(data) == self.chunk_size:
+ if self.padder:
+ data = self.padder.update(data)
+ if self.encryptor:
+ data = self.encryptor.update(data)
+ yield index, data
+ else:
+ if self.padder:
+ data = self.padder.update(data) + self.padder.finalize()
+ if self.encryptor:
+ data = self.encryptor.update(data) + self.encryptor.finalize()
+ if data:
+ yield index, data
+ break
+ index += len(data)
+
+ async def process_chunk(self, chunk_data):
+ chunk_bytes = chunk_data[1]
+ chunk_offset = chunk_data[0]
+ return await self._upload_chunk_with_progress(chunk_offset, chunk_bytes)
+
+ async def _update_progress(self, length):
+ if self.progress_lock is not None:
+ async with self.progress_lock:
+ self.progress_total += length
+ else:
+ self.progress_total += length
+
+ async def _upload_chunk(self, chunk_offset, chunk_data):
+ raise NotImplementedError("Must be implemented by child class.")
+
+ async def _upload_chunk_with_progress(self, chunk_offset, chunk_data):
+ range_id = await self._upload_chunk(chunk_offset, chunk_data)
+ await self._update_progress(len(chunk_data))
+ return range_id
+
+ def get_substream_blocks(self):
+ assert self.chunk_size is not None
+ lock = self.stream_lock
+ blob_length = self.total_size
+
+ if blob_length is None:
+ blob_length = get_length(self.stream)
+ if blob_length is None:
+ raise ValueError("Unable to determine content length of upload data.")
+
+ blocks = int(ceil(blob_length / (self.chunk_size * 1.0)))
+ last_block_size = self.chunk_size if blob_length % self.chunk_size == 0 else blob_length % self.chunk_size
+
+ for i in range(blocks):
+ index = i * self.chunk_size
+ length = last_block_size if i == blocks - 1 else self.chunk_size
+            yield ('BlockId{:05}'.format(i), SubStream(self.stream, index, length, lock))
+
+ async def process_substream_block(self, block_data):
+ return await self._upload_substream_block_with_progress(block_data[0], block_data[1])
+
+ async def _upload_substream_block(self, block_id, block_stream):
+ raise NotImplementedError("Must be implemented by child class.")
+
+ async def _upload_substream_block_with_progress(self, block_id, block_stream):
+ range_id = await self._upload_substream_block(block_id, block_stream)
+ await self._update_progress(len(block_stream))
+ return range_id
+
+ def set_response_properties(self, resp):
+ self.etag = resp.etag
+ self.last_modified = resp.last_modified
+
+
+class BlockBlobChunkUploader(_ChunkUploader):
+
+ def __init__(self, *args, **kwargs):
+ kwargs.pop('modified_access_conditions', None)
+ super(BlockBlobChunkUploader, self).__init__(*args, **kwargs)
+ self.current_length = None
+
+ async def _upload_chunk(self, chunk_offset, chunk_data):
+ # TODO: This is incorrect, but works with recording.
+ index = '{0:032d}'.format(chunk_offset)
+ block_id = encode_base64(url_quote(encode_base64(index)))
+ await self.service.stage_block(
+ block_id,
+ len(chunk_data),
+ chunk_data,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options)
+ return index, block_id
+
+ async def _upload_substream_block(self, block_id, block_stream):
+ try:
+ await self.service.stage_block(
+ block_id,
+ len(block_stream),
+ block_stream,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options)
+ finally:
+ block_stream.close()
+ return block_id
+
+
+class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method
+
+ def _is_chunk_empty(self, chunk_data):
+ # read until non-zero byte is encountered
+ # if reached the end without returning, then chunk_data is all 0's
+ for each_byte in chunk_data:
+ if each_byte not in [0, b'\x00']:
+ return False
+ return True
+
+ async def _upload_chunk(self, chunk_offset, chunk_data):
+ # avoid uploading the empty pages
+ if not self._is_chunk_empty(chunk_data):
+ chunk_end = chunk_offset + len(chunk_data) - 1
+ content_range = 'bytes={0}-{1}'.format(chunk_offset, chunk_end)
+ computed_md5 = None
+ self.response_headers = await self.service.upload_pages(
+ chunk_data,
+ content_length=len(chunk_data),
+ transactional_content_md5=computed_md5,
+ range=content_range,
+ cls=return_response_headers,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options)
+
+ if not self.parallel and self.request_options.get('modified_access_conditions'):
+ self.request_options['modified_access_conditions'].if_match = self.response_headers['etag']
+
+
+class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method
+
+ def __init__(self, *args, **kwargs):
+ super(AppendBlobChunkUploader, self).__init__(*args, **kwargs)
+ self.current_length = None
+
+ async def _upload_chunk(self, chunk_offset, chunk_data):
+ if self.current_length is None:
+ self.response_headers = await self.service.append_block(
+ chunk_data,
+ content_length=len(chunk_data),
+ cls=return_response_headers,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options)
+ self.current_length = int(self.response_headers['blob_append_offset'])
+ else:
+ self.request_options['append_position_access_conditions'].append_position = \
+ self.current_length + chunk_offset
+ self.response_headers = await self.service.append_block(
+ chunk_data,
+ content_length=len(chunk_data),
+ cls=return_response_headers,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options)
+
+
+class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method
+
+    async def _upload_chunk(self, chunk_offset, chunk_data):
+        length = len(chunk_data)
+        chunk_end = chunk_offset + length - 1
+        response = await self.service.upload_range(
+            chunk_data,
+            chunk_offset,
+            length,
+ data_stream_total=self.total_size,
+ upload_stream_current=self.progress_total,
+ **self.request_options
+ )
+ range_id = 'bytes={0}-{1}'.format(chunk_offset, chunk_end)
+ return range_id, response
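
Reviewer note: _parallel_uploads above implements a sliding window: it keeps at most max_concurrency tasks in flight and replaces each completed task with the next pending chunk. A standalone sketch of the same pattern with stand-in coroutines (fake_upload is hypothetical):

    import asyncio

    async def fake_upload(chunk):
        await asyncio.sleep(0.01)  # stand-in for the service call
        return chunk

    async def main():
        pending = iter(range(10))  # chunks still to be uploaded
        # Seed the window with three tasks, mirroring max_concurrency=3.
        running = {asyncio.ensure_future(fake_upload(next(pending))) for _ in range(3)}
        results = []
        while True:
            done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED)
            results.extend(task.result() for task in done)
            try:
                running.add(asyncio.ensure_future(fake_upload(next(pending))))
            except StopIteration:
                break
        if running:
            done, _ = await asyncio.wait(running)
            results.extend(task.result() for task in done)
        print(sorted(results))  # [0, 1, ..., 9]

    asyncio.run(main())
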
diff --git a/sdk/table/azure/azure_table/_table_client.py b/sdk/table/azure/azure_table/_table_client.py
new file mode 100644
index 000000000000..a9e6c9bfea36
--- /dev/null
+++ b/sdk/table/azure/azure_table/_table_client.py
@@ -0,0 +1,181 @@
+from urllib.parse import urlparse, quote
+
+import six
+from azure.azure_table._deserialize import deserialize_table_creation
+from azure.azure_table._generated import AzureTable
+from azure.azure_table._generated.models import TableProperties, AccessPolicy, SignedIdentifier
+from azure.azure_table._message_encoding import NoEncodePolicy, NoDecodePolicy
+from azure.azure_table._shared.base_client import StorageAccountHostsMixin, parse_query, parse_connection_str
+from azure.azure_table._shared.request_handlers import add_metadata_headers, serialize_iso
+from azure.azure_table._shared.response_handlers import process_storage_error
+from azure.azure_table._version import VERSION
+from azure.core.exceptions import HttpResponseError
+from azure.core.tracing.decorator import distributed_trace
+
+
+class TableClient(StorageAccountHostsMixin):
+ def __init__(
+ self, account_url, # type: str
+ table_name, # type: str
+ credential=None, # type: Optional[Any]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ try:
+ if not account_url.lower().startswith('http'):
+ account_url = "https://" + account_url
+ except AttributeError:
+ raise ValueError("Account URL must be a string.")
+ parsed_url = urlparse(account_url.rstrip('/'))
+ if not table_name:
+ raise ValueError("Please specify a queue name.")
+ if not parsed_url.netloc:
+ raise ValueError("Invalid URL: {}".format(parsed_url))
+
+ _, sas_token = parse_query(parsed_url.query)
+ if not sas_token and not credential:
+ raise ValueError("You need to provide either a SAS token or an account shared key to authenticate.")
+
+ self.table_name = table_name
+ self._query_str, credential = self._format_query_string(sas_token, credential)
+ super(TableClient, self).__init__(parsed_url, service='table', credential=credential, **kwargs)
+
+ self._config.message_encode_policy = kwargs.get('message_encode_policy', None) or NoEncodePolicy()
+ self._config.message_decode_policy = kwargs.get('message_decode_policy', None) or NoDecodePolicy()
+ self._client = AzureTable(self.url, pipeline=self._pipeline)
+ self._client._config.version = kwargs.get('api_version', VERSION) # pylint: disable=protected-access
+
+ def _format_url(self, hostname):
+ """Format the endpoint URL according to the current location
+ mode hostname.
+ """
+ table_name = self.table_name
+ if isinstance(table_name, six.text_type):
+ table_name = table_name.encode('UTF-8')
+ return "{}://{}/{}{}".format(
+ self.scheme,
+ hostname,
+ quote(table_name),
+ self._query_str)
+
+ @classmethod
+ def from_connection_string(
+ cls, conn_str, # type: str
+ table_name, # type: str
+ credential=None, # type: Any
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Create QueueClient from a Connection String.
+
+ :param str conn_str:
+ A connection string to an Azure Storage account.
+ :param table_name: The queue name.
+ :type table_name: str
+ :param credential:
+ The credentials with which to authenticate. This is optional if the
+ account URL already has a SAS token, or the connection string already has shared
+ access key values. The value can be a SAS token string, an account shared access
+ key, or an instance of a TokenCredentials class from azure.identity.
+ :returns: A queue client.
+ :rtype: ~azure.storage.queue.QueueClient
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/queue_samples_message.py
+ :start-after: [START create_queue_client_from_connection_string]
+ :end-before: [END create_queue_client_from_connection_string]
+ :language: python
+ :dedent: 8
+ :caption: Create the queue client from connection string.
+ """
+ account_url, secondary, credential = parse_connection_str(
+            conn_str, credential, 'table')
+ if 'secondary_hostname' not in kwargs:
+ kwargs['secondary_hostname'] = secondary
+ return cls(account_url, table_name=table_name, credential=credential, **kwargs) # type: ignore
+
+ @distributed_trace
+    def create_table(self, table_name, **kwargs):
+ # type: (Optional[Any]) -> None
+ table_properties = TableProperties(table_name=table_name)
+ request_id_parameter = kwargs.pop('request_id_parameter', None)
+ response_preference = kwargs.pop('response_preference', None)
+ query_option = kwargs.pop('query_options', None)
+ try:
+ return self._client.table.create( # type: ignore
+ table_properties=table_properties,
+ request_id_parameter=request_id_parameter,
+ response_preference=response_preference,
+ query_options=query_option,
+ **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace
+    def delete_table(self, table_name, **kwargs):
+ # type: (Optional[Any]) -> None
+ request_id_parameter = kwargs.pop('request_id_parameter', None)
+ try:
+ self._client.table.delete(
+ table_name=table_name,
+ request_id_parameter=request_id_parameter,
+ **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace
+ def get_table_properties(self, **kwargs):
+ # type: (Optional[Any]) -> TableProperties
+ timeout = kwargs.pop('timeout', None)
+ request_id_parameter = kwargs.pop('request_id_parameter', None)
+ try:
+ response = self._client.table.get_properties(
+ timeout=timeout,
+ request_id_parameter=request_id_parameter,
+ **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
+ response.name = self.table_name
+ return response # type: ignore
+
+ @distributed_trace
+ def get_table_access_policy(self, table, **kwargs):
+ # type: (Optional[Any]) -> Dict[str, Any]
+ timeout = kwargs.pop('timeout', None)
+ request_id_parameter = kwargs.pop('request_id_parameter', None)
+ try:
+ _, identifiers = self._client.table.get_access_policy(
+ table=table,
+ timeout=timeout,
+ request_id_parameter=request_id_parameter,
+ **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
+ return {s.id: s.access_policy or AccessPolicy() for s in identifiers}
+
+ @distributed_trace
+ def set_table_access_policy(self, table, signed_identifiers, **kwargs):
+ # type: (Dict[str, AccessPolicy], Optional[Any]) -> None
+ timeout = kwargs.pop('timeout', None)
+ request_id_parameter = kwargs.pop('request_id_parameter', None)
+        if len(signed_identifiers) > 5:
+            raise ValueError(
+                'Too many access policies provided. The server does not support setting '
+                'more than 5 access policies on a single resource.')
+ identifiers = []
+ for key, value in signed_identifiers.items():
+ if value:
+ value.start = serialize_iso(value.start)
+ value.expiry = serialize_iso(value.expiry)
+ identifiers.append(SignedIdentifier(id=key, access_policy=value))
+ signed_identifiers = identifiers # type: ignore
+ try:
+ self._client.table.set_access_policy(
+ table,
+ timeout=timeout,
+ request_id_parameter=request_id_parameter,
+ table_acl=signed_identifiers or None,
+ **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
diff --git a/sdk/table/azure/azure_table/_table_service_client.py b/sdk/table/azure/azure_table/_table_service_client.py
new file mode 100644
index 000000000000..339ed716502d
--- /dev/null
+++ b/sdk/table/azure/azure_table/_table_service_client.py
@@ -0,0 +1,345 @@
+import functools
+from urllib.parse import urlparse
+
+from azure.azure_table._generated import AzureTable
+from azure.azure_table._generated.models import TableProperties, TableServiceStats, TableServiceProperties, \
+ AccessPolicy, SignedIdentifier
+from azure.azure_table._models import TablePropertiesPaged, service_stats_deserialize, service_properties_deserialize
+from azure.azure_table._shared.base_client import StorageAccountHostsMixin, parse_connection_str, parse_query, \
+ TransportWrapper
+from azure.azure_table._shared.models import LocationMode
+from azure.azure_table._shared.request_handlers import serialize_iso
+from azure.azure_table._shared.response_handlers import process_storage_error, return_headers_and_deserialized
+from azure.azure_table._version import VERSION
+from azure.core.exceptions import HttpResponseError
+from azure.core.paging import ItemPaged
+from azure.core.tracing.decorator import distributed_trace
+from azure.azure_table._table_client import TableClient
+from msrest.pipeline import Pipeline
+
+
+class TableServiceClient(StorageAccountHostsMixin):
+ def __init__(
+ self, account_url, # type: str
+ credential=None, # type: Optional[Any]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ try:
+ if not account_url.lower().startswith('http'):
+ account_url = "https://" + account_url
+ except AttributeError:
+ raise ValueError("Account URL must be a string.")
+ parsed_url = urlparse(account_url.rstrip('/'))
+ if not parsed_url.netloc:
+ raise ValueError("Invalid URL: {}".format(account_url))
+
+ _, sas_token = parse_query(parsed_url.query)
+ if not sas_token and not credential:
+ raise ValueError("You need to provide either a SAS token or an account shared key to authenticate.")
+ self._query_str, credential = self._format_query_string(sas_token, credential)
+ super(TableServiceClient, self).__init__(parsed_url, service='table', credential=credential, **kwargs)
+ self._client = AzureTable(self.url, pipeline=self._pipeline)
+ self._client._config.version = kwargs.get('api_version', VERSION) # pylint: disable=protected-access
+
+ def _format_url(self, hostname):
+ """Format the endpoint URL according to the current location
+ mode hostname.
+ """
+ return "{}://{}/{}".format(self.scheme, hostname, self._query_str)
+
+ @classmethod
+ def from_connection_string(
+ cls, conn_str, # type: str
+ credential=None, # type: Optional[Any]
+ **kwargs # type: Any
+ ): # type: (...) -> TableServiceClient
+ """Create TableServiceClient from a Connection String.
+
+ :param str conn_str:
+ A connection string to an Azure Storage account.
+ :param credential:
+ The credentials with which to authenticate. This is optional if the
+ account URL already has a SAS token, or the connection string already has shared
+ access key values. The value can be a SAS token string, an account shared access
+ key, or an instance of a TokenCredentials class from azure.identity.
+ :returns: A Table service client.
+        :rtype: ~azure.azure_table.TableServiceClient
+        """
+ account_url, secondary, credential = parse_connection_str(
+ conn_str, credential, 'table')
+ if 'secondary_hostname' not in kwargs:
+ kwargs['secondary_hostname'] = secondary
+ return cls(account_url, credential=credential, **kwargs)
+
+ @distributed_trace
+ def get_table_access_policy(
+ self,
+ table_name,
+ **kwargs
+ ):
+ timeout = kwargs.pop('timeout', None)
+ try:
+ _, identifiers = self._client.table.get_access_policy(
+ table=table_name,
+ timeout=timeout,
+ cls=return_headers_and_deserialized,
+ **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
+ return {s.id: s.access_policy or AccessPolicy() for s in identifiers}
+
+ @distributed_trace
+ def set_table_access_policy(self, table_name, signed_identifiers, **kwargs):
+ # type: (Dict[str, AccessPolicy], Optional[Any]) -> None
+ if len(signed_identifiers) > 5:
+ raise ValueError(
+ 'Too many access policies provided. The server does not support setting '
+ 'more than 5 access policies on a single resource.')
+ identifiers = []
+ for key, value in signed_identifiers.items():
+ if value:
+ value.start = serialize_iso(value.start)
+ value.expiry = serialize_iso(value.expiry)
+ identifiers.append(SignedIdentifier(id=key, access_policy=value))
+ try:
+ self._client.table.set_access_policy(
+ table=table_name,
+ table_acl=identifiers or None,
+ **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace
+ def get_service_stats(self, **kwargs):
+ # type: (Optional[Any]) -> Dict[str, Any]
+ try:
+ # failing on get_statistics
+ stats = self._client.service.get_statistics(**kwargs)
+ return service_stats_deserialize(stats)
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace
+ def get_service_properties(self, **kwargs):
+ # type: (Optional[Any]) -> Dict[str, Any]
+ timeout = kwargs.pop('timeout', None)
+ try:
+ service_props = self._client.service.get_properties(timeout=timeout, **kwargs) # type: ignore
+ return service_properties_deserialize(service_props)
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace
+ def set_service_properties( # type: ignore
+ self, analytics_logging=None, # type: Optional[TableAnalyticsLogging]
+ hour_metrics=None, # type: Optional[Metrics]
+ minute_metrics=None, # type: Optional[Metrics]
+ cors=None, # type: Optional[List[CorsRule]]
+ **kwargs
+ ):
+ # type: (...) -> None
+
+ timeout = kwargs.pop('timeout', None)
+ props = TableServiceProperties(
+ logging=analytics_logging,
+ hour_metrics=hour_metrics,
+ minute_metrics=minute_metrics,
+ cors=cors
+ )
+ try:
+ return self._client.service.set_properties(props, timeout=timeout, **kwargs) # type: ignore
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace
+ def create_table(
+ self,
+ table_name
+ ):
+ table_properties = TableProperties(table_name=table_name)
+ response = self._client.table.create(table_properties)
+ return response
+ # table = self.get_table_client(table=table_name)
+
+ @distributed_trace
+ def delete_table(
+ self,
+ table_name,
+ request_id_parameter=None
+ ):
+ response = self._client.table.delete(table=table_name, request_id_parameter=request_id_parameter)
+ return response
+ # table = self.get_table_client(table=table_name)
+ # table.delete_queue(table_name)
+
+ @distributed_trace
+ def list_tables(
+ self,
+ request_id_parameter=None,
+ next_table_name=None, # type: Optional[str]
+ query_options=None, # type: Optional[QueryOptions]
+ **kwargs
+ ):
+ command = functools.partial(
+ self._client.table.query,
+ **kwargs)
+ return ItemPaged(
+ command, results_per_page=query_options,
+ page_iterator_class=TablePropertiesPaged
+ )
+
+ @distributed_trace
+ def query_tables(
+ self,
+ request_id_parameter=None,
+ next_table_name=None,
+ query_options=None,
+ **kwargs
+ ):
+ command = functools.partial(self._client.table.query,
+ **kwargs)
+ return ItemPaged(
+ command, results_per_page=query_options,
+ page_iterator_class=TablePropertiesPaged
+ )
+
+ def upsert_item(self,
+ table_name,
+ timeout=None,
+ request_id_parameter=None,
+ if_match=None,
+ table_entity_properties=None,
+ query_options=None
+ ):
+ response = self._client.table.insert_entity(table=table_name, table_entity_properties=table_entity_properties)
+ return response
+
+ @distributed_trace
+ def query_table_entities(
+ self,
+ table_name,
+ timeout=None,
+ request_id_parameter=None,
+ next_partition_key=None,
+ next_row_key=None,
+ query_options=None
+ ):
+        return self._client.table.query_entities(table_name=table_name)
+
+ @distributed_trace
+ def query_table_entities_with_partition_and_row_key(
+ self,
+ table_name,
+ partition_key,
+ row_key,
+ timeout=None,
+ request_id_parameter=None,
+ query_options=None
+ ):
+        return self._client.table.query_entities_with_partition_and_row_key(table_name=table_name)
+
+ @distributed_trace
+ def update_entity(
+ self,
+ table_name,
+ partition_key,
+ row_key,
+ timeout=None,
+ request_id_parameter=None,
+ if_match=None,
+ table_entity_properties=None,
+ query_options=None
+ ):
+        # Assumed signature for the generated operation; an argument-less
+        # call could never succeed.
+        return self._client.table.update_entity(
+            table=table_name,
+            partition_key=partition_key,
+            row_key=row_key,
+            table_entity_properties=table_entity_properties,
+            if_match=if_match)
+
+ @distributed_trace
+ def merge_entity(
+ self,
+ table_name,
+ partition_key,
+ row_key,
+ timeout=None,
+ request_id_parameter=None,
+ if_match=None,
+ table_entity_properties=None,
+ query_options=None
+ ):
+        # Assumed signature for the generated operation; an argument-less
+        # call could never succeed.
+        return self._client.table.merge_entity(
+            table=table_name,
+            partition_key=partition_key,
+            row_key=row_key,
+            table_entity_properties=table_entity_properties,
+            if_match=if_match)
+
+ @distributed_trace
+ def delete_entity(
+ self,
+ table_name,
+ partition_key,
+ row_key,
+ if_match,
+ timeout=None,
+ request_id_parameter=None,
+ query_options=None
+ ):
+        # Assumed signature for the generated operation; an argument-less
+        # call could never succeed.
+        return self._client.table.delete_entity(
+            table=table_name,
+            partition_key=partition_key,
+            row_key=row_key,
+            if_match=if_match)
+
+ @distributed_trace
+ def insert_entity(
+ self,
+ table_name,
+ timeout=None,
+ request_id_parameter=None,
+ if_match=None,
+ table_entity_properties=None,
+ query_options=None
+ ):
+        return self._client.table.insert_entity(
+            table=table_name,
+            table_entity_properties=table_entity_properties)
+
+ def get_access_policy(
+ self,
+ table_name,
+ timeout=None,
+ request_id_parameter=None
+ ):
+        return self._client.table.get_access_policy(table=table_name)
+
+ def set_access_policy(
+ self,
+ table_name,
+ timeout=None,
+ request_id_parameter=None,
+ table_acl=None
+ ):
+        return self._client.table.set_access_policy(
+            table=table_name,
+            table_acl=table_acl,
+            timeout=timeout,
+            request_id_parameter=request_id_parameter)
+
+ def batch(
+ self,
+ *reqs
+ ):
+        # Calling self.batch here would recurse forever; batch submission is
+        # not wired up to the generated client yet.
+        raise NotImplementedError("Batch operations are not yet supported.")
+
+ def get_table_client(self, table, **kwargs):
+ # type: (Union[TableProperties, str], Optional[Any]) -> TableClient
+
+ try:
+ table_name = table.name
+ except AttributeError:
+ table_name = table
+
+ _pipeline = Pipeline(
+ transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access
+ policies=self._pipeline._impl_policies # pylint: disable = protected-access
+ )
+
+ return TableClient(
+ self.url, table_name=table_name, credential=self.credential,
+ key_resolver_function=self.key_resolver_function, require_encryption=self.require_encryption,
+ key_encryption_key=self.key_encryption_key, api_version=self.api_version, _pipeline=_pipeline,
+ _configuration=self._config, _location_mode=self._location_mode, _hosts=self._hosts, **kwargs)
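
Reviewer note: a minimal end-to-end sketch of the service client surface above, using the same placeholder credentials as the samples below; assumes the package root re-exports TableServiceClient, as the samples do:

    from azure.azure_table import TableServiceClient

    conn_str = (
        "DefaultEndpointsProtocol=https;AccountName=example;"
        "AccountKey=fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK==;"
        "EndpointSuffix=core.windows.net"
    )

    service = TableServiceClient.from_connection_string(conn_str)
    service.create_table("OfficeSupplies")

    # Table-scoped calls go through a TableClient that shares this
    # service client's pipeline and configuration.
    table = service.get_table_client("OfficeSupplies")
    print(table.get_table_properties().name)

    service.delete_table("OfficeSupplies")
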
diff --git a/sdk/table/azure/azure_table/_version.py b/sdk/table/azure/azure_table/_version.py
new file mode 100644
index 000000000000..8528164c46da
--- /dev/null
+++ b/sdk/table/azure/azure_table/_version.py
@@ -0,0 +1 @@
+VERSION = "2019-07-07"
\ No newline at end of file
diff --git a/sdk/table/samples/create_batch.py b/sdk/table/samples/create_batch.py
new file mode 100644
index 000000000000..0c88b6fbbc0b
--- /dev/null
+++ b/sdk/table/samples/create_batch.py
@@ -0,0 +1,13 @@
+class CreateBatch(object):
+ connection_string = "DefaultEndpointsProtocol=https;AccountName=example;AccountKey=fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK==;EndpointSuffix=core.windows.net"
+ table_name = "NAME"
+ account_url = "https://example.table.core.windows.net/"
+ account_name = "example"
+ access_key = "fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK=="
+
+ # demonstrate building a batch of operations and commit that batch
+
+ def build_batch_operations(self):
+ from azure.azure_table import TableServiceClient
+ table_client = TableServiceClient(account_url=self.account_url, credential=self.access_key)
+        reqs = []  # placeholder for the individual operations in the batch
+        batch_operations = table_client.batch(*reqs)
diff --git a/sdk/table/samples/create_query_entities.py b/sdk/table/samples/create_query_entities.py
new file mode 100644
index 000000000000..041cb84887ad
--- /dev/null
+++ b/sdk/table/samples/create_query_entities.py
@@ -0,0 +1,36 @@
+from azure.azure_table._generated.models import QueryOptions
+
+
+class CreateODataQuery(object):
+ connection_string = "DefaultEndpointsProtocol=https;AccountName=example;AccountKey" \
+ "=fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK==;EndpointSuffix=core.windows.net "
+ account_url = "https://example.table.core.windows.net/"
+ account_name = "example"
+ access_key = "fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK=="
+
+ # Assuming there is a created table
+ partition_key = "1"
+ row_key = "1"
+ # Creating query filter for that table
+ table_name = "Office Supplies"
+ entity_name = "marker"
+ name_filter = "EntityName eq '{}'".format(entity_name)
+ # change select filter
+ query_options = QueryOptions(filter=name_filter)
+
+ def create_query_entities(self):
+
+ from azure.azure_table import TableClient
+ from azure.core.exceptions import HttpResponseError
+
+ table_client = TableClient(account_url=self.account_url, credential=self.access_key)
+ try:
+ queried_entities = table_client.query_entity(query_options=self.query_options)
+
+ # queried_entities type is ItemPaged
+ for entity_chosen in queried_entities:
+                # iterate through the paged results; calls to the service for
+                # further pages happen transparently during iteration
+ print(entity_chosen)
+ except HttpResponseError as e:
+ print(e.message)
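+
+
+# Usage sketch, following the __main__ convention of the other samples here.
+if __name__ == '__main__':
+    sample = CreateODataQuery()
+    sample.create_query_entities()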
diff --git a/sdk/table/samples/creation_deletion_of_table.py b/sdk/table/samples/creation_deletion_of_table.py
new file mode 100644
index 000000000000..8adaf88d265e
--- /dev/null
+++ b/sdk/table/samples/creation_deletion_of_table.py
@@ -0,0 +1,45 @@
+class CreateDeleteTable(object):
+ connection_string = "DefaultEndpointsProtocol=https;AccountName=example;AccountKey=fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK==;EndpointSuffix=core.windows.net"
+ table_name = "OfficeSupplies"
+ account_url = "https://example.table.core.windows.net/"
+ account_name = "example"
+ access_key = "fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK=="
+
+    # These operations are account-level, so the TableServiceClient needs no table name
+    def shared_key_credential(self):
+        from azure.azure_table import TableServiceClient
+
+        # authenticate the service client with the account's shared key
+        table_service = TableServiceClient(account_url=self.account_url, credential=self.access_key)
+        return table_service
+
+ def create_table(self):
+ from azure.azure_table import TableServiceClient
+ from azure.core.exceptions import HttpResponseError, ResourceExistsError
+
+ table_service_client = TableServiceClient(account_url=self.account_url, credential=self.access_key)
+
+        try:
+            table_created = table_service_client.create_table(table_name=self.table_name)
+            print(table_created.table_name)
+        except ResourceExistsError:
+            # catch the more specific error first: it subclasses HttpResponseError
+            raise
+        except HttpResponseError as e:
+            print(e.response)
+
+ def delete_table(self):
+ from azure.azure_table import TableServiceClient
+ from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
+
+ table_service_client = TableServiceClient(account_url=self.account_url, credential=self.access_key)
+        try:
+            table_service_client.delete_table(table_name=self.table_name)
+            # delete_table returns None
+        except ResourceNotFoundError:
+            # catch the more specific error first: it subclasses HttpResponseError
+            raise
+        except HttpResponseError as e:
+            print(e.response)
+
+
+if __name__ == '__main__':
+ sample = CreateDeleteTable()
+ sample.create_table()
+ # sample.delete_table()
diff --git a/sdk/table/samples/inserting_deleting_entities.py b/sdk/table/samples/inserting_deleting_entities.py
new file mode 100644
index 000000000000..89d819f18cca
--- /dev/null
+++ b/sdk/table/samples/inserting_deleting_entities.py
@@ -0,0 +1,45 @@
+class InsertDeleteEntity(object):
+ connection_string = "DefaultEndpointsProtocol=https;AccountName=example;AccountKey=fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK==;EndpointSuffix=core.windows.net"
+ table_name = "NAME"
+ account_url = "https://example.table.core.windows.net/"
+ account_name = "example"
+ access_key = "fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK=="
+
+ # Assuming there is a created table
+ partition_key = "Crayola Marker"
+ row_key = "Marker"
+    entity = {
+        'product': 'Marker',
+        'color': 'Purple',
+        'price': '$5'
+    }
+
+    def insert_entity(self):
+
+ from azure.azure_table import TableClient
+ from azure.core.exceptions import HttpResponseError, ResourceExistsError
+
+ table_client = TableClient(account_url=self.account_url, credential=self.access_key)
+        try:
+            inserted_entity = table_client.insert_entity(partition_key=self.partition_key,
+                                                         row_key=self.row_key,
+                                                         table_entity_properties=self.entity)
+            # inserted_entity is a dict[str, object]
+            print(inserted_entity.items())  # print the entity's key-value pairs
+        except ResourceExistsError:
+            # catch the more specific error first: it subclasses HttpResponseError
+            raise
+        except HttpResponseError as e:
+            print(e.response)
+
+ def delete_entity(self):
+
+ from azure.azure_table import TableClient
+ from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
+
+ table_client = TableClient(account_url=self.account_url, credential=self.access_key)
+        try:
+            table_client.delete_entity(partition_key=self.partition_key,
+                                       row_key=self.row_key)
+            # delete_entity returns None
+        except ResourceNotFoundError:
+            # catch the more specific error first: it subclasses HttpResponseError
+            raise
+        except HttpResponseError as e:
+            print(e.response)
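+
+
+# Usage sketch, following the __main__ convention of the other samples:
+# insert the entity, then delete it again.
+if __name__ == '__main__':
+    sample = InsertDeleteEntity()
+    sample.insert_entity()
+    sample.delete_entity()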
diff --git a/sdk/table/samples/querying_table.py b/sdk/table/samples/querying_table.py
new file mode 100644
index 000000000000..43c7ab6876af
--- /dev/null
+++ b/sdk/table/samples/querying_table.py
@@ -0,0 +1,36 @@
+from azure.azure_table._generated.models import QueryOptions
+
+
+class QueryTable(object):
+    connection_string = "DefaultEndpointsProtocol=https;AccountName=example;AccountKey=fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK==;EndpointSuffix=core.windows.net"
+    account_url = "https://example.table.core.windows.net/"
+    account_name = "example"
+    access_key = "fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK=="
+
+    # Build a query filter for the table (only one table_name is needed)
+    table_name = "Office Supplies"
+    name_filter = "TableName eq '{}'".format(table_name)
+    query_options = QueryOptions(filter=name_filter)
+
+    def list_tables(self):
+        from azure.azure_table import TableServiceClient
+
+        table_service_client = TableServiceClient(account_url=self.account_url, credential=self.access_key)
+        tables = table_service_client.list_tables()
+        # list_tables() returns an ItemPaged over the account's tables
+
+        for table in tables:
+            print(table.table_name)
+
+    def query_tables(self):
+        from azure.azure_table import TableServiceClient
+
+        table_service_client = TableServiceClient(account_url=self.account_url, credential=self.access_key)
+        queried_tables = table_service_client.query_tables(query_options=self.query_options)
+        # query_tables() returns an ItemPaged over the filtered tables
+
+        for table in queried_tables:
+            print(table.table_name)
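+
+
+# Usage sketch, following the __main__ convention of the other samples.
+if __name__ == '__main__':
+    sample = QueryTable()
+    sample.list_tables()
+    sample.query_tables()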
diff --git a/sdk/table/samples/update_entity.py b/sdk/table/samples/update_entity.py
new file mode 100644
index 000000000000..cc3c5dd2a1c2
--- /dev/null
+++ b/sdk/table/samples/update_entity.py
@@ -0,0 +1,38 @@
+class UpdateEntity(object):
+ connection_string = "DefaultEndpointsProtocol=https;AccountName=example;AccountKey" \
+ "=fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK==;EndpointSuffix=core.windows.net "
+ table_name = "OfficeSupplies"
+ account_url = "https://example.table.core.windows.net/"
+ account_name = "example"
+ access_key = "fasgfbhBDFAShjDQ4jkvbnaBFHJOWS6gkjngdakeKFNLK=="
+
+    partition_key = "1"
+    row_key = "1"
+    # PartitionKey and RowKey identify the entity; updates replace the other
+    # properties but cannot change the keys themselves
+ entity = {
+ 'text': 'Marker',
+ 'color': 'Purple',
+ 'price': '5'
+ }
+
+ def update_entity(self):
+ from azure.azure_table import TableClient
+ from azure.core.exceptions import HttpResponseError
+
+ table_client = TableClient(account_url=self.account_url, credential=self.access_key)
+ try:
+ table_client.update_entity(partition_key=self.partition_key, row_key=self.row_key,
+ table_entity_properties=self.entity)
+ except HttpResponseError as e:
+ print(e.response)
+
+ def upsert_entity(self):
+ from azure.azure_table import TableClient
+ from azure.core.exceptions import HttpResponseError
+
+ table_client = TableClient(account_url=self.account_url, credential=self.access_key)
+ try:
+ table_client.upsert_entity(partition_key=self.partition_key, row_key=self.row_key,
+ table_entity_properties=self.entity)
+ except HttpResponseError as e:
+ print(e.response)
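+
+
+# Usage sketch, following the __main__ convention of the other samples:
+# update the existing entity, then upsert (update-or-insert) it.
+if __name__ == '__main__':
+    sample = UpdateEntity()
+    sample.update_entity()
+    sample.upsert_entity()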
diff --git a/sdk/table/setup.py b/sdk/table/setup.py
new file mode 100644
index 000000000000..979b09337a23
--- /dev/null
+++ b/sdk/table/setup.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+
+#-------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#--------------------------------------------------------------------------
+
+import re
+import os.path
+from io import open
+from setuptools import find_packages, setup # type: ignore
+
+# Change PACKAGE_NAME only; the folder path and namespace below are derived from it
+PACKAGE_NAME = "azure-table"
+PACKAGE_PPRINT_NAME = "Azure Table"
+
+# a-b-c => a/b/c
+package_folder_path = PACKAGE_NAME.replace('-', '/')
+# a-b-c => a.b.c
+namespace_name = PACKAGE_NAME.replace('-', '.')
+
+# azure v0.x is not compatible with this package
+# azure v0.x used to have a __version__ attribute (newer versions don't)
+try:
+ import azure
+ try:
+ ver = azure.__version__ # type: ignore
+ raise Exception(
+ 'This package is incompatible with azure=={}. '.format(ver) +
+ 'Uninstall it with "pip uninstall azure".'
+ )
+ except AttributeError:
+ pass
+except ImportError:
+ pass
+
+# Version extraction inspired from 'requests'
+with open(os.path.join(package_folder_path, '_version.py'), 'r') as fd:
+ version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', # type: ignore
+ fd.read(), re.MULTILINE).group(1)
+
+if not version:
+ raise RuntimeError('Cannot find version information')
+
+setup(
+ name=PACKAGE_NAME,
+ version=version,
+ description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
+ long_description='\n\n',
+ long_description_content_type='text/markdown',
+ license='MIT License',
+ author='Microsoft Corporation',
+ author_email='ascl@microsoft.com',
+    url='https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/table',
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'License :: OSI Approved :: MIT License',
+ ],
+ zip_safe=False,
+    packages=find_packages(exclude=[
+        # Exclude packages that will be covered by PEP420 or nspkg
+        'azure',
+        'tests',
+        'tests.common'
+    ]),
+ install_requires=[
+ "azure-core<2.0.0,>=1.2.2",
+ "msrest>=0.6.10",
+ "cryptography>=2.1.4"
+ ],
+ extras_require={
+ ":python_version<'3.0'": ['futures', 'azure-storage-nspkg<4.0.0,>=3.0.0'],
+ ":python_version<'3.4'": ['enum34>=1.0.4'],
+ ":python_version<'3.5'": ["typing"]
+ },
+)
diff --git a/sdk/table/tests/_shared/__init__.py b/sdk/table/tests/_shared/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/sdk/table/tests/_shared/asynctestcase.py b/sdk/table/tests/_shared/asynctestcase.py
new file mode 100644
index 000000000000..b23fb1974198
--- /dev/null
+++ b/sdk/table/tests/_shared/asynctestcase.py
@@ -0,0 +1,56 @@
+# coding: utf-8
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import asyncio
+import functools
+
+from azure_devtools.scenario_tests.utilities import trim_kwargs_from_test_function
+
+from azure.core.credentials import AccessToken
+
+from .testcase import TableTestCase
+
+LOGGING_FORMAT = '%(asctime)s %(name)-20s %(levelname)-5s %(message)s'
+
+class AsyncFakeTokenCredential(object):
+    """Async credential stand-in that returns a fixed AccessToken,
+    matching the async get_token protocol the SDK clients expect.
+    """
+ def __init__(self):
+ self.token = AccessToken("YOU SHALL NOT PASS", 0)
+
+ async def get_token(self, *args):
+ return self.token
+
+
+class AsyncTableTestCase(TableTestCase):
+ @staticmethod
+ def await_prepared_test(test_fn):
+ """Synchronous wrapper for async test methods. Used to avoid making changes
+ upstream to AbstractPreparer (which doesn't await the functions it wraps)
+ """
+
+ @functools.wraps(test_fn)
+ def run(test_class_instance, *args, **kwargs):
+ trim_kwargs_from_test_function(test_fn, kwargs)
+ loop = asyncio.get_event_loop()
+ return loop.run_until_complete(test_fn(test_class_instance, **kwargs))
+
+ return run
+
+ def generate_oauth_token(self):
+ if self.is_live:
+ from azure.identity.aio import ClientSecretCredential
+ return ClientSecretCredential(
+ self.get_settings_value("TENANT_ID"),
+ self.get_settings_value("CLIENT_ID"),
+ self.get_settings_value("CLIENT_SECRET"),
+ )
+ return self.generate_fake_token()
+
+ def generate_fake_token(self):
+ return AsyncFakeTokenCredential()
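+
+
+# Usage sketch (illustration only; the test class and method names are
+# assumptions): decorate an async test so the synchronous runner can drive it.
+#
+#   class MyTableTestAsync(AsyncTableTestCase):
+#       @AsyncTableTestCase.await_prepared_test
+#       async def test_get_token(self):
+#           token = await self.generate_fake_token().get_token()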
diff --git a/sdk/table/tests/_shared/testcase.py b/sdk/table/tests/_shared/testcase.py
new file mode 100644
index 000000000000..bff1dea6c766
--- /dev/null
+++ b/sdk/table/tests/_shared/testcase.py
@@ -0,0 +1,440 @@
+# coding: utf-8
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+from __future__ import division
+import os
+import os.path
+import time
+from datetime import datetime, timedelta
+
+from azure.azure_table import generate_account_sas
+from azure.azure_table._shared.models import ResourceTypes, AccountSasPermissions
+
+try:
+    basestring
+except NameError:  # Python 3 has no basestring
+    basestring = str
+
+try:
+ import unittest.mock as mock
+except ImportError:
+ import mock
+
+import zlib
+import sys
+import random
+import re
+import logging
+from devtools_testutils import (
+ AzureMgmtTestCase,
+ AzureMgmtPreparer,
+ ResourceGroupPreparer,
+ StorageAccountPreparer,
+ FakeResource,
+)
+from azure_devtools.scenario_tests import RecordingProcessor, AzureTestError
+
+try:
+ from cStringIO import StringIO # Python 2
+except ImportError:
+ from io import StringIO
+
+from azure.core.credentials import AccessToken
+from azure.mgmt.storage.models import StorageAccount, Endpoints
+
+try:
+ from devtools_testutils import mgmt_settings_real as settings
+except ImportError:
+ from devtools_testutils import mgmt_settings_fake as settings
+
+import pytest
+
+
+LOGGING_FORMAT = '%(asctime)s %(name)-20s %(levelname)-5s %(message)s'
+
+class FakeTokenCredential(object):
+    """Credential stand-in that returns a fixed AccessToken, matching the
+    get_token protocol the SDK clients expect.
+    """
+ def __init__(self):
+ self.token = AccessToken("YOU SHALL NOT PASS", 0)
+
+ def get_token(self, *args):
+ return self.token
+
+
+class XMSRequestIDBody(RecordingProcessor):
+    """Recording processor used only for Storage batch calls: it strips the
+    echoed x-ms-client-request-id from multipart response bodies.
+    """
+ def process_response(self, response):
+ content_type = None
+ for key, value in response.get('headers', {}).items():
+ if key.lower() == 'content-type':
+ content_type = (value[0] if isinstance(value, list) else value).lower()
+ break
+
+ if content_type and 'multipart/mixed' in content_type:
+ response['body']['string'] = re.sub(b"x-ms-client-request-id: [a-f0-9-]+\r\n", b"", response['body']['string'])
+
+ return response
+
+
+class GlobalStorageAccountPreparer(AzureMgmtPreparer):
+ def __init__(self):
+ super(GlobalStorageAccountPreparer, self).__init__(
+ name_prefix='',
+ random_name_length=42
+ )
+
+ def create_resource(self, name, **kwargs):
+ storage_account = TableTestCase._STORAGE_ACCOUNT
+ if self.is_live:
+ self.test_class_instance.scrubber.register_name_pair(
+ storage_account.name,
+ "storagename"
+ )
+ else:
+ name = "storagename"
+ storage_account.name = name
+ storage_account.primary_endpoints.table = 'https://{}.{}.core.windows.net'.format(name, 'table')
+
+ return {
+ 'location': 'westus',
+ 'resource_group': TableTestCase._RESOURCE_GROUP,
+ 'storage_account': storage_account,
+ 'storage_account_key': TableTestCase._STORAGE_KEY,
+ 'storage_account_cs': TableTestCase._STORAGE_CONNECTION_STRING,
+ }
+
+class GlobalResourceGroupPreparer(AzureMgmtPreparer):
+ def __init__(self):
+ super(GlobalResourceGroupPreparer, self).__init__(
+ name_prefix='',
+ random_name_length=42
+ )
+
+ def create_resource(self, name, **kwargs):
+ rg = TableTestCase._RESOURCE_GROUP
+ if self.is_live:
+ self.test_class_instance.scrubber.register_name_pair(
+ rg.name,
+ "rgname"
+ )
+ else:
+ rg = FakeResource(
+ name="rgname",
+ id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rgname"
+ )
+
+ return {
+ 'location': 'westus',
+ 'resource_group': rg,
+ }
+
+
+class TableTestCase(AzureMgmtTestCase):
+
+ def __init__(self, *args, **kwargs):
+ super(TableTestCase, self).__init__(*args, **kwargs)
+ self.replay_processors.append(XMSRequestIDBody())
+
+ def connection_string(self, account, key):
+ return "DefaultEndpointsProtocol=https;AccountName=" + account.name + ";AccountKey=" + str(key) + ";EndpointSuffix=core.windows.net"
+
+    def account_url(self, account, endpoint_type):
+        """Return the URL of the storage account.
+
+        :param account: The storage account (model object or bare name).
+        :param str endpoint_type: The service part of the URL, e.g. "table" or "cosmos".
+        """
+        try:
+            if endpoint_type == "table":
+                return account.primary_endpoints.table.rstrip("/")
+            if endpoint_type == "cosmos":
+                return "https://{}.table.cosmos.azure.com".format(account.name)
+            raise ValueError("Unknown storage type {}".format(endpoint_type))
+        except AttributeError:  # no "primary_endpoints"; account is a bare name
+            return 'https://{}.{}.core.windows.net'.format(account, endpoint_type)
+
+ def configure_logging(self):
+ try:
+ enable_logging = self.get_settings_value("ENABLE_LOGGING")
+ except AzureTestError:
+ enable_logging = True # That's the default value in fake settings
+
+        if enable_logging:
+            self.enable_logging()
+        else:
+            self.disable_logging()
+
+ def enable_logging(self):
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter(LOGGING_FORMAT))
+ self.logger.handlers = [handler]
+ self.logger.setLevel(logging.INFO)
+ self.logger.propagate = True
+ self.logger.disabled = False
+
+ def disable_logging(self):
+ self.logger.propagate = False
+ self.logger.disabled = True
+ self.logger.handlers = []
+
+ def sleep(self, seconds):
+ if self.is_live:
+ time.sleep(seconds)
+
+ def get_random_bytes(self, size):
+ # recordings don't like random stuff. making this more
+ # deterministic.
+ return b'a'*size
+
+    def get_random_text_data(self, size):
+        '''Return deterministic pseudo-random unicode text of at least `size`
+        characters, seeded per test so recordings stay stable.'''
+        checksum = zlib.adler32(self.qualified_test_name.encode()) & 0xffffffff
+        rand = random.Random(checksum)
+        text = u''
+        words = [u'hello', u'world', u'python', u'啊齄丂狛狜']
+        while len(text) < size:
+            index = int(rand.random() * (len(words) - 1))
+            text = text + u' ' + words[index]
+
+        return text
+
+ @staticmethod
+ def _set_test_proxy(service, settings):
+ if settings.USE_PROXY:
+ service.set_proxy(
+ settings.PROXY_HOST,
+ settings.PROXY_PORT,
+ settings.PROXY_USER,
+ settings.PROXY_PASSWORD,
+ )
+
+ def assertNamedItemInContainer(self, container, item_name, msg=None):
+ def _is_string(obj):
+ if sys.version_info >= (3,):
+ return isinstance(obj, str)
+ else:
+ return isinstance(obj, basestring)
+ for item in container:
+ if _is_string(item):
+ if item == item_name:
+ return
+ elif item.name == item_name:
+ return
+            elif hasattr(item, 'snapshot') and item.snapshot == item_name:
+                return
+
+        standardMsg = '{0} not found in {1}'.format(
+            repr(item_name), [str(c) for c in container])
+        self.fail(self._formatMessage(msg, standardMsg))
+
+ def assertNamedItemNotInContainer(self, container, item_name, msg=None):
+ for item in container:
+ if item.name == item_name:
+ standardMsg = '{0} unexpectedly found in {1}'.format(
+ repr(item_name), repr(container))
+ self.fail(self._formatMessage(msg, standardMsg))
+
+ def generate_oauth_token(self):
+ if self.is_live:
+ from azure.identity import ClientSecretCredential
+ return ClientSecretCredential(
+ self.get_settings_value("TENANT_ID"),
+ self.get_settings_value("CLIENT_ID"),
+ self.get_settings_value("CLIENT_SECRET"),
+ )
+ return self.generate_fake_token()
+
+ def generate_sas_token(self):
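+        # Build an account-level SAS from a fixed fake key so recordings stay
+        # deterministic: read/list permission on objects, valid from 24 hours
+        # ago until 8 days from now.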
+ fake_key = 'a'*30 + 'b'*30
+
+ return '?' + generate_account_sas(
+ account_name = 'test', # name of the storage account
+ account_key = fake_key, # key for the storage account
+ resource_types = ResourceTypes(object=True),
+ permission = AccountSasPermissions(read=True,list=True),
+ start = datetime.now() - timedelta(hours = 24),
+ expiry = datetime.now() + timedelta(days = 8)
+ )
+
+ def generate_fake_token(self):
+ return FakeTokenCredential()
+
+
+def not_for_emulator(test):
+    # Placeholder: the emulator is not targeted by these tests yet, so the
+    # wrapped test currently runs unconditionally.
+    def skip_test_if_targeting_emulator(self):
+        test(self)
+    return skip_test_if_targeting_emulator
+
+
+class RetryCounter(object):
+ def __init__(self):
+ self.count = 0
+
+ def simple_count(self, retry_context):
+ self.count += 1
+
+
+class ResponseCallback(object):
+ def __init__(self, status=None, new_status=None):
+ self.status = status
+ self.new_status = new_status
+ self.first = True
+ self.count = 0
+
+ def override_first_status(self, response):
+ if self.first and response.http_response.status_code == self.status:
+ response.http_response.status_code = self.new_status
+ self.first = False
+ self.count += 1
+
+ def override_status(self, response):
+ if response.http_response.status_code == self.status:
+ response.http_response.status_code = self.new_status
+ self.count += 1
+
+
+class LogCaptured(object):
+ def __init__(self, test_case=None):
+ # accept the test case so that we may reset logging after capturing logs
+ self.test_case = test_case
+
+ def __enter__(self):
+ # enable logging
+ # it is possible that the global logging flag is turned off
+ self.test_case.enable_logging()
+
+ # create a string stream to send the logs to
+ self.log_stream = StringIO()
+
+ # the handler needs to be stored so that we can remove it later
+ self.handler = logging.StreamHandler(self.log_stream)
+ self.handler.setFormatter(logging.Formatter(LOGGING_FORMAT))
+
+ # get and enable the logger to send the outputs to the string stream
+ self.logger = logging.getLogger('azure.storage')
+ self.logger.level = logging.DEBUG
+ self.logger.addHandler(self.handler)
+
+ # the stream is returned to the user so that the capture logs can be retrieved
+ return self.log_stream
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ # stop the handler, and close the stream to exit
+ self.logger.removeHandler(self.handler)
+ self.log_stream.close()
+
+ # reset logging since we messed with the setting
+ self.test_case.configure_logging()
+
+
+@pytest.fixture(scope="session")
+def storage_account():
+ test_case = AzureMgmtTestCase("__init__")
+ rg_preparer = ResourceGroupPreparer(random_name_enabled=True, name_prefix='pystorage')
+ storage_preparer = StorageAccountPreparer(random_name_enabled=True, name_prefix='pyacrstorage')
+
+ # Create
+ subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", None)
+ location = os.environ.get("AZURE_LOCATION", "westus")
+
+ existing_rg_name = os.environ.get("AZURE_RESOURCEGROUP_NAME")
+ existing_storage_name = os.environ.get("AZURE_STORAGE_ACCOUNT_NAME")
+ existing_storage_key = os.environ.get("AZURE_STORAGE_ACCOUNT_KEY")
+ storage_connection_string = os.environ.get("AZURE_STORAGE_CONNECTION_STRING")
+
+ i_need_to_create_rg = not (existing_rg_name or existing_storage_name or storage_connection_string)
+ got_storage_info_from_env = existing_storage_name or storage_connection_string
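+    # If the environment supplies a resource group, storage account, or
+    # connection string, reuse those and build fake model objects around them;
+    # otherwise provision a resource group and storage account for the session
+    # and tear them down afterwards.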
+
+ try:
+ if i_need_to_create_rg:
+ rg_name, rg_kwargs = rg_preparer._prepare_create_resource(test_case)
+ rg = rg_kwargs['resource_group']
+ else:
+ rg_name = existing_rg_name or "no_rg_needed"
+ rg = FakeResource(
+ name=rg_name,
+ id="/subscriptions/{}/resourceGroups/{}".format(subscription_id, rg_name)
+ )
+ TableTestCase._RESOURCE_GROUP = rg
+
+ try:
+ if got_storage_info_from_env:
+
+ if storage_connection_string:
+ storage_connection_string_parts = dict([
+ part.split('=', 1)
+ for part in storage_connection_string.split(";")
+ ])
+
+ storage_account = None
+ if existing_storage_name:
+ storage_name = existing_storage_name
+ storage_account = StorageAccount(
+ location=location,
+ )
+ storage_account.name = storage_name
+ storage_account.id = storage_name
+ storage_account.primary_endpoints=Endpoints()
+ storage_account.primary_endpoints.table = 'https://{}.{}.core.windows.net'.format(storage_name, 'table')
+ storage_key = existing_storage_key
+
+ if not storage_connection_string:
+ # It means I have received a storage name from env
+ storage_connection_string=";".join([
+ "DefaultEndpointsProtocol=https",
+ "AccountName={}".format(storage_name),
+ "AccountKey={}".format(storage_key),
+ "TableEndpoint={}".format(storage_account.primary_endpoints.table),
+ ])
+
+ if not storage_account:
+ # It means I have received a connection string
+ storage_name = storage_connection_string_parts["AccountName"]
+ storage_account = StorageAccount(
+ location=location,
+ )
+
+ def build_service_endpoint(service):
+ try:
+ suffix = storage_connection_string_parts["EndpointSuffix"]
+ except KeyError:
+ suffix = "cosmos.azure.com"
+ return "{}://{}.{}.{}".format(
+ storage_connection_string_parts.get("DefaultEndpointsProtocol", "https"),
+ storage_connection_string_parts["AccountName"],
+ service,
+ suffix
+ )
+
+ storage_account.name = storage_name
+ storage_account.id = storage_name
+ storage_account.primary_endpoints=Endpoints()
+ storage_account.primary_endpoints.table = storage_connection_string_parts.get("TableEndpoint", build_service_endpoint("table"))
+ storage_account.secondary_endpoints=Endpoints()
+ storage_account.secondary_endpoints.table = storage_connection_string_parts.get("TableSecondaryEndpoint", build_service_endpoint("table"))
+ storage_key = storage_connection_string_parts["AccountKey"]
+
+ else:
+ storage_name, storage_kwargs = storage_preparer._prepare_create_resource(test_case, **rg_kwargs)
+ storage_account = storage_kwargs['storage_account']
+ storage_key = storage_kwargs['storage_account_key']
+ storage_connection_string = storage_kwargs['storage_account_cs']
+
+ TableTestCase._STORAGE_ACCOUNT = storage_account
+ TableTestCase._STORAGE_KEY = storage_key
+ TableTestCase._STORAGE_CONNECTION_STRING = storage_connection_string
+ yield
+ finally:
+ if not got_storage_info_from_env:
+ storage_preparer.remove_resource(
+ storage_name,
+ resource_group=rg
+ )
+ finally:
+ if i_need_to_create_rg:
+ rg_preparer.remove_resource(rg_name)
+ TableTestCase._RESOURCE_GROUP = None
diff --git a/sdk/table/tests/conftest.py b/sdk/table/tests/conftest.py
new file mode 100644
index 000000000000..b73f2c91f9b2
--- /dev/null
+++ b/sdk/table/tests/conftest.py
@@ -0,0 +1,40 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+import sys
+
+# fixture needs to be visible from conftest
+from _shared.testcase import storage_account
+
+# Ignore async tests for Python < 3.5
+collect_ignore_glob = []
+if sys.version_info < (3, 5):
+ collect_ignore_glob.append("*_async.py")
+
+def pytest_configure(config):
+    # apply the storage_account fixture to every collected test
+    config.addinivalue_line(
+        "usefixtures", "storage_account"
+    )
diff --git a/sdk/table/tests/encryption_test_helper.py b/sdk/table/tests/encryption_test_helper.py
new file mode 100644
index 000000000000..a7548a549cb8
--- /dev/null
+++ b/sdk/table/tests/encryption_test_helper.py
@@ -0,0 +1,90 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.asymmetric.padding import (
+ OAEP,
+ MGF1,
+)
+from cryptography.hazmat.primitives.asymmetric.rsa import generate_private_key
+from cryptography.hazmat.primitives.hashes import SHA1
+from cryptography.hazmat.primitives.keywrap import (
+ aes_key_wrap,
+ aes_key_unwrap,
+)
+
+_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM = 'Unknown key wrap algorithm.'
+
+class KeyWrapper:
+ def __init__(self, kid='local:key1'):
+ # Must have constant key value for recorded tests, otherwise we could use a random generator.
+ self.kek = b'\xbe\xa4\x11K\x9eJ\x07\xdafF\x83\xad+\xadvA C\xe8\xbc\x90\xa4\x11}G\xc3\x0f\xd4\xb4\x19m\x11'
+ self.backend = default_backend()
+ self.kid = kid
+
+ def wrap_key(self, key, algorithm='A256KW'):
+ if algorithm == 'A256KW':
+ return aes_key_wrap(self.kek, key, self.backend)
+
+ raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM)
+
+ def unwrap_key(self, key, algorithm):
+ if algorithm == 'A256KW':
+ return aes_key_unwrap(self.kek, key, self.backend)
+
+ raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM)
+
+ def get_key_wrap_algorithm(self):
+ return 'A256KW'
+
+ def get_kid(self):
+ return self.kid
+
+
+class KeyResolver:
+ def __init__(self):
+ self.keys = {}
+
+ def put_key(self, key):
+ self.keys[key.get_kid()] = key
+
+ def resolve_key(self, kid):
+ return self.keys[kid]
+
+
+class RSAKeyWrapper:
+ def __init__(self, kid='local:key2'):
+ self.private_key = generate_private_key(public_exponent=65537,
+ key_size=2048,
+ backend=default_backend())
+ self.public_key = self.private_key.public_key()
+ self.kid = kid
+
+ def wrap_key(self, key, algorithm='RSA'):
+ if algorithm == 'RSA':
+ return self.public_key.encrypt(key,
+ OAEP(
+ mgf=MGF1(algorithm=SHA1()),
+ algorithm=SHA1(),
+ label=None)
+ )
+
+ raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM)
+
+ def unwrap_key(self, key, algorithm):
+ if algorithm == 'RSA':
+ return self.private_key.decrypt(key,
+ OAEP(
+ mgf=MGF1(algorithm=SHA1()),
+ algorithm=SHA1(),
+ label=None)
+ )
+
+ raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM)
+
+ def get_key_wrap_algorithm(self):
+ return 'RSA'
+
+ def get_kid(self):
+ return self.kid
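+
+
+if __name__ == '__main__':
+    # Round-trip sketch (illustration only, not used by the tests): wrap a
+    # 256-bit content key with the AES KEK, resolve the KEK by its kid, and
+    # unwrap the key again.
+    kek = KeyWrapper()
+    wrapped = kek.wrap_key(b'0' * 32)
+    resolver = KeyResolver()
+    resolver.put_key(kek)
+    unwrapped = resolver.resolve_key(kek.get_kid()).unwrap_key(wrapped, 'A256KW')
+    assert unwrapped == b'0' * 32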
diff --git a/sdk/table/tests/recordings/test_table.test_account_sas.yaml b/sdk/table/tests/recordings/test_table.test_account_sas.yaml
new file mode 100644
index 000000000000..5dc054eb6de1
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_account_sas.yaml
@@ -0,0 +1,190 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesync99dc0b08"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Wed, 10 Jun 2020 13:48:25 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 13:48:25 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesync99dc0b08"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Wed, 10 Jun 2020 13:48:25 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesync99dc0b08')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '{"PartitionKey": "test", "RowKey": "test1", "text": "hello"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '60'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Wed, 10 Jun 2020 13:48:26 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 13:48:26 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/pytablesync99dc0b08
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#pytablesync99dc0b08/@Element","odata.etag":"W/\"datetime''2020-06-10T13%3A48%3A26.3305519Z''\"","PartitionKey":"test","RowKey":"test1","Timestamp":"2020-06-10T13:48:26.3305519Z","text":"hello"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Wed, 10 Jun 2020 13:48:26 GMT
+ etag:
+ - W/"datetime'2020-06-10T13%3A48%3A26.3305519Z'"
+ location:
+ - https://storagename.table.core.windows.net/pytablesync99dc0b08(PartitionKey='test',RowKey='test1')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '{"PartitionKey": "test", "RowKey": "test2", "text": "hello"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '60'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Wed, 10 Jun 2020 13:48:26 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 13:48:26 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/pytablesync99dc0b08
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#pytablesync99dc0b08/@Element","odata.etag":"W/\"datetime''2020-06-10T13%3A48%3A26.4616441Z''\"","PartitionKey":"test","RowKey":"test2","Timestamp":"2020-06-10T13:48:26.4616441Z","text":"hello"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Wed, 10 Jun 2020 13:48:26 GMT
+ etag:
+ - W/"datetime'2020-06-10T13%3A48%3A26.4616441Z'"
+ location:
+ - https://storagename.table.core.windows.net/pytablesync99dc0b08(PartitionKey='test',RowKey='test2')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Wed, 10 Jun 2020 13:48:26 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 13:48:26 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesync99dc0b08')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Wed, 10 Jun 2020 13:48:26 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_create_table.yaml b/sdk/table/tests/recordings/test_table.test_create_table.yaml
new file mode 100644
index 000000000000..2c9a784d203d
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_create_table.yaml
@@ -0,0 +1,90 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesynca4ed0b50"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:52 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:52 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesynca4ed0b50"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:52 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesynca4ed0b50')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:52 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:52 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesynca4ed0b50')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Thu, 11 Jun 2020 14:06:52 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_create_table_fail_on_exist.yaml b/sdk/table/tests/recordings/test_table.test_create_table_fail_on_exist.yaml
new file mode 100644
index 000000000000..0396bc1e3df5
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_create_table_fail_on_exist.yaml
@@ -0,0 +1,137 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesync6d7c1113"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesync6d7c1113"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesync6d7c1113')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '{"TableName": "pytablesync6d7c1113"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.error":{"code":"TableAlreadyExists","message":{"lang":"en-US","value":"The
+ table specified already exists.\nRequestId:a8cf1036-f002-0067-59f9-3f90cc000000\nTime:2020-06-11T14:06:53.7759946Z"}}}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 409
+ message: Conflict
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesync6d7c1113')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_delete_table_with_existing_table.yaml b/sdk/table/tests/recordings/test_table.test_delete_table_with_existing_table.yaml
new file mode 100644
index 000000000000..bcb9a0727f25
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_delete_table_with_existing_table.yaml
@@ -0,0 +1,90 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesyncded1139b"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesyncded1139b"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:53 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesyncded1139b')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesyncded1139b')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_delete_table_with_non_existing_table_fail_not_exist.yaml b/sdk/table/tests/recordings/test_table.test_delete_table_with_non_existing_table_fail_not_exist.yaml
new file mode 100644
index 000000000000..79be5c940765
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_delete_table_with_non_existing_table_fail_not_exist.yaml
@@ -0,0 +1,49 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesynca12c1b7c')
+ response:
+ body:
+ string: 'ResourceNotFound
The specified resource does not exist.
+
+ RequestId:c526fa55-0002-003d-0ff9-3f964d000000
+
+ Time:2020-06-11T14:06:54.9397921Z'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/xml;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 404
+ message: Not Found
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_get_table_acl.yaml b/sdk/table/tests/recordings/test_table.test_get_table_acl.yaml
new file mode 100644
index 000000000000..708294cf2988
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_get_table_acl.yaml
@@ -0,0 +1,127 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesyncb07a0bab"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Wed, 10 Jun 2020 15:51:31 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 15:51:31 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesyncb07a0bab"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Wed, 10 Jun 2020 15:51:31 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesyncb07a0bab')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: GET
+ uri: https://storagename.table.core.windows.net/pytablesyncb07a0bab?comp=acl
+ response:
+ body:
+ string: "\uFEFF"
+ headers:
+ content-type:
+ - application/xml
+ date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 200
+ message: OK
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesyncb07a0bab')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_list_tables.yaml b/sdk/table/tests/recordings/test_table.test_list_tables.yaml
new file mode 100644
index 000000000000..0c77bb26c5d2
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_list_tables.yaml
@@ -0,0 +1,132 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesync9a730b0b"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesync9a730b0b"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesync9a730b0b')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: GET
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables","value":[{"TableName":"pytablesync9a730b0b"}]}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 200
+ message: OK
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesync9a730b0b')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Thu, 11 Jun 2020 14:06:54 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_list_tables_with_filter.yaml b/sdk/table/tests/recordings/test_table.test_list_tables_with_filter.yaml
new file mode 100644
index 000000000000..ad35d227d979
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_list_tables_with_filter.yaml
@@ -0,0 +1,132 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesync3f57100b"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesync3f57100b"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesync3f57100b')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: GET
+ uri: https://storagename.table.core.windows.net/Tables?$filter=TableName%20eq%20%27pytablesync3f57100b%27
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables","value":[{"TableName":"pytablesync3f57100b"}]}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 200
+ message: OK
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesync3f57100b')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Thu, 11 Jun 2020 14:06:55 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_list_tables_with_marker.yaml b/sdk/table/tests/recordings/test_table.test_list_tables_with_marker.yaml
new file mode 100644
index 000000000000..a38b2ddf911a
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_list_tables_with_marker.yaml
@@ -0,0 +1,282 @@
+interactions:
+- request:
+ body: '{"TableName": "listtable03f561007"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '35'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"listtable03f561007"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('listtable03f561007')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '{"TableName": "listtable13f561007"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '35'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"listtable13f561007"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('listtable13f561007')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '{"TableName": "listtable23f561007"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '35'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"listtable23f561007"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('listtable23f561007')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '{"TableName": "listtable33f561007"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '35'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"listtable33f561007"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('listtable33f561007')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: GET
+ uri: https://storagename.table.core.windows.net/Tables?$top=2
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables","value":[{"TableName":"listtable03f561007"},{"TableName":"listtable13f561007"}]}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-continuation-nexttablename:
+ - 1!48!bGlzdHRhYmxlMjNmNTYxMDA3ATAxZDYzZmY5OTE0YjI3M2Q-
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 200
+ message: OK
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: GET
+ uri: https://storagename.table.core.windows.net/Tables?$top=2&NextTableName=1%2148%21bGlzdHRhYmxlMjNmNTYxMDA3ATAxZDYzZmY5OTE0YjI3M2Q-
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables","value":[{"TableName":"listtable23f561007"},{"TableName":"listtable33f561007"}]}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:56 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-continuation-nexttablename:
+ - 1!48!cHl0YWJsZXN5bmMzZjU3MTAwYgEwMWQ2M2ZmOTkwYWVjYjA1
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 200
+ message: OK
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_list_tables_with_num_results.yaml b/sdk/table/tests/recordings/test_table.test_list_tables_with_num_results.yaml
new file mode 100644
index 000000000000..0cf4a0e1aa4c
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_list_tables_with_num_results.yaml
@@ -0,0 +1,280 @@
+interactions:
+- request:
+ body: '{"TableName": "listtable0967e1246"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '35'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"listtable0967e1246"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('listtable0967e1246')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '{"TableName": "listtable1967e1246"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '35'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"listtable1967e1246"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('listtable1967e1246')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '{"TableName": "listtable2967e1246"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '35'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"listtable2967e1246"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('listtable2967e1246')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '{"TableName": "listtable3967e1246"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '35'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"listtable3967e1246"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('listtable3967e1246')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:58 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:58 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: GET
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables","value":[{"TableName":"listtable03f561007"},{"TableName":"listtable0967e1246"},{"TableName":"listtable13f561007"},{"TableName":"listtable1967e1246"},{"TableName":"listtable23f561007"},{"TableName":"listtable2967e1246"},{"TableName":"listtable33f561007"},{"TableName":"listtable3967e1246"}]}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 200
+ message: OK
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:58 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:58 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: GET
+ uri: https://storagename.table.core.windows.net/Tables?$top=3
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables","value":[{"TableName":"listtable03f561007"},{"TableName":"listtable0967e1246"},{"TableName":"listtable13f561007"}]}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:57 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-continuation-nexttablename:
+ - 1!48!bGlzdHRhYmxlMTk2N2UxMjQ2ATAxZDYzZmY5OTFkYWFmZTE-
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 200
+ message: OK
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_set_table_acl_too_many_ids.yaml b/sdk/table/tests/recordings/test_table.test_set_table_acl_too_many_ids.yaml
new file mode 100644
index 000000000000..e0075297ccaa
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_set_table_acl_too_many_ids.yaml
@@ -0,0 +1,90 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesync6f17111b"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesync6f17111b"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesync6f17111b')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesync6f17111b')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Wed, 10 Jun 2020 15:51:32 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_set_table_acl_with_empty_signed_identifier.yaml b/sdk/table/tests/recordings/test_table.test_set_table_acl_with_empty_signed_identifier.yaml
new file mode 100644
index 000000000000..af449aaa2219
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_set_table_acl_with_empty_signed_identifier.yaml
@@ -0,0 +1,90 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesyncb9bd17bb"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Wed, 10 Jun 2020 16:52:25 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 16:52:25 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesyncb9bd17bb"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Wed, 10 Jun 2020 16:52:25 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesyncb9bd17bb')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Wed, 10 Jun 2020 16:52:25 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 16:52:25 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesyncb9bd17bb')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Wed, 10 Jun 2020 16:52:25 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_set_table_acl_with_empty_signed_identifiers.yaml b/sdk/table/tests/recordings/test_table.test_set_table_acl_with_empty_signed_identifiers.yaml
new file mode 100644
index 000000000000..aac2964e1431
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_set_table_acl_with_empty_signed_identifiers.yaml
@@ -0,0 +1,165 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesyncd1eb182e"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Wed, 10 Jun 2020 16:52:26 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 16:52:26 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesyncd1eb182e"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Wed, 10 Jun 2020 16:52:26 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesyncd1eb182e')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Content-Type:
+ - application/xml
+ Date:
+ - Wed, 10 Jun 2020 16:52:27 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 16:52:27 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: PUT
+ uri: https://storagename.table.core.windows.net/pytablesyncd1eb182e?comp=acl
+ response:
+ body:
+ string: ''
+ headers:
+ content-length:
+ - '0'
+ date:
+ - Wed, 10 Jun 2020 16:52:26 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Date:
+ - Wed, 10 Jun 2020 16:52:27 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 16:52:27 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: GET
+ uri: https://storagename.table.core.windows.net/pytablesyncd1eb182e?comp=acl
+ response:
+ body:
+ string: "\uFEFF"
+ headers:
+ content-type:
+ - application/xml
+ date:
+ - Wed, 10 Jun 2020 16:52:26 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 200
+ message: OK
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Wed, 10 Jun 2020 16:52:27 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 16:52:27 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesyncd1eb182e')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Wed, 10 Jun 2020 16:52:26 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_set_table_acl_with_signed_identifiers.yaml b/sdk/table/tests/recordings/test_table.test_set_table_acl_with_signed_identifiers.yaml
new file mode 100644
index 000000000000..734ccd8ba0b1
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_set_table_acl_with_signed_identifiers.yaml
@@ -0,0 +1,139 @@
+interactions:
+- request:
+ body: '{"TableName": "pytablesync45dd15a0"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '36'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Wed, 10 Jun 2020 16:52:27 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 16:52:27 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesync45dd15a0"}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Wed, 10 Jun 2020 16:52:28 GMT
+ location:
+ - https://storagename.table.core.windows.net/Tables('pytablesync45dd15a0')
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 201
+ message: Created
+- request:
+ body: '
+
+ testid2020-06-10T16:47:28.000Z2020-06-10T17:52:28.000Zr'
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '265'
+ Content-Type:
+ - application/xml
+ Date:
+ - Wed, 10 Jun 2020 16:52:28 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 16:52:28 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: PUT
+ uri: https://storagename.table.core.windows.net/pytablesync45dd15a0?comp=acl
+ response:
+ body:
+ string: 'InvalidXmlDocumentXML specified is not syntactically valid.
+
+ RequestId:fd56f767-b002-00d6-7347-3fa6d8000000
+
+ Time:2020-06-10T16:52:28.5016570Z'
+ headers:
+ content-length:
+ - '327'
+ content-type:
+ - application/xml
+ date:
+ - Wed, 10 Jun 2020 16:52:28 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-ms-error-code:
+ - InvalidXmlDocument
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 400
+ message: XML specified is not syntactically valid.
+- request:
+ body: null
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '0'
+ Date:
+ - Wed, 10 Jun 2020 16:52:28 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 10 Jun 2020 16:52:28 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: DELETE
+ uri: https://storagename.table.core.windows.net/Tables('pytablesync45dd15a0')
+ response:
+ body:
+ string: ''
+ headers:
+ cache-control:
+ - no-cache
+ content-length:
+ - '0'
+ date:
+ - Wed, 10 Jun 2020 16:52:28 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 204
+ message: No Content
+version: 1
diff --git a/sdk/table/tests/recordings/test_table.test_unicode_create_table_unicode_name.yaml b/sdk/table/tests/recordings/test_table.test_unicode_create_table_unicode_name.yaml
new file mode 100644
index 000000000000..e7dd3fb7246a
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table.test_unicode_create_table_unicode_name.yaml
@@ -0,0 +1,49 @@
+interactions:
+- request:
+ body: '{"TableName": "\u554a\u9f44\u4e02\u72db\u72dc"}'
+ headers:
+ Accept:
+ - application/json;odata=minimalmetadata
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '47'
+ Content-Type:
+ - application/json;odata=nometadata
+ DataServiceVersion:
+ - '3.0'
+ Date:
+ - Thu, 11 Jun 2020 14:06:58 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 11 Jun 2020 14:06:58 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: POST
+ uri: https://storagename.table.core.windows.net/Tables
+ response:
+ body:
+ string: '{"odata.error":{"code":"InvalidResourceName","message":{"lang":"en-US","value":"The
+ specifed resource name contains invalid characters.\nRequestId:ac6b88f7-c002-0009-2af9-3f39e5000000\nTime:2020-06-11T14:06:59.0077911Z"}}}'
+ headers:
+ cache-control:
+ - no-cache
+ content-type:
+ - application/json;odata=minimalmetadata;streaming=true;charset=utf-8
+ date:
+ - Thu, 11 Jun 2020 14:06:58 GMT
+ server:
+ - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0
+ transfer-encoding:
+ - chunked
+ x-content-type-options:
+ - nosniff
+ x-ms-version:
+ - '2019-07-07'
+ status:
+ code: 400
+ message: Bad Request
+version: 1
diff --git a/sdk/table/tests/recordings/test_table_service_properties.test_table_service_properties.yaml b/sdk/table/tests/recordings/test_table_service_properties.test_table_service_properties.yaml
new file mode 100644
index 000000000000..57f02723835d
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table_service_properties.test_table_service_properties.yaml
@@ -0,0 +1,52 @@
+interactions:
+- request:
+ body: '
+
+ 1.0falsefalsefalsefalse1.0falsefalse1.0falsefalse'
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '524'
+ Content-Type:
+ - application/xml
+ Date:
+ - Wed, 03 Jun 2020 19:45:20 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Wed, 03 Jun 2020 19:45:20 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: PUT
+ uri: https://storagename.table.core.windows.net/?restype=service&comp=properties
+ response:
+ body:
+ string: 'AuthenticationFailedServer failed to authenticate the request. Make sure the
+ value of Authorization header is formed correctly including the signature.
+
+ RequestId:37b2f3b5-4002-0051-2edf-3998c2000000
+
+ Time:2020-06-03T19:45:20.1658316Z'
+ headers:
+ content-length:
+ - '419'
+ content-type:
+ - application/xml
+ date:
+ - Wed, 03 Jun 2020 19:45:19 GMT
+ server:
+ - Microsoft-HTTPAPI/2.0
+ x-ms-error-code:
+ - AuthenticationFailed
+ status:
+ code: 403
+ message: Server failed to authenticate the request. Make sure the value of Authorization
+ header is formed correctly including the signature.
+version: 1
diff --git a/sdk/table/tests/recordings/test_table_service_stats.test_table_service_stats_f.yaml b/sdk/table/tests/recordings/test_table_service_stats.test_table_service_stats_f.yaml
new file mode 100644
index 000000000000..ee9540bdf6c0
--- /dev/null
+++ b/sdk/table/tests/recordings/test_table_service_stats.test_table_service_stats_f.yaml
@@ -0,0 +1,45 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept:
+ - application/xml
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Date:
+ - Thu, 04 Jun 2020 12:38:27 GMT
+ User-Agent:
+ - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0)
+ x-ms-date:
+ - Thu, 04 Jun 2020 12:38:27 GMT
+ x-ms-version:
+ - '2019-07-07'
+ method: GET
+ uri: https://pyacrstorage.table.core.windows.net/?restype=service&comp=stats
+ response:
+ body:
+ string: 'AuthenticationFailedServer failed to authenticate the request. Make sure the
+ value of Authorization header is formed correctly including the signature.
+
+ RequestId:da868a4c-d002-0002-406d-3a08f0000000
+
+ Time:2020-06-04T12:38:27.0031660Z'
+ headers:
+ content-length:
+ - '419'
+ content-type:
+ - application/xml
+ date:
+ - Thu, 04 Jun 2020 12:38:26 GMT
+ server:
+ - Microsoft-HTTPAPI/2.0
+ x-ms-error-code:
+ - AuthenticationFailed
+ status:
+ code: 403
+ message: Server failed to authenticate the request. Make sure the value of Authorization
+ header is formed correctly including the signature.
+version: 1
diff --git a/sdk/table/tests/test_table.py b/sdk/table/tests/test_table.py
new file mode 100644
index 000000000000..16387e7e9a9e
--- /dev/null
+++ b/sdk/table/tests/test_table.py
@@ -0,0 +1,457 @@
+# coding: utf-8
+
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import pytest
+import sys
+import locale
+import os
+from azure.azure_table import TableServiceClient, generate_account_sas
+from time import time
+from wsgiref.handlers import format_date_time
+from datetime import (
+ datetime,
+ timedelta,
+)
+
+from azure.azure_table._generated.models import AccessPolicy, QueryOptions
+from azure.azure_table._models import TableSasPermissions
+from azure.azure_table._shared.models import ResourceTypes, AccountSasPermissions
+from azure.core.pipeline import Pipeline
+from azure.core.pipeline.policies import (
+ HeadersPolicy,
+ ContentDecodePolicy,
+)
+
+from _shared.testcase import TableTestCase, GlobalStorageAccountPreparer
+from azure.azure_table._shared.authentication import SharedKeyCredentialPolicy
+from azure.core.pipeline.transport import RequestsTransport
+from azure.core.exceptions import (
+ HttpResponseError,
+ ResourceNotFoundError,
+ ResourceExistsError)
+
+# from azure.tables import (
+# TableServiceClient,
+# TableClient,
+# TableSasPermissions,
+# AccessPolicy,
+# ResourceTypes,
+# AccountSasPermissions,
+# generate_account_sas,
+# generate_table_sas
+# )
+
+# ------------------------------------------------------------------------------
+
+TEST_TABLE_PREFIX = 'pytablesync'
+
+
+# ------------------------------------------------------------------------------
+
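+# Helper for tests that need a bare pipeline instead of a full client:
+# default header injection, SharedKey request signing, and response
+# decoding, in that order, over a requests-based transport.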
+def _create_pipeline(account, credential, **kwargs):
+ # type: (Any, **Any) -> Tuple[Configuration, Pipeline]
+ credential_policy = SharedKeyCredentialPolicy(account_name=account.name, account_key=credential)
+ transport = RequestsTransport(**kwargs)
+ policies = [
+ HeadersPolicy(),
+ credential_policy,
+ ContentDecodePolicy(response_encoding="utf-8")]
+ return Pipeline(transport, policies=policies)
+
+
+class StorageTableTest(TableTestCase):
+
+ # --Helpers-----------------------------------------------------------------
+ def _get_table_reference(self, prefix=TEST_TABLE_PREFIX):
+ table_name = self.get_resource_name(prefix)
+ return table_name
+
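+ # Create-or-get helper: if a previous run left the table behind, fall
+ # back to a client for the existing table rather than failing the test.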
+ def _create_table(self, ts, prefix=TEST_TABLE_PREFIX, table_list=None):
+ table_name = self._get_table_reference(prefix)
+ try:
+ table = ts.create_table(table_name)
+ if table_list is not None:
+ table_list.append(table)
+ except ResourceExistsError:
+ table = ts.get_table_client(table_name)
+ return table
+
+ def _delete_table(self, ts, table):
+ if table is None:
+ return
+ try:
+ ts.delete_table(table.table_name)
+ except ResourceNotFoundError:
+ pass
+
+ # --Test cases for tables --------------------------------------------------
+ # @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_table(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ # response = ts.create_table(table_name)
+ # assert response.table_name == table_name
+
+ table_name = self._get_table_reference()
+ # table_client = ts.get_table_client(table_name)
+
+ # Act
+ created = ts.create_table(table_name)
+
+ # Assert
+ assert created.table_name == table_name
+ # existing = list(ts.query_tables("TableName eq '{}'".format(table_name)))
+ # This is causing problems
+ # self.assertEqual(existing, [table_name])
+ ts.delete_table(table_name)
+
+ # @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_table_fail_on_exist(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ table_name = self._get_table_reference()
+ # btable_client = ts.get_table_client(table_name)
+
+ # Act
+ created = ts.create_table(table_name)
+ with self.assertRaises(ResourceExistsError):
+ ts.create_table(table_name)
+
+ # Assert
+ self.assertTrue(created)
+ # existing = list(ts.query_tables(query_options=QueryOptions(filter="TableName eq '{}'".format(table_name))))
+ # self.assertEqual(existing[0], [table_name])
+ ts.delete_table(table_name)
+
+ # @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_list_tables(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ table = self._create_table(ts)
+
+ # Act
+ tables = list(ts.list_tables())
+
+ # Assert
+ self.assertIsNotNone(tables)
+ self.assertGreaterEqual(len(tables), 1)
+ self.assertIsNotNone(tables[0])
+ # self.assertNamedItemInContainer(tables, table.table_name)
+ ts.delete_table(table.table_name)
+
+ # @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_list_tables_with_filter(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ table = self._create_table(ts)
+
+ # Act
+ name_filter = "TableName eq '{}'".format(table.table_name)
+ tables = list(ts.query_tables(query_options=QueryOptions(filter=name_filter)))
+ # Assert
+ self.assertIsNotNone(tables)
+ self.assertEqual(len(tables), 1)
+ # self.assertEqual(tables[0].table_name, [table.table_name])
+ # table.delete_table()
+ ts.delete_table(table.table_name)
+
+ # @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_list_tables_with_num_results(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ query = QueryOptions()
+ query.top = 3
+ prefix = 'listtable'
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ table_list = []
+ for i in range(0, 4):
+ self._create_table(ts, prefix + str(i), table_list)
+
+ # Act
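+ # by_page() yields one service page per iteration; QueryOptions.top
+ # caps the page size at 3, while the unfiltered call uses the service
+ # default and returns at least the four tables created above.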
+ big_page = list(next(ts.list_tables().by_page()))
+ small_page = list(next(ts.list_tables(query_options=query).by_page()))
+
+ # Assert
+ self.assertEqual(len(small_page), 3)
+ self.assertGreaterEqual(len(big_page), 4)
+
+ # @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_list_tables_with_marker(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ prefix = 'listtable'
+ table_names = []
+ for i in range(0, 4):
+ self._create_table(ts, prefix + str(i), table_names)
+
+ # table_names.sort()
+
+ # Act
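+ # Walk two pages of two tables each: the second by_page() call resumes
+ # from the opaque continuation token the service returned for the first
+ # page (the x-ms-continuation-nexttablename header in the recordings).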
+ generator1 = ts.list_tables(query_options=QueryOptions(top=2)).by_page()
+ next(generator1)
+ generator2 = ts.list_tables(query_options=QueryOptions(top=2)).by_page(
+ continuation_token=generator1.continuation_token)
+ next(generator2)
+
+ tables1 = generator1._current_page
+ tables2 = generator2._current_page
+
+ # Assert
+ self.assertEqual(len(tables1), 2)
+ self.assertEqual(len(tables2), 2)
+ self.assertNotEqual(tables1, tables2)
+
+ # @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_delete_table_with_existing_table(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ table = self._create_table(ts)
+
+ # Act
+ # deleted = table.delete_table()
+ deleted = ts.delete_table(table_name=table.table_name)
+
+ # Assert
+ self.assertIsNone(deleted)
+ # existing = list(ts.query_tables("TableName eq '{}'".format(table.table_name)))
+ # self.assertEqual(existing, [])
+
+ # @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_delete_table_with_non_existing_table_fail_not_exist(self, resource_group, location, storage_account,
+ storage_account_key):
+ # Arrange
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ table_name = self._get_table_reference()
+
+ # Act
+ with self.assertRaises(ResourceNotFoundError):
+ ts.delete_table(table_name)
+
+ # Assert
+
+ # @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_unicode_create_table_unicode_name(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos URLs support unicode table names")
+ ts = TableServiceClient(url, storage_account_key)
+ table_name = u'啊齄丂狛狜'
+
+ # Act
+ with self.assertRaises(HttpResponseError):
+ # not supported - table names must contain only alphanumeric characters and start with a letter
+ ts.create_table(table_name)
+
+ # Assert
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_get_table_acl(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos endpoint does not support this")
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ table = self._create_table(ts)
+ try:
+ # Act
+ acl = ts.get_table_access_policy(table_name=table.table_name)
+ # acl = table.get_table_access_policy()
+
+ # Assert
+ self.assertIsNotNone(acl)
+ self.assertEqual(len(acl), 0)
+ finally:
+ # self._delete_table(table)
+ ts.delete_table(table.table_name)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_set_table_acl_with_empty_signed_identifiers(self, resource_group, location, storage_account,
+ storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos endpoint does not support this")
+ ts = TableServiceClient(url, storage_account_key)
+ table = self._create_table(ts)
+ try:
+ # Act
+ ts.set_table_access_policy(table_name=table.table_name, signed_identifiers={})
+
+ # Assert
+ acl = ts.get_table_access_policy(table_name=table.table_name)
+ self.assertIsNotNone(acl)
+ self.assertEqual(len(acl), 0)
+ finally:
+ # self._delete_table(table)
+ ts.delete_table(table.table_name)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_set_table_acl_with_empty_signed_identifier(self, resource_group, location, storage_account,
+ storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos endpoint does not support this")
+ ts = TableServiceClient(url, storage_account_key)
+ table = self._create_table(ts)
+ try:
+ # Act
+ ts.set_table_access_policy(table_name=table.table_name, signed_identifiers={'empty': None})
+ # Assert
+ acl = ts.get_table_access_policy(table_name=table.table_name)
+ self.assertIsNotNone(acl)
+ self.assertEqual(len(acl), 1)
+ self.assertIsNotNone(acl['empty'])
+ self.assertIsNone(acl['empty'].permission)
+ self.assertIsNone(acl['empty'].expiry)
+ self.assertIsNone(acl['empty'].start)
+ finally:
+ # self._delete_table(table)
+ ts.delete_table(table.table_name)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_set_table_acl_with_signed_identifiers(self, resource_group, location, storage_account,
+ storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos endpoint does not support this")
+ ts = TableServiceClient(url, storage_account_key)
+ table = self._create_table(ts)
+
+ # Act
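+ # A stored access policy binds an identifier ('testid') to permissions
+ # and a validity window; here it grants read access from five minutes
+ # ago until an hour from now.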
+ access_policy = AccessPolicy(permission=TableSasPermissions(read=True),
+ expiry=datetime.utcnow() + timedelta(hours=1),
+ start=datetime.utcnow() - timedelta(minutes=5))
+ identifiers = {'testid': access_policy}
+ # identifiers = dict()
+ # identifiers['testid'] = AccessPolicy(start=datetime.utcnow() - timedelta(minutes=5),
+ # expiry=datetime.utcnow() + timedelta(hours=1),
+ # permission=TableSasPermissions(query=True))
+ try:
+ ts.set_table_access_policy(table_name=table.table_name, signed_identifiers=identifiers)
+ # Assert
+ acl = ts.get_table_access_policy(table_name=table.table_name)
+ self.assertIsNotNone(acl)
+ self.assertEqual(len(acl), 1)
+ self.assertTrue('testid' in acl)
+ finally:
+ # self._delete_table(table)
+ ts.delete_table(table.table_name)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_set_table_acl_too_many_ids(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos endpoint does not support this")
+ ts = TableServiceClient(url, storage_account_key)
+ table = self._create_table(ts)
+ try:
+ # Act
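+ # The service supports at most 5 access policies per table, so the six
+ # identifiers built here should trip the client-side check below.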
+ identifiers = dict()
+ for i in range(0, 6):
+ identifiers['id{}'.format(i)] = None
+
+ # Assert
+ with self.assertRaisesRegex(ValueError,
+ 'Too many access policies provided. The server does not support setting more than 5 access policies on a single resource.'):
+ ts.set_table_access_policy(table_name=table.table_name, signed_identifiers=identifiers)
+ finally:
+ ts.delete_table(table.table_name)
+
+ @pytest.mark.skip("pending")
+ @pytest.mark.live_test_only
+ @GlobalStorageAccountPreparer()
+ def test_account_sas(self, resource_group, location, storage_account, storage_account_key):
+ # SAS URL is calculated from storage key, so this test runs live only
+
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support sas")
+ tsc = TableServiceClient(url, storage_account_key)
+ table = self._create_table(tsc)
+ try:
+ entity = {
+ 'PartitionKey': 'test',
+ 'RowKey': 'test1',
+ 'text': 'hello',
+ }
+ tsc.upsert_item(table_name=table.table_name, table_entity_properties=entity)
+
+ entity['RowKey'] = 'test2'
+ tsc.upsert_item(table_name=table.table_name, table_entity_properties=entity)
+
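+ # Mint an account-level SAS limited to object reads, valid from one
+ # minute ago to one hour from now, and use it as the sole credential
+ # for a second client below.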
+ token = generate_account_sas(
+ storage_account.name,
+ storage_account_key,
+ resource_types=ResourceTypes(object=True),
+ permission=AccountSasPermissions(read=True),
+ expiry=datetime.utcnow() + timedelta(hours=1),
+ start=datetime.utcnow() - timedelta(minutes=1),
+ )
+
+ # Act
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"),
+ credential=token,
+ )
+ sas_table = service.get_table_client(table.table_name)
+ entities = list(sas_table.read_all_items())
+
+ # Assert
+ self.assertEqual(len(entities), 2)
+ self.assertEqual(entities[0].text, 'hello')
+ self.assertEqual(entities[1].text, 'hello')
+ finally:
+ tsc.delete_table(table.table_name)
+
+ @pytest.mark.skip("msrest fails deserialization: https://github.com/Azure/msrest-for-python/issues/192")
+ @GlobalStorageAccountPreparer()
+ def test_locale(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ table_name = self._get_table_reference()
+ init_locale = locale.getlocale()
+ if os.name == "nt":
+ culture = "Spanish_Spain"
+ elif os.name == 'posix':
+ culture = 'es_ES.UTF-8'
+ else:
+ culture = 'es_ES.utf8'
+
+ try:
+ locale.setlocale(locale.LC_ALL, culture)
+ e = None
+
+ # Act
+ ts.create_table(table_name)
+ try:
+ resp = ts.list_tables()
+ except:
+ e = sys.exc_info()[0]
+
+ # Assert
+ self.assertIsNone(e)
+ finally:
+ ts.delete_table(table_name)
+ locale.setlocale(locale.LC_ALL, init_locale[0] or 'en_US')
diff --git a/sdk/table/tests/test_table_batch.py b/sdk/table/tests/test_table_batch.py
new file mode 100644
index 000000000000..d726f3bd3bcc
--- /dev/null
+++ b/sdk/table/tests/test_table_batch.py
@@ -0,0 +1,658 @@
+# coding: utf-8
+
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import unittest
+import pytest
+
+import uuid
+from base64 import b64encode
+from datetime import datetime
+from dateutil.tz import tzutc
+
+from azure.core import MatchConditions
+from azure.core.exceptions import (
+ HttpResponseError,
+ ResourceNotFoundError,
+ ResourceExistsError,
+ ClientAuthenticationError)
+# from azure.tables import (
+# Entity,
+# EntityProperty,
+# TableServiceClient,
+# TableBatchClient,
+# EdmType,
+# PartialBatchErrorException
+# )
+
+from _shared.testcase import GlobalStorageAccountPreparer, TableTestCase, LogCaptured
+
+#------------------------------------------------------------------------------
+TEST_TABLE_PREFIX = 'table'
+#------------------------------------------------------------------------------
+
+class StorageTableBatchTest(TableTestCase):
+
+ def _set_up(self, storage_account, storage_account_key):
+ self.ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ self.table_name = self.get_resource_name('uttable')
+ self.table = self.ts.get_table_client(self.table_name)
+ if self.is_live:
+ try:
+ self.ts.create_table(self.table_name)
+ except ResourceExistsError:
+ pass
+
+ self.test_tables = []
+
+ def _tear_down(self):
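+ # Cleanup is best-effort: in playback mode there is no live account,
+ # and a table that was never created should not fail the run.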
+ if self.is_live:
+ try:
+ self.ts.delete_table(self.table_name)
+ except:
+ pass
+
+ for table_name in self.test_tables:
+ try:
+ self.ts.delete_table(table_name)
+ except:
+ pass
+
+ #--Helpers-----------------------------------------------------------------
+
+ def _get_table_reference(self, prefix=TEST_TABLE_PREFIX):
+ table_name = self.get_resource_name(prefix)
+ self.test_tables.append(table_name)
+ return self.ts.get_table_client(table_name)
+
+ def _create_random_entity_dict(self, pk=None, rk=None):
+ '''
+ Creates a dictionary-based entity with fixed values, using all
+ of the supported data types.
+ '''
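+ # Note the case-sensitive 'Birthday'/'birthday' pair and the 'other'
+ # property explicitly typed as a 32-bit integer via EntityProperty.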
+ partition = pk if pk is not None else self.get_resource_name('pk')
+ row = rk if rk is not None else self.get_resource_name('rk')
+ properties = {
+ 'PartitionKey': partition,
+ 'RowKey': row,
+ 'age': 39,
+ 'sex': 'male',
+ 'married': True,
+ 'deceased': False,
+ 'optional': None,
+ 'ratio': 3.1,
+ 'evenratio': 3.0,
+ 'large': 933311100,
+ 'Birthday': datetime(1973, 10, 4, tzinfo=tzutc()),
+ 'birthday': datetime(1970, 10, 4, tzinfo=tzutc()),
+ 'binary': b'binary',
+ 'other': EntityProperty(EdmType.INT32, 20),
+ 'clsid': uuid.UUID('c9da6455-213d-42c9-9a79-3e9149a57833')
+ }
+ return Entity(**properties)
+
+ def _create_updated_entity_dict(self, partition, row):
+ '''
+ Creates a dictionary-based entity with fixed values, with a
+ different set of values than the default entity. It
+ adds fields, changes field values, changes field types,
+ and removes fields when compared to the default entity.
+ '''
+ return {
+ 'PartitionKey': partition,
+ 'RowKey': row,
+ 'age': 'abc',
+ 'sex': 'female',
+ 'sign': 'aquarius',
+ 'birthday': datetime(1991, 10, 4, tzinfo=tzutc())
+ }
+
+ def _assert_default_entity(self, entity, headers=None):
+ '''
+ Asserts that the entity passed in matches the default entity.
+ '''
+ self.assertEqual(entity['age'], 39)
+ self.assertEqual(entity['sex'], 'male')
+ self.assertEqual(entity['married'], True)
+ self.assertEqual(entity['deceased'], False)
+ self.assertFalse("optional" in entity)
+ self.assertFalse("aquarius" in entity)
+ self.assertEqual(entity['ratio'], 3.1)
+ self.assertEqual(entity['evenratio'], 3.0)
+ self.assertEqual(entity['large'], 933311100)
+ self.assertEqual(entity['Birthday'], datetime(1973, 10, 4, tzinfo=tzutc()))
+ self.assertEqual(entity['birthday'], datetime(1970, 10, 4, tzinfo=tzutc()))
+ self.assertEqual(entity['binary'], b'binary')
+ self.assertIsInstance(entity['other'], EntityProperty)
+ self.assertEqual(entity['other'].type, EdmType.INT32)
+ self.assertEqual(entity['other'].value, 20)
+ self.assertEqual(entity['clsid'], uuid.UUID('c9da6455-213d-42c9-9a79-3e9149a57833'))
+ self.assertTrue('metadata' in entity.odata)
+ self.assertIsNotNone(entity.timestamp)
+ self.assertIsInstance(entity.timestamp, datetime)
+ if headers:
+ self.assertTrue("etag" in headers)
+ self.assertIsNotNone(headers['etag'])
+
+ def _assert_updated_entity(self, entity):
+ '''
+ Asserts that the entity passed in matches the updated entity.
+ '''
+ self.assertEqual(entity.age, 'abc')
+ self.assertEqual(entity.sex, 'female')
+ self.assertFalse(hasattr(entity, "married"))
+ self.assertFalse(hasattr(entity, "deceased"))
+ self.assertEqual(entity.sign, 'aquarius')
+ self.assertFalse(hasattr(entity, "optional"))
+ self.assertFalse(hasattr(entity, "ratio"))
+ self.assertFalse(hasattr(entity, "evenratio"))
+ self.assertFalse(hasattr(entity, "large"))
+ self.assertFalse(hasattr(entity, "Birthday"))
+ self.assertEqual(entity.birthday, datetime(1991, 10, 4, tzinfo=tzutc()))
+ self.assertFalse(hasattr(entity, "other"))
+ self.assertFalse(hasattr(entity, "clsid"))
+ self.assertIsNotNone(entity.odata['etag'])
+ self.assertIsNotNone(entity.timestamp)
+ self.assertIsInstance(entity.timestamp, datetime)
+
+ #--Test cases for batch ---------------------------------------------
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_insert(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
+ entity = Entity()
+ entity.PartitionKey = '001'
+ entity.RowKey = 'batch_insert'
+ entity.test = EntityProperty(EdmType.BOOLEAN, 'true')
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime.utcnow()
+
+ batch = self.table.create_batch()
+ batch.create_item(entity)
+ resp = self.table.commit_batch(batch)
+
+ # Assert
+ self.assertIsNotNone(resp)
+ result, headers = self.table.read_item('001', 'batch_insert', response_hook=lambda e, h: (e, h))
+ self.assertEqual(list(resp)[0].headers['Etag'], headers['etag'])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_update(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
+ entity = Entity()
+ entity.PartitionKey = '001'
+ entity.RowKey = 'batch_update'
+ entity.test = EntityProperty(EdmType.BOOLEAN, 'true')
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime.utcnow()
+ self.table.create_item(entity)
+
+ entity = self.table.read_item('001', 'batch_update')
+ self.assertEqual(3, entity.test3)
+ entity.test2 = 'value1'
+
+ batch = self.table.create_batch()
+ batch.update_item(entity)
+ resp = self.table.commit_batch(batch)
+
+ # Assert
+ self.assertIsNotNone(resp)
+ result, headers = self.table.read_item('001', 'batch_update', response_hook=lambda e, h: (e, h))
+ self.assertEqual('value1', result.test2)
+ self.assertEqual(list(resp)[0].headers['Etag'], headers['etag'])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_merge(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
+ entity = Entity()
+ entity.PartitionKey = '001'
+ entity.RowKey = 'batch_merge'
+ entity.test = EntityProperty(EdmType.BOOLEAN, 'true')
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime.utcnow()
+ self.table.create_item(entity)
+
+ entity = self.table.read_item('001', 'batch_merge')
+ self.assertEqual(3, entity.test3)
+ entity = Entity()
+ entity.PartitionKey = '001'
+ entity.RowKey = 'batch_merge'
+ entity.test2 = 'value1'
+
+ batch = self.table.create_batch()
+ batch.update_item(entity, mode='MERGE')
+ resp = self.table.commit_batch(batch)
+
+ # Assert
+ self.assertIsNotNone(resp)
+ entity, headers = self.table.read_item('001', 'batch_merge', response_hook=lambda e, h: (e, h))
+ self.assertEqual('value1', entity.test2)
+ self.assertEqual(1234567890, entity.test4)
+ self.assertEqual(list(resp)[0].headers['Etag'], headers['etag'])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_update_if_match(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict()
+ etag = self.table.create_item(entity, response_hook=lambda e, h: h['etag'])
+
+ # Act
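+ # Pass the etag captured at creation with IfNotModified so the batched
+ # update only succeeds while the entity is unchanged on the service.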
+ sent_entity = self._create_updated_entity_dict(entity['PartitionKey'], entity['RowKey'])
+ batch = self.table.create_batch()
+ batch.update_item(sent_entity, etag=etag, match_condition=MatchConditions.IfNotModified)
+ resp = self.table.commit_batch(batch)
+
+ # Assert
+ self.assertIsNotNone(resp)
+ entity, headers = self.table.read_item(entity['PartitionKey'], entity['RowKey'], response_hook=lambda e, h: (e, h))
+ self._assert_updated_entity(entity)
+ self.assertEqual(list(resp)[0].headers['Etag'], headers['etag'])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_update_if_doesnt_match(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict()
+ self.table.create_item(entity)
+
+ # Act
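+ # A deliberately stale etag means the conditional update cannot match,
+ # so committing the batch should fail with UpdateConditionNotSatisfied.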
+ sent_entity1 = self._create_updated_entity_dict(entity['PartitionKey'], entity['RowKey'])
+
+ batch = self.table.create_batch()
+ batch.update_item(
+ sent_entity1,
+ etag=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"',
+ match_condition=MatchConditions.IfNotModified)
+ try:
+ self.table.commit_batch(batch)
+ except PartialBatchErrorException as error:
+ pass # TODO
+ #self.assertEqual(error.code, 'UpdateConditionNotSatisfied')
+ #self.assertTrue('The update condition specified in the request was not satisfied.' in str(error))
+ else:
+ self.fail('AzureBatchOperationError was expected')
+
+ # Assert
+ received_entity = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+ self._assert_default_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_insert_replace(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
+ entity = Entity()
+ entity.PartitionKey = '001'
+ entity.RowKey = 'batch_insert_replace'
+ entity.test = True
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime.utcnow()
+
+ batch = self.table.create_batch()
+ batch.upsert_item(entity)
+ resp = self.table.commit_batch(batch)
+
+ # Assert
+ self.assertIsNotNone(resp)
+ entity, headers = self.table.read_item('001', 'batch_insert_replace', response_hook=lambda e, h: (e, h))
+ self.assertIsNotNone(entity)
+ self.assertEqual('value', entity.test2)
+ self.assertEqual(1234567890, entity.test4)
+ self.assertEqual(list(resp)[0].headers['Etag'], headers['etag'])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_insert_merge(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
+ entity = Entity()
+ entity.PartitionKey = '001'
+ entity.RowKey = 'batch_insert_merge'
+ entity.test = True
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime.utcnow()
+
+ batch = self.table.create_batch()
+ batch.upsert_item(entity, mode='MERGE')
+ resp = self.table.commit_batch(batch)
+
+ # Assert
+ self.assertIsNotNone(resp)
+ entity, headers = self.table.read_item('001', 'batch_insert_merge', response_hook=lambda e, h: (e, h))
+ self.assertIsNotNone(entity)
+ self.assertEqual('value', entity.test2)
+ self.assertEqual(1234567890, entity.test4)
+ self.assertEqual(list(resp)[0].headers['Etag'], headers['etag'])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_delete(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
+ entity = Entity()
+ entity.PartitionKey = '001'
+ entity.RowKey = 'batch_delete'
+ entity.test = EntityProperty(EdmType.BOOLEAN, 'true')
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime.utcnow()
+ self.table.create_item(entity)
+
+ entity = self.table.read_item('001', 'batch_delete')
+ self.assertEqual(3, entity.test3)
+
+ batch = self.table.create_batch()
+ batch.delete_item('001', 'batch_delete')
+ resp = self.table.commit_batch(batch)
+
+ # Assert
+ self.assertIsNotNone(resp)
+ self.assertEqual(list(resp)[0].status_code, 204)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_inserts(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
+ entity = Entity()
+ entity.PartitionKey = 'batch_inserts'
+ entity.test = EntityProperty(EdmType.BOOLEAN, 'true')
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+
+ batch = self.table.create_batch()
+ for i in range(100):
+ entity.RowKey = str(i)
+ batch.create_item(entity)
+ self.table.commit_batch(batch)
+
+ entities = list(self.table.query_items("PartitionKey eq 'batch_inserts'"))
+
+ # Assert
+ self.assertIsNotNone(entities)
+ self.assertEqual(100, len(entities))
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_all_operations_together(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
+ entity = Entity()
+ entity.PartitionKey = '003'
+ entity.RowKey = 'batch_all_operations_together-1'
+ entity.test = EntityProperty(EdmType.BOOLEAN, 'true')
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime.utcnow()
+ self.table.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-2'
+ self.table.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-3'
+ self.table.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-4'
+ self.table.create_item(entity)
+
+ batch = self.table.create_batch()
+ entity.RowKey = 'batch_all_operations_together'
+ batch.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-1'
+ batch.delete_item(entity.PartitionKey, entity.RowKey)
+ entity.RowKey = 'batch_all_operations_together-2'
+ entity.test3 = 10
+ batch.update_item(entity)
+ entity.RowKey = 'batch_all_operations_together-3'
+ entity.test3 = 100
+ batch.update_item(entity, mode='MERGE')
+ entity.RowKey = 'batch_all_operations_together-4'
+ entity.test3 = 10
+ batch.upsert_item(entity)
+ entity.RowKey = 'batch_all_operations_together-5'
+ batch.upsert_item(entity, mode='MERGE')
+ resp = self.table.commit_batch(batch)
+
+ # Assert
+ self.assertEqual(6, len(list(resp)))
+ entities = list(self.table.query_items("PartitionKey eq '003'"))
+ self.assertEqual(5, len(entities))
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_all_operations_together_context_manager(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
+ entity = Entity()
+ entity.PartitionKey = '003'
+ entity.RowKey = 'batch_all_operations_together-1'
+ entity.test = EntityProperty(EdmType.BOOLEAN, 'true')
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime.utcnow()
+ self.table.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-2'
+ self.table.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-3'
+ self.table.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-4'
+ self.table.create_item(entity)
+
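+            # Exiting the context manager commits the batch implicitly; note the absence of an explicit commit_batch call below.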
+ with self.table.create_batch() as batch:
+ entity.RowKey = 'batch_all_operations_together'
+ batch.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-1'
+ batch.delete_item(entity.PartitionKey, entity.RowKey)
+ entity.RowKey = 'batch_all_operations_together-2'
+ entity.test3 = 10
+ batch.update_item(entity)
+ entity.RowKey = 'batch_all_operations_together-3'
+ entity.test3 = 100
+ batch.update_item(entity, mode='MERGE')
+ entity.RowKey = 'batch_all_operations_together-4'
+ entity.test3 = 10
+ batch.upsert_item(entity)
+ entity.RowKey = 'batch_all_operations_together-5'
+ batch.upsert_item(entity, mode='MERGE')
+
+ # Assert
+ entities = list(self.table.query_items("PartitionKey eq '003'"))
+ self.assertEqual(5, len(entities))
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_reuse(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ table2 = self._get_table_reference('table2')
+ table2.create_table()
+
+ # Act
+ entity = Entity()
+ entity.PartitionKey = '003'
+ entity.RowKey = 'batch_all_operations_together-1'
+ entity.test = EntityProperty(EdmType.BOOLEAN, 'true')
+ entity.test2 = 'value'
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime.utcnow()
+
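+            # A standalone TableBatchClient is not bound to one table, so the same batch can be committed against multiple table clients.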
+ batch = TableBatchClient()
+ batch.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-2'
+ batch.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-3'
+ batch.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-4'
+ batch.create_item(entity)
+
+ self.table.commit_batch(batch)
+ table2.commit_batch(batch)
+
+ batch = TableBatchClient()
+ entity.RowKey = 'batch_all_operations_together'
+ batch.create_item(entity)
+ entity.RowKey = 'batch_all_operations_together-1'
+ batch.delete_item(entity.PartitionKey, entity.RowKey)
+ entity.RowKey = 'batch_all_operations_together-2'
+ entity.test3 = 10
+ batch.update_item(entity)
+ entity.RowKey = 'batch_all_operations_together-3'
+ entity.test3 = 100
+ batch.update_item(entity, mode='MERGE')
+ entity.RowKey = 'batch_all_operations_together-4'
+ entity.test3 = 10
+ batch.upsert_item(entity)
+ entity.RowKey = 'batch_all_operations_together-5'
+ batch.upsert_item(entity, mode='MERGE')
+
+ self.table.commit_batch(batch)
+ resp = table2.commit_batch(batch)
+
+ # Assert
+ self.assertEqual(6, len(list(resp)))
+ entities = list(self.table.query_items("PartitionKey eq '003'"))
+ self.assertEqual(5, len(entities))
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_same_row_operations_fail(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict('001', 'batch_negative_1')
+ self.table.create_item(entity)
+
+ # Act
+ batch = self.table.create_batch()
+
+ entity = self._create_updated_entity_dict(
+ '001', 'batch_negative_1')
+ batch.update_item(entity)
+ entity = self._create_random_entity_dict(
+ '001', 'batch_negative_1')
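+            # A batch may contain only one operation per entity (same PartitionKey and RowKey), so the second operation on this row must be rejected.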
+
+ # Assert
+ with self.assertRaises(ValueError):
+ batch.update_item(entity, mode='MERGE')
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_different_partition_operations_fail(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict('001', 'batch_negative_1')
+ self.table.create_item(entity)
+
+ # Act
+ batch = self.table.create_batch()
+
+ entity = self._create_updated_entity_dict(
+ '001', 'batch_negative_1')
+ batch.update_item(entity)
+
+ entity = self._create_random_entity_dict(
+ '002', 'batch_negative_1')
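+            # All operations in a batch must share a single PartitionKey; '002' differs from the '001' already in the batch.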
+
+ # Assert
+ with self.assertRaises(ValueError):
+ batch.create_item(entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_batch_too_many_ops(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict('001', 'batch_negative_1')
+ self.table.create_item(entity)
+
+ # Act
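+            # Entity group transactions are limited to 100 operations, so adding the 101st item should raise ValueError client-side.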
+ with self.assertRaises(ValueError):
+ batch = self.table.create_batch()
+ for i in range(0, 101):
+ entity = Entity()
+ entity.PartitionKey = 'large'
+ entity.RowKey = 'item{0}'.format(i)
+ batch.create_item(entity)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+#------------------------------------------------------------------------------
+if __name__ == '__main__':
+ unittest.main()
diff --git a/sdk/table/tests/test_table_client.py b/sdk/table/tests/test_table_client.py
new file mode 100644
index 000000000000..c9d25ee98175
--- /dev/null
+++ b/sdk/table/tests/test_table_client.py
@@ -0,0 +1,581 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import unittest
+import pytest
+import platform
+from devtools_testutils import ResourceGroupPreparer, StorageAccountPreparer
+# from azure.tables import (
+# VERSION,
+# TableServiceClient,
+# TableClient,
+# )
+from _shared.testcase import GlobalStorageAccountPreparer, TableTestCase
+
+# ------------------------------------------------------------------------------
+SERVICES = {
+ #TableServiceClient: 'table',
+ #TableClient: 'table',
+ #TableServiceClient: 'cosmos',
+ #TableClient: 'cosmos',
+}
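+# NOTE: SERVICES maps each client class to the endpoint flavor ('table' or
+# 'cosmos') consumed by the parametrized tests below; the entries stay
+# commented out until the azure.tables clients are importable. If re-enabled
+# as-is, the repeated classes would silently collapse as duplicate dict keys,
+# so distinct entries, e.g. SERVICES = [(TableServiceClient, 'table'),
+# (TableServiceClient, 'cosmos'), ...], would be needed to cover both flavors.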
+
+_CONNECTION_ENDPOINTS = {'table': 'TableEndpoint', 'cosmos': 'TableEndpoint'}
+
+_CONNECTION_ENDPOINTS_SECONDARY = {'table': 'TableSecondaryEndpoint', 'cosmos': 'TableSecondaryEndpoint'}
+
+class StorageTableClientTest(TableTestCase):
+ def setUp(self):
+ super(StorageTableClientTest, self).setUp()
+ self.sas_token = self.generate_sas_token()
+ self.token_credential = self.generate_oauth_token()
+
+ # --Helpers-----------------------------------------------------------------
+ def validate_standard_account_endpoints(self, service, account_name, account_key):
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, account_name)
+ self.assertEqual(service.credential.account_name, account_name)
+ self.assertEqual(service.credential.account_key, account_key)
+ self.assertTrue(
+ ('{}.{}'.format(account_name, 'table.core.windows.net') in service.url) or
+ ('{}.{}'.format(account_name, 'table.cosmos.azure.com') in service.url))
+ self.assertTrue(
+ ('{}-secondary.{}'.format(account_name, 'table.core.windows.net') in service.secondary_endpoint) or
+ ('{}-secondary.{}'.format(account_name, 'table.cosmos.azure.com') in service.secondary_endpoint))
+
+ # --Direct Parameters Test Cases --------------------------------------------
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_key(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+
+ for client, url in SERVICES.items():
+ # Act
+ service = client(
+ self.account_url(storage_account, url), credential=storage_account_key, table_name='foo')
+
+ # Assert
+ self.validate_standard_account_endpoints(service, storage_account.name, storage_account_key)
+ self.assertEqual(service.scheme, 'https')
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_connection_string(self, resource_group, location, storage_account, storage_account_key):
+
+ for service_type in SERVICES.items():
+ # Act
+ service = service_type[0].from_connection_string(
+ self.connection_string(storage_account, storage_account_key), table_name="test")
+
+ # Assert
+ self.validate_standard_account_endpoints(service, storage_account.name, storage_account_key)
+ self.assertEqual(service.scheme, 'https')
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_sas(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ suffix = '.table.core.windows.net'
+ if 'cosmos' in url:
+ suffix = '.table.cosmos.azure.com'
+ for service_type in SERVICES:
+ # Act
+ service = service_type(
+ self.account_url(storage_account, "table"), credential=self.sas_token, table_name='foo')
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertTrue(service.url.startswith('https://' + storage_account.name + suffix))
+ self.assertTrue(service.url.endswith(self.sas_token))
+ self.assertIsNone(service.credential)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_token(self, resource_group, location, storage_account, storage_account_key):
+ url = self.account_url(storage_account, "table")
+ suffix = '.table.core.windows.net'
+ if 'cosmos' in url:
+ suffix = '.table.cosmos.azure.com'
+ for service_type in SERVICES:
+ # Act
+ service = service_type(url, credential=self.token_credential, table_name='foo')
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertTrue(service.url.startswith('https://' + storage_account.name + suffix))
+ self.assertEqual(service.credential, self.token_credential)
+ self.assertFalse(hasattr(service.credential, 'account_key'))
+ self.assertTrue(hasattr(service.credential, 'get_token'))
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_token_and_http(self, resource_group, location, storage_account, storage_account_key):
+ for service_type in SERVICES:
+ # Act
+ with self.assertRaises(ValueError):
+ url = self.account_url(storage_account, "table").replace('https', 'http')
+ service_type(url, credential=self.token_credential, table_name='foo')
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_china(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ # TODO: Confirm regional cloud cosmos URLs
+ for service_type in SERVICES.items():
+ # Act
+ url = self.account_url(storage_account, "table").replace('core.windows.net', 'core.chinacloudapi.cn')
+ if 'cosmos.azure' in url:
+ pytest.skip("Confirm cosmos national cloud URLs")
+ service = service_type[0](
+ url, credential=storage_account_key, table_name='foo')
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_key, storage_account_key)
+ self.assertTrue(service.primary_endpoint.startswith(
+ 'https://{}.{}.core.chinacloudapi.cn'.format(storage_account.name, "table")))
+ self.assertTrue(service.secondary_endpoint.startswith(
+ 'https://{}-secondary.{}.core.chinacloudapi.cn'.format(storage_account.name, "table")))
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_protocol(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+
+ for service_type in SERVICES.items():
+ # Act
+ url = self.account_url(storage_account, "table").replace('https', 'http')
+ service = service_type[0](
+ url, credential=storage_account_key, table_name='foo')
+
+ # Assert
+ self.validate_standard_account_endpoints(service, storage_account.name, storage_account_key)
+ self.assertEqual(service.scheme, 'http')
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_empty_key(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ TABLE_SERVICES = [TableServiceClient, TableClient]
+
+ for service_type in TABLE_SERVICES:
+ # Act
+ with self.assertRaises(ValueError) as e:
+ test_service = service_type('testaccount', credential='', table_name='foo')
+
+ self.assertEqual(
+ str(e.exception), "You need to provide either a SAS token or an account shared key to authenticate.")
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_socket_timeout(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+
+ for service_type in SERVICES.items():
+ # Act
+ default_service = service_type[0](
+ self.account_url(storage_account, "table"), credential=storage_account_key, table_name='foo')
+ service = service_type[0](
+ self.account_url(storage_account, "table"), credential=storage_account_key,
+ table_name='foo', connection_timeout=22)
+
+ # Assert
+ self.validate_standard_account_endpoints(service, storage_account.name, storage_account_key)
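+            # Assumption: the default transport connection timeout is 20 seconds (reported by some transports as a (connect, read) tuple), while the configured client uses 22.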
+ assert service._client._client._pipeline._transport.connection_config.timeout == 22
+ assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)]
+
+ # --Connection String Test Cases --------------------------------------------
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_connection_string_key(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ conn_string = 'AccountName={};AccountKey={};'.format(storage_account.name, storage_account_key)
+
+ for service_type in SERVICES.items():
+ # Act
+ service = service_type[0].from_connection_string(conn_string, table_name='foo')
+
+ # Assert
+ self.validate_standard_account_endpoints(service, storage_account.name, storage_account_key)
+ self.assertEqual(service.scheme, 'https')
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_connection_string_sas(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ conn_string = 'AccountName={};SharedAccessSignature={};'.format(storage_account.name, self.sas_token)
+
+ for service_type in SERVICES:
+ # Act
+ service = service_type.from_connection_string(conn_string, table_name='foo')
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertTrue(service.url.startswith('https://' + storage_account.name + '.table.core.windows.net'))
+ self.assertTrue(service.url.endswith(self.sas_token))
+ self.assertIsNone(service.credential)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer() # TODO: Prepare Cosmos tables account
+ def test_create_service_with_connection_string_cosmos(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ conn_string = 'DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};TableEndpoint=https://{0}.table.cosmos.azure.com:443/;'.format(
+ storage_account.name, storage_account_key)
+
+ for service_type in SERVICES:
+ # Act
+ service = service_type.from_connection_string(conn_string, table_name='foo')
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertTrue(service.url.startswith('https://' + storage_account.name + '.table.cosmos.azure.com'))
+ self.assertEqual(service.credential.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_key, storage_account_key)
+ self.assertTrue(service.primary_endpoint.startswith('https://' + storage_account.name + '.table.cosmos.azure.com'))
+ self.assertTrue(service.secondary_endpoint.startswith('https://' + storage_account.name + '-secondary.table.cosmos.azure.com'))
+ self.assertEqual(service.scheme, 'https')
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_connection_string_endpoint_protocol(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ conn_string = 'AccountName={};AccountKey={};DefaultEndpointsProtocol=http;EndpointSuffix=core.chinacloudapi.cn;'.format(
+ storage_account.name, storage_account_key)
+
+ for service_type in SERVICES.items():
+ # Act
+ service = service_type[0].from_connection_string(conn_string, table_name="foo")
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_key, storage_account_key)
+ self.assertTrue(
+ service.primary_endpoint.startswith(
+ 'http://{}.{}.core.chinacloudapi.cn'.format(storage_account.name, "table")))
+ self.assertTrue(
+ service.secondary_endpoint.startswith(
+ 'http://{}-secondary.{}.core.chinacloudapi.cn'.format(storage_account.name, "table")))
+ self.assertEqual(service.scheme, 'http')
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_connection_string_emulated(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ for service_type in SERVICES.items():
+            conn_string = 'UseDevelopmentStorage=true;'
+
+ # Act
+ with self.assertRaises(ValueError):
+ service = service_type[0].from_connection_string(conn_string, table_name="foo")
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_connection_string_custom_domain(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ for service_type in SERVICES.items():
+ conn_string = 'AccountName={};AccountKey={};TableEndpoint=www.mydomain.com;'.format(
+ storage_account.name, storage_account_key)
+
+ # Act
+ service = service_type[0].from_connection_string(conn_string, table_name="foo")
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_key, storage_account_key)
+ self.assertTrue(service.primary_endpoint.startswith('https://www.mydomain.com'))
+ self.assertTrue(service.secondary_endpoint.startswith('https://' + storage_account.name + '-secondary.table.core.windows.net'))
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_conn_str_custom_domain_trailing_slash(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ for service_type in SERVICES.items():
+ conn_string = 'AccountName={};AccountKey={};TableEndpoint=www.mydomain.com/;'.format(
+ storage_account.name, storage_account_key)
+
+ # Act
+ service = service_type[0].from_connection_string(conn_string, table_name="foo")
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_key, storage_account_key)
+ self.assertTrue(service.primary_endpoint.startswith('https://www.mydomain.com'))
+ self.assertTrue(service.secondary_endpoint.startswith('https://' + storage_account.name + '-secondary.table.core.windows.net'))
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_conn_str_custom_domain_sec_override(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ for service_type in SERVICES.items():
+ conn_string = 'AccountName={};AccountKey={};TableEndpoint=www.mydomain.com/;'.format(
+ storage_account.name, storage_account_key)
+
+ # Act
+ service = service_type[0].from_connection_string(
+ conn_string, secondary_hostname="www-sec.mydomain.com", table_name="foo")
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_key, storage_account_key)
+ self.assertTrue(service.primary_endpoint.startswith('https://www.mydomain.com'))
+ self.assertTrue(service.secondary_endpoint.startswith('https://www-sec.mydomain.com'))
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_conn_str_fails_if_sec_without_primary(self, resource_group, location, storage_account, storage_account_key):
+ for service_type in SERVICES.items():
+ # Arrange
+ conn_string = 'AccountName={};AccountKey={};{}=www.mydomain.com;'.format(
+ storage_account.name, storage_account_key,
+ _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1]))
+
+ # Act
+
+ # Fails if primary excluded
+ with self.assertRaises(ValueError):
+ service = service_type[0].from_connection_string(conn_string, table_name="foo")
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_conn_str_succeeds_if_sec_with_primary(self, resource_group, location, storage_account, storage_account_key):
+ for service_type in SERVICES.items():
+ # Arrange
+ conn_string = 'AccountName={};AccountKey={};{}=www.mydomain.com;{}=www-sec.mydomain.com;'.format(
+ storage_account.name,
+ storage_account_key,
+ _CONNECTION_ENDPOINTS.get(service_type[1]),
+ _CONNECTION_ENDPOINTS_SECONDARY.get(service_type[1]))
+
+ # Act
+ service = service_type[0].from_connection_string(conn_string, table_name="foo")
+
+ # Assert
+ self.assertIsNotNone(service)
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_key, storage_account_key)
+ self.assertTrue(service.primary_endpoint.startswith('https://www.mydomain.com'))
+ self.assertTrue(service.secondary_endpoint.startswith('https://www-sec.mydomain.com'))
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_service_with_custom_account_endpoint_path(self, resource_group, location, storage_account, storage_account_key):
+ custom_account_url = "http://local-machine:11002/custom/account/path/" + self.sas_token
+ for service_type in SERVICES.items():
+ conn_string = 'DefaultEndpointsProtocol=http;AccountName={};AccountKey={};TableEndpoint={};'.format(
+ storage_account.name, storage_account_key, custom_account_url)
+
+ # Act
+ service = service_type[0].from_connection_string(conn_string, table_name="foo")
+
+ # Assert
+ self.assertEqual(service.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_name, storage_account.name)
+ self.assertEqual(service.credential.account_key, storage_account_key)
+ self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
+
+ service = TableServiceClient(account_url=custom_account_url)
+ self.assertEqual(service.account_name, None)
+ self.assertEqual(service.credential, None)
+ self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
+ self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/?'))
+
+ service = TableClient(account_url=custom_account_url, table_name="foo")
+ self.assertEqual(service.account_name, None)
+ self.assertEqual(service.table_name, "foo")
+ self.assertEqual(service.credential, None)
+ self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
+ self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path?'))
+
+ # TODO
+ #service = TableClient.from_table_url("http://local-machine:11002/custom/account/path/foo" + self.sas_token)
+ #self.assertEqual(service.account_name, None)
+ #self.assertEqual(service.table_name, "foo")
+ #self.assertEqual(service.credential, None)
+ #self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
+ #self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/foo?'))
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_request_callback_signed_header(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ service = TableServiceClient(self.account_url(storage_account, "table"), credential=storage_account_key)
+ name = self.get_resource_name('cont')
+
+ # Act
+ try:
+ headers = {'x-ms-meta-hello': 'world'}
+ table = service.create_table(name, headers=headers)
+
+ # Assert
+ # metadata = queue.get_queue_properties().metadata
+ # self.assertEqual(metadata, {'hello': 'world'})
+ finally:
+ service.delete_table(name)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_response_callback(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ service = TableServiceClient(self.account_url(storage_account, "table"), credential=storage_account_key)
+ name = self.get_resource_name('cont')
+ table = service.get_table_client(name)
+
+ # Act
+ def callback(response):
+ response.http_response.status_code = 200
+ response.http_response.headers.clear()
+
+ # Assert
+ #exists = queue.get_queue_properties(raw_response_hook=callback)
+ #self.assertTrue(exists)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_user_agent_default(self, resource_group, location, storage_account, storage_account_key):
+ service = TableServiceClient(self.account_url(storage_account, "table"), credential=storage_account_key)
+
+ def callback(response):
+ self.assertTrue('User-Agent' in response.http_request.headers)
+ self.assertEqual(
+ response.http_request.headers['User-Agent'],
+ "azsdk-python-table/{} Python/{} ({})".format(
+ VERSION,
+ platform.python_version(),
+ platform.platform()))
+
+ tables = list(service.list_tables(raw_response_hook=callback))
+ self.assertIsInstance(tables, list)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_user_agent_custom(self, resource_group, location, storage_account, storage_account_key):
+ custom_app = "TestApp/v1.0"
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"), credential=storage_account_key, user_agent=custom_app)
+
+ def callback(response):
+ self.assertTrue('User-Agent' in response.http_request.headers)
+ self.assertEqual(
+ response.http_request.headers['User-Agent'],
+ "TestApp/v1.0 azsdk-python-table/{} Python/{} ({})".format(
+ VERSION,
+ platform.python_version(),
+ platform.platform()))
+
+ tables = list(service.list_tables(raw_response_hook=callback))
+ self.assertIsInstance(tables, list)
+
+ def callback(response):
+ self.assertTrue('User-Agent' in response.http_request.headers)
+ self.assertEqual(
+ response.http_request.headers['User-Agent'],
+ "TestApp/v2.0 azsdk-python-table/{} Python/{} ({})".format(
+ VERSION,
+ platform.python_version(),
+ platform.platform()))
+
+ tables = list(service.list_tables(raw_response_hook=callback, user_agent="TestApp/v2.0"))
+ self.assertIsInstance(tables, list)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_user_agent_append(self, resource_group, location, storage_account, storage_account_key):
+ service = TableServiceClient(self.account_url(storage_account, "table"), credential=storage_account_key)
+
+ def callback(response):
+ self.assertTrue('User-Agent' in response.http_request.headers)
+ self.assertEqual(
+ response.http_request.headers['User-Agent'],
+ "azsdk-python-table/{} Python/{} ({}) customer_user_agent".format(
+ VERSION,
+ platform.python_version(),
+ platform.platform()))
+
+ custom_headers = {'User-Agent': 'customer_user_agent'}
+ tables = list(service.list_tables(raw_response_hook=callback, headers=custom_headers))
+ self.assertIsInstance(tables, list)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_table_client_with_complete_table_url(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ table_url = self.account_url(storage_account, "table") + "/foo"
+ service = TableClient(table_url, table_name='bar', credential=storage_account_key)
+
+ # Assert
+ self.assertEqual(service.scheme, 'https')
+ self.assertEqual(service.table_name, 'bar')
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_create_table_client_with_complete_cosmos_url(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ table_url = "https://{}.table.cosmos.azure.com:443/foo".format(storage_account.name)
+ service = TableClient(table_url, table_name='bar', credential=storage_account_key)
+
+ # Assert
+ self.assertEqual(service.scheme, 'https')
+ self.assertEqual(service.table_name, 'bar')
+ self.assertEqual(service.account_name, storage_account.name)
+
+ @pytest.mark.skip("pending")
+ def test_error_with_malformed_conn_str(self):
+ # Arrange
+
+ for conn_str in ["", "foobar", "foobar=baz=foo", "foo;bar;baz", "foo=;bar=;", "=", ";", "=;=="]:
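+            # Strings that yield no key=value pairs at all are expected to be
+            # reported as blank/malformed; strings that parse but carry no
+            # account details are reported as missing required details.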
+ for service_type in SERVICES.items():
+ # Act
+ with self.assertRaises(ValueError) as e:
+ service = service_type[0].from_connection_string(conn_str, table_name="test")
+
+            if conn_str in ("", "foobar", "foo;bar;baz", ";"):
+ self.assertEqual(
+ str(e.exception), "Connection string is either blank or malformed.")
+            elif conn_str in ("foobar=baz=foo", "foo=;bar=;", "=", "=;=="):
+ self.assertEqual(
+ str(e.exception), "Connection string missing required connection details.")
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_closing_pipeline_client(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ for client, url in SERVICES.items():
+ # Act
+ service = client(
+ self.account_url(storage_account, "table"), credential=storage_account_key, table_name='table')
+
+ # Assert
+ with service:
+ assert hasattr(service, 'close')
+ service.close()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_closing_pipeline_client_simple(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ for client, url in SERVICES.items():
+ # Act
+ service = client(
+ self.account_url(storage_account, "table"), credential=storage_account_key, table_name='table')
+ service.close()
+# ------------------------------------------------------------------------------
+if __name__ == '__main__':
+ unittest.main()
diff --git a/sdk/table/tests/test_table_encryption.py b/sdk/table/tests/test_table_encryption.py
new file mode 100644
index 000000000000..a724efb68914
--- /dev/null
+++ b/sdk/table/tests/test_table_encryption.py
@@ -0,0 +1,981 @@
+# coding: utf-8
+
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import unittest
+import pytest
+from datetime import datetime
+
+# from azure.ai.textanalytics._generated.models import Entity  # wrong auto-import; Entity is meant to come from the table models in the commented block below
+from dateutil.tz import tzutc
+from os import urandom
+from json import loads
+from copy import deepcopy
+
+pytestmark = pytest.mark.skip
+
+# from tests.testcase import (
+# TableTestCase,
+# TestMode,
+# record,
+# )
+# from azure.storage.table import (
+# Entity,
+# EntityProperty,
+# TableService,
+# EdmType,
+# TableBatch,
+# )
+# from azure.storage.models import(
+# AccessPolicy,
+# )
+# from tests.test_encryption_helper import(
+# KeyWrapper,
+# KeyResolver,
+# RSAKeyWrapper,
+# )
+# from azure.storage.table.models import(
+# TablePayloadFormat,
+# TablePermissions,
+# )
+# from azure.storage.table._error import(
+# _ERROR_UNSUPPORTED_TYPE_FOR_ENCRYPTION,
+# )
+# from azure.storage._error import(
+# _ERROR_OBJECT_INVALID,
+# _ERROR_DECRYPTION_FAILURE,
+# AzureException,
+# )
+# from azure.storage._encryption import(
+# _dict_to_encryption_data,
+# _generate_AES_CBC_cipher,
+# )
+# from azure.storage._common_conversion import(
+# _decode_base64_to_bytes,
+# _encode_base64,
+# )
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.ciphers.algorithms import AES
+from cryptography.hazmat.primitives.ciphers.modes import CBC
+from cryptography.hazmat.primitives.padding import PKCS7
+from cryptography.hazmat.primitives.ciphers import Cipher
+from cryptography.hazmat.primitives.hashes import (
+    Hash,
+    SHA256,
+)
+
+from _shared.testcase import GlobalStorageAccountPreparer, TableTestCase, LogCaptured
+
+
+class StorageTableEncryptionTest(TableTestCase):
+
+ def setUp(self):
+ super(StorageTableEncryptionTest, self).setUp()
+
+ self.ts = self._create_storage_service(TableService, self.settings)
+
+ self.table_name = self.get_resource_name('uttable')
+
+ if not self.is_playback():
+ self.ts.create_table(self.table_name)
+
+ self.query_tables = []
+
+ def tearDown(self):
+ if not self.is_playback():
+ try:
+ self.ts.delete_table(self.table_name)
+ except:
+ pass
+
+ for table_name in self.query_tables:
+ try:
+ self.ts.delete_table(table_name)
+ except:
+ pass
+
+ return super(StorageTableEncryptionTest, self).tearDown()
+
+ #--Helpers-----------------------------------------------------------------
+
+ def _create_query_table_encrypted(self, entity_count):
+        '''
+        Creates a query table with a generated name and adds entity_count
+        entities based on the default encrypted entity. RowKey is made unique
+        by appending a counter value starting at 1 (as a string). The 'sex'
+        and 'name' attributes are set to be encrypted.
+        '''
+ table_name = self.get_resource_name('querytable')
+ self.ts.create_table(table_name, True)
+ self.query_tables.append(table_name)
+ self.ts.require_encryption = True
+
+ entity = self._create_default_entity_for_encryption()
+ with self.ts.batch(table_name) as batch:
+ for i in range(1, entity_count + 1):
+ entity['RowKey'] = entity['RowKey'] + str(i)
+ batch.insert_entity(entity)
+ return table_name
+
+ def _create_random_base_entity_class(self):
+ '''
+ Creates a class-based entity with only pk and rk.
+ '''
+ partition = self.get_resource_name('pk')
+ row = self.get_resource_name('rk')
+ entity = Entity()
+ entity.PartitionKey = partition
+ entity.RowKey = row
+ return entity
+
+ def _create_random_base_entity_dict(self):
+ '''
+ Creates a dict-based entity with only pk and rk.
+ '''
+ partition = self.get_resource_name('pk')
+ row = self.get_resource_name('rk')
+ return {'PartitionKey': partition,
+ 'RowKey': row,
+ }
+
+ def _create_random_entity_class(self, pk=None, rk=None):
+ '''
+ Creates a class-based entity with fixed values, using all
+ of the supported data types.
+ '''
+ partition = pk if pk is not None else self.get_resource_name('pk')
+ row = rk if rk is not None else self.get_resource_name('rk')
+ entity = Entity()
+ entity.PartitionKey = partition
+ entity.RowKey = row
+ entity.age = 39
+ entity.sex = 'male'
+ entity.name = 'John Doe'
+ entity.married = True
+ entity.deceased = False
+ entity.optional = None
+ entity.evenratio = 3.0
+ entity.ratio = 3.1
+ entity.large = 933311100
+ entity.Birthday = datetime(1973, 10, 4)
+ entity.birthday = datetime(1970, 10, 4)
+ entity.binary = EntityProperty(EdmType.BINARY, b'binary')
+ entity.other = EntityProperty(EdmType.INT32, 20)
+ entity.clsid = EntityProperty(
+ EdmType.GUID, 'c9da6455-213d-42c9-9a79-3e9149a57833')
+ return entity
+
+ def _create_default_entity_for_encryption(self):
+ entity = self._create_random_entity_class()
+ entity['sex'] = EntityProperty(EdmType.STRING, entity['sex'], True)
+ entity['name'] = EntityProperty(EdmType.STRING, entity['name'], True)
+ return entity
+
+ def _create_default_entity_dict(self, pk=None, rk=None):
+ '''
+ Creates a dictionary-based entity with fixed values, using all
+ of the supported data types.
+ '''
+ partition = pk if pk is not None else self.get_resource_name('pk')
+ row = rk if rk is not None else self.get_resource_name('rk')
+ return {'PartitionKey': partition,
+ 'RowKey': row,
+ 'age': 39,
+ 'sex': 'male',
+ 'name': 'John Doe',
+ 'married': True,
+ 'deceased': False,
+ 'optional': None,
+ 'ratio': 3.1,
+ 'evenratio': 3.0,
+ 'large': 933311100,
+ 'Birthday': datetime(1973, 10, 4),
+ 'birthday': datetime(1970, 10, 4),
+ 'binary': EntityProperty(EdmType.BINARY, b'binary'),
+ 'other': EntityProperty(EdmType.INT32, 20),
+ 'clsid': EntityProperty(
+ EdmType.GUID,
+ 'c9da6455-213d-42c9-9a79-3e9149a57833')}
+
+ def _assert_default_entity(self, entity):
+ '''
+ Asserts that the entity passed in matches the default entity.
+ '''
+ self.assertEqual(entity.age, 39)
+ self.assertEqual(entity.sex, 'male')
+ self.assertEqual(entity.name, 'John Doe')
+ self.assertEqual(entity.married, True)
+ self.assertEqual(entity.deceased, False)
+ self.assertFalse(hasattr(entity, "optional"))
+ self.assertFalse(hasattr(entity, "aquarius"))
+ self.assertEqual(entity.ratio, 3.1)
+ self.assertEqual(entity.evenratio, 3.0)
+ self.assertEqual(entity.large, 933311100)
+ self.assertEqual(entity.Birthday, datetime(1973, 10, 4, tzinfo=tzutc()))
+ self.assertEqual(entity.birthday, datetime(1970, 10, 4, tzinfo=tzutc()))
+ self.assertIsInstance(entity.binary, EntityProperty)
+ self.assertEqual(entity.binary.type, EdmType.BINARY)
+ self.assertEqual(entity.binary.value, b'binary')
+ self.assertIsInstance(entity.other, EntityProperty)
+ self.assertEqual(entity.other.type, EdmType.INT32)
+ self.assertEqual(entity.other.value, 20)
+ self.assertIsInstance(entity.clsid, EntityProperty)
+ self.assertEqual(entity.clsid.type, EdmType.GUID)
+ self.assertEqual(entity.clsid.value,
+ 'c9da6455-213d-42c9-9a79-3e9149a57833')
+ self.assertTrue(hasattr(entity, "Timestamp"))
+ self.assertIsInstance(entity.Timestamp, datetime)
+ self.assertIsNotNone(entity.etag)
+
+ def _assert_default_entity_json_no_metadata(self, entity):
+ '''
+ Asserts that the entity passed in matches the default entity.
+ '''
+ self.assertEqual(entity.age, '39')
+ self.assertEqual(entity.sex, 'male')
+ self.assertEqual(entity.name, 'John Doe')
+ self.assertEqual(entity.married, True)
+ self.assertEqual(entity.deceased, False)
+ self.assertFalse(hasattr(entity, "optional"))
+ self.assertFalse(hasattr(entity, "aquarius"))
+ self.assertEqual(entity.ratio, 3.1)
+ self.assertEqual(entity.evenratio, 3.0)
+ self.assertEqual(entity.large, '933311100')
+ self.assertEqual(entity.Birthday, '1973-10-04T00:00:00Z')
+ self.assertEqual(entity.birthday, '1970-10-04T00:00:00Z')
+ self.assertEqual(entity.binary, _encode_base64(b'binary'))
+ self.assertIsInstance(entity.other, EntityProperty)
+ self.assertEqual(entity.other.type, EdmType.INT32)
+ self.assertEqual(entity.other.value, 20)
+ self.assertEqual(entity.clsid, 'c9da6455-213d-42c9-9a79-3e9149a57833')
+ self.assertTrue(hasattr(entity, "Timestamp"))
+ self.assertIsInstance(entity.Timestamp, datetime)
+ self.assertIsNotNone(entity.etag)
+
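+    # Encryption resolver used throughout these tests: marks the 'sex' and 'name' properties for encryption.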
+    def _default_encryption_resolver(self, x, y, property):
+        return property == 'sex' or property == 'name'
+
+ #@record
+ def test_get_encrypted_dict(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_dict()
+ entity['sex'] = EntityProperty(EdmType.STRING, entity['sex'], True)
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self._assert_default_entity(new_entity)
+
+ #@record
+ def test_get_encrypted_entity(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ # Only want to encrypt one property in this test
+ entity['name'] = 'John Doe'
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+
+ # Assert
+ self._assert_default_entity(new_entity)
+
+ #@record
+ def test_get_encrypt_multiple_properties(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+
+ # Assert
+ self._assert_default_entity(new_entity)
+
+ #@record
+ def test_get_encrypted_entity_key_resolver(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ key_resolver = KeyResolver()
+ key_resolver.put_key(self.ts.key_encryption_key)
+ self.ts.key_resolver_function = key_resolver.resolve_key
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ self.ts.key_encryption_key = None
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+
+ # Assert
+ self._assert_default_entity(new_entity)
+
+ #@record
+ def test_get_encrypted_entity_encryption_resolver(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_random_entity_class()
+ self.ts.encryption_resolver_function = self._default_encryption_resolver
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+ self.ts.key_encryption_key = None
+ self.ts.require_encryption = False
+        # Retrieve a second copy without decrypting to ensure the properties were encrypted.
+ new_entity2 = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+
+ # Assert
+ self._assert_default_entity(new_entity)
+ self.assertEqual(EdmType.BINARY, new_entity2['sex'].type)
+ self.assertEqual(EdmType.BINARY, new_entity2['name'].type)
+
+ #@record
+ def test_get_encrypted_entity_properties_and_resolver(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ self.ts.encryption_resolver_function = self._default_encryption_resolver
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self._assert_default_entity(new_entity)
+
+ def _get_with_payload_format(self, format):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ entity['RowKey'] = entity['RowKey'] + format[len('application/json;odata='):]
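+        # The slice keeps only the odata level (e.g. 'fullmetadata'), giving each payload format a unique RowKey in the shared table.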
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'],
+ accept=format)
+
+ # Assert
+ if format == TablePayloadFormat.JSON_NO_METADATA:
+ self._assert_default_entity_json_no_metadata(new_entity)
+ else:
+ self._assert_default_entity(new_entity)
+
+ #@record
+ def test_get_payload_formats(self):
+ self._get_with_payload_format(TablePayloadFormat.JSON_FULL_METADATA)
+ self._get_with_payload_format(TablePayloadFormat.JSON_MINIMAL_METADATA)
+ self._get_with_payload_format(TablePayloadFormat.JSON_NO_METADATA)
+
+ def test_get_entity_kek_RSA(self):
+ # We can only generate random RSA keys, so this must be run live or
+ # the playback test will fail due to a change in kek values.
+ if TestMode.need_recording_file(self.test_mode):
+ return
+
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = RSAKeyWrapper('key2')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self._assert_default_entity(new_entity)
+
+ #@record
+ def test_get_entity_nonmatching_kid(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_random_entity_class()
+ self.ts.encryption_resolver_function = self._default_encryption_resolver
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ self.ts.key_encryption_key.kid = 'Invalid'
+
+ # Assert
+ try:
+ self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+ self.fail()
+ except AzureException as e:
+ self.assertEqual(str(e), _ERROR_DECRYPTION_FAILURE)
+
+ #@record
+ def test_get_entity_invalid_value_kek_wrap(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+
+ self.ts.key_encryption_key.get_key_wrap_algorithm = None
+ try:
+ self.ts.insert_entity(self.table_name, entity)
+ self.fail()
+ except AttributeError as e:
+ self.assertEqual(str(e), _ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm'))
+
+ self.ts.key_encryption_key = KeyWrapper('key1')
+
+ self.ts.key_encryption_key.get_kid = None
+ with self.assertRaises(AttributeError):
+ self.ts.insert_entity(self.table_name, entity)
+
+ self.ts.key_encryption_key = KeyWrapper('key1')
+
+ self.ts.key_encryption_key.wrap_key = None
+ with self.assertRaises(AttributeError):
+ self.ts.insert_entity(self.table_name, entity)
+
+ #@record
+ def test_get_entity_invalid_value_kek_unwrap(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ self.ts.key_encryption_key.unwrap_key = None
+ try:
+ self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+ self.fail()
+ except AzureException as e:
+ self.assertEqual(str(e), _ERROR_DECRYPTION_FAILURE)
+
+ self.ts.key_encryption_key = KeyWrapper('key1')
+
+ self.ts.key_encryption_key.get_kid = None
+ with self.assertRaises(AzureException):
+ self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ #@record
+ def test_insert_entity_missing_attribute_kek_wrap(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ valid_key = KeyWrapper('key1')
+
+ # Act
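+        # The key-encryption key is duck-typed: wrapping requires get_key_wrap_algorithm(),
+        # get_kid() and wrap_key(). Each invalid key below omits exactly one of these.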
+        invalid_key_1 = lambda: None  # functions are objects, so this effectively creates an empty object
+        invalid_key_1.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm
+        invalid_key_1.get_kid = valid_key.get_kid
+        # No attribute wrap_key
+        self.ts.key_encryption_key = invalid_key_1
+        with self.assertRaises(AttributeError):
+            self.ts.insert_entity(self.table_name, entity)
+
+        invalid_key_2 = lambda: None  # functions are objects, so this effectively creates an empty object
+        invalid_key_2.wrap_key = valid_key.wrap_key
+        invalid_key_2.get_kid = valid_key.get_kid
+        # No attribute get_key_wrap_algorithm
+        self.ts.key_encryption_key = invalid_key_2
+        with self.assertRaises(AttributeError):
+            self.ts.insert_entity(self.table_name, entity)
+
+        invalid_key_3 = lambda: None  # functions are objects, so this effectively creates an empty object
+        invalid_key_3.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm
+        invalid_key_3.wrap_key = valid_key.wrap_key
+        # No attribute get_kid
+        self.ts.key_encryption_key = invalid_key_3
+        with self.assertRaises(AttributeError):
+            self.ts.insert_entity(self.table_name, entity)
+
+ #@record
+ def test_get_entity_missing_attribute_kek_unwrap(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ valid_key = KeyWrapper('key1')
+ self.ts.key_encryption_key = valid_key
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+        invalid_key_1 = lambda: None  # functions are objects, so this effectively creates an empty object
+        invalid_key_1.get_kid = valid_key.get_kid
+        # No attribute unwrap_key
+        self.ts.key_encryption_key = invalid_key_1
+        with self.assertRaises(AzureException):
+            self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+        invalid_key_2 = lambda: None  # functions are objects, so this effectively creates an empty object
+        invalid_key_2.unwrap_key = valid_key.unwrap_key
+        # No attribute get_kid
+        self.ts.key_encryption_key = invalid_key_2
+        with self.assertRaises(AzureException):
+            self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ #@record
+ def test_get_entity_no_decryption(self):
+ # Arrange
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ self.ts.key_encryption_key = None
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+
+ # Assert
+ # Access the properties to ensure they are still on the entity
+ new_entity['_ClientEncryptionMetadata1']
+ new_entity['_ClientEncryptionMetadata2']
+
+ value = new_entity['sex']
+ self.assertEqual(value.type, EdmType.BINARY)
+
+ #@record
+ def test_replace_entity(self):
+ # Arrange
+ entity = self._create_random_entity_class()
+ self.ts.insert_entity(self.table_name, entity)
+ entity['sex'] = EntityProperty(EdmType.STRING, 'female', True)
+ self.ts.key_encryption_key = KeyWrapper('key1')
+
+ # Act
+ self.ts.require_encryption = True
+ self.ts.update_entity(self.table_name, entity)
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self.assertEqual(new_entity['sex'], entity['sex'].value)
+
+ #@record
+ def test_insert_strict_mode(self):
+ # Arrange
+ entity = self._create_default_entity_for_encryption()
+ self.ts.require_encryption = True
+
+ # Assert
+ with self.assertRaises(ValueError):
+ self.ts.insert_entity(self.table_name, entity)
+
+ #@record
+ def test_strict_mode_policy_no_encrypted_properties(self):
+ # Arrange
+ entity = self._create_random_entity_class()
+ self.ts.require_encryption = True
+ self.ts.key_encryption_key = KeyWrapper('key1')
+
+ # Act
+        # Even when require_encryption is true, it should be possible to insert
+        # an entity that happens not to have any properties marked for encryption.
+ self.ts.insert_entity(self.table_name, entity)
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self._assert_default_entity(new_entity)
+
+ #@record
+ def test_get_strict_mode_no_key(self):
+ # Arrange
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ self.ts.key_encryption_key = None
+ self.ts.require_encryption = True
+
+ # Assert
+ with self.assertRaises(AzureException):
+ self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ #@record
+ def test_get_strict_mode_unencrypted_entity(self):
+ # Arrange
+ entity = self._create_random_base_entity_class()
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ self.ts.require_encryption = True
+ self.ts.key_encryption_key = KeyWrapper('key1')
+
+ # Assert
+ with self.assertRaises(AzureException):
+ self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ #@record
+ def test_batch_entity_inserts_context_manager(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity1 = self._create_random_entity_class()
+ entity2 = self._create_random_entity_class(rk='Entity2')
+ entity3 = self._create_random_entity_class(rk='Entity3')
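+        # All operations in a single batch must share the same PartitionKey,
+        # so entity2 and entity3 are re-keyed to match entity1.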
+ entity2['PartitionKey'] = entity1['PartitionKey']
+ entity3['PartitionKey'] = entity1['PartitionKey']
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.require_encryption = True
+ self.ts.encryption_resolver_function = self._default_encryption_resolver
+ self.ts.insert_entity(self.table_name, entity3)
+ entity3['sex'] = 'female'
+
+ # Act
+ with self.ts.batch(self.table_name) as batch:
+ batch.insert_entity(entity1)
+ batch.insert_or_replace_entity(entity2)
+ batch.update_entity(entity3)
+
+ new_entity1 = self.ts.get_entity(self.table_name, entity1['PartitionKey'], entity1['RowKey'])
+ new_entity2 = self.ts.get_entity(self.table_name, entity2['PartitionKey'], entity2['RowKey'])
+ new_entity3 = self.ts.get_entity(self.table_name, entity3['PartitionKey'], entity3['RowKey'])
+
+ # Assert
+ self.assertEqual(new_entity1['sex'], entity1['sex'])
+ self.assertEqual(new_entity2['sex'], entity2['sex'])
+ self.assertEqual(new_entity3['sex'], entity3['sex'])
+
+ #@record
+ def test_batch_strict_mode(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+
+ # Act
+ batch = TableBatch(require_encryption=True)
+
+ # Assert
+ with self.assertRaises(ValueError):
+ batch.insert_entity(entity)
+
+ #@record
+ def test_property_resolver_decrypt_conflict(self):
+        # Tests that the encrypted properties list is given priority
+        # over the property resolver when deserializing (i.e. the
+        # EdmType should be binary, not the result of the resolver)
+
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+        property_resolver = lambda x, y, name, a, b: EdmType.STRING if name == 'sex' else None
+
+ # Act
+ new_entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'],
+ property_resolver=property_resolver)
+
+
+ # Assert
+ # If the encrypted property list correctly took priority, this field will have been
+ # properly decrypted
+ self.assertEqual(new_entity['sex'], 'male')
+
+ #@record
+ def test_validate_encryption(self):
+ # Arrange
+ entity = self._create_default_entity_for_encryption()
+ key_encryption_key = KeyWrapper('key1')
+ self.ts.key_encryption_key = key_encryption_key
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ self.ts.key_encryption_key = None
+ entity = self.ts.get_entity(self.table_name, entity['PartitionKey'], entity['RowKey'])
+
+ # Note the minor discrepancy from the normal decryption process: because the entity was retrieved
+ # without being decrypted, the encrypted_properties list is now stored in an EntityProperty object
+ # and is already raw bytes.
+ encrypted_properties_list = entity['_ClientEncryptionMetadata2'].value
+ encryption_data = entity['_ClientEncryptionMetadata1']
+ encryption_data = _dict_to_encryption_data(loads(encryption_data))
+
+ content_encryption_key = key_encryption_key.unwrap_key(encryption_data.wrapped_content_key.encrypted_key,
+ encryption_data.wrapped_content_key.algorithm)
+
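+        # The metadata IV is the SHA256 digest of the content encryption IV
+        # concatenated with RowKey + PartitionKey + the metadata property name,
+        # truncated to the 16-byte AES block size.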
+ digest = Hash(SHA256(), default_backend())
+ digest.update(encryption_data.content_encryption_IV +
+ (entity['RowKey'] + entity['PartitionKey'] + '_ClientEncryptionMetadata2').encode('utf-8'))
+ metadataIV = digest.finalize()
+ metadataIV = metadataIV[:16]
+
+ cipher = _generate_AES_CBC_cipher(content_encryption_key, metadataIV)
+
+ # Decrypt the data.
+ decryptor = cipher.decryptor()
+ encrypted_properties_list = decryptor.update(encrypted_properties_list) + decryptor.finalize()
+
+ # Unpad the data.
+ unpadder = PKCS7(128).unpadder()
+ encrypted_properties_list = unpadder.update(encrypted_properties_list) + unpadder.finalize()
+
+ encrypted_properties_list = encrypted_properties_list.decode('utf-8')
+
+        # Parse the JSON-serialized list of encrypted property names.
+        encrypted_properties_list = loads(encrypted_properties_list)
+
+        entity_iv = encryption_data.content_encryption_IV
+
+ decrypted_entity = deepcopy(entity)
+
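+        # Each listed property is decrypted the same way: derive its IV from
+        # the entity keys and the property name, AES-CBC decrypt, then strip
+        # the PKCS7 padding.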
+        for property_name in encrypted_properties_list:
+            value = entity[property_name]
+
+            digest = Hash(SHA256(), default_backend())
+            digest.update(entity_iv +
+                          (entity['RowKey'] + entity['PartitionKey'] + property_name).encode('utf-8'))
+            propertyIV = digest.finalize()
+            propertyIV = propertyIV[:16]
+
+            cipher = _generate_AES_CBC_cipher(content_encryption_key,
+                                              propertyIV)
+
+            # Decrypt the property.
+            decryptor = cipher.decryptor()
+            decrypted_data = (decryptor.update(value.value) + decryptor.finalize())
+
+            # Unpad the data.
+            unpadder = PKCS7(128).unpadder()
+            decrypted_data = (unpadder.update(decrypted_data) + unpadder.finalize())
+
+            decrypted_data = decrypted_data.decode('utf-8')
+
+            decrypted_entity[property_name] = decrypted_data
+
+ decrypted_entity.pop('_ClientEncryptionMetadata1')
+ decrypted_entity.pop('_ClientEncryptionMetadata2')
+
+
+ # Assert
+ self.assertEqual(decrypted_entity['sex'], 'male')
+
+ #@record
+ def test_insert_encrypt_invalid_types(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity_binary = self._create_random_entity_class()
+ entity_binary['bytes'] = EntityProperty(EdmType.BINARY, urandom(10), True)
+ entity_boolean = self._create_random_entity_class()
+ entity_boolean['married'] = EntityProperty(EdmType.BOOLEAN, True, True)
+ entity_date_time = self._create_random_entity_class()
+ entity_date_time['birthday'] = EntityProperty(EdmType.DATETIME, entity_date_time['birthday'], True)
+ entity_double = self._create_random_entity_class()
+        entity_double['ratio'] = EntityProperty(EdmType.DOUBLE, entity_double['ratio'], True)
+ entity_guid = self._create_random_entity_class()
+ entity_guid['clsid'].encrypt = True
+ entity_int32 = self._create_random_entity_class()
+ entity_int32['other'].encrypt = True
+ entity_int64 = self._create_random_entity_class()
+ entity_int64['large'] = EntityProperty(EdmType.INT64, entity_int64['large'], True)
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ entity_none_str = self._create_random_entity_class()
+ entity_none_str['none_str'] = EntityProperty(EdmType.STRING, None, True)
+
+ # Act
+
+ # Assert
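+        # Only non-None Edm.String values are encryptable; every other EdmType
+        # (and a None string) is expected to be rejected with ValueError.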
+ try:
+ self.ts.insert_entity(self.table_name, entity_binary)
+ self.fail()
+ except ValueError as e:
+ self.assertEqual(str(e), _ERROR_UNSUPPORTED_TYPE_FOR_ENCRYPTION)
+ with self.assertRaises(ValueError):
+ self.ts.insert_entity(self.table_name, entity_boolean)
+ with self.assertRaises(ValueError):
+ self.ts.insert_entity(self.table_name, entity_date_time)
+ with self.assertRaises(ValueError):
+ self.ts.insert_entity(self.table_name, entity_double)
+ with self.assertRaises(ValueError):
+ self.ts.insert_entity(self.table_name, entity_guid)
+ with self.assertRaises(ValueError):
+ self.ts.insert_entity(self.table_name, entity_int32)
+ with self.assertRaises(ValueError):
+ self.ts.insert_entity(self.table_name, entity_int64)
+ with self.assertRaises(ValueError):
+ self.ts.insert_entity(self.table_name, entity_none_str)
+
+ #@record
+ def test_invalid_encryption_operations_fail(self):
+ # Arrange
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Assert
+ with self.assertRaises(ValueError):
+ self.ts.merge_entity(self.table_name, entity)
+
+ with self.assertRaises(ValueError):
+ self.ts.insert_or_merge_entity(self.table_name, entity)
+
+ self.ts.require_encryption = True
+ self.ts.key_encryption_key = None
+
+ with self.assertRaises(ValueError):
+ self.ts.merge_entity(self.table_name, entity)
+
+ with self.assertRaises(ValueError):
+ self.ts.insert_or_merge_entity(self.table_name, entity)
+
+ #@record
+ def test_invalid_encryption_operations_fail_batch(self):
+ # Arrange
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+ batch = TableBatch(require_encryption=True, key_encryption_key=self.ts.key_encryption_key)
+
+ # Assert
+ with self.assertRaises(ValueError):
+ batch.merge_entity(entity)
+
+ with self.assertRaises(ValueError):
+ batch.insert_or_merge_entity(entity)
+
+
+ #@record
+ def test_query_entities_all_properties(self):
+ # Arrange
+ self.ts.require_encryption = True
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ table_name = self._create_query_table_encrypted(5)
+ default_entity = self._create_random_entity_class()
+
+ # Act
+ resp = self.ts.query_entities(table_name, num_results=5)
+
+ # Assert
+ self.assertEqual(len(resp.items), 5)
+ for entity in resp.items:
+ self.assertEqual(default_entity['sex'], entity['sex'])
+
+ #@record
+ def test_query_entities_projection(self):
+ # Arrange
+ self.ts.require_encryption = True
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ table_name = self._create_query_table_encrypted(5)
+ default_entity = self._create_random_entity_class()
+
+ # Act
+ resp = self.ts.query_entities(table_name, num_results=5, select='PartitionKey,RowKey,sex')
+
+ # Assert
+ for entity in resp.items:
+ self.assertEqual(default_entity['sex'], entity['sex'])
+ self.assertFalse(hasattr(entity, '_ClientEncryptionMetadata1'))
+ self.assertFalse(hasattr(entity, '_ClientEncryptionMetadata2'))
+
+ #@record
+ def test_query_entities_mixed_mode(self):
+ # Arrange
+ entity = self._create_random_entity_class(rk='unencrypted')
+ entity['RowKey'] += 'unencrypted'
+ self.ts.insert_entity(self.table_name, entity)
+ entity = self._create_default_entity_for_encryption()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ self.ts.insert_entity(self.table_name, entity)
+
+ # Act
+        # Passes without require_encryption set.
+ self.ts.query_entities(self.table_name)
+
+        # Assert
+        # Fails with require_encryption because not all returned entities
+        # will be encrypted.
+ self.ts.require_encryption = True
+ with self.assertRaises(AzureException):
+ self.ts.query_entities(self.table_name)
+
+
+ #@record
+ def test_insert_entity_too_many_properties(self):
+ # Arrange
+ self.ts.require_encryption = True
+ entity = self._create_random_base_entity_dict()
+ self.ts.key_encryption_key = KeyWrapper('key1')
+ for i in range(251):
+ entity['key{0}'.format(i)] = 'value{0}'.format(i)
+
+ # Act
+ with self.assertRaises(ValueError):
+ resp = self.ts.insert_entity(self.table_name, entity)
+
+ #@record
+ def test_validate_swapping_properties_fails(self):
+ # Arrange
+ entity1 = self._create_random_entity_class(rk='entity1')
+ entity2 = self._create_random_entity_class(rk='entity2')
+ kek = KeyWrapper('key1')
+ self.ts.key_encryption_key = kek
+ self.ts.encryption_resolver_function = self._default_encryption_resolver
+ self.ts.insert_entity(self.table_name, entity1)
+ self.ts.insert_entity(self.table_name, entity2)
+
+ # Act
+ self.ts.key_encryption_key = None
+ new_entity1 = self.ts.get_entity(self.table_name, entity1['PartitionKey'], entity1['RowKey'])
+ new_entity2 = deepcopy(new_entity1)
+ new_entity2['PartitionKey'] = entity2['PartitionKey']
+ new_entity2['RowKey'] = entity2['RowKey']
+ self.ts.update_entity(self.table_name, new_entity2)
+ self.ts.key_encryption_key = kek
+
+ # Assert
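+        # Each property IV is derived from the PartitionKey and RowKey, so
+        # ciphertext copied onto an entity with different keys fails to decrypt.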
+ with self.assertRaises(AzureException):
+ self.ts.get_entity(self.table_name, new_entity2['PartitionKey'], new_entity2['RowKey'])
+
+ #@record
+ def test_table_ops_ignore_encryption(self):
+ table_name = self.get_resource_name('EncryptionTableOps')
+ try:
+ # Arrange
+ self.ts.require_encryption = True
+ self.ts.key_encryption_key = KeyWrapper('key1')
+
+ # Act
+ self.assertTrue(self.ts.create_table(table_name))
+
+ self.assertTrue(self.ts.exists(table_name))
+
+ list_tables = self.ts.list_tables()
+ test_table_exists = False
+ for table in list_tables:
+ if table.name == table_name:
+ test_table_exists = True
+ self.assertTrue(test_table_exists)
+
+ permissions = self.ts.get_table_acl(table_name)
+            new_policy = AccessPolicy(TablePermissions(_str='r'), expiry=datetime(2017, 9, 9))
+ permissions['samplePolicy'] = new_policy
+ self.ts.set_table_acl(table_name, permissions)
+ permissions = self.ts.get_table_acl(table_name)
+ permissions['samplePolicy']
+ self.ts.key_encryption_key = None
+ permissions = self.ts.get_table_acl(table_name)
+ permissions['samplePolicy']
+
+ self.ts.delete_table(table_name)
+ self.assertFalse(self.ts.exists(table_name))
+ finally:
+ self.ts.delete_table(table_name)
+
+#------------------------------------------------------------------------------
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
diff --git a/sdk/table/tests/test_table_entity.py b/sdk/table/tests/test_table_entity.py
new file mode 100644
index 000000000000..bc0cbb4f83a1
--- /dev/null
+++ b/sdk/table/tests/test_table_entity.py
@@ -0,0 +1,1668 @@
+# coding: utf-8
+
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+import unittest
+# Entity, EntityProperty and EdmType are expected to come from the tables
+# package once available (see the commented import block below).
+
+import pytest
+
+import uuid
+from base64 import b64encode
+from datetime import datetime, timedelta
+
+from azure.azure_table import TableServiceClient
+from dateutil.tz import tzutc, tzoffset
+from math import isnan
+
+from azure.core import MatchConditions
+from azure.core.exceptions import (
+ HttpResponseError,
+ ResourceNotFoundError,
+ ResourceExistsError,
+ ClientAuthenticationError)
+
+# from azure.tables import (
+# AccessPolicy,
+# TableSasPermissions,
+# TableServiceClient,
+# EdmType,
+# Entity,
+# EntityProperty,
+# generate_table_sas
+# )
+
+# from azure.storage.table import (
+# TableBatch,
+# )
+
+from _shared.testcase import GlobalStorageAccountPreparer, TableTestCase, LogCaptured
+
+#------------------------------------------------------------------------------
+
+class StorageTableEntityTest(TableTestCase):
+
+ def _set_up(self, storage_account, storage_account_key):
+ self.ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ self.table_name = self.get_resource_name('uttable')
+ self.table = self.ts.get_table_client(self.table_name)
+ if self.is_live:
+ try:
+ self.ts.create_table(self.table_name)
+ except ResourceExistsError:
+ pass
+
+ self.query_tables = []
+
+ def _tear_down(self):
+ if self.is_live:
+ try:
+ self.ts.delete_table(self.table_name)
+            except Exception:
+ pass
+
+ for table_name in self.query_tables:
+ try:
+ self.ts.delete_table(table_name)
+                except Exception:
+ pass
+
+ #--Helpers-----------------------------------------------------------------
+
+ def _create_query_table(self, entity_count):
+        '''
+        Creates a query table and adds the requested number of entities with
+        the default set of values. RowKey is extended with a unique counter
+        value starting at 1 (as a string).
+        '''
+ table_name = self.get_resource_name('querytable')
+ table = self.ts.create_table(table_name)
+ self.query_tables.append(table_name)
+
+ entity = self._create_random_entity_dict()
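+        # RowKey is extended cumulatively each iteration, which keeps the
+        # keys unique within the partition.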
+ for i in range(1, entity_count + 1):
+ entity['RowKey'] = entity['RowKey'] + str(i)
+ table.create_item(entity)
+ #with self.ts.batch(table_name) as batch:
+ # for i in range(1, entity_count + 1):
+ # entity['RowKey'] = entity['RowKey'] + str(i)
+ # batch.insert_entity(entity)
+ return table
+
+
+ def _create_random_base_entity_dict(self):
+ '''
+ Creates a dict-based entity with only pk and rk.
+ '''
+ partition = self.get_resource_name('pk')
+ row = self.get_resource_name('rk')
+ return {
+ 'PartitionKey': partition,
+ 'RowKey': row,
+ }
+
+ def _create_random_entity_dict(self, pk=None, rk=None):
+ '''
+ Creates a dictionary-based entity with fixed values, using all
+ of the supported data types.
+ '''
+ partition = pk if pk is not None else self.get_resource_name('pk')
+ row = rk if rk is not None else self.get_resource_name('rk')
+ properties = {
+ 'PartitionKey': partition,
+ 'RowKey': row,
+ 'age': 39,
+ 'sex': 'male',
+ 'married': True,
+ 'deceased': False,
+ 'optional': None,
+ 'ratio': 3.1,
+ 'evenratio': 3.0,
+ 'large': 933311100,
+ 'Birthday': datetime(1973, 10, 4, tzinfo=tzutc()),
+ 'birthday': datetime(1970, 10, 4, tzinfo=tzutc()),
+ 'binary': b'binary',
+ 'other': EntityProperty(EdmType.INT32, 20),
+ 'clsid': uuid.UUID('c9da6455-213d-42c9-9a79-3e9149a57833')
+ }
+ return Entity(**properties)
+
+ def _insert_random_entity(self, pk=None, rk=None):
+ entity = self._create_random_entity_dict(pk, rk)
+ etag = self.table.create_item(entity, response_hook=lambda e, h: h['etag'])
+ return entity, etag
+
+ def _create_updated_entity_dict(self, partition, row):
+ '''
+ Creates a dictionary-based entity with fixed values, with a
+ different set of values than the default entity. It
+ adds fields, changes field values, changes field types,
+ and removes fields when compared to the default entity.
+ '''
+ return {
+ 'PartitionKey': partition,
+ 'RowKey': row,
+ 'age': 'abc',
+ 'sex': 'female',
+ 'sign': 'aquarius',
+ 'birthday': datetime(1991, 10, 4, tzinfo=tzutc())
+ }
+
+ def _assert_default_entity(self, entity, headers=None):
+ '''
+ Asserts that the entity passed in matches the default entity.
+ '''
+ self.assertEqual(entity['age'], 39)
+ self.assertEqual(entity['sex'], 'male')
+ self.assertEqual(entity['married'], True)
+ self.assertEqual(entity['deceased'], False)
+ self.assertFalse("optional" in entity)
+ self.assertFalse("aquarius" in entity)
+ self.assertEqual(entity['ratio'], 3.1)
+ self.assertEqual(entity['evenratio'], 3.0)
+ self.assertEqual(entity['large'], 933311100)
+ self.assertEqual(entity['Birthday'], datetime(1973, 10, 4, tzinfo=tzutc()))
+ self.assertEqual(entity['birthday'], datetime(1970, 10, 4, tzinfo=tzutc()))
+ self.assertEqual(entity['binary'], b'binary')
+ self.assertIsInstance(entity['other'], EntityProperty)
+ self.assertEqual(entity['other'].type, EdmType.INT32)
+ self.assertEqual(entity['other'].value, 20)
+ self.assertEqual(entity['clsid'], uuid.UUID('c9da6455-213d-42c9-9a79-3e9149a57833'))
+ self.assertTrue('metadata' in entity.odata)
+ self.assertIsNotNone(entity.timestamp)
+ self.assertIsInstance(entity.timestamp, datetime)
+ if headers:
+ self.assertTrue("etag" in headers)
+ self.assertIsNotNone(headers['etag'])
+
+ def _assert_default_entity_json_full_metadata(self, entity, headers=None):
+ '''
+ Asserts that the entity passed in matches the default entity.
+ '''
+ self.assertEqual(entity['age'], 39)
+ self.assertEqual(entity['sex'], 'male')
+ self.assertEqual(entity['married'], True)
+ self.assertEqual(entity['deceased'], False)
+ self.assertFalse("optional" in entity)
+ self.assertFalse("aquarius" in entity)
+ self.assertEqual(entity['ratio'], 3.1)
+ self.assertEqual(entity['evenratio'], 3.0)
+ self.assertEqual(entity['large'], 933311100)
+ self.assertEqual(entity['Birthday'], datetime(1973, 10, 4, tzinfo=tzutc()))
+ self.assertEqual(entity['birthday'], datetime(1970, 10, 4, tzinfo=tzutc()))
+ self.assertEqual(entity['binary'], b'binary')
+ self.assertIsInstance(entity['other'], EntityProperty)
+ self.assertEqual(entity['other'].type, EdmType.INT32)
+ self.assertEqual(entity['other'].value, 20)
+ self.assertEqual(entity['clsid'], uuid.UUID('c9da6455-213d-42c9-9a79-3e9149a57833'))
+ self.assertTrue('metadata' in entity.odata)
+ self.assertTrue('id' in entity.odata)
+ self.assertTrue('type' in entity.odata)
+ self.assertTrue('etag' in entity.odata)
+ self.assertTrue('editLink' in entity.odata)
+ self.assertIsNotNone(entity.timestamp)
+ self.assertIsInstance(entity.timestamp, datetime)
+ if headers:
+ self.assertTrue("etag" in headers)
+ self.assertIsNotNone(headers['etag'])
+
+ def _assert_default_entity_json_no_metadata(self, entity, headers=None):
+ '''
+ Asserts that the entity passed in matches the default entity.
+ '''
+ self.assertEqual(entity['age'], '39')
+ self.assertEqual(entity['sex'], 'male')
+ self.assertEqual(entity['married'], True)
+ self.assertEqual(entity['deceased'], False)
+ self.assertFalse("optional" in entity)
+ self.assertFalse("aquarius" in entity)
+ self.assertEqual(entity['ratio'], 3.1)
+ self.assertEqual(entity['evenratio'], 3.0)
+ self.assertEqual(entity['large'], '933311100')
+ self.assertTrue(entity['Birthday'].startswith('1973-10-04T00:00:00'))
+ self.assertTrue(entity['birthday'].startswith('1970-10-04T00:00:00'))
+ self.assertTrue(entity['Birthday'].endswith('00Z'))
+ self.assertTrue(entity['birthday'].endswith('00Z'))
+ self.assertEqual(entity['binary'], b64encode(b'binary').decode('utf-8'))
+ self.assertIsInstance(entity['other'], EntityProperty)
+ self.assertEqual(entity['other'].type, EdmType.INT32)
+ self.assertEqual(entity['other'].value, 20)
+ self.assertEqual(entity['clsid'], 'c9da6455-213d-42c9-9a79-3e9149a57833')
+ self.assertIsNone(entity.odata)
+ self.assertIsNotNone(entity.timestamp)
+ self.assertIsInstance(entity.timestamp, datetime)
+ if headers:
+ self.assertTrue("etag" in headers)
+ self.assertIsNotNone(headers['etag'])
+
+ def _assert_updated_entity(self, entity):
+ '''
+ Asserts that the entity passed in matches the updated entity.
+ '''
+ self.assertEqual(entity.age, 'abc')
+ self.assertEqual(entity.sex, 'female')
+ self.assertFalse(hasattr(entity, "married"))
+ self.assertFalse(hasattr(entity, "deceased"))
+ self.assertEqual(entity.sign, 'aquarius')
+ self.assertFalse(hasattr(entity, "optional"))
+ self.assertFalse(hasattr(entity, "ratio"))
+ self.assertFalse(hasattr(entity, "evenratio"))
+ self.assertFalse(hasattr(entity, "large"))
+ self.assertFalse(hasattr(entity, "Birthday"))
+ self.assertEqual(entity.birthday, datetime(1991, 10, 4, tzinfo=tzutc()))
+ self.assertFalse(hasattr(entity, "other"))
+ self.assertFalse(hasattr(entity, "clsid"))
+ self.assertIsNotNone(entity.odata['etag'])
+ self.assertIsNotNone(entity.timestamp)
+ self.assertIsInstance(entity.timestamp, datetime)
+
+ def _assert_merged_entity(self, entity):
+ '''
+ Asserts that the entity passed in matches the default entity
+ merged with the updated entity.
+ '''
+ self.assertEqual(entity.age, 'abc')
+ self.assertEqual(entity.sex, 'female')
+ self.assertEqual(entity.sign, 'aquarius')
+ self.assertEqual(entity.married, True)
+ self.assertEqual(entity.deceased, False)
+ self.assertEqual(entity.sign, 'aquarius')
+ self.assertEqual(entity.ratio, 3.1)
+ self.assertEqual(entity.evenratio, 3.0)
+ self.assertEqual(entity.large, 933311100)
+ self.assertEqual(entity.Birthday, datetime(1973, 10, 4, tzinfo=tzutc()))
+ self.assertEqual(entity.birthday, datetime(1991, 10, 4, tzinfo=tzutc()))
+ self.assertIsInstance(entity.other, EntityProperty)
+ self.assertEqual(entity.other.type, EdmType.INT32)
+ self.assertEqual(entity.other.value, 20)
+ self.assertIsInstance(entity.clsid, uuid.UUID)
+ self.assertEqual(str(entity.clsid), 'c9da6455-213d-42c9-9a79-3e9149a57833')
+ self.assertIsNotNone(entity.odata['etag'])
+ self.assertIsNotNone(entity.timestamp)
+ self.assertIsInstance(entity.timestamp, datetime)
+
+ #--Test cases for entities ------------------------------------------
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_dictionary(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict()
+
+ # Act
+ resp = self.table.create_item(entity)
+
+ # Assert
+ self.assertIsNone(resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_with_hook(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict()
+
+ # Act
+ resp = self.table.create_item(entity, response_hook=lambda e, h: (e, h))
+
+ # Assert
+ self.assertIsNotNone(resp)
+ self._assert_default_entity(*resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_with_no_metadata(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict()
+
+ # Act
+ resp = self.table.create_item(
+ entity,
+ headers={'Accept': 'application/json;odata=nometadata'},
+ response_hook=lambda e, h: (e, h))
+
+ # Assert
+ self.assertIsNotNone(resp)
+ self._assert_default_entity_json_no_metadata(*resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_with_full_metadata(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict()
+
+ # Act
+ resp = self.table.create_item(
+ entity,
+ headers={'Accept': 'application/json;odata=fullmetadata'},
+ response_hook=lambda e, h: (e, h))
+
+ # Assert
+ self.assertIsNotNone(resp)
+ self._assert_default_entity_json_full_metadata(*resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_conflict(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ with self.assertRaises(ResourceExistsError):
+ self.table.create_item(entity)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_with_large_int32_value_throws(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
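+            # Edm.Int32 is a signed 32-bit value; 2**31 and -(2**31 + 1) each
+            # lie one step past a bound and should be rejected.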
+ dict32 = self._create_random_base_entity_dict()
+ dict32['large'] = EntityProperty(EdmType.INT32, 2**31)
+
+ # Assert
+ with self.assertRaisesRegex(TypeError,
+ '{0} is too large to be cast to type Edm.Int32.'.format(2**31)):
+ self.table.create_item(dict32)
+
+ dict32['large'] = EntityProperty(EdmType.INT32, -(2**31 + 1))
+ with self.assertRaisesRegex(TypeError,
+ '{0} is too large to be cast to type Edm.Int32.'.format(-(2**31 + 1))):
+ self.table.create_item(dict32)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_with_large_int64_value_throws(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Act
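+            # Edm.Int64 is a signed 64-bit value; 2**63 and -(2**63 + 1) each
+            # lie one step past a bound and should be rejected.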
+ dict64 = self._create_random_base_entity_dict()
+ dict64['large'] = EntityProperty(EdmType.INT64, 2**63)
+
+ # Assert
+ with self.assertRaisesRegex(TypeError,
+ '{0} is too large to be cast to type Edm.Int64.'.format(2**63)):
+ self.table.create_item(dict64)
+
+ dict64['large'] = EntityProperty(EdmType.INT64, -(2**63 + 1))
+ with self.assertRaisesRegex(TypeError,
+ '{0} is too large to be cast to type Edm.Int64.'.format(-(2**63 + 1))):
+ self.table.create_item(dict64)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_missing_pk(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = {'RowKey': 'rk'}
+
+ # Act
+ with self.assertRaises(ValueError):
+ resp = self.table.create_item(entity)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_empty_string_pk(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = {'RowKey': 'rk', 'PartitionKey': ''}
+
+ # Act
+ if 'cosmos' in self.table.url:
+ with self.assertRaises(HttpResponseError):
+ self.table.create_item(entity)
+            else:
+                resp = self.table.create_item(entity)
+
+                # Assert
+                self.assertIsNone(resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_missing_rk(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = {'PartitionKey': 'pk'}
+
+ # Act
+ with self.assertRaises(ValueError):
+ resp = self.table.create_item(entity)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_empty_string_rk(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = {'PartitionKey': 'pk', 'RowKey': ''}
+
+ # Act
+ if 'cosmos' in self.table.url:
+ with self.assertRaises(HttpResponseError):
+ self.table.create_item(entity)
+            else:
+                resp = self.table.create_item(entity)
+
+                # Assert
+                self.assertIsNone(resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_too_many_properties(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ if 'cosmos' in self.table.url:
+ pytest.skip("Cosmos supports large number of properties.")
+ try:
+ entity = self._create_random_base_entity_dict()
+ for i in range(255):
+ entity['key{0}'.format(i)] = 'value{0}'.format(i)
+
+ # Act
+ with self.assertRaises(HttpResponseError):
+ resp = self.table.create_item(entity)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_entity_property_name_too_long(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ if 'cosmos' in self.table.url:
+ pytest.skip("Cosmos supports large property names.")
+ try:
+ entity = self._create_random_base_entity_dict()
+ entity['a'*256] = 'badval'
+
+ # Act
+ with self.assertRaises(HttpResponseError):
+ resp = self.table.create_item(entity)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_get_entity(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ resp = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self.assertEqual(resp['PartitionKey'], entity['PartitionKey'])
+ self.assertEqual(resp['RowKey'], entity['RowKey'])
+ self._assert_default_entity(resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_get_entity_with_hook(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ resp, headers = self.table.read_item(
+ entity['PartitionKey'],
+ entity['RowKey'],
+ response_hook=lambda e, h: (e, h))
+
+ # Assert
+ self.assertEqual(resp['PartitionKey'], entity['PartitionKey'])
+ self.assertEqual(resp['RowKey'], entity['RowKey'])
+ self._assert_default_entity(resp, headers)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_get_entity_if_match(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, etag = self._insert_random_entity()
+
+ # Act
+ # Do a get and confirm the etag is parsed correctly by using it
+ # as a condition to delete.
+ resp = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+ self.table.delete_item(
+ resp['PartitionKey'],
+ resp['RowKey'],
+ etag=etag,
+ match_condition=MatchConditions.IfNotModified
+ )
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_get_entity_full_metadata(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ resp = self.table.read_item(
+ entity.PartitionKey,
+ entity.RowKey,
+ headers={'accept':'application/json;odata=fullmetadata'})
+
+ # Assert
+ self.assertEqual(resp.PartitionKey, entity.PartitionKey)
+ self.assertEqual(resp.RowKey, entity.RowKey)
+ self._assert_default_entity_json_full_metadata(resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_get_entity_no_metadata(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ resp = self.table.read_item(
+ entity.PartitionKey,
+ entity.RowKey,
+ headers={'accept':'application/json;odata=nometadata'})
+
+ # Assert
+ self.assertEqual(resp.PartitionKey, entity.PartitionKey)
+ self.assertEqual(resp.RowKey, entity.RowKey)
+ self._assert_default_entity_json_no_metadata(resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_get_entity_not_existing(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_entity_dict()
+
+ # Act
+ with self.assertRaises(ResourceNotFoundError):
+ self.table.read_item(entity.PartitionKey, entity.RowKey)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_get_entity_with_select(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ resp = self.table.read_item(entity.PartitionKey, entity.RowKey, select=['age','sex','xyz'])
+
+ # Assert
+ self.assertEqual(resp.age, 39)
+ self.assertEqual(resp.sex, 'male')
+ self.assertEqual(resp.xyz, None)
+ self.assertFalse(hasattr(resp, "birthday"))
+ self.assertFalse(hasattr(resp, "married"))
+ self.assertFalse(hasattr(resp, "deceased"))
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_get_entity_with_special_doubles(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+ entity.update({
+ 'inf': float('inf'),
+ 'negativeinf': float('-inf'),
+ 'nan': float('nan')
+ })
+ self.table.create_item(entity)
+
+ # Act
+ resp = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
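+            # Infinity and -Infinity round-trip exactly; NaN never compares
+            # equal to itself, so it is checked with isnan instead.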
+ self.assertEqual(resp.inf, float('inf'))
+ self.assertEqual(resp.negativeinf, float('-inf'))
+ self.assertTrue(isnan(resp.nan))
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_update_entity(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ resp = self.table.update_item(sent_entity, response_hook=lambda e, h: h)
+
+ # Assert
+ self.assertTrue(resp)
+ received_entity = self.table.read_item(entity.PartitionKey, entity.RowKey)
+ self._assert_updated_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_update_entity_not_existing(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity['PartitionKey'], entity['RowKey'])
+ with self.assertRaises(ResourceNotFoundError):
+ self.table.update_item(sent_entity)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_update_entity_with_if_matches(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, etag = self._insert_random_entity()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ resp = self.table.update_item(
+ sent_entity, etag=etag, match_condition=MatchConditions.IfNotModified, response_hook=lambda e, h: h)
+
+ # Assert
+ self.assertTrue(resp)
+ received_entity = self.table.read_item(entity.PartitionKey, entity.RowKey)
+ self._assert_updated_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_update_entity_with_if_doesnt_match(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ with self.assertRaises(HttpResponseError):
+ self.table.update_item(
+ sent_entity,
+ etag=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"',
+ match_condition=MatchConditions.IfNotModified)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_or_merge_entity_with_existing_entity(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ resp = self.table.upsert_item(sent_entity, mode='MERGE')
+
+ # Assert
+ self.assertIsNone(resp)
+ received_entity = self.table.read_item(entity.PartitionKey, entity.RowKey)
+ self._assert_merged_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_or_merge_entity_with_non_existing_entity(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity['PartitionKey'], entity['RowKey'])
+ resp = self.table.upsert_item(sent_entity, mode='MERGE')
+
+ # Assert
+ self.assertIsNone(resp)
+ received_entity = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+ self._assert_updated_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_or_replace_entity_with_existing_entity(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ resp = self.table.upsert_item(sent_entity)
+
+ # Assert
+ self.assertIsNone(resp)
+ received_entity = self.table.read_item(entity.PartitionKey, entity.RowKey)
+ self._assert_updated_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_insert_or_replace_entity_with_non_existing_entity(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity['PartitionKey'], entity['RowKey'])
+ resp = self.table.upsert_item(sent_entity)
+
+ # Assert
+ self.assertIsNone(resp)
+ received_entity = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+ self._assert_updated_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_merge_entity(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ resp = self.table.update_item(sent_entity, mode='MERGE')
+
+ # Assert
+ self.assertIsNone(resp)
+ received_entity = self.table.read_item(entity.PartitionKey, entity.RowKey)
+ self._assert_merged_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_merge_entity_not_existing(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity['PartitionKey'], entity['RowKey'])
+ with self.assertRaises(ResourceNotFoundError):
+ self.table.update_item(sent_entity, mode='MERGE')
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_merge_entity_with_if_matches(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, etag = self._insert_random_entity()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ resp = self.table.update_item(
+ sent_entity, mode='MERGE', etag=etag, match_condition=MatchConditions.IfNotModified)
+
+ # Assert
+ self.assertIsNone(resp)
+ received_entity = self.table.read_item(entity.PartitionKey, entity.RowKey)
+ self._assert_merged_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_merge_entity_with_if_doesnt_match(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ with self.assertRaises(HttpResponseError):
+ self.table.update_item(
+ sent_entity, mode='MERGE', etag=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"',
+ match_condition=MatchConditions.IfNotModified)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_delete_entity(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ resp = self.table.delete_item(entity.PartitionKey, entity.RowKey)
+
+ # Assert
+ self.assertIsNone(resp)
+ with self.assertRaises(ResourceNotFoundError):
+ self.table.read_item(entity.PartitionKey, entity.RowKey)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_delete_entity_not_existing(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+
+ # Act
+ with self.assertRaises(ResourceNotFoundError):
+ self.table.delete_item(entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_delete_entity_with_if_matches(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, etag = self._insert_random_entity()
+
+ # Act
+ resp = self.table.delete_item(entity.PartitionKey, entity.RowKey, etag=etag, match_condition=MatchConditions.IfNotModified)
+
+ # Assert
+ self.assertIsNone(resp)
+ with self.assertRaises(ResourceNotFoundError):
+ self.table.read_item(entity.PartitionKey, entity.RowKey)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_delete_entity_with_if_doesnt_match(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ with self.assertRaises(HttpResponseError):
+ self.table.delete_item(
+ entity.PartitionKey, entity.RowKey,
+ etag=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"',
+ match_condition=MatchConditions.IfNotModified)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_unicode_property_value(self, resource_group, location, storage_account, storage_account_key):
+        '''Regression test for GitHub issue #57.'''
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+ entity1 = entity.copy()
+ entity1.update({'Description': u'ꀕ'})
+ entity2 = entity.copy()
+ entity2.update({'RowKey': 'test2', 'Description': 'ꀕ'})
+
+ # Act
+ self.table.create_item(entity1)
+ self.table.create_item(entity2)
+ entities = list(self.table.query_items("PartitionKey eq '{}'".format(entity['PartitionKey'])))
+
+ # Assert
+ self.assertEqual(len(entities), 2)
+ self.assertEqual(entities[0].Description, u'ꀕ')
+ self.assertEqual(entities[1].Description, u'ꀕ')
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_unicode_property_name(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+ entity1 = entity.copy()
+ entity1.update({u'啊齄丂狛狜': u'ꀕ'})
+ entity2 = entity.copy()
+ entity2.update({'RowKey': 'test2', u'啊齄丂狛狜': 'hello'})
+
+ # Act
+ self.table.create_item(entity1)
+ self.table.create_item(entity2)
+ entities = list(self.table.query_items("PartitionKey eq '{}'".format(entity['PartitionKey'])))
+
+ # Assert
+ self.assertEqual(len(entities), 2)
+ self.assertEqual(entities[0][u'啊齄丂狛狜'], u'ꀕ')
+ self.assertEqual(entities[1][u'啊齄丂狛狜'], u'hello')
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_operations_on_entity_with_partition_key_having_single_quote(self, resource_group, location, storage_account, storage_account_key):
+
+ # Arrange
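+        # Single quotes in key values must be doubled when embedded in OData
+        # URLs, so keys containing quote characters exercise the escaping path.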
+ partition_key_with_single_quote = "a''''b"
+ row_key_with_single_quote = "a''''b"
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity(pk=partition_key_with_single_quote, rk=row_key_with_single_quote)
+
+ # Act
+ sent_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ resp = self.table.upsert_item(sent_entity)
+
+ # Assert
+ self.assertIsNone(resp)
+ received_entity = self.table.read_item(entity.PartitionKey, entity.RowKey)
+ self._assert_updated_entity(received_entity)
+
+ # Act
+ sent_entity['newField'] = 'newFieldValue'
+ resp = self.table.update_item(sent_entity)
+
+ # Assert
+ self.assertIsNone(resp)
+ received_entity = self.table.read_item(entity.PartitionKey, entity.RowKey)
+ self._assert_updated_entity(received_entity)
+ self.assertEqual(received_entity['newField'], 'newFieldValue')
+
+ # Act
+ resp = self.table.delete_item(received_entity.PartitionKey, received_entity.RowKey)
+
+ # Assert
+ self.assertIsNone(resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_empty_and_spaces_property_value(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+ entity.update({
+ 'EmptyByte': '',
+ 'EmptyUnicode': u'',
+ 'SpacesOnlyByte': ' ',
+ 'SpacesOnlyUnicode': u' ',
+ 'SpacesBeforeByte': ' Text',
+ 'SpacesBeforeUnicode': u' Text',
+ 'SpacesAfterByte': 'Text ',
+ 'SpacesAfterUnicode': u'Text ',
+ 'SpacesBeforeAndAfterByte': ' Text ',
+ 'SpacesBeforeAndAfterUnicode': u' Text ',
+ })
+
+ # Act
+ self.table.create_item(entity)
+ resp = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self.assertIsNotNone(resp)
+ self.assertEqual(resp.EmptyByte, '')
+ self.assertEqual(resp.EmptyUnicode, u'')
+ self.assertEqual(resp.SpacesOnlyByte, ' ')
+ self.assertEqual(resp.SpacesOnlyUnicode, u' ')
+ self.assertEqual(resp.SpacesBeforeByte, ' Text')
+ self.assertEqual(resp.SpacesBeforeUnicode, u' Text')
+ self.assertEqual(resp.SpacesAfterByte, 'Text ')
+ self.assertEqual(resp.SpacesAfterUnicode, u'Text ')
+ self.assertEqual(resp.SpacesBeforeAndAfterByte, ' Text ')
+ self.assertEqual(resp.SpacesBeforeAndAfterUnicode, u' Text ')
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_none_property_value(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity = self._create_random_base_entity_dict()
+ entity.update({'NoneValue': None})
+
+ # Act
+ self.table.create_item(entity)
+ resp = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self.assertIsNotNone(resp)
+ self.assertFalse(hasattr(resp, 'NoneValue'))
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_binary_property_value(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ binary_data = b'\x01\x02\x03\x04\x05\x06\x07\x08\t\n'
+ entity = self._create_random_base_entity_dict()
+            entity.update({'binary': binary_data})
+
+ # Act
+ self.table.create_item(entity)
+ resp = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self.assertIsNotNone(resp)
+ self.assertEqual(resp.binary, binary_data)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_timezone(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ local_tz = tzoffset('BRST', -10800)
+ local_date = datetime(2003, 9, 27, 9, 52, 43, tzinfo=local_tz)
+ entity = self._create_random_base_entity_dict()
+ entity.update({'date': local_date})
+
+ # Act
+ self.table.create_item(entity)
+ resp = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+
+ # Assert
+ self.assertIsNotNone(resp)
+ self.assertEqual(resp.date, local_date.astimezone(tzutc()))
+ self.assertEqual(resp.date.astimezone(local_tz), local_date)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_query_entities(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ table = self._create_query_table(2)
+
+ # Act
+ entities = list(table.read_all_items())
+
+ # Assert
+ self.assertEqual(len(entities), 2)
+ for entity in entities:
+ self._assert_default_entity(entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_query_zero_entities(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ table = self._create_query_table(0)
+
+ # Act
+ entities = list(table.read_all_items())
+
+ # Assert
+ self.assertEqual(len(entities), 0)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_query_entities_full_metadata(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ table = self._create_query_table(2)
+
+ # Act
+ entities = list(table.read_all_items(headers={'accept': 'application/json;odata=fullmetadata'}))
+
+ # Assert
+ self.assertEqual(len(entities), 2)
+ for entity in entities:
+ self._assert_default_entity_json_full_metadata(entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_query_entities_no_metadata(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ table = self._create_query_table(2)
+
+ # Act
+ entities = list(table.read_all_items(headers={'accept': 'application/json;odata=nometadata'}))
+
+ # Assert
+ self.assertEqual(len(entities), 2)
+ for entity in entities:
+ self._assert_default_entity_json_no_metadata(entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("Batch not implemented")
+ @GlobalStorageAccountPreparer()
+ def test_query_entities_large(self, resource_group, location, storage_account, storage_account_key):
+        # Arrange
+        self._set_up(storage_account, storage_account_key)
+        table_name = self._create_query_table(0)
+ total_entities_count = 1000
+ entities_per_batch = 50
+
+ for j in range(total_entities_count // entities_per_batch):
+ batch = TableBatch()
+ for i in range(entities_per_batch):
+ entity = Entity()
+ entity.PartitionKey = 'large'
+ entity.RowKey = 'batch{0}-item{1}'.format(j, i)
+ entity.test = EntityProperty(EdmType.BOOLEAN, 'true')
+ entity.test2 = 'hello world;' * 100
+ entity.test3 = 3
+ entity.test4 = EntityProperty(EdmType.INT64, '1234567890')
+ entity.test5 = datetime(2016, 12, 31, 11, 59, 59, 0)
+ batch.insert_entity(entity)
+ self.ts.commit_batch(table_name, batch)
+
+ # Act
+ start_time = datetime.now()
+ entities = list(self.ts.query_entities(table_name))
+ elapsed_time = datetime.now() - start_time
+
+ # Assert
+ print('query_entities took {0} secs.'.format(elapsed_time.total_seconds()))
+        # Azure allocates 5 seconds to execute a query; if it runs slowly,
+        # it will return fewer results and make the test fail.
+ self.assertEqual(len(entities), total_entities_count)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_query_entities_with_filter(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ entity, _ = self._insert_random_entity()
+
+ # Act
+ entities = list(self.table.query_items("PartitionKey eq '{}'".format(entity.PartitionKey)))
+
+ # Assert
+ self.assertEqual(len(entities), 1)
+ self.assertEqual(entity.PartitionKey, entities[0].PartitionKey)
+ self._assert_default_entity(entities[0])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_query_entities_with_select(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ table = self._create_query_table(2)
+
+ # Act
+ entities = list(table.read_all_items(select=['age','sex']))
+
+ # Assert
+ self.assertEqual(len(entities), 2)
+ self.assertEqual(entities[0].age, 39)
+ self.assertEqual(entities[0].sex, 'male')
+ self.assertFalse(hasattr(entities[0], "birthday"))
+ self.assertFalse(hasattr(entities[0], "married"))
+ self.assertFalse(hasattr(entities[0], "deceased"))
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_query_entities_with_top(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ table = self._create_query_table(3)
+
+ # Act
+ entities = list(next(table.read_all_items(results_per_page=2).by_page()))
+
+ # Assert
+ self.assertEqual(len(entities), 2)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_query_entities_with_top_and_next(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ self._set_up(storage_account, storage_account_key)
+ try:
+ table = self._create_query_table(5)
+
+ # Act
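+            # Page through manually: once a pager has served a page, its
+            # continuation_token seeds the pager for the following page.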
+ resp1 = table.read_all_items(results_per_page=2).by_page()
+ next(resp1)
+ resp2 = table.read_all_items(results_per_page=2).by_page(continuation_token=resp1.continuation_token)
+ next(resp2)
+ resp3 = table.read_all_items(results_per_page=2).by_page(continuation_token=resp2.continuation_token)
+ next(resp3)
+
+ entities1 = resp1._current_page
+ entities2 = resp2._current_page
+ entities3 = resp3._current_page
+
+ # Assert
+ self.assertEqual(len(entities1), 2)
+ self.assertEqual(len(entities2), 2)
+ self.assertEqual(len(entities3), 1)
+ self._assert_default_entity(entities1[0])
+ self._assert_default_entity(entities1[1])
+ self._assert_default_entity(entities2[0])
+ self._assert_default_entity(entities2[1])
+ self._assert_default_entity(entities3[0])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @pytest.mark.live_test_only
+ @GlobalStorageAccountPreparer()
+ def test_sas_query(self, resource_group, location, storage_account, storage_account_key):
+ # SAS URL is calculated from storage key, so this test runs live only
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support sas")
+
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Arrange
+ entity, _ = self._insert_random_entity()
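+            # Mint a query-only SAS for this table; the start time is backdated
+            # a minute so clock skew between client and service cannot reject it.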
+ token = generate_table_sas(
+ storage_account.name,
+ self.table_name,
+ storage_account_key,
+ permission=TableSasPermissions(query=True),
+ expiry=datetime.utcnow() + timedelta(hours=1),
+ start=datetime.utcnow() - timedelta(minutes=1),
+ )
+
+ # Act
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"),
+ credential=token,
+ )
+ table = service.get_table_client(self.table_name)
+ entities = list(table.query_items("PartitionKey eq '{}'".format(entity['PartitionKey'])))
+
+ # Assert
+ self.assertEqual(len(entities), 1)
+ self._assert_default_entity(entities[0])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @pytest.mark.live_test_only
+ @GlobalStorageAccountPreparer()
+ def test_sas_add(self, resource_group, location, storage_account, storage_account_key):
+ # SAS URL is calculated from storage key, so this test runs live only
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support sas")
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Arrange
+ token = generate_table_sas(
+ storage_account.name,
+ self.table_name,
+ storage_account_key,
+ permission=TableSasPermissions(add=True),
+ expiry=datetime.utcnow() + timedelta(hours=1),
+ start=datetime.utcnow() - timedelta(minutes=1),
+ )
+
+ # Act
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"),
+ credential=token,
+ )
+ table = service.get_table_client(self.table_name)
+
+ entity = self._create_random_entity_dict()
+ table.create_item(entity)
+
+ # Assert
+ resp = self.table.read_item(entity['PartitionKey'], entity['RowKey'])
+ self._assert_default_entity(resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @pytest.mark.live_test_only
+ @GlobalStorageAccountPreparer()
+ def test_sas_add_inside_range(self, resource_group, location, storage_account, storage_account_key):
+ # SAS URL is calculated from storage key, so this test runs live only
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support sas")
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Arrange
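+            # Scope the add-only SAS to the single (pk, rk) range 'test'/'test1';
+            # an insert inside that range should be authorized.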
+ token = generate_table_sas(
+ storage_account.name,
+ self.table_name,
+ storage_account_key,
+ permission=TableSasPermissions(add=True),
+ expiry=datetime.utcnow() + timedelta(hours=1),
+ start_pk='test', start_rk='test1',
+ end_pk='test', end_rk='test1',
+ )
+
+ # Act
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"),
+ credential=token,
+ )
+ table = service.get_table_client(self.table_name)
+ entity = self._create_random_entity_dict('test', 'test1')
+ table.create_item(entity)
+
+ # Assert
+ resp = self.table.read_item('test', 'test1')
+ self._assert_default_entity(resp)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @pytest.mark.live_test_only
+ @GlobalStorageAccountPreparer()
+ def test_sas_add_outside_range(self, resource_group, location, storage_account, storage_account_key):
+ # SAS URL is calculated from storage key, so this test runs live only
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support sas")
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Arrange
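+            # Same (pk, rk)-scoped SAS as the inside-range test, but the random
+            # entity inserted below falls outside the range and must be refused.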
+ token = generate_table_sas(
+ storage_account.name,
+ self.table_name,
+ storage_account_key,
+ permission=TableSasPermissions(add=True),
+ expiry=datetime.utcnow() + timedelta(hours=1),
+ start_pk='test', start_rk='test1',
+ end_pk='test', end_rk='test1',
+ )
+
+ # Act
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"),
+ credential=token,
+ )
+ table = service.get_table_client(self.table_name)
+ with self.assertRaises(HttpResponseError):
+ entity = self._create_random_entity_dict()
+ table.create_item(entity)
+
+ # Assert
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @pytest.mark.live_test_only
+ @GlobalStorageAccountPreparer()
+ def test_sas_update(self, resource_group, location, storage_account, storage_account_key):
+ # SAS URL is calculated from storage key, so this test runs live only
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support sas")
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Arrange
+ entity, _ = self._insert_random_entity()
+ token = generate_table_sas(
+ storage_account.name,
+ self.table_name,
+ storage_account_key,
+ permission=TableSasPermissions(update=True),
+ expiry=datetime.utcnow() + timedelta(hours=1),
+ )
+
+ # Act
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"),
+ credential=token,
+ )
+ table = service.get_table_client(self.table_name)
+ updated_entity = self._create_updated_entity_dict(entity.PartitionKey, entity.RowKey)
+ resp = table.update_item(updated_entity)
+
+ # Assert
+ received_entity = self.table.read_item(entity.PartitionKey, entity.RowKey)
+ self._assert_updated_entity(received_entity)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @pytest.mark.live_test_only
+ @GlobalStorageAccountPreparer()
+ def test_sas_delete(self, resource_group, location, storage_account, storage_account_key):
+ # SAS URL is calculated from storage key, so this test runs live only
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support sas")
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Arrange
+ entity, _ = self._insert_random_entity()
+ token = generate_table_sas(
+ storage_account.name,
+ self.table_name,
+ storage_account_key,
+ permission=TableSasPermissions(delete=True),
+ expiry=datetime.utcnow() + timedelta(hours=1),
+ )
+
+ # Act
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"),
+ credential=token,
+ )
+ table = service.get_table_client(self.table_name)
+ table.delete_item(entity.PartitionKey, entity.RowKey)
+
+ # Assert
+ with self.assertRaises(ResourceNotFoundError):
+ self.table.read_item(entity.PartitionKey, entity.RowKey)
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @pytest.mark.live_test_only
+ @GlobalStorageAccountPreparer()
+ def test_sas_upper_case_table_name(self, resource_group, location, storage_account, storage_account_key):
+ # SAS URL is calculated from storage key, so this test runs live only
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support sas")
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Arrange
+ entity, _ = self._insert_random_entity()
+
+            # Table names are case-insensitive, so a SAS generated for the upper-cased name should still authorize the original table
+ token = generate_table_sas(
+ storage_account.name,
+ self.table_name.upper(),
+ storage_account_key,
+ permission=TableSasPermissions(query=True),
+ expiry=datetime.utcnow() + timedelta(hours=1),
+ start=datetime.utcnow() - timedelta(minutes=1),
+ )
+
+ # Act
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"),
+ credential=token,
+ )
+ table = service.get_table_client(self.table_name)
+ entities = list(table.query_items("PartitionKey eq '{}'".format(entity['PartitionKey'])))
+
+ # Assert
+ self.assertEqual(len(entities), 1)
+ self._assert_default_entity(entities[0])
+ finally:
+ self._tear_down()
+
+ @pytest.mark.skip("pending")
+ @pytest.mark.live_test_only
+ @GlobalStorageAccountPreparer()
+ def test_sas_signed_identifier(self, resource_group, location, storage_account, storage_account_key):
+ # SAS URL is calculated from storage key, so this test runs live only
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support sas")
+ self._set_up(storage_account, storage_account_key)
+ try:
+ # Arrange
+ entity, _ = self._insert_random_entity()
+
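+            # With a stored access policy the permission and expiry live on the
+            # table's ACL, so the SAS token only has to reference the policy_id.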
+ access_policy = AccessPolicy()
+ access_policy.start = datetime(2011, 10, 11)
+ access_policy.expiry = datetime(2020, 10, 12)
+ access_policy.permission = TableSasPermissions(query=True)
+ identifiers = {'testid': access_policy}
+
+ self.table.set_table_access_policy(identifiers)
+
+ token = generate_table_sas(
+ storage_account.name,
+ self.table_name,
+ storage_account_key,
+ policy_id='testid',
+ )
+
+ # Act
+ service = TableServiceClient(
+ self.account_url(storage_account, "table"),
+ credential=token,
+ )
+ table = service.get_table_client(self.table_name)
+ entities = list(table.query_items("PartitionKey eq '{}'".format(entity.PartitionKey)))
+
+ # Assert
+ self.assertEqual(len(entities), 1)
+ self._assert_default_entity(entities[0])
+ finally:
+ self._tear_down()
+
+#------------------------------------------------------------------------------
+if __name__ == '__main__':
+ unittest.main()
diff --git a/sdk/table/tests/test_table_service_properties.py b/sdk/table/tests/test_table_service_properties.py
new file mode 100644
index 000000000000..3e3facf67b73
--- /dev/null
+++ b/sdk/table/tests/test_table_service_properties.py
@@ -0,0 +1,262 @@
+# coding: utf-8
+
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import unittest
+import time
+import pytest
+from azure.azure_table._models import TableAnalyticsLogging, Metrics, RetentionPolicy, CorsRule
+
+from msrest.exceptions import ValidationError # TODO This should be an azure-core error.
+from devtools_testutils import ResourceGroupPreparer, StorageAccountPreparer
+from azure.core.exceptions import HttpResponseError
+
+# from azure.tables import (
+# TableServiceClient,
+# TableClient,
+# TableAnalyticsLogging,
+# Metrics,
+# CorsRule,
+# RetentionPolicy,
+# )
+
+from azure.azure_table import TableServiceClient
+
+from _shared.testcase import GlobalStorageAccountPreparer, TableTestCase
+
+# ------------------------------------------------------------------------------
+
+
+class TableServicePropertiesTest(TableTestCase):
+ # --Helpers-----------------------------------------------------------------
+ def _assert_properties_default(self, prop):
+ self.assertIsNotNone(prop)
+
+ self._assert_logging_equal(prop['analytics_logging'], TableAnalyticsLogging())
+ self._assert_metrics_equal(prop['hour_metrics'], Metrics())
+ self._assert_metrics_equal(prop['minute_metrics'], Metrics())
+ self._assert_cors_equal(prop['cors'], list())
+
+ def _assert_logging_equal(self, log1, log2):
+ if log1 is None or log2 is None:
+ self.assertEqual(log1, log2)
+ return
+
+ self.assertEqual(log1.version, log2.version)
+ self.assertEqual(log1.read, log2.read)
+ self.assertEqual(log1.write, log2.write)
+ self.assertEqual(log1.delete, log2.delete)
+ self._assert_retention_equal(log1.retention_policy, log2.retention_policy)
+
+ def _assert_delete_retention_policy_equal(self, policy1, policy2):
+ if policy1 is None or policy2 is None:
+ self.assertEqual(policy1, policy2)
+ return
+
+ self.assertEqual(policy1.enabled, policy2.enabled)
+ self.assertEqual(policy1.days, policy2.days)
+
+ def _assert_static_website_equal(self, prop1, prop2):
+ if prop1 is None or prop2 is None:
+ self.assertEqual(prop1, prop2)
+ return
+
+ self.assertEqual(prop1.enabled, prop2.enabled)
+ self.assertEqual(prop1.index_document, prop2.index_document)
+ self.assertEqual(prop1.error_document404_path, prop2.error_document404_path)
+
+ def _assert_delete_retention_policy_not_equal(self, policy1, policy2):
+ if policy1 is None or policy2 is None:
+ self.assertNotEqual(policy1, policy2)
+ return
+
+ self.assertFalse(policy1.enabled == policy2.enabled
+ and policy1.days == policy2.days)
+
+ def _assert_metrics_equal(self, metrics1, metrics2):
+ if metrics1 is None or metrics2 is None:
+ self.assertEqual(metrics1, metrics2)
+ return
+
+ self.assertEqual(metrics1.version, metrics2.version)
+ self.assertEqual(metrics1.enabled, metrics2.enabled)
+ self.assertEqual(metrics1.include_apis, metrics2.include_apis)
+ self._assert_retention_equal(metrics1.retention_policy, metrics2.retention_policy)
+
+ def _assert_cors_equal(self, cors1, cors2):
+ if cors1 is None or cors2 is None:
+ self.assertEqual(cors1, cors2)
+ return
+
+ self.assertEqual(len(cors1), len(cors2))
+
+ for i in range(0, len(cors1)):
+ rule1 = cors1[i]
+ rule2 = cors2[i]
+ self.assertEqual(len(rule1.allowed_origins), len(rule2.allowed_origins))
+ self.assertEqual(len(rule1.allowed_methods), len(rule2.allowed_methods))
+ self.assertEqual(rule1.max_age_in_seconds, rule2.max_age_in_seconds)
+ self.assertEqual(len(rule1.exposed_headers), len(rule2.exposed_headers))
+ self.assertEqual(len(rule1.allowed_headers), len(rule2.allowed_headers))
+
+ def _assert_retention_equal(self, ret1, ret2):
+ self.assertEqual(ret1.enabled, ret2.enabled)
+ self.assertEqual(ret1.days, ret2.days)
+
+ # --Test cases per service ---------------------------------------
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_table_service_properties(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support service properties")
+ tsc = TableServiceClient(url, storage_account_key)
+ # Act
+ resp = tsc.set_service_properties(
+ analytics_logging=TableAnalyticsLogging(),
+ hour_metrics=Metrics(),
+ minute_metrics=Metrics(),
+ cors=list())
+
+ # Assert
+ self.assertIsNone(resp)
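+        # Service property changes can take up to 30 seconds to propagate, so
+        # live runs pause before reading the settings back.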
+ if self.is_live:
+ time.sleep(30)
+ self._assert_properties_default(tsc.get_service_properties())
+
+
+ # --Test cases per feature ---------------------------------------
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_set_logging(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support service properties")
+ tsc = TableServiceClient(url, storage_account_key)
+ logging = TableAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5))
+
+ # Act
+ tsc.set_service_properties(analytics_logging=logging)
+
+ # Assert
+ if self.is_live:
+ time.sleep(30)
+ received_props = tsc.get_service_properties()
+ self._assert_logging_equal(received_props['analytics_logging'], logging)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_set_hour_metrics(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support service properties")
+ tsc = TableServiceClient(url, storage_account_key)
+ hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5))
+
+ # Act
+ tsc.set_service_properties(hour_metrics=hour_metrics)
+
+ # Assert
+ if self.is_live:
+ time.sleep(30)
+ received_props = tsc.get_service_properties()
+ self._assert_metrics_equal(received_props['hour_metrics'], hour_metrics)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_set_minute_metrics(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support service properties")
+ tsc = TableServiceClient(url, storage_account_key)
+ minute_metrics = Metrics(enabled=True, include_apis=True,
+ retention_policy=RetentionPolicy(enabled=True, days=5))
+
+ # Act
+ tsc.set_service_properties(minute_metrics=minute_metrics)
+
+ # Assert
+ if self.is_live:
+ time.sleep(30)
+ received_props = tsc.get_service_properties()
+ self._assert_metrics_equal(received_props['minute_metrics'], minute_metrics)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_set_cors(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ url = self.account_url(storage_account, "table")
+ if 'cosmos' in url:
+ pytest.skip("Cosmos Tables does not yet support service properties")
+ tsc = TableServiceClient(url, storage_account_key)
+ cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])
+
+ allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
+ allowed_methods = ['GET', 'PUT']
+ max_age_in_seconds = 500
+ exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"]
+ allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"]
+ cors_rule2 = CorsRule(
+ allowed_origins,
+ allowed_methods,
+ max_age_in_seconds=max_age_in_seconds,
+ exposed_headers=exposed_headers,
+ allowed_headers=allowed_headers)
+
+ cors = [cors_rule1, cors_rule2]
+
+ # Act
+ tsc.set_service_properties(cors=cors)
+
+ # Assert
+ if self.is_live:
+ time.sleep(30)
+ received_props = tsc.get_service_properties()
+ self._assert_cors_equal(received_props['cors'], cors)
+
+ # --Test cases for errors ---------------------------------------
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_retention_no_days(self, resource_group, location, storage_account, storage_account_key):
+ # Assert
+ self.assertRaises(ValueError,
+ RetentionPolicy,
+ True, None)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_too_many_cors_rules(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ tsc = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ cors = []
+ for i in range(0, 6):
+ cors.append(CorsRule(['www.xyz.com'], ['GET']))
+
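+        # The service accepts at most five CORS rules, so submitting six must
+        # be rejected.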
+ # Assert
+ self.assertRaises(HttpResponseError,
+ tsc.set_service_properties, None, None, None, cors)
+
+ @pytest.mark.skip("pending")
+ @GlobalStorageAccountPreparer()
+ def test_retention_too_long(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ tsc = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+ minute_metrics = Metrics(enabled=True, include_apis=True,
+ retention_policy=RetentionPolicy(enabled=True, days=366))
+
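+        # Retention can be configured for at most 365 days, so 366 exceeds the cap.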
+ # Assert
+ self.assertRaises(HttpResponseError,
+ tsc.set_service_properties,
+ None, None, minute_metrics)
+
+
+# ------------------------------------------------------------------------------
+if __name__ == '__main__':
+ unittest.main()
diff --git a/sdk/table/tests/test_table_service_stats.py b/sdk/table/tests/test_table_service_stats.py
new file mode 100644
index 000000000000..52353d0ed347
--- /dev/null
+++ b/sdk/table/tests/test_table_service_stats.py
@@ -0,0 +1,81 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import unittest
+import pytest
+
+# from azure.tables import TableServiceClient
+from azure.azure_table import TableServiceClient
+from devtools_testutils import ResourceGroupPreparer, StorageAccountPreparer
+from _shared.testcase import GlobalResourceGroupPreparer, TableTestCase, GlobalStorageAccountPreparer
+
+SERVICE_UNAVAILABLE_RESP_BODY = '<?xml version="1.0" encoding="utf-8"?><StorageServiceStats><GeoReplication>' \
+                                '<Status>unavailable</Status><LastSyncTime></LastSyncTime>' \
+                                '</GeoReplication></StorageServiceStats> '
+
+SERVICE_LIVE_RESP_BODY = '<?xml version="1.0" encoding="utf-8"?><StorageServiceStats><GeoReplication>' \
+                         '<Status>live</Status><LastSyncTime>Wed, 19 Jan 2021 22:28:43 GMT</LastSyncTime>' \
+                         '</GeoReplication></StorageServiceStats> '
+
+
+# --Test Class -----------------------------------------------------------------
+class TableServiceStatsTest(TableTestCase):
+ # --Helpers-----------------------------------------------------------------
+ def _assert_stats_default(self, stats):
+ self.assertIsNotNone(stats)
+ self.assertIsNotNone(stats['geo_replication'])
+
+ self.assertEqual(stats['geo_replication']['status'], 'live')
+ self.assertIsNotNone(stats['geo_replication']['last_sync_time'])
+
+ def _assert_stats_unavailable(self, stats):
+ self.assertIsNotNone(stats)
+ self.assertIsNotNone(stats['geo_replication'])
+
+ self.assertEqual(stats['geo_replication']['status'], 'unavailable')
+ self.assertIsNone(stats['geo_replication']['last_sync_time'])
+
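+    # These hooks replace the HTTP response body with a canned
+    # StorageServiceStats payload so both geo-replication states can be
+    # asserted without a live RA-GRS secondary.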
+ @staticmethod
+ def override_response_body_with_unavailable_status(response):
+        response.http_response.text = lambda encoding=None: SERVICE_UNAVAILABLE_RESP_BODY
+
+ @staticmethod
+ def override_response_body_with_live_status(response):
+ response.http_response.text = lambda encoding=None: SERVICE_LIVE_RESP_BODY
+
+ # --Test cases per service ---------------------------------------
+
+ @pytest.mark.skip("pending")
+ @GlobalResourceGroupPreparer()
+ @StorageAccountPreparer(name_prefix='pyacrstorage', sku='Standard_RAGRS', random_name_enabled=True)
+ def test_table_service_stats_f(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ tsc = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+
+ # Act
+ stats = tsc.get_service_stats(raw_response_hook=self.override_response_body_with_live_status)
+ # Assert
+ self._assert_stats_default(stats)
+
+ @pytest.mark.skip("pending")
+ @GlobalResourceGroupPreparer()
+ @StorageAccountPreparer(name_prefix='pyacrstorage', sku='Standard_RAGRS', random_name_enabled=True)
+ def test_table_service_stats_when_unavailable(self, resource_group, location, storage_account, storage_account_key):
+ # Arrange
+ tsc = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+
+ # Act
+ stats = tsc.get_service_stats(
+ raw_response_hook=self.override_response_body_with_unavailable_status)
+
+ # Assert
+ self._assert_stats_unavailable(stats)
+
+
+# ------------------------------------------------------------------------------
+if __name__ == '__main__':
+ unittest.main()