diff --git a/sdk/Table/azure/storage/tables/__init__.py b/sdk/Table/__init__.py similarity index 100% rename from sdk/Table/azure/storage/tables/__init__.py rename to sdk/Table/__init__.py diff --git a/sdk/Table/azure/azure_table/__init__.py b/sdk/Table/azure/azure_table/__init__.py new file mode 100644 index 000000000000..4c7aa6cd2db6 --- /dev/null +++ b/sdk/Table/azure/azure_table/__init__.py @@ -0,0 +1,5 @@ +__all__ = [ + 'generate_account_sas', +] + +from azure.table import generate_account_sas diff --git a/sdk/Table/azure/storage/tables/_deserialize.py b/sdk/Table/azure/azure_table/_deserialize.py similarity index 100% rename from sdk/Table/azure/storage/tables/_deserialize.py rename to sdk/Table/azure/azure_table/_deserialize.py diff --git a/sdk/Table/azure/storage/tables/_generated/__init__.py b/sdk/Table/azure/azure_table/_generated/__init__.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/__init__.py rename to sdk/Table/azure/azure_table/_generated/__init__.py diff --git a/sdk/Table/azure/storage/tables/_generated/_azure_table.py b/sdk/Table/azure/azure_table/_generated/_azure_table.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/_azure_table.py rename to sdk/Table/azure/azure_table/_generated/_azure_table.py diff --git a/sdk/Table/azure/storage/tables/_generated/_configuration.py b/sdk/Table/azure/azure_table/_generated/_configuration.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/_configuration.py rename to sdk/Table/azure/azure_table/_generated/_configuration.py diff --git a/sdk/Table/azure/storage/tables/_generated/aio/__init__.py b/sdk/Table/azure/azure_table/_generated/aio/__init__.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/aio/__init__.py rename to sdk/Table/azure/azure_table/_generated/aio/__init__.py diff --git a/sdk/Table/azure/storage/tables/_generated/aio/_azure_table_async.py b/sdk/Table/azure/azure_table/_generated/aio/_azure_table_async.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/aio/_azure_table_async.py rename to sdk/Table/azure/azure_table/_generated/aio/_azure_table_async.py diff --git a/sdk/Table/azure/storage/tables/_generated/aio/_configuration_async.py b/sdk/Table/azure/azure_table/_generated/aio/_configuration_async.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/aio/_configuration_async.py rename to sdk/Table/azure/azure_table/_generated/aio/_configuration_async.py diff --git a/sdk/Table/azure/storage/tables/_generated/aio/operations_async/__init__.py b/sdk/Table/azure/azure_table/_generated/aio/operations_async/__init__.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/aio/operations_async/__init__.py rename to sdk/Table/azure/azure_table/_generated/aio/operations_async/__init__.py diff --git a/sdk/Table/azure/storage/tables/_generated/aio/operations_async/_service_operations_async.py b/sdk/Table/azure/azure_table/_generated/aio/operations_async/_service_operations_async.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/aio/operations_async/_service_operations_async.py rename to sdk/Table/azure/azure_table/_generated/aio/operations_async/_service_operations_async.py diff --git a/sdk/Table/azure/storage/tables/_generated/aio/operations_async/_table_operations_async.py b/sdk/Table/azure/azure_table/_generated/aio/operations_async/_table_operations_async.py similarity index 100% rename from 
sdk/Table/azure/storage/tables/_generated/aio/operations_async/_table_operations_async.py rename to sdk/Table/azure/azure_table/_generated/aio/operations_async/_table_operations_async.py diff --git a/sdk/Table/azure/storage/tables/_generated/models/__init__.py b/sdk/Table/azure/azure_table/_generated/models/__init__.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/models/__init__.py rename to sdk/Table/azure/azure_table/_generated/models/__init__.py diff --git a/sdk/Table/azure/storage/tables/_generated/models/_azure_table_enums.py b/sdk/Table/azure/azure_table/_generated/models/_azure_table_enums.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/models/_azure_table_enums.py rename to sdk/Table/azure/azure_table/_generated/models/_azure_table_enums.py diff --git a/sdk/Table/azure/storage/tables/_generated/models/_models.py b/sdk/Table/azure/azure_table/_generated/models/_models.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/models/_models.py rename to sdk/Table/azure/azure_table/_generated/models/_models.py diff --git a/sdk/Table/azure/storage/tables/_generated/models/_models_py3.py b/sdk/Table/azure/azure_table/_generated/models/_models_py3.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/models/_models_py3.py rename to sdk/Table/azure/azure_table/_generated/models/_models_py3.py diff --git a/sdk/Table/azure/storage/tables/_generated/operations/__init__.py b/sdk/Table/azure/azure_table/_generated/operations/__init__.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/operations/__init__.py rename to sdk/Table/azure/azure_table/_generated/operations/__init__.py diff --git a/sdk/Table/azure/storage/tables/_generated/operations/_service_operations.py b/sdk/Table/azure/azure_table/_generated/operations/_service_operations.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/operations/_service_operations.py rename to sdk/Table/azure/azure_table/_generated/operations/_service_operations.py diff --git a/sdk/Table/azure/storage/tables/_generated/operations/_table_operations.py b/sdk/Table/azure/azure_table/_generated/operations/_table_operations.py similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/operations/_table_operations.py rename to sdk/Table/azure/azure_table/_generated/operations/_table_operations.py diff --git a/sdk/Table/azure/storage/tables/_generated/py.typed b/sdk/Table/azure/azure_table/_generated/py.typed similarity index 100% rename from sdk/Table/azure/storage/tables/_generated/py.typed rename to sdk/Table/azure/azure_table/_generated/py.typed diff --git a/sdk/Table/azure/azure_table/_generated/version.py b/sdk/Table/azure/azure_table/_generated/version.py new file mode 100644 index 000000000000..629812170000 --- /dev/null +++ b/sdk/Table/azure/azure_table/_generated/version.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +VERSION = "2019-07-07" + diff --git a/sdk/Table/azure/storage/tables/_message_encoding.py b/sdk/Table/azure/azure_table/_message_encoding.py similarity index 100% rename from sdk/Table/azure/storage/tables/_message_encoding.py rename to sdk/Table/azure/azure_table/_message_encoding.py diff --git a/sdk/Table/azure/storage/tables/_shared/__init__.py b/sdk/Table/azure/azure_table/_shared/__init__.py similarity index 100% rename from sdk/Table/azure/storage/tables/_shared/__init__.py rename to sdk/Table/azure/azure_table/_shared/__init__.py diff --git a/sdk/Table/azure/storage/tables/_shared/_common_conversion.py b/sdk/Table/azure/azure_table/_shared/_common_conversion.py similarity index 99% rename from sdk/Table/azure/storage/tables/_shared/_common_conversion.py rename to sdk/Table/azure/azure_table/_shared/_common_conversion.py index 8b50afbe1afb..80c92353848f 100644 --- a/sdk/Table/azure/storage/tables/_shared/_common_conversion.py +++ b/sdk/Table/azure/azure_table/_shared/_common_conversion.py @@ -11,6 +11,7 @@ from io import (SEEK_SET) from dateutil.tz import tzutc +from pyparsing import unicode from ._error import ( _ERROR_VALUE_SHOULD_BE_BYTES_OR_STREAM, diff --git a/sdk/Table/azure/azure_table/_shared/_constants.py b/sdk/Table/azure/azure_table/_shared/_constants.py new file mode 100644 index 000000000000..858875b6af28 --- /dev/null +++ b/sdk/Table/azure/azure_table/_shared/_constants.py @@ -0,0 +1,51 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import platform +import sys + +__author__ = 'Microsoft Corp. ' +__version__ = '1.4.2' + +# UserAgent string sample: 'Azure-Storage/0.37.0-0.38.0 (Python CPython 3.4.2; Windows 8)' +# First version(0.37.0) is the common package, and the second version(0.38.0) is the service package +USER_AGENT_STRING_PREFIX = 'Azure-Storage/{}-'.format(__version__) +USER_AGENT_STRING_SUFFIX = '(Python {} {}; {} {})'.format(platform.python_implementation(), + platform.python_version(), platform.system(), + platform.release()) + +# default values for common package, in case it is used directly +DEFAULT_X_MS_VERSION = '2018-03-28' +DEFAULT_USER_AGENT_STRING = '{}None {}'.format(USER_AGENT_STRING_PREFIX, USER_AGENT_STRING_SUFFIX) + +# Live ServiceClient URLs +SERVICE_HOST_BASE = 'core.windows.net' +DEFAULT_PROTOCOL = 'https' + +# Development ServiceClient URLs +DEV_BLOB_HOST = '127.0.0.1:10000' +DEV_QUEUE_HOST = '127.0.0.1:10001' + +# Default credentials for Development Storage Service +DEV_ACCOUNT_NAME = 'devstoreaccount1' +DEV_ACCOUNT_SECONDARY_NAME = 'devstoreaccount1-secondary' +DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==' + +# Socket timeout in seconds +DEFAULT_SOCKET_TIMEOUT = 20 + +# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned) +# The socket timeout is now the maximum total duration to send all data. 
+if sys.version_info >= (3, 5): + # the timeout to connect is 20 seconds, and the read timeout is 2000 seconds + # the 2000 seconds was calculated with: 100MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed) + DEFAULT_SOCKET_TIMEOUT = (20, 2000) + +# Encryption constants +_ENCRYPTION_PROTOCOL_V1 = '1.0' + +_AUTHORIZATION_HEADER_NAME = 'Authorization' +_COPY_SOURCE_HEADER_NAME = 'x-ms-copy-source' +_REDACTED_VALUE = 'REDACTED' diff --git a/sdk/Table/azure/storage/tables/_shared/_error.py b/sdk/Table/azure/azure_table/_shared/_error.py similarity index 99% rename from sdk/Table/azure/storage/tables/_shared/_error.py rename to sdk/Table/azure/azure_table/_shared/_error.py index a6e0beb69e5d..49e0fffeb68e 100644 --- a/sdk/Table/azure/storage/tables/_shared/_error.py +++ b/sdk/Table/azure/azure_table/_shared/_error.py @@ -5,6 +5,8 @@ # -------------------------------------------------------------------------- from sys import version_info +from pyparsing import unicode + if version_info < (3,): def _str(value): if isinstance(value, unicode): diff --git a/sdk/Table/azure/storage/tables/_shared/authentication.py b/sdk/Table/azure/azure_table/_shared/authentication.py similarity index 70% rename from sdk/Table/azure/storage/tables/_shared/authentication.py rename to sdk/Table/azure/azure_table/_shared/authentication.py index a8db96d4a051..aae5f61e0df5 100644 --- a/sdk/Table/azure/storage/tables/_shared/authentication.py +++ b/sdk/Table/azure/azure_table/_shared/authentication.py @@ -8,10 +8,10 @@ import sys try: - from urllib.parse import urlparse, unquote + from urllib.parse import urlparse, unquote, parse_qsl except ImportError: - from urlparse import urlparse # type: ignore - from urllib2 import unquote # type: ignore + from urlparse import urlparse # type: ignore + from urllib2 import unquote # type: ignore try: from yarl import URL @@ -26,11 +26,21 @@ from azure.core.exceptions import ClientAuthenticationError from azure.core.pipeline.policies import SansIOHTTPPolicy -from . 
import sign_string +from ._common_conversion import ( + _sign_string, +) +from azure.table import ( + DEV_ACCOUNT_NAME, + DEV_ACCOUNT_SECONDARY_NAME +) -logger = logging.getLogger(__name__) +from ._error import ( + AzureSigningError, + _wrap_exception, +) +logger = logging.getLogger(__name__) # wraps a given exception with the desired exception type @@ -59,35 +69,36 @@ class AzureSigningError(ClientAuthenticationError): # pylint: disable=no-self-use class SharedKeyCredentialPolicy(SansIOHTTPPolicy): - def __init__(self, account_name, account_key): + def __init__(self, account_name, account_key, is_emulated=False): self.account_name = account_name self.account_key = account_key - super(SharedKeyCredentialPolicy, self).__init__() + self.is_emulated = is_emulated def _get_headers(self, request, headers_to_sign): - headers = dict((name.lower(), value) for name, value in request.http_request.headers.items() if value) + headers = dict((name.lower(), value) for name, value in request.headers.items() if value) if 'content-length' in headers and headers['content-length'] == '0': del headers['content-length'] return '\n'.join(headers.get(x, '') for x in headers_to_sign) + '\n' def _get_verb(self, request): - return request.http_request.method + '\n' + return request.method + '\n' def _get_canonicalized_resource(self, request): - uri_path = urlparse(request.http_request.url).path - try: - if isinstance(request.context.transport, AioHttpTransport) or \ - isinstance(getattr(request.context.transport, "_transport", None), AioHttpTransport): - uri_path = URL(uri_path) - return '/' + self.account_name + str(uri_path) - except TypeError: - pass + #uri_path = request.path.split('?')[0] + uri_path = urlparse(request.url).path + + # for emulator, use the DEV_ACCOUNT_NAME instead of DEV_ACCOUNT_SECONDARY_NAME + # as this is how the emulator works + if self.is_emulated and uri_path.find(DEV_ACCOUNT_SECONDARY_NAME) == 1: + # only replace the first instance + uri_path = uri_path.replace(DEV_ACCOUNT_SECONDARY_NAME, DEV_ACCOUNT_NAME, 1) + return '/' + self.account_name + uri_path def _get_canonicalized_headers(self, request): string_to_sign = '' x_ms_headers = [] - for name, value in request.http_request.headers.items(): + for name, value in request.headers.items(): if name.startswith('x-ms-'): x_ms_headers.append((name.lower(), value)) x_ms_headers.sort() @@ -96,41 +107,39 @@ def _get_canonicalized_headers(self, request): string_to_sign += ''.join([name, ':', value, '\n']) return string_to_sign - def _get_canonicalized_resource_query(self, request): - sorted_queries = [(name, value) for name, value in request.http_request.query.items()] - sorted_queries.sort() - - string_to_sign = '' - for name, value in sorted_queries: - if value is not None: - string_to_sign += '\n' + name.lower() + ':' + unquote(value) - - return string_to_sign - def _add_authorization_header(self, request, string_to_sign): try: - signature = sign_string(self.account_key, string_to_sign) + signature = _sign_string(self.account_key, string_to_sign) auth_string = 'SharedKey ' + self.account_name + ':' + signature - request.http_request.headers['Authorization'] = auth_string + request.headers['Authorization'] = auth_string except Exception as ex: # Wrap any error that occurred as signing error # Doing so will clarify/locate the source of problem raise _wrap_exception(ex, AzureSigningError) - def on_request(self, request): + def on_request(self, request): # type: (PipelineRequest) -> Union[None, Awaitable[None]] + 
self.sign_request(request.http_request) + + def sign_request(self, request): string_to_sign = \ self._get_verb(request) + \ self._get_headers( request, - [ - 'content-encoding', 'content-language', 'content-length', - 'content-md5', 'content-type', 'date', 'if-modified-since', - 'if-match', 'if-none-match', 'if-unmodified-since', 'byte_range' - ] + ['content-md5', 'content-type', 'x-ms-date'], ) + \ - self._get_canonicalized_headers(request) + \ self._get_canonicalized_resource(request) + \ self._get_canonicalized_resource_query(request) self._add_authorization_header(request, string_to_sign) - #logger.debug("String_to_sign=%s", string_to_sign) + logger.debug("String_to_sign=%s", string_to_sign) + + def _get_canonicalized_resource_query(self, request): + sorted_queries = [(name, value) for name, value in request.query.items()] + sorted_queries.sort() + + string_to_sign = '' + for name, value in sorted_queries: + if value is not None: + string_to_sign += '\n' + name.lower() + ':' + value + + return string_to_sign diff --git a/sdk/Table/azure/storage/tables/_shared/base_client.py b/sdk/Table/azure/azure_table/_shared/base_client.py similarity index 96% rename from sdk/Table/azure/storage/tables/_shared/base_client.py rename to sdk/Table/azure/azure_table/_shared/base_client.py index b81ced71e5ee..52b06bc485db 100644 --- a/sdk/Table/azure/storage/tables/_shared/base_client.py +++ b/sdk/Table/azure/azure_table/_shared/base_client.py @@ -50,11 +50,10 @@ StorageResponseHook, StorageLoggingPolicy, StorageHosts, - QueueMessagePolicy, ExponentialRetry, ) from .._version import VERSION -from .._generated.models import StorageErrorException +# from .._generated.models import StorageErrorException from .response_handlers import process_storage_error, PartialBatchErrorException @@ -63,6 +62,7 @@ "blob": {"primary": "BlobEndpoint", "secondary": "BlobSecondaryEndpoint"}, "queue": {"primary": "QueueEndpoint", "secondary": "QueueSecondaryEndpoint"}, "file": {"primary": "FileEndpoint", "secondary": "FileSecondaryEndpoint"}, + "table": {"primary": "TableEndpoint", "secondary": "TableSecondaryEndpoint"}, "dfs": {"primary": "BlobEndpoint", "secondary": "BlobEndpoint"}, } @@ -80,7 +80,7 @@ def __init__( self._hosts = kwargs.get("_hosts") self.scheme = parsed_url.scheme - if service not in ["blob", "queue", "file-share", "dfs"]: + if service not in ["blob", "queue", "file-share", "dfs", "table"]: raise ValueError("Invalid service: {}".format(service)) service_name = service.split('-')[0] account = parsed_url.netloc.split(".{}.core.".format(service_name)) @@ -230,19 +230,17 @@ def _create_pipeline(self, credential, **kwargs): if not config.transport: config.transport = RequestsTransport(**kwargs) policies = [ - QueueMessagePolicy(), config.headers_policy, config.proxy_policy, config.user_agent_policy, - StorageContentValidation(), - StorageRequestHook(**kwargs), + # StorageRequestHook(**kwargs), self._credential_policy, ContentDecodePolicy(response_encoding="utf-8"), RedirectPolicy(**kwargs), - StorageHosts(hosts=self._hosts, **kwargs), + # StorageHosts(hosts=self._hosts, **kwargs), config.retry_policy, config.logging_policy, - StorageResponseHook(**kwargs), + # StorageResponseHook(**kwargs), DistributedTracingPolicy(**kwargs), HttpLoggingPolicy(**kwargs) ] @@ -291,7 +289,7 @@ def _batch_send( raise error return iter(parts) return parts - except StorageErrorException as error: + except HttpResponseError as error: process_storage_error(error) class TransportWrapper(HttpTransport): @@ -328,7 +326,9 @@ def 
format_shared_key_credential(account, credential): raise ValueError("Shared key credential missing 'account_name") if "account_key" not in credential: raise ValueError("Shared key credential missing 'account_key") + print('SharedKey ', credential) return SharedKeyCredentialPolicy(**credential) + print(credential) return credential @@ -386,6 +386,7 @@ def create_configuration(**kwargs): config.logging_policy = StorageLoggingPolicy(**kwargs) config.proxy_policy = ProxyPolicy(**kwargs) +# all can be ignored # Storage settings config.max_single_put_size = kwargs.get("max_single_put_size", 64 * 1024 * 1024) config.copy_polling_interval = 15 diff --git a/sdk/Table/azure/storage/tables/_shared/base_client_async.py b/sdk/Table/azure/azure_table/_shared/base_client_async.py similarity index 100% rename from sdk/Table/azure/storage/tables/_shared/base_client_async.py rename to sdk/Table/azure/azure_table/_shared/base_client_async.py diff --git a/sdk/Table/azure/azure_table/_shared/constants.py b/sdk/Table/azure/azure_table/_shared/constants.py new file mode 100644 index 000000000000..7fb05b559850 --- /dev/null +++ b/sdk/Table/azure/azure_table/_shared/constants.py @@ -0,0 +1,26 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import sys +from .._generated.version import VERSION + + +X_MS_VERSION = VERSION + +# Socket timeout in seconds +CONNECTION_TIMEOUT = 20 +READ_TIMEOUT = 20 + +# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned) +# The socket timeout is now the maximum total duration to send all data. +if sys.version_info >= (3, 5): + # the timeout to connect is 20 seconds, and the read timeout is 2000 seconds + # the 2000 seconds was calculated with: 100MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed) + READ_TIMEOUT = 2000 + +STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default" + +SERVICE_HOST_BASE = 'core.windows.net' diff --git a/sdk/Table/azure/storage/tables/_shared/encryption.py b/sdk/Table/azure/azure_table/_shared/encryption.py similarity index 100% rename from sdk/Table/azure/storage/tables/_shared/encryption.py rename to sdk/Table/azure/azure_table/_shared/encryption.py diff --git a/sdk/Table/azure/storage/tables/_shared/models.py b/sdk/Table/azure/azure_table/_shared/models.py similarity index 98% rename from sdk/Table/azure/storage/tables/_shared/models.py rename to sdk/Table/azure/azure_table/_shared/models.py index 0da6b8d3d273..8fcc6815dbdd 100644 --- a/sdk/Table/azure/storage/tables/_shared/models.py +++ b/sdk/Table/azure/azure_table/_shared/models.py @@ -3,9 +3,20 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- - +import sys from enum import Enum +from pyparsing import unicode + +if sys.version_info < (3,): + from collections import Iterable + + _unicode_type = unicode +else: + from collections.abc import Iterable + + _unicode_type = str + def get_enum_value(value): if value is None or value in ["None", ""]: diff --git a/sdk/Table/azure/storage/tables/_shared/parser.py b/sdk/Table/azure/azure_table/_shared/parser.py similarity index 100% rename from sdk/Table/azure/storage/tables/_shared/parser.py rename to sdk/Table/azure/azure_table/_shared/parser.py diff --git a/sdk/Table/azure/storage/tables/_shared/policies.py b/sdk/Table/azure/azure_table/_shared/policies.py similarity index 98% rename from sdk/Table/azure/storage/tables/_shared/policies.py rename to sdk/Table/azure/azure_table/_shared/policies.py index b4a2f9eecbdf..6b86d0d52438 100644 --- a/sdk/Table/azure/storage/tables/_shared/policies.py +++ b/sdk/Table/azure/azure_table/_shared/policies.py @@ -106,17 +106,6 @@ def urljoin(base_url, stub_url): parsed = parsed._replace(path=parsed.path + '/' + stub_url) return parsed.geturl() - -class QueueMessagePolicy(SansIOHTTPPolicy): - - def on_request(self, request): - message_id = request.context.options.pop('queue_message_id', None) - if message_id: - request.http_request.url = urljoin( - request.http_request.url, - message_id) - - class StorageHeadersPolicy(HeadersPolicy): request_id_header_name = 'x-ms-client-request-id' @@ -125,7 +114,7 @@ def on_request(self, request): super(StorageHeadersPolicy, self).on_request(request) current_time = format_date_time(time()) request.http_request.headers['x-ms-date'] = current_time - + request.http_request.headers['Date'] = current_time custom_id = request.context.options.pop('client_request_id', None) request.http_request.headers['x-ms-client-request-id'] = custom_id or str(uuid.uuid1()) @@ -188,6 +177,7 @@ def on_request(self, request): # type: (PipelineRequest, Any) -> None http_request = request.http_request options = request.context.options + print(request.http_request.headers) if options.pop("logging_enable", self.enable_http_logger): request.context["logging_enable"] = True if not _LOGGER.isEnabledFor(logging.DEBUG): diff --git a/sdk/Table/azure/storage/tables/_shared/policies_async.py b/sdk/Table/azure/azure_table/_shared/policies_async.py similarity index 100% rename from sdk/Table/azure/storage/tables/_shared/policies_async.py rename to sdk/Table/azure/azure_table/_shared/policies_async.py diff --git a/sdk/Table/azure/storage/tables/_shared/request_handlers.py b/sdk/Table/azure/azure_table/_shared/request_handlers.py similarity index 100% rename from sdk/Table/azure/storage/tables/_shared/request_handlers.py rename to sdk/Table/azure/azure_table/_shared/request_handlers.py diff --git a/sdk/Table/azure/storage/tables/_shared/response_handlers.py b/sdk/Table/azure/azure_table/_shared/response_handlers.py similarity index 100% rename from sdk/Table/azure/storage/tables/_shared/response_handlers.py rename to sdk/Table/azure/azure_table/_shared/response_handlers.py diff --git a/sdk/Table/azure/storage/tables/_shared/shared_access_signature.py b/sdk/Table/azure/azure_table/_shared/shared_access_signature.py similarity index 75% rename from sdk/Table/azure/storage/tables/_shared/shared_access_signature.py rename to sdk/Table/azure/azure_table/_shared/shared_access_signature.py index 367c6554ef89..f8344b0b657c 100644 --- 
a/sdk/Table/azure/storage/tables/_shared/shared_access_signature.py
+++ b/sdk/Table/azure/azure_table/_shared/shared_access_signature.py
@@ -11,6 +11,68 @@ from . import sign_string, url_quote
+def generate_account_sas(
+        account_name,  # type: str
+        account_key,  # type: str
+        resource_types,  # type: Union[ResourceTypes, str]
+        permission,  # type: Union[AccountSasPermissions, str]
+        expiry,  # type: Optional[Union[datetime, str]]
+        **kwargs  # type: Any
+    ):  # type: (...) -> str
+    """Generates a shared access signature for the Table service.
+
+    Use the returned signature as the credential parameter of any
+    TableServiceClient constructed for this account.
+
+    :param str account_name:
+        The storage account name used to generate the shared access signature.
+    :param str account_key:
+        The access key to generate the shared access signature.
+    :param resource_types:
+        Specifies the resource types that are accessible with the account SAS.
+    :type resource_types: str or ~azure.azure_table._shared.models.ResourceTypes
+    :param permission:
+        The permissions associated with the shared access signature. The
+        user is restricted to operations allowed by the permissions.
+        Required unless an id is given referencing a stored access policy
+        which contains this field. This field must be omitted if it has been
+        specified in an associated stored access policy.
+    :type permission: str or ~azure.azure_table._shared.models.AccountSasPermissions
+    :param expiry:
+        The time at which the shared access signature becomes invalid.
+        Required unless an id is given referencing a stored access policy
+        which contains this field. This field must be omitted if it has
+        been specified in an associated stored access policy. Azure will always
+        convert values to UTC. If a date is passed in without timezone info, it
+        is assumed to be UTC.
+    :type expiry: ~datetime.datetime or str
+    :keyword start:
+        The time at which the shared access signature becomes valid. If
+        omitted, start time for this call is assumed to be the time when the
+        storage service receives the request. Azure will always convert values
+        to UTC. If a date is passed in without timezone info, it is assumed to
+        be UTC.
+    :paramtype start: ~datetime.datetime or str
+    :keyword str ip:
+        Specifies an IP address or a range of IP addresses from which to accept requests.
+        If the IP address from which the request originates does not match the IP address
+        or address range specified on the SAS token, the request is not authenticated.
+        For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS
+        restricts the request to those IP addresses.
+    :keyword str protocol:
+        Specifies the protocol permitted for a request made. The default value is https.
+    :return: A Shared Access Signature (sas) token.
+    :rtype: str
+    """
+    return generate_account_sas(
+        account_name=account_name,
+        account_key=account_key,
+        resource_types=resource_types,
+        permission=permission,
+        expiry=expiry,
+        **kwargs
+    )
+
 class QueryStringConstants(object):
     SIGNED_SIGNATURE = 'sig'
     SIGNED_PERMISSION = 'sp'
diff --git a/sdk/Table/azure/storage/tables/_shared/uploads.py b/sdk/Table/azure/azure_table/_shared/uploads.py
similarity index 100%
rename from sdk/Table/azure/storage/tables/_shared/uploads.py
rename to sdk/Table/azure/azure_table/_shared/uploads.py
diff --git a/sdk/Table/azure/storage/tables/_shared/uploads_async.py b/sdk/Table/azure/azure_table/_shared/uploads_async.py
similarity index 100%
rename from sdk/Table/azure/storage/tables/_shared/uploads_async.py
rename to sdk/Table/azure/azure_table/_shared/uploads_async.py
diff --git a/sdk/Table/azure/azure_table/_table_service_client.py b/sdk/Table/azure/azure_table/_table_service_client.py
new file mode 100644
index 000000000000..aa64dc0b5033
--- /dev/null
+++ b/sdk/Table/azure/azure_table/_table_service_client.py
@@ -0,0 +1,112 @@
+from urllib.parse import urlparse
+
+from azure.azure_table._generated import AzureTable
+from azure.azure_table._generated.models import TableProperties
+from azure.azure_table._shared.base_client import StorageAccountHostsMixin, parse_connection_str, parse_query
+from azure.azure_table._version import VERSION
+
+
+class TableServiceClient(StorageAccountHostsMixin):
+    def __init__(
+            self, account_url,  # type: str
+            credential=None,  # type: Optional[Any]
+            **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        try:
+            if not account_url.lower().startswith('http'):
+                account_url = "https://" + account_url
+        except AttributeError:
+            raise ValueError("Account URL must be a string.")
+        parsed_url = urlparse(account_url.rstrip('/'))
+        if not parsed_url.netloc:
+            raise ValueError("Invalid URL: {}".format(account_url))
+
+        _, sas_token = parse_query(parsed_url.query)
+        if not sas_token and not credential:
+            raise ValueError("You need to provide either a SAS token or an account shared key to authenticate.")
+        self._query_str, credential = self._format_query_string(sas_token, credential)
+        super(TableServiceClient, self).__init__(parsed_url, service='table', credential=credential, **kwargs)
+        self._client = AzureTable(self.url, pipeline=self._pipeline)
+        self._client._config.version = kwargs.get('api_version', VERSION)  # pylint: disable=protected-access
+
+    def _format_url(self, hostname):
+        """Format the endpoint URL according to the current location
+        mode hostname.
+        """
+        return "{}://{}/{}".format(self.scheme, hostname, self._query_str)
+
+    @classmethod
+    def from_connection_string(
+            cls, conn_str,  # type: str
+            credential=None,  # type: Optional[Any]
+            **kwargs  # type: Any
+    ):  # type: (...) -> TableServiceClient
+        """Create TableServiceClient from a Connection String.
+
+        :param str conn_str:
+            A connection string to an Azure Storage account.
+        :param credential:
+            The credentials with which to authenticate. This is optional if the
+            account URL already has a SAS token, or the connection string already has shared
+            access key values. The value can be a SAS token string, an account shared access
+            key, or an instance of a TokenCredentials class from azure.identity.
+        :returns: A Table service client.
+        :rtype: ~azure.azure_table.TableServiceClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/queue_samples_authentication.py
+                :start-after: [START auth_from_connection_string]
+                :end-before: [END auth_from_connection_string]
+                :language: python
+                :dedent: 8
+                :caption: Creating the TableServiceClient with a connection string.
+        """
+        account_url, secondary, credential = parse_connection_str(
+            conn_str, credential, 'table')
+        if 'secondary_hostname' not in kwargs:
+            kwargs['secondary_hostname'] = secondary
+        return cls(account_url, credential=credential, **kwargs)
+
+    def create_table(self, table_name):
+        table_properties = TableProperties(table_name=table_name)
+        response = self._client.table.create(table_properties)
+        return response
+
+    def delete_table(self, table_name):
+        response = self._client.table.delete(table=table_name)
+        return response
+
+    def query_table(self, table_name):
+        # somehow use self._query_string to query things
+        response = self._client.table.query(table_name=table_name)
+        return response
+
+    def query_table_entities(self, table_name):
+        return self._client.table.query_entities(table_name=table_name)
+
+    def query_table_entities_with_partition_and_row_key(self, table_name):
+        return self._client.table.query_entities_with_partition_and_row_key(table_name=table_name)
+
+    def insert_entity(self):
+        return self._client.table.insert_entity()
+
+    def delete_entity(self):
+        return self._client.table.delete_entity()
+
+    def merge_entity(self):
+        return self._client.table.merge_entity()
+
+    def update_entity(self):
+        return self._client.table.update_entity()
+
+    def get_access_policy(self):
+        return self._client.table.get_access_policy()
+
+    def set_access_policy(self):
+        return self._client.table.set_access_policy()
+
+    def batch(self, *reqs):
+        response = self.batch(*reqs)
+        return response
diff --git a/sdk/Table/azure/azure_table/_version.py b/sdk/Table/azure/azure_table/_version.py
new file mode 100644
index 000000000000..8528164c46da
--- /dev/null
+++ b/sdk/Table/azure/azure_table/_version.py
@@ -0,0 +1 @@
+VERSION = "2019-07-07"
\ No newline at end of file
diff --git a/sdk/Table/azure/storage/__init__.py b/sdk/Table/azure/storage/__init__.py
deleted file mode 100644
index 5960c353a898..000000000000
--- a/sdk/Table/azure/storage/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore
\ No newline at end of file
diff --git a/sdk/Table/azure/storage/tables/_shared/_constants.py b/sdk/Table/azure/storage/tables/_shared/_constants.py
deleted file mode 100644
index 40c5c98dd351..000000000000
--- a/sdk/Table/azure/storage/tables/_shared/_constants.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) Microsoft. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# --------------------------------------------------------------------------
-import platform
-
-__author__ = 'Microsoft Corp. '
-__version__ = '1.0.6'
-
-# x-ms-version for storage service.
-X_MS_VERSION = '2017-04-17' - -# UserAgent string sample: 'Azure-CosmosDB/0.32.0 (Python CPython 3.4.2; Windows 8)' -USER_AGENT_STRING = 'Azure-CosmosDB/{} (Python {} {}; {} {})'.format(__version__, platform.python_implementation(), - platform.python_version(), platform.system(), - platform.release()) -DEV_TABLE_HOST = '127.0.0.1:10002' \ No newline at end of file diff --git a/sdk/Table/azure/storage/tables/_version.py b/sdk/Table/azure/storage/tables/_version.py deleted file mode 100644 index 8658b149b57a..000000000000 --- a/sdk/Table/azure/storage/tables/_version.py +++ /dev/null @@ -1 +0,0 @@ -VERSION = "12.1.2" \ No newline at end of file diff --git a/sdk/Table/samples/create_batch.py b/sdk/Table/samples/create_batch.py new file mode 100644 index 000000000000..aee39bab94f3 --- /dev/null +++ b/sdk/Table/samples/create_batch.py @@ -0,0 +1,5 @@ +class CreateBatch(object): + def build_batch_operations(self): + from azure.storage.tables import TableServiceClient + table_client = TableServiceClient(account_url=self.account_url,credential=self.credential) + batch_operations = table_client.batch(*self.reqs) diff --git a/sdk/Table/samples/create_query.py b/sdk/Table/samples/create_query.py new file mode 100644 index 000000000000..695e5a8adece --- /dev/null +++ b/sdk/Table/samples/create_query.py @@ -0,0 +1,13 @@ +class CreateODataQuery(object): + + def creating_odata_query_entities(self): + + from azure.storage.tables import TableServiceClient + from azure.storage.tables._generated.operations._service_operations import HttpResponseError + + table_client = TableServiceClient(account_url=self.account_url, credential=self.account_key) + try: + queried_table = table_client.query_table_entities(table_name=self.table_name,partition_key=self.partition_key,row_key=self.row_key) + print(queried_table.table_name) + except HttpResponseError as e: + print(e.message) diff --git a/sdk/Table/samples/creation_deletion_of_table.py b/sdk/Table/samples/creation_deletion_of_table.py new file mode 100644 index 000000000000..780ba1cedd62 --- /dev/null +++ b/sdk/Table/samples/creation_deletion_of_table.py @@ -0,0 +1,50 @@ +import os + + +class CreateDeleteTable(object): + connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING") + table_name = "NAME" + account_url = os.getenv("AZURE_STORAGE_ACCOUNT_URL") + account_name = os.getenv("AZURE_STORAGE_ACCOUNT_NAME") + access_key = os.getenv("AZURE_STORAGE_ACCESS_KEY") + + active_directory_application_id = os.getenv("ACTIVE_DIRECTORY_APPLICATION_ID") + active_directory_application_secret = os.getenv("ACTIVE_DIRECTORY_APPLICATION_SECRET") + active_directory_tenant_id = os.getenv("ACTIVE_DIRECTORY_TENANT_ID") + + # functions correctly + def create_table(self): + from azure.storage.tables import TableServiceClient + from azure.storage.tables._generated.operations._service_operations import HttpResponseError + from azure.storage.tables._generated.operations._service_operations import ResourceExistsError + table_client = TableServiceClient(account_url=self.account_url, credential=self.access_key) + + # add in existing table error handling + try: + table_created = table_client.create_table(table_name=self.table_name) + print(table_created.table_name) + return table_created.table_name + except HttpResponseError and ResourceExistsError: + raise ResourceExistsError + print(HttpResponseError.response) + + def delete_table(self): + from azure.storage.tables import TableServiceClient + from azure.storage.tables._generated.operations._service_operations import HttpResponseError 
+ from azure.storage.tables._generated.operations._service_operations import ResourceNotFoundError + table_client = TableServiceClient(account_url=self.account_url, credential=self.access_key) + + #check table is there to delete + try: + table_deleted = table_client.delete_table(table_name=self.table_name) + print(table_deleted) + return table_deleted + except HttpResponseError and ResourceNotFoundError: + raise ResourceNotFoundError + print (HttpResponseError.response) + + +if __name__ == '__main__': + sample = CreateDeleteTable() + sample.create_table() + # sample.delete_table() diff --git a/sdk/Table/samples/inserting_deleting_entities.py b/sdk/Table/samples/inserting_deleting_entities.py new file mode 100644 index 000000000000..e8257131a881 --- /dev/null +++ b/sdk/Table/samples/inserting_deleting_entities.py @@ -0,0 +1,61 @@ +class InsertDeleteEntity(object): + + def insert_entity(self): + """Insert entity in a table. + + :param + + table, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + response_preference=None, # type: Optional[Union[str, "models.ResponseFormat"]] + table_entity_properties=None, # type: Optional[Dict[str, object]] + query_options=None, # type: Optional["models.QueryOptions"] + **kwargs # type: Any + + :return: dict mapping str to object, or the result of cls(response) + :rtype: dict[str, object] or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + + + from azure.storage.tables import TableServiceClient + from azure.storage.tables._generated.operations._service_operations import HttpResponseError + from azure.storage.tables._generated.operations._service_operations import ResourceExistsError + table_client = TableServiceClient(account_url=self.account_url,credential=self.credential) + try: + inserted_entity = table_client.insert_entity(table_name=self.table_name) + print(inserted_entity) + except HttpResponseError and ResourceExistsError: + raise ResourceExistsError + print(HttpResponseError.response) + + def delete_entity(self): + """Deletes the specified entity in a table. 
+ + :param + + table, # type: str + partition_key, # type: str + row_key, # type: str + if_match, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + query_options=None, # type: Optional["models.QueryOptions"] + **kwargs # type: Any + + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + + from azure.storage.tables import TableServiceClient + from azure.storage.tables._generated.operations._service_operations import HttpResponseError + from azure.storage.tables._generated.operations._service_operations import ResourceNotFoundError + table_client = TableServiceClient(account_url=self.account_url, credential=self.credential) + try: + deleted_entity = table_client.delete_entity(table_name=self.table_name) + print(deleted_entity) + except HttpResponseError and ResourceNotFoundError: + raise ResourceNotFoundError + print(HttpResponseError.response) \ No newline at end of file diff --git a/sdk/Table/samples/querying_table.py b/sdk/Table/samples/querying_table.py new file mode 100644 index 000000000000..fd25ddd13761 --- /dev/null +++ b/sdk/Table/samples/querying_table.py @@ -0,0 +1,12 @@ +class QueryTable(object): + + def queryTable(self): + from azure.storage.tables import TableServiceClient + table_client = TableServiceClient(account_url=self.account_url, credential=self.credential) + queried_table = table_client.query_table(table_name=self.table_name) + print(queried_table.table_name) + + +if __name__ == '__main__': + sample = QueryTable() + sample.queryTable() diff --git a/sdk/Table/samples/update_entity.py b/sdk/Table/samples/update_entity.py new file mode 100644 index 000000000000..d7e59fd9b75f --- /dev/null +++ b/sdk/Table/samples/update_entity.py @@ -0,0 +1,29 @@ +class UpdateEntity(object): + def update_entity(self): + """Update entity in a table. + + :param + + table, # type: str + partition_key, # type: str + row_key, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + if_match=None, # type: Optional[str] + table_entity_properties=None, # type: Optional[Dict[str, object]] + query_options=None, # type: Optional["models.QueryOptions"] + **kwargs # type: Any + + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + from azure.storage.tables import TableServiceClient + from azure.storage.tables._generated.operations._service_operations import HttpResponseError + table_client = TableServiceClient(account_url=self.account_url, credential=self.credential) + try: + updated_entity = table_client.update_entity(table_name=self.table_name, partition_key=self.partition_key + , row_key=self.row_key) + except HttpResponseError as e: + print(e.response) + return updated_entity diff --git a/sdk/Table/tests/_shared/testcase.py b/sdk/Table/tests/_shared/testcase.py index f56a50a220f7..bff1dea6c766 100644 --- a/sdk/Table/tests/_shared/testcase.py +++ b/sdk/Table/tests/_shared/testcase.py @@ -5,23 +5,23 @@ # license information. 
# -------------------------------------------------------------------------- from __future__ import division -from contextlib import contextmanager -import copy -import inspect import os import os.path import time from datetime import datetime, timedelta +from azure.azure_table import generate_account_sas +from azure.azure_table._shared.models import ResourceTypes, AccountSasPermissions + +from pyparsing import basestring + try: import unittest.mock as mock except ImportError: import mock import zlib -import math import sys -import string import random import re import logging @@ -32,7 +32,8 @@ StorageAccountPreparer, FakeResource, ) -from azure_devtools.scenario_tests import RecordingProcessor, AzureTestError, create_random_name +from azure_devtools.scenario_tests import RecordingProcessor, AzureTestError + try: from cStringIO import StringIO # Python 2 except ImportError: @@ -153,7 +154,7 @@ def account_url(self, account, endpoint_type): if endpoint_type == "cosmos": return "https://{}.table.cosmos.azure.com".format(account.name) else: - raise ValueError("Unknown storage type {}".format(storage_type)) + raise ValueError("Unknown storage type {}".format(endpoint_type)) except AttributeError: # Didn't find "primary_endpoints" return 'https://{}.{}.core.windows.net'.format(account, endpoint_type) diff --git a/sdk/Table/tests/recordings/test_table.test_create_table.yaml b/sdk/Table/tests/recordings/test_table.test_create_table.yaml index 2fa33e6a7bcb..5475ddb45d39 100644 --- a/sdk/Table/tests/recordings/test_table.test_create_table.yaml +++ b/sdk/Table/tests/recordings/test_table.test_create_table.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"TableName": "Tables"}' + body: '{"TableName": "myTable"}' headers: Accept: - application/json;odata=minimalmetadata @@ -9,35 +9,42 @@ interactions: Connection: - keep-alive Content-Length: - - '23' + - '24' Content-Type: - application/json;odata=nometadata DataServiceVersion: - '3.0' + Date: + - Fri, 29 May 2020 14:04:04 GMT User-Agent: - - azsdk-python-azuretable/unknown Python/3.8.3 (Windows-10-10.0.18362-SP0) + - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.19041-SP0) + x-ms-date: + - Fri, 29 May 2020 14:04:04 GMT x-ms-version: - - '2019-02-02' + - '2019-07-07' method: POST uri: https://storagename.table.core.windows.net/Tables response: body: - string: '{"odata.error":{"code":"ResourceNotFound","message":{"lang":"en-US","value":"The - specified resource does not exist.\nRequestId:fa2fea1a-f002-0015-6b4b-3048b6000000\nTime:2020-05-22T15:14:43.9458401Z"}}}' + string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"myTable"}' headers: - content-length: - - '202' + cache-control: + - no-cache content-type: - - application/json + - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 date: - - Fri, 22 May 2020 15:14:43 GMT + - Fri, 29 May 2020 14:04:02 GMT + location: + - https://storagename.table.core.windows.net/Tables('myTable') server: - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 - x-ms-error-code: - - ResourceNotFound + transfer-encoding: + - chunked + x-content-type-options: + - nosniff x-ms-version: - - '2019-02-02' + - '2019-07-07' status: - code: 404 - message: The specified resource does not exist. 
+ code: 201 + message: Created version: 1 diff --git a/sdk/Table/tests/recordings/test_table.test_delete_table_with_existing_table.yaml b/sdk/Table/tests/recordings/test_table.test_delete_table_with_existing_table.yaml new file mode 100644 index 000000000000..bf2581ea07a4 --- /dev/null +++ b/sdk/Table/tests/recordings/test_table.test_delete_table_with_existing_table.yaml @@ -0,0 +1,50 @@ +interactions: +- request: + body: '{"TableName": "pytablesyncded1139b"}' + headers: + Accept: + - application/json;odata=minimalmetadata + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '36' + Content-Type: + - application/json;odata=nometadata + DataServiceVersion: + - '3.0' + Date: + - Thu, 28 May 2020 14:12:58 GMT + User-Agent: + - azsdk-python-storage-table/2019-07-07 Python/3.8.3 (Windows-10-10.0.18362-SP0) + x-ms-date: + - Thu, 28 May 2020 14:12:58 GMT + x-ms-version: + - '2019-07-07' + method: POST + uri: https://storagename.table.core.windows.net/Tables + response: + body: + string: '{"odata.metadata":"https://storagename.table.core.windows.net/$metadata#Tables/@Element","TableName":"pytablesyncded1139b"}' + headers: + cache-control: + - no-cache + content-type: + - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 + date: + - Thu, 28 May 2020 14:12:56 GMT + location: + - https://storagename.table.core.windows.net/Tables('pytablesyncded1139b') + server: + - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 + transfer-encoding: + - chunked + x-content-type-options: + - nosniff + x-ms-version: + - '2019-07-07' + status: + code: 201 + message: Created +version: 1 diff --git a/sdk/Table/tests/test_table.py b/sdk/Table/tests/test_table.py index 5982938a28b2..c541355edb05 100644 --- a/sdk/Table/tests/test_table.py +++ b/sdk/Table/tests/test_table.py @@ -5,27 +5,31 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- - -from collections import namedtuple -import unittest import pytest import sys import locale import os -from dateutil.tz import tzutc +from azure.azure_table import TableServiceClient +from time import time +from wsgiref.handlers import format_date_time from datetime import ( datetime, timedelta, - date, ) -from devtools_testutils import ResourceGroupPreparer, StorageAccountPreparer -from azure.mgmt.storage.models import Endpoints +from azure.core.pipeline import Pipeline +from azure.core.pipeline.policies import ( + HeadersPolicy, + ContentDecodePolicy, +) + + +from _shared.testcase import TableTestCase, GlobalStorageAccountPreparer +from azure.azure_table._shared.authentication import SharedKeyCredentialPolicy from azure.core.pipeline.transport import RequestsTransport from azure.core.exceptions import ( HttpResponseError, ResourceNotFoundError, - ResourceExistsError, - ClientAuthenticationError) + ResourceExistsError) # from azure.tables import ( # TableServiceClient, @@ -38,20 +42,24 @@ # generate_table_sas # ) -from _shared.testcase import GlobalStorageAccountPreparer, TableTestCase, LogCaptured - -from azure.storage.tables._generated import ( - AzureTable -) +# ------------------------------------------------------------------------------ -from azure.storage.tables._generated.models._models_py3 import TableProperties -# ------------------------------------------------------------------------------ TEST_TABLE_PREFIX = 'pytablesync' # ------------------------------------------------------------------------------ +def _create_pipeline(account, credential, **kwargs): + # type: (Any, **Any) -> Tuple[Configuration, Pipeline] + credential_policy = SharedKeyCredentialPolicy(account_name=account.name, account_key=credential) + transport = RequestsTransport(**kwargs) + policies = [ + HeadersPolicy(), + credential_policy, + ContentDecodePolicy(response_encoding="utf-8")] + return Pipeline(transport, policies=policies) + class StorageTableTest(TableTestCase): @@ -77,359 +85,384 @@ def _delete_table(self, table): pass # --Test cases for tables -------------------------------------------------- - # @pytest.mark.skip("pending") + # @pytest.mark.skip("pending") @GlobalStorageAccountPreparer() def test_create_table(self, resource_group, location, storage_account, storage_account_key): - table_client = AzureTable(self.account_url(storage_account, "table")) - table_name = "Tables" - table_properties = TableProperties(table_name=table_name) - response = table_client.table.create(table_properties) - self.assertEqual(response.table_name,table_name) - - - # # Arrange - # ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key) - # table_name = self._get_table_reference() - # table_client = ts.get_table_client(table_name) - # - # # Act - # created = table_client.create_table() - # - # # Assert - # self.assertTrue(created) - # existing = list(ts.query_tables("TableName eq '{}'".format(table_name))) - # self.assertEqual(existing, [table_name]) - # ts.delete_table(table_name) - - @pytest.mark.skip("pending") - @GlobalStorageAccountPreparer() - def test_create_table_fail_on_exist(self, resource_group, location, storage_account, storage_account_key): - # Arrange + current_time = format_date_time(time()) + # authorization = 'foo' + # headers = {} + # headers['x-ms-date'] = current_time + # headers['Authorization'] = authorization + # headers['Method'] = 'GET' + # headers['ContentType'] = 
'application/json'
+        # headers['Accept-Charset'] = 'utf-8'
+        # pipeline = _create_pipeline(storage_account, storage_account_key)
+        # table_client = AzureTable(self.account_url(storage_account, "table"), pipeline=pipeline)
+        table_name = "myTable"
+        # table_properties = TableProperties(table_name=table_name)
+        # response = table_client.table.create(table_properties, headers=headers)
+        # self.assertEqual(response.table_name, table_name)
+
+        # # Arrange
         ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
-        table_name = self._get_table_reference()
-        table_client = ts.get_table_client(table_name)
+        response = ts.create_table(table_name)
+        assert response.table_name == table_name
+        # table_name = self._get_table_reference()
+        # table_client = ts.get_table_client(table_name)
+        #
+        # # Act
+        # created = table_client.create_table()
+        #
+        # # Assert
+        # self.assertTrue(created)
+        # existing = list(ts.query_tables("TableName eq '{}'".format(table_name)))
+        # self.assertEqual(existing, [table_name])
+        # ts.delete_table(table_name)
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_create_table_fail_on_exist(self, resource_group, location, storage_account, storage_account_key):
+    # Arrange
+    ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+    table_name = self._get_table_reference()
+    table_client = ts.get_table_client(table_name)
+
+    # Act
+    created = table_client.create_table()
+    with self.assertRaises(ResourceExistsError):
+        table_client.create_table()
+
+    # Assert
+    self.assertTrue(created)
+    existing = list(ts.query_tables("TableName eq '{}'".format(table_name)))
+    self.assertEqual(existing, [table_name])
+    ts.delete_table(table_name)
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_list_tables(self, resource_group, location, storage_account, storage_account_key):
+    # Arrange
+    ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+    table = self._create_table(ts)
+
+    # Act
+    tables = list(ts.list_tables())
+
+    # Assert
+    self.assertIsNotNone(tables)
+    self.assertGreaterEqual(len(tables), 1)
+    self.assertIsNotNone(tables[0])
+    self.assertNamedItemInContainer(tables, table.table_name)
+    table.delete_table()
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_list_tables_with_filter(self, resource_group, location, storage_account, storage_account_key):
+    # Arrange
+    ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+    table = self._create_table(ts)
+
+    # Act
+    name_filter = "TableName eq '{}'".format(table.table_name)
+    tables = list(ts.query_tables(name_filter))
+
+    # Assert
+    self.assertIsNotNone(tables)
+    self.assertEqual(len(tables), 1)
+    self.assertEqual(tables, [table.table_name])
+    table.delete_table()
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_list_tables_with_num_results(self, resource_group, location, storage_account, storage_account_key):
+    # Arrange
+    ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+    for i in range(0, 4):
+        self._create_table(ts)
+
+    # Act
+    big_page = list(next(ts.list_tables().by_page()))
+    small_page = list(next(ts.list_tables(results_per_page=3).by_page()))
+
+    # Assert
+    self.assertEqual(len(small_page), 3)
+    self.assertGreaterEqual(len(big_page), 4)
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_list_tables_with_marker(self, resource_group, location, storage_account, storage_account_key):
+    # Arrange
+    ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+    prefix = 'listtable'
+    table_names = []
+    for i in range(0, 4):
+        table_names.append(self._create_table(ts, prefix + str(i)).table_name)
+
+    table_names.sort()
+
+    # Act
+    generator1 = ts.list_tables(results_per_page=2).by_page()
+    next(generator1)
+    generator2 = ts.list_tables(results_per_page=2).by_page(continuation_token=generator1.continuation_token)
+    next(generator2)
+
+    tables1 = generator1._current_page
+    tables2 = generator2._current_page
+
+    # Assert
+    self.assertEqual(len(tables1), 2)
+    self.assertEqual(len(tables2), 2)
+    self.assertNotEqual(tables1, tables2)
-        # Act
-        created = table_client.create_table()
-        with self.assertRaises(ResourceExistsError):
-            table_client.create_table()
-
-        # Assert
-        self.assertTrue(created)
-        existing = list(ts.query_tables("TableName eq '{}'".format(table_name)))
-        self.assertEqual(existing, [table_name])
-        ts.delete_table(table_name)
     @pytest.mark.skip("pending")
     @GlobalStorageAccountPreparer()
-    def test_list_tables(self, resource_group, location, storage_account, storage_account_key):
-        # Arrange
+    def test_delete_table_with_existing_table(self, resource_group, location, storage_account, storage_account_key):
+        # Arrange
         ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
         table = self._create_table(ts)
-        # Act
-        tables = list(ts.list_tables())
-
-        # Assert
-        self.assertIsNotNone(tables)
-        self.assertGreaterEqual(len(tables), 1)
-        self.assertIsNotNone(tables[0])
-        self.assertNamedItemInContainer(tables, table.table_name)
-        table.delete_table()
+        # Act
+        deleted = table.delete_table()
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_list_tables_with_filter(self, resource_group, location, storage_account, storage_account_key):
-        # Arrange
-        ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
-        table = self._create_table(ts)
+        # Assert
+        self.assertIsNone(deleted)
+        #existing = list(ts.query_tables("TableName eq '{}'".format(table.table_name)))
+        #self.assertEqual(existing, [])
-        # Act
-        name_filter = "TableName eq '{}'".format(table.table_name)
-        tables = list(ts.query_tables(name_filter))
-        # Assert
-        self.assertIsNotNone(tables)
-        self.assertEqual(len(tables), 1)
-        self.assertEqual(tables, [table.table_name])
-        table.delete_table()
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_delete_table_with_non_existing_table_fail_not_exist(self, resource_group, location, storage_account,
+                                                             storage_account_key):
+    # Arrange
+    ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+    table_name = self._get_table_reference()
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_list_tables_with_num_results(self, resource_group, location, storage_account, storage_account_key):
-        # Arrange
-        ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
-        for i in range(0, 4):
-            self._create_table(ts)
+    # Act
+    with self.assertRaises(ResourceNotFoundError):
+        ts.delete_table(table_name)
+    # Assert
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_unicode_create_table_unicode_name(self, resource_group, location, storage_account, storage_account_key):
+    # Arrange
+    url = self.account_url(storage_account, "table")
+    if 'cosmos' in url:
+        pytest.skip("Cosmos URLs support unicode table names")
+    ts = TableServiceClient(url, storage_account_key)
+    table_name = u'啊齄丂狛狜'
+
+    # Act
+    with self.assertRaises(HttpResponseError):
+        # not supported - table name must be alphanumeric, lowercase
+        ts.create_table(table_name)
+
+    # Assert
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_get_table_acl(self, resource_group, location, storage_account, storage_account_key):
+    # Arrange
+    url = self.account_url(storage_account, "table")
+    if 'cosmos' in url:
+        pytest.skip("Cosmos endpoint does not support this")
+    ts = TableServiceClient(url, storage_account_key)
+    table = self._create_table(ts)
+    try:
         # Act
-        big_page = list(next(ts.list_tables().by_page()))
-        small_page = list(next(ts.list_tables(results_per_page=3).by_page()))
+        acl = table.get_table_access_policy()
         # Assert
-        self.assertEqual(len(small_page), 3)
-        self.assertGreaterEqual(len(big_page), 4)
-
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_list_tables_with_marker(self, resource_group, location, storage_account, storage_account_key):
-        # Arrange
-        ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
-        prefix = 'listtable'
-        table_names = []
-        for i in range(0, 4):
-            table_names.append(self._create_table(ts, prefix + str(i)).table_name)
-
-        table_names.sort()
-
+        self.assertIsNotNone(acl)
+        self.assertEqual(len(acl), 0)
+    finally:
+        self._delete_table(table)
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_set_table_acl_with_empty_signed_identifiers(self, resource_group, location, storage_account,
+                                                     storage_account_key):
+    # Arrange
+    url = self.account_url(storage_account, "table")
+    if 'cosmos' in url:
+        pytest.skip("Cosmos endpoint does not support this")
+    ts = TableServiceClient(url, storage_account_key)
+    table = self._create_table(ts)
+    try:
         # Act
-        generator1 = ts.list_tables(results_per_page=2).by_page()
-        next(generator1)
-        generator2 = ts.list_tables(results_per_page=2).by_page(continuation_token=generator1.continuation_token)
-        next(generator2)
-
-        tables1 = generator1._current_page
-        tables2 = generator2._current_page
+        table.set_table_access_policy(signed_identifiers={})
         # Assert
-        self.assertEqual(len(tables1), 2)
-        self.assertEqual(len(tables2), 2)
-        self.assertNotEqual(tables1, tables2)
-
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_delete_table_with_existing_table(self, resource_group, location, storage_account, storage_account_key):
-        # Arrange
-        ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
-        table = self._create_table(ts)
-
+        acl = table.get_table_access_policy()
+        self.assertIsNotNone(acl)
+        self.assertEqual(len(acl), 0)
+    finally:
+        self._delete_table(table)
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_set_table_acl_with_empty_signed_identifier(self, resource_group, location, storage_account,
+                                                    storage_account_key):
+    # Arrange
+    url = self.account_url(storage_account, "table")
+    if 'cosmos' in url:
+        pytest.skip("Cosmos endpoint does not support this")
+    ts = TableServiceClient(url, storage_account_key)
+    table = self._create_table(ts)
+    try:
         # Act
-        deleted = table.delete_table()
+        table.set_table_access_policy({'empty': None})
         # Assert
-        self.assertIsNone(deleted)
-        existing = list(ts.query_tables("TableName eq '{}'".format(table.table_name)))
-        self.assertEqual(existing, [])
-
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_delete_table_with_non_existing_table_fail_not_exist(self, resource_group, location, storage_account,
-                                                                 storage_account_key):
-        # Arrange
-        ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
-        table_name = self._get_table_reference()
+        acl = table.get_table_access_policy()
+        self.assertIsNotNone(acl)
+        self.assertEqual(len(acl), 1)
+        self.assertIsNotNone(acl['empty'])
+        self.assertIsNone(acl['empty'].permission)
+        self.assertIsNone(acl['empty'].expiry)
+        self.assertIsNone(acl['empty'].start)
+    finally:
+        self._delete_table(table)
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_set_table_acl_with_signed_identifiers(self, resource_group, location, storage_account,
+                                               storage_account_key):
+    # Arrange
+    url = self.account_url(storage_account, "table")
+    if 'cosmos' in url:
+        pytest.skip("Cosmos endpoint does not support this")
+    ts = TableServiceClient(url, storage_account_key)
+    table = self._create_table(ts)
+
+    # Act
+    identifiers = dict()
+    identifiers['testid'] = AccessPolicy(start=datetime.utcnow() - timedelta(minutes=5),
+                                         expiry=datetime.utcnow() + timedelta(hours=1),
+                                         permission=TableSasPermissions(query=True))
+    try:
+        table.set_table_access_policy(identifiers)
+        # Assert
+        acl = table.get_table_access_policy()
+        self.assertIsNotNone(acl)
+        self.assertEqual(len(acl), 1)
+        self.assertTrue('testid' in acl)
+    finally:
+        self._delete_table(table)
+
+
+@pytest.mark.skip("pending")
+@GlobalStorageAccountPreparer()
+def test_set_table_acl_too_many_ids(self, resource_group, location, storage_account, storage_account_key):
+    # Arrange
+    url = self.account_url(storage_account, "table")
+    if 'cosmos' in url:
+        pytest.skip("Cosmos endpoint does not support this")
+    ts = TableServiceClient(url, storage_account_key)
+    table = self._create_table(ts)
+    try:
         # Act
-        with self.assertRaises(ResourceNotFoundError):
-            ts.delete_table(table_name)
+        identifiers = dict()
+        for i in range(0, 6):
+            identifiers['id{}'.format(i)] = None
         # Assert
-
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_unicode_create_table_unicode_name(self, resource_group, location, storage_account, storage_account_key):
-        # Arrange
-        url = self.account_url(storage_account, "table")
-        if 'cosmos' in url:
-            pytest.skip("Cosmos URLs support unicode table names")
-        ts = TableServiceClient(url, storage_account_key)
-        table_name = u'啊齄丂狛狜'
+        with self.assertRaisesRegex(ValueError,
+                                    'Too many access policies provided. The server does not support setting more than 5 access policies on a single resource.'):
+            table.set_table_access_policy(identifiers)
+    finally:
+        self._delete_table(table)
+
+
+@pytest.mark.skip("pending")
+@pytest.mark.live_test_only
+@GlobalStorageAccountPreparer()
+def test_account_sas(self, resource_group, location, storage_account, storage_account_key):
+    # SAS URL is calculated from storage key, so this test runs live only
+
+    # Arrange
+    url = self.account_url(storage_account, "table")
+    if 'cosmos' in url:
+        pytest.skip("Cosmos Tables does not yet support sas")
+    tsc = TableServiceClient(url, storage_account_key)
+    table = self._create_table(tsc)
+    try:
+        entity = {
+            'PartitionKey': 'test',
+            'RowKey': 'test1',
+            'text': 'hello',
+        }
+        table.upsert_item(entity)
+
+        entity['RowKey'] = 'test2'
+        table.upsert_item(entity)
+
+        token = generate_account_sas(
+            storage_account.name,
+            storage_account_key,
+            resource_types=ResourceTypes(object=True),
+            permission=AccountSasPermissions(read=True),
+            expiry=datetime.utcnow() + timedelta(hours=1),
+            start=datetime.utcnow() - timedelta(minutes=1),
+        )
         # Act
-        with self.assertRaises(HttpResponseError):
-            # not supported - table name must be alphanumeric, lowercase
-            ts.create_table(table_name)
+        service = TableServiceClient(
+            self.account_url(storage_account, "table"),
+            credential=token,
+        )
+        sas_table = service.get_table_client(table.table_name)
+        entities = list(sas_table.read_all_items())
         # Assert
-
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_get_table_acl(self, resource_group, location, storage_account, storage_account_key):
-        # Arrange
-        url = self.account_url(storage_account, "table")
-        if 'cosmos' in url:
-            pytest.skip("Cosmos endpoint does not support this")
-        ts = TableServiceClient(url, storage_account_key)
-        table = self._create_table(ts)
-        try:
-            # Act
-            acl = table.get_table_access_policy()
-
-            # Assert
-            self.assertIsNotNone(acl)
-            self.assertEqual(len(acl), 0)
-        finally:
-            self._delete_table(table)
-
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_set_table_acl_with_empty_signed_identifiers(self, resource_group, location, storage_account,
-                                                         storage_account_key):
-        # Arrange
-        url = self.account_url(storage_account, "table")
-        if 'cosmos' in url:
-            pytest.skip("Cosmos endpoint does not support this")
-        ts = TableServiceClient(url, storage_account_key)
-        table = self._create_table(ts)
-        try:
-            # Act
-            table.set_table_access_policy(signed_identifiers={})
-
-            # Assert
-            acl = table.get_table_access_policy()
-            self.assertIsNotNone(acl)
-            self.assertEqual(len(acl), 0)
-        finally:
-            self._delete_table(table)
-
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_set_table_acl_with_empty_signed_identifier(self, resource_group, location, storage_account,
-                                                        storage_account_key):
-        # Arrange
-        url = self.account_url(storage_account, "table")
-        if 'cosmos' in url:
-            pytest.skip("Cosmos endpoint does not support this")
-        ts = TableServiceClient(url, storage_account_key)
-        table = self._create_table(ts)
-        try:
-            # Act
-            table.set_table_access_policy({'empty': None})
-
-            # Assert
-            acl = table.get_table_access_policy()
-            self.assertIsNotNone(acl)
-            self.assertEqual(len(acl), 1)
-            self.assertIsNotNone(acl['empty'])
-            self.assertIsNone(acl['empty'].permission)
-            self.assertIsNone(acl['empty'].expiry)
-            self.assertIsNone(acl['empty'].start)
-        finally:
-            self._delete_table(table)
-
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_set_table_acl_with_signed_identifiers(self, resource_group, location, storage_account,
-                                                   storage_account_key):
-        # Arrange
-        url = self.account_url(storage_account, "table")
-        if 'cosmos' in url:
-            pytest.skip("Cosmos endpoint does not support this")
-        ts = TableServiceClient(url, storage_account_key)
-        table = self._create_table(ts)
+        self.assertEqual(len(entities), 2)
+        self.assertEqual(entities[0].text, 'hello')
+        self.assertEqual(entities[1].text, 'hello')
+    finally:
+        self._delete_table(table)
+
+
+@pytest.mark.skip("msrest fails deserialization: https://github.com/Azure/msrest-for-python/issues/192")
+@GlobalStorageAccountPreparer()
+def test_locale(self, resource_group, location, storage_account, storage_account_key):
+    # Arrange
+    ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
+    table = ts.get_table_client(self._get_table_reference())
+    init_locale = locale.getlocale()
+    if os.name is "nt":
+        culture = "Spanish_Spain"
+    elif os.name is 'posix':
+        culture = 'es_ES.UTF-8'
+    else:
+        culture = 'es_ES.utf8'
+
+    try:
+        locale.setlocale(locale.LC_ALL, culture)
+        e = None
         # Act
-        identifiers = dict()
-        identifiers['testid'] = AccessPolicy(start=datetime.utcnow() - timedelta(minutes=5),
-                                             expiry=datetime.utcnow() + timedelta(hours=1),
-                                             permission=TableSasPermissions(query=True))
+        table.create_table()
         try:
-            table.set_table_access_policy(identifiers)
+            resp = ts.list_tables()
+        except:
+            e = sys.exc_info()[0]
-            # Assert
-            acl = table.get_table_access_policy()
-            self.assertIsNotNone(acl)
-            self.assertEqual(len(acl), 1)
-            self.assertTrue('testid' in acl)
-        finally:
-            self._delete_table(table)
-
-    @pytest.mark.skip("pending")
-    @GlobalStorageAccountPreparer()
-    def test_set_table_acl_too_many_ids(self, resource_group, location, storage_account, storage_account_key):
-        # Arrange
-        url = self.account_url(storage_account, "table")
-        if 'cosmos' in url:
-            pytest.skip("Cosmos endpoint does not support this")
-        ts = TableServiceClient(url, storage_account_key)
-        table = self._create_table(ts)
-        try:
-            # Act
-            identifiers = dict()
-            for i in range(0, 6):
-                identifiers['id{}'.format(i)] = None
-
-            # Assert
-            with self.assertRaisesRegex(ValueError,
-                                        'Too many access policies provided. The server does not support setting more than 5 access policies on a single resource.'):
-                table.set_table_access_policy(identifiers)
-        finally:
-            self._delete_table(table)
-
-    @pytest.mark.skip("pending")
-    @pytest.mark.live_test_only
-    @GlobalStorageAccountPreparer()
-    def test_account_sas(self, resource_group, location, storage_account, storage_account_key):
-        # SAS URL is calculated from storage key, so this test runs live only
-
-        # Arrange
-        url = self.account_url(storage_account, "table")
-        if 'cosmos' in url:
-            pytest.skip("Cosmos Tables does not yet support sas")
-        tsc = TableServiceClient(url, storage_account_key)
-        table = self._create_table(tsc)
-        try:
-            entity = {
-                'PartitionKey': 'test',
-                'RowKey': 'test1',
-                'text': 'hello',
-            }
-            table.upsert_item(entity)
-
-            entity['RowKey'] = 'test2'
-            table.upsert_item(entity)
-
-            token = generate_account_sas(
-                storage_account.name,
-                storage_account_key,
-                resource_types=ResourceTypes(object=True),
-                permission=AccountSasPermissions(read=True),
-                expiry=datetime.utcnow() + timedelta(hours=1),
-                start=datetime.utcnow() - timedelta(minutes=1),
-            )
-
-            # Act
-            service = TableServiceClient(
-                self.account_url(storage_account, "table"),
-                credential=token,
-            )
-            sas_table = service.get_table_client(table.table_name)
-            entities = list(sas_table.read_all_items())
-
-            # Assert
-            self.assertEqual(len(entities), 2)
-            self.assertEqual(entities[0].text, 'hello')
-            self.assertEqual(entities[1].text, 'hello')
-        finally:
-            self._delete_table(table)
-
-    @pytest.mark.skip("msrest fails deserialization: https://github.com/Azure/msrest-for-python/issues/192")
-    @GlobalStorageAccountPreparer()
-    def test_locale(self, resource_group, location, storage_account, storage_account_key):
-        # Arrange
-        ts = TableServiceClient(self.account_url(storage_account, "table"), storage_account_key)
-        table = ts.get_table_client(self._get_table_reference())
-        init_locale = locale.getlocale()
-        if os.name is "nt":
-            culture = "Spanish_Spain"
-        elif os.name is 'posix':
-            culture = 'es_ES.UTF-8'
-        else:
-            culture = 'es_ES.utf8'
-
-        try:
-            locale.setlocale(locale.LC_ALL, culture)
-            e = None
-
-            # Act
-            table.create_table()
-            try:
-                resp = ts.list_tables()
-            except:
-                e = sys.exc_info()[0]
-
-            # Assert
-            self.assertIsNone(e)
-        finally:
-            self._delete_table(table)
-            locale.setlocale(locale.LC_ALL, init_locale[0] or 'en_US')
+        # Assert
+        self.assertIsNone(e)
+    finally:
+        self._delete_table(table)
+        locale.setlocale(locale.LC_ALL, init_locale[0] or 'en_US')