diff --git a/README.md b/README.md index 6bdb2f580788..3bba4f23244a 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ To get the source code of the SDK via **git** just type: Alternatively, to get the source code via the Python Package Index (PyPI), type - %SystemDrive%\Python27\Scripts\pip.exe install pyazure + %SystemDrive%\Python27\Scripts\pip.exe install azure You can use these packages against the cloud Windows Azure Services, or against the local Storage Emulator (with the exception of Service Bus features). @@ -45,20 +45,21 @@ the local Storage Emulator (with the exception of Service Bus features). # Usage ## Table Storage -To ensure a table exists, call **create_table**: +To ensure a table exists, call **create\_table**: ```Python from azure.storage import TableService ts = TableService(account_name, account_key) -table = ts.create_table('tasktable') +ts.create_table('tasktable') ``` -A new entity can be added by calling **insert_entity**: +A new entity can be added by calling **insert\_entity**: ```Python +from datetime import datetime ts = TableService(account_name, account_key) -table = ts.create_table('tasktable') -table.insert_entity( +ts.create_table('tasktable') +ts.insert_entity( 'tasktable', { 'PartitionKey' : 'tasksSeattle', @@ -69,7 +70,7 @@ table.insert_entity( ) ``` -The method **get_entity** can then be used to fetch the entity that was just inserted: +The method **get\_entity** can then be used to fetch the entity that was just inserted: ```Python ts = TableService(account_name, account_key) @@ -78,27 +79,25 @@ entity = ts.get_entity('tasktable', 'tasksSeattle', '1') ## Blob Storage -The **create_container** method can be used to create a +The **create\_container** method can be used to create a container in which to store a blob: ```Python from azure.storage import BlobService -blob_service = BlobService() -container = blob_service.create_container('taskcontainer') +blob_service = BlobService(account_name, account_key) +blob_service.create_container('taskcontainer') ``` -To upload a file (assuming it is called task1-upload.txt, it contains the exact text "hello world" (no quotation marks), and it is placed in the same folder as the script below), the method **put_blob** can be used: +To upload a file (assuming it is called task1-upload.txt, it contains the exact text "hello world" (no quotation marks), and it is placed in the same folder as the script below), the method **put\_blob** can be used: ```Python from azure.storage import BlobService blob_service = BlobService(account_name, account_key) -blob_service.put_blob('taskcontainer', 'task1', -blobService = azure.createBlobService() -blobService.put_blob('taskcontainer', 'task1', file('task1-upload.txt').read()) +blob_service.put_blob('taskcontainer', 'task1', file('task1-upload.txt').read(), 'BlockBlob') ``` -To download the blob and write it to the file system, the **get_blob** method can be used: +To download the blob and write it to the file system, the **get\_blob** method can be used: ```Python from azure.storage import BlobService @@ -108,15 +107,15 @@ blob = blob_service.get_blob('taskcontainer', 'task1') ## Storage Queues -The **create_queue** method can be used to ensure a queue exists: +The **create\_queue** method can be used to ensure a queue exists: ```Python from azure.storage import QueueService queue_service = QueueService(account_name, account_key) -queue = queue_service.create_queue('taskqueue') +queue_service.create_queue('taskqueue') ``` -The **put_message** method can then be called to 
insert the message into the queue: +The **put\_message** method can then be called to insert the message into the queue: ```Python from azure.storage import QueueService @@ -124,69 +123,73 @@ queue_service = QueueService(account_name, account_key) queue_service.put_message('taskqueue', 'Hello world!') ``` -It is then possible to call the **get___messages** method, process the message and then call **delete_message** on the messages ID. This two-step process ensures messages don't get lost when they are removed from the queue. +It is then possible to call the **get\_messages** method, process the message and then call **delete\_message** with the message id and pop receipt. This two-step process ensures messages don't get lost when they are removed from the queue. ```Python from azure.storage import QueueService queue_service = QueueService(account_name, account_key) messages = queue_service.get_messages('taskqueue') -queue_service.delete_message('taskqueue', messages[0].message_id) +queue_service.delete_message('taskqueue', messages[0].message_id, messages[0].pop_receipt) ``` ## ServiceBus Queues ServiceBus Queues are an alternative to Storage Queues that might be useful in scenarios where more advanced messaging features are needed (larger message sizes, message ordering, single-operation destructive reads, scheduled delivery) with push-style delivery (via long polling). -The **create_queue** method can be used to ensure a queue exists: +The **create\_queue** method can be used to ensure a queue exists: ```Python from azure.servicebus import ServiceBusService -sbs = ServiceBusService(service_namespace, account_key) -queue = sbs.create_queue('taskqueue'); +sbs = ServiceBusService(service_namespace, account_key, 'owner') +sbs.create_queue('taskqueue') ``` -The **send__queue__message** method can then be called to insert the message into the queue: +The **send\_queue\_message** method can then be called to insert the message into the queue: ```Python -from azure.servicebus import ServiceBusService -sbs = ServiceBusService(service_namespace, account_key) -sbs.send_queue_message('taskqueue', 'Hello World!') +from azure.servicebus import ServiceBusService, Message +sbs = ServiceBusService(service_namespace, account_key, 'owner') +msg = Message('Hello World!') +sbs.send_queue_message('taskqueue', msg) ``` -It is then possible to call the **read__delete___queue__message** method to dequeue the message. +It is then possible to call the **receive\_queue\_message** method to dequeue the message. ```Python from azure.servicebus import ServiceBusService -sbs = ServiceBusService(service_namespace, account_key) -msg = sbs.read_delete_queue_message('taskqueue') +sbs = ServiceBusService(service_namespace, account_key, 'owner') +msg = sbs.receive_queue_message('taskqueue') ```
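The **receive\_queue\_message** call above performs a destructive read. A peek-lock receive can give Service Bus queues the same two-step safety shown for Storage Queues. The sketch below is illustrative only: the `peek_lock` parameter and the `delete`/`unlock` methods on the returned message are assumed here and are not covered by the examples above.

```Python
from azure.servicebus import ServiceBusService

sbs = ServiceBusService(service_namespace, account_key, 'owner')

# Assumed peek_lock parameter: lock the message instead of removing it immediately.
msg = sbs.receive_queue_message('taskqueue', peek_lock=True)

# ... process msg.body here ...

# Assumed methods: delete() removes the locked message once processing succeeds;
# unlock() would make it visible to other receivers again instead.
msg.delete()
```

## ServiceBus Topics

ServiceBus topics are an abstraction on top of ServiceBus Queues that makes pub/sub scenarios easy to implement.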
-The **create_topic** method can be used to create a server-side topic: +The **create\_topic** method can be used to create a server-side topic: ```Python from azure.servicebus import ServiceBusService -sbs = ServiceBusService(service_namespace, account_key) -topic = sbs.create_topic('taskdiscussion') +sbs = ServiceBusService(service_namespace, account_key, 'owner') +sbs.create_topic('taskdiscussion') ``` -The **send__topic__message** method can be used to send a message to a topic: +The **send\_topic\_message** method can be used to send a message to a topic: ```Python -from azure.servicebus import ServiceBusService -sbs = ServiceBusService(service_namespace, account_key) -sbs.send_topic_message('taskdiscussion', 'Hello world!') +from azure.servicebus import ServiceBusService, Message +sbs = ServiceBusService(service_namespace, account_key, 'owner') +msg = Message('Hello World!') +sbs.send_topic_message('taskdiscussion', msg) ``` -A client can then create a subscription and start consuming messages by calling the **create__subscription** method followed by the **receive__subscription__message** method. Please note that any messages sent before the subscription is created will not be received. +A client can then create a subscription and start consuming messages by calling the **create\_subscription** method followed by the **receive\_subscription\_message** method. Please note that any messages sent before the subscription is created will not be received. ```Python -from azure.servicebus import ServiceBusService -sbs = ServiceBusService(service_namespace, account_key) +from azure.servicebus import ServiceBusService, Message +sbs = ServiceBusService(service_namespace, account_key, 'owner') sbs.create_subscription('taskdiscussion', 'client1') +msg = Message('Hello World!') +sbs.send_topic_message('taskdiscussion', msg) msg = sbs.receive_subscription_message('taskdiscussion', 'client1') ``` diff --git a/src/azure/__init__.py b/src/azure/__init__.py index eccc0d681e02..bb0f4ad116c8 100644 --- a/src/azure/__init__.py +++ b/src/azure/__init__.py @@ -43,12 +43,12 @@ _ERROR_CANNOT_FIND_ROW_KEY = 'Cannot find row key in request.' _ERROR_INCORRECT_TABLE_IN_BATCH = 'Table should be the same in a batch operations' _ERROR_INCORRECT_PARTITION_KEY_IN_BATCH = 'Partition Key should be the same in a batch operations' -_ERROR_DUPLICATE_ROW_KEY_IN_BATCH = 'Partition Key should be the same in a batch operations' +_ERROR_DUPLICATE_ROW_KEY_IN_BATCH = 'Row Keys should not be the same in a batch operations' _ERROR_BATCH_COMMIT_FAIL = 'Batch Commit Fail' _ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE = 'Message is not peek locked and cannot be deleted.' _ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK = 'Message is not peek locked and cannot be unlocked.' -_ERROR_QUEUE_NOT_FOUND = 'Queue is not Found' -_ERROR_TOPIC_NOT_FOUND = 'Topic is not Found' +_ERROR_QUEUE_NOT_FOUND = 'Queue was not found' +_ERROR_TOPIC_NOT_FOUND = 'Topic was not found' _ERROR_CONFLICT = 'Conflict' _ERROR_NOT_FOUND = 'Not found' _ERROR_UNKNOWN = 'Unknown error (%s)' @@ -58,6 +58,8 @@ _ERROR_VALUE_SHOULD_NOT_BE_NULL = '%s should not be None.' _ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY = 'Cannot serialize the specified value (%s) to an entity. Please use an EntityProperty (which can specify custom types), int, str, bool, or datetime' +METADATA_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata' + class WindowsAzureData(object): ''' This is the base of data class. It is only used to check whether it is instance or not. 
''' pass @@ -80,8 +82,11 @@ def __init__(self, message): self.message = message class Feed: - def __init__(self, type): - self.type = type + pass + +class HeaderDict(dict): + def __getitem__(self, index): + return super(HeaderDict, self).__getitem__(index.lower()) def _get_readable_id(id_name): """simplified an id to be more friendly for us people""" @@ -97,6 +102,9 @@ def _get_entry_properties(xmlstr, include_id): properties = {} for entry in _get_child_nodes(xmldoc, 'entry'): + etag = entry.getAttributeNS(METADATA_NS, 'etag') + if etag: + properties['etag'] = etag for updated in _get_child_nodes(entry, 'updated'): properties['updated'] = updated.firstChild.nodeValue for name in _get_children_from_path(entry, 'author', 'name'): @@ -109,6 +117,14 @@ def _get_entry_properties(xmlstr, include_id): return properties +def _get_first_child_node_value(parent_node, node_name): + xml_attrs = _get_child_nodes(parent_node, node_name) + if xml_attrs: + xml_attr = xml_attrs[0] + if xml_attr.firstChild: + value = xml_attr.firstChild.nodeValue + return value + def _get_child_nodes(node, tagName): return [childNode for childNode in node.getElementsByTagName(tagName) if childNode.parentNode == node] @@ -142,7 +158,7 @@ def _create_entry(entry_body): updated_str += '+00:00' entry_start = ''' - + <updated>{updated}</updated><author><name /></author><id /> <content type="application/xml"> {body}</content></entry>''' @@ -242,9 +258,23 @@ def _clone_node_with_namespaces(node_to_clone, original_doc): return clone def _convert_response_to_feeds(response, convert_func): - feeds = [] + if response is None: + return None + + feeds = _list_of(Feed) + + x_ms_continuation = HeaderDict() + for name, value in response.headers: + if 'x-ms-continuation' in name: + x_ms_continuation[name[len('x-ms-continuation')+1:]] = value + if x_ms_continuation: + setattr(feeds, 'x_ms_continuation', x_ms_continuation) + xmldoc = minidom.parseString(response.body) - for xml_entry in _get_children_from_path(xmldoc, 'feed', 'entry'): + xml_entries = _get_children_from_path(xmldoc, 'feed', 'entry') + if not xml_entries: + xml_entries = _get_children_from_path(xmldoc, 'entry') #in some cases, response contains only entry but no feed + for xml_entry in xml_entries: new_node = _clone_node_with_namespaces(xml_entry, xmldoc) feeds.append(convert_func(new_node.toxml())) @@ -254,16 +284,19 @@ def _validate_not_none(param_name, param): if param is None: raise TypeError(_ERROR_VALUE_SHOULD_NOT_BE_NULL % (param_name)) -def _html_encode(html): - ch_map = (('&', '&'), ('<', '<'), ('>', '>'), ('"', '"'), ('\'', '&apos')) - for name, value in ch_map: - html = html.replace(name, value) - return html - def _fill_list_of(xmldoc, element_type): xmlelements = _get_child_nodes(xmldoc, element_type.__name__) return [_parse_response_body(xmlelement.toxml(), element_type) for xmlelement in xmlelements] +def _fill_dict(xmldoc, element_name): + xmlelements = _get_child_nodes(xmldoc, element_name) + if xmlelements: + return_obj = {} + for child in xmlelements[0].childNodes: + if child.firstChild: + return_obj[child.nodeName] = child.firstChild.nodeValue + return return_obj + def _fill_instance_child(xmldoc, element_name, return_type): '''Converts a child of the current dom element to the specified type. 
The child name ''' @@ -272,7 +305,10 @@ def _fill_instance_child(xmldoc, element_name, return_type): if not xmlelements: return None - return _fill_instance_element(xmlelements[0], return_type) + return_obj = return_type() + _fill_data_to_return_object(xmlelements[0], return_obj) + + return return_obj def _fill_instance_element(element, return_type): """Converts a DOM element into the specified object""" @@ -367,6 +403,19 @@ def _parse_response(response, return_type): ''' return _parse_response_body(response.body, return_type) +def _fill_data_to_return_object(node, return_obj): + for name, value in vars(return_obj).iteritems(): + if isinstance(value, _list_of): + setattr(return_obj, name, _fill_list_of(node, value.list_type)) + elif isinstance(value, WindowsAzureData): + setattr(return_obj, name, _fill_instance_child(node, name, value.__class__)) + elif isinstance(value, dict): + setattr(return_obj, name, _fill_dict(node, _get_serialization_name(name))) + else: + value = _fill_data_minidom(node, name, value) + if value is not None: + setattr(return_obj, name, value) + def _parse_response_body(respbody, return_type): ''' parse the xml and fill all the data into a class of return_type @@ -374,15 +423,7 @@ def _parse_response_body(respbody, return_type): doc = minidom.parseString(respbody) return_obj = return_type() for node in _get_child_nodes(doc, return_type.__name__): - for name, value in vars(return_obj).iteritems(): - if isinstance(value, _list_of): - setattr(return_obj, name, _fill_list_of(node, value.list_type)) - elif isinstance(value, WindowsAzureData): - setattr(return_obj, name, _fill_instance_child(node, name, value.__class__)) - else: - value = _fill_data_minidom(node, name, value) - if value is not None: - setattr(return_obj, name, value) + _fill_data_to_return_object(node, return_obj) return return_obj @@ -446,11 +487,12 @@ def _dont_fail_not_exist(error): def _parse_response_for_dict(response): ''' Extracts name-values from response header. Filter out the standard http headers.''' - + + if response is None: + return None http_headers = ['server', 'date', 'location', 'host', - 'via', 'proxy-connection', 'x-ms-version', 'connection', - 'content-length'] - return_dict = {} + 'via', 'proxy-connection', 'connection'] + return_dict = HeaderDict() if response.headers: for name, value in response.headers: if not name.lower() in http_headers: @@ -461,6 +503,8 @@ def _parse_response_for_dict(response): def _parse_response_for_dict_prefix(response, prefix): ''' Extracts name-values for names starting with prefix from response header. Filter out the standard http headers.''' + if response is None: + return None return_dict = {} orig_dict = _parse_response_for_dict(response) if orig_dict: @@ -475,6 +519,8 @@ def _parse_response_for_dict_prefix(response, prefix): def _parse_response_for_dict_filter(response, filter): ''' Extracts name-values for names in filter from response header. Filter out the standard http headers.''' + if response is None: + return None return_dict = {} orig_dict = _parse_response_for_dict(response) if orig_dict: diff --git a/src/azure/http/__init__.py b/src/azure/http/__init__.py index 3a2dfc515a6f..3aeb36ebe1f2 100644 --- a/src/azure/http/__init__.py +++ b/src/azure/http/__init__.py @@ -13,6 +13,7 @@ # limitations under the License. 
#-------------------------------------------------------------------------- +HTTP_RESPONSE_NO_CONTENT = 204 class HTTPError(Exception): ''' HTTP Exception when response status code >= 300 ''' diff --git a/src/azure/http/winhttp.py b/src/azure/http/winhttp.py index f67f6de49ee0..4340389214aa 100644 --- a/src/azure/http/winhttp.py +++ b/src/azure/http/winhttp.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. #-------------------------------------------------------------------------- -from ctypes import c_void_p, c_long, c_ulong, c_longlong, c_ulonglong, c_short, c_ushort, c_wchar_p, c_byte +from ctypes import c_void_p, c_long, c_ulong, c_longlong, c_ulonglong, c_short, c_ushort, c_wchar_p, c_byte, c_size_t from ctypes import byref, Structure, Union, POINTER, WINFUNCTYPE, HRESULT, oledll, WinDLL, cast, create_string_buffer import ctypes import urllib2 @@ -41,11 +41,23 @@ _oleaut32 = WinDLL('oleaut32') _CLSIDFromString = _ole32.CLSIDFromString _CoInitialize = _ole32.CoInitialize +_CoInitialize.argtypes = [c_void_p] + _CoCreateInstance = _ole32.CoCreateInstance + _SysAllocString = _oleaut32.SysAllocString +_SysAllocString.restype = c_void_p +_SysAllocString.argtypes = [c_wchar_p] + _SysFreeString = _oleaut32.SysFreeString +_SysFreeString.argtypes = [c_void_p] + _SafeArrayDestroy = _oleaut32.SafeArrayDestroy +_SafeArrayDestroy.argtypes = [c_void_p] + _CoTaskMemAlloc = _ole32.CoTaskMemAlloc +_CoTaskMemAlloc.restype = c_void_p +_CoTaskMemAlloc.argtypes = [c_size_t] #------------------------------------------------------------------------------ class BSTR(c_wchar_p): @@ -215,17 +227,7 @@ def status_text(self): status_text = bstr_status_text.value _SysFreeString(bstr_status_text) return status_text - - def response_text(self): - ''' Gets response body as text. ''' - - bstr_resptext = c_void_p() - _WinHttpRequest._ResponseText(self, byref(bstr_resptext)) - bstr_resptext = ctypes.cast(bstr_resptext, c_wchar_p) - resptext = bstr_resptext.value - _SysFreeString(bstr_resptext) - return resptext - + def response_body(self): ''' Gets response body as a SAFEARRAY and converts the SAFEARRAY to str. 
If it is an xml @@ -283,7 +285,7 @@ def __init__(self, host, cert_file=None, key_file=None, protocol='http'): self.protocol = protocol clsid = GUID('{2087C2F4-2CEF-4953-A8AB-66779B670495}') iid = GUID('{016FE2EC-B2C8-45F8-B23B-39E53A75396B}') - _CoInitialize(0) + _CoInitialize(None) _CoCreateInstance(byref(clsid), 0, 1, byref(iid), byref(self._httprequest)) def putrequest(self, method, uri): @@ -330,7 +332,7 @@ def getresponse(self): for resp_header in fixed_headers: if ':' in resp_header: pos = resp_header.find(':') - headers.append((resp_header[:pos], resp_header[pos+1:].strip())) + headers.append((resp_header[:pos].lower(), resp_header[pos+1:].strip())) body = self._httprequest.response_body() length = len(body) diff --git a/src/azure/servicebus/__init__.py b/src/azure/servicebus/__init__.py index a0a959bae615..3cca653ab3d5 100644 --- a/src/azure/servicebus/__init__.py +++ b/src/azure/servicebus/__init__.py @@ -23,10 +23,10 @@ from azure.http import HTTPError from azure import (WindowsAzureError, WindowsAzureData, - _create_entry, _get_entry_properties, _html_encode, + _create_entry, _get_entry_properties, xml_escape, _get_child_nodes, WindowsAzureMissingResourceError, WindowsAzureConflictError, _get_serialization_name, - _get_children_from_path) + _get_children_from_path, _get_first_child_node_value) import azure #default rule name for subscription @@ -47,52 +47,90 @@ class Queue(WindowsAzureData): ''' Queue class corresponding to Queue Description: http://msdn.microsoft.com/en-us/library/windowsazure/hh780773''' - def __init__(self): - self.lock_duration = None - self.max_size_in_megabytes = None - self.duplicate_detection = None - self.requires_duplicate_detection = None - self.requires_session = None - self.default_message_time_to_live = None - self.enable_dead_lettering_on_message_expiration = None - self.duplicate_detection_history_time_window = None - self.max_delivery_count = None - self.enable_batched_operations = None - self.size_in_bytes = None - self.message_count = None + def __init__(self, + lock_duration=None, + max_size_in_megabytes=None, + requires_duplicate_detection=None, + requires_session=None, + default_message_time_to_live=None, + dead_lettering_on_message_expiration=None, + duplicate_detection_history_time_window=None, + max_delivery_count=None, + enable_batched_operations=None, + size_in_bytes=None, + message_count=None): + + self.lock_duration = lock_duration + self.max_size_in_megabytes = max_size_in_megabytes + self.requires_duplicate_detection = requires_duplicate_detection + self.requires_session = requires_session + self.default_message_time_to_live = default_message_time_to_live + self.dead_lettering_on_message_expiration = dead_lettering_on_message_expiration + self.duplicate_detection_history_time_window = duplicate_detection_history_time_window + self.max_delivery_count = max_delivery_count + self.enable_batched_operations = enable_batched_operations + self.size_in_bytes = size_in_bytes + self.message_count = message_count class Topic(WindowsAzureData): ''' Topic class corresponding to Topic Description: http://msdn.microsoft.com/en-us/library/windowsazure/hh780749. 
''' - def __init__(self): - self.default_message_time_to_live = None - self.max_size_in_mega_bytes = None - self.requires_duplicate_detection = None - self.duplicate_detection_history_time_window = None - self.enable_batched_operations = None - self.size_in_bytes = None + def __init__(self, + default_message_time_to_live=None, + max_size_in_megabytes=None, + requires_duplicate_detection=None, + duplicate_detection_history_time_window=None, + enable_batched_operations=None, + size_in_bytes=None): + + self.default_message_time_to_live = default_message_time_to_live + self.max_size_in_megabytes = max_size_in_megabytes + self.requires_duplicate_detection = requires_duplicate_detection + self.duplicate_detection_history_time_window = duplicate_detection_history_time_window + self.enable_batched_operations = enable_batched_operations + self.size_in_bytes = size_in_bytes + + @property + def max_size_in_mega_bytes(self): + import warnings + warnings.warn('This attribute has been changed to max_size_in_megabytes.') + return self.max_size_in_megabytes + + @max_size_in_mega_bytes.setter + def max_size_in_mega_bytes(self, value): + self.max_size_in_megabytes = value + class Subscription(WindowsAzureData): ''' Subscription class corresponding to Subscription Description: http://msdn.microsoft.com/en-us/library/windowsazure/hh780763. ''' - def __init__(self): - self.lock_duration = None - self.requires_session = None - self.default_message_time_to_live = None - self.dead_lettering_on_message_expiration = None - self.dead_lettering_on_filter_evaluation_exceptions = None - self.enable_batched_operations = None - self.max_delivery_count = None - self.message_count = None + def __init__(self, + lock_duration=None, + requires_session=None, + default_message_time_to_live=None, + dead_lettering_on_message_expiration=None, + dead_lettering_on_filter_evaluation_exceptions=None, + enable_batched_operations=None, + max_delivery_count=None, + message_count=None): + + self.lock_duration = lock_duration + self.requires_session = requires_session + self.default_message_time_to_live = default_message_time_to_live + self.dead_lettering_on_message_expiration = dead_lettering_on_message_expiration + self.dead_lettering_on_filter_evaluation_exceptions = dead_lettering_on_filter_evaluation_exceptions + self.enable_batched_operations = enable_batched_operations + self.max_delivery_count = max_delivery_count + self.message_count = message_count class Rule(WindowsAzureData): ''' Rule class corresponding to Rule Description: http://msdn.microsoft.com/en-us/library/windowsazure/hh780753. ''' - def __init__(self): - self.filter_type = '' - self.filter_expression = '' - self.action_type = '' - self.action_expression = '' + def __init__(self, filter_type=None, filter_expression=None, action_type=None, action_expression=None): + self.filter_type = filter_type + self.filter_expression = filter_expression + self.action_type = action_type + self.action_expression = action_expression class Message(WindowsAzureData): ''' Message class that is used in the send message/get message APIs. 
''' @@ -156,7 +194,7 @@ def add_headers(self, request): elif isinstance(value, datetime): request.headers.append((name, '"' + value.strftime('%a, %d %b %Y %H:%M:%S GMT') + '"')) else: - request.headers.append((name, str(value))) + request.headers.append((name, str(value).lower())) # Adds content-type request.headers.append(('Content-Type', self.type)) @@ -268,11 +306,23 @@ def _create_message(response, service_instance): message_location = value elif name.lower() not in ['content-type', 'brokerproperties', 'transfer-encoding', 'server', 'location', 'date']: if '"' in value: - custom_properties[name] = value[1:-1] - else: - custom_properties[name] = value + value = value[1:-1] + try: + custom_properties[name] = datetime.strptime(value, '%a, %d %b %Y %H:%M:%S GMT') + except ValueError: + custom_properties[name] = value + else: #only int, float or boolean + if value.lower() == 'true': + custom_properties[name] = True + elif value.lower() == 'false': + custom_properties[name] = False + elif str(int(float(value))) == value: #int('3.1') doesn't work so need to get float('3.14') first + custom_properties[name] = int(value) + else: + custom_properties[name] = float(value) + if message_type == None: - message = Message(respbody, service_instance, message_location, custom_properties, broker_properties) + message = Message(respbody, service_instance, message_location, custom_properties, 'application/atom+xml;type=entry;charset=utf-8', broker_properties) else: message = Message(respbody, service_instance, message_location, custom_properties, message_type, broker_properties) return message @@ -332,18 +382,6 @@ def _parse_bool(value): return True return False - -_QUEUE_CONVERSION = { - 'MaxSizeInMegaBytes': int, - 'RequiresGroupedReceives': _parse_bool, - 'SupportsDuplicateDetection': _parse_bool, - 'SizeinBytes': int, - 'MessageCount': int, - 'EnableBatchedOperations': _parse_bool, - 'RequiresSession': _parse_bool, - 'LockDuration': int, -} - def _convert_xml_to_queue(xmlstr): ''' Converts xml response to queue object. @@ -363,18 +401,51 @@ def _convert_xml_to_queue(xmlstr): invalid_queue = True #get node for each attribute in Queue class, if nothing found then the response is not valid xml for Queue. 
- for queue_desc in _get_children_from_path(xmldoc, 'entry', 'content', 'QueueDescription'): - for attr_name, attr_value in vars(queue).iteritems(): - xml_attrs = _get_child_nodes(queue_desc, _get_serialization_name(attr_name)) - if xml_attrs: - xml_attr = xml_attrs[0] - if xml_attr.firstChild: - value = xml_attr.firstChild.nodeValue - conversion = _QUEUE_CONVERSION.get(attr_name) - if conversion is not None: - value = conversion(value) - setattr(queue, attr_name, value) - invalid_queue = False + for desc in _get_children_from_path(xmldoc, 'entry', 'content', 'QueueDescription'): + node_value = _get_first_child_node_value(desc, 'LockDuration') + if node_value is not None: + queue.lock_duration = node_value + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes') + if node_value is not None: + queue.max_size_in_megabytes = int(node_value) + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'RequiresDuplicateDetection') + if node_value is not None: + queue.requires_duplicate_detection = _parse_bool(node_value) + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'RequiresSession') + if node_value is not None: + queue.requires_session = _parse_bool(node_value) + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'DefaultMessageTimeToLive') + if node_value is not None: + queue.default_message_time_to_live = node_value + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'DeadLetteringOnMessageExpiration') + if node_value is not None: + queue.dead_lettering_on_message_expiration = _parse_bool(node_value) + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'DuplicateDetectionHistoryTimeWindow') + if node_value is not None: + queue.duplicate_detection_history_time_window = node_value + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'EnableBatchedOperations') + if node_value is not None: + queue.enable_batched_operations = _parse_bool(node_value) + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'MaxDeliveryCount') + if node_value is not None: + queue.max_delivery_count = int(node_value) + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'MessageCount') + if node_value is not None: + queue.message_count = int(node_value) + invalid_queue = False + node_value = _get_first_child_node_value(desc, 'SizeInBytes') + if node_value is not None: + queue.size_in_bytes = int(node_value) + invalid_queue = False if invalid_queue: raise WindowsAzureError(azure._ERROR_QUEUE_NOT_FOUND) @@ -388,12 +459,6 @@ def _convert_xml_to_queue(xmlstr): def _convert_response_to_topic(response): return _convert_xml_to_topic(response.body) -_TOPIC_CONVERSION = { - 'MaxSizeInMegaBytes': int, - 'RequiresDuplicateDetection': _parse_bool, - 'DeadLetteringOnFilterEvaluationExceptions': _parse_bool -} - def _convert_xml_to_topic(xmlstr): '''Converts xml response to topic @@ -402,7 +467,7 @@ def _convert_xml_to_topic(xmlstr): <content type='application/xml'> <TopicDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> <DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive> - <MaxSizeInMegaBytes>1024</MaxSizeInMegaBytes> + <MaxSizeInMegabytes>1024</MaxSizeInMegabytes> <RequiresDuplicateDetection>false</RequiresDuplicateDetection> <DuplicateDetectionHistoryTimeWindow>P7D</DuplicateDetectionHistoryTimeWindow> 
<DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions> @@ -414,20 +479,34 @@ def _convert_xml_to_topic(xmlstr): topic = Topic() invalid_topic = True + #get node for each attribute in Topic class, if nothing found then the response is not valid xml for Topic. for desc in _get_children_from_path(xmldoc, 'entry', 'content', 'TopicDescription'): invalid_topic = True - for attr_name, attr_value in vars(topic).iteritems(): - xml_attrs = _get_child_nodes(desc, _get_serialization_name(attr_name)) - if xml_attrs: - xml_attr = xml_attrs[0] - if xml_attr.firstChild: - value = xml_attr.firstChild.nodeValue - conversion = _TOPIC_CONVERSION.get(attr_name) - if conversion is not None: - value = conversion(value) - setattr(topic, attr_name, value) - invalid_topic = False + node_value = _get_first_child_node_value(desc, 'DefaultMessageTimeToLive') + if node_value is not None: + topic.default_message_time_to_live = node_value + invalid_topic = False + node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes') + if node_value is not None: + topic.max_size_in_megabytes = int(node_value) + invalid_topic = False + node_value = _get_first_child_node_value(desc, 'RequiresDuplicateDetection') + if node_value is not None: + topic.requires_duplicate_detection = _parse_bool(node_value) + invalid_topic = False + node_value = _get_first_child_node_value(desc, 'DuplicateDetectionHistoryTimeWindow') + if node_value is not None: + topic.duplicate_detection_history_time_window = node_value + invalid_topic = False + node_value = _get_first_child_node_value(desc, 'EnableBatchedOperations') + if node_value is not None: + topic.enable_batched_operations = _parse_bool(node_value) + invalid_topic = False + node_value = _get_first_child_node_value(desc, 'SizeInBytes') + if node_value is not None: + topic.size_in_bytes = int(node_value) + invalid_topic = False if invalid_topic: raise WindowsAzureError(azure._ERROR_TOPIC_NOT_FOUND) @@ -440,15 +519,6 @@ def _convert_xml_to_topic(xmlstr): def _convert_response_to_subscription(response): return _convert_xml_to_subscription(response.body) -_SUBSCRIPTION_CONVERSION = { - 'RequiresSession' : _parse_bool, - 'DeadLetteringOnMessageExpiration': _parse_bool, - 'DefaultMessageTimeToLive': int, - 'EnableBatchedOperations': _parse_bool, - 'MaxDeliveryCount': int, - 'MessageCount': int, -} - def _convert_xml_to_subscription(xmlstr): '''Converts xml response to subscription @@ -467,18 +537,31 @@ def _convert_xml_to_subscription(xmlstr): xmldoc = minidom.parseString(xmlstr) subscription = Subscription() - for desc in _get_children_from_path(xmldoc, 'entry', 'content', 'subscriptiondescription'): - for attr_name, attr_value in vars(subscription).iteritems(): - tag_name = attr_name.replace('_', '') - xml_attrs = _get_child_nodes(desc, tag_name) - if xml_attrs: - xml_attr = xml_attrs[0] - if xml_attr.firstChild: - value = xml_attr.firstChild.nodeValue - conversion = _SUBSCRIPTION_CONVERSION.get(attr_name) - if conversion is not None: - value = conversion(value) - setattr(subscription, attr_name, value) + for desc in _get_children_from_path(xmldoc, 'entry', 'content', 'SubscriptionDescription'): + node_value = _get_first_child_node_value(desc, 'LockDuration') + if node_value is not None: + subscription.lock_duration = node_value + node_value = _get_first_child_node_value(desc, 'RequiresSession') + if node_value is not None: + subscription.requires_session = _parse_bool(node_value) + node_value = _get_first_child_node_value(desc, 
'DefaultMessageTimeToLive') + if node_value is not None: + subscription.default_message_time_to_live = node_value + node_value = _get_first_child_node_value(desc, 'DeadLetteringOnFilterEvaluationExceptions') + if node_value is not None: + subscription.dead_lettering_on_filter_evaluation_exceptions = _parse_bool(node_value) + node_value = _get_first_child_node_value(desc, 'DeadLetteringOnMessageExpiration') + if node_value is not None: + subscription.dead_lettering_on_message_expiration = _parse_bool(node_value) + node_value = _get_first_child_node_value(desc, 'EnableBatchedOperations') + if node_value is not None: + subscription.enable_batched_operations = _parse_bool(node_value) + node_value = _get_first_child_node_value(desc, 'MaxDeliveryCount') + if node_value is not None: + subscription.max_delivery_count = int(node_value) + node_value = _get_first_child_node_value(desc, 'MessageCount') + if node_value is not None: + subscription.message_count = int(node_value) for name, value in _get_entry_properties(xmlstr, True).iteritems(): setattr(subscription, name, value) @@ -496,21 +579,21 @@ def convert_subscription_to_xml(subscription): subscription_body = '<SubscriptionDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">' if subscription: if subscription.lock_duration is not None: - subscription_body += ''.join(['<LockDuration>', subscription.lock_duration, '</LockDuration>']) + subscription_body += ''.join(['<LockDuration>', str(subscription.lock_duration), '</LockDuration>']) if subscription.requires_session is not None: - subscription_body += ''.join(['<RequiresSession>', subscription.requires_session, '</RequiresSession>']) + subscription_body += ''.join(['<RequiresSession>', str(subscription.requires_session).lower(), '</RequiresSession>']) if subscription.default_message_time_to_live is not None: - subscription_body += ''.join(['<DefaultMessageTimeToLive>', subscription.default_message_time_to_live, '</DefaultMessageTimeToLive>']) + subscription_body += ''.join(['<DefaultMessageTimeToLive>', str(subscription.default_message_time_to_live), '</DefaultMessageTimeToLive>']) if subscription.dead_lettering_on_message_expiration is not None: - subscription_body += ''.join(['<DeadLetteringOnMessageExpiration>', subscription.dead_lettering_on_message_expiration, '</DeadLetteringOnMessageExpiration>']) + subscription_body += ''.join(['<DeadLetteringOnMessageExpiration>', str(subscription.dead_lettering_on_message_expiration).lower(), '</DeadLetteringOnMessageExpiration>']) if subscription.dead_lettering_on_filter_evaluation_exceptions is not None: - subscription_body += ''.join(['<DeadLetteringOnFilterEvaluationExceptions>', subscription.dead_lettering_on_filter_evaluation_exceptions, '</DeadLetteringOnFilterEvaluationExceptions>']) + subscription_body += ''.join(['<DeadLetteringOnFilterEvaluationExceptions>', str(subscription.dead_lettering_on_filter_evaluation_exceptions).lower(), '</DeadLetteringOnFilterEvaluationExceptions>']) if subscription.enable_batched_operations is not None: - subscription_body += ''.join(['<EnableBatchedOperations>', subscription.enable_batched_operations, '</EnableBatchedOperations>']) + subscription_body += ''.join(['<EnableBatchedOperations>', str(subscription.enable_batched_operations).lower(), '</EnableBatchedOperations>']) if subscription.max_delivery_count is not None: - subscription_body += ''.join(['<MaxDeliveryCount>', subscription.max_delivery_count, 
'</MaxDeliveryCount>']) + subscription_body += ''.join(['<MaxDeliveryCount>', str(subscription.max_delivery_count), '</MaxDeliveryCount>']) if subscription.message_count is not None: - subscription_body += ''.join(['<MessageCount>', subscription.message_count, '</MessageCount>']) + subscription_body += ''.join(['<MessageCount>', str(subscription.message_count), '</MessageCount>']) subscription_body += '</SubscriptionDescription>' return _create_entry(subscription_body) @@ -525,17 +608,18 @@ def convert_rule_to_xml(rule): rule_body = '<RuleDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">' if rule: if rule.filter_type: - rule_body += ''.join(['<Filter i:type="', _html_encode(rule.filter_type), '">']) + rule_body += ''.join(['<Filter i:type="', xml_escape(rule.filter_type), '">']) if rule.filter_type == 'CorrelationFilter': - rule_body += ''.join(['<CorrelationId>', _html_encode(rule.filter_expression), '</CorrelationId>']) + rule_body += ''.join(['<CorrelationId>', xml_escape(rule.filter_expression), '</CorrelationId>']) else: - rule_body += ''.join(['<SqlExpression>', _html_encode(rule.filter_expression), '</SqlExpression>']) + rule_body += ''.join(['<SqlExpression>', xml_escape(rule.filter_expression), '</SqlExpression>']) rule_body += '<CompatibilityLevel>20</CompatibilityLevel>' rule_body += '</Filter>' if rule.action_type: - rule_body += ''.join(['<Action i:type="', _html_encode(rule.action_type), '">']) - if rule.action_type == 'SqlFilterAction': - rule_body += ''.join(['<SqlExpression>', _html_encode(rule.action_expression), '</SqlExpression>']) + rule_body += ''.join(['<Action i:type="', xml_escape(rule.action_type), '">']) + if rule.action_type == 'SqlRuleAction': + rule_body += ''.join(['<SqlExpression>', xml_escape(rule.action_expression), '</SqlExpression>']) + rule_body += '<CompatibilityLevel>20</CompatibilityLevel>' rule_body += '</Action>' rule_body += '</RuleDescription>' @@ -553,16 +637,16 @@ def convert_topic_to_xml(topic): if topic: if topic.default_message_time_to_live is not None: topic_body += ''.join(['<DefaultMessageTimeToLive>', str(topic.default_message_time_to_live), '</DefaultMessageTimeToLive>']) - if topic.max_size_in_mega_bytes is not None: + if topic.max_size_in_megabytes is not None: topic_body += ''.join(['<MaxSizeInMegabytes>', str(topic.max_size_in_megabytes), '</MaxSizeInMegabytes>']) if topic.requires_duplicate_detection is not None: - topic_body += ''.join(['<RequiresDuplicateDetection>', str(topic.requires_duplicate_detection), '</RequiresDuplicateDetection>']) + topic_body += ''.join(['<RequiresDuplicateDetection>', str(topic.requires_duplicate_detection).lower(), '</RequiresDuplicateDetection>']) if topic.duplicate_detection_history_time_window is not None: topic_body += ''.join(['<DuplicateDetectionHistoryTimeWindow>', str(topic.duplicate_detection_history_time_window), '</DuplicateDetectionHistoryTimeWindow>']) if topic.enable_batched_operations is not None: - topic_body += ''.join(['<EnableBatchedOperations>', str(topic.enable_batched_operations), '</EnableBatchedOperations>']) + topic_body += ''.join(['<EnableBatchedOperations>', str(topic.enable_batched_operations).lower(), '</EnableBatchedOperations>']) if topic.size_in_bytes is not None: - topic_body += ''.join(['<SizeinBytes>', str(topic.size_in_bytes), '</SizeinBytes>']) + topic_body += ''.join(['<SizeInBytes>', str(topic.size_in_bytes), '</SizeInBytes>']) topic_body += '</TopicDescription>' return 
_create_entry(topic_body) @@ -581,21 +665,21 @@ def convert_queue_to_xml(queue): if queue.max_size_in_megabytes is not None: queue_body += ''.join(['<MaxSizeInMegabytes>', str(queue.max_size_in_megabytes), '</MaxSizeInMegabytes>']) if queue.requires_duplicate_detection is not None: - queue_body += ''.join(['<RequiresDuplicateDetection>', str(queue.requires_duplicate_detection), '</RequiresDuplicateDetection>']) + queue_body += ''.join(['<RequiresDuplicateDetection>', str(queue.requires_duplicate_detection).lower(), '</RequiresDuplicateDetection>']) if queue.requires_session is not None: - queue_body += ''.join(['<RequiresSession>', str(queue.requires_session), '</RequiresSession>']) + queue_body += ''.join(['<RequiresSession>', str(queue.requires_session).lower(), '</RequiresSession>']) if queue.default_message_time_to_live is not None: queue_body += ''.join(['<DefaultMessageTimeToLive>', str(queue.default_message_time_to_live), '</DefaultMessageTimeToLive>']) - if queue.enable_dead_lettering_on_message_expiration is not None: - queue_body += ''.join(['<EnableDeadLetteringOnMessageExpiration>', str(queue.enable_dead_lettering_on_message_expiration), '</EnableDeadLetteringOnMessageExpiration>']) + if queue.dead_lettering_on_message_expiration is not None: + queue_body += ''.join(['<DeadLetteringOnMessageExpiration>', str(queue.dead_lettering_on_message_expiration).lower(), '</DeadLetteringOnMessageExpiration>']) if queue.duplicate_detection_history_time_window is not None: queue_body += ''.join(['<DuplicateDetectionHistoryTimeWindow>', str(queue.duplicate_detection_history_time_window), '</DuplicateDetectionHistoryTimeWindow>']) if queue.max_delivery_count is not None: queue_body += ''.join(['<MaxDeliveryCount>', str(queue.max_delivery_count), '</MaxDeliveryCount>']) if queue.enable_batched_operations is not None: - queue_body += ''.join(['<EnableBatchedOperations>', str(queue.enable_batched_operations), '</EnableBatchedOperations>']) + queue_body += ''.join(['<EnableBatchedOperations>', str(queue.enable_batched_operations).lower(), '</EnableBatchedOperations>']) if queue.size_in_bytes is not None: - queue_body += ''.join(['<SizeinBytes>', str(queue.size_in_bytes), '</SizeinBytes>']) + queue_body += ''.join(['<SizeInBytes>', str(queue.size_in_bytes), '</SizeInBytes>']) if queue.message_count is not None: queue_body += ''.join(['<MessageCount>', str(queue.message_count), '</MessageCount>']) diff --git a/src/azure/servicebus/servicebusservice.py b/src/azure/servicebus/servicebusservice.py index 6694a6b0ef3f..ccd4d2f9d13c 100644 --- a/src/azure/servicebus/servicebusservice.py +++ b/src/azure/servicebus/servicebusservice.py @@ -17,7 +17,7 @@ import urllib2 from azure.http.httpclient import _HTTPClient -from azure.http import HTTPError +from azure.http import HTTPError, HTTP_RESPONSE_NO_CONTENT from azure.servicebus import (_update_service_bus_header, _create_message, convert_topic_to_xml, _convert_response_to_topic, convert_queue_to_xml, _convert_response_to_queue, @@ -27,11 +27,11 @@ _convert_xml_to_subscription, _convert_xml_to_rule, _service_bus_error_handler, AZURE_SERVICEBUS_NAMESPACE, AZURE_SERVICEBUS_ACCESS_KEY, AZURE_SERVICEBUS_ISSUER) -from azure.http import HTTPRequest +from azure.http import HTTPRequest, HTTP_RESPONSE_NO_CONTENT from azure import (_validate_not_none, Feed, _convert_response_to_feeds, _str_or_none, _int_or_none, _get_request_body, _update_request_uri_query, - _dont_fail_on_exist, _dont_fail_not_exist, + _dont_fail_on_exist, _dont_fail_not_exist, 
WindowsAzureConflictError, WindowsAzureError, _parse_response, _convert_class_to_xml, _parse_response_for_dict, _parse_response_for_dict_prefix, _parse_response_for_dict_filter, @@ -699,7 +699,5 @@ def _perform_request(self, request): except HTTPError as e: return _service_bus_error_handler(e) - if not resp: - return None return resp diff --git a/src/azure/storage/__init__.py b/src/azure/storage/__init__.py index bbb6847b8182..85767d895789 100644 --- a/src/azure/storage/__init__.py +++ b/src/azure/storage/__init__.py @@ -21,8 +21,8 @@ import types from datetime import datetime -from azure import (_create_entry, - _get_entry_properties, _html_encode, WindowsAzureError, +from azure import (_create_entry, METADATA_NS, _parse_response_for_dict, + _get_entry_properties, WindowsAzureError, _get_child_nodes, _get_child_nodesNS, WindowsAzureConflictError, WindowsAzureMissingResourceError, _list_of, @@ -51,10 +51,13 @@ class ContainerEnumResults(EnumResultsBase): def __init__(self): EnumResultsBase.__init__(self) self.containers = _list_of(Container) + def __iter__(self): return iter(self.containers) + def __len__(self): return len(self.containers) + def __getitem__(self, index): return self.containers[index] @@ -65,7 +68,7 @@ def __init__(self): self.name = '' self.url = '' self.properties = Properties() - self.metadata = Metadata() + self.metadata = {} class Properties(WindowsAzureData): ''' Blob container's properties class. ''' @@ -74,29 +77,20 @@ def __init__(self): self.last_modified = '' self.etag = '' -class Metadata(WindowsAzureData): - ''' Metadata class. ''' - - def __init__(self): - self.metadata_name = '' - class RetentionPolicy(WindowsAzureData): ''' RetentionPolicy in service properties. ''' + def __init__(self): self.enabled = False self.__dict__['days'] = None - def get_days(self): - + def get_days(self): #convert days to int value return int(self.__dict__['days']) def set_days(self, value): ''' set default days if days is set to empty. ''' - if value == '': - self.__dict__['days'] = 10 - else: - self.__dict__['days'] = value + self.__dict__['days'] = value days = property(fget=get_days, fset=set_days) @@ -143,10 +137,18 @@ def __init__(self): class SignedIdentifiers(WindowsAzureData): ''' SignedIdentifier list. ''' + def __init__(self): - self.signed_identifiers = _list_of(SignedIdentifier) + self.signed_identifiers = _list_of(SignedIdentifier) + def __iter__(self): - return self.signed_identifiers + return iter(self.signed_identifiers) + + def __len__(self): + return len(self.signed_identifiers) + + def __getitem__(self, index): + return self.signed_identifiers[index] class BlobEnumResults(EnumResultsBase): ''' Blob list.''' @@ -154,13 +156,24 @@ class BlobEnumResults(EnumResultsBase): def __init__(self): EnumResultsBase.__init__(self) self.blobs = _list_of(Blob) + def __iter__(self): return iter(self.blobs) + def __len__(self): return len(self.blobs) + def __getitem__(self, index): return self.blobs[index] +class BlobResult(str): + + def __new__(cls, blob, properties): + return str.__new__(cls, blob) + + def __init__(self, blob, properties): + self.properties = properties + class Blob(WindowsAzureData): ''' Blob class. 
''' @@ -169,7 +182,7 @@ def __init__(self): self.snapshot = '' self.url = '' self.properties = BlobProperties() - self.metadata = Metadata() + self.metadata = {} self.blob_prefix = BlobPrefix() class BlobProperties(WindowsAzureData): @@ -202,20 +215,14 @@ def __init__(self, id=None, size=None): class BlobBlockList(WindowsAzureData): ''' BlobBlockList class ''' + def __init__(self): self.committed_blocks = [] self.uncommitted_blocks = [] -class BlockList(WindowsAzureData): - ''' BlockList used to submit block list. ''' - - def __init__(self): - self.committed = [] - self.uncommitted = [] - self.latest = [] - class PageRange(WindowsAzureData): ''' Page Range for page blob. ''' + def __init__(self): self.start = 0 self.end = 0 @@ -225,8 +232,15 @@ class PageList: def __init__(self): self.page_ranges = _list_of(PageRange) + def __iter__(self): - return self.page_ranges + return iter(self.page_ranges) + + def __len__(self): + return len(self.page_ranges) + + def __getitem__(self, index): + return self.page_ranges[index] class QueueEnumResults(EnumResultsBase): ''' Queue list''' @@ -234,10 +248,13 @@ class QueueEnumResults(EnumResultsBase): def __init__(self): EnumResultsBase.__init__(self) self.queues = _list_of(Queue) + def __iter__(self): return iter(self.queues) + def __len__(self): return len(self.queues) + def __getitem__(self, index): return self.queues[index] @@ -247,17 +264,20 @@ class Queue(WindowsAzureData): def __init__(self): self.name = '' self.url = '' - self.metadata = Metadata() + self.metadata = {} class QueueMessagesList(WindowsAzureData): ''' Queue message list. ''' def __init__(self): self.queue_messages = _list_of(QueueMessage) + def __iter__(self): return iter(self.queue_messages) + def __len__(self): return len(self.queue_messages) + def __getitem__(self, index): return self.queue_messages[index] @@ -273,17 +293,6 @@ def __init__(self): self.dequeue_count = '' self.message_text = '' -class TableEnumResult(EnumResultsBase): - def __init__(): - EnumResultsBase.__init__(self) - self.tables = _list_of(Table) - def __iter__(self): - return iter(self.tables) - def __len__(self): - return len(self.tables) - def __getitem__(self, index): - return self.tables[index] - class Entity(WindowsAzureData): ''' Entity class. The attributes of entity will be created dynamically. 
''' pass @@ -430,15 +439,18 @@ def _sign_storage_table_request(request, account_name, account_key): auth_string = 'SharedKey ' + account_name + ':' + base64.b64encode(signed_hmac_sha256.digest()) return auth_string - - def _to_python_bool(value): if value.lower() == 'true': return True return False def _to_entity_int(data): - return 'Edm.Int32', str(data) + int_max = (2 << 30) - 1 + import sys + if data > (int_max) or data < (int_max + 1)*(-1): + return 'Edm.Int64', str(data) + else: + return 'Edm.Int32', str(data) def _to_entity_bool(value): if value: @@ -469,7 +481,10 @@ def _from_entity_int(value): return int(value) def _from_entity_datetime(value): - return datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ') + if value.endswith('Z'): + return datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ') + else: + return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S') _ENTITY_TO_PYTHON_CONVERSIONS = { 'Edm.Int32': _from_entity_int, @@ -542,9 +557,12 @@ def convert_entity_to_xml(source): #form the property node properties_str += ''.join(['<d:', name]) - if mtype: - properties_str += ''.join([' m:type="', mtype, '"']) - properties_str += ''.join(['>', xml_escape(value), '</d:', name, '>']) + if value == '': + properties_str += ' m:null="true" />' + else: + if mtype: + properties_str += ''.join([' m:type="', mtype, '"']) + properties_str += ''.join(['>', xml_escape(value), '</d:', name, '>']) #generate the entity_body entity_body = entity_body.format(properties=properties_str) @@ -576,6 +594,10 @@ def convert_block_list_to_xml(block_id_list): return xml+'</BlockList>' +def _create_blob_result(response): + blob_properties = _parse_response_for_dict(response) + return BlobResult(response.body, blob_properties) + def convert_response_to_block_list(response): ''' Converts xml response to block list class. @@ -601,8 +623,9 @@ def _remove_prefix(name): return name[colon + 1:] return name -METADATA_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata' def _convert_response_to_entity(response): + if response is None: + return response return _convert_xml_to_entity(response.body) def _convert_xml_to_entity(xmlstr): @@ -644,7 +667,6 @@ def _convert_xml_to_entity(xmlstr): return None entity = Entity() - #extract each property node and get the type from attribute and node value for xml_property in xml_properties[0].childNodes: if xml_property.firstChild: @@ -662,19 +684,24 @@ def _convert_xml_to_entity(xmlstr): #if not isnull and no type info, then it is a string and we just need the str type to hold the property. if not isnull and not mtype: setattr(entity, name, value) + elif isnull == 'true': + if mtype: + property = EntityProperty(mtype, None) + else: + property = EntityProperty('Edm.String', None) else: #need an object to hold the property conv = _ENTITY_TO_PYTHON_CONVERSIONS.get(mtype) if conv is not None: property = conv(value) else: - property = EntityProperty() - setattr(property, 'value', value) - if isnull: - property.isnull = str(isnull) - if mtype: - property.type = str(mtype) + property = EntityProperty(mtype, value) setattr(entity, name, property) + #extract id, updated and name value from feed entry and set them of rule. 
+ for name, value in _get_entry_properties(xmlstr, True).iteritems(): + if name in ['etag']: + setattr(entity, name, value) + return entity def _convert_xml_to_table(xmlstr): diff --git a/src/azure/storage/blobservice.py b/src/azure/storage/blobservice.py index 28bbacc1f453..92186e8638a1 100644 --- a/src/azure/storage/blobservice.py +++ b/src/azure/storage/blobservice.py @@ -18,13 +18,13 @@ from azure.storage import * from azure.storage.storageclient import _StorageClient -from azure.storage import (_update_storage_blob_header, +from azure.storage import (_update_storage_blob_header, _create_blob_result, convert_block_list_to_xml, convert_response_to_block_list) -from azure.http import HTTPRequest +from azure.http import HTTPRequest, HTTP_RESPONSE_NO_CONTENT from azure import (_validate_not_none, Feed, _convert_response_to_feeds, _str_or_none, _int_or_none, _get_request_body, _update_request_uri_query, - _dont_fail_on_exist, _dont_fail_not_exist, + _dont_fail_on_exist, _dont_fail_not_exist, WindowsAzureConflictError, WindowsAzureError, _parse_response, _convert_class_to_xml, _parse_response_for_dict, _parse_response_for_dict_prefix, _parse_response_for_dict_filter, @@ -49,7 +49,8 @@ def list_containers(self, prefix=None, marker=None, maxresults=None, include=Non with the next list operation. maxresults: Optional. Specifies the maximum number of containers to return. include: Optional. Include this parameter to specify that the container's metadata be - returned as part of the response body. + returned as part of the response body. set this parameter to string 'metadata' to + get container's metadata. ''' request = HTTPRequest() request.method = 'GET' @@ -312,7 +313,7 @@ def set_blob_properties(self, container_name, blob_name, x_ms_blob_cache_control request.headers = _update_storage_blob_header(request, self.account_name, self.account_key) response = self._perform_request(request) - def put_blob(self, container_name, blob_name, blob, x_ms_blob_type, content_encoding=None, content_language=None, content_m_d5=None, cache_control=None, x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, x_ms_blob_content_language=None, x_ms_blob_content_md5=None, x_ms_blob_cache_control=None, x_ms_meta_name_values=None, x_ms_lease_id=None, x_ms_blob_content_length=None, x_ms_blob_sequence_number=None): + def put_blob(self, container_name, blob_name, blob, x_ms_blob_type, content_encoding=None, content_language=None, content_md5=None, cache_control=None, x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, x_ms_blob_content_language=None, x_ms_blob_content_md5=None, x_ms_blob_cache_control=None, x_ms_meta_name_values=None, x_ms_lease_id=None, x_ms_blob_content_length=None, x_ms_blob_sequence_number=None): ''' Creates a new block blob or page blob, or updates the content of an existing block blob. 
@@ -335,7 +336,7 @@ def put_blob(self, container_name, blob_name, blob, x_ms_blob_type, content_enco ('x-ms-blob-type', _str_or_none(x_ms_blob_type)), ('Content-Encoding', _str_or_none(content_encoding)), ('Content-Language', _str_or_none(content_language)), - ('Content-MD5', _str_or_none(content_m_d5)), + ('Content-MD5', _str_or_none(content_md5)), ('Cache-Control', _str_or_none(cache_control)), ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), ('x-ms-blob-content-encoding', _str_or_none(x_ms_blob_content_encoding)), @@ -376,7 +377,7 @@ def get_blob(self, container_name, blob_name, snapshot=None, x_ms_range=None, x_ request.headers = _update_storage_blob_header(request, self.account_name, self.account_key) response = self._perform_request(request) - return response.body + return _create_blob_result(response) def get_blob_metadata(self, container_name, blob_name, snapshot=None, x_ms_lease_id=None): ''' @@ -481,6 +482,8 @@ def snapshot_blob(self, container_name, blob_name, x_ms_meta_name_values=None, i request.headers = _update_storage_blob_header(request, self.account_name, self.account_key) response = self._perform_request(request) + return _parse_response_for_dict_filter(response, filter=['x-ms-snapshot', 'etag', 'last-modified']) + def copy_blob(self, container_name, blob_name, x_ms_copy_source, x_ms_meta_name_values=None, x_ms_source_if_modified_since=None, x_ms_source_if_unmodified_since=None, x_ms_source_if_match=None, x_ms_source_if_none_match=None, if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None, x_ms_lease_id=None, x_ms_source_lease_id=None): ''' Copies a blob to a destination within the storage account. @@ -559,7 +562,7 @@ def delete_blob(self, container_name, blob_name, snapshot=None, x_ms_lease_id=No request.headers = _update_storage_blob_header(request, self.account_name, self.account_key) response = self._perform_request(request) - def put_block(self, container_name, blob_name, block, blockid, content_m_d5=None, x_ms_lease_id=None): + def put_block(self, container_name, blob_name, block, blockid, content_md5=None, x_ms_lease_id=None): ''' Creates a new block to be committed as part of a blob. 
@@ -580,7 +583,7 @@ def put_block(self, container_name, blob_name, block, blockid, content_m_d5=None request.host = _get_blob_host(self.account_name, self.use_local_storage) request.path = '/' + str(container_name) + '/' + str(blob_name) + '?comp=block' request.headers = [ - ('Content-MD5', _str_or_none(content_m_d5)), + ('Content-MD5', _str_or_none(content_md5)), ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) ] request.query = [('blockid', base64.b64encode(_str_or_none(blockid)))] @@ -589,7 +592,7 @@ def put_block(self, container_name, blob_name, block, blockid, content_m_d5=None request.headers = _update_storage_blob_header(request, self.account_name, self.account_key) response = self._perform_request(request) - def put_block_list(self, container_name, blob_name, block_list, content_m_d5=None, x_ms_blob_cache_control=None, x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, x_ms_blob_content_language=None, x_ms_blob_content_md5=None, x_ms_meta_name_values=None, x_ms_lease_id=None): + def put_block_list(self, container_name, blob_name, block_list, content_md5=None, x_ms_blob_cache_control=None, x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, x_ms_blob_content_language=None, x_ms_blob_content_md5=None, x_ms_meta_name_values=None, x_ms_lease_id=None): ''' Writes a blob by specifying the list of block IDs that make up the blob. In order to be written as part of a blob, a block must have been successfully written to the server @@ -624,7 +627,7 @@ def put_block_list(self, container_name, blob_name, block_list, content_m_d5=Non request.host = _get_blob_host(self.account_name, self.use_local_storage) request.path = '/' + str(container_name) + '/' + str(blob_name) + '?comp=blocklist' request.headers = [ - ('Content-MD5', _str_or_none(content_m_d5)), + ('Content-MD5', _str_or_none(content_md5)), ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), ('x-ms-blob-content-encoding', _str_or_none(x_ms_blob_content_encoding)), @@ -666,7 +669,7 @@ def get_block_list(self, container_name, blob_name, snapshot=None, blocklisttype return convert_response_to_block_list(response) - def put_page(self, container_name, blob_name, page, x_ms_range, x_ms_page_write, timeout=None, content_m_d5=None, x_ms_lease_id=None, x_ms_if_sequence_number_lte=None, x_ms_if_sequence_number_lt=None, x_ms_if_sequence_number_eq=None, if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None): + def put_page(self, container_name, blob_name, page, x_ms_range, x_ms_page_write, timeout=None, content_md5=None, x_ms_lease_id=None, x_ms_if_sequence_number_lte=None, x_ms_if_sequence_number_lt=None, x_ms_if_sequence_number_eq=None, if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None): ''' Writes a range of pages to a page blob. 
@@ -698,7 +701,7 @@ def put_page(self, container_name, blob_name, page, x_ms_range, x_ms_page_write, request.path = '/' + str(container_name) + '/' + str(blob_name) + '?comp=page' request.headers = [ ('x-ms-range', _str_or_none(x_ms_range)), - ('Content-MD5', _str_or_none(content_m_d5)), + ('Content-MD5', _str_or_none(content_md5)), ('x-ms-page-write', _str_or_none(x_ms_page_write)), ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), ('x-ms-if-sequence-number-lte', _str_or_none(x_ms_if_sequence_number_lte)), diff --git a/src/azure/storage/queueservice.py b/src/azure/storage/queueservice.py index 602f71f7177a..baffc0a005c2 100644 --- a/src/azure/storage/queueservice.py +++ b/src/azure/storage/queueservice.py @@ -19,11 +19,11 @@ from azure.storage import * from azure.storage.storageclient import _StorageClient from azure.storage import (_update_storage_queue_header) -from azure.http import HTTPRequest +from azure.http import HTTPRequest, HTTP_RESPONSE_NO_CONTENT from azure import (_validate_not_none, Feed, _convert_response_to_feeds, _str_or_none, _int_or_none, _get_request_body, _update_request_uri_query, - _dont_fail_on_exist, _dont_fail_not_exist, + _dont_fail_on_exist, _dont_fail_not_exist, WindowsAzureConflictError, WindowsAzureError, _parse_response, _convert_class_to_xml, _parse_response_for_dict, _parse_response_for_dict_prefix, _parse_response_for_dict_filter, @@ -96,13 +96,17 @@ def create_queue(self, queue_name, x_ms_meta_name_values=None, fail_on_exist=Fal request.headers = _update_storage_queue_header(request, self.account_name, self.account_key) if not fail_on_exist: try: - self._perform_request(request) + response = self._perform_request(request) + if response.status == HTTP_RESPONSE_NO_CONTENT: + return False return True except WindowsAzureError as e: _dont_fail_on_exist(e) return False else: - self._perform_request(request) + response = self._perform_request(request) + if response.status == HTTP_RESPONSE_NO_CONTENT: + raise WindowsAzureConflictError(azure._ERROR_CONFLICT) return True def delete_queue(self, queue_name, fail_not_exist=False): diff --git a/src/azure/storage/storageclient.py b/src/azure/storage/storageclient.py index 15ff95378a52..862da608b450 100644 --- a/src/azure/storage/storageclient.py +++ b/src/azure/storage/storageclient.py @@ -18,6 +18,7 @@ import hashlib import os + from azure.storage import _storage_error_handler, X_MS_VERSION from azure.http.httpclient import _HTTPClient from azure.http import HTTPError @@ -38,8 +39,15 @@ class _StorageClient(object): ''' def __init__(self, account_name=None, account_key=None, protocol='http'): - self.account_name = account_name - self.account_key = account_key + if account_name is not None: + self.account_name = account_name.encode('ascii', 'ignore') + else: + self.account_name = None + if account_key is not None: + self.account_key = account_key.encode('ascii', 'ignore') + else: + self.account_key = None + self.requestid = None self.protocol = protocol @@ -60,7 +68,7 @@ def __init__(self, account_name=None, account_key=None, protocol='http'): #get the account and key from environment variables if the app is not run #in azure emulator or use default development storage account and key if #app is run in emulator. 
- if not account_name or not account_key: + if not self.account_name or not self.account_key: if self.is_emulated: self.account_name = DEV_ACCOUNT_NAME self.account_key = DEV_ACCOUNT_KEY @@ -70,15 +78,12 @@ def __init__(self, account_name=None, account_key=None, protocol='http'): self.account_name = os.environ[AZURE_STORAGE_ACCOUNT] if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY): self.account_key = os.environ[AZURE_STORAGE_ACCESS_KEY] - else: - self.account_name = account_name - self.account_key = account_key if not self.account_name or not self.account_key: raise WindowsAzureError(azure._ERROR_STORAGE_MISSING_INFO) self.x_ms_version = X_MS_VERSION - self._httpclient = _HTTPClient(service_instance=self, account_key=account_key, account_name=account_name, x_ms_version=self.x_ms_version, protocol=protocol) + self._httpclient = _HTTPClient(service_instance=self, account_key=self.account_key, account_name=self.account_name, x_ms_version=self.x_ms_version, protocol=protocol) self._batchclient = None self._filter = self._perform_request_worker @@ -111,6 +116,4 @@ def _perform_request(self, request): except HTTPError as e: _storage_error_handler(e) - if not resp: - return None return resp \ No newline at end of file diff --git a/src/azure/storage/tableservice.py b/src/azure/storage/tableservice.py index 722342756571..9de4858d7b9e 100644 --- a/src/azure/storage/tableservice.py +++ b/src/azure/storage/tableservice.py @@ -23,11 +23,11 @@ convert_entity_to_xml, _convert_response_to_entity, _convert_xml_to_entity, _sign_storage_table_request) from azure.http.batchclient import _BatchClient -from azure.http import HTTPRequest +from azure.http import HTTPRequest, HTTP_RESPONSE_NO_CONTENT from azure import (_validate_not_none, Feed, _convert_response_to_feeds, _str_or_none, _int_or_none, _get_request_body, _update_request_uri_query, - _dont_fail_on_exist, _dont_fail_not_exist, + _dont_fail_on_exist, _dont_fail_not_exist, WindowsAzureConflictError, WindowsAzureError, _parse_response, _convert_class_to_xml, _parse_response_for_dict, _parse_response_for_dict_prefix, _parse_response_for_dict_filter, @@ -90,7 +90,7 @@ def set_table_service_properties(self, storage_service_properties): return _parse_response_for_dict(response) - def query_tables(self, table_name = None, top=None): + def query_tables(self, table_name = None, top=None, next_table_name=None): ''' Returns a list of tables under the specified account. @@ -105,7 +105,10 @@ def query_tables(self, table_name = None, top=None): else: uri_part_table_name = "" request.path = '/Tables' + uri_part_table_name + '' - request.query = [('$top', _int_or_none(top))] + request.query = [ + ('$top', _int_or_none(top)), + ('NextTableName', _str_or_none(next_table_name)) + ] request.path, request.query = _update_request_uri_query_local_storage(request, self.use_local_storage) request.headers = _update_storage_table_header(request) response = self._perform_request(request) @@ -116,7 +119,9 @@ def create_table(self, table, fail_on_exist=False): ''' Creates a new table in the storage account. - table: name of the table to create. + table: name of the table to create. Table name may contain only alphanumeric characters + and cannot begin with a numeric character. It is case-insensitive and must be from + 3 to 63 characters long. fail_on_exist: specify whether throw exception when table exists. 
''' _validate_not_none('table', table) @@ -184,7 +189,7 @@ def get_entity(self, table_name, partition_key, row_key, select=''): return _convert_response_to_entity(response) - def query_entities(self, table_name, filter=None, select=None, top=None): + def query_entities(self, table_name, filter=None, select=None, top=None, next_partition_key=None, next_row_key=None): ''' Get entities in a table; includes the $filter and $select options. @@ -201,7 +206,9 @@ def query_entities(self, table_name, filter=None, select=None, top=None): request.query = [ ('$filter', _str_or_none(filter)), ('$select', _str_or_none(select)), - ('$top', _int_or_none(top)) + ('$top', _int_or_none(top)), + ('NextPartitionKey', _str_or_none(next_partition_key)), + ('NextRowKey', _str_or_none(next_row_key)) ] request.path, request.query = _update_request_uri_query_local_storage(request, self.use_local_storage) request.headers = _update_storage_table_header(request) @@ -229,6 +236,8 @@ def insert_entity(self, table_name, entity, content_type='application/atom+xml') request.headers = _update_storage_table_header(request) response = self._perform_request(request) + return _convert_response_to_entity(response) + def update_entity(self, table_name, partition_key, row_key, entity, content_type='application/atom+xml', if_match='*'): ''' Updates an existing entity in a table. The Update Entity operation replaces the entire @@ -257,6 +266,8 @@ def update_entity(self, table_name, partition_key, row_key, entity, content_type request.headers = _update_storage_table_header(request) response = self._perform_request(request) + return _parse_response_for_dict_filter(response, filter=['etag']) + def merge_entity(self, table_name, partition_key, row_key, entity, content_type='application/atom+xml', if_match='*'): ''' Updates an existing entity by updating the entity's properties. This operation does @@ -285,6 +296,8 @@ def merge_entity(self, table_name, partition_key, row_key, entity, content_type= request.headers = _update_storage_table_header(request) response = self._perform_request(request) + return _parse_response_for_dict_filter(response, filter=['etag']) + def delete_entity(self, table_name, partition_key, row_key, content_type='application/atom+xml', if_match='*'): ''' Deletes an existing entity in a table. @@ -338,7 +351,9 @@ def insert_or_replace_entity(self, table_name, partition_key, row_key, entity, c request.headers = _update_storage_table_header(request) response = self._perform_request(request) - def insert_or_merge_entity(self, table_name, partition_key, row_key, entity, content_type='application/atom+xml', if_match='*'): + return _parse_response_for_dict_filter(response, filter=['etag']) + + def insert_or_merge_entity(self, table_name, partition_key, row_key, entity, content_type='application/atom+xml'): ''' Merges an existing entity or inserts a new entity if it does not exist in the table. 
Because this operation can insert or update an entity, it is also known as an "upsert" @@ -358,15 +373,14 @@ def insert_or_merge_entity(self, table_name, partition_key, row_key, entity, con request.method = 'MERGE' request.host = _get_table_host(self.account_name, self.use_local_storage) request.path = '/' + str(table_name) + '(PartitionKey=\'' + str(partition_key) + '\',RowKey=\'' + str(row_key) + '\')' - request.headers = [ - ('Content-Type', _str_or_none(content_type)), - ('If-Match', _str_or_none(if_match)) - ] + request.headers = [('Content-Type', _str_or_none(content_type))] request.body = _get_request_body(convert_entity_to_xml(entity)) request.path, request.query = _update_request_uri_query_local_storage(request, self.use_local_storage) request.headers = _update_storage_table_header(request) response = self._perform_request(request) + return _parse_response_for_dict_filter(response, filter=['etag']) + def _perform_request_worker(self, request): auth = _sign_storage_table_request(request, diff --git a/src/codegenerator/blob_input.txt b/src/codegenerator/blob_input.txt index 550b5dec6939..55b52f616939 100644 --- a/src/codegenerator/blob_input.txt +++ b/src/codegenerator/blob_input.txt @@ -21,7 +21,8 @@ marker: Optional. A string value that identifies the portion of the list to be r with the next list operation. maxresults: Optional. Specifies the maximum number of containers to return. include: Optional. Include this parameter to specify that the container's metadata be - returned as part of the response body. + returned as part of the response body. set this parameter to string 'metadata' to + get container's metadata. [return] ContainerEnumResults [url] @@ -243,7 +244,7 @@ container_name: the name of container to get the blob blob_name: the name of blob x_ms_range: Optional. Return only the bytes of the blob in the specified range. [return] -str +BlobResult [url] GET http://<account-name>.blob.core.windows.net/<container-name>/<blob-name> [query] @@ -320,6 +321,8 @@ x_ms_lease_id: Optional. If this header is specified, the operation will be perf 1. The blob's lease is currently active 2. The lease ID specified in the request matches that of the blob. 
[return] +dict +filter=['x-ms-snapshot', 'etag', 'last-modified'] [url] PUT http://<account-name>.blob.core.windows.net/<container-name>/<blob-name>?comp=snapshot [query] diff --git a/src/codegenerator/codegenerator.py b/src/codegenerator/codegenerator.py index 587df94d6dae..eb6dc6e4d814 100644 --- a/src/codegenerator/codegenerator.py +++ b/src/codegenerator/codegenerator.py @@ -40,8 +40,8 @@ def to_legalname(name): if ch.isupper(): legalname += '_' legalname += ch - legalname = legalname.replace('__', '_').replace('_m_d5', '_md5') - return legalname.lower() + legalname = legalname.replace('__', '_').lower().replace('_m_d5', '_md5') + return legalname def normalize_xml(xmlstr): if xmlstr: @@ -99,7 +99,7 @@ def output_import(output_file, class_name): output_str += 'from azure.storage import *\n' output_str += 'from azure.storage.storageclient import _StorageClient\n' if 'Blob' in class_name: - output_str += 'from azure.storage import (_update_storage_blob_header,\n' + output_str += 'from azure.storage import (_update_storage_blob_header, _create_blob_result,\n' output_str += indent*8 + 'convert_block_list_to_xml, convert_response_to_block_list) \n' elif 'Queue' in class_name: output_str += 'from azure.storage import (_update_storage_queue_header)\n' @@ -115,7 +115,7 @@ def output_import(output_file, class_name): output_str += 'from azure import (_validate_not_none, Feed,\n' output_str += indent*8 + '_convert_response_to_feeds, _str_or_none, _int_or_none,\n' output_str += indent*8 + '_get_request_body, _update_request_uri_query, \n' - output_str += indent*8 + '_dont_fail_on_exist, _dont_fail_not_exist, \n' + output_str += indent*8 + '_dont_fail_on_exist, _dont_fail_not_exist, WindowsAzureConflictError, \n' output_str += indent*8 + 'WindowsAzureError, _parse_response, _convert_class_to_xml, \n' output_str += indent*8 + '_parse_response_for_dict, _parse_response_for_dict_prefix, \n' output_str += indent*8 + '_parse_response_for_dict_filter, \n' @@ -260,7 +260,7 @@ def output_list(list_name, request_list, validate_conversions): return output_list_str -def output_method_body(return_type, method_params, uri_param, req_protocol, req_host, host_param, req_method, req_uri, req_query, req_header, req_body, req_param): +def output_method_body(method_name, return_type, method_params, uri_param, req_protocol, req_host, host_param, req_method, req_uri, req_query, req_header, req_body, req_param): indent = ' ' output_body = ''.join([indent*2, 'request = HTTPRequest()\n']) @@ -341,16 +341,32 @@ def output_method_body(return_type, method_params, uri_param, req_protocol, req_ for name, value in method_params: if 'fail_on_exist' in name: - output_body += indent*2 + 'if not ' + name + ':\n' - output_body += indent*3 + 'try:\n' - output_body += ''.join([indent*4, 'self._perform_request(request)\n']) - output_body += ''.join([indent*4, 'return True\n']) - output_body += indent*3 + 'except WindowsAzureError as e:\n' - output_body += indent*4 + '_dont_fail_on_exist(e)\n' - output_body += indent*4 + 'return False\n' - output_body += indent*2 + 'else:\n' - output_body += ''.join([indent*3, 'self._perform_request(request)\n']) - output_body += ''.join([indent*3, 'return True\n\n']) + if method_name == 'create_queue' and 'queue.core' in req_host: #QueueService create_queue + output_body += indent*2 + 'if not ' + name + ':\n' + output_body += indent*3 + 'try:\n' + output_body += ''.join([indent*4, 'response = self._perform_request(request)\n']) + output_body += ''.join([indent*4, 'if response.status == 204:\n']) + 
output_body += ''.join([indent*5, 'return False\n']) + output_body += ''.join([indent*4, 'return True\n']) + output_body += indent*3 + 'except WindowsAzureError as e:\n' + output_body += indent*4 + '_dont_fail_on_exist(e)\n' + output_body += indent*4 + 'return False\n' + output_body += indent*2 + 'else:\n' + output_body += ''.join([indent*3, 'response = self._perform_request(request)\n']) + output_body += ''.join([indent*3, 'if response.status == 204:\n']) + output_body += ''.join([indent*4, 'raise WindowsAzureConflictError(azure._ERROR_CONFLICT)\n']) + output_body += ''.join([indent*3, 'return True\n\n']) + else: + output_body += indent*2 + 'if not ' + name + ':\n' + output_body += indent*3 + 'try:\n' + output_body += ''.join([indent*4, 'self._perform_request(request)\n']) + output_body += ''.join([indent*4, 'return True\n']) + output_body += indent*3 + 'except WindowsAzureError as e:\n' + output_body += indent*4 + '_dont_fail_on_exist(e)\n' + output_body += indent*4 + 'return False\n' + output_body += indent*2 + 'else:\n' + output_body += ''.join([indent*3, 'self._perform_request(request)\n']) + output_body += ''.join([indent*3, 'return True\n\n']) break elif 'fail_not_exist' in name: output_body += indent*2 + 'if not ' + name + ':\n' @@ -383,13 +399,15 @@ def output_method_body(return_type, method_params, uri_param, req_protocol, req_ elif return_type == 'PageList': output_body += indent*2 + 'return _parse_simple_list(response, PageList, PageRange, "page_ranges")' else: - if return_type == 'Message': + if return_type == 'BlobResult': + output_body += indent*2 + 'return _create_blob_result(response)\n\n' + elif return_type == 'Message': output_body += indent*2 + 'return _create_message(response, self)\n\n' elif return_type == 'str': output_body += indent*2 + 'return response.body\n\n' elif return_type == 'BlobBlockList': output_body += indent*2 + 'return convert_response_to_block_list(response)\n\n' - elif 'Feed' in return_type: + elif 'Feed' in return_type: for name in ['table', 'entity', 'topic', 'subscription', 'queue', 'rule']: if name +'\'),' in return_type: convert_func = '_convert_xml_to_' + name @@ -412,7 +430,7 @@ def output_method(output_file, method_name, method_params, method_comment, retur output_str += output_method_def(method_name, method_params, uri_param, req_param, req_query, req_header) output_str += output_method_comments(method_comment, req_param, req_query, req_header) output_str += output_method_validates(uri_param, req_param, req_query, req_header) - output_str += output_method_body(return_type, method_params, uri_param, req_protocol, req_host, host_param, req_method, req_uri, req_query, req_header, req_body, req_param) + output_str += output_method_body(method_name, return_type, method_params, uri_param, req_protocol, req_host, host_param, req_method, req_uri, req_query, req_header, req_body, req_param) output_file.write(output_str) @@ -686,20 +704,32 @@ def auto_codegen(source_filename, output_filename='output.py'): auto_codegen('queue_input.txt', '../azure/storage/queueservice.py') auto_codegen('servicebus_input.txt', '../azure/servicebus/servicebusservice.py') - def add_license(license_file_name, output_file_name): - license_file = open(license_file_name, 'r') + def add_license(license_str, output_file_name): output_file = open(output_file_name, 'r') content = output_file.read() - license_txt = license_file.read() - license_file.close() output_file.close() output_file = open(output_file_name, 'w') - output_file.write(license_txt) + 
output_file.write(license_str) output_file.write(content) output_file.close() + license_str = '''#------------------------------------------------------------------------- +# Copyright 2011 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +''' - add_license('license.txt', '../azure/storage/blobservice.py') - add_license('license.txt', '../azure/storage/tableservice.py') - add_license('license.txt', '../azure/storage/queueservice.py') - add_license('license.txt', '../azure/servicebus/servicebusservice.py') \ No newline at end of file + add_license(license_str, '../azure/storage/blobservice.py') + add_license(license_str, '../azure/storage/tableservice.py') + add_license(license_str, '../azure/storage/queueservice.py') + add_license(license_str, '../azure/servicebus/servicebusservice.py') \ No newline at end of file diff --git a/src/codegenerator/servicebus_input.txt b/src/codegenerator/servicebus_input.txt index 226fbf12b640..8aac7ac4edd5 100644 --- a/src/codegenerator/servicebus_input.txt +++ b/src/codegenerator/servicebus_input.txt @@ -470,8 +470,6 @@ def _perform_request(self, request): except HTTPError as e: return _service_bus_error_handler(e) - if not resp: - return None return resp [end] diff --git a/src/codegenerator/table_input.txt b/src/codegenerator/table_input.txt index 5cb5c124e67c..be01432922c8 100644 --- a/src/codegenerator/table_input.txt +++ b/src/codegenerator/table_input.txt @@ -46,13 +46,16 @@ top: the maximum number of tables to return GET http://<account-name>.table.core.windows.net/Tables<?table_name:('[table_name]')> [query] $top= +NextTableName= [method] create_table [comment] Creates a new table in the storage account. -table: name of the table to create. +table: name of the table to create. Table name may contain only alphanumeric characters + and cannot begin with a numeric character. It is case-insensitive and must be from + 3 to 63 characters long. fail_on_exist: specify whether throw exception when table exists. [params] fail_on_exist=False @@ -105,6 +108,8 @@ GET http://<account-name>.table.core.windows.net/<table-name>() $filter= $select= $top= +NextPartitionKey= +NextRowKey= [method] insert_entity @@ -113,6 +118,7 @@ Inserts a new entity into a table. entity: Required. The entity object to insert. Could be a dict format or entity object. [return] +Feed('entity') [url] POST http://<account-name>.table.core.windows.net/<table-name> [requestheader] @@ -130,6 +136,8 @@ entity: Required. The entity object to insert. Could be a dict format or entity partition_key: PartitionKey of the entity. row_key: RowKey of the entity. [return] +dict +filter=['etag'] [url] PUT http://<account-name>.table.core.windows.net/<table-name>(PartitionKey=\'<partition-key>\',RowKey=\'<row-key>\') [requestheader] @@ -148,6 +156,8 @@ entity: Required. The entity object to insert. Can be a dict format or entity ob partition_key: PartitionKey of the entity. row_key: RowKey of the entity. 
[return] +dict +filter=['etag'] [url] MERGE http://<account-name>.table.core.windows.net/<table-name>(PartitionKey=\'<partition-key>\',RowKey=\'<row-key>\') [requestheader] @@ -183,6 +193,8 @@ entity: Required. The entity object to insert. Could be a dict format or entity partition_key: PartitionKey of the entity. row_key: RowKey of the entity. [return] +dict +filter=['etag'] [url] PUT http://<account-name>.table.core.windows.net/<table-name>(PartitionKey=\'<partition-key>\',RowKey=\'<row-key>\') [requestheader] @@ -201,11 +213,12 @@ entity: Required. The entity object to insert. Could be a dict format or entity partition_key: PartitionKey of the entity. row_key: RowKey of the entity. [return] +dict +filter=['etag'] [url] MERGE http://<account-name>.table.core.windows.net/<table-name>(PartitionKey=\'<partition-key>\',RowKey=\'<row-key>\') [requestheader] Content-Type=application/atom+xml;required:application/atom+xml|#this is required and has to be set to application/atom+xml -If-Match=* [requestbody] feed:entity;required:feed diff --git a/test/azuretest.pyproj b/test/azuretest.pyproj index 2fe4cb6bcb84..d787ba2083c6 100644 --- a/test/azuretest.pyproj +++ b/test/azuretest.pyproj @@ -6,7 +6,7 @@ <ProjectGuid>{c0742a2d-4862-40e4-8a28-036eecdbc614}</ProjectGuid> <ProjectHome> </ProjectHome> - <StartupFile>azuretest\test_tableservice.py</StartupFile> + <StartupFile>azuretest\test_servicebusservice.py</StartupFile> <WorkingDirectory>.</WorkingDirectory> <OutputPath>.</OutputPath> <Name>azuretest</Name> @@ -19,8 +19,13 @@ <ClusterRunEnvironment>localhost/1/Core/</ClusterRunEnvironment> <ClusterTargetPlatform>X86</ClusterTargetPlatform> <IsWindowsApplication>False</IsWindowsApplication> - <InterpreterId>2af0f10d-7135-4994-9156-5d01c9c11b7e</InterpreterId> + <InterpreterId>9a7a9026-48c1-4688-9d5d-e5699d47d074</InterpreterId> <InterpreterVersion>2.7</InterpreterVersion> + <SearchPath>C:\Users\a-huvalo\Documents\Visual Studio 2010\Projects\PTVS\Open_Source\Incubation\windowsazure\src\</SearchPath> + <SccProjectName>$/TCWCS/Python/Main/Open_Source/Incubation/windowsazure/test</SccProjectName> + <SccProvider>{4CA58AB2-18FA-4F8D-95D4-32DDF27D184C}</SccProvider> + <SccAuxPath>http://tcvstf:8080/tfs/tc</SccAuxPath> + <SccLocalPath>.</SccLocalPath> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)' == 'Debug' "> <DebugSymbols>true</DebugSymbols> @@ -34,14 +39,23 @@ <Folder Include="azuretest" /> </ItemGroup> <ItemGroup> + <Compile Include="azuretest\clean.py" /> + <Compile Include="azuretest\doctest_blobservice.py" /> + <Compile Include="azuretest\doctest_queueservice.py" /> + <Compile Include="azuretest\doctest_servicebusservicequeue.py" /> + <Compile Include="azuretest\doctest_servicebusservicetopic.py" /> + <Compile Include="azuretest\doctest_tableservice.py" /> <Compile Include="azuretest\test_blobservice.py" /> + <Compile Include="azuretest\test_cloudstorageaccount.py" /> <Compile Include="azuretest\test_queueservice.py" /> + <Compile Include="azuretest\test_sharedaccesssignature.py" /> <Compile Include="azuretest\test_tableservice.py" /> <Compile Include="azuretest\test_servicebusservice.py" /> <Compile Include="azuretest\util.py" /> <Compile Include="azuretest\__init__.py" /> </ItemGroup> <ItemGroup> + <Content Include="run.bash" /> <Content Include="run.bat" /> </ItemGroup> <Import Project="$(MSBuildToolsPath)\Microsoft.Common.targets" /> diff --git a/test/azuretest/clean.py b/test/azuretest/clean.py new file mode 100644 index 000000000000..76035675e512 --- /dev/null +++ 
b/test/azuretest/clean.py @@ -0,0 +1,78 @@ +#------------------------------------------------------------------------- +# Copyright 2011 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- + +from azure import * +from azure.storage import * +from azure.servicebus import * +from azuretest.util import * + +print('WARNING!!!') +print('') +print('This program cleans the storage account and the service namespace specified') +print('by the unit test credentials file (windowsazurecredentials.json) located in') +print('your home directory.') +print('') +print('You should not run this program while tests are running as this will') +print('interfere with the tests.') +print('') +print('The following will be deleted from the storage account:') +print(' - All containers') +print(' - All tables') +print(' - All queues') +print('') +print('The following will be deleted from the service namespace:') +print(' - All queues') +print(' - All topics') +print('') +print('Enter YES to proceed, or anything else to cancel') +print('') + +input = raw_input('>') +if input == 'YES': + print('Cleaning storage account...') + + bc = BlobService(credentials.getStorageServicesName(), + credentials.getStorageServicesKey()) + + ts = TableService(credentials.getStorageServicesName(), + credentials.getStorageServicesKey()) + + qs = QueueService(credentials.getStorageServicesName(), + credentials.getStorageServicesKey()) + + for container in bc.list_containers(): + bc.delete_container(container.name) + + for table in ts.query_tables(): + ts.delete_table(table.name) + + for queue in qs.list_queues(): + qs.delete_queue(queue.name) + + print('Cleaning service namespace...') + + sbs = ServiceBusService(credentials.getServiceBusNamespace(), + credentials.getServiceBusKey(), + 'owner') + + for queue in sbs.list_queues(): + sbs.delete_queue(queue.name) + + for topic in sbs.list_topics(): + sbs.delete_topic(topic.name) + + print('Done.') +else: + print('Canceled.') diff --git a/test/azuretest/doctest_blobservice.py b/test/azuretest/doctest_blobservice.py new file mode 100644 index 000000000000..ca1f29456c90 --- /dev/null +++ b/test/azuretest/doctest_blobservice.py @@ -0,0 +1,65 @@ +#------------------------------------------------------------------------- +# Copyright 2011 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#--------------------------------------------------------------------------
+
+"""
+How to: Create a Container
+--------------------------
+>>> from azure.storage import *
+>>> blob_service = BlobService(name, key)
+>>> blob_service.create_container('mycontainer')
+True
+
+>>> blob_service.create_container('mycontainer2', x_ms_blob_public_access='container')
+True
+
+>>> blob_service.set_container_acl('mycontainer', x_ms_blob_public_access='container')
+
+How to: Upload a Blob into a Container
+--------------------------------------
+>>> myblob = 'hello blob'
+>>> blob_service.put_blob('mycontainer', 'myblob', myblob, x_ms_blob_type='BlockBlob')
+
+How to: List the Blobs in a Container
+-------------------------------------
+>>> blobs = blob_service.list_blobs('mycontainer')
+>>> for blob in blobs:
+...     print(blob.name)
+myblob
+
+How to: Download Blobs
+----------------------
+>>> blob = blob_service.get_blob('mycontainer', 'myblob')
+>>> print(blob)
+hello blob
+
+How to: Delete a Blob
+---------------------
+>>> blob_service.delete_blob('mycontainer', 'myblob')
+
+>>> blob_service.delete_container('mycontainer')
+True
+
+>>> blob_service.delete_container('mycontainer2')
+True
+
+"""
+from azuretest.util import *
+
+name = credentials.getStorageServicesName()
+key = credentials.getStorageServicesKey()
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
diff --git a/test/azuretest/doctest_queueservice.py b/test/azuretest/doctest_queueservice.py
new file mode 100644
index 000000000000..5c1cb4e8cb90
--- /dev/null
+++ b/test/azuretest/doctest_queueservice.py
@@ -0,0 +1,81 @@
+#-------------------------------------------------------------------------
+# Copyright 2011 Microsoft Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------------
+
+"""
+How To: Create a Queue
+----------------------
+>>> from azure.storage import *
+>>> queue_service = QueueService(name, key)
+>>> queue_service.create_queue('taskqueue')
+True
+
+How To: Insert a Message into a Queue
+-------------------------------------
+>>> queue_service.put_message('taskqueue', 'Hello World')
+
+How To: Peek at the Next Message
+--------------------------------
+>>> messages = queue_service.peek_messages('taskqueue')
+>>> for message in messages:
+...     print(message.message_text)
+...
+Hello World
+
+How To: Dequeue the Next Message
+--------------------------------
+>>> messages = queue_service.get_messages('taskqueue')
+>>> for message in messages:
+...     print(message.message_text)
+...     queue_service.delete_message('taskqueue', message.message_id, message.pop_receipt)
+Hello World
+
+How To: Change the Contents of a Queued Message
+-----------------------------------------------
+>>> queue_service.put_message('taskqueue', 'Hello World')
+>>> messages = queue_service.get_messages('taskqueue')
+>>> for message in messages:
+...
res = queue_service.update_message('taskqueue', message.message_id, 'Hello World Again', message.pop_receipt, 0) + +How To: Additional Options for Dequeuing Messages +------------------------------------------------- +>>> queue_service.put_message('taskqueue', 'Hello World') +>>> messages = queue_service.get_messages('taskqueue', numofmessages=16, visibilitytimeout=5*60) +>>> for message in messages: +... print(message.message_text) +... queue_service.delete_message('taskqueue', message.message_id, message.pop_receipt) +Hello World Again +Hello World + +How To: Get the Queue Length +---------------------------- +>>> queue_metadata = queue_service.get_queue_metadata('taskqueue') +>>> count = queue_metadata['x-ms-approximate-messages-count'] +>>> count +u'0' + +How To: Delete a Queue +---------------------- +>>> queue_service.delete_queue('taskqueue') +True + +""" +from azuretest.util import * + +name = credentials.getStorageServicesName() +key = credentials.getStorageServicesKey() + +if __name__ == "__main__": + import doctest + doctest.testmod() diff --git a/test/azuretest/doctest_servicebusservicequeue.py b/test/azuretest/doctest_servicebusservicequeue.py new file mode 100644 index 000000000000..984170d68d90 --- /dev/null +++ b/test/azuretest/doctest_servicebusservicequeue.py @@ -0,0 +1,64 @@ +#------------------------------------------------------------------------- +# Copyright 2011 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#--------------------------------------------------------------------------
+
+"""
+How To: Create a Queue
+----------------------
+>>> from azure.servicebus import *
+>>> bus_service = ServiceBusService(ns, key, 'owner')
+>>> bus_service.create_queue('taskqueue')
+True
+
+>>> queue_options = Queue()
+>>> queue_options.max_size_in_megabytes = '5120'
+>>> queue_options.default_message_time_to_live = 'PT1M'
+>>> bus_service.create_queue('taskqueue2', queue_options)
+True
+
+How to Send Messages to a Queue
+-------------------------------
+>>> msg = Message('Test Message')
+>>> bus_service.send_queue_message('taskqueue', msg)
+
+How to Receive Messages from a Queue
+------------------------------------
+>>> msg = bus_service.receive_queue_message('taskqueue')
+>>> print(msg.body)
+Test Message
+
+>>> msg = Message('Test Message')
+>>> bus_service.send_queue_message('taskqueue', msg)
+
+>>> msg = bus_service.receive_queue_message('taskqueue', peek_lock=True)
+>>> print(msg.body)
+Test Message
+>>> msg.delete()
+
+
+>>> bus_service.delete_queue('taskqueue')
+True
+
+>>> bus_service.delete_queue('taskqueue2')
+True
+
+"""
+from azuretest.util import *
+
+ns = credentials.getServiceBusNamespace()
+key = credentials.getServiceBusKey()
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
diff --git a/test/azuretest/doctest_servicebusservicetopic.py b/test/azuretest/doctest_servicebusservicetopic.py
new file mode 100644
index 000000000000..d6f502c51542
--- /dev/null
+++ b/test/azuretest/doctest_servicebusservicetopic.py
@@ -0,0 +1,95 @@
+#-------------------------------------------------------------------------
+# Copyright 2011 Microsoft Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------------
+
+"""
+How to Create a Topic
+---------------------
+>>> from azure.servicebus import *
+>>> bus_service = ServiceBusService(ns, key, 'owner')
+>>> bus_service.create_topic('mytopic')
+True
+
+>>> topic_options = Topic()
+>>> topic_options.max_size_in_megabytes = '5120'
+>>> topic_options.default_message_time_to_live = 'PT1M'
+>>> bus_service.create_topic('mytopic2', topic_options)
+True
+
+How to Create Subscriptions
+---------------------------
+>>> bus_service.create_subscription('mytopic', 'AllMessages')
+True
+
+>>> bus_service.create_subscription('mytopic', 'HighMessages')
+True
+
+>>> rule = Rule()
+>>> rule.filter_type = 'SqlFilter'
+>>> rule.filter_expression = 'messagenumber > 3'
+>>> bus_service.create_rule('mytopic', 'HighMessages', 'HighMessageFilter', rule)
+True
+
+>>> bus_service.delete_rule('mytopic', 'HighMessages', DEFAULT_RULE_NAME)
+True
+
+>>> bus_service.create_subscription('mytopic', 'LowMessages')
+True
+
+>>> rule = Rule()
+>>> rule.filter_type = 'SqlFilter'
+>>> rule.filter_expression = 'messagenumber <= 3'
+>>> bus_service.create_rule('mytopic', 'LowMessages', 'LowMessageFilter', rule)
+True
+
+>>> bus_service.delete_rule('mytopic', 'LowMessages', DEFAULT_RULE_NAME)
+True
+
+How to Send Messages to a Topic
+-------------------------------
+>>> for i in range(5):
+...     msg = Message('Msg ' + str(i), custom_properties={'messagenumber':i})
+...     bus_service.send_topic_message('mytopic', msg)
+
+How to Receive Messages from a Subscription
+-------------------------------------------
+>>> msg = bus_service.receive_subscription_message('mytopic', 'LowMessages')
+>>> print(msg.body)
+Msg 0
+
+>>> msg = bus_service.receive_subscription_message('mytopic', 'LowMessages', peek_lock=True)
+>>> print(msg.body)
+Msg 1
+>>> msg.delete()
+
+How to Delete Topics and Subscriptions
+--------------------------------------
+>>> bus_service.delete_subscription('mytopic', 'HighMessages')
+True
+
+>>> bus_service.delete_topic('mytopic')
+True
+
+>>> bus_service.delete_topic('mytopic2')
+True
+
+"""
+from azuretest.util import *
+
+ns = credentials.getServiceBusNamespace()
+key = credentials.getServiceBusKey()
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
diff --git a/test/azuretest/doctest_tableservice.py b/test/azuretest/doctest_tableservice.py
new file mode 100644
index 000000000000..b93b0274ae74
--- /dev/null
+++ b/test/azuretest/doctest_tableservice.py
@@ -0,0 +1,116 @@
+#-------------------------------------------------------------------------
+# Copyright 2011 Microsoft Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#-------------------------------------------------------------------------- + +""" +How To: Create a Table +---------------------- +>>> from azure.storage import * +>>> table_service = TableService(name, key) +>>> table_service.create_table('tasktable') +True + +How to Add an Entity to a Table +------------------------------- +>>> task = {'PartitionKey': 'tasksSeattle', 'RowKey': '1', 'description' : 'Take out the trash', 'priority' : 200} +>>> table_service.insert_entity('tasktable', task) + +>>> task = Entity() +>>> task.PartitionKey = 'tasksSeattle' +>>> task.RowKey = '2' +>>> task.description = 'Wash the car' +>>> task.priority = 100 +>>> table_service.insert_entity('tasktable', task) + +How to Update an Entity +----------------------- +>>> task = {'description' : 'Take out the garbage', 'priority' : 250} +>>> table_service.update_entity('tasktable', 'tasksSeattle', '1', task) + +>>> task = {'description' : 'Take out the garbage again', 'priority' : 250} +>>> table_service.insert_or_replace_entity('tasktable', 'tasksSeattle', '1', task) + +>>> task = {'description' : 'Buy detergent', 'priority' : 300} +>>> table_service.insert_or_replace_entity('tasktable', 'tasksSeattle', '3', task) + + +How to Change a Group of Entities +--------------------------------- +>>> task10 = {'PartitionKey': 'tasksSeattle', 'RowKey': '10', 'description' : 'Go grocery shopping', 'priority' : 400} +>>> task11 = {'PartitionKey': 'tasksSeattle', 'RowKey': '11', 'description' : 'Clean the bathroom', 'priority' : 100} +>>> table_service.begin_batch() +>>> table_service.insert_entity('tasktable', task10) +>>> table_service.insert_entity('tasktable', task11) +>>> table_service.commit_batch() + +How to Query for an Entity +-------------------------- +>>> task = table_service.get_entity('tasktable', 'tasksSeattle', '1') +>>> print(task.description) +Take out the garbage again +>>> print(task.priority) +250 + +>>> task = table_service.get_entity('tasktable', 'tasksSeattle', '10') +>>> print(task.description) +Go grocery shopping +>>> print(task.priority) +400 + +How to Query a Set of Entities +------------------------------ +>>> tasks = table_service.query_entities('tasktable', "PartitionKey eq 'tasksSeattle'") +>>> for task in tasks: +... print(task.description) +... print(task.priority) +Take out the garbage again +250 +Go grocery shopping +400 +Clean the bathroom +100 +Wash the car +100 +Buy detergent +300 + +How to Query a Subset of Entity Properties +------------------------------------------ +>>> tasks = table_service.query_entities('tasktable', "PartitionKey eq 'tasksSeattle'", 'description') +>>> for task in tasks: +... 
print(task.description) +Take out the garbage again +Go grocery shopping +Clean the bathroom +Wash the car +Buy detergent + +How to Delete an Entity +----------------------- +>>> table_service.delete_entity('tasktable', 'tasksSeattle', '1') + +How to Delete a Table +--------------------- +>>> table_service.delete_table('tasktable') +True + +""" +from azuretest.util import * + +name = credentials.getStorageServicesName() +key = credentials.getStorageServicesKey() + +if __name__ == "__main__": + import doctest + doctest.testmod() diff --git a/test/azuretest/test_blobservice.py b/test/azuretest/test_blobservice.py index 0d37c9747edf..a04e5fca19c1 100644 --- a/test/azuretest/test_blobservice.py +++ b/test/azuretest/test_blobservice.py @@ -14,28 +14,27 @@ #-------------------------------------------------------------------------- from azure.storage.blobservice import * -from azure.storage import Metrics, BlockList +from azure.storage import Metrics +from azure.storage.storageclient import AZURE_STORAGE_ACCESS_KEY, AZURE_STORAGE_ACCOUNT, EMULATED, DEV_ACCOUNT_NAME, DEV_ACCOUNT_KEY from azure import WindowsAzureError from azuretest.util import * from azure.http import HTTPRequest, HTTPResponse import unittest -import time #------------------------------------------------------------------------------ -class BlobServiceTest(unittest.TestCase): +class BlobServiceTest(AzureTestCase): def setUp(self): self.bc = BlobService(account_name=credentials.getStorageServicesName(), account_key=credentials.getStorageServicesKey()) - # TODO: it may be overkill to use the machine name from - # getUniqueTestRunID, current time may be unique enough __uid = getUniqueTestRunID() container_base_name = u'mytestcontainer%s' % (__uid) self.container_name = getUniqueNameBasedOnCurrentTime(container_base_name) + self.additional_container_names = [] def tearDown(self): self.cleanup() @@ -46,26 +45,12 @@ def cleanup(self): self.bc.delete_container(self.container_name) except: pass - #--Helpers----------------------------------------------------------------- - - # TODO: move this function out of here so other tests can use them - # TODO: find out how to import/use safe_repr instead repr - def assertNamedItemInContainer(self, container, item_name, msg=None): - for item in container: - if item.name == item_name: - return - - standardMsg = '%s not found in %s' % (repr(item_name), repr(container)) - self.fail(self._formatMessage(msg, standardMsg)) - - # TODO: move this function out of here so other tests can use them - # TODO: find out how to import/use safe_repr instead repr - def assertNamedItemNotInContainer(self, container, item_name, msg=None): - for item in container: - if item.name == item_name: - standardMsg = '%s unexpectedly found in %s' % (repr(item_name), repr(container)) - self.fail(self._formatMessage(msg, standardMsg)) + for name in self.additional_container_names: + try: + self.bc.delete_container(name) + except: pass + #--Helpers----------------------------------------------------------------- def _create_container(self, container_name): self.bc.create_container(container_name, None, None, True) @@ -79,6 +64,92 @@ def _create_container_and_page_blob(self, container_name, blob_name, content_len resp = self.bc.put_blob(self.container_name, blob_name, '', 'PageBlob', x_ms_blob_content_length=str(content_length)) self.assertIsNone(resp) + #--Test cases for blob service -------------------------------------------- + def test_create_blob_service_missing_arguments(self): + # Arrange + if 
os.environ.has_key(AZURE_STORAGE_ACCOUNT): + del os.environ[AZURE_STORAGE_ACCOUNT] + if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY): + del os.environ[AZURE_STORAGE_ACCESS_KEY] + if os.environ.has_key(EMULATED): + del os.environ[EMULATED] + + # Act + with self.assertRaises(WindowsAzureError): + bs = BlobService() + + # Assert + + def test_create_blob_service_env_variables(self): + # Arrange + os.environ[AZURE_STORAGE_ACCOUNT] = credentials.getStorageServicesName() + os.environ[AZURE_STORAGE_ACCESS_KEY] = credentials.getStorageServicesKey() + + # Act + bs = BlobService() + + if os.environ.has_key(AZURE_STORAGE_ACCOUNT): + del os.environ[AZURE_STORAGE_ACCOUNT] + if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY): + del os.environ[AZURE_STORAGE_ACCESS_KEY] + + # Assert + self.assertIsNotNone(bs) + self.assertEquals(bs.account_name, credentials.getStorageServicesName()) + self.assertEquals(bs.account_key, credentials.getStorageServicesKey()) + self.assertEquals(bs.is_emulated, False) + + def test_create_blob_service_emulated_true(self): + # Arrange + os.environ[EMULATED] = 'true' + + # Act + bs = BlobService() + + if os.environ.has_key(EMULATED): + del os.environ[EMULATED] + + # Assert + self.assertIsNotNone(bs) + self.assertEquals(bs.account_name, DEV_ACCOUNT_NAME) + self.assertEquals(bs.account_key, DEV_ACCOUNT_KEY) + self.assertEquals(bs.is_emulated, True) + + def test_create_blob_service_emulated_false(self): + # Arrange + os.environ[EMULATED] = 'false' + + # Act + with self.assertRaises(WindowsAzureError): + bs = BlobService() + + if os.environ.has_key(EMULATED): + del os.environ[EMULATED] + + # Assert + + def test_create_blob_service_emulated_false_env_variables(self): + # Arrange + os.environ[EMULATED] = 'false' + os.environ[AZURE_STORAGE_ACCOUNT] = credentials.getStorageServicesName() + os.environ[AZURE_STORAGE_ACCESS_KEY] = credentials.getStorageServicesKey() + + # Act + bs = BlobService() + + if os.environ.has_key(EMULATED): + del os.environ[EMULATED] + if os.environ.has_key(AZURE_STORAGE_ACCOUNT): + del os.environ[AZURE_STORAGE_ACCOUNT] + if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY): + del os.environ[AZURE_STORAGE_ACCESS_KEY] + + # Assert + self.assertIsNotNone(bs) + self.assertEquals(bs.account_name, credentials.getStorageServicesName()) + self.assertEquals(bs.account_key, credentials.getStorageServicesKey()) + self.assertEquals(bs.is_emulated, False) + #--Test cases for containers ----------------------------------------- def test_create_container_no_options(self): # Arrange @@ -98,6 +169,17 @@ def test_create_container_no_options_fail_on_exist(self): # Assert self.assertTrue(created) + def test_create_container_with_already_existing_container(self): + # Arrange + + # Act + created1 = self.bc.create_container(self.container_name) + created2 = self.bc.create_container(self.container_name) + + # Assert + self.assertTrue(created1) + self.assertFalse(created2) + def test_create_container_with_already_existing_container_fail_on_exist(self): # Arrange @@ -155,8 +237,63 @@ def test_list_containers_no_options(self): # Assert self.assertIsNotNone(containers) + self.assertGreaterEqual(len(containers), 1) + self.assertIsNotNone(containers[0]) self.assertNamedItemInContainer(containers, self.container_name) + def test_list_containers_with_prefix(self): + # Arrange + self.bc.create_container(self.container_name) + + # Act + containers = self.bc.list_containers(self.container_name) + + # Assert + self.assertIsNotNone(containers) + self.assertEqual(len(containers), 1) + 
self.assertIsNotNone(containers[0]) + self.assertEqual(containers[0].name, self.container_name) + self.assertIsNone(containers[0].metadata); + + def test_list_containers_with_include_metadata(self): + # Arrange + self.bc.create_container(self.container_name) + resp = self.bc.set_container_metadata(self.container_name, {'hello':'world', 'bar':'43'}) + + # Act + containers = self.bc.list_containers(self.container_name, None, None, 'metadata') + + # Assert + self.assertIsNotNone(containers) + self.assertGreaterEqual(len(containers), 1) + self.assertIsNotNone(containers[0]) + self.assertNamedItemInContainer(containers, self.container_name) + self.assertEqual(containers[0].metadata['hello'], 'world') + self.assertEqual(containers[0].metadata['bar'], '43') + + def test_list_containers_with_maxresults_and_marker(self): + # Arrange + self.additional_container_names = [self.container_name + 'a', + self.container_name + 'b', + self.container_name + 'c', + self.container_name + 'd'] + for name in self.additional_container_names: + self.bc.create_container(name) + + # Act + containers1 = self.bc.list_containers(self.container_name, None, 2) + containers2 = self.bc.list_containers(self.container_name, containers1.next_marker, 2) + + # Assert + self.assertIsNotNone(containers1) + self.assertEqual(len(containers1), 2) + self.assertNamedItemInContainer(containers1, self.container_name + 'a') + self.assertNamedItemInContainer(containers1, self.container_name + 'b') + self.assertIsNotNone(containers2) + self.assertEqual(len(containers2), 2) + self.assertNamedItemInContainer(containers2, self.container_name + 'c') + self.assertNamedItemInContainer(containers2, self.container_name + 'd') + def test_set_container_metadata(self): # Arrange self.bc.create_container(self.container_name) @@ -183,12 +320,19 @@ def test_set_container_metadata_with_non_existing_container(self): def test_get_container_metadata(self): # Arrange self.bc.create_container(self.container_name) + self.bc.set_container_acl(self.container_name, None, 'container') + self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'}) # Act md = self.bc.get_container_metadata(self.container_name) # Assert self.assertIsNotNone(md) + self.assertEquals(md['x-ms-meta-hello'], 'world') + self.assertEquals(md['x-ms-meta-foo'], '42') + # TODO: + # get_container_properties returns container lease information whereas get_container_metadata doesn't + # we should lease the container in the arrange section and verify that we do not receive that info def test_get_container_metadata_with_non_existing_container(self): # Arrange @@ -202,12 +346,19 @@ def test_get_container_metadata_with_non_existing_container(self): def test_get_container_properties(self): # Arrange self.bc.create_container(self.container_name) + self.bc.set_container_acl(self.container_name, None, 'container') + self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'}) # Act props = self.bc.get_container_properties(self.container_name) # Assert self.assertIsNotNone(props) + self.assertEquals(props['x-ms-meta-hello'], 'world') + self.assertEquals(props['x-ms-meta-foo'], '42') + # TODO: + # get_container_properties returns container lease information whereas get_container_metadata doesn't + # we should lease the container in the arrange section and verify that we receive that info def test_get_container_properties_with_non_existing_container(self): # Arrange @@ -229,6 +380,20 @@ def test_get_container_acl(self): self.assertIsNotNone(acl) 
self.assertEqual(len(acl.signed_identifiers), 0) + def test_get_container_acl_iter(self): + # Arrange + self.bc.create_container(self.container_name) + + # Act + acl = self.bc.get_container_acl(self.container_name) + for signed_identifier in acl: + pass + + # Assert + self.assertIsNotNone(acl) + self.assertEqual(len(acl.signed_identifiers), 0) + self.assertEqual(len(acl), 0) + def test_get_container_acl_with_non_existing_container(self): # Arrange @@ -274,6 +439,45 @@ def test_set_container_acl_with_public_access_blob(self): acl = self.bc.get_container_acl(self.container_name) self.assertIsNotNone(acl) + def test_set_container_acl_with_empty_signed_identifiers(self): + # Arrange + self.bc.create_container(self.container_name) + + # Act + identifiers = SignedIdentifiers() + + resp = self.bc.set_container_acl(self.container_name, identifiers) + + # Assert + self.assertIsNone(resp) + acl = self.bc.get_container_acl(self.container_name) + self.assertIsNotNone(acl) + self.assertEqual(len(acl.signed_identifiers), 0) + + def test_set_container_acl_with_signed_identifiers(self): + # Arrange + self.bc.create_container(self.container_name) + + # Act + si = SignedIdentifier() + si.id = 'testid' + si.access_policy.start = '2011-10-11' + si.access_policy.expiry = '2011-10-12' + si.access_policy.permission = 'r' + identifiers = SignedIdentifiers() + identifiers.signed_identifiers.append(si) + + resp = self.bc.set_container_acl(self.container_name, identifiers) + + # Assert + self.assertIsNone(resp) + acl = self.bc.get_container_acl(self.container_name) + self.assertIsNotNone(acl) + self.assertEqual(len(acl.signed_identifiers), 1) + self.assertEqual(len(acl), 1) + self.assertEqual(acl.signed_identifiers[0].id, 'testid') + self.assertEqual(acl[0].id, 'testid') + def test_set_container_acl_with_non_existing_container(self): # Arrange @@ -379,8 +583,8 @@ def test_list_blobs(self): # Arrange self._create_container(self.container_name) data = 'hello world' - resp = self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob') - resp = self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob') # Act blobs = self.bc.list_blobs(self.container_name) @@ -389,8 +593,170 @@ def test_list_blobs(self): # Assert self.assertIsNotNone(blobs) + self.assertGreaterEqual(len(blobs), 2) + self.assertIsNotNone(blobs[0]) self.assertNamedItemInContainer(blobs, 'blob1') self.assertNamedItemInContainer(blobs, 'blob2') + self.assertEqual(blobs[0].properties.content_length, 11) + self.assertEqual(blobs[1].properties.content_type, 'application/octet-stream Charset=UTF-8') + + def test_list_blobs_with_prefix(self): + # Arrange + self._create_container(self.container_name) + data = 'hello world' + self.bc.put_blob(self.container_name, 'bloba1', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'bloba2', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'blobb1', data, 'BlockBlob') + + # Act + blobs = self.bc.list_blobs(self.container_name, 'bloba') + + # Assert + self.assertIsNotNone(blobs) + self.assertEqual(len(blobs), 2) + self.assertNamedItemInContainer(blobs, 'bloba1') + self.assertNamedItemInContainer(blobs, 'bloba2') + + def test_list_blobs_with_maxresults(self): + # Arrange + self._create_container(self.container_name) + data = 'hello world' + self.bc.put_blob(self.container_name, 'bloba1', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'bloba2', data, 
'BlockBlob') + self.bc.put_blob(self.container_name, 'bloba3', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'blobb1', data, 'BlockBlob') + + # Act + blobs = self.bc.list_blobs(self.container_name, None, None, 2) + + # Assert + self.assertIsNotNone(blobs) + self.assertEqual(len(blobs), 2) + self.assertNamedItemInContainer(blobs, 'bloba1') + self.assertNamedItemInContainer(blobs, 'bloba2') + + def test_list_blobs_with_maxresults_and_marker(self): + # Arrange + self._create_container(self.container_name) + data = 'hello world' + self.bc.put_blob(self.container_name, 'bloba1', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'bloba2', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'bloba3', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'blobb1', data, 'BlockBlob') + + # Act + blobs1 = self.bc.list_blobs(self.container_name, None, None, 2) + blobs2 = self.bc.list_blobs(self.container_name, None, blobs1.next_marker, 2) + + # Assert + self.assertEqual(len(blobs1), 2) + self.assertEqual(len(blobs2), 2) + self.assertNamedItemInContainer(blobs1, 'bloba1') + self.assertNamedItemInContainer(blobs1, 'bloba2') + self.assertNamedItemInContainer(blobs2, 'bloba3') + self.assertNamedItemInContainer(blobs2, 'blobb1') + + def test_list_blobs_with_include_snapshots(self): + # Arrange + self._create_container(self.container_name) + data = 'hello world' + self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob') + self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob') + self.bc.snapshot_blob(self.container_name, 'blob1') + + # Act + blobs = self.bc.list_blobs(self.container_name, include='snapshots') + + # Assert + self.assertEqual(len(blobs), 3) + self.assertEqual(blobs[0].name, 'blob1') + self.assertNotEqual(blobs[0].snapshot, '') + self.assertEqual(blobs[1].name, 'blob1') + self.assertEqual(blobs[1].snapshot, '') + self.assertEqual(blobs[2].name, 'blob2') + self.assertEqual(blobs[2].snapshot, '') + + def test_list_blobs_with_include_metadata(self): + # Arrange + self._create_container(self.container_name) + data = 'hello world' + self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'foo':'1','bar':'bob'}) + self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob', x_ms_meta_name_values={'foo':'2','bar':'car'}) + self.bc.snapshot_blob(self.container_name, 'blob1') + + # Act + blobs = self.bc.list_blobs(self.container_name, include='metadata') + + # Assert + self.assertEqual(len(blobs), 2) + self.assertEqual(blobs[0].name, 'blob1') + self.assertEqual(blobs[0].metadata['foo'], '1') + self.assertEqual(blobs[0].metadata['bar'], 'bob') + self.assertEqual(blobs[1].name, 'blob2') + self.assertEqual(blobs[1].metadata['foo'], '2') + self.assertEqual(blobs[1].metadata['bar'], 'car') + + def test_list_blobs_with_include_uncommittedblobs(self): + # Arrange + self._create_container(self.container_name) + data = 'hello world' + self.bc.put_block(self.container_name, 'blob1', 'AAA', '1') + self.bc.put_block(self.container_name, 'blob1', 'BBB', '2') + self.bc.put_block(self.container_name, 'blob1', 'CCC', '3') + self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob', x_ms_meta_name_values={'foo':'2','bar':'car'}) + + # Act + blobs = self.bc.list_blobs(self.container_name, include='uncommittedblobs') + + # Assert + self.assertEqual(len(blobs), 2) + self.assertEqual(blobs[0].name, 'blob1') + self.assertEqual(blobs[1].name, 'blob2') + + #def test_list_blobs_with_include_copy(self): + # # Arrange + # 
self._create_container(self.container_name) + # data = 'hello world' + # self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'status':'original'}) + # sourceblob = '/%s/%s/%s' % (credentials.getStorageServicesName(), + # self.container_name, + # 'blob1') + # self.bc.copy_blob(self.container_name, 'blob1copy', sourceblob, {'status':'copy'}) + + # # Act + # blobs = self.bc.list_blobs(self.container_name, include='copy') + + # # Assert + # self.assertEqual(len(blobs), 2) + # self.assertEqual(blobs[0].name, 'blob1') + # self.assertEqual(blobs[1].name, 'blob2') + # #TODO: check for metadata related to copy blob + + def test_list_blobs_with_include_multiple(self): + # Arrange + self._create_container(self.container_name) + data = 'hello world' + self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'foo':'1','bar':'bob'}) + self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob', x_ms_meta_name_values={'foo':'2','bar':'car'}) + self.bc.snapshot_blob(self.container_name, 'blob1') + + # Act + blobs = self.bc.list_blobs(self.container_name, include='snapshots,metadata') + + # Assert + self.assertEqual(len(blobs), 3) + self.assertEqual(blobs[0].name, 'blob1') + self.assertNotEqual(blobs[0].snapshot, '') + self.assertEqual(blobs[0].metadata['foo'], '1') + self.assertEqual(blobs[0].metadata['bar'], 'bob') + self.assertEqual(blobs[1].name, 'blob1') + self.assertEqual(blobs[1].snapshot, '') + self.assertEqual(blobs[1].metadata['foo'], '1') + self.assertEqual(blobs[1].metadata['bar'], 'bob') + self.assertEqual(blobs[2].name, 'blob2') + self.assertEqual(blobs[2].snapshot, '') + self.assertEqual(blobs[2].metadata['foo'], '2') + self.assertEqual(blobs[2].metadata['bar'], 'car') def test_put_blob_block_blob(self): # Arrange @@ -413,6 +779,35 @@ def test_put_blob_page_blob(self): # Assert self.assertIsNone(resp) + def test_put_blob_with_lease_id(self): + # Arrange + self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world') + lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire') + lease_id = lease['x-ms-lease-id'] + + # Act + data = 'hello world again' + resp = self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_lease_id=lease_id) + + # Assert + self.assertIsNone(resp) + blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_lease_id=lease_id) + self.assertEqual(blob, 'hello world again') + + def test_put_blob_with_metadata(self): + # Arrange + self._create_container(self.container_name) + + # Act + data = 'hello world' + resp = self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'hello':'world','foo':'42'}) + + # Assert + self.assertIsNone(resp) + md = self.bc.get_blob_metadata(self.container_name, 'blob1') + self.assertEquals(md['x-ms-meta-hello'], 'world') + self.assertEquals(md['x-ms-meta-foo'], '42') + def test_get_blob_with_existing_blob(self): # Arrange self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world') @@ -421,7 +816,84 @@ def test_get_blob_with_existing_blob(self): blob = self.bc.get_blob(self.container_name, 'blob1') # Assert - self.assertEqual(type(blob), str) + self.assertIsInstance(blob, BlobResult) + self.assertEquals(blob, 'hello world') + + def test_get_blob_with_snapshot(self): + # Arrange + self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world') + snapshot = self.bc.snapshot_blob(self.container_name, 'blob1') + + # Act + blob = 
self.bc.get_blob(self.container_name, 'blob1', snapshot['x-ms-snapshot']) + + # Assert + self.assertIsInstance(blob, BlobResult) + self.assertEquals(blob, 'hello world') + + def test_get_blob_with_snapshot_previous(self): + # Arrange + self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world') + snapshot = self.bc.snapshot_blob(self.container_name, 'blob1') + self.bc.put_blob(self.container_name, 'blob1', 'hello world again', 'BlockBlob') + + # Act + blob_previous = self.bc.get_blob(self.container_name, 'blob1', snapshot['x-ms-snapshot']) + blob_latest = self.bc.get_blob(self.container_name, 'blob1') + + # Assert + self.assertIsInstance(blob_previous, BlobResult) + self.assertIsInstance(blob_latest, BlobResult) + self.assertEquals(blob_previous, 'hello world') + self.assertEquals(blob_latest, 'hello world again') + + def test_get_blob_with_range(self): + # Arrange + self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world') + + # Act + blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_range='bytes=0-5') + + # Assert + self.assertIsInstance(blob, BlobResult) + self.assertEquals(blob, 'hello ') + + def test_get_blob_with_range_and_get_content_md5(self): + # Arrange + self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world') + + # Act + blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_range='bytes=0-5', x_ms_range_get_content_md5='true') + + # Assert + self.assertIsInstance(blob, BlobResult) + self.assertEquals(blob, 'hello ') + self.assertEquals(blob.properties['content-md5'], '+BSJN3e8wilf/wXwDlCNpg==') + + def test_get_blob_with_lease(self): + # Arrange + self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world') + lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire') + lease_id = lease['x-ms-lease-id'] + + # Act + blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_lease_id=lease_id) + self.bc.lease_blob(self.container_name, 'blob1', 'release', lease_id) + + # Assert + self.assertIsInstance(blob, BlobResult) + self.assertEquals(blob, 'hello world') + + def test_get_blob_on_leased_blob_without_lease_id(self): + # Arrange + self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world') + self.bc.lease_blob(self.container_name, 'blob1', 'acquire') + + # Act + blob = self.bc.get_blob(self.container_name, 'blob1') # get_blob is allowed without lease id + + # Assert + self.assertIsInstance(blob, BlobResult) self.assertEquals(blob, 'hello world') def test_get_blob_with_non_existing_container(self): @@ -453,7 +925,7 @@ def test_set_blob_properties_with_existing_blob(self): # Assert self.assertIsNone(resp) props = self.bc.get_blob_properties(self.container_name, 'blob1') - self.assertEquals(props['Content-Language'], 'spanish') + self.assertEquals(props['content-language'], 'spanish') def test_set_blob_properties_with_non_existing_container(self): # Arrange @@ -485,6 +957,7 @@ def test_get_blob_properties_with_existing_blob(self): self.assertIsNotNone(props) self.assertEquals(props['x-ms-blob-type'], 'BlockBlob') self.assertEquals(props['x-ms-lease-status'], 'unlocked') + self.assertEquals(props['content-length'], '11') def test_get_blob_properties_with_non_existing_container(self): # Arrange @@ -571,7 +1044,8 @@ def test_snapshot_blob(self): resp = self.bc.snapshot_blob(self.container_name, 'blob1') # Assert - self.assertIsNone(resp) + self.assertIsNotNone(resp) + self.assertIsNotNone(resp['x-ms-snapshot']) def 
test_lease_blob_acquire_and_release(self): # Arrange @@ -625,6 +1099,8 @@ def test_put_block_list(self): # Assert self.assertIsNone(resp) + blob = self.bc.get_blob(self.container_name, 'blob1') + self.assertEqual(blob, 'AAABBBCCC') def test_get_block_list_no_blocks(self): # Arrange @@ -724,6 +1200,23 @@ def test_get_page_ranges_2_pages(self): self.assertEquals(ranges.page_ranges[1].start, 1024) self.assertEquals(ranges.page_ranges[1].end, 1535) + def test_get_page_ranges_iter(self): + # Arrange + self._create_container_and_page_blob(self.container_name, 'blob1', 2048) + data = 'abcdefghijklmnop' * 32 + resp1 = self.bc.put_page(self.container_name, 'blob1', data, 'bytes=0-511', 'update') + resp2 = self.bc.put_page(self.container_name, 'blob1', data, 'bytes=1024-1535', 'update') + + # Act + ranges = self.bc.get_page_ranges(self.container_name, 'blob1') + for range in ranges: + pass + + # Assert + self.assertEquals(len(ranges), 2) + self.assertIsInstance(ranges[0], PageRange) + self.assertIsInstance(ranges[1], PageRange) + def test_with_filter(self): # Single filter called = [] diff --git a/test/azuretest/test_cloudstorageaccount.py b/test/azuretest/test_cloudstorageaccount.py new file mode 100644 index 000000000000..198a61c251a7 --- /dev/null +++ b/test/azuretest/test_cloudstorageaccount.py @@ -0,0 +1,77 @@ +#------------------------------------------------------------------------- +# Copyright 2011 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#-------------------------------------------------------------------------- + +from azure.storage import * +from azuretest.util import * + +import unittest + +#------------------------------------------------------------------------------ +class CloudStorageAccountTest(AzureTestCase): + + def setUp(self): + self.account = CloudStorageAccount(account_name=credentials.getStorageServicesName(), + account_key=credentials.getStorageServicesKey()) + + #--Test cases -------------------------------------------------------- + def test_create_blob_service(self): + # Arrange + + # Act + service = self.account.create_blob_service() + + # Assert + self.assertIsNotNone(service) + self.assertIsInstance(service, BlobService) + self.assertEqual(service.account_name, credentials.getStorageServicesName()) + self.assertEqual(service.account_key, credentials.getStorageServicesKey()) + + def test_create_blob_service_empty_credentials(self): + # Arrange + + # Act + bad_account = CloudStorageAccount('', '') + with self.assertRaises(WindowsAzureError): + service = bad_account.create_blob_service() + + # Assert + + def test_create_table_service(self): + # Arrange + + # Act + service = self.account.create_table_service() + + # Assert + self.assertIsNotNone(service) + self.assertIsInstance(service, TableService) + self.assertEqual(service.account_name, credentials.getStorageServicesName()) + self.assertEqual(service.account_key, credentials.getStorageServicesKey()) + + def test_create_queue_service(self): + # Arrange + + # Act + service = self.account.create_queue_service() + + # Assert + self.assertIsNotNone(service) + self.assertIsInstance(service, QueueService) + self.assertEqual(service.account_name, credentials.getStorageServicesName()) + self.assertEqual(service.account_key, credentials.getStorageServicesKey()) + +#------------------------------------------------------------------------------ +if __name__ == '__main__': + unittest.main() diff --git a/test/azuretest/test_queueservice.py b/test/azuretest/test_queueservice.py index a04f9c2160d8..a89cdbba2a1d 100644 --- a/test/azuretest/test_queueservice.py +++ b/test/azuretest/test_queueservice.py @@ -19,27 +19,25 @@ from azuretest.util import * import unittest -import time #------------------------------------------------------------------------------ TEST_QUEUE_PREFIX = 'mytestqueue' #------------------------------------------------------------------------------ -class QueueServiceTest(unittest.TestCase): +class QueueServiceTest(AzureTestCase): def setUp(self): self.queue_client = QueueService(account_name=credentials.getStorageServicesName(), account_key=credentials.getStorageServicesKey()) - # TODO: it may be overkill to use the machine name from - # getUniqueTestRunID, current time may be unique enough + __uid = getUniqueTestRunID() queue_base_name = u'%s' % (__uid) self.test_queues = [] self.creatable_queues = [] for i in range(10): - self.test_queues.append(TEST_QUEUE_PREFIX + getUniqueNameBasedOnCurrentTime(queue_base_name)) + self.test_queues.append(TEST_QUEUE_PREFIX + str(i) + getUniqueNameBasedOnCurrentTime(queue_base_name)) for i in range(4): - self.creatable_queues.append('mycreatablequeue' + getUniqueNameBasedOnCurrentTime(queue_base_name)) + self.creatable_queues.append('mycreatablequeue' + str(i) + getUniqueNameBasedOnCurrentTime(queue_base_name)) for queue_name in self.test_queues: self.queue_client.create_queue(queue_name) @@ -107,6 +105,24 @@ def test_create_queue(self): self.assertIsNotNone(result) 
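
The queue tests added below exercise the boolean return values and the fail\_on\_exist / fail\_not\_exist behaviour of **create\_queue** and **delete\_queue**. A minimal sketch of that behaviour, assuming `account_name` and `account_key` are defined and `'myqueue'` is a hypothetical queue name:

```Python
from azure import WindowsAzureError
from azure.storage import QueueService

queue_service = QueueService(account_name, account_key)

# By default, creation reports success as a boolean.
created = queue_service.create_queue('myqueue')        # True
created_again = queue_service.create_queue('myqueue')  # False, already exists

# With fail_on_exist set, the call raises instead of returning False.
try:
    queue_service.create_queue('myqueue', None, True)
except WindowsAzureError:
    pass  # queue already exists

queue_service.delete_queue('myqueue')
```
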
self.assertEqual(result['x-ms-approximate-messages-count'], '0') + def test_create_queue_already_exist(self): + #Action + created1 = self.queue_client.create_queue(self.creatable_queues[0]) + created2 = self.queue_client.create_queue(self.creatable_queues[0]) + + #Asserts + self.assertTrue(created1) + self.assertFalse(created2) + + def test_create_queue_fail_on_exist(self): + #Action + created = self.queue_client.create_queue(self.creatable_queues[0], None, True) + with self.assertRaises(WindowsAzureError): + self.queue_client.create_queue(self.creatable_queues[0], None, True) + + #Asserts + self.assertTrue(created) + def test_create_queue_with_options(self): #Action self.queue_client.create_queue(self.creatable_queues[1], x_ms_meta_name_values = {'foo':'test', 'bar':'blah'}) @@ -118,9 +134,34 @@ def test_create_queue_with_options(self): self.assertEqual('test', result['x-ms-meta-foo']) self.assertEqual('blah', result['x-ms-meta-bar']) + def test_delete_queue_not_exist(self): + #Action + deleted = self.queue_client.delete_queue(self.creatable_queues[0]) + + #Asserts + self.assertFalse(deleted) + + def test_delete_queue_fail_not_exist_not_exist(self): + #Action + with self.assertRaises(WindowsAzureError): + self.queue_client.delete_queue(self.creatable_queues[0], True) + + #Asserts + + def test_delete_queue_fail_not_exist_already_exist(self): + #Action + created = self.queue_client.create_queue(self.creatable_queues[0]) + deleted = self.queue_client.delete_queue(self.creatable_queues[0], True) + + #Asserts + self.assertTrue(created) + self.assertTrue(deleted) + def test_list_queues(self): #Action queues = self.queue_client.list_queues() + for queue in queues: + pass #Asserts self.assertIsNotNone(queues) @@ -172,7 +213,7 @@ def test_put_message(self): self.queue_client.put_message(self.test_queues[0], 'message3') self.queue_client.put_message(self.test_queues[0], 'message4') - def test_get_messges(self): + def test_get_messages(self): #Action self.queue_client.put_message(self.test_queues[1], 'message1') self.queue_client.put_message(self.test_queues[1], 'message2') diff --git a/test/azuretest/test_servicebusservice.py b/test/azuretest/test_servicebusservice.py index 46edc29686ec..7ff352e11b08 100644 --- a/test/azuretest/test_servicebusservice.py +++ b/test/azuretest/test_servicebusservice.py @@ -1,15 +1,17 @@ -#------------------------------------------------------------------------------ -# Copyright (c) Microsoft Corporation. +#------------------------------------------------------------------------- +# Copyright 2011 Microsoft Corporation # -# This source code is subject to terms and conditions of the Apache License, -# Version 2.0. A copy of the license can be found in the License.html file at -# the root of this distribution. If you cannot locate the Apache License, -# Version 2.0, please send an email to vspython@microsoft.com. By using this -# source code in any fashion, you are agreeing to be bound by the terms of the -# Apache License, Version 2.0. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 # -# You must not remove this notice, or any other, from this software. 
-#------------------------------------------------------------------------------ +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- from azure import * from azure.servicebus import * @@ -18,14 +20,12 @@ import unittest #------------------------------------------------------------------------------ -class ServiceBusTest(unittest.TestCase): +class ServiceBusTest(AzureTestCase): def setUp(self): self.sbs = ServiceBusService(credentials.getServiceBusNamespace(), credentials.getServiceBusKey(), 'owner') - # TODO: it may be overkill to use the machine name from - # getUniqueTestRunID, current time may be unique enough __uid = getUniqueTestRunID() queue_base_name = u'mytestqueue%s' % (__uid) @@ -48,25 +48,6 @@ def cleanup(self): except: pass #--Helpers----------------------------------------------------------------- - - # TODO: move this function out of here so other tests can use them - # TODO: find out how to import/use safe_repr instead repr - def assertNamedItemInContainer(self, container, item_name, msg=None): - for item in container: - if item.name == item_name: - return - - standardMsg = '%s not found in %s' % (repr(item_name), repr(container)) - self.fail(self._formatMessage(msg, standardMsg)) - - # TODO: move this function out of here so other tests can use them - # TODO: find out how to import/use safe_repr instead repr - def assertNamedItemNotInContainer(self, container, item_name, msg=None): - for item in container: - if item.name == item_name: - standardMsg = '%s unexpectedly found in %s' % (repr(item_name), repr(container)) - self.fail(self._formatMessage(msg, standardMsg)) - def _create_queue(self, queue_name): self.sbs.create_queue(queue_name, None, True) @@ -84,6 +65,44 @@ def _create_topic_and_subscription(self, topic_name, subscription_name): def _create_subscription(self, topic_name, subscription_name): self.sbs.create_subscription(topic_name, subscription_name, None, True) + #--Test cases for service bus service ------------------------------------- + def test_create_service_bus_missing_arguments(self): + # Arrange + if os.environ.has_key(AZURE_SERVICEBUS_NAMESPACE): + del os.environ[AZURE_SERVICEBUS_NAMESPACE] + if os.environ.has_key(AZURE_SERVICEBUS_ACCESS_KEY): + del os.environ[AZURE_SERVICEBUS_ACCESS_KEY] + if os.environ.has_key(AZURE_SERVICEBUS_ISSUER): + del os.environ[AZURE_SERVICEBUS_ISSUER] + + # Act + with self.assertRaises(WindowsAzureError): + sbs = ServiceBusService() + + # Assert + + def test_create_service_bus_env_variables(self): + # Arrange + os.environ[AZURE_SERVICEBUS_NAMESPACE] = credentials.getServiceBusNamespace() + os.environ[AZURE_SERVICEBUS_ACCESS_KEY] = credentials.getServiceBusKey() + os.environ[AZURE_SERVICEBUS_ISSUER] = 'owner' + + # Act + sbs = ServiceBusService() + + if os.environ.has_key(AZURE_SERVICEBUS_NAMESPACE): + del os.environ[AZURE_SERVICEBUS_NAMESPACE] + if os.environ.has_key(AZURE_SERVICEBUS_ACCESS_KEY): + del os.environ[AZURE_SERVICEBUS_ACCESS_KEY] + if os.environ.has_key(AZURE_SERVICEBUS_ISSUER): + del os.environ[AZURE_SERVICEBUS_ISSUER] + + # Assert + self.assertIsNotNone(sbs) + self.assertEquals(sbs.service_namespace, credentials.getServiceBusNamespace()) + self.assertEquals(sbs.account_key, 
credentials.getServiceBusKey()) + self.assertEquals(sbs.issuer, 'owner') + #--Test cases for queues -------------------------------------------------- def test_create_queue_no_options(self): # Arrange @@ -108,12 +127,33 @@ def test_create_queue_with_options(self): # Act queue_options = Queue() - queue_options.max_size_in_megabytes = 5120 queue_options.default_message_time_to_live = 'PT1M' + queue_options.duplicate_detection_history_time_window = 'PT5M' + queue_options.enable_batched_operations = False + queue_options.dead_lettering_on_message_expiration = False + queue_options.lock_duration = 'PT1M' + queue_options.max_delivery_count = 15 + queue_options.max_size_in_megabytes = 5120 + queue_options.message_count = 0 + queue_options.requires_duplicate_detection = False + queue_options.requires_session = False + queue_options.size_in_bytes = 0 created = self.sbs.create_queue(self.queue_name, queue_options) # Assert self.assertTrue(created) + queue = self.sbs.get_queue(self.queue_name) + self.assertEquals('PT1M', queue.default_message_time_to_live) + self.assertEquals('PT5M', queue.duplicate_detection_history_time_window) + self.assertEquals(False, queue.enable_batched_operations) + self.assertEquals(False, queue.dead_lettering_on_message_expiration) + self.assertEquals('PT1M', queue.lock_duration) + self.assertEquals(15, queue.max_delivery_count) + self.assertEquals(5120, queue.max_size_in_megabytes) + self.assertEquals(0, queue.message_count) + self.assertEquals(False, queue.requires_duplicate_detection) + self.assertEquals(False, queue.requires_session) + self.assertEquals(0, queue.size_in_bytes) def test_create_queue_with_already_existing_queue(self): # Arrange @@ -319,7 +359,14 @@ def test_send_queue_message_with_custom_message_properties(self): self._create_queue(self.queue_name) # Act - sent_msg = Message('message with properties', custom_properties={'hello':'world', 'foo':42}) + props = {'hello':'world', + 'foo':42, + 'active':True, + 'deceased':False, + 'large':8555111000, + 'floating':3.14, + 'dob':datetime(2011, 12, 14)} + sent_msg = Message('message with properties', custom_properties=props) self.sbs.send_queue_message(self.queue_name, sent_msg) received_msg = self.sbs.receive_queue_message(self.queue_name, True, 5) received_msg.delete() @@ -327,7 +374,12 @@ def test_send_queue_message_with_custom_message_properties(self): # Assert self.assertIsNotNone(received_msg) self.assertEquals(received_msg.custom_properties['hello'], 'world') - self.assertEquals(received_msg.custom_properties['foo'], '42') # TODO: note that the integer became a string + self.assertEquals(received_msg.custom_properties['foo'], 42) + self.assertEquals(received_msg.custom_properties['active'], True) + self.assertEquals(received_msg.custom_properties['deceased'], False) + self.assertEquals(received_msg.custom_properties['large'], 8555111000) + self.assertEquals(received_msg.custom_properties['floating'], 3.14) + self.assertEquals(received_msg.custom_properties['dob'], datetime(2011, 12, 14)) #--Test cases for topics/subscriptions ------------------------------------ def test_create_topic_no_options(self): @@ -353,12 +405,24 @@ def test_create_topic_with_options(self): # Act topic_options = Topic() - topic_options.max_size_in_megabytes = '5120' topic_options.default_message_time_to_live = 'PT1M' + topic_options.duplicate_detection_history_time_window = 'PT5M' + topic_options.enable_batched_operations = False + topic_options.max_size_in_megabytes = 5120 + topic_options.requires_duplicate_detection = False + 
topic_options.size_in_bytes = 0 + #TODO: MaximumNumberOfSubscriptions is not supported? created = self.sbs.create_topic(self.topic_name, topic_options) # Assert self.assertTrue(created) + topic = self.sbs.get_topic(self.topic_name) + self.assertEquals('PT1M', topic.default_message_time_to_live) + self.assertEquals('PT5M', topic.duplicate_detection_history_time_window) + self.assertEquals(False, topic.enable_batched_operations) + self.assertEquals(5120, topic.max_size_in_megabytes) + self.assertEquals(False, topic.requires_duplicate_detection) + self.assertEquals(0, topic.size_in_bytes) def test_create_topic_with_already_existing_topic(self): # Arrange @@ -382,6 +446,23 @@ def test_create_topic_with_already_existing_topic_fail_on_exist(self): # Assert self.assertTrue(created) + def test_topic_backwards_compatibility_warning(self): + # Arrange + topic_options = Topic() + topic_options.max_size_in_megabytes = 5120 + + # Act + val = topic_options.max_size_in_mega_bytes + + # Assert + self.assertEqual(val, 5120) + + # Act + topic_options.max_size_in_mega_bytes = 1024 + + # Assert + self.assertEqual(topic_options.max_size_in_megabytes, 1024) + def test_get_topic_with_existing_topic(self): # Arrange self._create_topic(self.topic_name) @@ -467,6 +548,35 @@ def test_create_subscription(self): # Assert self.assertTrue(created) + def test_create_subscription_with_options(self): + # Arrange + self._create_topic(self.topic_name) + + # Act + subscription_options = Subscription() + subscription_options.dead_lettering_on_filter_evaluation_exceptions = False + subscription_options.dead_lettering_on_message_expiration = False + subscription_options.default_message_time_to_live = 'PT15M' + subscription_options.enable_batched_operations = False + subscription_options.lock_duration = 'PT1M' + subscription_options.max_delivery_count = 15 + #message_count is read-only + subscription_options.message_count = 0 + subscription_options.requires_session = False + created = self.sbs.create_subscription(self.topic_name, 'MySubscription', subscription_options) + + # Assert + self.assertTrue(created) + subscription = self.sbs.get_subscription(self.topic_name, 'MySubscription') + self.assertEquals(False, subscription.dead_lettering_on_filter_evaluation_exceptions) + self.assertEquals(False, subscription.dead_lettering_on_message_expiration) + self.assertEquals('PT15M', subscription.default_message_time_to_live) + self.assertEquals(False, subscription.enable_batched_operations) + self.assertEquals('PT1M', subscription.lock_duration) + #self.assertEquals(15, subscription.max_delivery_count) #no idea why max_delivery_count is always 10 + self.assertEquals(0, subscription.message_count) + self.assertEquals(False, subscription.requires_session) + def test_create_subscription_fail_on_exist(self): # Arrange self._create_topic(self.topic_name) @@ -630,7 +740,7 @@ def test_create_rule_with_already_existing_rule_fail_on_exist(self): # Assert self.assertTrue(created) - def test_create_rule_with_options(self): + def test_create_rule_with_options_sql_filter(self): # Arrange self._create_topic_and_subscription(self.topic_name, 'MySubscription') @@ -643,6 +753,71 @@ def test_create_rule_with_options(self): # Assert self.assertTrue(created) + def test_create_rule_with_options_true_filter(self): + # Arrange + self._create_topic_and_subscription(self.topic_name, 'MySubscription') + + # Act + rule1 = Rule() + rule1.filter_type = 'TrueFilter' + rule1.filter_expression = '1=1' + created = self.sbs.create_rule(self.topic_name, 'MySubscription', 
'MyRule1', rule1) + + # Assert + self.assertTrue(created) + + def test_create_rule_with_options_false_filter(self): + # Arrange + self._create_topic_and_subscription(self.topic_name, 'MySubscription') + + # Act + rule1 = Rule() + rule1.filter_type = 'FalseFilter' + rule1.filter_expression = '1=0' + created = self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', rule1) + + # Assert + self.assertTrue(created) + + def test_create_rule_with_options_correlation_filter(self): + # Arrange + self._create_topic_and_subscription(self.topic_name, 'MySubscription') + + # Act + rule1 = Rule() + rule1.filter_type = 'CorrelationFilter' + rule1.filter_expression = 'myid' + created = self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', rule1) + + # Assert + self.assertTrue(created) + + def test_create_rule_with_options_empty_rule_action(self): + # Arrange + self._create_topic_and_subscription(self.topic_name, 'MySubscription') + + # Act + rule1 = Rule() + rule1.action_type = 'EmptyRuleAction' + rule1.action_expression = '' + created = self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', rule1) + + # Assert + self.assertTrue(created) + + def test_create_rule_with_options_sql_rule_action(self): + # Arrange + self._create_topic_and_subscription(self.topic_name, 'MySubscription') + + # Act + rule1 = Rule() + rule1.action_type = 'SqlRuleAction' + rule1.action_expression = "SET foo = 5" + created = self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', rule1) + + # Assert + self.assertTrue(created) + def test_list_rules(self): # Arrange self._create_topic_and_subscription(self.topic_name, 'MySubscription') @@ -675,6 +850,27 @@ def test_get_rule_with_non_existing_rule(self): # Assert + def test_get_rule_with_existing_rule_with_options(self): + # Arrange + self._create_topic_and_subscription(self.topic_name, 'MySubscription') + sent_rule = Rule() + sent_rule.filter_type = 'SqlFilter' + sent_rule.filter_expression = 'foo > 40' + sent_rule.action_type = 'SqlRuleAction' + sent_rule.action_expression = 'SET foo = 5' + self.sbs.create_rule(self.topic_name, 'MySubscription', 'MyRule1', sent_rule) + + # Act + received_rule = self.sbs.get_rule(self.topic_name, 'MySubscription', 'MyRule1') + + # Assert + self.assertIsNotNone(received_rule) + self.assertEquals(received_rule.name, 'MyRule1') + self.assertEquals(received_rule.filter_type, sent_rule.filter_type) + self.assertEquals(received_rule.filter_expression, sent_rule.filter_expression) + self.assertEquals(received_rule.action_type, sent_rule.action_type) + self.assertEquals(received_rule.action_expression, sent_rule.action_expression) + def test_delete_rule_with_existing_rule(self): # Arrange self._create_topic_and_subscription(self.topic_name, 'MySubscription') diff --git a/test/azuretest/test_sharedaccesssignature.py b/test/azuretest/test_sharedaccesssignature.py new file mode 100644 index 000000000000..c602d374bf3e --- /dev/null +++ b/test/azuretest/test_sharedaccesssignature.py @@ -0,0 +1,137 @@ +#------------------------------------------------------------------------- +# Copyright 2011 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- + +from azure import DEV_ACCOUNT_NAME, DEV_ACCOUNT_KEY +from azure.storage.sharedaccesssignature import (SharedAccessSignature, + SharedAccessPolicy, + Permission, + WebResource) +from azure.storage import AccessPolicy +from azuretest.util import AzureTestCase + +import unittest + +#------------------------------------------------------------------------------ +SIGNED_START = 'st' +SIGNED_EXPIRY = 'se' +SIGNED_RESOURCE = 'sr' +SIGNED_PERMISSION = 'sp' +SIGNED_IDENTIFIER = 'si' +SIGNED_SIGNATURE = 'sig' +RESOURCE_BLOB = 'blob' +RESOURCE_CONTAINER = 'container' +SIGNED_RESOURCE_TYPE = 'resource' +SHARED_ACCESS_PERMISSION = 'permission' + +#------------------------------------------------------------------------------ +class SharedAccessSignatureTest(AzureTestCase): + + def setUp(self): + self.sas = SharedAccessSignature(account_name=DEV_ACCOUNT_NAME, + account_key=DEV_ACCOUNT_KEY) + def tearDown(self): + return super(SharedAccessSignatureTest, self).tearDown() + + def test_generate_signature_container(self): + accss_plcy = AccessPolicy() + accss_plcy.start = '2011-10-11' + accss_plcy.expiry = '2011-10-12' + accss_plcy.permission = 'r' + signed_identifier = 'YWJjZGVmZw==' + sap = SharedAccessPolicy(accss_plcy, signed_identifier) + signature = self.sas._generate_signature('images', + RESOURCE_CONTAINER, + sap) + self.assertEqual(signature, + 'VdlALM4TYEYYNf94Bvt3dn48TsA01wk45ltwP3zeKp4=') + + def test_generate_signature_blob(self): + accss_plcy = AccessPolicy() + accss_plcy.start = '2011-10-11T11:03:40Z' + accss_plcy.expiry = '2011-10-12T11:53:40Z' + accss_plcy.permission = 'r' + sap = SharedAccessPolicy(accss_plcy) + + signature = self.sas._generate_signature('images/pic1.png', + RESOURCE_BLOB, + sap) + self.assertEqual(signature, + '7NIEip+VOrQ5ZV80pORPK1MOsJc62wwCNcbMvE+lQ0s=') + + def test_blob_signed_query_string(self): + accss_plcy = AccessPolicy() + accss_plcy.start = '2011-10-11' + accss_plcy.expiry = '2011-10-12' + accss_plcy.permission = 'w' + sap = SharedAccessPolicy(accss_plcy) + qry_str = self.sas.generate_signed_query_string('images/pic1.png', + RESOURCE_BLOB, + sap) + self.assertEqual(qry_str[SIGNED_START], '2011-10-11') + self.assertEqual(qry_str[SIGNED_EXPIRY], '2011-10-12') + self.assertEqual(qry_str[SIGNED_RESOURCE], RESOURCE_BLOB) + self.assertEqual(qry_str[SIGNED_PERMISSION], 'w') + self.assertEqual(qry_str[SIGNED_SIGNATURE], + 'k8uyTrn3pgLXuhwgZhxeAH6mZ/es9k2vqHPJEuIH4CE=') + + def test_container_signed_query_string(self): + accss_plcy = AccessPolicy() + accss_plcy.start = '2011-10-11' + accss_plcy.expiry = '2011-10-12' + accss_plcy.permission = 'r' + signed_identifier = 'YWJjZGVmZw==' + sap = SharedAccessPolicy(accss_plcy, signed_identifier) + qry_str = self.sas.generate_signed_query_string('images', + RESOURCE_CONTAINER, + sap) + self.assertEqual(qry_str[SIGNED_START], '2011-10-11') + self.assertEqual(qry_str[SIGNED_EXPIRY], '2011-10-12') + self.assertEqual(qry_str[SIGNED_RESOURCE], RESOURCE_CONTAINER) + self.assertEqual(qry_str[SIGNED_PERMISSION], 'r') + 
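
These shared access signature tests build an access policy and turn it into query-string fields (st, se, sr, sp, si, sig). A minimal sketch of the same flow against the development storage account, using the sample blob path and policy values from the tests:

```Python
from azure import DEV_ACCOUNT_NAME, DEV_ACCOUNT_KEY
from azure.storage import AccessPolicy
from azure.storage.sharedaccesssignature import (SharedAccessSignature,
                                                 SharedAccessPolicy)

sas = SharedAccessSignature(account_name=DEV_ACCOUNT_NAME,
                            account_key=DEV_ACCOUNT_KEY)

# Read-only policy valid between the two dates, as in the tests.
access_policy = AccessPolicy()
access_policy.start = '2011-10-11'
access_policy.expiry = '2011-10-12'
access_policy.permission = 'r'
policy = SharedAccessPolicy(access_policy)

# Returns a dict of query-string fields: st, se, sr, sp, sig.
query = sas.generate_signed_query_string('images/pic1.png', 'blob', policy)
```
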
self.assertEqual(qry_str[SIGNED_IDENTIFIER], 'YWJjZGVmZw==') + self.assertEqual(qry_str[SIGNED_SIGNATURE], + 'VdlALM4TYEYYNf94Bvt3dn48TsA01wk45ltwP3zeKp4=') + + def test_sign_request(self): + accss_plcy = AccessPolicy() + accss_plcy.start = '2011-10-11' + accss_plcy.expiry = '2011-10-12' + accss_plcy.permission = 'r' + sap = SharedAccessPolicy(accss_plcy) + qry_str = self.sas.generate_signed_query_string('images/pic1.png', + RESOURCE_BLOB, + sap) + + permission = Permission() + permission.path = '/images/pic1.png' + permission.query_string = qry_str + self.sas.permission_set = [permission] + + web_rsrc = WebResource() + web_rsrc.properties[SIGNED_RESOURCE_TYPE] = RESOURCE_BLOB + web_rsrc.properties[SHARED_ACCESS_PERMISSION] = 'r' + web_rsrc.path = '/images/pic1.png?comp=metadata' + web_rsrc.request_url = '/images/pic1.png?comp=metadata' + + web_rsrc = self.sas.sign_request(web_rsrc) + + self.assertEqual(web_rsrc.request_url, + '/images/pic1.png?comp=metadata&' + + self.sas._convert_query_string(qry_str)) + +#------------------------------------------------------------------------------ +if __name__ == '__main__': + unittest.main() + \ No newline at end of file diff --git a/test/azuretest/test_tableservice.py b/test/azuretest/test_tableservice.py index 4d38a76936e2..009304cd27d8 100644 --- a/test/azuretest/test_tableservice.py +++ b/test/azuretest/test_tableservice.py @@ -24,76 +24,176 @@ STATUS_CREATED, STATUS_ACCEPTED, STATUS_NO_CONTENT, - getUniqueNameBasedOnCurrentTime) + getUniqueNameBasedOnCurrentTime, + AzureTestCase) import unittest import time from datetime import datetime #------------------------------------------------------------------------------ -__uid = getUniqueTestRunID() - -TABLE_TO_DELETE = 'mytesttabletodelete%s' % (__uid) -TABLE_NO_DELETE = 'mytesttablenodelete%s' % (__uid) -ENTITY_TO_DELETE = 'mytestentitytodelete%s' % (__uid) -ENTITY_NO_DELETE = 'mytestentitynodelete%s' % (__uid) -BATCH_TABLE = 'mytestbatchtable%s' % (__uid) -FILTER_TABLE = 'mytestfiltertable%s' % (__uid) + +MAX_RETRY = 60 #------------------------------------------------------------------------------ -class StorageTest(unittest.TestCase): - ''' - TODO: - - comprehensive, positive test cases for all table client methods - - comprehensive, negative test cases all table client methods - - missing coverage for begin_batch - - missing coverage for cancel_batch - - missing coverage for commit_batch - - get_table_service_properties busted - - set_table_service_properties busted - ''' +class StorageTest(AzureTestCase): def setUp(self): - self.tc = TableService(account_name=credentials.getStorageServicesName().encode('ascii', 'ignore'), - account_key=credentials.getStorageServicesKey().encode('ascii', 'ignore')) + self.tc = TableService(account_name=credentials.getStorageServicesName(), + account_key=credentials.getStorageServicesKey()) __uid = getUniqueTestRunID() - test_table_base_name = u'testtable%s' % (__uid) - self.test_table = getUniqueNameBasedOnCurrentTime(test_table_base_name) - self.tc.create_table(self.test_table) - - #time.sleep(10) + table_base_name = u'testtable%s' % (__uid) + self.table_name = getUniqueNameBasedOnCurrentTime(table_base_name) + self.additional_table_names = [] def tearDown(self): self.cleanup() return super(StorageTest, self).tearDown() def cleanup(self): - for cont in [TABLE_NO_DELETE, TABLE_TO_DELETE]: - try: self.tc.delete_table(cont) + try: + self.tc.delete_table(self.table_name) + except: pass + + for name in self.additional_table_names: + try: + 
self.tc.delete_table(name) except: pass - self.tc.delete_table(self.test_table) - def test_sanity(self): - self.sanity_create_table() - time.sleep(10) - self.sanity_query_tables() - - self.sanity_delete_table() - - self.sanity_insert_entity() - self.sanity_get_entity() - self.sanity_query_entities() - self.sanity_update_entity() - self.sanity_insert_or_merge_entity() - self.sanity_insert_or_replace_entity() - self.sanity_merge_entity() - self.sanity_delete_entity() - - self.sanity_begin_batch() - self.sanity_commit_batch() - self.sanity_cancel_batch() - - def test_sanity_get_set_table_service_properties(self): + #--Helpers----------------------------------------------------------------- + def _create_table(self, table_name): + ''' + Creates a table with the specified name. + ''' + self.tc.create_table(table_name, True) + + def _create_table_with_default_entities(self, table_name, entity_count): + ''' + Creates a table with the specified name and adds entities with the + default set of values. PartitionKey is set to 'MyPartition' and RowKey + is set to a unique counter value starting at 1 (as a string). + ''' + entities = [] + self._create_table(table_name) + for i in range(1, entity_count + 1): + entities.append(self.tc.insert_entity(table_name, self._create_default_entity_dict('MyPartition', str(i)))) + return entities + + def _create_default_entity_class(self, partition, row): + ''' + Creates a class-based entity with fixed values, using all + of the supported data types. + ''' + # TODO: Edm.Binary and null + entity = Entity() + entity.PartitionKey = partition + entity.RowKey = row + entity.age = 39 + entity.sex = 'male' + entity.married = True + entity.deceased = False + entity.optional = None + entity.ratio = 3.1 + entity.large = 9333111000 + entity.Birthday = datetime(1973,10,04) + entity.birthday = datetime(1970,10,04) + entity.binary = None + entity.other = EntityProperty('Edm.Int64', 20) + entity.clsid = EntityProperty('Edm.Guid', 'c9da6455-213d-42c9-9a79-3e9149a57833') + return entity + + def _create_default_entity_dict(self, partition, row): + ''' + Creates a dictionary-based entity with fixed values, using all + of the supported data types. + ''' + # TODO: Edm.Binary and null + return {'PartitionKey':partition, + 'RowKey':row, + 'age':39, + 'sex':'male', + 'married':True, + 'deceased':False, + 'optional':None, + 'ratio':3.1, + 'large':9333111000, + 'Birthday':datetime(1973,10,04), + 'birthday':datetime(1970,10,04), + 'binary':EntityProperty('Edm.Binary', None), + 'other':EntityProperty('Edm.Int64', 20), + 'clsid':EntityProperty('Edm.Guid', 'c9da6455-213d-42c9-9a79-3e9149a57833')} + + def _create_updated_entity_dict(self, partition, row): + ''' + Creates a dictionary-based entity with fixed values, with a + different set of values than the default entity. It + adds fields, changes field values, changes field types, + and removes fields when compared to the default entity. + ''' + return {'PartitionKey':partition, + 'RowKey':row, + 'age':'abc', + 'sex':'female', + 'sign':'aquarius', + 'birthday':datetime(1991,10,04)} + + def _assert_default_entity(self, entity): + ''' + Asserts that the entity passed in matches the default entity. 
+ ''' + self.assertEquals(entity.age, 39) + self.assertEquals(entity.sex, 'male') + self.assertEquals(entity.married, True) + self.assertEquals(entity.deceased, False) + self.assertFalse(hasattr(entity, "aquarius")) + self.assertEquals(entity.ratio, 3.1) + self.assertEquals(entity.large, 9333111000) + self.assertEquals(entity.Birthday, datetime(1973,10,04)) + self.assertEquals(entity.birthday, datetime(1970,10,04)) + self.assertEquals(entity.other, 20) + self.assertIsInstance(entity.clsid, EntityProperty) + self.assertEquals(entity.clsid.type, 'Edm.Guid') + self.assertEquals(entity.clsid.value, 'c9da6455-213d-42c9-9a79-3e9149a57833') + + def _assert_updated_entity(self, entity): + ''' + Asserts that the entity passed in matches the updated entity. + ''' + self.assertEquals(entity.age, 'abc') + self.assertEquals(entity.sex, 'female') + self.assertFalse(hasattr(entity, "married")) + self.assertFalse(hasattr(entity, "deceased")) + self.assertEquals(entity.sign, 'aquarius') + self.assertFalse(hasattr(entity, "optional")) + self.assertFalse(hasattr(entity, "ratio")) + self.assertFalse(hasattr(entity, "large")) + self.assertFalse(hasattr(entity, "Birthday")) + self.assertEquals(entity.birthday, datetime(1991,10,04)) + self.assertFalse(hasattr(entity, "other")) + self.assertFalse(hasattr(entity, "clsid")) + + def _assert_merged_entity(self, entity): + ''' + Asserts that the entity passed in matches the default entity + merged with the updated entity. + ''' + self.assertEquals(entity.age, 'abc') + self.assertEquals(entity.sex, 'female') + self.assertEquals(entity.sign, 'aquarius') + self.assertEquals(entity.married, True) + self.assertEquals(entity.deceased, False) + self.assertEquals(entity.sign, 'aquarius') + self.assertEquals(entity.ratio, 3.1) + self.assertEquals(entity.large, 9333111000) + self.assertEquals(entity.Birthday, datetime(1973,10,04)) + self.assertEquals(entity.birthday, datetime(1991,10,04)) + self.assertEquals(entity.other, 20) + self.assertIsInstance(entity.clsid, EntityProperty) + self.assertEquals(entity.clsid.type, 'Edm.Guid') + self.assertEquals(entity.clsid.value, 'c9da6455-213d-42c9-9a79-3e9149a57833') + + #--Test cases for table service ------------------------------------------- + def test_get_set_table_service_properties(self): table_properties = self.tc.get_table_service_properties() self.tc.set_table_service_properties(table_properties) @@ -114,10 +214,16 @@ def test_sanity_get_set_table_service_properties(self): setattr(cur, last_attr, value) self.tc.set_table_service_properties(table_properties) - table_properties = self.tc.get_table_service_properties() - cur = table_properties - for component in path.split('.'): - cur = getattr(cur, component) + retry_count = 0 + while retry_count < MAX_RETRY: + table_properties = self.tc.get_table_service_properties() + cur = table_properties + for component in path.split('.'): + cur = getattr(cur, component) + if value == cur: + break + time.sleep(1) + retry_count += 1 self.assertEquals(value, cur) @@ -150,260 +256,501 @@ def test_table_service_set_both(self): self.assertEquals(5, table_properties.logging.retention_policy.days) + #--Test cases for tables -------------------------------------------------- + def test_create_table(self): + # Arrange - #--Helpers----------------------------------------------------------------- - def sanity_create_table(self): - resp = self.tc.create_table(TABLE_TO_DELETE) - self.assertTrue(resp) - #self.assertEqual(resp.cache_control, u'no-cache') - - resp = self.tc.create_table(TABLE_NO_DELETE) - 
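
The entity helpers above build entities using all of the supported data types, wrapping Int64 and Guid values in **EntityProperty** so their Edm type is explicit. A minimal sketch of inserting and reading back such an entity, assuming `account_name` and `account_key` are defined, `Entity` and `EntityProperty` are importable from `azure.storage`, and `'mytable'` is a hypothetical table name:

```Python
from datetime import datetime
from azure.storage import TableService, Entity, EntityProperty

ts = TableService(account_name, account_key)
ts.create_table('mytable')

# A class-based entity; types that cannot be inferred (Int64, Guid)
# are wrapped in EntityProperty, as in the helpers above.
entity = Entity()
entity.PartitionKey = 'MyPartition'
entity.RowKey = '1'
entity.age = 39
entity.married = True
entity.ratio = 3.1
entity.birthday = datetime(1970, 10, 4)
entity.other = EntityProperty('Edm.Int64', 20)
entity.clsid = EntityProperty('Edm.Guid', 'c9da6455-213d-42c9-9a79-3e9149a57833')

ts.insert_entity('mytable', entity)
fetched = ts.get_entity('mytable', 'MyPartition', '1')
```
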
self.assertTrue(resp) - #self.assertEqual(resp.cache_control, u'no-cache') - - def sanity_query_tables(self): - resp = self.tc.query_tables() - self.assertEqual(type(resp), list) - tableNames = [x.name for x in resp] - self.assertGreaterEqual(len(tableNames), 2) - self.assertIn(TABLE_NO_DELETE, tableNames) - self.assertIn(TABLE_TO_DELETE, tableNames) - - def sanity_delete_table(self): - resp = self.tc.delete_table(TABLE_TO_DELETE) - self.assertTrue(resp) - - def sanity_insert_entity(self): - resp = self.tc.insert_entity(TABLE_NO_DELETE, {'PartitionKey':'Lastname', - 'RowKey':'Firstname', - 'age':39, - 'sex':'male', - 'birthday':datetime(1973,10,04)}) - self.assertEquals(resp, None) + # Act + created = self.tc.create_table(self.table_name) - entity = Entity() - entity.PartitionKey = 'Lastname' - entity.RowKey = 'Firstname1' - entity.age = 39 - entity.Birthday = EntityProperty('Edm.Int64', 20) - - resp = self.tc.insert_entity(TABLE_NO_DELETE, entity) - self.assertEquals(resp, None) - - def sanity_get_entity(self): - ln = u'Lastname' - fn1 = u'Firstname1' - resp = self.tc.get_entity(TABLE_NO_DELETE, - ln, - fn1, - '') - self.assertEquals(resp.PartitionKey, ln) - self.assertEquals(resp.RowKey, fn1) - self.assertEquals(resp.age, 39) - self.assertEquals(resp.Birthday, 20) + # Assert + self.assertTrue(created) - def sanity_query_entities(self): - resp = self.tc.query_entities(TABLE_NO_DELETE, '', '') - self.assertEquals(len(resp), 2) - self.assertEquals(resp[0].birthday, datetime(1973, 10, 04)) - self.assertEquals(resp[1].Birthday, 20) - - def sanity_update_entity(self): - ln = u'Lastname' - fn = u'Firstname' - resp = self.tc.update_entity(TABLE_NO_DELETE, - ln, - fn, - {'PartitionKey':'Lastname', - 'RowKey':'Firstname', - 'age':21, - 'sex':'female', - 'birthday':datetime(1991,10,04)}) - self.assertEquals(resp, None) - - resp = self.tc.get_entity(TABLE_NO_DELETE, - ln, - fn, - '') - self.assertEquals(resp.PartitionKey, ln) - self.assertEquals(resp.RowKey, fn) - self.assertEquals(resp.age, 21) - self.assertEquals(resp.sex, u'female') - self.assertEquals(resp.birthday, datetime(1991, 10, 04)) - - def sanity_insert_or_merge_entity(self): - ln = u'Lastname' - fn = u'Firstname' - resp = self.tc.insert_or_merge_entity(TABLE_NO_DELETE, - ln, - fn, - {'PartitionKey':'Lastname', - 'RowKey':'Firstname', - 'age': u'abc', #changed type - 'sex':'male', #changed value - 'birthday':datetime(1991,10,04), - 'sign' : 'aquarius' #new - }) - self.assertEquals(resp, None) + def test_create_table_fail_on_exist(self): + # Arrange + + # Act + created = self.tc.create_table(self.table_name, True) + + # Assert + self.assertTrue(created) + + def test_create_table_with_already_existing_table(self): + # Arrange + + # Act + created1 = self.tc.create_table(self.table_name) + created2 = self.tc.create_table(self.table_name) + + # Assert + self.assertTrue(created1) + self.assertFalse(created2) + + def test_create_table_with_already_existing_table_fail_on_exist(self): + # Arrange + + # Act + created = self.tc.create_table(self.table_name) + with self.assertRaises(WindowsAzureError): + self.tc.create_table(self.table_name, True) + + # Assert + self.assertTrue(created) + + def test_query_tables(self): + # Arrange + self._create_table(self.table_name) + + # Act + tables = self.tc.query_tables() + for table in tables: + pass - resp = self.tc.get_entity(TABLE_NO_DELETE, - ln, - fn, - '') - self.assertEquals(resp.PartitionKey, ln) - self.assertEquals(resp.RowKey, fn) - self.assertEquals(resp.age, u'abc') - self.assertEquals(resp.sex, 
u'male')
-        self.assertEquals(resp.birthday, datetime(1991, 10, 4))
-        self.assertEquals(resp.sign, u'aquarius')
-
-    def sanity_insert_or_replace_entity(self):
-        ln = u'Lastname'
-        fn = u'Firstname'
-        resp = self.tc.insert_or_replace_entity(TABLE_NO_DELETE,
-                                                ln,
-                                                fn,
-                                                {'PartitionKey':'Lastname',
-                                                 'RowKey':'Firstname',
-                                                 'age':1,
-                                                 'sex':'male'})
-        self.assertEquals(resp, None)
-
-        resp = self.tc.get_entity(TABLE_NO_DELETE,
-                                  ln,
-                                  fn,
-                                  '')
-        self.assertEquals(resp.PartitionKey, ln)
-        self.assertEquals(resp.RowKey, fn)
-        self.assertEquals(resp.age, 1)
-        self.assertEquals(resp.sex, u'male')
-        self.assertFalse(hasattr(resp, "birthday"))
-        self.assertFalse(hasattr(resp, "sign"))
-
-    def sanity_merge_entity(self):
-        ln = u'Lastname'
-        fn = u'Firstname'
-        resp = self.tc.merge_entity(TABLE_NO_DELETE,
-                                    ln,
-                                    fn,
-                                    {'PartitionKey':'Lastname',
-                                     'RowKey':'Firstname',
-                                     'sex':'female',
-                                     'fact': 'nice person'})
-        self.assertEquals(resp, None)
-
-        resp = self.tc.get_entity(TABLE_NO_DELETE,
-                                  ln,
-                                  fn,
-                                  '')
-        self.assertEquals(resp.PartitionKey, ln)
-        self.assertEquals(resp.RowKey, fn)
-        self.assertEquals(resp.age, 1)
-        self.assertEquals(resp.sex, u'female')
-        self.assertEquals(resp.fact, u'nice person')
+        # Assert
+        tableNames = [x.name for x in tables]
+        self.assertGreaterEqual(len(tableNames), 1)
+        self.assertGreaterEqual(len(tables), 1)
+        self.assertIn(self.table_name, tableNames)
+
+    def test_query_tables_with_table_name(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        tables = self.tc.query_tables(self.table_name)
+        for table in tables:
+            pass
+
+        # Assert
+        self.assertEqual(len(tables), 1)
+        self.assertEqual(tables[0].name, self.table_name)
+
+    def test_query_tables_with_table_name_no_tables(self):
+        # Arrange
+
+        # Act
+        with self.assertRaises(WindowsAzureError):
+            self.tc.query_tables(self.table_name)
+
+        # Assert
+
+    def test_query_tables_with_top(self):
+        # Arrange
+        self.additional_table_names = [self.table_name + suffix for suffix in 'abcd']
+        for name in self.additional_table_names:
+            self.tc.create_table(name)
+
+        # Act
+        tables = self.tc.query_tables(None, 3)
+        for table in tables:
+            pass
+
+        # Assert
+        self.assertEqual(len(tables), 3)
+
+    def test_query_tables_with_top_and_next_table_name(self):
+        # Arrange
+        self.additional_table_names = [self.table_name + suffix for suffix in 'abcd']
+        for name in self.additional_table_names:
+            self.tc.create_table(name)
+
+        # Act
+        tables_set1 = self.tc.query_tables(None, 3)
+        tables_set2 = self.tc.query_tables(None, 3, tables_set1.x_ms_continuation['NextTableName'])
+
+        # Assert
+        self.assertEqual(len(tables_set1), 3)
+        self.assertGreaterEqual(len(tables_set2), 1)
+        self.assertLessEqual(len(tables_set2), 3)
+
+    def test_delete_table_with_existing_table(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        deleted = self.tc.delete_table(self.table_name)
+
+        # Assert
+        self.assertTrue(deleted)
+        tables = self.tc.query_tables()
+        self.assertNamedItemNotInContainer(tables, self.table_name)
+
+    def test_delete_table_with_existing_table_fail_not_exist(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        deleted = self.tc.delete_table(self.table_name, True)
+
+        # Assert
+        self.assertTrue(deleted)
+        tables = self.tc.query_tables()
+        self.assertNamedItemNotInContainer(tables, self.table_name)
+
+    def test_delete_table_with_non_existing_table(self):
+        # Arrange
+
+        # Act
+        deleted = self.tc.delete_table(self.table_name)
+
+        # Assert
+        self.assertFalse(deleted)
+
+    def test_delete_table_with_non_existing_table_fail_not_exist(self):
+        # Arrange
+
+        # Act
+        with self.assertRaises(WindowsAzureError):
+            self.tc.delete_table(self.table_name, True)
+
+        # Assert
+
+    #--Test cases for entities ------------------------------------------
+    def test_insert_entity_dictionary(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        dict = self._create_default_entity_dict('MyPartition', '1')
+        resp = self.tc.insert_entity(self.table_name, dict)
+
+        # Assert
+        self.assertIsNotNone(resp)
+
+    def test_insert_entity_class_instance(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        entity = self._create_default_entity_class('MyPartition', '1')
+        resp = self.tc.insert_entity(self.table_name, entity)
+
+        # Assert
+        self.assertIsNotNone(resp)
+
+    def test_insert_entity_conflict(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        with self.assertRaises(WindowsAzureError):
+            self.tc.insert_entity(self.table_name, self._create_default_entity_dict('MyPartition', '1'))
+
+        # Assert
+
+    def test_get_entity(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        resp = self.tc.get_entity(self.table_name, 'MyPartition', '1')
+
+        # Assert
+        self.assertEquals(resp.PartitionKey, 'MyPartition')
+        self.assertEquals(resp.RowKey, '1')
+        self._assert_default_entity(resp)
+
+    def test_get_entity_not_existing(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        with self.assertRaises(WindowsAzureError):
+            self.tc.get_entity(self.table_name, 'MyPartition', '1')
+
+        # Assert
+
+    def test_get_entity_with_select(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        resp = self.tc.get_entity(self.table_name, 'MyPartition', '1', 'age,sex')
+
+        # Assert
+        self.assertEquals(resp.age, 39)
+        self.assertEquals(resp.sex, 'male')
         self.assertFalse(hasattr(resp, "birthday"))
+        self.assertFalse(hasattr(resp, "married"))
+        self.assertFalse(hasattr(resp, "deceased"))
-    def sanity_delete_entity(self):
-        ln = u'Lastname'
-        fn = u'Firstname'
-        resp = self.tc.delete_entity(TABLE_NO_DELETE,
-                                     ln,
-                                     fn)
-        self.assertEquals(resp, None)
+    def test_query_entities(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 2)
-        self.assertRaises(WindowsAzureError,
-                          lambda: self.tc.get_entity(TABLE_NO_DELETE, ln, fn, ''))
-
-    def test_batch_partition_key(self):
-        tn = BATCH_TABLE + 'pk'
-        self.tc.create_table(tn)
-        try:
-            self.tc.begin_batch()
-            self.tc.insert_entity(TABLE_NO_DELETE, {'PartitionKey':'Lastname',
-                                                    'RowKey':'Firstname',
-                                                    'age':39,
-                                                    'sex':'male',
-                                                    'birthday':datetime(1973,10,04)})
-
-            self.tc.insert_entity(TABLE_NO_DELETE, {'PartitionKey':'Lastname',
-                                                    'RowKey':'Firstname2',
-                                                    'age':39,
-                                                    'sex':'male',
-                                                    'birthday':datetime(1973,10,04)})
-
-            self.tc.commit_batch()
-        finally:
-            self.tc.delete_table(tn)
-
-    def test_sanity_batch(self):
-        return
-        self.tc.create_table(BATCH_TABLE)
-
-        #resp = self.tc.begin_batch()
-        #self.assertEquals(resp, None)
-
-        resp = self.tc.insert_entity(BATCH_TABLE, {'PartitionKey':'Lastname',
-                                                   'RowKey':'Firstname',
-                                                   'age':39,
-                                                   'sex':'male',
-                                                   'birthday':datetime(1973,10,04)})
-
-        #resp = self.tc.insert_entity(BATCH_TABLE, {'PartitionKey':'Lastname',
-        #                                           'RowKey':'Firstname2',
-        #                                           'age':35,
-        #                                           'sex':'female',
-        #                                           'birthday':datetime(1977,12,5)})
-        #
-        resp = self.tc.query_entities(BATCH_TABLE, '', '')
-        self.assertEquals(len(resp), 0)
-
-        #self.tc.commit_batch()
-        return
-        resp = self.tc.query_entities(BATCH_TABLE, '', '')
+        # Act
+        resp = self.tc.query_entities(self.table_name)
+
+        # Assert
         self.assertEquals(len(resp), 2)
+        for entity in resp:
+            self.assertEquals(entity.PartitionKey, 'MyPartition')
+            self._assert_default_entity(entity)
+        self.assertEquals(resp[0].RowKey, '1')
+        self.assertEquals(resp[1].RowKey, '2')
-        self.tc.delete_table(BATCH_TABLE)
-
-    def sanity_begin_batch(self):
-        resp = self.tc.begin_batch()
-        self.assertEquals(resp, None)
+    def test_query_entities_with_filter(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 2)
+        self.tc.insert_entity(self.table_name, self._create_default_entity_dict('MyOtherPartition', '3'))
+
+        # Act
+        resp = self.tc.query_entities(self.table_name, "PartitionKey eq 'MyPartition'")
+
+        # Assert
+        self.assertEquals(len(resp), 2)
+        for entity in resp:
+            self.assertEquals(entity.PartitionKey, 'MyPartition')
+            self._assert_default_entity(entity)
+
+    def test_query_entities_with_select(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 2)
+
+        # Act
+        resp = self.tc.query_entities(self.table_name, None, 'age,sex')
+
+        # Assert
+        self.assertEquals(len(resp), 2)
+        self.assertEquals(resp[0].age, 39)
+        self.assertEquals(resp[0].sex, 'male')
+        self.assertFalse(hasattr(resp[0], "birthday"))
+        self.assertFalse(hasattr(resp[0], "married"))
+        self.assertFalse(hasattr(resp[0], "deceased"))
+
+    def test_query_entities_with_top(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 3)
+
+        # Act
+        resp = self.tc.query_entities(self.table_name, None, None, 2)
+
+        # Assert
+        self.assertEquals(len(resp), 2)
+
+    def test_query_entities_with_top_and_next(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 5)
+
+        # Act
+        resp1 = self.tc.query_entities(self.table_name, None, None, 2)
+        resp2 = self.tc.query_entities(self.table_name, None, None, 2, resp1.x_ms_continuation['NextPartitionKey'], resp1.x_ms_continuation['NextRowKey'])
+        resp3 = self.tc.query_entities(self.table_name, None, None, 2, resp2.x_ms_continuation['NextPartitionKey'], resp2.x_ms_continuation['NextRowKey'])
+
+        # Assert
+        self.assertEquals(len(resp1), 2)
+        self.assertEquals(len(resp2), 2)
+        self.assertEquals(len(resp3), 1)
+        self.assertEquals(resp1[0].RowKey, '1')
+        self.assertEquals(resp1[1].RowKey, '2')
+        self.assertEquals(resp2[0].RowKey, '3')
+        self.assertEquals(resp2[1].RowKey, '4')
+        self.assertEquals(resp3[0].RowKey, '5')
+
+    def test_update_entity(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        resp = self.tc.update_entity(self.table_name, 'MyPartition', '1', sent_entity)
+
+        # Assert
+        self.assertIsNotNone(resp)
+        received_entity = self.tc.get_entity(self.table_name, 'MyPartition', '1')
+        self._assert_updated_entity(received_entity)
+
+    def test_update_entity_with_if_matches(self):
+        # Arrange
+        entities = self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        resp = self.tc.update_entity(self.table_name, 'MyPartition', '1', sent_entity, if_match=entities[0].etag)
+
+        # Assert
+        self.assertIsNotNone(resp)
+        received_entity = self.tc.get_entity(self.table_name, 'MyPartition', '1')
+        self._assert_updated_entity(received_entity)
+
+    def test_update_entity_with_if_doesnt_match(self):
+        # Arrange
+        entities = self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        with self.assertRaises(WindowsAzureError):
+            self.tc.update_entity(self.table_name, 'MyPartition', '1', sent_entity, if_match=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"')
+
+        # Assert
+
+    def test_insert_or_merge_entity_with_existing_entity(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        resp = self.tc.insert_or_merge_entity(self.table_name, 'MyPartition', '1', sent_entity)
+
+        # Assert
+        self.assertIsNotNone(resp)
+        received_entity = self.tc.get_entity(self.table_name, 'MyPartition', '1')
+        self._assert_merged_entity(received_entity)
+
+    def test_insert_or_merge_entity_with_non_existing_entity(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        resp = self.tc.insert_or_merge_entity(self.table_name, 'MyPartition', '1', sent_entity)
+
+        # Assert
+        self.assertIsNotNone(resp)
+        received_entity = self.tc.get_entity(self.table_name, 'MyPartition', '1')
+        self._assert_updated_entity(received_entity)
+
+    def test_insert_or_replace_entity_with_existing_entity(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        resp = self.tc.insert_or_replace_entity(self.table_name, 'MyPartition', '1', sent_entity)
+
+        # Assert
+        self.assertIsNotNone(resp)
+        received_entity = self.tc.get_entity(self.table_name, 'MyPartition', '1')
+        self._assert_updated_entity(received_entity)
+
+    def test_insert_or_replace_entity_with_non_existing_entity(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        resp = self.tc.insert_or_replace_entity(self.table_name, 'MyPartition', '1', sent_entity)
+
+        # Assert
+        self.assertIsNotNone(resp)
+        received_entity = self.tc.get_entity(self.table_name, 'MyPartition', '1')
+        self._assert_updated_entity(received_entity)
+
+    def test_merge_entity(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        resp = self.tc.merge_entity(self.table_name, 'MyPartition', '1', sent_entity)
+
+        # Assert
+        self.assertIsNotNone(resp)
+        received_entity = self.tc.get_entity(self.table_name, 'MyPartition', '1')
+        self._assert_merged_entity(received_entity)
+
+    def test_merge_entity_not_existing(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        with self.assertRaises(WindowsAzureError):
+            self.tc.merge_entity(self.table_name, 'MyPartition', '1', sent_entity)
+
+        # Assert
+
+    def test_merge_entity_with_if_matches(self):
+        # Arrange
+        entities = self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        resp = self.tc.merge_entity(self.table_name, 'MyPartition', '1', sent_entity, if_match=entities[0].etag)
+
+        # Assert
+        self.assertIsNotNone(resp)
+        received_entity = self.tc.get_entity(self.table_name, 'MyPartition', '1')
+        self._assert_merged_entity(received_entity)
+
+    def test_merge_entity_with_if_doesnt_match(self):
+        # Arrange
+        entities = self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        sent_entity = self._create_updated_entity_dict('MyPartition','1')
+        with self.assertRaises(WindowsAzureError):
+            self.tc.merge_entity(self.table_name, 'MyPartition', '1', sent_entity, if_match=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"')
+
+        # Assert
+
+    def test_delete_entity(self):
+        # Arrange
+        self._create_table_with_default_entities(self.table_name, 1)
+
+        # Act
+        resp = self.tc.delete_entity(self.table_name, 'MyPartition', '1')
+
+        # Assert
+        self.assertIsNone(resp)
+        with self.assertRaises(WindowsAzureError):
+            self.tc.get_entity(self.table_name, 'MyPartition', '1')
+
+    def test_delete_entity_not_existing(self):
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
+        with self.assertRaises(WindowsAzureError):
+            self.tc.delete_entity(self.table_name, 'MyPartition', '1')
+
+        # Assert
+
+    def test_delete_entity_with_if_matches(self):
+        # Arrange
+        entities = self._create_table_with_default_entities(self.table_name, 1)
-    def sanity_commit_batch(self):
-        resp = self.tc.commit_batch()
-        self.assertEquals(resp, None)
+        # Act
+        resp = self.tc.delete_entity(self.table_name, 'MyPartition', '1', if_match=entities[0].etag)
-    def sanity_cancel_batch(self):
-        resp = self.tc.cancel_batch()
-        self.assertEquals(resp, None)
+        # Assert
+        self.assertIsNone(resp)
+        with self.assertRaises(WindowsAzureError):
+            self.tc.get_entity(self.table_name, 'MyPartition', '1')
-    def test_query_tables_top(self):
-        table_id = getUniqueTestRunID()
-        for i in xrange(20):
-            self.tc.create_table(table_id + str(i))
+    def test_delete_entity_with_if_doesnt_match(self):
+        # Arrange
+        entities = self._create_table_with_default_entities(self.table_name, 1)
-        res = self.tc.query_tables(top = 5)
-        self.assertEqual(len(res), 5)
+        # Act
+        with self.assertRaises(WindowsAzureError):
+            self.tc.delete_entity(self.table_name, 'MyPartition', '1', if_match=u'W/"datetime\'2012-06-15T22%3A51%3A44.9662825Z\'"')
-    def test_with_filter(self):
-        # Single filter
+        # Assert
+
+    #--Test cases for batch ---------------------------------------------
+    def test_with_filter_single(self):
         called = []
+
         def my_filter(request, next):
             called.append(True)
             return next(request)
         tc = self.tc.with_filter(my_filter)
-        tc.create_table(FILTER_TABLE)
+        tc.create_table(self.table_name)
         self.assertTrue(called)
         del called[:]
-        tc.delete_table(FILTER_TABLE)
+        tc.delete_table(self.table_name)
         self.assertTrue(called)
         del called[:]
-        # Chained filters
+    def test_with_filter_chained(self):
+        called = []
+
         def filter_a(request, next):
             called.append('a')
             return next(request)
@@ -413,14 +760,17 @@ def filter_b(request, next):
             return next(request)
         tc = self.tc.with_filter(filter_a).with_filter(filter_b)
-        tc.create_table(FILTER_TABLE + '0')
+        tc.create_table(self.table_name)
         self.assertEqual(called, ['b', 'a'])
-        tc.delete_table(FILTER_TABLE + '0')
+        tc.delete_table(self.table_name)
     def test_batch_insert(self):
-        #Act
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
         entity = Entity()
         entity.PartitionKey = '001'
         entity.RowKey = 'batch_insert'
@@ -431,15 +781,18 @@ def test_batch_insert(self):
         entity.test5 = datetime.utcnow()
         self.tc.begin_batch()
-        self.tc.insert_entity(self.test_table, entity)
+        self.tc.insert_entity(self.table_name, entity)
         self.tc.commit_batch()
-        #Assert
-        result = self.tc.get_entity(self.test_table, '001', 'batch_insert')
+        # Assert
+        result = self.tc.get_entity(self.table_name, '001', 'batch_insert')
         self.assertIsNotNone(result)
     def test_batch_update(self):
-        #Act
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
         entity = Entity()
         entity.PartitionKey = '001'
         entity.RowKey = 'batch_update'
@@ -448,21 +801,24 @@ def test_batch_update(self):
         entity.test3 = 3
         entity.test4 = EntityProperty('Edm.Int64', '1234567890')
         entity.test5 = datetime.utcnow()
-        self.tc.insert_entity(self.test_table, entity)
+        self.tc.insert_entity(self.table_name, entity)
-        entity = self.tc.get_entity(self.test_table, '001', 'batch_update')
+        entity = self.tc.get_entity(self.table_name, '001', 'batch_update')
         self.assertEqual(3, entity.test3)
         entity.test2 = 'value1'
         self.tc.begin_batch()
-        self.tc.update_entity(self.test_table, '001', 'batch_update', entity)
+        self.tc.update_entity(self.table_name, '001', 'batch_update', entity)
         self.tc.commit_batch()
-        entity = self.tc.get_entity(self.test_table, '001', 'batch_update')
+        entity = self.tc.get_entity(self.table_name, '001', 'batch_update')
-        #Assert
+        # Assert
         self.assertEqual('value1', entity.test2)
     def test_batch_merge(self):
-        #Act
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
         entity = Entity()
         entity.PartitionKey = '001'
         entity.RowKey = 'batch_merge'
@@ -471,25 +827,28 @@ def test_batch_merge(self):
         entity.test3 = 3
         entity.test4 = EntityProperty('Edm.Int64', '1234567890')
         entity.test5 = datetime.utcnow()
-        self.tc.insert_entity(self.test_table, entity)
+        self.tc.insert_entity(self.table_name, entity)
-        entity = self.tc.get_entity(self.test_table, '001', 'batch_merge')
+        entity = self.tc.get_entity(self.table_name, '001', 'batch_merge')
         self.assertEqual(3, entity.test3)
         entity = Entity()
         entity.PartitionKey = '001'
         entity.RowKey = 'batch_merge'
         entity.test2 = 'value1'
         self.tc.begin_batch()
-        self.tc.merge_entity(self.test_table, '001', 'batch_merge', entity)
+        self.tc.merge_entity(self.table_name, '001', 'batch_merge', entity)
         self.tc.commit_batch()
-        entity = self.tc.get_entity(self.test_table, '001', 'batch_merge')
+        entity = self.tc.get_entity(self.table_name, '001', 'batch_merge')
-        #Assert
+        # Assert
         self.assertEqual('value1', entity.test2)
         self.assertEqual(1234567890, entity.test4)
     def test_batch_insert_replace(self):
-        #Act
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
         entity = Entity()
         entity.PartitionKey = '001'
         entity.RowKey = 'batch_insert_replace'
@@ -499,18 +858,21 @@ def test_batch_insert_replace(self):
         entity.test4 = EntityProperty('Edm.Int64', '1234567890')
         entity.test5 = datetime.utcnow()
         self.tc.begin_batch()
-        self.tc.insert_or_replace_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
+        self.tc.insert_or_replace_entity(self.table_name, entity.PartitionKey, entity.RowKey, entity)
         self.tc.commit_batch()
-        entity = self.tc.get_entity(self.test_table, '001', 'batch_insert_replace')
+        entity = self.tc.get_entity(self.table_name, '001', 'batch_insert_replace')
-        #Assert
+        # Assert
         self.assertIsNotNone(entity)
         self.assertEqual('value', entity.test2)
         self.assertEqual(1234567890, entity.test4)
     def test_batch_insert_merge(self):
-        #Act
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
         entity = Entity()
         entity.PartitionKey = '001'
         entity.RowKey = 'batch_insert_merge'
@@ -520,18 +882,21 @@ def test_batch_insert_merge(self):
         entity.test4 = EntityProperty('Edm.Int64', '1234567890')
         entity.test5 = datetime.utcnow()
         self.tc.begin_batch()
-        self.tc.insert_or_merge_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
+        self.tc.insert_or_merge_entity(self.table_name, entity.PartitionKey, entity.RowKey, entity)
         self.tc.commit_batch()
-        entity = self.tc.get_entity(self.test_table, '001', 'batch_insert_merge')
+        entity = self.tc.get_entity(self.table_name, '001', 'batch_insert_merge')
-        #Assert
+        # Assert
         self.assertIsNotNone(entity)
         self.assertEqual('value', entity.test2)
         self.assertEqual(1234567890, entity.test4)
     def test_batch_delete(self):
-        #Act
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
         entity = Entity()
         entity.PartitionKey = '001'
         entity.RowKey = 'batch_delete'
@@ -540,16 +905,19 @@ def test_batch_delete(self):
         entity.test3 = 3
         entity.test4 = EntityProperty('Edm.Int64', '1234567890')
         entity.test5 = datetime.utcnow()
-        self.tc.insert_entity(self.test_table, entity)
+        self.tc.insert_entity(self.table_name, entity)
-        entity = self.tc.get_entity(self.test_table, '001', 'batch_delete')
+        entity = self.tc.get_entity(self.table_name, '001', 'batch_delete')
         #self.assertEqual(3, entity.test3)
         self.tc.begin_batch()
-        self.tc.delete_entity(self.test_table, '001', 'batch_delete')
+        self.tc.delete_entity(self.table_name, '001', 'batch_delete')
         self.tc.commit_batch()
     def test_batch_inserts(self):
-        #Act
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
         entity = Entity()
         entity.PartitionKey = 'batch_inserts'
         entity.test = EntityProperty('Edm.Boolean', 'true')
@@ -560,17 +928,20 @@ def test_batch_inserts(self):
         self.tc.begin_batch()
         for i in range(100):
             entity.RowKey = str(i)
-            self.tc.insert_entity(self.test_table, entity)
+            self.tc.insert_entity(self.table_name, entity)
         self.tc.commit_batch()
-        entities = self.tc.query_entities(self.test_table, "PartitionKey eq 'batch_inserts'", '')
+        entities = self.tc.query_entities(self.table_name, "PartitionKey eq 'batch_inserts'", '')
-        #Assert
+        # Assert
         self.assertIsNotNone(entities);
         self.assertEqual(100, len(entities))
     def test_batch_all_operations_together(self):
-        #Act
+        # Arrange
+        self._create_table(self.table_name)
+
+        # Act
         entity = Entity()
         entity.PartitionKey = '003'
         entity.RowKey = 'batch_all_operations_together-1'
@@ -579,79 +950,96 @@ def test_batch_all_operations_together(self):
         entity.test3 = 3
         entity.test4 = EntityProperty('Edm.Int64', '1234567890')
         entity.test5 = datetime.utcnow()
-        self.tc.insert_entity(self.test_table, entity)
+        self.tc.insert_entity(self.table_name, entity)
         entity.RowKey = 'batch_all_operations_together-2'
-        self.tc.insert_entity(self.test_table, entity)
+        self.tc.insert_entity(self.table_name, entity)
         entity.RowKey = 'batch_all_operations_together-3'
-        self.tc.insert_entity(self.test_table, entity)
+        self.tc.insert_entity(self.table_name, entity)
         entity.RowKey = 'batch_all_operations_together-4'
-        self.tc.insert_entity(self.test_table, entity)
+        self.tc.insert_entity(self.table_name, entity)
         self.tc.begin_batch()
         entity.RowKey = 'batch_all_operations_together'
-        self.tc.insert_entity(self.test_table, entity)
+        self.tc.insert_entity(self.table_name, entity)
         entity.RowKey = 'batch_all_operations_together-1'
-        self.tc.delete_entity(self.test_table, entity.PartitionKey, entity.RowKey)
+        self.tc.delete_entity(self.table_name, entity.PartitionKey, entity.RowKey)
         entity.RowKey = 'batch_all_operations_together-2'
         entity.test3 = 10
-        self.tc.update_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
+        self.tc.update_entity(self.table_name, entity.PartitionKey, entity.RowKey, entity)
         entity.RowKey = 'batch_all_operations_together-3'
         entity.test3 = 100
-        self.tc.merge_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
+        self.tc.merge_entity(self.table_name, entity.PartitionKey, entity.RowKey, entity)
         entity.RowKey = 'batch_all_operations_together-4'
         entity.test3 = 10
-        self.tc.insert_or_replace_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
+        self.tc.insert_or_replace_entity(self.table_name, entity.PartitionKey, entity.RowKey, entity)
         entity.RowKey = 'batch_all_operations_together-5'
-        self.tc.insert_or_merge_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
+        self.tc.insert_or_merge_entity(self.table_name, entity.PartitionKey, entity.RowKey, entity)
         self.tc.commit_batch()
-        #Assert
-        entities = self.tc.query_entities(self.test_table, "PartitionKey eq '003'", '')
+        # Assert
+        entities = self.tc.query_entities(self.table_name, "PartitionKey eq '003'", '')
         self.assertEqual(5, len(entities))
-    def test_batch_negative(self):
-        #Act
-        entity = Entity()
-        entity.PartitionKey = '001'
-        entity.RowKey = 'batch_negative_1'
-        entity.test = 1
-
-        self.tc.insert_entity(self.test_table, entity)
-        entity.test = 2
-        entity.RowKey = 'batch_negative_2'
-        self.tc.insert_entity(self.test_table, entity)
-        entity.test = 3
-        entity.RowKey = 'batch_negative_3'
-        self.tc.insert_entity(self.test_table, entity)
-        entity.test = -2
-        self.tc.update_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
-
-        try:
+    def test_batch_same_row_operations_fail(self):
+        # Arrange
+        self._create_table(self.table_name)
+        entity = self._create_default_entity_dict('001', 'batch_negative_1')
+        self.tc.insert_entity(self.table_name, entity)
+
+        # Act
+        with self.assertRaises(WindowsAzureError):
            self.tc.begin_batch()
-            entity.RowKey = 'batch_negative_1'
-            self.tc.update_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
-            self.tc.merge_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
-            self.fail('Should raise WindowsAzueError exception')
-            self.tc.commit_batch()
-        except:
-            self.tc.cancel_batch()
-            pass
-
-        try:
+            entity = self._create_updated_entity_dict('001', 'batch_negative_1')
+            self.tc.update_entity(self.table_name, entity['PartitionKey'], entity['RowKey'], entity)
+
+            entity = self._create_default_entity_dict('001', 'batch_negative_1')
+            self.tc.merge_entity(self.table_name, entity['PartitionKey'], entity['RowKey'], entity)
+
+            self.tc.cancel_batch()
+
+        # Assert
+
+    def test_batch_different_partition_operations_fail(self):
+        # Arrange
+        self._create_table(self.table_name)
+        entity = self._create_default_entity_dict('001', 'batch_negative_1')
+        self.tc.insert_entity(self.table_name, entity)
+
+        # Act
+        with self.assertRaises(WindowsAzureError):
            self.tc.begin_batch()
-            entity.PartitionKey = '001'
-            entity.RowKey = 'batch_negative_1'
-            self.tc.update_entity(self.test_table, entity.PartitionKey, entity.RowKey, entity)
-            entity.PartitionKey = '002'
-            entity.RowKey = 'batch_negative_1'
-            self.tc.insert_entity(self.test_table, entity)
-            self.fail('Should raise WindowsAzueError exception')
-            self.tc.commit_batch()
-        except:
-            self.tc.cancel_batch()
-            pass
-
+
+            entity = self._create_updated_entity_dict('001', 'batch_negative_1')
+            self.tc.update_entity(self.table_name, entity['PartitionKey'], entity['RowKey'], entity)
+
+            entity = self._create_default_entity_dict('002', 'batch_negative_1')
+            self.tc.insert_entity(self.table_name, entity)
+
+            self.tc.cancel_batch()
+
+        # Assert
+
+    def test_batch_different_table_operations_fail(self):
+        # Arrange
+        other_table_name = self.table_name + 'other'
+        self.additional_table_names = [other_table_name]
+        self._create_table(self.table_name)
+        self._create_table(other_table_name)
+
+        # Act
+        with self.assertRaises(WindowsAzureError):
+            self.tc.begin_batch()
+
+            entity = self._create_default_entity_dict('001', 'batch_negative_1')
+            self.tc.insert_entity(self.table_name, entity)
+
+            entity = self._create_default_entity_dict('001', 'batch_negative_2')
+            self.tc.insert_entity(other_table_name, entity)
+
+            self.tc.cancel_batch()
+
+        # Assert
 #------------------------------------------------------------------------------
 if __name__ == '__main__':
diff --git a/test/azuretest/util.py b/test/azuretest/util.py
index 5a803bd297cc..6ea3461d7c0d 100644
--- a/test/azuretest/util.py
+++ b/test/azuretest/util.py
@@ -14,6 +14,7 @@ import json
 import os
 import time
+import unittest
 from exceptions import EnvironmentError
 STATUS_OK = 200
@@ -60,9 +61,6 @@ def getStorageServicesKey(self):
     def getStorageServicesName(self):
         return self.ns[u'storageservicesname']
-    def getHostServiceID(self):
-        return self.ns[u'hostserviceid']
-
 credentials = Credentials()
 def getUniqueTestRunID():
@@ -83,7 +81,8 @@
     for bad in ["-", "_", " ", "."]:
         ret_val = ret_val.replace(bad, "")
     ret_val = ret_val.lower().strip()
-    return ret_val
+    # Only return the first 20 characters so the length of the queue/table name stays under 64. It may not be unique, but that doesn't matter for the tests.
+    return ret_val[:20]
 def getUniqueNameBasedOnCurrentTime(base_name):
     '''
@@ -96,3 +95,18 @@ def getUniqueNameBasedOnCurrentTime(base_name):
         cur_time = cur_time.replace(bad, "")
     cur_time = cur_time.lower().strip()
     return base_name + cur_time
+
+class AzureTestCase(unittest.TestCase):
+    def assertNamedItemInContainer(self, container, item_name, msg=None):
+        for item in container:
+            if item.name == item_name:
+                return
+
+        standardMsg = '%s not found in %s' % (repr(item_name), repr(container))
+        self.fail(self._formatMessage(msg, standardMsg))
+
+    def assertNamedItemNotInContainer(self, container, item_name, msg=None):
+        for item in container:
+            if item.name == item_name:
+                standardMsg = '%s unexpectedly found in %s' % (repr(item_name), repr(container))
+                self.fail(self._formatMessage(msg, standardMsg))