Skip to content

Commit b191e1b

Browse files
authored
Merge branch 'master' into restapi_auto_compute/resource-manager
2 parents 2172cf8 + c034a52 commit b191e1b

File tree

692 files changed

+1814
-6217
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

692 files changed

+1814
-6217
lines changed

.gitignore

+3-1
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22
__pycache__/
33
*.pyc
44
.pytest_cache
5+
.mypy_cache
6+
.cache
57

68
# Virtual environment
79
env*/
@@ -26,7 +28,7 @@ build/
2628
# Test results
2729
TestResults/
2830

29-
# Credentials
31+
# Credentials
3032
credentials_real.json
3133
testsettings_local.json
3234
testsettings_local.cfg

azure-applicationinsights/MANIFEST.in

-1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1 @@
11
include *.rst
2-
include azure_bdist_wheel.py

azure-applicationinsights/README.rst

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ This is the Microsoft Azure Application Insights Client Library.
66
Azure Resource Manager (ARM) is the next generation of management APIs that
77
replace the old Azure Service Management (ASM).
88

9-
This package has been tested with Python 2.7, 3.4, 3.5 and 3.6.
9+
This package has been tested with Python 2.7, 3.4, 3.5, 3.6 and 3.7.
1010

1111
For the older Azure Service Management (ASM) libraries, see
1212
`azure-servicemanagement-legacy <https://pypi.python.org/pypi/azure-servicemanagement-legacy>`__ library.
+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__import__('pkg_resources').declare_namespace(__name__)
1+
__path__ = __import__('pkgutil').extend_path(__path__, __name__)

azure-applicationinsights/azure_bdist_wheel.py

-54
This file was deleted.

azure-applicationinsights/setup.cfg

-1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,2 @@
11
[bdist_wheel]
22
universal=1
3-
azure-namespace-package=azure-nspkg

azure-applicationinsights/setup.py

+9-9
Original file line numberDiff line numberDiff line change
@@ -10,12 +10,6 @@
1010
import os.path
1111
from io import open
1212
from setuptools import find_packages, setup
13-
try:
14-
from azure_bdist_wheel import cmdclass
15-
except ImportError:
16-
from distutils import log as logger
17-
logger.warn("Wheel is not available, disabling bdist_wheel hook")
18-
cmdclass = {}
1913

2014
# Change the PACKAGE_NAME only to change folder and different name
2115
PACKAGE_NAME = "azure-applicationinsights"
@@ -76,10 +70,16 @@
7670
'License :: OSI Approved :: MIT License',
7771
],
7872
zip_safe=False,
79-
packages=find_packages(exclude=["tests"]),
73+
packages=find_packages(exclude=[
74+
'tests',
75+
# Exclude packages that will be covered by PEP420 or nspkg
76+
'azure',
77+
]),
8078
install_requires=[
81-
'msrest>=0.5.4,<2.0.0',
79+
'msrest>=0.5.0',
8280
'azure-common~=1.1',
8381
],
84-
cmdclass=cmdclass
82+
extras_require={
83+
":python_version<'3.0'": ['azure-nspkg'],
84+
}
8585
)

azure-batch/azure/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__import__('pkg_resources').declare_namespace(__name__)
1+
__path__ = __import__('pkgutil').extend_path(__path__, __name__)

azure-batch/azure/batch/batch_service_client.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -113,5 +113,6 @@ def __init__(
113113
self._client, self.config, self._serialize, self._deserialize)
114114
self.compute_node = ComputeNodeOperations(
115115
self._client, self.config, self._serialize, self._deserialize)
116-
117-
patch_client(self)
116+
117+
118+
patch_client()

azure-batch/azure/batch/custom/patch.py

+102-97
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ class _TaskWorkflowManager(object):
3333
def __init__(
3434
self,
3535
client,
36+
original_add_collection,
3637
job_id,
3738
tasks_to_add,
3839
task_add_collection_options=None,
@@ -55,8 +56,8 @@ def __init__(
5556
self._pending_queue_lock = threading.Lock()
5657

5758
# Variables to be used for task add_collection requests
58-
self._client = TaskOperations(
59-
client._client, client.config, client._serialize, client._deserialize)
59+
self._client = client
60+
self._original_add_collection = original_add_collection
6061
self._job_id = job_id
6162
self._task_add_collection_options = task_add_collection_options
6263
self._custom_headers = custom_headers
@@ -76,7 +77,8 @@ def _bulk_add_tasks(self, results_queue, chunk_tasks_to_add):
7677
"""
7778

7879
try:
79-
add_collection_response = self._client.add_collection(
80+
add_collection_response = self._original_add_collection(
81+
self._client,
8082
self._job_id,
8183
chunk_tasks_to_add,
8284
self._task_add_collection_options,
@@ -193,104 +195,107 @@ def _handle_output(results_queue):
193195
results.append(queue_item)
194196
return results
195197

196-
def patch_client(client):
198+
199+
def build_new_add_collection(original_add_collection):
200+
def bulk_add_collection(
201+
self,
202+
job_id,
203+
value,
204+
task_add_collection_options=None,
205+
custom_headers=None,
206+
raw=False,
207+
threads=0,
208+
**operation_config):
209+
"""Adds a collection of tasks to the specified job.
210+
211+
Note that each task must have a unique ID. The Batch service may not
212+
return the results for each task in the same order the tasks were
213+
submitted in this request. If the server times out or the connection is
214+
closed during the request, the request may have been partially or fully
215+
processed, or not at all. In such cases, the user should re-issue the
216+
request. Note that it is up to the user to correctly handle failures
217+
when re-issuing a request. For example, you should use the same task
218+
IDs during a retry so that if the prior operation succeeded, the retry
219+
will not create extra tasks unexpectedly. If the response contains any
220+
tasks which failed to add, a client can retry the request. In a retry,
221+
it is most efficient to resubmit only tasks that failed to add, and to
222+
omit tasks that were successfully added on the first attempt.
223+
224+
:param job_id: The ID of the job to which the task collection is to be
225+
added.
226+
:type job_id: str
227+
:param value: The collection of tasks to add. The total serialized
228+
size of this collection must be less than 4MB. If it is greater than
229+
4MB (for example if each task has 100's of resource files or
230+
environment variables), the request will fail with code
231+
'RequestBodyTooLarge' and should be retried again with fewer tasks.
232+
:type value: list of :class:`TaskAddParameter
233+
<azure.batch.models.TaskAddParameter>`
234+
:param task_add_collection_options: Additional parameters for the
235+
operation
236+
:type task_add_collection_options: :class:`TaskAddCollectionOptions
237+
<azure.batch.models.TaskAddCollectionOptions>`
238+
:param dict custom_headers: headers that will be added to the request
239+
:param bool raw: returns the direct response alongside the
240+
deserialized response
241+
:param int threads: number of threads to use in parallel when adding tasks. If specified
242+
and greater than 0, will start additional threads to submit requests and wait for them to finish.
243+
Otherwise will submit add_collection requests sequentially on main thread
244+
:return: :class:`TaskAddCollectionResult
245+
<azure.batch.models.TaskAddCollectionResult>` or
246+
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
247+
raw=true
248+
:rtype: :class:`TaskAddCollectionResult
249+
<azure.batch.models.TaskAddCollectionResult>` or
250+
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
251+
:raises:
252+
:class:`CreateTasksErrorException<azure.batch.custom.CreateTasksErrorException>`
253+
"""
254+
255+
results_queue = collections.deque() # deque operations(append/pop) are thread-safe
256+
task_workflow_manager = _TaskWorkflowManager(
257+
self,
258+
original_add_collection,
259+
job_id,
260+
value,
261+
task_add_collection_options,
262+
custom_headers,
263+
raw,
264+
**operation_config)
265+
266+
# multi-threaded behavior
267+
if threads:
268+
if threads < 0:
269+
raise ValueError("Threads must be positive or 0")
270+
271+
active_threads = []
272+
for i in range(threads):
273+
active_threads.append(threading.Thread(
274+
target=task_workflow_manager.task_collection_thread_handler,
275+
args=(results_queue,)))
276+
active_threads[-1].start()
277+
for thread in active_threads:
278+
thread.join()
279+
# single-threaded behavior
280+
else:
281+
task_workflow_manager.task_collection_thread_handler(results_queue)
282+
283+
if task_workflow_manager.error:
284+
raise task_workflow_manager.error # pylint: disable=raising-bad-type
285+
else:
286+
submitted_tasks = _handle_output(results_queue)
287+
return TaskAddCollectionResult(value=submitted_tasks)
288+
bulk_add_collection.metadata = {'url': '/jobs/{jobId}/addtaskcollection'}
289+
return bulk_add_collection
290+
291+
292+
def patch_client():
197293
try:
198294
models = sys.modules['azure.batch.models']
199295
except KeyError:
200296
models = importlib.import_module('azure.batch.models')
201297
setattr(models, 'CreateTasksErrorException', CreateTasksErrorException)
202298
sys.modules['azure.batch.models'] = models
203-
client.task.add_collection = types.MethodType(bulk_add_collection, client.task)
204-
205-
def bulk_add_collection(
206-
client,
207-
job_id,
208-
value,
209-
task_add_collection_options=None,
210-
custom_headers=None,
211-
raw=False,
212-
threads=0,
213-
**operation_config):
214-
"""Adds a collection of tasks to the specified job.
215-
216-
Note that each task must have a unique ID. The Batch service may not
217-
return the results for each task in the same order the tasks were
218-
submitted in this request. If the server times out or the connection is
219-
closed during the request, the request may have been partially or fully
220-
processed, or not at all. In such cases, the user should re-issue the
221-
request. Note that it is up to the user to correctly handle failures
222-
when re-issuing a request. For example, you should use the same task
223-
IDs during a retry so that if the prior operation succeeded, the retry
224-
will not create extra tasks unexpectedly. If the response contains any
225-
tasks which failed to add, a client can retry the request. In a retry,
226-
it is most efficient to resubmit only tasks that failed to add, and to
227-
omit tasks that were successfully added on the first attempt. The
228-
maximum lifetime of a task from addition to completion is 7 days. If a
229-
task has not completed within 7 days of being added it will be
230-
terminated by the Batch service and left in whatever state it was in at
231-
that time.
232-
233-
:param job_id: The ID of the job to which the task collection is to be
234-
added.
235-
:type job_id: str
236-
:param value: The collection of tasks to add. The total serialized
237-
size of this collection must be less than 4MB. If it is greater than
238-
4MB (for example if each task has 100's of resource files or
239-
environment variables), the request will fail with code
240-
'RequestBodyTooLarge' and should be retried again with fewer tasks.
241-
:type value: list of :class:`TaskAddParameter
242-
<azure.batch.models.TaskAddParameter>`
243-
:param task_add_collection_options: Additional parameters for the
244-
operation
245-
:type task_add_collection_options: :class:`TaskAddCollectionOptions
246-
<azure.batch.models.TaskAddCollectionOptions>`
247-
:param dict custom_headers: headers that will be added to the request
248-
:param bool raw: returns the direct response alongside the
249-
deserialized response
250-
:param int threads: number of threads to use in parallel when adding tasks. If specified
251-
and greater than 0, will start additional threads to submit requests and wait for them to finish.
252-
Otherwise will submit add_collection requests sequentially on main thread
253-
:return: :class:`TaskAddCollectionResult
254-
<azure.batch.models.TaskAddCollectionResult>` or
255-
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
256-
raw=true
257-
:rtype: :class:`TaskAddCollectionResult
258-
<azure.batch.models.TaskAddCollectionResult>` or
259-
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
260-
:raises:
261-
:class:`BatchErrorException<azure.batch.models.BatchErrorException>`
262-
"""
263299

264-
results_queue = collections.deque() # deque operations(append/pop) are thread-safe
265-
task_workflow_manager = _TaskWorkflowManager(
266-
client,
267-
job_id,
268-
value,
269-
task_add_collection_options,
270-
custom_headers,
271-
raw,
272-
**operation_config)
273-
274-
# multi-threaded behavior
275-
if threads:
276-
if threads < 0:
277-
raise ValueError("Threads must be positive or 0")
278-
279-
active_threads = []
280-
for i in range(threads):
281-
active_threads.append(threading.Thread(
282-
target=task_workflow_manager.task_collection_thread_handler,
283-
args=(results_queue,)))
284-
active_threads[-1].start()
285-
for thread in active_threads:
286-
thread.join()
287-
# single-threaded behavior
288-
else:
289-
task_workflow_manager.task_collection_thread_handler(results_queue)
290-
291-
if task_workflow_manager.error:
292-
raise task_workflow_manager.error # pylint: disable=raising-bad-type
293-
else:
294-
submitted_tasks = _handle_output(results_queue)
295-
return TaskAddCollectionResult(value=submitted_tasks)
296-
bulk_add_collection.metadata = {'url': '/jobs/{jobId}/addtaskcollection'}
300+
operations_modules = importlib.import_module('azure.batch.operations')
301+
operations_modules.TaskOperations.add_collection = build_new_add_collection(operations_modules.TaskOperations.add_collection)

azure-batch/sdk_packaging.toml

+2
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
[packaging]
2+
auto_update = false
Original file line numberDiff line numberDiff line change
@@ -1,2 +1 @@
11
include *.rst
2-
include azure_bdist_wheel.py
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__import__('pkg_resources').declare_namespace(__name__)
1+
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__import__('pkg_resources').declare_namespace(__name__)
1+
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__import__('pkg_resources').declare_namespace(__name__)
1+
__path__ = __import__('pkgutil').extend_path(__path__, __name__)

0 commit comments

Comments
 (0)