Skip to content

Commit dd38b6f

Browse files
dargilcosofiar-msft
authored and committed
Minor Image Analysis SDK update for beta.2 release (Azure#34213)
Per feedback from arch board review, use a dedicated "analyze_from_url" instead of overloading "analyze" to support both analysis from image URL and image buffer. This change is done in the customized code (_patch.py files), not in the auto-generated code. This is documented as a breaking change. The SDK was re-emitted with latest tools, so there are other changes, but they do not affect the functionality and public API surface.
1 parent e774617 commit dd38b6f

File tree

12 files changed

+176
-158
lines changed

12 files changed

+176
-158
lines changed

sdk/vision/azure-ai-vision-imageanalysis/CHANGELOG.md

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,10 @@
11
# Release History
22

3-
## 1.0.0b2 (Unreleased)
4-
5-
### Features Added
3+
## 1.0.0b2 (2024-02-09)
64

75
### Breaking Changes
86

9-
### Bugs Fixed
10-
11-
### Other Changes
7+
- In the previous version, you would call the `analyze` method on the `ImageAnalysisClient` to analyze an image from a publicly accessible URL, or from a memory buffer. To better align with other Azure client libraries, this was changed in this release. Call the new dedicated `analyze_from_url` method to analyze an image from URL. Keep calling the `analyze` method to analyze an image from a memory buffer.
128

139
## 1.0.0b1 (2024-01-09)
1410

sdk/vision/azure-ai-vision-imageanalysis/README.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -168,7 +168,7 @@ This example is similar to the above, except it calls the `analyze` method and p
168168

169169
```python
170170
# Get a caption for the image. This will be a synchronous (blocking) call.
171-
result = client.analyze(
171+
result = client.analyze_from_url(
172172
image_url="https://aka.ms/azsdk/image-analysis/sample.jpg",
173173
visual_features=[VisualFeatures.CAPTION],
174174
gender_neutral_caption=True, # Optional (default is False)
@@ -224,7 +224,7 @@ This example is similar to the above, except it calls the `analyze` method and p
224224

225225
```python
226226
# Extract text (OCR) from an image stream. This will be a synchronous (blocking) call.
227-
result = client.analyze(
227+
result = client.analyze_from_url(
228228
image_url="https://aka.ms/azsdk/image-analysis/sample.jpg",
229229
visual_features=[VisualFeatures.READ]
230230
)

sdk/vision/azure-ai-vision-imageanalysis/azure/ai/vision/imageanalysis/_model_base.py

+28-5
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
import re
1717
import copy
1818
import typing
19-
import email
19+
import email.utils
2020
from datetime import datetime, date, time, timedelta, timezone
2121
from json import JSONEncoder
2222
import isodate
@@ -462,7 +462,13 @@ def _get_rest_field(
462462

463463

464464
def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any:
465-
return _deserialize(rf._type, value) if (rf and rf._is_model) else _serialize(value, rf._format if rf else None)
465+
if not rf:
466+
return _serialize(value, None)
467+
if rf._is_multipart_file_input:
468+
return value
469+
if rf._is_model:
470+
return _deserialize(rf._type, value)
471+
return _serialize(value, rf._format)
466472

467473

468474
class Model(_MyMutableMapping):
@@ -559,15 +565,22 @@ def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.
559565
for k, v in self.items():
560566
if exclude_readonly and k in readonly_props: # pyright: ignore[reportUnboundVariable]
561567
continue
562-
result[k] = Model._as_dict_value(v, exclude_readonly=exclude_readonly)
568+
is_multipart_file_input = False
569+
try:
570+
is_multipart_file_input = next(
571+
rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k
572+
)._is_multipart_file_input
573+
except StopIteration:
574+
pass
575+
result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly)
563576
return result
564577

565578
@staticmethod
566579
def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any:
567580
if v is None or isinstance(v, _Null):
568581
return None
569582
if isinstance(v, (list, tuple, set)):
570-
return [Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v]
583+
return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v)
571584
if isinstance(v, dict):
572585
return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()}
573586
return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v
@@ -762,6 +775,7 @@ def __init__(
762775
visibility: typing.Optional[typing.List[str]] = None,
763776
default: typing.Any = _UNSET,
764777
format: typing.Optional[str] = None,
778+
is_multipart_file_input: bool = False,
765779
):
766780
self._type = type
767781
self._rest_name_input = name
@@ -771,6 +785,7 @@ def __init__(
771785
self._is_model = False
772786
self._default = default
773787
self._format = format
788+
self._is_multipart_file_input = is_multipart_file_input
774789

775790
@property
776791
def _rest_name(self) -> str:
@@ -816,8 +831,16 @@ def rest_field(
816831
visibility: typing.Optional[typing.List[str]] = None,
817832
default: typing.Any = _UNSET,
818833
format: typing.Optional[str] = None,
834+
is_multipart_file_input: bool = False,
819835
) -> typing.Any:
820-
return _RestField(name=name, type=type, visibility=visibility, default=default, format=format)
836+
return _RestField(
837+
name=name,
838+
type=type,
839+
visibility=visibility,
840+
default=default,
841+
format=format,
842+
is_multipart_file_input=is_multipart_file_input,
843+
)
821844

822845

823846
def rest_discriminator(

sdk/vision/azure-ai-vision-imageanalysis/azure/ai/vision/imageanalysis/_operations/_operations.py

-5
Original file line numberDiff line numberDiff line change
@@ -170,9 +170,6 @@ def _analyze_from_image_data(
170170
If however you would like to make sure analysis results do not change over time, set this
171171
value to a specific model version. Default value is None.
172172
:paramtype model_version: str
173-
:keyword content_type: The format of the HTTP payload. Default value is
174-
"application/octet-stream".
175-
:paramtype content_type: str
176173
:return: ImageAnalysisResult. The ImageAnalysisResult is compatible with MutableMapping
177174
:rtype: ~azure.ai.vision.imageanalysis.models.ImageAnalysisResult
178175
:raises ~azure.core.exceptions.HttpResponseError:
@@ -488,8 +485,6 @@ def _analyze_from_url(
488485
If however you would like to make sure analysis results do not change over time, set this
489486
value to a specific model version. Default value is None.
490487
:paramtype model_version: str
491-
:keyword content_type: The format of the HTTP payload. Default value is None.
492-
:paramtype content_type: str
493488
:return: ImageAnalysisResult. The ImageAnalysisResult is compatible with MutableMapping
494489
:rtype: ~azure.ai.vision.imageanalysis.models.ImageAnalysisResult
495490
:raises ~azure.core.exceptions.HttpResponseError:

sdk/vision/azure-ai-vision-imageanalysis/azure/ai/vision/imageanalysis/_patch.py

+67-60
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,8 @@
66
77
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
88
9-
Note 1: the analyze overloads here should have been implemented in the `_patch.py` file in the `_operations` folder
10-
instead of here. That would have worked fine, except there is an issue with the generated Python
11-
ref-docs. The overloads do not show up. See this GitHub issue: https://github.com/Azure/autorest.python/issues/1315.
12-
To overcome this, the overloads are defined here. Consider moving them to the right place once the
13-
above issue is fixed.
14-
15-
Note 2: Don't bother documenting the two overload methods below. The doc tool (sphinx) will not pick them up. Instead,
16-
document the 3rd method.
179
"""
18-
from typing import List, overload, Any, Optional, Union
10+
from typing import List, Any, Optional, Union
1911
from azure.core.tracing.decorator import distributed_trace
2012
from . import models as _models
2113
from ._operations._operations import ImageAnalysisClientOperationsMixin
@@ -35,41 +27,73 @@ class ImageAnalysisClient(ImageAnalysisClientGenerated):
3527
:paramtype api_version: str
3628
"""
3729

38-
@overload
39-
def analyze(
30+
@distributed_trace
31+
def analyze_from_url(
4032
self,
41-
*,
4233
image_url: str,
4334
visual_features: List[_models.VisualFeatures],
44-
language: Optional[str] = None,
45-
gender_neutral_caption: Optional[bool] = None,
46-
smart_crops_aspect_ratios: Optional[List[float]] = None,
47-
model_version: Optional[str] = None,
48-
**kwargs: Any
49-
) -> _models.ImageAnalysisResult:
50-
...
51-
52-
@overload
53-
def analyze(
54-
self,
5535
*,
56-
image_data: bytes,
57-
visual_features: List[_models.VisualFeatures],
5836
language: Optional[str] = None,
5937
gender_neutral_caption: Optional[bool] = None,
6038
smart_crops_aspect_ratios: Optional[List[float]] = None,
6139
model_version: Optional[str] = None,
6240
**kwargs: Any
6341
) -> _models.ImageAnalysisResult:
64-
...
42+
"""Performs a single Image Analysis operation.
43+
44+
:param image_url: The publicly accessible URL of the image to analyze.
45+
:type image_url: str
46+
:param visual_features: A list of visual features to analyze. Required. Seven visual features
47+
are supported: Caption, DenseCaptions, Read (OCR), Tags, Objects, SmartCrops, and People. At
48+
least one visual feature must be specified.
49+
:type visual_features: list[~azure.ai.vision.imageanalysis.models.VisualFeatures]
50+
:keyword language: The desired language for result generation (a two-letter language code).
51+
Defaults to 'en' (English). See https://aka.ms/cv-languages for a list of supported languages.
52+
:paramtype language: str
53+
:keyword gender_neutral_caption: Boolean flag for enabling gender-neutral captioning for
54+
Caption and Dense Captions features. Defaults to 'false'.
55+
Captions may contain gender terms (for example: 'man', 'woman', or 'boy', 'girl').
56+
If you set this to 'true', those will be replaced with gender-neutral terms (for example:
57+
'person' or 'child').
58+
:paramtype gender_neutral_caption: bool
59+
:keyword smart_crops_aspect_ratios: A list of aspect ratios to use for smart cropping.
60+
Defaults to one crop region with an aspect ratio the service sees fit between
61+
0.5 and 2.0 (inclusive). Aspect ratios are calculated by dividing the target crop
62+
width in pixels by the height in pixels. When set, supported values are
63+
between 0.75 and 1.8 (inclusive).
64+
:paramtype smart_crops_aspect_ratios: list[float]
65+
:keyword model_version: The version of cloud AI-model used for analysis. Defaults to 'latest',
66+
for the latest AI model with recent improvements.
67+
The format is the following: 'latest' or 'YYYY-MM-DD' or 'YYYY-MM-DD-preview',
68+
where 'YYYY', 'MM', 'DD' are the year, month and day associated with the model.
69+
If you would like to make sure analysis results do not change over time, set this
70+
value to a specific model version.
71+
:paramtype model_version: str
72+
:return: ImageAnalysisResult. The ImageAnalysisResult is compatible with MutableMapping
73+
:rtype: ~azure.ai.vision.imageanalysis.models.ImageAnalysisResult
74+
:raises: ~azure.core.exceptions.HttpResponseError
75+
"""
76+
77+
visual_features_impl: List[Union[str, _models.VisualFeatures]] = list(visual_features)
78+
79+
return ImageAnalysisClientOperationsMixin._analyze_from_url( # pylint: disable=protected-access
80+
self,
81+
image_content=_models._models.ImageUrl(url=image_url), # pylint: disable=protected-access
82+
visual_features=visual_features_impl,
83+
language=language,
84+
gender_neutral_caption=gender_neutral_caption,
85+
smart_crops_aspect_ratios=smart_crops_aspect_ratios,
86+
model_version=model_version,
87+
**kwargs
88+
)
89+
6590

6691
@distributed_trace
6792
def analyze(
6893
self,
69-
*,
94+
image_data: bytes,
7095
visual_features: List[_models.VisualFeatures],
71-
image_data: Optional[bytes] = None,
72-
image_url: Optional[str] = None,
96+
*,
7397
language: Optional[str] = None,
7498
gender_neutral_caption: Optional[bool] = None,
7599
smart_crops_aspect_ratios: Optional[List[float]] = None,
@@ -78,14 +102,12 @@ def analyze(
78102
) -> _models.ImageAnalysisResult:
79103
"""Performs a single Image Analysis operation.
80104
81-
:keyword image_url: The publicly accessible URL of the image to analyze.
82-
:paramtype image_url: str
83-
:keyword image_data: A buffer containing the whole image to be analyzed.
84-
:paramtype image_data: bytes
85-
:keyword visual_features: A list of visual features to analyze. Required. Seven visual features
105+
:param image_data: A buffer containing the whole image to be analyzed.
106+
:type image_data: bytes
107+
:param visual_features: A list of visual features to analyze. Required. Seven visual features
86108
are supported: Caption, DenseCaptions, Read (OCR), Tags, Objects, SmartCrops, and People. At
87109
least one visual feature must be specified.
88-
:paramtype visual_features: list[~azure.ai.vision.imageanalysis.models.VisualFeatures]
110+
:type visual_features: list[~azure.ai.vision.imageanalysis.models.VisualFeatures]
89111
:keyword language: The desired language for result generation (a two-letter language code).
90112
Defaults to 'en' (English). See https://aka.ms/cv-languages for a list of supported languages.
91113
:paramtype language: str
@@ -115,31 +137,16 @@ def analyze(
115137

116138
visual_features_impl: List[Union[str, _models.VisualFeatures]] = list(visual_features)
117139

118-
if image_url is not None:
119-
return ImageAnalysisClientOperationsMixin._analyze_from_url( # pylint: disable=protected-access
120-
self,
121-
image_content=_models._models.ImageUrl(url=image_url), # pylint: disable=protected-access
122-
visual_features=visual_features_impl,
123-
language=language,
124-
gender_neutral_caption=gender_neutral_caption,
125-
smart_crops_aspect_ratios=smart_crops_aspect_ratios,
126-
model_version=model_version,
127-
**kwargs
128-
)
129-
130-
if image_data is not None:
131-
return ImageAnalysisClientOperationsMixin._analyze_from_image_data( # pylint: disable=protected-access
132-
self,
133-
image_content=image_data,
134-
visual_features=visual_features_impl,
135-
language=language,
136-
gender_neutral_caption=gender_neutral_caption,
137-
smart_crops_aspect_ratios=smart_crops_aspect_ratios,
138-
model_version=model_version,
139-
**kwargs
140-
)
141-
142-
raise ValueError("Either image_data or image_url must be specified.")
140+
return ImageAnalysisClientOperationsMixin._analyze_from_image_data( # pylint: disable=protected-access
141+
self,
142+
image_content=image_data,
143+
visual_features=visual_features_impl,
144+
language=language,
145+
gender_neutral_caption=gender_neutral_caption,
146+
smart_crops_aspect_ratios=smart_crops_aspect_ratios,
147+
model_version=model_version,
148+
**kwargs
149+
)
143150

144151

145152
__all__: List[str] = [

sdk/vision/azure-ai-vision-imageanalysis/azure/ai/vision/imageanalysis/aio/_operations/_operations.py

-5
Original file line numberDiff line numberDiff line change
@@ -89,9 +89,6 @@ async def _analyze_from_image_data(
8989
If however you would like to make sure analysis results do not change over time, set this
9090
value to a specific model version. Default value is None.
9191
:paramtype model_version: str
92-
:keyword content_type: The format of the HTTP payload. Default value is
93-
"application/octet-stream".
94-
:paramtype content_type: str
9592
:return: ImageAnalysisResult. The ImageAnalysisResult is compatible with MutableMapping
9693
:rtype: ~azure.ai.vision.imageanalysis.models.ImageAnalysisResult
9794
:raises ~azure.core.exceptions.HttpResponseError:
@@ -407,8 +404,6 @@ async def _analyze_from_url(
407404
If however you would like to make sure analysis results do not change over time, set this
408405
value to a specific model version. Default value is None.
409406
:paramtype model_version: str
410-
:keyword content_type: The format of the HTTP payload. Default value is None.
411-
:paramtype content_type: str
412407
:return: ImageAnalysisResult. The ImageAnalysisResult is compatible with MutableMapping
413408
:rtype: ~azure.ai.vision.imageanalysis.models.ImageAnalysisResult
414409
:raises ~azure.core.exceptions.HttpResponseError:

0 commit comments

Comments
 (0)