Commit 7ea3c23

chore: add cross_sync annotations (#1000)
1 parent 511abb1 commit 7ea3c23

32 files changed: +3430 -3297 lines

.github/workflows/conformance.yaml (+2 -2)

@@ -26,9 +26,9 @@ jobs:
       matrix:
         test-version: [ "v0.0.2" ]
         py-version: [ 3.8 ]
-        client-type: [ "Async v3", "Legacy" ]
+        client-type: [ "async", "legacy" ]
       fail-fast: false
-    name: "${{ matrix.client-type }} Client / Python ${{ matrix.py-version }} / Test Tag ${{ matrix.test-version }}"
+    name: "${{ matrix.client-type }} client / python ${{ matrix.py-version }} / test tag ${{ matrix.test-version }}"
     steps:
       - uses: actions/checkout@v4
         name: "Checkout python-bigtable"

.kokoro/conformance.sh (+1 -2)

@@ -23,15 +23,14 @@ PROXY_ARGS=""
 TEST_ARGS=""
 if [[ "${CLIENT_TYPE^^}" == "LEGACY" ]]; then
     echo "Using legacy client"
-    PROXY_ARGS="--legacy-client"
     # legacy client does not expose mutate_row. Disable those tests
     TEST_ARGS="-skip TestMutateRow_"
 fi
 
 # Build and start the proxy in a separate process
 PROXY_PORT=9999
 pushd test_proxy
-nohup python test_proxy.py --port $PROXY_PORT $PROXY_ARGS &
+nohup python test_proxy.py --port $PROXY_PORT --client_type=$CLIENT_TYPE &
 proxyPID=$!
 popd
 

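The proxy invocation now forwards the matrix value directly as `--client_type=$CLIENT_TYPE` instead of translating it into a `--legacy-client` flag, so the script no longer needs `PROXY_ARGS`. The diff does not show how `test_proxy.py` parses this flag; the following is a hypothetical sketch of an argparse surface consistent with the command line above (flag names taken from the diff, everything else assumed):

```python
# Hypothetical sketch of the flag surface implied by the new command line;
# test_proxy.py's real argument handling is not part of this diff.
import argparse

parser = argparse.ArgumentParser(description="conformance test proxy (sketch)")
parser.add_argument("--port", type=int, default=9999)
parser.add_argument(
    "--client_type",
    default="async",
    choices=["async", "legacy"],  # mirrors the workflow matrix values above
)

args = parser.parse_args(["--port", "9999", "--client_type=legacy"])
print(args.client_type)  # -> "legacy"
```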
google/cloud/bigtable/data/__init__.py (+15 -1)

@@ -45,16 +45,30 @@
 from google.cloud.bigtable.data._helpers import RowKeySamples
 from google.cloud.bigtable.data._helpers import ShardedQuery
 
+# setup custom CrossSync mappings for library
+from google.cloud.bigtable_v2.services.bigtable.async_client import (
+    BigtableAsyncClient,
+)
+from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync
+from google.cloud.bigtable.data._async._mutate_rows import _MutateRowsOperationAsync
+
+from google.cloud.bigtable.data._cross_sync import CrossSync
+
+CrossSync.add_mapping("GapicClient", BigtableAsyncClient)
+CrossSync.add_mapping("_ReadRowsOperation", _ReadRowsOperationAsync)
+CrossSync.add_mapping("_MutateRowsOperation", _MutateRowsOperationAsync)
+CrossSync.add_mapping("MutationsBatcher", MutationsBatcherAsync)
+
 
 __version__: str = package_version.__version__
 
 __all__ = (
     "BigtableDataClientAsync",
     "TableAsync",
+    "MutationsBatcherAsync",
     "RowKeySamples",
     "ReadRowsQuery",
     "RowRange",
-    "MutationsBatcherAsync",
     "Mutation",
     "RowMutationEntry",
     "SetCell",

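The new `add_mapping` calls register concrete implementations under flavor-neutral names, so code written against `CrossSync.GapicClient` or `CrossSync._ReadRowsOperation` resolves to the async classes here, while a sync build can register its own. A minimal sketch of that registry idea, assuming mapped names simply become attributes (illustration only, not the library's actual implementation):

```python
# Minimal sketch of an add_mapping-style registry: registered names become
# class attributes, so shared code can resolve "GapicClient" to whichever
# concrete class the current flavor registered.
class CrossSyncSketch:
    @classmethod
    def add_mapping(cls, name: str, value: object) -> None:
        setattr(cls, name, value)  # expose as CrossSyncSketch.<name>

class FakeAsyncGapicClient:  # stand-in for BigtableAsyncClient
    pass

CrossSyncSketch.add_mapping("GapicClient", FakeAsyncGapicClient)
assert CrossSyncSketch.GapicClient is FakeAsyncGapicClient
```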
google/cloud/bigtable/data/_async/_mutate_rows.py (+22 -18)

@@ -15,37 +15,38 @@
 from __future__ import annotations
 
 from typing import Sequence, TYPE_CHECKING
-from dataclasses import dataclass
 import functools
 
 from google.api_core import exceptions as core_exceptions
 from google.api_core import retry as retries
-import google.cloud.bigtable_v2.types.bigtable as types_pb
 import google.cloud.bigtable.data.exceptions as bt_exceptions
 from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
 from google.cloud.bigtable.data._helpers import _retry_exception_factory
 
 # mutate_rows requests are limited to this number of mutations
 from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT
+from google.cloud.bigtable.data.mutations import _EntryWithProto
+
+from google.cloud.bigtable.data._cross_sync import CrossSync
 
 if TYPE_CHECKING:
-    from google.cloud.bigtable_v2.services.bigtable.async_client import (
-        BigtableAsyncClient,
-    )
     from google.cloud.bigtable.data.mutations import RowMutationEntry
-    from google.cloud.bigtable.data._async.client import TableAsync
-
 
-@dataclass
-class _EntryWithProto:
-    """
-    A dataclass to hold a RowMutationEntry and its corresponding proto representation.
-    """
+    if CrossSync.is_async:
+        from google.cloud.bigtable_v2.services.bigtable.async_client import (
+            BigtableAsyncClient as GapicClientType,
+        )
+        from google.cloud.bigtable.data._async.client import TableAsync as TableType
+    else:
+        from google.cloud.bigtable_v2.services.bigtable.client import (  # type: ignore
+            BigtableClient as GapicClientType,
+        )
+        from google.cloud.bigtable.data._sync_autogen.client import Table as TableType  # type: ignore
 
-    entry: RowMutationEntry
-    proto: types_pb.MutateRowsRequest.Entry
+__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen._mutate_rows"
 
 
+@CrossSync.convert_class("_MutateRowsOperation")
 class _MutateRowsOperationAsync:
     """
     MutateRowsOperation manages the logic of sending a set of row mutations,
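The `if CrossSync.is_async:` branch above sits under `TYPE_CHECKING`, so neither client module is imported at runtime; it only steers type checkers to the right `GapicClientType`/`TableType` for each flavor. A standalone sketch of the same pattern with stand-in types:

```python
# Standalone sketch of the type-only import pattern used above: the branch is
# evaluated only by type checkers, never at runtime, so the annotation can
# resolve per flavor without importing either client module.
from __future__ import annotations

from typing import TYPE_CHECKING

IS_ASYNC = True  # stand-in for CrossSync.is_async

if TYPE_CHECKING:
    if IS_ASYNC:
        from asyncio import StreamReader as ReaderType
    else:
        from io import BufferedReader as ReaderType  # type: ignore

def describe(reader: ReaderType) -> str:
    # with postponed annotation evaluation, ReaderType is never looked up at runtime
    return type(reader).__name__
```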
@@ -65,10 +66,11 @@ class _MutateRowsOperationAsync:
         If not specified, the request will run until operation_timeout is reached.
     """
 
+    @CrossSync.convert
     def __init__(
         self,
-        gapic_client: "BigtableAsyncClient",
-        table: "TableAsync",
+        gapic_client: GapicClientType,
+        table: TableType,
         mutation_entries: list["RowMutationEntry"],
         operation_timeout: float,
         attempt_timeout: float | None,
@@ -97,7 +99,7 @@ def __init__(
             bt_exceptions._MutateRowsIncomplete,
         )
         sleep_generator = retries.exponential_sleep_generator(0.01, 2, 60)
-        self._operation = retries.retry_target_async(
+        self._operation = lambda: CrossSync.retry_target(
             self._run_attempt,
             self.is_retryable,
             sleep_generator,
@@ -112,6 +114,7 @@
         self.remaining_indices = list(range(len(self.mutations)))
         self.errors: dict[int, list[Exception]] = {}
 
+    @CrossSync.convert
     async def start(self):
         """
         Start the operation, and run until completion
@@ -121,7 +124,7 @@ async def start(self):
         """
         try:
             # trigger mutate_rows
-            await self._operation
+            await self._operation()
         except Exception as exc:
             # exceptions raised by retryable are added to the list of exceptions for all unfinalized mutations
             incomplete_indices = self.remaining_indices.copy()
@@ -148,6 +151,7 @@ async def start(self):
                 all_errors, len(self.mutations)
             )
 
+    @CrossSync.convert
    async def _run_attempt(self):
        """
        Run a single attempt of the mutate_rows rpc.

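One behavioral note on this file: `self._operation` changes from an awaitable built eagerly in `__init__` (`retries.retry_target_async(...)`) to a zero-argument lambda, and `start()` changes from `await self._operation` to `await self._operation()`. Deferring the call means constructing the operation never creates a coroutine, and it gives the generated sync code a plain callable to invoke. A self-contained sketch of that deferral:

```python
# Self-contained sketch of the deferral introduced above: the callable is
# built in __init__ but only invoked (creating the coroutine) inside start().
import asyncio

class OperationSketch:
    def __init__(self) -> None:
        # deferred: nothing is scheduled yet, unlike `self._operation = coro(...)`
        self._operation = lambda: self._run_attempt()

    async def _run_attempt(self) -> str:
        return "attempt finished"

    async def start(self) -> str:
        # matches the diff's `await self._operation()` call shape
        return await self._operation()

print(asyncio.run(OperationSketch().start()))  # -> attempt finished
```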
google/cloud/bigtable/data/_async/_read_rows.py (+24 -22)

@@ -15,13 +15,7 @@
 
 from __future__ import annotations
 
-from typing import (
-    TYPE_CHECKING,
-    AsyncGenerator,
-    AsyncIterable,
-    Awaitable,
-    Sequence,
-)
+from typing import Sequence, TYPE_CHECKING
 
 from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB
 from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB
@@ -32,21 +26,25 @@
 from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
 from google.cloud.bigtable.data.exceptions import InvalidChunk
 from google.cloud.bigtable.data.exceptions import _RowSetComplete
+from google.cloud.bigtable.data.exceptions import _ResetRow
 from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
 from google.cloud.bigtable.data._helpers import _retry_exception_factory
 
 from google.api_core import retry as retries
 from google.api_core.retry import exponential_sleep_generator
 
-if TYPE_CHECKING:
-    from google.cloud.bigtable.data._async.client import TableAsync
+from google.cloud.bigtable.data._cross_sync import CrossSync
 
+if TYPE_CHECKING:
+    if CrossSync.is_async:
+        from google.cloud.bigtable.data._async.client import TableAsync as TableType
+    else:
+        from google.cloud.bigtable.data._sync_autogen.client import Table as TableType  # type: ignore
 
-class _ResetRow(Exception):
-    def __init__(self, chunk):
-        self.chunk = chunk
+__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen._read_rows"
 
 
+@CrossSync.convert_class("_ReadRowsOperation")
 class _ReadRowsOperationAsync:
     """
     ReadRowsOperation handles the logic of merging chunks from a ReadRowsResponse stream
@@ -80,7 +78,7 @@ class _ReadRowsOperationAsync:
     def __init__(
         self,
         query: ReadRowsQuery,
-        table: "TableAsync",
+        table: TableType,
         operation_timeout: float,
         attempt_timeout: float,
         retryable_exceptions: Sequence[type[Exception]] = (),
@@ -102,22 +100,22 @@ def __init__(
         self._last_yielded_row_key: bytes | None = None
         self._remaining_count: int | None = self.request.rows_limit or None
 
-    def start_operation(self) -> AsyncGenerator[Row, None]:
+    def start_operation(self) -> CrossSync.Iterable[Row]:
         """
         Start the read_rows operation, retrying on retryable errors.
 
         Yields:
             Row: The next row in the stream
         """
-        return retries.retry_target_stream_async(
+        return CrossSync.retry_target_stream(
             self._read_rows_attempt,
             self._predicate,
             exponential_sleep_generator(0.01, 60, multiplier=2),
             self.operation_timeout,
             exception_factory=_retry_exception_factory,
         )
 
-    def _read_rows_attempt(self) -> AsyncGenerator[Row, None]:
+    def _read_rows_attempt(self) -> CrossSync.Iterable[Row]:
        """
        Attempt a single read_rows rpc call.
        This function is intended to be wrapped by retry logic,
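`CrossSync.retry_target_stream` takes over for `retries.retry_target_stream_async` with the same call shape (target, predicate, sleep generator, timeout, exception factory). The essential property of a streaming retry is that each attempt restarts the stream by calling the target again, which is why `_read_rows_attempt` builds a fresh attempt per call. A minimal self-contained sketch of that shape (the real helper also enforces deadlines and backoff):

```python
# Minimal sketch of the stream-retry shape: call the zero-arg target for a
# fresh generator, and restart it when a retryable error escapes mid-stream.
import time
from typing import Callable, Iterable, Iterator

def retry_stream_sketch(
    target: Callable[[], Iterable[int]],
    predicate: Callable[[Exception], bool],
    sleeps: Iterator[float],
) -> Iterator[int]:
    while True:
        try:
            yield from target()  # a fresh attempt on every pass
            return
        except Exception as exc:
            if not predicate(exc):
                raise
            time.sleep(next(sleeps))  # backoff before the next attempt

attempts = [0]

def flaky() -> Iterable[int]:
    attempts[0] += 1
    yield attempts[0]
    if attempts[0] < 2:
        raise TimeoutError("transient mid-stream failure")

print(list(retry_stream_sketch(flaky, lambda e: isinstance(e, TimeoutError), iter([0.0]))))
# -> [1, 2]
```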
@@ -152,9 +150,10 @@ def _read_rows_attempt(self) -> AsyncGenerator[Row, None]:
         chunked_stream = self.chunk_stream(gapic_stream)
         return self.merge_rows(chunked_stream)
 
+    @CrossSync.convert()
     async def chunk_stream(
-        self, stream: Awaitable[AsyncIterable[ReadRowsResponsePB]]
-    ) -> AsyncGenerator[ReadRowsResponsePB.CellChunk, None]:
+        self, stream: CrossSync.Awaitable[CrossSync.Iterable[ReadRowsResponsePB]]
+    ) -> CrossSync.Iterable[ReadRowsResponsePB.CellChunk]:
         """
         process chunks out of raw read_rows stream
 
@@ -204,9 +203,12 @@ async def chunk_stream(
                     current_key = None
 
     @staticmethod
+    @CrossSync.convert(
+        replace_symbols={"__aiter__": "__iter__", "__anext__": "__next__"},
+    )
     async def merge_rows(
-        chunks: AsyncGenerator[ReadRowsResponsePB.CellChunk, None] | None
-    ) -> AsyncGenerator[Row, None]:
+        chunks: CrossSync.Iterable[ReadRowsResponsePB.CellChunk] | None,
+    ) -> CrossSync.Iterable[Row]:
         """
         Merge chunks into rows
 
@@ -222,7 +224,7 @@ async def merge_rows(
         while True:
             try:
                 c = await it.__anext__()
-            except StopAsyncIteration:
+            except CrossSync.StopIteration:
                 # stream complete
                 return
             row_key = c.row_key
@@ -315,7 +317,7 @@
                 ):
                     raise InvalidChunk("reset row with data")
                 continue
-            except StopAsyncIteration:
+            except CrossSync.StopIteration:
                raise InvalidChunk("premature end of stream")
 
     @staticmethod

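The `replace_symbols` mapping and `CrossSync.StopIteration` let one method body serve both flavors: the async source calls `__anext__` and catches end-of-stream, while the generated sync code is rewritten to `__next__`/`StopIteration`. A sketch of the aliasing idea (assuming, per the diff, that `CrossSync.StopIteration` resolves to `StopAsyncIteration` in async builds):

```python
# Sketch of the exception-aliasing idea: one except clause serves both
# flavors because the alias resolves per flavor. Assumed behavior, not the
# real CrossSync implementation.
import asyncio

IS_ASYNC = True  # stand-in for CrossSync.is_async
StopIterationAlias = StopAsyncIteration if IS_ASYNC else StopIteration

async def first_or_none(gen):
    it = gen.__aiter__()  # replace_symbols would rewrite this to __iter__
    try:
        return await it.__anext__()  # ...and this to __next__
    except StopIterationAlias:  # becomes `except StopIteration` in sync code
        return None

async def empty_stream():
    return
    yield  # unreachable; marks this as an (empty) async generator

print(asyncio.run(first_or_none(empty_stream())))  # -> None
```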