
Commit 0e52722

test: add tests for RowIterator's max_stream_count parameter
1 parent 20000f7 commit 0e52722

File tree

1 file changed (+72 −0 lines)


tests/unit/test_table.py

Lines changed: 72 additions & 0 deletions
@@ -5822,3 +5822,75 @@ def test_table_reference_to_bqstorage_v1_stable(table_path):
     for klass in (mut.TableReference, mut.Table, mut.TableListItem):
         got = klass.from_string(table_path).to_bqstorage()
         assert got == expected
+
+
+@pytest.mark.parametrize("preserve_order", [True, False])
+def test_to_arrow_iterable_w_bqstorage_max_stream_count(preserve_order):
+    pytest.importorskip("pandas")
+    pytest.importorskip("google.cloud.bigquery_storage")
+    from google.cloud.bigquery import schema
+    from google.cloud.bigquery import table as mut
+    from google.cloud import bigquery_storage
+
+    bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
+    session = bigquery_storage.types.ReadSession()
+    bqstorage_client.create_read_session.return_value = session
+
+    row_iterator = mut.RowIterator(
+        _mock_client(),
+        api_request=None,
+        path=None,
+        schema=[
+            schema.SchemaField("colA", "INTEGER"),
+        ],
+        table=mut.TableReference.from_string("proj.dset.tbl"),
+    )
+    row_iterator._preserve_order = preserve_order
+
+    max_stream_count = 132
+    result_iterable = row_iterator.to_arrow_iterable(
+        bqstorage_client=bqstorage_client, max_stream_count=max_stream_count
+    )
+    for _ in result_iterable:  # pragma: NO COVER
+        pass
+    bqstorage_client.create_read_session.assert_called_once_with(
+        parent=mock.ANY,
+        read_session=mock.ANY,
+        max_stream_count=max_stream_count if not preserve_order else 1,
+    )
+
+
+@pytest.mark.parametrize("preserve_order", [True, False])
+def test_to_dataframe_iterable_w_bqstorage_max_stream_count(preserve_order):
+    pytest.importorskip("pandas")
+    pytest.importorskip("google.cloud.bigquery_storage")
+    from google.cloud.bigquery import schema
+    from google.cloud.bigquery import table as mut
+    from google.cloud import bigquery_storage
+
+    bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
+    session = bigquery_storage.types.ReadSession()
+    bqstorage_client.create_read_session.return_value = session
+
+    row_iterator = mut.RowIterator(
+        _mock_client(),
+        api_request=None,
+        path=None,
+        schema=[
+            schema.SchemaField("colA", "INTEGER"),
+        ],
+        table=mut.TableReference.from_string("proj.dset.tbl"),
+    )
+    row_iterator._preserve_order = preserve_order
+
+    max_stream_count = 132
+    result_iterable = row_iterator.to_dataframe_iterable(
+        bqstorage_client=bqstorage_client, max_stream_count=max_stream_count
+    )
+    for _ in result_iterable:  # pragma: NO COVER
+        pass
+    bqstorage_client.create_read_session.assert_called_once_with(
+        parent=mock.ANY,
+        read_session=mock.ANY,
+        max_stream_count=max_stream_count if not preserve_order else 1,
+    )
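
For context, a minimal caller-side sketch of the parameter these tests exercise (not part of the commit): it assumes the BigQuery Storage API client is installed and credentials are configured, and it reuses the placeholder table id "proj.dset.tbl" from the tests above.

# Usage sketch: cap the number of BigQuery Storage read streams when
# materializing table rows. max_stream_count is forwarded to
# create_read_session; per the assertions above, it is collapsed to a
# single stream when the iterator must preserve row order.
from google.cloud import bigquery
from google.cloud import bigquery_storage

client = bigquery.Client()
bqstorage_client = bigquery_storage.BigQueryReadClient()

rows = client.list_rows("proj.dset.tbl")  # returns a RowIterator

for frame in rows.to_dataframe_iterable(
    bqstorage_client=bqstorage_client, max_stream_count=4
):
    print(len(frame))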
