| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | import datetime |
| | import decimal |
| | from unittest import mock |
| |
|
| | import pytest |
| |
|
| | from google.cloud import bigquery |
| |
|
# Skip this entire module unless pandas and a sufficiently recent pyarrow are
# installed; on success these names are bound to the imported modules.
pandas = pytest.importorskip("pandas")
pyarrow = pytest.importorskip("pyarrow", minversion="3.0.0")


# Tabledata path handed to RowIterator constructors throughout these tests.
TEST_PATH = "/v1/project/test-proj/dataset/test-dset/table/test-tbl/data"
| |
|
| |
|
@pytest.fixture
def class_under_test():
    """Return the class under test (``RowIterator``), imported lazily.

    The import happens inside the fixture so collection does not fail when
    optional dependencies are missing.
    """
    import google.cloud.bigquery.table

    return google.cloud.bigquery.table.RowIterator
| |
|
| |
|
@pytest.mark.skipif(
    pandas.__version__.startswith("2."),
    reason="pandas 2.0 changes some default dtypes and we haven't updated the test to account for those",
)
def test_to_dataframe_nullable_scalars(monkeypatch, class_under_test):
    """Nullable scalar BigQuery columns get the expected pandas dtypes and values.

    ``RowIterator.to_arrow`` is replaced with a mock returning a one-row Arrow
    table that covers every nullable scalar type; the resulting DataFrame's
    dtypes and cell values are then checked column by column.
    """
    # One-row Arrow table with a column for each nullable scalar BigQuery type.
    arrow_schema = pyarrow.schema(
        [
            pyarrow.field("bignumeric_col", pyarrow.decimal256(76, scale=38)),
            pyarrow.field("bool_col", pyarrow.bool_()),
            pyarrow.field("bytes_col", pyarrow.binary()),
            pyarrow.field("date_col", pyarrow.date32()),
            pyarrow.field("datetime_col", pyarrow.timestamp("us", tz=None)),
            pyarrow.field("float64_col", pyarrow.float64()),
            pyarrow.field("int64_col", pyarrow.int64()),
            pyarrow.field("numeric_col", pyarrow.decimal128(38, scale=9)),
            pyarrow.field("string_col", pyarrow.string()),
            pyarrow.field("time_col", pyarrow.time64("us")),
            pyarrow.field(
                "timestamp_col", pyarrow.timestamp("us", tz=datetime.timezone.utc)
            ),
        ]
    )
    arrow_table = pyarrow.Table.from_pydict(
        {
            "bignumeric_col": [decimal.Decimal("123.456789101112131415")],
            "bool_col": [True],
            "bytes_col": [b"Hello,\x00World!"],
            "date_col": [datetime.date(2021, 8, 9)],
            "datetime_col": [datetime.datetime(2021, 8, 9, 13, 30, 44, 123456)],
            "float64_col": [1.25],
            "int64_col": [-7],
            "numeric_col": [decimal.Decimal("-123.456789")],
            "string_col": ["abcdefg"],
            "time_col": [datetime.time(14, 21, 17, 123456)],
            "timestamp_col": [
                datetime.datetime(
                    2021, 8, 9, 13, 30, 44, 123456, tzinfo=datetime.timezone.utc
                )
            ],
        },
        schema=arrow_schema,
    )

    # Matching BigQuery schema (all columns NULLABLE by default).
    nullable_schema = [
        bigquery.SchemaField("bignumeric_col", "BIGNUMERIC"),
        bigquery.SchemaField("bool_col", "BOOLEAN"),
        bigquery.SchemaField("bytes_col", "BYTES"),
        bigquery.SchemaField("date_col", "DATE"),
        bigquery.SchemaField("datetime_col", "DATETIME"),
        bigquery.SchemaField("float64_col", "FLOAT"),
        bigquery.SchemaField("int64_col", "INT64"),
        bigquery.SchemaField("numeric_col", "NUMERIC"),
        bigquery.SchemaField("string_col", "STRING"),
        bigquery.SchemaField("time_col", "TIME"),
        bigquery.SchemaField("timestamp_col", "TIMESTAMP"),
    ]
    mock_client = mock.create_autospec(bigquery.Client)
    mock_client.project = "test-proj"
    mock_api_request = mock.Mock()
    mock_to_arrow = mock.Mock()
    mock_to_arrow.return_value = arrow_table
    rows = class_under_test(mock_client, mock_api_request, TEST_PATH, nullable_schema)
    # Bypass the real fetch/conversion path; to_dataframe consumes this table.
    monkeypatch.setattr(rows, "to_arrow", mock_to_arrow)
    df = rows.to_dataframe()

    # Expected default dtype per column: extension dtypes for bool/int64,
    # db-dtypes for DATE/TIME, plain object for decimals, bytes and strings.
    assert df.dtypes["bignumeric_col"].name == "object"
    assert df.dtypes["bool_col"].name == "boolean"
    assert df.dtypes["bytes_col"].name == "object"
    assert df.dtypes["date_col"].name == "dbdate"
    assert df.dtypes["datetime_col"].name == "datetime64[ns]"
    assert df.dtypes["float64_col"].name == "float64"
    assert df.dtypes["int64_col"].name == "Int64"
    assert df.dtypes["numeric_col"].name == "object"
    assert df.dtypes["string_col"].name == "object"
    assert df.dtypes["time_col"].name == "dbtime"
    assert df.dtypes["timestamp_col"].name == "datetime64[ns, UTC]"

    # Cell values round-trip unchanged.
    assert df["bignumeric_col"][0] == decimal.Decimal("123.456789101112131415")
    assert df["bool_col"][0]
    assert df["bytes_col"][0] == b"Hello,\x00World!"

    assert df["date_col"][0] == datetime.date(2021, 8, 9)

    assert df["datetime_col"][0] == pandas.to_datetime("2021-08-09 13:30:44.123456")
    assert df["float64_col"][0] == 1.25
    assert df["int64_col"][0] == -7
    assert df["numeric_col"][0] == decimal.Decimal("-123.456789")
    assert df["string_col"][0] == "abcdefg"

    assert df["time_col"][0] == datetime.time(14, 21, 17, 123456)

    assert df["timestamp_col"][0] == pandas.to_datetime("2021-08-09 13:30:44.123456Z")
| |
|
| |
|
def test_to_dataframe_nullable_scalars_with_custom_dtypes(
    monkeypatch, class_under_test
):
    """Passing in explicit dtypes is merged with default behavior."""
    # Two identical INT64 columns; only one gets an explicit dtype override.
    schema = pyarrow.schema(
        [
            pyarrow.field("int64_col", pyarrow.int64()),
            pyarrow.field("other_int_col", pyarrow.int64()),
        ]
    )
    table = pyarrow.Table.from_pydict(
        {"int64_col": [1000], "other_int_col": [-7]},
        schema=schema,
    )

    bq_schema = [
        bigquery.SchemaField("int64_col", "INT64"),
        bigquery.SchemaField("other_int_col", "INT64"),
    ]
    client = mock.create_autospec(bigquery.Client)
    client.project = "test-proj"
    rows = class_under_test(client, mock.Mock(), TEST_PATH, bq_schema)
    monkeypatch.setattr(rows, "to_arrow", mock.Mock(return_value=table))
    df = rows.to_dataframe(dtypes={"other_int_col": "int8"})

    # Column without an override keeps the default nullable Int64 dtype.
    assert df.dtypes["int64_col"].name == "Int64"
    assert df["int64_col"][0] == 1000

    # Column with an explicit dtype uses the requested one.
    assert df.dtypes["other_int_col"].name == "int8"
    assert df["other_int_col"][0] == -7
| |
|
| |
|
def test_to_dataframe_arrays(monkeypatch, class_under_test):
    """A REPEATED column converts to an object column holding the array values."""
    schema = pyarrow.schema(
        [pyarrow.field("int64_repeated", pyarrow.list_(pyarrow.int64()))]
    )
    table = pyarrow.Table.from_pydict(
        {"int64_repeated": [[-1, 0, 2]]},
        schema=schema,
    )

    bq_schema = [
        bigquery.SchemaField("int64_repeated", "INT64", mode="REPEATED"),
    ]
    client = mock.create_autospec(bigquery.Client)
    client.project = "test-proj"
    rows = class_under_test(client, mock.Mock(), TEST_PATH, bq_schema)
    monkeypatch.setattr(rows, "to_arrow", mock.Mock(return_value=table))
    df = rows.to_dataframe()

    assert df.dtypes["int64_repeated"].name == "object"
    assert tuple(df["int64_repeated"][0]) == (-1, 0, 2)
| |
|
| |
|
def test_to_dataframe_with_jobs_query_response(class_under_test):
    """A RowIterator seeded with a jobs.query-style first page converts to a DataFrame."""
    resource = {
        "kind": "bigquery#queryResponse",
        "schema": {
            "fields": [
                {"name": "name", "type": "STRING", "mode": "NULLABLE"},
                {"name": "number", "type": "INTEGER", "mode": "NULLABLE"},
            ]
        },
        "jobReference": {
            "projectId": "test-project",
            "jobId": "job_ocd3cb-N62QIslU7R5qKKa2_427J",
            "location": "US",
        },
        "totalRows": "9",
        "rows": [
            {"f": [{"v": "Tiarra"}, {"v": "6"}]},
            {"f": [{"v": "Timothy"}, {"v": "325"}]},
            {"f": [{"v": "Tina"}, {"v": "26"}]},
            {"f": [{"v": "Tierra"}, {"v": "10"}]},
            {"f": [{"v": "Tia"}, {"v": "17"}]},
            {"f": [{"v": "Tiara"}, {"v": "22"}]},
            {"f": [{"v": "Tiana"}, {"v": "6"}]},
            {"f": [{"v": "Tiffany"}, {"v": "229"}]},
            {"f": [{"v": "Tiffani"}, {"v": "8"}]},
        ],
        "totalBytesProcessed": "154775150",
        "jobComplete": True,
        "cacheHit": False,
        "queryId": "job_ocd3cb-N62QIslU7R5qKKa2_427J",
    }

    # Build the BigQuery schema from the response itself, as a caller would.
    schema = [
        bigquery.SchemaField.from_api_repr(field)
        for field in resource["schema"]["fields"]
    ]
    rows = class_under_test(
        client=None,
        api_request=None,
        path=None,
        schema=schema,
        first_page_response=resource,
    )
    df = rows.to_dataframe()

    expected_names = [
        "Tiarra",
        "Timothy",
        "Tina",
        "Tierra",
        "Tia",
        "Tiara",
        "Tiana",
        "Tiffany",
        "Tiffani",
    ]
    assert list(df.columns) == ["name", "number"]
    assert list(df["name"]) == expected_names
    # INTEGER values arrive as strings in the JSON payload; conversion yields ints.
    assert list(df["number"]) == [6, 325, 26, 10, 17, 22, 6, 229, 8]
| |
|