Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit dd54292

Browse files
authored
Merge branch 'main' into feature/default_rounding_mode
2 parents cf723d9 + f637e5e commit dd54292

13 files changed

+379
-307
lines changed

google/cloud/bigquery/_helpers.py

Lines changed: 4 additions & 75 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
import math
2121
import re
2222
import os
23-
from typing import Any, Optional, Union
23+
from typing import Optional, Union
2424

2525
from dateutil import relativedelta
2626
from google.cloud._helpers import UTC # type: ignore
@@ -32,10 +32,7 @@
3232

3333
import packaging.version
3434

35-
from google.cloud.bigquery.exceptions import (
36-
LegacyBigQueryStorageError,
37-
LegacyPyarrowError,
38-
)
35+
from google.cloud.bigquery import exceptions
3936

4037
_RFC3339_MICROS_NO_ZULU = "%Y-%m-%dT%H:%M:%S.%f"
4138
_TIMEONLY_WO_MICROS = "%H:%M:%S"
@@ -57,8 +54,6 @@
5754

5855
_MIN_BQ_STORAGE_VERSION = packaging.version.Version("2.0.0")
5956

60-
_MIN_PYARROW_VERSION = packaging.version.Version("3.0.0")
61-
6257
_BQ_STORAGE_OPTIONAL_READ_SESSION_VERSION = packaging.version.Version("2.6.0")
6358

6459
BIGQUERY_EMULATOR_HOST = "BIGQUERY_EMULATOR_HOST"
@@ -115,84 +110,18 @@ def verify_version(self):
115110
verify the version compatibility at runtime.
116111
117112
Raises:
118-
LegacyBigQueryStorageError:
113+
exceptions.LegacyBigQueryStorageError:
119114
If the google-cloud-bigquery-storage package is outdated.
120115
"""
121116
if self.installed_version < _MIN_BQ_STORAGE_VERSION:
122117
msg = (
123118
"Dependency google-cloud-bigquery-storage is outdated, please upgrade "
124119
f"it to version >= {_MIN_BQ_STORAGE_VERSION} (version found: {self.installed_version})."
125120
)
126-
raise LegacyBigQueryStorageError(msg)
127-
128-
129-
class PyarrowVersions:
130-
"""Version comparisons for pyarrow package."""
131-
132-
def __init__(self):
133-
self._installed_version = None
134-
135-
@property
136-
def installed_version(self) -> packaging.version.Version:
137-
"""Return the parsed version of pyarrow."""
138-
if self._installed_version is None:
139-
import pyarrow # type: ignore
140-
141-
self._installed_version = packaging.version.parse(
142-
# Use 0.0.0, since it is earlier than any released version.
143-
# Legacy versions also have the same property, but
144-
# creating a LegacyVersion has been deprecated.
145-
# https://github.com/pypa/packaging/issues/321
146-
getattr(pyarrow, "__version__", "0.0.0")
147-
)
148-
149-
return self._installed_version
150-
151-
@property
152-
def use_compliant_nested_type(self) -> bool:
153-
return self.installed_version.major >= 4
154-
155-
def try_import(self, raise_if_error: bool = False) -> Any:
156-
"""Verify that a recent enough version of pyarrow extra is
157-
installed.
158-
159-
The function assumes that pyarrow extra is installed, and should thus
160-
be used in places where this assumption holds.
161-
162-
Because `pip` can install an outdated version of this extra despite the
163-
constraints in `setup.py`, the calling code can use this helper to
164-
verify the version compatibility at runtime.
165-
166-
Returns:
167-
The ``pyarrow`` module or ``None``.
168-
169-
Raises:
170-
LegacyPyarrowError:
171-
If the pyarrow package is outdated and ``raise_if_error`` is ``True``.
172-
"""
173-
try:
174-
import pyarrow
175-
except ImportError as exc: # pragma: NO COVER
176-
if raise_if_error:
177-
raise LegacyPyarrowError(
178-
f"pyarrow package not found. Install pyarrow version >= {_MIN_PYARROW_VERSION}."
179-
) from exc
180-
return None
181-
182-
if self.installed_version < _MIN_PYARROW_VERSION:
183-
if raise_if_error:
184-
msg = (
185-
"Dependency pyarrow is outdated, please upgrade "
186-
f"it to version >= {_MIN_PYARROW_VERSION} (version found: {self.installed_version})."
187-
)
188-
raise LegacyPyarrowError(msg)
189-
return None
190-
191-
return pyarrow
121+
raise exceptions.LegacyBigQueryStorageError(msg)
192122

193123

194124
BQ_STORAGE_VERSIONS = BQStorageVersions()
195-
PYARROW_VERSIONS = PyarrowVersions()
196125

197126

198127
def _not_null(value, field):

google/cloud/bigquery/_pandas_helpers.py

Lines changed: 14 additions & 89 deletions
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,9 @@
2323
import warnings
2424
from typing import Any, Union
2525

26-
from packaging import version
27-
2826
from google.cloud.bigquery import _helpers
27+
from google.cloud.bigquery import _pyarrow_helpers
28+
from google.cloud.bigquery import _versions_helpers
2929
from google.cloud.bigquery import schema
3030

3131
try:
@@ -49,7 +49,11 @@
4949
db_dtypes_import_exception = exc
5050
date_dtype_name = time_dtype_name = "" # Use '' rather than None because pytype
5151

52-
pyarrow = _helpers.PYARROW_VERSIONS.try_import()
52+
pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import()
53+
54+
_BIGNUMERIC_SUPPORT = False
55+
if pyarrow is not None:
56+
_BIGNUMERIC_SUPPORT = True
5357

5458
try:
5559
    # _BaseGeometry is used to detect shapely objects in `bq_to_arrow_array`
@@ -119,87 +123,6 @@ def __init__(self):
119123
self.done = False
120124

121125

122-
def pyarrow_datetime():
123-
return pyarrow.timestamp("us", tz=None)
124-
125-
126-
def pyarrow_numeric():
127-
return pyarrow.decimal128(38, 9)
128-
129-
130-
def pyarrow_bignumeric():
131-
# 77th digit is partial.
132-
# https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#decimal_types
133-
return pyarrow.decimal256(76, 38)
134-
135-
136-
def pyarrow_time():
137-
return pyarrow.time64("us")
138-
139-
140-
def pyarrow_timestamp():
141-
return pyarrow.timestamp("us", tz="UTC")
142-
143-
144-
if pyarrow:
145-
    # This dictionary is duplicated in bigquery_storage/test/unit/test_reader.py
146-
# When modifying it be sure to update it there as well.
147-
BQ_TO_ARROW_SCALARS = {
148-
"BOOL": pyarrow.bool_,
149-
"BOOLEAN": pyarrow.bool_,
150-
"BYTES": pyarrow.binary,
151-
"DATE": pyarrow.date32,
152-
"DATETIME": pyarrow_datetime,
153-
"FLOAT": pyarrow.float64,
154-
"FLOAT64": pyarrow.float64,
155-
"GEOGRAPHY": pyarrow.string,
156-
"INT64": pyarrow.int64,
157-
"INTEGER": pyarrow.int64,
158-
"NUMERIC": pyarrow_numeric,
159-
"STRING": pyarrow.string,
160-
"TIME": pyarrow_time,
161-
"TIMESTAMP": pyarrow_timestamp,
162-
}
163-
ARROW_SCALAR_IDS_TO_BQ = {
164-
# https://arrow.apache.org/docs/python/api/datatypes.html#type-classes
165-
pyarrow.bool_().id: "BOOL",
166-
pyarrow.int8().id: "INT64",
167-
pyarrow.int16().id: "INT64",
168-
pyarrow.int32().id: "INT64",
169-
pyarrow.int64().id: "INT64",
170-
pyarrow.uint8().id: "INT64",
171-
pyarrow.uint16().id: "INT64",
172-
pyarrow.uint32().id: "INT64",
173-
pyarrow.uint64().id: "INT64",
174-
pyarrow.float16().id: "FLOAT64",
175-
pyarrow.float32().id: "FLOAT64",
176-
pyarrow.float64().id: "FLOAT64",
177-
pyarrow.time32("ms").id: "TIME",
178-
pyarrow.time64("ns").id: "TIME",
179-
pyarrow.timestamp("ns").id: "TIMESTAMP",
180-
pyarrow.date32().id: "DATE",
181-
pyarrow.date64().id: "DATETIME", # because millisecond resolution
182-
pyarrow.binary().id: "BYTES",
183-
pyarrow.string().id: "STRING", # also alias for pyarrow.utf8()
184-
# The exact scale and precision don't matter, see below.
185-
pyarrow.decimal128(38, scale=9).id: "NUMERIC",
186-
}
187-
188-
if version.parse(pyarrow.__version__) >= version.parse("3.0.0"):
189-
BQ_TO_ARROW_SCALARS["BIGNUMERIC"] = pyarrow_bignumeric
190-
# The exact decimal's scale and precision are not important, as only
191-
# the type ID matters, and it's the same for all decimal256 instances.
192-
ARROW_SCALAR_IDS_TO_BQ[pyarrow.decimal256(76, scale=38).id] = "BIGNUMERIC"
193-
_BIGNUMERIC_SUPPORT = True
194-
else:
195-
_BIGNUMERIC_SUPPORT = False # pragma: NO COVER
196-
197-
else: # pragma: NO COVER
198-
BQ_TO_ARROW_SCALARS = {} # pragma: NO COVER
199-
ARROW_SCALAR_IDS_TO_BQ = {} # pragma: NO_COVER
200-
_BIGNUMERIC_SUPPORT = False # pragma: NO COVER
201-
202-
203126
BQ_FIELD_TYPE_TO_ARROW_FIELD_METADATA = {
204127
"GEOGRAPHY": {
205128
b"ARROW:extension:name": b"google:sqlType:geography",
@@ -240,7 +163,7 @@ def bq_to_arrow_data_type(field):
240163
if field_type_upper in schema._STRUCT_TYPES:
241164
return bq_to_arrow_struct_data_type(field)
242165

243-
data_type_constructor = BQ_TO_ARROW_SCALARS.get(field_type_upper)
166+
data_type_constructor = _pyarrow_helpers.bq_to_arrow_scalars(field_type_upper)
244167
if data_type_constructor is None:
245168
return None
246169
return data_type_constructor()
@@ -568,7 +491,9 @@ def augment_schema(dataframe, current_bq_schema):
568491
if pyarrow.types.is_list(arrow_table.type):
569492
# `pyarrow.ListType`
570493
detected_mode = "REPEATED"
571-
detected_type = ARROW_SCALAR_IDS_TO_BQ.get(arrow_table.values.type.id)
494+
detected_type = _pyarrow_helpers.arrow_scalar_ids_to_bq(
495+
arrow_table.values.type.id
496+
)
572497

573498
# For timezone-naive datetimes, pyarrow assumes the UTC timezone and adds
574499
# it to such datetimes, causing them to be recognized as TIMESTAMP type.
@@ -584,7 +509,7 @@ def augment_schema(dataframe, current_bq_schema):
584509
detected_type = "DATETIME"
585510
else:
586511
detected_mode = field.mode
587-
detected_type = ARROW_SCALAR_IDS_TO_BQ.get(arrow_table.type.id)
512+
detected_type = _pyarrow_helpers.arrow_scalar_ids_to_bq(arrow_table.type.id)
588513

589514
if detected_type is None:
590515
unknown_type_fields.append(field)
@@ -705,13 +630,13 @@ def dataframe_to_parquet(
705630
706631
This argument is ignored for ``pyarrow`` versions earlier than ``4.0.0``.
707632
"""
708-
pyarrow = _helpers.PYARROW_VERSIONS.try_import(raise_if_error=True)
633+
pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import(raise_if_error=True)
709634

710635
import pyarrow.parquet # type: ignore
711636

712637
kwargs = (
713638
{"use_compliant_nested_type": parquet_use_compliant_nested_type}
714-
if _helpers.PYARROW_VERSIONS.use_compliant_nested_type
639+
if _versions_helpers.PYARROW_VERSIONS.use_compliant_nested_type
715640
else {}
716641
)
717642

Lines changed: 123 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
# Copyright 2023 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
"""Shared helper functions for connecting BigQuery and pyarrow."""
16+
17+
from typing import Any
18+
19+
from packaging import version
20+
21+
try:
22+
import pyarrow # type: ignore
23+
except ImportError: # pragma: NO COVER
24+
pyarrow = None
25+
26+
27+
def pyarrow_datetime():
28+
return pyarrow.timestamp("us", tz=None)
29+
30+
31+
def pyarrow_numeric():
32+
return pyarrow.decimal128(38, 9)
33+
34+
35+
def pyarrow_bignumeric():
36+
# 77th digit is partial.
37+
# https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#decimal_types
38+
return pyarrow.decimal256(76, 38)
39+
40+
41+
def pyarrow_time():
42+
return pyarrow.time64("us")
43+
44+
45+
def pyarrow_timestamp():
46+
return pyarrow.timestamp("us", tz="UTC")
47+
48+
49+
_BQ_TO_ARROW_SCALARS = {}
50+
_ARROW_SCALAR_IDS_TO_BQ = {}
51+
52+
if pyarrow:
53+
    # This dictionary is duplicated in bigquery_storage/test/unit/test_reader.py
54+
# When modifying it be sure to update it there as well.
55+
    # NOTE: the "BIGNUMERIC" type's matching pyarrow type is added below,
    # guarded on pyarrow >= 3.0.0 (decimal256 support).
56+
_BQ_TO_ARROW_SCALARS = {
57+
"BOOL": pyarrow.bool_,
58+
"BOOLEAN": pyarrow.bool_,
59+
"BYTES": pyarrow.binary,
60+
"DATE": pyarrow.date32,
61+
"DATETIME": pyarrow_datetime,
62+
"FLOAT": pyarrow.float64,
63+
"FLOAT64": pyarrow.float64,
64+
"GEOGRAPHY": pyarrow.string,
65+
"INT64": pyarrow.int64,
66+
"INTEGER": pyarrow.int64,
67+
"NUMERIC": pyarrow_numeric,
68+
"STRING": pyarrow.string,
69+
"TIME": pyarrow_time,
70+
"TIMESTAMP": pyarrow_timestamp,
71+
}
72+
73+
_ARROW_SCALAR_IDS_TO_BQ = {
74+
# https://arrow.apache.org/docs/python/api/datatypes.html#type-classes
75+
pyarrow.bool_().id: "BOOL",
76+
pyarrow.int8().id: "INT64",
77+
pyarrow.int16().id: "INT64",
78+
pyarrow.int32().id: "INT64",
79+
pyarrow.int64().id: "INT64",
80+
pyarrow.uint8().id: "INT64",
81+
pyarrow.uint16().id: "INT64",
82+
pyarrow.uint32().id: "INT64",
83+
pyarrow.uint64().id: "INT64",
84+
pyarrow.float16().id: "FLOAT64",
85+
pyarrow.float32().id: "FLOAT64",
86+
pyarrow.float64().id: "FLOAT64",
87+
pyarrow.time32("ms").id: "TIME",
88+
pyarrow.time64("ns").id: "TIME",
89+
pyarrow.timestamp("ns").id: "TIMESTAMP",
90+
pyarrow.date32().id: "DATE",
91+
pyarrow.date64().id: "DATETIME", # because millisecond resolution
92+
pyarrow.binary().id: "BYTES",
93+
pyarrow.string().id: "STRING", # also alias for pyarrow.utf8()
94+
# The exact scale and precision don't matter, see below.
95+
pyarrow.decimal128(38, scale=9).id: "NUMERIC",
96+
}
97+
98+
# Adds bignumeric support only if pyarrow version >= 3.0.0
99+
# Decimal256 support was added to arrow 3.0.0
100+
# https://arrow.apache.org/blog/2021/01/25/3.0.0-release/
101+
if version.parse(pyarrow.__version__) >= version.parse("3.0.0"):
102+
_BQ_TO_ARROW_SCALARS["BIGNUMERIC"] = pyarrow_bignumeric
103+
# The exact decimal's scale and precision are not important, as only
104+
# the type ID matters, and it's the same for all decimal256 instances.
105+
_ARROW_SCALAR_IDS_TO_BQ[pyarrow.decimal256(76, scale=38).id] = "BIGNUMERIC"
106+
107+
108+
def bq_to_arrow_scalars(bq_scalar: str):
109+
"""
110+
Returns:
111+
The Arrow scalar type that the input BigQuery scalar type maps to.
112+
If it cannot find the BigQuery scalar, return None.
113+
"""
114+
return _BQ_TO_ARROW_SCALARS.get(bq_scalar)
115+
116+
117+
def arrow_scalar_ids_to_bq(arrow_scalar: Any):
118+
"""
119+
Returns:
120+
The BigQuery scalar type that the input arrow scalar type maps to.
121+
If it cannot find the arrow scalar, return None.
122+
"""
123+
return _ARROW_SCALAR_IDS_TO_BQ.get(arrow_scalar)

0 commit comments

Comments
 (0)