diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index e2b39f94..9ee60f7e 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
 docker:
   image: gcr.io/repo-automation-bots/owlbot-python:latest
-  digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719
+  digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
index 412b0b56..4e1b1fb8 100644
--- a/.kokoro/docker/docs/Dockerfile
+++ b/.kokoro/docker/docs/Dockerfile
@@ -40,6 +40,7 @@ RUN apt-get update \
     libssl-dev \
     libsqlite3-dev \
     portaudio19-dev \
+    python3-distutils \
     redis-server \
     software-properties-common \
     ssh \
@@ -59,40 +60,8 @@ RUN apt-get update \
   && rm -rf /var/lib/apt/lists/* \
   && rm -f /var/cache/apt/archives/*.deb
 
-
-COPY fetch_gpg_keys.sh /tmp
-# Install the desired versions of Python.
-RUN set -ex \
-    && export GNUPGHOME="$(mktemp -d)" \
-    && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
-    && /tmp/fetch_gpg_keys.sh \
-    && for PYTHON_VERSION in 3.7.8 3.8.5; do \
-        wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
-        && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
-        && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
-        && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
-        && mkdir -p /usr/src/python-${PYTHON_VERSION} \
-        && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
-        && rm python-${PYTHON_VERSION}.tar.xz \
-        && cd /usr/src/python-${PYTHON_VERSION} \
-        && ./configure \
-            --enable-shared \
-            # This works only on Python 2.7 and throws a warning on every other
-            # version, but seems otherwise harmless.
-            --enable-unicode=ucs4 \
-            --with-system-ffi \
-            --without-ensurepip \
-        && make -j$(nproc) \
-        && make install \
-        && ldconfig \
-    ; done \
-    && rm -rf "${GNUPGHOME}" \
-    && rm -rf /usr/src/python* \
-    && rm -rf ~/.cache/
-
 RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
-  && python3.7 /tmp/get-pip.py \
   && python3.8 /tmp/get-pip.py \
   && rm /tmp/get-pip.py
 
-CMD ["python3.7"]
+CMD ["python3.8"]
diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh
deleted file mode 100755
index d653dd86..00000000
--- a/.kokoro/docker/docs/fetch_gpg_keys.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# A script to fetch gpg keys with retry.
-# Avoid jinja parsing the file.
-#
-
-function retry {
-    if [[ "${#}" -le 1 ]]; then
-        echo "Usage: ${0} retry_count commands.."
-        exit 1
-    fi
-    local retries=${1}
-    local command="${@:2}"
-    until [[ "${retries}" -le 0 ]]; do
-        $command && return 0
-        if [[ $? -ne 0 ]]; then
-            echo "command failed, retrying"
-            ((retries--))
-        fi
-    done
-    return 1
-}
-
-# 3.6.9, 3.7.5 (Ned Deily)
-retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
-    0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
-
-# 3.8.0 (Ɓukasz Langa)
-retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
-    E3FF2839C048B25C084DEBE9B26995E310250568
-
-#
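
The deleted fetch_gpg_keys.sh becomes unnecessary once the image stops building Python 3.7/3.8 from source. Its `retry` helper is a plain retry-with-count loop; a minimal Python sketch of the same pattern (a hypothetical helper, not code from this change)::

    import subprocess

    def retry(count: int, *command: str) -> bool:
        """Run a command up to `count` times; True on the first success."""
        for _ in range(count):
            if subprocess.run(command).returncode == 0:
                return True
            print("command failed, retrying")
        return False

    # Mirrors one of the deleted call sites:
    # retry(3, "gpg", "--keyserver", "ha.pool.sks-keyservers.net",
    #       "--recv-keys", "0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D")
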
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
index f9cfcd33..abf3481d 100644
--- a/.kokoro/samples/python3.6/periodic-head.cfg
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
 
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
-    value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+    value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh"
 }
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
index f9cfcd33..abf3481d 100644
--- a/.kokoro/samples/python3.7/periodic-head.cfg
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
 
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
-    value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+    value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh"
 }
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
index f9cfcd33..abf3481d 100644
--- a/.kokoro/samples/python3.8/periodic-head.cfg
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
 
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
-    value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+    value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh"
 }
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
index f9cfcd33..abf3481d 100644
--- a/.kokoro/samples/python3.9/periodic-head.cfg
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -7,5 +7,5 @@ env_vars: {
 
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
-    value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+    value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh"
 }
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index cf5de74c..311a8d54 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -20,9 +20,9 @@ set -eo pipefail
 
 # Enables `**` to include files nested inside sub-folders
 shopt -s globstar
 
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
-  echo "No tests run. `./samples` not found"
+# Exit early if samples don't exist
+if ! find samples -name 'requirements.txt' | grep -q .; then
+  echo "No tests run. './samples/**/requirements.txt' not found"
   exit 0
 fi
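
The rewritten guard in test-samples-impl.sh exits early unless at least one requirements.txt exists somewhere under samples/, so a checkout with an empty samples/ folder skips the session instead of failing. A rough Python equivalent of the shell test, for illustration only::

    import pathlib

    def has_runnable_samples(root: str = "samples") -> bool:
        # `find samples -name 'requirements.txt' | grep -q .` succeeds iff
        # the search prints at least one match; any() does the same here.
        return any(pathlib.Path(root).glob("**/requirements.txt"))

    if not has_runnable_samples():
        print("No tests run. './samples/**/requirements.txt' not found")
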
'./samples/**/requirements.txt' not found" exit 0 fi diff --git a/.repo-metadata.json b/.repo-metadata.json index a1c74197..8c2815f3 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -6,5 +6,6 @@ "language": "python", "library_type": "INTEGRATION", "repo": "googleapis/python-bigquery-sqlalchemy", - "distribution_name": "pybigquery" -} \ No newline at end of file + "distribution_name": "pybigquery", + "api_id": "bigquery.googleapis.com" +} diff --git a/CHANGELOG.md b/CHANGELOG.md index f57c1c66..70963b8e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/pybigquery/#history +### [0.10.1](https://www.github.com/googleapis/python-bigquery-sqlalchemy/compare/v0.10.0...v0.10.1) (2021-07-30) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#220](https://www.github.com/googleapis/python-bigquery-sqlalchemy/issues/220)) ([bf1f47c](https://www.github.com/googleapis/python-bigquery-sqlalchemy/commit/bf1f47c794e747a2ea878347322c040636e8c2d4)) + ## [0.10.0](https://www.github.com/googleapis/python-bigquery-sqlalchemy/compare/v0.9.1...v0.10.0) (2021-07-06) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 6c221127..54003910 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -177,6 +177,30 @@ Build the docs via: $ nox -s docs +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + ******************************************** Note About ``README`` as it pertains to PyPI ******************************************** diff --git a/setup.py b/setup.py index caddaf12..e0b2ccec 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ # Package metadata. name = "pybigquery" -version = "0.10.0" +version = "0.10.1" description = "SQLAlchemy dialect for BigQuery" # Should be one of: @@ -66,7 +66,10 @@ def readme(): platforms="Posix; MacOS X; Windows", install_requires=[ "google-api-core>=1.30.0", # Work-around bug in cloud core deps. - "google-auth>=1.25.0,<2.0dev", # Work around pip wack. + # NOTE: Maintainers, please do not require google-auth>=2.x.x + # Until this issue is closed + # https://github.com/googleapis/google-cloud-python/issues/10566 + "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. "google-cloud-bigquery>=2.19.0", "sqlalchemy>=1.2.0,<1.5.0dev", "future", diff --git a/tests/sqlalchemy_dialect_compliance/conftest.py b/tests/sqlalchemy_dialect_compliance/conftest.py index a0fa5e62..3ad9a50b 100644 --- a/tests/sqlalchemy_dialect_compliance/conftest.py +++ b/tests/sqlalchemy_dialect_compliance/conftest.py @@ -18,7 +18,6 @@ # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/tests/sqlalchemy_dialect_compliance/conftest.py b/tests/sqlalchemy_dialect_compliance/conftest.py
index a0fa5e62..3ad9a50b 100644
--- a/tests/sqlalchemy_dialect_compliance/conftest.py
+++ b/tests/sqlalchemy_dialect_compliance/conftest.py
@@ -18,7 +18,6 @@
 # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 
 import contextlib
-import random
 import re
 import traceback
 
@@ -30,8 +29,14 @@
 )
 
 import google.cloud.bigquery.dbapi.connection
+import test_utils.prefixer
+
 import pybigquery.sqlalchemy_bigquery
 
+prefixer = test_utils.prefixer.Prefixer(
+    "python-bigquery-sqlalchemy", "tests/compliance"
+)
+
 pybigquery.sqlalchemy_bigquery.BigQueryDialect.preexecute_autoincrement_sequences = True
 google.cloud.bigquery.dbapi.connection.Connection.rollback = lambda self: None
 
@@ -62,7 +67,7 @@ def visit_delete(self, delete_stmt, *args, **kw):
 
 
 def pytest_sessionstart(session):
-    dataset_id = f"test_pybigquery_sqla{random.randint(0, 1<<63)}"
+    dataset_id = prefixer.create_prefix()
     session.config.option.dburi = [f"bigquery:///{dataset_id}"]
     with contextlib.closing(google.cloud.bigquery.Client()) as client:
         client.create_dataset(dataset_id)
@@ -73,4 +78,7 @@ def pytest_sessionfinish(session):
     dataset_id = config.db.dialect.dataset_id
     _pytest_sessionfinish(session)
     with contextlib.closing(google.cloud.bigquery.Client()) as client:
-        client.delete_dataset(dataset_id, delete_contents=True)
+        client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True)
+        for dataset in client.list_datasets():
+            if prefixer.should_cleanup(dataset.dataset_id):
+                client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)
diff --git a/tests/system/conftest.py b/tests/system/conftest.py
index bd7e25a4..639ba636 100644
--- a/tests/system/conftest.py
+++ b/tests/system/conftest.py
@@ -17,25 +17,19 @@
 # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-import datetime
 import pathlib
-import random
 from typing import List
 
 import pytest
 from google.cloud import bigquery
+import test_utils.prefixer
 
+prefixer = test_utils.prefixer.Prefixer("python-bigquery-sqlalchemy", "tests/system")
+
 DATA_DIR = pathlib.Path(__file__).parent / "data"
 
 
-def temp_suffix():
-    timestamp = datetime.datetime.utcnow().strftime("%y%m%d_%H%M%S")
-    random_string = hex(random.randrange(1000000))[2:]
-    return f"{timestamp}_{random_string}"
-
-
 def load_sample_data(
     full_table_id: str,
     bigquery_client: bigquery.Client,
@@ -73,7 +67,7 @@ def bigquery_dataset(
     bigquery_client: bigquery.Client, bigquery_schema: List[bigquery.SchemaField]
 ):
     project_id = bigquery_client.project
-    dataset_id = f"test_pybigquery_{temp_suffix()}"
+    dataset_id = prefixer.create_prefix()
     dataset = bigquery.Dataset(f"{project_id}.{dataset_id}")
     dataset = bigquery_client.create_dataset(dataset)
     sample_table_id = f"{project_id}.{dataset_id}.sample"
@@ -112,7 +106,7 @@ def bigquery_empty_table(
 @pytest.fixture(scope="session")
 def bigquery_regional_dataset(bigquery_client, bigquery_schema):
     project_id = bigquery_client.project
-    dataset_id = f"test_pybigquery_location_{temp_suffix()}"
+    dataset_id = prefixer.create_prefix()
     dataset = bigquery.Dataset(f"{project_id}.{dataset_id}")
     dataset.location = "asia-northeast1"
     dataset = bigquery_client.create_dataset(dataset)
@@ -126,3 +120,12 @@ def bigquery_regional_dataset(bigquery_client, bigquery_schema):
         job.result()
     yield dataset_id
     bigquery_client.delete_dataset(dataset_id, delete_contents=True)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def cleanup_datasets(bigquery_client: bigquery.Client):
+    for dataset in bigquery_client.list_datasets():
+        if prefixer.should_cleanup(dataset.dataset_id):
+            bigquery_client.delete_dataset(
+                dataset, delete_contents=True, not_found_ok=True
+            )
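
Both conftest modules now derive dataset IDs from test_utils.prefixer.Prefixer (from google-cloud-testutils) instead of ad-hoc timestamps and random suffixes; the shared prefix is what lets the new cleanup paths recognize and delete datasets orphaned by crashed test runs. A hypothetical sketch of the idea; the naming scheme below is an assumption, not the library's actual internals::

    import datetime
    import secrets

    class DatasetPrefixer:
        # Hypothetical stand-in for test_utils.prefixer.Prefixer.
        def __init__(self, prefix: str):
            self.prefix = prefix

        def create_prefix(self) -> str:
            # Embed a timestamp and a random suffix so each ID is unique
            # but still recognizable by its shared prefix.
            stamp = datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S")
            return f"{self.prefix}_{stamp}_{secrets.token_hex(4)}"

        def should_cleanup(self, resource_id: str) -> bool:
            # Real implementations typically also check the embedded
            # timestamp's age so a still-running session is left alone.
            return resource_id.startswith(self.prefix)
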
diff --git a/tests/system/test_alembic.py b/tests/system/test_alembic.py
index 2ea7af18..49b0af10 100644
--- a/tests/system/test_alembic.py
+++ b/tests/system/test_alembic.py
@@ -82,9 +82,9 @@ def test_alembic_scenario(alembic_table):
         Column("description", String(200)),
     )
     assert alembic_table("account", "schema") == [
-        "SchemaField('id', 'INTEGER', 'REQUIRED')",
-        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name')",
-        "SchemaField('description', 'STRING(200)', 'NULLABLE')",
+        "SchemaField('id', 'INTEGER', 'REQUIRED', None, (), ())",
+        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name', (), ())",
+        "SchemaField('description', 'STRING(200)', 'NULLABLE', None, (), ())",
     ]
 
     op.bulk_insert(
@@ -107,10 +107,11 @@ def test_alembic_scenario(alembic_table):
     )
 
     assert alembic_table("account", "schema") == [
-        "SchemaField('id', 'INTEGER', 'REQUIRED')",
-        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name')",
-        "SchemaField('description', 'STRING(200)', 'NULLABLE')",
-        "SchemaField('last_transaction_date', 'DATETIME', 'NULLABLE', 'when updated')",
+        "SchemaField('id', 'INTEGER', 'REQUIRED', None, (), ())",
+        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name', (), ())",
+        "SchemaField('description', 'STRING(200)', 'NULLABLE', None, (), ())",
+        "SchemaField('last_transaction_date', 'DATETIME', 'NULLABLE', 'when updated'"
+        ", (), ())",
     ]
 
     op.create_table(
@@ -126,8 +127,8 @@ def test_alembic_scenario(alembic_table):
 
     op.drop_column("account_w_comment", "description")
     assert alembic_table("account_w_comment", "schema") == [
-        "SchemaField('id', 'INTEGER', 'REQUIRED')",
-        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name')",
+        "SchemaField('id', 'INTEGER', 'REQUIRED', None, (), ())",
+        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name', (), ())",
     ]
     op.drop_table("account_w_comment")
 
@@ -136,10 +137,11 @@ def test_alembic_scenario(alembic_table):
     op.rename_table("account", "accounts")
     assert alembic_table("account") is None
    assert alembic_table("accounts", "schema") == [
-        "SchemaField('id', 'INTEGER', 'REQUIRED')",
-        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name')",
-        "SchemaField('description', 'STRING(200)', 'NULLABLE')",
-        "SchemaField('last_transaction_date', 'DATETIME', 'NULLABLE', 'when updated')",
+        "SchemaField('id', 'INTEGER', 'REQUIRED', None, (), ())",
+        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name', (), ())",
+        "SchemaField('description', 'STRING(200)', 'NULLABLE', None, (), ())",
+        "SchemaField('last_transaction_date', 'DATETIME', 'NULLABLE', 'when updated'"
+        ", (), ())",
     ]
     op.drop_table("accounts")
     assert alembic_table("accounts") is None
@@ -159,9 +161,9 @@ def test_alembic_scenario(alembic_table):
     # nullable:
     op.alter_column("transactions", "amount", True)
     assert alembic_table("transactions", "schema") == [
-        "SchemaField('account', 'INTEGER', 'REQUIRED')",
-        "SchemaField('transaction_time', 'DATETIME', 'REQUIRED')",
-        "SchemaField('amount', 'NUMERIC(11, 2)', 'NULLABLE')",
+        "SchemaField('account', 'INTEGER', 'REQUIRED', None, (), ())",
+        "SchemaField('transaction_time', 'DATETIME', 'REQUIRED', None, (), ())",
+        "SchemaField('amount', 'NUMERIC(11, 2)', 'NULLABLE', None, (), ())",
     ]
 
     op.create_table_comment("transactions", "Transaction log")
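
The expected strings in test_alembic.py change because newer google-cloud-bigquery releases include more constructor arguments, with their defaults, in SchemaField.__repr__, which is why the expected reprs gained trailing `None, (), ()` entries. A quick way to see what the installed client actually produces (assumes google-cloud-bigquery is installed)::

    from google.cloud import bigquery

    # Build a field equivalent to the first asserted column and print its
    # repr; on the client versions these assertions target, it should read
    # "SchemaField('id', 'INTEGER', 'REQUIRED', None, (), ())".
    field = bigquery.SchemaField("id", "INTEGER", mode="REQUIRED")
    print(repr(field))
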