BigQuery: 'test_undelete_table' fails with 500 #9633

Closed
@busunkim96

Description

test_undelete_table has failed for every run today. See internal fusion link.

I don't see any reported outages for BigQuery at https://status.cloud.google.com/

=================================== FAILURES ===================================
_____________________________ test_undelete_table ______________________________

client = <google.cloud.bigquery.client.Client object at 0x7fc1ddc76710>
to_delete = [Dataset(DatasetReference(u'precise-truck-742', u'undelete_table_dataset_1573146523039'))]

    def test_undelete_table(client, to_delete):
        dataset_id = "undelete_table_dataset_{}".format(_millis())
        table_id = "undelete_table_table_{}".format(_millis())
        dataset = bigquery.Dataset(client.dataset(dataset_id))
        dataset.location = "US"
        dataset = client.create_dataset(dataset)
        to_delete.append(dataset)
    
        table = bigquery.Table(dataset.table(table_id), schema=SCHEMA)
        client.create_table(table)
    
        # [START bigquery_undelete_table]
        # TODO(developer): Uncomment the lines below and replace with your values.
        # import time
        # from google.cloud import bigquery
        # client = bigquery.Client()
        # dataset_id = 'my_dataset'  # Replace with your dataset ID.
        # table_id = 'my_table'      # Replace with your table ID.
    
        table_ref = client.dataset(dataset_id).table(table_id)
    
        # TODO(developer): Choose an appropriate snapshot point as epoch
        # milliseconds. For this example, we choose the current time as we're about
        # to delete the table immediately afterwards.
        snapshot_epoch = int(time.time() * 1000)
        # [END bigquery_undelete_table]
    
        # Due to very short lifecycle of the table, ensure we're not picking a time
        # prior to the table creation due to time drift between backend and client.
        table = client.get_table(table_ref)
        created_epoch = datetime_helpers.to_microseconds(table.created)
        if created_epoch > snapshot_epoch:
            snapshot_epoch = created_epoch
    
        # [START bigquery_undelete_table]
    
        # "Accidentally" delete the table.
        client.delete_table(table_ref)  # API request
    
        # Construct the restore-from table ID using a snapshot decorator.
        snapshot_table_id = "{}@{}".format(table_id, snapshot_epoch)
        source_table_ref = client.dataset(dataset_id).table(snapshot_table_id)
    
        # Choose a new table ID for the recovered table data.
        recovered_table_id = "{}_recovered".format(table_id)
        dest_table_ref = client.dataset(dataset_id).table(recovered_table_id)
    
        # Construct and run a copy job.
        job = client.copy_table(
            source_table_ref,
            dest_table_ref,
            # Location must match that of the source and destination tables.
>           location="US",
        )  # API request

docs/snippets.py:1483: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
google/cloud/bigquery/client.py:1975: in copy_table
    copy_job._begin(retry=retry)
google/cloud/bigquery/job.py:631: in _begin
    retry, method="POST", path=path, data=self.to_api_repr()
google/cloud/bigquery/client.py:475: in _call_api
    return call()
../api_core/google/api_core/retry.py:277: in retry_wrapped_func
    on_error=on_error,
../api_core/google/api_core/retry.py:202: in retry_target
    last_exc,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

value = RetryError(u'Deadline of 120.0s exceeded while calling <functools.partial object at 0x7fc1dd52b260>',)
from_value = InternalServerError(u'POST https://bigquery.googleapis.com/bigquery/v2/project...742/jobs: An internal error occurred and the request could not be completed.',)

    def raise_from(value, from_value):
>       raise value
E       RetryError: Deadline of 120.0s exceeded while calling <functools.partial object at 0x7fc1dd52b260>, last exception: 500 POST https://bigquery.googleapis.com/bigquery/v2/projects/precise-truck-742/jobs: An internal error occurred and the request could not be completed.

.nox/snippets-2-7/lib/python2.7/site-packages/six.py:740: RetryError
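The underlying failure is a RetryError: the default 120 s retry deadline elapsed while jobs.insert kept returning 500, so the copy job was never created. If the 500s are transient, one possible workaround for the systest is to pass a retry policy with a longer deadline to copy_table. The sketch below is only an illustration, not a confirmed fix: it assumes the installed google-cloud-bigquery exposes DEFAULT_RETRY and that copy_table accepts a retry argument (as client.py:1975 in the traceback suggests); the dataset/table IDs, snapshot epoch, and the 600 s deadline are placeholder values.

from google.cloud import bigquery
from google.cloud.bigquery.retry import DEFAULT_RETRY

client = bigquery.Client()

# Placeholder IDs for illustration only.
dataset_id = "my_dataset"
table_id = "my_table"
# Placeholder snapshot point in epoch milliseconds (must predate the deletion).
snapshot_epoch = 1573146523039

# Snapshot decorator ("table@epoch_millis") names the table as it existed at
# that point in time, which is what the undelete sample copies from.
source_table_ref = client.dataset(dataset_id).table(
    "{}@{}".format(table_id, snapshot_epoch)
)
dest_table_ref = client.dataset(dataset_id).table("{}_recovered".format(table_id))

# Retry transient 5xx responses for up to 600 s instead of the default 120 s.
job = client.copy_table(
    source_table_ref,
    dest_table_ref,
    location="US",
    retry=DEFAULT_RETRY.with_deadline(600),
)
job.result()  # Waits for the copy job to finish.

Bumping the deadline only papers over transient 500s; if the backend outage persists, the test will still fail and should be reported, as it was here.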

Labels

api: bigquery (Issues related to the BigQuery API), testing, type: process (a process-related concern; may include testing, release, or the like)
