@@ -132,7 +132,8 @@ def test_create_table_nested_repeated_schema(client, to_delete):
132132 # [START bigquery_nested_repeated_schema]
133133 # from google.cloud import bigquery
134134 # client = bigquery.Client()
135- # dataset_ref = client.dataset('my_dataset')
135+ # project = client.project
136+ # dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
136137
137138 schema = [
138139 bigquery .SchemaField ("id" , "STRING" , mode = "NULLABLE" ),
@@ -202,7 +203,8 @@ def test_create_partitioned_table(client, to_delete):
202203 # [START bigquery_create_table_partitioned]
203204 # from google.cloud import bigquery
204205 # client = bigquery.Client()
205- # dataset_ref = client.dataset('my_dataset')
206+ # project = client.project
207+ # dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
206208
207209 table_ref = dataset_ref .table ("my_partitioned_table" )
208210 schema = [
@@ -240,7 +242,9 @@ def test_create_partitioned_table(client, to_delete):
240242def test_manage_table_labels (client , to_delete ):
241243 dataset_id = "label_table_dataset_{}" .format (_millis ())
242244 table_id = "label_table_{}" .format (_millis ())
243- dataset = bigquery .Dataset (client .dataset (dataset_id ))
245+ project = client .project
246+ dataset_ref = bigquery .DatasetReference (project , dataset_id )
247+ dataset = bigquery .Dataset (dataset_ref )
244248 client .create_dataset (dataset )
245249 to_delete .append (dataset )
246250
@@ -250,7 +254,9 @@ def test_manage_table_labels(client, to_delete):
250254 # [START bigquery_label_table]
251255 # from google.cloud import bigquery
252256 # client = bigquery.Client()
253- # table_ref = client.dataset('my_dataset').table('my_table')
257+ # project = client.project
258+ # dataset_ref = bigquery.DatasetReference(project, dataset_id)
259+ # table_ref = dataset_ref.table('my_table')
254260 # table = client.get_table(table_ref) # API request
255261
256262 assert table .labels == {}
@@ -268,7 +274,8 @@ def test_manage_table_labels(client, to_delete):
268274 # dataset_id = 'my_dataset'
269275 # table_id = 'my_table'
270276
271- dataset_ref = client .dataset (dataset_id )
277+ project = client .project
278+ dataset_ref = bigquery .DatasetReference (project , dataset_id )
272279 table_ref = dataset_ref .table (table_id )
273280 table = client .get_table (table_ref ) # API Request
274281
@@ -286,7 +293,9 @@ def test_manage_table_labels(client, to_delete):
286293 # [START bigquery_delete_label_table]
287294 # from google.cloud import bigquery
288295 # client = bigquery.Client()
289- # table_ref = client.dataset('my_dataset').table('my_table')
296+ # project = client.project
297+ # dataset_ref = bigquery.DatasetReference(project, dataset_id)
298+ # table_ref = dataset_ref.table('my_table')
290299 # table = client.get_table(table_ref) # API request
291300
292301 # This example table starts with one label
@@ -310,7 +319,9 @@ def test_update_table_description(client, to_delete):
310319 """Update a table's description."""
311320 dataset_id = "update_table_description_dataset_{}" .format (_millis ())
312321 table_id = "update_table_description_table_{}" .format (_millis ())
313- dataset = bigquery .Dataset (client .dataset (dataset_id ))
322+ project = client .project
323+ dataset_ref = bigquery .DatasetReference (project , dataset_id )
324+ dataset = bigquery .Dataset (dataset_ref )
314325 client .create_dataset (dataset )
315326 to_delete .append (dataset )
316327
@@ -321,7 +332,9 @@ def test_update_table_description(client, to_delete):
321332 # [START bigquery_update_table_description]
322333 # from google.cloud import bigquery
323334 # client = bigquery.Client()
324- # table_ref = client.dataset('my_dataset').table('my_table')
335+ # project = client.project
336+ # dataset_ref = bigquery.DatasetReference(project, dataset_id)
337+ # table_ref = dataset_ref.table('my_table')
325338 # table = client.get_table(table_ref) # API request
326339
327340 assert table .description == "Original description."
@@ -343,7 +356,9 @@ def test_update_table_expiration(client, to_delete):
343356 """Update a table's expiration time."""
344357 dataset_id = "update_table_expiration_dataset_{}" .format (_millis ())
345358 table_id = "update_table_expiration_table_{}" .format (_millis ())
346- dataset = bigquery .Dataset (client .dataset (dataset_id ))
359+ project = client .project
360+ dataset_ref = bigquery .DatasetReference (project , dataset_id )
361+ dataset = bigquery .Dataset (dataset_ref )
347362 client .create_dataset (dataset )
348363 to_delete .append (dataset )
349364
@@ -356,7 +371,9 @@ def test_update_table_expiration(client, to_delete):
356371
357372 # from google.cloud import bigquery
358373 # client = bigquery.Client()
359- # table_ref = client.dataset('my_dataset').table('my_table')
374+ # project = client.project
375+ # dataset_ref = bigquery.DatasetReference(project, dataset_id)
376+ # table_ref = dataset_ref.table('my_table')
360377 # table = client.get_table(table_ref) # API request
361378
362379 assert table .expires is None
@@ -382,7 +399,9 @@ def test_relax_column(client, to_delete):
382399 """Updates a schema field from required to nullable."""
383400 dataset_id = "relax_column_dataset_{}" .format (_millis ())
384401 table_id = "relax_column_table_{}" .format (_millis ())
385- dataset = bigquery .Dataset (client .dataset (dataset_id ))
402+ project = client .project
403+ dataset_ref = bigquery .DatasetReference (project , dataset_id )
404+ dataset = bigquery .Dataset (dataset_ref )
386405 dataset = client .create_dataset (dataset )
387406 to_delete .append (dataset )
388407
@@ -396,7 +415,9 @@ def test_relax_column(client, to_delete):
396415 bigquery .SchemaField ("full_name" , "STRING" , mode = "REQUIRED" ),
397416 bigquery .SchemaField ("age" , "INTEGER" , mode = "REQUIRED" ),
398417 ]
399- table_ref = client .dataset (dataset_id ).table (table_id )
418+
419+ dataset_ref = bigquery .DatasetReference (project , dataset_id )
420+ table_ref = dataset_ref .table (table_id )
400421 table = bigquery .Table (table_ref , schema = original_schema )
401422 table = client .create_table (table )
402423 assert all (field .mode == "REQUIRED" for field in table .schema )
@@ -424,7 +445,9 @@ def test_update_table_cmek(client, to_delete):
424445 """Patch a table's metadata."""
425446 dataset_id = "update_table_cmek_{}" .format (_millis ())
426447 table_id = "update_table_cmek_{}" .format (_millis ())
427- dataset = bigquery .Dataset (client .dataset (dataset_id ))
448+ project = client .project
449+ dataset_ref = bigquery .DatasetReference (project , dataset_id )
450+ dataset = bigquery .Dataset (dataset_ref )
428451 client .create_dataset (dataset )
429452 to_delete .append (dataset )
430453
@@ -468,7 +491,7 @@ def test_update_table_cmek(client, to_delete):
468491def test_manage_views (client , to_delete ):
469492 project = client .project
470493 source_dataset_id = "source_dataset_{}" .format (_millis ())
471-    source_dataset_ref = client.dataset(source_dataset_id)
494+    source_dataset_ref = bigquery.DatasetReference(project, source_dataset_id)
472495 source_dataset = bigquery .Dataset (source_dataset_ref )
473496 source_dataset = client .create_dataset (source_dataset )
474497 to_delete .append (source_dataset )
@@ -487,7 +510,7 @@ def test_manage_views(client, to_delete):
487510 load_job .result ()
488511
489512 shared_dataset_id = "shared_dataset_{}" .format (_millis ())
490-    shared_dataset_ref = client.dataset(shared_dataset_id)
513+    shared_dataset_ref = bigquery.DatasetReference(project, shared_dataset_id)
491514 shared_dataset = bigquery .Dataset (shared_dataset_ref )
492515 shared_dataset = client .create_dataset (shared_dataset )
493516 to_delete .append (shared_dataset )
@@ -498,7 +521,7 @@ def test_manage_views(client, to_delete):
498521 # project = 'my-project'
499522 # source_dataset_id = 'my_source_dataset'
500523 # source_table_id = 'us_states'
501-    # shared_dataset_ref = client.dataset('my_shared_dataset')
524+ # shared_dataset_ref = bigquery.DatasetReference(project, 'my_shared_dataset')
502525
503526 # This example shows how to create a shared view of a source table of
504527 # US States. The source table contains all 50 states, while the view will
@@ -518,7 +541,7 @@ def test_manage_views(client, to_delete):
518541 # project = 'my-project'
519542 # source_dataset_id = 'my_source_dataset'
520543 # source_table_id = 'us_states'
521-    # shared_dataset_ref = client.dataset('my_shared_dataset')
544+ # shared_dataset_ref = bigquery.DatasetReference(project, 'my_shared_dataset')
522545
523546 # This example shows how to update a shared view of a source table of
524547 # US States. The view's query will be updated to contain only states with
@@ -534,8 +557,9 @@ def test_manage_views(client, to_delete):
534557 # from google.cloud import bigquery
535558 # client = bigquery.Client()
536559 # shared_dataset_id = 'my_shared_dataset'
537-
538-    view_ref = client.dataset(shared_dataset_id).table("my_shared_view")
560+    project = client.project
561+    shared_dataset_ref = bigquery.DatasetReference(project, shared_dataset_id)
562+    view_ref = shared_dataset_ref.table("my_shared_view")
539563 view = client .get_table (view_ref ) # API Request
540564
541565 # Display view properties
@@ -552,9 +576,9 @@ def test_manage_views(client, to_delete):
552576 # Assign access controls to the dataset containing the view
553577 # shared_dataset_id = 'my_shared_dataset'
554578    # analyst_group_email = 'data_analysts@example.com'
555-    shared_dataset = client.get_dataset(
556-        client.dataset(shared_dataset_id)
557-    )  # API request
579+    project = client.project
580+    shared_dataset_ref = bigquery.DatasetReference(project, shared_dataset_id)
581+    shared_dataset = client.get_dataset(shared_dataset_ref)  # API request
558582 access_entries = shared_dataset .access_entries
559583 access_entries .append (
560584 bigquery .AccessEntry ("READER" , "groupByEmail" , analyst_group_email )
@@ -567,9 +591,9 @@ def test_manage_views(client, to_delete):
567591 # Authorize the view to access the source dataset
568592 # project = 'my-project'
569593 # source_dataset_id = 'my_source_dataset'
570-    source_dataset = client.get_dataset(
571-        client.dataset(source_dataset_id)
572-    )  # API request
594+    project = client.project
595+    source_dataset_ref = bigquery.DatasetReference(project, source_dataset_id)
596+    source_dataset = client.get_dataset(source_dataset_ref)  # API request
573597 view_reference = {
574598 "projectId" : project ,
575599 "datasetId" : shared_dataset_id ,
@@ -602,7 +626,8 @@ def test_load_table_add_column(client, to_delete):
602626 # [START bigquery_add_column_load_append]
603627 # from google.cloud import bigquery
604628 # client = bigquery.Client()
605- # dataset_ref = client.dataset('my_dataset')
629+ # project = client.project
630+ # dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
606631 # filepath = 'path/to/your_file.csv'
607632
608633 # Retrieves the destination table and checks the length of the schema
@@ -673,7 +698,8 @@ def test_load_table_relax_column(client, to_delete):
673698 # [START bigquery_relax_column_load_append]
674699 # from google.cloud import bigquery
675700 # client = bigquery.Client()
676- # dataset_ref = client.dataset('my_dataset')
701+ # project = client.project
702+ # dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
677703 # filepath = 'path/to/your_file.csv'
678704
679705 # Retrieves the destination table and checks the number of required fields
0 commit comments