 def list_projects():
     bigquery_client = bigquery.Client()

-    projects = []
-    page_token = None
-
-    while True:
-        results, page_token = bigquery_client.list_projects(
-            page_token=page_token)
-        projects.extend(results)
-
-        if not page_token:
-            break
-
-    for project in projects:
+    for project in bigquery_client.list_projects():
         print(project.project_id)


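Note: each list_* call now returns a lazy iterator that handles page_token bookkeeping internally, so the manual pagination loops removed above are no longer needed. If you only want a bounded number of results, a minimal sketch using only the standard library on top of the new call:

    import itertools

    from google.cloud import bigquery

    bigquery_client = bigquery.Client()

    # Take at most 10 projects; the iterator fetches pages lazily as it is consumed.
    for project in itertools.islice(bigquery_client.list_projects(), 10):
        print(project.project_id)
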
@@ -57,18 +46,7 @@ def list_datasets(project=None):
     """
     bigquery_client = bigquery.Client(project=project)

-    datasets = []
-    page_token = None
-
-    while True:
-        results, page_token = bigquery_client.list_datasets(
-            page_token=page_token)
-        datasets.extend(results)
-
-        if not page_token:
-            break
-
-    for dataset in datasets:
+    for dataset in bigquery_client.list_datasets():
         print(dataset.name)


@@ -98,17 +76,7 @@ def list_tables(dataset_name, project=None):
         print('Dataset {} does not exist.'.format(dataset_name))
         return

-    tables = []
-    page_token = None
-
-    while True:
-        results, page_token = dataset.list_tables(page_token=page_token)
-        tables.extend(results)
-
-        if not page_token:
-            break
-
-    for table in tables:
+    for table in dataset.list_tables():
         print(table.name)


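If page boundaries are still wanted (for example, to display results one page at a time), the iterators returned by these list methods generally expose a pages attribute; a hedged sketch, assuming that attribute and the Client.dataset() helper are available in the installed google-cloud-bigquery version, with 'my_dataset' as a placeholder name:

    from google.cloud import bigquery

    bigquery_client = bigquery.Client()
    dataset = bigquery_client.dataset('my_dataset')  # placeholder dataset name

    tables_iter = dataset.list_tables()
    for page in tables_iter.pages:  # assumption: the iterator exposes .pages
        print('-- page --')
        for table in page:
            print(table.name)
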
@@ -157,19 +125,10 @@ def list_rows(dataset_name, table_name, project=None):
     # Reload the table so that the schema is available.
     table.reload()

-    rows = []
-    page_token = None
-
-    # Load at most 25 results. You can change this to `while True` and change
-    # the max_results argument to load more rows from BigQuery, but note
-    # that this can take some time. It's preferred to use a query.
-    while len(rows) < 25:
-        results, total_rows, page_token = table.fetch_data(
-            max_results=25, page_token=page_token)
-        rows.extend(results)
-
-        if not page_token:
-            break
+    # Load at most 25 results. You can change the max_results argument to load
+    # more rows from BigQuery, but note that this can take some time. It's
+    # preferred to use a query.
+    rows = list(table.fetch_data(max_results=25))

     # Use format to create a simple table.
     format_string = '{!s:<16} ' * len(table.schema)
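
fetch_data(max_results=25) also returns an iterator, and list() simply materializes up to 25 rows. The format string built above allots one left-aligned, 16-character column per schema field; a rough sketch of how it might be applied, assuming schema fields expose a name attribute and each row is a tuple of values:

    # Print a header row of field names, then the fetched rows.
    print(format_string.format(*(field.name for field in table.schema)))
    for row in rows:
        print(format_string.format(*row))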