 import pandas.util.testing as tm


+api_exceptions = pytest.importorskip("google.api_core.exceptions")
+bigquery = pytest.importorskip("google.cloud.bigquery")
+service_account = pytest.importorskip("google.oauth2.service_account")
 pandas_gbq = pytest.importorskip('pandas_gbq')

 PROJECT_ID = None
@@ -67,20 +70,16 @@ def _get_private_key_path():
     return private_key_path


-def clean_gbq_environment(private_key=None):
-    dataset = pandas_gbq.gbq._Dataset(_get_project_id(),
-                                      private_key=private_key)
+def _get_client():
+    project_id = _get_project_id()
+    credentials = None

-    for i in range(1, 10):
-        if DATASET_ID + str(i) in dataset.datasets():
-            dataset_id = DATASET_ID + str(i)
-            table = pandas_gbq.gbq._Table(_get_project_id(), dataset_id,
-                                          private_key=private_key)
-            for j in range(1, 20):
-                if TABLE_ID + str(j) in dataset.tables(dataset_id):
-                    table.delete(TABLE_ID + str(j))
+    private_key_path = _get_private_key_path()
+    if private_key_path:
+        credentials = service_account.Credentials.from_service_account_file(
+            private_key_path)

-            dataset.delete(dataset_id)
+    return bigquery.Client(project=project_id, credentials=credentials)


 def make_mixed_dataframe_v2(test_size):
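How the new helper is meant to be used, as a minimal sketch built only from names already defined in this module (when _get_private_key_path() returns nothing, bigquery.Client falls back to application default credentials):

client = _get_client()
dataset_ref = client.dataset(DATASET_ID + "1")
try:
    # A single call drops the dataset and any tables a failed run left behind.
    client.delete_dataset(dataset_ref, delete_contents=True)
except api_exceptions.NotFound:
    pass  # Nothing left over to clean up.

Compared with the removed clean_gbq_environment, there is no need to scan numbered dataset and table suffixes: delete_contents=True removes everything under the dataset in one request.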
@@ -109,7 +108,6 @@ def test_read_gbq_without_dialect_warns_future_change(monkeypatch):
         pd.read_gbq("SELECT 1")


-@pytest.mark.xfail(reason="failing for pandas-gbq >= 0.7.0")
 @pytest.mark.single
 class TestToGBQIntegrationWithServiceAccountKeyPath(object):

@@ -122,18 +120,22 @@ def setup_class(cls):
         _skip_if_no_project_id()
         _skip_if_no_private_key_path()

-        clean_gbq_environment(_get_private_key_path())
-        pandas_gbq.gbq._Dataset(_get_project_id(),
-                                private_key=_get_private_key_path()
-                                ).create(DATASET_ID + "1")
+        cls.client = _get_client()
+        cls.dataset = cls.client.dataset(DATASET_ID + "1")
+        try:
+            # Clean-up previous test runs.
+            cls.client.delete_dataset(cls.dataset, delete_contents=True)
+        except api_exceptions.NotFound:
+            pass  # It's OK if the dataset doesn't already exist.
+
+        cls.client.create_dataset(bigquery.Dataset(cls.dataset))

     @classmethod
     def teardown_class(cls):
         # - GLOBAL CLASS FIXTURES -
         # put here any instruction you want to execute only *ONCE* *AFTER*
         # executing all tests.
-
-        clean_gbq_environment(_get_private_key_path())
+        cls.client.delete_dataset(cls.dataset, delete_contents=True)

     def test_roundtrip(self):
         destination_table = DESTINATION_TABLE + "1"
@@ -147,5 +149,6 @@ def test_roundtrip(self):
         result = pd.read_gbq("SELECT COUNT(*) AS num_rows FROM {0}"
                              .format(destination_table),
                              project_id=_get_project_id(),
-                             private_key=_get_private_key_path())
+                             private_key=_get_private_key_path(),
+                             dialect="standard")
         assert result['num_rows'][0] == test_size
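A usage note on the new dialect argument: passing dialect="standard" both opts the query into standard SQL and avoids the FutureWarning that test_read_gbq_without_dialect_warns_future_change exercises above, since pandas-gbq is moving away from the legacy SQL default. A minimal sketch of the call within this test module, with hypothetical project, table, and key-path values:

row_count = pd.read_gbq(
    "SELECT COUNT(*) AS num_rows FROM my_dataset.my_table",
    project_id="my-project",
    private_key="/path/to/service_account.json",  # path to a service account JSON key
    dialect="standard",  # explicit dialect, so no FutureWarning
)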