
Commit 0078fd5

Merge pull request #96 from GoogleCloudPlatform/test-cleanup

Cleaning up base test case

Author: Jonathan Wayne Parrott
2 parents: 1bfaa4d + c651359

17 files changed (+105 / -104 lines)

CONTRIBUTING.md

Lines changed: 12 additions & 6 deletions
```diff
@@ -81,16 +81,22 @@ If you want to run the Google App Engine tests, you will need:
 
     $ export GAE_PYTHONPATH=<path your AppeEngine sdk>
 
-To run the bigquery tests, you'll need to create a bigquery dataset:
+To run the bigquery tests:
 
 * Create a dataset in your project named `test_dataset`.
-* Create a table named `test_table2`, upload ``tests/resources/data.csv`` and give it the following schema:
 
-        Name STRING
-        Age INTEGER
-        Weight FLOAT
-        IsMagic BOOLEAN
+        gcloud alpha bigquery datasets create test_dataset
 
+* Load sample data into google cloud storage (for import tests):
+
+        gsutil cp tests/resources/data.csv gs://$TEST_BUCKET_NAME/data.csv
+
+* Load the sample data into a table named `test_table` (for export and streaming tests):
+
+        gcloud alpha bigquery import \
+            gs://$TEST_BUCKET_NAME/data.csv \
+            test_dataset/test_table \
+            --schema-file tests/resources/schema.json
 
 ### Test environments
 
```
appengine/bigquery/main_test.py

Lines changed: 2 additions & 6 deletions
```diff
@@ -28,6 +28,7 @@ class TestAuthSample(tests.AppEngineTestbedCase):
     def setUp(self):
         super(TestAuthSample, self).setUp()
         self.app = webtest.TestApp(main.app)
+        main.PROJECTID = self.project_id
 
     def test_anonymous_get(self):
         response = self.app.get('/')
@@ -55,12 +56,7 @@ def test_oauthed_get(self, *args):
             {'status': '200'})
 
         with mock.patch.object(main.decorator, 'http', return_value=mock_http):
-            original_projectid = main.PROJECTID
-            try:
-                main.PROJECTID = self.constants['projectId']
-                response = self.app.get('/')
-            finally:
-                main.PROJECTID = original_projectid
+            response = self.app.get('/')
 
         # Should make the api call
         self.assertEqual(response.status_int, 200)
```
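
Across these diffs, the tests stop reading `self.constants[...]` and instead use attributes such as `self.project_id`, `self.bucket_name`, and `self.resource_path` provided by the cleaned-up base test case. The base class itself isn't shown in this excerpt; a minimal sketch consistent with that usage might look like the following (the `TEST_PROJECT_ID` variable name is an assumption; `TEST_BUCKET_NAME` appears in the CONTRIBUTING.md changes above):

```python
# Minimal sketch of the cleaned-up base test case implied by this commit;
# attribute names match how the tests in these diffs use them.
import os
import unittest


class CloudBaseTest(unittest.TestCase):

    def setUp(self):
        # TEST_PROJECT_ID is an assumed environment variable name;
        # TEST_BUCKET_NAME is referenced in the CONTRIBUTING.md changes.
        self.project_id = os.environ['TEST_PROJECT_ID']
        self.bucket_name = os.environ['TEST_BUCKET_NAME']
        self.resource_path = os.path.join(
            os.path.dirname(__file__), 'resources')
```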

bigquery/samples/async_query_test.py

Lines changed: 9 additions & 5 deletions
```diff
@@ -21,13 +21,17 @@
 class TestAsyncQuery(tests.CloudBaseTest):
 
     def test_async_query(self):
+        query = (
+            'SELECT corpus FROM publicdata:samples.shakespeare '
+            'GROUP BY corpus;')
+
         with tests.capture_stdout() as stdout:
             main(
-                self.constants['projectId'],
-                self.constants['query'],
-                False,
-                5,
-                5)
+                project_id=self.project_id,
+                query_string=query,
+                batch=False,
+                num_retries=5,
+                interval=1)
 
         value = stdout.getvalue().strip().split('\n').pop()
 
```
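The switch to keyword arguments also documents the sample's interface: the call above implies an entry point along these lines (a sketch inferred from this test alone; the default values are assumptions, not the sample's actual code):

```python
# Hypothetical signature for async_query's main(), inferred from the
# keyword arguments used in the test above.
def main(project_id, query_string, batch=False, num_retries=5, interval=1):
    ...
```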

bigquery/samples/export_data_to_cloud_storage_test.py

Lines changed: 26 additions & 18 deletions
```diff
@@ -19,33 +19,41 @@
 
 
 class TestExportTableToGCS(CloudBaseTest):
+    dataset_id = 'test_dataset'
+    table_id = 'test_table'
 
     def test_export_table_csv(self):
+        cloud_storage_output_uri = \
+            'gs://{}/output.csv'.format(self.bucket_name)
         main(
-            self.constants['cloudStorageOutputURI'],
-            self.constants['projectId'],
-            self.constants['datasetId'],
-            self.constants['newTableId'],
-            5,
-            1,
+            cloud_storage_output_uri,
+            self.project_id,
+            self.dataset_id,
+            self.table_id,
+            num_retries=5,
+            interval=1,
             export_format="CSV")
 
     def test_export_table_json(self):
+        cloud_storage_output_uri = \
+            'gs://{}/output.json'.format(self.bucket_name)
         main(
-            self.constants['cloudStorageOutputURI'],
-            self.constants['projectId'],
-            self.constants['datasetId'],
-            self.constants['newTableId'],
-            5,
-            1,
+            cloud_storage_output_uri,
+            self.project_id,
+            self.dataset_id,
+            self.table_id,
+            num_retries=5,
+            interval=1,
             export_format="NEWLINE_DELIMITED_JSON")
 
     def test_export_table_avro(self):
+        cloud_storage_output_uri = \
+            'gs://{}/output.avro'.format(self.bucket_name)
         main(
-            self.constants['cloudStorageOutputURI'],
-            self.constants['projectId'],
-            self.constants['datasetId'],
-            self.constants['newTableId'],
-            5,
-            1,
+            cloud_storage_output_uri,
+            self.project_id,
+            self.dataset_id,
+            self.table_id,
+            num_retries=5,
+            interval=1,
             export_format="AVRO")
```

bigquery/samples/getting_started_test.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -21,7 +21,7 @@
 class TestGettingStarted(tests.CloudBaseTest):
     def test_main(self):
         with tests.capture_stdout() as mock_stdout:
-            main(self.constants['projectId'])
+            main(self.project_id)
 
         stdout = mock_stdout.getvalue()
         self.assertRegexpMatches(stdout, re.compile(
```

bigquery/samples/list_datasets_projects_test.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -22,7 +22,7 @@ class TestListDatasetsProjects(tests.CloudBaseTest):
 
     def test_main(self):
         with tests.capture_stdout() as mock_stdout:
-            main(self.constants['projectId'])
+            main(self.project_id)
 
         stdout = mock_stdout.getvalue()
 
```

bigquery/samples/load_data_from_csv_test.py

Lines changed: 13 additions & 7 deletions
```diff
@@ -20,12 +20,18 @@
 
 
 class TestLoadDataFromCSV(CloudBaseTest):
+    dataset_id = 'test_dataset'
+    table_id = 'test_import_table'
+
     def test_load_table(self):
+        cloud_storage_input_uri = 'gs://{}/data.csv'.format(self.bucket_name)
+        schema_file = os.path.join(self.resource_path, 'schema.json')
+
         main(
-            self.constants['projectId'],
-            self.constants['datasetId'],
-            self.constants['newTableId'],
-            os.path.join(self.resource_path, 'schema.json'),
-            self.constants['cloudStorageInputURI'],
-            1,
-            5)
+            self.project_id,
+            self.dataset_id,
+            self.table_id,
+            schema_file=schema_file,
+            data_path=cloud_storage_input_uri,
+            poll_interval=1,
+            num_retries=5)
```
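
The `schema_file` argument above points at `tests/resources/schema.json`, which isn't included in this commit. Judging from the table schema removed from CONTRIBUTING.md earlier (Name, Age, Weight, IsMagic), it is presumably equivalent to the following sketch in BigQuery's JSON schema format (field order and layout are assumptions):

```json
[
    {"name": "Name", "type": "STRING"},
    {"name": "Age", "type": "INTEGER"},
    {"name": "Weight", "type": "FLOAT"},
    {"name": "IsMagic", "type": "BOOLEAN"}
]
```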

bigquery/samples/streaming_test.py

Lines changed: 6 additions & 4 deletions
```diff
@@ -21,6 +21,8 @@
 
 
 class TestStreaming(CloudBaseTest):
+    dataset_id = 'test_dataset'
+    table_id = 'test_table'
 
     def test_stream_row_to_bigquery(self):
         with open(
@@ -33,10 +35,10 @@ def test_stream_row_to_bigquery(self):
 
         with capture_stdout() as stdout:
             streaming.main(
-                self.constants['projectId'],
-                self.constants['datasetId'],
-                self.constants['newTableId'],
-                5)
+                self.project_id,
+                self.dataset_id,
+                self.table_id,
+                num_retries=5)
 
         results = stdout.getvalue().split('\n')
         self.assertIsNotNone(json.loads(results[0]))
```

bigquery/samples/sync_query_test.py

Lines changed: 8 additions & 4 deletions
```diff
@@ -21,12 +21,16 @@
 class TestSyncQuery(CloudBaseTest):
 
     def test_sync_query(self):
+        query = (
+            'SELECT corpus FROM publicdata:samples.shakespeare '
+            'GROUP BY corpus;')
+
         with capture_stdout() as stdout:
             main(
-                self.constants['projectId'],
-                self.constants['query'],
-                30,
-                5)
+                project_id=self.project_id,
+                query=query,
+                timeout=30,
+                num_retries=5)
 
         result = stdout.getvalue().split('\n')[0]
         self.assertIsNotNone(json.loads(result))
```

blog/introduction_to_data_models_in_cloud_datastore/blog_test.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -20,4 +20,4 @@ class BlogTestCase(CloudBaseTest):
     """Simple test case that ensures the blog code doesn't throw any errors."""
 
     def test_main(self):
-        main(self.constants['projectId'])
+        main(self.project_id)
```
