
Commit b4b2e0f

alixhamitseaver authored and committed
Remove append sample for ORC and Parquet formats (googleapis#5936)
Samples are no longer used in the docs.
1 parent e08a98d · commit b4b2e0f

File tree: 1 file changed, +0 −20 lines

docs/bigquery/snippets.py

Lines changed: 0 additions & 20 deletions
@@ -1542,8 +1542,6 @@ def test_load_table_from_uri_append(client, to_delete, capsys):
     # SHared code
     # [START bigquery_load_table_gcs_csv_append]
     # [START bigquery_load_table_gcs_json_append]
-    # [START bigquery_load_table_gcs_parquet_append]
-    # [START bigquery_load_table_gcs_orc_append]
     # from google.cloud import bigquery
     # client = bigquery.Client()
     # table_ref = client.dataset('my_dataset').table('existing_table')
@@ -1555,8 +1553,6 @@ def test_load_table_from_uri_append(client, to_delete, capsys):
     job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND
     # [END bigquery_load_table_gcs_csv_append]
     # [END bigquery_load_table_gcs_json_append]
-    # [END bigquery_load_table_gcs_parquet_append]
-    # [END bigquery_load_table_gcs_orc_append]

     # Format-specific code
     # [START bigquery_load_table_gcs_csv_append]
@@ -1573,23 +1569,9 @@ def test_load_table_from_uri_append(client, to_delete, capsys):
     uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json'
     # [END bigquery_load_table_gcs_json_append]

-    # [START bigquery_load_table_gcs_parquet_append]
-    # The schema of the parquet file must match the table schema in an append
-    job_config.source_format = bigquery.SourceFormat.PARQUET
-    uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.parquet'
-    # [END bigquery_load_table_gcs_parquet_append]
-
-    # [START bigquery_load_table_gcs_orc_append]
-    # The schema of the orc file must match the table schema in an append
-    job_config.source_format = bigquery.SourceFormat.ORC
-    uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.orc'
-    # [END bigquery_load_table_gcs_orc_append]
-
     # Shared code
     # [START bigquery_load_table_gcs_csv_append]
     # [START bigquery_load_table_gcs_json_append]
-    # [START bigquery_load_table_gcs_parquet_append]
-    # [START bigquery_load_table_gcs_orc_append]
     load_job = client.load_table_from_uri(
         uri,
         table_ref,
@@ -1603,8 +1585,6 @@ def test_load_table_from_uri_append(client, to_delete, capsys):
     print('Loaded {} rows.'.format(destination_table.num_rows - previous_rows))
     # [END bigquery_load_table_gcs_csv_append]
     # [END bigquery_load_table_gcs_json_append]
-    # [END bigquery_load_table_gcs_parquet_append]
-    # [END bigquery_load_table_gcs_orc_append]

     out, _ = capsys.readouterr()
     assert previous_rows == 1
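
Below is a minimal sketch of the removed Parquet append sample, reassembled from the deleted lines above together with the shared setup and load code that remains in test_load_table_from_uri_append. The dataset and table names ('my_dataset', 'existing_table') are placeholders carried over from the commented-out setup in the snippet, and the job-submission and wait steps are assumed to mirror the surviving CSV/JSON sample rather than copied verbatim from the file.

# Sketch of the removed Parquet append sample (assumed setup, see note above).
from google.cloud import bigquery

client = bigquery.Client()
table_ref = client.dataset('my_dataset').table('existing_table')  # placeholders
previous_rows = client.get_table(table_ref).num_rows

job_config = bigquery.LoadJobConfig()
# Append the loaded rows to the existing table instead of overwriting it.
job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND
# The schema of the parquet file must match the table schema in an append.
job_config.source_format = bigquery.SourceFormat.PARQUET
uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.parquet'

load_job = client.load_table_from_uri(
    uri,
    table_ref,
    job_config=job_config)  # API request
load_job.result()  # Waits for the load job to complete.

destination_table = client.get_table(table_ref)
print('Loaded {} rows.'.format(destination_table.num_rows - previous_rows))

The removed ORC variant was identical apart from job_config.source_format = bigquery.SourceFormat.ORC and the us-states.orc URI.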
