From c293967c864a8cc2b58a4717da3cb2f3b183fbfd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 21 May 2020 18:24:11 +0200 Subject: [PATCH 01/20] chore(deps): update dependency com.google.cloud:google-cloud-bigquery to v1.116.0 (#375) --- samples/install-without-bom/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index 75e9d54cb..0e2c300b8 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -45,7 +45,7 @@ <groupId>com.google.cloud</groupId> <artifactId>google-cloud-bigquery</artifactId> - <version>1.115.1</version> + <version>1.116.0</version> From 62b25e32b88ee4e851ac35db68519a9036560400 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 26 May 2020 08:05:35 -0700 Subject: [PATCH 02/20] changes without context (#377) autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. --- README.md | 2 +- synth.metadata | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 1f5d01b88..bf6d0059a 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ If you are using Maven without BOM, add this to your dependencies: <groupId>com.google.cloud</groupId> <artifactId>google-cloud-bigquery</artifactId> - <version>1.115.1</version> + <version>1.116.0</version> ``` diff --git a/synth.metadata b/synth.metadata index 9c415deb3..8fa9a2e91 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/java-bigquery.git", - "sha": "69ede0ca217cfaf98b3364fb5301f1399db4e498" + "sha": "c293967c864a8cc2b58a4717da3cb2f3b183fbfd" } }, { From 0c0c063f264fd3ec1c76921cb98c87d96a13c510 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 26 May 2020 08:08:58 -0700 Subject: [PATCH 03/20] chore(java): allow overriding minimum supported Java version in templates (#577) (#378) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * chore(java): allow overriding minimum supported Java version in templates (#577) Source-Author: Jeff Ching Source-Date: Thu May 21 17:51:49 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 65f8c647c0bc0c6b38211b969a2a003e271a5ef1 Source-Link: https://github.com/googleapis/synthtool/commit/65f8c647c0bc0c6b38211b969a2a003e271a5ef1 --- README.md | 3 +-- synth.metadata | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index bf6d0059a..b05bc6fc1 100644 --- a/README.md +++ b/README.md @@ -275,8 +275,7 @@ Apache 2.0 - See [LICENSE][license] for more information.
Java Version | Status ------------ | ------ -Java 7 | [![Kokoro CI][kokoro-badge-image-1]][kokoro-badge-link-1] -Java 8 | [![Kokoro CI][kokoro-badge-image-2]][kokoro-badge-link-2] +Java 7 | [![Kokoro CI][kokoro-badge-image-1]][kokoro-badge-link-1]Java 8 | [![Kokoro CI][kokoro-badge-image-2]][kokoro-badge-link-2] Java 8 OSX | [![Kokoro CI][kokoro-badge-image-3]][kokoro-badge-link-3] Java 8 Windows | [![Kokoro CI][kokoro-badge-image-4]][kokoro-badge-link-4] Java 11 | [![Kokoro CI][kokoro-badge-image-5]][kokoro-badge-link-5] diff --git a/synth.metadata b/synth.metadata index 8fa9a2e91..39a2c5bf6 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,7 +11,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b48b0716a36ca069db3038da7e205c87a22ed19" + "sha": "65f8c647c0bc0c6b38211b969a2a003e271a5ef1" } } ] From 14c24f598a5b8847f39961879b141318375e7688 Mon Sep 17 00:00:00 2001 From: Irvi Aini <7439590+irvifa@users.noreply.github.com> Date: Tue, 26 May 2020 22:11:05 +0700 Subject: [PATCH 04/20] chore: Include clustering fields and schema as part of the function arguments (#379) As of the current version the schema and clustering fields are already defined inside the functions; by passing them as arguments, engineers can easily change the schema and clustering fields in the IT test, thus enabling them to understand more about the concept. --- .../bigquery/CreateClusteredTable.java | 24 +++++++++++-------- .../bigquery/CreateClusteredTableIT.java | 12 +++++++++- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java b/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java index 1fc265e17..27a0e144f 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java @@ -29,16 +29,25 @@ import com.google.cloud.bigquery.TableInfo; import com.google.cloud.bigquery.TimePartitioning; import com.google.common.collect.ImmutableList; +import java.util.List; public class CreateClusteredTable { public static void runCreateClusteredTable() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; - createClusteredTable(datasetName, tableName); + Schema schema = + Schema.of( + Field.of("name", StandardSQLTypeName.STRING), + Field.of("post_abbr", StandardSQLTypeName.STRING), + Field.of("date", StandardSQLTypeName.DATE)); + createClusteredTable(datasetName, tableName, + schema, ImmutableList.of("name", "post_abbr")); } - public static void createClusteredTable(String datasetName, String tableName) { + public static void createClusteredTable( + String datasetName, String tableName, + Schema schema, List<String> clusteringFields) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests.
@@ -47,15 +56,10 @@ public static void createClusteredTable(String datasetName, String tableName) { TableId tableId = TableId.of(datasetName, tableName); TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY); - - Schema schema = - Schema.of( - Field.of("name", StandardSQLTypeName.STRING), - Field.of("post_abbr", StandardSQLTypeName.STRING), - Field.of("date", StandardSQLTypeName.DATE)); - + // Clustering fields will be consisted of fields mentioned in the schema. + // As of now, another condition is that the table should be partitioned. Clustering clustering = - Clustering.newBuilder().setFields(ImmutableList.of("name", "post_abbr")).build(); + Clustering.newBuilder().setFields(clusteringFields).build(); StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder() diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java index 4f8c29942..37ddd226b 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java @@ -19,6 +19,10 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.common.collect.ImmutableList; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import org.junit.After; @@ -58,8 +62,14 @@ public void tearDown() { @Test public void createClusteredTable() { String tableName = "MY_CLUSTERED_TABLE"; + Schema schema = + Schema.of( + Field.of("name", StandardSQLTypeName.STRING), + Field.of("post_abbr", StandardSQLTypeName.STRING), + Field.of("date", StandardSQLTypeName.DATE)); - CreateClusteredTable.createClusteredTable(BIGQUERY_DATASET_NAME, tableName); + CreateClusteredTable.createClusteredTable(BIGQUERY_DATASET_NAME, tableName, + schema, ImmutableList.of("name", "post_abbr")); assertThat(bout.toString()).contains("Clustered table created successfully"); From 9581ccbb5c61ac89f33c5849b42c0afabd5cbeca Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 26 May 2020 11:15:11 -0400 Subject: [PATCH 05/20] chore: release 1.116.1-SNAPSHOT (#376) * updated versions.txt [ci skip] * updated samples/pom.xml [ci skip] * updated samples/install-without-bom/pom.xml [ci skip] * updated samples/snippets/pom.xml [ci skip] * updated google-cloud-bigquery/pom.xml [ci skip] * updated pom.xml [ci skip] * updated samples/snapshot/pom.xml Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- google-cloud-bigquery/pom.xml | 4 ++-- pom.xml | 4 ++-- samples/snapshot/pom.xml | 2 +- versions.txt | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index d1b59b1a7..86ef49ea0 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 1.116.0 + 1.116.1-SNAPSHOT jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 1.116.0 + 1.116.1-SNAPSHOT google-cloud-bigquery diff --git a/pom.xml b/pom.xml index 76bf1fef7..8ea11da2f 100644 --- 
a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ <groupId>com.google.cloud</groupId> <artifactId>google-cloud-bigquery-parent</artifactId> <packaging>pom</packaging> - <version>1.116.0</version> + <version>1.116.1-SNAPSHOT</version> <name>BigQuery Parent</name> <url>https://github.com/googleapis/java-bigquery</url> @@ -88,7 +88,7 @@ <groupId>com.google.cloud</groupId> <artifactId>google-cloud-bigquery</artifactId> - <version>1.116.0</version> + <version>1.116.1-SNAPSHOT</version> diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 202d493ea..974660723 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -44,7 +44,7 @@ <groupId>com.google.cloud</groupId> <artifactId>google-cloud-bigquery</artifactId> - <version>1.116.0</version> + <version>1.116.1-SNAPSHOT</version> diff --git a/versions.txt b/versions.txt index ee09eebae..8d27316d4 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:1.116.0:1.116.0 \ No newline at end of file +google-cloud-bigquery:1.116.0:1.116.1-SNAPSHOT \ No newline at end of file From 2b96f7877f75781416fb26407d4fee6d4958dc3d Mon Sep 17 00:00:00 2001 From: Praful Makani Date: Tue, 26 May 2020 20:46:19 +0530 Subject: [PATCH 06/20] chore: remove objenesis ignore from maven dependency plugin (#383) --- pom.xml | 3 --- 1 file changed, 3 deletions(-) diff --git a/pom.xml b/pom.xml index 8ea11da2f..b24faf6ca 100644 --- a/pom.xml +++ b/pom.xml @@ -117,9 +117,6 @@ <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> - - org.objenesis:objenesis - From 0e91af23f63eb2b9af578bd82952d3480e751f9b Mon Sep 17 00:00:00 2001 From: Irvi Aini <7439590+irvifa@users.noreply.github.com> Date: Tue, 26 May 2020 22:16:36 +0700 Subject: [PATCH 07/20] chore: Include clustering fields and schema in LoadTableClusteredIT as part of the function arguments (#382) As of the current version the schema and clustering fields are already defined inside the functions; by passing them as arguments, engineers can easily change the schema and clustering fields in the IT test, thus enabling them to understand more about the concept. --- .../example/bigquery/LoadTableClustered.java | 23 +++++++++++-------- .../bigquery/LoadTableClusteredIT.java | 13 ++++++++++- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java b/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java index 7d4987d11..20f4104f9 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java +++ b/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java @@ -31,6 +31,7 @@ import com.google.cloud.bigquery.TableId; import com.google.cloud.bigquery.TimePartitioning; import com.google.common.collect.ImmutableList; +import java.util.List; public class LoadTableClustered { @@ -39,10 +40,17 @@ public static void runLoadTableClustered() throws Exception { String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; String sourceUri = "/path/to/file.csv"; - loadTableClustered(datasetName, tableName, sourceUri); + Schema schema = + Schema.of( + Field.of("name", StandardSQLTypeName.STRING), + Field.of("post_abbr", StandardSQLTypeName.STRING), + Field.of("date", StandardSQLTypeName.DATE)); + loadTableClustered(datasetName, tableName, sourceUri, + schema, ImmutableList.of("name", "post_abbr")); } - public static void loadTableClustered(String datasetName, String tableName, String sourceUri) + public static void loadTableClustered(String datasetName, String tableName, String sourceUri, + Schema schema, List<String> clusteringFields) throws Exception { try { // Initialize client that will be used to send requests.
This client only needs to be created @@ -51,16 +59,11 @@ public static void loadTableClustered(String datasetName, String tableName, Stri TableId tableId = TableId.of(datasetName, tableName); - Schema schema = - Schema.of( - Field.of("name", StandardSQLTypeName.STRING), - Field.of("post_abbr", StandardSQLTypeName.STRING), - Field.of("date", StandardSQLTypeName.DATE)); - TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY); - + // Clustering fields will be consisted of fields mentioned in the schema. + // As of now, another condition is that the table should be partitioned. Clustering clustering = - Clustering.newBuilder().setFields(ImmutableList.of("name", "post_abbr")).build(); + Clustering.newBuilder().setFields(clusteringFields).build(); LoadJobConfiguration loadJobConfig = LoadJobConfiguration.builder(tableId, sourceUri) diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java index 968235e4b..98ef57afd 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java @@ -19,6 +19,10 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.common.collect.ImmutableList; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import org.junit.After; @@ -61,7 +65,14 @@ public void loadTableClustered() throws Exception { String tableName = "LOAD_CLUSTERED_TABLE_TEST"; - LoadTableClustered.loadTableClustered(BIGQUERY_DATASET_NAME, tableName, sourceUri); + Schema schema = + Schema.of( + Field.of("name", StandardSQLTypeName.STRING), + Field.of("post_abbr", StandardSQLTypeName.STRING), + Field.of("date", StandardSQLTypeName.DATE)); + + LoadTableClustered.loadTableClustered(BIGQUERY_DATASET_NAME, tableName, sourceUri, + schema, ImmutableList.of("name", "post_abbr")); assertThat(bout.toString()) .contains("Data successfully loaded into clustered table during load job"); From 70f702d473a7c3bdbde2867cc552f25c91fbe873 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 26 May 2020 10:12:13 -0700 Subject: [PATCH 08/20] chore(java): fix inadvertent whitespace changes (#578) (#384) Source-Author: Jeff Ching Source-Date: Tue May 26 08:37:51 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 388f7aafee3d7a067c23db6c13b7e83fb361c64a Source-Link: https://github.com/googleapis/synthtool/commit/388f7aafee3d7a067c23db6c13b7e83fb361c64a --- README.md | 3 ++- synth.metadata | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index b05bc6fc1..bf6d0059a 100644 --- a/README.md +++ b/README.md @@ -275,7 +275,8 @@ Apache 2.0 - See [LICENSE][license] for more information. 
Java Version | Status ------------ | ------ -Java 7 | [![Kokoro CI][kokoro-badge-image-1]][kokoro-badge-link-1]Java 8 | [![Kokoro CI][kokoro-badge-image-2]][kokoro-badge-link-2] +Java 7 | [![Kokoro CI][kokoro-badge-image-1]][kokoro-badge-link-1] +Java 8 | [![Kokoro CI][kokoro-badge-image-2]][kokoro-badge-link-2] Java 8 OSX | [![Kokoro CI][kokoro-badge-image-3]][kokoro-badge-link-3] Java 8 Windows | [![Kokoro CI][kokoro-badge-image-4]][kokoro-badge-link-4] Java 11 | [![Kokoro CI][kokoro-badge-image-5]][kokoro-badge-link-5] diff --git a/synth.metadata b/synth.metadata index 39a2c5bf6..e6a7e94a2 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,14 +4,14 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/java-bigquery.git", - "sha": "c293967c864a8cc2b58a4717da3cb2f3b183fbfd" + "sha": "0e91af23f63eb2b9af578bd82952d3480e751f9b" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "65f8c647c0bc0c6b38211b969a2a003e271a5ef1" + "sha": "388f7aafee3d7a067c23db6c13b7e83fb361c64a" } } ] From 7c02cd3ca7238d24bb2914ae2262a3b2e2d4bda7 Mon Sep 17 00:00:00 2001 From: Irvi Aini <7439590+irvifa@users.noreply.github.com> Date: Wed, 27 May 2020 01:22:08 +0700 Subject: [PATCH 09/20] docs(samples): Convert tightly coupled local variable inside of method into method arguments (#386) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit As the previous PR https://github.com/googleapis/java-bigquery/pull/382 and https://github.com/googleapis/java-bigquery/pull/379. I find several other samples that have the tendencies of including tightly coupled variable for each of specific method. Hence I think it will be better to pass it as an argument instead of simply using local variable inside of the method. Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/java-bigquery/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) Fixes #387 ☕️ --- .../example/bigquery/AddColumnLoadAppend.java | 38 +++++++++++-------- .../com/example/bigquery/LoadParquet.java | 9 +++-- .../bigquery/LoadParquetReplaceTable.java | 10 +++-- .../com/example/bigquery/TableInsertRows.java | 15 ++++---- .../example/bigquery/UpdateDatasetAccess.java | 14 +++---- .../bigquery/UpdateDatasetExpiration.java | 9 ++--- .../bigquery/UpdateTableExpiration.java | 10 ++--- .../bigquery/AddColumnLoadAppendIT.java | 13 ++++++- .../com/example/bigquery/LoadParquetIT.java | 4 +- .../bigquery/LoadParquetReplaceTableIT.java | 4 +- .../example/bigquery/TableInsertRowsIT.java | 9 ++++- .../bigquery/UpdateDatasetAccessIT.java | 6 ++- .../bigquery/UpdateDatasetExpirationIT.java | 4 +- .../bigquery/UpdateTableExpirationIT.java | 4 +- 14 files changed, 94 insertions(+), 55 deletions(-) diff --git a/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java b/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java index c529272fc..932c27d69 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java +++ b/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java @@ -32,6 +32,8 @@ import com.google.cloud.bigquery.Schema; import com.google.cloud.bigquery.TableId; import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; import java.util.UUID; public class AddColumnLoadAppend { @@ -41,11 +43,28 @@ public static void runAddColumnLoadAppend() throws Exception { String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; String sourceUri = "/path/to/file.csv"; - addColumnLoadAppend(datasetName, tableName, sourceUri); + // Add a new column to a BigQuery table while appending rows via a load job. + // 'REQUIRED' fields cannot be added to an existing schema, so the additional column must be + // 'NULLABLE'. + Schema schema = + Schema.of( + Field.newBuilder("name", LegacySQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + + List fields = schema.getFields(); + // Adding below additional column during the load job + Field newField = Field.newBuilder("post_abbr", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(); + List newFields = new ArrayList<>(fields); + newFields.add(newField); + Schema newSchema = Schema.of(newFields); + addColumnLoadAppend(datasetName, tableName, sourceUri, newSchema); } - public static void addColumnLoadAppend(String datasetName, String tableName, String sourceUri) - throws Exception { + public static void addColumnLoadAppend(String datasetName, String tableName, + String sourceUri, Schema newSchema) throws Exception { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. @@ -53,19 +72,6 @@ public static void addColumnLoadAppend(String datasetName, String tableName, Str TableId tableId = TableId.of(datasetName, tableName); - // Add a new column to a BigQuery table while appending rows via a load job. - // 'REQUIRED' fields cannot be added to an existing schema, so the additional column must be - // 'NULLABLE'. 
- Schema newSchema = - Schema.of( - Field.newBuilder("name", LegacySQLTypeName.STRING) - .setMode(Field.Mode.REQUIRED) - .build(), - // Adding below additional column during the load job - Field.newBuilder("post_abbr", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build()); - LoadJobConfiguration loadJobConfig = LoadJobConfiguration.builder(tableId, sourceUri) .setFormatOptions(FormatOptions.csv()) diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java b/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java index 580b6183f..56a6b6b04 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java +++ b/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java @@ -32,17 +32,18 @@ public class LoadParquet { public static void runLoadParquet() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; - loadParquet(datasetName); + String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; + String tableName = "us_states"; + loadParquet(datasetName, tableName, sourceUri); } - public static void loadParquet(String datasetName) { + public static void loadParquet(String datasetName, String tableName, String sourceUri) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; - TableId tableId = TableId.of(datasetName, "us_states"); + TableId tableId = TableId.of(datasetName, tableName); LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri) diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java b/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java index 80f815046..c8ee67c67 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java +++ b/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java @@ -34,10 +34,13 @@ public class LoadParquetReplaceTable { public static void runLoadParquetReplaceTable() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; - loadParquetReplaceTable(datasetName); + String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; + String tableName = "us_states"; + loadParquetReplaceTable(datasetName, tableName, sourceUri); } - public static void loadParquetReplaceTable(String datasetName) { + public static void loadParquetReplaceTable(String datasetName, String tableName, + String sourceUri) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. @@ -46,8 +49,7 @@ public static void loadParquetReplaceTable(String datasetName) { // Imports a GCS file into a table and overwrites table data if table already exists. 
// This sample loads CSV file at: // https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv - String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; - TableId tableId = TableId.of(datasetName, "us_states"); + TableId tableId = TableId.of(datasetName, tableName); // For more information on LoadJobConfiguration see: // https://googleapis.dev/java/google-cloud-clients/latest/com/google/cloud/bigquery/LoadJobConfiguration.Builder.html diff --git a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java index 4163a5979..b601c859b 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java +++ b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java @@ -34,10 +34,16 @@ public static void runTableInsertRows() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; - tableInsertRows(datasetName, tableName); + // Create a row to insert + Map rowContent = new HashMap<>(); + rowContent.put("booleanField", true); + rowContent.put("numericField", "3.14"); + + tableInsertRows(datasetName, tableName, rowContent); } - public static void tableInsertRows(String datasetName, String tableName) { + public static void tableInsertRows(String datasetName, String tableName, + Map rowContent) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. @@ -46,11 +52,6 @@ public static void tableInsertRows(String datasetName, String tableName) { // Get table TableId tableId = TableId.of(datasetName, tableName); - // Create a row to insert - Map rowContent = new HashMap<>(); - rowContent.put("booleanField", true); - rowContent.put("numericField", "3.14"); - // Inserts rowContent into datasetName:tableId. InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow(rowContent).build()); diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java index 5719dcffa..5f7cc27fa 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java +++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java @@ -31,10 +31,15 @@ public class UpdateDatasetAccess { public static void runUpdateDatasetAccess() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; - updateDatasetAccess(datasetName); + // Create a new ACL granting the READER role to "sample.bigquery.dev@gmail.com" + // For more information on the types of ACLs available see: + // https://cloud.google.com/storage/docs/access-control/lists + Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER); + + updateDatasetAccess(datasetName, newEntry); } - public static void updateDatasetAccess(String datasetName) { + public static void updateDatasetAccess(String datasetName, Acl newEntry) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. 
@@ -42,11 +47,6 @@ public static void updateDatasetAccess(String datasetName) { Dataset dataset = bigquery.getDataset(datasetName); - // Create a new ACL granting the READER role to "sample.bigquery.dev@gmail.com" - // For more information on the types of ACLs available see: - // https://cloud.google.com/storage/docs/access-control/lists - Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER); - // Get a copy of the ACLs list from the dataset and append the new entry ArrayList acls = new ArrayList<>(dataset.getAcl()); acls.add(newEntry); diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java index bea27624f..9b746a90d 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java +++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java @@ -28,18 +28,17 @@ public class UpdateDatasetExpiration { public static void runUpdateDatasetExpiration() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; - updateDatasetExpiration(datasetName); + // Update dataset expiration to one day + Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); + updateDatasetExpiration(datasetName, newExpiration); } - public static void updateDatasetExpiration(String datasetName) { + public static void updateDatasetExpiration(String datasetName, Long newExpiration) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - // Update dataset expiration to one day - Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); - Dataset dataset = bigquery.getDataset(datasetName); bigquery.update(dataset.toBuilder().setDefaultTableLifetime(newExpiration).build()); System.out.println("Dataset description updated successfully to " + newExpiration); diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java index a5d9abedb..cbc9a1940 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java +++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java @@ -29,18 +29,18 @@ public static void runUpdateTableExpiration() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; - updateTableExpiration(datasetName, tableName); + // Update table expiration to one day. + Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); + updateTableExpiration(datasetName, tableName, newExpiration); } - public static void updateTableExpiration(String datasetName, String tableName) { + public static void updateTableExpiration(String datasetName, String tableName, + Long newExpiration) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. 
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - // Update table expiration to one day - Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); - Table table = bigquery.getTable(datasetName, tableName); bigquery.update(table.toBuilder().setExpirationTime(newExpiration).build()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java b/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java index 12a9acc38..afbe13c94 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java @@ -24,6 +24,8 @@ import com.google.cloud.bigquery.Schema; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.util.ArrayList; +import java.util.List; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -71,7 +73,16 @@ public void testAddColumnLoadAppend() throws Exception { CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, originalSchema); - AddColumnLoadAppend.addColumnLoadAppend(BIGQUERY_DATASET_NAME, tableName, sourceUri); + List fields = originalSchema.getFields(); + // Adding below additional column during the load job + Field newField = Field.newBuilder("post_abbr", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(); + List newFields = new ArrayList<>(fields); + newFields.add(newField); + Schema newSchema = Schema.of(newFields); + + AddColumnLoadAppend.addColumnLoadAppend(BIGQUERY_DATASET_NAME, tableName, sourceUri, newSchema); assertThat(bout.toString()).contains("Column successfully added during load append job"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java index a60fbea3b..cc3ff5d72 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java @@ -57,7 +57,9 @@ public void tearDown() { @Test public void loadParquet() { - LoadParquet.loadParquet(BIGQUERY_DATASET_NAME); + String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; + String tableName = "us_states"; + LoadParquet.loadParquet(BIGQUERY_DATASET_NAME, tableName, sourceUri); assertThat(bout.toString()).contains("GCS parquet loaded successfully."); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java index 1d2bd0550..03222cd6d 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java @@ -57,7 +57,9 @@ public void tearDown() { @Test public void testLoadParquetReplaceTable() { - LoadParquetReplaceTable.loadParquetReplaceTable(BIGQUERY_DATASET_NAME); + String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; + String tableName = "us_states"; + LoadParquetReplaceTable.loadParquetReplaceTable(BIGQUERY_DATASET_NAME, tableName, sourceUri); assertThat(bout.toString()).contains("GCS parquet overwrote existing table successfully."); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java b/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java index 311cd9bd4..527907dd9 100644 --- 
a/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java @@ -24,6 +24,8 @@ import com.google.cloud.bigquery.Schema; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.util.HashMap; +import java.util.Map; import java.util.UUID; import org.junit.After; import org.junit.Before; @@ -70,8 +72,13 @@ public void testTableInsertRows() { // Create table in dataset for testing CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema); + // Create a row to insert + Map rowContent = new HashMap<>(); + rowContent.put("booleanField", true); + rowContent.put("numericField", "3.14"); + // Testing - TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName); + TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName, rowContent); assertThat(bout.toString()).contains("Rows successfully inserted into table"); // Clean up diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java index 60086567a..10e7cfddd 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java @@ -19,6 +19,9 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Acl; +import com.google.cloud.bigquery.Acl.Role; +import com.google.cloud.bigquery.Acl.User; import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; import java.io.ByteArrayOutputStream; import java.io.PrintStream; @@ -62,8 +65,9 @@ public void updateDatasetAccess() { // Create a dataset in order to modify its ACL CreateDataset.createDataset(generatedDatasetName); + Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER); // Modify dataset's ACL - UpdateDatasetAccess.updateDatasetAccess(generatedDatasetName); + UpdateDatasetAccess.updateDatasetAccess(generatedDatasetName, newEntry); assertThat(bout.toString()).contains("Dataset Access Control updated successfully"); // Clean up diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java index 7dd4f7f46..3360223e3 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java @@ -22,6 +22,7 @@ import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.util.concurrent.TimeUnit; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -62,8 +63,9 @@ public void updateDatasetExpiration() { // Create a dataset in order to modify its expiration CreateDataset.createDataset(generatedDatasetName); + Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); // Modify dataset's expiration - UpdateDatasetExpiration.updateDatasetExpiration(generatedDatasetName); + UpdateDatasetExpiration.updateDatasetExpiration(generatedDatasetName, newExpiration); assertThat(bout.toString()).contains("Dataset description updated successfully"); // Clean up diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java 
b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java index 5352762d8..428317029 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java @@ -21,6 +21,7 @@ import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.util.concurrent.TimeUnit; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -59,7 +60,8 @@ public void tearDown() { public void updateTableExpiration() { String tableName = "update_expiration_table"; CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, null); - UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, tableName); + Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); + UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, tableName, newExpiration); assertThat(bout.toString()).contains("Table expiration updated successfully"); // Clean up From 7e21be2506cdaf065cd32e3aed403890929b93e7 Mon Sep 17 00:00:00 2001 From: Stephanie Wang Date: Tue, 26 May 2020 16:37:06 -0400 Subject: [PATCH 10/20] deps: update shared-deps and add back certain test deps (#390) * deps: update shared-deps and add back certain test deps * add junit version --- pom.xml | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index b24faf6ca..292f6fb7a 100644 --- a/pom.xml +++ b/pom.xml @@ -74,7 +74,7 @@ com.google.cloud google-cloud-shared-dependencies - 0.4.0 + 0.5.0 pom import @@ -91,6 +91,25 @@ 1.116.1-SNAPSHOT + + com.google.apis + google-api-services-bigquery + ${google-api-services-bigquery.version} + + + + + junit + junit + 4.13 + test + + + com.google.truth + truth + 1.0.1 + test + org.mockito mockito-core @@ -103,11 +122,6 @@ 1.108.0 test - - com.google.apis - google-api-services-bigquery - ${google-api-services-bigquery.version} - From 3f13ccbbf1ca8f179c48a69c8e456bdf45c43af5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 28 May 2020 17:13:14 +0200 Subject: [PATCH 11/20] deps: update dependency com.google.cloud:google-cloud-shared-dependencies to v0.6.0 (#392) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 292f6fb7a..4e5fbaa66 100644 --- a/pom.xml +++ b/pom.xml @@ -74,7 +74,7 @@ com.google.cloud google-cloud-shared-dependencies - 0.5.0 + 0.6.0 pom import From a11f3cb4733215a9c4a65936b05f9cc2ed75656f Mon Sep 17 00:00:00 2001 From: Irvi Aini <7439590+irvifa@users.noreply.github.com> Date: Thu, 28 May 2020 22:20:10 +0700 Subject: [PATCH 12/20] docs(samples): Convert tightly coupled local variable involved options inside of method into method arguments (#393) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit As of the current version we can see that more options related to argument are written inside of the test instead of included as arguments, so I think as this is tightly coupled with argument, it will be better to pass it as well. Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/java-bigquery/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) Fixes #394 ☕️ --- .../bigquery/CreatePartitionedTable.java | 15 +++++++-------- .../example/bigquery/ExtractTableToJson.java | 19 +++++++++++-------- .../com/example/bigquery/LoadLocalFile.java | 7 ++++--- .../bigquery/CreatePartitionedTableIT.java | 10 +++++++++- .../bigquery/ExtractTableToJsonIT.java | 6 ++++-- .../com/example/bigquery/LoadLocalFileIT.java | 3 ++- 6 files changed, 37 insertions(+), 23 deletions(-) diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java b/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java index 79a15a801..62a51c669 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java @@ -34,10 +34,15 @@ public static void runCreatePartitionedTable() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; - createPartitionedTable(datasetName, tableName); + Schema schema = + Schema.of( + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("booleanField", StandardSQLTypeName.BOOL), + Field.of("dateField", StandardSQLTypeName.DATE)); + createPartitionedTable(datasetName, tableName, schema); } - public static void createPartitionedTable(String datasetName, String tableName) { + public static void createPartitionedTable(String datasetName, String tableName, Schema schema) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. 
@@ -47,12 +52,6 @@ public static void createPartitionedTable(String datasetName, String tableName) TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY); - Schema schema = - Schema.of( - Field.of("stringField", StandardSQLTypeName.STRING), - Field.of("booleanField", StandardSQLTypeName.BOOL), - Field.of("dateField", StandardSQLTypeName.DATE)); - StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder() .setSchema(schema) diff --git a/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java b/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java index 4d657c690..0f05cb20d 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java +++ b/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java @@ -35,12 +35,19 @@ public static void runExtractTableToJson() { String tableName = "shakespeare"; String bucketName = "my-bucket"; String destinationUri = "gs://" + bucketName + "/path/to/file"; - extractTableToJson(projectId, datasetName, tableName, destinationUri); + // For more information on export formats available see: + // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types + // For more information on Job see: + // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html + + String dataFormat = "CSV"; + extractTableToJson(projectId, datasetName, tableName, destinationUri, dataFormat); } // Exports datasetName:tableName to destinationUri as raw CSV public static void extractTableToJson( - String projectId, String datasetName, String tableName, String destinationUri) { + String projectId, String datasetName, String tableName, String destinationUri, + String dataFormat) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. @@ -49,11 +56,7 @@ public static void extractTableToJson( TableId tableId = TableId.of(projectId, datasetName, tableName); Table table = bigquery.getTable(tableId); - // For more information on export formats available see: - // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types - // For more information on Job see: - // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html - Job job = table.extract("CSV", destinationUri); + Job job = table.extract(dataFormat, destinationUri); // Blocks until this job completes its execution, either failing or succeeding. Job completedJob = @@ -68,7 +71,7 @@ public static void extractTableToJson( "BigQuery was unable to extract due to an error: \n" + job.getStatus().getError()); return; } - System.out.println("Table export successful. Check in GCS bucket for the CSV file."); + System.out.println("Table export successful. Check in GCS bucket for the " + dataFormat + " file."); } catch (BigQueryException | InterruptedException e) { System.out.println("Table extraction job was interrupted. 
\n" + e.toString()); } diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java b/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java index e6e2e3915..3e580ec7d 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java +++ b/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java @@ -41,10 +41,11 @@ public static void runLoadLocalFile() throws IOException, InterruptedException { String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv"); - loadLocalFile(datasetName, tableName, csvPath); + loadLocalFile(datasetName, tableName, csvPath, FormatOptions.csv()); } - public static void loadLocalFile(String datasetName, String tableName, Path csvPath) + public static void loadLocalFile(String datasetName, String tableName, Path csvPath, + FormatOptions formatOptions) throws IOException, InterruptedException { try { // Initialize client that will be used to send requests. This client only needs to be created @@ -54,7 +55,7 @@ public static void loadLocalFile(String datasetName, String tableName, Path csvP WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId) - .setFormatOptions(FormatOptions.csv()) + .setFormatOptions(formatOptions) .build(); // The location and JobName must be specified; other fields can be auto-detected. diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java index cbe612b32..32000c5ce 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java @@ -19,6 +19,9 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import org.junit.After; @@ -58,8 +61,13 @@ public void tearDown() { @Test public void testCreatePartitionedTable() { String tableName = "MY_PARTITIONED_TABLE"; + Schema schema = + Schema.of( + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("booleanField", StandardSQLTypeName.BOOL), + Field.of("dateField", StandardSQLTypeName.DATE)); - CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName); + CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName, schema); assertThat(bout.toString()).contains("Partitioned table created successfully"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java index e018384ed..fd28bc1cb 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java @@ -61,10 +61,12 @@ public void testExtractTableToJson() { String datasetName = "samples"; String tableName = "shakespeare"; String destinationUri = "gs://" + GCS_BUCKET + "/extractTest.csv"; + String dataFormat = "CSV"; // Extract table content to GCS in CSV format - ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri); + 
ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri, + dataFormat); assertThat(bout.toString()) - .contains("Table export successful. Check in GCS bucket for the CSV file."); + .contains("Table export successful. Check in GCS bucket for the " + dataFormat + " file."); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java index a1a8d5a6f..aefa2adba 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java @@ -20,6 +20,7 @@ import static junit.framework.TestCase.assertNotNull; import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FormatOptions; import com.google.cloud.bigquery.LegacySQLTypeName; import com.google.cloud.bigquery.Schema; import java.io.ByteArrayOutputStream; @@ -76,7 +77,7 @@ public void loadLocalFile() throws IOException, InterruptedException { Path csvPath = FileSystems.getDefault().getPath("src/test/resources", "bigquery_noheader.csv"); - LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath); + LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath, FormatOptions.csv()); assertThat(bout.toString()).contains("Successfully loaded"); From 117b49e92abcea275d8a98dde54f5563ec13135c Mon Sep 17 00:00:00 2001 From: Praful Makani Date: Thu, 28 May 2020 22:02:08 +0530 Subject: [PATCH 13/20] chore: cover code coverage (#395) Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [X] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/java-bigquery/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [X] Ensure the tests and linter pass - [X] Code coverage does not decrease (if any source code was changed) - [X] Appropriate docs were updated (if necessary) Fixes #330 --- .../cloud/bigquery/BigQueryImplTest.java | 258 +++++++++++++++++- .../cloud/bigquery/BigtableOptionsTest.java | 57 ++++ .../bigquery/ExternalTableDefinitionTest.java | 12 + .../com/google/cloud/bigquery/JobTest.java | 2 + .../bigquery/LoadJobConfigurationTest.java | 11 + .../bigquery/ModelTableDefinitionTest.java | 109 ++++++++ .../google/cloud/bigquery/PolicyTagsTest.java | 60 ++++ .../bigquery/QueryJobConfigurationTest.java | 42 +++ .../bigquery/QueryParameterValueTest.java | 29 ++ .../google/cloud/bigquery/QueryStageTest.java | 7 + .../cloud/bigquery/RoutineInfoTest.java | 1 + .../bigquery/StandardTableDefinitionTest.java | 13 + .../com/google/cloud/bigquery/TableTest.java | 15 + .../cloud/bigquery/TimePartitioningTest.java | 3 + .../cloud/bigquery/TimelineSampleTest.java | 10 + .../cloud/bigquery/ViewDefinitionTest.java | 16 +- 16 files changed, 643 insertions(+), 2 deletions(-) create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java index ac11f40d0..6435ff91a 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java @@ -53,6 +53,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import java.io.IOException; import java.math.BigInteger; import java.util.Collections; import java.util.List; @@ -78,6 +79,15 @@ public class BigQueryImplTest { private static final String JOB = "job"; private static final String OTHER_TABLE = "otherTable"; private static final String OTHER_DATASET = "otherDataset"; + private static final String ROUTINE = "routine"; + private static final RoutineId ROUTINE_ID = RoutineId.of(DATASET, ROUTINE); + private static final String ETAG = "etag"; + private static final String ROUTINE_TYPE = "SCALAR_FUNCTION"; + private static final Long CREATION_TIME = 10L; + private static final Long LAST_MODIFIED_TIME = 20L; + private static final String LANGUAGE = "SQL"; + private static final String UPLOAD_ID = "uploadid"; + private static final int MIN_CHUNK_SIZE = 256 * 1024; private static final List ACCESS_RULES = ImmutableList.of( Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER), @@ -439,6 +449,43 @@ public class BigQueryImplTest { BigQueryRpc.Option.PAGE_TOKEN, CURSOR, BigQueryRpc.Option.MAX_RESULTS, 0L); + private static final RoutineArgument ARG_1 = + RoutineArgument.newBuilder() + .setDataType(StandardSQLDataType.newBuilder("STRING").build()) + .setName("arg1") + .build(); + + private static final List ARGUMENT_LIST = ImmutableList.of(ARG_1); + + private static final StandardSQLDataType RETURN_TYPE = + StandardSQLDataType.newBuilder("FLOAT64").build(); + + private static final List IMPORTED_LIBRARIES = + ImmutableList.of("gs://foo", "gs://bar", "gs://baz"); + + private static final String BODY = "body"; + + private static final RoutineInfo ROUTINE_INFO = + 
RoutineInfo.newBuilder(ROUTINE_ID) + .setEtag(ETAG) + .setRoutineType(ROUTINE_TYPE) + .setCreationTime(CREATION_TIME) + .setLastModifiedTime(LAST_MODIFIED_TIME) + .setLanguage(LANGUAGE) + .setArguments(ARGUMENT_LIST) + .setReturnType(RETURN_TYPE) + .setImportedLibraries(IMPORTED_LIBRARIES) + .setBody(BODY) + .build(); + private static final WriteChannelConfiguration LOAD_CONFIGURATION = + WriteChannelConfiguration.newBuilder(TABLE_ID) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setWriteDisposition(JobInfo.WriteDisposition.WRITE_APPEND) + .setFormatOptions(FormatOptions.json()) + .setIgnoreUnknownValues(true) + .setMaxBadRecords(10) + .build(); + private BigQueryOptions options; private BigQueryRpcFactory rpcFactoryMock; private BigQueryRpc bigqueryRpcMock; @@ -446,6 +493,9 @@ public class BigQueryImplTest { @Captor private ArgumentCaptor> capturedOptions; @Captor private ArgumentCaptor jobCapture; + @Captor private ArgumentCaptor capturedBuffer; + + private TableDataWriteChannel writer; private BigQueryOptions createBigQueryOptionsForProject( String project, BigQueryRpcFactory rpcFactory) { @@ -535,6 +585,7 @@ public void testGetDatasetNotFoundWhenThrowIsDisabled() { @Test public void testGetDatasetNotFoundWhenThrowIsEnabled() { when(bigqueryRpcMock.getDataset(PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS)) + .thenReturn(null) .thenThrow(new BigQueryException(404, "Dataset not found")); options.setThrowNotFound(true); bigquery = options.getService(); @@ -782,6 +833,22 @@ public void testGetModel() { verify(bigqueryRpcMock).getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); } + @Test + public void testGetModelNotFoundWhenThrowIsEnabled() { + String expected = "Model not found"; + when(bigqueryRpcMock.getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) + .thenReturn(null) + .thenThrow(new BigQueryException(404, expected)); + options.setThrowNotFound(true); + bigquery = options.getService(); + try { + bigquery.getModel(DATASET, MODEL); + } catch (BigQueryException ex) { + assertEquals(expected, ex.getMessage()); + } + verify(bigqueryRpcMock).getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); + } + @Test public void testListPartition() { when(bigqueryRpcMock.getTable( @@ -811,6 +878,7 @@ public void testGetTableNotFoundWhenThrowIsDisabled() { @Test public void testGetTableNotFoundWhenThrowIsEnabled() { when(bigqueryRpcMock.getTable(PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS)) + .thenReturn(null) .thenThrow(new BigQueryException(404, "Table not found")); options.setThrowNotFound(true); bigquery = options.getService(); @@ -1001,6 +1069,22 @@ public void testListModels() { verify(bigqueryRpcMock).listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } + @Test + public void testListModelsWithModelId() { + bigquery = options.getService(); + ImmutableList modelList = + ImmutableList.of( + new Model(bigquery, new ModelInfo.BuilderImpl(MODEL_INFO_WITH_PROJECT)), + new Model(bigquery, new ModelInfo.BuilderImpl(OTHER_MODEL_INFO))); + Tuple> result = + Tuple.of(CURSOR, Iterables.transform(modelList, ModelInfo.TO_PB_FUNCTION)); + when(bigqueryRpcMock.listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + Page page = bigquery.listModels(DatasetId.of(DATASET)); + assertEquals(CURSOR, page.getNextPageToken()); + assertArrayEquals(modelList.toArray(), Iterables.toArray(page.getValues(), Model.class)); + verify(bigqueryRpcMock).listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + } + @Test public void testDeleteTable() { 
when(bigqueryRpcMock.deleteTable(PROJECT, DATASET, TABLE)).thenReturn(true); @@ -1046,6 +1130,24 @@ public void testDeleteModel() { verify(bigqueryRpcMock).deleteModel(PROJECT, DATASET, MODEL); } + @Test + public void testUpdateModel() { + ModelInfo updateModelInfo = + MODEL_INFO_WITH_PROJECT + .setProjectId(OTHER_PROJECT) + .toBuilder() + .setDescription("newDescription") + .build(); + when(bigqueryRpcMock.patch(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(updateModelInfo.toPb()); + BigQueryOptions bigQueryOptions = + createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Model actualModel = bigquery.update(updateModelInfo); + assertEquals(new Model(bigquery, new ModelInfo.BuilderImpl(updateModelInfo)), actualModel); + verify(bigqueryRpcMock).patch(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS); + } + @Test public void testUpdateTable() { TableInfo updatedTableInfo = @@ -1487,6 +1589,7 @@ public void testGetJobNotFoundWhenThrowIsDisabled() { @Test public void testGetJobNotFoundWhenThrowIsEnabled() { when(bigqueryRpcMock.getJob(PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS)) + .thenReturn(null) .thenThrow(new BigQueryException(404, "Job not found")); options.setThrowNotFound(true); bigquery = options.getService(); @@ -1900,7 +2003,7 @@ public void testGetQueryResultsWithOptions() { } @Test - public void testRetryableException() { + public void testGetDatasetRetryableException() { when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(500, "InternalError")) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); @@ -1971,4 +2074,157 @@ public void testQueryDryRun() throws Exception { Assert.assertNotNull(ex.getMessage()); } } + + @Test + public void testCreateRoutine() { + RoutineInfo routineInfo = ROUTINE_INFO.setProjectId(OTHER_PROJECT); + when(bigqueryRpcMock.create(routineInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(routineInfo.toPb()); + BigQueryOptions bigQueryOptions = + createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Routine actualRoutine = bigquery.create(routineInfo); + assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(routineInfo)), actualRoutine); + verify(bigqueryRpcMock).create(routineInfo.toPb(), EMPTY_RPC_OPTIONS); + } + + @Test + public void testGetRoutine() { + when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + .thenReturn(ROUTINE_INFO.toPb()); + bigquery = options.getService(); + Routine routine = bigquery.getRoutine(DATASET, ROUTINE); + assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)), routine); + verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + } + + @Test + public void testGetRoutineWithRountineId() { + when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + .thenReturn(ROUTINE_INFO.toPb()); + bigquery = options.getService(); + Routine routine = bigquery.getRoutine(ROUTINE_ID); + assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)), routine); + verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + } + + @Test + public void testGetRoutineWithEnabledThrowNotFoundException() { + when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + .thenReturn(null) + .thenThrow(new BigQueryException(404, "Routine not found")); + options.setThrowNotFound(true); + bigquery = options.getService(); + try { + Routine 
routine = bigquery.getRoutine(ROUTINE_ID); + fail(); + } catch (BigQueryException ex) { + assertEquals("Routine not found", ex.getMessage()); + } + verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + } + + @Test + public void testUpdateRoutine() { + RoutineInfo updatedRoutineInfo = + ROUTINE_INFO + .setProjectId(OTHER_PROJECT) + .toBuilder() + .setDescription("newDescription") + .build(); + when(bigqueryRpcMock.update(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(updatedRoutineInfo.toPb()); + BigQueryOptions bigQueryOptions = + createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Routine routine = bigquery.update(updatedRoutineInfo); + assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(updatedRoutineInfo)), routine); + verify(bigqueryRpcMock).update(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS); + } + + @Test + public void testListRoutines() { + bigquery = options.getService(); + ImmutableList routineList = + ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); + Tuple> result = + Tuple.of(CURSOR, Iterables.transform(routineList, RoutineInfo.TO_PB_FUNCTION)); + when(bigqueryRpcMock.listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + Page page = bigquery.listRoutines(DATASET); + assertEquals(CURSOR, page.getNextPageToken()); + assertArrayEquals(routineList.toArray(), Iterables.toArray(page.getValues(), Routine.class)); + verify(bigqueryRpcMock).listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + } + + @Test + public void testListRoutinesWithDatasetId() { + bigquery = options.getService(); + ImmutableList routineList = + ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); + Tuple> result = + Tuple.of(CURSOR, Iterables.transform(routineList, RoutineInfo.TO_PB_FUNCTION)); + when(bigqueryRpcMock.listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + Page page = bigquery.listRoutines(DatasetId.of(PROJECT, DATASET)); + assertEquals(CURSOR, page.getNextPageToken()); + assertArrayEquals(routineList.toArray(), Iterables.toArray(page.getValues(), Routine.class)); + verify(bigqueryRpcMock).listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + } + + @Test + public void testDeleteRoutine() { + when(bigqueryRpcMock.deleteRoutine(PROJECT, DATASET, ROUTINE)).thenReturn(true); + bigquery = options.getService(); + assertTrue(bigquery.delete(ROUTINE_ID)); + verify(bigqueryRpcMock).deleteRoutine(PROJECT, DATASET, ROUTINE); + } + + @Test + public void testWriteWithJob() throws IOException { + bigquery = options.getService(); + Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); + when(bigqueryRpcMock.open( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb()))) + .thenReturn(UPLOAD_ID); + when(bigqueryRpcMock.write( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) + .thenReturn(job.toPb()); + writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); + writer.close(); + assertEquals(job, writer.getJob()); + bigquery.writer(JOB_INFO.getJobId(), LOAD_CONFIGURATION); + verify(bigqueryRpcMock) + .open( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb())); + verify(bigqueryRpcMock) + .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), 
eq(true)); + } + + @Test + public void testWriteChannel() throws IOException { + bigquery = options.getService(); + Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); + when(bigqueryRpcMock.open( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb()))) + .thenReturn(UPLOAD_ID); + when(bigqueryRpcMock.write( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) + .thenReturn(job.toPb()); + writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); + writer.close(); + assertEquals(job, writer.getJob()); + bigquery.writer(LOAD_CONFIGURATION); + verify(bigqueryRpcMock) + .open( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb())); + verify(bigqueryRpcMock) + .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java index 51d27b793..88fa1595e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java @@ -18,6 +18,7 @@ import static com.google.common.truth.Truth.assertThat; import com.google.common.collect.ImmutableList; +import org.junit.Assert; import org.junit.Test; public class BigtableOptionsTest { @@ -61,6 +62,8 @@ public void testConstructors() { assertThat(COL1.getOnlyReadLatest()).isEqualTo(true); assertThat(COL1.getEncoding()).isEqualTo("BINARY"); assertThat(COL1.getType()).isEqualTo("BYTES"); + assertThat(COL1.equals(COL1)).isTrue(); + assertThat(COL1).isNotEqualTo(TESTFAMILY); // family assertThat(TESTFAMILY.getFamilyID()).isEqualTo("fooFamily"); @@ -73,6 +76,50 @@ public void testConstructors() { assertThat(OPTIONS.getIgnoreUnspecifiedColumnFamilies()).isEqualTo(true); assertThat(OPTIONS.getReadRowkeyAsString()).isEqualTo(true); assertThat(OPTIONS.getColumnFamilies()).isEqualTo(ImmutableList.of(TESTFAMILY)); + compareBigtableOptions(OPTIONS, OPTIONS.toBuilder().build()); + } + + @Test + public void testNullPointerException() { + try { + BigtableColumnFamily.newBuilder().setFamilyID(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex.getMessage()).isNotNull(); + } + try { + BigtableColumnFamily.newBuilder().setColumns(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex.getMessage()).isNotNull(); + } + try { + BigtableColumnFamily.newBuilder().setEncoding(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex.getMessage()).isNotNull(); + } + try { + BigtableColumnFamily.newBuilder().setOnlyReadLatest(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex.getMessage()).isNotNull(); + } + try { + BigtableColumnFamily.newBuilder().setType(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex.getMessage()).isNotNull(); + } + } + + @Test + public void testIllegalStateException() { + try { + BigtableColumnFamily.newBuilder().build(); + } catch (IllegalStateException ex) { + assertThat(ex.getMessage()).isNotNull(); + } } @Test @@ -86,6 +133,10 @@ public void testToAndFromPb() { public void testEquals() { compareBigtableColumn(COL1, COL1); 
compareBigtableColumnFamily(TESTFAMILY, TESTFAMILY); + assertThat(TESTFAMILY.equals(TESTFAMILY)).isTrue(); + assertThat(TESTFAMILY).isNotEqualTo(COL1); + assertThat(OPTIONS.equals(OPTIONS)).isTrue(); + assertThat(OPTIONS).isNotEqualTo(TESTFAMILY); compareBigtableOptions(OPTIONS, OPTIONS); } @@ -96,6 +147,8 @@ private void compareBigtableColumn(BigtableColumn expected, BigtableColumn value assertThat(expected.getQualifierEncoded()).isEqualTo(value.getQualifierEncoded()); assertThat(expected.getOnlyReadLatest()).isEqualTo(value.getOnlyReadLatest()); assertThat(expected.getType()).isEqualTo(value.getType()); + assertThat(expected.toString()).isEqualTo(value.toString()); + assertThat(expected.hashCode()).isEqualTo(value.hashCode()); } private void compareBigtableColumnFamily( @@ -106,6 +159,8 @@ private void compareBigtableColumnFamily( assertThat(expected.getColumns()).isEqualTo(value.getColumns()); assertThat(expected.getEncoding()).isEqualTo(value.getEncoding()); assertThat(expected.getType()).isEqualTo(value.getType()); + assertThat(expected.toString()).isEqualTo(value.toString()); + assertThat(expected.hashCode()).isEqualTo(value.hashCode()); } private void compareBigtableOptions(BigtableOptions expected, BigtableOptions value) { @@ -114,5 +169,7 @@ private void compareBigtableOptions(BigtableOptions expected, BigtableOptions va .isEqualTo(value.getIgnoreUnspecifiedColumnFamilies()); assertThat(expected.getReadRowkeyAsString()).isEqualTo(value.getReadRowkeyAsString()); assertThat(expected.getColumnFamilies()).isEqualTo(value.getColumnFamilies()); + assertThat(expected.hashCode()).isEqualTo(value.hashCode()); + assertThat(expected.toString()).isEqualTo(value.toString()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java index bd5a6cdfb..ea8ca1ada 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java @@ -17,6 +17,8 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; import com.google.common.collect.ImmutableList; import java.util.List; @@ -79,6 +81,15 @@ public void testToBuilderIncomplete() { assertEquals(externalTableDefinition, externalTableDefinition.toBuilder().build()); } + @Test + public void testTypeNullPointerException() { + try { + EXTERNAL_TABLE_DEFINITION.toBuilder().setType(null).build(); + } catch (NullPointerException ex) { + assertNotNull(ex.getMessage()); + } + } + @Test public void testBuilder() { assertEquals(TableDefinition.Type.EXTERNAL, EXTERNAL_TABLE_DEFINITION.getType()); @@ -90,6 +101,7 @@ public void testBuilder() { assertEquals(SOURCE_URIS, EXTERNAL_TABLE_DEFINITION.getSourceUris()); assertEquals(AUTODETECT, EXTERNAL_TABLE_DEFINITION.getAutodetect()); assertEquals(HIVE_PARTITIONING_OPTIONS, EXTERNAL_TABLE_DEFINITION.getHivePartitioningOptions()); + assertNotEquals(EXTERNAL_TABLE_DEFINITION, TableDefinition.Type.EXTERNAL); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java index 68ccfd627..1d34f4958 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java +++ 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java @@ -20,6 +20,7 @@ import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; @@ -471,6 +472,7 @@ public void testToAndFromPb() { @Test public void testToAndFromPbWithoutConfiguration() { + assertNotEquals(expectedJob, bigquery); compareJob(expectedJob, Job.fromPb(bigquery, expectedJob.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java index b64e3c451..9f42d62b7 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java @@ -67,6 +67,13 @@ public class LoadJobConfigurationTest { RangePartitioning.Range.newBuilder().setStart(1L).setInterval(2L).setEnd(10L).build(); private static final RangePartitioning RANGE_PARTITIONING = RangePartitioning.newBuilder().setField("IntegerField").setRange(RANGE).build(); + private static final String MODE = "STRING"; + private static final String SOURCE_URI_PREFIX = "gs://bucket/path_to_table"; + private static final HivePartitioningOptions HIVE_PARTITIONING_OPTIONS = + HivePartitioningOptions.newBuilder() + .setMode(MODE) + .setSourceUriPrefix(SOURCE_URI_PREFIX) + .build(); private static final LoadJobConfiguration LOAD_CONFIGURATION_CSV = LoadJobConfiguration.newBuilder(TABLE_ID, SOURCE_URIS) .setCreateDisposition(CREATE_DISPOSITION) @@ -83,6 +90,8 @@ public class LoadJobConfigurationTest { .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) .setRangePartitioning(RANGE_PARTITIONING) + .setNullMarker("nullMarker") + .setHivePartitioningOptions(HIVE_PARTITIONING_OPTIONS) .build(); private static final DatastoreBackupOptions BACKUP_OPTIONS = @@ -238,5 +247,7 @@ private void compareLoadJobConfiguration( assertEquals(expected.getLabels(), value.getLabels()); assertEquals(expected.getJobTimeoutMs(), value.getJobTimeoutMs()); assertEquals(expected.getRangePartitioning(), value.getRangePartitioning()); + assertEquals(expected.getNullMarker(), value.getNullMarker()); + assertEquals(expected.getHivePartitioningOptions(), value.getHivePartitioningOptions()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java new file mode 100644 index 000000000..62b2cfe7d --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java @@ -0,0 +1,109 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; + +import org.junit.Test; + +public class ModelTableDefinitionTest { + + private static final String LOCATION = "US"; + private static final Long NUM_BYTES = 14L; + private static final Field FIELD_SCHEMA1 = + Field.newBuilder("StringField", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.newBuilder("IntegerField", LegacySQLTypeName.INTEGER) + .setMode(Field.Mode.REPEATED) + .setDescription("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.newBuilder("RecordField", LegacySQLTypeName.RECORD, FIELD_SCHEMA1, FIELD_SCHEMA2) + .setMode(Field.Mode.REQUIRED) + .setDescription("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final ModelTableDefinition MODEL_TABLE_DEFINITION = + ModelTableDefinition.newBuilder() + .setSchema(TABLE_SCHEMA) + .setType(TableDefinition.Type.MODEL) + .setLocation(LOCATION) + .setNumBytes(NUM_BYTES) + .build(); + + @Test + public void testToBuilder() { + compareModelTableDefinition(MODEL_TABLE_DEFINITION, MODEL_TABLE_DEFINITION.toBuilder().build()); + } + + @Test + public void testTypeNullPointerException() { + try { + MODEL_TABLE_DEFINITION.toBuilder().setType(null).build(); + fail(); + } catch (NullPointerException ex) { + assertNotNull(ex.getMessage()); + } + } + + @Test + public void testToBuilderIncomplete() { + ModelTableDefinition modelTableDefinition = ModelTableDefinition.newBuilder().build(); + assertEquals(modelTableDefinition, modelTableDefinition.toBuilder().build()); + } + + @Test + public void testToAndFromPb() { + assertEquals( + MODEL_TABLE_DEFINITION, ModelTableDefinition.fromPb(MODEL_TABLE_DEFINITION.toPb())); + } + + @Test + public void testBuilder() { + assertEquals(MODEL_TABLE_DEFINITION.getSchema(), TABLE_SCHEMA); + assertEquals(MODEL_TABLE_DEFINITION.getType(), TableDefinition.Type.MODEL); + assertEquals(MODEL_TABLE_DEFINITION.getLocation(), LOCATION); + assertEquals(MODEL_TABLE_DEFINITION.getNumBytes(), NUM_BYTES); + } + + @Test + public void testEquals() { + assertEquals(MODEL_TABLE_DEFINITION, MODEL_TABLE_DEFINITION); + } + + @Test + public void testNotEquals() { + assertNotEquals(MODEL_TABLE_DEFINITION, LOCATION); + } + + private void compareModelTableDefinition( + ModelTableDefinition expected, ModelTableDefinition value) { + assertEquals(expected, value); + assertEquals(expected.getSchema(), value.getSchema()); + assertEquals(expected.getType(), value.getType()); + assertEquals(expected.getLocation(), value.getLocation()); + assertEquals(expected.getNumBytes(), value.getNumBytes()); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java new file mode 100644 index 000000000..2136b2dab --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java @@ -0,0 +1,60 @@ +/* + * Copyright 2020 Google 
LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +import com.google.common.collect.ImmutableList; +import java.util.List; +import org.junit.Test; + +public class PolicyTagsTest { + + private static final List POLICIES = ImmutableList.of("test/policy1", "test/policy2"); + private static final PolicyTags POLICY_TAGS = PolicyTags.newBuilder().setNames(POLICIES).build(); + + @Test + public void testToBuilder() { + comparePolicyTags(POLICY_TAGS, POLICY_TAGS.toBuilder().build()); + } + + @Test + public void testToBuilderIncomplete() { + PolicyTags policyTags = PolicyTags.newBuilder().build(); + assertEquals(policyTags, policyTags); + assertEquals(policyTags, policyTags.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(POLICIES, POLICY_TAGS.getNames()); + assertNotEquals(POLICY_TAGS, POLICIES); + } + + @Test + public void testFromAndPb() { + assertEquals(POLICY_TAGS, PolicyTags.fromPb(POLICY_TAGS.toPb())); + } + + private void comparePolicyTags(PolicyTags expected, PolicyTags value) { + assertEquals(expected, value); + assertEquals(expected.getNames(), value.getNames()); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java index 406d7edfa..0e892b6e1 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java @@ -80,6 +80,7 @@ public class QueryJobConfigurationTest { private static final boolean FLATTEN_RESULTS = true; private static final boolean USE_LEGACY_SQL = true; private static final Integer MAX_BILLING_TIER = 123; + private static final Long MAX_BYTES_BILL = 12345L; private static final List SCHEMA_UPDATE_OPTIONS = ImmutableList.of(SchemaUpdateOption.ALLOW_FIELD_RELAXATION); private static final List USER_DEFINED_FUNCTIONS = @@ -96,6 +97,14 @@ public class QueryJobConfigurationTest { RangePartitioning.Range.newBuilder().setStart(1L).setInterval(2L).setEnd(10L).build(); private static final RangePartitioning RANGE_PARTITIONING = RangePartitioning.newBuilder().setField("IntegerField").setRange(RANGE).build(); + private static final QueryParameterValue STRING_PARAMETER = + QueryParameterValue.string("stringValue"); + private static final QueryParameterValue TIMESTAMP_PARAMETER = + QueryParameterValue.timestamp("2014-01-01 07:00:00.000000+00:00"); + private static final List POSITIONAL_PARAMETER = + ImmutableList.of(STRING_PARAMETER, TIMESTAMP_PARAMETER); + private static final Map NAME_PARAMETER = + ImmutableMap.of("string", STRING_PARAMETER, "timestamp", 
TIMESTAMP_PARAMETER); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -111,6 +120,7 @@ public class QueryJobConfigurationTest { .setDryRun(true) .setUseLegacySql(USE_LEGACY_SQL) .setMaximumBillingTier(MAX_BILLING_TIER) + .setMaximumBytesBilled(MAX_BYTES_BILL) .setSchemaUpdateOptions(SCHEMA_UPDATE_OPTIONS) .setDestinationEncryptionConfiguration(JOB_ENCRYPTION_CONFIGURATION) .setTimePartitioning(TIME_PARTITIONING) @@ -119,6 +129,19 @@ public class QueryJobConfigurationTest { .setLabels(LABELS) .setRangePartitioning(RANGE_PARTITIONING) .setConnectionProperties(CONNECTION_PROPERTIES) + .setPositionalParameters(POSITIONAL_PARAMETER) + .build(); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_ADD_POSITIONAL_PARAMETER = + QUERY_JOB_CONFIGURATION + .toBuilder() + .setPositionalParameters(ImmutableList.of()) + .addPositionalParameter(STRING_PARAMETER) + .build(); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_SET_NAME_PARAMETER = + QUERY_JOB_CONFIGURATION + .toBuilder() + .setPositionalParameters(ImmutableList.of()) + .setNamedParameters(NAME_PARAMETER) .build(); @Test @@ -154,6 +177,8 @@ public void testToPbAndFromPb() { assertNotNull(QUERY_JOB_CONFIGURATION.getLabels()); assertNotNull(QUERY_JOB_CONFIGURATION.getRangePartitioning()); assertNotNull(QUERY_JOB_CONFIGURATION.getConnectionProperties()); + assertNotNull(QUERY_JOB_CONFIGURATION.getPositionalParameters()); + assertNotNull(QUERY_JOB_CONFIGURATION.getNamedParameters()); compareQueryJobConfiguration( QUERY_JOB_CONFIGURATION, QueryJobConfiguration.fromPb(QUERY_JOB_CONFIGURATION.toPb())); QueryJobConfiguration job = QueryJobConfiguration.of(QUERY); @@ -184,6 +209,20 @@ public void testGetType() { assertEquals(JobConfiguration.Type.QUERY, QUERY_JOB_CONFIGURATION.getType()); } + @Test + public void testPositionalParameter() { + compareQueryJobConfiguration( + QUERY_JOB_CONFIGURATION_ADD_POSITIONAL_PARAMETER, + QUERY_JOB_CONFIGURATION_ADD_POSITIONAL_PARAMETER.toBuilder().build()); + } + + @Test + public void testNamedParameter() { + compareQueryJobConfiguration( + QUERY_JOB_CONFIGURATION_SET_NAME_PARAMETER, + QUERY_JOB_CONFIGURATION_SET_NAME_PARAMETER.toBuilder().build()); + } + private void compareQueryJobConfiguration( QueryJobConfiguration expected, QueryJobConfiguration value) { assertEquals(expected, value); @@ -203,6 +242,7 @@ private void compareQueryJobConfiguration( assertEquals(expected.getWriteDisposition(), value.getWriteDisposition()); assertEquals(expected.useLegacySql(), value.useLegacySql()); assertEquals(expected.getMaximumBillingTier(), value.getMaximumBillingTier()); + assertEquals(expected.getMaximumBytesBilled(), value.getMaximumBytesBilled()); assertEquals(expected.getSchemaUpdateOptions(), value.getSchemaUpdateOptions()); assertEquals( expected.getDestinationEncryptionConfiguration(), @@ -213,5 +253,7 @@ private void compareQueryJobConfiguration( assertEquals(expected.getLabels(), value.getLabels()); assertEquals(expected.getRangePartitioning(), value.getRangePartitioning()); assertEquals(expected.getConnectionProperties(), value.getConnectionProperties()); + assertEquals(expected.getPositionalParameters(), value.getPositionalParameters()); + assertEquals(expected.getNamedParameters(), value.getNamedParameters()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java index 930a29c40..09421565c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java @@ -30,6 +30,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import org.junit.Assert; import org.junit.Test; import org.threeten.bp.Instant; import org.threeten.bp.ZoneOffset; @@ -58,6 +59,34 @@ public class QueryParameterValueTest { .toFormatter() .withZone(ZoneOffset.UTC); + private static final QueryParameterValue QUERY_PARAMETER_VALUE = + QueryParameterValue.newBuilder() + .setType(StandardSQLTypeName.STRING) + .setValue("test-string") + .build(); + + @Test + public void testBuilder() { + QueryParameterValue value = QUERY_PARAMETER_VALUE.toBuilder().build(); + assertThat(value).isEqualTo(QUERY_PARAMETER_VALUE); + assertThat(value.getType()).isEqualTo(StandardSQLTypeName.STRING); + assertThat(value.getValue()).isEqualTo("test-string"); + assertThat(value.toString()).isEqualTo(QUERY_PARAMETER_VALUE.toString()); + assertThat(value.hashCode()).isEqualTo(QUERY_PARAMETER_VALUE.hashCode()); + assertThat(value.equals(value)).isTrue(); + assertThat(QUERY_PARAMETER_VALUE).isNotEqualTo(StandardSQLTypeName.STRING); + } + + @Test + public void testTypeNullPointerException() { + try { + QUERY_PARAMETER_VALUE.toBuilder().setType(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex).isNotNull(); + } + } + @Test public void testBool() { QueryParameterValue value = QueryParameterValue.bool(true); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java index 69edb3988..bc7d6083b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import com.google.api.services.bigquery.model.ExplainQueryStep; import com.google.cloud.bigquery.QueryStage.QueryStep; @@ -154,6 +155,12 @@ public void testEquals() { compareQueryStage(QUERY_STAGE, QUERY_STAGE); } + @Test + public void testNotEquals() { + assertNotEquals(QUERY_STAGE, QUERY_STEP1); + assertNotEquals(QUERY_STEP1, QUERY_STAGE); + } + private void compareQueryStage(QueryStage expected, QueryStage value) { assertEquals(expected, value); assertEquals(expected.getCompletedParallelInputs(), value.getCompletedParallelInputs()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java index d167858fc..ce613193c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java @@ -128,5 +128,6 @@ public void compareRoutineInfo(RoutineInfo expected, RoutineInfo value) { assertEquals(expected.getImportedLibraries(), value.getImportedLibraries()); assertEquals(expected.getBody(), value.getBody()); assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); } } diff --git 
a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java index 6742763cb..393b7fbc6 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java @@ -17,6 +17,8 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -95,6 +97,17 @@ public void testBuilder() { assertEquals(STREAMING_BUFFER, TABLE_DEFINITION.getStreamingBuffer()); assertEquals(TIME_PARTITIONING, TABLE_DEFINITION.getTimePartitioning()); assertEquals(CLUSTERING, TABLE_DEFINITION.getClustering()); + assertNotEquals(TABLE_DEFINITION, TableDefinition.Type.TABLE); + } + + @Test + public void testTypeNullPointerException() { + try { + TABLE_DEFINITION.toBuilder().setType(null).build(); + fail(); + } catch (NullPointerException ex) { + assertNotNull(ex.getMessage()); + } } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java index 860230f30..b93ed770b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java @@ -32,6 +32,7 @@ import com.google.cloud.bigquery.InsertAllRequest.RowToInsert; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import java.math.BigInteger; import java.util.List; import org.junit.Before; import org.junit.Rule; @@ -51,8 +52,14 @@ public class TableTest { private static final Long CREATION_TIME = 10L; private static final Long EXPIRATION_TIME = 100L; private static final Long LAST_MODIFIED_TIME = 20L; + private static final Long NUM_BYTES = 42L; + private static final Long NUM_LONG_TERM_BYTES = 21L; + private static final Long NUM_ROWS = 43L; private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); private static final TableId TABLE_ID2 = TableId.of("dataset", "table2"); + private static final Boolean REQUIRE_PARTITION_FILTER = true; + private static final EncryptionConfiguration ENCRYPTION_CONFIGURATION = + EncryptionConfiguration.newBuilder().setKmsKeyName("KMS_KEY_1").build(); private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = CopyJobConfiguration.of(TABLE_ID2, TABLE_ID1); private static final JobInfo COPY_JOB_INFO = JobInfo.of(COPY_JOB_CONFIGURATION); @@ -118,6 +125,10 @@ public void testBuilder() { .setGeneratedId(GENERATED_ID) .setLastModifiedTime(LAST_MODIFIED_TIME) .setSelfLink(SELF_LINK) + .setNumBytes(NUM_BYTES) + .setNumLongTermBytes(NUM_LONG_TERM_BYTES) + .setNumRows(BigInteger.valueOf(NUM_ROWS)) + .setRequirePartitionFilter(REQUIRE_PARTITION_FILTER) .build(); assertEquals(TABLE_ID1, builtTable.getTableId()); assertEquals(CREATION_TIME, builtTable.getCreationTime()); @@ -129,6 +140,10 @@ public void testBuilder() { assertEquals(LAST_MODIFIED_TIME, builtTable.getLastModifiedTime()); assertEquals(TABLE_DEFINITION, builtTable.getDefinition()); assertEquals(SELF_LINK, builtTable.getSelfLink()); + assertEquals(NUM_BYTES, builtTable.getNumBytes()); + 
assertEquals(NUM_LONG_TERM_BYTES, builtTable.getNumLongTermBytes()); + assertEquals(BigInteger.valueOf(NUM_ROWS), builtTable.getNumRows()); + assertEquals(REQUIRE_PARTITION_FILTER, builtTable.getRequirePartitionFilter()); assertSame(bigquery, builtTable.getBigQuery()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java index 10b303f21..ee7f5dde6 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -68,6 +69,8 @@ public void testBuilder() { assertEquals(100, (long) partitioning.getExpirationMs()); assertNull(partitioning.getRequirePartitionFilter()); assertNull(partitioning.getField()); + compareTimePartitioning(TIME_PARTITIONING_HOUR, TIME_PARTITIONING_HOUR.toBuilder().build()); + assertNotEquals(TIME_PARTITIONING_DAY, TYPE_DAY); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java index 0d39fa25b..1d888f00d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java @@ -16,6 +16,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import org.junit.Test; @@ -36,6 +37,12 @@ public class TimelineSampleTest { .setSlotMillis(SLOT_MILLIS) .build(); + @Test + public void testToBuilder() { + TimelineSample sample = TIMELINE_SAMPLE.toBuilder().setPendingUnits(15L).build(); + assertEquals(15L, sample.getPendingUnits().longValue()); + } + @Test public void testTimelineSampleBuilder() { assertEquals(ELAPSED_MS, TIMELINE_SAMPLE.getElapsedMs()); @@ -48,5 +55,8 @@ public void testTimelineSampleBuilder() { @Test public void TestEquals() { assertEquals(TIMELINE_SAMPLE, TIMELINE_SAMPLE); + assertNotEquals(TIMELINE_SAMPLE, SLOT_MILLIS); + assertEquals(TIMELINE_SAMPLE.toString(), TIMELINE_SAMPLE.toString()); + assertEquals(TIMELINE_SAMPLE.hashCode(), TIMELINE_SAMPLE.hashCode()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java index fa4f82cd0..d60c7be2b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java @@ -18,8 +18,11 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import com.google.common.collect.ImmutableList; import java.util.List; @@ -31,7 +34,7 @@ public class ViewDefinitionTest { private static final List USER_DEFINED_FUNCTIONS = ImmutableList.of(UserDefinedFunction.inline("Function"), 
UserDefinedFunction.fromUri("URI")); private static final ViewDefinition VIEW_DEFINITION = - ViewDefinition.newBuilder(VIEW_QUERY, USER_DEFINED_FUNCTIONS).build(); + ViewDefinition.newBuilder(VIEW_QUERY, USER_DEFINED_FUNCTIONS).setSchema(Schema.of()).build(); @Test public void testToBuilder() { @@ -43,6 +46,17 @@ public void testToBuilder() { viewDefinition = viewDefinition.toBuilder().setUseLegacySql(true).build(); assertTrue(viewDefinition.useLegacySql()); + assertNotEquals(VIEW_DEFINITION, VIEW_QUERY); + } + + @Test + public void testTypeNullPointerException() { + try { + VIEW_DEFINITION.toBuilder().setType(null).build(); + fail(); + } catch (NullPointerException ex) { + assertNotNull(ex.getMessage()); + } } @Test From 64847d1e5137e811e7f462fc058908fa9fcc55fb Mon Sep 17 00:00:00 2001 From: Irvi Aini <7439590+irvifa@users.noreply.github.com> Date: Fri, 29 May 2020 01:19:15 +0700 Subject: [PATCH 14/20] docs(samples): Fix flaky test (#397) We found that UpdateTableExpirationIT is flaky since during table creation the table is not created. --- .../example/bigquery/UpdateTableExpirationIT.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java index 428317029..73ca93c06 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java @@ -19,8 +19,12 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.util.UUID; import java.util.concurrent.TimeUnit; import org.junit.After; import org.junit.Before; @@ -58,8 +62,13 @@ public void tearDown() { @Test public void updateTableExpiration() { - String tableName = "update_expiration_table"; - CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, null); + String suffix = UUID.randomUUID().toString().replace('-', '_'); + String tableName = "update_expiration_table_" + suffix; + Schema schema = + Schema.of( + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("booleanField", StandardSQLTypeName.BOOL)); + CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema); Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, tableName, newExpiration); assertThat(bout.toString()).contains("Table expiration updated successfully"); From bafd8ae780121ee968bca7b3eb68bb0f0a438124 Mon Sep 17 00:00:00 2001 From: Stephanie Wang Date: Fri, 29 May 2020 11:59:25 -0400 Subject: [PATCH 15/20] =?UTF-8?q?deps:=20remove=20google-cloud-core=20clas?= =?UTF-8?q?siier=3Dtest=20since=20it=20is=20managed=20by=20=E2=80=A6=20(#3?= =?UTF-8?q?96)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * deps: remove google-cloud-core classiier=test since it is managed by shared-deps * bump shared-deps to 0.7.0 --- google-cloud-bigquery/pom.xml | 1 - pom.xml | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 86ef49ea0..0a6f2327e 100644 --- a/google-cloud-bigquery/pom.xml +++ 
b/google-cloud-bigquery/pom.xml @@ -60,7 +60,6 @@ com.google.cloud google-cloud-core - ${google.core.version} tests test diff --git a/pom.xml b/pom.xml index 4e5fbaa66..532dd4825 100644 --- a/pom.xml +++ b/pom.xml @@ -63,7 +63,6 @@ UTF-8 github google-cloud-bigquery-parent - 1.93.4 1.3.2 v2-rev20200429-1.30.9 @@ -74,7 +73,7 @@ com.google.cloud google-cloud-shared-dependencies - 0.6.0 + 0.7.0 pom import From c836961259808c534614270b8273cec02ca6d173 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 29 May 2020 20:38:19 +0200 Subject: [PATCH 16/20] chore(deps): update dependency com.google.cloud:libraries-bom to v5.5.0 (#400) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [com.google.cloud:libraries-bom](https://togithub.com/GoogleCloudPlatform/cloud-opensource-java) | minor | `5.4.0` -> `5.5.0` | --- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#googleapis/java-bigquery). --- samples/snippets/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 8c070dfb0..6b6788b2e 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -44,7 +44,7 @@ com.google.cloud libraries-bom - 5.4.0 + 5.5.0 pom import From 69b9d8417d5b910286aaa855f4a83fc151a81cec Mon Sep 17 00:00:00 2001 From: Stephanie Wang Date: Fri, 29 May 2020 15:53:32 -0400 Subject: [PATCH 17/20] docs(samples): update revierwers for samples module changes (#401) * docs(samples): update revierwers for samples module changes * Update .github/CODEOWNERS Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f953c5f0f..aef889d3d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,4 +5,4 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # The java-samples-reviewers team is the default owner for samples changes -samples/ @stephaniewang526 @GoogleCloudPlatform/java-samples-reviewers \ No newline at end of file +samples/**/*.java @stephaniewang526 @googleapis/java-samples-reviewers From db4158186b99d0bed68fd70bef5918b1820e5dd1 Mon Sep 17 00:00:00 2001 From: Stephanie Wang Date: Fri, 29 May 2020 16:31:36 -0400 Subject: [PATCH 18/20] deps: restrict -annotation deps (#402) --- google-cloud-bigquery/pom.xml | 18 +----------------- pom.xml | 2 -- 2 files changed, 1 insertion(+), 19 deletions(-) diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 0a6f2327e..2e3df3aa6 100644 --- a/google-cloud-bigquery/pom.xml +++ 
b/google-cloud-bigquery/pom.xml @@ -36,6 +36,7 @@ com.google.auto.value auto-value-annotations + provided com.google.http-client @@ -134,21 +135,4 @@ - - - - java9 - - [9,) - - - - javax.annotation - javax.annotation-api - - ${javax.annotations.version} - - - - diff --git a/pom.xml b/pom.xml index 532dd4825..0edf79bd2 100644 --- a/pom.xml +++ b/pom.xml @@ -63,7 +63,6 @@ UTF-8 github google-cloud-bigquery-parent - 1.3.2 v2-rev20200429-1.30.9 @@ -217,5 +216,4 @@ - From 6583139d9b207fe21f674b376130e8eb4c1e2e51 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Sun, 31 May 2020 22:41:17 -0700 Subject: [PATCH 19/20] changes without context (#403) autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. --- README.md | 2 +- synth.metadata | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index bf6d0059a..cfba6fbc9 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ See https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google com.google.cloud libraries-bom - 5.4.0 + 5.5.0 pom import diff --git a/synth.metadata b/synth.metadata index e6a7e94a2..bdc51594c 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/java-bigquery.git", - "sha": "0e91af23f63eb2b9af578bd82952d3480e751f9b" + "sha": "db4158186b99d0bed68fd70bef5918b1820e5dd1" } }, { From f98cf756494d45e62923f60c3dfc434a92181405 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 1 Jun 2020 20:58:07 +0000 Subject: [PATCH 20/20] chore: release 1.116.1 (#389) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release \*beep\* \*boop\* --- ### [1.116.1](https://www.github.com/googleapis/java-bigquery/compare/v1.116.0...v1.116.1) (2020-06-01) ### Dependencies * remove google-cloud-core classiier=test since it is managed by … ([#396](https://www.github.com/googleapis/java-bigquery/issues/396)) ([bafd8ae](https://www.github.com/googleapis/java-bigquery/commit/bafd8ae780121ee968bca7b3eb68bb0f0a438124)) * restrict -annotation deps ([#402](https://www.github.com/googleapis/java-bigquery/issues/402)) ([db41581](https://www.github.com/googleapis/java-bigquery/commit/db4158186b99d0bed68fd70bef5918b1820e5dd1)) * update dependency com.google.cloud:google-cloud-shared-dependencies to v0.6.0 ([#392](https://www.github.com/googleapis/java-bigquery/issues/392)) ([3f13ccb](https://www.github.com/googleapis/java-bigquery/commit/3f13ccbbf1ca8f179c48a69c8e456bdf45c43af5)) * update shared-deps and add back certain test deps ([#390](https://www.github.com/googleapis/java-bigquery/issues/390)) ([7e21be2](https://www.github.com/googleapis/java-bigquery/commit/7e21be2506cdaf065cd32e3aed403890929b93e7)) ### Documentation * 
**samples:** Convert tightly coupled local variable inside of method into method arguments ([#386](https://www.github.com/googleapis/java-bigquery/issues/386)) ([7c02cd3](https://www.github.com/googleapis/java-bigquery/commit/7c02cd3ca7238d24bb2914ae2262a3b2e2d4bda7)), closes [#387](https://www.github.com/googleapis/java-bigquery/issues/387) * **samples:** Convert tightly coupled local variable involved options inside of method into method arguments ([#393](https://www.github.com/googleapis/java-bigquery/issues/393)) ([a11f3cb](https://www.github.com/googleapis/java-bigquery/commit/a11f3cb4733215a9c4a65936b05f9cc2ed75656f)), closes [#394](https://www.github.com/googleapis/java-bigquery/issues/394) * **samples:** Fix flaky test ([#397](https://www.github.com/googleapis/java-bigquery/issues/397)) ([64847d1](https://www.github.com/googleapis/java-bigquery/commit/64847d1e5137e811e7f462fc058908fa9fcc55fb)) * **samples:** update revierwers for samples module changes ([#401](https://www.github.com/googleapis/java-bigquery/issues/401)) ([69b9d84](https://www.github.com/googleapis/java-bigquery/commit/69b9d8417d5b910286aaa855f4a83fc151a81cec)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
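
The samples fix for the flaky `UpdateTableExpirationIT` referenced in the notes above comes down to two ingredients: a per-run unique table name and an explicit schema instead of `null`. A minimal sketch of that pattern against the core client, where the dataset name, the use of `TableInfo.of`/`StandardTableDefinition.of`, and the default-credentials setup are illustrative assumptions (the IT itself goes through the `CreateTable` sample helper):

```java
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
import java.util.UUID;

public class CreateUniqueTestTable {
  public static void main(String[] args) {
    // Client initialized with application-default credentials (assumed environment).
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

    // A per-run suffix keeps repeated or concurrent test runs from colliding on one table name.
    String suffix = UUID.randomUUID().toString().replace('-', '_');
    TableId tableId = TableId.of("MY_DATASET_NAME", "update_expiration_table_" + suffix);

    // The fix also supplies a concrete schema instead of null, so creation succeeds before the
    // expiration update runs against the table.
    Schema schema =
        Schema.of(
            Field.of("stringField", StandardSQLTypeName.STRING),
            Field.of("booleanField", StandardSQLTypeName.BOOL));

    bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));
    System.out.println("Created test table " + tableId.getTable());
  }
}
```
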
--- CHANGELOG.md | 18 ++++++++++++++++++ README.md | 4 ++-- google-cloud-bigquery/pom.xml | 4 ++-- pom.xml | 4 ++-- samples/snapshot/pom.xml | 2 +- versions.txt | 2 +- 6 files changed, 26 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 28e65ffe1..510606eea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +### [1.116.1](https://www.github.com/googleapis/java-bigquery/compare/v1.116.0...v1.116.1) (2020-06-01) + + +### Dependencies + +* remove google-cloud-core classiier=test since it is managed by … ([#396](https://www.github.com/googleapis/java-bigquery/issues/396)) ([bafd8ae](https://www.github.com/googleapis/java-bigquery/commit/bafd8ae780121ee968bca7b3eb68bb0f0a438124)) +* restrict -annotation deps ([#402](https://www.github.com/googleapis/java-bigquery/issues/402)) ([db41581](https://www.github.com/googleapis/java-bigquery/commit/db4158186b99d0bed68fd70bef5918b1820e5dd1)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.6.0 ([#392](https://www.github.com/googleapis/java-bigquery/issues/392)) ([3f13ccb](https://www.github.com/googleapis/java-bigquery/commit/3f13ccbbf1ca8f179c48a69c8e456bdf45c43af5)) +* update shared-deps and add back certain test deps ([#390](https://www.github.com/googleapis/java-bigquery/issues/390)) ([7e21be2](https://www.github.com/googleapis/java-bigquery/commit/7e21be2506cdaf065cd32e3aed403890929b93e7)) + + +### Documentation + +* **samples:** Convert tightly coupled local variable inside of method into method arguments ([#386](https://www.github.com/googleapis/java-bigquery/issues/386)) ([7c02cd3](https://www.github.com/googleapis/java-bigquery/commit/7c02cd3ca7238d24bb2914ae2262a3b2e2d4bda7)), closes [#387](https://www.github.com/googleapis/java-bigquery/issues/387) +* **samples:** Convert tightly coupled local variable involved options inside of method into method arguments ([#393](https://www.github.com/googleapis/java-bigquery/issues/393)) ([a11f3cb](https://www.github.com/googleapis/java-bigquery/commit/a11f3cb4733215a9c4a65936b05f9cc2ed75656f)), closes [#394](https://www.github.com/googleapis/java-bigquery/issues/394) +* **samples:** Fix flaky test ([#397](https://www.github.com/googleapis/java-bigquery/issues/397)) ([64847d1](https://www.github.com/googleapis/java-bigquery/commit/64847d1e5137e811e7f462fc058908fa9fcc55fb)) +* **samples:** update revierwers for samples module changes ([#401](https://www.github.com/googleapis/java-bigquery/issues/401)) 
([69b9d84](https://www.github.com/googleapis/java-bigquery/commit/69b9d8417d5b910286aaa855f4a83fc151a81cec)) + ## [1.116.0](https://www.github.com/googleapis/java-bigquery/compare/v1.115.1...v1.116.0) (2020-05-21) diff --git a/README.md b/README.md index cfba6fbc9..a74ce77b3 100644 --- a/README.md +++ b/README.md @@ -49,11 +49,11 @@ If you are using Maven without BOM, add this to your dependencies: If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.cloud:google-cloud-bigquery:1.116.0' +compile 'com.google.cloud:google-cloud-bigquery:1.116.1' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "1.116.0" +libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "1.116.1" ``` [//]: # ({x-version-update-end}) diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 2e3df3aa6..b1f467a26 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 1.116.1-SNAPSHOT + 1.116.1 jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 1.116.1-SNAPSHOT + 1.116.1 google-cloud-bigquery diff --git a/pom.xml b/pom.xml index 0edf79bd2..8fdc5aff1 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 1.116.1-SNAPSHOT + 1.116.1 BigQuery Parent https://github.com/googleapis/java-bigquery @@ -86,7 +86,7 @@ com.google.cloud google-cloud-bigquery - 1.116.1-SNAPSHOT + 1.116.1 diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 974660723..f84fbf44b 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -44,7 +44,7 @@ com.google.cloud google-cloud-bigquery - 1.116.1-SNAPSHOT + 1.116.1 diff --git a/versions.txt b/versions.txt index 8d27316d4..110d6881d 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:1.116.0:1.116.1-SNAPSHOT \ No newline at end of file +google-cloud-bigquery:1.116.1:1.116.1 \ No newline at end of file
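
The `BigQueryImplTest` additions earlier in this series (`testCreateRoutine`, `testGetRoutine`, `testUpdateRoutine`, `testDeleteRoutine`) exercise the client's routine surface end to end. A minimal sketch of that round trip, assuming application-default credentials; the dataset and routine names, the `INT64` argument, and the `x + 1` SQL body are placeholders rather than values from the patch:

```java
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Routine;
import com.google.cloud.bigquery.RoutineArgument;
import com.google.cloud.bigquery.RoutineId;
import com.google.cloud.bigquery.RoutineInfo;
import com.google.cloud.bigquery.StandardSQLDataType;
import com.google.common.collect.ImmutableList;

public class RoutineRoundTrip {
  public static void main(String[] args) {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    RoutineId routineId = RoutineId.of("MY_DATASET_NAME", "add_one");

    // Same builder shape as the ROUTINE_INFO fixture in BigQueryImplTest, with a scalar
    // SQL function definition in place of the test constants.
    RoutineInfo routineInfo =
        RoutineInfo.newBuilder(routineId)
            .setRoutineType("SCALAR_FUNCTION")
            .setLanguage("SQL")
            .setArguments(
                ImmutableList.of(
                    RoutineArgument.newBuilder()
                        .setName("x")
                        .setDataType(StandardSQLDataType.newBuilder("INT64").build())
                        .build()))
            .setReturnType(StandardSQLDataType.newBuilder("INT64").build())
            .setBody("x + 1")
            .build();

    bigquery.create(routineInfo); // covered by testCreateRoutine
    Routine fetched = bigquery.getRoutine(routineId); // covered by testGetRoutineWithRountineId
    System.out.println("Created routine of type " + fetched.getRoutineType());

    bigquery.delete(routineId); // covered by testDeleteRoutine
  }
}
```

As with tables and models, `delete(RoutineId)` returns a boolean indicating whether anything was removed, which is what the new `testDeleteRoutine` asserts.
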