diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f953c5f0f..aef889d3d 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,4 +5,4 @@
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
# The java-samples-reviewers team is the default owner for samples changes
-samples/ @stephaniewang526 @GoogleCloudPlatform/java-samples-reviewers
\ No newline at end of file
+samples/**/*.java @stephaniewang526 @googleapis/java-samples-reviewers
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 28e65ffe1..510606eea 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,23 @@
# Changelog
+### [1.116.1](https://www.github.com/googleapis/java-bigquery/compare/v1.116.0...v1.116.1) (2020-06-01)
+
+
+### Dependencies
+
+* remove google-cloud-core classifier=test since it is managed by … ([#396](https://www.github.com/googleapis/java-bigquery/issues/396)) ([bafd8ae](https://www.github.com/googleapis/java-bigquery/commit/bafd8ae780121ee968bca7b3eb68bb0f0a438124))
+* restrict -annotation deps ([#402](https://www.github.com/googleapis/java-bigquery/issues/402)) ([db41581](https://www.github.com/googleapis/java-bigquery/commit/db4158186b99d0bed68fd70bef5918b1820e5dd1))
+* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.6.0 ([#392](https://www.github.com/googleapis/java-bigquery/issues/392)) ([3f13ccb](https://www.github.com/googleapis/java-bigquery/commit/3f13ccbbf1ca8f179c48a69c8e456bdf45c43af5))
+* update shared-deps and add back certain test deps ([#390](https://www.github.com/googleapis/java-bigquery/issues/390)) ([7e21be2](https://www.github.com/googleapis/java-bigquery/commit/7e21be2506cdaf065cd32e3aed403890929b93e7))
+
+
+### Documentation
+
+* **samples:** Convert tightly coupled local variable inside of method into method arguments ([#386](https://www.github.com/googleapis/java-bigquery/issues/386)) ([7c02cd3](https://www.github.com/googleapis/java-bigquery/commit/7c02cd3ca7238d24bb2914ae2262a3b2e2d4bda7)), closes [#387](https://www.github.com/googleapis/java-bigquery/issues/387)
+* **samples:** Convert tightly coupled local variable involved options inside of method into method arguments ([#393](https://www.github.com/googleapis/java-bigquery/issues/393)) ([a11f3cb](https://www.github.com/googleapis/java-bigquery/commit/a11f3cb4733215a9c4a65936b05f9cc2ed75656f)), closes [#394](https://www.github.com/googleapis/java-bigquery/issues/394)
+* **samples:** Fix flaky test ([#397](https://www.github.com/googleapis/java-bigquery/issues/397)) ([64847d1](https://www.github.com/googleapis/java-bigquery/commit/64847d1e5137e811e7f462fc058908fa9fcc55fb))
+* **samples:** update reviewers for samples module changes ([#401](https://www.github.com/googleapis/java-bigquery/issues/401)) ([69b9d84](https://www.github.com/googleapis/java-bigquery/commit/69b9d8417d5b910286aaa855f4a83fc151a81cec))
+
## [1.116.0](https://www.github.com/googleapis/java-bigquery/compare/v1.115.1...v1.116.0) (2020-05-21)
diff --git a/README.md b/README.md
index 1f5d01b88..a74ce77b3 100644
--- a/README.md
+++ b/README.md
@@ -19,7 +19,7 @@ See https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google
com.google.cloud
libraries-bom
- 5.4.0
+ 5.5.0
pom
import
@@ -40,7 +40,7 @@ If you are using Maven without BOM, add this to your dependencies:
com.google.cloud
google-cloud-bigquery
- 1.115.1
+ 1.116.0
```
@@ -49,11 +49,11 @@ If you are using Maven without BOM, add this to your dependencies:
If you are using Gradle, add this to your dependencies
```Groovy
-compile 'com.google.cloud:google-cloud-bigquery:1.116.0'
+compile 'com.google.cloud:google-cloud-bigquery:1.116.1'
```
If you are using SBT, add this to your dependencies
```Scala
-libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "1.116.0"
+libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "1.116.1"
```
[//]: # ({x-version-update-end})
diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml
index d1b59b1a7..b1f467a26 100644
--- a/google-cloud-bigquery/pom.xml
+++ b/google-cloud-bigquery/pom.xml
@@ -3,7 +3,7 @@
4.0.0
com.google.cloud
google-cloud-bigquery
- 1.116.0
+ 1.116.1
jar
BigQuery
https://github.com/googleapis/java-bigquery
@@ -11,7 +11,7 @@
com.google.cloud
google-cloud-bigquery-parent
- 1.116.0
+ 1.116.1
google-cloud-bigquery
@@ -36,6 +36,7 @@
com.google.auto.value
auto-value-annotations
+ provided
com.google.http-client
@@ -60,7 +61,6 @@
com.google.cloud
google-cloud-core
- ${google.core.version}
tests
test
@@ -135,21 +135,4 @@
-
-
-
- java9
-
- [9,)
-
-
-
- javax.annotation
- javax.annotation-api
-
- ${javax.annotations.version}
-
-
-
-
diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java
index ac11f40d0..6435ff91a 100644
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java
@@ -53,6 +53,7 @@
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import java.io.IOException;
import java.math.BigInteger;
import java.util.Collections;
import java.util.List;
@@ -78,6 +79,15 @@ public class BigQueryImplTest {
private static final String JOB = "job";
private static final String OTHER_TABLE = "otherTable";
private static final String OTHER_DATASET = "otherDataset";
+ private static final String ROUTINE = "routine";
+ private static final RoutineId ROUTINE_ID = RoutineId.of(DATASET, ROUTINE);
+ private static final String ETAG = "etag";
+ private static final String ROUTINE_TYPE = "SCALAR_FUNCTION";
+ private static final Long CREATION_TIME = 10L;
+ private static final Long LAST_MODIFIED_TIME = 20L;
+ private static final String LANGUAGE = "SQL";
+ private static final String UPLOAD_ID = "uploadid";
+ private static final int MIN_CHUNK_SIZE = 256 * 1024;
private static final List ACCESS_RULES =
ImmutableList.of(
Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER),
@@ -439,6 +449,43 @@ public class BigQueryImplTest {
BigQueryRpc.Option.PAGE_TOKEN, CURSOR,
BigQueryRpc.Option.MAX_RESULTS, 0L);
+ private static final RoutineArgument ARG_1 =
+ RoutineArgument.newBuilder()
+ .setDataType(StandardSQLDataType.newBuilder("STRING").build())
+ .setName("arg1")
+ .build();
+
+ private static final List ARGUMENT_LIST = ImmutableList.of(ARG_1);
+
+ private static final StandardSQLDataType RETURN_TYPE =
+ StandardSQLDataType.newBuilder("FLOAT64").build();
+
+ private static final List IMPORTED_LIBRARIES =
+ ImmutableList.of("gs://foo", "gs://bar", "gs://baz");
+
+ private static final String BODY = "body";
+
+ private static final RoutineInfo ROUTINE_INFO =
+ RoutineInfo.newBuilder(ROUTINE_ID)
+ .setEtag(ETAG)
+ .setRoutineType(ROUTINE_TYPE)
+ .setCreationTime(CREATION_TIME)
+ .setLastModifiedTime(LAST_MODIFIED_TIME)
+ .setLanguage(LANGUAGE)
+ .setArguments(ARGUMENT_LIST)
+ .setReturnType(RETURN_TYPE)
+ .setImportedLibraries(IMPORTED_LIBRARIES)
+ .setBody(BODY)
+ .build();
+ private static final WriteChannelConfiguration LOAD_CONFIGURATION =
+ WriteChannelConfiguration.newBuilder(TABLE_ID)
+ .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
+ .setWriteDisposition(JobInfo.WriteDisposition.WRITE_APPEND)
+ .setFormatOptions(FormatOptions.json())
+ .setIgnoreUnknownValues(true)
+ .setMaxBadRecords(10)
+ .build();
+
private BigQueryOptions options;
private BigQueryRpcFactory rpcFactoryMock;
private BigQueryRpc bigqueryRpcMock;
@@ -446,6 +493,9 @@ public class BigQueryImplTest {
@Captor private ArgumentCaptor
@@ -74,7 +72,7 @@
com.google.cloud
google-cloud-shared-dependencies
- 0.4.0
+ 0.7.0
pom
import
@@ -88,9 +86,28 @@
com.google.cloud
google-cloud-bigquery
- 1.116.0
+ 1.116.1
+
+ com.google.apis
+ google-api-services-bigquery
+ ${google-api-services-bigquery.version}
+
+
+
+
+ junit
+ junit
+ 4.13
+ test
+
+
+ com.google.truth
+ truth
+ 1.0.1
+ test
+
org.mockito
mockito-core
@@ -103,11 +120,6 @@
1.108.0
test
-
- com.google.apis
- google-api-services-bigquery
- ${google-api-services-bigquery.version}
-
@@ -117,9 +129,6 @@
org.apache.maven.plugins
maven-dependency-plugin
-
- org.objenesis:objenesis
-
@@ -207,5 +216,4 @@
-
diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml
index 75e9d54cb..0e2c300b8 100644
--- a/samples/install-without-bom/pom.xml
+++ b/samples/install-without-bom/pom.xml
@@ -45,7 +45,7 @@
com.google.cloud
google-cloud-bigquery
- 1.115.1
+ 1.116.0
diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml
index 202d493ea..f84fbf44b 100644
--- a/samples/snapshot/pom.xml
+++ b/samples/snapshot/pom.xml
@@ -44,7 +44,7 @@
com.google.cloud
google-cloud-bigquery
- 1.116.0
+ 1.116.1
diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml
index 8c070dfb0..6b6788b2e 100644
--- a/samples/snippets/pom.xml
+++ b/samples/snippets/pom.xml
@@ -44,7 +44,7 @@
com.google.cloud
libraries-bom
- 5.4.0
+ 5.5.0
pom
import
diff --git a/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java b/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java
index c529272fc..932c27d69 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java
@@ -32,6 +32,8 @@
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.TableId;
import com.google.common.collect.ImmutableList;
+import java.util.ArrayList;
+import java.util.List;
import java.util.UUID;
public class AddColumnLoadAppend {
@@ -41,11 +43,28 @@ public static void runAddColumnLoadAppend() throws Exception {
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
String sourceUri = "/path/to/file.csv";
- addColumnLoadAppend(datasetName, tableName, sourceUri);
+ // Add a new column to a BigQuery table while appending rows via a load job.
+ // 'REQUIRED' fields cannot be added to an existing schema, so the additional column must be
+ // 'NULLABLE'.
+ Schema schema =
+ Schema.of(
+ Field.newBuilder("name", LegacySQLTypeName.STRING)
+ .setMode(Field.Mode.REQUIRED)
+ .build());
+
+ List fields = schema.getFields();
+ // Adding below additional column during the load job
+ Field newField = Field.newBuilder("post_abbr", LegacySQLTypeName.STRING)
+ .setMode(Field.Mode.NULLABLE)
+ .build();
+ List newFields = new ArrayList<>(fields);
+ newFields.add(newField);
+ Schema newSchema = Schema.of(newFields);
+ addColumnLoadAppend(datasetName, tableName, sourceUri, newSchema);
}
- public static void addColumnLoadAppend(String datasetName, String tableName, String sourceUri)
- throws Exception {
+ public static void addColumnLoadAppend(String datasetName, String tableName,
+ String sourceUri, Schema newSchema) throws Exception {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
@@ -53,19 +72,6 @@ public static void addColumnLoadAppend(String datasetName, String tableName, Str
TableId tableId = TableId.of(datasetName, tableName);
- // Add a new column to a BigQuery table while appending rows via a load job.
- // 'REQUIRED' fields cannot be added to an existing schema, so the additional column must be
- // 'NULLABLE'.
- Schema newSchema =
- Schema.of(
- Field.newBuilder("name", LegacySQLTypeName.STRING)
- .setMode(Field.Mode.REQUIRED)
- .build(),
- // Adding below additional column during the load job
- Field.newBuilder("post_abbr", LegacySQLTypeName.STRING)
- .setMode(Field.Mode.NULLABLE)
- .build());
-
LoadJobConfiguration loadJobConfig =
LoadJobConfiguration.builder(tableId, sourceUri)
.setFormatOptions(FormatOptions.csv())
diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java b/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java
index 1fc265e17..27a0e144f 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java
@@ -29,16 +29,25 @@
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.TimePartitioning;
import com.google.common.collect.ImmutableList;
+import java.util.List;
public class CreateClusteredTable {
public static void runCreateClusteredTable() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
- createClusteredTable(datasetName, tableName);
+ Schema schema =
+ Schema.of(
+ Field.of("name", StandardSQLTypeName.STRING),
+ Field.of("post_abbr", StandardSQLTypeName.STRING),
+ Field.of("date", StandardSQLTypeName.DATE));
+ createClusteredTable(datasetName, tableName,
+ schema, ImmutableList.of("name", "post_abbr"));
}
- public static void createClusteredTable(String datasetName, String tableName) {
+ public static void createClusteredTable(
+ String datasetName, String tableName,
+ Schema schema, List clusteringFields) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
@@ -47,15 +56,10 @@ public static void createClusteredTable(String datasetName, String tableName) {
TableId tableId = TableId.of(datasetName, tableName);
TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY);
-
- Schema schema =
- Schema.of(
- Field.of("name", StandardSQLTypeName.STRING),
- Field.of("post_abbr", StandardSQLTypeName.STRING),
- Field.of("date", StandardSQLTypeName.DATE));
-
+      // Clustering fields must be a subset of the fields defined in the schema.
+      // As of now, the table must also be partitioned for clustering to apply.
Clustering clustering =
- Clustering.newBuilder().setFields(ImmutableList.of("name", "post_abbr")).build();
+ Clustering.newBuilder().setFields(clusteringFields).build();
StandardTableDefinition tableDefinition =
StandardTableDefinition.newBuilder()
diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java b/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java
index 79a15a801..62a51c669 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java
@@ -34,10 +34,15 @@ public static void runCreatePartitionedTable() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
- createPartitionedTable(datasetName, tableName);
+ Schema schema =
+ Schema.of(
+ Field.of("stringField", StandardSQLTypeName.STRING),
+ Field.of("booleanField", StandardSQLTypeName.BOOL),
+ Field.of("dateField", StandardSQLTypeName.DATE));
+ createPartitionedTable(datasetName, tableName, schema);
}
- public static void createPartitionedTable(String datasetName, String tableName) {
+ public static void createPartitionedTable(String datasetName, String tableName, Schema schema) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
@@ -47,12 +52,6 @@ public static void createPartitionedTable(String datasetName, String tableName)
TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY);
- Schema schema =
- Schema.of(
- Field.of("stringField", StandardSQLTypeName.STRING),
- Field.of("booleanField", StandardSQLTypeName.BOOL),
- Field.of("dateField", StandardSQLTypeName.DATE));
-
StandardTableDefinition tableDefinition =
StandardTableDefinition.newBuilder()
.setSchema(schema)
diff --git a/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java b/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java
index 4d657c690..0f05cb20d 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java
@@ -35,12 +35,19 @@ public static void runExtractTableToJson() {
String tableName = "shakespeare";
String bucketName = "my-bucket";
String destinationUri = "gs://" + bucketName + "/path/to/file";
- extractTableToJson(projectId, datasetName, tableName, destinationUri);
+ // For more information on export formats available see:
+ // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types
+ // For more information on Job see:
+ // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html
+
+ String dataFormat = "CSV";
+ extractTableToJson(projectId, datasetName, tableName, destinationUri, dataFormat);
}
// Exports datasetName:tableName to destinationUri as raw CSV
public static void extractTableToJson(
- String projectId, String datasetName, String tableName, String destinationUri) {
+ String projectId, String datasetName, String tableName, String destinationUri,
+ String dataFormat) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
@@ -49,11 +56,7 @@ public static void extractTableToJson(
TableId tableId = TableId.of(projectId, datasetName, tableName);
Table table = bigquery.getTable(tableId);
- // For more information on export formats available see:
- // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types
- // For more information on Job see:
- // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html
- Job job = table.extract("CSV", destinationUri);
+ Job job = table.extract(dataFormat, destinationUri);
// Blocks until this job completes its execution, either failing or succeeding.
Job completedJob =
@@ -68,7 +71,7 @@ public static void extractTableToJson(
"BigQuery was unable to extract due to an error: \n" + job.getStatus().getError());
return;
}
- System.out.println("Table export successful. Check in GCS bucket for the CSV file.");
+ System.out.println("Table export successful. Check in GCS bucket for the " + dataFormat + " file.");
} catch (BigQueryException | InterruptedException e) {
System.out.println("Table extraction job was interrupted. \n" + e.toString());
}
diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java b/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java
index e6e2e3915..3e580ec7d 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java
@@ -41,10 +41,11 @@ public static void runLoadLocalFile() throws IOException, InterruptedException {
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
- loadLocalFile(datasetName, tableName, csvPath);
+ loadLocalFile(datasetName, tableName, csvPath, FormatOptions.csv());
}
- public static void loadLocalFile(String datasetName, String tableName, Path csvPath)
+ public static void loadLocalFile(String datasetName, String tableName, Path csvPath,
+ FormatOptions formatOptions)
throws IOException, InterruptedException {
try {
// Initialize client that will be used to send requests. This client only needs to be created
@@ -54,7 +55,7 @@ public static void loadLocalFile(String datasetName, String tableName, Path csvP
WriteChannelConfiguration writeChannelConfiguration =
WriteChannelConfiguration.newBuilder(tableId)
- .setFormatOptions(FormatOptions.csv())
+ .setFormatOptions(formatOptions)
.build();
// The location and JobName must be specified; other fields can be auto-detected.
diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java b/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java
index 580b6183f..56a6b6b04 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java
@@ -32,17 +32,18 @@ public class LoadParquet {
public static void runLoadParquet() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
- loadParquet(datasetName);
+ String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
+ String tableName = "us_states";
+ loadParquet(datasetName, tableName, sourceUri);
}
- public static void loadParquet(String datasetName) {
+ public static void loadParquet(String datasetName, String tableName, String sourceUri) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
- String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
- TableId tableId = TableId.of(datasetName, "us_states");
+ TableId tableId = TableId.of(datasetName, tableName);
LoadJobConfiguration configuration =
LoadJobConfiguration.builder(tableId, sourceUri)
diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java b/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java
index 80f815046..c8ee67c67 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java
@@ -34,10 +34,13 @@ public class LoadParquetReplaceTable {
public static void runLoadParquetReplaceTable() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
- loadParquetReplaceTable(datasetName);
+ String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
+ String tableName = "us_states";
+ loadParquetReplaceTable(datasetName, tableName, sourceUri);
}
- public static void loadParquetReplaceTable(String datasetName) {
+ public static void loadParquetReplaceTable(String datasetName, String tableName,
+ String sourceUri) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
@@ -46,8 +49,7 @@ public static void loadParquetReplaceTable(String datasetName) {
// Imports a GCS file into a table and overwrites table data if table already exists.
// This sample loads CSV file at:
// https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv
- String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
- TableId tableId = TableId.of(datasetName, "us_states");
+ TableId tableId = TableId.of(datasetName, tableName);
// For more information on LoadJobConfiguration see:
// https://googleapis.dev/java/google-cloud-clients/latest/com/google/cloud/bigquery/LoadJobConfiguration.Builder.html
diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java b/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java
index 7d4987d11..20f4104f9 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java
@@ -31,6 +31,7 @@
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TimePartitioning;
import com.google.common.collect.ImmutableList;
+import java.util.List;
public class LoadTableClustered {
@@ -39,10 +40,17 @@ public static void runLoadTableClustered() throws Exception {
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
String sourceUri = "/path/to/file.csv";
- loadTableClustered(datasetName, tableName, sourceUri);
+ Schema schema =
+ Schema.of(
+ Field.of("name", StandardSQLTypeName.STRING),
+ Field.of("post_abbr", StandardSQLTypeName.STRING),
+ Field.of("date", StandardSQLTypeName.DATE));
+ loadTableClustered(datasetName, tableName, sourceUri,
+ schema, ImmutableList.of("name", "post_abbr"));
}
- public static void loadTableClustered(String datasetName, String tableName, String sourceUri)
+ public static void loadTableClustered(String datasetName, String tableName, String sourceUri,
+ Schema schema, List clusteringFields)
throws Exception {
try {
// Initialize client that will be used to send requests. This client only needs to be created
@@ -51,16 +59,11 @@ public static void loadTableClustered(String datasetName, String tableName, Stri
TableId tableId = TableId.of(datasetName, tableName);
- Schema schema =
- Schema.of(
- Field.of("name", StandardSQLTypeName.STRING),
- Field.of("post_abbr", StandardSQLTypeName.STRING),
- Field.of("date", StandardSQLTypeName.DATE));
-
TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY);
-
+      // Clustering fields must be a subset of the fields defined in the schema.
+      // As of now, the table must also be partitioned for clustering to apply.
Clustering clustering =
- Clustering.newBuilder().setFields(ImmutableList.of("name", "post_abbr")).build();
+ Clustering.newBuilder().setFields(clusteringFields).build();
LoadJobConfiguration loadJobConfig =
LoadJobConfiguration.builder(tableId, sourceUri)
diff --git a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java
index 4163a5979..b601c859b 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java
@@ -34,10 +34,16 @@ public static void runTableInsertRows() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
- tableInsertRows(datasetName, tableName);
+ // Create a row to insert
+ Map rowContent = new HashMap<>();
+ rowContent.put("booleanField", true);
+ rowContent.put("numericField", "3.14");
+
+ tableInsertRows(datasetName, tableName, rowContent);
}
- public static void tableInsertRows(String datasetName, String tableName) {
+ public static void tableInsertRows(String datasetName, String tableName,
+ Map rowContent) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
@@ -46,11 +52,6 @@ public static void tableInsertRows(String datasetName, String tableName) {
// Get table
TableId tableId = TableId.of(datasetName, tableName);
- // Create a row to insert
- Map rowContent = new HashMap<>();
- rowContent.put("booleanField", true);
- rowContent.put("numericField", "3.14");
-
// Inserts rowContent into datasetName:tableId.
InsertAllResponse response =
bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow(rowContent).build());
diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java
index 5719dcffa..5f7cc27fa 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java
@@ -31,10 +31,15 @@ public class UpdateDatasetAccess {
public static void runUpdateDatasetAccess() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
- updateDatasetAccess(datasetName);
+ // Create a new ACL granting the READER role to "sample.bigquery.dev@gmail.com"
+ // For more information on the types of ACLs available see:
+ // https://cloud.google.com/storage/docs/access-control/lists
+ Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER);
+
+ updateDatasetAccess(datasetName, newEntry);
}
- public static void updateDatasetAccess(String datasetName) {
+ public static void updateDatasetAccess(String datasetName, Acl newEntry) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
@@ -42,11 +47,6 @@ public static void updateDatasetAccess(String datasetName) {
Dataset dataset = bigquery.getDataset(datasetName);
- // Create a new ACL granting the READER role to "sample.bigquery.dev@gmail.com"
- // For more information on the types of ACLs available see:
- // https://cloud.google.com/storage/docs/access-control/lists
- Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER);
-
// Get a copy of the ACLs list from the dataset and append the new entry
ArrayList acls = new ArrayList<>(dataset.getAcl());
acls.add(newEntry);
diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java
index bea27624f..9b746a90d 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java
@@ -28,18 +28,17 @@ public class UpdateDatasetExpiration {
public static void runUpdateDatasetExpiration() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
- updateDatasetExpiration(datasetName);
+ // Update dataset expiration to one day
+ Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
+ updateDatasetExpiration(datasetName, newExpiration);
}
- public static void updateDatasetExpiration(String datasetName) {
+ public static void updateDatasetExpiration(String datasetName, Long newExpiration) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
- // Update dataset expiration to one day
- Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
-
Dataset dataset = bigquery.getDataset(datasetName);
bigquery.update(dataset.toBuilder().setDefaultTableLifetime(newExpiration).build());
System.out.println("Dataset description updated successfully to " + newExpiration);
diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java
index a5d9abedb..cbc9a1940 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java
@@ -29,18 +29,18 @@ public static void runUpdateTableExpiration() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
- updateTableExpiration(datasetName, tableName);
+ // Update table expiration to one day.
+ Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
+ updateTableExpiration(datasetName, tableName, newExpiration);
}
- public static void updateTableExpiration(String datasetName, String tableName) {
+ public static void updateTableExpiration(String datasetName, String tableName,
+ Long newExpiration) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
- // Update table expiration to one day
- Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
-
Table table = bigquery.getTable(datasetName, tableName);
bigquery.update(table.toBuilder().setExpirationTime(newExpiration).build());
diff --git a/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java b/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java
index 12a9acc38..afbe13c94 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java
@@ -24,6 +24,8 @@
import com.google.cloud.bigquery.Schema;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -71,7 +73,16 @@ public void testAddColumnLoadAppend() throws Exception {
CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, originalSchema);
- AddColumnLoadAppend.addColumnLoadAppend(BIGQUERY_DATASET_NAME, tableName, sourceUri);
+ List fields = originalSchema.getFields();
+ // Adding below additional column during the load job
+ Field newField = Field.newBuilder("post_abbr", LegacySQLTypeName.STRING)
+ .setMode(Field.Mode.NULLABLE)
+ .build();
+ List newFields = new ArrayList<>(fields);
+ newFields.add(newField);
+ Schema newSchema = Schema.of(newFields);
+
+ AddColumnLoadAppend.addColumnLoadAppend(BIGQUERY_DATASET_NAME, tableName, sourceUri, newSchema);
assertThat(bout.toString()).contains("Column successfully added during load append job");
diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java
index 4f8c29942..37ddd226b 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java
@@ -19,6 +19,10 @@
import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.common.collect.ImmutableList;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.junit.After;
@@ -58,8 +62,14 @@ public void tearDown() {
@Test
public void createClusteredTable() {
String tableName = "MY_CLUSTERED_TABLE";
+ Schema schema =
+ Schema.of(
+ Field.of("name", StandardSQLTypeName.STRING),
+ Field.of("post_abbr", StandardSQLTypeName.STRING),
+ Field.of("date", StandardSQLTypeName.DATE));
- CreateClusteredTable.createClusteredTable(BIGQUERY_DATASET_NAME, tableName);
+ CreateClusteredTable.createClusteredTable(BIGQUERY_DATASET_NAME, tableName,
+ schema, ImmutableList.of("name", "post_abbr"));
assertThat(bout.toString()).contains("Clustered table created successfully");
diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java
index cbe612b32..32000c5ce 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java
@@ -19,6 +19,9 @@
import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.junit.After;
@@ -58,8 +61,13 @@ public void tearDown() {
@Test
public void testCreatePartitionedTable() {
String tableName = "MY_PARTITIONED_TABLE";
+ Schema schema =
+ Schema.of(
+ Field.of("stringField", StandardSQLTypeName.STRING),
+ Field.of("booleanField", StandardSQLTypeName.BOOL),
+ Field.of("dateField", StandardSQLTypeName.DATE));
- CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName);
+ CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName, schema);
assertThat(bout.toString()).contains("Partitioned table created successfully");
diff --git a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java
index e018384ed..fd28bc1cb 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java
@@ -61,10 +61,12 @@ public void testExtractTableToJson() {
String datasetName = "samples";
String tableName = "shakespeare";
String destinationUri = "gs://" + GCS_BUCKET + "/extractTest.csv";
+ String dataFormat = "CSV";
// Extract table content to GCS in CSV format
- ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri);
+ ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri,
+ dataFormat);
assertThat(bout.toString())
- .contains("Table export successful. Check in GCS bucket for the CSV file.");
+ .contains("Table export successful. Check in GCS bucket for the " + dataFormat + " file.");
}
}
diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java
index a1a8d5a6f..aefa2adba 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java
@@ -20,6 +20,7 @@
import static junit.framework.TestCase.assertNotNull;
import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.Schema;
import java.io.ByteArrayOutputStream;
@@ -76,7 +77,7 @@ public void loadLocalFile() throws IOException, InterruptedException {
Path csvPath = FileSystems.getDefault().getPath("src/test/resources", "bigquery_noheader.csv");
- LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath);
+ LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath, FormatOptions.csv());
assertThat(bout.toString()).contains("Successfully loaded");
diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java
index a60fbea3b..cc3ff5d72 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java
@@ -57,7 +57,9 @@ public void tearDown() {
@Test
public void loadParquet() {
- LoadParquet.loadParquet(BIGQUERY_DATASET_NAME);
+ String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
+ String tableName = "us_states";
+ LoadParquet.loadParquet(BIGQUERY_DATASET_NAME, tableName, sourceUri);
assertThat(bout.toString()).contains("GCS parquet loaded successfully.");
}
}
diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java
index 1d2bd0550..03222cd6d 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java
@@ -57,7 +57,9 @@ public void tearDown() {
@Test
public void testLoadParquetReplaceTable() {
- LoadParquetReplaceTable.loadParquetReplaceTable(BIGQUERY_DATASET_NAME);
+ String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
+ String tableName = "us_states";
+ LoadParquetReplaceTable.loadParquetReplaceTable(BIGQUERY_DATASET_NAME, tableName, sourceUri);
assertThat(bout.toString()).contains("GCS parquet overwrote existing table successfully.");
}
}
diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java
index 968235e4b..98ef57afd 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java
@@ -19,6 +19,10 @@
import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.common.collect.ImmutableList;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.junit.After;
@@ -61,7 +65,14 @@ public void loadTableClustered() throws Exception {
String tableName = "LOAD_CLUSTERED_TABLE_TEST";
- LoadTableClustered.loadTableClustered(BIGQUERY_DATASET_NAME, tableName, sourceUri);
+ Schema schema =
+ Schema.of(
+ Field.of("name", StandardSQLTypeName.STRING),
+ Field.of("post_abbr", StandardSQLTypeName.STRING),
+ Field.of("date", StandardSQLTypeName.DATE));
+
+ LoadTableClustered.loadTableClustered(BIGQUERY_DATASET_NAME, tableName, sourceUri,
+ schema, ImmutableList.of("name", "post_abbr"));
assertThat(bout.toString())
.contains("Data successfully loaded into clustered table during load job");
diff --git a/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java b/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java
index 311cd9bd4..527907dd9 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java
@@ -24,6 +24,8 @@
import com.google.cloud.bigquery.Schema;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Map;
import java.util.UUID;
import org.junit.After;
import org.junit.Before;
@@ -70,8 +72,13 @@ public void testTableInsertRows() {
// Create table in dataset for testing
CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema);
+ // Create a row to insert
+ Map rowContent = new HashMap<>();
+ rowContent.put("booleanField", true);
+ rowContent.put("numericField", "3.14");
+
// Testing
- TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName);
+ TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName, rowContent);
assertThat(bout.toString()).contains("Rows successfully inserted into table");
// Clean up
diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java
index 60086567a..10e7cfddd 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java
@@ -19,6 +19,9 @@
import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;
+import com.google.cloud.bigquery.Acl;
+import com.google.cloud.bigquery.Acl.Role;
+import com.google.cloud.bigquery.Acl.User;
import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
@@ -62,8 +65,9 @@ public void updateDatasetAccess() {
// Create a dataset in order to modify its ACL
CreateDataset.createDataset(generatedDatasetName);
+ Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER);
// Modify dataset's ACL
- UpdateDatasetAccess.updateDatasetAccess(generatedDatasetName);
+ UpdateDatasetAccess.updateDatasetAccess(generatedDatasetName, newEntry);
assertThat(bout.toString()).contains("Dataset Access Control updated successfully");
// Clean up
diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java
index 7dd4f7f46..3360223e3 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java
@@ -22,6 +22,7 @@
import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
+import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -62,8 +63,9 @@ public void updateDatasetExpiration() {
// Create a dataset in order to modify its expiration
CreateDataset.createDataset(generatedDatasetName);
+ Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
// Modify dataset's expiration
- UpdateDatasetExpiration.updateDatasetExpiration(generatedDatasetName);
+ UpdateDatasetExpiration.updateDatasetExpiration(generatedDatasetName, newExpiration);
assertThat(bout.toString()).contains("Dataset description updated successfully");
// Clean up
diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java
index 5352762d8..73ca93c06 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java
@@ -19,8 +19,13 @@
import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -57,9 +62,15 @@ public void tearDown() {
@Test
public void updateTableExpiration() {
- String tableName = "update_expiration_table";
- CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, null);
- UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, tableName);
+ String suffix = UUID.randomUUID().toString().replace('-', '_');
+ String tableName = "update_expiration_table_" + suffix;
+ Schema schema =
+ Schema.of(
+ Field.of("stringField", StandardSQLTypeName.STRING),
+ Field.of("booleanField", StandardSQLTypeName.BOOL));
+ CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema);
+ Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
+ UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, tableName, newExpiration);
assertThat(bout.toString()).contains("Table expiration updated successfully");
// Clean up
diff --git a/synth.metadata b/synth.metadata
index 9c415deb3..bdc51594c 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -4,14 +4,14 @@
"git": {
"name": ".",
"remote": "https://github.com/googleapis/java-bigquery.git",
- "sha": "69ede0ca217cfaf98b3364fb5301f1399db4e498"
+ "sha": "db4158186b99d0bed68fd70bef5918b1820e5dd1"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "5b48b0716a36ca069db3038da7e205c87a22ed19"
+ "sha": "388f7aafee3d7a067c23db6c13b7e83fb361c64a"
}
}
]
diff --git a/versions.txt b/versions.txt
index ee09eebae..110d6881d 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,4 +1,4 @@
# Format:
# module:released-version:current-version
-google-cloud-bigquery:1.116.0:1.116.0
\ No newline at end of file
+google-cloud-bigquery:1.116.1:1.116.1
\ No newline at end of file