diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f953c5f0f..aef889d3d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,4 +5,4 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # The java-samples-reviewers team is the default owner for samples changes -samples/ @stephaniewang526 @GoogleCloudPlatform/java-samples-reviewers \ No newline at end of file +samples/**/*.java @stephaniewang526 @googleapis/java-samples-reviewers diff --git a/CHANGELOG.md b/CHANGELOG.md index 28e65ffe1..510606eea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +### [1.116.1](https://www.github.com/googleapis/java-bigquery/compare/v1.116.0...v1.116.1) (2020-06-01) + + +### Dependencies + +* remove google-cloud-core classifier=test since it is managed by … ([#396](https://www.github.com/googleapis/java-bigquery/issues/396)) ([bafd8ae](https://www.github.com/googleapis/java-bigquery/commit/bafd8ae780121ee968bca7b3eb68bb0f0a438124)) +* restrict -annotation deps ([#402](https://www.github.com/googleapis/java-bigquery/issues/402)) ([db41581](https://www.github.com/googleapis/java-bigquery/commit/db4158186b99d0bed68fd70bef5918b1820e5dd1)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.6.0 ([#392](https://www.github.com/googleapis/java-bigquery/issues/392)) ([3f13ccb](https://www.github.com/googleapis/java-bigquery/commit/3f13ccbbf1ca8f179c48a69c8e456bdf45c43af5)) +* update shared-deps and add back certain test deps 
([#390](https://www.github.com/googleapis/java-bigquery/issues/390)) ([7e21be2](https://www.github.com/googleapis/java-bigquery/commit/7e21be2506cdaf065cd32e3aed403890929b93e7)) + + +### Documentation + +* **samples:** Convert tightly coupled local variable inside of method into method arguments ([#386](https://www.github.com/googleapis/java-bigquery/issues/386)) ([7c02cd3](https://www.github.com/googleapis/java-bigquery/commit/7c02cd3ca7238d24bb2914ae2262a3b2e2d4bda7)), closes [#387](https://www.github.com/googleapis/java-bigquery/issues/387) +* **samples:** Convert tightly coupled local variable involved options inside of method into method arguments ([#393](https://www.github.com/googleapis/java-bigquery/issues/393)) ([a11f3cb](https://www.github.com/googleapis/java-bigquery/commit/a11f3cb4733215a9c4a65936b05f9cc2ed75656f)), closes [#394](https://www.github.com/googleapis/java-bigquery/issues/394) +* **samples:** Fix flaky test ([#397](https://www.github.com/googleapis/java-bigquery/issues/397)) ([64847d1](https://www.github.com/googleapis/java-bigquery/commit/64847d1e5137e811e7f462fc058908fa9fcc55fb)) +* **samples:** update reviewers for samples module changes ([#401](https://www.github.com/googleapis/java-bigquery/issues/401)) 
([69b9d84](https://www.github.com/googleapis/java-bigquery/commit/69b9d8417d5b910286aaa855f4a83fc151a81cec)) + ## [1.116.0](https://www.github.com/googleapis/java-bigquery/compare/v1.115.1...v1.116.0) (2020-05-21) diff --git a/README.md b/README.md index 1f5d01b88..a74ce77b3 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ See https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google com.google.cloud libraries-bom - 5.4.0 + 5.5.0 pom import @@ -40,7 +40,7 @@ If you are using Maven without BOM, add this to your dependencies: com.google.cloud google-cloud-bigquery - 1.115.1 + 1.116.0 ``` @@ -49,11 +49,11 @@ If you are using Maven without BOM, add this to your dependencies: If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.cloud:google-cloud-bigquery:1.116.0' +compile 'com.google.cloud:google-cloud-bigquery:1.116.1' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "1.116.0" +libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "1.116.1" ``` [//]: # ({x-version-update-end}) diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index d1b59b1a7..b1f467a26 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 1.116.0 + 1.116.1 jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 1.116.0 + 1.116.1 google-cloud-bigquery @@ -36,6 +36,7 @@ com.google.auto.value auto-value-annotations + provided com.google.http-client @@ -60,7 +61,6 @@ com.google.cloud google-cloud-core - ${google.core.version} tests test @@ -135,21 +135,4 @@ 
- - - - java9 - - [9,) - - - - javax.annotation - javax.annotation-api - - ${javax.annotations.version} - - - - diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java index ac11f40d0..6435ff91a 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java @@ -53,6 +53,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import java.io.IOException; import java.math.BigInteger; import java.util.Collections; import java.util.List; @@ -78,6 +79,15 @@ public class BigQueryImplTest { private static final String JOB = "job"; private static final String OTHER_TABLE = "otherTable"; private static final String OTHER_DATASET = "otherDataset"; + private static final String ROUTINE = "routine"; + private static final RoutineId ROUTINE_ID = RoutineId.of(DATASET, ROUTINE); + private static final String ETAG = "etag"; + private static final String ROUTINE_TYPE = "SCALAR_FUNCTION"; + private static final Long CREATION_TIME = 10L; + private static final Long LAST_MODIFIED_TIME = 20L; + private static final String LANGUAGE = "SQL"; + private static final String UPLOAD_ID = "uploadid"; + private static final int MIN_CHUNK_SIZE = 256 * 1024; private static final List ACCESS_RULES = ImmutableList.of( Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER), @@ -439,6 +449,43 @@ public class BigQueryImplTest { BigQueryRpc.Option.PAGE_TOKEN, CURSOR, BigQueryRpc.Option.MAX_RESULTS, 0L); + private static final RoutineArgument ARG_1 = + RoutineArgument.newBuilder() + .setDataType(StandardSQLDataType.newBuilder("STRING").build()) + .setName("arg1") + .build(); + + private static final List ARGUMENT_LIST = ImmutableList.of(ARG_1); + + private static final 
StandardSQLDataType RETURN_TYPE = + StandardSQLDataType.newBuilder("FLOAT64").build(); + + private static final List IMPORTED_LIBRARIES = + ImmutableList.of("gs://foo", "gs://bar", "gs://baz"); + + private static final String BODY = "body"; + + private static final RoutineInfo ROUTINE_INFO = + RoutineInfo.newBuilder(ROUTINE_ID) + .setEtag(ETAG) + .setRoutineType(ROUTINE_TYPE) + .setCreationTime(CREATION_TIME) + .setLastModifiedTime(LAST_MODIFIED_TIME) + .setLanguage(LANGUAGE) + .setArguments(ARGUMENT_LIST) + .setReturnType(RETURN_TYPE) + .setImportedLibraries(IMPORTED_LIBRARIES) + .setBody(BODY) + .build(); + private static final WriteChannelConfiguration LOAD_CONFIGURATION = + WriteChannelConfiguration.newBuilder(TABLE_ID) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setWriteDisposition(JobInfo.WriteDisposition.WRITE_APPEND) + .setFormatOptions(FormatOptions.json()) + .setIgnoreUnknownValues(true) + .setMaxBadRecords(10) + .build(); + private BigQueryOptions options; private BigQueryRpcFactory rpcFactoryMock; private BigQueryRpc bigqueryRpcMock; @@ -446,6 +493,9 @@ public class BigQueryImplTest { @Captor private ArgumentCaptor> capturedOptions; @Captor private ArgumentCaptor jobCapture; + @Captor private ArgumentCaptor capturedBuffer; + + private TableDataWriteChannel writer; private BigQueryOptions createBigQueryOptionsForProject( String project, BigQueryRpcFactory rpcFactory) { @@ -535,6 +585,7 @@ public void testGetDatasetNotFoundWhenThrowIsDisabled() { @Test public void testGetDatasetNotFoundWhenThrowIsEnabled() { when(bigqueryRpcMock.getDataset(PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS)) + .thenReturn(null) .thenThrow(new BigQueryException(404, "Dataset not found")); options.setThrowNotFound(true); bigquery = options.getService(); @@ -782,6 +833,22 @@ public void testGetModel() { verify(bigqueryRpcMock).getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); } + @Test + public void testGetModelNotFoundWhenThrowIsEnabled() { + 
String expected = "Model not found"; + when(bigqueryRpcMock.getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) + .thenReturn(null) + .thenThrow(new BigQueryException(404, expected)); + options.setThrowNotFound(true); + bigquery = options.getService(); + try { + bigquery.getModel(DATASET, MODEL); + } catch (BigQueryException ex) { + assertEquals(expected, ex.getMessage()); + } + verify(bigqueryRpcMock).getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); + } + @Test public void testListPartition() { when(bigqueryRpcMock.getTable( @@ -811,6 +878,7 @@ public void testGetTableNotFoundWhenThrowIsDisabled() { @Test public void testGetTableNotFoundWhenThrowIsEnabled() { when(bigqueryRpcMock.getTable(PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS)) + .thenReturn(null) .thenThrow(new BigQueryException(404, "Table not found")); options.setThrowNotFound(true); bigquery = options.getService(); @@ -1001,6 +1069,22 @@ public void testListModels() { verify(bigqueryRpcMock).listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } + @Test + public void testListModelsWithModelId() { + bigquery = options.getService(); + ImmutableList modelList = + ImmutableList.of( + new Model(bigquery, new ModelInfo.BuilderImpl(MODEL_INFO_WITH_PROJECT)), + new Model(bigquery, new ModelInfo.BuilderImpl(OTHER_MODEL_INFO))); + Tuple> result = + Tuple.of(CURSOR, Iterables.transform(modelList, ModelInfo.TO_PB_FUNCTION)); + when(bigqueryRpcMock.listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + Page page = bigquery.listModels(DatasetId.of(DATASET)); + assertEquals(CURSOR, page.getNextPageToken()); + assertArrayEquals(modelList.toArray(), Iterables.toArray(page.getValues(), Model.class)); + verify(bigqueryRpcMock).listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + } + @Test public void testDeleteTable() { when(bigqueryRpcMock.deleteTable(PROJECT, DATASET, TABLE)).thenReturn(true); @@ -1046,6 +1130,24 @@ public void testDeleteModel() { verify(bigqueryRpcMock).deleteModel(PROJECT, 
DATASET, MODEL); } + @Test + public void testUpdateModel() { + ModelInfo updateModelInfo = + MODEL_INFO_WITH_PROJECT + .setProjectId(OTHER_PROJECT) + .toBuilder() + .setDescription("newDescription") + .build(); + when(bigqueryRpcMock.patch(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(updateModelInfo.toPb()); + BigQueryOptions bigQueryOptions = + createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Model actualModel = bigquery.update(updateModelInfo); + assertEquals(new Model(bigquery, new ModelInfo.BuilderImpl(updateModelInfo)), actualModel); + verify(bigqueryRpcMock).patch(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS); + } + @Test public void testUpdateTable() { TableInfo updatedTableInfo = @@ -1487,6 +1589,7 @@ public void testGetJobNotFoundWhenThrowIsDisabled() { @Test public void testGetJobNotFoundWhenThrowIsEnabled() { when(bigqueryRpcMock.getJob(PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS)) + .thenReturn(null) .thenThrow(new BigQueryException(404, "Job not found")); options.setThrowNotFound(true); bigquery = options.getService(); @@ -1900,7 +2003,7 @@ public void testGetQueryResultsWithOptions() { } @Test - public void testRetryableException() { + public void testGetDatasetRetryableException() { when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(500, "InternalError")) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); @@ -1971,4 +2074,157 @@ public void testQueryDryRun() throws Exception { Assert.assertNotNull(ex.getMessage()); } } + + @Test + public void testCreateRoutine() { + RoutineInfo routineInfo = ROUTINE_INFO.setProjectId(OTHER_PROJECT); + when(bigqueryRpcMock.create(routineInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(routineInfo.toPb()); + BigQueryOptions bigQueryOptions = + createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Routine actualRoutine = 
bigquery.create(routineInfo); + assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(routineInfo)), actualRoutine); + verify(bigqueryRpcMock).create(routineInfo.toPb(), EMPTY_RPC_OPTIONS); + } + + @Test + public void testGetRoutine() { + when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + .thenReturn(ROUTINE_INFO.toPb()); + bigquery = options.getService(); + Routine routine = bigquery.getRoutine(DATASET, ROUTINE); + assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)), routine); + verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + } + + @Test + public void testGetRoutineWithRountineId() { + when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + .thenReturn(ROUTINE_INFO.toPb()); + bigquery = options.getService(); + Routine routine = bigquery.getRoutine(ROUTINE_ID); + assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)), routine); + verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + } + + @Test + public void testGetRoutineWithEnabledThrowNotFoundException() { + when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + .thenReturn(null) + .thenThrow(new BigQueryException(404, "Routine not found")); + options.setThrowNotFound(true); + bigquery = options.getService(); + try { + Routine routine = bigquery.getRoutine(ROUTINE_ID); + fail(); + } catch (BigQueryException ex) { + assertEquals("Routine not found", ex.getMessage()); + } + verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + } + + @Test + public void testUpdateRoutine() { + RoutineInfo updatedRoutineInfo = + ROUTINE_INFO + .setProjectId(OTHER_PROJECT) + .toBuilder() + .setDescription("newDescription") + .build(); + when(bigqueryRpcMock.update(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(updatedRoutineInfo.toPb()); + BigQueryOptions bigQueryOptions = + 
createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Routine routine = bigquery.update(updatedRoutineInfo); + assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(updatedRoutineInfo)), routine); + verify(bigqueryRpcMock).update(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS); + } + + @Test + public void testListRoutines() { + bigquery = options.getService(); + ImmutableList routineList = + ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); + Tuple> result = + Tuple.of(CURSOR, Iterables.transform(routineList, RoutineInfo.TO_PB_FUNCTION)); + when(bigqueryRpcMock.listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + Page page = bigquery.listRoutines(DATASET); + assertEquals(CURSOR, page.getNextPageToken()); + assertArrayEquals(routineList.toArray(), Iterables.toArray(page.getValues(), Routine.class)); + verify(bigqueryRpcMock).listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + } + + @Test + public void testListRoutinesWithDatasetId() { + bigquery = options.getService(); + ImmutableList routineList = + ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); + Tuple> result = + Tuple.of(CURSOR, Iterables.transform(routineList, RoutineInfo.TO_PB_FUNCTION)); + when(bigqueryRpcMock.listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + Page page = bigquery.listRoutines(DatasetId.of(PROJECT, DATASET)); + assertEquals(CURSOR, page.getNextPageToken()); + assertArrayEquals(routineList.toArray(), Iterables.toArray(page.getValues(), Routine.class)); + verify(bigqueryRpcMock).listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + } + + @Test + public void testDeleteRoutine() { + when(bigqueryRpcMock.deleteRoutine(PROJECT, DATASET, ROUTINE)).thenReturn(true); + bigquery = options.getService(); + assertTrue(bigquery.delete(ROUTINE_ID)); + verify(bigqueryRpcMock).deleteRoutine(PROJECT, DATASET, ROUTINE); + } + + @Test + 
public void testWriteWithJob() throws IOException { + bigquery = options.getService(); + Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); + when(bigqueryRpcMock.open( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb()))) + .thenReturn(UPLOAD_ID); + when(bigqueryRpcMock.write( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) + .thenReturn(job.toPb()); + writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); + writer.close(); + assertEquals(job, writer.getJob()); + bigquery.writer(JOB_INFO.getJobId(), LOAD_CONFIGURATION); + verify(bigqueryRpcMock) + .open( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb())); + verify(bigqueryRpcMock) + .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + } + + @Test + public void testWriteChannel() throws IOException { + bigquery = options.getService(); + Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); + when(bigqueryRpcMock.open( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb()))) + .thenReturn(UPLOAD_ID); + when(bigqueryRpcMock.write( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) + .thenReturn(job.toPb()); + writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); + writer.close(); + assertEquals(job, writer.getJob()); + bigquery.writer(LOAD_CONFIGURATION); + verify(bigqueryRpcMock) + .open( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb())); + verify(bigqueryRpcMock) + .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + } } diff --git 
a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java index 51d27b793..88fa1595e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java @@ -18,6 +18,7 @@ import static com.google.common.truth.Truth.assertThat; import com.google.common.collect.ImmutableList; +import org.junit.Assert; import org.junit.Test; public class BigtableOptionsTest { @@ -61,6 +62,8 @@ public void testConstructors() { assertThat(COL1.getOnlyReadLatest()).isEqualTo(true); assertThat(COL1.getEncoding()).isEqualTo("BINARY"); assertThat(COL1.getType()).isEqualTo("BYTES"); + assertThat(COL1.equals(COL1)).isTrue(); + assertThat(COL1).isNotEqualTo(TESTFAMILY); // family assertThat(TESTFAMILY.getFamilyID()).isEqualTo("fooFamily"); @@ -73,6 +76,50 @@ public void testConstructors() { assertThat(OPTIONS.getIgnoreUnspecifiedColumnFamilies()).isEqualTo(true); assertThat(OPTIONS.getReadRowkeyAsString()).isEqualTo(true); assertThat(OPTIONS.getColumnFamilies()).isEqualTo(ImmutableList.of(TESTFAMILY)); + compareBigtableOptions(OPTIONS, OPTIONS.toBuilder().build()); + } + + @Test + public void testNullPointerException() { + try { + BigtableColumnFamily.newBuilder().setFamilyID(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex.getMessage()).isNotNull(); + } + try { + BigtableColumnFamily.newBuilder().setColumns(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex.getMessage()).isNotNull(); + } + try { + BigtableColumnFamily.newBuilder().setEncoding(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex.getMessage()).isNotNull(); + } + try { + BigtableColumnFamily.newBuilder().setOnlyReadLatest(null).build(); + Assert.fail(); + } catch (NullPointerException 
ex) { + assertThat(ex.getMessage()).isNotNull(); + } + try { + BigtableColumnFamily.newBuilder().setType(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex.getMessage()).isNotNull(); + } + } + + @Test + public void testIllegalStateException() { + try { + BigtableColumnFamily.newBuilder().build(); + } catch (IllegalStateException ex) { + assertThat(ex.getMessage()).isNotNull(); + } } @Test @@ -86,6 +133,10 @@ public void testToAndFromPb() { public void testEquals() { compareBigtableColumn(COL1, COL1); compareBigtableColumnFamily(TESTFAMILY, TESTFAMILY); + assertThat(TESTFAMILY.equals(TESTFAMILY)).isTrue(); + assertThat(TESTFAMILY).isNotEqualTo(COL1); + assertThat(OPTIONS.equals(OPTIONS)).isTrue(); + assertThat(OPTIONS).isNotEqualTo(TESTFAMILY); compareBigtableOptions(OPTIONS, OPTIONS); } @@ -96,6 +147,8 @@ private void compareBigtableColumn(BigtableColumn expected, BigtableColumn value assertThat(expected.getQualifierEncoded()).isEqualTo(value.getQualifierEncoded()); assertThat(expected.getOnlyReadLatest()).isEqualTo(value.getOnlyReadLatest()); assertThat(expected.getType()).isEqualTo(value.getType()); + assertThat(expected.toString()).isEqualTo(value.toString()); + assertThat(expected.hashCode()).isEqualTo(value.hashCode()); } private void compareBigtableColumnFamily( @@ -106,6 +159,8 @@ private void compareBigtableColumnFamily( assertThat(expected.getColumns()).isEqualTo(value.getColumns()); assertThat(expected.getEncoding()).isEqualTo(value.getEncoding()); assertThat(expected.getType()).isEqualTo(value.getType()); + assertThat(expected.toString()).isEqualTo(value.toString()); + assertThat(expected.hashCode()).isEqualTo(value.hashCode()); } private void compareBigtableOptions(BigtableOptions expected, BigtableOptions value) { @@ -114,5 +169,7 @@ private void compareBigtableOptions(BigtableOptions expected, BigtableOptions va .isEqualTo(value.getIgnoreUnspecifiedColumnFamilies()); 
assertThat(expected.getReadRowkeyAsString()).isEqualTo(value.getReadRowkeyAsString()); assertThat(expected.getColumnFamilies()).isEqualTo(value.getColumnFamilies()); + assertThat(expected.hashCode()).isEqualTo(value.hashCode()); + assertThat(expected.toString()).isEqualTo(value.toString()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java index bd5a6cdfb..ea8ca1ada 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java @@ -17,6 +17,8 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; import com.google.common.collect.ImmutableList; import java.util.List; @@ -79,6 +81,15 @@ public void testToBuilderIncomplete() { assertEquals(externalTableDefinition, externalTableDefinition.toBuilder().build()); } + @Test + public void testTypeNullPointerException() { + try { + EXTERNAL_TABLE_DEFINITION.toBuilder().setType(null).build(); + } catch (NullPointerException ex) { + assertNotNull(ex.getMessage()); + } + } + @Test public void testBuilder() { assertEquals(TableDefinition.Type.EXTERNAL, EXTERNAL_TABLE_DEFINITION.getType()); @@ -90,6 +101,7 @@ public void testBuilder() { assertEquals(SOURCE_URIS, EXTERNAL_TABLE_DEFINITION.getSourceUris()); assertEquals(AUTODETECT, EXTERNAL_TABLE_DEFINITION.getAutodetect()); assertEquals(HIVE_PARTITIONING_OPTIONS, EXTERNAL_TABLE_DEFINITION.getHivePartitioningOptions()); + assertNotEquals(EXTERNAL_TABLE_DEFINITION, TableDefinition.Type.EXTERNAL); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java index 68ccfd627..1d34f4958 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java @@ -20,6 +20,7 @@ import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; @@ -471,6 +472,7 @@ public void testToAndFromPb() { @Test public void testToAndFromPbWithoutConfiguration() { + assertNotEquals(expectedJob, bigquery); compareJob(expectedJob, Job.fromPb(bigquery, expectedJob.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java index b64e3c451..9f42d62b7 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java @@ -67,6 +67,13 @@ public class LoadJobConfigurationTest { RangePartitioning.Range.newBuilder().setStart(1L).setInterval(2L).setEnd(10L).build(); private static final RangePartitioning RANGE_PARTITIONING = RangePartitioning.newBuilder().setField("IntegerField").setRange(RANGE).build(); + private static final String MODE = "STRING"; + private static final String SOURCE_URI_PREFIX = "gs://bucket/path_to_table"; + private static final HivePartitioningOptions HIVE_PARTITIONING_OPTIONS = + HivePartitioningOptions.newBuilder() + .setMode(MODE) + .setSourceUriPrefix(SOURCE_URI_PREFIX) + .build(); private static final LoadJobConfiguration LOAD_CONFIGURATION_CSV = LoadJobConfiguration.newBuilder(TABLE_ID, SOURCE_URIS) 
.setCreateDisposition(CREATE_DISPOSITION) @@ -83,6 +90,8 @@ public class LoadJobConfigurationTest { .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) .setRangePartitioning(RANGE_PARTITIONING) + .setNullMarker("nullMarker") + .setHivePartitioningOptions(HIVE_PARTITIONING_OPTIONS) .build(); private static final DatastoreBackupOptions BACKUP_OPTIONS = @@ -238,5 +247,7 @@ private void compareLoadJobConfiguration( assertEquals(expected.getLabels(), value.getLabels()); assertEquals(expected.getJobTimeoutMs(), value.getJobTimeoutMs()); assertEquals(expected.getRangePartitioning(), value.getRangePartitioning()); + assertEquals(expected.getNullMarker(), value.getNullMarker()); + assertEquals(expected.getHivePartitioningOptions(), value.getHivePartitioningOptions()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java new file mode 100644 index 000000000..62b2cfe7d --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java @@ -0,0 +1,109 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; + +import org.junit.Test; + +public class ModelTableDefinitionTest { + + private static final String LOCATION = "US"; + private static final Long NUM_BYTES = 14L; + private static final Field FIELD_SCHEMA1 = + Field.newBuilder("StringField", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.newBuilder("IntegerField", LegacySQLTypeName.INTEGER) + .setMode(Field.Mode.REPEATED) + .setDescription("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.newBuilder("RecordField", LegacySQLTypeName.RECORD, FIELD_SCHEMA1, FIELD_SCHEMA2) + .setMode(Field.Mode.REQUIRED) + .setDescription("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final ModelTableDefinition MODEL_TABLE_DEFINITION = + ModelTableDefinition.newBuilder() + .setSchema(TABLE_SCHEMA) + .setType(TableDefinition.Type.MODEL) + .setLocation(LOCATION) + .setNumBytes(NUM_BYTES) + .build(); + + @Test + public void testToBuilder() { + compareModelTableDefinition(MODEL_TABLE_DEFINITION, MODEL_TABLE_DEFINITION.toBuilder().build()); + } + + @Test + public void testTypeNullPointerException() { + try { + MODEL_TABLE_DEFINITION.toBuilder().setType(null).build(); + fail(); + } catch (NullPointerException ex) { + assertNotNull(ex.getMessage()); + } + } + + @Test + public void testToBuilderIncomplete() { + ModelTableDefinition modelTableDefinition = ModelTableDefinition.newBuilder().build(); + assertEquals(modelTableDefinition, modelTableDefinition.toBuilder().build()); + } + + @Test + public void testToAndFromPb() { + assertEquals( + MODEL_TABLE_DEFINITION, 
ModelTableDefinition.fromPb(MODEL_TABLE_DEFINITION.toPb())); + } + + @Test + public void testBuilder() { + assertEquals(MODEL_TABLE_DEFINITION.getSchema(), TABLE_SCHEMA); + assertEquals(MODEL_TABLE_DEFINITION.getType(), TableDefinition.Type.MODEL); + assertEquals(MODEL_TABLE_DEFINITION.getLocation(), LOCATION); + assertEquals(MODEL_TABLE_DEFINITION.getNumBytes(), NUM_BYTES); + } + + @Test + public void testEquals() { + assertEquals(MODEL_TABLE_DEFINITION, MODEL_TABLE_DEFINITION); + } + + @Test + public void testNotEquals() { + assertNotEquals(MODEL_TABLE_DEFINITION, LOCATION); + } + + private void compareModelTableDefinition( + ModelTableDefinition expected, ModelTableDefinition value) { + assertEquals(expected, value); + assertEquals(expected.getSchema(), value.getSchema()); + assertEquals(expected.getType(), value.getType()); + assertEquals(expected.getLocation(), value.getLocation()); + assertEquals(expected.getNumBytes(), value.getNumBytes()); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java new file mode 100644 index 000000000..2136b2dab --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java @@ -0,0 +1,60 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +import com.google.common.collect.ImmutableList; +import java.util.List; +import org.junit.Test; + +public class PolicyTagsTest { + + private static final List POLICIES = ImmutableList.of("test/policy1", "test/policy2"); + private static final PolicyTags POLICY_TAGS = PolicyTags.newBuilder().setNames(POLICIES).build(); + + @Test + public void testToBuilder() { + comparePolicyTags(POLICY_TAGS, POLICY_TAGS.toBuilder().build()); + } + + @Test + public void testToBuilderIncomplete() { + PolicyTags policyTags = PolicyTags.newBuilder().build(); + assertEquals(policyTags, policyTags); + assertEquals(policyTags, policyTags.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(POLICIES, POLICY_TAGS.getNames()); + assertNotEquals(POLICY_TAGS, POLICIES); + } + + @Test + public void testFromAndPb() { + assertEquals(POLICY_TAGS, PolicyTags.fromPb(POLICY_TAGS.toPb())); + } + + private void comparePolicyTags(PolicyTags expected, PolicyTags value) { + assertEquals(expected, value); + assertEquals(expected.getNames(), value.getNames()); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java index 406d7edfa..0e892b6e1 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java @@ -80,6 +80,7 @@ public class QueryJobConfigurationTest { private static final boolean FLATTEN_RESULTS = true; private static final 
boolean USE_LEGACY_SQL = true; private static final Integer MAX_BILLING_TIER = 123; + private static final Long MAX_BYTES_BILL = 12345L; private static final List SCHEMA_UPDATE_OPTIONS = ImmutableList.of(SchemaUpdateOption.ALLOW_FIELD_RELAXATION); private static final List USER_DEFINED_FUNCTIONS = @@ -96,6 +97,14 @@ public class QueryJobConfigurationTest { RangePartitioning.Range.newBuilder().setStart(1L).setInterval(2L).setEnd(10L).build(); private static final RangePartitioning RANGE_PARTITIONING = RangePartitioning.newBuilder().setField("IntegerField").setRange(RANGE).build(); + private static final QueryParameterValue STRING_PARAMETER = + QueryParameterValue.string("stringValue"); + private static final QueryParameterValue TIMESTAMP_PARAMETER = + QueryParameterValue.timestamp("2014-01-01 07:00:00.000000+00:00"); + private static final List POSITIONAL_PARAMETER = + ImmutableList.of(STRING_PARAMETER, TIMESTAMP_PARAMETER); + private static final Map NAME_PARAMETER = + ImmutableMap.of("string", STRING_PARAMETER, "timestamp", TIMESTAMP_PARAMETER); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -111,6 +120,7 @@ public class QueryJobConfigurationTest { .setDryRun(true) .setUseLegacySql(USE_LEGACY_SQL) .setMaximumBillingTier(MAX_BILLING_TIER) + .setMaximumBytesBilled(MAX_BYTES_BILL) .setSchemaUpdateOptions(SCHEMA_UPDATE_OPTIONS) .setDestinationEncryptionConfiguration(JOB_ENCRYPTION_CONFIGURATION) .setTimePartitioning(TIME_PARTITIONING) @@ -119,6 +129,19 @@ public class QueryJobConfigurationTest { .setLabels(LABELS) .setRangePartitioning(RANGE_PARTITIONING) .setConnectionProperties(CONNECTION_PROPERTIES) + .setPositionalParameters(POSITIONAL_PARAMETER) + .build(); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_ADD_POSITIONAL_PARAMETER = + QUERY_JOB_CONFIGURATION + .toBuilder() + .setPositionalParameters(ImmutableList.of()) + 
.addPositionalParameter(STRING_PARAMETER) + .build(); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_SET_NAME_PARAMETER = + QUERY_JOB_CONFIGURATION + .toBuilder() + .setPositionalParameters(ImmutableList.of()) + .setNamedParameters(NAME_PARAMETER) .build(); @Test @@ -154,6 +177,8 @@ public void testToPbAndFromPb() { assertNotNull(QUERY_JOB_CONFIGURATION.getLabels()); assertNotNull(QUERY_JOB_CONFIGURATION.getRangePartitioning()); assertNotNull(QUERY_JOB_CONFIGURATION.getConnectionProperties()); + assertNotNull(QUERY_JOB_CONFIGURATION.getPositionalParameters()); + assertNotNull(QUERY_JOB_CONFIGURATION.getNamedParameters()); compareQueryJobConfiguration( QUERY_JOB_CONFIGURATION, QueryJobConfiguration.fromPb(QUERY_JOB_CONFIGURATION.toPb())); QueryJobConfiguration job = QueryJobConfiguration.of(QUERY); @@ -184,6 +209,20 @@ public void testGetType() { assertEquals(JobConfiguration.Type.QUERY, QUERY_JOB_CONFIGURATION.getType()); } + @Test + public void testPositionalParameter() { + compareQueryJobConfiguration( + QUERY_JOB_CONFIGURATION_ADD_POSITIONAL_PARAMETER, + QUERY_JOB_CONFIGURATION_ADD_POSITIONAL_PARAMETER.toBuilder().build()); + } + + @Test + public void testNamedParameter() { + compareQueryJobConfiguration( + QUERY_JOB_CONFIGURATION_SET_NAME_PARAMETER, + QUERY_JOB_CONFIGURATION_SET_NAME_PARAMETER.toBuilder().build()); + } + private void compareQueryJobConfiguration( QueryJobConfiguration expected, QueryJobConfiguration value) { assertEquals(expected, value); @@ -203,6 +242,7 @@ private void compareQueryJobConfiguration( assertEquals(expected.getWriteDisposition(), value.getWriteDisposition()); assertEquals(expected.useLegacySql(), value.useLegacySql()); assertEquals(expected.getMaximumBillingTier(), value.getMaximumBillingTier()); + assertEquals(expected.getMaximumBytesBilled(), value.getMaximumBytesBilled()); assertEquals(expected.getSchemaUpdateOptions(), value.getSchemaUpdateOptions()); assertEquals( 
expected.getDestinationEncryptionConfiguration(), @@ -213,5 +253,7 @@ private void compareQueryJobConfiguration( assertEquals(expected.getLabels(), value.getLabels()); assertEquals(expected.getRangePartitioning(), value.getRangePartitioning()); assertEquals(expected.getConnectionProperties(), value.getConnectionProperties()); + assertEquals(expected.getPositionalParameters(), value.getPositionalParameters()); + assertEquals(expected.getNamedParameters(), value.getNamedParameters()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java index 930a29c40..09421565c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java @@ -30,6 +30,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import org.junit.Assert; import org.junit.Test; import org.threeten.bp.Instant; import org.threeten.bp.ZoneOffset; @@ -58,6 +59,34 @@ public class QueryParameterValueTest { .toFormatter() .withZone(ZoneOffset.UTC); + private static final QueryParameterValue QUERY_PARAMETER_VALUE = + QueryParameterValue.newBuilder() + .setType(StandardSQLTypeName.STRING) + .setValue("test-string") + .build(); + + @Test + public void testBuilder() { + QueryParameterValue value = QUERY_PARAMETER_VALUE.toBuilder().build(); + assertThat(value).isEqualTo(QUERY_PARAMETER_VALUE); + assertThat(value.getType()).isEqualTo(StandardSQLTypeName.STRING); + assertThat(value.getValue()).isEqualTo("test-string"); + assertThat(value.toString()).isEqualTo(QUERY_PARAMETER_VALUE.toString()); + assertThat(value.hashCode()).isEqualTo(QUERY_PARAMETER_VALUE.hashCode()); + assertThat(value.equals(value)).isTrue(); + assertThat(QUERY_PARAMETER_VALUE).isNotEqualTo(StandardSQLTypeName.STRING); + } + + @Test + public void 
testTypeNullPointerException() { + try { + QUERY_PARAMETER_VALUE.toBuilder().setType(null).build(); + Assert.fail(); + } catch (NullPointerException ex) { + assertThat(ex).isNotNull(); + } + } + @Test public void testBool() { QueryParameterValue value = QueryParameterValue.bool(true); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java index 69edb3988..bc7d6083b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import com.google.api.services.bigquery.model.ExplainQueryStep; import com.google.cloud.bigquery.QueryStage.QueryStep; @@ -154,6 +155,12 @@ public void testEquals() { compareQueryStage(QUERY_STAGE, QUERY_STAGE); } + @Test + public void testNotEquals() { + assertNotEquals(QUERY_STAGE, QUERY_STEP1); + assertNotEquals(QUERY_STEP1, QUERY_STAGE); + } + private void compareQueryStage(QueryStage expected, QueryStage value) { assertEquals(expected, value); assertEquals(expected.getCompletedParallelInputs(), value.getCompletedParallelInputs()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java index d167858fc..ce613193c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java @@ -128,5 +128,6 @@ public void compareRoutineInfo(RoutineInfo expected, RoutineInfo value) { assertEquals(expected.getImportedLibraries(), value.getImportedLibraries()); assertEquals(expected.getBody(), value.getBody()); 
assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java index 6742763cb..393b7fbc6 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java @@ -17,6 +17,8 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -95,6 +97,17 @@ public void testBuilder() { assertEquals(STREAMING_BUFFER, TABLE_DEFINITION.getStreamingBuffer()); assertEquals(TIME_PARTITIONING, TABLE_DEFINITION.getTimePartitioning()); assertEquals(CLUSTERING, TABLE_DEFINITION.getClustering()); + assertNotEquals(TABLE_DEFINITION, TableDefinition.Type.TABLE); + } + + @Test + public void testTypeNullPointerException() { + try { + TABLE_DEFINITION.toBuilder().setType(null).build(); + fail(); + } catch (NullPointerException ex) { + assertNotNull(ex.getMessage()); + } } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java index 860230f30..b93ed770b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java @@ -32,6 +32,7 @@ import com.google.cloud.bigquery.InsertAllRequest.RowToInsert; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import java.math.BigInteger; import 
java.util.List; import org.junit.Before; import org.junit.Rule; @@ -51,8 +52,14 @@ public class TableTest { private static final Long CREATION_TIME = 10L; private static final Long EXPIRATION_TIME = 100L; private static final Long LAST_MODIFIED_TIME = 20L; + private static final Long NUM_BYTES = 42L; + private static final Long NUM_LONG_TERM_BYTES = 21L; + private static final Long NUM_ROWS = 43L; private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); private static final TableId TABLE_ID2 = TableId.of("dataset", "table2"); + private static final Boolean REQUIRE_PARTITION_FILTER = true; + private static final EncryptionConfiguration ENCRYPTION_CONFIGURATION = + EncryptionConfiguration.newBuilder().setKmsKeyName("KMS_KEY_1").build(); private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = CopyJobConfiguration.of(TABLE_ID2, TABLE_ID1); private static final JobInfo COPY_JOB_INFO = JobInfo.of(COPY_JOB_CONFIGURATION); @@ -118,6 +125,10 @@ public void testBuilder() { .setGeneratedId(GENERATED_ID) .setLastModifiedTime(LAST_MODIFIED_TIME) .setSelfLink(SELF_LINK) + .setNumBytes(NUM_BYTES) + .setNumLongTermBytes(NUM_LONG_TERM_BYTES) + .setNumRows(BigInteger.valueOf(NUM_ROWS)) + .setRequirePartitionFilter(REQUIRE_PARTITION_FILTER) .build(); assertEquals(TABLE_ID1, builtTable.getTableId()); assertEquals(CREATION_TIME, builtTable.getCreationTime()); @@ -129,6 +140,10 @@ public void testBuilder() { assertEquals(LAST_MODIFIED_TIME, builtTable.getLastModifiedTime()); assertEquals(TABLE_DEFINITION, builtTable.getDefinition()); assertEquals(SELF_LINK, builtTable.getSelfLink()); + assertEquals(NUM_BYTES, builtTable.getNumBytes()); + assertEquals(NUM_LONG_TERM_BYTES, builtTable.getNumLongTermBytes()); + assertEquals(BigInteger.valueOf(NUM_ROWS), builtTable.getNumRows()); + assertEquals(REQUIRE_PARTITION_FILTER, builtTable.getRequirePartitionFilter()); assertSame(bigquery, builtTable.getBigQuery()); } diff --git 
a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java index 10b303f21..ee7f5dde6 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -68,6 +69,8 @@ public void testBuilder() { assertEquals(100, (long) partitioning.getExpirationMs()); assertNull(partitioning.getRequirePartitionFilter()); assertNull(partitioning.getField()); + compareTimePartitioning(TIME_PARTITIONING_HOUR, TIME_PARTITIONING_HOUR.toBuilder().build()); + assertNotEquals(TIME_PARTITIONING_DAY, TYPE_DAY); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java index 0d39fa25b..1d888f00d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java @@ -16,6 +16,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import org.junit.Test; @@ -36,6 +37,12 @@ public class TimelineSampleTest { .setSlotMillis(SLOT_MILLIS) .build(); + @Test + public void testToBuilder() { + TimelineSample sample = TIMELINE_SAMPLE.toBuilder().setPendingUnits(15L).build(); + assertEquals(15L, sample.getPendingUnits().longValue()); + } + @Test public void testTimelineSampleBuilder() { assertEquals(ELAPSED_MS, TIMELINE_SAMPLE.getElapsedMs()); @@ -48,5 +55,8 @@ public void 
testTimelineSampleBuilder() { @Test public void TestEquals() { assertEquals(TIMELINE_SAMPLE, TIMELINE_SAMPLE); + assertNotEquals(TIMELINE_SAMPLE, SLOT_MILLIS); + assertEquals(TIMELINE_SAMPLE.toString(), TIMELINE_SAMPLE.toString()); + assertEquals(TIMELINE_SAMPLE.hashCode(), TIMELINE_SAMPLE.hashCode()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java index fa4f82cd0..d60c7be2b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java @@ -18,8 +18,11 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import com.google.common.collect.ImmutableList; import java.util.List; @@ -31,7 +34,7 @@ public class ViewDefinitionTest { private static final List USER_DEFINED_FUNCTIONS = ImmutableList.of(UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); private static final ViewDefinition VIEW_DEFINITION = - ViewDefinition.newBuilder(VIEW_QUERY, USER_DEFINED_FUNCTIONS).build(); + ViewDefinition.newBuilder(VIEW_QUERY, USER_DEFINED_FUNCTIONS).setSchema(Schema.of()).build(); @Test public void testToBuilder() { @@ -43,6 +46,17 @@ public void testToBuilder() { viewDefinition = viewDefinition.toBuilder().setUseLegacySql(true).build(); assertTrue(viewDefinition.useLegacySql()); + assertNotEquals(VIEW_DEFINITION, VIEW_QUERY); + } + + @Test + public void testTypeNullPointerException() { + try { + VIEW_DEFINITION.toBuilder().setType(null).build(); + fail(); + } catch (NullPointerException ex) { + assertNotNull(ex.getMessage()); + } } @Test 
diff --git a/pom.xml b/pom.xml index 76bf1fef7..8fdc5aff1 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 1.116.0 + 1.116.1 BigQuery Parent https://github.com/googleapis/java-bigquery @@ -63,8 +63,6 @@ UTF-8 github google-cloud-bigquery-parent - 1.93.4 - 1.3.2 v2-rev20200429-1.30.9 @@ -74,7 +72,7 @@ com.google.cloud google-cloud-shared-dependencies - 0.4.0 + 0.7.0 pom import @@ -88,9 +86,28 @@ com.google.cloud google-cloud-bigquery - 1.116.0 + 1.116.1 + + com.google.apis + google-api-services-bigquery + ${google-api-services-bigquery.version} + + + + + junit + junit + 4.13 + test + + + com.google.truth + truth + 1.0.1 + test + org.mockito mockito-core @@ -103,11 +120,6 @@ 1.108.0 test - - com.google.apis - google-api-services-bigquery - ${google-api-services-bigquery.version} - @@ -117,9 +129,6 @@ org.apache.maven.plugins maven-dependency-plugin - - org.objenesis:objenesis - @@ -207,5 +216,4 @@ - diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index 75e9d54cb..0e2c300b8 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -45,7 +45,7 @@ com.google.cloud google-cloud-bigquery - 1.115.1 + 1.116.0 diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 202d493ea..f84fbf44b 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -44,7 +44,7 @@ com.google.cloud google-cloud-bigquery - 1.116.0 + 1.116.1 diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 8c070dfb0..6b6788b2e 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -44,7 +44,7 @@ com.google.cloud libraries-bom - 5.4.0 + 5.5.0 pom import diff --git a/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java b/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java index c529272fc..932c27d69 100644 --- 
a/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java +++ b/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java @@ -32,6 +32,8 @@ import com.google.cloud.bigquery.Schema; import com.google.cloud.bigquery.TableId; import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; import java.util.UUID; public class AddColumnLoadAppend { @@ -41,11 +43,28 @@ public static void runAddColumnLoadAppend() throws Exception { String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; String sourceUri = "/path/to/file.csv"; - addColumnLoadAppend(datasetName, tableName, sourceUri); + // Add a new column to a BigQuery table while appending rows via a load job. + // 'REQUIRED' fields cannot be added to an existing schema, so the additional column must be + // 'NULLABLE'. + Schema schema = + Schema.of( + Field.newBuilder("name", LegacySQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + + List fields = schema.getFields(); + // Adding below additional column during the load job + Field newField = Field.newBuilder("post_abbr", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(); + List newFields = new ArrayList<>(fields); + newFields.add(newField); + Schema newSchema = Schema.of(newFields); + addColumnLoadAppend(datasetName, tableName, sourceUri, newSchema); } - public static void addColumnLoadAppend(String datasetName, String tableName, String sourceUri) - throws Exception { + public static void addColumnLoadAppend(String datasetName, String tableName, + String sourceUri, Schema newSchema) throws Exception { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. 
@@ -53,19 +72,6 @@ public static void addColumnLoadAppend(String datasetName, String tableName, Str TableId tableId = TableId.of(datasetName, tableName); - // Add a new column to a BigQuery table while appending rows via a load job. - // 'REQUIRED' fields cannot be added to an existing schema, so the additional column must be - // 'NULLABLE'. - Schema newSchema = - Schema.of( - Field.newBuilder("name", LegacySQLTypeName.STRING) - .setMode(Field.Mode.REQUIRED) - .build(), - // Adding below additional column during the load job - Field.newBuilder("post_abbr", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build()); - LoadJobConfiguration loadJobConfig = LoadJobConfiguration.builder(tableId, sourceUri) .setFormatOptions(FormatOptions.csv()) diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java b/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java index 1fc265e17..27a0e144f 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java @@ -29,16 +29,25 @@ import com.google.cloud.bigquery.TableInfo; import com.google.cloud.bigquery.TimePartitioning; import com.google.common.collect.ImmutableList; +import java.util.List; public class CreateClusteredTable { public static void runCreateClusteredTable() { // TODO(developer): Replace these variables before running the sample. 
String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; - createClusteredTable(datasetName, tableName); + Schema schema = + Schema.of( + Field.of("name", StandardSQLTypeName.STRING), + Field.of("post_abbr", StandardSQLTypeName.STRING), + Field.of("date", StandardSQLTypeName.DATE)); + createClusteredTable(datasetName, tableName, + schema, ImmutableList.of("name", "post_abbr")); } - public static void createClusteredTable(String datasetName, String tableName) { + public static void createClusteredTable( + String datasetName, String tableName, + Schema schema, List clusteringFields) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. @@ -47,15 +56,10 @@ public static void createClusteredTable(String datasetName, String tableName) { TableId tableId = TableId.of(datasetName, tableName); TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY); - - Schema schema = - Schema.of( - Field.of("name", StandardSQLTypeName.STRING), - Field.of("post_abbr", StandardSQLTypeName.STRING), - Field.of("date", StandardSQLTypeName.DATE)); - + // Clustering fields must be fields declared in the table schema. + // As of now, the table must also be partitioned for clustering to apply. 
Clustering clustering = - Clustering.newBuilder().setFields(ImmutableList.of("name", "post_abbr")).build(); + Clustering.newBuilder().setFields(clusteringFields).build(); StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder() diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java b/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java index 79a15a801..62a51c669 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java @@ -34,10 +34,15 @@ public static void runCreatePartitionedTable() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; - createPartitionedTable(datasetName, tableName); + Schema schema = + Schema.of( + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("booleanField", StandardSQLTypeName.BOOL), + Field.of("dateField", StandardSQLTypeName.DATE)); + createPartitionedTable(datasetName, tableName, schema); } - public static void createPartitionedTable(String datasetName, String tableName) { + public static void createPartitionedTable(String datasetName, String tableName, Schema schema) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. 
@@ -47,12 +52,6 @@ public static void createPartitionedTable(String datasetName, String tableName) TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY); - Schema schema = - Schema.of( - Field.of("stringField", StandardSQLTypeName.STRING), - Field.of("booleanField", StandardSQLTypeName.BOOL), - Field.of("dateField", StandardSQLTypeName.DATE)); - StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder() .setSchema(schema) diff --git a/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java b/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java index 4d657c690..0f05cb20d 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java +++ b/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java @@ -35,12 +35,19 @@ public static void runExtractTableToJson() { String tableName = "shakespeare"; String bucketName = "my-bucket"; String destinationUri = "gs://" + bucketName + "/path/to/file"; - extractTableToJson(projectId, datasetName, tableName, destinationUri); + // For more information on export formats available see: + // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types + // For more information on Job see: + // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html + + String dataFormat = "CSV"; + extractTableToJson(projectId, datasetName, tableName, destinationUri, dataFormat); } // Exports datasetName:tableName to destinationUri as raw CSV public static void extractTableToJson( - String projectId, String datasetName, String tableName, String destinationUri) { + String projectId, String datasetName, String tableName, String destinationUri, + String dataFormat) { try { // Initialize client that will be used to send requests. 
This client only needs to be created // once, and can be reused for multiple requests. @@ -49,11 +56,7 @@ public static void extractTableToJson( TableId tableId = TableId.of(projectId, datasetName, tableName); Table table = bigquery.getTable(tableId); - // For more information on export formats available see: - // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types - // For more information on Job see: - // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html - Job job = table.extract("CSV", destinationUri); + Job job = table.extract(dataFormat, destinationUri); // Blocks until this job completes its execution, either failing or succeeding. Job completedJob = @@ -68,7 +71,7 @@ public static void extractTableToJson( "BigQuery was unable to extract due to an error: \n" + job.getStatus().getError()); return; } - System.out.println("Table export successful. Check in GCS bucket for the CSV file."); + System.out.println("Table export successful. Check in GCS bucket for the " + dataFormat + " file."); } catch (BigQueryException | InterruptedException e) { System.out.println("Table extraction job was interrupted. 
\n" + e.toString()); } diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java b/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java index e6e2e3915..3e580ec7d 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java +++ b/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java @@ -41,10 +41,11 @@ public static void runLoadLocalFile() throws IOException, InterruptedException { String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv"); - loadLocalFile(datasetName, tableName, csvPath); + loadLocalFile(datasetName, tableName, csvPath, FormatOptions.csv()); } - public static void loadLocalFile(String datasetName, String tableName, Path csvPath) + public static void loadLocalFile(String datasetName, String tableName, Path csvPath, + FormatOptions formatOptions) throws IOException, InterruptedException { try { // Initialize client that will be used to send requests. This client only needs to be created @@ -54,7 +55,7 @@ public static void loadLocalFile(String datasetName, String tableName, Path csvP WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId) - .setFormatOptions(FormatOptions.csv()) + .setFormatOptions(formatOptions) .build(); // The location and JobName must be specified; other fields can be auto-detected. diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java b/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java index 580b6183f..56a6b6b04 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java +++ b/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java @@ -32,17 +32,18 @@ public class LoadParquet { public static void runLoadParquet() { // TODO(developer): Replace these variables before running the sample. 
String datasetName = "MY_DATASET_NAME"; - loadParquet(datasetName); + String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; + String tableName = "us_states"; + loadParquet(datasetName, tableName, sourceUri); } - public static void loadParquet(String datasetName) { + public static void loadParquet(String datasetName, String tableName, String sourceUri) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; - TableId tableId = TableId.of(datasetName, "us_states"); + TableId tableId = TableId.of(datasetName, tableName); LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri) diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java b/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java index 80f815046..c8ee67c67 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java +++ b/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java @@ -34,10 +34,13 @@ public class LoadParquetReplaceTable { public static void runLoadParquetReplaceTable() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; - loadParquetReplaceTable(datasetName); + String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; + String tableName = "us_states"; + loadParquetReplaceTable(datasetName, tableName, sourceUri); } - public static void loadParquetReplaceTable(String datasetName) { + public static void loadParquetReplaceTable(String datasetName, String tableName, + String sourceUri) { try { // Initialize client that will be used to send requests. 
This client only needs to be created // once, and can be reused for multiple requests. @@ -46,8 +49,7 @@ public static void loadParquetReplaceTable(String datasetName) { // Imports a GCS file into a table and overwrites table data if table already exists. // This sample loads CSV file at: // https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.csv - String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; - TableId tableId = TableId.of(datasetName, "us_states"); + TableId tableId = TableId.of(datasetName, tableName); // For more information on LoadJobConfiguration see: // https://googleapis.dev/java/google-cloud-clients/latest/com/google/cloud/bigquery/LoadJobConfiguration.Builder.html diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java b/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java index 7d4987d11..20f4104f9 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java +++ b/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java @@ -31,6 +31,7 @@ import com.google.cloud.bigquery.TableId; import com.google.cloud.bigquery.TimePartitioning; import com.google.common.collect.ImmutableList; +import java.util.List; public class LoadTableClustered { @@ -39,10 +40,17 @@ public static void runLoadTableClustered() throws Exception { String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; String sourceUri = "/path/to/file.csv"; - loadTableClustered(datasetName, tableName, sourceUri); + Schema schema = + Schema.of( + Field.of("name", StandardSQLTypeName.STRING), + Field.of("post_abbr", StandardSQLTypeName.STRING), + Field.of("date", StandardSQLTypeName.DATE)); + loadTableClustered(datasetName, tableName, sourceUri, + schema, ImmutableList.of("name", "post_abbr")); } - public static void 
loadTableClustered(String datasetName, String tableName, String sourceUri) + public static void loadTableClustered(String datasetName, String tableName, String sourceUri, + Schema schema, List clusteringFields) throws Exception { try { // Initialize client that will be used to send requests. This client only needs to be created @@ -51,16 +59,11 @@ public static void loadTableClustered(String datasetName, String tableName, Stri TableId tableId = TableId.of(datasetName, tableName); - Schema schema = - Schema.of( - Field.of("name", StandardSQLTypeName.STRING), - Field.of("post_abbr", StandardSQLTypeName.STRING), - Field.of("date", StandardSQLTypeName.DATE)); - TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY); - + // Clustering fields will be consisted of fields mentioned in the schema. + // As of now, another condition is that the table should be partitioned. Clustering clustering = - Clustering.newBuilder().setFields(ImmutableList.of("name", "post_abbr")).build(); + Clustering.newBuilder().setFields(clusteringFields).build(); LoadJobConfiguration loadJobConfig = LoadJobConfiguration.builder(tableId, sourceUri) diff --git a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java index 4163a5979..b601c859b 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java +++ b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java @@ -34,10 +34,16 @@ public static void runTableInsertRows() { // TODO(developer): Replace these variables before running the sample. 
String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; - tableInsertRows(datasetName, tableName); + // Create a row to insert + Map rowContent = new HashMap<>(); + rowContent.put("booleanField", true); + rowContent.put("numericField", "3.14"); + + tableInsertRows(datasetName, tableName, rowContent); } - public static void tableInsertRows(String datasetName, String tableName) { + public static void tableInsertRows(String datasetName, String tableName, + Map rowContent) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. @@ -46,11 +52,6 @@ public static void tableInsertRows(String datasetName, String tableName) { // Get table TableId tableId = TableId.of(datasetName, tableName); - // Create a row to insert - Map rowContent = new HashMap<>(); - rowContent.put("booleanField", true); - rowContent.put("numericField", "3.14"); - // Inserts rowContent into datasetName:tableId. InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow(rowContent).build()); diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java index 5719dcffa..5f7cc27fa 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java +++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java @@ -31,10 +31,15 @@ public class UpdateDatasetAccess { public static void runUpdateDatasetAccess() { // TODO(developer): Replace these variables before running the sample. 
String datasetName = "MY_DATASET_NAME"; - updateDatasetAccess(datasetName); + // Create a new ACL granting the READER role to "sample.bigquery.dev@gmail.com" + // For more information on the types of ACLs available see: + // https://cloud.google.com/storage/docs/access-control/lists + Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER); + + updateDatasetAccess(datasetName, newEntry); } - public static void updateDatasetAccess(String datasetName) { + public static void updateDatasetAccess(String datasetName, Acl newEntry) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. @@ -42,11 +47,6 @@ public static void updateDatasetAccess(String datasetName) { Dataset dataset = bigquery.getDataset(datasetName); - // Create a new ACL granting the READER role to "sample.bigquery.dev@gmail.com" - // For more information on the types of ACLs available see: - // https://cloud.google.com/storage/docs/access-control/lists - Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER); - // Get a copy of the ACLs list from the dataset and append the new entry ArrayList acls = new ArrayList<>(dataset.getAcl()); acls.add(newEntry); diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java index bea27624f..9b746a90d 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java +++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java @@ -28,18 +28,17 @@ public class UpdateDatasetExpiration { public static void runUpdateDatasetExpiration() { // TODO(developer): Replace these variables before running the sample. 
String datasetName = "MY_DATASET_NAME"; - updateDatasetExpiration(datasetName); + // Update dataset expiration to one day + Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); + updateDatasetExpiration(datasetName, newExpiration); } - public static void updateDatasetExpiration(String datasetName) { + public static void updateDatasetExpiration(String datasetName, Long newExpiration) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - // Update dataset expiration to one day - Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); - Dataset dataset = bigquery.getDataset(datasetName); bigquery.update(dataset.toBuilder().setDefaultTableLifetime(newExpiration).build()); System.out.println("Dataset description updated successfully to " + newExpiration); diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java index a5d9abedb..cbc9a1940 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java +++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java @@ -29,18 +29,18 @@ public static void runUpdateTableExpiration() { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; - updateTableExpiration(datasetName, tableName); + // Update table expiration to one day. 
+ Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); + updateTableExpiration(datasetName, tableName, newExpiration); } - public static void updateTableExpiration(String datasetName, String tableName) { + public static void updateTableExpiration(String datasetName, String tableName, + Long newExpiration) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - // Update table expiration to one day - Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); - Table table = bigquery.getTable(datasetName, tableName); bigquery.update(table.toBuilder().setExpirationTime(newExpiration).build()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java b/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java index 12a9acc38..afbe13c94 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/AddColumnLoadAppendIT.java @@ -24,6 +24,8 @@ import com.google.cloud.bigquery.Schema; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.util.ArrayList; +import java.util.List; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -71,7 +73,16 @@ public void testAddColumnLoadAppend() throws Exception { CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, originalSchema); - AddColumnLoadAppend.addColumnLoadAppend(BIGQUERY_DATASET_NAME, tableName, sourceUri); + List fields = originalSchema.getFields(); + // Adding below additional column during the load job + Field newField = Field.newBuilder("post_abbr", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(); + List newFields = new ArrayList<>(fields); + newFields.add(newField); + Schema newSchema = 
Schema.of(newFields); + + AddColumnLoadAppend.addColumnLoadAppend(BIGQUERY_DATASET_NAME, tableName, sourceUri, newSchema); assertThat(bout.toString()).contains("Column successfully added during load append job"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java index 4f8c29942..37ddd226b 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateClusteredTableIT.java @@ -19,6 +19,10 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.common.collect.ImmutableList; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import org.junit.After; @@ -58,8 +62,14 @@ public void tearDown() { @Test public void createClusteredTable() { String tableName = "MY_CLUSTERED_TABLE"; + Schema schema = + Schema.of( + Field.of("name", StandardSQLTypeName.STRING), + Field.of("post_abbr", StandardSQLTypeName.STRING), + Field.of("date", StandardSQLTypeName.DATE)); - CreateClusteredTable.createClusteredTable(BIGQUERY_DATASET_NAME, tableName); + CreateClusteredTable.createClusteredTable(BIGQUERY_DATASET_NAME, tableName, + schema, ImmutableList.of("name", "post_abbr")); assertThat(bout.toString()).contains("Clustered table created successfully"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java index cbe612b32..32000c5ce 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CreatePartitionedTableIT.java @@ -19,6 +19,9 @@ 
import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import org.junit.After; @@ -58,8 +61,13 @@ public void tearDown() { @Test public void testCreatePartitionedTable() { String tableName = "MY_PARTITIONED_TABLE"; + Schema schema = + Schema.of( + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("booleanField", StandardSQLTypeName.BOOL), + Field.of("dateField", StandardSQLTypeName.DATE)); - CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName); + CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName, schema); assertThat(bout.toString()).contains("Partitioned table created successfully"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java index e018384ed..fd28bc1cb 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java @@ -61,10 +61,12 @@ public void testExtractTableToJson() { String datasetName = "samples"; String tableName = "shakespeare"; String destinationUri = "gs://" + GCS_BUCKET + "/extractTest.csv"; + String dataFormat = "CSV"; // Extract table content to GCS in CSV format - ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri); + ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri, + dataFormat); assertThat(bout.toString()) - .contains("Table export successful. Check in GCS bucket for the CSV file."); + .contains("Table export successful. 
Check in GCS bucket for the " + dataFormat + " file."); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java index a1a8d5a6f..aefa2adba 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileIT.java @@ -20,6 +20,7 @@ import static junit.framework.TestCase.assertNotNull; import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FormatOptions; import com.google.cloud.bigquery.LegacySQLTypeName; import com.google.cloud.bigquery.Schema; import java.io.ByteArrayOutputStream; @@ -76,7 +77,7 @@ public void loadLocalFile() throws IOException, InterruptedException { Path csvPath = FileSystems.getDefault().getPath("src/test/resources", "bigquery_noheader.csv"); - LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath); + LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath, FormatOptions.csv()); assertThat(bout.toString()).contains("Successfully loaded"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java index a60fbea3b..cc3ff5d72 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetIT.java @@ -57,7 +57,9 @@ public void tearDown() { @Test public void loadParquet() { - LoadParquet.loadParquet(BIGQUERY_DATASET_NAME); + String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; + String tableName = "us_states"; + LoadParquet.loadParquet(BIGQUERY_DATASET_NAME, tableName, sourceUri); assertThat(bout.toString()).contains("GCS parquet loaded successfully."); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java 
b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java index 1d2bd0550..03222cd6d 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadParquetReplaceTableIT.java @@ -57,7 +57,9 @@ public void tearDown() { @Test public void testLoadParquetReplaceTable() { - LoadParquetReplaceTable.loadParquetReplaceTable(BIGQUERY_DATASET_NAME); + String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet"; + String tableName = "us_states"; + LoadParquetReplaceTable.loadParquetReplaceTable(BIGQUERY_DATASET_NAME, tableName, sourceUri); assertThat(bout.toString()).contains("GCS parquet overwrote existing table successfully."); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java index 968235e4b..98ef57afd 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java @@ -19,6 +19,10 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.common.collect.ImmutableList; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import org.junit.After; @@ -61,7 +65,14 @@ public void loadTableClustered() throws Exception { String tableName = "LOAD_CLUSTERED_TABLE_TEST"; - LoadTableClustered.loadTableClustered(BIGQUERY_DATASET_NAME, tableName, sourceUri); + Schema schema = + Schema.of( + Field.of("name", StandardSQLTypeName.STRING), + Field.of("post_abbr", StandardSQLTypeName.STRING), + Field.of("date", StandardSQLTypeName.DATE)); + + 
LoadTableClustered.loadTableClustered(BIGQUERY_DATASET_NAME, tableName, sourceUri, + schema, ImmutableList.of("name", "post_abbr")); assertThat(bout.toString()) .contains("Data successfully loaded into clustered table during load job"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java b/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java index 311cd9bd4..527907dd9 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java @@ -24,6 +24,8 @@ import com.google.cloud.bigquery.Schema; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.util.HashMap; +import java.util.Map; import java.util.UUID; import org.junit.After; import org.junit.Before; @@ -70,8 +72,13 @@ public void testTableInsertRows() { // Create table in dataset for testing CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema); + // Create a row to insert + Map rowContent = new HashMap<>(); + rowContent.put("booleanField", true); + rowContent.put("numericField", "3.14"); + // Testing - TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName); + TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName, rowContent); assertThat(bout.toString()).contains("Rows successfully inserted into table"); // Clean up diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java index 60086567a..10e7cfddd 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java @@ -19,6 +19,9 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Acl; +import com.google.cloud.bigquery.Acl.Role; 
+import com.google.cloud.bigquery.Acl.User; import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; import java.io.ByteArrayOutputStream; import java.io.PrintStream; @@ -62,8 +65,9 @@ public void updateDatasetAccess() { // Create a dataset in order to modify its ACL CreateDataset.createDataset(generatedDatasetName); + Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER); // Modify dataset's ACL - UpdateDatasetAccess.updateDatasetAccess(generatedDatasetName); + UpdateDatasetAccess.updateDatasetAccess(generatedDatasetName, newEntry); assertThat(bout.toString()).contains("Dataset Access Control updated successfully"); // Clean up diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java index 7dd4f7f46..3360223e3 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetExpirationIT.java @@ -22,6 +22,7 @@ import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.util.concurrent.TimeUnit; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -62,8 +63,9 @@ public void updateDatasetExpiration() { // Create a dataset in order to modify its expiration CreateDataset.createDataset(generatedDatasetName); + Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); // Modify dataset's expiration - UpdateDatasetExpiration.updateDatasetExpiration(generatedDatasetName); + UpdateDatasetExpiration.updateDatasetExpiration(generatedDatasetName, newExpiration); assertThat(bout.toString()).contains("Dataset description updated successfully"); // Clean up diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java 
b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java index 5352762d8..73ca93c06 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java @@ -19,8 +19,13 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.util.UUID; +import java.util.concurrent.TimeUnit; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -57,9 +62,15 @@ public void tearDown() { @Test public void updateTableExpiration() { - String tableName = "update_expiration_table"; - CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, null); - UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, tableName); + String suffix = UUID.randomUUID().toString().replace('-', '_'); + String tableName = "update_expiration_table_" + suffix; + Schema schema = + Schema.of( + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("booleanField", StandardSQLTypeName.BOOL)); + CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema); + Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); + UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, tableName, newExpiration); assertThat(bout.toString()).contains("Table expiration updated successfully"); // Clean up diff --git a/synth.metadata b/synth.metadata index 9c415deb3..bdc51594c 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,14 +4,14 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/java-bigquery.git", - "sha": "69ede0ca217cfaf98b3364fb5301f1399db4e498" + "sha": 
"db4158186b99d0bed68fd70bef5918b1820e5dd1" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b48b0716a36ca069db3038da7e205c87a22ed19" + "sha": "388f7aafee3d7a067c23db6c13b7e83fb361c64a" } } ] diff --git a/versions.txt b/versions.txt index ee09eebae..110d6881d 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:1.116.0:1.116.0 \ No newline at end of file +google-cloud-bigquery:1.116.1:1.116.1 \ No newline at end of file