From a942b0755341bfbf4d2be2ea6fbdc3b85fb00285 Mon Sep 17 00:00:00 2001 From: Diego Marquez Date: Thu, 4 Dec 2025 16:46:40 -0500 Subject: [PATCH 01/36] chore: add release-please config for protobuf-4.x (#4009) * chore: add release-please config for protobuf-4.x * Change release type from java-lts to java-yoshi --- .github/release-please.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/release-please.yml b/.github/release-please.yml index f87ac1ecca..e78d84de07 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -39,6 +39,11 @@ branches: handleGHRelease: true releaseType: java-backport branch: 2.51.x + - branch: protobuf-4.x-rc + bumpMinorPreMajor: true + handleGHRelease: true + releaseType: java-yoshi + manifest: true bumpMinorPreMajor: true handleGHRelease: true releaseType: java-yoshi From 6dcc90053353422ae766e531413b3ecc65b8b155 Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Mon, 8 Dec 2025 10:32:02 -0500 Subject: [PATCH 02/36] feat: Introduce DataFormatOptions to configure the output of BigQuery data types (#4010) * feat: Create DataFormatOptions in BigQuery * feat: Add Builder class for DataFormatOptions * fix: Update existing references of useInt64Timestamp to use DataFormatOption's variant * chore: Fix lint issues * chore: Address PR feedback * chore: Add tests for useInt64Timestamp behavior * chore: Address failing tests and GCA * chore: Remove unused fromPb method --- .../google/cloud/bigquery/BigQueryImpl.java | 18 +++-- .../cloud/bigquery/BigQueryOptions.java | 65 ++++++++++++++++- .../cloud/bigquery/DataFormatOptions.java | 73 +++++++++++++++++++ .../cloud/bigquery/QueryRequestInfo.java | 5 +- .../cloud/bigquery/BigQueryOptionsTest.java | 56 ++++++++++++++ .../cloud/bigquery/QueryRequestInfoTest.java | 22 ++++-- .../cloud/bigquery/it/ITBigQueryTest.java | 8 +- 7 files changed, 230 insertions(+), 17 deletions(-) create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java index ac8fce7082..c057cdaca6 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java @@ -1731,7 +1731,10 @@ public TableDataList call() throws IOException { new PageImpl<>( new TableDataPageFetcher(tableId, schema, serviceOptions, cursor, pageOptionMap), cursor, - transformTableData(result.getRows(), schema, serviceOptions.getUseInt64Timestamps())), + transformTableData( + result.getRows(), + schema, + serviceOptions.getDataFormatOptions().useInt64Timestamp())), result.getTotalRows()); } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); @@ -2007,7 +2010,9 @@ public com.google.api.services.bigquery.model.QueryResponse call() new QueryPageFetcher(jobId, schema, getOptions(), cursor, optionMap(options)), cursor, transformTableData( - results.getRows(), schema, getOptions().getUseInt64Timestamps()))) + results.getRows(), + schema, + getOptions().getDataFormatOptions().useInt64Timestamp()))) .setJobId(jobId) .setQueryId(results.getQueryId()) .build(); @@ -2021,7 +2026,9 @@ public com.google.api.services.bigquery.model.QueryResponse call() new TableDataPageFetcher(null, schema, getOptions(), null, optionMap(options)), null, transformTableData( - results.getRows(), schema, 
getOptions().getUseInt64Timestamps()))) + results.getRows(), + schema, + getOptions().getDataFormatOptions().useInt64Timestamp()))) // Return the JobID of the successful job .setJobId( results.getJobReference() != null ? JobId.fromPb(results.getJobReference()) : null) @@ -2066,10 +2073,9 @@ && getOptions().getOpenTelemetryTracer() != null) { } try (Scope queryScope = querySpan != null ? querySpan.makeCurrent() : null) { // If all parameters passed in configuration are supported by the query() method on the - // backend, - // put on fast path + // backend, put on fast path QueryRequestInfo requestInfo = - new QueryRequestInfo(configuration, getOptions().getUseInt64Timestamps()); + new QueryRequestInfo(configuration, getOptions().getDataFormatOptions()); if (requestInfo.isFastQuerySupported(jobId)) { // Be careful when setting the projectID in JobId, if a projectID is specified in the JobId, // the job created by the query method will use that project. This may cause the query to diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java index 7adb58d3a2..10ae77930c 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import com.google.api.core.BetaApi; +import com.google.api.core.ObsoleteApi; import com.google.api.gax.retrying.ResultRetryAlgorithm; import com.google.cloud.ServiceDefaults; import com.google.cloud.ServiceOptions; @@ -26,6 +27,7 @@ import com.google.cloud.bigquery.spi.BigQueryRpcFactory; import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import com.google.cloud.http.HttpTransportOptions; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableSet; import io.opentelemetry.api.trace.Tracer; import java.util.Set; @@ -41,6 +43,7 @@ public class BigQueryOptions extends ServiceOptions { // set the option ThrowNotFound when you want to throw the exception when the value not found private boolean setThrowNotFound; private boolean useInt64Timestamps; + private DataFormatOptions dataFormatOptions; private JobCreationMode defaultJobCreationMode = JobCreationMode.JOB_CREATION_MODE_UNSPECIFIED; private boolean enableOpenTelemetryTracing; private Tracer openTelemetryTracer; @@ -70,6 +73,7 @@ public static class Builder extends ServiceOptions.Builder resultRetryAlgorithm; @@ -94,11 +98,32 @@ public Builder setLocation(String location) { return this; } + /** + * This setter is marked as Obsolete. Prefer {@link #setDataFormatOptions(DataFormatOptions)} to + * set the int64timestamp configuration instead. + * + *
<p>If the useInt64Timestamps value is set both here and via DataFormatOptions, the + * DataFormatOptions configuration value is used. + * + *
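<p>For example, the preferred builder-based configuration looks like this (a minimal,
+      * illustrative sketch; the project ID is a placeholder):
+      *
+      * <pre>{@code
+      * BigQueryOptions options =
+      *     BigQueryOptions.newBuilder()
+      *         .setProjectId("project-id")
+      *         .setDataFormatOptions(
+      *             DataFormatOptions.newBuilder().useInt64Timestamp(true).build())
+      *         .build();
+      * }</pre>
+      *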
<p>
{@code DataFormatOptions.newBuilder().setUseInt64Timestamp(...).build()} + */ + @ObsoleteApi("Use setDataFormatOptions(DataFormatOptions) instead") public Builder setUseInt64Timestamps(boolean useInt64Timestamps) { this.useInt64Timestamps = useInt64Timestamps; return this; } + /** + * Set the format options for the BigQuery data types + * + * @param dataFormatOptions Configuration of the formatting options + */ + public Builder setDataFormatOptions(DataFormatOptions dataFormatOptions) { + Preconditions.checkNotNull(dataFormatOptions, "DataFormatOptions cannot be null"); + this.dataFormatOptions = dataFormatOptions; + return this; + } + /** * Enables OpenTelemetry tracing functionality for this BigQuery instance * @@ -143,6 +168,15 @@ private BigQueryOptions(Builder builder) { } else { this.resultRetryAlgorithm = BigQueryBaseService.DEFAULT_BIGQUERY_EXCEPTION_HANDLER; } + + // If dataFormatOptions is not set, then create a new instance and set it with the + // useInt64Timestamps configured in BigQueryOptions + if (builder.dataFormatOptions == null) { + this.dataFormatOptions = + DataFormatOptions.newBuilder().useInt64Timestamp(builder.useInt64Timestamps).build(); + } else { + this.dataFormatOptions = builder.dataFormatOptions; + } } private static class BigQueryDefaults implements ServiceDefaults { @@ -191,8 +225,23 @@ public void setThrowNotFound(boolean setThrowNotFound) { this.setThrowNotFound = setThrowNotFound; } + /** + * This setter is marked as Obsolete. Prefer {@link + * Builder#setDataFormatOptions(DataFormatOptions)} to set the int64timestamp configuration + * instead. + * + *
<p>If useInt64Timestamps is set via DataFormatOptions, the value in DataFormatOptions will + * be used. Otherwise, this value will be passed through to DataFormatOptions. + * + *
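<p>A minimal, illustrative sketch of the rebuild described above (the project ID is a
+    * placeholder):
+    *
+    * <pre>{@code
+    * BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("project-id").build();
+    * options.setUseInt64Timestamps(true); // rebuilds dataFormatOptions with this value
+    * // options.getDataFormatOptions().useInt64Timestamp() now returns true
+    * }</pre>
+    *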
<p>
Alternative: {@code DataFormatOptions.newBuilder().setUseInt64Timestamp(...).build()} */ @ObsoleteApi("Use Builder#setDataFormatOptions(DataFormatOptions) instead") public void setUseInt64Timestamps(boolean useInt64Timestamps) { this.useInt64Timestamps = useInt64Timestamps; + // Because this setter exists outside the Builder, DataFormatOptions needs to be rebuilt to + // account for this setting. + this.dataFormatOptions = + dataFormatOptions.toBuilder().useInt64Timestamp(useInt64Timestamps).build(); } @Deprecated @@ -206,8 +255,22 @@ public boolean getThrowNotFound() { return setThrowNotFound; } + /** + * This getter is marked as Obsolete. Prefer {@link + * DataFormatOptions.Builder#useInt64Timestamp(boolean)} to set the int64timestamp configuration + * instead. + * + *
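<p>For example, reading the effective value (illustrative; assumes {@code options} is an
+    * already-built BigQueryOptions instance):
+    *
+    * <pre>{@code
+    * boolean lossless = options.getDataFormatOptions().useInt64Timestamp();
+    * }</pre>
+    *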
<p>
Warning: DataFormatOptions values have precedence. Use {@link + * DataFormatOptions#useInt64Timestamp()} to get `useInt64Timestamp` value used by the BigQuery + * client. + */ + @ObsoleteApi("Use getDataFormatOptions().isUseInt64Timestamp() instead") public boolean getUseInt64Timestamps() { - return useInt64Timestamps; + return dataFormatOptions.useInt64Timestamp(); + } + + public DataFormatOptions getDataFormatOptions() { + return dataFormatOptions; } public JobCreationMode getDefaultJobCreationMode() { diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java new file mode 100644 index 0000000000..beaadf32cf --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java @@ -0,0 +1,73 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; + +/** + * Google BigQuery DataFormatOptions. Configures the output format for data types returned from + * BigQuery. + */ +@AutoValue +public abstract class DataFormatOptions implements Serializable { + public enum TimestampFormatOptions { + TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED("TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED"), + FLOAT64("FLOAT64"), + INT64("INT64"), + ISO8601_STRING("ISO8601_STRING"); + + private final String format; + + TimestampFormatOptions(String format) { + this.format = format; + } + + @Override + public String toString() { + return format; + } + } + + public abstract boolean useInt64Timestamp(); + + public abstract TimestampFormatOptions timestampFormatOptions(); + + public static Builder newBuilder() { + return new AutoValue_DataFormatOptions.Builder() + .useInt64Timestamp(false) + .timestampFormatOptions(TimestampFormatOptions.TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED); + } + + public abstract Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder useInt64Timestamp(boolean useInt64Timestamp); + + public abstract Builder timestampFormatOptions(TimestampFormatOptions timestampFormatOptions); + + public abstract DataFormatOptions build(); + } + + com.google.api.services.bigquery.model.DataFormatOptions toPb() { + com.google.api.services.bigquery.model.DataFormatOptions request = + new com.google.api.services.bigquery.model.DataFormatOptions(); + request.setUseInt64Timestamp(useInt64Timestamp()); + request.setTimestampOutputFormat(timestampFormatOptions().toString()); + return request; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java index 588b7cae8c..c7033817c3 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java +++ 
b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java @@ -46,7 +46,8 @@ final class QueryRequestInfo { private final DataFormatOptions formatOptions; private final String reservation; - QueryRequestInfo(QueryJobConfiguration config, Boolean useInt64Timestamps) { + QueryRequestInfo( + QueryJobConfiguration config, com.google.cloud.bigquery.DataFormatOptions dataFormatOptions) { this.config = config; this.connectionProperties = config.getConnectionProperties(); this.defaultDataset = config.getDefaultDataset(); @@ -61,7 +62,7 @@ final class QueryRequestInfo { this.useLegacySql = config.useLegacySql(); this.useQueryCache = config.useQueryCache(); this.jobCreationMode = config.getJobCreationMode(); - this.formatOptions = new DataFormatOptions().setUseInt64Timestamp(useInt64Timestamps); + this.formatOptions = dataFormatOptions.toPb(); this.reservation = config.getReservation(); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java index 4176ec24dc..e77d7936a4 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java @@ -16,6 +16,11 @@ package com.google.cloud.bigquery; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + import com.google.cloud.TransportOptions; import org.junit.Assert; import org.junit.Test; @@ -35,4 +40,55 @@ public void testInvalidTransport() { Assert.assertNotNull(expected.getMessage()); } } + + @Test + public void dataFormatOptions_createdByDefault() { + BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("project-id").build(); + + assertNotNull(options.getDataFormatOptions()); + assertFalse(options.getDataFormatOptions().useInt64Timestamp()); + assertEquals( + DataFormatOptions.TimestampFormatOptions.TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED, + options.getDataFormatOptions().timestampFormatOptions()); + } + + @Test + public void nonBuilderSetUseInt64Timestamp_capturedInDataFormatOptions() { + BigQueryOptions options = + BigQueryOptions.newBuilder() + .setDataFormatOptions(DataFormatOptions.newBuilder().useInt64Timestamp(false).build()) + .setProjectId("project-id") + .build(); + options.setUseInt64Timestamps(true); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } + + @Test + public void nonBuilderSetUseInt64Timestamp_overridesEverything() { + BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("project-id").build(); + options.setUseInt64Timestamps(true); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } + + @Test + public void noDataFormatOptions_capturesUseInt64TimestampSetInBuilder() { + BigQueryOptions options = + BigQueryOptions.newBuilder().setUseInt64Timestamps(true).setProjectId("project-id").build(); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } + + @Test + public void dataFormatOptionsSetterHasPrecedence() { + BigQueryOptions options = + BigQueryOptions.newBuilder() + .setProjectId("project-id") + .setDataFormatOptions(DataFormatOptions.newBuilder().useInt64Timestamp(true).build()) + .setUseInt64Timestamps(false) + .build(); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } } diff --git 
a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java index ed9effe0b4..8661346776 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java @@ -140,7 +140,8 @@ public class QueryRequestInfoTest { .setJobCreationMode(jobCreationModeRequired) .setReservation(RESERVATION) .build(); - QueryRequestInfo REQUEST_INFO = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, false); + QueryRequestInfo REQUEST_INFO = + new QueryRequestInfo(QUERY_JOB_CONFIGURATION, DataFormatOptions.newBuilder().build()); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_SUPPORTED = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -156,7 +157,8 @@ public class QueryRequestInfoTest { .setReservation(RESERVATION) .build(); QueryRequestInfo REQUEST_INFO_SUPPORTED = - new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED, false); + new QueryRequestInfo( + QUERY_JOB_CONFIGURATION_SUPPORTED, DataFormatOptions.newBuilder().build()); @Test public void testIsFastQuerySupported() { @@ -177,17 +179,25 @@ public void testToPb() { @Test public void equalTo() { compareQueryRequestInfo( - new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED, false), REQUEST_INFO_SUPPORTED); - compareQueryRequestInfo(new QueryRequestInfo(QUERY_JOB_CONFIGURATION, false), REQUEST_INFO); + new QueryRequestInfo( + QUERY_JOB_CONFIGURATION_SUPPORTED, DataFormatOptions.newBuilder().build()), + REQUEST_INFO_SUPPORTED); + compareQueryRequestInfo( + new QueryRequestInfo(QUERY_JOB_CONFIGURATION, DataFormatOptions.newBuilder().build()), + REQUEST_INFO); } @Test public void testInt64Timestamp() { - QueryRequestInfo requestInfo = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, false); + QueryRequestInfo requestInfo = + new QueryRequestInfo(QUERY_JOB_CONFIGURATION, DataFormatOptions.newBuilder().build()); QueryRequest requestPb = requestInfo.toPb(); assertFalse(requestPb.getFormatOptions().getUseInt64Timestamp()); - QueryRequestInfo requestInfoLosslessTs = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, true); + QueryRequestInfo requestInfoLosslessTs = + new QueryRequestInfo( + QUERY_JOB_CONFIGURATION, + DataFormatOptions.newBuilder().useInt64Timestamp(true).build()); QueryRequest requestLosslessTsPb = requestInfoLosslessTs.toPb(); assertTrue(requestLosslessTsPb.getFormatOptions().getUseInt64Timestamp()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index ec1f7b5a09..268199869e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -71,6 +71,7 @@ import com.google.cloud.bigquery.ConnectionSettings; import com.google.cloud.bigquery.CopyJobConfiguration; import com.google.cloud.bigquery.CsvOptions; +import com.google.cloud.bigquery.DataFormatOptions; import com.google.cloud.bigquery.Dataset; import com.google.cloud.bigquery.DatasetId; import com.google.cloud.bigquery.DatasetInfo; @@ -3462,8 +3463,11 @@ public void testLosslessTimestamp() throws InterruptedException { // Create new BQ object to toggle lossless timestamps without affecting // other tests. 
RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); - BigQuery bigqueryLossless = bigqueryHelper.getOptions().getService(); - bigqueryLossless.getOptions().setUseInt64Timestamps(true); + DataFormatOptions dataFormatOptions = + DataFormatOptions.newBuilder().useInt64Timestamp(true).build(); + BigQueryOptions options = + bigqueryHelper.getOptions().toBuilder().setDataFormatOptions(dataFormatOptions).build(); + BigQuery bigqueryLossless = options.getService(); TableResult resultLossless = bigqueryLossless.query( From 57ffe1d2ba8af3b950438c926d66ac23ca8a3093 Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Mon, 8 Dec 2025 16:53:20 -0500 Subject: [PATCH 03/36] feat: Add timestamp_precision to Field (#4014) * feat: Add timestamp_precision to Field * Apply suggestion from @gemini-code-assist[bot] Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> * chore: Address GCA PR feedback * chore: Fix typo * chore: Remove default value * chore: Address PR feedback --------- Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- .../java/com/google/cloud/bigquery/Field.java | 44 ++++++++++++++++++- .../com/google/cloud/bigquery/FieldTest.java | 15 +++++++ 2 files changed, 58 insertions(+), 1 deletion(-) diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java index 3c959a73fe..88e09c5c48 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java @@ -25,6 +25,7 @@ import com.google.api.services.bigquery.model.TableFieldSchema; import com.google.common.base.Function; import com.google.common.base.MoreObjects; +import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import java.io.Serializable; import java.util.List; @@ -62,6 +63,7 @@ public TableFieldSchema apply(Field field) { private final Long maxLength; private final Long scale; private final Long precision; + private final Long timestampPrecision; private final String defaultValueExpression; private final String collation; private final FieldElementType rangeElementType; @@ -88,6 +90,7 @@ public static final class Builder { private Long maxLength; private Long scale; private Long precision; + private Long timestampPrecision; private String defaultValueExpression; private String collation; private FieldElementType rangeElementType; @@ -104,6 +107,7 @@ private Builder(Field field) { this.maxLength = field.maxLength; this.scale = field.scale; this.precision = field.precision; + this.timestampPrecision = field.timestampPrecision; this.defaultValueExpression = field.defaultValueExpression; this.collation = field.collation; this.rangeElementType = field.rangeElementType; @@ -254,6 +258,19 @@ public Builder setPrecision(Long precision) { return this; } + /** + * Specifies the precision for TIMESTAMP types. + * + *
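<p>For example (an illustrative sketch; the field name is arbitrary):
+     *
+     * <pre>{@code
+     * Field field =
+     *     Field.newBuilder("event_time", StandardSQLTypeName.TIMESTAMP)
+     *         .setTimestampPrecision(12L) // 12 selects picosecond, 6 selects microsecond
+     *         .build();
+     * }</pre>
+     *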
<p>
The default value is 6. Possible values are 6 (microsecond) or 12 (picosecond). + */ + public Builder setTimestampPrecision(Long timestampPrecision) { + Preconditions.checkArgument( + timestampPrecision == 6L || timestampPrecision == 12L, + "Timestamp Precision must be 6 (microsecond) or 12 (picosecond)"); + this.timestampPrecision = timestampPrecision; + return this; + } + /** * DefaultValueExpression is used to specify the default value of a field using a SQL * expression. It can only be set for top level fields (columns). @@ -317,6 +334,7 @@ private Field(Builder builder) { this.maxLength = builder.maxLength; this.scale = builder.scale; this.precision = builder.precision; + this.timestampPrecision = builder.timestampPrecision; this.defaultValueExpression = builder.defaultValueExpression; this.collation = builder.collation; this.rangeElementType = builder.rangeElementType; @@ -370,6 +388,11 @@ public Long getPrecision() { return precision; } + /** Returns the precision for TIMESTAMP type. */ + public Long getTimestampPrecision() { + return timestampPrecision; + } + /** Return the default value of the field. */ public String getDefaultValueExpression() { return defaultValueExpression; @@ -408,6 +431,7 @@ public String toString() { .add("maxLength", maxLength) .add("scale", scale) .add("precision", precision) + .add("timestampPrecision", timestampPrecision) .add("defaultValueExpression", defaultValueExpression) .add("collation", collation) .add("rangeElementType", rangeElementType) @@ -416,7 +440,19 @@ public String toString() { @Override public int hashCode() { - return Objects.hash(name, type, mode, description, policyTags, rangeElementType); + return Objects.hash( + name, + type, + mode, + description, + policyTags, + maxLength, + scale, + precision, + timestampPrecision, + defaultValueExpression, + collation, + rangeElementType); } @Override @@ -490,6 +526,9 @@ TableFieldSchema toPb() { if (precision != null) { fieldSchemaPb.setPrecision(precision); } + if (timestampPrecision != null) { + fieldSchemaPb.setTimestampPrecision(timestampPrecision); + } if (defaultValueExpression != null) { fieldSchemaPb.setDefaultValueExpression(defaultValueExpression); } @@ -527,6 +566,9 @@ static Field fromPb(TableFieldSchema fieldSchemaPb) { if (fieldSchemaPb.getPrecision() != null) { fieldBuilder.setPrecision(fieldSchemaPb.getPrecision()); } + if (fieldSchemaPb.getTimestampPrecision() != null) { + fieldBuilder.setTimestampPrecision(fieldSchemaPb.getTimestampPrecision()); + } if (fieldSchemaPb.getDefaultValueExpression() != null) { fieldBuilder.setDefaultValueExpression(fieldSchemaPb.getDefaultValueExpression()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java index ce431ca295..d7c5e25a2e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -213,6 +214,20 @@ public void testSubFieldWithClonedType() throws Exception { Field.of("field", clonedRecord, Field.of("subfield", LegacySQLTypeName.BOOLEAN)); } + @Test + public void setTimestampPrecisionValues() { + Field.Builder builder = Field.newBuilder(FIELD_NAME1, FIELD_TYPE1); + + // Value values: 6L or 
12L + builder.setTimestampPrecision(6L); + builder.setTimestampPrecision(12L); + + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(-1L)); + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(0L)); + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(5L)); + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(13L)); + } + private void compareFieldSchemas(Field expected, Field value) { assertEquals(expected, value); assertEquals(expected.getName(), value.getName()); From 5a51a51ced49957d53a911c585bbbd69aaf8283f Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Mon, 8 Dec 2025 17:07:06 -0500 Subject: [PATCH 04/36] chore: Use custom timestamp validator for ISO8601 timestamps with more than nanosecond precision (#4017) * chore: Use custom timestamp validator for ISO8601 timestamps with more than nanosecond precision * chore: Rename helper method to validateTimestamp * chore: Address GCA comments * chore: Address last GCA comment * chore: Update to use assert helper method --- .../cloud/bigquery/QueryParameterValue.java | 72 ++++++++-- .../bigquery/QueryParameterValueTest.java | 131 ++++++++++-------- 2 files changed, 136 insertions(+), 67 deletions(-) diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java index 0487c3f7c3..cb4e44861d 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java @@ -26,6 +26,7 @@ import com.google.api.services.bigquery.model.RangeValue; import com.google.auto.value.AutoValue; import com.google.cloud.Timestamp; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -44,6 +45,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import javax.annotation.Nullable; import org.threeten.extra.PeriodDuration; @@ -76,7 +79,7 @@ @AutoValue public abstract class QueryParameterValue implements Serializable { - private static final DateTimeFormatter timestampFormatter = + static final DateTimeFormatter TIMESTAMP_FORMATTER = new DateTimeFormatterBuilder() .parseLenient() .append(DateTimeFormatter.ISO_LOCAL_DATE) @@ -94,15 +97,21 @@ public abstract class QueryParameterValue implements Serializable { .optionalEnd() .toFormatter() .withZone(ZoneOffset.UTC); - private static final DateTimeFormatter timestampValidator = + private static final DateTimeFormatter TIMESTAMP_VALIDATOR = new DateTimeFormatterBuilder() .parseLenient() - .append(timestampFormatter) + .append(TIMESTAMP_FORMATTER) .optionalStart() .appendOffsetId() .optionalEnd() .toFormatter() .withZone(ZoneOffset.UTC); + // Regex to identify >9 digits in the fraction part (e.g. 
`.123456789123`) + // Matches the dot, followed by 10+ digits (fractional part), followed by non-digits (like `+00`) + // or end of string + private static final Pattern ISO8601_TIMESTAMP_HIGH_PRECISION_PATTERN = + Pattern.compile("\\.(\\d{10,})(?:\\D|$)"); + private static final DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); private static final DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSS"); @@ -303,6 +312,9 @@ public static QueryParameterValue bytes(byte[] value) { /** * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. * + *
<p>This method only supports microsecond precision for timestamps. To use higher precision, + * prefer {@link #timestamp(String)} with an ISO8601 String. + * + * @param value Microseconds since epoch, e.g. 1733945416000000 corresponds to 2024-12-11 * 19:30:16.929Z */ public static QueryParameterValue timestamp(Long value) { return of(value, StandardSQLTypeName.TIMESTAMP); } /** - * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. Must be in the format - "yyyy-MM-dd HH:mm:ss.SSSSSSZZ", e.g. "2014-08-19 12:41:35.220000+00:00". + * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. + * + *
<p>This method supports up to picosecond precision (12 digits) for timestamps. Input should + * conform to the ISO8601 format. + * + *
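<p>For example, binding a picosecond-precision parameter (illustrative):
+    *
+    * <pre>{@code
+    * QueryParameterValue ts =
+    *     QueryParameterValue.timestamp("2025-12-08 12:34:56.123456789123+00:00");
+    * }</pre>
+    *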
<p>
Must be in the format "yyyy-MM-dd HH:mm:ss.SSSSSS{SSSSSSS}ZZ", e.g. "2014-08-19 + * 12:41:35.123456+00:00" for microsecond precision and "2014-08-19 12:41:35.123456789123+00:00" + * for picosecond precision */ public static QueryParameterValue timestamp(String value) { return of(value, StandardSQLTypeName.TIMESTAMP); @@ -481,12 +499,15 @@ private static String valueToStringOrNull(T value, StandardSQLTypeName type) throw new IllegalArgumentException("Cannot convert RANGE to String value"); case TIMESTAMP: if (value instanceof Long) { + // Timestamp passed as a Long only support Microsecond precision Timestamp timestamp = Timestamp.ofTimeMicroseconds((Long) value); - return timestampFormatter.format( + return TIMESTAMP_FORMATTER.format( Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos())); } else if (value instanceof String) { - // verify that the String is in the right format - checkFormat(value, timestampValidator); + // Timestamp passed as a String can support up picosecond precision, however, + // DateTimeFormatter only supports nanosecond precision. Higher than nanosecond + // requires a custom validator. + validateTimestamp((String) value); return (String) value; } break; @@ -521,9 +542,42 @@ private static String valueToStringOrNull(T value, StandardSQLTypeName type) "Type " + type + " incompatible with " + value.getClass().getCanonicalName()); } + /** + * Internal helper method to check that the timestamp follows the expected String input of ISO8601 + * string. Allows the fractional portion of the timestamp to support up to 12 digits of precision + * (up to picosecond). + * + * @throws IllegalArgumentException if timestamp is invalid or exceeds picosecond precision + */ + @VisibleForTesting + static void validateTimestamp(String timestamp) { + // Check if the string has greater than nanosecond precision (>9 digits in fractional second) + Matcher matcher = ISO8601_TIMESTAMP_HIGH_PRECISION_PATTERN.matcher(timestamp); + if (matcher.find()) { + // Group 1 is the fractional second part of the ISO8601 string + String fraction = matcher.group(1); + // Pos 10-12 of the fractional second are guaranteed to be digits. The regex only + // matches the fraction section as long as they are digits. + if (fraction.length() > 12) { + throw new IllegalArgumentException( + "Fractional second portion of ISO8601 only supports up to picosecond (12 digits) in BigQuery"); + } + + // Replace the entire fractional second portion with just the nanosecond portion. 
+ // The new timestamp will be validated against the JDK's DateTimeFormatter + String truncatedFraction = fraction.substring(0, 9); + timestamp = + new StringBuilder(timestamp) + .replace(matcher.start(1), matcher.end(1), truncatedFraction) + .toString(); + } + + // It is valid as long as DateTimeFormatter doesn't throw an exception + checkFormat(timestamp, TIMESTAMP_VALIDATOR); + } + private static void checkFormat(Object value, DateTimeFormatter formatter) { try { - formatter.parse((String) value); } catch (DateTimeParseException e) { throw new IllegalArgumentException(e.getMessage(), e); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java index 75060a4f04..25649388e9 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java @@ -16,11 +16,9 @@ package com.google.cloud.bigquery; +import static com.google.cloud.bigquery.QueryParameterValue.TIMESTAMP_FORMATTER; import static com.google.common.truth.Truth.assertThat; -import static java.time.temporal.ChronoField.HOUR_OF_DAY; -import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; -import static java.time.temporal.ChronoField.NANO_OF_SECOND; -import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; +import static org.junit.Assert.assertThrows; import com.google.api.services.bigquery.model.QueryParameterType; import com.google.common.collect.ImmutableMap; @@ -29,9 +27,6 @@ import java.text.ParseException; import java.time.Instant; import java.time.Period; -import java.time.ZoneOffset; -import java.time.format.DateTimeFormatter; -import java.time.format.DateTimeFormatterBuilder; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; @@ -43,25 +38,6 @@ public class QueryParameterValueTest { - private static final DateTimeFormatter TIMESTAMPFORMATTER = - new DateTimeFormatterBuilder() - .parseLenient() - .append(DateTimeFormatter.ISO_LOCAL_DATE) - .appendLiteral(' ') - .appendValue(HOUR_OF_DAY, 2) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2) - .optionalStart() - .appendFraction(NANO_OF_SECOND, 6, 9, true) - .optionalStart() - .appendOffset("+HHMM", "+00:00") - .optionalEnd() - .toFormatter() - .withZone(ZoneOffset.UTC); - private static final QueryParameterValue QUERY_PARAMETER_VALUE = QueryParameterValue.newBuilder() .setType(StandardSQLTypeName.STRING) @@ -326,11 +302,9 @@ public void testStringArray() { @Test public void testTimestampFromLong() { - QueryParameterValue value = QueryParameterValue.timestamp(1408452095220000L); - assertThat(value.getValue()).isEqualTo("2014-08-19 12:41:35.220000+00:00"); - assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value.getArrayType()).isNull(); - assertThat(value.getArrayValues()).isNull(); + // Expects output to be ISO8601 string with microsecond precision + assertTimestampValue( + QueryParameterValue.timestamp(1408452095220000L), "2014-08-19 12:41:35.220000+00:00"); } @Test @@ -340,43 +314,77 @@ public void testTimestampWithFormatter() { long secs = Math.floorDiv(timestampInMicroseconds, microseconds); int nano = (int) Math.floorMod(timestampInMicroseconds, microseconds) * 1000; Instant instant = Instant.ofEpochSecond(secs, nano); - String expected = 
TIMESTAMPFORMATTER.format(instant); - assertThat(expected) - .isEqualTo(QueryParameterValue.timestamp(timestampInMicroseconds).getValue()); + String expected = TIMESTAMP_FORMATTER.format(instant); + assertTimestampValue(QueryParameterValue.timestamp(timestampInMicroseconds), expected); } @Test - public void testTimestamp() { - QueryParameterValue value = QueryParameterValue.timestamp("2014-08-19 12:41:35.220000+00:00"); - assertThat(value.getValue()).isEqualTo("2014-08-19 12:41:35.220000+00:00"); - assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value.getArrayType()).isNull(); - assertThat(value.getArrayValues()).isNull(); + public void testTimestampFromString() { + assertTimestampValue( + QueryParameterValue.timestamp("2014-08-19 12:41:35.220000+00:00"), + "2014-08-19 12:41:35.220000+00:00"); + assertTimestampValue( + QueryParameterValue.timestamp("2025-08-19 12:34:56.123456789+00:00"), + "2025-08-19 12:34:56.123456789+00:00"); + + // The following test cases test more than nanosecond precision + // 10 digits of precision (1 digit more than nanosecond) + assertTimestampValue( + QueryParameterValue.timestamp("2025-12-08 12:34:56.1234567890+00:00"), + "2025-12-08 12:34:56.1234567890+00:00"); + // 12 digits (picosecond precision) + assertTimestampValue( + QueryParameterValue.timestamp("2025-12-08 12:34:56.123456789123+00:00"), + "2025-12-08 12:34:56.123456789123+00:00"); + + // More than picosecond precision + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2025-12-08 12:34:56.1234567891234+00:00")); + assertThrows( + IllegalArgumentException.class, + () -> + QueryParameterValue.timestamp("2025-12-08 12:34:56.123456789123456789123456789+00:00")); } @Test public void testTimestampWithDateTimeFormatterBuilder() { - QueryParameterValue value = QueryParameterValue.timestamp("2019-02-14 12:34:45.938993Z"); - assertThat(value.getValue()).isEqualTo("2019-02-14 12:34:45.938993Z"); - assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value.getArrayType()).isNull(); - assertThat(value.getArrayValues()).isNull(); - QueryParameterValue value1 = QueryParameterValue.timestamp("2019-02-14 12:34:45.938993+0000"); - assertThat(value1.getValue()).isEqualTo("2019-02-14 12:34:45.938993+0000"); - assertThat(value1.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value1.getArrayType()).isNull(); - assertThat(value1.getArrayValues()).isNull(); - QueryParameterValue value2 = QueryParameterValue.timestamp("2019-02-14 12:34:45.102+00:00"); - assertThat(value2.getValue()).isEqualTo("2019-02-14 12:34:45.102+00:00"); - assertThat(value2.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value2.getArrayType()).isNull(); - assertThat(value2.getArrayValues()).isNull(); + assertTimestampValue( + QueryParameterValue.timestamp("2019-02-14 12:34:45.938993Z"), + "2019-02-14 12:34:45.938993Z"); + assertTimestampValue( + QueryParameterValue.timestamp("2019-02-14 12:34:45.938993+0000"), + "2019-02-14 12:34:45.938993+0000"); + assertTimestampValue( + QueryParameterValue.timestamp("2019-02-14 12:34:45.102+00:00"), + "2019-02-14 12:34:45.102+00:00"); } - @Test(expected = IllegalArgumentException.class) - public void testInvalidTimestamp() { + @Test + public void testInvalidTimestampStringValues() { + assertThrows(IllegalArgumentException.class, () -> QueryParameterValue.timestamp("abc")); + // missing the time - QueryParameterValue.timestamp("2014-08-19"); + 
assertThrows(IllegalArgumentException.class, () -> QueryParameterValue.timestamp("2014-08-19")); + + // missing the hour + assertThrows( + IllegalArgumentException.class, () -> QueryParameterValue.timestamp("2014-08-19 12")); + + // can't have the 'T' separator + assertThrows( + IllegalArgumentException.class, () -> QueryParameterValue.timestamp("2014-08-19T12")); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2014-08-19T12:34:00.123456")); + + // Fractional part has picosecond length, but fractional part is not a valid number + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2014-08-19 12:34:00.123456789abc+00:00")); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2014-08-19 12:34:00.123456abc789+00:00")); } @Test @@ -683,4 +691,11 @@ private static void testRangeDataEquals(String start, String end, FieldElementTy assertThat(queryParameterValue.getStructValues()).isNull(); assertThat(queryParameterValue.getValue()).isNull(); } + + private void assertTimestampValue(QueryParameterValue value, String expectedStringValue) { + assertThat(value.getValue()).isEqualTo(expectedStringValue); + assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); + assertThat(value.getArrayType()).isNull(); + assertThat(value.getArrayValues()).isNull(); + } } From 5ad1ce7625a8fe74cd6b8a084fb144de6c34b240 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 9 Dec 2025 12:52:30 -0500 Subject: [PATCH 05/36] chore(main): release 2.56.1-SNAPSHOT (#4001) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- benchmark/pom.xml | 2 +- google-cloud-bigquery-bom/pom.xml | 4 ++-- google-cloud-bigquery/pom.xml | 4 ++-- pom.xml | 4 ++-- samples/snapshot/pom.xml | 2 +- versions.txt | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 9f219179fd..75b4d4d8a6 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -6,7 +6,7 @@ google-cloud-bigquery-parent com.google.cloud - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 2e6d9828bb..357c509748 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-bom - 2.56.0 + 2.56.1-SNAPSHOT pom com.google.cloud @@ -54,7 +54,7 @@ com.google.cloud google-cloud-bigquery - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 304230dd21..5994781371 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.56.0 + 2.56.1-SNAPSHOT jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.56.0 + 2.56.1-SNAPSHOT google-cloud-bigquery diff --git a/pom.xml b/pom.xml index 2169463a6b..06d3a7bfec 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 2.56.0 + 2.56.1-SNAPSHOT BigQuery Parent https://github.com/googleapis/java-bigquery @@ -93,7 +93,7 @@ com.google.cloud google-cloud-bigquery - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 6cabd8bbfc..923d2f6031 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -56,7 +56,7 @@ 
com.google.cloud google-cloud-bigquery - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/versions.txt b/versions.txt index 46c19475b0..7f07bf2096 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:2.56.0:2.56.0 \ No newline at end of file +google-cloud-bigquery:2.56.0:2.56.1-SNAPSHOT \ No newline at end of file From d2f20579fd60efc36fa4239619e0d679a914cd6d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 11 Dec 2025 19:25:27 +0000 Subject: [PATCH 06/36] deps: update dependency com.google.cloud:sdk-platform-java-config to v3.54.2 (#4022) --- .github/workflows/unmanaged_dependency_check.yaml | 2 +- .kokoro/continuous/graalvm-native-a.cfg | 2 +- .kokoro/continuous/graalvm-native-b.cfg | 2 +- .kokoro/continuous/graalvm-native-c.cfg | 2 +- .kokoro/presubmit/graalvm-native-a.cfg | 2 +- .kokoro/presubmit/graalvm-native-b.cfg | 2 +- .kokoro/presubmit/graalvm-native-c.cfg | 2 +- google-cloud-bigquery-bom/pom.xml | 2 +- pom.xml | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/unmanaged_dependency_check.yaml b/.github/workflows/unmanaged_dependency_check.yaml index 5ba388acc0..3c07fc2027 100644 --- a/.github/workflows/unmanaged_dependency_check.yaml +++ b/.github/workflows/unmanaged_dependency_check.yaml @@ -17,7 +17,7 @@ jobs: # repository .kokoro/build.sh - name: Unmanaged dependency check - uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.54.1 + uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.54.2 with: # java-bigquery does not produce a BOM. Fortunately the root pom.xml # defines google-cloud-bigquery in dependencyManagement section. So diff --git a/.kokoro/continuous/graalvm-native-a.cfg b/.kokoro/continuous/graalvm-native-a.cfg index 0d98de5094..20c0ac4a52 100644 --- a/.kokoro/continuous/graalvm-native-a.cfg +++ b/.kokoro/continuous/graalvm-native-a.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.54.1" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.54.2" } env_vars: { diff --git a/.kokoro/continuous/graalvm-native-b.cfg b/.kokoro/continuous/graalvm-native-b.cfg index c270bff717..3b7b14a650 100644 --- a/.kokoro/continuous/graalvm-native-b.cfg +++ b/.kokoro/continuous/graalvm-native-b.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.54.1" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.54.2" } env_vars: { diff --git a/.kokoro/continuous/graalvm-native-c.cfg b/.kokoro/continuous/graalvm-native-c.cfg index 720f8bcfaf..a41d88c092 100644 --- a/.kokoro/continuous/graalvm-native-c.cfg +++ b/.kokoro/continuous/graalvm-native-c.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. 
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.54.1" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.54.2" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-a.cfg b/.kokoro/presubmit/graalvm-native-a.cfg index 0d98de5094..20c0ac4a52 100644 --- a/.kokoro/presubmit/graalvm-native-a.cfg +++ b/.kokoro/presubmit/graalvm-native-a.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.54.1" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.54.2" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-b.cfg b/.kokoro/presubmit/graalvm-native-b.cfg index c270bff717..3b7b14a650 100644 --- a/.kokoro/presubmit/graalvm-native-b.cfg +++ b/.kokoro/presubmit/graalvm-native-b.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.54.1" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.54.2" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-c.cfg b/.kokoro/presubmit/graalvm-native-c.cfg index 720f8bcfaf..a41d88c092 100644 --- a/.kokoro/presubmit/graalvm-native-c.cfg +++ b/.kokoro/presubmit/graalvm-native-c.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.54.1" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.54.2" } env_vars: { diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 357c509748..18b870c38e 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -8,7 +8,7 @@ com.google.cloud sdk-platform-java-config - 3.54.1 + 3.54.2 diff --git a/pom.xml b/pom.xml index 06d3a7bfec..0e5d85cd95 100644 --- a/pom.xml +++ b/pom.xml @@ -14,7 +14,7 @@ com.google.cloud sdk-platform-java-config - 3.54.1 + 3.54.2 From f4a15364e294a6c379fefe965973f5e140a091e5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 11 Dec 2025 17:55:34 -0500 Subject: [PATCH 07/36] chore(main): release 2.57.0 (#4021) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 14 ++++++++++++++ benchmark/pom.xml | 2 +- google-cloud-bigquery-bom/pom.xml | 4 ++-- google-cloud-bigquery/pom.xml | 4 ++-- pom.xml | 4 ++-- samples/snapshot/pom.xml | 2 +- versions.txt | 2 +- 7 files changed, 23 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59350c03bc..f9397c65bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [2.57.0](https://github.com/googleapis/java-bigquery/compare/v2.56.0...v2.57.0) (2025-12-11) + + +### Features + +* Add timestamp_precision to Field ([#4014](https://github.com/googleapis/java-bigquery/issues/4014)) ([57ffe1d](https://github.com/googleapis/java-bigquery/commit/57ffe1d2ba8af3b950438c926d66ac23ca8a3093)) +* Introduce DataFormatOptions to configure the output of BigQuery data types ([#4010](https://github.com/googleapis/java-bigquery/issues/4010)) ([6dcc900](https://github.com/googleapis/java-bigquery/commit/6dcc90053353422ae766e531413b3ecc65b8b155)) +* Relax client-side validation for BigQuery entity IDs 
([#4000](https://github.com/googleapis/java-bigquery/issues/4000)) ([c3548a2](https://github.com/googleapis/java-bigquery/commit/c3548a2f521b19761c844c0b24fc8caab541aba7)) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.54.2 ([#4022](https://github.com/googleapis/java-bigquery/issues/4022)) ([d2f2057](https://github.com/googleapis/java-bigquery/commit/d2f20579fd60efc36fa4239619e0d679a914cd6d)) + ## [2.56.0](https://github.com/googleapis/java-bigquery/compare/v2.55.3...v2.56.0) (2025-11-15) diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 75b4d4d8a6..a1ac9dc9ae 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -6,7 +6,7 @@ google-cloud-bigquery-parent com.google.cloud - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 18b870c38e..8d2b8864aa 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-bom - 2.56.1-SNAPSHOT + 2.57.0 pom com.google.cloud @@ -54,7 +54,7 @@ com.google.cloud google-cloud-bigquery - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 5994781371..d3a1150d35 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.56.1-SNAPSHOT + 2.57.0 jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.56.1-SNAPSHOT + 2.57.0 google-cloud-bigquery diff --git a/pom.xml b/pom.xml index 0e5d85cd95..b9fedf5f87 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 2.56.1-SNAPSHOT + 2.57.0 BigQuery Parent https://github.com/googleapis/java-bigquery @@ -93,7 +93,7 @@ com.google.cloud google-cloud-bigquery - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 923d2f6031..e2e994a16b 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -56,7 +56,7 @@ com.google.cloud google-cloud-bigquery - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/versions.txt b/versions.txt index 7f07bf2096..fe934a8fc5 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:2.56.0:2.56.1-SNAPSHOT \ No newline at end of file +google-cloud-bigquery:2.57.0:2.57.0 \ No newline at end of file From 3f592a50d25852caaf4e71f64446e153d39ccc1d Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Fri, 12 Dec 2025 10:06:45 -0500 Subject: [PATCH 08/36] test: Clean up resources created in ITs (#4024) * chore: Cleanup initialized resources * chore: Delete created dataset * chore: Use try-with to close stream --- .../cloud/bigquery/it/ITBigQueryTest.java | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index 268199869e..d565688d50 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -1099,10 +1099,12 @@ public static void beforeClass() throws InterruptedException, IOException { .setContentType("application/json") .build(), JSON_CONTENT_SIMPLE.getBytes(StandardCharsets.UTF_8)); - InputStream stream 
= - ITBigQueryTest.class.getClassLoader().getResourceAsStream("QueryTestData.csv"); - storage.createFrom( - BlobInfo.newBuilder(BUCKET, LOAD_FILE_LARGE).setContentType("text/plain").build(), stream); + try (InputStream stream = + ITBigQueryTest.class.getClassLoader().getResourceAsStream("QueryTestData.csv")) { + storage.createFrom( + BlobInfo.newBuilder(BUCKET, LOAD_FILE_LARGE).setContentType("text/plain").build(), + stream); + } storage.create( BlobInfo.newBuilder(BUCKET, JSON_LOAD_FILE_BQ_RESULTSET) .setContentType("application/json") @@ -1179,10 +1181,11 @@ public static void beforeClass() throws InterruptedException, IOException { } @AfterClass - public static void afterClass() throws ExecutionException, InterruptedException { + public static void afterClass() throws Exception { if (bigquery != null) { RemoteBigQueryHelper.forceDelete(bigquery, DATASET); RemoteBigQueryHelper.forceDelete(bigquery, UK_DATASET); + RemoteBigQueryHelper.forceDelete(bigquery, OTHER_DATASET); RemoteBigQueryHelper.forceDelete(bigquery, MODEL_DATASET); RemoteBigQueryHelper.forceDelete(bigquery, ROUTINE_DATASET); } @@ -1191,6 +1194,11 @@ public static void afterClass() throws ExecutionException, InterruptedException if (!wasDeleted && LOG.isLoggable(Level.WARNING)) { LOG.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET); } + storage.close(); + } + + if (otel instanceof OpenTelemetrySdk) { + ((OpenTelemetrySdk) otel).close(); } } From 380e0d999a0bd79dad8f3b777ab511bdf2e657e3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 12 Dec 2025 19:39:10 +0000 Subject: [PATCH 09/36] chore(deps): update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.19.0 (#4025) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index b9fedf5f87..068d0cad93 100644 --- a/pom.xml +++ b/pom.xml @@ -71,7 +71,7 @@ com.google.cloud google-cloud-bigquerystorage-bom - 3.18.0 + 3.19.0 pom import From 3b8b8ef11aefdeac9512d10e1441e6ae91c4cc1f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 12 Dec 2025 14:39:26 -0500 Subject: [PATCH 10/36] chore(main): release 2.57.1-SNAPSHOT (#4023) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- benchmark/pom.xml | 2 +- google-cloud-bigquery-bom/pom.xml | 4 ++-- google-cloud-bigquery/pom.xml | 4 ++-- pom.xml | 4 ++-- samples/snapshot/pom.xml | 2 +- versions.txt | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/benchmark/pom.xml b/benchmark/pom.xml index a1ac9dc9ae..8d70e4d685 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -6,7 +6,7 @@ google-cloud-bigquery-parent com.google.cloud - 2.57.0 + 2.57.1-SNAPSHOT diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 8d2b8864aa..3532008034 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-bom - 2.57.0 + 2.57.1-SNAPSHOT pom com.google.cloud @@ -54,7 +54,7 @@ com.google.cloud google-cloud-bigquery - 2.57.0 + 2.57.1-SNAPSHOT diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index d3a1150d35..9517e49198 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.57.0 + 2.57.1-SNAPSHOT jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud 
google-cloud-bigquery-parent - 2.57.0 + 2.57.1-SNAPSHOT google-cloud-bigquery diff --git a/pom.xml b/pom.xml index 068d0cad93..129c6226c6 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 2.57.0 + 2.57.1-SNAPSHOT BigQuery Parent https://github.com/googleapis/java-bigquery @@ -93,7 +93,7 @@ com.google.cloud google-cloud-bigquery - 2.57.0 + 2.57.1-SNAPSHOT diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index e2e994a16b..3976d33b4e 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -56,7 +56,7 @@ com.google.cloud google-cloud-bigquery - 2.57.0 + 2.57.1-SNAPSHOT diff --git a/versions.txt b/versions.txt index fe934a8fc5..e6d97a7042 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:2.57.0:2.57.0 \ No newline at end of file +google-cloud-bigquery:2.57.0:2.57.1-SNAPSHOT \ No newline at end of file From c161364aeac184917174b290adb0108cba3d2489 Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Fri, 12 Dec 2025 15:14:45 -0500 Subject: [PATCH 11/36] chore: Cleanup Intellij test warnings (#4026) * chore: Cleanup initialized resources * chore: Use try-with to close stream * chore: Cleanup Intellij test warnings * chore: Allow for floating point inaccuracies * chore: Add otel delete operation --- .../cloud/bigquery/it/ITBigQueryTest.java | 273 +++++++++--------- 1 file changed, 132 insertions(+), 141 deletions(-) diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index d565688d50..de9fef4ee5 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -44,7 +44,6 @@ import com.google.cloud.bigquery.Acl.Expr; import com.google.cloud.bigquery.Acl.User; import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption; import com.google.cloud.bigquery.BigQuery.DatasetField; import com.google.cloud.bigquery.BigQuery.DatasetListOption; import com.google.cloud.bigquery.BigQuery.DatasetOption; @@ -1434,8 +1433,8 @@ public void testUpdateDatasetWithAccessPolicyVersion() throws IOException { datasetOption, updateModeOption); assertNotNull(updatedDataset); - assertEquals(updatedDataset.getDescription(), "Updated Description"); - assertThat(updatedDataset.getLabels().isEmpty()); + assertEquals("Updated Description", updatedDataset.getDescription()); + assertTrue(updatedDataset.getLabels().isEmpty()); Acl updatedAclWithCond = null; for (Acl updatedAcl : updatedDataset.getAcl()) { @@ -1875,7 +1874,7 @@ public void testCreateDatasetWithAccessPolicyVersion() throws IOException { DatasetOption datasetOption = DatasetOption.accessPolicyVersion(3); Dataset dataset = bigquery.create(info, datasetOption); assertNotNull(dataset); - assertEquals(dataset.getDescription(), DESCRIPTION); + assertEquals(DESCRIPTION, dataset.getDescription()); Acl remoteAclWithCond = null; for (Acl remoteAcl : dataset.getAcl()) { @@ -1981,7 +1980,7 @@ public void testCreateFieldWithDefaultCollation() { Schema remoteSchema = remoteTable.getDefinition().getSchema(); // Schema should be equal because collation has been added to the fields. 
assertEquals(schema, remoteSchema); - assertEquals(null, remoteTable.getDefaultCollation()); + assertNull(remoteTable.getDefaultCollation()); FieldList fieldList = remoteSchema.getFields(); for (Field field : fieldList) { if (field.getName().equals("stringFieldWithoutDefaultCollation")) { @@ -2425,7 +2424,7 @@ public void testCreateExternalTable() throws InterruptedException { assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); assertEquals(integerValue, integerCell.getLongValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); integerValue = ~integerValue & 0x1; rowCount++; } @@ -2531,7 +2530,7 @@ public void testCreateViewTable() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); rowCount++; } assertEquals(2, rowCount); @@ -3065,13 +3064,13 @@ public void testListAllTableData() { assertEquals("stringValue", stringCell.getStringValue()); assertEquals(0, integerArrayCell.getRepeatedValue().get(0).getLongValue()); assertEquals(1, integerArrayCell.getRepeatedValue().get(1).getLongValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); assertArrayEquals(BYTES, bytesCell.getBytesValue()); assertEquals(-14182916000000L, recordCell.getRecordValue().get(0).getTimestampValue()); assertTrue(recordCell.getRecordValue().get(1).isNull()); assertEquals(1, recordCell.getRecordValue().get(2).getRepeatedValue().get(0).getLongValue()); assertEquals(0, recordCell.getRecordValue().get(2).getRepeatedValue().get(1).getLongValue()); - assertEquals(true, recordCell.getRecordValue().get(3).getBooleanValue()); + assertTrue(recordCell.getRecordValue().get(3).getBooleanValue()); assertEquals(3, integerCell.getLongValue()); assertEquals(1.2, floatCell.getDoubleValue(), 0.0001); assertEquals("POINT(-122.35022 47.649154)", geographyCell.getStringValue()); @@ -3132,21 +3131,21 @@ public void testModelLifecycle() throws InterruptedException { ModelId modelId = ModelId.of(MODEL_DATASET, modelName); Model model = bigquery.getModel(modelId); assertNotNull(model); - assertEquals(model.getModelType(), "LINEAR_REGRESSION"); + assertEquals("LINEAR_REGRESSION", model.getModelType()); // Compare the extended model metadata. - assertEquals(model.getFeatureColumns().get(0).getName(), "f1"); - assertEquals(model.getLabelColumns().get(0).getName(), "predicted_label"); + assertEquals("f1", model.getFeatureColumns().get(0).getName()); + assertEquals("predicted_label", model.getLabelColumns().get(0).getName()); assertEquals( - model.getTrainingRuns().get(0).getTrainingOptions().getLearnRateStrategy(), "CONSTANT"); + "CONSTANT", model.getTrainingRuns().get(0).getTrainingOptions().getLearnRateStrategy()); // Mutate metadata. ModelInfo info = model.toBuilder().setDescription("TEST").build(); Model afterUpdate = bigquery.update(info); - assertEquals(afterUpdate.getDescription(), "TEST"); + assertEquals("TEST", afterUpdate.getDescription()); // Ensure model is present in listModels.
Page models = bigquery.listModels(MODEL_DATASET); - Boolean found = false; + boolean found = false; for (Model m : models.getValues()) { if (m.getModelId().getModel().equals(modelName)) { found = true; @@ -3167,7 +3166,7 @@ public void testEmptyListModels() { assertEquals(0, Iterables.size(models.getValues())); assertFalse(models.hasNextPage()); assertNull(models.getNextPageToken()); - assertTrue(bigquery.delete(datasetId)); + RemoteBigQueryHelper.forceDelete(bigquery, datasetId); } @Test @@ -3179,7 +3178,7 @@ public void testEmptyListRoutines() { assertEquals(0, Iterables.size(routines.getValues())); assertFalse(routines.hasNextPage()); assertNull(routines.getNextPageToken()); - assertTrue(bigquery.delete(datasetId)); + RemoteBigQueryHelper.forceDelete(bigquery, datasetId); } @Test @@ -3197,7 +3196,7 @@ public void testRoutineLifecycle() throws InterruptedException { RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); Routine routine = bigquery.getRoutine(routineId); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); // Mutate metadata. RoutineInfo newInfo = @@ -3208,11 +3207,11 @@ public void testRoutineLifecycle() throws InterruptedException { .setRoutineType(routine.getRoutineType()) .build(); Routine afterUpdate = bigquery.update(newInfo); - assertEquals(afterUpdate.getBody(), "x * 4"); + assertEquals("x * 4", afterUpdate.getBody()); // Ensure routine is present in listRoutines. Page routines = bigquery.listRoutines(ROUTINE_DATASET); - Boolean found = false; + boolean found = false; for (Routine r : routines.getValues()) { if (r.getRoutineId().getRoutine().equals(routineName)) { found = true; @@ -3244,7 +3243,7 @@ public void testRoutineAPICreation() { Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); } @Test @@ -3269,10 +3268,10 @@ public void testRoutineAPICreationJavascriptUDF() { Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getLanguage(), "JAVASCRIPT"); - assertEquals(routine.getDeterminismLevel(), "DETERMINISTIC"); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); - assertEquals(routine.getReturnType(), StandardSQLDataType.newBuilder("STRING").build()); + assertEquals("JAVASCRIPT", routine.getLanguage()); + assertEquals("DETERMINISTIC", routine.getDeterminismLevel()); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); + assertEquals(StandardSQLDataType.newBuilder("STRING").build(), routine.getReturnType()); } @Test @@ -3299,8 +3298,8 @@ public void testRoutineAPICreationTVF() { .build(); Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "TABLE_VALUED_FUNCTION"); - assertEquals(routine.getReturnTableType(), returnTableType); + assertEquals("TABLE_VALUED_FUNCTION", routine.getRoutineType()); + assertEquals(returnTableType, routine.getReturnTableType()); } @Test @@ -3324,10 +3323,10 @@ public void testRoutineDataGovernanceType() { Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getLanguage(), "SQL"); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); - assertEquals(routine.getReturnType(), StandardSQLDataType.newBuilder("INT64").build()); - assertEquals(routine.getDataGovernanceType(), "DATA_MASKING"); + assertEquals("SQL", routine.getLanguage()); 
+ assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); + assertEquals(StandardSQLDataType.newBuilder("INT64").build(), routine.getReturnType()); + assertEquals("DATA_MASKING", routine.getDataGovernanceType()); } @Test @@ -3348,7 +3347,7 @@ public void testAuthorizeRoutine() { .build(); Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); Dataset routineDataset = bigquery.getDataset(ROUTINE_DATASET); List routineAcl = new ArrayList<>(routineDataset.getAcl()); routineAcl.add(Acl.of(new Acl.Routine(routineId))); @@ -3370,7 +3369,7 @@ public void testAuthorizeDataset() { DatasetInfo.newBuilder(datasetId).setAcl(acl).setDescription("shared Dataset").build(); Dataset sharedDataset = bigquery.create(datasetInfo); assertNotNull(sharedDataset); - assertEquals(sharedDataset.getDescription(), "shared Dataset"); + assertEquals("shared Dataset", sharedDataset.getDescription()); // Get the current metadata for the dataset you want to share by calling the datasets.get method List sharedDatasetAcl = new ArrayList<>(sharedDataset.getAcl()); @@ -3384,7 +3383,7 @@ public void testAuthorizeDataset() { Dataset authorizedDataset = bigquery.create(authorizedDatasetInfo); assertNotNull(authorizedDataset); assertEquals( - authorizedDataset.getDescription(), "new Dataset to be authorized by the sharedDataset"); + "new Dataset to be authorized by the sharedDataset", authorizedDataset.getDescription()); // Add the new DatasetAccessEntry object to the existing sharedDatasetAcl list DatasetAclEntity datasetEntity = new DatasetAclEntity(authorizedDatasetId, targetTypes); @@ -3395,6 +3394,9 @@ public void testAuthorizeDataset() { // Verify that the authorized dataset has been added assertEquals(sharedDatasetAcl, updatedDataset.getAcl()); + + RemoteBigQueryHelper.forceDelete(bigquery, datasetName); + RemoteBigQueryHelper.forceDelete(bigquery, authorizedDatasetName); } /* TODO(prasmish): replicate the entire test case for executeSelect */ @@ -3514,7 +3516,7 @@ public void testQuery() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); rowCount++; } assertEquals(2, rowCount); @@ -3945,14 +3947,14 @@ public void testExecuteSelectSinglePageTableRow() throws SQLException { assertTrue(rs.next()); // first row // checking for the null or 0 column values assertNull(rs.getString("StringField")); - assertTrue(rs.getDouble("BigNumericField") == 0.0d); + assertEquals(0.0d, rs.getDouble("BigNumericField"), 1e-9); assertFalse(rs.getBoolean("BooleanField")); assertNull(rs.getBytes("BytesField")); - assertEquals(rs.getInt("IntegerField"), 0); + assertEquals(0, rs.getInt("IntegerField")); assertNull(rs.getTimestamp("TimestampField")); assertNull(rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == 0.0d); - assertTrue(rs.getDouble("NumericField") == 0.0d); + assertEquals(0.0d, rs.getDouble("FloatField"), 1e-9); + assertEquals(0.0d, rs.getDouble("NumericField"), 1e-9); assertNull(rs.getTime("TimeField")); assertNull(rs.getString("DateTimeField")); assertNull(rs.getString("GeographyField")); @@ -3962,14 +3964,14 @@ public void testExecuteSelectSinglePageTableRow() throws SQLException { assertTrue(rs.next()); 
// second row // second row is non null, comparing the values assertEquals("StringValue1", rs.getString("StringField")); - assertTrue(rs.getDouble("BigNumericField") == 0.3333333333333333d); + assertEquals(0.3333333333333333d, rs.getDouble("BigNumericField"), 1e-9); assertFalse(rs.getBoolean("BooleanField")); assertNotNull(rs.getBytes("BytesField")); assertEquals(1, rs.getInt("IntegerField")); assertEquals(1534680695123L, rs.getTimestamp("TimestampField").getTime()); assertEquals(java.sql.Date.valueOf("2018-08-19"), rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == 10.1d); - assertTrue(rs.getDouble("NumericField") == 100.0d); + assertEquals(10.1d, rs.getDouble("FloatField"), 1e-9); + assertEquals(100.0d, rs.getDouble("NumericField"), 1e-9); assertEquals(Time.valueOf(LocalTime.of(12, 11, 35, 123456)), rs.getTime("TimeField")); assertEquals("2018-08-19T12:11:35.123456", rs.getString("DateTimeField")); assertEquals("POINT(-122.35022 47.649154)", rs.getString("GeographyField")); @@ -4015,14 +4017,14 @@ public void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException assertTrue(rs.next()); // first row // checking for the null or 0 column values assertNull(rs.getString("StringField")); - assertTrue(rs.getDouble("BigNumericField") == 0.0d); + assertEquals(0.0d, rs.getDouble("BigNumericField"), 1e-9); assertFalse(rs.getBoolean("BooleanField")); assertNull(rs.getBytes("BytesField")); - assertEquals(rs.getInt("IntegerField"), 0); + assertEquals(0, rs.getInt("IntegerField")); assertNull(rs.getTimestamp("TimestampField")); assertNull(rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == 0.0d); - assertTrue(rs.getDouble("NumericField") == 0.0d); + assertEquals(0.0d, rs.getDouble("FloatField"), 1e-9); + assertEquals(0.0d, rs.getDouble("NumericField"), 1e-9); assertNull(rs.getTime("TimeField")); assertNull(rs.getString("DateTimeField")); assertNull(rs.getString("GeographyField")); @@ -4032,14 +4034,14 @@ public void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException assertTrue(rs.next()); // second row // second row is non null, comparing the values assertEquals("StringValue1", rs.getString("StringField")); - assertTrue(rs.getDouble("BigNumericField") == 0.3333333333333333d); + assertEquals(0.3333333333333333d, rs.getDouble("BigNumericField"), 1e-9); assertFalse(rs.getBoolean("BooleanField")); assertNotNull(rs.getBytes("BytesField")); assertEquals(1, rs.getInt("IntegerField")); assertEquals(1534680695123L, rs.getTimestamp("TimestampField").getTime()); assertEquals(java.sql.Date.valueOf("2018-08-19"), rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == 10.1d); - assertTrue(rs.getDouble("NumericField") == 100.0d); + assertEquals(10.1d, rs.getDouble("FloatField"), 1e-9); + assertEquals(100.0d, rs.getDouble("NumericField"), 1e-9); assertEquals( Time.valueOf(LocalTime.of(12, 11, 35, 123456)).toString(), rs.getTime("TimeField").toString()); @@ -4364,7 +4366,7 @@ public void testExecuteSelectSinglePageTableRowColInd() throws SQLException { assertEquals(2, bigQueryResult.getTotalRows()); // Expecting 2 rows while (rs.next()) { assertEquals(rs.getString(0), rs.getString("StringField")); - assertTrue(rs.getDouble(1) == rs.getDouble("BigNumericField")); + assertEquals(rs.getDouble(1), rs.getDouble("BigNumericField"), 1e-9); assertEquals(rs.getBoolean(2), rs.getBoolean("BooleanField")); if (rs.getBytes(3) == null) { // both overloads should be null assertEquals(rs.getBytes(3), rs.getBytes("BytesField")); @@ -4376,8 +4378,8 @@ 
public void testExecuteSelectSinglePageTableRowColInd() throws SQLException { assertEquals(rs.getInt(4), rs.getInt("IntegerField")); assertEquals(rs.getTimestamp(5), rs.getTimestamp("TimestampField")); assertEquals(rs.getDate(9), rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == rs.getDouble(6)); - assertTrue(rs.getDouble("NumericField") == rs.getDouble(7)); + assertEquals(rs.getDouble("FloatField"), rs.getDouble(6), 1e-9); + assertEquals(rs.getDouble("NumericField"), rs.getDouble(7), 1e-9); assertEquals(rs.getTime(8), rs.getTime("TimeField")); assertEquals(rs.getString(10), rs.getString("DateTimeField")); assertEquals(rs.getString(11), rs.getString("GeographyField")); @@ -4638,7 +4640,7 @@ public void testFastSQLQuery() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); } } @@ -4808,7 +4810,7 @@ public void testFastDDLQuery() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); } } @@ -5249,7 +5251,7 @@ public void testPositionalQueryParameters() throws InterruptedException { (long) Double.parseDouble("1.40845209522E9"), (long) Double.parseDouble(values.get(0).getValue().toString())); assertEquals("stringValue", values.get(1).getValue()); - assertEquals(false, values.get(2).getBooleanValue()); + assertFalse(values.get(2).getBooleanValue()); assertEquals("0.33333333333333333333333333333333333333", values.get(3).getValue()); assertEquals("0.00000000000000000000000000000000000001", values.get(4).getValue()); assertEquals("-100000000000000000000000000000000000000", values.get(5).getValue()); @@ -5579,15 +5581,15 @@ public void testStructQuery() throws InterruptedException { assertEquals(2, Iterables.size(result.getValues())); for (FieldValueList values : result.iterateAll()) { for (FieldValue value : values) { - assertEquals(null, value.getRecordValue().get("StringField").getValue()); - assertEquals(true, value.getRecordValue().get("BooleanField").getBooleanValue()); + assertNull(value.getRecordValue().get("StringField").getValue()); + assertTrue(value.getRecordValue().get("BooleanField").getBooleanValue()); } } } private static void assertsFieldValue(FieldValue record) { assertEquals(FieldValue.Attribute.RECORD, record.getAttribute()); - assertEquals(true, record.getRecordValue().get("booleanField").getBooleanValue()); + assertTrue(record.getRecordValue().get("booleanField").getBooleanValue()); assertEquals(10, record.getRecordValue().get("integerField").getLongValue()); assertEquals("test-stringField", record.getRecordValue().get("stringField").getStringValue()); } @@ -5622,12 +5624,12 @@ public void testNestedStructNamedQueryParameters() throws InterruptedException { for (FieldValueList values : result.iterateAll()) { for (FieldValue value : values) { assertEquals(Attribute.RECORD, value.getAttribute()); - assertEquals(true, value.getRecordValue().get(0).getRecordValue().get(0).getBooleanValue()); + assertTrue(value.getRecordValue().get(0).getRecordValue().get(0).getBooleanValue()); assertEquals(10, 
value.getRecordValue().get(0).getRecordValue().get(1).getLongValue()); assertEquals( "test-stringField", value.getRecordValue().get(0).getRecordValue().get(2).getStringValue()); - assertEquals(true, value.getRecordValue().get(1).getBooleanValue()); + assertTrue(value.getRecordValue().get(1).getBooleanValue()); assertEquals("test-stringField", value.getRecordValue().get(2).getStringValue()); assertEquals(10, value.getRecordValue().get(3).getLongValue()); } @@ -5674,7 +5676,7 @@ public void testGeographyParameter() throws Exception { int rowCount = 0; for (FieldValueList row : result.getValues()) { rowCount++; - assertEquals(true, row.get(0).getBooleanValue()); + assertTrue(row.get(0).getBooleanValue()); } assertEquals(1, rowCount); } @@ -5938,7 +5940,6 @@ public void testSnapshotTableCopyJob() throws InterruptedException { assertNotNull(snapshotTable); assertEquals(snapshotTableId.getDataset(), snapshotTable.getTableId().getDataset()); assertEquals(snapshotTableName, snapshotTable.getTableId().getTable()); - System.out.println(snapshotTable.getDefinition()); assertTrue(snapshotTable.getDefinition() instanceof SnapshotTableDefinition); assertEquals(DDL_TABLE_SCHEMA, snapshotTable.getDefinition().getSchema()); assertNotNull(((SnapshotTableDefinition) snapshotTable.getDefinition()).getSnapshotTime()); @@ -6034,7 +6035,7 @@ public void testQueryJob() throws InterruptedException, TimeoutException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); rowCount++; } assertEquals(2, rowCount); @@ -6042,12 +6043,12 @@ public void testQueryJob() throws InterruptedException, TimeoutException { Job queryJob = bigquery.getJob(remoteJob.getJobId()); JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); if (statistics.getBiEngineStats() != null) { - assertEquals(statistics.getBiEngineStats().getBiEngineMode(), "DISABLED"); + assertEquals("DISABLED", statistics.getBiEngineStats().getBiEngineMode()); assertEquals( - statistics.getBiEngineStats().getBiEngineReasons().get(0).getCode(), "OTHER_REASON"); + "OTHER_REASON", statistics.getBiEngineStats().getBiEngineReasons().get(0).getCode()); assertEquals( - statistics.getBiEngineStats().getBiEngineReasons().get(0).getMessage(), - "Only SELECT queries without a destination table can be accelerated."); + "Only SELECT queries without a destination table can be accelerated.", + statistics.getBiEngineStats().getBiEngineReasons().get(0).getMessage()); } assertNotNull(statistics.getQueryPlan()); } @@ -6115,11 +6116,11 @@ public void testQueryJobWithSearchReturnsSearchStatisticsUnused() throws Interru assertNull(remoteJob.getStatus().getError()); JobStatistics.QueryStatistics stats = remoteJob.getStatistics(); assertNotNull(stats.getSearchStats()); - assertEquals(stats.getSearchStats().getIndexUsageMode(), "UNUSED"); + assertEquals("UNUSED", stats.getSearchStats().getIndexUsageMode()); assertNotNull(stats.getSearchStats().getIndexUnusedReasons()); - assertNotNull( - stats.getSearchStats().getIndexUnusedReasons().get(0).getCode(), - "INDEX_CONFIG_NOT_AVAILABLE"); + assertEquals( + "INDEX_CONFIG_NOT_AVAILABLE", + stats.getSearchStats().getIndexUnusedReasons().get(0).getCode()); } finally { bigquery.delete(destinationTable); } @@ -6196,8 +6197,8 @@ public void testLoadJobWithDecimalTargetTypes() throws 
InterruptedException { Table remoteTable = bigquery.getTable(DATASET, tableName); assertNotNull(remoteTable); assertEquals( - remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString(), - "BIGNUMERIC"); + "BIGNUMERIC", + remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString()); } finally { bigquery.delete(destinationTable); } @@ -6218,8 +6219,8 @@ public void testExternalTableWithDecimalTargetTypes() throws InterruptedExceptio Table remoteTable = bigquery.getTable(DATASET, tableName); assertNotNull(remoteTable); assertEquals( - remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString(), - "BIGNUMERIC"); + "BIGNUMERIC", + remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString()); assertTrue(remoteTable.delete()); } @@ -6422,13 +6423,13 @@ public void testInsertFromFile() throws InterruptedException, IOException, Timeo assertEquals("stringValue", stringCell.getStringValue()); assertEquals(0, integerArrayCell.getRepeatedValue().get(0).getLongValue()); assertEquals(1, integerArrayCell.getRepeatedValue().get(1).getLongValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); assertArrayEquals(BYTES, bytesCell.getBytesValue()); assertEquals(-14182916000000L, recordCell.getRecordValue().get(0).getTimestampValue()); assertTrue(recordCell.getRecordValue().get(1).isNull()); assertEquals(1, recordCell.getRecordValue().get(2).getRepeatedValue().get(0).getLongValue()); assertEquals(0, recordCell.getRecordValue().get(2).getRepeatedValue().get(1).getLongValue()); - assertEquals(true, recordCell.getRecordValue().get(3).getBooleanValue()); + assertTrue(recordCell.getRecordValue().get(3).getBooleanValue()); assertEquals(3, integerCell.getLongValue()); assertEquals(1.2, floatCell.getDoubleValue(), 0.0001); assertEquals("POINT(-122.35022 47.649154)", geographyCell.getStringValue()); @@ -6507,23 +6508,21 @@ public void testLocation() throws Exception { assertThat(location).isNotEqualTo(wrongLocation); Tracer tracer = otel.getTracer("Test Tracer"); - bigquery = + BigQuery otelBigquery = bigquery.getOptions().toBuilder() .setEnableOpenTelemetryTracing(true) .setOpenTelemetryTracer(tracer) .build() .getService(); + String datasetName = "locationset_" + UUID.randomUUID().toString().replace("-", "_"); Dataset dataset = - bigquery.create( - DatasetInfo.newBuilder("locationset_" + UUID.randomUUID().toString().replace("-", "_")) - .setLocation(location) - .build()); + otelBigquery.create(DatasetInfo.newBuilder(datasetName).setLocation(location).build()); try { TableId tableId = TableId.of(dataset.getDatasetId().getDataset(), "sometable"); Schema schema = Schema.of(Field.of("name", LegacySQLTypeName.STRING)); TableDefinition tableDef = StandardTableDefinition.of(schema); - Table table = bigquery.create(TableInfo.newBuilder(tableId, tableDef).build()); + Table table = otelBigquery.create(TableInfo.newBuilder(tableId, tableDef).build()); String query = String.format( @@ -6535,7 +6534,7 @@ public void testLocation() throws Exception { // Test create/get { Job job = - bigquery.create( + otelBigquery.create( JobInfo.of( JobId.newBuilder().setLocation(location).build(), QueryJobConfiguration.of(query))); @@ -6548,20 +6547,20 @@ public void testLocation() throws Exception { JobId wrongId = jobId.toBuilder().setLocation(wrongLocation).build(); // Getting with location should work. 
- assertThat(bigquery.getJob(jobId)).isNotNull(); + assertThat(otelBigquery.getJob(jobId)).isNotNull(); // Getting with wrong location shouldn't work. - assertThat(bigquery.getJob(wrongId)).isNull(); + assertThat(otelBigquery.getJob(wrongId)).isNull(); // Cancelling with location should work. (Cancelling already finished job is fine.) - assertThat(bigquery.cancel(jobId)).isTrue(); + assertThat(otelBigquery.cancel(jobId)).isTrue(); // Cancelling with wrong location shouldn't work. - assertThat(bigquery.cancel(wrongId)).isFalse(); + assertThat(otelBigquery.cancel(wrongId)).isFalse(); } // Test query { assertThat( - bigquery + otelBigquery .query( QueryJobConfiguration.of(query), JobId.newBuilder().setLocation(location).build()) @@ -6569,7 +6568,7 @@ public void testLocation() throws Exception { .isEmpty(); try { - bigquery + otelBigquery .query( QueryJobConfiguration.of(query), JobId.newBuilder().setLocation(wrongLocation).build()) @@ -6587,7 +6586,7 @@ public void testLocation() throws Exception { .setFormatOptions(FormatOptions.csv()) .build(); try (TableDataWriteChannel writer = - bigquery.writer( + otelBigquery.writer( JobId.newBuilder().setLocation(location).build(), writeChannelConfiguration)) { writer.write(ByteBuffer.wrap("foo".getBytes())); assertEquals( @@ -6597,22 +6596,16 @@ public void testLocation() throws Exception { location); } - try { - bigquery.writer( - JobId.newBuilder().setLocation(wrongLocation).build(), writeChannelConfiguration); + try (TableDataWriteChannel ignore = + otelBigquery.writer( + JobId.newBuilder().setLocation(wrongLocation).build(), writeChannelConfiguration)) { fail("writing to a table with wrong location shouldn't work"); } catch (BigQueryException e) { // Nothing to do } } } finally { - bigquery.delete(dataset.getDatasetId(), DatasetDeleteOption.deleteContents()); - bigquery = - bigquery.getOptions().toBuilder() - .setEnableOpenTelemetryTracing(false) - .setOpenTelemetryTracer(null) - .build() - .getService(); + RemoteBigQueryHelper.forceDelete(bigquery, datasetName); } } @@ -6708,7 +6701,7 @@ public void testReferenceFileSchemaUriForAvro() { Job job = bigquery.create(JobInfo.of(loadJobConfiguration)); // Blocks until this load table job completes its execution, either failing or succeeding. job = job.waitFor(); - assertEquals(true, job.isDone()); + assertTrue(job.isDone()); LoadJobConfiguration actualLoadJobConfiguration = job.getConfiguration(); Table generatedTable = bigquery.getTable(actualLoadJobConfiguration.getDestinationTable()); @@ -6716,7 +6709,7 @@ public void testReferenceFileSchemaUriForAvro() { assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests boolean success = bigquery.delete(tableId); - assertEquals(true, success); + assertTrue(success); } catch (BigQueryException | InterruptedException e) { System.out.println("Column not added during load append \n" + e.toString()); } @@ -6768,14 +6761,14 @@ public void testReferenceFileSchemaUriForParquet() { Job job = bigquery.create(JobInfo.of(loadJobConfiguration)); // Blocks until this load table job completes its execution, either failing or succeeding. 
job = job.waitFor(); - assertEquals(true, job.isDone()); + assertTrue(job.isDone()); LoadJobConfiguration actualLoadJobConfiguration = job.getConfiguration(); Table generatedTable = bigquery.getTable(actualLoadJobConfiguration.getDestinationTable()); assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests boolean success = bigquery.delete(tableId); - assertEquals(true, success); + assertTrue(success); } catch (BigQueryException | InterruptedException e) { System.out.println("Column not added during load append \n" + e.toString()); } @@ -6818,7 +6811,7 @@ public void testCreateExternalTableWithReferenceFileSchemaAvro() { assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests boolean success = bigquery.delete(tableId); - assertEquals(true, success); + assertTrue(success); } @Test @@ -6859,15 +6852,14 @@ public void testCreateExternalTableWithReferenceFileSchemaParquet() { Table generatedTable = bigquery.getTable(createdTable.getTableId()); assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests - boolean success = bigquery.delete(tableId); - assertEquals(true, success); + assertTrue(bigquery.delete(tableId)); } @Test public void testCloneTableCopyJob() throws InterruptedException { String sourceTableName = "test_copy_job_base_table"; String ddlTableName = TABLE_ID_DDL.getTable(); - String cloneTableName = String.format("test_clone_table"); + String cloneTableName = "test_clone_table"; // Create source table with some data in it String ddlQuery = String.format( @@ -7277,11 +7269,9 @@ public void testStatelessQueriesWithLocation() throws Exception { BigQuery bigQuery = bigqueryHelper.getOptions().toBuilder().setLocation(location).build().getService(); + String datasetName = "locationset_" + UUID.randomUUID().toString().replace("-", "_"); Dataset dataset = - bigQuery.create( - DatasetInfo.newBuilder("locationset_" + UUID.randomUUID().toString().replace("-", "_")) - .setLocation(location) - .build()); + bigQuery.create(DatasetInfo.newBuilder(datasetName).setLocation(location).build()); try { TableId tableId = TableId.of(dataset.getDatasetId().getDataset(), "sometable"); Schema schema = Schema.of(Field.of("name", LegacySQLTypeName.STRING)); @@ -7313,7 +7303,7 @@ public void testStatelessQueriesWithLocation() throws Exception { // Nothing to do } } finally { - bigQuery.delete(dataset.getDatasetId(), DatasetDeleteOption.deleteContents()); + RemoteBigQueryHelper.forceDelete(bigQuery, datasetName); } } @@ -7376,7 +7366,7 @@ public void testUniverseDomainWithInvalidUniverseDomain() { bigQuery.listDatasets("bigquery-public-data"); fail("RPCs to invalid universe domain should fail"); } catch (BigQueryException e) { - assertEquals(e.getCode(), HTTP_UNAUTHORIZED); + assertEquals(HTTP_UNAUTHORIZED, e.getCode()); assertNotNull(e.getMessage()); assertThat( (e.getMessage() @@ -7399,7 +7389,7 @@ public void testInvalidUniverseDomainWithMismatchCredentials() { bigQuery.listDatasets("bigquery-public-data"); fail("RPCs to invalid universe domain should fail"); } catch (BigQueryException e) { - assertEquals(e.getCode(), HTTP_UNAUTHORIZED); + assertEquals(HTTP_UNAUTHORIZED, e.getCode()); assertNotNull(e.getMessage()); assertThat( (e.getMessage() @@ -7693,49 +7683,50 @@ public void testOpenTelemetryTracingDatasets() { .build(); dataset = bigquery.update(updatedInfo, 
DatasetOption.accessPolicyVersion(2)); - assertEquals(dataset.getDescription(), "Updated Description"); + assertEquals("Updated Description", dataset.getDescription()); assertTrue(bigquery.delete(dataset.getDatasetId())); } finally { parentSpan.end(); Map, Object> createMap = OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.createDataset"); - assertEquals(createMap.get(AttributeKey.stringKey("bq.dataset.location")), "null"); + assertEquals("null", createMap.get(AttributeKey.stringKey("bq.dataset.location"))); assertEquals( + "DatasetService", OTEL_ATTRIBUTES .get("com.google.cloud.bigquery.BigQueryRpc.createDataset") - .get(AttributeKey.stringKey("bq.rpc.service")), - "DatasetService"); + .get(AttributeKey.stringKey("bq.rpc.service"))); Map, Object> getMap = OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.getDataset"); - assertEquals(getMap.get(AttributeKey.stringKey("bq.dataset.id")), billingModelDataset); + assertEquals(billingModelDataset, getMap.get(AttributeKey.stringKey("bq.dataset.id"))); Map, Object> updateMap = OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.updateDataset"); - assertEquals(updateMap.get(AttributeKey.stringKey("bq.option.ACCESS_POLICY_VERSION")), "2"); + assertEquals("2", updateMap.get(AttributeKey.stringKey("bq.option.ACCESS_POLICY_VERSION"))); Map, Object> deleteMap = OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.deleteDataset"); - assertEquals(deleteMap.get(AttributeKey.stringKey("bq.dataset.id")), billingModelDataset); + assertEquals(billingModelDataset, deleteMap.get(AttributeKey.stringKey("bq.dataset.id"))); // All should be children spans of parentSpan + String testParentSpanName = "Test Parent Span"; assertEquals( + testParentSpanName, OTEL_SPAN_IDS_TO_NAMES.get( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.getDataset")), - "Test Parent Span"); + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.getDataset"))); assertEquals( + testParentSpanName, OTEL_SPAN_IDS_TO_NAMES.get( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createDataset")), - "Test Parent Span"); + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createDataset"))); assertEquals( + testParentSpanName, OTEL_SPAN_IDS_TO_NAMES.get( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.deleteDataset")), - "Test Parent Span"); + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.deleteDataset"))); assertEquals( OTEL_SPAN_IDS_TO_NAMES.get( OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQueryRpc.createDataset")), "com.google.cloud.bigquery.BigQueryRetryHelper.runWithRetries"); - assertEquals(OTEL_PARENT_SPAN_IDS.get("Test Parent Span"), OTEL_PARENT_SPAN_ID); + assertEquals(OTEL_PARENT_SPAN_ID, OTEL_PARENT_SPAN_IDS.get(testParentSpanName)); RemoteBigQueryHelper.forceDelete(bigquery, billingModelDataset); } } @@ -7757,26 +7748,26 @@ public void testOpenTelemetryTracingTables() { .setDescription("Some Description") .build(); Table createdTable = bigquery.create(tableInfo); - assertThat(createdTable.getDescription()).isEqualTo("Some Description"); + assertEquals("Some Description", createdTable.getDescription()); assertEquals( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createTable"), - OTEL_PARENT_SPAN_ID); + OTEL_PARENT_SPAN_ID, + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createTable")); assertEquals( + tableName, OTEL_ATTRIBUTES .get("com.google.cloud.bigquery.BigQuery.createTable") - .get(AttributeKey.stringKey("bq.table.id")), - tableName); + 
.get(AttributeKey.stringKey("bq.table.id"))); assertEquals( + "null", OTEL_ATTRIBUTES .get("com.google.cloud.bigquery.BigQuery.createTable") - .get(AttributeKey.stringKey("bq.table.creation_time")), - "null"); + .get(AttributeKey.stringKey("bq.table.creation_time"))); assertEquals( + "InsertTable", OTEL_ATTRIBUTES .get("com.google.cloud.bigquery.BigQueryRpc.createTable") - .get(AttributeKey.stringKey("bq.rpc.method")), - "InsertTable"); + .get(AttributeKey.stringKey("bq.rpc.method"))); Table updatedTable = bigquery.update(createdTable.toBuilder().setDescription("Updated Description").build()); @@ -7785,8 +7776,8 @@ public void testOpenTelemetryTracingTables() { assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.updateTable")); assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.patchTable")); assertEquals( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.updateTable"), - OTEL_PARENT_SPAN_ID); + OTEL_PARENT_SPAN_ID, + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.updateTable")); assertTrue(bigquery.delete(updatedTable.getTableId())); } From 5d389cf45b41a0edceb3c5ed98dd2421ba6f2234 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 12 Dec 2025 21:51:40 +0000 Subject: [PATCH 12/36] deps: update actions/upload-artifact action to v6 (#4027) Co-authored-by: Blake Li --- .github/workflows/scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 1524dc8d2c..69df4a2532 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -59,7 +59,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. 
- name: "Upload artifact" - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: SARIF file path: results.sarif From 3673310c47b5836caba997d76d60d3e8233ea146 Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Fri, 12 Dec 2025 17:27:07 -0500 Subject: [PATCH 13/36] chore: Use assertThrows in tests (#4028) * chore: Cleanup initialized resources * chore: Use try-with to close stream * chore: Cleanup Intellij test warnings * chore: Allow for floating point inaccuracies * chore: Add otel delete operation * chore: Clean up rest of the IT test file * chore: Add GCA feedback --- .../cloud/bigquery/it/ITBigQueryTest.java | 388 +++++++++--------- .../bigquery/it/ITNightlyBigQueryTest.java | 3 +- 2 files changed, 186 insertions(+), 205 deletions(-) diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index de9fef4ee5..80605884d3 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -26,6 +26,7 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -1201,6 +1202,15 @@ public static void afterClass() throws Exception { } } + static GoogleCredentials loadCredentials(String credentialFile) { + try (InputStream keyStream = new ByteArrayInputStream(credentialFile.getBytes())) { + return GoogleCredentials.fromStream(keyStream); + } catch (IOException e) { + fail("Couldn't create fake JSON credentials."); + } + return null; + } + @Test public void testListDatasets() { Page datasets = bigquery.listDatasets("bigquery-public-data"); @@ -1227,9 +1237,10 @@ public void testListDatasetsWithFilter() { for (Dataset dataset : datasets.getValues()) { assertTrue( "failed to find label key in dataset", dataset.getLabels().containsKey("example-label1")); - assertTrue( + assertEquals( "failed to find label value in dataset", - dataset.getLabels().get("example-label1").equals("example-value1")); + "example-value1", + dataset.getLabels().get("example-label1")); count++; } assertTrue(count > 0); @@ -1573,14 +1584,14 @@ public void testJsonType() throws InterruptedException { .setUseLegacySql(false) .addPositionalParameter(badJsonParameter) .build(); - try { - bigquery.query(dmlQueryJobConfiguration2); - fail("Querying with malformed JSON shouldn't work"); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error); - assertEquals("invalidQuery", error.getReason()); - } + BigQueryException exception = + assertThrows( + "Querying with malformed JSON shouldn't work", + BigQueryException.class, + () -> bigquery.query(dmlQueryJobConfiguration2)); + BigQueryError error = exception.getError(); + assertNotNull(error); + assertEquals("invalidQuery", error.getReason()); } finally { assertTrue(bigquery.delete(tableId)); } @@ -2808,15 +2819,15 @@ public void testUpdateNonExistingTable() { TableInfo.of( TableId.of(DATASET, "test_update_non_existing_table"), StandardTableDefinition.of(SIMPLE_SCHEMA)); - try { - bigquery.update(tableInfo); - fail("BigQueryException was expected"); - } catch (BigQueryException e) { 
- BigQueryError error = e.getError(); - assertNotNull(error); - assertEquals("notFound", error.getReason()); - assertNotNull(error.getMessage()); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.update(tableInfo)); + BigQueryError error = exception.getError(); + assertNotNull(error); + assertEquals("notFound", error.getReason()); + assertNotNull(error.getMessage()); } @Test @@ -3404,16 +3415,16 @@ public void testAuthorizeDataset() { public void testSingleStatementsQueryException() throws InterruptedException { String invalidQuery = String.format("INSERT %s.%s VALUES('3', 10);", DATASET, TABLE_ID.getTable()); - try { - bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(); - fail("BigQueryException was expected"); - } catch (BigQueryException ex) { - assertEquals("invalidQuery", ex.getReason()); - assertNotNull(ex.getMessage()); - BigQueryError error = ex.getError(); - assertEquals("invalidQuery", error.getReason()); - assertNotNull(error.getMessage()); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor()); + assertEquals("invalidQuery", exception.getReason()); + assertNotNull(exception.getMessage()); + BigQueryError error = exception.getError(); + assertEquals("invalidQuery", error.getReason()); + assertNotNull(error.getMessage()); } /* TODO(prasmish): replicate the entire test case for executeSelect */ @@ -3423,16 +3434,16 @@ public void testMultipleStatementsQueryException() throws InterruptedException { String.format( "INSERT %s.%s VALUES('3', 10); DELETE %s.%s where c2=3;", DATASET, TABLE_ID.getTable(), DATASET, TABLE_ID.getTable()); - try { - bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(); - fail("BigQueryException was expected"); - } catch (BigQueryException ex) { - assertEquals("invalidQuery", ex.getReason()); - assertNotNull(ex.getMessage()); - BigQueryError error = ex.getError(); - assertEquals("invalidQuery", error.getReason()); - assertNotNull(error.getMessage()); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor()); + assertEquals("invalidQuery", exception.getReason()); + assertNotNull(exception.getMessage()); + BigQueryError error = exception.getError(); + assertEquals("invalidQuery", error.getReason()); + assertNotNull(error.getMessage()); } @Test @@ -3668,12 +3679,7 @@ public void testExecuteSelectWithCredentials() throws SQLException { BigQuery bigQueryBadCredentials = bigQueryOptionsBadCredentials.getService(); Connection connectionBadCredentials = bigQueryBadCredentials.createConnection(connectionSettings); - try { - connectionBadCredentials.executeSelect(query); - fail(); // this line should not be reached - } catch (BigQuerySQLException e) { - assertNotNull(e); - } + assertThrows(BigQuerySQLException.class, () -> connectionBadCredentials.executeSelect(query)); } /* TODO(prasmish): replicate the entire test case for executeSelect */ @@ -4228,12 +4234,7 @@ public void testExecuteSelectAsyncCancel() }); testCloseAsync.start(); - try { - executeSelectFut.get(); - fail(); // this line should not be reached - } catch (CancellationException e) { - assertNotNull(e); - } + assertThrows(CancellationException.class, executeSelectFut::get); } @Test @@ 
-4855,14 +4856,14 @@ public void testFastQueryHTTPException() throws InterruptedException { QueryJobConfiguration.newBuilder(queryInvalid) .setDefaultDataset(DatasetId.of(DATASET)) .build(); - try { - bigquery.query(configInvalidQuery); - fail("\"BigQueryException was expected\""); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error.getMessage()); - assertEquals("invalidQuery", error.getReason()); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.query(configInvalidQuery)); + BigQueryError error = exception.getError(); + assertNotNull(error.getMessage()); + assertEquals("invalidQuery", error.getReason()); String queryMissingTable = "SELECT * FROM " + TableId.of(DATASET, "non_existing_table").getTable(); @@ -4870,14 +4871,15 @@ public void testFastQueryHTTPException() throws InterruptedException { QueryJobConfiguration.newBuilder(queryMissingTable) .setDefaultDataset(DatasetId.of(DATASET)) .build(); - try { - bigquery.query(configMissingTable); - fail("\"BigQueryException was expected\""); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error.getMessage()); - assertEquals("notFound", error.getReason()); - } + + BigQueryException exception1 = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.query(configMissingTable)); + BigQueryError error1 = exception1.getError(); + assertNotNull(error1.getMessage()); + assertEquals("notFound", error1.getReason()); } @Test @@ -5559,12 +5561,11 @@ public void testEmptyRepeatedRecordNamedQueryParameters() throws InterruptedExce .setUseLegacySql(false) .addNamedParameter("repeatedRecordField", repeatedRecord) .build(); - try { - bigquery.query(config); - fail("an empty array of struct query parameter shouldn't work with 'IN UNNEST'"); - } catch (BigQueryException e) { - // Nothing to do - } + + assertThrows( + "an empty array of struct query parameter shouldn't work with 'IN UNNEST'", + BigQueryException.class, + () -> bigquery.query(config)); } @Test @@ -5904,7 +5905,7 @@ public void testSnapshotTableCopyJob() throws InterruptedException { String sourceTableName = "test_copy_job_base_table"; String ddlTableName = TABLE_ID_DDL.getTable(); // this creates a snapshot table at specified snapshotTime - String snapshotTableName = String.format("test_snapshot_table"); + String snapshotTableName = "test_snapshot_table"; // Create source table with some data in it String ddlQuery = String.format( @@ -6374,20 +6375,17 @@ public void testInsertFromFile() throws InterruptedException, IOException, Timeo .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) .setSchema(TABLE_SCHEMA) .build(); - TableDataWriteChannel channel = bigquery.writer(configuration); - try { + try (TableDataWriteChannel channel = bigquery.writer(configuration)) { // A zero byte write should not throw an exception. assertEquals(0, channel.write(ByteBuffer.wrap("".getBytes(StandardCharsets.UTF_8)))); - } finally { - // Force the channel to flush by calling `close`. 
- channel.close(); } - channel = bigquery.writer(configuration); + TableDataWriteChannel channel = bigquery.writer(configuration); try { channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8))); } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); LoadStatistics statistics = job.getStatistics(); assertEquals(1L, statistics.getInputFiles().longValue()); @@ -6452,20 +6450,17 @@ public void testInsertFromFileWithLabels() .setSchema(TABLE_SCHEMA) .setLabels(LABELS) .build(); - TableDataWriteChannel channel = bigquery.writer(configuration); - try { + try (TableDataWriteChannel channel = bigquery.writer(configuration)) { // A zero byte write should not throw an exception. assertEquals(0, channel.write(ByteBuffer.wrap("".getBytes(StandardCharsets.UTF_8)))); - } finally { - // Force the channel to flush by calling `close`. - channel.close(); } - channel = bigquery.writer(configuration); + TableDataWriteChannel channel = bigquery.writer(configuration); try { channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8))); } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); LoadJobConfiguration jobConfiguration = job.getConfiguration(); assertEquals(TABLE_SCHEMA, jobConfiguration.getSchema()); @@ -6491,6 +6486,7 @@ public void testInsertWithDecimalTargetTypes() } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); LoadJobConfiguration jobConfiguration = job.getConfiguration(); assertNull(job.getStatus().getError()); @@ -6532,52 +6528,47 @@ public void testLocation() throws Exception { table.getTableId().getTable()); // Test create/get - { - Job job = - otelBigquery.create( - JobInfo.of( - JobId.newBuilder().setLocation(location).build(), - QueryJobConfiguration.of(query))); - job = job.waitFor(); - assertThat(job.getStatus().getError()).isNull(); - - assertThat(job.getJobId().getLocation()).isEqualTo(location); - - JobId jobId = job.getJobId(); - JobId wrongId = jobId.toBuilder().setLocation(wrongLocation).build(); - - // Getting with location should work. - assertThat(otelBigquery.getJob(jobId)).isNotNull(); - // Getting with wrong location shouldn't work. - assertThat(otelBigquery.getJob(wrongId)).isNull(); - - // Cancelling with location should work. (Cancelling already finished job is fine.) - assertThat(otelBigquery.cancel(jobId)).isTrue(); - // Cancelling with wrong location shouldn't work. - assertThat(otelBigquery.cancel(wrongId)).isFalse(); - } + Job job = + otelBigquery.create( + JobInfo.of( + JobId.newBuilder().setLocation(location).build(), + QueryJobConfiguration.of(query))); + job = job.waitFor(); + assertThat(job.getStatus().getError()).isNull(); + + assertThat(job.getJobId().getLocation()).isEqualTo(location); + + JobId jobId = job.getJobId(); + JobId wrongId = jobId.toBuilder().setLocation(wrongLocation).build(); + + // Getting with location should work. + assertThat(otelBigquery.getJob(jobId)).isNotNull(); + // Getting with wrong location shouldn't work. + assertThat(otelBigquery.getJob(wrongId)).isNull(); + + // Cancelling with location should work. (Cancelling already finished job is fine.) + assertThat(otelBigquery.cancel(jobId)).isTrue(); + // Cancelling with wrong location shouldn't work. 
+ assertThat(otelBigquery.cancel(wrongId)).isFalse(); // Test query - { - assertThat( - otelBigquery - .query( - QueryJobConfiguration.of(query), - JobId.newBuilder().setLocation(location).build()) - .iterateAll()) - .isEmpty(); - - try { - otelBigquery - .query( - QueryJobConfiguration.of(query), - JobId.newBuilder().setLocation(wrongLocation).build()) - .iterateAll(); - fail("querying a table with wrong location shouldn't work"); - } catch (BigQueryException e) { - // Nothing to do - } - } + assertThat( + otelBigquery + .query( + QueryJobConfiguration.of(query), + JobId.newBuilder().setLocation(location).build()) + .iterateAll()) + .isEmpty(); + + assertThrows( + "querying a table with wrong location shouldn't work", + BigQueryException.class, + () -> + otelBigquery + .query( + QueryJobConfiguration.of(query), + JobId.newBuilder().setLocation(wrongLocation).build()) + .iterateAll()); // Test write { @@ -6596,13 +6587,15 @@ public void testLocation() throws Exception { location); } - try (TableDataWriteChannel ignore = - otelBigquery.writer( - JobId.newBuilder().setLocation(wrongLocation).build(), writeChannelConfiguration)) { - fail("writing to a table with wrong location shouldn't work"); - } catch (BigQueryException e) { - // Nothing to do - } + assertThrows( + "writing to a table with wrong location shouldn't work", + BigQueryException.class, + () -> { + try (TableDataWriteChannel ignore = + otelBigquery.writer( + JobId.newBuilder().setLocation(wrongLocation).build(), + writeChannelConfiguration)) {} + }); } } finally { RemoteBigQueryHelper.forceDelete(bigquery, datasetName); @@ -6627,6 +6620,7 @@ public void testWriteChannelPreserveAsciiControlCharacters() } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); assertNull(job.getStatus().getError()); Page rows = bigquery.listTableData(tableId); @@ -6708,10 +6702,9 @@ public void testReferenceFileSchemaUriForAvro() { assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests - boolean success = bigquery.delete(tableId); - assertTrue(success); + assertTrue(bigquery.delete(tableId)); } catch (BigQueryException | InterruptedException e) { - System.out.println("Column not added during load append \n" + e.toString()); + System.out.println("Column not added during load append \n" + e); } } @@ -6767,10 +6760,9 @@ public void testReferenceFileSchemaUriForParquet() { assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests - boolean success = bigquery.delete(tableId); - assertTrue(success); + assertTrue(bigquery.delete(tableId)); } catch (BigQueryException | InterruptedException e) { - System.out.println("Column not added during load append \n" + e.toString()); + System.out.println("Column not added during load append \n" + e); } } @@ -6810,8 +6802,7 @@ public void testCreateExternalTableWithReferenceFileSchemaAvro() { Table generatedTable = bigquery.getTable(createdTable.getTableId()); assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests - boolean success = bigquery.delete(tableId); - assertTrue(success); + assertTrue(bigquery.delete(tableId)); } @Test @@ -7208,8 +7199,7 @@ public void testStatelessQueries() throws InterruptedException { private TableResult executeSimpleQuery(BigQuery bigQuery) throws InterruptedException { String query = "SELECT 
CURRENT_TIMESTAMP() as ts"; QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).build(); - TableResult result = bigQuery.query(config); - return result; + return bigQuery.query(config); } @Test @@ -7291,17 +7281,20 @@ public void testStatelessQueriesWithLocation() throws Exception { assertNull(tb.getJobId()); // Test stateless query when BigQueryOption location does not match dataset location. - try { - BigQuery bigQueryWrongLocation = - bigqueryHelper.getOptions().toBuilder().setLocation(wrongLocation).build().getService(); - bigQueryWrongLocation - .getOptions() - .setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); - bigQueryWrongLocation.query(QueryJobConfiguration.of(query)); - fail("querying a table with wrong location shouldn't work"); - } catch (BigQueryException e) { - // Nothing to do - } + assertThrows( + "querying a table with wrong location shouldn't work", + BigQueryException.class, + () -> { + BigQuery bigQueryWrongLocation = + bigqueryHelper.getOptions().toBuilder() + .setLocation(wrongLocation) + .build() + .getService(); + bigQueryWrongLocation + .getOptions() + .setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + bigQueryWrongLocation.query(QueryJobConfiguration.of(query)); + }); } finally { RemoteBigQueryHelper.forceDelete(bigQuery, datasetName); } @@ -7361,18 +7354,17 @@ public void testUniverseDomainWithInvalidUniverseDomain() { .build(); BigQuery bigQuery = bigQueryOptions.getService(); - try { - // Use list dataset to send RPC to invalid domain. - bigQuery.listDatasets("bigquery-public-data"); - fail("RPCs to invalid universe domain should fail"); - } catch (BigQueryException e) { - assertEquals(HTTP_UNAUTHORIZED, e.getCode()); - assertNotNull(e.getMessage()); - assertThat( - (e.getMessage() - .contains("does not match the universe domain found in the credentials"))) - .isTrue(); - } + BigQueryException exception = + assertThrows( + "RPCs to invalid universe domain should fail", + BigQueryException.class, + () -> bigQuery.listDatasets("bigquery-public-data")); + assertEquals(HTTP_UNAUTHORIZED, exception.getCode()); + assertNotNull(exception.getMessage()); + assertTrue( + exception + .getMessage() + .contains("does not match the universe domain found in the credentials")); } @Test @@ -7384,18 +7376,17 @@ public void testInvalidUniverseDomainWithMismatchCredentials() { .build(); BigQuery bigQuery = bigQueryOptions.getService(); - try { - // Use list dataset to send RPC to invalid domain. 
- bigQuery.listDatasets("bigquery-public-data"); - fail("RPCs to invalid universe domain should fail"); - } catch (BigQueryException e) { - assertEquals(HTTP_UNAUTHORIZED, e.getCode()); - assertNotNull(e.getMessage()); - assertThat( - (e.getMessage() - .contains("does not match the universe domain found in the credentials"))) - .isTrue(); - } + BigQueryException exception = + assertThrows( + "RPCs to invalid universe domain should fail", + BigQueryException.class, + () -> bigQuery.listDatasets("bigquery-public-data")); + assertEquals(HTTP_UNAUTHORIZED, exception.getCode()); + assertNotNull(exception.getMessage()); + assertTrue( + exception + .getMessage() + .contains("does not match the universe domain found in the credentials")); } @Test @@ -7477,17 +7468,18 @@ public void testExternalMetadataCacheModeFailForNonBiglake() { .build(); TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); - try { - bigquery.create(tableInfo); - fail("BigQueryException was expected"); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error); - assertEquals("invalid", error.getReason()); - assertThat( - e.getMessage().contains("metadataCacheMode provided for non BigLake external table")) - .isTrue(); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.create(tableInfo)); + BigQueryError error = exception.getError(); + assertNotNull(error); + assertEquals("invalid", error.getReason()); + assertTrue( + exception + .getMessage() + .contains("metadataCacheMode provided for non BigLake external table")); } @Test @@ -7533,16 +7525,6 @@ public void testObjectTable() throws InterruptedException { } } - static GoogleCredentials loadCredentials(String credentialFile) { - try { - InputStream keyStream = new ByteArrayInputStream(credentialFile.getBytes()); - return GoogleCredentials.fromStream(keyStream); - } catch (IOException e) { - fail("Couldn't create fake JSON credentials."); - } - return null; - } - @Test public void testQueryExportStatistics() throws InterruptedException { String query = diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java index 588484749c..790f35fe5c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java @@ -63,7 +63,6 @@ import java.util.Map; import java.util.TimeZone; import java.util.UUID; -import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.arrow.vector.util.JsonStringArrayList; @@ -183,7 +182,7 @@ public static void beforeClass() throws InterruptedException, IOException { } @AfterClass - public static void afterClass() throws ExecutionException, InterruptedException { + public static void afterClass() { try { if (bigquery != null) { deleteTable(DATASET, TABLE); From 00ec93962debcdee7cfed506bc33c9f75082fa33 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 12 Dec 2025 22:11:32 -0500 Subject: [PATCH 14/36] chore(main): release 2.57.1 (#4029) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ benchmark/pom.xml | 2 +- google-cloud-bigquery-bom/pom.xml | 4 ++-- 
google-cloud-bigquery/pom.xml | 4 ++-- pom.xml | 4 ++-- samples/snapshot/pom.xml | 2 +- versions.txt | 2 +- 7 files changed, 16 insertions(+), 9 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f9397c65bc..469bc0b33d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog

+## [2.57.1](https://github.com/googleapis/java-bigquery/compare/v2.57.0...v2.57.1) (2025-12-12)
+
+
+### Dependencies
+
+* Update actions/upload-artifact action to v6 ([#4027](https://github.com/googleapis/java-bigquery/issues/4027)) ([5d389cf](https://github.com/googleapis/java-bigquery/commit/5d389cf45b41a0edceb3c5ed98dd2421ba6f2234))
+
 ## [2.57.0](https://github.com/googleapis/java-bigquery/compare/v2.56.0...v2.57.0) (2025-12-11)

diff --git a/benchmark/pom.xml b/benchmark/pom.xml
index 8d70e4d685..4b04479cd3 100644
--- a/benchmark/pom.xml
+++ b/benchmark/pom.xml
@@ -6,7 +6,7 @@ google-cloud-bigquery-parent com.google.cloud - 2.57.1-SNAPSHOT + 2.57.1 diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 3532008034..dc9d046e59 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-bom - 2.57.1-SNAPSHOT + 2.57.1 pom com.google.cloud @@ -54,7 +54,7 @@ com.google.cloud google-cloud-bigquery - 2.57.1-SNAPSHOT + 2.57.1 diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 9517e49198..adeb1ce480 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.57.1-SNAPSHOT + 2.57.1 jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.57.1-SNAPSHOT + 2.57.1 google-cloud-bigquery diff --git a/pom.xml b/pom.xml index 129c6226c6..9c123afe21 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 2.57.1-SNAPSHOT + 2.57.1 BigQuery Parent https://github.com/googleapis/java-bigquery @@ -93,7 +93,7 @@ com.google.cloud google-cloud-bigquery - 2.57.1-SNAPSHOT + 2.57.1 diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 3976d33b4e..901c0c5ca0 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -56,7 +56,7 @@ com.google.cloud google-cloud-bigquery - 2.57.1-SNAPSHOT + 2.57.1 diff --git a/versions.txt b/versions.txt index e6d97a7042..218ca40390 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:2.57.0:2.57.1-SNAPSHOT \ No newline at end of file +google-cloud-bigquery:2.57.1:2.57.1 \ No newline at end of file

From c18702edc69cee76e7ae7a2bbedbc9c1a16b48dc Mon Sep 17 00:00:00 2001
From: Blake Li
Date: Mon, 15 Dec 2025 10:46:26 -0500
Subject: [PATCH 15/36] chore: Update renovate.json (#4031)

A deps update from bigquerystorage was incorrectly marked as chore(deps). This would cause
the upgrade not to show up in the release notes and also not to trigger release-please.

This is because the pattern /^com.google.cloud:google-cloud-bigquery is configured in
renovate.json, which matches both bigquery and bigquerystorage. However, I believe the
original intention was to cover only the bigquery update in samples, not the
bigquerystorage update.
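[Editor's note: the following sketch is illustrative only and is not part of the original patch; the class name is hypothetical. With an unanchored pattern, any artifact ID that merely starts with the prefix matches, which is why bigquerystorage was caught; anchoring the regex with $ restricts it to the exact artifact. Renovate itself evaluates these patterns as JavaScript regular expressions, but the anchoring semantics are the same as in Java's regex engine:]

import java.util.regex.Pattern;

// Hypothetical demo class, not part of this repository.
public class RenovatePatternDemo {
  public static void main(String[] args) {
    // Unanchored prefix pattern: matches bigquery AND bigquerystorage (the bug).
    Pattern loose = Pattern.compile("^com.google.cloud:google-cloud-bigquery");
    // Anchored with '$': matches only the exact bigquery artifact (the fix).
    Pattern strict = Pattern.compile("^com.google.cloud:google-cloud-bigquery$");

    String storage = "com.google.cloud:google-cloud-bigquerystorage";
    String bigquery = "com.google.cloud:google-cloud-bigquery";

    System.out.println(loose.matcher(storage).find());   // true  -> mislabeled as chore(deps)
    System.out.println(strict.matcher(storage).find());  // false -> no longer matches
    System.out.println(strict.matcher(bigquery).find()); // true  -> still matches bigquery
  }
}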
Fixing the pattern so that the string must end with bigquery
---
 renovate.json | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/renovate.json b/renovate.json
index 38fdb78ebd..dc6b01e796 100644
--- a/renovate.json
+++ b/renovate.json
@@ -44,14 +44,6 @@ "/^com.google.guava:/" ] }, - { - "semanticCommitType": "deps", - "semanticCommitScope": null, - "matchPackageNames": [ - "*", - "/^com.google.cloud:google-cloud-bigquerystorage/" - ] - }, { "semanticCommitType": "build", "semanticCommitScope": "deps", @@ -68,7 +60,7 @@ "semanticCommitType": "chore", "semanticCommitScope": "deps", "matchPackageNames": [ - "/^com.google.cloud:google-cloud-bigquery/", + "/^com.google.cloud:google-cloud-bigquery$/", "/^com.google.cloud:google-cloud-bigtable/", "/^com.google.cloud:libraries-bom/", "/^com.google.cloud.samples:shared-configuration/"

From adbe2e04db7d45cc1d12368de3e0a728a6b56ab8 Mon Sep 17 00:00:00 2001
From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com>
Date: Mon, 15 Dec 2025 14:35:55 -0500
Subject: [PATCH 16/36] chore(main): release 2.57.2-SNAPSHOT (#4032)

Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>
---
 benchmark/pom.xml | 2 +-
 google-cloud-bigquery-bom/pom.xml | 4 ++--
 google-cloud-bigquery/pom.xml | 4 ++--
 pom.xml | 4 ++--
 samples/snapshot/pom.xml | 2 +-
 versions.txt | 2 +-
 6 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/benchmark/pom.xml b/benchmark/pom.xml
index 4b04479cd3..41e1a2fcb3 100644
--- a/benchmark/pom.xml
+++ b/benchmark/pom.xml
@@ -6,7 +6,7 @@ google-cloud-bigquery-parent com.google.cloud - 2.57.1 + 2.57.2-SNAPSHOT diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index dc9d046e59..4cfd474143 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-bom - 2.57.1 + 2.57.2-SNAPSHOT pom com.google.cloud @@ -54,7 +54,7 @@ com.google.cloud google-cloud-bigquery - 2.57.1 + 2.57.2-SNAPSHOT diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index adeb1ce480..5e86e77fd2 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.57.1 + 2.57.2-SNAPSHOT jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.57.1 + 2.57.2-SNAPSHOT google-cloud-bigquery diff --git a/pom.xml b/pom.xml index 9c123afe21..44548ec5c2 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 2.57.1 + 2.57.2-SNAPSHOT BigQuery Parent https://github.com/googleapis/java-bigquery @@ -93,7 +93,7 @@ com.google.cloud google-cloud-bigquery - 2.57.1 + 2.57.2-SNAPSHOT diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 901c0c5ca0..e10b51dc7d 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -56,7 +56,7 @@ com.google.cloud google-cloud-bigquery - 2.57.1 + 2.57.2-SNAPSHOT diff --git a/versions.txt b/versions.txt index 218ca40390..507619351d 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:2.57.1:2.57.1 \ No newline at end of file +google-cloud-bigquery:2.57.1:2.57.2-SNAPSHOT \ No newline at end of file

From 7198340e7596089d6c69256fd9d982a7f1c17c85 Mon Sep 17 00:00:00 2001
From: Lawrence Qiu
Date: Mon, 15 Dec 2025 16:02:33 -0500
Subject: [PATCH
17/36] test: Add integration tests for picosecond support (#4030) * test: Add integration tests for picosecond support * chore: Add a micro -> pico exact timestamp test case * chore: Add additional test cases * chore: Fix test cases with 'Z' * chore: Test if first query has an impact * chore: Remove flaky test for now * chore: Remove testing comment --- .../cloud/bigquery/QueryParameterValue.java | 6 +- .../testing/RemoteBigQueryHelper.java | 19 +- .../bigquery/it/ITHighPrecisionTimestamp.java | 319 ++++++++++++++++++ 3 files changed, 336 insertions(+), 8 deletions(-) create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java index cb4e44861d..89e7ae85bf 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java @@ -328,9 +328,9 @@ public static QueryParameterValue timestamp(Long value) { *
<p>This method supports up to picosecond precision (12 digits) for timestamp. Input should * conform to ISO8601 format. * - * <p>Must be in the format "yyyy-MM-dd HH:mm:ss.SSSSSS{SSSSSSS}ZZ", e.g. "2014-08-19 - * 12:41:35.123456+00:00" for microsecond precision and "2014-08-19 12:41:35.123456789123+00:00" - * for picosecond precision + * <p>
Should be in the format "yyyy-MM-dd HH:mm:ss.SSSSSS{SSSSSSS}Z", e.g. "2014-08-19 + * 12:41:35.123456Z" for microsecond precision and "2014-08-19 12:41:35.123456789123Z" for + * picosecond precision */ public static QueryParameterValue timestamp(String value) { return of(value, StandardSQLTypeName.TIMESTAMP); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java index 4b1767362c..c32e1a0516 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java @@ -120,18 +120,27 @@ public static RemoteBigQueryHelper create(String projectId, InputStream keyStrea * credentials. */ public static RemoteBigQueryHelper create() { + return create(BigQueryOptions.newBuilder()); + } + + /** + * Creates a {@code RemoteBigQueryHelper} object using default project id and authentication + * credentials. + * + * @param bigqueryOptionsBuilder Custom BigqueryOptions.Builder with some pre-defined settings + */ + public static RemoteBigQueryHelper create(BigQueryOptions.Builder bigqueryOptionsBuilder) { HttpTransportOptions transportOptions = BigQueryOptions.getDefaultHttpTransportOptions(); transportOptions = transportOptions.toBuilder() .setConnectTimeout(connectTimeout) .setReadTimeout(connectTimeout) .build(); - BigQueryOptions bigqueryOptions = - BigQueryOptions.newBuilder() + BigQueryOptions.Builder builder = + bigqueryOptionsBuilder .setRetrySettings(retrySettings()) - .setTransportOptions(transportOptions) - .build(); - return new RemoteBigQueryHelper(bigqueryOptions); + .setTransportOptions(transportOptions); + return new RemoteBigQueryHelper(builder.build()); } private static RetrySettings retrySettings() { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java new file mode 100644 index 0000000000..332071a620 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java @@ -0,0 +1,319 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.bigquery.it; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.DataFormatOptions; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.InsertAllRequest; +import com.google.cloud.bigquery.InsertAllResponse; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableInfo; +import com.google.cloud.bigquery.TableResult; +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.protobuf.Timestamp; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +public class ITHighPrecisionTimestamp { + + public static final String TEST_HIGH_PRECISION_TIMESTAMP_TABLE_NAME = + "test_high_precision_timestamp"; + private static BigQuery bigquery; + private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static TableId defaultTableId; + public static final long TIMESTAMP_PICOSECOND_PRECISION = 12L; + private static final Field TIMESTAMP_HIGH_PRECISION_FIELD_SCHEMA = + Field.newBuilder("timestampHighPrecisionField", StandardSQLTypeName.TIMESTAMP) + .setTimestampPrecision(TIMESTAMP_PICOSECOND_PRECISION) + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(TIMESTAMP_HIGH_PRECISION_FIELD_SCHEMA); + + private static final String TIMESTAMP1 = "2025-01-01T12:34:56.123456789123Z"; + private static final String TIMESTAMP2 = "1970-01-01T12:34:56.123456789123Z"; + private static final String TIMESTAMP3 = "2000-01-01T12:34:56.123456789123Z"; + + @BeforeClass + public static void beforeClass() { + BigQueryOptions.Builder builder = + BigQueryOptions.newBuilder() + .setDataFormatOptions( + DataFormatOptions.newBuilder() + .timestampFormatOptions(DataFormatOptions.TimestampFormatOptions.ISO8601_STRING) + .build()); + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(builder); + bigquery = bigqueryHelper.getOptions().getService(); + + // Create a new dataset + DatasetInfo info = DatasetInfo.newBuilder(DATASET).build(); + bigquery.create(info); + + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build(); + defaultTableId = TableId.of(DATASET, TEST_HIGH_PRECISION_TIMESTAMP_TABLE_NAME); + + // Create a new table that can be re-used by the test cases + Table createdTable = bigquery.create(TableInfo.of(defaultTableId, tableDefinition)); + assertNotNull(createdTable); + + // Populate with some starter data + Map timestamp1 = + Collections.singletonMap("timestampHighPrecisionField", TIMESTAMP1); + Map timestamp2 = + 
Collections.singletonMap("timestampHighPrecisionField", TIMESTAMP2); + Map timestamp3 = + Collections.singletonMap("timestampHighPrecisionField", TIMESTAMP3); + InsertAllRequest request = + InsertAllRequest.newBuilder(defaultTableId) + .addRow(timestamp1) + .addRow(timestamp2) + .addRow(timestamp3) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.getInsertErrors().size()); + } + + @AfterClass + public static void afterClass() { + if (bigquery != null) { + bigquery.delete(defaultTableId); + RemoteBigQueryHelper.forceDelete(bigquery, DATASET); + } + } + + private static String generateTempTableName() { + return String.format( + "insert_temp_%s%s", + UUID.randomUUID().toString().substring(0, 6), TEST_HIGH_PRECISION_TIMESTAMP_TABLE_NAME); + } + + @Test + public void query_highPrecisionTimestamp() throws InterruptedException { + String sql = + String.format("SELECT timestampHighPrecisionField FROM %s;", defaultTableId.getTable()); + QueryJobConfiguration queryJobConfiguration = + QueryJobConfiguration.newBuilder(sql) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .build(); + TableResult result = bigquery.query(queryJobConfiguration); + assertNotNull(result.getJobId()); + String[] expected = new String[] {TIMESTAMP1, TIMESTAMP2, TIMESTAMP3}; + List timestamps = + StreamSupport.stream(result.getValues().spliterator(), false) + .map(x -> (String) x.get(0).getValue()) + .collect(Collectors.toList()); + assertEquals(expected.length, timestamps.size()); + for (int i = 0; i < timestamps.size(); i++) { + assertEquals(expected[i], timestamps.get(i)); + } + } + + @Test + public void insert_highPrecisionTimestamp_ISOValidFormat() { + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build(); + String tempTableName = generateTempTableName(); + TableId tableId = TableId.of(DATASET, tempTableName); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + + Map timestampISO = + Collections.singletonMap("timestampHighPrecisionField", "2025-01-01T12:34:56.123456Z"); + InsertAllRequest request = InsertAllRequest.newBuilder(tableId).addRow(timestampISO).build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.getInsertErrors().size()); + + bigquery.delete(tableId); + } + + @Test + public void insert_highPrecisionTimestamp_invalidFormats() { + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build(); + String tempTable = generateTempTableName(); + TableId tableId = TableId.of(DATASET, tempTable); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + + Map timestampInMicros = + Collections.singletonMap("timestampHighPrecisionField", 123456); + Map timestampInMicrosString = + Collections.singletonMap("timestampHighPrecisionField", "123456"); + Map timestampNegative = + Collections.singletonMap("timestampHighPrecisionField", -123456); + Map timestampFloat = + Collections.singletonMap("timestampHighPrecisionField", 1000.0); + Map timestampProtobuf = + Collections.singletonMap( + "timestampHighPrecisionField", + Timestamp.newBuilder().setSeconds(123456789).setNanos(123456789).build()); + Map timestampProtobufNegative = + Collections.singletonMap( + "timestampHighPrecisionField", + 
Timestamp.newBuilder().setSeconds(-123456789).setNanos(-123456789).build()); + InsertAllRequest request = + InsertAllRequest.newBuilder(tableId) + .addRow(timestampInMicros) + .addRow(timestampInMicrosString) + .addRow(timestampNegative) + .addRow(timestampFloat) + .addRow(timestampProtobuf) + .addRow(timestampProtobufNegative) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertTrue(response.hasErrors()); + assertEquals(request.getRows().size(), response.getInsertErrors().size()); + + bigquery.delete(tableId); + } + + @Test + public void queryNamedParameter_highPrecisionTimestamp() throws InterruptedException { + String query = + String.format( + "SELECT * FROM %s.%s WHERE timestampHighPrecisionField >= CAST(@timestampParam AS TIMESTAMP(12))", + DATASET, defaultTableId.getTable()); + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter( + "timestampParam", + QueryParameterValue.timestamp("2000-01-01 12:34:56.123456789123Z")) + .build(); + + TableResult result = bigquery.query(queryConfig); + assertNotNull(result); + String[] expected = new String[] {TIMESTAMP1, TIMESTAMP3}; + List timestamps = + StreamSupport.stream(result.getValues().spliterator(), false) + .map(x -> (String) x.get(0).getValue()) + .collect(Collectors.toList()); + assertEquals(expected.length, timestamps.size()); + for (int i = 0; i < timestamps.size(); i++) { + assertEquals(expected[i], timestamps.get(i)); + } + } + + @Test + public void queryNamedParameter_highPrecisionTimestamp_microsLong() throws InterruptedException { + String query = + String.format( + "SELECT * FROM %s.%s WHERE timestampHighPrecisionField >= CAST(@timestampParam AS TIMESTAMP(12))", + DATASET, defaultTableId.getTable()); + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter( + "timestampParam", + QueryParameterValue.timestamp( + 946730096123456L)) // micros for 2000-01-01 12:34:56.123456Z + .build(); + + TableResult result = bigquery.query(queryConfig); + assertNotNull(result); + // Exact timestamp for TIMESTAMP3 is `2000-01-01T12:34:56.123456789123Z` and for the micros + // is `2000-01-01T12:34:56.123456Z`. The micros value gets cast to 12 digits of precision, so + // it becomes `2000-01-01T12:34:56.123456000000Z`. We do expect it as part of the query. 
+ String[] expected = new String[] {TIMESTAMP1, TIMESTAMP3}; + List timestamps = + StreamSupport.stream(result.getValues().spliterator(), false) + .map(x -> (String) x.get(0).getValue()) + .collect(Collectors.toList()); + assertEquals(expected.length, timestamps.size()); + for (int i = 0; i < timestamps.size(); i++) { + assertEquals(expected[i], timestamps.get(i)); + } + } + + @Test + public void queryNamedParameter_highPrecisionTimestamp_microsISOString() + throws InterruptedException { + String query = + String.format( + "SELECT * FROM %s.%s WHERE timestampHighPrecisionField >= CAST(@timestampParam AS TIMESTAMP(12))", + DATASET, defaultTableId.getTable()); + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter( + "timestampParam", QueryParameterValue.timestamp("2000-01-01 12:34:56.123456Z")) + .build(); + + TableResult result = bigquery.query(queryConfig); + assertNotNull(result); + List timestamps = + StreamSupport.stream(result.getValues().spliterator(), false) + .map(x -> (String) x.get(0).getValue()) + .collect(Collectors.toList()); + String[] expected = new String[] {TIMESTAMP1, TIMESTAMP3}; + assertEquals(expected.length, timestamps.size()); + for (int i = 0; i < timestamps.size(); i++) { + assertEquals(expected[i], timestamps.get(i)); + } + } + + @Test + public void queryNamedParameter_highPrecisionTimestamp_noExplicitCastInQuery_fails() { + String query = + String.format( + "SELECT * FROM %s.%s WHERE timestampHighPrecisionField >= @timestampParam", + DATASET, defaultTableId.getTable()); + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter( + "timestampParam", QueryParameterValue.timestamp("2000-01-01 12:34:56.123456789123")) + .build(); + + BigQueryException exception = + assertThrows(BigQueryException.class, () -> bigquery.query(queryConfig)); + assertEquals("Invalid argument type passed to a function", exception.getMessage()); + } +} From fa0a12e3cf171abab528c318ba3f4260b69a5274 Mon Sep 17 00:00:00 2001 From: Abgar Simonean Date: Fri, 19 Dec 2025 06:03:25 +0200 Subject: [PATCH 18/36] fix: Job.isDone() uses Job.Status.State if available (#4039) --- .../src/main/java/com/google/cloud/bigquery/Job.java | 9 ++++++++- .../src/test/java/com/google/cloud/bigquery/JobTest.java | 8 ++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java index 88950b9fb2..c64327500f 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java @@ -210,6 +210,9 @@ public boolean exists() { */ public boolean isDone() { checkNotDryRun("isDone"); + if (hasDoneState()) { + return true; + } Span isDone = null; if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { isDone = @@ -220,7 +223,7 @@ public boolean isDone() { } try (Scope isDoneScope = isDone != null ? 
isDone.makeCurrent() : null) { Job job = bigquery.getJob(getJobId(), JobOption.fields(BigQuery.JobField.STATUS)); - return job == null || JobStatus.State.DONE.equals(job.getStatus().getState()); + return job == null || job.hasDoneState(); } finally { if (isDone != null) { isDone.end(); @@ -228,6 +231,10 @@ public boolean isDone() { } } + private boolean hasDoneState() { + return getStatus() != null && JobStatus.State.DONE.equals(getStatus().getState()); + } + /** See {@link #waitFor(BigQueryRetryConfig, RetryOption...)} */ public Job waitFor(RetryOption... waitOptions) throws InterruptedException { return waitForInternal(DEFAULT_RETRY_CONFIG, waitOptions); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java index f12d9fcafa..e6d249af46 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java @@ -157,11 +157,9 @@ public void testExists_False() { @Test public void testIsDone_True() { - BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; Job job = expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.DONE)).build(); - when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(job); assertTrue(job.isDone()); - verify(bigquery).getJob(JOB_INFO.getJobId(), expectedOptions); + verify(bigquery, times(0)).getJob(eq(JOB_INFO.getJobId()), any()); } @Test @@ -176,8 +174,10 @@ public void testIsDone_False() { @Test public void testIsDone_NotExists() { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + Job jobWithRunningState = + expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(null); - assertTrue(job.isDone()); + assertTrue(jobWithRunningState.isDone()); verify(bigquery).getJob(JOB_INFO.getJobId(), expectedOptions); } From d214d10e4ad216ea6e8d3405b71b1f1212f29f4a Mon Sep 17 00:00:00 2001 From: Tomo Suzuki Date: Tue, 23 Dec 2025 15:54:46 -0500 Subject: [PATCH 19/36] chore: remove build badges (#4046) b/468377909 --- README.md | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/README.md b/README.md index d8f6c46d87..0d2e002bd3 100644 --- a/README.md +++ b/README.md @@ -327,29 +327,10 @@ information. Apache 2.0 - See [LICENSE][license] for more information. -## CI Status - -Java Version | Status ------------- | ------ -Java 8 | [![Kokoro CI][kokoro-badge-image-2]][kokoro-badge-link-2] -Java 8 OSX | [![Kokoro CI][kokoro-badge-image-3]][kokoro-badge-link-3] -Java 8 Windows | [![Kokoro CI][kokoro-badge-image-4]][kokoro-badge-link-4] -Java 11 | [![Kokoro CI][kokoro-badge-image-5]][kokoro-badge-link-5] - Java is a registered trademark of Oracle and/or its affiliates. 
[product-docs]: https://cloud.google.com/bigquery
[javadocs]: https://cloud.google.com/java/docs/reference/google-cloud-bigquery/latest/history
-[kokoro-badge-image-2]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8.svg
-[kokoro-badge-link-2]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8.html
-[kokoro-badge-image-3]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8-osx.svg
-[kokoro-badge-link-3]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8-osx.html
-[kokoro-badge-image-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8-win.svg
-[kokoro-badge-link-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8-win.html
-[kokoro-badge-image-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java11.svg
-[kokoro-badge-link-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java11.html
[stability-image]: https://img.shields.io/badge/stability-stable-green
[maven-version-image]: https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-bigquery.svg
[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-bigquery/2.42.2

From 4bb157c935a69f099189d8ee50e84d0a33b57505 Mon Sep 17 00:00:00 2001
From: Sivamurugan P <73530366+SivamuruganP@users.noreply.github.com>
Date: Mon, 29 Dec 2025 23:41:21 +0530
Subject: [PATCH 20/36] chore: handled race condition in stateless query
 integration test (#4045)

* fix: handled race condition in stateless query integration test

The testTableResultJobIdAndQueryId test was failing intermittently
on slower networks. The test strictly asserted that Job ID must be null
for stateless queries. However, the library correctly falls back to
creating a Job ID if the stateless query times out.
This change updates the assertion logic to accept either a valid
Query ID (stateless success) or a valid Job ID (fallback success).
Fixes #4008

* refactor: use XOR assertion for conciseness

Applied feedback from code review to use exclusive OR operator
for validating JobID/QueryID mutual exclusivity.

* fix: apply race condition logic to testStatelessQueries

Applied XOR assertion logic to testStatelessQueries.
The test was failing on slow networks because it did not account
for JOB_CREATION_OPTIONAL falling back to job creation.
Fixes #4002

* docs: add comment explaining stateless query fallback behavior

* docs: add comment explaining stateless query fallback behavior in testTableResultJobIdAndQueryId()
---
 .../cloud/bigquery/it/ITBigQueryTest.java | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
index 80605884d3..ecb5e0c63d 100644
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
@@ -7181,8 +7181,12 @@ public void testStatelessQueries() throws InterruptedException { // Stateless query should have no job id.
bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); TableResult tableResult = executeSimpleQuery(bigQuery); - assertNotNull(tableResult.getQueryId()); - assertNull(tableResult.getJobId()); + // Use XOR: We accept EITHER a QueryId (fast path) OR a JobId (slow fallback), but not both. + // Ideally Stateless query will return queryId but in some cases it would return jobId instead + // of queryId based on the query complexity or other factors (job timeout configs). + assertTrue( + "Exactly one of jobId or queryId should be non-null", + (tableResult.getJobId() != null) ^ (tableResult.getQueryId() != null)); // Job creation takes over, no query id is created. bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED); @@ -7220,8 +7224,14 @@ public void testTableResultJobIdAndQueryId() throws InterruptedException { String query = "SELECT 1 as one"; QueryJobConfiguration configStateless = QueryJobConfiguration.newBuilder(query).build(); TableResult result = bigQuery.query(configStateless); - assertNull(result.getJobId()); - assertNotNull(result.getQueryId()); + // A stateless query should result in either a queryId (stateless success) or a jobId (fallback + // to a job). + // Exactly one of them should be non-null. + // Ideally Stateless query will return queryId but in some cases it would return jobId instead + // of queryId based on the query complexity or other factors (job timeout configs). + assertTrue( + "Exactly one of jobId or queryId should be non-null", + (result.getJobId() != null) ^ (result.getQueryId() != null)); // Test scenario 2 by failing stateless check by setting job timeout. QueryJobConfiguration configQueryWithJob =

From cbd5d238b2a3a6abf7d2eb9f62e69680ccfc580b Mon Sep 17 00:00:00 2001
From: Lawrence Qiu
Date: Mon, 29 Dec 2025 17:53:14 -0500
Subject: [PATCH 21/36] chore: Ignore unused declared junit-jupiter-engine
 error in dependencies check (#4048)

* chore: Ignore unused junit-jupiter-engine error in dependencies check

* chore: Update pom.xml

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>

* chore: Update comment format for junit-jupiter-engine

---------

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
---
 pom.xml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/pom.xml b/pom.xml
index 44548ec5c2..4cded9553c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -172,6 +172,8 @@ io.netty:netty-common org.apache.arrow:arrow-memory-netty com.google.api:gax + + org.junit.jupiter:junit-jupiter-engine

From 031deb00f153b47d37655b025fcef298a3db0e0b Mon Sep 17 00:00:00 2001
From: Sivamurugan P <73530366+SivamuruganP@users.noreply.github.com>
Date: Tue, 30 Dec 2025 23:55:08 +0530
Subject: [PATCH 22/36] fix: gracefully handle thread interruption in ConnectionImpl to preve… (#4047)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* fix: gracefully handle thread interruption in ConnectionImpl to prevent CI flakes

Fixes #3992

* fix: consolidate interrupt checks and handle raw InterruptedException as per review

* fix: remove CancelledException check per review feedback
---
 .../java/com/google/cloud/bigquery/ConnectionImpl.java | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java
b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java
index c3465c33a8..d31a406e40 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java
@@ -1069,7 +1069,15 @@ private void processArrowStreamAsync( } } catch (Exception e) { - throw BigQueryException.translateAndThrow(e); + if (e instanceof InterruptedException || e.getCause() instanceof InterruptedException) { + // Log silently and let it fall through to 'finally' for cleanup. + // This is the "graceful shutdown". + logger.log( + Level.INFO, "Background thread interrupted (Connection Closed). Stopping."); + Thread.currentThread().interrupt(); + } else { + throw BigQueryException.translateAndThrow(e); + } } finally { // logic needed for graceful shutdown // marking end of stream try {

From 2578dcbb1340a1a121cdb335b16e21fef7a0638f Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Tue, 6 Jan 2026 22:21:25 +0000
Subject: [PATCH 23/36] chore(deps): update dependency
 com.google.cloud:libraries-bom to v26.73.0 (#3922)

---
 samples/snippets/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml
index d6cbcca5d1..1903381ad7 100644
--- a/samples/snippets/pom.xml
+++ b/samples/snippets/pom.xml
@@ -47,7 +47,7 @@ com.google.cloud libraries-bom - 26.64.0 + 26.73.0 pom import

From fac16a8eb05a6e13e406feeb9761259cdbf8e674 Mon Sep 17 00:00:00 2001
From: Lawrence Qiu
Date: Wed, 7 Jan 2026 13:19:10 -0500
Subject: [PATCH 24/36] docs: Add specific samples for creating and querying timestamps (#4051)

* docs: Add specific samples for creating and querying timestamps

* chore: Fix samples checkstyle issue

* chore: Address gemini suggestions

* chore: Update expiration time for test
---
 .../cloud/bigquery/it/ITBigQueryTest.java | 8 +-
 .../bigquery/CreateTableTimestamp.java | 59 ++++++++++++++
 .../QueryWithTimestampParameters.java | 34 +++++++-
 .../com/example/bigquery/CreateTableIT.java | 11 ++-
 .../bigquery/CreateTableTimestampIT.java | 80 +++++++++++++++++++
 .../QueryWithTimestampParametersIT.java | 9 ++-
 6 files changed, 187 insertions(+), 14 deletions(-)
 create mode 100644 samples/snippets/src/test/java/com/example/bigquery/CreateTableTimestampIT.java

diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
index ecb5e0c63d..ddad48d392 100644
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
@@ -5980,9 +5980,11 @@ public void testSnapshotTableCopyJob() throws InterruptedException { @Test public void testCopyJobWithLabelsAndExpTime() throws InterruptedException { - String destExpiryTime = "2025-12-31T23:59:59.999999999Z"; - String sourceTableName = "test_copy_job_source_table_label"; - String destinationTableName = "test_copy_job_destination_table_label"; + String destExpiryTime = "2099-12-31T23:59:59.999999999Z"; + String sourceTableName = + "test_copy_job_source_table_label" + UUID.randomUUID().toString().substring(0, 8); + String destinationTableName = + "test_copy_job_destination_table_label" + UUID.randomUUID().toString().substring(0, 8); Map labels = ImmutableMap.of("test_job_name",
"test_copy_job"); TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateTableTimestamp.java b/samples/snippets/src/main/java/com/example/bigquery/CreateTableTimestamp.java new file mode 100644 index 0000000000..b1336aaff4 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateTableTimestamp.java @@ -0,0 +1,59 @@ +/* + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +// [START bigquery_create_table_timestamp] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.TableDefinition; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableInfo; + +public class CreateTableTimestamp { + + public static void main(String[] args) { + // TODO(developer): Replace these variables before running the sample. + String datasetName = "MY_DATASET_NAME"; + String tableName = "MY_TABLE_NAME"; + Schema schema = + Schema.of(Field.newBuilder("timestampField", StandardSQLTypeName.TIMESTAMP).build()); + createTable(datasetName, tableName, schema); + } + + public static void createTable(String datasetName, String tableName, Schema schema) { + try { + // Initialize client that will be used to send requests. This client only needs to be created + // once, and can be reused for multiple requests. + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + + TableId tableId = TableId.of(datasetName, tableName); + TableDefinition tableDefinition = StandardTableDefinition.of(schema); + TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build(); + + bigquery.create(tableInfo); + System.out.println("Table created successfully"); + } catch (BigQueryException e) { + System.out.println("Table was not created. \n" + e); + } + } +} +// [END bigquery_create_table_timestamp] diff --git a/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java b/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java index 56a3fcea91..6f20b9801d 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java +++ b/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java @@ -30,8 +30,36 @@ // Sample to running a query with timestamp query parameters. public class QueryWithTimestampParameters { - public static void main(String[] args) { - queryWithTimestampParameters(); + public static void queryFromTableTimestampParameters() { + try { + // Initialize client that will be used to send requests. 
This client only needs to be created + // once, and can be reused for multiple requests. + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + + ZonedDateTime timestamp = LocalDateTime.of(2016, 12, 7, 8, 0, 0).atZone(ZoneOffset.UTC); + String query = "SELECT last_reported FROM " + + "`bigquery-public-data`.new_york_citibike.citibike_stations" + + " WHERE last_reported >= @ts_value LIMIT 5"; + // Note: Standard SQL is required to use query parameters. + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .addNamedParameter( + "ts_value", + QueryParameterValue.timestamp( + // Timestamp takes microseconds since 1970-01-01T00:00:00 UTC + timestamp.toInstant().toEpochMilli() * 1000)) + .build(); + + TableResult results = bigquery.query(queryConfig); + + results + .iterateAll() + .forEach(row -> row.forEach(val -> System.out.printf("%s\n", val.toString()))); + + System.out.println("Query with timestamp parameter performed successfully."); + } catch (BigQueryException | InterruptedException e) { + System.out.println("Query not performed \n" + e); + } } public static void queryWithTimestampParameters() { @@ -60,7 +88,7 @@ public static void queryWithTimestampParameters() { System.out.println("Query with timestamp parameter performed successfully."); } catch (BigQueryException | InterruptedException e) { - System.out.println("Query not performed \n" + e.toString()); + System.out.println("Query not performed \n" + e); } } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateTableIT.java index af5104c1c6..000091a045 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CreateTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateTableIT.java @@ -37,26 +37,25 @@ public class CreateTableIT { private final Logger log = Logger.getLogger(this.getClass().getName()); private String tableName; private ByteArrayOutputStream bout; - private PrintStream out; private PrintStream originalPrintStream; private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME"); - private static void requireEnvVar(String varName) { + private static void requireEnvVar() { assertNotNull( - "Environment variable " + varName + " is required to perform these tests.", - System.getenv(varName)); + "Environment variable BIGQUERY_DATASET_NAME is required to perform these tests.", + System.getenv("BIGQUERY_DATASET_NAME")); } @BeforeClass public static void checkRequirements() { - requireEnvVar("BIGQUERY_DATASET_NAME"); + requireEnvVar(); } @Before public void setUp() { bout = new ByteArrayOutputStream(); - out = new PrintStream(bout); + PrintStream out = new PrintStream(bout); originalPrintStream = System.out; System.setOut(out); tableName = "MY_TABLE_NAME_" + UUID.randomUUID().toString().replace("-", "_"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateTableTimestampIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateTableTimestampIT.java new file mode 100644 index 0000000000..b63d6eff63 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateTableTimestampIT.java @@ -0,0 +1,80 @@ +/* + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertNotNull; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class CreateTableTimestampIT { + private final Logger log = Logger.getLogger(this.getClass().getName()); + private String tableName; + private ByteArrayOutputStream bout; + private PrintStream originalPrintStream; + + private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME"); + + private static void requireEnvVar() { + assertNotNull( + "Environment variable BIGQUERY_DATASET_NAME is required to perform these tests.", + System.getenv("BIGQUERY_DATASET_NAME")); + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar(); + } + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + PrintStream out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + tableName = "MY_TABLE_NAME_" + UUID.randomUUID().toString().replace("-", "_"); + } + + @After + public void tearDown() { + // Clean up + DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName); + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testCreateTable() { + Schema schema = + Schema.of(Field.of("timestampField", StandardSQLTypeName.TIMESTAMP)); + CreateTableTimestamp.createTable(BIGQUERY_DATASET_NAME, tableName, schema); + assertThat(bout.toString()).contains("Table created successfully"); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryWithTimestampParametersIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryWithTimestampParametersIT.java index eb6f79cb7b..32d707c307 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryWithTimestampParametersIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryWithTimestampParametersIT.java @@ -30,13 +30,12 @@ public class QueryWithTimestampParametersIT { private final Logger log = Logger.getLogger(this.getClass().getName()); private ByteArrayOutputStream bout; - private PrintStream out; private PrintStream originalPrintStream; @Before public void setUp() { bout = new ByteArrayOutputStream(); - out = new PrintStream(bout); + PrintStream out = new PrintStream(bout); originalPrintStream = System.out; System.setOut(out); } @@ -54,4 +53,10 @@ public void testQueryWithTimestampParameters() { QueryWithTimestampParameters.queryWithTimestampParameters(); assertThat(bout.toString()).contains("Query with timestamp parameter performed successfully."); } + + @Test + public void testQueryFromTableTimestampParameters() { + 
QueryWithTimestampParameters.queryFromTableTimestampParameters(); + assertThat(bout.toString()).contains("Query with timestamp parameter performed successfully."); + } } From 719f8fd14e384cbe0a5e26f36ed05f97c4231fc0 Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Fri, 9 Jan 2026 15:19:17 -0500 Subject: [PATCH 25/36] chore: Migrate tests to JUnit5 (#4052) * feat: Migrate to JUnit 5 and add parallel test execution * feat: Migrate tests to JUnit5 * chore: Add surefire-junit-platform dep for ITs * chore: Fix broken tests * chore: Upgrade existing integration tests to JUnit 5 syntax and features * chore: Upgrade ITNightlyBigQueryTest to JUnit 5 features and package-private * chore: Make the tests package-private * feat: migrate tests to JUnit 5 assertThrows and static imports * chore: Remove wildcard imports * chore: revert samples to use junit4 * chore: Address code comments * chore: Close connection after test --------- Co-authored-by: AbgarSim --- google-cloud-bigquery/pom.xml | 28 +- .../src/test/java/MetadataCacheStatsTest.java | 8 +- .../com/google/cloud/bigquery/AclTest.java | 26 +- .../cloud/bigquery/AnnotationsTest.java | 26 +- .../cloud/bigquery/AvroOptionsTest.java | 10 +- .../bigquery/BigLakeConfigurationTest.java | 12 +- .../cloud/bigquery/BigQueryErrorTest.java | 8 +- .../cloud/bigquery/BigQueryExceptionTest.java | 26 +- .../cloud/bigquery/BigQueryImplTest.java | 437 +++++++------- .../cloud/bigquery/BigQueryOptionsTest.java | 43 +- .../bigquery/BigQueryResultImplTest.java | 8 +- .../cloud/bigquery/BigtableOptionsTest.java | 69 ++- .../cloud/bigquery/CloneDefinitionTest.java | 12 +- .../cloud/bigquery/ColumnReferenceTest.java | 12 +- .../cloud/bigquery/ConnectionImplTest.java | 80 +-- .../bigquery/ConnectionPropertyTest.java | 10 +- .../bigquery/ConnectionSettingsTest.java | 12 +- .../bigquery/CopyJobConfigurationTest.java | 26 +- .../google/cloud/bigquery/CsvOptionsTest.java | 12 +- .../google/cloud/bigquery/DatasetIdTest.java | 14 +- .../cloud/bigquery/DatasetInfoTest.java | 26 +- .../google/cloud/bigquery/DatasetTest.java | 78 ++- .../bigquery/DatastoreBackupOptionsTest.java | 12 +- .../google/cloud/bigquery/DmlStatsTest.java | 8 +- .../ExternalDatasetReferenceTest.java | 12 +- .../bigquery/ExternalTableDefinitionTest.java | 29 +- .../bigquery/ExtractJobConfigurationTest.java | 10 +- .../cloud/bigquery/FieldElementTypeTest.java | 4 +- .../google/cloud/bigquery/FieldListTest.java | 35 +- .../com/google/cloud/bigquery/FieldTest.java | 6 +- .../cloud/bigquery/FieldValueListTest.java | 39 +- .../google/cloud/bigquery/FieldValueTest.java | 12 +- .../google/cloud/bigquery/ForeignKeyTest.java | 14 +- .../cloud/bigquery/FormatOptionsTest.java | 4 +- .../bigquery/GoogleSheetsOptionsTest.java | 2 +- .../bigquery/HivePartitioningOptionsTest.java | 12 +- .../cloud/bigquery/InsertAllRequestTest.java | 11 +- .../cloud/bigquery/InsertAllResponseTest.java | 10 +- .../com/google/cloud/bigquery/JobIdTest.java | 14 +- .../google/cloud/bigquery/JobInfoTest.java | 10 +- .../cloud/bigquery/JobStatisticsTest.java | 4 +- .../google/cloud/bigquery/JobStatusTest.java | 10 +- .../com/google/cloud/bigquery/JobTest.java | 135 ++--- .../bigquery/LoadJobConfigurationTest.java | 20 +- .../MaterializedViewDefinitionTest.java | 6 +- .../google/cloud/bigquery/ModelIdTest.java | 14 +- .../google/cloud/bigquery/ModelInfoTest.java | 20 +- .../bigquery/ModelTableDefinitionTest.java | 37 +- .../com/google/cloud/bigquery/ModelTest.java | 50 +- .../com/google/cloud/bigquery/OptionTest.java | 17 +- 
.../cloud/bigquery/ParquetOptionsTest.java | 4 +- .../cloud/bigquery/PolicyHelperTest.java | 10 +- .../google/cloud/bigquery/PolicyTagsTest.java | 8 +- .../google/cloud/bigquery/PrimaryKeyTest.java | 14 +- .../bigquery/QueryJobConfigurationTest.java | 8 +- .../bigquery/QueryParameterValueTest.java | 32 +- .../cloud/bigquery/QueryRequestInfoTest.java | 10 +- .../google/cloud/bigquery/QueryStageTest.java | 18 +- .../com/google/cloud/bigquery/RangeTest.java | 6 +- .../bigquery/RemoteFunctionOptionsTest.java | 12 +- .../cloud/bigquery/RoutineArgumentTest.java | 4 +- .../google/cloud/bigquery/RoutineIdTest.java | 4 +- .../cloud/bigquery/RoutineInfoTest.java | 20 +- .../google/cloud/bigquery/RoutineTest.java | 26 +- .../com/google/cloud/bigquery/SchemaTest.java | 12 +- .../bigquery/SnapshotTableDefinitionTest.java | 14 +- .../bigquery/StandardSQLDataTypeTest.java | 12 +- .../cloud/bigquery/StandardSQLFieldTest.java | 4 +- .../bigquery/StandardSQLStructTypeTest.java | 12 +- .../bigquery/StandardSQLTableTypeTest.java | 4 +- .../bigquery/StandardTableDefinitionTest.java | 39 +- .../cloud/bigquery/TableConstraintsTest.java | 14 +- .../bigquery/TableDataWriteChannelTest.java | 70 +-- .../google/cloud/bigquery/TableIdTest.java | 14 +- .../google/cloud/bigquery/TableInfoTest.java | 22 +- .../bigquery/TableMetadataCacheUsageTest.java | 8 +- .../cloud/bigquery/TableResultTest.java | 8 +- .../com/google/cloud/bigquery/TableTest.java | 28 +- .../cloud/bigquery/TimePartitioningTest.java | 42 +- .../cloud/bigquery/TimelineSampleTest.java | 6 +- .../bigquery/UserDefinedFunctionTest.java | 4 +- .../cloud/bigquery/ViewDefinitionTest.java | 37 +- .../WriteChannelConfigurationTest.java | 6 +- .../cloud/bigquery/it/ITBigQueryTest.java | 494 ++++++++-------- .../bigquery/it/ITHighPrecisionTimestamp.java | 20 +- .../bigquery/it/ITNightlyBigQueryTest.java | 533 +++++++++--------- .../cloud/bigquery/it/ITRemoteUDFTest.java | 22 +- .../bigquery/spi/v2/HttpBigQueryRpcTest.java | 2 +- .../testing/RemoteBigQueryHelperTest.java | 12 +- pom.xml | 19 +- 90 files changed, 1576 insertions(+), 1598 deletions(-) diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 5e86e77fd2..174bb9cb36 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -162,8 +162,18 @@ test - junit - junit + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.mockito + mockito-junit-jupiter test @@ -207,6 +217,20 @@ + + + + org.apache.maven.plugins + maven-failsafe-plugin + 3.5.2 + + + org.apache.maven.surefire + surefire-junit-platform + ${surefire.version} + + + org.codehaus.mojo diff --git a/google-cloud-bigquery/src/test/java/MetadataCacheStatsTest.java b/google-cloud-bigquery/src/test/java/MetadataCacheStatsTest.java index d1cfa86e9e..d417bfc7fa 100644 --- a/google-cloud-bigquery/src/test/java/MetadataCacheStatsTest.java +++ b/google-cloud-bigquery/src/test/java/MetadataCacheStatsTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.api.services.bigquery.model.MetadataCacheStatistics; import com.google.common.collect.ImmutableList; import com.google.common.truth.Truth; import java.util.List; import java.util.stream.Collectors; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class MetadataCacheStatsTest { +class MetadataCacheStatsTest { private static List 
TABLE_METADATA_CACHE_USAGE_PB_LIST = ImmutableList.of( @@ -44,7 +44,7 @@ public class MetadataCacheStatsTest { new MetadataCacheStatistics().setTableMetadataCacheUsage(TABLE_METADATA_CACHE_USAGE_PB_LIST); @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { assertEquals(METADATA_CACHE_STATISTICS_PB, METADATA_CACHE_STATS.toPb()); compareMetadataCacheStats( METADATA_CACHE_STATS, MetadataCacheStats.fromPb(METADATA_CACHE_STATISTICS_PB)); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java index 0b53f32ff4..f7bed53ba1 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java @@ -16,7 +16,7 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.api.services.bigquery.model.Dataset; import com.google.cloud.bigquery.Acl.DatasetAclEntity; @@ -31,12 +31,12 @@ import com.google.cloud.bigquery.Acl.View; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class AclTest { +class AclTest { @Test - public void testDatasetEntity() { + void testDatasetEntity() { DatasetId datasetId = DatasetId.of("dataset"); List targetTypes = ImmutableList.of("VIEWS"); DatasetAclEntity entity = new DatasetAclEntity(datasetId, targetTypes); @@ -47,7 +47,7 @@ public void testDatasetEntity() { } @Test - public void testDomainEntity() { + void testDomainEntity() { Domain entity = new Domain("d1"); assertEquals("d1", entity.getDomain()); assertEquals(Type.DOMAIN, entity.getType()); @@ -56,7 +56,7 @@ public void testDomainEntity() { } @Test - public void testGroupEntity() { + void testGroupEntity() { Group entity = new Group("g1"); assertEquals("g1", entity.getIdentifier()); assertEquals(Type.GROUP, entity.getType()); @@ -65,7 +65,7 @@ public void testGroupEntity() { } @Test - public void testSpecialGroupEntity() { + void testSpecialGroupEntity() { Group entity = Group.ofAllAuthenticatedUsers(); assertEquals("allAuthenticatedUsers", entity.getIdentifier()); Dataset.Access pb = entity.toPb(); @@ -85,7 +85,7 @@ public void testSpecialGroupEntity() { } @Test - public void testUserEntity() { + void testUserEntity() { User entity = new User("u1"); assertEquals("u1", entity.getEmail()); assertEquals(Type.USER, entity.getType()); @@ -94,7 +94,7 @@ public void testUserEntity() { } @Test - public void testViewEntity() { + void testViewEntity() { TableId viewId = TableId.of("project", "dataset", "view"); View entity = new View(viewId); assertEquals(viewId, entity.getId()); @@ -104,7 +104,7 @@ public void testViewEntity() { } @Test - public void testRoutineEntity() { + void testRoutineEntity() { RoutineId routineId = RoutineId.of("project", "dataset", "routine"); Acl.Routine entity = new Acl.Routine(routineId); assertEquals(routineId, entity.getId()); @@ -114,7 +114,7 @@ public void testRoutineEntity() { } @Test - public void testIamMemberEntity() { + void testIamMemberEntity() { IamMember entity = new IamMember("member1"); assertEquals("member1", entity.getIamMember()); Dataset.Access pb = entity.toPb(); @@ -122,7 +122,7 @@ public void testIamMemberEntity() { } @Test - public void testOf() { + void testOf() { Acl acl = Acl.of(Group.ofAllAuthenticatedUsers(), Role.READER); 
assertEquals(Group.ofAllAuthenticatedUsers(), acl.getEntity()); assertEquals(Role.READER, acl.getRole()); @@ -139,7 +139,7 @@ public void testOf() { } @Test - public void testOfWithCondition() { + void testOfWithCondition() { Expr expr = new Expr("expression", "title", "description", "location"); Acl acl = Acl.of(Group.ofAllAuthenticatedUsers(), Role.READER, expr); Dataset.Access pb = acl.toPb(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AnnotationsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AnnotationsTest.java index aa3dd9fde4..ad475c7dce 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AnnotationsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AnnotationsTest.java @@ -17,17 +17,17 @@ package com.google.cloud.bigquery; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.api.client.util.Data; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class AnnotationsTest { @Test - public void testFromUser() { + void testFromUser() { assertThat(Annotations.fromUser(null).userMap()).isNull(); HashMap user = new HashMap<>(); @@ -43,7 +43,7 @@ public void testFromUser() { } @Test - public void testFromToPb() { + void testFromToPb() { assertThat(Annotations.fromPb(null).toPb()).isNull(); HashMap pb = new HashMap<>(); @@ -60,17 +60,13 @@ public void testFromToPb() { } @Test - public void testNullKey() { - try { - Annotations.fromUser(Collections.singletonMap((String) null, "foo")); - fail("null key shouldn't work"); - } catch (IllegalArgumentException e) { - } + void testNullKey() { + assertThrows( + IllegalArgumentException.class, + () -> Annotations.fromUser(Collections.singletonMap((String) null, "foo"))); - try { - Annotations.fromPb(Collections.singletonMap((String) null, "foo")); - fail("null key shouldn't work"); - } catch (IllegalArgumentException e) { - } + assertThrows( + IllegalArgumentException.class, + () -> Annotations.fromPb(Collections.singletonMap((String) null, "foo"))); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AvroOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AvroOptionsTest.java index f40660fd7b..840ae24ba8 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AvroOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AvroOptionsTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class AvroOptionsTest { @@ -27,7 +27,7 @@ public class AvroOptionsTest { AvroOptions.newBuilder().setUseAvroLogicalTypes(USE_AVRO_LOGICAL_TYPES).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareAvroOptions(AVRO_OPTIONS, AVRO_OPTIONS.toBuilder().build()); AvroOptions avroOptions = AVRO_OPTIONS.toBuilder().setUseAvroLogicalTypes(false).build(); assertEquals(false, avroOptions.useAvroLogicalTypes()); @@ -36,13 +36,13 @@ public void testToBuilder() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(FormatOptions.AVRO, AVRO_OPTIONS.getType()); assertEquals(USE_AVRO_LOGICAL_TYPES, AVRO_OPTIONS.useAvroLogicalTypes()); } @Test 
- public void testToAndFromPb() { + void testToAndFromPb() { compareAvroOptions(AVRO_OPTIONS, AvroOptions.fromPb(AVRO_OPTIONS.toPb())); AvroOptions avroOptions = AvroOptions.newBuilder().setUseAvroLogicalTypes(USE_AVRO_LOGICAL_TYPES).build(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigLakeConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigLakeConfigurationTest.java index afb2b5b10a..66fcd7c6b4 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigLakeConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigLakeConfigurationTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class BigLakeConfigurationTest { +class BigLakeConfigurationTest { private static final String STORAGE_URI = "gs://storage-uri"; private static final String FILE_FORMAT = "PARQUET"; @@ -43,7 +43,7 @@ public class BigLakeConfigurationTest { .setConnectionId(CONNECTION_ID); @Test - public void testToBuilder() { + void testToBuilder() { assertEquals(STORAGE_URI, BIG_LAKE_CONFIGURATION.getStorageUri()); assertEquals(FILE_FORMAT, BIG_LAKE_CONFIGURATION.getFileFormat()); assertEquals(TABLE_FORMAT, BIG_LAKE_CONFIGURATION.getTableFormat()); @@ -51,12 +51,12 @@ public void testToBuilder() { } @Test - public void testToPb() { + void testToPb() { assertBigLakeConfiguration(BIG_LAKE_CONFIGURATION_PB, BIG_LAKE_CONFIGURATION.toPb()); } @Test - public void testFromPb() { + void testFromPb() { assertBigLakeConfiguration( BIG_LAKE_CONFIGURATION, BigLakeConfiguration.fromPb(BIG_LAKE_CONFIGURATION_PB)); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryErrorTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryErrorTest.java index 7cd737cf4b..d618214e29 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryErrorTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryErrorTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BigQueryErrorTest { @@ -32,7 +32,7 @@ public class BigQueryErrorTest { new BigQueryError(REASON, LOCATION, MESSAGE); @Test - public void testConstructor() { + void testConstructor() { assertEquals(REASON, ERROR.getReason()); assertEquals(LOCATION, ERROR.getLocation()); assertEquals(DEBUG_INFO, ERROR.getDebugInfo()); @@ -44,7 +44,7 @@ public void testConstructor() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareBigQueryError(ERROR, BigQueryError.fromPb(ERROR.toPb())); compareBigQueryError(ERROR_INCOMPLETE, BigQueryError.fromPb(ERROR_INCOMPLETE.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java index 8a2094b558..7254ede1bc 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; 
-import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -40,15 +40,15 @@ import java.net.SocketTimeoutException; import java.util.HashMap; import java.util.Map; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class BigQueryExceptionTest { @Test - public void testBigQueryException() { + void testBigQueryException() { BigQueryException exception = new BigQueryException(500, "message"); assertEquals(500, exception.getCode()); assertEquals("message", exception.getMessage()); @@ -137,7 +137,7 @@ public void testBigQueryException() { } @Test - public void testTranslateAndThrow() throws Exception { + void testTranslateAndThrow() throws Exception { Exception cause = new BigQueryException(503, "message"); RetryHelperException exceptionMock = mock(RetryHelperException.class); when(exceptionMock.getCause()).thenReturn(cause); @@ -168,7 +168,7 @@ public void testTranslateAndThrow() throws Exception { } @Test - public void testDefaultExceptionHandler() throws java.io.IOException { + void testDefaultExceptionHandler() throws java.io.IOException { BigQueryOptions defaultOptions = BigQueryOptions.newBuilder().setProjectId("project-id").build(); DatasetInfo info = DatasetInfo.newBuilder("dataset").build(); @@ -198,7 +198,7 @@ public void testDefaultExceptionHandler() throws java.io.IOException { } @Test - public void testCustomExceptionHandler() throws java.io.IOException { + void testCustomExceptionHandler() throws java.io.IOException { BigQueryOptions defaultOptions = BigQueryOptions.newBuilder() .setProjectId("project-id") diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java index 393455e364..20a6ef679e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java @@ -20,12 +20,33 @@ import static com.google.cloud.bigquery.BigQuery.JobField.USER_EMAIL; import static com.google.cloud.bigquery.BigQueryImpl.optionMap; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static 
org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.nullable; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import com.google.api.gax.paging.Page; -import com.google.api.services.bigquery.model.*; +import com.google.api.services.bigquery.model.ErrorProto; +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.JobConfigurationQuery; import com.google.api.services.bigquery.model.JobStatistics; +import com.google.api.services.bigquery.model.QueryRequest; +import com.google.api.services.bigquery.model.TableCell; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse; +import com.google.api.services.bigquery.model.TableDataList; +import com.google.api.services.bigquery.model.TableRow; import com.google.cloud.Policy; import com.google.cloud.RetryOption; import com.google.cloud.ServiceOptions; @@ -39,7 +60,11 @@ import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import com.google.common.base.Function; import com.google.common.base.Supplier; -import com.google.common.collect.*; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import java.io.IOException; import java.math.BigInteger; import java.net.ConnectException; @@ -47,16 +72,18 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; -import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class BigQueryImplTest { private static final String PROJECT = "project"; @@ -537,8 +564,8 @@ private BigQueryOptions createBigQueryOptionsForProjectWithLocation( .build(); } - @Before - public void setUp() { + @BeforeEach + void setUp() { rpcFactoryMock = mock(BigQueryRpcFactory.class); bigqueryRpcMock = mock(HttpBigQueryRpc.class); when(rpcFactoryMock.create(any(BigQueryOptions.class))).thenReturn(bigqueryRpcMock); @@ -546,13 +573,13 @@ public void setUp() { } @Test - public void testGetOptions() { + void testGetOptions() { bigquery = options.getService(); assertSame(options, bigquery.getOptions()); } @Test - public void testCreateDataset() throws IOException { + void testCreateDataset() throws IOException { DatasetInfo datasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT); when(bigqueryRpcMock.createSkipExceptionTranslation(datasetInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(datasetInfo.toPb()); @@ -565,7 +592,7 @@ public void testCreateDataset() throws IOException { } @Test - public void testCreateDatasetWithSelectedFields() throws IOException { + void testCreateDatasetWithSelectedFields() 
throws IOException { when(bigqueryRpcMock.createSkipExceptionTranslation( eq(DATASET_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); @@ -584,7 +611,7 @@ public void testCreateDatasetWithSelectedFields() throws IOException { } @Test - public void testCreateDatasetWithAccessPolicy() throws IOException { + void testCreateDatasetWithAccessPolicy() throws IOException { DatasetInfo datasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT); DatasetOption datasetOption = DatasetOption.accessPolicyVersion(3); when(bigqueryRpcMock.createSkipExceptionTranslation( @@ -600,7 +627,7 @@ public void testCreateDatasetWithAccessPolicy() throws IOException { } @Test - public void testGetDataset() throws IOException { + void testGetDataset() throws IOException { when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); @@ -611,7 +638,7 @@ public void testGetDataset() throws IOException { } @Test - public void testGetDatasetNotFoundWhenThrowIsDisabled() throws IOException { + void testGetDatasetNotFoundWhenThrowIsDisabled() throws IOException { when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); options.setThrowNotFound(false); @@ -623,24 +650,22 @@ public void testGetDatasetNotFoundWhenThrowIsDisabled() throws IOException { } @Test - public void testGetDatasetNotFoundWhenThrowIsEnabled() throws IOException { + void testGetDatasetNotFoundWhenThrowIsEnabled() throws IOException { when(bigqueryRpcMock.getDatasetSkipExceptionTranslation( PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, "Dataset not found")); options.setThrowNotFound(true); bigquery = options.getService(); - try { - bigquery.getDataset("dataset-not-found"); - Assert.fail(); - } catch (BigQueryException ex) { - Assert.assertNotNull(ex.getMessage()); - } + BigQueryException ex = + Assertions.assertThrows( + BigQueryException.class, () -> bigquery.getDataset("dataset-not-found")); + Assertions.assertNotNull(ex.getMessage()); verify(bigqueryRpcMock) .getDatasetSkipExceptionTranslation(PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetFromDatasetId() throws IOException { + void testGetDatasetFromDatasetId() throws IOException { when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); @@ -651,7 +676,7 @@ public void testGetDatasetFromDatasetId() throws IOException { } @Test - public void testGetDatasetFromDatasetIdWithProject() throws IOException { + void testGetDatasetFromDatasetIdWithProject() throws IOException { DatasetInfo datasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT); DatasetId datasetId = DatasetId.of(OTHER_PROJECT, DATASET); when(bigqueryRpcMock.getDatasetSkipExceptionTranslation( @@ -665,7 +690,7 @@ public void testGetDatasetFromDatasetIdWithProject() throws IOException { } @Test - public void testGetDatasetWithSelectedFields() throws IOException { + void testGetDatasetWithSelectedFields() throws IOException { when(bigqueryRpcMock.getDatasetSkipExceptionTranslation( eq(PROJECT), eq(DATASET), capturedOptions.capture())) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); @@ -683,7 +708,7 @@ public void testGetDatasetWithSelectedFields() throws IOException { } @Test - public void testListDatasets() 
throws IOException { + void testListDatasets() throws IOException { bigquery = options.getService(); ImmutableList datasetList = ImmutableList.of( @@ -701,7 +726,7 @@ public void testListDatasets() throws IOException { } @Test - public void testListDatasetsWithProjects() throws IOException { + void testListDatasetsWithProjects() throws IOException { bigquery = options.getService(); ImmutableList datasetList = ImmutableList.of( @@ -719,7 +744,7 @@ public void testListDatasetsWithProjects() throws IOException { } @Test - public void testListEmptyDatasets() throws IOException { + void testListEmptyDatasets() throws IOException { ImmutableList datasets = ImmutableList.of(); Tuple> result = Tuple.>of(null, datasets); @@ -734,7 +759,7 @@ public void testListEmptyDatasets() throws IOException { } @Test - public void testListDatasetsWithOptions() throws IOException { + void testListDatasetsWithOptions() throws IOException { bigquery = options.getService(); ImmutableList datasetList = ImmutableList.of( @@ -753,7 +778,7 @@ public void testListDatasetsWithOptions() throws IOException { } @Test - public void testDeleteDataset() throws IOException { + void testDeleteDataset() throws IOException { when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(true); bigquery = options.getService(); @@ -763,7 +788,7 @@ public void testDeleteDataset() throws IOException { } @Test - public void testDeleteDatasetFromDatasetId() throws IOException { + void testDeleteDatasetFromDatasetId() throws IOException { when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(true); bigquery = options.getService(); @@ -773,7 +798,7 @@ public void testDeleteDatasetFromDatasetId() throws IOException { } @Test - public void testDeleteDatasetFromDatasetIdWithProject() throws IOException { + void testDeleteDatasetFromDatasetIdWithProject() throws IOException { DatasetId datasetId = DatasetId.of(OTHER_PROJECT, DATASET); when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation( OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)) @@ -785,7 +810,7 @@ public void testDeleteDatasetFromDatasetIdWithProject() throws IOException { } @Test - public void testDeleteDatasetWithOptions() throws IOException { + void testDeleteDatasetWithOptions() throws IOException { when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation( PROJECT, DATASET, DATASET_DELETE_OPTIONS)) .thenReturn(true); @@ -796,7 +821,7 @@ public void testDeleteDatasetWithOptions() throws IOException { } @Test - public void testUpdateDataset() throws IOException { + void testUpdateDataset() throws IOException { DatasetInfo updatedDatasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT).toBuilder() .setDescription("newDescription") @@ -812,7 +837,7 @@ public void testUpdateDataset() throws IOException { } @Test - public void testUpdateDatasetWithSelectedFields() throws IOException { + void testUpdateDatasetWithSelectedFields() throws IOException { DatasetInfo updatedDatasetInfo = DATASET_INFO.toBuilder().setDescription("newDescription").build(); DatasetInfo updatedDatasetInfoWithProject = @@ -835,7 +860,7 @@ public void testUpdateDatasetWithSelectedFields() throws IOException { } @Test - public void testCreateTable() throws IOException { + void testCreateTable() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(OTHER_PROJECT); when(bigqueryRpcMock.createSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(tableInfo.toPb()); @@ -848,7 +873,7 @@ 
public void testCreateTable() throws IOException { } @Test - public void tesCreateExternalTable() throws IOException { + void tesCreateExternalTable() throws IOException { TableInfo createTableInfo = TableInfo.of(TABLE_ID, ExternalTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build()) .setProjectId(OTHER_PROJECT); @@ -867,7 +892,7 @@ public void tesCreateExternalTable() throws IOException { } @Test - public void testCreateTableWithoutProject() throws IOException { + void testCreateTableWithoutProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(PROJECT); TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); tableInfo.toBuilder().setTableId(tableId); @@ -881,7 +906,7 @@ public void testCreateTableWithoutProject() throws IOException { } @Test - public void testCreateTableWithSelectedFields() throws IOException { + void testCreateTableWithSelectedFields() throws IOException { when(bigqueryRpcMock.createSkipExceptionTranslation( eq(TABLE_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); @@ -899,7 +924,7 @@ public void testCreateTableWithSelectedFields() throws IOException { } @Test - public void testGetTable() throws IOException { + void testGetTable() throws IOException { when(bigqueryRpcMock.getTableSkipExceptionTranslation( PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); @@ -911,7 +936,7 @@ public void testGetTable() throws IOException { } @Test - public void testGetModel() throws IOException { + void testGetModel() throws IOException { when(bigqueryRpcMock.getModelSkipExceptionTranslation( PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) .thenReturn(MODEL_INFO_WITH_PROJECT.toPb()); @@ -923,7 +948,7 @@ public void testGetModel() throws IOException { } @Test - public void testGetModelNotFoundWhenThrowIsEnabled() throws IOException { + void testGetModelNotFoundWhenThrowIsEnabled() throws IOException { String expected = "Model not found"; when(bigqueryRpcMock.getModelSkipExceptionTranslation( PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) @@ -940,7 +965,7 @@ public void testGetModelNotFoundWhenThrowIsEnabled() throws IOException { } @Test - public void testListPartition() throws IOException { + void testListPartition() throws IOException { when(bigqueryRpcMock.getTableSkipExceptionTranslation( PROJECT, DATASET, "table$__PARTITIONS_SUMMARY__", EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_PARTITIONS.toPb()); @@ -958,7 +983,7 @@ public void testListPartition() throws IOException { } @Test - public void testGetTableNotFoundWhenThrowIsDisabled() throws IOException { + void testGetTableNotFoundWhenThrowIsDisabled() throws IOException { when(bigqueryRpcMock.getTableSkipExceptionTranslation( PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); @@ -971,7 +996,7 @@ public void testGetTableNotFoundWhenThrowIsDisabled() throws IOException { } @Test - public void testGetTableNotFoundWhenThrowIsEnabled() throws IOException { + void testGetTableNotFoundWhenThrowIsEnabled() throws IOException { when(bigqueryRpcMock.getTableSkipExceptionTranslation( PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, "Table not found")); @@ -979,16 +1004,16 @@ public void testGetTableNotFoundWhenThrowIsEnabled() throws IOException { bigquery = options.getService(); try { bigquery.getTable(DATASET, "table-not-found"); - Assert.fail(); + Assertions.fail(); } catch (BigQueryException ex) { - 
Assert.assertNotNull(ex.getMessage()); + Assertions.assertNotNull(ex.getMessage()); } verify(bigqueryRpcMock) .getTableSkipExceptionTranslation(PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS); } @Test - public void testGetTableFromTableId() throws IOException { + void testGetTableFromTableId() throws IOException { when(bigqueryRpcMock.getTableSkipExceptionTranslation( PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); @@ -1000,7 +1025,7 @@ public void testGetTableFromTableId() throws IOException { } @Test - public void testGetTableFromTableIdWithProject() throws IOException { + void testGetTableFromTableIdWithProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(OTHER_PROJECT); TableId tableId = TABLE_ID.setProjectId(OTHER_PROJECT); when(bigqueryRpcMock.getTableSkipExceptionTranslation( @@ -1016,7 +1041,7 @@ public void testGetTableFromTableIdWithProject() throws IOException { } @Test - public void testGetTableFromTableIdWithoutProject() throws IOException { + void testGetTableFromTableIdWithoutProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(PROJECT); TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); when(bigqueryRpcMock.getTableSkipExceptionTranslation( @@ -1031,7 +1056,7 @@ public void testGetTableFromTableIdWithoutProject() throws IOException { } @Test - public void testGetTableWithSelectedFields() throws IOException { + void testGetTableWithSelectedFields() throws IOException { when(bigqueryRpcMock.getTableSkipExceptionTranslation( eq(PROJECT), eq(DATASET), eq(TABLE), capturedOptions.capture())) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); @@ -1049,7 +1074,7 @@ public void testGetTableWithSelectedFields() throws IOException { } @Test - public void testListTables() throws IOException { + void testListTables() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -1084,7 +1109,7 @@ public void testListTablesReturnedParameters() throws IOException { } @Test - public void testListTablesReturnedParametersNullType() throws IOException { + void testListTablesReturnedParametersNullType() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -1101,7 +1126,7 @@ public void testListTablesReturnedParametersNullType() throws IOException { } @Test - public void testListTablesWithRangePartitioning() throws IOException { + void testListTablesWithRangePartitioning() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -1118,7 +1143,7 @@ public void testListTablesWithRangePartitioning() throws IOException { } @Test - public void testListTablesFromDatasetId() throws IOException { + void testListTablesFromDatasetId() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -1135,7 +1160,7 @@ public void testListTablesFromDatasetId() throws IOException { } @Test - public void testListTablesFromDatasetIdWithProject() throws IOException { + void testListTablesFromDatasetIdWithProject() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -1153,7 +1178,7 @@ public void testListTablesFromDatasetIdWithProject() throws IOException { } @Test - public void testListTablesWithLabels() throws IOException { + void testListTablesWithLabels() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -1170,7 +1195,7 @@ public void testListTablesWithLabels() throws IOException { } @Test - public void testListTablesWithOptions() throws IOException { + void testListTablesWithOptions() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -1188,7 +1213,7 @@ public void testListTablesWithOptions() throws IOException { } @Test - public void testListModels() throws IOException { + void testListModels() throws IOException { bigquery = options.getService(); ImmutableList modelList = ImmutableList.of( @@ -1205,7 +1230,7 @@ public void testListModels() throws IOException { } @Test - public void testListModelsWithModelId() throws IOException { + void testListModelsWithModelId() throws IOException { bigquery = options.getService(); ImmutableList modelList = ImmutableList.of( @@ -1222,7 +1247,7 @@ public void testListModelsWithModelId() throws IOException { } @Test - public void testDeleteTable() throws IOException { + void testDeleteTable() throws IOException { when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE)) .thenReturn(true); bigquery = options.getService(); @@ -1231,7 +1256,7 @@ public void testDeleteTable() throws IOException { } @Test - public void testDeleteTableFromTableId() throws IOException { + void testDeleteTableFromTableId() throws IOException { when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE)) .thenReturn(true); bigquery = options.getService(); @@ -1240,7 +1265,7 @@ public void testDeleteTableFromTableId() throws IOException { } @Test - public void testDeleteTableFromTableIdWithProject() throws IOException { + void testDeleteTableFromTableIdWithProject() throws IOException { TableId tableId = TABLE_ID.setProjectId(OTHER_PROJECT); when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE)) .thenReturn(true); @@ -1252,7 +1277,7 @@ public void testDeleteTableFromTableIdWithProject() throws IOException { } @Test - public void testDeleteTableFromTableIdWithoutProject() throws IOException { + void testDeleteTableFromTableIdWithoutProject() throws IOException { TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE)) .thenReturn(true); @@ -1263,7 +1288,7 @@ public void testDeleteTableFromTableIdWithoutProject() throws IOException { } @Test - public void testDeleteModel() throws IOException { + void testDeleteModel() throws IOException { when(bigqueryRpcMock.deleteModelSkipExceptionTranslation(PROJECT, DATASET, MODEL)) .thenReturn(true); bigquery = options.getService(); @@ -1272,7 +1297,7 @@ public void testDeleteModel() throws IOException { } @Test - public void testUpdateModel() throws IOException { + void testUpdateModel() throws IOException { ModelInfo updateModelInfo = MODEL_INFO_WITH_PROJECT.setProjectId(OTHER_PROJECT).toBuilder() .setDescription("newDescription") @@ -1289,7 +1314,7 @@ public void testUpdateModel() throws IOException { } @Test - public void testUpdateTable() throws IOException { + void testUpdateTable() throws IOException { TableInfo updatedTableInfo = TABLE_INFO.setProjectId(OTHER_PROJECT).toBuilder().setDescription("newDescription").build(); when(bigqueryRpcMock.patchSkipExceptionTranslation(updatedTableInfo.toPb(), EMPTY_RPC_OPTIONS)) @@ -1304,7 +1329,7 @@ public void testUpdateTable() throws IOException { } @Test - public void testUpdateExternalTableWithNewSchema() throws IOException { + void testUpdateExternalTableWithNewSchema() throws IOException { TableInfo updatedTableInfo = TableInfo.of(TABLE_ID, ExternalTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build()) .setProjectId(OTHER_PROJECT); @@ -1323,7 +1348,7 @@ public void testUpdateExternalTableWithNewSchema() 
throws IOException { } @Test - public void testUpdateTableWithoutProject() throws IOException { + void testUpdateTableWithoutProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(PROJECT); TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); tableInfo.toBuilder().setTableId(tableId); @@ -1337,7 +1362,7 @@ public void testUpdateTableWithoutProject() throws IOException { } @Test - public void testUpdateTableWithSelectedFields() throws IOException { + void testUpdateTableWithSelectedFields() throws IOException { TableInfo updatedTableInfo = TABLE_INFO.toBuilder().setDescription("newDescription").build(); TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder().setDescription("newDescription").build(); @@ -1359,7 +1384,7 @@ public void testUpdateTableWithSelectedFields() throws IOException { } @Test - public void testUpdateTableWithAutoDetectSchema() throws IOException { + void testUpdateTableWithAutoDetectSchema() throws IOException { TableInfo updatedTableInfo = TABLE_INFO.toBuilder().setDescription("newDescription").build(); TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder().setDescription("newDescription").build(); @@ -1379,7 +1404,7 @@ public void testUpdateTableWithAutoDetectSchema() throws IOException { } @Test - public void testInsertAllWithRowIdShouldRetry() throws IOException { + void testInsertAllWithRowIdShouldRetry() throws IOException { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = @@ -1432,7 +1457,7 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { } @Test - public void testInsertAllWithoutRowIdShouldNotRetry() { + void testInsertAllWithoutRowIdShouldNotRetry() { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = ImmutableList.of(RowToInsert.of(row1), RowToInsert.of(row2)); @@ -1468,15 +1493,15 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { .getService(); try { bigquery.insertAll(request); - Assert.fail(); + Assertions.fail(); } catch (BigQueryException ex) { - Assert.assertNotNull(ex.getMessage()); + Assertions.assertNotNull(ex.getMessage()); } verify(bigqueryRpcMock).insertAll(PROJECT, DATASET, TABLE, requestPb); } @Test - public void testInsertAllWithProject() throws IOException { + void testInsertAllWithProject() throws IOException { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = @@ -1528,7 +1553,7 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { } @Test - public void testInsertAllWithProjectInTable() throws IOException { + void testInsertAllWithProjectInTable() throws IOException { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = @@ -1581,7 +1606,7 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { } @Test - public void testListTableData() throws IOException { + void testListTableData() throws IOException { when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_DATA_PB); @@ -1594,7 +1619,7 @@ public void testListTableData() throws IOException { } @Test - public void testListTableDataFromTableId() throws IOException { + void testListTableDataFromTableId() throws IOException { when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) 
.thenReturn(TABLE_DATA_PB); @@ -1607,7 +1632,7 @@ public void testListTableDataFromTableId() throws IOException { } @Test - public void testListTableDataFromTableIdWithProject() throws IOException { + void testListTableDataFromTableIdWithProject() throws IOException { TableId tableId = TABLE_ID.setProjectId(OTHER_PROJECT); when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) @@ -1622,7 +1647,7 @@ public void testListTableDataFromTableIdWithProject() throws IOException { } @Test - public void testListTableDataWithOptions() throws IOException { + void testListTableDataWithOptions() throws IOException { when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS)) .thenReturn(TABLE_DATA_PB); @@ -1641,7 +1666,7 @@ public void testListTableDataWithOptions() throws IOException { } @Test - public void testListTableDataWithNextPage() throws IOException { + void testListTableDataWithNextPage() throws IOException { doReturn(TABLE_DATA_PB) .when(bigqueryRpcMock) .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); @@ -1687,7 +1712,7 @@ private static com.google.api.services.bigquery.model.Job newJobPb() { } @Test - public void testCreateJobSuccess() throws IOException { + void testCreateJobSuccess() throws IOException { String id = "testCreateJobSuccess-id"; JobId jobId = JobId.of(id); String query = "SELECT * in FOO"; @@ -1704,7 +1729,7 @@ public void testCreateJobSuccess() throws IOException { } @Test - public void testCreateJobFailureShouldRetryExceptionHandlerExceptions() throws IOException { + void testCreateJobFailureShouldRetryExceptionHandlerExceptions() throws IOException { when(bigqueryRpcMock.createSkipExceptionTranslation( jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) .thenThrow(new UnknownHostException()) @@ -1724,7 +1749,7 @@ public void testCreateJobFailureShouldRetryExceptionHandlerExceptions() throws I } @Test - public void testCreateJobFailureShouldRetry() throws IOException { + void testCreateJobFailureShouldRetry() throws IOException { when(bigqueryRpcMock.createSkipExceptionTranslation( jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) .thenThrow(new BigQueryException(500, "InternalError")) @@ -1749,7 +1774,7 @@ public void testCreateJobFailureShouldRetry() throws IOException { } @Test - public void testCreateJobWithBigQueryRetryConfigFailureShouldRetry() throws IOException { + void testCreateJobWithBigQueryRetryConfigFailureShouldRetry() throws IOException { // Validate create job with BigQueryRetryConfig that retries on rate limit error message. JobOption bigQueryRetryConfigOption = JobOption.bigQueryRetryConfig( @@ -1782,7 +1807,7 @@ public void testCreateJobWithBigQueryRetryConfigFailureShouldRetry() throws IOEx } @Test - public void testCreateJobWithBigQueryRetryConfigFailureShouldNotRetry() throws IOException { + void testCreateJobWithBigQueryRetryConfigFailureShouldNotRetry() throws IOException { // Validate create job with BigQueryRetryConfig that does not retry on rate limit error message. JobOption bigQueryRetryConfigOption = JobOption.bigQueryRetryConfig(BigQueryRetryConfig.newBuilder().build()); @@ -1795,7 +1820,7 @@ public void testCreateJobWithBigQueryRetryConfigFailureShouldNotRetry() throws I // Job create will attempt to retrieve the job even in the case when the job is created in a // returned failure. 
when(bigqueryRpcMock.getJobSkipExceptionTranslation( - nullable(String.class), nullable(String.class), nullable(String.class), Mockito.any())) + nullable(String.class), nullable(String.class), nullable(String.class), any())) .thenThrow(new BigQueryException(500, "InternalError")); bigquery = options.getService(); @@ -1805,13 +1830,15 @@ public void testCreateJobWithBigQueryRetryConfigFailureShouldNotRetry() throws I .build() .getService(); - try { - ((BigQueryImpl) bigquery) - .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), bigQueryRetryConfigOption); - fail("JobException expected"); - } catch (BigQueryException e) { - assertNotNull(e.getMessage()); - } + BigQueryException e = + Assertions.assertThrows( + BigQueryException.class, + () -> + ((BigQueryImpl) bigquery) + .create( + JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), + bigQueryRetryConfigOption)); + assertNotNull(e.getMessage()); // Verify that getQueryResults is attempted only once and not retried since the error message // does not match. verify(bigqueryRpcMock, times(1)) @@ -1819,7 +1846,7 @@ public void testCreateJobWithBigQueryRetryConfigFailureShouldNotRetry() throws I } @Test - public void testCreateJobWithRetryOptionsFailureShouldRetry() throws IOException { + void testCreateJobWithRetryOptionsFailureShouldRetry() throws IOException { // Validate create job with RetryOptions. JobOption retryOptions = JobOption.retryOptions(RetryOption.maxAttempts(4)); Map bigQueryRpcOptions = optionMap(retryOptions); @@ -1844,7 +1871,7 @@ public void testCreateJobWithRetryOptionsFailureShouldRetry() throws IOException } @Test - public void testCreateJobWithRetryOptionsFailureShouldNotRetry() throws IOException { + void testCreateJobWithRetryOptionsFailureShouldNotRetry() throws IOException { // Validate create job with RetryOptions that only attempts once (no retry). JobOption retryOptions = JobOption.retryOptions(RetryOption.maxAttempts(1)); Map bigQueryRpcOptions = optionMap(retryOptions); @@ -1856,7 +1883,7 @@ public void testCreateJobWithRetryOptionsFailureShouldNotRetry() throws IOExcept // Job create will attempt to retrieve the job even in the case when the job is created in a // returned failure. 
when(bigqueryRpcMock.getJobSkipExceptionTranslation( - nullable(String.class), nullable(String.class), nullable(String.class), Mockito.any())) + nullable(String.class), nullable(String.class), nullable(String.class), any())) .thenThrow(new BigQueryException(500, "InternalError")); bigquery = options.getService(); @@ -1866,19 +1893,19 @@ public void testCreateJobWithRetryOptionsFailureShouldNotRetry() throws IOExcept .build() .getService(); - try { - ((BigQueryImpl) bigquery) - .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), retryOptions); - fail("JobException expected"); - } catch (BigQueryException e) { - assertNotNull(e.getMessage()); - } + BigQueryException e = + Assertions.assertThrows( + BigQueryException.class, + () -> + ((BigQueryImpl) bigquery) + .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), retryOptions)); + assertNotNull(e.getMessage()); verify(bigqueryRpcMock, times(1)) .createSkipExceptionTranslation(jobCapture.capture(), eq(bigQueryRpcOptions)); } @Test - public void testCreateJobWithSelectedFields() throws IOException { + void testCreateJobWithSelectedFields() throws IOException { when(bigqueryRpcMock.createSkipExceptionTranslation( any(com.google.api.services.bigquery.model.Job.class), capturedOptions.capture())) .thenReturn(newJobPb()); @@ -1899,7 +1926,7 @@ public void testCreateJobWithSelectedFields() throws IOException { } @Test - public void testCreateJobNoGet() throws IOException { + void testCreateJobNoGet() throws IOException { String id = "testCreateJobNoGet-id"; JobId jobId = JobId.of(id); String query = "SELECT * in FOO"; @@ -1909,18 +1936,17 @@ public void testCreateJobNoGet() throws IOException { .thenThrow(new BigQueryException(409, "already exists, for some reason")); bigquery = options.getService(); - try { - bigquery.create(JobInfo.of(jobId, QueryJobConfiguration.of(query))); - fail("should throw"); - } catch (BigQueryException e) { - assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); - } + BigQueryException e = + Assertions.assertThrows( + BigQueryException.class, + () -> bigquery.create(JobInfo.of(jobId, QueryJobConfiguration.of(query)))); + assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); verify(bigqueryRpcMock) .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); } @Test - public void testCreateJobTryGet() throws IOException { + void testCreateJobTryGet() throws IOException { final String id = "testCreateJobTryGet-id"; String query = "SELECT * in FOO"; Supplier idProvider = @@ -1949,7 +1975,7 @@ public JobId get() { } @Test - public void testCreateJobTryGetNotRandom() throws IOException { + void testCreateJobTryGetNotRandom() throws IOException { Map withStatisticOption = optionMap(JobOption.fields(STATISTICS)); final String id = "testCreateJobTryGet-id"; String query = "SELECT * in FOO"; @@ -1981,7 +2007,7 @@ public void testCreateJobTryGetNotRandom() throws IOException { } @Test - public void testCreateJobWithProjectId() throws IOException { + void testCreateJobWithProjectId() throws IOException { JobInfo jobInfo = JobInfo.newBuilder(QUERY_JOB_CONFIGURATION.setProjectId(OTHER_PROJECT)) .setJobId(JobId.of(OTHER_PROJECT, JOB)) @@ -2004,7 +2030,7 @@ public void testCreateJobWithProjectId() throws IOException { } @Test - public void testDeleteJob() throws IOException { + void testDeleteJob() throws IOException { JobId jobId = JobId.newBuilder().setJob(JOB).setProject(PROJECT).setLocation(LOCATION).build(); 
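[Reviewer note on the create-job retry tests above: job creation accepts per-call retry options, and the two flavors exercised here differ in what triggers a retry. A rough usage sketch, assuming only the public API already shown in these tests; the query string is a placeholder.]

// Illustrative only: the two per-call retry knobs exercised by the tests above.
import com.google.cloud.RetryOption;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQuery.JobOption;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.BigQueryRetryConfig;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.QueryJobConfiguration;

public class CreateJobRetrySketch {
  public static void main(String... args) {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

    // Generic retry settings: cap the create-job call at 4 attempts,
    // as in testCreateJobWithRetryOptionsFailureShouldRetry.
    JobOption retryOptions = JobOption.retryOptions(RetryOption.maxAttempts(4));

    // BigQuery-specific retry predicate: an empty config adds no retriable
    // error messages, which is why the ShouldNotRetry variant above verifies
    // exactly one RPC attempt.
    JobOption retryConfig =
        JobOption.bigQueryRetryConfig(BigQueryRetryConfig.newBuilder().build());

    JobInfo jobInfo = JobInfo.of(QueryJobConfiguration.of("SELECT 1")); // placeholder query
    bigquery.create(jobInfo, retryOptions); // or: bigquery.create(jobInfo, retryConfig);
  }
}

[RetryOption feeds the generic attempt/backoff algorithm, while BigQueryRetryConfig additionally matches BigQuery error messages such as rate-limit errors, per the comments in the tests above.]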
when(bigqueryRpcMock.deleteJobSkipExceptionTranslation(PROJECT, JOB, LOCATION)) .thenReturn(true); @@ -2014,7 +2040,7 @@ public void testDeleteJob() throws IOException { } @Test - public void testGetJob() throws IOException { + void testGetJob() throws IOException { when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); bigquery = options.getService(); @@ -2024,7 +2050,7 @@ public void testGetJob() throws IOException { } @Test - public void testGetJobWithLocation() throws IOException { + void testGetJobWithLocation() throws IOException { when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); @@ -2036,7 +2062,7 @@ public void testGetJobWithLocation() throws IOException { } @Test - public void testGetJobNotFoundWhenThrowIsDisabled() throws IOException { + void testGetJobNotFoundWhenThrowIsDisabled() throws IOException { when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); options.setThrowNotFound(false); @@ -2047,24 +2073,21 @@ public void testGetJobNotFoundWhenThrowIsDisabled() throws IOException { } @Test - public void testGetJobNotFoundWhenThrowIsEnabled() throws IOException { + void testGetJobNotFoundWhenThrowIsEnabled() throws IOException { when(bigqueryRpcMock.getJobSkipExceptionTranslation( PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS)) .thenThrow(new IOException("Job not found")); options.setThrowNotFound(true); bigquery = options.getService(); - try { - bigquery.getJob("job-not-found"); - Assert.fail(); - } catch (BigQueryException ex) { - Assert.assertNotNull(ex.getMessage()); - } + BigQueryException ex = + Assertions.assertThrows(BigQueryException.class, () -> bigquery.getJob("job-not-found")); + Assertions.assertNotNull(ex.getMessage()); verify(bigqueryRpcMock) .getJobSkipExceptionTranslation(PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobFromJobId() throws IOException { + void testGetJobFromJobId() throws IOException { when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); bigquery = options.getService(); @@ -2074,7 +2097,7 @@ public void testGetJobFromJobId() throws IOException { } @Test - public void testGetJobFromJobIdWithLocation() throws IOException { + void testGetJobFromJobIdWithLocation() throws IOException { when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); @@ -2086,7 +2109,7 @@ public void testGetJobFromJobIdWithLocation() throws IOException { } @Test - public void testGetJobFromJobIdWithProject() throws IOException { + void testGetJobFromJobIdWithProject() throws IOException { JobId jobId = JobId.of(OTHER_PROJECT, JOB); JobInfo jobInfo = COPY_JOB.setProjectId(OTHER_PROJECT); when(bigqueryRpcMock.getJobSkipExceptionTranslation( @@ -2100,7 +2123,7 @@ public void testGetJobFromJobIdWithProject() throws IOException { } @Test - public void testGetJobFromJobIdWithProjectWithLocation() throws IOException { + void testGetJobFromJobIdWithProjectWithLocation() throws IOException { JobId jobId = JobId.of(OTHER_PROJECT, JOB); JobInfo jobInfo = 
COPY_JOB.setProjectId(OTHER_PROJECT); when(bigqueryRpcMock.getJobSkipExceptionTranslation( @@ -2115,7 +2138,7 @@ public void testGetJobFromJobIdWithProjectWithLocation() throws IOException { } @Test - public void testListJobs() throws IOException { + void testListJobs() throws IOException { bigquery = options.getService(); ImmutableList jobList = ImmutableList.of( @@ -2141,7 +2164,7 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { } @Test - public void testListJobsWithOptions() throws IOException { + void testListJobsWithOptions() throws IOException { bigquery = options.getService(); ImmutableList jobList = ImmutableList.of( @@ -2169,7 +2192,7 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { } @Test - public void testListJobsWithSelectedFields() throws IOException { + void testListJobsWithSelectedFields() throws IOException { bigquery = options.getService(); ImmutableList jobList = ImmutableList.of( @@ -2205,7 +2228,7 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { } @Test - public void testCancelJob() throws IOException { + void testCancelJob() throws IOException { when(bigqueryRpcMock.cancelSkipExceptionTranslation(PROJECT, JOB, null)).thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.cancel(JOB)); @@ -2213,7 +2236,7 @@ public void testCancelJob() throws IOException { } @Test - public void testCancelJobFromJobId() throws IOException { + void testCancelJobFromJobId() throws IOException { when(bigqueryRpcMock.cancelSkipExceptionTranslation(PROJECT, JOB, null)).thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.cancel(JobId.of(PROJECT, JOB))); @@ -2221,7 +2244,7 @@ public void testCancelJobFromJobId() throws IOException { } @Test - public void testCancelJobFromJobIdWithProject() throws IOException { + void testCancelJobFromJobIdWithProject() throws IOException { JobId jobId = JobId.of(OTHER_PROJECT, JOB); when(bigqueryRpcMock.cancelSkipExceptionTranslation(OTHER_PROJECT, JOB, null)).thenReturn(true); bigquery = options.getService(); @@ -2230,7 +2253,7 @@ public void testCancelJobFromJobIdWithProject() throws IOException { } @Test - public void testQueryRequestCompleted() throws InterruptedException, IOException { + void testQueryRequestCompleted() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb = new com.google.api.services.bigquery.model.Job() @@ -2285,7 +2308,7 @@ PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) } @Test - public void testFastQueryRequestCompleted() throws InterruptedException, IOException { + void testFastQueryRequestCompleted() throws InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse queryResponsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2325,7 +2348,7 @@ public void testFastQueryRequestCompleted() throws InterruptedException, IOExcep } @Test - public void testFastQueryRequestCompletedWithLocation() throws InterruptedException, IOException { + void testFastQueryRequestCompletedWithLocation() throws InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse queryResponsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2366,7 +2389,7 @@ public void testFastQueryRequestCompletedWithLocation() throws InterruptedExcept } @Test - public void testFastQueryMultiplePages() throws InterruptedException, IOException { + 
void testFastQueryMultiplePages() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job responseJob = new com.google.api.services.bigquery.model.Job() @@ -2422,7 +2445,7 @@ PROJECT, DATASET, TABLE, optionMap(BigQuery.TableDataListOption.pageToken(CURSOR } @Test - public void testFastQuerySlowDdl() throws InterruptedException, IOException { + void testFastQuerySlowDdl() throws InterruptedException, IOException { // mock new fast query path response when running a query that takes more than 10s JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.QueryResponse queryResponsePb = @@ -2488,7 +2511,7 @@ PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) } @Test - public void testQueryRequestCompletedOptions() throws InterruptedException, IOException { + void testQueryRequestCompletedOptions() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb = new com.google.api.services.bigquery.model.Job() @@ -2546,7 +2569,7 @@ PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) } @Test - public void testQueryRequestCompletedOnSecondAttempt() throws InterruptedException, IOException { + void testQueryRequestCompletedOnSecondAttempt() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb1 = new com.google.api.services.bigquery.model.Job() @@ -2611,7 +2634,7 @@ PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) } @Test - public void testQueryWithTimeoutSetsTimeout() throws InterruptedException, IOException { + void testQueryWithTimeoutSetsTimeout() throws InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse queryResponsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2634,7 +2657,7 @@ public void testQueryWithTimeoutSetsTimeout() throws InterruptedException, IOExc } @Test - public void testGetQueryResults() throws IOException { + void testGetQueryResults() throws IOException { JobId queryJob = JobId.of(JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2658,7 +2681,7 @@ public void testGetQueryResults() throws IOException { } @Test - public void testGetQueryResultsRetry() throws IOException { + void testGetQueryResultsRetry() throws IOException { JobId queryJob = JobId.of(JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2702,7 +2725,7 @@ public void testGetQueryResultsRetry() throws IOException { } @Test - public void testGetQueryResultsWithProject() throws IOException { + void testGetQueryResultsWithProject() throws IOException { JobId queryJob = JobId.of(OTHER_PROJECT, JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2726,7 +2749,7 @@ public void testGetQueryResultsWithProject() throws IOException { } @Test - public void testGetQueryResultsWithOptions() throws IOException { + void testGetQueryResultsWithOptions() throws IOException { JobId queryJob = JobId.of(PROJECT, JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2755,7 +2778,7 @@ public void testGetQueryResultsWithOptions() throws IOException { } @Test - public void testGetDatasetRetryableException() throws IOException { + void testGetDatasetRetryableException() throws IOException { when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, 
EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(500, "InternalError")) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); @@ -2772,7 +2795,7 @@ public void testGetDatasetRetryableException() throws IOException { } @Test - public void testNonRetryableException() throws IOException { + void testNonRetryableException() throws IOException { String exceptionMessage = "Not Implemented"; when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(501, exceptionMessage)); @@ -2781,17 +2804,15 @@ public void testNonRetryableException() throws IOException { .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); - try { - bigquery.getDataset(DatasetId.of(DATASET)); - Assert.fail(); - } catch (BigQueryException ex) { - Assert.assertEquals(exceptionMessage, ex.getMessage()); - } + BigQueryException ex = + Assertions.assertThrows( + BigQueryException.class, () -> bigquery.getDataset(DatasetId.of(DATASET))); + assertEquals(exceptionMessage, ex.getMessage()); verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testRuntimeException() throws IOException { + void testRuntimeException() throws IOException { String exceptionMessage = "Artificial runtime exception"; when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new RuntimeException(exceptionMessage)); @@ -2800,32 +2821,29 @@ public void testRuntimeException() throws IOException { .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); - try { - bigquery.getDataset(DATASET); - Assert.fail(); - } catch (BigQueryException ex) { - Assert.assertTrue(ex.getMessage().endsWith(exceptionMessage)); - } + BigQueryException ex = + Assertions.assertThrows(BigQueryException.class, () -> bigquery.getDataset(DATASET)); + assertTrue(ex.getMessage().endsWith(exceptionMessage)); verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testQueryDryRun() throws Exception { + void testQueryDryRun() throws Exception { // https://github.com/googleapis/google-cloud-java/issues/2479 - try { - options.toBuilder() - .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) - .build() - .getService() - .query(QueryJobConfiguration.newBuilder("foo").setDryRun(true).build()); - Assert.fail(); - } catch (UnsupportedOperationException ex) { - Assert.assertNotNull(ex.getMessage()); - } + UnsupportedOperationException ex = + Assertions.assertThrows( + UnsupportedOperationException.class, + () -> + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService() + .query(QueryJobConfiguration.newBuilder("foo").setDryRun(true).build())); + Assertions.assertNotNull(ex.getMessage()); } @Test - public void testFastQuerySQLShouldRetry() throws Exception { + void testFastQuerySQLShouldRetry() throws Exception { com.google.api.services.bigquery.model.QueryResponse responsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2866,7 +2884,7 @@ public void testFastQuerySQLShouldRetry() throws Exception { } @Test - public void testFastQueryDMLShouldRetry() throws Exception { + void testFastQueryDMLShouldRetry() throws Exception { com.google.api.services.bigquery.model.QueryResponse responsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2907,7 +2925,7 @@ public void 
testFastQueryDMLShouldRetry() throws Exception { } @Test - public void testFastQueryRateLimitIdempotency() throws Exception { + void testFastQueryRateLimitIdempotency() throws Exception { com.google.api.services.bigquery.model.QueryResponse responsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2955,7 +2973,7 @@ public void testFastQueryRateLimitIdempotency() throws Exception { } @Test - public void testRateLimitRegEx() throws Exception { + void testRateLimitRegEx() throws Exception { String msg2 = "Job eceeded rate limits: Your table exceeded quota for table update operations. For more information, see https://cloud.google.com/bigquery/docs/troubleshoot-quotas"; String msg3 = "exceeded rate exceeded quota for table update"; @@ -2979,7 +2997,7 @@ public void testRateLimitRegEx() throws Exception { } @Test - public void testFastQueryDDLShouldRetry() throws Exception { + void testFastQueryDDLShouldRetry() throws Exception { com.google.api.services.bigquery.model.QueryResponse responsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -3019,7 +3037,7 @@ public void testFastQueryDDLShouldRetry() throws Exception { } @Test - public void testFastQueryBigQueryException() throws InterruptedException, IOException { + void testFastQueryBigQueryException() throws InterruptedException, IOException { List errorProtoList = ImmutableList.of( new ErrorProto() @@ -3040,12 +3058,10 @@ public void testFastQueryBigQueryException() throws InterruptedException, IOExce .thenReturn(responsePb); bigquery = options.getService(); - try { - bigquery.query(QUERY_JOB_CONFIGURATION_FOR_QUERY); - fail("BigQueryException expected"); - } catch (BigQueryException ex) { - assertEquals(Lists.transform(errorProtoList, BigQueryError.FROM_PB_FUNCTION), ex.getErrors()); - } + BigQueryException ex = + Assertions.assertThrows( + BigQueryException.class, () -> bigquery.query(QUERY_JOB_CONFIGURATION_FOR_QUERY)); + assertEquals(Lists.transform(errorProtoList, BigQueryError.FROM_PB_FUNCTION), ex.getErrors()); QueryRequest requestPb = requestPbCapture.getValue(); assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.getQuery(), requestPb.getQuery()); @@ -3058,7 +3074,7 @@ public void testFastQueryBigQueryException() throws InterruptedException, IOExce } @Test - public void testCreateRoutine() throws IOException { + void testCreateRoutine() throws IOException { RoutineInfo routineInfo = ROUTINE_INFO.setProjectId(OTHER_PROJECT); when(bigqueryRpcMock.createSkipExceptionTranslation(routineInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(routineInfo.toPb()); @@ -3071,7 +3087,7 @@ public void testCreateRoutine() throws IOException { } @Test - public void testGetRoutine() throws IOException { + void testGetRoutine() throws IOException { when(bigqueryRpcMock.getRoutineSkipExceptionTranslation( PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) .thenReturn(ROUTINE_INFO.toPb()); @@ -3083,7 +3099,7 @@ public void testGetRoutine() throws IOException { } @Test - public void testGetRoutineWithRountineId() throws IOException { + void testGetRoutineWithRountineId() throws IOException { when(bigqueryRpcMock.getRoutineSkipExceptionTranslation( PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) .thenReturn(ROUTINE_INFO.toPb()); @@ -3095,24 +3111,21 @@ public void testGetRoutineWithRountineId() throws IOException { } @Test - public void testGetRoutineWithEnabledThrowNotFoundException() throws IOException { + void testGetRoutineWithEnabledThrowNotFoundException() throws IOException { 
when(bigqueryRpcMock.getRoutineSkipExceptionTranslation( PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, "Routine not found")); options.setThrowNotFound(true); bigquery = options.getService(); - try { - bigquery.getRoutine(ROUTINE_ID); - fail(); - } catch (BigQueryException ex) { - assertEquals("Routine not found", ex.getMessage()); - } + BigQueryException ex = + Assertions.assertThrows(BigQueryException.class, () -> bigquery.getRoutine(ROUTINE_ID)); + assertEquals("Routine not found", ex.getMessage()); verify(bigqueryRpcMock) .getRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); } @Test - public void testUpdateRoutine() throws IOException { + void testUpdateRoutine() throws IOException { RoutineInfo updatedRoutineInfo = ROUTINE_INFO.setProjectId(OTHER_PROJECT).toBuilder() .setDescription("newDescription") @@ -3130,7 +3143,7 @@ public void testUpdateRoutine() throws IOException { } @Test - public void testListRoutines() throws IOException { + void testListRoutines() throws IOException { bigquery = options.getService(); ImmutableList routineList = ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); @@ -3146,7 +3159,7 @@ public void testListRoutines() throws IOException { } @Test - public void testListRoutinesWithDatasetId() throws IOException { + void testListRoutinesWithDatasetId() throws IOException { bigquery = options.getService(); ImmutableList routineList = ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); @@ -3162,7 +3175,7 @@ public void testListRoutinesWithDatasetId() throws IOException { } @Test - public void testDeleteRoutine() throws IOException { + void testDeleteRoutine() throws IOException { when(bigqueryRpcMock.deleteRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE)) .thenReturn(true); bigquery = options.getService(); @@ -3171,7 +3184,7 @@ public void testDeleteRoutine() throws IOException { } @Test - public void testWriteWithJob() throws IOException { + void testWriteWithJob() throws IOException { bigquery = options.getService(); Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); when(bigqueryRpcMock.openSkipExceptionTranslation( @@ -3197,7 +3210,7 @@ public void testWriteWithJob() throws IOException { } @Test - public void testWriteChannel() throws IOException { + void testWriteChannel() throws IOException { bigquery = options.getService(); Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); when(bigqueryRpcMock.openSkipExceptionTranslation( @@ -3223,7 +3236,7 @@ public void testWriteChannel() throws IOException { } @Test - public void testGetIamPolicy() throws IOException { + void testGetIamPolicy() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final com.google.api.services.bigquery.model.Policy apiPolicy = @@ -3237,7 +3250,7 @@ public void testGetIamPolicy() throws IOException { } @Test - public void testSetIamPolicy() throws IOException { + void testSetIamPolicy() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final com.google.api.services.bigquery.model.Policy apiPolicy = @@ -3253,7 +3266,7 @@ public void testSetIamPolicy() throws IOException { } @Test - public void testTestIamPermissions() throws IOException { + void testTestIamPermissions() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, 
DATASET, TABLE); final List checkedPermissions = ImmutableList.of("foo", "bar", "baz"); @@ -3273,7 +3286,7 @@ public void testTestIamPermissions() throws IOException { } @Test - public void testTestIamPermissionsWhenNoPermissionsGranted() throws IOException { + void testTestIamPermissionsWhenNoPermissionsGranted() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final List checkedPermissions = ImmutableList.of("foo", "bar", "baz"); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java index e77d7936a4..050deba4af 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java @@ -16,33 +16,34 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.cloud.TransportOptions; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class BigQueryOptionsTest { @Test - public void testInvalidTransport() { - try { - BigQueryOptions.newBuilder().setTransportOptions(Mockito.mock(TransportOptions.class)); - Assert.fail(); - } catch (IllegalArgumentException expected) { - Assert.assertNotNull(expected.getMessage()); - } + void testInvalidTransport() { + IllegalArgumentException expected = + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryOptions.newBuilder() + .setTransportOptions(Mockito.mock(TransportOptions.class))); + assertNotNull(expected.getMessage()); } @Test - public void dataFormatOptions_createdByDefault() { + void dataFormatOptions_createdByDefault() { BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("project-id").build(); assertNotNull(options.getDataFormatOptions()); @@ -53,7 +54,7 @@ public void dataFormatOptions_createdByDefault() { } @Test - public void nonBuilderSetUseInt64Timestamp_capturedInDataFormatOptions() { + void nonBuilderSetUseInt64Timestamp_capturedInDataFormatOptions() { BigQueryOptions options = BigQueryOptions.newBuilder() .setDataFormatOptions(DataFormatOptions.newBuilder().useInt64Timestamp(false).build()) @@ -65,7 +66,7 @@ public void nonBuilderSetUseInt64Timestamp_capturedInDataFormatOptions() { } @Test - public void nonBuilderSetUseInt64Timestamp_overridesEverything() { + void nonBuilderSetUseInt64Timestamp_overridesEverything() { BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("project-id").build(); options.setUseInt64Timestamps(true); @@ -73,7 +74,7 @@ public void nonBuilderSetUseInt64Timestamp_overridesEverything() { } @Test - public void 
noDataFormatOptions_capturesUseInt64TimestampSetInBuilder() { + void noDataFormatOptions_capturesUseInt64TimestampSetInBuilder() { BigQueryOptions options = BigQueryOptions.newBuilder().setUseInt64Timestamps(true).setProjectId("project-id").build(); @@ -81,7 +82,7 @@ public void noDataFormatOptions_capturesUseInt64TimestampSetInBuilder() { } @Test - public void dataFormatOptionsSetterHasPrecedence() { + void dataFormatOptionsSetterHasPrecedence() { BigQueryOptions options = BigQueryOptions.newBuilder() .setProjectId("project-id") diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java index ca150eb1ba..54d0b8e4e4 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java @@ -37,9 +37,9 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingDeque; import org.apache.arrow.vector.util.Text; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class BigQueryResultImplTest { +class BigQueryResultImplTest { private static final Schema SCHEMA = Schema.of( @@ -97,7 +97,7 @@ public class BigQueryResultImplTest { private static final int BUFFER_SIZE = 10; @Test - public void testResultSetFieldValueList() throws InterruptedException, SQLException { + void testResultSetFieldValueList() throws InterruptedException, SQLException { BlockingQueue> buffer = new LinkedBlockingDeque<>(BUFFER_SIZE); FieldValueList fieldValues = FieldValueList.of( @@ -199,7 +199,7 @@ public void testResultSetFieldValueList() throws InterruptedException, SQLExcept } @Test - public void testResultSetReadApi() throws InterruptedException, SQLException { + void testResultSetReadApi() throws InterruptedException, SQLException { BlockingQueue buffer = new LinkedBlockingDeque<>(BUFFER_SIZE); Map rowValues = new HashMap<>(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java index 88fa1595e3..a11d9b923b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java @@ -18,8 +18,8 @@ import static com.google.common.truth.Truth.assertThat; import com.google.common.collect.ImmutableList; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; public class BigtableOptionsTest { @@ -55,7 +55,7 @@ public class BigtableOptionsTest { .build(); @Test - public void testConstructors() { + void testConstructors() { // column assertThat(COL1.getQualifierEncoded()).isEqualTo("aaa"); assertThat(COL1.getFieldName()).isEqualTo("field1"); @@ -80,41 +80,36 @@ public void testConstructors() { } @Test - public void testNullPointerException() { - try { - BigtableColumnFamily.newBuilder().setFamilyID(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex.getMessage()).isNotNull(); - } - try { - BigtableColumnFamily.newBuilder().setColumns(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex.getMessage()).isNotNull(); - } - try { - BigtableColumnFamily.newBuilder().setEncoding(null).build(); - Assert.fail(); - } catch (NullPointerException 
ex) { - assertThat(ex.getMessage()).isNotNull(); - } - try { - BigtableColumnFamily.newBuilder().setOnlyReadLatest(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex.getMessage()).isNotNull(); - } - try { - BigtableColumnFamily.newBuilder().setType(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex.getMessage()).isNotNull(); - } + void testNullPointerException() { + NullPointerException ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setFamilyID(null).build()); + assertThat(ex.getMessage()).isNotNull(); + ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setColumns(null).build()); + assertThat(ex.getMessage()).isNotNull(); + ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setEncoding(null).build()); + assertThat(ex.getMessage()).isNotNull(); + ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setOnlyReadLatest(null).build()); + assertThat(ex.getMessage()).isNotNull(); + ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setType(null).build()); + assertThat(ex.getMessage()).isNotNull(); } @Test - public void testIllegalStateException() { + void testIllegalStateException() { try { BigtableColumnFamily.newBuilder().build(); } catch (IllegalStateException ex) { @@ -123,14 +118,14 @@ public void testIllegalStateException() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareBigtableColumn(COL1, BigtableColumn.fromPb(COL1.toPb())); compareBigtableColumnFamily(TESTFAMILY, BigtableColumnFamily.fromPb(TESTFAMILY.toPb())); compareBigtableOptions(OPTIONS, BigtableOptions.fromPb(OPTIONS.toPb())); } @Test - public void testEquals() { + void testEquals() { compareBigtableColumn(COL1, COL1); compareBigtableColumnFamily(TESTFAMILY, TESTFAMILY); assertThat(TESTFAMILY.equals(TESTFAMILY)).isTrue(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CloneDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CloneDefinitionTest.java index 33bcf5f405..1a319c947c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CloneDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CloneDefinitionTest.java @@ -16,10 +16,10 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class CloneDefinitionTest { private static final TableId BASE_TABLE_ID = TableId.of("DATASET_NAME", "BASE_TABLE_NAME"); @@ -28,7 +28,7 @@ public class CloneDefinitionTest { CloneDefinition.newBuilder().setBaseTableId(BASE_TABLE_ID).setCloneTime(CLONE_TIME).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareCloneTableDefinition(CLONETABLE_DEFINITION, CLONETABLE_DEFINITION.toBuilder().build()); CloneDefinition cloneTableDefinition = CLONETABLE_DEFINITION.toBuilder().setCloneTime("2021-05-20T11:32:26.553Z").build(); @@ -36,7 +36,7 @@ public void testToBuilder() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(BASE_TABLE_ID, CLONETABLE_DEFINITION.getBaseTableId()); 
assertEquals(CLONE_TIME, CLONETABLE_DEFINITION.getCloneTime()); CloneDefinition cloneDefinition = @@ -45,7 +45,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { CloneDefinition cloneDefinition = CLONETABLE_DEFINITION.toBuilder().build(); assertTrue(CloneDefinition.fromPb(cloneDefinition.toPb()) instanceof CloneDefinition); compareCloneTableDefinition(cloneDefinition, CloneDefinition.fromPb(cloneDefinition.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java index 0cc680dddf..0c7c75306c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java @@ -16,10 +16,10 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ColumnReferenceTest { private static final ColumnReference COLUMN_REFERENCE = @@ -29,7 +29,7 @@ public class ColumnReferenceTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareColumnReferenceDefinition(COLUMN_REFERENCE, COLUMN_REFERENCE.toBuilder().build()); ColumnReference columnReference = COLUMN_REFERENCE.toBuilder() @@ -41,7 +41,7 @@ public void testToBuilder() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals("column1", COLUMN_REFERENCE.getReferencingColumn()); assertEquals("column2", COLUMN_REFERENCE.getReferencedColumn()); ColumnReference columnReference = @@ -54,7 +54,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { ColumnReference columnReference = COLUMN_REFERENCE.toBuilder().build(); assertTrue(ColumnReference.fromPb(columnReference.toPb()) instanceof ColumnReference); compareColumnReferenceDefinition( diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java index 4edc6f05d1..54f9b7a33a 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java @@ -16,14 +16,26 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.*; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; -import com.google.api.services.bigquery.model.*; +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.QueryParameter; +import com.google.api.services.bigquery.model.QueryParameterType; +import 
com.google.api.services.bigquery.model.QueryRequest; import com.google.api.services.bigquery.model.QueryResponse; +import com.google.api.services.bigquery.model.TableCell; +import com.google.api.services.bigquery.model.TableDataList; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; import com.google.cloud.ServiceOptions; import com.google.cloud.Tuple; import com.google.cloud.bigquery.spi.BigQueryRpcFactory; @@ -41,14 +53,14 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutionException; import java.util.concurrent.LinkedBlockingDeque; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) -public class ConnectionImplTest { +@ExtendWith(MockitoExtension.class) +class ConnectionImplTest { private BigQueryOptions options; private BigQueryRpcFactory rpcFactoryMock; private HttpBigQueryRpc bigqueryRpcMock; @@ -140,8 +152,8 @@ private BigQueryOptions createBigQueryOptionsForProject( .build(); } - @Before - public void setUp() { + @BeforeEach + void setUp() { rpcFactoryMock = mock(BigQueryRpcFactory.class); bigqueryRpcMock = mock(HttpBigQueryRpc.class); connectionMock = mock(Connection.class); @@ -164,7 +176,7 @@ public void setUp() { } @Test - public void testFastQuerySinglePage() throws BigQuerySQLException, IOException { + void testFastQuerySinglePage() throws BigQuerySQLException, IOException { com.google.api.services.bigquery.model.QueryResponse mockQueryRes = new QueryResponse().setSchema(FAST_QUERY_TABLESCHEMA).setJobComplete(true); when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( @@ -186,7 +198,7 @@ public void testFastQuerySinglePage() throws BigQuerySQLException, IOException { @Test // NOTE: This doesn't truly paginate.
Returns a response while mocking // processQueryResponseResults - public void testFastQueryMultiplePages() throws BigQuerySQLException, IOException { + void testFastQueryMultiplePages() throws BigQuerySQLException, IOException { com.google.api.services.bigquery.model.QueryResponse mockQueryRes = new QueryResponse() .setSchema(FAST_QUERY_TABLESCHEMA) @@ -211,13 +223,13 @@ public void testFastQueryMultiplePages() throws BigQuerySQLException, IOExceptio } @Test - public void testClose() throws BigQuerySQLException { + void testClose() throws BigQuerySQLException { boolean cancelled = connection.close(); assertTrue(cancelled); } @Test - public void testQueryDryRun() throws BigQuerySQLException, IOException { + void testQueryDryRun() throws BigQuerySQLException, IOException { List queryParametersMock = ImmutableList.of( new QueryParameter().setParameterType(new QueryParameterType().setType("STRING"))); @@ -251,7 +263,7 @@ public void testQueryDryRun() throws BigQuerySQLException, IOException { } @Test - public void testQueryDryRunNoQueryParameters() throws BigQuerySQLException, IOException { + void testQueryDryRunNoQueryParameters() throws BigQuerySQLException, IOException { com.google.api.services.bigquery.model.JobStatistics2 queryMock = new com.google.api.services.bigquery.model.JobStatistics2() .setSchema(FAST_QUERY_TABLESCHEMA); @@ -281,7 +293,7 @@ public void testQueryDryRunNoQueryParameters() throws BigQuerySQLException, IOEx } @Test - public void testParseDataTask() throws InterruptedException { + void testParseDataTask() throws InterruptedException { BlockingQueue, Boolean>> pageCache = new LinkedBlockingDeque<>(2); BlockingQueue> rpcResponseQueue = new LinkedBlockingDeque<>(2); @@ -306,7 +318,7 @@ public void testParseDataTask() throws InterruptedException { } @Test - public void testPopulateBuffer() throws InterruptedException { + void testPopulateBuffer() throws InterruptedException { BlockingQueue, Boolean>> pageCache = new LinkedBlockingDeque<>(2); @@ -341,7 +353,7 @@ public void testPopulateBuffer() throws InterruptedException { } @Test - public void testNextPageTask() throws InterruptedException { + void testNextPageTask() throws InterruptedException { BlockingQueue> rpcResponseQueue = new LinkedBlockingDeque<>(2); TableDataList mockTabledataList = new TableDataList() @@ -364,7 +376,7 @@ public void testNextPageTask() throws InterruptedException { } @Test - public void testGetQueryResultsFirstPage() throws IOException { + void testGetQueryResultsFirstPage() throws IOException { when(bigqueryRpcMock.getQueryResultsWithRowLimitSkipExceptionTranslation( any(String.class), any(String.class), @@ -386,7 +398,7 @@ public void testGetQueryResultsFirstPage() throws IOException { // calls executeSelect with a nonFast query and exercises createQueryJob @Test - public void testLegacyQuerySinglePage() throws BigQuerySQLException, IOException { + void testLegacyQuerySinglePage() throws BigQuerySQLException, IOException { ConnectionImpl connectionSpy = Mockito.spy(connection); com.google.api.services.bigquery.model.Job jobResponseMock = new com.google.api.services.bigquery.model.Job() @@ -419,7 +431,7 @@ public void testLegacyQuerySinglePage() throws BigQuerySQLException, IOException // calls executeSelect with a nonFast query where the query returns an empty result. 
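Editor's sketch, illustrative only and not part of the patch (every class and member name below is invented): the hunks in this commit keep applying the same four mechanical JUnit 4 -> JUnit 5 rewrites -- @RunWith(MockitoJUnitRunner.class) becomes @ExtendWith(MockitoExtension.class), @Before becomes @BeforeEach, test classes and methods drop the public modifier, and try { ...; fail(); } catch blocks collapse into assertThrows. In isolation the target idioms look like this:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;

// Hypothetical test class; it only demonstrates the idioms this commit migrates to.
@ExtendWith(MockitoExtension.class) // replaces @RunWith(MockitoJUnitRunner.class)
class JUnit5IdiomSketchTest {

  @Mock private Runnable task; // initialized by MockitoExtension before each test

  private String message;

  @BeforeEach // replaces JUnit 4's @Before
  void setUp() {
    message = "boom";
  }

  @Test // test classes and methods no longer need to be public
  void assertThrowsReplacesTryFailCatch() {
    // assertThrows returns the thrown exception, so the old
    // try { ...; fail(); } catch (SomeException ex) { ... } pattern
    // collapses into an assignment plus ordinary assertions.
    IllegalStateException ex =
        assertThrows(
            IllegalStateException.class,
            () -> {
              throw new IllegalStateException(message);
            });
    assertEquals("boom", ex.getMessage());
    Mockito.verifyNoInteractions(task); // the mock was never touched
  }
}

This assertThrows-and-assert pattern is exactly what replaces each try/fail/catch block in the hunks above and below.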
@Test - public void testLegacyQuerySinglePageEmptyResults() throws SQLException, IOException { + void testLegacyQuerySinglePageEmptyResults() throws SQLException, IOException { ConnectionImpl connectionSpy = Mockito.spy(connection); com.google.api.services.bigquery.model.Job jobResponseMock = new com.google.api.services.bigquery.model.Job() @@ -448,7 +460,7 @@ public void testLegacyQuerySinglePageEmptyResults() throws SQLException, IOExcep // exercises getSubsequentQueryResultsWithJob for fast running queries @Test - public void testFastQueryLongRunning() throws SQLException, IOException { + void testFastQueryLongRunning() throws SQLException, IOException { ConnectionImpl connectionSpy = Mockito.spy(connection); // emulating a fast query doReturn(true).when(connectionSpy).isFastQuerySupported(); @@ -479,7 +491,7 @@ public void testFastQueryLongRunning() throws SQLException, IOException { } @Test - public void testFastQueryLongRunningAsync() + void testFastQueryLongRunningAsync() throws SQLException, ExecutionException, InterruptedException, IOException { ConnectionImpl connectionSpy = Mockito.spy(connection); // emulating a fast query @@ -515,7 +527,7 @@ public void testFastQueryLongRunningAsync() } @Test - public void testFastQuerySinglePageAsync() + void testFastQuerySinglePageAsync() throws BigQuerySQLException, ExecutionException, InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse mockQueryRes = new QueryResponse().setSchema(FAST_QUERY_TABLESCHEMA).setJobComplete(true); @@ -540,7 +552,7 @@ public void testFastQuerySinglePageAsync() } @Test - public void testExecuteSelectSlowWithParamsAsync() + void testExecuteSelectSlowWithParamsAsync() throws BigQuerySQLException, ExecutionException, InterruptedException { ConnectionImpl connectionSpy = Mockito.spy(connection); List parameters = new ArrayList<>(); @@ -584,7 +596,7 @@ public void testExecuteSelectSlowWithParamsAsync() } @Test - public void testFastQueryMultiplePagesAsync() + void testFastQueryMultiplePagesAsync() throws BigQuerySQLException, ExecutionException, InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse mockQueryRes = new QueryResponse() @@ -616,7 +628,7 @@ public void testFastQueryMultiplePagesAsync() @Test // Emulates first page response using getQueryResultsFirstPage(jobId) and then subsequent pages // using getQueryResultsFirstPage(jobId) getSubsequentQueryResultsWithJob( - public void testLegacyQueryMultiplePages() throws SQLException, IOException { + void testLegacyQueryMultiplePages() throws SQLException, IOException { ConnectionImpl connectionSpy = Mockito.spy(connection); com.google.api.services.bigquery.model.JobStatistics jobStatistics = new com.google.api.services.bigquery.model.JobStatistics(); @@ -649,7 +661,7 @@ public void testLegacyQueryMultiplePages() throws SQLException, IOException { } @Test - public void testExecuteSelectSlow() throws BigQuerySQLException { + void testExecuteSelectSlow() throws BigQuerySQLException { ConnectionImpl connectionSpy = Mockito.spy(connection); doReturn(false).when(connectionSpy).isFastQuerySupported(); com.google.api.services.bigquery.model.JobStatistics jobStatistics = @@ -686,7 +698,7 @@ public void testExecuteSelectSlow() throws BigQuerySQLException { } @Test - public void testExecuteSelectSlowWithParams() throws BigQuerySQLException { + void testExecuteSelectSlowWithParams() throws BigQuerySQLException { ConnectionImpl connectionSpy = Mockito.spy(connection); List parameters = new ArrayList<>(); Map 
labels = new HashMap<>(); @@ -725,7 +737,7 @@ public void testExecuteSelectSlowWithParams() throws BigQuerySQLException { } @Test - public void testGetSubsequentQueryResultsWithJob() { + void testGetSubsequentQueryResultsWithJob() { ConnectionImpl connectionSpy = Mockito.spy(connection); JobId jobId = mock(JobId.class); BigQueryResultStats bqRsStats = mock(BigQueryResultStats.class); @@ -749,7 +761,7 @@ public void testGetSubsequentQueryResultsWithJob() { } @Test - public void testUseReadApi() { + void testUseReadApi() { ConnectionSettings connectionSettingsSpy = Mockito.spy(ConnectionSettings.class); doReturn(true).when(connectionSettingsSpy).getUseReadAPI(); doReturn(2).when(connectionSettingsSpy).getTotalToPageRowCountRatio(); @@ -775,7 +787,7 @@ public void testUseReadApi() { } @Test - public void testGetPageCacheSize() { + void testGetPageCacheSize() { ConnectionImpl connectionSpy = Mockito.spy(connection); // number of cached pages should be within a range assertTrue(connectionSpy.getPageCacheSize(10000, QUERY_SCHEMA) >= 3); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionPropertyTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionPropertyTest.java index 9177720e8f..bc5def560b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionPropertyTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionPropertyTest.java @@ -18,7 +18,7 @@ import static com.google.common.truth.Truth.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ConnectionPropertyTest { @@ -28,7 +28,7 @@ public class ConnectionPropertyTest { ConnectionProperty.newBuilder().setKey(KEY).setValue(VALUE).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareConnectionProperty(CONNECTION_PROPERTY, CONNECTION_PROPERTY.toBuilder().build()); ConnectionProperty property = CONNECTION_PROPERTY.toBuilder().setKey("time-zone").build(); assertThat(property.getKey()).isEqualTo("time-zone"); @@ -37,19 +37,19 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { ConnectionProperty connectionProperty = ConnectionProperty.of(KEY, VALUE); compareConnectionProperty(connectionProperty, connectionProperty.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertThat(CONNECTION_PROPERTY.getKey()).isEqualTo(KEY); assertThat(CONNECTION_PROPERTY.getValue()).isEqualTo(VALUE); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareConnectionProperty( CONNECTION_PROPERTY, ConnectionProperty.fromPb(CONNECTION_PROPERTY.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionSettingsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionSettingsTest.java index 8523825bca..29c29ed55c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionSettingsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionSettingsTest.java @@ -16,7 +16,7 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; @@ -26,9 +26,9 @@ import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; 
+import org.junit.jupiter.api.Test; -public class ConnectionSettingsTest { +class ConnectionSettingsTest { private static final String TEST_PROJECT_ID = "test-project-id"; private static final DatasetId DATASET_ID = DatasetId.of("dataset"); private static final TableId TABLE_ID = TableId.of("dataset", "table"); @@ -116,19 +116,19 @@ public class ConnectionSettingsTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareConnectionSettings(CONNECTION_SETTINGS, CONNECTION_SETTINGS.toBuilder().build()); } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { ConnectionSettings connectionSettings = ConnectionSettings.newBuilder().setDefaultDataset(DATASET_ID).build(); compareConnectionSettings(connectionSettings, connectionSettings.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(REQUEST_TIMEOUT, CONNECTION_SETTINGS.getRequestTimeout()); assertEquals(NUM_BUFFERED_ROWS, CONNECTION_SETTINGS.getNumBufferedRows()); assertEquals(MAX_RESULTS, CONNECTION_SETTINGS.getMaxResults()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java index 3f21bf1c05..97538f2991 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.WriteDisposition; @@ -28,9 +28,9 @@ import com.google.common.collect.Lists; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class CopyJobConfigurationTest { +class CopyJobConfigurationTest { private static final String TEST_PROJECT_ID = "test-project-id"; private static final TableId SOURCE_TABLE = TableId.of("dataset", "sourceTable"); @@ -64,7 +64,7 @@ public class CopyJobConfigurationTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareCopyJobConfiguration(COPY_JOB_CONFIGURATION, COPY_JOB_CONFIGURATION.toBuilder().build()); compareCopyJobConfiguration( COPY_JOB_CONFIGURATION_MULTIPLE_TABLES, @@ -79,7 +79,7 @@ public void testToBuilder() { } @Test - public void testOf() { + void testOf() { CopyJobConfiguration job = CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLES); assertEquals(DESTINATION_TABLE, job.getDestinationTable()); assertEquals(SOURCE_TABLES, job.getSourceTables()); @@ -89,14 +89,14 @@ public void testOf() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { CopyJobConfiguration jobConfiguration = CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLES); compareCopyJobConfiguration(jobConfiguration, jobConfiguration.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(DESTINATION_TABLE, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.getDestinationTable()); assertEquals(SOURCE_TABLES, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.getSourceTables()); 
assertEquals(CREATE_DISPOSITION, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.getCreateDisposition()); @@ -110,7 +110,7 @@ public void testBuilder() { } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { assertNotNull(COPY_JOB_CONFIGURATION.toPb().getCopy()); assertNull(COPY_JOB_CONFIGURATION.toPb().getExtract()); assertNull(COPY_JOB_CONFIGURATION.toPb().getLoad()); @@ -133,7 +133,7 @@ public void testToPbAndFromPb() { } @Test - public void testSetProjectId() { + void testSetProjectId() { CopyJobConfiguration configuration = COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.setProjectId(TEST_PROJECT_ID); assertEquals(TEST_PROJECT_ID, configuration.getDestinationTable().getProject()); @@ -143,7 +143,7 @@ public void testSetProjectId() { } @Test - public void testSetProjectIdDoNotOverride() { + void testSetProjectIdDoNotOverride() { CopyJobConfiguration configuration = COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toBuilder() .setSourceTables( @@ -165,7 +165,7 @@ public TableId apply(TableId tableId) { } @Test - public void testGetType() { + void testGetType() { assertEquals(JobConfiguration.Type.COPY, COPY_JOB_CONFIGURATION.getType()); assertEquals(JobConfiguration.Type.COPY, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.getType()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java index fb0293a97a..1c31540fc0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class CsvOptionsTest { @@ -46,7 +46,7 @@ public class CsvOptionsTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareCsvOptions(CSV_OPTIONS, CSV_OPTIONS.toBuilder().build()); CsvOptions csvOptions = CSV_OPTIONS.toBuilder().setFieldDelimiter(";").build(); assertEquals(";", csvOptions.getFieldDelimiter()); @@ -55,13 +55,13 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { CsvOptions csvOptions = CsvOptions.newBuilder().setFieldDelimiter("|").build(); assertEquals(csvOptions, csvOptions.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(FormatOptions.CSV, CSV_OPTIONS.getType()); assertEquals(ALLOW_JAGGED_ROWS, CSV_OPTIONS.allowJaggedRows()); assertEquals(ALLOW_QUOTED_NEWLINE, CSV_OPTIONS.allowQuotedNewLines()); @@ -75,7 +75,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareCsvOptions(CSV_OPTIONS, CsvOptions.fromPb(CSV_OPTIONS.toPb())); CsvOptions csvOptions = CsvOptions.newBuilder().setAllowJaggedRows(ALLOW_JAGGED_ROWS).build(); compareCsvOptions(csvOptions, CsvOptions.fromPb(csvOptions.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetIdTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetIdTest.java index bacf7b2b0e..dc2ba28998 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetIdTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetIdTest.java @@ -16,17 
+16,17 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class DatasetIdTest { +class DatasetIdTest { private static final DatasetId DATASET = DatasetId.of("dataset"); private static final DatasetId DATASET_COMPLETE = DatasetId.of("project", "dataset"); @Test - public void testOf() { + void testOf() { assertEquals(null, DATASET.getProject()); assertEquals("dataset", DATASET.getDataset()); assertEquals("project", DATASET_COMPLETE.getProject()); @@ -34,19 +34,19 @@ public void testOf() { } @Test - public void testEquals() { + void testEquals() { compareDatasetIds(DATASET, DatasetId.of("dataset")); compareDatasetIds(DATASET_COMPLETE, DatasetId.of("project", "dataset")); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareDatasetIds(DATASET, DatasetId.fromPb(DATASET.toPb())); compareDatasetIds(DATASET_COMPLETE, DatasetId.fromPb(DATASET_COMPLETE.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals(DATASET_COMPLETE, DATASET.setProjectId("project")); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java index 1b75195cec..cb9768de4d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class DatasetInfoTest { @@ -104,7 +104,7 @@ public class DatasetInfoTest { DATASET_INFO.toBuilder().setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS_5_DAYS).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareDatasets(DATASET_INFO, DATASET_INFO.toBuilder().build()); compareDatasets( DATASET_INFO_COMPLETE_WITH_IAM_MEMBER, @@ -122,13 +122,13 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { DatasetInfo datasetInfo = DatasetInfo.newBuilder(DATASET_ID).build(); assertEquals(datasetInfo, datasetInfo.toBuilder().build()); } @Test - public void testToBuilderWithExternalDatasetReference() { + void testToBuilderWithExternalDatasetReference() { compareDatasets( DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE, DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE.toBuilder().build()); @@ -149,7 +149,7 @@ public void testToBuilderWithExternalDatasetReference() { } @Test - public void testBuilder() { + void testBuilder() { assertNull(DATASET_INFO.getDatasetId().getProject()); assertEquals(DATASET_ID, DATASET_INFO.getDatasetId()); assertEquals(ACCESS_RULES, DATASET_INFO.getAcl()); @@ -189,7 +189,7 @@ public void testBuilder() { } @Test - public void testOf() { + void 
testOf() { DatasetInfo datasetInfo = DatasetInfo.of(DATASET_ID.getDataset()); assertEquals(DATASET_ID, datasetInfo.getDatasetId()); assertNull(datasetInfo.getAcl()); @@ -230,7 +230,7 @@ public void testOf() { } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareDatasets(DATASET_INFO_COMPLETE, DatasetInfo.fromPb(DATASET_INFO_COMPLETE.toPb())); compareDatasets( DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE, @@ -240,12 +240,12 @@ public void testToPbAndFromPb() { } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals(DATASET_INFO_COMPLETE, DATASET_INFO.setProjectId("project")); } @Test - public void testSetMaxTimeTravelHours() { + void testSetMaxTimeTravelHours() { assertNotEquals( DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS.getMaxTimeTravelHours(), DATASET_INFO.getMaxTimeTravelHours()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java index d138e3cb51..5e19e8c825 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java @@ -16,13 +16,13 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -35,15 +35,13 @@ import com.google.common.collect.Iterables; import java.util.List; import java.util.Map; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) -public class DatasetTest { +@ExtendWith(MockitoExtension.class) +class DatasetTest { private static final DatasetId DATASET_ID = DatasetId.of("dataset"); private static final List ACCESS_RULES = @@ -95,15 +93,13 @@ public class DatasetTest { .setConnection("connection") .build(); - @Rule public MockitoRule rule; - private BigQuery bigquery; private BigQueryOptions mockOptions; private Dataset expectedDataset; private Dataset dataset; - @Before - public void setUp() { + @BeforeEach + void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); when(bigquery.getOptions()).thenReturn(mockOptions); @@ -112,7 +108,7 @@ public void setUp() { } @Test - public void testBuilder() { + void testBuilder() { Dataset builtDataset = new Dataset.Builder(bigquery, DATASET_ID) .setAcl(ACCESS_RULES) @@ -148,12 +144,12 @@ public void 
testBuilder() { } @Test - public void testToBuilder() { + void testToBuilder() { compareDataset(expectedDataset, expectedDataset.toBuilder().build()); } @Test - public void testExists_True() { + void testExists_True() { BigQuery.DatasetOption[] expectedOptions = {BigQuery.DatasetOption.fields()}; when(bigquery.getDataset(DATASET_INFO.getDatasetId(), expectedOptions)) .thenReturn(expectedDataset); @@ -162,7 +158,7 @@ public void testExists_True() { } @Test - public void testExists_False() { + void testExists_False() { BigQuery.DatasetOption[] expectedOptions = {BigQuery.DatasetOption.fields()}; when(bigquery.getDataset(DATASET_INFO.getDatasetId(), expectedOptions)).thenReturn(null); assertFalse(dataset.exists()); @@ -170,7 +166,7 @@ public void testExists_False() { } @Test - public void testReload() { + void testReload() { DatasetInfo updatedInfo = DATASET_INFO.toBuilder().setDescription("Description").build(); Dataset expectedDataset = new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedInfo)); when(bigquery.getDataset(DATASET_INFO.getDatasetId().getDataset())).thenReturn(expectedDataset); @@ -180,14 +176,14 @@ public void testReload() { } @Test - public void testReloadNull() { + void testReloadNull() { when(bigquery.getDataset(DATASET_INFO.getDatasetId().getDataset())).thenReturn(null); assertNull(dataset.reload()); verify(bigquery).getDataset(DATASET_INFO.getDatasetId().getDataset()); } @Test - public void testReloadWithOptions() { + void testReloadWithOptions() { DatasetInfo updatedInfo = DATASET_INFO.toBuilder().setDescription("Description").build(); Dataset expectedDataset = new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedInfo)); when(bigquery.getDataset( @@ -200,7 +196,7 @@ public void testReloadWithOptions() { } @Test - public void testUpdate() { + void testUpdate() { Dataset expectedUpdatedDataset = expectedDataset.toBuilder().setDescription("Description").build(); when(bigquery.update(eq(expectedDataset))).thenReturn(expectedUpdatedDataset); @@ -210,7 +206,7 @@ public void testUpdate() { } @Test - public void testUpdateWithOptions() { + void testUpdateWithOptions() { Dataset expectedUpdatedDataset = expectedDataset.toBuilder().setDescription("Description").build(); when(bigquery.update(eq(expectedDataset), eq(BigQuery.DatasetOption.fields()))) @@ -221,21 +217,21 @@ public void testUpdateWithOptions() { } @Test - public void testDeleteTrue() { + void testDeleteTrue() { when(bigquery.delete(DATASET_INFO.getDatasetId())).thenReturn(true); assertTrue(dataset.delete()); verify(bigquery).delete(DATASET_INFO.getDatasetId()); } @Test - public void testDeleteFalse() { + void testDeleteFalse() { when(bigquery.delete(DATASET_INFO.getDatasetId())).thenReturn(false); assertFalse(dataset.delete()); verify(bigquery).delete(DATASET_INFO.getDatasetId()); } @Test - public void testList() { + void testList() { List
tableResults = ImmutableList.of( new Table(bigquery, new Table.BuilderImpl(TABLE_INFO1)), @@ -251,7 +247,7 @@ public void testList() { } @Test - public void testListWithOptions() { + void testListWithOptions() { List
tableResults = ImmutableList.of( new Table(bigquery, new Table.BuilderImpl(TABLE_INFO1)), @@ -269,7 +265,7 @@ public void testListWithOptions() { } @Test - public void testGet() { + void testGet() { Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO1)); when(bigquery.getTable(TABLE_INFO1.getTableId())).thenReturn(expectedTable); Table table = dataset.get(TABLE_INFO1.getTableId().getTable()); @@ -279,7 +275,7 @@ public void testGet() { } @Test - public void testGetTableWithNewProjectId() { + void testGetTableWithNewProjectId() { Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO4)); when(bigquery.getTable(TABLE_ID1, null)).thenReturn(expectedTable); Table table = bigquery.getTable(TABLE_ID1, null); @@ -289,14 +285,14 @@ public void testGetTableWithNewProjectId() { } @Test - public void testGetNull() { + void testGetNull() { when(bigquery.getTable(TABLE_INFO1.getTableId())).thenReturn(null); assertNull(dataset.get(TABLE_INFO1.getTableId().getTable())); verify(bigquery).getTable(TABLE_INFO1.getTableId()); } @Test - public void testGetWithOptions() { + void testGetWithOptions() { Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO1)); when(bigquery.getTable(TABLE_INFO1.getTableId(), BigQuery.TableOption.fields())) .thenReturn(expectedTable); @@ -307,7 +303,7 @@ public void testGetWithOptions() { } @Test - public void testCreateTable() { + void testCreateTable() { Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO1)); when(bigquery.create(TABLE_INFO1)).thenReturn(expectedTable); Table table = dataset.create(TABLE_INFO1.getTableId().getTable(), TABLE_DEFINITION); @@ -316,7 +312,7 @@ public void testCreateTable() { } @Test - public void testCreateTableWithOptions() { + void testCreateTableWithOptions() { Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO1)); when(bigquery.create(TABLE_INFO1, BigQuery.TableOption.fields())).thenReturn(expectedTable); Table table = @@ -327,17 +323,17 @@ public void testCreateTableWithOptions() { } @Test - public void testBigQuery() { + void testBigQuery() { assertSame(bigquery, expectedDataset.getBigQuery()); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareDataset(expectedDataset, Dataset.fromPb(bigquery, expectedDataset.toPb())); } @Test - public void testExternalDatasetReference() { + void testExternalDatasetReference() { Dataset datasetWithExternalDatasetReference = new Dataset.Builder(bigquery, DATASET_ID) .setAcl(ACCESS_RULES) diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatastoreBackupOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatastoreBackupOptionsTest.java index af14108032..0102781190 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatastoreBackupOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatastoreBackupOptionsTest.java @@ -16,20 +16,20 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class DatastoreBackupOptionsTest { +class DatastoreBackupOptionsTest { private static final List PROJECTION_FIELDS = ImmutableList.of("field1", "field2"); private static final DatastoreBackupOptions BACKUP_OPTIONS = 
DatastoreBackupOptions.newBuilder().setProjectionFields(PROJECTION_FIELDS).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareDatastoreBackupOptions(BACKUP_OPTIONS, BACKUP_OPTIONS.toBuilder().build()); List fields = ImmutableList.of("field1", "field2"); DatastoreBackupOptions backupOptions = @@ -40,14 +40,14 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { DatastoreBackupOptions backupOptions = DatastoreBackupOptions.newBuilder().setProjectionFields(PROJECTION_FIELDS).build(); assertEquals(backupOptions, backupOptions.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(FormatOptions.DATASTORE_BACKUP, BACKUP_OPTIONS.getType()); assertEquals(PROJECTION_FIELDS, BACKUP_OPTIONS.getProjectionFields()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DmlStatsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DmlStatsTest.java index 48950831a4..f165b60e39 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DmlStatsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DmlStatsTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class DmlStatsTest { @@ -33,14 +33,14 @@ public class DmlStatsTest { .build(); @Test - public void testBuilder() { + void testBuilder() { assertEquals(DELETED_ROW_COUNT, DML_STATS.getDeletedRowCount()); assertEquals(UPDATED_ROW_COUNT, DML_STATS.getUpdatedRowCount()); assertEquals(INSERTED_ROW_COUNT, DML_STATS.getInsertedRowCount()); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareDmlStats(DML_STATS, DmlStats.fromPb(DML_STATS.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalDatasetReferenceTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalDatasetReferenceTest.java index 6d241948b8..26dfcd5dcc 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalDatasetReferenceTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalDatasetReferenceTest.java @@ -16,10 +16,10 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ExternalDatasetReferenceTest { private static final String EXTERNAL_SOURCE = "test_source"; @@ -31,7 +31,7 @@ public class ExternalDatasetReferenceTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareExternalDatasetReference( EXTERNAL_DATASET_REFERENCE, EXTERNAL_DATASET_REFERENCE.toBuilder().build()); ExternalDatasetReference externalDatasetReference = @@ -40,7 +40,7 @@ public void testToBuilder() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(EXTERNAL_SOURCE, EXTERNAL_DATASET_REFERENCE.getExternalSource()); assertEquals(CONNECTION, EXTERNAL_DATASET_REFERENCE.getConnection()); ExternalDatasetReference externalDatasetReference = @@ -52,7 +52,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { 
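
The hunks above all apply the same mechanical recipe: swap org.junit.Assert for org.junit.jupiter.api.Assertions, swap org.junit.Test for org.junit.jupiter.api.Test, and drop the public modifiers that JUnit 4 required. A minimal sketch of the pattern (CoffeeTest and its contents are illustrative, not part of this library):

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import org.junit.jupiter.api.Test;

    // JUnit 5's Jupiter engine discovers package-private classes and methods,
    // so the public modifiers that JUnit 4 insisted on can be dropped.
    class CoffeeTest {

      @Test
      void testBrew() {
        // Same assertion names as org.junit.Assert, different package.
        assertEquals(2, 1 + 1);
      }
    }
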
ExternalDatasetReference externalDatasetReference = EXTERNAL_DATASET_REFERENCE.toBuilder().build(); assertTrue( diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java index cb7578c75a..480b8a4972 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java @@ -16,16 +16,15 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import com.google.cloud.bigquery.ExternalTableDefinition.SourceColumnMatch; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class ExternalTableDefinitionTest { +class ExternalTableDefinitionTest { private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); private static final List DECIMAL_TARGET_TYPES = @@ -98,7 +97,7 @@ public class ExternalTableDefinitionTest { ExternalTableDefinition.newBuilder(SOURCE_URIS, TABLE_SCHEMA, PARQUET_OPTIONS).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareExternalTableDefinition( EXTERNAL_TABLE_DEFINITION, EXTERNAL_TABLE_DEFINITION.toBuilder().build()); ExternalTableDefinition externalTableDefinition = @@ -117,23 +116,21 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { ExternalTableDefinition externalTableDefinition = ExternalTableDefinition.of(SOURCE_URIS, TABLE_SCHEMA, FormatOptions.json()); assertEquals(externalTableDefinition, externalTableDefinition.toBuilder().build()); } @Test - public void testTypeNullPointerException() { - try { - EXTERNAL_TABLE_DEFINITION.toBuilder().setType(null).build(); - } catch (NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + void testTypeNullPointerException() { + org.junit.jupiter.api.Assertions.assertThrows( + NullPointerException.class, + () -> EXTERNAL_TABLE_DEFINITION.toBuilder().setType(null).build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(TableDefinition.Type.EXTERNAL, EXTERNAL_TABLE_DEFINITION.getType()); assertEquals(COMPRESSION, EXTERNAL_TABLE_DEFINITION.getCompression()); assertEquals(CONNECTION_ID, EXTERNAL_TABLE_DEFINITION.getConnectionId()); @@ -157,7 +154,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareExternalTableDefinition( EXTERNAL_TABLE_DEFINITION, ExternalTableDefinition.fromPb(EXTERNAL_TABLE_DEFINITION.toPb())); @@ -168,7 +165,7 @@ public void testToAndFromPb() { } @Test - public void testToAndFromPbParquet() { + void testToAndFromPbParquet() { compareExternalTableDefinition( EXTERNAL_TABLE_DEFINITION_PARQUET, ExternalTableDefinition.fromPb(EXTERNAL_TABLE_DEFINITION_PARQUET.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java index 2bf1e80a23..d7ce318f1b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java +++ 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java @@ -16,15 +16,15 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ExtractJobConfigurationTest { @@ -88,7 +88,7 @@ public class ExtractJobConfigurationTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareExtractJobConfiguration( EXTRACT_CONFIGURATION, EXTRACT_CONFIGURATION.toBuilder().build()); ExtractJobConfiguration job = diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java index cf217b25c7..7821b93219 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java @@ -15,10 +15,10 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.api.services.bigquery.model.QueryParameterType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class FieldElementTypeTest { private static final FieldElementType FIELD_ELEMENT_TYPE = diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldListTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldListTest.java index 999bbf1b02..9f63716427 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldListTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldListTest.java @@ -16,13 +16,14 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class FieldListTest { +class FieldListTest { private static final String FIELD_NAME1 = "StringField"; private static final String FIELD_NAME2 = "IntegerField"; private static final String FIELD_NAME3 = "RecordField"; @@ -63,7 +64,7 @@ public class FieldListTest { private final FieldList fieldsSchema = FieldList.of(fieldSchema1, fieldSchema2, fieldSchema3); @Test - public void testGetByName() { + void testGetByName() { assertEquals(fieldSchema1, fieldsSchema.get(FIELD_NAME1)); assertEquals(fieldSchema2, fieldsSchema.get(FIELD_NAME2)); assertEquals(fieldSchema3, fieldsSchema.get(FIELD_NAME3)); @@ -76,34 +77,26 @@ public void testGetByName() { assertEquals(3, fieldsSchema.size()); - IllegalArgumentException exception = null; - try { - fieldsSchema.get(FIELD_NAME4); - } catch (IllegalArgumentException e) { - exception = e; - } + IllegalArgumentException 
exception = + assertThrows(IllegalArgumentException.class, () -> fieldsSchema.get(FIELD_NAME4)); assertNotNull(exception); } @Test - public void testGetByIndex() { + void testGetByIndex() { assertEquals(fieldSchema1, fieldsSchema.get(0)); assertEquals(fieldSchema2, fieldsSchema.get(1)); assertEquals(fieldSchema3, fieldsSchema.get(2)); assertEquals(3, fieldsSchema.size()); - IndexOutOfBoundsException exception = null; - try { - fieldsSchema.get(4); - } catch (IndexOutOfBoundsException e) { - exception = e; - } + IndexOutOfBoundsException exception = + assertThrows(IndexOutOfBoundsException.class, () -> fieldsSchema.get(4)); assertNotNull(exception); } @Test - public void testGetRecordSchema() { + void testGetRecordSchema() { assertEquals(2, fieldSchema3.getSubFields().size()); assertEquals(fieldSchema1, fieldSchema3.getSubFields().get(FIELD_NAME1)); assertEquals(fieldSchema2, fieldSchema3.getSubFields().get(FIELD_NAME2)); @@ -122,7 +115,7 @@ public void testGetRecordSchema() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { assertEquals(fieldsSchema, FieldList.of(fieldSchema1, fieldSchema2, fieldSchema3)); assertNotEquals(fieldsSchema, FieldList.of(fieldSchema1, fieldSchema3)); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java index d7c5e25a2e..72f8bb3e82 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java @@ -16,15 +16,15 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class FieldTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java index 5ade7c2298..dd5092b1c1 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java @@ -16,9 +16,10 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.api.client.util.Data; import com.google.api.services.bigquery.model.TableCell; @@ -28,9 +29,9 @@ import com.google.common.io.BaseEncoding; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class FieldValueListTest { +class FieldValueListTest { private static final byte[] BYTES = {0xD, 0xE, 0xA, 0xD}; private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES); private static final TableCell booleanPb = new TableCell().setV("false"); @@ -138,7 +139,7 @@ public class 
FieldValueListTest { schemaLosslessTimestamp); @Test - public void testFromPb() { + void testFromPb() { assertEquals(fieldValues, FieldValueList.fromPb(fieldValuesPb, schema)); // Schema does not influence values equality assertEquals(fieldValues, FieldValueList.fromPb(fieldValuesPb, null)); @@ -151,7 +152,7 @@ public void testFromPb() { } @Test - public void testGetByIndex() { + void testGetByIndex() { assertEquals(11, fieldValues.size()); assertEquals(booleanFv, fieldValues.get(0)); assertEquals(integerFv, fieldValues.get(1)); @@ -173,7 +174,7 @@ public void testGetByIndex() { } @Test - public void testGetByName() { + void testGetByName() { assertEquals(11, fieldValues.size()); assertEquals(booleanFv, fieldValues.get("first")); assertEquals(integerFv, fieldValues.get("second")); @@ -195,7 +196,7 @@ public void testGetByName() { } @Test - public void testNullSchema() { + void testNullSchema() { FieldValueList fieldValuesNoSchema = FieldValueList.of( ImmutableList.of( @@ -213,25 +214,15 @@ public void testNullSchema() { assertEquals(fieldValues, fieldValuesNoSchema); - UnsupportedOperationException exception = null; - try { - fieldValuesNoSchema.get("first"); - } catch (UnsupportedOperationException e) { - exception = e; - } - + UnsupportedOperationException exception = + assertThrows(UnsupportedOperationException.class, () -> fieldValuesNoSchema.get("first")); assertNotNull(exception); } @Test - public void testGetNonExistentField() { - IllegalArgumentException exception = null; - try { - fieldValues.get("nonexistent"); - } catch (IllegalArgumentException e) { - exception = e; - } - + void testGetNonExistentField() { + IllegalArgumentException exception = + assertThrows(IllegalArgumentException.class, () -> fieldValues.get("nonexistent")); assertNotNull(exception); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java index 4ec527f7ce..958e206598 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.api.client.util.Data; import com.google.api.services.bigquery.model.TableCell; @@ -33,7 +33,7 @@ import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.threeten.extra.PeriodDuration; public class FieldValueTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java index 1ebd93ef42..2dfacda542 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java @@ -16,14 +16,14 @@ package 
com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Collections; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class ForeignKeyTest { +class ForeignKeyTest { private static final TableId TABLE_ID = TableId.of("project", "dataset", "table"); private static final ColumnReference COLUMN_REFERENCE = @@ -39,7 +39,7 @@ public class ForeignKeyTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareForeignKeyDefinition(FOREIGN_KEY, FOREIGN_KEY.toBuilder().build()); TableId referencedTable = TableId.of("project1", "dataset1", "table1"); ArrayList columnReferences = new ArrayList<>(); @@ -65,7 +65,7 @@ public void testToBuilder() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals("foreign_key", FOREIGN_KEY.getName()); assertEquals(TABLE_ID, FOREIGN_KEY.getReferencedTable()); assertEquals(Collections.singletonList(COLUMN_REFERENCE), FOREIGN_KEY.getColumnReferences()); @@ -80,7 +80,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { ForeignKey foreignKey = FOREIGN_KEY.toBuilder().build(); assertTrue(ForeignKey.fromPb(foreignKey.toPb()) instanceof ForeignKey); compareForeignKeyDefinition(foreignKey, ForeignKey.fromPb(foreignKey.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FormatOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FormatOptionsTest.java index d3cfb6e330..e8642e86e2 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FormatOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FormatOptionsTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class FormatOptionsTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java index efbee79b69..7aae673d3d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java @@ -18,7 +18,7 @@ import static com.google.common.truth.Truth.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class GoogleSheetsOptionsTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/HivePartitioningOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/HivePartitioningOptionsTest.java index 51baf918b8..401ab07b72 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/HivePartitioningOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/HivePartitioningOptionsTest.java @@ -20,9 +20,9 @@ import java.util.Arrays; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class HivePartitioningOptionsTest { +class HivePartitioningOptionsTest { private static final String MODE = "STRING"; private static final String SOURCE_URI_PREFIX = "gs://bucket/path_to_table"; @@ -37,7 +37,7 @@ 
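
GoogleSheetsOptionsTest and HivePartitioningOptionsTest only need their @Test import changed because they assert through Google Truth, which does not depend on the test engine. A sketch of why such tests migrate almost for free (TruthStyleTest is illustrative):

    import static com.google.common.truth.Truth.assertThat;

    import org.junit.jupiter.api.Test;

    class TruthStyleTest {

      @Test
      void truthIsEngineAgnostic() {
        // Truth carries its own assertion and failure machinery, so switching
        // the engine from JUnit 4 to Jupiter touches nothing but the annotation.
        assertThat(1 + 1).isEqualTo(2);
      }
    }
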
public class HivePartitioningOptionsTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareHivePartitioningOptions( HIVE_PARTITIONING_OPTIONS, HIVE_PARTITIONING_OPTIONS.toBuilder().build()); HivePartitioningOptions options = HIVE_PARTITIONING_OPTIONS.toBuilder().setMode("AUTO").build(); @@ -47,13 +47,13 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { HivePartitioningOptions options = HivePartitioningOptions.newBuilder().build(); compareHivePartitioningOptions(options, options.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertThat(HIVE_PARTITIONING_OPTIONS.getMode()).isEqualTo(MODE); assertThat(HIVE_PARTITIONING_OPTIONS.getRequirePartitionFilter()) .isEqualTo(REQUIRE_PARTITION_FILTER); @@ -61,7 +61,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareHivePartitioningOptions( HIVE_PARTITIONING_OPTIONS, HivePartitioningOptions.fromPb(HIVE_PARTITIONING_OPTIONS.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllRequestTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllRequestTest.java index 4ee1ca13f3..d687e75b34 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllRequestTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllRequestTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static com.google.common.truth.Truth.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -24,7 +25,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class InsertAllRequestTest { @@ -214,11 +215,11 @@ public void testEquals() { compareInsertAllRequest(INSERT_ALL_REQUEST11, INSERT_ALL_REQUEST11); } - @Test(expected = UnsupportedOperationException.class) + @Test public void testImmutable() { - InsertAllRequest.RowToInsert row = - InsertAllRequest.RowToInsert.of(new HashMap()); - row.getContent().put("zip", "zap"); + InsertAllRequest.RowToInsert row = InsertAllRequest.RowToInsert.of(new HashMap<>()); + + assertThrows(UnsupportedOperationException.class, () -> row.getContent().put("zip", "zap")); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllResponseTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllResponseTest.java index b39066a6a2..9b30e25860 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllResponseTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllResponseTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import 
org.junit.jupiter.api.Test; public class InsertAllResponseTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobIdTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobIdTest.java index 05ae7cefe4..7934ad1204 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobIdTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobIdTest.java @@ -16,17 +16,17 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class JobIdTest { +class JobIdTest { private static final JobId JOB = JobId.of("job"); private static final JobId JOB_COMPLETE = JobId.of("project", "job"); @Test - public void testOf() { + void testOf() { assertEquals(null, JOB.getProject()); assertEquals("job", JOB.getJob()); assertEquals("project", JOB_COMPLETE.getProject()); @@ -34,19 +34,19 @@ public void testOf() { } @Test - public void testEquals() { + void testEquals() { compareJobs(JOB, JobId.of("job")); compareJobs(JOB_COMPLETE, JobId.of("project", "job")); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareJobs(JOB, JobId.fromPb(JOB.toPb())); compareJobs(JOB_COMPLETE, JobId.fromPb(JOB_COMPLETE.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals(JOB_COMPLETE, JOB.setProjectId("project")); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobInfoTest.java index 71825f0a54..6c7f9b245c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobInfoTest.java @@ -16,10 +16,10 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; @@ -32,7 +32,7 @@ import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class JobInfoTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java index 2a1353f5d8..289548113b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java @@ -17,7 +17,7 @@ package com.google.cloud.bigquery; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.cloud.bigquery.JobStatistics.CopyStatistics; import com.google.cloud.bigquery.JobStatistics.ExtractStatistics; @@ -33,7 +33,7 @@ import com.google.common.collect.ImmutableList; import java.util.List; import java.util.UUID; -import 
org.junit.Test; +import org.junit.jupiter.api.Test; public class JobStatisticsTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatusTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatusTest.java index bb463d1cec..1c20b72404 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatusTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatusTest.java @@ -16,13 +16,13 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class JobStatusTest { +class JobStatusTest { private static final JobStatus.State STATE = JobStatus.State.DONE; private static final BigQueryError ERROR = @@ -36,7 +36,7 @@ public class JobStatusTest { private static final JobStatus JOB_STATUS_INCOMPLETE2 = new JobStatus(STATE, null, null); @Test - public void testConstructor() { + void testConstructor() { assertEquals(STATE, JOB_STATUS.getState()); assertEquals(ERROR, JOB_STATUS.getError()); assertEquals(ALL_ERRORS, JOB_STATUS.getExecutionErrors()); @@ -51,7 +51,7 @@ public void testConstructor() { } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareStatus(JOB_STATUS, JobStatus.fromPb(JOB_STATUS.toPb())); compareStatus(JOB_STATUS_INCOMPLETE1, JobStatus.fromPb(JOB_STATUS_INCOMPLETE1.toPb())); compareStatus(JOB_STATUS_INCOMPLETE2, JobStatus.fromPb(JOB_STATUS_INCOMPLETE2.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java index e6d249af46..f85c2f76c4 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java @@ -18,14 +18,14 @@ import static com.google.common.collect.ObjectArrays.concat; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; @@ -42,16 +42,13 @@ import com.google.cloud.bigquery.JobStatus.State; import com.google.common.collect.ImmutableList; import java.time.Duration; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; 
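
This JobTest hunk shows the Mockito half of the migration: @RunWith(MockitoJUnitRunner.class) plus the @Rule MockitoRule field collapse into a single @ExtendWith(MockitoExtension.class), and @Before becomes @BeforeEach. A self-contained sketch of the shape (WidgetTest and its Service interface are hypothetical):

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.ExtendWith;
    import org.mockito.junit.jupiter.MockitoExtension;

    // One extension registration replaces both the JUnit 4 runner and the rule.
    @ExtendWith(MockitoExtension.class)
    class WidgetTest {

      interface Service {
        String name();
      }

      private Service service;

      @BeforeEach // replaces JUnit 4's @Before
      void setUp() {
        service = mock(Service.class);
        when(service.name()).thenReturn("widget");
      }

      @Test
      void usesTheMock() {
        assertEquals("widget", service.name());
      }
    }
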
+import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) -public class JobTest { +@ExtendWith(MockitoExtension.class) +class JobTest { private static final JobId JOB_ID = JobId.of("project", "job"); private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); @@ -94,15 +91,13 @@ public class JobTest { .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG) .build(); - @Rule public MockitoRule rule; - private BigQuery bigquery; private BigQueryOptions mockOptions; private Job expectedJob; private Job job; - @Before - public void setUp() { + @BeforeEach + void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); when(bigquery.getOptions()).thenReturn(mockOptions); @@ -111,7 +106,7 @@ public void setUp() { } @Test - public void testBuilder() { + void testBuilder() { Job builtJob = new Job.Builder(bigquery, COPY_CONFIGURATION) .setJobId(JOB_ID) @@ -135,12 +130,12 @@ public void testBuilder() { } @Test - public void testToBuilder() { + void testToBuilder() { compareJob(expectedJob, expectedJob.toBuilder().build()); } @Test - public void testExists_True() { + void testExists_True() { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(expectedJob); assertTrue(job.exists()); @@ -148,7 +143,7 @@ public void testExists_True() { } @Test - public void testExists_False() { + void testExists_False() { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(null); assertFalse(job.exists()); @@ -156,14 +151,14 @@ public void testExists_False() { } @Test - public void testIsDone_True() { + void testIsDone_True() { Job job = expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.DONE)).build(); assertTrue(job.isDone()); verify(bigquery, times(0)).getJob(eq(JOB_INFO.getJobId()), any()); } @Test - public void testIsDone_False() { + void testIsDone_False() { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; Job job = expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(job); @@ -172,7 +167,7 @@ public void testIsDone_False() { } @Test - public void testIsDone_NotExists() { + void testIsDone_NotExists() { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; Job jobWithRunningState = expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); @@ -182,7 +177,7 @@ public void testIsDone_NotExists() { } @Test - public void testWaitFor() throws InterruptedException { + void testWaitFor() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; JobStatus status = mock(JobStatus.class); when(status.getState()).thenReturn(JobStatus.State.DONE); @@ -199,7 +194,7 @@ public void testWaitFor() throws InterruptedException { } @Test - public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { + void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -243,7 +238,7 @@ public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { } @Test - public void testWaitForAndGetQueryResultsEmptyWithSchema() 
throws InterruptedException { + void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedException { QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -288,7 +283,7 @@ public void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedExc } @Test - public void testWaitForAndGetQueryResults() throws InterruptedException { + void testWaitForAndGetQueryResults() throws InterruptedException { QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -340,17 +335,14 @@ public void testWaitForAndGetQueryResults() throws InterruptedException { } @Test - public void testWaitForAndGetQueryResults_Unsupported() throws InterruptedException { - try { - job.getQueryResults(); - Assert.fail(); - } catch (UnsupportedOperationException expected) { - Assert.assertNotNull(expected.getMessage()); - } + void testWaitForAndGetQueryResults_Unsupported() throws InterruptedException { + UnsupportedOperationException expected = + assertThrows(UnsupportedOperationException.class, () -> job.getQueryResults()); + assertNotNull(expected.getMessage()); } @Test - public void testWaitFor_Null() throws InterruptedException { + void testWaitFor_Null() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(null); @@ -359,7 +351,7 @@ public void testWaitFor_Null() throws InterruptedException { } @Test - public void testWaitForWithCheckingPeriod() throws InterruptedException { + void testWaitForWithCheckingPeriod() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; JobStatus status = mock(JobStatus.class); when(status.getState()).thenReturn(JobStatus.State.RUNNING); @@ -381,7 +373,7 @@ public void testWaitForWithCheckingPeriod() throws InterruptedException { } @Test - public void testWaitForWithCheckingPeriod_Null() throws InterruptedException { + void testWaitForWithCheckingPeriod_Null() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); Job runningJob = @@ -394,24 +386,26 @@ public void testWaitForWithCheckingPeriod_Null() throws InterruptedException { } @Test - public void testWaitForWithTimeout() throws InterruptedException { + void testWaitForWithTimeout() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); Job runningJob = expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(runningJob); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(runningJob); - try { - job.waitFor( - concat(TEST_RETRY_OPTIONS, RetryOption.totalTimeoutDuration(Duration.ofMillis(3)))); - Assert.fail(); - } catch (BigQueryException expected) { - Assert.assertNotNull(expected.getMessage()); - } + BigQueryException expected = + assertThrows( + BigQueryException.class, + () -> + job.waitFor( + concat( + TEST_RETRY_OPTIONS, + RetryOption.totalTimeoutDuration(Duration.ofMillis(3))))); + assertNotNull(expected.getMessage()); } @Test - public void 
testWaitForWithBigQueryRetryConfig() throws InterruptedException { + void testWaitForWithBigQueryRetryConfig() throws InterruptedException { QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -453,7 +447,7 @@ public void testWaitForWithBigQueryRetryConfig() throws InterruptedException { } @Test - public void testWaitForWithBigQueryRetryConfigShouldRetry() throws InterruptedException { + void testWaitForWithBigQueryRetryConfigShouldRetry() throws InterruptedException { QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -504,7 +498,7 @@ public void testWaitForWithBigQueryRetryConfigShouldRetry() throws InterruptedEx } @Test - public void testWaitForWithBigQueryRetryConfigErrorShouldNotRetry() throws InterruptedException { + void testWaitForWithBigQueryRetryConfigErrorShouldNotRetry() throws InterruptedException { QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -542,19 +536,18 @@ public void testWaitForWithBigQueryRetryConfigErrorShouldNotRetry() throws Inter .thenThrow(bigQueryException) .thenReturn(completedQuery); job = this.job.toBuilder().setConfiguration(DRL_QUERY_CONFIGURATION).build(); - try { - job.waitFor(TEST_BIGQUERY_RETRY_CONFIG, TEST_RETRY_OPTIONS); - fail("JobException expected"); - } catch (BigQueryException e) { - assertNotNull(e.getErrors()); - } + BigQueryException e = + assertThrows( + BigQueryException.class, + () -> job.waitFor(TEST_BIGQUERY_RETRY_CONFIG, TEST_RETRY_OPTIONS)); + assertNotNull(e.getErrors()); // Verify that getQueryResults is attempted only once and not retried since the error message // does not match. verify(bigquery, times(1)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); } @Test - public void testReload() { + void testReload() { JobInfo updatedInfo = JOB_INFO.toBuilder().setEtag("etag").build(); Job expectedJob = new Job(bigquery, new JobInfo.BuilderImpl(updatedInfo)); when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(expectedJob); @@ -564,7 +557,7 @@ public void testReload() { } @Test - public void testReloadJobException() { + void testReloadJobException() { JobInfo updatedInfo = JOB_INFO.toBuilder().setEtag("etag").build(); Job expectedJob = new Job(bigquery, new JobInfo.BuilderImpl(updatedInfo)); BigQueryError bigQueryError = new BigQueryError("invalidQuery", "US", "invalidQuery"); @@ -573,23 +566,19 @@ public void testReloadJobException() { ImmutableList bigQueryErrorList = ImmutableList.of(bigQueryError); BigQueryException bigQueryException = new BigQueryException(bigQueryErrorList); when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(expectedJob).thenThrow(bigQueryException); - try { - job.reload(); - fail("JobException expected"); - } catch (BigQueryException e) { - assertNotNull(e.getErrors()); - } + BigQueryException e = assertThrows(BigQueryException.class, () -> job.reload()); + assertNotNull(e.getErrors()); } @Test - public void testReloadNull() { + void testReloadNull() { when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(null); assertNull(job.reload()); verify(bigquery).getJob(JOB_INFO.getJobId()); } @Test - public void testReloadWithOptions() { + void testReloadWithOptions() { JobInfo updatedInfo = JOB_INFO.toBuilder().setEtag("etag").build(); Job expectedJob = new Job(bigquery, new JobInfo.BuilderImpl(updatedInfo)); when(bigquery.getJob(JOB_INFO.getJobId(), BigQuery.JobOption.fields())).thenReturn(expectedJob); @@ -599,24 +588,24 @@ public void testReloadWithOptions() { } @Test - public 
void testCancel() { + void testCancel() { when(bigquery.cancel(JOB_INFO.getJobId())).thenReturn(true); assertTrue(job.cancel()); verify(bigquery).cancel(JOB_INFO.getJobId()); } @Test - public void testBigQuery() { + void testBigQuery() { assertSame(bigquery, expectedJob.getBigQuery()); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareJob(expectedJob, Job.fromPb(bigquery, expectedJob.toPb())); } @Test - public void testToAndFromPbWithoutConfiguration() { + void testToAndFromPbWithoutConfiguration() { assertNotEquals(expectedJob, bigquery); compareJob(expectedJob, Job.fromPb(bigquery, expectedJob.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java index d987eb28e0..1a9db29951 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java @@ -16,7 +16,7 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; @@ -28,9 +28,9 @@ import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class LoadJobConfigurationTest { +class LoadJobConfigurationTest { private static final String TEST_PROJECT_ID = "test-project-id"; private static final CsvOptions CSV_OPTIONS = @@ -168,7 +168,7 @@ public class LoadJobConfigurationTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareLoadJobConfiguration(LOAD_CONFIGURATION_CSV, LOAD_CONFIGURATION_CSV.toBuilder().build()); LoadJobConfiguration configurationCSV = LOAD_CONFIGURATION_CSV.toBuilder() @@ -200,7 +200,7 @@ public void testToBuilder() { } @Test - public void testOf() { + void testOf() { LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); assertEquals(TABLE_ID, configuration.getDestinationTable()); assertEquals(SOURCE_URIS, configuration.getSourceUris()); @@ -220,13 +220,13 @@ public void testOf() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); compareLoadJobConfiguration(configuration, configuration.toBuilder().build()); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareLoadJobConfiguration( LOAD_CONFIGURATION_CSV, LoadJobConfiguration.fromPb(LOAD_CONFIGURATION_CSV.toPb())); LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); @@ -234,13 +234,13 @@ public void testToPbAndFromPb() { } @Test - public void testSetProjectId() { + void testSetProjectId() { LoadConfiguration configuration = LOAD_CONFIGURATION_CSV.setProjectId(TEST_PROJECT_ID); assertEquals(TEST_PROJECT_ID, configuration.getDestinationTable().getProject()); } @Test - public void testSetProjectIdDoNotOverride() { + void testSetProjectIdDoNotOverride() { LoadConfiguration configuration = LOAD_CONFIGURATION_CSV.toBuilder() .setDestinationTable(TABLE_ID.setProjectId(TEST_PROJECT_ID)) @@ -250,7 +250,7 @@ public void testSetProjectIdDoNotOverride() { } @Test - public void testGetType() { + void testGetType() { 
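
The JobTest hunks above replace the try { ...; fail(); } catch (Expected e) idiom with Assertions.assertThrows, which fails the test itself when nothing (or the wrong type) is thrown and returns the caught exception for further checks. The shape, as a standalone sketch:

    import static org.junit.jupiter.api.Assertions.assertNotNull;
    import static org.junit.jupiter.api.Assertions.assertThrows;

    import org.junit.jupiter.api.Test;

    class ThrowingTest {

      @Test
      void exceptionBecomesAReturnValue() {
        // No manual fail() needed: assertThrows fails if the lambda completes
        // normally or throws a different type, and otherwise returns the
        // exception so its message and state can still be asserted on.
        IllegalStateException e =
            assertThrows(
                IllegalStateException.class,
                () -> {
                  throw new IllegalStateException("boom");
                });
        assertNotNull(e.getMessage());
      }
    }
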
assertEquals(JobConfiguration.Type.LOAD, LOAD_CONFIGURATION_CSV.getType()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MaterializedViewDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MaterializedViewDefinitionTest.java index eef4324a0e..00ac64937f 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MaterializedViewDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MaterializedViewDefinitionTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.common.collect.ImmutableList; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class MaterializedViewDefinitionTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelIdTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelIdTest.java index 266a754c44..98df2d5f26 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelIdTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelIdTest.java @@ -16,17 +16,17 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class ModelIdTest { +class ModelIdTest { public static final ModelId MODEL = ModelId.of("dataset", "model"); public static final ModelId MODEL_COMPLETE = ModelId.of("project", "dataset", "model"); @Test - public void testOf() { + void testOf() { assertEquals(null, MODEL.getProject()); assertEquals("dataset", MODEL.getDataset()); assertEquals("model", MODEL.getModel()); @@ -37,19 +37,19 @@ public void testOf() { } @Test - public void testEquals() { + void testEquals() { compareModelIds(MODEL, ModelId.of("dataset", "model")); compareModelIds(MODEL_COMPLETE, ModelId.of("project", "dataset", "model")); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareModelIds(MODEL, ModelId.fromPb(MODEL.toPb())); compareModelIds(MODEL_COMPLETE, ModelId.fromPb(MODEL_COMPLETE.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { ModelId differentProjectTable = ModelId.of("differentProject", "dataset", "model"); assertEquals(differentProjectTable, MODEL.setProjectId("differentProject")); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelInfoTest.java index 87fa8bbf56..be0e1ff233 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelInfoTest.java @@ -15,16 +15,16 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.api.services.bigquery.model.TrainingOptions; import com.google.api.services.bigquery.model.TrainingRun; import java.util.Arrays; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class ModelInfoTest { +class ModelInfoTest { private 
static final ModelId MODEL_ID = ModelId.of("dataset", "model"); private static final String ETAG = "etag"; @@ -57,18 +57,18 @@ public class ModelInfoTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareModelInfo(MODEL_INFO, MODEL_INFO.toBuilder().build()); } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { ModelInfo modelInfo = ModelInfo.of(MODEL_ID); assertEquals(modelInfo, modelInfo.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(ETAG, MODEL_INFO.getEtag()); assertEquals(CREATION_TIME, MODEL_INFO.getCreationTime()); assertEquals(LAST_MODIFIED_TIME, MODEL_INFO.getLastModifiedTime()); @@ -81,7 +81,7 @@ public void testBuilder() { } @Test - public void testOf() { + void testOf() { ModelInfo modelInfo = ModelInfo.of(MODEL_ID); assertEquals(MODEL_ID, modelInfo.getModelId()); assertNull(modelInfo.getEtag()); @@ -98,12 +98,12 @@ public void testOf() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareModelInfo(MODEL_INFO, ModelInfo.fromPb(MODEL_INFO.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals("project", MODEL_INFO.setProjectId("project").getModelId().getProject()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java index 62b2cfe7d8..444d47c093 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java @@ -16,14 +16,14 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; -public class ModelTableDefinitionTest { +class ModelTableDefinitionTest { private static final String LOCATION = "US"; private static final Long NUM_BYTES = 14L; @@ -52,34 +52,33 @@ public class ModelTableDefinitionTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareModelTableDefinition(MODEL_TABLE_DEFINITION, MODEL_TABLE_DEFINITION.toBuilder().build()); } @Test - public void testTypeNullPointerException() { - try { - MODEL_TABLE_DEFINITION.toBuilder().setType(null).build(); - fail(); - } catch (NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + void testTypeNullPointerException() { + NullPointerException ex = + Assertions.assertThrows( + NullPointerException.class, + () -> MODEL_TABLE_DEFINITION.toBuilder().setType(null).build()); + assertNotNull(ex.getMessage()); } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { ModelTableDefinition modelTableDefinition = ModelTableDefinition.newBuilder().build(); assertEquals(modelTableDefinition, modelTableDefinition.toBuilder().build()); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { assertEquals( MODEL_TABLE_DEFINITION, ModelTableDefinition.fromPb(MODEL_TABLE_DEFINITION.toPb())); } @Test - public void testBuilder() { + void testBuilder() { 
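
A related detail, visible in the InsertAllRequestTest and ModelTableDefinitionTest hunks: unlike JUnit 4's @Test(expected = ...), which treated the whole method body as the expected failure site, assertThrows can be scoped to the single call that should throw, so an unexpected exception in setup code fails loudly instead of silently passing the test. A sketch of the distinction (ImmutabilityTest is illustrative):

    import static org.junit.jupiter.api.Assertions.assertThrows;

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import org.junit.jupiter.api.Test;

    class ImmutabilityTest {

      @Test
      void onlyTheMutationSitsInsideTheLambda() {
        // Setup stays outside the lambda; if it threw, the test would fail
        // with that exception rather than satisfying the assertion by accident.
        Map<String, String> view = Collections.unmodifiableMap(new HashMap<>());

        assertThrows(UnsupportedOperationException.class, () -> view.put("zip", "zap"));
      }
    }
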
assertEquals(MODEL_TABLE_DEFINITION.getSchema(), TABLE_SCHEMA); assertEquals(MODEL_TABLE_DEFINITION.getType(), TableDefinition.Type.MODEL); assertEquals(MODEL_TABLE_DEFINITION.getLocation(), LOCATION); @@ -87,12 +86,12 @@ public void testBuilder() { } @Test - public void testEquals() { + void testEquals() { assertEquals(MODEL_TABLE_DEFINITION, MODEL_TABLE_DEFINITION); } @Test - public void testNotEquals() { + void testNotEquals() { assertNotEquals(MODEL_TABLE_DEFINITION, LOCATION); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTest.java index 756277adc3..acdbdfbfe5 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTest.java @@ -16,25 +16,23 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) -public class ModelTest { +@ExtendWith(MockitoExtension.class) +class ModelTest { private static final ModelId MODEL_ID = ModelId.of("dataset", "model"); private static final String ETAG = "etag"; @@ -54,15 +52,13 @@ public class ModelTest { .setFriendlyName(FRIENDLY_NAME) .build(); - @Rule public MockitoRule rule; - private BigQuery bigquery; private BigQueryOptions mockOptions; private Model expectedModel; private Model model; - @Before - public void setUp() { + @BeforeEach + void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); when(bigquery.getOptions()).thenReturn(mockOptions); @@ -71,7 +67,7 @@ public void setUp() { } @Test - public void testBuilder() { + void testBuilder() { Model builtModel = new Model.Builder(bigquery, MODEL_ID) .setEtag(ETAG) @@ -86,12 +82,12 @@ public void testBuilder() { } @Test - public void testToBuilder() { + void testToBuilder() { compareModelInfo(expectedModel, expectedModel.toBuilder().build()); } @Test - public void testExists_True() { + void testExists_True() { BigQuery.ModelOption[] expectedOptions = {BigQuery.ModelOption.fields()}; when(bigquery.getModel(MODEL_INFO.getModelId(), expectedOptions)).thenReturn(expectedModel); assertTrue(model.exists()); @@ -99,7 +95,7 @@ public void testExists_True() { } @Test - public void testExists_False() { + void testExists_False() { BigQuery.ModelOption[] expectedOptions = {BigQuery.ModelOption.fields()}; when(bigquery.getModel(MODEL_INFO.getModelId(), expectedOptions)).thenReturn(null); 
assertFalse(model.exists()); @@ -107,7 +103,7 @@ public void testExists_False() { } @Test - public void testReload() { + void testReload() { ModelInfo updatedInfo = MODEL_INFO.toBuilder().setDescription("Description").build(); Model expectedModel = new Model(bigquery, new ModelInfo.BuilderImpl(updatedInfo)); when(bigquery.getModel(MODEL_INFO.getModelId())).thenReturn(expectedModel); @@ -117,14 +113,14 @@ public void testReload() { } @Test - public void testReloadNull() { + void testReloadNull() { when(bigquery.getModel(MODEL_INFO.getModelId())).thenReturn(null); assertNull(model.reload()); verify(bigquery).getModel(MODEL_INFO.getModelId()); } @Test - public void testUpdate() { + void testUpdate() { Model expectedUpdatedModel = expectedModel.toBuilder().setDescription("Description").build(); when(bigquery.update(eq(expectedModel))).thenReturn(expectedUpdatedModel); Model actualUpdatedModel = model.update(); @@ -133,7 +129,7 @@ public void testUpdate() { } @Test - public void testUpdateWithOptions() { + void testUpdateWithOptions() { Model expectedUpdatedModel = expectedModel.toBuilder().setDescription("Description").build(); when(bigquery.update(eq(expectedModel), eq(BigQuery.ModelOption.fields()))) .thenReturn(expectedUpdatedModel); @@ -143,14 +139,14 @@ public void testUpdateWithOptions() { } @Test - public void testDeleteTrue() { + void testDeleteTrue() { when(bigquery.delete(MODEL_INFO.getModelId())).thenReturn(true); assertTrue(model.delete()); verify(bigquery).delete(MODEL_INFO.getModelId()); } @Test - public void testDeleteFalse() { + void testDeleteFalse() { when(bigquery.delete(MODEL_INFO.getModelId())).thenReturn(false); assertFalse(model.delete()); verify(bigquery).delete(MODEL_INFO.getModelId()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/OptionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/OptionTest.java index 58f3148667..b25f067064 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/OptionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/OptionTest.java @@ -16,13 +16,13 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.cloud.bigquery.spi.v2.BigQueryRpc; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; public class OptionTest { @@ -54,11 +54,6 @@ public void testConstructor() { Option option = new Option(RPC_OPTION, null) {}; assertEquals(RPC_OPTION, option.getRpcOption()); assertNull(option.getValue()); - try { - new Option(null, VALUE) {}; - Assert.fail(); - } catch (NullPointerException expected) { - - } + Assertions.assertThrows(NullPointerException.class, () -> new Option(null, VALUE) {}); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java index c70ac3355e..b5ace223f1 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java @@ -17,9 +17,9 @@ package com.google.cloud.bigquery; import static 
com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ParquetOptionsTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyHelperTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyHelperTest.java index 291df79fde..ac05a2c1ff 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyHelperTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyHelperTest.java @@ -15,16 +15,16 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.api.services.bigquery.model.Binding; import com.google.cloud.Identity; import com.google.cloud.Policy; import com.google.cloud.Role; import com.google.common.collect.ImmutableList; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class PolicyHelperTest { +class PolicyHelperTest { public static final String ETAG = "etag"; public static final String ROLE1 = "roles/bigquery.admin"; @@ -58,7 +58,7 @@ public class PolicyHelperTest { Policy.newBuilder().setEtag(ETAG).setVersion(1).build(); @Test - public void testConversionWithBindings() { + void testConversionWithBindings() { assertEquals(IAM_POLICY, PolicyHelper.convertFromApiPolicy(API_POLICY)); assertEquals(API_POLICY, PolicyHelper.convertToApiPolicy(IAM_POLICY)); assertEquals( @@ -68,7 +68,7 @@ public void testConversionWithBindings() { } @Test - public void testConversionNoBindings() { + void testConversionNoBindings() { assertEquals(IAM_POLICY_NO_BINDINGS, PolicyHelper.convertFromApiPolicy(API_POLICY_NO_BINDINGS)); assertEquals(API_POLICY_NO_BINDINGS, PolicyHelper.convertToApiPolicy(IAM_POLICY_NO_BINDINGS)); assertEquals( diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java index f23cb36c2d..a94e4324be 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java @@ -16,13 +16,13 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class PolicyTagsTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PrimaryKeyTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PrimaryKeyTest.java index 2de87a0258..702ca5cb48 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PrimaryKeyTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PrimaryKeyTest.java @@ -16,19 +16,19 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Arrays; 
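// The org.junit.Assert static imports become their
// org.junit.jupiter.api.Assertions counterparts. One difference to watch when
// migrating by hand: JUnit 4's assertions take an optional failure message as
// the first argument, JUnit 5's take it as the last; the assertions in this
// file pass no messages, so the call sites are otherwise unchanged.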
import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class PrimaryKeyTest { +class PrimaryKeyTest { private static final List COLUMNS = Arrays.asList("column1", "column2"); private static final PrimaryKey PRIMARY_KEY = PrimaryKey.newBuilder().setColumns(COLUMNS).build(); @Test - public void testToBuilder() { + void testToBuilder() { comparePrimaryKeyDefinition(PRIMARY_KEY, PRIMARY_KEY.toBuilder().build()); PrimaryKey primaryKey = PRIMARY_KEY.toBuilder().setColumns(Arrays.asList("col1", "col2", "col3")).build(); @@ -36,14 +36,14 @@ public void testToBuilder() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(COLUMNS, PRIMARY_KEY.getColumns()); PrimaryKey primaryKey = PRIMARY_KEY.newBuilder().setColumns(COLUMNS).build(); assertEquals(PRIMARY_KEY, primaryKey); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { PrimaryKey primaryKey = PRIMARY_KEY.toBuilder().build(); assertTrue(PrimaryKey.fromPb(primaryKey.toPb()) instanceof PrimaryKey); comparePrimaryKeyDefinition(primaryKey, PrimaryKey.fromPb(primaryKey.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java index f25aa47eda..7fe41daa06 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; @@ -31,7 +31,7 @@ import java.math.BigDecimal; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class QueryJobConfigurationTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java index 25649388e9..276234246e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java @@ -18,7 +18,7 @@ import static com.google.cloud.bigquery.QueryParameterValue.TIMESTAMP_FORMATTER; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.api.services.bigquery.model.QueryParameterType; import com.google.common.collect.ImmutableMap; @@ -32,8 +32,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.threeten.extra.PeriodDuration; public class QueryParameterValueTest { @@ -58,12 +57,11 @@ public void testBuilder() { @Test public void testTypeNullPointerException() { - try { - QUERY_PARAMETER_VALUE.toBuilder().setType(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex).isNotNull(); - } + 
NullPointerException ex = + assertThrows( + NullPointerException.class, + () -> QUERY_PARAMETER_VALUE.toBuilder().setType(null).build()); + assertThat(ex).isNotNull(); } @Test @@ -407,10 +405,12 @@ public void testStandardDate() throws ParseException { assertThat(value.getArrayValues()).isNull(); } - @Test(expected = IllegalArgumentException.class) + @Test public void testInvalidDate() { // not supposed to have the time - QueryParameterValue.date("2014-08-19 12:41:35.220000"); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.date("2014-08-19 12:41:35.220000")); } @Test @@ -422,10 +422,12 @@ public void testTime() { assertThat(value.getArrayValues()).isNull(); } - @Test(expected = IllegalArgumentException.class) + @Test public void testInvalidTime() { // not supposed to have the date - QueryParameterValue.time("2014-08-19 12:41:35.220000"); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.time("2014-08-19 12:41:35.220000")); } @Test @@ -437,10 +439,10 @@ public void testDateTime() { assertThat(value.getArrayValues()).isNull(); } - @Test(expected = IllegalArgumentException.class) + @Test public void testInvalidDateTime() { // missing the time - QueryParameterValue.dateTime("2014-08-19"); + assertThrows(IllegalArgumentException.class, () -> QueryParameterValue.dateTime("2014-08-19")); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java index 8661346776..be1f0e1982 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java @@ -16,10 +16,10 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.api.services.bigquery.model.QueryRequest; import com.google.cloud.bigquery.JobInfo.CreateDisposition; @@ -31,7 +31,7 @@ import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class QueryRequestInfoTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java index bc7d6083be..30eeb90ad4 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import com.google.api.services.bigquery.model.ExplainQueryStep; import com.google.cloud.bigquery.QueryStage.QueryStep; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class QueryStageTest { +class 
QueryStageTest { private static final List SUBSTEPS1 = ImmutableList.of("substep1", "substep2"); private static final List SUBSTEPS2 = ImmutableList.of("substep3", "substep4"); @@ -96,7 +96,7 @@ public class QueryStageTest { .build(); @Test - public void testQueryStepConstructor() { + void testQueryStepConstructor() { assertEquals("KIND", QUERY_STEP1.getName()); assertEquals("KIND", QUERY_STEP2.getName()); assertEquals(SUBSTEPS1, QUERY_STEP1.getSubsteps()); @@ -104,7 +104,7 @@ public void testQueryStepConstructor() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(COMPLETED_PARALLEL_INPUTS, QUERY_STAGE.getCompletedParallelInputs()); assertEquals(COMPUTE_MS_AVG, QUERY_STAGE.getComputeMsAvg()); assertEquals(COMPUTE_MS_MAX, QUERY_STAGE.getComputeMsMax()); @@ -138,7 +138,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareQueryStep(QUERY_STEP1, QueryStep.fromPb(QUERY_STEP1.toPb())); compareQueryStep(QUERY_STEP2, QueryStep.fromPb(QUERY_STEP2.toPb())); compareQueryStage(QUERY_STAGE, QueryStage.fromPb(QUERY_STAGE.toPb())); @@ -149,14 +149,14 @@ public void testToAndFromPb() { } @Test - public void testEquals() { + void testEquals() { compareQueryStep(QUERY_STEP1, QUERY_STEP1); compareQueryStep(QUERY_STEP2, QUERY_STEP2); compareQueryStage(QUERY_STAGE, QUERY_STAGE); } @Test - public void testNotEquals() { + void testNotEquals() { assertNotEquals(QUERY_STAGE, QUERY_STEP1); assertNotEquals(QUERY_STEP1, QUERY_STAGE); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java index 2d98376b36..b72b4b70c2 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java @@ -15,11 +15,11 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.common.collect.ImmutableMap; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class RangeTest { private static final Range RANGE_DATE = diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RemoteFunctionOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RemoteFunctionOptionsTest.java index 8ee0e4564f..a3559f5cfc 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RemoteFunctionOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RemoteFunctionOptionsTest.java @@ -15,13 +15,13 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.HashMap; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class RemoteFunctionOptionsTest { +class RemoteFunctionOptionsTest { private static final String endpoint = "https://aaabbbccc-uc.a.run.app"; private static final String connection = "projects/{projectId}/locations/{locationId}/connections/{connectionId}"; @@ -43,13 +43,13 @@ public class RemoteFunctionOptionsTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareRemoteFunctionOptions( REMOTE_FUNCTION_OPTIONS, REMOTE_FUNCTION_OPTIONS.toBuilder().build()); } @Test - public 
void testBuilder() { + void testBuilder() { assertEquals(endpoint, REMOTE_FUNCTION_OPTIONS.getEndpoint()); assertEquals(connection, REMOTE_FUNCTION_OPTIONS.getConnection()); assertEquals(userDefinedContext, REMOTE_FUNCTION_OPTIONS.getUserDefinedContext()); @@ -57,7 +57,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareRemoteFunctionOptions( REMOTE_FUNCTION_OPTIONS, RemoteFunctionOptions.fromPb(REMOTE_FUNCTION_OPTIONS.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineArgumentTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineArgumentTest.java index 909d5981d7..31a2c56de9 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineArgumentTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineArgumentTest.java @@ -15,9 +15,9 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class RoutineArgumentTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineIdTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineIdTest.java index 94a19fbfd5..2800f3caa6 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineIdTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineIdTest.java @@ -15,9 +15,9 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class RoutineIdTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java index 145dc8914c..f191cbedd0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java @@ -15,14 +15,14 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class RoutineInfoTest { +class RoutineInfoTest { private static final RoutineId ROUTINE_ID = RoutineId.of("dataset", "routine"); private static final String ETAG = "etag"; @@ -68,18 +68,18 @@ public class RoutineInfoTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareRoutineInfo(ROUTINE_INFO, ROUTINE_INFO.toBuilder().build()); } @Test - public void testBuilderIncomplete() { + void testBuilderIncomplete() { RoutineInfo routineInfo = RoutineInfo.of(ROUTINE_ID); assertEquals(routineInfo, routineInfo.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(ROUTINE_ID, ROUTINE_INFO.getRoutineId()); assertEquals(ETAG, ROUTINE_INFO.getEtag()); assertEquals(ROUTINE_TYPE, ROUTINE_INFO.getRoutineType()); @@ -96,7 +96,7 @@ public void testBuilder() { } @Test - public void testOf() { + void testOf() { RoutineInfo routineInfo = RoutineInfo.of(ROUTINE_ID); 
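// JUnit 5 discovers package-private test classes and methods, so the
// migration also drops the public modifier that JUnit 4 required
// (e.g. "class RoutineInfoTest" and "void testOf()" above).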
assertEquals(ROUTINE_ID, ROUTINE_INFO.getRoutineId()); assertNull(routineInfo.getEtag()); @@ -114,12 +114,12 @@ public void testOf() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareRoutineInfo(ROUTINE_INFO, RoutineInfo.fromPb(ROUTINE_INFO.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals("project", ROUTINE_INFO.setProjectId("project").getRoutineId().getProject()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineTest.java index eaf1420120..839bfe5e66 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineTest.java @@ -15,11 +15,11 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -29,14 +29,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class RoutineTest { private static final RoutineId ROUTINE_ID = RoutineId.of("dataset", "routine"); @@ -116,15 +114,13 @@ public class RoutineTest { .setReturnTableType(RETURN_TABLE_TYPE) .build(); - @Rule public MockitoRule rule; - private BigQuery bigquery; private BigQueryOptions mockOptions; private Routine expectedRoutine; private Routine expectedRoutineTvf; private Routine routine; - @Before + @BeforeEach public void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SchemaTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SchemaTest.java index 7f53680e62..9750fd7bd3 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SchemaTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SchemaTest.java @@ -16,14 +16,14 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.api.services.bigquery.model.TableSchema; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class SchemaTest { +class SchemaTest { private static PolicyTags POLICY_TAGS = PolicyTags.newBuilder().setNames(ImmutableList.of("someTag")).build(); @@ -53,12 +53,12 @@ public class SchemaTest { private static final Schema 
TABLE_SCHEMA = Schema.of(FIELDS); @Test - public void testOf() { + void testOf() { compareTableSchema(TABLE_SCHEMA, Schema.of(FIELDS)); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareTableSchema(TABLE_SCHEMA, Schema.fromPb(TABLE_SCHEMA.toPb())); } @@ -68,7 +68,7 @@ private void compareTableSchema(Schema expected, Schema value) { } @Test - public void testEmptySchema() { + void testEmptySchema() { TableSchema tableSchema = new TableSchema(); Schema schema = Schema.fromPb(tableSchema); assertEquals(0, schema.getFields().size()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java index c739bcf5a5..defcd9cb39 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java @@ -16,12 +16,12 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class SnapshotTableDefinitionTest { +class SnapshotTableDefinitionTest { private static final TableId BASE_TABLE_ID = TableId.of("DATASET_NAME", "BASE_TABLE_NAME"); private static final String SNAPSHOT_TIME = "2021-05-19T11:32:26.553Z"; @@ -32,7 +32,7 @@ public class SnapshotTableDefinitionTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareSnapshotTableDefinition( SNAPSHOTTABLE_DEFINITION, SNAPSHOTTABLE_DEFINITION.toBuilder().build()); SnapshotTableDefinition snapshotTableDefinition = @@ -41,7 +41,7 @@ public void testToBuilder() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(TableDefinition.Type.SNAPSHOT, SNAPSHOTTABLE_DEFINITION.getType()); assertEquals(BASE_TABLE_ID, SNAPSHOTTABLE_DEFINITION.getBaseTableId()); assertEquals(SNAPSHOT_TIME, SNAPSHOTTABLE_DEFINITION.getSnapshotTime()); @@ -54,7 +54,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { SnapshotTableDefinition snapshotTableDefinition = SNAPSHOTTABLE_DEFINITION.toBuilder().build(); assertTrue( TableDefinition.fromPb(snapshotTableDefinition.toPb()) instanceof SnapshotTableDefinition); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLDataTypeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLDataTypeTest.java index 635a75612b..ffc6311184 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLDataTypeTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLDataTypeTest.java @@ -15,13 +15,13 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class StandardSQLDataTypeTest { +class StandardSQLDataTypeTest { private static final String STRING_TYPEKIND = "STRING"; private static final String ARRAY_TYPEKIND = "ARRAY"; private static final String STRUCT_TYPEKIND = "STRUCT"; @@ -42,7 +42,7 @@ public class StandardSQLDataTypeTest { 
StandardSQLDataType.newBuilder(STRUCT_TYPEKIND).setStructType(STRUCT_TYPE).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareStandardSQLDataType(STRING_DATA_TYPE, STRING_DATA_TYPE.toBuilder().build()); compareStandardSQLDataType( ARRAY_OF_STRING_DATA_TYPE, ARRAY_OF_STRING_DATA_TYPE.toBuilder().build()); @@ -50,7 +50,7 @@ public void testToBuilder() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(STRING_TYPEKIND, STRING_DATA_TYPE.getTypeKind()); assertEquals(ARRAY_TYPEKIND, ARRAY_OF_STRING_DATA_TYPE.getTypeKind()); assertEquals(STRING_DATA_TYPE, ARRAY_OF_STRING_DATA_TYPE.getArrayElementType()); @@ -58,7 +58,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareStandardSQLDataType( ARRAY_OF_STRING_DATA_TYPE, StandardSQLDataType.fromPb(ARRAY_OF_STRING_DATA_TYPE.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLFieldTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLFieldTest.java index 904ed80280..5e3af997d7 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLFieldTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLFieldTest.java @@ -15,9 +15,9 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class StandardSQLFieldTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLStructTypeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLStructTypeTest.java index d4fa86950f..ab88de3f0e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLStructTypeTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLStructTypeTest.java @@ -15,13 +15,13 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class StandardSQLStructTypeTest { +class StandardSQLStructTypeTest { private static final StandardSQLField FIELD_1 = StandardSQLField.newBuilder("FIELD_1", StandardSQLDataType.newBuilder("STRING").build()) @@ -35,18 +35,18 @@ public class StandardSQLStructTypeTest { StandardSQLStructType.newBuilder(FIELD_LIST).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareStandardSQLStructType(STRUCT_TYPE, STRUCT_TYPE.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(FIELD_1, STRUCT_TYPE.getFields().get(0)); assertEquals(FIELD_2, STRUCT_TYPE.getFields().get(1)); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareStandardSQLStructType(STRUCT_TYPE, StandardSQLStructType.fromPb(STRUCT_TYPE.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLTableTypeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLTableTypeTest.java index 2ed6e35354..ce5a4992cd 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLTableTypeTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLTableTypeTest.java @@ -16,11 +16,11 @@ 
package com.google.cloud.bigquery; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class StandardSQLTableTypeTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java index 8fbe3cefe7..6ff0a366d1 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java @@ -16,12 +16,12 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.api.services.bigquery.model.Streamingbuffer; import com.google.api.services.bigquery.model.Table; @@ -29,7 +29,7 @@ import com.google.cloud.bigquery.StandardTableDefinition.StreamingBuffer; import com.google.common.collect.ImmutableList; import com.google.common.truth.Truth; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class StandardTableDefinitionTest { @@ -131,12 +131,10 @@ public void testBuilder() { @Test public void testTypeNullPointerException() { - try { - TABLE_DEFINITION.toBuilder().setType(null).build(); - fail(); - } catch (NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + NullPointerException ex = + assertThrows( + NullPointerException.class, () -> TABLE_DEFINITION.toBuilder().setType(null).build()); + assertNotNull(ex.getMessage()); } @Test @@ -183,15 +181,12 @@ public void testFromPbWithUnexpectedTimePartitioningTypeRaisesInvalidArgumentExc .setTableId("ILLEGAL_ARG_TEST_TABLE")) .setTimePartitioning( new com.google.api.services.bigquery.model.TimePartitioning().setType("GHURRY")); - try { - StandardTableDefinition.fromPb(invalidTable); - } catch (IllegalArgumentException ie) { - Truth.assertThat(ie.getMessage()) - .contains( - "Illegal Argument - Got unexpected time partitioning GHURRY in project ILLEGAL_ARG_TEST_PROJECT in dataset ILLEGAL_ARG_TEST_DATASET in table ILLEGAL_ARG_TEST_TABLE"); - return; - } - fail("testFromPb illegal argument exception did not throw!"); + IllegalArgumentException ie = + assertThrows( + IllegalArgumentException.class, () -> StandardTableDefinition.fromPb(invalidTable)); + Truth.assertThat(ie.getMessage()) + .contains( + "Illegal Argument - Got unexpected time partitioning GHURRY in project ILLEGAL_ARG_TEST_PROJECT in dataset ILLEGAL_ARG_TEST_DATASET in table ILLEGAL_ARG_TEST_TABLE"); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java index 7d0f57ef76..b074b2f22c 100644 --- 
a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TableConstraintsTest { +class TableConstraintsTest { private static final List COLUMNS_PK = Arrays.asList("column1", "column2"); private static final PrimaryKey PRIMARY_KEY = PrimaryKey.newBuilder().setColumns(COLUMNS_PK).build(); @@ -50,7 +50,7 @@ public class TableConstraintsTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareTableConstraintsDefinition(TABLE_CONSTRAINTS, TABLE_CONSTRAINTS.toBuilder().build()); List columnsPk = Arrays.asList("col1", "col2", "col3"); PrimaryKey primaryKey = PrimaryKey.newBuilder().setColumns(columnsPk).build(); @@ -90,7 +90,7 @@ public void testToBuilder() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(Collections.singletonList(FOREIGN_KEY), TABLE_CONSTRAINTS.getForeignKeys()); assertEquals(PRIMARY_KEY, TABLE_CONSTRAINTS.getPrimaryKey()); TableConstraints tableConstraints = @@ -103,7 +103,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { TableConstraints tableConstraints = TABLE_CONSTRAINTS.toBuilder().build(); assertTrue(TableConstraints.fromPb(tableConstraints.toPb()) instanceof TableConstraints); compareTableConstraintsDefinition( diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java index a90b5c4d72..8752b27086 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; @@ -39,15 +39,17 @@ import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Random; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import 
org.mockito.quality.Strictness; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class TableDataWriteChannelTest { private static final String UPLOAD_ID = "uploadid"; @@ -80,7 +82,7 @@ public class TableDataWriteChannelTest { private TableDataWriteChannel writer; - @Before + @BeforeEach public void setUp() { rpcFactoryMock = mock(BigQueryRpcFactory.class); bigqueryRpcMock = mock(HttpBigQueryRpc.class); @@ -142,12 +144,14 @@ public void testCreateNonRetryableError() throws IOException { .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenThrow(new RuntimeException("expected")); - try (TableDataWriteChannel channel = - new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION)) { - Assert.fail(); - } catch (RuntimeException expected) { - Assert.assertEquals("java.lang.RuntimeException: expected", expected.getMessage()); - } + RuntimeException expected = + assertThrows( + RuntimeException.class, + () -> { + try (TableDataWriteChannel channel = + new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION)) {} + }); + assertEquals("java.lang.RuntimeException: expected", expected.getMessage()); verify(bigqueryRpcMock) .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() @@ -269,17 +273,18 @@ public void testWritesAndFlushNonRetryableError() throws IOException { eq(DEFAULT_CHUNK_SIZE), eq(false))) .thenThrow(new RuntimeException("expected")); - try { - writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); - ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; - for (int i = 0; i < buffers.length; i++) { - buffers[i] = randomBuffer(MIN_CHUNK_SIZE); - assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i])); - } - Assert.fail(); - } catch (RuntimeException expected) { - Assert.assertEquals("java.lang.RuntimeException: expected", expected.getMessage()); - } + RuntimeException expected = + assertThrows( + RuntimeException.class, + () -> { + writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); + ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; + for (int i = 0; i < buffers.length; i++) { + buffers[i] = randomBuffer(MIN_CHUNK_SIZE); + assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i])); + } + }); + assertEquals("java.lang.RuntimeException: expected", expected.getMessage()); verify(bigqueryRpcMock) .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() @@ -363,12 +368,7 @@ public void testWriteClosed() throws IOException { writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); writer.close(); assertEquals(job, writer.getJob()); - try { - writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)); - fail("Expected TableDataWriteChannel write to throw IOException"); - } catch (IOException ex) { - // expected - } + assertThrows(IOException.class, () -> writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); verify(bigqueryRpcMock) .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableIdTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableIdTest.java index dc28ff8610..02154db0ca 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableIdTest.java +++ 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableIdTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TableIdTest { +class TableIdTest { private static final TableId TABLE = TableId.of("dataset", "table"); private static final TableId TABLE_COMPLETE = TableId.of("project", "dataset", "table"); @@ -28,7 +28,7 @@ public class TableIdTest { "projects/project/datasets/dataset/tables/table"; @Test - public void testOf() { + void testOf() { assertEquals(null, TABLE.getProject()); assertEquals("dataset", TABLE.getDataset()); assertEquals("table", TABLE.getTable()); @@ -39,19 +39,19 @@ public void testOf() { } @Test - public void testEquals() { + void testEquals() { compareTableIds(TABLE, TableId.of("dataset", "table")); compareTableIds(TABLE_COMPLETE, TableId.of("project", "dataset", "table")); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareTableIds(TABLE, TableId.fromPb(TABLE.toPb())); compareTableIds(TABLE_COMPLETE, TableId.fromPb(TABLE_COMPLETE.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { TableId differentProjectTable = TableId.of("differentProject", "dataset", "table"); assertEquals(differentProjectTable, TABLE.setProjectId("differentProject")); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableInfoTest.java index a4ce6fbb47..4dd488a680 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableInfoTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.common.collect.ImmutableList; import java.math.BigInteger; import java.util.Collections; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TableInfoTest { +class TableInfoTest { private static final String ETAG = "etag"; private static final String GENERATED_ID = "project:dataset:table"; @@ -153,7 +153,7 @@ public class TableInfoTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareTableInfo(TABLE_INFO, TABLE_INFO.toBuilder().build()); compareTableInfo(VIEW_INFO, VIEW_INFO.toBuilder().build()); compareTableInfo(EXTERNAL_TABLE_INFO, EXTERNAL_TABLE_INFO.toBuilder().build()); @@ -164,7 +164,7 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { TableInfo tableInfo = TableInfo.of(TABLE_ID, TABLE_DEFINITION); assertEquals(tableInfo, tableInfo.toBuilder().build()); tableInfo = TableInfo.of(TABLE_ID, VIEW_DEFINITION); @@ -174,7 +174,7 @@ public void testToBuilderIncomplete() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(TABLE_ID, TABLE_INFO.getTableId()); assertEquals(CREATION_TIME, TABLE_INFO.getCreationTime()); assertEquals(DESCRIPTION, TABLE_INFO.getDescription()); @@ -223,7 +223,7 @@ public void testBuilder() { } @Test - public void testOf() { + void testOf() { TableInfo tableInfo = TableInfo.of(TABLE_ID, TABLE_DEFINITION); 
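// Sketch of a possible follow-up, not part of this change: the
// assertEquals(null, ...) calls in tests like TableIdTest.testOf could read
// more idiomatically as assertNull(...) under JUnit 5; this migration limits
// itself to the mechanical JUnit 4 -> JUnit 5 API swap.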
assertEquals(TABLE_ID, tableInfo.getTableId()); assertNull(tableInfo.getCreationTime()); @@ -266,21 +266,21 @@ public void testOf() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareTableInfo(TABLE_INFO, TableInfo.fromPb(TABLE_INFO.toPb())); compareTableInfo(VIEW_INFO, TableInfo.fromPb(VIEW_INFO.toPb())); compareTableInfo(EXTERNAL_TABLE_INFO, TableInfo.fromPb(EXTERNAL_TABLE_INFO.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals("project", TABLE_INFO.setProjectId("project").getTableId().getProject()); assertEquals("project", EXTERNAL_TABLE_INFO.setProjectId("project").getTableId().getProject()); assertEquals("project", VIEW_INFO.setProjectId("project").getTableId().getProject()); } @Test - public void testSetProjectIdDoNotOverride() { + void testSetProjectIdDoNotOverride() { TableInfo tableInfo = TableInfo.of(TABLE_ID, TABLE_DEFINITION).setProjectId("project"); tableInfo.setProjectId("not-override-project").toBuilder(); assertEquals("project", tableInfo.getTableId().getProject()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableMetadataCacheUsageTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableMetadataCacheUsageTest.java index 8f141fa596..dc996693cc 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableMetadataCacheUsageTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableMetadataCacheUsageTest.java @@ -16,13 +16,13 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.api.services.bigquery.model.TableReference; import com.google.cloud.bigquery.TableMetadataCacheUsage.UnusedReason; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TableMetadataCacheUsageTest { +class TableMetadataCacheUsageTest { private static final String EXPLANATION = "test explanation"; @@ -51,7 +51,7 @@ public class TableMetadataCacheUsageTest { .setUnusedReason(UNUSED_REASON.toString()); @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { assertEquals(TABLE_METADATA_CACHE_USAGE_PB, TABLE_METADATA_CACHE_USAGE.toPb()); compareTableMetadataCacheUsage( TABLE_METADATA_CACHE_USAGE, TableMetadataCacheUsage.fromPb(TABLE_METADATA_CACHE_USAGE_PB)); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableResultTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableResultTest.java index 71f9e35da5..5bdb14cf49 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableResultTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableResultTest.java @@ -22,9 +22,9 @@ import com.google.api.gax.paging.Page; import com.google.cloud.PageImpl; import com.google.common.collect.ImmutableList; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TableResultTest { +class TableResultTest { private static final Page INNER_PAGE_0 = new PageImpl<>( new PageImpl.NextPageFetcher() { @@ -52,7 +52,7 @@ private static FieldValueList newFieldValueList(String s) { } @Test - public void testNullSchema() { + void testNullSchema() { TableResult result = TableResult.newBuilder().setTotalRows(3L).setPageNoSchema(INNER_PAGE_0).build(); assertThat(result.getSchema()).isNull(); @@ -75,7 +75,7 @@ public void testNullSchema() { } @Test - public void testSchema() { + void testSchema() { TableResult result = 
TableResult.newBuilder() .setSchema(SCHEMA) diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java index 6e99b701c1..6ad7822d9b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java @@ -17,11 +17,11 @@ package com.google.cloud.bigquery; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -34,14 +34,12 @@ import com.google.common.collect.ImmutableMap; import java.math.BigInteger; import java.util.List; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; - -@RunWith(MockitoJUnitRunner.class) +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) public class TableTest { private static final String ETAG = "etag"; @@ -97,14 +95,12 @@ public class TableTest { FieldValueList.of(ImmutableList.of(FIELD_VALUE1)).withSchema(SCHEMA.getFields()), FieldValueList.of(ImmutableList.of(FIELD_VALUE2)).withSchema(SCHEMA.getFields())); - @Rule public MockitoRule rule; - private BigQuery bigquery; private BigQueryOptions mockOptions; private Table expectedTable; private Table table; - @Before + @BeforeEach public void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java index 1e48c817ed..ec947381b0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.cloud.bigquery.TimePartitioning.Type; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TimePartitioningTest { +class TimePartitioningTest { private static final Type TYPE_DAY = Type.DAY; private static final Type TYPE_HOUR = 
Type.HOUR; @@ -60,7 +60,7 @@ public class TimePartitioningTest { .build(); @Test - public void testOf() { + void testOf() { assertEquals(TYPE_DAY, TIME_PARTITIONING_DAY.getType()); assertEquals(TYPE_HOUR, TIME_PARTITIONING_HOUR.getType()); assertEquals(TYPE_MONTH, TIME_PARTITIONING_MONTH.getType()); @@ -74,7 +74,7 @@ public void testOf() { } @Test - public void testBuilder() { + void testBuilder() { TimePartitioning partitioning = TimePartitioning.newBuilder(TYPE_DAY).build(); assertEquals(TYPE_DAY, partitioning.getType()); assertNull(partitioning.getExpirationMs()); @@ -90,27 +90,21 @@ public void testBuilder() { } @Test - public void testTypeOf_Npe() { - try { - TimePartitioning.of(null); - Assert.fail(); - } catch (NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + void testTypeOf_Npe() { + NullPointerException ex = + assertThrows(NullPointerException.class, () -> TimePartitioning.of(null)); + assertNotNull(ex.getMessage()); } @Test - public void testTypeAndExpirationOf_Npe() { - try { - TimePartitioning.of(null, EXPIRATION_MS); - Assert.fail(); - } catch (NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + void testTypeAndExpirationOf_Npe() { + NullPointerException ex = + assertThrows(NullPointerException.class, () -> TimePartitioning.of(null, EXPIRATION_MS)); + assertNotNull(ex.getMessage()); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareTimePartitioning( TIME_PARTITIONING_DAY, TimePartitioning.fromPb(TIME_PARTITIONING_DAY.toPb())); TimePartitioning partitioning = TimePartitioning.of(TYPE_DAY); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java index 1d888f00df..22f419593d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java @@ -15,10 +15,10 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TimelineSampleTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/UserDefinedFunctionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/UserDefinedFunctionTest.java index 93657b44c1..81622527a3 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/UserDefinedFunctionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/UserDefinedFunctionTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class UserDefinedFunctionTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java index d60c7be2bc..60eeea7668 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java @@ -16,19 +16,18 @@ package com.google.cloud.bigquery; -import static 
org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import com.google.common.collect.ImmutableList;
 import java.util.List;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-public class ViewDefinitionTest {
+class ViewDefinitionTest {
 
   private static final String VIEW_QUERY = "VIEW QUERY";
   private static final List<UserDefinedFunction> USER_DEFINED_FUNCTIONS =
@@ -37,7 +37,7 @@ public class ViewDefinitionTest {
       ViewDefinition.newBuilder(VIEW_QUERY, USER_DEFINED_FUNCTIONS).setSchema(Schema.of()).build();
 
   @Test
-  public void testToBuilder() {
+  void testToBuilder() {
     compareViewDefinition(VIEW_DEFINITION, VIEW_DEFINITION.toBuilder().build());
     ViewDefinition viewDefinition = VIEW_DEFINITION.toBuilder().setQuery("NEW QUERY").build();
     assertEquals("NEW QUERY", viewDefinition.getQuery());
@@ -50,23 +50,21 @@ public void testToBuilder() {
   }
 
   @Test
-  public void testTypeNullPointerException() {
-    try {
-      VIEW_DEFINITION.toBuilder().setType(null).build();
-      fail();
-    } catch (NullPointerException ex) {
-      assertNotNull(ex.getMessage());
-    }
+  void testTypeNullPointerException() {
+    NullPointerException ex =
+        assertThrows(
+            NullPointerException.class, () -> VIEW_DEFINITION.toBuilder().setType(null).build());
+    assertNotNull(ex.getMessage());
   }
 
   @Test
-  public void testToBuilderIncomplete() {
+  void testToBuilderIncomplete() {
     TableDefinition viewDefinition = ViewDefinition.of(VIEW_QUERY);
     assertEquals(viewDefinition, viewDefinition.toBuilder().build());
   }
 
   @Test
-  public void testBuilder() {
+  void testBuilder() {
     assertEquals(VIEW_QUERY, VIEW_DEFINITION.getQuery());
     assertEquals(TableDefinition.Type.VIEW, VIEW_DEFINITION.getType());
     assertEquals(USER_DEFINED_FUNCTIONS, VIEW_DEFINITION.getUserDefinedFunctions());
@@ -106,7 +104,7 @@ public void testBuilder() {
   }
 
   @Test
-  public void testToAndFromPb() {
+  void testToAndFromPb() {
     ViewDefinition viewDefinition = VIEW_DEFINITION.toBuilder().setUseLegacySql(false).build();
     assertTrue(TableDefinition.fromPb(viewDefinition.toPb()) instanceof ViewDefinition);
     compareViewDefinition(
diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java
index 240f12185c..35745235e5 100644
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java
@@ -16,8 +16,8 @@
 package com.google.cloud.bigquery;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
 
 import com.google.cloud.bigquery.JobInfo.CreateDisposition;
 import com.google.cloud.bigquery.JobInfo.WriteDisposition;
@@ -27,7 +27,7 @@
 import 
java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class WriteChannelConfigurationTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index ddad48d392..fec7e55e05 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -20,15 +20,15 @@ import static com.google.common.truth.Truth.assertThat; import static java.lang.System.currentTimeMillis; import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.google.api.client.util.IOUtils; import com.google.api.gax.paging.Page; @@ -208,14 +208,14 @@ import java.util.concurrent.TimeoutException; import java.util.logging.Level; import java.util.logging.Logger; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.threeten.extra.PeriodDuration; -public class ITBigQueryTest { +@Timeout(value = 300) +class ITBigQueryTest { private static final byte[] BYTES = {0xD, 0xE, 0xA, 0xD}; private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES); @@ -1063,10 +1063,8 @@ public CompletableResultCode shutdown() { } } - @Rule public Timeout globalTimeout = Timeout.seconds(300); - - @BeforeClass - public static void beforeClass() throws InterruptedException, IOException { + @BeforeAll + static void beforeClass() throws InterruptedException, IOException { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); RemoteStorageHelper storageHelper = RemoteStorageHelper.create(); Map labels = ImmutableMap.of("test-job-name", "test-load-job"); @@ -1180,8 +1178,8 @@ public static void beforeClass() throws InterruptedException, IOException { assertNull(jobLargeTable.getStatus().getError()); } - @AfterClass - public static void afterClass() throws Exception { + @AfterAll + static void afterClass() throws Exception { if (bigquery != null) { RemoteBigQueryHelper.forceDelete(bigquery, DATASET); RemoteBigQueryHelper.forceDelete(bigquery, UK_DATASET); @@ -1206,13 +1204,12 @@ static GoogleCredentials loadCredentials(String credentialFile) { try (InputStream keyStream = new 
ByteArrayInputStream(credentialFile.getBytes())) {
       return GoogleCredentials.fromStream(keyStream);
     } catch (IOException e) {
-      fail("Couldn't create fake JSON credentials.");
+      throw new RuntimeException("Couldn't create fake JSON credentials.", e);
     }
-    return null;
   }
 
   @Test
-  public void testListDatasets() {
+  void testListDatasets() {
     Page<Dataset> datasets = bigquery.listDatasets("bigquery-public-data");
     Iterator<Dataset> iterator = datasets.iterateAll().iterator();
     Set<String> datasetNames = new HashSet<>();
@@ -1230,24 +1227,24 @@
   }
 
   @Test
-  public void testListDatasetsWithFilter() {
+  void testListDatasetsWithFilter() {
     String labelFilter = "labels.example-label1:example-value1";
     Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.labelFilter(labelFilter));
     int count = 0;
     for (Dataset dataset : datasets.getValues()) {
       assertTrue(
-          "failed to find label key in dataset", dataset.getLabels().containsKey("example-label1"));
+          dataset.getLabels().containsKey("example-label1"), "failed to find label key in dataset");
       assertEquals(
-          "failed to find label value in dataset",
           "example-value1",
-          dataset.getLabels().get("example-label1"));
+          dataset.getLabels().get("example-label1"),
+          "failed to find label value in dataset");
       count++;
     }
     assertTrue(count > 0);
   }
 
   @Test
-  public void testGetDataset() {
+  void testGetDataset() {
     Dataset dataset = bigquery.getDataset(DATASET);
     assertEquals(bigquery.getOptions().getProjectId(), dataset.getDatasetId().getProject());
     assertEquals(DATASET, dataset.getDatasetId().getDataset());
@@ -1261,7 +1258,7 @@ public void testGetDataset() {
   }
 
   @Test
-  public void testDatasetUpdateAccess() throws IOException {
+  void testDatasetUpdateAccess() throws IOException {
     Dataset dataset = bigquery.getDataset(DATASET);
     ServiceAccountCredentials credentials =
         (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault();
@@ -1276,7 +1273,7 @@ public void testDatasetUpdateAccess() throws IOException {
   }
 
   @Test
-  public void testGetDatasetWithSelectedFields() {
+  void testGetDatasetWithSelectedFields() {
     Dataset dataset =
         bigquery.getDataset(
             DATASET, DatasetOption.fields(DatasetField.CREATION_TIME, DatasetField.LABELS));
@@ -1298,7 +1295,7 @@ public void testGetDatasetWithSelectedFields() {
   }
 
   @Test
-  public void testGetDatasetWithAccessPolicyVersion() throws IOException {
+  void testGetDatasetWithAccessPolicyVersion() throws IOException {
     String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName();
     ServiceAccountCredentials credentials =
         (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault();
@@ -1342,7 +1339,7 @@ public void testGetDatasetWithAccessPolicyVersion() throws IOException {
   }
 
   @Test
-  public void testUpdateDataset() {
+  void testUpdateDataset() {
     Dataset dataset =
         bigquery.create(
             DatasetInfo.newBuilder(OTHER_DATASET)
@@ -1379,7 +1376,7 @@ public void testUpdateDataset() {
   }
 
   @Test
-  public void testUpdateDatasetWithSelectedFields() {
+  void testUpdateDatasetWithSelectedFields() {
     Dataset dataset =
         bigquery.create(
             DatasetInfo.newBuilder(OTHER_DATASET).setDescription("Some Description").build());
@@ -1407,7 +1404,7 @@ public void testUpdateDatasetWithSelectedFields() {
   }
 
   @Test
-  public void testUpdateDatasetWithAccessPolicyVersion() throws IOException {
+  void testUpdateDatasetWithAccessPolicyVersion() throws IOException {
     String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName();
     ServiceAccountCredentials credentials =
         (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault();
@@ -1460,12 
+1457,12 @@ public void testUpdateDatasetWithAccessPolicyVersion() throws IOException { } @Test - public void testGetNonExistingTable() { + void testGetNonExistingTable() { assertNull(bigquery.getTable(DATASET, "test_get_non_existing_table")); } @Test - public void testCreateTableWithRangePartitioning() { + void testCreateTableWithRangePartitioning() { String tableName = "test_create_table_rangepartitioning"; TableId tableId = TableId.of(DATASET, tableName); try { @@ -1490,7 +1487,7 @@ public void testCreateTableWithRangePartitioning() { /* TODO(prasmish): replicate this test case for executeSelect on the relevant part */ @Test - public void testJsonType() throws InterruptedException { + void testJsonType() throws InterruptedException { String tableName = "test_create_table_jsontype"; TableId tableId = TableId.of(DATASET, tableName); Schema schema = Schema.of(Field.of("jsonField", StandardSQLTypeName.JSON)); @@ -1586,9 +1583,9 @@ public void testJsonType() throws InterruptedException { .build(); BigQueryException exception = assertThrows( - "Querying with malformed JSON shouldn't work", BigQueryException.class, - () -> bigquery.query(dmlQueryJobConfiguration2)); + () -> bigquery.query(dmlQueryJobConfiguration2), + "Querying with malformed JSON shouldn't work"); BigQueryError error = exception.getError(); assertNotNull(error); assertEquals("invalidQuery", error.getReason()); @@ -1599,7 +1596,7 @@ public void testJsonType() throws InterruptedException { /* TODO(prasmish): replicate this test case for executeSelect on the relevant part */ @Test - public void testIntervalType() throws InterruptedException { + void testIntervalType() throws InterruptedException { String tableName = "test_create_table_intervaltype"; TableId tableId = TableId.of(DATASET, tableName); Schema schema = Schema.of(Field.of("intervalField", StandardSQLTypeName.INTERVAL)); @@ -1674,7 +1671,7 @@ public void testIntervalType() throws InterruptedException { } @Test - public void testRangeType() throws InterruptedException { + void testRangeType() throws InterruptedException { String tableName = "test_range_type_table"; TableId tableId = TableId.of(DATASET, tableName); @@ -1751,7 +1748,7 @@ public void testRangeType() throws InterruptedException { } @Test - public void testCreateTableWithConstraints() { + void testCreateTableWithConstraints() { String tableName = "test_create_table_with_constraints"; TableId tableId = TableId.of(DATASET, tableName); Field stringFieldWithConstraint = @@ -1795,7 +1792,7 @@ public void testCreateTableWithConstraints() { } @Test - public void testCreateDatasetWithSpecifiedStorageBillingModel() { + void testCreateDatasetWithSpecifiedStorageBillingModel() { String billingModelDataset = RemoteBigQueryHelper.generateDatasetName(); DatasetInfo info = DatasetInfo.newBuilder(billingModelDataset) @@ -1812,7 +1809,7 @@ public void testCreateDatasetWithSpecifiedStorageBillingModel() { } @Test - public void testCreateDatasetWithSpecificMaxTimeTravelHours() { + void testCreateDatasetWithSpecificMaxTimeTravelHours() { String timeTravelDataset = RemoteBigQueryHelper.generateDatasetName(); DatasetInfo info = DatasetInfo.newBuilder(timeTravelDataset) @@ -1829,7 +1826,7 @@ public void testCreateDatasetWithSpecificMaxTimeTravelHours() { } @Test - public void testCreateDatasetWithDefaultMaxTimeTravelHours() { + void testCreateDatasetWithDefaultMaxTimeTravelHours() { String timeTravelDataset = RemoteBigQueryHelper.generateDatasetName(); DatasetInfo info = DatasetInfo.newBuilder(timeTravelDataset) @@ -1846,7 +1843,7 
@@ public void testCreateDatasetWithDefaultMaxTimeTravelHours() { } @Test - public void testCreateDatasetWithDefaultCollation() { + void testCreateDatasetWithDefaultCollation() { String collationDataset = RemoteBigQueryHelper.generateDatasetName(); DatasetInfo info = DatasetInfo.newBuilder(collationDataset) @@ -1863,7 +1860,7 @@ public void testCreateDatasetWithDefaultCollation() { } @Test - public void testCreateDatasetWithAccessPolicyVersion() throws IOException { + void testCreateDatasetWithAccessPolicyVersion() throws IOException { String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); ServiceAccountCredentials credentials = (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); @@ -1899,8 +1896,8 @@ public void testCreateDatasetWithAccessPolicyVersion() throws IOException { RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); } - @Test(expected = BigQueryException.class) - public void testCreateDatasetWithInvalidAccessPolicyVersion() throws IOException { + @Test + void testCreateDatasetWithInvalidAccessPolicyVersion() throws IOException { String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); ServiceAccountCredentials credentials = (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); @@ -1920,14 +1917,13 @@ public void testCreateDatasetWithInvalidAccessPolicyVersion() throws IOException .setAcl(ImmutableList.of(acl)) .build(); DatasetOption datasetOption = DatasetOption.accessPolicyVersion(4); - Dataset dataset = bigquery.create(info, datasetOption); - assertNotNull(dataset); + assertThrows(BigQueryException.class, () -> bigquery.create(info, datasetOption)); RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); } @Test - public void testCreateTableWithDefaultCollation() { + void testCreateTableWithDefaultCollation() { String tableName = "test_create_table_with_default_collation"; TableId tableId = TableId.of(DATASET, tableName); Field stringFieldWithoutCollation = @@ -1965,7 +1961,7 @@ public void testCreateTableWithDefaultCollation() { } @Test - public void testCreateFieldWithDefaultCollation() { + void testCreateFieldWithDefaultCollation() { String tableName = "test_create_field_with_default_collation"; TableId tableId = TableId.of(DATASET, tableName); Field stringFieldWithCollation = @@ -2002,7 +1998,7 @@ public void testCreateFieldWithDefaultCollation() { } @Test - public void testCreateTableWithDefaultValueExpression() { + void testCreateTableWithDefaultValueExpression() { String tableName = "test_create_table_with_default_value_expression"; TableId tableId = TableId.of(DATASET, tableName); Field stringFieldWithDefaultValueExpression = @@ -2064,7 +2060,7 @@ public void testCreateTableWithDefaultValueExpression() { } @Test - public void testCreateAndUpdateTableWithPolicyTags() throws IOException { + void testCreateAndUpdateTableWithPolicyTags() throws IOException { // Set up policy tags in the datacatalog service try (PolicyTagManagerClient policyTagManagerClient = PolicyTagManagerClient.create()) { CreateTaxonomyRequest createTaxonomyRequest = @@ -2146,7 +2142,7 @@ public void testCreateAndUpdateTableWithPolicyTags() throws IOException { } @Test - public void testCreateAndGetTable() { + void testCreateAndGetTable() { String tableName = "test_create_and_get_table"; TableId tableId = TableId.of(DATASET, tableName); TimePartitioning partitioning = TimePartitioning.of(Type.DAY); @@ -2184,7 +2180,7 @@ public void testCreateAndGetTable() { } @Test - public void testCreateAndListTable() { + 
void testCreateAndListTable() { String tableName = "test_create_and_list_table"; TableId tableId = TableId.of(DATASET, tableName); TimePartitioning partitioning = TimePartitioning.of(Type.DAY); @@ -2219,7 +2215,7 @@ public void testCreateAndListTable() { } @Test - public void testCreateAndGetTableWithBasicTableMetadataView() { + void testCreateAndGetTableWithBasicTableMetadataView() { String tableName = "test_create_and_get_table_with_basic_metadata_view"; TableId tableId = TableId.of(DATASET, tableName); TimePartitioning partitioning = TimePartitioning.of(Type.DAY); @@ -2251,7 +2247,7 @@ public void testCreateAndGetTableWithBasicTableMetadataView() { } @Test - public void testCreateAndGetTableWithFullTableMetadataView() { + void testCreateAndGetTableWithFullTableMetadataView() { String tableName = "test_create_and_get_table_with_full_metadata_view"; TableId tableId = TableId.of(DATASET, tableName); TimePartitioning partitioning = TimePartitioning.of(Type.DAY); @@ -2282,7 +2278,7 @@ public void testCreateAndGetTableWithFullTableMetadataView() { } @Test - public void testCreateAndGetTableWithStorageStatsTableMetadataView() { + void testCreateAndGetTableWithStorageStatsTableMetadataView() { String tableName = "test_create_and_get_table_with_storage_stats_metadata_view"; TableId tableId = TableId.of(DATASET, tableName); TimePartitioning partitioning = TimePartitioning.of(Type.DAY); @@ -2314,7 +2310,7 @@ public void testCreateAndGetTableWithStorageStatsTableMetadataView() { } @Test - public void testCreateAndGetTableWithUnspecifiedTableMetadataView() { + void testCreateAndGetTableWithUnspecifiedTableMetadataView() { String tableName = "test_create_and_get_table_with_unspecified_metadata_view"; TableId tableId = TableId.of(DATASET, tableName); TimePartitioning partitioning = TimePartitioning.of(Type.DAY); @@ -2346,7 +2342,7 @@ public void testCreateAndGetTableWithUnspecifiedTableMetadataView() { } @Test - public void testCreateAndGetTableWithSelectedField() { + void testCreateAndGetTableWithSelectedField() { String tableName = "test_create_and_get_selected_fields_table"; TableId tableId = TableId.of(DATASET, tableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); @@ -2386,7 +2382,7 @@ public void testCreateAndGetTableWithSelectedField() { } @Test - public void testCreateExternalTable() throws InterruptedException { + void testCreateExternalTable() throws InterruptedException { String tableName = "test_create_external_table"; TableId tableId = TableId.of(DATASET, tableName); @@ -2444,7 +2440,7 @@ public void testCreateExternalTable() throws InterruptedException { } @Test - public void testSetPermExternalTableSchema() { + void testSetPermExternalTableSchema() { String tableName = "test_create_external_table_perm"; TableId tableId = TableId.of(DATASET, tableName); ExternalTableDefinition externalTableDefinition = @@ -2466,7 +2462,7 @@ public void testSetPermExternalTableSchema() { } @Test - public void testUpdatePermExternableTableWithAutodetectSchemaUpdatesSchema() { + void testUpdatePermExternableTableWithAutodetectSchemaUpdatesSchema() { String tableName = "test_create_external_table_perm_with_auto_detect"; TableId tableId = TableId.of(DATASET, tableName); Schema setSchema = Schema.of(TIMESTAMP_FIELD_SCHEMA, STRING_FIELD_SCHEMA); @@ -2501,7 +2497,7 @@ public void testUpdatePermExternableTableWithAutodetectSchemaUpdatesSchema() { } @Test - public void testCreateViewTable() throws InterruptedException { + void testCreateViewTable() throws 
InterruptedException {
     String tableName = "test_create_view_table";
     TableId tableId = TableId.of(DATASET, tableName);
     ViewDefinition viewDefinition =
@@ -2549,7 +2545,7 @@ public void testCreateViewTable() throws InterruptedException {
   }
 
   @Test
-  public void testCreateMaterializedViewTable() {
+  void testCreateMaterializedViewTable() {
     String tableName = "test_materialized_view_table";
     TableId tableId = TableId.of(DATASET, tableName);
     MaterializedViewDefinition viewDefinition =
@@ -2573,7 +2569,7 @@ public void testCreateMaterializedViewTable() {
   }
 
   @Test
-  public void testTableIAM() {
+  void testTableIAM() {
     String tableName = "test_iam_table";
     TableId tableId = TableId.of(DATASET, tableName);
     StandardTableDefinition tableDefinition =
@@ -2602,7 +2598,7 @@ public void testTableIAM() {
   }
 
   @Test
-  public void testListTables() {
+  void testListTables() {
     String tableName = "test_list_tables";
     StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA);
     TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition);
@@ -2621,7 +2617,7 @@ public void testListTables() {
   }
 
   @Test
-  public void testListTablesWithPartitioning() {
+  void testListTablesWithPartitioning() {
     String tableName = "test_list_tables_partitioning";
     TimePartitioning timePartitioning = TimePartitioning.of(Type.DAY, EXPIRATION_MS);
     StandardTableDefinition tableDefinition =
@@ -2654,7 +2650,7 @@ public void testListTablesWithPartitioning() {
   }
 
   @Test
-  public void testListTablesWithRangePartitioning() {
+  void testListTablesWithRangePartitioning() {
     String tableName = "test_list_tables_range_partitioning";
     StandardTableDefinition tableDefinition =
         StandardTableDefinition.newBuilder()
@@ -2684,7 +2680,7 @@ public void testListTablesWithRangePartitioning() {
   }
 
   @Test
-  public void testListPartitions() throws InterruptedException {
+  void testListPartitions() throws InterruptedException {
     String tableName = "test_table_partitions_" + UUID.randomUUID().toString().substring(0, 8);
     Date date = Date.fromJavaUtilDate(new java.util.Date());
     String partitionDate = date.toString().replaceAll("-", "");
@@ -2710,7 +2706,7 @@ public void testListPartitions() throws InterruptedException {
   }
 
   @Test
-  public void testUpdateTable() {
+  void testUpdateTable() {
     String tableName = "test_update_table";
     StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA);
     TableInfo tableInfo =
@@ -2740,7 +2736,7 @@ public void testUpdateTable() {
   }
 
   @Test
-  public void testUpdateTimePartitioning() {
+  void testUpdateTimePartitioning() {
     String tableName = "testUpdateTimePartitioning";
     TableId tableId = TableId.of(DATASET, tableName);
     StandardTableDefinition tableDefinition =
@@ -2782,8 +2778,8 @@ public void testUpdateTimePartitioning() {
     table.delete();
   }
 
   @Test
-  public void testUpdateTableWithSelectedFields() {
+  void testUpdateTableWithSelectedFields() {
     String tableName = "test_update_with_selected_fields_table";
     StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA);
     TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition);
@@ -2814,16 +2810,16 @@ public void testUpdateTableWithSelectedFields() {
   }
 
   @Test
-  public void testUpdateNonExistingTable() {
+  void testUpdateNonExistingTable() {
     TableInfo tableInfo =
         TableInfo.of(
             TableId.of(DATASET, "test_update_non_existing_table"),
             StandardTableDefinition.of(SIMPLE_SCHEMA));
     BigQueryException exception =
         assertThrows(
-            "BigQueryException was expected",
             BigQueryException.class,
-            () -> 
bigquery.update(tableInfo)); + () -> bigquery.update(tableInfo), + "BigQueryException was expected"); BigQueryError error = exception.getError(); assertNotNull(error); assertEquals("notFound", error.getReason()); @@ -2831,12 +2826,12 @@ public void testUpdateNonExistingTable() { } @Test - public void testDeleteNonExistingTable() { + void testDeleteNonExistingTable() { assertFalse(bigquery.delete("test_delete_non_existing_table")); } @Test - public void testDeleteJob() { + void testDeleteJob() { String query = "SELECT 17 as foo"; QueryJobConfiguration config = QueryJobConfiguration.of(query); String jobName = "jobId_" + UUID.randomUUID().toString(); @@ -2850,7 +2845,7 @@ public void testDeleteJob() { } @Test - public void testInsertAll() throws IOException { + void testInsertAll() throws IOException { String tableName = "test_insert_all_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); @@ -2909,7 +2904,7 @@ public void testInsertAll() throws IOException { } @Test - public void testInsertAllWithSuffix() throws InterruptedException { + void testInsertAllWithSuffix() throws InterruptedException { String tableName = "test_insert_all_with_suffix_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); @@ -2977,7 +2972,7 @@ public void testInsertAllWithSuffix() throws InterruptedException { } @Test - public void testInsertAllWithErrors() { + void testInsertAllWithErrors() { String tableName = "test_insert_all_with_errors_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); @@ -3047,7 +3042,7 @@ public void testInsertAllWithErrors() { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testListAllTableData() { + void testListAllTableData() { Page rows = bigquery.listTableData(TABLE_ID); int rowCount = 0; for (FieldValueList row : rows.getValues()) { @@ -3092,7 +3087,7 @@ public void testListAllTableData() { } @Test - public void testListPageWithStartIndex() { + void testListPageWithStartIndex() { String tableName = "midyear_population_agespecific"; TableId tableId = TableId.of(PUBLIC_PROJECT, PUBLIC_DATASET, tableName); Table table = bigquery.getTable(tableId); @@ -3111,7 +3106,7 @@ public void testListPageWithStartIndex() { } @Test - public void testModelLifecycle() throws InterruptedException { + void testModelLifecycle() throws InterruptedException { String modelName = RemoteBigQueryHelper.generateModelName(); @@ -3170,7 +3165,7 @@ public void testModelLifecycle() throws InterruptedException { } @Test - public void testEmptyListModels() { + void testEmptyListModels() { String datasetId = "test_empty_dataset_list_models_" + RANDOM_ID; assertNotNull(bigquery.create(DatasetInfo.of(datasetId))); Page models = bigquery.listModels(datasetId, BigQuery.ModelListOption.pageSize(100)); @@ -3181,7 +3176,7 @@ public void testEmptyListModels() { } @Test - public void testEmptyListRoutines() { + void testEmptyListRoutines() { String datasetId = "test_empty_dataset_list_routines_" + RANDOM_ID; assertNotNull(bigquery.create(DatasetInfo.of(datasetId))); Page routines = @@ -3193,7 +3188,7 @@ public void testEmptyListRoutines() { } @Test - public void testRoutineLifecycle() throws InterruptedException { + void 
testRoutineLifecycle() throws InterruptedException { String routineName = RemoteBigQueryHelper.generateRoutineName(); // Create a routine using SQL. String sql = @@ -3236,7 +3231,7 @@ public void testRoutineLifecycle() throws InterruptedException { } @Test - public void testRoutineAPICreation() { + void testRoutineAPICreation() { String routineName = RemoteBigQueryHelper.generateRoutineName(); RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); RoutineInfo routineInfo = @@ -3258,7 +3253,7 @@ public void testRoutineAPICreation() { } @Test - public void testRoutineAPICreationJavascriptUDF() { + void testRoutineAPICreationJavascriptUDF() { String routineName = RemoteBigQueryHelper.generateRoutineName(); RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); RoutineInfo routineInfo = @@ -3286,7 +3281,7 @@ public void testRoutineAPICreationJavascriptUDF() { } @Test - public void testRoutineAPICreationTVF() { + void testRoutineAPICreationTVF() { String routineName = RemoteBigQueryHelper.generateRoutineName(); RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); List columns = @@ -3314,7 +3309,7 @@ public void testRoutineAPICreationTVF() { } @Test - public void testRoutineDataGovernanceType() { + void testRoutineDataGovernanceType() { String routineName = RemoteBigQueryHelper.generateRoutineName(); RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); RoutineInfo routineInfo = @@ -3341,7 +3336,7 @@ public void testRoutineDataGovernanceType() { } @Test - public void testAuthorizeRoutine() { + void testAuthorizeRoutine() { String routineName = RemoteBigQueryHelper.generateRoutineName(); RoutineId routineId = RoutineId.of(PROJECT_ID, ROUTINE_DATASET, routineName); RoutineInfo routineInfo = @@ -3367,7 +3362,7 @@ public void testAuthorizeRoutine() { } @Test - public void testAuthorizeDataset() { + void testAuthorizeDataset() { String datasetName = RemoteBigQueryHelper.generateDatasetName(); DatasetId datasetId = DatasetId.of(PROJECT_ID, datasetName); List targetTypes = ImmutableList.of("VIEWS"); @@ -3412,14 +3407,14 @@ public void testAuthorizeDataset() { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testSingleStatementsQueryException() throws InterruptedException { + void testSingleStatementsQueryException() throws InterruptedException { String invalidQuery = String.format("INSERT %s.%s VALUES('3', 10);", DATASET, TABLE_ID.getTable()); BigQueryException exception = assertThrows( - "BigQueryException was expected", BigQueryException.class, - () -> bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor()); + () -> bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(), + "BigQueryException was expected"); assertEquals("invalidQuery", exception.getReason()); assertNotNull(exception.getMessage()); BigQueryError error = exception.getError(); @@ -3429,16 +3424,16 @@ public void testSingleStatementsQueryException() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testMultipleStatementsQueryException() throws InterruptedException { + void testMultipleStatementsQueryException() throws InterruptedException { String invalidQuery = String.format( "INSERT %s.%s VALUES('3', 10); DELETE %s.%s where c2=3;", DATASET, TABLE_ID.getTable(), DATASET, TABLE_ID.getTable()); BigQueryException exception = assertThrows( - "BigQueryException was expected", BigQueryException.class, - () -> 
bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor()); + () -> bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(), + "BigQueryException was expected"); assertEquals("invalidQuery", exception.getReason()); assertNotNull(exception.getMessage()); BigQueryError error = exception.getError(); @@ -3447,7 +3442,7 @@ public void testMultipleStatementsQueryException() throws InterruptedException { } @Test - public void testTimestamp() throws InterruptedException { + void testTimestamp() throws InterruptedException { String query = "SELECT TIMESTAMP '2022-01-24T23:54:25.095574Z'"; String timestampStringValueExpected = "2022-01-24T23:54:25.095574Z"; @@ -3465,7 +3460,7 @@ public void testTimestamp() throws InterruptedException { } @Test - public void testLosslessTimestamp() throws InterruptedException { + void testLosslessTimestamp() throws InterruptedException { String query = "SELECT TIMESTAMP '2022-01-24T23:54:25.095574Z'"; long expectedTimestamp = 1643068465095574L; @@ -3505,7 +3500,7 @@ public void testLosslessTimestamp() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testQuery() throws InterruptedException { + void testQuery() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); @@ -3538,7 +3533,7 @@ public void testQuery() throws InterruptedException { } @Test - public void testQueryStatistics() throws InterruptedException { + void testQueryStatistics() throws InterruptedException { // Use CURRENT_TIMESTAMP to avoid potential caching. String query = "SELECT CURRENT_TIMESTAMP() AS ts"; QueryJobConfiguration config = @@ -3555,7 +3550,7 @@ public void testQueryStatistics() throws InterruptedException { } @Test - public void testExecuteSelectDefaultConnectionSettings() throws SQLException { + void testExecuteSelectDefaultConnectionSettings() throws SQLException { // Use the default connection settings Connection connection = bigquery.createConnection(); String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; @@ -3565,7 +3560,7 @@ public void testExecuteSelectDefaultConnectionSettings() throws SQLException { } @Test - public void testExecuteSelectWithReadApi() throws SQLException { + void testExecuteSelectWithReadApi() throws SQLException { final int rowLimit = 5000; final String QUERY = "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s"; @@ -3595,7 +3590,7 @@ public void testExecuteSelectWithReadApi() throws SQLException { } @Test - public void testExecuteSelectWithFastQueryReadApi() throws SQLException { + void testExecuteSelectWithFastQueryReadApi() throws SQLException { final int rowLimit = 5000; final String QUERY = "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s"; @@ -3622,7 +3617,7 @@ public void testExecuteSelectWithFastQueryReadApi() throws SQLException { } @Test - public void testExecuteSelectReadApiEmptyResultSet() throws SQLException { + void testExecuteSelectReadApiEmptyResultSet() throws SQLException { ConnectionSettings connectionSettings = ConnectionSettings.newBuilder() .setJobTimeoutMs( @@ -3640,7 +3635,7 @@ public void testExecuteSelectReadApiEmptyResultSet() throws SQLException { } @Test - public void testExecuteSelectWithCredentials() throws 
SQLException { + void testExecuteSelectWithCredentials() throws SQLException { // This test validate that executeSelect uses the same credential provided by the BigQuery // object used to create the Connection client. // This is done the following scenarios: @@ -3684,7 +3679,7 @@ public void testExecuteSelectWithCredentials() throws SQLException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testQueryTimeStamp() throws InterruptedException { + void testQueryTimeStamp() throws InterruptedException { String query = "SELECT TIMESTAMP '2022-01-24T23:54:25.095574Z'"; Instant beforeQueryInstant = Instant.parse("2022-01-24T23:54:25.095574Z"); long microsBeforeQuery = @@ -3721,7 +3716,7 @@ public void testQueryTimeStamp() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testQueryCaseInsensitiveSchemaFieldByGetName() throws InterruptedException { + void testQueryCaseInsensitiveSchemaFieldByGetName() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); @@ -3751,7 +3746,7 @@ public void testQueryCaseInsensitiveSchemaFieldByGetName() throws InterruptedExc /* TODO(prasmish): replicate bigquery.query part of the test case for executeSelect - modify this test case */ @Test - public void testQueryExternalHivePartitioningOptionAutoLayout() throws InterruptedException { + void testQueryExternalHivePartitioningOptionAutoLayout() throws InterruptedException { String tableName = "test_queryexternalhivepartition_autolayout_table"; String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/hive-partitioning-samples/autolayout/*"; @@ -3787,7 +3782,7 @@ public void testQueryExternalHivePartitioningOptionAutoLayout() throws Interrupt /* TODO(prasmish): replicate bigquery.query part of the test case for executeSelect - modify this test case */ @Test - public void testQueryExternalHivePartitioningOptionCustomLayout() throws InterruptedException { + void testQueryExternalHivePartitioningOptionCustomLayout() throws InterruptedException { String tableName = "test_queryexternalhivepartition_customlayout_table"; String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/hive-partitioning-samples/customlayout/*"; @@ -3823,7 +3818,7 @@ public void testQueryExternalHivePartitioningOptionCustomLayout() throws Interru } @Test - public void testConnectionImplDryRun() throws SQLException { + void testConnectionImplDryRun() throws SQLException { String query = String.format( "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from %s where StringField = ? 
order by TimestampField", @@ -3848,7 +3843,7 @@ public void testConnectionImplDryRun() throws SQLException { } @Test - public void testConnectionImplDryRunNoQueryParameters() throws SQLException { + void testConnectionImplDryRunNoQueryParameters() throws SQLException { String query = String.format( "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, " @@ -3878,7 +3873,7 @@ public void testConnectionImplDryRunNoQueryParameters() throws SQLException { @Test // This test case test the order of the records, making sure that the result is not jumbled up due // to the multithreaded BigQueryResult implementation - public void testBQResultSetMultiThreadedOrder() throws SQLException { + void testBQResultSetMultiThreadedOrder() throws SQLException { String query = "SELECT date FROM " + TABLE_ID_LARGE.getTable() @@ -3905,7 +3900,7 @@ public void testBQResultSetMultiThreadedOrder() throws SQLException { } @Test - public void testBQResultSetPaginationSlowQuery() throws SQLException { + void testBQResultSetPaginationSlowQuery() throws SQLException { String query = "SELECT date, county, state_name, confirmed_cases, deaths FROM " + TABLE_ID_LARGE.getTable() @@ -3934,7 +3929,7 @@ public void testBQResultSetPaginationSlowQuery() throws SQLException { } @Test - public void testExecuteSelectSinglePageTableRow() throws SQLException { + void testExecuteSelectSinglePageTableRow() throws SQLException { String query = "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " @@ -3998,7 +3993,7 @@ public void testExecuteSelectSinglePageTableRow() throws SQLException { } @Test - public void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException { + void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException { String query = "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " @@ -4063,7 +4058,7 @@ public void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException } @Test - public void testConnectionClose() throws SQLException { + void testConnectionClose() throws SQLException { String query = "SELECT date, county, state_name, confirmed_cases, deaths FROM " + TABLE_ID_LARGE.getTable() @@ -4089,7 +4084,7 @@ public void testConnectionClose() throws SQLException { } @Test - public void testBQResultSetPagination() throws SQLException { + void testBQResultSetPagination() throws SQLException { String query = "SELECT date, county, state_name, confirmed_cases, deaths FROM " + TABLE_ID_LARGE.getTable() @@ -4115,7 +4110,7 @@ public void testBQResultSetPagination() throws SQLException { } @Test - public void testReadAPIIterationAndOrder() + void testReadAPIIterationAndOrder() throws SQLException { // use read API to read 300K records and check the order String query = "SELECT date, county, state_name, confirmed_cases, deaths FROM " @@ -4152,7 +4147,7 @@ public void testReadAPIIterationAndOrder() } @Test - public void testReadAPIIterationAndOrderAsync() + void testReadAPIIterationAndOrderAsync() throws SQLException, ExecutionException, InterruptedException { // use read API to read 300K records and check the order @@ -4200,7 +4195,7 @@ public void 
testReadAPIIterationAndOrderAsync() // TODO(prasmish): Remove this test case if it turns out to be flaky, as expecting the process to // be uncompleted in 1000ms is nondeterministic! Though very likely it won't be complete in the // specified amount of time - public void testExecuteSelectAsyncCancel() + void testExecuteSelectAsyncCancel() throws SQLException, ExecutionException, InterruptedException { // use read API to read 300K records and check the order @@ -4242,7 +4237,7 @@ public void testExecuteSelectAsyncCancel() // TODO(prasmish): Remove this test case if it turns out to be flaky, as expecting the process to // be uncompleted in 1000ms is nondeterministic! Though very likely it won't be complete in the // specified amount of time - public void testExecuteSelectAsyncTimeout() + void testExecuteSelectAsyncTimeout() throws SQLException, ExecutionException, InterruptedException { // use read API to read 300K records and check the order @@ -4272,7 +4267,7 @@ public void testExecuteSelectAsyncTimeout() } @Test - public void testExecuteSelectWithNamedQueryParametersAsync() + void testExecuteSelectWithNamedQueryParametersAsync() throws BigQuerySQLException, ExecutionException, InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " @@ -4302,14 +4297,14 @@ public void testExecuteSelectWithNamedQueryParametersAsync() // Ref: https://github.com/googleapis/java-bigquery/issues/2070. Adding a pre-submit test to see // if bigquery.createConnection() returns null @Test - public void testCreateDefaultConnection() throws BigQuerySQLException { + void testCreateDefaultConnection() throws BigQuerySQLException { Connection connection = bigquery.createConnection(); - assertNotNull("bigquery.createConnection() returned null", connection); + assertNotNull(connection, "bigquery.createConnection() returned null"); assertTrue(connection.close()); } @Test - public void testReadAPIConnectionMultiClose() + void testReadAPIConnectionMultiClose() throws SQLException { // use read API to read 300K records, then closes the connection. 
This test // repeats it multiple times and assets if the connection was closed @@ -4345,7 +4340,7 @@ public void testReadAPIConnectionMultiClose() } @Test - public void testExecuteSelectSinglePageTableRowColInd() throws SQLException { + void testExecuteSelectSinglePageTableRowColInd() throws SQLException { String query = "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " @@ -4424,7 +4419,7 @@ public void testExecuteSelectSinglePageTableRowColInd() throws SQLException { } @Test - public void testExecuteSelectStruct() throws SQLException { + void testExecuteSelectStruct() throws SQLException { String query = "select (STRUCT(\"Vancouver\" as city, 5 as years)) as address"; ConnectionSettings connectionSettings = ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); @@ -4457,7 +4452,7 @@ public void testExecuteSelectStruct() throws SQLException { } @Test - public void testExecuteSelectStructSubField() throws SQLException { + void testExecuteSelectStructSubField() throws SQLException { String query = "select address.city from (select (STRUCT(\"Vancouver\" as city, 5 as years)) as address)"; ConnectionSettings connectionSettings = @@ -4483,7 +4478,7 @@ public void testExecuteSelectStructSubField() throws SQLException { } @Test - public void testExecuteSelectArray() throws SQLException { + void testExecuteSelectArray() throws SQLException { String query = "SELECT [1,2,3]"; ConnectionSettings connectionSettings = ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); @@ -4506,7 +4501,7 @@ public void testExecuteSelectArray() throws SQLException { } @Test - public void testExecuteSelectArrayOfStruct() throws SQLException { + void testExecuteSelectArrayOfStruct() throws SQLException { String query = "SELECT [STRUCT(\"Vancouver\" as city, 5 as years), STRUCT(\"Boston\" as city, 10 as years)]"; ConnectionSettings connectionSettings = @@ -4546,7 +4541,7 @@ public void testExecuteSelectArrayOfStruct() throws SQLException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testFastQueryMultipleRuns() throws InterruptedException { + void testFastQueryMultipleRuns() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); QueryJobConfiguration config = @@ -4583,7 +4578,7 @@ public void testFastQueryMultipleRuns() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testFastQuerySinglePageDuplicateRequestIds() throws InterruptedException { + void testFastQuerySinglePageDuplicateRequestIds() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); QueryJobConfiguration config = @@ -4616,7 +4611,7 @@ public void testFastQuerySinglePageDuplicateRequestIds() throws InterruptedExcep /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testFastSQLQuery() throws InterruptedException { + void testFastSQLQuery() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); QueryJobConfiguration config = @@ -4646,7 +4641,7 @@ public void testFastSQLQuery() throws InterruptedException { } @Test - 
public void testProjectIDFastSQLQueryWithJobId() throws InterruptedException { + void testProjectIDFastSQLQueryWithJobId() throws InterruptedException { String random_project_id = "RANDOM_PROJECT_" + UUID.randomUUID().toString().replace('-', '_'); System.out.println(random_project_id); String query = @@ -4667,7 +4662,7 @@ public void testProjectIDFastSQLQueryWithJobId() throws InterruptedException { } @Test - public void testLocationFastSQLQueryWithJobId() throws InterruptedException { + void testLocationFastSQLQueryWithJobId() throws InterruptedException { DatasetInfo infoUK = DatasetInfo.newBuilder(UK_DATASET) .setDescription(DESCRIPTION) @@ -4728,7 +4723,7 @@ public void testLocationFastSQLQueryWithJobId() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testFastSQLQueryMultiPage() throws InterruptedException { + void testFastSQLQueryMultiPage() throws InterruptedException { String query = "SELECT date, county, state_name, county_fips_code, confirmed_cases, deaths FROM " + TABLE_ID_LARGE.getTable(); @@ -4761,7 +4756,7 @@ public void testFastSQLQueryMultiPage() throws InterruptedException { } @Test - public void testFastDMLQuery() throws InterruptedException { + void testFastDMLQuery() throws InterruptedException { String tableName = TABLE_ID_FASTQUERY.getTable(); String dmlQuery = String.format("UPDATE %s.%s SET StringField = 'hello' WHERE TRUE", DATASET, tableName); @@ -4777,7 +4772,7 @@ public void testFastDMLQuery() throws InterruptedException { } @Test - public void testFastDDLQuery() throws InterruptedException { + void testFastDDLQuery() throws InterruptedException { String tableName = "test_table_fast_query_ddl"; String tableNameFastQuery = TABLE_ID_DDL.getTable(); String ddlQuery = @@ -4816,7 +4811,7 @@ public void testFastDDLQuery() throws InterruptedException { } @Test - public void testFastQuerySlowDDL() throws InterruptedException { + void testFastQuerySlowDDL() throws InterruptedException { String tableName = "test_table_fast_query_ddl_slow_" + UUID.randomUUID().toString().substring(0, 8); // This query take more than 10s to run and should fall back on the old query path @@ -4849,7 +4844,7 @@ public void testFastQuerySlowDDL() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testFastQueryHTTPException() throws InterruptedException { + void testFastQueryHTTPException() throws InterruptedException { String queryInvalid = "CREATE OR REPLACE SELECT * FROM UPDATE TABLE SET " + TABLE_ID_FASTQUERY.getTable(); QueryJobConfiguration configInvalidQuery = @@ -4858,9 +4853,9 @@ public void testFastQueryHTTPException() throws InterruptedException { .build(); BigQueryException exception = assertThrows( - "BigQueryException was expected", BigQueryException.class, - () -> bigquery.query(configInvalidQuery)); + () -> bigquery.query(configInvalidQuery), + "BigQueryException was expected"); BigQueryError error = exception.getError(); assertNotNull(error.getMessage()); assertEquals("invalidQuery", error.getReason()); @@ -4874,16 +4869,16 @@ public void testFastQueryHTTPException() throws InterruptedException { BigQueryException exception1 = assertThrows( - "BigQueryException was expected", BigQueryException.class, - () -> bigquery.query(configMissingTable)); + () -> bigquery.query(configMissingTable), + "BigQueryException was expected"); BigQueryError error1 = exception1.getError(); assertNotNull(error1.getMessage()); assertEquals("notFound", 
error1.getReason()); } @Test - public void testQuerySessionSupport() throws InterruptedException { + void testQuerySessionSupport() throws InterruptedException { String query = "CREATE TEMPORARY TABLE temptable AS SELECT 17 as foo"; QueryJobConfiguration queryJobConfiguration = QueryJobConfiguration.newBuilder(query) @@ -4916,7 +4911,7 @@ public void testQuerySessionSupport() throws InterruptedException { } @Test - public void testLoadSessionSupportWriteChannelConfiguration() throws InterruptedException { + void testLoadSessionSupportWriteChannelConfiguration() throws InterruptedException { TableId sessionTableId = TableId.of("_SESSION", "test_temp_destination_table_from_file"); WriteChannelConfiguration configuration = @@ -4990,7 +4985,7 @@ public void testLoadSessionSupportWriteChannelConfiguration() throws Interrupted } @Test - public void testLoadSessionSupport() throws InterruptedException { + void testLoadSessionSupport() throws InterruptedException { // Start the session TableId sessionTableId = TableId.of("_SESSION", "test_temp_destination_table"); LoadJobConfiguration configuration = @@ -5053,7 +5048,7 @@ public void testLoadSessionSupport() throws InterruptedException { // } @Test - public void testExecuteSelectSessionSupport() throws BigQuerySQLException { + void testExecuteSelectSessionSupport() throws BigQuerySQLException { String query = "SELECT 17 as foo"; ConnectionSettings connectionSettings = ConnectionSettings.newBuilder() @@ -5067,7 +5062,7 @@ public void testExecuteSelectSessionSupport() throws BigQuerySQLException { } @Test - public void testDmlStatistics() throws InterruptedException { + void testDmlStatistics() throws InterruptedException { String tableName = TABLE_ID_FASTQUERY.getTable(); // Run a DML statement to UPDATE 2 rows of data String dmlQuery = @@ -5089,7 +5084,7 @@ public void testDmlStatistics() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testTransactionInfo() throws InterruptedException { + void testTransactionInfo() throws InterruptedException { String tableName = TABLE_ID_FASTQUERY.getTable(); String transaction = String.format( @@ -5111,7 +5106,7 @@ public void testTransactionInfo() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testScriptStatistics() throws InterruptedException { + void testScriptStatistics() throws InterruptedException { String script = "-- Declare a variable to hold names as an array.\n" + "DECLARE top_names ARRAY;\n" @@ -5164,7 +5159,7 @@ public void testScriptStatistics() throws InterruptedException { } @Test - public void testQueryParameterModeWithDryRun() { + void testQueryParameterModeWithDryRun() { String query = "SELECT TimestampField, StringField, BooleanField, BigNumericField, BigNumericField1, BigNumericField2, BigNumericField3, BigNumericField4 FROM " + TABLE_ID.getTable() @@ -5191,7 +5186,7 @@ public void testQueryParameterModeWithDryRun() { } @Test - public void testPositionalQueryParameters() throws InterruptedException { + void testPositionalQueryParameters() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField, BigNumericField, BigNumericField1, BigNumericField2, BigNumericField3, BigNumericField4 FROM " + TABLE_ID.getTable() @@ -5268,7 +5263,7 @@ public void testPositionalQueryParameters() throws InterruptedException { /* TODO(prasmish): expand below test case with all the fields shown in the above test case */ @Test - 
public void testExecuteSelectWithPositionalQueryParameters() throws BigQuerySQLException { + void testExecuteSelectWithPositionalQueryParameters() throws BigQuerySQLException { String query = "SELECT TimestampField, StringField FROM " + TABLE_ID.getTable() @@ -5288,7 +5283,7 @@ public void testExecuteSelectWithPositionalQueryParameters() throws BigQuerySQLE } @Test - public void testNamedQueryParameters() throws InterruptedException { + void testNamedQueryParameters() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable() @@ -5311,7 +5306,7 @@ public void testNamedQueryParameters() throws InterruptedException { } @Test - public void testExecuteSelectWithNamedQueryParameters() throws BigQuerySQLException { + void testExecuteSelectWithNamedQueryParameters() throws BigQuerySQLException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable() @@ -5335,7 +5330,7 @@ public void testExecuteSelectWithNamedQueryParameters() throws BigQuerySQLExcept /* TODO(prasmish): replicate relevant parts of the test case for executeSelect */ @Test - public void testStructNamedQueryParameters() throws InterruptedException { + void testStructNamedQueryParameters() throws InterruptedException { QueryParameterValue booleanValue = QueryParameterValue.bool(true); QueryParameterValue stringValue = QueryParameterValue.string("test-stringField"); QueryParameterValue integerValue = QueryParameterValue.int64(10); @@ -5362,7 +5357,7 @@ public void testStructNamedQueryParameters() throws InterruptedException { } @Test - public void testRepeatedRecordNamedQueryParameters() throws InterruptedException { + void testRepeatedRecordNamedQueryParameters() throws InterruptedException { String[] stringValues = new String[] {"test-stringField", "test-stringField2"}; List tuples = new ArrayList<>(); for (int i = 0; i < 2; i++) { @@ -5403,7 +5398,7 @@ public void testRepeatedRecordNamedQueryParameters() throws InterruptedException } @Test - public void testUnnestRepeatedRecordNamedQueryParameter() throws InterruptedException { + void testUnnestRepeatedRecordNamedQueryParameter() throws InterruptedException { Boolean[] boolValues = new Boolean[] {true, false}; List tuples = new ArrayList<>(); for (int i = 0; i < 2; i++) { @@ -5442,7 +5437,7 @@ public void testUnnestRepeatedRecordNamedQueryParameter() throws InterruptedExce } @Test - public void testUnnestRepeatedRecordNamedQueryParameterFromDataset() throws InterruptedException { + void testUnnestRepeatedRecordNamedQueryParameterFromDataset() throws InterruptedException { TableId tableId = TableId.of(DATASET, "test_repeated_record_table"); setUpRepeatedRecordTable(tableId); @@ -5548,7 +5543,7 @@ private void setUpRepeatedRecordTable(TableId tableId) { } @Test - public void testEmptyRepeatedRecordNamedQueryParameters() throws InterruptedException { + void testEmptyRepeatedRecordNamedQueryParameters() throws InterruptedException { QueryParameterValue[] tuples = {}; QueryParameterValue repeatedRecord = @@ -5563,13 +5558,13 @@ public void testEmptyRepeatedRecordNamedQueryParameters() throws InterruptedExce .build(); assertThrows( - "an empty array of struct query parameter shouldn't work with 'IN UNNEST'", BigQueryException.class, - () -> bigquery.query(config)); + () -> bigquery.query(config), + "an empty array of struct query parameter shouldn't work with 'IN UNNEST'"); } @Test - public void testStructQuery() throws InterruptedException { + void testStructQuery() throws 
InterruptedException { // query into a table String query = String.format("SELECT RecordField FROM %s.%s", DATASET, TABLE_ID.getTable()); QueryJobConfiguration config = @@ -5597,7 +5592,7 @@ private static void assertsFieldValue(FieldValue record) { /* TODO(prasmish): replicate relevant parts of the test case for executeSelect */ @Test - public void testNestedStructNamedQueryParameters() throws InterruptedException { + void testNestedStructNamedQueryParameters() throws InterruptedException { QueryParameterValue booleanValue = QueryParameterValue.bool(true); QueryParameterValue stringValue = QueryParameterValue.string("test-stringField"); QueryParameterValue integerValue = QueryParameterValue.int64(10); @@ -5639,7 +5634,7 @@ public void testNestedStructNamedQueryParameters() throws InterruptedException { /* TODO(prasmish): replicate relevant parts of the test case for executeSelect */ @Test - public void testBytesParameter() throws Exception { + void testBytesParameter() throws Exception { String query = "SELECT BYTE_LENGTH(@p) AS length"; QueryParameterValue bytesParameter = QueryParameterValue.bytes(new byte[] {1, 3}); QueryJobConfiguration config = @@ -5660,7 +5655,7 @@ public void testBytesParameter() throws Exception { } @Test - public void testGeographyParameter() throws Exception { + void testGeographyParameter() throws Exception { // Issues a simple ST_DISTANCE using two geopoints, one being a named geography parameter. String query = "SELECT ST_DISTANCE(ST_GEOGFROMTEXT(\"POINT(-122.335503 47.625536)\"), @geo) < 3000 as within3k"; @@ -5683,7 +5678,7 @@ public void testGeographyParameter() throws Exception { } @Test - public void testListJobs() { + void testListJobs() { Page jobs = bigquery.listJobs(); for (Job job : jobs.getValues()) { assertNotNull(job.getJobId()); @@ -5695,7 +5690,7 @@ public void testListJobs() { } @Test - public void testListJobsWithSelectedFields() { + void testListJobsWithSelectedFields() { Page jobs = bigquery.listJobs(JobListOption.fields(JobField.USER_EMAIL)); for (Job job : jobs.getValues()) { assertNotNull(job.getJobId()); @@ -5707,7 +5702,7 @@ public void testListJobsWithSelectedFields() { } @Test - public void testListJobsWithCreationBounding() { + void testListJobsWithCreationBounding() { long currentMillis = currentTimeMillis(); long lowerBound = currentMillis - 3600 * 1000; long upperBound = currentMillis; @@ -5723,14 +5718,14 @@ public void testListJobsWithCreationBounding() { foundMax = Math.max(job.getStatistics().getCreationTime(), foundMax); } assertTrue( - "Found min job time " + foundMin + " earlier than " + lowerBound, foundMin >= lowerBound); + foundMin >= lowerBound, "Found min job time " + foundMin + " earlier than " + lowerBound); assertTrue( - "Found max job time " + foundMax + " later than " + upperBound, foundMax <= upperBound); - assertTrue("no jobs listed", jobCount > 0); + foundMax <= upperBound, "Found max job time " + foundMax + " later than " + upperBound); + assertTrue(jobCount > 0, "no jobs listed"); } @Test - public void testCreateAndGetJob() throws InterruptedException, TimeoutException { + void testCreateAndGetJob() throws InterruptedException, TimeoutException { String sourceTableName = "test_create_and_get_job_source_table"; String destinationTableName = "test_create_and_get_job_destination_table"; TableId sourceTable = TableId.of(DATASET, sourceTableName); @@ -5769,8 +5764,7 @@ public void testCreateAndGetJob() throws InterruptedException, TimeoutException } @Test - public void testCreateJobAndWaitForWithRetryOptions() - 
throws InterruptedException, TimeoutException { + void testCreateJobAndWaitForWithRetryOptions() throws InterruptedException, TimeoutException { // Note: This only tests the non failure/retry case. For retry cases, see unit tests with mocked // RPC calls. QueryJobConfiguration config = @@ -5789,8 +5783,7 @@ public void testCreateJobAndWaitForWithRetryOptions() } @Test - public void testCreateAndGetJobWithSelectedFields() - throws InterruptedException, TimeoutException { + void testCreateAndGetJobWithSelectedFields() throws InterruptedException, TimeoutException { String sourceTableName = "test_create_and_get_job_with_selected_fields_source_table"; String destinationTableName = "test_create_and_get_job_with_selected_fields_destination_table"; TableId sourceTable = TableId.of(DATASET, sourceTableName); @@ -5838,7 +5831,7 @@ public void testCreateAndGetJobWithSelectedFields() } @Test - public void testCopyJob() throws InterruptedException, TimeoutException { + void testCopyJob() throws InterruptedException, TimeoutException { String sourceTableName = "test_copy_job_source_table"; String destinationTableName = "test_copy_job_destination_table"; TableId sourceTable = TableId.of(DATASET, sourceTableName); @@ -5870,7 +5863,7 @@ public void testCopyJob() throws InterruptedException, TimeoutException { } @Test - public void testCopyJobStatistics() throws InterruptedException, TimeoutException { + void testCopyJobStatistics() throws InterruptedException, TimeoutException { String sourceTableName = "test_copy_job_statistics_source_table"; String destinationTableName = "test_copy_job_statistics_destination_table"; @@ -5901,7 +5894,7 @@ public void testCopyJobStatistics() throws InterruptedException, TimeoutExceptio } @Test - public void testSnapshotTableCopyJob() throws InterruptedException { + void testSnapshotTableCopyJob() throws InterruptedException { String sourceTableName = "test_copy_job_base_table"; String ddlTableName = TABLE_ID_DDL.getTable(); // this creates a snapshot table at specified snapshotTime @@ -5979,7 +5972,7 @@ public void testSnapshotTableCopyJob() throws InterruptedException { } @Test - public void testCopyJobWithLabelsAndExpTime() throws InterruptedException { + void testCopyJobWithLabelsAndExpTime() throws InterruptedException { String destExpiryTime = "2099-12-31T23:59:59.999999999Z"; String sourceTableName = "test_copy_job_source_table_label" + UUID.randomUUID().toString().substring(0, 8); @@ -6058,7 +6051,7 @@ public void testQueryJob() throws InterruptedException, TimeoutException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testQueryJobWithConnectionProperties() throws InterruptedException { + void testQueryJobWithConnectionProperties() throws InterruptedException { String tableName = "test_query_job_table_connection_properties"; String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, tableName); @@ -6078,7 +6071,7 @@ public void testQueryJobWithConnectionProperties() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testQueryJobWithLabels() throws InterruptedException, TimeoutException { + void testQueryJobWithLabels() throws InterruptedException, TimeoutException { String tableName = "test_query_job_table"; String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); Map labels = ImmutableMap.of("test-job-name", "test-query-job"); @@ 
-6101,7 +6094,7 @@ public void testQueryJobWithLabels() throws InterruptedException, TimeoutExcepti } @Test - public void testQueryJobWithSearchReturnsSearchStatisticsUnused() throws InterruptedException { + void testQueryJobWithSearchReturnsSearchStatisticsUnused() throws InterruptedException { String tableName = "test_query_job_table"; String query = "SELECT * FROM " @@ -6131,7 +6124,7 @@ public void testQueryJobWithSearchReturnsSearchStatisticsUnused() throws Interru /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testQueryJobWithRangePartitioning() throws InterruptedException { + void testQueryJobWithRangePartitioning() throws InterruptedException { String tableName = "test_query_job_table_rangepartitioning"; String query = "SELECT IntegerField, TimestampField, StringField, BooleanField FROM " @@ -6156,7 +6149,7 @@ public void testQueryJobWithRangePartitioning() throws InterruptedException { } @Test - public void testLoadJobWithRangePartitioning() throws InterruptedException { + void testLoadJobWithRangePartitioning() throws InterruptedException { String tableName = "test_load_job_table_rangepartitioning"; TableId destinationTable = TableId.of(DATASET, tableName); try { @@ -6180,7 +6173,7 @@ public void testLoadJobWithRangePartitioning() throws InterruptedException { } @Test - public void testLoadJobWithDecimalTargetTypes() throws InterruptedException { + void testLoadJobWithDecimalTargetTypes() throws InterruptedException { String tableName = "test_load_job_table_parquet_decimalTargetTypes"; TableId destinationTable = TableId.of(DATASET, tableName); String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/numeric/numeric_38_12.parquet"; @@ -6208,7 +6201,7 @@ public void testLoadJobWithDecimalTargetTypes() throws InterruptedException { } @Test - public void testExternalTableWithDecimalTargetTypes() throws InterruptedException { + void testExternalTableWithDecimalTargetTypes() throws InterruptedException { String tableName = "test_create_external_table_parquet_decimalTargetTypes"; TableId destinationTable = TableId.of(DATASET, tableName); String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/numeric/numeric_38_12.parquet"; @@ -6228,7 +6221,7 @@ public void testExternalTableWithDecimalTargetTypes() throws InterruptedExceptio } @Test - public void testQueryJobWithDryRun() throws InterruptedException, TimeoutException { + void testQueryJobWithDryRun() throws InterruptedException, TimeoutException { String tableName = "test_query_job_table"; String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, tableName); @@ -6246,7 +6239,7 @@ public void testQueryJobWithDryRun() throws InterruptedException, TimeoutExcepti } @Test - public void testExtractJob() throws InterruptedException, TimeoutException { + void testExtractJob() throws InterruptedException, TimeoutException { String tableName = "test_export_job_table"; TableId destinationTable = TableId.of(DATASET, tableName); Map labels = ImmutableMap.of("test-job-name", "test-load-extract-job"); @@ -6286,7 +6279,7 @@ public void testExtractJob() throws InterruptedException, TimeoutException { } @Test - public void testExtractJobWithModel() throws InterruptedException { + void testExtractJobWithModel() throws InterruptedException { String modelName = RemoteBigQueryHelper.generateModelName(); String sql = "CREATE MODEL `" @@ -6323,7 +6316,7 @@ public void testExtractJobWithModel() throws InterruptedException { } 
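[Editor note: the bulk of this patch is the mechanical JUnit 4 -> JUnit 5 assertion migration visible in the hunks above and below: the optional failure message moves from the first parameter to the last. A minimal side-by-side sketch, reusing the "no jobs listed" check from the listJobs hunk above (static imports of org.junit.Assert vs. org.junit.jupiter.api.Assertions assumed):

    // JUnit 4 (org.junit.Assert): the message comes first.
    assertTrue("no jobs listed", jobCount > 0);
    assertThrows("query should fail", BigQueryException.class, () -> bigquery.query(config));

    // JUnit 5 (org.junit.jupiter.api.Assertions): the message comes last, and may
    // also be passed as a Supplier<String> so it is only built on failure.
    assertTrue(jobCount > 0, "no jobs listed");
    assertThrows(BigQueryException.class, () -> bigquery.query(config), "query should fail");
]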
@Test - public void testExtractJobWithLabels() throws InterruptedException, TimeoutException { + void testExtractJobWithLabels() throws InterruptedException, TimeoutException { String tableName = "test_export_job_table_label"; Map labels = ImmutableMap.of("test_job_name", "test_export_job"); TableId destinationTable = TableId.of(DATASET, tableName); @@ -6349,7 +6342,7 @@ public void testExtractJobWithLabels() throws InterruptedException, TimeoutExcep } @Test - public void testCancelJob() throws InterruptedException, TimeoutException { + void testCancelJob() throws InterruptedException, TimeoutException { String destinationTableName = "test_cancel_query_job_table"; String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, destinationTableName); @@ -6363,12 +6356,12 @@ public void testCancelJob() throws InterruptedException, TimeoutException { } @Test - public void testCancelNonExistingJob() { + void testCancelNonExistingJob() { assertFalse(bigquery.cancel("test_cancel_non_existing_job")); } @Test - public void testInsertFromFile() throws InterruptedException, IOException, TimeoutException { + void testInsertFromFile() throws InterruptedException, IOException, TimeoutException { String destinationTableName = "test_insert_from_file_table"; TableId tableId = TableId.of(DATASET, destinationTableName); WriteChannelConfiguration configuration = @@ -6441,8 +6434,7 @@ public void testInsertFromFile() throws InterruptedException, IOException, Timeo } @Test - public void testInsertFromFileWithLabels() - throws InterruptedException, IOException, TimeoutException { + void testInsertFromFileWithLabels() throws InterruptedException, IOException, TimeoutException { String destinationTableName = "test_insert_from_file_table_with_labels"; TableId tableId = TableId.of(DATASET, destinationTableName); WriteChannelConfiguration configuration = @@ -6472,7 +6464,7 @@ public void testInsertFromFileWithLabels() } @Test - public void testInsertWithDecimalTargetTypes() + void testInsertWithDecimalTargetTypes() throws InterruptedException, IOException, TimeoutException { String destinationTableName = "test_insert_from_file_table_with_decimal_target_type"; TableId tableId = TableId.of(DATASET, destinationTableName); @@ -6499,7 +6491,7 @@ public void testInsertWithDecimalTargetTypes() } @Test - public void testLocation() throws Exception { + void testLocation() throws Exception { String location = "EU"; String wrongLocation = "US"; @@ -6563,14 +6555,14 @@ public void testLocation() throws Exception { .isEmpty(); assertThrows( - "querying a table with wrong location shouldn't work", BigQueryException.class, () -> otelBigquery .query( QueryJobConfiguration.of(query), JobId.newBuilder().setLocation(wrongLocation).build()) - .iterateAll()); + .iterateAll(), + "querying a table with wrong location shouldn't work"); // Test write { @@ -6590,14 +6582,14 @@ public void testLocation() throws Exception { } assertThrows( - "writing to a table with wrong location shouldn't work", BigQueryException.class, () -> { try (TableDataWriteChannel ignore = otelBigquery.writer( JobId.newBuilder().setLocation(wrongLocation).build(), writeChannelConfiguration)) {} - }); + }, + "writing to a table with wrong location shouldn't work"); } } finally { RemoteBigQueryHelper.forceDelete(bigquery, datasetName); @@ -6605,7 +6597,7 @@ public void testLocation() throws Exception { } @Test - public void testWriteChannelPreserveAsciiControlCharacters() + void 
testWriteChannelPreserveAsciiControlCharacters() throws InterruptedException, IOException, TimeoutException { String destinationTableName = "test_write_channel_preserve_ascii_control_characters"; TableId tableId = TableId.of(DATASET, destinationTableName); @@ -6632,7 +6624,7 @@ public void testWriteChannelPreserveAsciiControlCharacters() } @Test - public void testLoadJobPreserveAsciiControlCharacters() throws InterruptedException { + void testLoadJobPreserveAsciiControlCharacters() throws InterruptedException { String destinationTableName = "test_load_job_preserve_ascii_control_characters"; TableId destinationTable = TableId.of(DATASET, destinationTableName); @@ -6652,7 +6644,7 @@ public void testLoadJobPreserveAsciiControlCharacters() throws InterruptedExcept } @Test - public void testReferenceFileSchemaUriForAvro() { + void testReferenceFileSchemaUriForAvro() { try { String destinationTableName = "test_reference_file_schema_avro"; TableId tableId = TableId.of(DATASET, destinationTableName); @@ -6711,7 +6703,7 @@ public void testReferenceFileSchemaUriForAvro() { } @Test - public void testReferenceFileSchemaUriForParquet() { + void testReferenceFileSchemaUriForParquet() { try { String destinationTableName = "test_reference_file_schema_parquet"; TableId tableId = TableId.of(DATASET, destinationTableName); @@ -6769,7 +6761,7 @@ public void testReferenceFileSchemaUriForParquet() { } @Test - public void testCreateExternalTableWithReferenceFileSchemaAvro() { + void testCreateExternalTableWithReferenceFileSchemaAvro() { String destinationTableName = "test_create_external_table_reference_file_schema_avro"; TableId tableId = TableId.of(DATASET, destinationTableName); Schema expectedSchema = @@ -6808,7 +6800,7 @@ public void testCreateExternalTableWithReferenceFileSchemaAvro() { } @Test - public void testCreateExternalTableWithReferenceFileSchemaParquet() { + void testCreateExternalTableWithReferenceFileSchemaParquet() { String destinationTableName = "test_create_external_table_reference_file_schema_parquet"; TableId tableId = TableId.of(DATASET, destinationTableName); Schema expectedSchema = @@ -6849,7 +6841,7 @@ public void testCreateExternalTableWithReferenceFileSchemaParquet() { } @Test - public void testCloneTableCopyJob() throws InterruptedException { + void testCloneTableCopyJob() throws InterruptedException { String sourceTableName = "test_copy_job_base_table"; String ddlTableName = TABLE_ID_DDL.getTable(); String cloneTableName = "test_clone_table"; @@ -6902,15 +6894,15 @@ public void testCloneTableCopyJob() throws InterruptedException { } @Test - public void testHivePartitioningOptionsFieldsFieldExistence() throws InterruptedException { + void testHivePartitioningOptionsFieldsFieldExistence() throws InterruptedException { String tableName = "hive_partitioned_external_table"; // Create data on GCS String sourceDirectory = "bigquery/hive-partitioning-table/example"; BlobInfo blobInfo = BlobInfo.newBuilder(BUCKET, sourceDirectory + "/key=foo/data.json").build(); assertNotNull( - "Failed to upload JSON to GCS", - storage.create(blobInfo, "{\"name\":\"bar\"}".getBytes(StandardCharsets.UTF_8))); + storage.create(blobInfo, "{\"name\":\"bar\"}".getBytes(StandardCharsets.UTF_8)), + "Failed to upload JSON to GCS"); String sourceUri = "gs://" + BUCKET + "/" + sourceDirectory + "/*"; String sourceUriPrefix = "gs://" + BUCKET + "/" + sourceDirectory + "/"; @@ -6946,7 +6938,7 @@ public void testHivePartitioningOptionsFieldsFieldExistence() throws Interrupted } @Test - public void testPrimaryKey() { + 
void testPrimaryKey() { String tableName = "test_primary_key"; TableId tableId = TableId.of(DATASET, tableName); PrimaryKey primaryKey = PrimaryKey.newBuilder().setColumns(Arrays.asList("ID")).build(); @@ -6971,7 +6963,7 @@ public void testPrimaryKey() { } @Test - public void testPrimaryKeyUpdate() { + void testPrimaryKeyUpdate() { String tableName = "test_primary_key_update"; TableId tableId = TableId.of(DATASET, tableName); PrimaryKey primaryKey = @@ -7000,7 +6992,7 @@ public void testPrimaryKeyUpdate() { } @Test - public void testForeignKeys() { + void testForeignKeys() { String tableNamePk = "test_foreign_key"; String tableNameFk = "test_foreign_key2"; // TableIds referenced by foreign keys need project id to be specified @@ -7050,7 +7042,7 @@ public void testForeignKeys() { } @Test - public void testForeignKeysUpdate() { + void testForeignKeysUpdate() { String tableNameFk = "test_foreign_key"; String tableNamePk1 = "test_foreign_key2"; String tableNamePk2 = "test_foreign_key3"; @@ -7148,7 +7140,7 @@ public void testForeignKeysUpdate() { } @Test - public void testAlreadyExistJobExceptionHandling() throws InterruptedException { + void testAlreadyExistJobExceptionHandling() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + DATASET @@ -7175,7 +7167,7 @@ public void testAlreadyExistJobExceptionHandling() throws InterruptedException { } @Test - public void testStatelessQueries() throws InterruptedException { + void testStatelessQueries() throws InterruptedException { // Create local BigQuery to not contaminate global test parameters. RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); BigQuery bigQuery = bigqueryHelper.getOptions().getService(); @@ -7187,8 +7179,8 @@ public void testStatelessQueries() throws InterruptedException { // Ideally Stateless query will return queryId but in some cases it would return jobId instead // of queryId based on the query complexity or other factors (job timeout configs). assertTrue( - "Exactly one of jobId or queryId should be non-null", - (tableResult.getJobId() != null) ^ (tableResult.getQueryId() != null)); + (tableResult.getJobId() != null) ^ (tableResult.getQueryId() != null), + "Exactly one of jobId or queryId should be non-null"); // Job creation takes over, no query id is created. bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED); @@ -7209,7 +7201,7 @@ private TableResult executeSimpleQuery(BigQuery bigQuery) throws InterruptedExce } @Test - public void testTableResultJobIdAndQueryId() throws InterruptedException { + void testTableResultJobIdAndQueryId() throws InterruptedException { // For stateless queries, jobId and queryId are populated based on the following criteria: // 1. For stateless queries, then queryId is populated. // 2. For queries that fails the requirements to be stateless, then jobId is populated and @@ -7232,8 +7224,8 @@ public void testTableResultJobIdAndQueryId() throws InterruptedException { // Ideally Stateless query will return queryId but in some cases it would return jobId instead // of queryId based on the query complexity or other factors (job timeout configs). assertTrue( - "Exactly one of jobId or queryId should be non-null", - (result.getJobId() != null) ^ (result.getQueryId() != null)); + (result.getJobId() != null) ^ (result.getQueryId() != null), + "Exactly one of jobId or queryId should be non-null"); // Test scenario 2 by failing stateless check by setting job timeout. 
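[Editor note: the stateless-query assertions just above use boolean XOR to require that exactly one of the two ids is populated. A standalone illustration of the idiom (the literal values here are made up for the sketch):

    // `^` on booleans is exclusive-or: the assertion passes only when exactly
    // one of jobId and queryId is non-null -- never both, never neither.
    String jobId = null;
    String queryId = "query-123";
    assertTrue((jobId != null) ^ (queryId != null),
        "Exactly one of jobId or queryId should be non-null");
]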
QueryJobConfiguration configQueryWithJob = @@ -7261,7 +7253,7 @@ public void testTableResultJobIdAndQueryId() throws InterruptedException { } @Test - public void testStatelessQueriesWithLocation() throws Exception { + void testStatelessQueriesWithLocation() throws Exception { // This test validates BigQueryOption location is used for stateless query by verifying that the // stateless query fails when the BigQueryOption location does not match the dataset location. String location = "EU"; @@ -7294,7 +7286,6 @@ public void testStatelessQueriesWithLocation() throws Exception { // Test stateless query when BigQueryOption location does not match dataset location. assertThrows( - "querying a table with wrong location shouldn't work", BigQueryException.class, () -> { BigQuery bigQueryWrongLocation = @@ -7306,14 +7297,15 @@ public void testStatelessQueriesWithLocation() throws Exception { .getOptions() .setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); bigQueryWrongLocation.query(QueryJobConfiguration.of(query)); - }); + }, + "querying a table with wrong location shouldn't work"); } finally { RemoteBigQueryHelper.forceDelete(bigQuery, datasetName); } } @Test - public void testQueryWithTimeout() throws InterruptedException { + void testQueryWithTimeout() throws InterruptedException { // Validate that queryWithTimeout returns either TableResult or Job object RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); @@ -7357,7 +7349,7 @@ public void testQueryWithTimeout() throws InterruptedException { } @Test - public void testUniverseDomainWithInvalidUniverseDomain() { + void testUniverseDomainWithInvalidUniverseDomain() { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); BigQueryOptions bigQueryOptions = bigqueryHelper.getOptions().toBuilder() @@ -7368,9 +7360,9 @@ public void testUniverseDomainWithInvalidUniverseDomain() { BigQueryException exception = assertThrows( - "RPCs to invalid universe domain should fail", BigQueryException.class, - () -> bigQuery.listDatasets("bigquery-public-data")); + () -> bigQuery.listDatasets("bigquery-public-data"), + "RPCs to invalid universe domain should fail"); assertEquals(HTTP_UNAUTHORIZED, exception.getCode()); assertNotNull(exception.getMessage()); assertTrue( @@ -7380,7 +7372,7 @@ public void testUniverseDomainWithInvalidUniverseDomain() { } @Test - public void testInvalidUniverseDomainWithMismatchCredentials() { + void testInvalidUniverseDomainWithMismatchCredentials() { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); BigQueryOptions bigQueryOptions = bigqueryHelper.getOptions().toBuilder() @@ -7390,9 +7382,9 @@ public void testInvalidUniverseDomainWithMismatchCredentials() { BigQueryException exception = assertThrows( - "RPCs to invalid universe domain should fail", BigQueryException.class, - () -> bigQuery.listDatasets("bigquery-public-data")); + () -> bigQuery.listDatasets("bigquery-public-data"), + "RPCs to invalid universe domain should fail"); assertEquals(HTTP_UNAUTHORIZED, exception.getCode()); assertNotNull(exception.getMessage()); assertTrue( @@ -7402,7 +7394,7 @@ public void testInvalidUniverseDomainWithMismatchCredentials() { } @Test - public void testUniverseDomainWithMatchingDomain() { + void testUniverseDomainWithMatchingDomain() { // Test a valid domain using the default credentials and Google default universe domain. 
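[Editor note: the two invalid-universe-domain tests above also show the second payoff of assertThrows over the old try/fail/catch pattern: it returns the caught exception for follow-up assertions. Sketch built from the calls in those tests:

    // assertThrows hands back the thrown exception, so the test can keep
    // asserting on its status code and message afterwards.
    BigQueryException exception =
        assertThrows(
            BigQueryException.class,
            () -> bigQuery.listDatasets("bigquery-public-data"),
            "RPCs to invalid universe domain should fail");
    assertEquals(HTTP_UNAUTHORIZED, exception.getCode());
    assertNotNull(exception.getMessage());
]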
RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); BigQueryOptions bigQueryOptions = @@ -7427,7 +7419,7 @@ public void testUniverseDomainWithMatchingDomain() { } @Test - public void testExternalTableMetadataCachingNotEnable() throws InterruptedException { + void testExternalTableMetadataCachingNotEnable() throws InterruptedException { String tableName = "test_metadata_cache_not_enable"; TableId tableId = TableId.of(DATASET, tableName); ExternalTableDefinition externalTableDefinition = @@ -7468,7 +7460,7 @@ public void testExternalTableMetadataCachingNotEnable() throws InterruptedExcept } @Test - public void testExternalMetadataCacheModeFailForNonBiglake() { + void testExternalMetadataCacheModeFailForNonBiglake() { // Validate that MetadataCacheMode is passed to the backend. // TODO: Enhance this test after BigLake testing infrastructure is inplace. String tableName = "test_metadata_cache_mode_fail_for_non_biglake"; @@ -7482,9 +7474,9 @@ public void testExternalMetadataCacheModeFailForNonBiglake() { BigQueryException exception = assertThrows( - "BigQueryException was expected", BigQueryException.class, - () -> bigquery.create(tableInfo)); + () -> bigquery.create(tableInfo), + "BigQueryException was expected"); BigQueryError error = exception.getError(); assertNotNull(error); assertEquals("invalid", error.getReason()); @@ -7495,7 +7487,7 @@ public void testExternalMetadataCacheModeFailForNonBiglake() { } @Test - public void testObjectTable() throws InterruptedException { + void testObjectTable() throws InterruptedException { String tableName = "test_object_table"; TableId tableId = TableId.of(DATASET, tableName); @@ -7538,7 +7530,7 @@ public void testObjectTable() throws InterruptedException { } @Test - public void testQueryExportStatistics() throws InterruptedException { + void testQueryExportStatistics() throws InterruptedException { String query = String.format( "EXPORT DATA OPTIONS(\n" @@ -7562,7 +7554,7 @@ public void testQueryExportStatistics() throws InterruptedException { } @Test - public void testLoadConfigurationFlexibleColumnName() throws InterruptedException { + void testLoadConfigurationFlexibleColumnName() throws InterruptedException { // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#columnnamecharactermap for // mapping. 
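[Editor note: for context on the flexible-column-name test above, the column name character map linked in the comment controls how characters that are invalid in BigQuery column names are handled at load time. A hedged sketch, assuming the builder exposes a setColumnNameCharacterMap(String) setter mirroring the REST field -- the setter name and the "V2" value are assumptions, not confirmed by this patch:

    // "V2" requests flexible column names; unsupported characters in source
    // column names are mapped instead of failing the load job.
    LoadJobConfiguration configuration =
        LoadJobConfiguration.newBuilder(destinationTable, sourceUri, FormatOptions.csv())
            .setColumnNameCharacterMap("V2") // assumed setter name
            .build();
]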
@@ -7618,7 +7610,7 @@ public void testLoadConfigurationFlexibleColumnName() throws InterruptedExceptio } @Test - public void testStatementType() throws InterruptedException { + void testStatementType() throws InterruptedException { String tableName = "test_materialized_view_table_statemnt_type"; String createQuery = String.format( diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java index 332071a620..4942c30083 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java @@ -15,11 +15,11 @@ */ package com.google.cloud.bigquery.it; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQueryException; @@ -47,9 +47,9 @@ import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.StreamSupport; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class ITHighPrecisionTimestamp { @@ -69,7 +69,7 @@ public class ITHighPrecisionTimestamp { private static final String TIMESTAMP2 = "1970-01-01T12:34:56.123456789123Z"; private static final String TIMESTAMP3 = "2000-01-01T12:34:56.123456789123Z"; - @BeforeClass + @BeforeAll public static void beforeClass() { BigQueryOptions.Builder builder = BigQueryOptions.newBuilder() @@ -110,7 +110,7 @@ public static void beforeClass() { assertEquals(0, response.getInsertErrors().size()); } - @AfterClass + @AfterAll public static void afterClass() { if (bigquery != null) { bigquery.delete(defaultTableId); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java index 790f35fe5c..84e355f9e6 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java @@ -16,12 +16,13 @@ package com.google.cloud.bigquery.it; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static 
org.junit.jupiter.api.Assertions.fail; import com.google.cloud.ServiceOptions; import com.google.cloud.bigquery.BigQuery; @@ -53,6 +54,7 @@ import java.io.IOException; import java.math.BigDecimal; import java.nio.charset.StandardCharsets; +import java.sql.Date; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Time; @@ -66,12 +68,12 @@ import java.util.logging.Level; import java.util.logging.Logger; import org.apache.arrow.vector.util.JsonStringArrayList; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(value = 1800) // 30 min timeout public class ITNightlyBigQueryTest { private static final Logger logger = Logger.getLogger(ITNightlyBigQueryTest.class.getName()); private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); @@ -170,9 +172,7 @@ public class ITNightlyBigQueryTest { .setDescription("IntervalFieldDescription") .build()); - @Rule public Timeout globalTimeout = Timeout.seconds(1800); // setting 30 mins as the timeout - - @BeforeClass + @BeforeAll public static void beforeClass() throws InterruptedException, IOException { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); bigquery = bigqueryHelper.getOptions().getService(); @@ -181,17 +181,15 @@ public static void beforeClass() throws InterruptedException, IOException { populateTestRecords(DATASET, TABLE); } - @AfterClass + @AfterAll public static void afterClass() { try { if (bigquery != null) { deleteTable(DATASET, TABLE); RemoteBigQueryHelper.forceDelete(bigquery, DATASET); - } else { - fail("Error clearing the test dataset"); } } catch (BigQueryException e) { - fail("Error clearing the test dataset " + e); + throw new RuntimeException("Error clearing the test dataset " + e); } } @@ -199,9 +197,8 @@ public static void afterClass() { public void testInvalidQuery() throws BigQuerySQLException { Connection connection = getConnection(); try { - BigQueryResult bigQueryResult = connection.executeSelect(INVALID_QUERY); - fail("BigQuerySQLException was expected"); - } catch (BigQuerySQLException ex) { + BigQuerySQLException ex = + assertThrows(BigQuerySQLException.class, () -> connection.executeSelect(INVALID_QUERY)); assertNotNull(ex.getMessage()); assertTrue(ex.getMessage().toLowerCase().contains("unexpected keyword into")); } finally { @@ -215,282 +212,300 @@ public void testInvalidQuery() throws BigQuerySQLException { @Test public void testIterateAndOrder() throws SQLException { Connection connection = getConnection(); - BigQueryResult bigQueryResult = connection.executeSelect(QUERY); - logger.log(Level.INFO, "Query used: {0}", QUERY); - ResultSet rs = bigQueryResult.getResultSet(); - int cnt = 0; - - int prevIntegerFieldVal = 0; - while (rs.next()) { - if (cnt == 0) { // first row is supposed to be null - assertNull(rs.getString("StringField")); - assertNull(rs.getString("GeographyField")); - Object intAryField = rs.getObject("IntegerArrayField"); - if (intAryField instanceof JsonStringArrayList) { - assertEquals( - new JsonStringArrayList(), - ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + try { + BigQueryResult bigQueryResult = connection.executeSelect(QUERY); + logger.log(Level.INFO, "Query used: {0}", QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + + 
int prevIntegerFieldVal = 0; + while (rs.next()) { + if (cnt == 0) { // first row is supposed to be null + assertNull(rs.getString("StringField")); + assertNull(rs.getString("GeographyField")); + Object intAryField = rs.getObject("IntegerArrayField"); + if (intAryField instanceof JsonStringArrayList) { + assertEquals( + new JsonStringArrayList(), + ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + } + assertFalse(rs.getBoolean("BooleanField")); + assertTrue(0.0d == rs.getDouble("BigNumericField")); + assertTrue(0 == rs.getInt("IntegerField")); + assertTrue(0L == rs.getLong("NumericField")); + assertNull(rs.getBytes("BytesField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getDate("DateField")); + assertNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNull(rs.getString("StringField_1")); + assertNull(rs.getString("hello")); // equivalent of testJsonType + assertEquals(0, rs.getInt("id")); + + } else { // remaining rows are supposed to be non null + assertNotNull(rs.getString("StringField")); + assertNotNull(rs.getString("GeographyField")); + assertNotNull(rs.getObject("IntegerArrayField")); + assertTrue(rs.getBoolean("BooleanField")); + assertTrue(0.0d < rs.getDouble("BigNumericField")); + assertTrue(0 < rs.getInt("IntegerField")); + assertTrue(0L < rs.getLong("NumericField")); + assertNotNull(rs.getBytes("BytesField")); + assertNotNull(rs.getTimestamp("TimestampField")); + assertNotNull(rs.getTime("TimeField")); + assertNotNull(rs.getDate("DateField")); + assertNotNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNotNull(rs.getString("StringField_1")); + + // check the order of the records + assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); + prevIntegerFieldVal = rs.getInt("IntegerField"); + + testForAllDataTypeValues(rs, cnt); // asserts the value of each row } - assertFalse(rs.getBoolean("BooleanField")); - assertTrue(0.0d == rs.getDouble("BigNumericField")); - assertTrue(0 == rs.getInt("IntegerField")); - assertTrue(0L == rs.getLong("NumericField")); - assertNull(rs.getBytes("BytesField")); - assertNull(rs.getTimestamp("TimestampField")); - assertNull(rs.getTime("TimeField")); - assertNull(rs.getDate("DateField")); - assertNull(rs.getString("JSONField")); - assertFalse(rs.getBoolean("BooleanField_1")); - assertNull(rs.getString("StringField_1")); - assertNull(rs.getString("hello")); // equivalent of testJsonType - assertEquals(0, rs.getInt("id")); - - } else { // remaining rows are supposed to be non null - assertNotNull(rs.getString("StringField")); - assertNotNull(rs.getString("GeographyField")); - assertNotNull(rs.getObject("IntegerArrayField")); - assertTrue(rs.getBoolean("BooleanField")); - assertTrue(0.0d < rs.getDouble("BigNumericField")); - assertTrue(0 < rs.getInt("IntegerField")); - assertTrue(0L < rs.getLong("NumericField")); - assertNotNull(rs.getBytes("BytesField")); - assertNotNull(rs.getTimestamp("TimestampField")); - assertNotNull(rs.getTime("TimeField")); - assertNotNull(rs.getDate("DateField")); - assertNotNull(rs.getString("JSONField")); - assertFalse(rs.getBoolean("BooleanField_1")); - assertNotNull(rs.getString("StringField_1")); - - // check the order of the records - assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); - prevIntegerFieldVal = rs.getInt("IntegerField"); - - testForAllDataTypeValues(rs, cnt); // asserts the value of each row + ++cnt; } - ++cnt; + 
assertEquals(LIMIT_RECS, cnt); // all the records were retrieved + } finally { + connection.close(); } - assertEquals(LIMIT_RECS, cnt); // all the records were retrieved - connection.close(); } /* This tests for the order of the records using default connection settings as well as the value of the records using testForAllDataTypeValues */ @Test - public void testIterateAndOrderDefaultConnSettings() throws SQLException { + void testIterateAndOrderDefaultConnSettings() throws SQLException { Connection connection = bigquery.createConnection(); - BigQueryResult bigQueryResult = connection.executeSelect(QUERY); - logger.log(Level.INFO, "Query used: {0}", QUERY); - ResultSet rs = bigQueryResult.getResultSet(); - int cnt = 0; - - int prevIntegerFieldVal = 0; - while (rs.next()) { - if (cnt == 0) { // first row is supposed to be null - assertNull(rs.getString("StringField")); - assertNull(rs.getString("GeographyField")); - Object intAryField = rs.getObject("IntegerArrayField"); - if (intAryField instanceof JsonStringArrayList) { - assertEquals( - new JsonStringArrayList(), - ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + try { + BigQueryResult bigQueryResult = connection.executeSelect(QUERY); + logger.log(Level.INFO, "Query used: {0}", QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + + int prevIntegerFieldVal = 0; + while (rs.next()) { + if (cnt == 0) { // first row is supposed to be null + assertNull(rs.getString("StringField")); + assertNull(rs.getString("GeographyField")); + Object intAryField = rs.getObject("IntegerArrayField"); + if (intAryField instanceof JsonStringArrayList) { + assertEquals( + new JsonStringArrayList(), + ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + } + assertFalse(rs.getBoolean("BooleanField")); + assertTrue(0.0d == rs.getDouble("BigNumericField")); + assertTrue(0 == rs.getInt("IntegerField")); + assertTrue(0L == rs.getLong("NumericField")); + assertNull(rs.getBytes("BytesField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getDate("DateField")); + assertNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNull(rs.getString("StringField_1")); + assertNull(rs.getString("hello")); // equivalent of testJsonType + assertEquals(0, rs.getInt("id")); + + } else { // remaining rows are supposed to be non null + assertNotNull(rs.getString("StringField")); + assertNotNull(rs.getString("GeographyField")); + assertNotNull(rs.getObject("IntegerArrayField")); + assertTrue(rs.getBoolean("BooleanField")); + assertTrue(0.0d < rs.getDouble("BigNumericField")); + assertTrue(0 < rs.getInt("IntegerField")); + assertTrue(0L < rs.getLong("NumericField")); + assertNotNull(rs.getBytes("BytesField")); + assertNotNull(rs.getTimestamp("TimestampField")); + assertNotNull(rs.getTime("TimeField")); + assertNotNull(rs.getDate("DateField")); + assertNotNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNotNull(rs.getString("StringField_1")); + + // check the order of the records + assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); + prevIntegerFieldVal = rs.getInt("IntegerField"); + + testForAllDataTypeValues(rs, cnt); // asserts the value of each row } - assertFalse(rs.getBoolean("BooleanField")); - assertTrue(0.0d == rs.getDouble("BigNumericField")); - assertTrue(0 == rs.getInt("IntegerField")); - assertTrue(0L == rs.getLong("NumericField")); - 
assertNull(rs.getBytes("BytesField")); - assertNull(rs.getTimestamp("TimestampField")); - assertNull(rs.getTime("TimeField")); - assertNull(rs.getDate("DateField")); - assertNull(rs.getString("JSONField")); - assertFalse(rs.getBoolean("BooleanField_1")); - assertNull(rs.getString("StringField_1")); - assertNull(rs.getString("hello")); // equivalent of testJsonType - assertEquals(0, rs.getInt("id")); - - } else { // remaining rows are supposed to be non null - assertNotNull(rs.getString("StringField")); - assertNotNull(rs.getString("GeographyField")); - assertNotNull(rs.getObject("IntegerArrayField")); - assertTrue(rs.getBoolean("BooleanField")); - assertTrue(0.0d < rs.getDouble("BigNumericField")); - assertTrue(0 < rs.getInt("IntegerField")); - assertTrue(0L < rs.getLong("NumericField")); - assertNotNull(rs.getBytes("BytesField")); - assertNotNull(rs.getTimestamp("TimestampField")); - assertNotNull(rs.getTime("TimeField")); - assertNotNull(rs.getDate("DateField")); - assertNotNull(rs.getString("JSONField")); - assertFalse(rs.getBoolean("BooleanField_1")); - assertNotNull(rs.getString("StringField_1")); - - // check the order of the records - assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); - prevIntegerFieldVal = rs.getInt("IntegerField"); - - testForAllDataTypeValues(rs, cnt); // asserts the value of each row + ++cnt; } - ++cnt; + assertEquals(LIMIT_RECS, cnt); // all the records were retrieved + } finally { + connection.close(); } - assertEquals(LIMIT_RECS, cnt); // all the records were retrieved - assertTrue(connection.close()); } /* This tests interrupts the execution in between and checks if it has been interrupted successfully while using ReadAPI */ @Test - public void testConnectionClose() throws SQLException { + void testConnectionClose() throws SQLException { Connection connection = bigquery.createConnection(); - assertNotNull("bigquery.createConnection() returned null", connection); - BigQueryResult bigQueryResult = connection.executeSelect(QUERY); - logger.log(Level.INFO, "Query used: {0}", QUERY); - ResultSet rs = bigQueryResult.getResultSet(); - int cnt = 0; - while (rs.next()) { - ++cnt; - if (cnt == 50000) { // interrupt at 50K - assertTrue(connection.close()); + try { + assertNotNull(connection, "bigquery.createConnection() returned null"); + BigQueryResult bigQueryResult = connection.executeSelect(QUERY); + logger.log(Level.INFO, "Query used: {0}", QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + while (rs.next()) { + ++cnt; + if (cnt == 50000) { // interrupt at 50K + assertTrue(connection.close()); + } } + assertTrue(LIMIT_RECS > cnt); + // we stopped at 50K but still we can expect additional records (typically ~100) + // to be retrieved + // as a number of records should have been already buffered. less than + // LIMIT_RECS should be retrieved + } finally { + connection.close(); } - assertTrue( - LIMIT_RECS - > cnt); // we stopped at 50K but still we can expect additional records (typically ~100) - // to be retrieved - // as a number of records should have been already buffered. 
less than - // LIMIT_RECS should be retrieved } @Test - public void testMultipleRuns() throws SQLException { - - Connection connection = getConnection(); - BigQueryResult bigQueryResult = connection.executeSelect(MULTI_QUERY); - logger.log(Level.INFO, "Query used: {0}", MULTI_QUERY); - ResultSet rs = bigQueryResult.getResultSet(); - int cnt = 0; + void testMultipleRuns() throws SQLException { int totalCnt = 0; - - int prevIntegerFieldVal = 0; - while (rs.next()) { - if (cnt == 0) { // first row is supposed to be null - assertNull(rs.getString("StringField")); - assertNull(rs.getString("GeographyField")); - Object intAryField = rs.getObject("IntegerArrayField"); - if (intAryField instanceof JsonStringArrayList) { - assertEquals( - new JsonStringArrayList(), - ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + Connection connection = getConnection(); + try { + BigQueryResult bigQueryResult = connection.executeSelect(MULTI_QUERY); + logger.log(Level.INFO, "Query used: {0}", MULTI_QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + + int prevIntegerFieldVal = 0; + while (rs.next()) { + if (cnt == 0) { // first row is supposed to be null + assertNull(rs.getString("StringField")); + assertNull(rs.getString("GeographyField")); + Object intAryField = rs.getObject("IntegerArrayField"); + if (intAryField instanceof JsonStringArrayList) { + assertEquals( + new JsonStringArrayList(), + ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + } + assertFalse(rs.getBoolean("BooleanField")); + assertTrue(0.0d == rs.getDouble("BigNumericField")); + assertTrue(0 == rs.getInt("IntegerField")); + assertTrue(0L == rs.getLong("NumericField")); + assertNull(rs.getBytes("BytesField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getDate("DateField")); + assertNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNull(rs.getString("StringField_1")); + assertNull(rs.getString("hello")); // equivalent of testJsonType + assertEquals(0, rs.getInt("id")); + + } else { // remaining rows are supposed to be non null + // check the order of the records + assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); + prevIntegerFieldVal = rs.getInt("IntegerField"); + + testForAllDataTypeValues(rs, cnt); // asserts the value of each row } - assertFalse(rs.getBoolean("BooleanField")); - assertTrue(0.0d == rs.getDouble("BigNumericField")); - assertTrue(0 == rs.getInt("IntegerField")); - assertTrue(0L == rs.getLong("NumericField")); - assertNull(rs.getBytes("BytesField")); - assertNull(rs.getTimestamp("TimestampField")); - assertNull(rs.getTime("TimeField")); - assertNull(rs.getDate("DateField")); - assertNull(rs.getString("JSONField")); - assertFalse(rs.getBoolean("BooleanField_1")); - assertNull(rs.getString("StringField_1")); - assertNull(rs.getString("hello")); // equivalent of testJsonType - assertEquals(0, rs.getInt("id")); - - } else { // remaining rows are supposed to be non null - // check the order of the records - assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); - prevIntegerFieldVal = rs.getInt("IntegerField"); - - testForAllDataTypeValues(rs, cnt); // asserts the value of each row + ++cnt; } - ++cnt; + totalCnt += cnt; + } finally { + connection.close(); } - connection.close(); - totalCnt += cnt; + // Repeat the same run - connection = getConnection(); - bigQueryResult = connection.executeSelect(MULTI_QUERY); - rs = 
bigQueryResult.getResultSet(); - cnt = 0; - prevIntegerFieldVal = 0; - while (rs.next()) { - if (cnt == 0) { // first row is supposed to be null - assertNull(rs.getString("StringField")); - assertNull(rs.getString("GeographyField")); - Object intAryField = rs.getObject("IntegerArrayField"); - if (intAryField instanceof JsonStringArrayList) { - assertEquals( - new JsonStringArrayList(), - ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + Connection connection1 = getConnection(); + try { + BigQueryResult bigQueryResult = connection1.executeSelect(MULTI_QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + int prevIntegerFieldVal = 0; + while (rs.next()) { + if (cnt == 0) { // first row is supposed to be null + assertNull(rs.getString("StringField")); + assertNull(rs.getString("GeographyField")); + Object intAryField = rs.getObject("IntegerArrayField"); + if (intAryField instanceof JsonStringArrayList) { + assertEquals( + new JsonStringArrayList(), + ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + } + assertFalse(rs.getBoolean("BooleanField")); + assertTrue(0.0d == rs.getDouble("BigNumericField")); + assertTrue(0 == rs.getInt("IntegerField")); + assertTrue(0L == rs.getLong("NumericField")); + assertNull(rs.getBytes("BytesField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getDate("DateField")); + assertNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNull(rs.getString("StringField_1")); + assertNull(rs.getString("hello")); // equivalent of testJsonType + assertEquals(0, rs.getInt("id")); + + } else { // remaining rows are supposed to be non null + // check the order of the records + assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); + prevIntegerFieldVal = rs.getInt("IntegerField"); + + testForAllDataTypeValues(rs, cnt); // asserts the value of each row } - assertFalse(rs.getBoolean("BooleanField")); - assertTrue(0.0d == rs.getDouble("BigNumericField")); - assertTrue(0 == rs.getInt("IntegerField")); - assertTrue(0L == rs.getLong("NumericField")); - assertNull(rs.getBytes("BytesField")); - assertNull(rs.getTimestamp("TimestampField")); - assertNull(rs.getTime("TimeField")); - assertNull(rs.getDate("DateField")); - assertNull(rs.getString("JSONField")); - assertFalse(rs.getBoolean("BooleanField_1")); - assertNull(rs.getString("StringField_1")); - assertNull(rs.getString("hello")); // equivalent of testJsonType - assertEquals(0, rs.getInt("id")); - - } else { // remaining rows are supposed to be non null - // check the order of the records - assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); - prevIntegerFieldVal = rs.getInt("IntegerField"); - - testForAllDataTypeValues(rs, cnt); // asserts the value of each row + ++cnt; } - ++cnt; + totalCnt += cnt; + } finally { + connection1.close(); } - connection.close(); - totalCnt += cnt; assertEquals(MULTI_LIMIT_RECS * 2, totalCnt); } @Test - public void testPositionalParams() + void testPositionalParams() throws SQLException { // Bypasses Read API as it doesnt support Positional Params Connection connection = getConnection(); - Parameter dateParam = - Parameter.newBuilder().setValue(QueryParameterValue.date("2022-01-01")).build(); - Parameter boolParam = Parameter.newBuilder().setValue(QueryParameterValue.bool(true)).build(); - Parameter intParam = Parameter.newBuilder().setValue(QueryParameterValue.int64(1)).build(); - Parameter numericParam = - 
Parameter.newBuilder().setValue(QueryParameterValue.numeric(new BigDecimal(100))).build(); - List parameters = ImmutableList.of(dateParam, boolParam, intParam, numericParam); - - BigQueryResult bigQueryResult = connection.executeSelect(POSITIONAL_QUERY, parameters); - logger.log(Level.INFO, "Query used: {0}", POSITIONAL_QUERY); - ResultSet rs = bigQueryResult.getResultSet(); - int cnt = 0; - while (rs.next()) { - assertFalse(rs.getBoolean("BooleanField")); - assertTrue(0.0d <= rs.getDouble("BigNumericField")); - assertTrue(0 <= rs.getInt("IntegerField")); - assertTrue(0L <= rs.getLong("NumericField")); - assertNotNull(rs.getBytes("BytesField")); - assertNotNull(rs.getTimestamp("TimestampField")); - assertNotNull(rs.getTime("TimeField")); - assertNotNull(rs.getDate("DateField")); - assertNotNull(rs.getString("JSONField")); - assertTrue(rs.getBoolean("BooleanField_1")); - assertNotNull(rs.getString("StringField_1")); - ++cnt; + try { + Parameter dateParam = + Parameter.newBuilder().setValue(QueryParameterValue.date("2022-01-01")).build(); + Parameter boolParam = Parameter.newBuilder().setValue(QueryParameterValue.bool(true)).build(); + Parameter intParam = Parameter.newBuilder().setValue(QueryParameterValue.int64(1)).build(); + Parameter numericParam = + Parameter.newBuilder().setValue(QueryParameterValue.numeric(new BigDecimal(100))).build(); + List parameters = ImmutableList.of(dateParam, boolParam, intParam, numericParam); + + BigQueryResult bigQueryResult = connection.executeSelect(POSITIONAL_QUERY, parameters); + logger.log(Level.INFO, "Query used: {0}", POSITIONAL_QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + while (rs.next()) { + assertFalse(rs.getBoolean("BooleanField")); + assertTrue(0.0d <= rs.getDouble("BigNumericField")); + assertTrue(0 <= rs.getInt("IntegerField")); + assertTrue(0L <= rs.getLong("NumericField")); + assertNotNull(rs.getBytes("BytesField")); + assertNotNull(rs.getTimestamp("TimestampField")); + assertNotNull(rs.getTime("TimeField")); + assertNotNull(rs.getDate("DateField")); + assertNotNull(rs.getString("JSONField")); + assertTrue(rs.getBoolean("BooleanField_1")); + assertNotNull(rs.getString("StringField_1")); + ++cnt; + } + assertEquals(MULTI_LIMIT_RECS, cnt); + } finally { + connection.close(); } - connection.close(); - assertEquals(MULTI_LIMIT_RECS, cnt); } @Test // This testcase reads rows in bulk for a public table to make sure we do not get // table-not-found exception. Ref: b/241134681 . This exception has been seen while reading data // in bulk - public void testForTableNotFound() throws SQLException { + void testForTableNotFound() throws SQLException { int recordCnt = 50000000; // 5Mil String query = String.format( @@ -578,8 +593,7 @@ private static void testForAllDataTypeValues(ResultSet rs, int cnt) throws SQLEx // Timestamp, Time, DateTime and Date fields assertEquals(1649064795000L, rs.getTimestamp("TimestampField").getTime()); - assertEquals( - java.sql.Date.valueOf("2022-01-01").toString(), rs.getDate("DateField").toString()); + assertEquals(Date.valueOf("2022-01-01").toString(), rs.getDate("DateField").toString()); // Time is represented independent of a specific date and timezone. For example a 12:11:35 (GMT) // is returned as // 17:11:35 (GMT+5:30) . 
So we need to adjust the offset @@ -615,15 +629,15 @@ private static void addBatchRecords(TableId tableId) { for (Map.Entry> entry : response.getInsertErrors().entrySet()) { logger.log(Level.WARNING, "Exception while adding records {0}", entry.getValue()); } - fail("Response has errors"); + throw new BigQueryException(0, "Response has errors"); } } catch (BigQueryException e) { logger.log(Level.WARNING, "Exception while adding records {0}", e); - fail("Error in addBatchRecords"); + throw new BigQueryException(0, "Error in addBatchRecords", e); } } - private static void createTable(String datasetName, String tableName, Schema schema) { + static void createTable(String datasetName, String tableName, Schema schema) { try { TableId tableId = TableId.of(datasetName, tableName); TableDefinition tableDefinition = StandardTableDefinition.of(schema); @@ -635,7 +649,7 @@ private static void createTable(String datasetName, String tableName, Schema sch } } - public static void deleteTable(String datasetName, String tableName) { + static void deleteTable(String datasetName, String tableName) { try { assertTrue(bigquery.delete(TableId.of(datasetName, tableName))); } catch (BigQueryException e) { @@ -643,7 +657,7 @@ public static void deleteTable(String datasetName, String tableName) { } } - public static void createDataset(String datasetName) { + static void createDataset(String datasetName) { try { DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build(); Dataset newDataset = bigquery.create(datasetInfo); @@ -653,7 +667,7 @@ public static void createDataset(String datasetName) { } } - public static void deleteDataset(String datasetName) { + static void deleteDataset(String datasetName) { try { DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build(); assertTrue(bigquery.delete(datasetInfo.getDatasetId())); @@ -663,7 +677,6 @@ public static void deleteDataset(String datasetName) { } private Connection getConnection() { - ConnectionSettings connectionSettings = ConnectionSettings.newBuilder() .setDefaultDataset(DatasetId.of(DATASET)) diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITRemoteUDFTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITRemoteUDFTest.java index 7a3194e525..6598d08357 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITRemoteUDFTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITRemoteUDFTest.java @@ -15,8 +15,8 @@ */ package com.google.cloud.bigquery.it; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import com.google.cloud.ServiceOptions; import com.google.cloud.bigquery.BigQuery; @@ -39,11 +39,11 @@ import java.util.HashMap; import java.util.Map; import java.util.UUID; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -public class ITRemoteUDFTest { +class ITRemoteUDFTest { private static final String ID = UUID.randomUUID().toString().substring(0, 8); private static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); @@ -56,8 +56,8 @@ public class ITRemoteUDFTest { private static Connection connection; private static BigQuery bigquery; - @Before - public void setUp() throws IOException { + @BeforeEach + void setUp() 
throws IOException { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); bigquery = bigqueryHelper.getOptions().getService(); client = ConnectionServiceClient.create(); @@ -76,8 +76,8 @@ public void setUp() throws IOException { connection = client.createConnection(request); } - @AfterClass - public static void afterClass() { + @AfterAll + static void afterClass() { if (bigquery != null) { RemoteBigQueryHelper.forceDelete(bigquery, ROUTINE_DATASET); } @@ -89,7 +89,7 @@ public static void afterClass() { } @Test - public void testRoutineRemoteUDF() { + void testRoutineRemoteUDF() { String routineName = RemoteBigQueryHelper.generateRoutineName(); RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); Map userDefinedContext = diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java index eec39f633f..3968cd05e2 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java @@ -21,7 +21,7 @@ import com.google.api.services.bigquery.model.DatasetList; import com.google.api.services.bigquery.model.DatasetReference; import java.util.Collections; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class HttpBigQueryRpcTest { @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java index 5aadd11e33..589f7ccda0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java @@ -16,8 +16,8 @@ package com.google.cloud.bigquery.testing; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption; @@ -27,12 +27,12 @@ import java.io.InputStream; import java.time.Duration; import java.util.concurrent.ExecutionException; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class RemoteBigQueryHelperTest { private static final String DATASET_NAME = "dataset-name"; diff --git a/pom.xml b/pom.xml index 4cded9553c..f99bb3f907 100644 --- a/pom.xml +++ b/pom.xml @@ -110,12 +110,6 @@ - - junit - junit - 4.13.2 - test - com.google.truth truth @@ -128,12 +122,6 @@ - - org.mockito - mockito-core - 4.11.0 - test - com.google.cloud google-cloud-storage @@ -152,6 +140,13 @@ 2.70.0 test + + org.mockito + mockito-bom + 4.11.0 + pom + import + From ffb0fdfbbb8ed08c8e221e68544f3076e90c0039 Mon Sep 17 00:00:00 2001 From: Kirill Logachev Date: Fri, 9 Jan 2026 13:24:25 -0800 Subject: [PATCH 26/36] chore: move Google JDBC driver code (#4050) --- google-cloud-bigquery-jdbc/pom.xml | 329 + .../BigQueryConversionException.java | 29 + 
.../BigQueryJdbcCoercionException.java | 36 + ...BigQueryJdbcCoercionNotFoundException.java | 40 + .../exception/BigQueryJdbcException.java | 76 + .../BigQueryJdbcRuntimeException.java | 48 + ...ryJdbcSqlFeatureNotSupportedException.java | 40 + .../BigQueryJdbcSqlSyntaxErrorException.java | 36 + .../bigquery/jdbc/BigQueryArrowArray.java | 105 + .../jdbc/BigQueryArrowBatchWrapper.java | 68 + .../bigquery/jdbc/BigQueryArrowResultSet.java | 492 ++ .../bigquery/jdbc/BigQueryArrowStruct.java | 87 + .../bigquery/jdbc/BigQueryBaseArray.java | 172 + .../bigquery/jdbc/BigQueryBaseResultSet.java | 618 ++ .../bigquery/jdbc/BigQueryBaseStruct.java | 97 + .../jdbc/BigQueryCallableStatement.java | 1341 +++++ .../cloud/bigquery/jdbc/BigQueryCoercion.java | 44 + .../bigquery/jdbc/BigQueryConnection.java | 1160 ++++ .../jdbc/BigQueryConnectionProperty.java | 117 + .../jdbc/BigQueryDaemonPollingTask.java | 122 + .../jdbc/BigQueryDatabaseMetaData.java | 5346 +++++++++++++++++ .../jdbc/BigQueryDefaultCoercions.java | 102 + .../cloud/bigquery/jdbc/BigQueryDriver.java | 249 + .../bigquery/jdbc/BigQueryErrorMessage.java | 29 + .../jdbc/BigQueryFieldValueListWrapper.java | 81 + .../jdbc/BigQueryJdbcBulkInsertWriter.java | 130 + .../jdbc/BigQueryJdbcCustomLogger.java | 32 + .../jdbc/BigQueryJdbcOAuthUtility.java | 759 +++ .../bigquery/jdbc/BigQueryJdbcParameter.java | 117 + .../jdbc/BigQueryJdbcProxyUtility.java | 291 + .../bigquery/jdbc/BigQueryJdbcRootLogger.java | 193 + .../jdbc/BigQueryJdbcTypeMappings.java | 159 + .../bigquery/jdbc/BigQueryJdbcUrlUtility.java | 920 +++ .../bigquery/jdbc/BigQueryJsonArray.java | 105 + .../bigquery/jdbc/BigQueryJsonResultSet.java | 317 + .../bigquery/jdbc/BigQueryJsonStruct.java | 80 + .../jdbc/BigQueryNoOpsConnection.java | 191 + .../bigquery/jdbc/BigQueryNoOpsResultSet.java | 693 +++ .../bigquery/jdbc/BigQueryNoOpsStatement.java | 90 + .../jdbc/BigQueryParameterHandler.java | 281 + .../jdbc/BigQueryPooledConnection.java | 497 ++ .../jdbc/BigQueryPreparedStatement.java | 611 ++ .../bigquery/jdbc/BigQueryResultSet.java | 46 + .../jdbc/BigQueryResultSetFinalizers.java | 73 + .../jdbc/BigQueryResultSetMetadata.java | 213 + .../cloud/bigquery/jdbc/BigQuerySettings.java | 857 +++ .../jdbc/BigQuerySqlTypeConverter.java | 81 + .../bigquery/jdbc/BigQueryStatement.java | 1522 +++++ .../bigquery/jdbc/BigQueryThreadFactory.java | 44 + .../bigquery/jdbc/BigQueryTypeCoercer.java | 149 + .../jdbc/BigQueryTypeCoercerBuilder.java | 79 + .../jdbc/BigQueryTypeCoercionUtility.java | 409 ++ .../cloud/bigquery/jdbc/DataSource.java | 664 ++ .../jdbc/PooledConnectionDataSource.java | 68 + .../jdbc/PooledConnectionListener.java | 145 + .../META-INF/services/java.sql.Driver | 1 + .../DatabaseMetaData_GetCrossReference.sql | 72 + .../jdbc/DatabaseMetaData_GetExportedKeys.sql | 71 + .../jdbc/DatabaseMetaData_GetImportedKeys.sql | 71 + .../jdbc/DatabaseMetaData_GetPrimaryKeys.sql | 30 + .../bigquery/jdbc/dependencies.properties | 4 + ...FormatTypeBigQueryCoercionUtilityTest.java | 234 + .../BigQueryArrowArrayOfPrimitivesTest.java | 367 ++ .../jdbc/BigQueryArrowArrayOfStructTest.java | 205 + .../jdbc/BigQueryArrowResultSetTest.java | 358 ++ .../jdbc/BigQueryArrowStructTest.java | 245 + .../jdbc/BigQueryBaseResultSetTest.java | 104 + ...igQueryBigQueryTypeCoercerBuilderTest.java | 43 + .../jdbc/BigQueryCallableStatementTest.java | 1118 ++++ .../bigquery/jdbc/BigQueryConnectionTest.java | 343 ++ .../jdbc/BigQueryDaemonPollingTaskTest.java | 63 + .../jdbc/BigQueryDatabaseMetaDataTest.java | 3209 ++++++++++ 
.../jdbc/BigQueryDefaultCoercionsTest.java | 221 + .../bigquery/jdbc/BigQueryDriverTest.java | 97 + .../bigquery/jdbc/BigQueryJdbcBaseTest.java | 82 + .../jdbc/BigQueryJdbcOAuthUtilityTest.java | 505 ++ .../jdbc/BigQueryJdbcParameterTest.java | 75 + .../jdbc/BigQueryJdbcProxyUtilityTest.java | 293 + .../jdbc/BigQueryJdbcUrlUtilityTest.java | 803 +++ .../BigQueryJsonArrayOfPrimitivesTest.java | 342 ++ .../jdbc/BigQueryJsonArrayOfStructTest.java | 204 + .../jdbc/BigQueryJsonResultSetTest.java | 476 ++ .../bigquery/jdbc/BigQueryJsonStructTest.java | 264 + .../jdbc/BigQueryParameterHandlerTest.java | 142 + .../jdbc/BigQueryPooledConnectionTest.java | 174 + .../jdbc/BigQueryResultSetFinalizersTest.java | 67 + .../jdbc/BigQueryResultSetMetadataTest.java | 277 + .../bigquery/jdbc/BigQueryStatementTest.java | 483 ++ .../jdbc/BigQueryThreadFactoryTest.java | 41 + .../jdbc/BigQueryTypeCoercerTest.java | 100 + ...dValueTypeBigQueryCoercionUtilityTest.java | 378 ++ .../cloud/bigquery/jdbc/NullHandlingTest.java | 44 + .../jdbc/PooledConnectionDataSourceTest.java | 80 + .../jdbc/PooledConnectionListenerTest.java | 172 + .../google/cloud/bigquery/jdbc/TestType.java | 31 + .../google/cloud/bigquery/jdbc/it/ITBase.java | 21 + .../bigquery/jdbc/it/ITBigQueryJDBCTest.java | 4439 ++++++++++++++ .../jdbc/it/ITNightlyBigQueryTest.java | 1713 ++++++ .../bigquery/jdbc/it/ITPSCBigQueryTest.java | 300 + .../bigquery/jdbc/it/ITProxyBigQueryTest.java | 234 + .../bigquery/jdbc/it/ITTPCBigQueryTest.java | 220 + .../bigquery/jdbc/rules/TimeZoneRule.java | 57 + .../bigquery/jdbc/utils/ArrowUtilities.java | 54 + .../bigquery/jdbc/utils/TestUtilities.java | 143 + .../cloud/bigquery/jdbc/utils/URIBuilder.java | 34 + .../src/test/resources/fake.p12 | Bin 0 -> 5223 bytes .../test/resources/test_truststore_nopass.jks | Bin 0 -> 32 bytes .../resources/test_truststore_withpass.jks | Bin 0 -> 1187 bytes 108 files changed, 39784 insertions(+) create mode 100644 google-cloud-bigquery-jdbc/pom.xml create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java create mode 100644 
google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPreparedStatement.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java 
create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercer.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerBuilder.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercionUtility.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/DataSource.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java create mode 100644 google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java create mode 100644 google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver create mode 100644 google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql create mode 100644 google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql create mode 100644 google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql create mode 100644 google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql create mode 100644 google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java create mode 100644 
google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryStatementTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactoryTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/FieldValueTypeBigQueryCoercionUtilityTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java create mode 100644 
google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java create mode 100644 google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java create mode 100644 google-cloud-bigquery-jdbc/src/test/resources/fake.p12 create mode 100644 google-cloud-bigquery-jdbc/src/test/resources/test_truststore_nopass.jks create mode 100644 google-cloud-bigquery-jdbc/src/test/resources/test_truststore_withpass.jks diff --git a/google-cloud-bigquery-jdbc/pom.xml b/google-cloud-bigquery-jdbc/pom.xml new file mode 100644 index 0000000000..a697ba1017 --- /dev/null +++ b/google-cloud-bigquery-jdbc/pom.xml @@ -0,0 +1,329 @@ + + + + 4.0.0 + com.google.cloud + google-cloud-bigquery-jdbc + 0.1.0 + jar + BigQuery JDBC + https://github.com/googleapis/java-bigquery-jdbc + JDBC for BigQuery + + + UTF-8 + UTF-8 + github + google-cloud-bigquery-jdbc + + + + + + + src/main/resources + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.5.2 + + ${skipSurefire} + + + + org.jacoco + jacoco-maven-plugin + 0.8.13 + + + + + + com.google.cloud + google-cloud-bigquery-jdbc-parent + 0.1.0 + + + + com.google.cloud + google-cloud-bigquery + + + com.google.cloud + google-cloud-bigquerystorage + + + com.google.api + api-common + + + org.apache.arrow + arrow-vector + + + com.google.guava + guava + + + com.google.cloud + google-cloud-core + + + com.google.api + gax + + + com.google.auth + google-auth-library-oauth2-http + + + com.google.auth + google-auth-library-credentials + + + + + org.apache.arrow + arrow-memory-core + + + org.apache.arrow + arrow-memory-netty + + + com.google.protobuf + protobuf-java + + + com.google.api.grpc + proto-google-cloud-bigquerystorage-v1 + + + com.google.code.gson + gson + + + com.google.code.findbugs + jsr305 + + + org.apache.httpcomponents.core5 + httpcore5 + + + org.apache.httpcomponents.client5 + httpclient5 + + + com.google.http-client + google-http-client + + + com.google.http-client + google-http-client-apache-v5 + + + org.apache.httpcomponents + httpcore + + + org.apache.httpcomponents + httpclient + + + + + com.google.cloud + google-cloud-core-http + + + com.google.api + gax-grpc + + + io.grpc + grpc-api + + + io.grpc + grpc-netty-shaded + + + io.grpc + grpc-core + + + + com.google.truth + truth + 1.1.3 + test + + + junit + junit + 4.13.2 + test + + + org.mockito + mockito-core + 4.11.0 + test + + + + + + java17 + + [17,) + + + !jvm + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + --add-opens=java.base/java.nio=org.apache.arrow.memory.core,ALL-UNNAMED + + + + + + + + + release-all-dependencies-shaded + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.2 + + + + + + com + shaded.bqjdbc.com + + com.google.cloud.bigquery.jdbc.* + + + + org + shaded.bqjdbc.org + + org.conscrypt.* + + + + io + shaded.bqjdbc.io + + + + + + + + + + + + release-all-dependencies + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.2 + + + package + + shade + + + false + + + java.base/java.nio=ALL-UNNAMED + + + + + + + META-INF/io.netty.versions.properties + + + + + *:* + + META-INF/LICENSE* + META-INF/NOTICE* + META-INF/DEPENDENCIES + META-INF/proguard/*.pro + 
META-INF/maven/** + META-INF/*.MF + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + arrow-git.properties + + + + + + + + + + + + + + docker + + + env.JDBC_DOCKER_ENV + + + + + /mvn/test-target + + + + \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java new file mode 100644 index 0000000000..90e758b05e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java @@ -0,0 +1,29 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import java.sql.SQLException; + +/** + * Exception for errors that occur when the driver cannot convert a value from one type to another. + */ +public class BigQueryConversionException extends SQLException { + + public BigQueryConversionException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java new file mode 100644 index 0000000000..185ef54bb1 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.api.core.InternalApi; + +/** + * Thrown to indicate that the coercion was attempted but couldn't be performed successfully because + * of some error. + */ +@InternalApi +public class BigQueryJdbcCoercionException extends RuntimeException { + + /** + * Construct a new exception with the specified cause. + * + * @param cause the actual cause which was thrown while performing the coercion. 
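The two conversion-related types here (BigQueryConversionException, a checked SQLException, and BigQueryJdbcCoercionException, an unchecked internal wrapper) surface to applications through ordinary java.sql error handling. A minimal sketch of what that looks like from caller code, assuming a registered driver; the JDBC URL below is a hypothetical placeholder, since the real scheme is parsed by BigQueryJdbcUrlUtility elsewhere in this patch:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ConversionErrorExample {
  public static void main(String[] args) {
    // Hypothetical placeholder URL; see BigQueryJdbcUrlUtility for the accepted form.
    String url = "jdbc:bigquery://...";
    try (Connection connection = DriverManager.getConnection(url);
        Statement statement = connection.createStatement();
        ResultSet resultSet = statement.executeQuery("SELECT 'abc' AS s")) {
      while (resultSet.next()) {
        // Requesting an int from a STRING column forces a coercion that cannot
        // succeed; the driver reports it as a SQLException subtype such as
        // BigQueryConversionException.
        int value = resultSet.getInt("s");
        System.out.println(value);
      }
    } catch (SQLException e) {
      System.err.println("Conversion failed: " + e.getMessage());
    }
  }
}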
+ */ + public BigQueryJdbcCoercionException(Exception cause) { + super("Coercion error", cause); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java new file mode 100644 index 0000000000..b4eafb2ee5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.api.core.InternalApi; + +/** + * Thrown to indicate that the current TypeCoercer can not perform the coercion as the Coercion + * implementation is not registered for the mentioned source and target type. + */ +@InternalApi +public class BigQueryJdbcCoercionNotFoundException extends RuntimeException { + + /** + * Construct a new exception. + * + * @param source the source type. + * @param target the target type. + */ + public BigQueryJdbcCoercionNotFoundException(Class source, Class target) { + super( + String.format( + "Coercion not found for [%s -> %s] conversion", + source.getCanonicalName(), target.getCanonicalName())); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java new file mode 100644 index 0000000000..72a22aba61 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java @@ -0,0 +1,76 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.cloud.bigquery.BigQueryException; +import java.sql.SQLException; + +public class BigQueryJdbcException extends SQLException { + private BigQueryException bigQueryException = null; + + /** + * Constructs a new BigQueryJdbcException with the given message. + * + * @param message The detail message. + */ + public BigQueryJdbcException(String message) { + super(message); + } + + /** + * Constructs a new BigQueryJdbcException from InterruptedException + * + * @param ex The InterruptedException to be thrown. 
+ */ + public BigQueryJdbcException(InterruptedException ex) { + super(ex); + } + + /** + * Constructs a new BigQueryJdbcException from BigQueryException + * + * @param ex The BigQueryException to be thrown. + */ + public BigQueryJdbcException(BigQueryException ex) { + super(ex); + this.bigQueryException = ex; + } + + /** + * Constructs a new BigQueryJdbcException with the specified message and cause. + * + * @param message Specific message that is being added to the Exception. + * @param cause Throwable that is being converted. + */ + public BigQueryJdbcException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructs a new BigQueryJdbcException with the specified cause and a detail message of + * (cause==null ? null : cause.toString()) + * + * @param cause Throwable that is being converted. + */ + public BigQueryJdbcException(Throwable cause) { + super(cause); + } + + public BigQueryException getBigQueryException() { + return bigQueryException; + } }
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java new file mode 100644 index 0000000000..38e5171be4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java
@@ -0,0 +1,48 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +public class BigQueryJdbcRuntimeException extends RuntimeException { + + /** + * Constructs a new BigQueryJdbcRuntimeException with the given message. + * + * @param message The detail message. + */ + public BigQueryJdbcRuntimeException(String message) { + super(message); + } + + /** + * Constructs a new BigQueryJdbcRuntimeException from a Throwable exception. + * + * @param ex Throwable to be thrown. + */ + public BigQueryJdbcRuntimeException(Throwable ex) { + super(ex); + } + + /** + * Constructs a new BigQueryJdbcRuntimeException from an InterruptedException and a message. + * + * @param message The detail message. + * @param ex The InterruptedException to be thrown. + */ + public BigQueryJdbcRuntimeException(String message, InterruptedException ex) { + super(message, ex); + } }
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java new file mode 100644 index 0000000000..8c93d8764b --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java
@@ -0,0 +1,40 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
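BigQueryJdbcException keeps a reference to the originating BigQueryException only when it is constructed from one; the other constructors leave the field null. A small sketch of the unwrap pattern this enables (the helper name is illustrative, not part of the driver):

import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.exception.BigQueryJdbcException;
import java.sql.SQLException;

public class UnwrapExample {
  // Illustrative helper: returns the wrapped BigQueryException, or null when the
  // SQLException did not originate from one.
  static BigQueryException unwrap(SQLException e) {
    if (e instanceof BigQueryJdbcException) {
      // Only the BigQueryJdbcException(BigQueryException) constructor populates
      // this field; the other constructors leave it null.
      return ((BigQueryJdbcException) e).getBigQueryException();
    }
    return null;
  }
}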
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.cloud.bigquery.BigQueryException; +import java.sql.SQLFeatureNotSupportedException; + +public class BigQueryJdbcSqlFeatureNotSupportedException extends SQLFeatureNotSupportedException { + /** + * Constructs a new BigQueryJdbcSqlFeatureNotSupportedException with the given message. + * + * @param message The detail message. + */ + public BigQueryJdbcSqlFeatureNotSupportedException(String message) { + super(message); + } + + /** + * Constructs a new BigQueryJdbcSqlFeatureNotSupportedException from BigQueryException + * + * @param ex The BigQueryException to be thrown. + */ + public BigQueryJdbcSqlFeatureNotSupportedException(BigQueryException ex) { + super(ex); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java new file mode 100644 index 0000000000..99edcd0c54 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java @@ -0,0 +1,36 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.cloud.bigquery.BigQueryException; +import java.sql.SQLSyntaxErrorException; + +/** + * Specific {@link SQLSyntaxErrorException} thrown when the SQLState class value is '42', or under + * vendor-specified conditions. This indicates that the in-progress query has violated SQL syntax + * rules. + */ +public class BigQueryJdbcSqlSyntaxErrorException extends SQLSyntaxErrorException { + /** + * Constructs a new BigQueryJdbcSqlSyntaxErrorException from BigQueryException + * + * @param ex The BigQueryException to be thrown. + */ + public BigQueryJdbcSqlSyntaxErrorException(BigQueryException ex) { + super(ex.getMessage(), "Incorrect SQL syntax."); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java new file mode 100644 index 0000000000..49bd565df7 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java @@ -0,0 +1,105 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
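For the syntax-error path, callers can rely on the standard java.sql.SQLSyntaxErrorException supertype rather than the driver-specific BigQueryJdbcSqlSyntaxErrorException above. A hedged sketch with a deliberately malformed statement (the query text and method shape are illustrative):

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLSyntaxErrorException;
import java.sql.Statement;

public class SyntaxErrorExample {
  static void runBadQuery(Connection connection) throws SQLException {
    try (Statement statement = connection.createStatement()) {
      statement.executeQuery("SELEC 1"); // deliberately malformed
    } catch (SQLSyntaxErrorException e) {
      // BigQueryJdbcSqlSyntaxErrorException extends SQLSyntaxErrorException, so
      // catching the generic supertype is enough here; the message carries the
      // backend error text.
      System.err.println("Rejected SQL: " + e.getMessage());
    }
  }
}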
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import java.sql.ResultSet; +import java.sql.SQLException; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +/** + * An implementation of {@link BigQueryBaseArray} used to represent Array values from Arrow data. + */ +class BigQueryArrowArray extends BigQueryBaseArray { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryArrowArray.class.getName()); + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + private JsonStringArrayList values; + + public BigQueryArrowArray(Field schema, JsonStringArrayList values) { + super(schema); + this.values = values; + } + + @Override + public Object getArray() { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return null; + } + return getArrayInternal(0, values.size()); + } + + @Override + public Object getArray(long index, int count) { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return null; + } + Tuple range = createRange(index, count, this.values.size()); + return getArrayInternal(range.x(), range.y()); + } + + @Override + public ResultSet getResultSet() throws SQLException { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return new BigQueryArrowResultSet(); + } + BigQueryArrowBatchWrapper arrowBatchWrapper = + BigQueryArrowBatchWrapper.getNestedFieldValueListWrapper(values); + return BigQueryArrowResultSet.getNestedResultSet( + Schema.of(singleElementSchema()), arrowBatchWrapper, 0, this.values.size()); + } + + @Override + public ResultSet getResultSet(long index, int count) throws SQLException { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return new BigQueryArrowResultSet(); + } + Tuple range = createRange(index, count, this.values.size()); + BigQueryArrowBatchWrapper arrowBatchWrapper = + BigQueryArrowBatchWrapper.getNestedFieldValueListWrapper(values); + return BigQueryArrowResultSet.getNestedResultSet( + Schema.of(singleElementSchema()), arrowBatchWrapper, range.x(), range.y()); + } + + @Override + public void free() { + LOG.finest("++enter++"); + this.values = null; + markInvalid(); + } + + @Override + Object getCoercedValue(int index) { + LOG.finest("++enter++"); + Object value = this.values.get(index); + return this.arrayOfStruct + ? 
new BigQueryArrowStruct(schema.getSubFields(), (JsonStringHashMap) value) + : BIGQUERY_TYPE_COERCER.coerceTo(getTargetClass(), value); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java new file mode 100644 index 0000000000..4d322ad977 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java @@ -0,0 +1,68 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import org.apache.arrow.vector.util.JsonStringArrayList; + +/** This class acts as a facade layer and wraps Arrow's VectorSchemaRoot & JsonStringArrayList */ +class BigQueryArrowBatchWrapper { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryArrowBatchWrapper.class.getName()); + // Reference to the current arrowBatch + private final ArrowRecordBatch currentArrowBatch; + // Reference to the nested Records, set as null otherwise (Arrays) + private final JsonStringArrayList nestedRecords; + + // Marks the end of the stream for the ResultSet + private final boolean isLast; + + private BigQueryArrowBatchWrapper( + ArrowRecordBatch currentArrowBatch, JsonStringArrayList nestedRecords, boolean isLast) { + this.currentArrowBatch = currentArrowBatch; + this.nestedRecords = nestedRecords; + this.isLast = isLast; + } + + static BigQueryArrowBatchWrapper of(ArrowRecordBatch currentArrowBatch, boolean... isLast) { + LOG.finest("++enter++"); + boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0]; + return new BigQueryArrowBatchWrapper(currentArrowBatch, null, isLastFlag); + } + + static BigQueryArrowBatchWrapper getNestedFieldValueListWrapper( + JsonStringArrayList nestedRecords, boolean... 
isLast) { + LOG.finest("++enter++"); + boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0]; + return new BigQueryArrowBatchWrapper(null, nestedRecords, isLastFlag); + } + + ArrowRecordBatch getCurrentArrowBatch() { + LOG.finest("++enter++"); + return this.currentArrowBatch; + } + + JsonStringArrayList getNestedRecords() { + LOG.finest("++enter++"); + return this.nestedRecords; + } + + boolean isLast() { + LOG.finest("++enter++"); + return this.isLast; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java new file mode 100644 index 0000000000..004dfb02b4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java @@ -0,0 +1,492 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray; +import static com.google.cloud.bigquery.jdbc.BigQueryBaseStruct.isStruct; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import java.io.IOException; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.VectorLoader; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.ipc.ReadChannel; +import org.apache.arrow.vector.ipc.message.MessageSerializer; +import org.apache.arrow.vector.util.ByteArrayReadableSeekableByteChannel; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +/** {@link ResultSet} Implementation for Arrow datasource (Using Storage Read APIs) */ +class BigQueryArrowResultSet extends BigQueryBaseResultSet { + private final long totalRows; + // count of rows read by the current instance of ResultSet + private long rowCount = 0; + // IMP: This is a buffer of Arrow batches, the max size should be kept at min as + // possible to avoid holding too much memory + private final BlockingQueue buffer; + + // TODO(neenu): See if it makes sense to have the nested batch represented by + // 'JsonStringArrayList' directly + // points to the nested batch of arrow record + private final BigQueryArrowBatchWrapper 
currentNestedBatch; + private final int fromIndex; + private final int toIndexExclusive; + + // Acts as a cursor, resets to -1 when the `currentBatch` is processed. points to a + // logical row in the columnar BigQueryBigQueryArrowBatchWrapper currentBatch + private int currentBatchRowIndex = -1; + private boolean hasReachedEnd = false; + + // Tracks the index of the nested element under process + private int nestedRowIndex; + + private boolean afterLast = false; + + private ArrowDeserializer arrowDeserializer; + BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); + // Decoder object will be reused to avoid re-allocation and too much garbage collection. + private VectorSchemaRoot vectorSchemaRoot; + private VectorLoader vectorLoader; + // producer thread's reference + private final Thread ownedThread; + + private BigQueryArrowResultSet( + Schema schema, + ArrowSchema arrowSchema, + long totalRows, + BigQueryStatement statement, + BlockingQueue buffer, + BigQueryArrowBatchWrapper currentNestedBatch, + boolean isNested, + int fromIndex, + int toIndexExclusive, + Thread ownedThread, + BigQuery bigQuery) + throws SQLException { + super(bigQuery, statement, schema, isNested); + LOG.finest("++enter++"); + this.totalRows = totalRows; + this.buffer = buffer; + this.currentNestedBatch = currentNestedBatch; + this.fromIndex = fromIndex; + this.toIndexExclusive = toIndexExclusive; + this.nestedRowIndex = fromIndex - 1; + this.ownedThread = ownedThread; + if (!isNested && arrowSchema != null) { + try { + this.arrowDeserializer = new ArrowDeserializer(arrowSchema); + } catch (IOException ex) { + throw new BigQueryJdbcException(ex); + } + } + } + + /** + * This method returns an instance of BigQueryArrowResultSet after adding it in the list of + * ArrowResultSetFinalizer + * + * @return BigQueryArrowResultSet + */ + static BigQueryArrowResultSet of( + Schema schema, + ArrowSchema arrowSchema, + long totalRows, + BigQueryStatement statement, + BlockingQueue buffer, + Thread ownedThread, + BigQuery bigQuery) + throws SQLException { + return new BigQueryArrowResultSet( + schema, + arrowSchema, + totalRows, + statement, + buffer, + null, + false, + -1, + -1, + ownedThread, + bigQuery); + } + + BigQueryArrowResultSet() throws SQLException { + super(null, null, null, false); + this.totalRows = 0; + this.buffer = null; + this.currentNestedBatch = null; + this.fromIndex = 0; + this.toIndexExclusive = 0; + this.ownedThread = null; + this.arrowDeserializer = null; + this.vectorSchemaRoot = null; + this.vectorLoader = null; + } + + static BigQueryArrowResultSet getNestedResultSet( + Schema schema, BigQueryArrowBatchWrapper nestedBatch, int fromIndex, int toIndexExclusive) + throws SQLException { + return new BigQueryArrowResultSet( + schema, null, -1, null, null, nestedBatch, true, fromIndex, toIndexExclusive, null, null); + } + + private class ArrowDeserializer implements AutoCloseable { + + /* Decoder object will be reused to avoid re-allocation and too much garbage collection. 
*/ + private ArrowDeserializer(ArrowSchema arrowSchema) throws IOException { + org.apache.arrow.vector.types.pojo.Schema schema = + MessageSerializer.deserializeSchema( + new org.apache.arrow.vector.ipc.ReadChannel( + new ByteArrayReadableSeekableByteChannel( + arrowSchema.getSerializedSchema().toByteArray()))); + List<FieldVector> vectors = new ArrayList<>(); + List<org.apache.arrow.vector.types.pojo.Field> fields = schema.getFields(); + for (org.apache.arrow.vector.types.pojo.Field field : fields) { + vectors.add(field.createVector(allocator)); + } + vectorSchemaRoot = new VectorSchemaRoot(vectors); + vectorLoader = new VectorLoader(vectorSchemaRoot); + } + + private void deserializeArrowBatch(ArrowRecordBatch batch) throws SQLException { + LOG.finest("++enter++"); + try { + if (vectorSchemaRoot != null) { + // Clear vectorSchemaRoot before populating a new batch + vectorSchemaRoot.clear(); + } + org.apache.arrow.vector.ipc.message.ArrowRecordBatch deserializedBatch = + MessageSerializer.deserializeRecordBatch( + new ReadChannel( + new ByteArrayReadableSeekableByteChannel( + batch.getSerializedRecordBatch().toByteArray())), + allocator); + + vectorLoader.load(deserializedBatch); + // Release buffers from batch (they are still held in the vectors in root). + deserializedBatch.close(); + } catch (RuntimeException | IOException ex) { + throw new BigQueryJdbcException(ex); + } + } + + @Override + public void close() { + LOG.finest("++enter++"); + vectorSchemaRoot.close(); + allocator.close(); + } + } + + @Override + public boolean next() throws SQLException { + checkClosed(); + if (this.isNested) { + if (this.currentNestedBatch == null || this.currentNestedBatch.getNestedRecords() == null) { + throw new IllegalStateException( + "currentNestedBatch/JsonStringArrayList cannot be null when working with a nested record"); + } + if (this.nestedRowIndex < (this.toIndexExclusive - 1)) { + /* Check if there's a next record in the array which can be read */ + this.nestedRowIndex++; + return true; + } + this.afterLast = true; + return false; + } else { + /* Non nested */ + if (this.hasReachedEnd || this.isLast()) { + this.afterLast = true; + return false; + } + try { + if (this.currentBatchRowIndex == -1 + || this.currentBatchRowIndex == (this.vectorSchemaRoot.getRowCount() - 1)) { + /* Start of iteration or we have exhausted the current batch */ + // Advance the cursor. Potentially blocking operation. + BigQueryArrowBatchWrapper batchWrapper = this.buffer.take(); + if (batchWrapper.isLast()) { + /* Marks the end of the records */ + if (this.vectorSchemaRoot != null) { + // IMP: To avoid memory leak: clear vectorSchemaRoot as it still holds + // the last batch + this.vectorSchemaRoot.clear(); + } + this.hasReachedEnd = true; + this.rowCount++; + return false; + } + // Valid batch, process it + ArrowRecordBatch arrowBatch = batchWrapper.getCurrentArrowBatch(); + // Populates vectorSchemaRoot + this.arrowDeserializer.deserializeArrowBatch(arrowBatch); + // Pointing to the first row in this fresh batch + this.currentBatchRowIndex = 0; + this.rowCount++; + return true; + } + // There are rows left in the current batch. + else if (this.currentBatchRowIndex < this.vectorSchemaRoot.getRowCount()) { + this.currentBatchRowIndex++; + this.rowCount++; + return true; + } + } catch (InterruptedException | SQLException ex) { + throw new BigQueryJdbcException( + "Error occurred while advancing the cursor. 
This could happen when connection is closed while the next method is being called.", + ex); + } + } + return false; + } + + private Object getObjectInternal(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + Object value; + if (this.isNested) { + // BigQuery doesn't support multidimensional arrays, so + // just the default row num column (1) and the actual column (2) is supposed to be read + if (!(columnIndex == 1 || columnIndex == 2)) { + + throw new IllegalArgumentException( + "Column index is required to be 1 or 2 for nested arrays"); + } + if (this.currentNestedBatch.getNestedRecords() == null) { + throw new IllegalStateException("JsonStringArrayList cannot be null for nested records."); + } + // For Arrays the first column is Index, ref: + // https://docs.oracle.com/javase/7/docs/api/java/sql/Array.html#getResultSet() + if (columnIndex == 1) { + return this.nestedRowIndex + 1; + } + // columnIndex = 2, return the data against the current nestedRowIndex + else { + value = this.currentNestedBatch.getNestedRecords().get(this.nestedRowIndex); + } + } else { + // get the current column + // SQL index to Java Index + FieldVector currentColumn = this.vectorSchemaRoot.getVector(columnIndex - 1); + // get the current row + value = currentColumn.getObject(this.currentBatchRowIndex); + } + setWasNull(value); + return value; + } + + @Override + public Object getObject(int columnIndex) throws SQLException { + + // columnIndex is SQL index starting at 1 + LOG.finest("++enter++"); + checkClosed(); + Object value = getObjectInternal(columnIndex); + if (value == null) { + return null; + } + + if (this.isNested && columnIndex == 1) { + return this.bigQueryTypeCoercer.coerceTo(Integer.class, value); + } + + if (this.isNested && columnIndex == 2) { + Field arrayField = this.schema.getFields().get(0); + if (isStruct(arrayField)) { + return new BigQueryArrowStruct(arrayField.getSubFields(), (JsonStringHashMap) value); + } + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + arrayField.getType().getStandardType()); + return this.bigQueryTypeCoercer.coerceTo(targetClass, value); + } + + int fieldIndex = this.isNested ? 0 : columnIndex - 1; + Field fieldSchema = this.schemaFieldList.get(fieldIndex); + if (isArray(fieldSchema)) { + JsonStringArrayList originalList = (JsonStringArrayList) value; + StandardSQLTypeName elementTypeName = fieldSchema.getType().getStandardType(); + if (elementTypeName == StandardSQLTypeName.NUMERIC + || elementTypeName == StandardSQLTypeName.BIGNUMERIC) { + JsonStringArrayList newList = new JsonStringArrayList<>(); + for (Object item : originalList) { + if (item != null) { + newList.add(((BigDecimal) item).stripTrailingZeros()); + } else { + newList.add(null); + } + } + return new BigQueryArrowArray(fieldSchema, newList); + } else if (elementTypeName == StandardSQLTypeName.RANGE) { + JsonStringArrayList newList = new JsonStringArrayList<>(); + for (Object item : originalList) { + if (item != null) { + JsonStringHashMap rangeMap = (JsonStringHashMap) item; + Object start = rangeMap.get("start"); + Object end = rangeMap.get("end"); + + Object representativeElement = (start != null) ? 
start : end; + StandardSQLTypeName rangeElementType = getElementTypeFromValue(representativeElement); + + String formattedStart = formatRangeElement(start, rangeElementType); + String formattedEnd = formatRangeElement(end, rangeElementType); + + newList.add(String.format("[%s, %s)", formattedStart, formattedEnd)); + } else { + newList.add(null); + } + } + return new BigQueryArrowArray(fieldSchema, newList); + } + return new BigQueryArrowArray(fieldSchema, originalList); + } else if (isStruct(fieldSchema)) { + return new BigQueryArrowStruct(fieldSchema.getSubFields(), (JsonStringHashMap) value); + } else if (fieldSchema.getType().getStandardType() == StandardSQLTypeName.RANGE) { + JsonStringHashMap rangeMap = (JsonStringHashMap) value; + Object start = rangeMap.get("start"); + Object end = rangeMap.get("end"); + + Object representativeElement = (start != null) ? start : end; + StandardSQLTypeName elementType = getElementTypeFromValue(representativeElement); + + String formattedStart = formatRangeElement(start, elementType); + String formattedEnd = formatRangeElement(end, elementType); + + return String.format("[%s, %s)", formattedStart, formattedEnd); + } else { + if ((fieldSchema.getType().getStandardType() == StandardSQLTypeName.NUMERIC + || fieldSchema.getType().getStandardType() == StandardSQLTypeName.BIGNUMERIC) + && value instanceof BigDecimal) { + // The Arrow DecimalVector may return a BigDecimal with a larger scale than necessary. + // Strip trailing zeros to match JSON API and CLI output + return ((BigDecimal) value).stripTrailingZeros(); + } + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + fieldSchema.getType().getStandardType()); + return this.bigQueryTypeCoercer.coerceTo(targetClass, value); + } + } + + private StandardSQLTypeName getElementTypeFromValue(Object element) { + if (element == null) { + return StandardSQLTypeName.STRING; + } + if (element instanceof Integer) { + return StandardSQLTypeName.DATE; + } + if (element instanceof Long) { + return StandardSQLTypeName.TIMESTAMP; + } + if (element instanceof LocalDateTime) { + return StandardSQLTypeName.DATETIME; + } + return StandardSQLTypeName.STRING; + } + + private String formatRangeElement(Object element, StandardSQLTypeName elementType) { + if (element == null) { + return "UNBOUNDED"; + } + switch (elementType) { + case DATE: + // Arrow gives DATE as an Integer (days since epoch) + Date date = this.bigQueryTypeCoercer.coerceTo(Date.class, (Integer) element); + return date.toString(); + case DATETIME: + // Arrow gives DATETIME as a LocalDateTime + Timestamp dtTs = + this.bigQueryTypeCoercer.coerceTo(Timestamp.class, (LocalDateTime) element); + return this.bigQueryTypeCoercer.coerceTo(String.class, dtTs); + case TIMESTAMP: + // Arrow gives TIMESTAMP as a Long (microseconds since epoch) + Timestamp ts = this.bigQueryTypeCoercer.coerceTo(Timestamp.class, (Long) element); + return this.bigQueryTypeCoercer.coerceTo(String.class, ts); + default: + // Fallback for any other unexpected type + return element.toString(); + } + } + + @Override + public void close() { + LOG.fine(String.format("Closing BigqueryArrowResultSet %s.", this)); + this.isClosed = true; + if (ownedThread != null && !ownedThread.isInterrupted()) { + // interrupt the producer thread when result set is closed + ownedThread.interrupt(); + } + super.close(); + } + + @Override + public boolean isBeforeFirst() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (this.isNested) { + return this.nestedRowIndex < 
this.fromIndex; + } else { + return this.rowCount == 0; + } + } + + @Override + public boolean isAfterLast() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + return this.afterLast; + } + + @Override + public boolean isFirst() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (this.isNested) { + return this.nestedRowIndex == this.fromIndex; + } else { + return this.rowCount == 1; + } + } + + @Override + public boolean isLast() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (this.isNested) { + return this.nestedRowIndex == this.toIndexExclusive - 1; + } else { + return this.rowCount == this.totalRows; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java new file mode 100644 index 0000000000..33befe902b --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java @@ -0,0 +1,87 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import java.lang.reflect.Array; +import java.util.ArrayList; +import java.util.List; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +/** + * An implementation of {@link BigQueryBaseStruct} used to represent Struct values from Arrow data. 
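+ *
+ * <p>A rough usage sketch (the query and column names below are illustrative only): a STRUCT
+ * column read through the driver surfaces as a {@link java.sql.Struct}, so a caller can unpack
+ * it along these lines:
+ *
+ * <pre>{@code
+ * try (ResultSet rs = stmt.executeQuery("SELECT STRUCT(1 AS id, 'a' AS name) AS s")) {
+ *   while (rs.next()) {
+ *     java.sql.Struct s = (java.sql.Struct) rs.getObject("s");
+ *     // Attributes are coerced per the driver's standard SQL -> Java type mappings.
+ *     Object[] attrs = s.getAttributes();
+ *   }
+ * }
+ * }</pre>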
+ */ +class BigQueryArrowStruct extends BigQueryBaseStruct { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryArrowStruct.class.getName()); + + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + + private final FieldList schema; + + private final JsonStringHashMap values; + + BigQueryArrowStruct(FieldList schema, JsonStringHashMap values) { + this.schema = schema; + this.values = values; + } + + @Override + FieldList getSchema() { + return this.schema; + } + + @Override + public Object[] getAttributes() { + LOG.finest("++enter++"); + int size = this.schema.size(); + Object[] attributes = (Object[]) Array.newInstance(Object.class, size); + + if (this.values == null) { + return attributes; + } + List structValues = new ArrayList<>(this.values.values()); + + for (int index = 0; index < size; index++) { + Field currentSchema = this.schema.get(index); + Object currentValue = structValues.get(index); + Object coercedValue = getValue(currentSchema, currentValue); + Array.set(attributes, index, coercedValue); + } + return attributes; + } + + private Object getValue(Field currentSchema, Object currentValue) { + LOG.finest("++enter++"); + if (isArray(currentSchema)) { + return new BigQueryArrowArray(currentSchema, (JsonStringArrayList) currentValue); + } else if (isStruct(currentSchema)) { + return new BigQueryArrowStruct( + currentSchema.getSubFields(), (JsonStringHashMap) currentValue); + } else { + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + currentSchema.getType().getStandardType()); + return BIGQUERY_TYPE_COERCER.coerceTo(targetClass, currentValue); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java new file mode 100644 index 0000000000..5fc2c15bbe --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java @@ -0,0 +1,172 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.Field.Mode.REPEATED; +import static com.google.cloud.bigquery.jdbc.BigQueryBaseStruct.isStruct; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.lang.reflect.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Struct; +import java.util.Arrays; +import java.util.Base64; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * An abstract implementation of {@link java.sql.Array} used as a base class for {@link + * BigQueryArrowArray} and {@link BigQueryJsonArray}. An Array value is a transaction-duration + * reference to an SQL ARRAY value. + */ +abstract class BigQueryBaseArray implements java.sql.Array { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryBaseArray.class.getName()); + + protected final boolean arrayOfStruct; + private boolean valid; + protected Field schema; + + BigQueryBaseArray(Field schema) { + this.schema = schema; + this.arrayOfStruct = isStruct(schema); + this.valid = true; + } + + @Override + public final String getBaseTypeName() { + LOG.finest("++enter++"); + ensureValid(); + return this.schema.getType().getStandardType().name(); + } + + @Override + public final int getBaseType() { + LOG.finest("++enter++"); + ensureValid(); + return BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get( + schema.getType().getStandardType()); + } + + @Override + public final Object getArray(Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final Object getArray(long index, int count, Map> map) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final ResultSet getResultSet(Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final ResultSet getResultSet(long index, int count, Map> map) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + protected Object getArrayInternal(int fromIndex, int toIndexExclusive) { + LOG.finest("++enter++"); + Class targetClass = getTargetClass(); + int size = toIndexExclusive - fromIndex; + Object javaArray = Array.newInstance(targetClass, size); + + for (int index = 0; index < size; index++) { + Array.set(javaArray, index, getCoercedValue(fromIndex + index)); + } + return javaArray; + } + + protected void ensureValid() throws IllegalStateException { + LOG.finest("++enter++"); + if (!this.valid) { + throw new IllegalStateException(INVALID_ARRAY); + } + } + + protected void markInvalid() { + LOG.finest("++enter++"); + this.schema = null; + this.valid = false; + } + + protected Field singleElementSchema() { + LOG.finest("++enter++"); + return this.schema.toBuilder().setMode(Mode.REQUIRED).build(); + } + + protected Tuple createRange(long index, int count, int size) + throws IllegalStateException { + 
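+    // java.sql.Array.getArray(index, count) and getResultSet(index, count) are 1-based, so
+    // normalise to a 0-based, half-open [from, to) range. For example, index = 2 with
+    // count = 3 over an array of size 5 maps to the slice [1, 4).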
LOG.finest("++enter++"); + // jdbc array follows 1 based array indexing + long normalisedFromIndex = index - 1; + if (normalisedFromIndex + count > size) { + throw new IllegalArgumentException( + String.format( + "The array index is out of range: %d, number of elements: %d.", index + count, size)); + } + long toIndex = normalisedFromIndex + count; + return Tuple.of((int) normalisedFromIndex, (int) toIndex); + } + + protected Class getTargetClass() { + LOG.finest("++enter++"); + return this.arrayOfStruct + ? Struct.class + : BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + this.schema.getType().getStandardType()); + } + + abstract Object getCoercedValue(int index); + + static boolean isArray(Field currentSchema) { + LOG.finest("++enter++"); + return currentSchema.getMode() == REPEATED; + } + + @Override + public String toString() { + try { + Object[] array = (Object[]) getArray(); + if (array == null) { + return "null"; + } + if (this.schema.getType().getStandardType() == StandardSQLTypeName.BYTES) { + return Arrays.stream(array) + .map( + element -> + element == null ? "null" : Base64.getEncoder().encodeToString((byte[]) element)) + .collect(Collectors.joining(", ", "[", "]")); + } + return Arrays.deepToString(array); + } catch (SQLException e) { + return "[Error converting array to string: " + e.getMessage() + "]"; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java new file mode 100644 index 0000000000..7367a81536 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java @@ -0,0 +1,618 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryConversionException; +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException; +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionNotFoundException; +import java.io.InputStream; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Calendar; + +public abstract class BigQueryBaseResultSet extends BigQueryNoOpsResultSet + implements BigQueryResultSet { + protected final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private BigQuery bigQuery; + private JobId jobId; + private String queryId; + private QueryStatistics queryStatistics; + protected final BigQueryStatement statement; + protected final Schema schema; + protected final FieldList schemaFieldList; + protected final boolean isNested; + protected boolean isClosed = false; + protected boolean wasNull = false; + protected final BigQueryTypeCoercer bigQueryTypeCoercer = BigQueryTypeCoercionUtility.INSTANCE; + + protected BigQueryBaseResultSet( + BigQuery bigQuery, BigQueryStatement statement, Schema schema, boolean isNested) { + this.bigQuery = bigQuery; + this.statement = statement; + this.schema = schema; + this.schemaFieldList = schema != null ? schema.getFields() : null; + this.isNested = isNested; + } + + public QueryStatistics getQueryStatistics() { + if (queryStatistics != null) { + return queryStatistics; + } + if (jobId == null || bigQuery == null) { + return null; + } + Job job = bigQuery.getJob(jobId); + queryStatistics = job != null ? 
job.getStatistics() : null;
+    return queryStatistics;
+  }
+
+  public void setJobId(JobId jobId) {
+    this.jobId = jobId;
+  }
+
+  public JobId getJobId() {
+    return jobId;
+  }
+
+  public void setQueryId(String queryId) {
+    this.queryId = queryId;
+  }
+
+  public String getQueryId() {
+    return queryId;
+  }
+
+  @Override
+  public void close() {
+    try {
+      if (statement != null && statement.isCloseOnCompletion() && !statement.hasMoreResults()) {
+        statement.close();
+      }
+    } catch (SQLException ex) {
+      LOG.warning(
+          String.format("Exception during ResultSet.close() operation: %s", ex.getMessage()));
+    }
+  }
+
+  protected SQLException createCoercionException(
+      int columnIndex, Class<?> targetClass, Exception cause) throws SQLException {
+    checkClosed();
+    StandardSQLTypeName type;
+    String typeName;
+
+    if (isNested) {
+      if (columnIndex == 1) {
+        return new BigQueryConversionException(
+            String.format("Cannot convert index column to type %s.", targetClass.getSimpleName()),
+            cause);
+      } else if (columnIndex == 2) {
+        Field arrayField = this.schema.getFields().get(0);
+        type = arrayField.getType().getStandardType();
+        typeName = type.name();
+      } else {
+        throw new SQLException(
+            "For a nested ResultSet from an Array, columnIndex must be 1 or 2.", cause);
+      }
+    } else {
+      Field field = this.schemaFieldList.get(columnIndex - 1);
+      type = field.getType().getStandardType();
+      typeName = type.name();
+    }
+    return new BigQueryConversionException(
+        String.format(
+            "Cannot convert value of type %s to type %s.", typeName, targetClass.getSimpleName()),
+        cause);
+  }
+
+  private StandardSQLTypeName getStandardSQLTypeName(int columnIndex) throws SQLException {
+    checkClosed();
+    if (isNested) {
+      if (columnIndex == 1) {
+        return StandardSQLTypeName.INT64;
+      } else if (columnIndex == 2) {
+        if (this.schema == null || this.schema.getFields().isEmpty()) {
+          throw new SQLException("Schema not available for nested result set.");
+        }
+        Field arrayField = this.schema.getFields().get(0);
+        return arrayField.getType().getStandardType();
+      } else {
+        throw new SQLException("For a nested ResultSet from an Array, columnIndex must be 1 or 2.");
+      }
+    } else {
+      if (this.schemaFieldList == null
+          || columnIndex > this.schemaFieldList.size()
+          || columnIndex < 1) {
+        throw new SQLException("Invalid column index: " + columnIndex);
+      }
+      Field field = this.schemaFieldList.get(columnIndex - 1);
+      return field.getType().getStandardType();
+    }
+  }
+
+  protected void setWasNull(Object val) {
+    this.wasNull = val == null;
+  }
+
+  @Override
+  public boolean wasNull() throws SQLException {
+    checkClosed();
+    return this.wasNull;
+  }
+
+  @Override
+  public ResultSetMetaData getMetaData() throws SQLException {
+    checkClosed();
+    if (this.isNested) {
+      return BigQueryResultSetMetadata.of(this.schemaFieldList, this.statement);
+    } else {
+      return BigQueryResultSetMetadata.of(this.schema.getFields(), this.statement);
+    }
+  }
+
+  @Override
+  public int getType() throws SQLException {
+    checkClosed();
+    return ResultSet.TYPE_FORWARD_ONLY;
+  }
+
+  @Override
+  public int getConcurrency() throws SQLException {
+    checkClosed();
+    return ResultSet.CONCUR_READ_ONLY;
+  }
+
+  @Override
+  public Statement getStatement() throws SQLException {
+    checkClosed();
+    return this.statement;
+  }
+
+  @Override
+  public int getHoldability() throws SQLException {
+    checkClosed();
+    return ResultSet.HOLD_CURSORS_OVER_COMMIT;
+  }
+
+  @Override
+  public boolean isClosed() {
+    return this.isClosed;
+  }
+
+  public abstract Object getObject(int columnIndex) throws
SQLException; + + protected int getColumnIndex(String columnLabel) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (columnLabel == null) { + throw new SQLException("Column label cannot be null"); + } + // use schema to get the column index, add 1 for SQL index + return this.schemaFieldList.getIndex(columnLabel) + 1; + } + + @Override + public String getString(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(String.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, String.class, e); + } + } + + @Override + public boolean getBoolean(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + + StandardSQLTypeName type = getStandardSQLTypeName(columnIndex); + if (type == StandardSQLTypeName.GEOGRAPHY + || type == StandardSQLTypeName.RANGE + || type == StandardSQLTypeName.JSON) { + throw createCoercionException(columnIndex, Boolean.class, null); + } + + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Boolean.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, Boolean.class, e); + } + } + + @Override + public byte getByte(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Byte.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Byte.class, e); + } + } + + @Override + public short getShort(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Short.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Short.class, e); + } + } + + @Override + public int getInt(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Integer.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Integer.class, e); + } + } + + @Override + public long getLong(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Long.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Long.class, e); + } + } + + @Override + public float getFloat(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Float.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Float.class, e); + } + } + + @Override + public double getDouble(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Double.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Double.class, e); + } + } + + @Override + 
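+  // Note: this is the deprecated JDBC overload; the requested scale is not applied here, and
+  // the value is returned with its natural scale, matching the scale-less getBigDecimal(int)
+  // overload below.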
public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(BigDecimal.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, BigDecimal.class, e); + } + } + + @Override + public byte[] getBytes(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(byte[].class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, byte[].class, e); + } + } + + @Override + public Date getDate(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(java.sql.Date.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, java.sql.Date.class, e); + } + } + + @Override + public Time getTime(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + StandardSQLTypeName type = getStandardSQLTypeName(columnIndex); + if (type == StandardSQLTypeName.INT64) { + throw createCoercionException(columnIndex, java.sql.Time.class, null); + } + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(java.sql.Time.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, java.sql.Time.class, e); + } + } + + @Override + public Timestamp getTimestamp(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + StandardSQLTypeName type = getStandardSQLTypeName(columnIndex); + if (type == StandardSQLTypeName.INT64) { + throw createCoercionException(columnIndex, java.sql.Timestamp.class, null); + } + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(java.sql.Timestamp.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, java.sql.Timestamp.class, e); + } + } + + @Override + public BigDecimal getBigDecimal(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(BigDecimal.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, BigDecimal.class, e); + } + } + + @Override + public Array getArray(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + return (Array) getObject(columnIndex); + } catch (ClassCastException e) { + throw createCoercionException(columnIndex, Array.class, e); + } + } + + @Override + public Blob getBlob(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + byte[] value = getBytes(columnIndex); + return new javax.sql.rowset.serial.SerialBlob(value); + } + + @Override + public Clob getClob(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + String value = getString(columnIndex); + return new javax.sql.rowset.serial.SerialClob(value.toCharArray()); + } + + @Override + public Reader getCharacterStream(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + String value = getString(columnIndex); + return value == null ? 
null : new StringReader(value); + } + + private InputStream getInputStream(String value, java.nio.charset.Charset charset) { + LOG.finest("++enter++"); + if (value == null) { + return null; + } + return new java.io.ByteArrayInputStream(value.getBytes(charset)); + } + + @Override + public InputStream getAsciiStream(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + return getInputStream(getString(columnIndex), StandardCharsets.US_ASCII); + } + + @Override + public InputStream getUnicodeStream(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + return getInputStream(getString(columnIndex), StandardCharsets.UTF_16LE); + } + + @Override + public InputStream getBinaryStream(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + byte[] bytes = getBytes(columnIndex); + return bytes == null ? null : new java.io.ByteArrayInputStream(bytes); + } + + @Override + public Date getDate(int columnIndex, Calendar cal) throws SQLException { + LOG.finest("++enter++"); + Date date = getDate(columnIndex); + if (date == null || cal == null) { + return null; + } + cal.setTimeInMillis(date.getTime()); + return new java.sql.Date(cal.getTimeInMillis()); + } + + @Override + public Time getTime(int columnIndex, Calendar cal) throws SQLException { + LOG.finest("++enter++"); + Time time = getTime(columnIndex); + if (time == null || cal == null) { + return null; + } + cal.setTimeInMillis(time.getTime()); + return new java.sql.Time(cal.getTimeInMillis()); + } + + @Override + public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { + LOG.finest("++enter++"); + Timestamp timeStamp = getTimestamp(columnIndex); + if (timeStamp == null || cal == null) { + return null; + } + cal.setTimeInMillis(timeStamp.getTime()); + return new java.sql.Timestamp(cal.getTimeInMillis()); + } + + @Override + public int findColumn(String columnLabel) throws SQLException { + LOG.finest("++enter++"); + return getColumnIndex(columnLabel); + } + + @Override + public Object getObject(String columnLabel) throws SQLException { + return getObject(getColumnIndex(columnLabel)); + } + + @Override + public String getString(String columnLabel) throws SQLException { + return getString(getColumnIndex(columnLabel)); + } + + @Override + public boolean getBoolean(String columnLabel) throws SQLException { + return getBoolean(getColumnIndex(columnLabel)); + } + + @Override + public byte getByte(String columnLabel) throws SQLException { + return getByte(getColumnIndex(columnLabel)); + } + + @Override + public short getShort(String columnLabel) throws SQLException { + return getShort(getColumnIndex(columnLabel)); + } + + @Override + public int getInt(String columnLabel) throws SQLException { + return getInt(getColumnIndex(columnLabel)); + } + + @Override + public long getLong(String columnLabel) throws SQLException { + return getLong(getColumnIndex(columnLabel)); + } + + @Override + public float getFloat(String columnLabel) throws SQLException { + return getFloat(getColumnIndex(columnLabel)); + } + + @Override + public double getDouble(String columnLabel) throws SQLException { + return getDouble(getColumnIndex(columnLabel)); + } + + @Override + public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { + return getBigDecimal(getColumnIndex(columnLabel), scale); + } + + @Override + public byte[] getBytes(String columnLabel) throws SQLException { + return getBytes(getColumnIndex(columnLabel)); + } + + @Override + public Date getDate(String columnLabel) throws SQLException 
{ + return getDate(getColumnIndex(columnLabel)); + } + + @Override + public Time getTime(String columnLabel) throws SQLException { + return getTime(getColumnIndex(columnLabel)); + } + + @Override + public Timestamp getTimestamp(String columnLabel) throws SQLException { + return getTimestamp(getColumnIndex(columnLabel)); + } + + @Override + public InputStream getAsciiStream(String columnLabel) throws SQLException { + return getAsciiStream(getColumnIndex(columnLabel)); + } + + @Override + public InputStream getUnicodeStream(String columnLabel) throws SQLException { + return getUnicodeStream(getColumnIndex(columnLabel)); + } + + @Override + public InputStream getBinaryStream(String columnLabel) throws SQLException { + return getBinaryStream(getColumnIndex(columnLabel)); + } + + @Override + public BigDecimal getBigDecimal(String columnLabel) throws SQLException { + return getBigDecimal(getColumnIndex(columnLabel)); + } + + @Override + public Blob getBlob(String columnLabel) throws SQLException { + return getBlob(getColumnIndex(columnLabel)); + } + + @Override + public Clob getClob(String columnLabel) throws SQLException { + return getClob(getColumnIndex(columnLabel)); + } + + @Override + public Array getArray(String columnLabel) throws SQLException { + return getArray(getColumnIndex(columnLabel)); + } + + @Override + public Reader getCharacterStream(String columnLabel) throws SQLException { + return getCharacterStream(getColumnIndex(columnLabel)); + } + + @Override + public Date getDate(String columnLabel, Calendar cal) throws SQLException { + return getDate(getColumnIndex(columnLabel), cal); + } + + @Override + public Time getTime(String columnLabel, Calendar cal) throws SQLException { + return getTime(getColumnIndex(columnLabel), cal); + } + + @Override + public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { + return getTimestamp(getColumnIndex(columnLabel), cal); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java new file mode 100644 index 0000000000..ab9cf61cb8 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java @@ -0,0 +1,97 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.StandardSQLTypeName.STRUCT; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.Date; +import java.sql.SQLException; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Base64; +import java.util.Map; + +/** + * An abstract implementation of {@link java.sql.Struct} used as a base class for {@link + * BigQueryArrowStruct} and {@link BigQueryJsonStruct}. A Struct object contains a value for each + * attribute of the SQL structured type that it represents. + */ +abstract class BigQueryBaseStruct implements java.sql.Struct { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryBaseStruct.class.getName()); + + abstract FieldList getSchema(); + + @Override + public final String getSQLTypeName() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final Object[] getAttributes(Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + static boolean isStruct(Field currentSchema) { + LOG.finest("++enter++"); + return currentSchema.getType().getStandardType() == STRUCT; + } + + @Override + public String toString() { + try { + FieldList schema = getSchema(); + Object[] attributes = getAttributes(); + + if (schema == null || attributes == null || schema.size() != attributes.length) { + return "{}"; + } + + StringBuilder sb = new StringBuilder("{"); + for (int i = 0; i < attributes.length; i++) { + if (i > 0) { + sb.append(","); + } + String fieldName = schema.get(i).getName(); + Object value = attributes[i]; + + sb.append("\"").append(fieldName.replace("\"", "\\\"")).append("\":"); + + if (value == null) { + sb.append("null"); + } else if (value instanceof String || value instanceof org.apache.arrow.vector.util.Text) { + String stringValue = value.toString().replace("\"", "\\\""); + sb.append("\"").append(stringValue).append("\""); + } else if (value instanceof Timestamp || value instanceof Date || value instanceof Time) { + sb.append("\"").append(value.toString()).append("\""); + } else if (value instanceof byte[]) { + sb.append("\"").append(Base64.getEncoder().encodeToString((byte[]) value)).append("\""); + } else { + sb.append(value.toString()); + } + } + sb.append("}"); + return sb.toString(); + } catch (SQLException e) { + return "{ \"error\": \"Error converting struct to string: " + e.getMessage() + "\" }"; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java new file mode 100644 index 0000000000..041505c623 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java @@ -0,0 +1,1341 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import com.google.common.annotations.VisibleForTesting; +import java.io.BufferedReader; +import java.io.CharArrayReader; +import java.io.FilterReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.PipedReader; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.CallableStatement; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.Calendar; +import java.util.Map; + +class BigQueryCallableStatement extends BigQueryPreparedStatement implements CallableStatement { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + + BigQueryCallableStatement(BigQueryConnection connection, String callableStmtSql) + throws SQLException { + super(connection, callableStmtSql); + } + + @VisibleForTesting + protected String getCallableStatementSql() { + return this.currentQuery; + } + + @VisibleForTesting + protected BigQueryParameterHandler getParameterHandler() { + return this.parameterHandler; + } + + @Override + public Array getArray(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Array) { + return (Array) param; + } + if (param.getClass().isAssignableFrom(Array.class)) { + return getObject(arg0, Array.class); + } + return null; + } + + @Override + public Array getArray(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Array) { + return (Array) param; + } + if (param.getClass().isAssignableFrom(Array.class)) { + return getObject(arg0, Array.class); + } + return null; + } + + @Override + public BigDecimal getBigDecimal(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof BigDecimal) { + return (BigDecimal) param; + } + if (param.getClass().isAssignableFrom(BigDecimal.class)) { + return getObject(arg0, BigDecimal.class); + } + return null; + } + + @Override + public BigDecimal getBigDecimal(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof BigDecimal) { + return (BigDecimal) param; + } + if (param.getClass().isAssignableFrom(BigDecimal.class)) { + return getObject(arg0, BigDecimal.class); + } + return null; + } + + @Override + public BigDecimal getBigDecimal(int arg0, int arg1) throws SQLException { + LOG.finest("++enter++"); + return 
getBigDecimal(arg0); + } + + @Override + public Blob getBlob(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Blob) { + return (Blob) param; + } + if (param.getClass().isAssignableFrom(Blob.class)) { + return getObject(arg0, Blob.class); + } + return null; + } + + @Override + public Blob getBlob(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Blob) { + return (Blob) param; + } + if (param.getClass().isAssignableFrom(Blob.class)) { + return getObject(arg0, Blob.class); + } + return null; + } + + @Override + public boolean getBoolean(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Boolean) { + return (Boolean) param; + } + if (param.getClass().isAssignableFrom(Boolean.class)) { + return getObject(arg0, Boolean.class); + } + return false; + } + + @Override + public boolean getBoolean(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Boolean) { + return (Boolean) param; + } + if (param.getClass().isAssignableFrom(Boolean.class)) { + return getObject(arg0, Boolean.class); + } + return false; + } + + @Override + public byte getByte(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Byte) { + return (Byte) param; + } + if (param.getClass().isAssignableFrom(Byte.class)) { + return getObject(arg0, Byte.class); + } + return -1; + } + + @Override + public byte getByte(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Byte) { + return (Byte) param; + } + if (param.getClass().isAssignableFrom(Byte.class)) { + return getObject(arg0, Byte.class); + } + return -1; + } + + @Override + public byte[] getBytes(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof byte[] || param.getClass().isAssignableFrom(byte[].class)) { + return (byte[]) param; + } + if (param instanceof String) { + return param.toString().getBytes(); + } + return null; + } + + @Override + public byte[] getBytes(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof byte[] || param.getClass().isAssignableFrom(byte[].class)) { + return (byte[]) param; + } + if (param instanceof String) { + return param.toString().getBytes(); + } + return null; + } + + // FilterReader, InputStreamReader, PipedReader, StringReader + @Override + public Reader getCharacterStream(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String || param.getClass().isAssignableFrom(String.class)) { + return new StringReader(param.toString()); + } + + if (param instanceof BufferedReader) { + return (BufferedReader) param; + } + if (param.getClass().isAssignableFrom(BufferedReader.class)) { + return getObject(arg0, BufferedReader.class); + } + + if (param instanceof CharArrayReader) { + return (CharArrayReader) param; + } + if (param.getClass().isAssignableFrom(CharArrayReader.class)) { + return getObject(arg0, CharArrayReader.class); + } + + if (param instanceof 
FilterReader) { + return (FilterReader) param; + } + if (param.getClass().isAssignableFrom(FilterReader.class)) { + return getObject(arg0, FilterReader.class); + } + + if (param instanceof InputStreamReader) { + return (InputStreamReader) param; + } + if (param.getClass().isAssignableFrom(InputStreamReader.class)) { + return getObject(arg0, InputStreamReader.class); + } + + if (param instanceof PipedReader) { + return (PipedReader) param; + } + if (param.getClass().isAssignableFrom(PipedReader.class)) { + return getObject(arg0, PipedReader.class); + } + + if (param instanceof StringReader) { + return (StringReader) param; + } + if (param.getClass().isAssignableFrom(StringReader.class)) { + return getObject(arg0, StringReader.class); + } + return null; + } + + @Override + public Reader getCharacterStream(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String || param.getClass().isAssignableFrom(String.class)) { + return new StringReader(param.toString()); + } + + if (param instanceof BufferedReader) { + return (BufferedReader) param; + } + if (param.getClass().isAssignableFrom(BufferedReader.class)) { + return getObject(arg0, BufferedReader.class); + } + + if (param instanceof CharArrayReader) { + return (CharArrayReader) param; + } + if (param.getClass().isAssignableFrom(CharArrayReader.class)) { + return getObject(arg0, CharArrayReader.class); + } + + if (param instanceof FilterReader) { + return (FilterReader) param; + } + if (param.getClass().isAssignableFrom(FilterReader.class)) { + return getObject(arg0, FilterReader.class); + } + + if (param instanceof InputStreamReader) { + return (InputStreamReader) param; + } + if (param.getClass().isAssignableFrom(InputStreamReader.class)) { + return getObject(arg0, InputStreamReader.class); + } + + if (param instanceof PipedReader) { + return (PipedReader) param; + } + if (param.getClass().isAssignableFrom(PipedReader.class)) { + return getObject(arg0, PipedReader.class); + } + + if (param instanceof StringReader) { + return (StringReader) param; + } + if (param.getClass().isAssignableFrom(StringReader.class)) { + return getObject(arg0, StringReader.class); + } + return null; + } + + @Override + public Clob getClob(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Clob) { + return (Clob) param; + } + if (param.getClass().isAssignableFrom(Clob.class)) { + return getObject(arg0, Clob.class); + } + return null; + } + + @Override + public Clob getClob(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Clob) { + return (Clob) param; + } + if (param.getClass().isAssignableFrom(Clob.class)) { + return getObject(arg0, Clob.class); + } + return null; + } + + @Override + public Date getDate(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Date) { + return (Date) param; + } + if (param.getClass().isAssignableFrom(Date.class)) { + return getObject(arg0, Date.class); + } + return null; + } + + @Override + public Date getDate(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Date) { + return (Date) param; + } + if (param.getClass().isAssignableFrom(Date.class)) { + return getObject(arg0, Date.class); + 
    }
+    return null;
+  }
+
+  @Override
+  public Date getDate(int arg0, Calendar arg1) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Date) {
+      Date dateParam = (Date) param;
+      if (arg1 != null) {
+        arg1.setTime(dateParam);
+        return new Date(arg1.getTimeInMillis());
+      }
+    }
+    if (param.getClass().isAssignableFrom(Date.class)) {
+      Date dateObj = getObject(arg0, Date.class);
+      if (arg1 != null) {
+        arg1.setTime(dateObj);
+        return new Date(arg1.getTimeInMillis());
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public Date getDate(String arg0, Calendar arg1) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Date) {
+      Date dateParam = (Date) param;
+      if (arg1 != null) {
+        arg1.setTime(dateParam);
+        return new Date(arg1.getTimeInMillis());
+      }
+    }
+    if (param.getClass().isAssignableFrom(Date.class)) {
+      Date dateObj = getObject(arg0, Date.class);
+      if (arg1 != null) {
+        arg1.setTime(dateObj);
+        return new Date(arg1.getTimeInMillis());
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public double getDouble(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Double) {
+      return (Double) param;
+    }
+    if (param.getClass().isAssignableFrom(Double.class)) {
+      return getObject(arg0, Double.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public double getDouble(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Double) {
+      return (Double) param;
+    }
+    if (param.getClass().isAssignableFrom(Double.class)) {
+      return getObject(arg0, Double.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public float getFloat(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Float) {
+      return (Float) param;
+    }
+    if (param.getClass().isAssignableFrom(Float.class)) {
+      return getObject(arg0, Float.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public float getFloat(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Float) {
+      return (Float) param;
+    }
+    if (param.getClass().isAssignableFrom(Float.class)) {
+      return getObject(arg0, Float.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public int getInt(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Integer) {
+      return (Integer) param;
+    }
+    if (param.getClass().isAssignableFrom(Integer.class)) {
+      return getObject(arg0, Integer.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public int getInt(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Integer) {
+      return (Integer) param;
+    }
+    if (param.getClass().isAssignableFrom(Integer.class)) {
+      return getObject(arg0, Integer.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public long getLong(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Long) {
+      return (Long) param;
+    }
+    if (param.getClass().isAssignableFrom(Long.class)) {
+      return getObject(arg0, Long.class);
+    }
+    if (param instanceof Integer) {
+      // An Integer cannot be cast directly to Long; widen through the primitive instead.
+      return ((Integer) param).longValue();
+    }
+    if (param.getClass().isAssignableFrom(Integer.class)) {
+      return getObject(arg0, Integer.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public long getLong(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Long) {
+      return (Long) param;
+    }
+    if (param.getClass().isAssignableFrom(Long.class)) {
+      return getObject(arg0, Long.class);
+    }
+    if (param instanceof Integer) {
+      // An Integer cannot be cast directly to Long; widen through the primitive instead.
+      return ((Integer) param).longValue();
+    }
+    if (param.getClass().isAssignableFrom(Integer.class)) {
+      return getObject(arg0, Integer.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public Reader getNCharacterStream(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    return getCharacterStream(arg0);
+  }
+
+  @Override
+  public Reader getNCharacterStream(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    return getCharacterStream(arg0);
+  }
+
+  @Override
+  public NClob getNClob(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof NClob) {
+      return (NClob) param;
+    }
+    if (param.getClass().isAssignableFrom(NClob.class)) {
+      return getObject(arg0, NClob.class);
+    }
+    return null;
+  }
+
+  @Override
+  public NClob getNClob(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof NClob) {
+      return (NClob) param;
+    }
+    if (param.getClass().isAssignableFrom(NClob.class)) {
+      return getObject(arg0, NClob.class);
+    }
+    return null;
+  }
+
+  @Override
+  public String getNString(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof String) {
+      return param.toString();
+    }
+    if (param.getClass().isAssignableFrom(String.class)) {
+      return getObject(arg0, String.class);
+    }
+    return null;
+  }
+
+  @Override
+  public String getNString(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof String) {
+      return param.toString();
+    }
+    if (param.getClass().isAssignableFrom(String.class)) {
+      return getObject(arg0, String.class);
+    }
+    return null;
+  }
+
+  @Override
+  public Object getObject(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    return this.parameterHandler.getParameter(arg0);
+  }
+
+  @Override
+  public Object getObject(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    return this.parameterHandler.getParameter(arg0);
+  }
+
+  @Override
+  public Object getObject(int arg0, Map<String, Class<?>> arg1) throws SQLException {
+    LOG.finest("++enter++");
+    String paramKey = this.parameterHandler.getSqlType(arg0).name();
+    if (arg1.containsKey(paramKey)) {
+      Class<?> argJavaType = arg1.get(paramKey);
+      Class<?> paramJavaType = this.parameterHandler.getType(arg0);
+      if (paramJavaType.isAssignableFrom(argJavaType)) {
+        return this.parameterHandler.getParameter(arg0);
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public Object getObject(String arg0, Map<String, Class<?>> arg1) throws SQLException {
+    LOG.finest("++enter++");
+    String paramKey = this.parameterHandler.getSqlType(arg0).name();
+    if (arg1.containsKey(paramKey)) {
+      Class<?> argJavaType = arg1.get(paramKey);
+      Class<?> paramJavaType = this.parameterHandler.getType(arg0);
+      if (paramJavaType.isAssignableFrom(argJavaType)) {
+        return this.parameterHandler.getParameter(arg0);
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public <T> T getObject(int arg0, Class<T> arg1) throws SQLException {
+    LOG.finest("++enter++");
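+    // The requested class must be assignable to the parameter's registered Java type;
+    // otherwise null is returned rather than attempting a coercion. A rough sketch of the
+    // expected call pattern (the values are illustrative):
+    //   cs.setLong(1, 42L);
+    //   Long v = cs.getObject(1, Long.class);     // matches the registered type -> 42L
+    //   String s = cs.getObject(1, String.class); // no match, no coercion -> null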
Class javaType = this.parameterHandler.getType(arg0); + if (javaType.isAssignableFrom(arg1)) { + return (T) this.parameterHandler.getParameter(arg0); + } + return null; + } + + @Override + public T getObject(String arg0, Class arg1) throws SQLException { + LOG.finest("++enter++"); + Class javaType = this.parameterHandler.getType(arg0); + if (javaType.isAssignableFrom(arg1)) { + return (T) this.parameterHandler.getParameter(arg0); + } + return null; + } + + @Override + public Ref getRef(int arg0) throws SQLException { + // TODO Auto-generated method stub + return null; + } + + @Override + public Ref getRef(String arg0) throws SQLException { + // TODO Auto-generated method stub + return null; + } + + @Override + public RowId getRowId(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof RowId) { + return (RowId) param; + } + if (param.getClass().isAssignableFrom(RowId.class)) { + return getObject(arg0, RowId.class); + } + return null; + } + + @Override + public RowId getRowId(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof RowId) { + return (RowId) param; + } + if (param.getClass().isAssignableFrom(RowId.class)) { + return getObject(arg0, RowId.class); + } + return null; + } + + @Override + public SQLXML getSQLXML(int arg0) throws SQLException { + // TODO Auto-generated method stub + return null; + } + + @Override + public SQLXML getSQLXML(String arg0) throws SQLException { + // TODO Auto-generated method stub + return null; + } + + @Override + public short getShort(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Short) { + return (Short) param; + } + if (param.getClass().isAssignableFrom(Short.class)) { + return getObject(arg0, Short.class); + } + return 0; + } + + @Override + public short getShort(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Short) { + return (Short) param; + } + if (param.getClass().isAssignableFrom(Short.class)) { + return getObject(arg0, Short.class); + } + return 0; + } + + @Override + public String getString(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String) { + return param.toString(); + } + if (param.getClass().isAssignableFrom(String.class)) { + return getObject(arg0, String.class); + } + return null; + } + + @Override + public String getString(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String) { + return param.toString(); + } + if (param.getClass().isAssignableFrom(String.class)) { + return getObject(arg0, String.class); + } + return null; + } + + @Override + public Time getTime(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + return (Time) param; + } + if (param.getClass().isAssignableFrom(Time.class)) { + return getObject(arg0, Time.class); + } + return null; + } + + @Override + public Time getTime(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + return (Time) param; + } + if 
(param.getClass().isAssignableFrom(Time.class)) { + return getObject(arg0, Time.class); + } + return null; + } + + @Override + public Time getTime(int arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + Time timeParam = (Time) param; + if (arg1 != null) { + arg1.setTimeInMillis(timeParam.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Time.class)) { + Time timeObj = getObject(arg0, Time.class); + if (arg1 != null) { + arg1.setTimeInMillis(timeObj.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Time getTime(String arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + Time timeParam = (Time) param; + if (arg1 != null) { + arg1.setTimeInMillis(timeParam.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Time.class)) { + Time timeObj = getObject(arg0, Time.class); + if (arg1 != null) { + arg1.setTimeInMillis(timeObj.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Timestamp getTimestamp(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + return (Timestamp) param; + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + return getObject(arg0, Timestamp.class); + } + return null; + } + + @Override + public Timestamp getTimestamp(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + return (Timestamp) param; + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + return getObject(arg0, Timestamp.class); + } + return null; + } + + @Override + public Timestamp getTimestamp(int arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + Timestamp timestampParam = (Timestamp) param; + if (arg1 != null) { + arg1.setTimeInMillis(timestampParam.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + Timestamp timestampObj = getObject(arg0, Timestamp.class); + if (arg1 != null) { + arg1.setTimeInMillis(timestampObj.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Timestamp getTimestamp(String arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + Timestamp timestampParam = (Timestamp) param; + if (arg1 != null) { + arg1.setTimeInMillis(timestampParam.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + Timestamp timestampObj = getObject(arg0, Timestamp.class); + if (arg1 != null) { + arg1.setTimeInMillis(timestampObj.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public URL getURL(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof URL) { + return (URL) param; + } + if 
(param.getClass().isAssignableFrom(URL.class)) { + return getObject(arg0, URL.class); + } + return null; + } + + @Override + public URL getURL(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof URL) { + return (URL) param; + } + if (param.getClass().isAssignableFrom(URL.class)) { + return getObject(arg0, URL.class); + } + return null; + } + + @Override + public void registerOutParameter(int paramIndex, int sqlType) throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format("registerOutParameter: paramIndex %s, sqlType %s", paramIndex, sqlType)); + checkClosed(); + try { + this.parameterHandler.setParameter( + paramIndex, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + -1); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(String paramName, int sqlType) throws SQLException { + LOG.finest("++enter++"); + LOG.finest(String.format("registerOutParameter: paramName %s, sqlType %s", paramName, sqlType)); + checkClosed(); + try { + this.parameterHandler.setParameter( + paramName, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + -1); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(int paramIndex, int sqlType, int scale) throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format( + "registerOutParameter: paramIndex %s, sqlType %s, scale %s", + paramIndex, sqlType, scale)); + checkClosed(); + if (sqlType != Types.NUMERIC && sqlType != Types.DECIMAL) { + throw new IllegalArgumentException( + String.format("registerOutParameter: Invalid sqlType passed in %s", sqlType)); + } + try { + this.parameterHandler.setParameter( + paramIndex, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + scale); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(int paramIndex, int sqlType, String typeName) + throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format( + "registerOutParameter: paramIndex %s, sqlType %s, typeName %s", + paramIndex, sqlType, typeName)); + // fully qualified sql typeName is not supported by the driver and hence ignored. 
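+ // Illustrative usage (not part of this file): registering a NUMERIC OUT parameter through the scale-validating overload above; the procedure name is a hypothetical placeholder.
+ //   CallableStatement call = connection.prepareCall("CALL my_dataset.my_proc(?)");
+ //   call.registerOutParameter(1, java.sql.Types.NUMERIC, 9);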
+ registerOutParameter(paramIndex, sqlType); + } + + @Override + public void registerOutParameter(String paramName, int sqlType, int scale) throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format( + "registerOutParameter: paramName %s, sqlType %s, scale %s", + paramName, sqlType, scale)); + checkClosed(); + if (sqlType != Types.NUMERIC && sqlType != Types.DECIMAL) { + throw new IllegalArgumentException( + String.format("registerOutParameter: Invalid sqlType passed in %s", sqlType)); + } + try { + this.parameterHandler.setParameter( + paramName, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + scale); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(String paramName, int sqlType, String typeName) + throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format( + "registerOutParameter: paramName %s, sqlType %s, typeName %s", + paramName, sqlType, typeName)); + // fully qualified sql typeName is not supported by the driver and hence ignored. + registerOutParameter(paramName, sqlType); + } + + @Override + public void setAsciiStream(String arg0, InputStream arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setAsciiStream(String arg0, InputStream arg1, int arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setAsciiStream(String arg0, InputStream arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBigDecimal(String arg0, BigDecimal arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setBinaryStream(String arg0, InputStream arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setBinaryStream(String arg0, InputStream arg1, int arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBinaryStream(String arg0, InputStream arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setBlob(String arg0, Blob arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBlob(String arg0, InputStream arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBlob(String arg0, InputStream arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBoolean(String arg0, boolean arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Boolean.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setByte(String arg0, byte arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Byte.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setBytes(String arg0, byte[] arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, byte[].class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setCharacterStream(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public
void setCharacterStream(String arg0, Reader arg1, int arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setCharacterStream(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setClob(String arg0, Clob arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setClob(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setClob(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setDate(String arg0, Date arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setDate(String arg0, Date arg1, Calendar arg2) throws SQLException { + arg2.setTimeInMillis(arg1.getTime()); + this.parameterHandler.setParameter( + arg0, + new Date(arg2.getTimeInMillis()), + arg1.getClass(), + BigQueryStatementParameterType.IN, + 0); + } + + @Override + public void setDouble(String arg0, double arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Double.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setFloat(String arg0, float arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Float.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setInt(String arg0, int arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Integer.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setLong(String arg0, long arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Long.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setNCharacterStream(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNCharacterStream(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setNClob(String arg0, NClob arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNClob(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNClob(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNString(String arg0, String arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setNull(String arg0, int arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNull(String arg0, int arg1, String arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setObject(String arg0, Object arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setObject(String arg0, Object arg1, int arg2) throws SQLException { + 
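+ // Record the value first, then check the caller-supplied java.sql.Types code against the driver's mapping for the inferred BigQuery type; a mismatch or unmapped type raises after the parameter has already been stored.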
this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + StandardSQLTypeName sqlType = this.parameterHandler.getSqlType(arg0); + if (BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.containsKey(sqlType)) { + int javaSqlType = BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get(sqlType); + if (javaSqlType != arg2) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + String.format("Unsupported sql type:%s ", arg2)); + } + } else { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + String.format("parameter sql type not supported: %s", sqlType)); + } + } + + @Override + public void setObject(String arg0, Object arg1, int arg2, int arg3) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, arg3); + StandardSQLTypeName sqlType = this.parameterHandler.getSqlType(arg0); + if (BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.containsKey(sqlType)) { + int javaSqlType = BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get(sqlType); + if (javaSqlType != arg2) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + String.format("Unsupported sql type:%s ", arg2)); + } + } else { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + String.format("parameter sql type not supported: %s", sqlType)); + } + } + + @Override + public void setRowId(String arg0, RowId arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setSQLXML(String arg0, SQLXML arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setShort(String arg0, short arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Short.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setString(String arg0, String arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setTime(String arg0, Time arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setTime(String arg0, Time arg1, Calendar arg2) throws SQLException { + arg2.setTimeInMillis(arg1.getTime()); + this.parameterHandler.setParameter( + arg0, + new Time(arg2.getTimeInMillis()), + arg1.getClass(), + BigQueryStatementParameterType.IN, + 0); + } + + @Override + public void setTimestamp(String arg0, Timestamp arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setTimestamp(String arg0, Timestamp arg1, Calendar arg2) throws SQLException { + arg2.setTimeInMillis(arg1.getTime()); + this.parameterHandler.setParameter( + arg0, + new Timestamp(arg2.getTimeInMillis()), + arg1.getClass(), + BigQueryStatementParameterType.IN, + 0); + } + + @Override + public void setURL(String arg0, URL arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public boolean wasNull() throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + return false; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java new file mode 
100644 index 0000000000..6265af0dec --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.util.function.Function; + +/** + * A {@link BigQueryCoercion} is responsible for coercing one type to another. An implementation of + * {@link BigQueryCoercion} is used to extend the behaviour of {@link BigQueryTypeCoercer} for the + * coercion of one user defined type to another. + * + * @param <INPUT> represents the source type + * @param <OUTPUT> represents the target type + */ +@InternalApi +interface BigQueryCoercion<INPUT, OUTPUT> extends Function<INPUT, OUTPUT> { + /** + * Coerce the provided value to the desired type. + * + * @param value the input value. + * @return the output value after coercion. + */ + OUTPUT coerce(INPUT value); + + @Override + default OUTPUT apply(INPUT input) { + return coerce(input); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java new file mode 100644 index 0000000000..e93938f258 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java @@ -0,0 +1,1160 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.gax.core.CredentialsProvider; +import com.google.api.gax.core.FixedCredentialsProvider; +import com.google.api.gax.retrying.RetrySettings; +import com.google.api.gax.rpc.FixedHeaderProvider; +import com.google.api.gax.rpc.HeaderProvider; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.auth.Credentials; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.ConnectionProperty; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.BigQueryReadSettings; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteSettings; +import com.google.cloud.http.HttpTransportOptions; +import java.io.IOException; +import java.io.InputStream; +import java.sql.CallableStatement; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.Statement; +import java.time.Duration; +import java.util.ArrayList; +import java.util.ConcurrentModificationException; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; + +/** + * An implementation of {@link java.sql.Connection} for establishing a connection with BigQuery and + * executing SQL statements + * + * @see BigQueryStatement + */ +public class BigQueryConnection extends BigQueryNoOpsConnection { + + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + String connectionClassName = this.toString(); + private static final String DEFAULT_JDBC_TOKEN_VALUE = "Google-BigQuery-JDBC-Driver"; + private static final String DEFAULT_VERSION = "0.0.0"; + private static HeaderProvider HEADER_PROVIDER; + BigQueryReadClient bigQueryReadClient = null; + BigQueryWriteClient bigQueryWriteClient = null; + BigQuery bigQuery; + String connectionUrl; + Set openStatements; + boolean autoCommit; + int transactionIsolation; + List sqlWarnings; + String catalog; + int holdability; + long retryTimeoutInSeconds; + Duration retryTimeoutDuration; + long retryInitialDelayInSeconds; + Duration retryInitialDelayDuration; + long retryMaxDelayInSeconds; + Duration retryMaxDelayDuration; + // transactionStarted is false by default. + // when autocommit is false transaction starts and session is initialized. 
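+ // beginTransaction() issues BEGIN TRANSACTION lazily and captures the session_id that every subsequent statement attaches via queryProperties.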
+ boolean transactionStarted; + ConnectionProperty sessionInfoConnectionProperty; + boolean isClosed; + DatasetId defaultDataset; + String location; + boolean enableHighThroughputAPI; + int highThroughputMinTableSize; + int highThroughputActivationRatio; + boolean enableSession; + boolean unsupportedHTAPIFallback; + boolean useQueryCache; + String queryDialect; + int metadataFetchThreadCount; + boolean allowLargeResults; + String destinationTable; + String destinationDataset; + long destinationDatasetExpirationTime; + String kmsKeyName; + String universeDomain; + List queryProperties; + Map authProperties; + Map overrideProperties; + Credentials credentials; + boolean useStatelessQueryMode; + int numBufferedRows; + HttpTransportOptions httpTransportOptions; + TransportChannelProvider transportChannelProvider; + long maxResults; + long jobTimeoutInSeconds; + boolean enableWriteAPI; + int writeAPIActivationRowCount; + int writeAPIAppendRowCount; + int requestGoogleDriveScope; + List additionalProjects; + boolean filterTablesOnDefaultDataset; + String sslTrustStorePath; + String sslTrustStorePassword; + long maxBytesBilled; + Map labels; + + BigQueryConnection(String url) throws IOException { + this.connectionUrl = url; + this.openStatements = ConcurrentHashMap.newKeySet(); + this.autoCommit = true; + this.sqlWarnings = new ArrayList<>(); + this.transactionStarted = false; + this.isClosed = false; + this.labels = BigQueryJdbcUrlUtility.parseLabels(url, connectionClassName); + this.maxBytesBilled = + BigQueryJdbcUrlUtility.parseMaximumBytesBilled(url, this.connectionClassName); + this.retryTimeoutInSeconds = + BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(url, this.connectionClassName); + this.retryTimeoutDuration = Duration.ofMillis(retryTimeoutInSeconds * 1000L); + this.retryInitialDelayInSeconds = + BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(url, this.connectionClassName); + this.retryInitialDelayDuration = Duration.ofMillis(retryInitialDelayInSeconds * 1000L); + this.retryMaxDelayInSeconds = + BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(url, this.connectionClassName); + this.retryMaxDelayDuration = Duration.ofMillis(retryMaxDelayInSeconds * 1000L); + this.jobTimeoutInSeconds = + BigQueryJdbcUrlUtility.parseJobTimeout(url, this.connectionClassName); + this.authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.connectionClassName); + this.catalog = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.PROJECT_ID_PROPERTY_NAME, + BigQueryOptions.getDefaultProjectId(), + this.connectionClassName); + this.universeDomain = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_UNIVERSE_DOMAIN_VALUE, + this.connectionClassName); + this.overrideProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(url, this.connectionClassName); + if (universeDomain != null) { + this.overrideProperties.put( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, universeDomain); + } + this.credentials = + BigQueryJdbcOAuthUtility.getCredentials( + authProperties, overrideProperties, this.connectionClassName); + String defaultDatasetString = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.DEFAULT_DATASET_PROPERTY_NAME, + null, + this.connectionClassName); + if (defaultDatasetString == null || defaultDatasetString.trim().isEmpty()) { + this.defaultDataset = null; + } else { + String[] parts = 
defaultDatasetString.split("\\."); + if (parts.length == 2) { + this.defaultDataset = DatasetId.of(parts[0], parts[1]); + } else if (parts.length == 1) { + this.defaultDataset = DatasetId.of(parts[0]); + } else { + throw new IllegalArgumentException( + "DefaultDataset format is invalid. Supported options are datasetId or" + + " projectId.datasetId"); + } + } + this.location = + BigQueryJdbcUrlUtility.parseStringProperty( + url, BigQueryJdbcUrlUtility.LOCATION_PROPERTY_NAME, null, this.connectionClassName); + this.enableHighThroughputAPI = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.ENABLE_HTAPI_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_ENABLE_HTAPI_VALUE, + this.connectionClassName); + this.highThroughputMinTableSize = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_HTAPI_MIN_TABLE_SIZE_VALUE, + this.connectionClassName); + this.highThroughputActivationRatio = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.HTAPI_ACTIVATION_RATIO_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_HTAPI_ACTIVATION_RATIO_VALUE, + this.connectionClassName); + this.useQueryCache = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.USE_QUERY_CACHE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_USE_QUERY_CACHE, + this.connectionClassName); + this.useStatelessQueryMode = + BigQueryJdbcUrlUtility.parseJobCreationMode(url, this.connectionClassName); + this.queryDialect = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.QUERY_DIALECT_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_QUERY_DIALECT_VALUE, + this.connectionClassName); + this.allowLargeResults = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.ALLOW_LARGE_RESULTS_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_ALLOW_LARGE_RESULTS, + this.connectionClassName); + this.destinationTable = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.LARGE_RESULTS_TABLE_PROPERTY_NAME, + null, + this.connectionClassName); + this.destinationDataset = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.LARGE_RESULTS_DATASET_PROPERTY_NAME, + null, + this.connectionClassName); + this.destinationDatasetExpirationTime = + BigQueryJdbcUrlUtility.parseLongProperty( + url, + BigQueryJdbcUrlUtility.DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_DESTINATION_DATASET_EXPIRATION_TIME_VALUE, + this.connectionClassName); + this.kmsKeyName = + BigQueryJdbcUrlUtility.parseStringProperty( + url, BigQueryJdbcUrlUtility.KMS_KEY_NAME_PROPERTY_NAME, null, this.connectionClassName); + Map proxyProperties = + BigQueryJdbcProxyUtility.parseProxyProperties(url, this.connectionClassName); + this.sslTrustStorePath = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PROPERTY_NAME, + null, + this.connectionClassName); + this.sslTrustStorePassword = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PWD_PROPERTY_NAME, + null, + this.connectionClassName); + this.httpTransportOptions = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + proxyProperties, + this.sslTrustStorePath, + this.sslTrustStorePassword, + this.connectionClassName); + this.transportChannelProvider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + proxyProperties, + this.sslTrustStorePath, + 
this.sslTrustStorePassword, + this.connectionClassName); + this.enableSession = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.ENABLE_SESSION_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_ENABLE_SESSION_VALUE, + this.connectionClassName); + this.unsupportedHTAPIFallback = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_UNSUPPORTED_HTAPI_FALLBACK_VALUE, + this.connectionClassName); + this.maxResults = + BigQueryJdbcUrlUtility.parseLongProperty( + url, + BigQueryJdbcUrlUtility.MAX_RESULTS_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_MAX_RESULTS_VALUE, + this.connectionClassName); + Map queryPropertiesMap = + BigQueryJdbcUrlUtility.parseQueryProperties(url, this.connectionClassName); + this.sessionInfoConnectionProperty = getSessionPropertyFromQueryProperties(queryPropertiesMap); + this.queryProperties = convertMapToConnectionPropertiesList(queryPropertiesMap); + this.enableWriteAPI = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.ENABLE_WRITE_API_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_ENABLE_WRITE_API_VALUE, + this.connectionClassName); + this.writeAPIActivationRowCount = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_SWA_ACTIVATION_ROW_COUNT_VALUE, + this.connectionClassName); + this.writeAPIAppendRowCount = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.SWA_APPEND_ROW_COUNT_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_SWA_APPEND_ROW_COUNT_VALUE, + this.connectionClassName); + this.additionalProjects = + BigQueryJdbcUrlUtility.parseStringListProperty( + url, + BigQueryJdbcUrlUtility.ADDITIONAL_PROJECTS_PROPERTY_NAME, + this.connectionClassName); + this.filterTablesOnDefaultDataset = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_FILTER_TABLES_ON_DEFAULT_DATASET_VALUE, + this.connectionClassName); + this.requestGoogleDriveScope = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE, + this.connectionClassName); + this.metadataFetchThreadCount = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE, + this.connectionClassName); + + HEADER_PROVIDER = createHeaderProvider(); + this.bigQuery = getBigQueryConnection(); + } + + String getLibraryVersion(Class libraryClass) { + LOG.finest("++enter++"); + String version = null; + try (InputStream in = + libraryClass.getResourceAsStream( + "/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (in != null) { + Properties props = new Properties(); + props.load(in); + version = props.getProperty("version.jdbc"); + } + } catch (IOException e) { + return DEFAULT_VERSION; + } + + return version != null ? 
version : DEFAULT_VERSION; + } + + private String buildPartnerToken(String url) { + String partnerTokenString = + BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, this.connectionClassName); + if (partnerTokenString == null || partnerTokenString.isEmpty()) { + return ""; + } + return partnerTokenString; + } + + HeaderProvider createHeaderProvider() { + String partnerToken = buildPartnerToken(this.connectionUrl); + String headerToken = + DEFAULT_JDBC_TOKEN_VALUE + "/" + getLibraryVersion(this.getClass()) + partnerToken; + return FixedHeaderProvider.create("user-agent", headerToken); + } + + protected void addOpenStatements(Statement statement) { + LOG.finest(String.format("Statement %s added to Connection %s.", statement, this)); + this.openStatements.add(statement); + } + + BigQueryReadClient getBigQueryReadClient() { + try { + if (this.bigQueryReadClient == null) { + this.bigQueryReadClient = getBigQueryReadClientConnection(); + } + } catch (IOException e) { + throw new BigQueryJdbcRuntimeException(e); + } + return this.bigQueryReadClient; + } + + BigQueryWriteClient getBigQueryWriteClient() { + try { + if (this.bigQueryWriteClient == null) { + this.bigQueryWriteClient = getBigQueryWriteClientConnection(); + } + } catch (IOException e) { + throw new BigQueryJdbcRuntimeException(e); + } + return this.bigQueryWriteClient; + } + + BigQuery getBigQuery() { + return this.bigQuery; + } + + String getConnectionUrl() { + return connectionUrl; + } + + /** + * Creates and returns a new {@code Statement} object for executing BigQuery SQL queries + * + * @return a new {@code Statement} object + * @see Connection#createStatement() + */ + @Override + public Statement createStatement() throws SQLException { + checkClosed(); + BigQueryStatement currentStatement = new BigQueryStatement(this); + LOG.fine(String.format("Statement %s created.", currentStatement)); + addOpenStatements(currentStatement); + return currentStatement; + } + + /** + * Creates and returns a new {@code Statement} object for executing BigQuery SQL queries. This + * method is similar to {@link BigQueryConnection#createStatement()}, but it overrides the type + * and concurrency of the generated {@code ResultSet}. + * + * @throws SQLException if a BigQuery connection error occurs, if this method is called on a + * closed connection, or the given parameters are not {@code ResultSet} constants indicating + * type and concurrency. + * @throws BigQueryJdbcSqlFeatureNotSupportedException if this method is not supported for the + * specified result set type and result set concurrency. + * @see Connection#createStatement(int, int) + * @see ResultSet + */ + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency) + throws SQLException { + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported createStatement feature."); + } + return createStatement(); + } + + /** + * Creates and returns a new {@code Statement} object for executing BigQuery SQL queries. This + * method is similar to {@link BigQueryConnection#createStatement()}, but it overrides the type, + * concurrency, and holdability of the generated {@code ResultSet}. + * + * @throws SQLException if a BigQuery connection error occurs, if this method is called on a + * closed connection, or the given parameters are not {@code ResultSet} constants indicating + * type, concurrency, and holdability. 
+ * @throws BigQueryJdbcSqlFeatureNotSupportedException if this method is not supported for the + * specified result set type, result set holdability and result set concurrency. + * @see Connection#createStatement(int, int, int) + * @see ResultSet + */ + @Override + public Statement createStatement( + int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY + || resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported createStatement feature"); + } + return createStatement(); + } + + @Override + public PreparedStatement prepareStatement(String sql) throws SQLException { + checkClosed(); + PreparedStatement currentStatement = new BigQueryPreparedStatement(this, sql); + LOG.fine(String.format("Prepared Statement %s created.", currentStatement)); + addOpenStatements(currentStatement); + return currentStatement; + } + + @Override + public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { + if (autoGeneratedKeys != Statement.NO_GENERATED_KEYS) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("autoGeneratedKeys is not supported"); + } + return prepareStatement(sql); + } + + @Override + public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException("autoGeneratedKeys is not supported"); + } + + @Override + public PreparedStatement prepareStatement( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY + || resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported prepareStatement feature"); + } + return prepareStatement(sql); + } + + @Override + public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException { + LOG.finest("++enter++"); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported prepareStatement feature"); + } + return prepareStatement(sql); + } + + public DatasetId getDefaultDataset() { + checkClosed(); + return this.defaultDataset; + } + + String getDestinationDataset() { + return this.destinationDataset; + } + + String getDestinationTable() { + return this.destinationTable; + } + + long getDestinationDatasetExpirationTime() { + return this.destinationDatasetExpirationTime; + } + + String getKmsKeyName() { + return this.kmsKeyName; + } + + List getQueryProperties() { + return this.queryProperties; + } + + public String getLocation() { + checkClosed(); + return this.location; + } + + public Map getAuthProperties() { + checkClosed(); + return this.authProperties; + } + + long getMaxResults() { + return maxResults; + } + + long getRetryTimeoutInSeconds() { + return this.retryTimeoutInSeconds; + } + + Duration getRetryTimeoutDuration() { + return this.retryTimeoutDuration; + } + + long getRetryInitialDelayInSeconds() { + return this.retryInitialDelayInSeconds; + } + + Duration getRetryInitialDelayDuration() { + return this.retryInitialDelayDuration; + } + + long 
getRetryMaxDelayInSeconds() { + return this.retryMaxDelayInSeconds; + } + + Duration getRetryMaxDelayDuration() { + return this.retryMaxDelayDuration; + } + + long getJobTimeoutInSeconds() { + return this.jobTimeoutInSeconds; + } + + long getMaxBytesBilled() { + return this.maxBytesBilled; + } + + Map getLabels() { + return this.labels; + } + + /** + * Begins a transaction.
+ * The transaction ends when a {@link BigQueryConnection#commit()} or {@link + * BigQueryConnection#rollback()} is made.
+ * For more information about transactions in BigQuery, see Multi-statement transactions. + */ + private void beginTransaction() { + LOG.finest("++enter++"); + QueryJobConfiguration.Builder transactionBeginJobConfig = + QueryJobConfiguration.newBuilder("BEGIN TRANSACTION;"); + try { + if (this.sessionInfoConnectionProperty != null) { + transactionBeginJobConfig.setConnectionProperties(this.queryProperties); + } else { + transactionBeginJobConfig.setCreateSession(true); + } + Job job = this.bigQuery.create(JobInfo.of(transactionBeginJobConfig.build())); + job = job.waitFor(); + Job transactionBeginJob = this.bigQuery.getJob(job.getJobId()); + if (this.sessionInfoConnectionProperty == null) { + this.sessionInfoConnectionProperty = + ConnectionProperty.newBuilder() + .setKey("session_id") + .setValue(transactionBeginJob.getStatistics().getSessionInfo().getSessionId()) + .build(); + this.queryProperties.add(this.sessionInfoConnectionProperty); + } + this.transactionStarted = true; + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } + + public boolean isTransactionStarted() { + return this.transactionStarted; + } + + boolean isSessionEnabled() { + return this.enableSession; + } + + boolean isUnsupportedHTAPIFallback() { + return this.unsupportedHTAPIFallback; + } + + ConnectionProperty getSessionInfoConnectionProperty() { + return this.sessionInfoConnectionProperty; + } + + boolean isEnableHighThroughputAPI() { + return this.enableHighThroughputAPI; + } + + boolean isUseQueryCache() { + return useQueryCache; + } + + boolean getUseStatelessQueryMode() { + return useStatelessQueryMode; + } + + boolean isAllowLargeResults() { + return allowLargeResults; + } + + String getQueryDialect() { + return queryDialect; + } + + Integer getNumBufferedRows() { + return numBufferedRows; + } + + int getHighThroughputMinTableSize() { + return highThroughputMinTableSize; + } + + List getAdditionalProjects() { + return this.additionalProjects; + } + + int getHighThroughputActivationRatio() { + return highThroughputActivationRatio; + } + + boolean isFilterTablesOnDefaultDataset() { + return this.filterTablesOnDefaultDataset; + } + + int isRequestGoogleDriveScope() { + return requestGoogleDriveScope; + } + + int getMetadataFetchThreadCount() { + return this.metadataFetchThreadCount; + } + + boolean isEnableWriteAPI() { + return enableWriteAPI; + } + + int getWriteAPIActivationRowCount() { + return writeAPIActivationRowCount; + } + + int getWriteAPIAppendRowCount() { + return writeAPIAppendRowCount; + } + + String getSSLTrustStorePath() { + return sslTrustStorePath; + } + + String getSSLTrustStorePassword() { + return sslTrustStorePassword; + } + + @Override + public boolean isValid(int timeout) throws SQLException { + if (timeout < 0) { + throw new BigQueryJdbcException("timeout must be >= 0"); + } + if (!isClosed()) { + try (Statement statement = createStatement(); + ResultSet rs = statement.executeQuery("SELECT 1")) { + LOG.finest("Running validation query"); + // TODO(obada): set query timeout when it's implemented + // TODO(obada): use dry run + if (rs.next()) { + if (rs.getInt(1) == 1) { + return true; + } + } + } catch (SQLException ex) { + // Ignore + } + } + return false; + } + + @Override + public void abort(Executor executor) throws SQLException { + LOG.finest("++enter++"); + close(); + } + + // TODO: Throw exception translation of BigQueryJdbcSqlClientInfoException when implementing below + @Override + public void setClientInfo(String name, String value) {} + + 
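+ // Illustrative usage (not part of this class): driving the session-backed transaction flow implemented by beginTransaction(), commit() and rollback(). Assumes a connection URL that enables sessions (see BigQueryJdbcUrlUtility.ENABLE_SESSION_PROPERTY_NAME); the URL and SQL below are hypothetical placeholders.
+ //   try (Connection conn = DriverManager.getConnection(url)) {
+ //     conn.setAutoCommit(false);        // starts a session and issues BEGIN TRANSACTION
+ //     try (Statement stmt = conn.createStatement()) {
+ //       stmt.executeUpdate("UPDATE my_dataset.my_table SET col = 1 WHERE id = 42");
+ //       conn.commit();                  // COMMIT TRANSACTION, then a fresh BEGIN
+ //     } catch (SQLException e) {
+ //       conn.rollback();                // ROLLBACK TRANSACTION, then a fresh BEGIN
+ //       throw e;
+ //     }
+ //   }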
@Override + public String getClientInfo(String name) { + return null; + } + + @Override + public String getCatalog() { + return this.catalog; + } + + @Override + public Properties getClientInfo() { + return null; + } + + @Override + public void setClientInfo(Properties properties) {} + + @Override + public SQLWarning getWarnings() { + return this.sqlWarnings.isEmpty() ? null : this.sqlWarnings.get(0); + } + + @Override + public void clearWarnings() { + this.sqlWarnings.clear(); + } + + @Override + public boolean getAutoCommit() { + checkClosed(); + return this.autoCommit; + } + + /** + * Sets this connection's auto-commit mode to the given state.
+ * If this method is called during a transaction and the auto-commit mode is changed, the + * transaction is committed. If setAutoCommit is called and the auto-commit mode is not changed, + * the call is a no-op. + * + * @param autoCommit {@code true} to enable auto-commit mode; {@code false} to disable it + * @see Connection#setAutoCommit(boolean) + */ + @Override + public void setAutoCommit(boolean autoCommit) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + checkIfEnabledSession("setAutoCommit"); + if (this.autoCommit == autoCommit) { + return; + } + + if (isTransactionStarted()) { + commitTransaction(); + } + + this.autoCommit = autoCommit; + if (!this.autoCommit) { + beginTransaction(); + } + } + + @Override + public void commit() { + LOG.finest("++enter++"); + checkClosed(); + checkIfEnabledSession("commit"); + if (!isTransactionStarted()) { + throw new IllegalStateException( + "Cannot commit without an active transaction. Please set setAutoCommit to false to start" + + " a transaction."); + } + commitTransaction(); + if (!getAutoCommit()) { + beginTransaction(); + } + } + + @Override + public void rollback() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + checkIfEnabledSession("rollback"); + if (!isTransactionStarted()) { + throw new IllegalStateException( + "Cannot rollback without an active transaction. Please set setAutoCommit to false to" + + " start a transaction."); + } + try { + QueryJobConfiguration transactionRollbackJobConfig = + QueryJobConfiguration.newBuilder("ROLLBACK TRANSACTION;") + .setConnectionProperties(this.queryProperties) + .build(); + Job rollbackJob = this.bigQuery.create(JobInfo.of(transactionRollbackJobConfig)); + rollbackJob.waitFor(); + this.transactionStarted = false; + if (!getAutoCommit()) { + beginTransaction(); + } + } catch (InterruptedException | BigQueryException ex) { + throw new BigQueryJdbcException(ex); + } + } + + @Override + public DatabaseMetaData getMetaData() throws SQLException { + return new BigQueryDatabaseMetaData(this); + } + + @Override + public int getTransactionIsolation() { + // only supports Connection.TRANSACTION_SERIALIZABLE + return Connection.TRANSACTION_SERIALIZABLE; + } + + @Override + public void setTransactionIsolation(int level) throws SQLException { + if (level != Connection.TRANSACTION_SERIALIZABLE) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Transaction serializable not supported"); + } + this.transactionIsolation = level; + } + + @Override + public int getHoldability() { + return this.holdability; + } + + @Override + public void setHoldability(int holdability) throws SQLException { + if (holdability != ResultSet.CLOSE_CURSORS_AT_COMMIT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "CLOSE_CURSORS_AT_COMMIT not supported"); + } + this.holdability = holdability; + } + + /** + * Releases this {@code BigQueryConnection} object's BigQuery resources immediately instead of + * waiting for them to be automatically released. 
+ * + * @throws SQLException if a BigQuery access error occurs + * @see Connection#close() + */ + @Override + public void close() throws SQLException { + LOG.fine("Closing Connection " + this); + // TODO(neenu-postMVP): Release all connection state objects + // check for and close all existing transactions + + if (isClosed()) { + return; + } + try { + if (this.bigQueryReadClient != null) { + this.bigQueryReadClient.shutdown(); + this.bigQueryReadClient.awaitTermination(1, TimeUnit.MINUTES); + this.bigQueryReadClient.close(); + } + + if (this.bigQueryWriteClient != null) { + this.bigQueryWriteClient.shutdown(); + this.bigQueryWriteClient.awaitTermination(1, TimeUnit.MINUTES); + this.bigQueryWriteClient.close(); + } + + for (Statement statement : this.openStatements) { + statement.close(); + } + this.openStatements.clear(); + } catch (ConcurrentModificationException ex) { + throw new BigQueryJdbcException(ex); + } catch (InterruptedException e) { + throw new BigQueryJdbcRuntimeException(e); + } + this.isClosed = true; + } + + @Override + public boolean isClosed() { + return this.isClosed; + } + + private void checkClosed() { + if (isClosed()) { + throw new IllegalStateException("This " + getClass().getName() + " has been closed"); + } + } + + private void checkIfEnabledSession(String methodName) { + if (!this.enableSession) { + throw new IllegalStateException( + String.format("Session needs to be enabled to use %s method.", methodName)); + } + } + + private ConnectionProperty getSessionPropertyFromQueryProperties( + Map queryPropertiesMap) { + LOG.finest("++enter++"); + if (queryPropertiesMap != null) { + if (queryPropertiesMap.containsKey("session_id")) { + return ConnectionProperty.newBuilder() + .setKey("session_id") + .setValue(queryPropertiesMap.get("session_id")) + .build(); + } + } + return null; + } + + private List convertMapToConnectionPropertiesList( + Map queryPropertiesMap) { + LOG.finest("++enter++"); + List connectionProperties = new ArrayList(); + if (queryPropertiesMap != null) { + for (Map.Entry entry : queryPropertiesMap.entrySet()) { + connectionProperties.add( + ConnectionProperty.newBuilder() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build()); + } + } + return connectionProperties; + } + + void removeStatement(Statement statement) { + this.openStatements.remove(statement); + } + + private BigQuery getBigQueryConnection() { + // 404 Not Found - check if the project exists + // 403 Forbidden - execute a dryRun to check if the user has bigquery.jobs.create permissions + BigQueryOptions.Builder bigQueryOptions = BigQueryOptions.newBuilder(); + if (this.retryTimeoutInSeconds > 0L + || (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L)) { + RetrySettings.Builder retry_settings_builder = RetrySettings.newBuilder(); + if (this.retryTimeoutInSeconds > 0L) { + retry_settings_builder.setTotalTimeoutDuration(this.retryTimeoutDuration); + } + if (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L) { + retry_settings_builder.setInitialRetryDelayDuration(retryInitialDelayDuration); + retry_settings_builder.setMaxRetryDelayDuration(retryMaxDelayDuration); + } + bigQueryOptions.setRetrySettings(retry_settings_builder.build()); + } + + if (this.catalog != null) { + bigQueryOptions.setProjectId(this.catalog); + } + if (this.credentials != null) { + bigQueryOptions.setCredentials(this.credentials); + } + if (this.location != null) { + bigQueryOptions.setLocation(this.location); + } + if (this.overrideProperties.containsKey( + 
BigQueryJdbcUrlUtility.BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME)) { + bigQueryOptions.setHost( + this.overrideProperties.get( + BigQueryJdbcUrlUtility.BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME)); + } + if (this.universeDomain != null) { + bigQueryOptions.setUniverseDomain(this.universeDomain); + } + if (this.httpTransportOptions != null) { + bigQueryOptions.setTransportOptions(this.httpTransportOptions); + } + + BigQueryOptions options = bigQueryOptions.setHeaderProvider(HEADER_PROVIDER).build(); + options.setQueryPreviewEnabled(String.valueOf(this.useStatelessQueryMode)); + return options.getService(); + } + + private BigQueryReadClient getBigQueryReadClientConnection() throws IOException { + BigQueryReadSettings.Builder bigQueryReadSettings = + BigQueryReadSettings.newBuilder().setHeaderProvider(HEADER_PROVIDER); + if (getRetrySettings() != null) { + bigQueryReadSettings.createReadSessionSettings().setRetrySettings(getRetrySettings()); + } + if (this.catalog != null) { + bigQueryReadSettings.setQuotaProjectId(this.catalog); + } + if (this.credentials != null) { + CredentialsProvider fixedProvider = FixedCredentialsProvider.create(credentials); + bigQueryReadSettings.setCredentialsProvider(fixedProvider); + } + if (this.overrideProperties.containsKey( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)) { + bigQueryReadSettings.setEndpoint( + this.overrideProperties.get( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)); + } + if (this.universeDomain != null) { + bigQueryReadSettings.setUniverseDomain(this.universeDomain); + } + if (this.transportChannelProvider != null) { + bigQueryReadSettings.setTransportChannelProvider(this.transportChannelProvider); + } + + return BigQueryReadClient.create(bigQueryReadSettings.build()); + } + + private BigQueryWriteClient getBigQueryWriteClientConnection() throws IOException { + BigQueryWriteSettings.Builder bigQueryWriteSettings = + BigQueryWriteSettings.newBuilder().setHeaderProvider(HEADER_PROVIDER); + if (getRetrySettings() != null) { + bigQueryWriteSettings.createWriteStreamSettings().setRetrySettings(getRetrySettings()); + } + if (this.catalog != null) { + bigQueryWriteSettings.setQuotaProjectId(this.catalog); + } + if (this.credentials != null) { + CredentialsProvider fixedProvider = FixedCredentialsProvider.create(credentials); + bigQueryWriteSettings.setCredentialsProvider(fixedProvider); + } + // Same endpoint as READ API + if (this.overrideProperties.containsKey( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)) { + bigQueryWriteSettings.setEndpoint( + this.overrideProperties.get( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)); + } + if (this.universeDomain != null) { + bigQueryWriteSettings.setUniverseDomain(this.universeDomain); + } + if (this.transportChannelProvider != null) { + bigQueryWriteSettings.setTransportChannelProvider(this.transportChannelProvider); + } + + return BigQueryWriteClient.create(bigQueryWriteSettings.build()); + } + + RetrySettings getRetrySettings() { + RetrySettings.Builder retrySettingsBuilder = null; + + if (this.retryTimeoutInSeconds > 0L + || (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L)) { + retrySettingsBuilder = RetrySettings.newBuilder(); + if (this.retryTimeoutInSeconds > 0L) { + retrySettingsBuilder.setTotalTimeoutDuration(this.retryTimeoutDuration); + } + if (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L) { + retrySettingsBuilder.setInitialRetryDelayDuration(retryInitialDelayDuration); + 
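+ // The max delay caps the exponential backoff that grows from the initial delay; the pair is only applied when both URL properties are set.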
retrySettingsBuilder.setMaxRetryDelayDuration(retryMaxDelayDuration); + } + } + return retrySettingsBuilder == null ? null : retrySettingsBuilder.build(); + } + + private void commitTransaction() { + try { + QueryJobConfiguration transactionCommitJobConfig = + QueryJobConfiguration.newBuilder("COMMIT TRANSACTION;") + .setConnectionProperties(this.queryProperties) + .build(); + Job commitJob = this.bigQuery.create(JobInfo.of(transactionCommitJobConfig)); + commitJob.waitFor(); + this.transactionStarted = false; + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } + + @Override + public CallableStatement prepareCall(String sql) throws SQLException { + checkClosed(); + CallableStatement currentStatement = new BigQueryCallableStatement(this, sql); + LOG.fine(String.format("Callable Statement %s created.", currentStatement)); + addOpenStatements(currentStatement); + return currentStatement; + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported CallableStatement feature"); + } + return prepareCall(sql); + } + + @Override + public CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY + || resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported CallableStatement feature"); + } + return prepareCall(sql); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java new file mode 100644 index 0000000000..17be15d866 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java @@ -0,0 +1,117 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import java.util.List;
+
+class BigQueryConnectionProperty {
+
+  private final String name;
+  private final String description;
+  private final String defaultValue;
+  private final List<String> validValues;
+
+  public String getName() {
+    return name;
+  }
+
+  public String getDescription() {
+    return description;
+  }
+
+  public String getDefaultValue() {
+    return defaultValue;
+  }
+
+  public List<String> getValidValues() {
+    return validValues;
+  }
+
+  BigQueryConnectionProperty(Builder builder) {
+    this.name = builder.name;
+    this.defaultValue = builder.defaultValue;
+    this.description = builder.description;
+    this.validValues = builder.validValues;
+  }
+
+  /** Returns a builder for a BigQueryConnectionProperty object. */
+  static BigQueryConnectionProperty.Builder newBuilder() {
+    return new BigQueryConnectionProperty.Builder();
+  }
+
+  BigQueryConnectionProperty.Builder toBuilder() {
+    return new BigQueryConnectionProperty.Builder(this);
+  }
+
+  @Override
+  public String toString() {
+    return "BigQueryConnectionProperty{"
+        + "name='"
+        + name
+        + '\''
+        + ", description='"
+        + description
+        + '\''
+        + ", defaultValue='"
+        + defaultValue
+        + '\''
+        + ", validValues="
+        + validValues
+        + '}';
+  }
+
+  static final class Builder {
+
+    private String name;
+    private String description;
+    private String defaultValue;
+    private List<String> validValues;
+
+    private Builder(BigQueryConnectionProperty bigQueryConnectionProperty) {
+      this.name = bigQueryConnectionProperty.name;
+      this.description = bigQueryConnectionProperty.description;
+      this.defaultValue = bigQueryConnectionProperty.defaultValue;
+      this.validValues = bigQueryConnectionProperty.validValues;
+    }
+
+    private Builder() {}
+
+    Builder setName(String name) {
+      this.name = name;
+      return this;
+    }
+
+    Builder setDescription(String description) {
+      this.description = description;
+      return this;
+    }
+
+    Builder setDefaultValue(String defaultValue) {
+      this.defaultValue = defaultValue;
+      return this;
+    }
+
+    Builder setValidValues(List<String> validValues) {
+      this.validValues = validValues;
+      return this;
+    }
+
+    BigQueryConnectionProperty build() {
+      return new BigQueryConnectionProperty(this);
+    }
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java
new file mode 100644
index 0000000000..386785660a
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.core.InternalApi;
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import java.lang.ref.Reference;
+import java.lang.ref.ReferenceQueue;
+import java.util.List;
+
+/**
+ * Provides the daemon polling threads required for draining the reference queues associated with
+ * the two ResultSet implementations.
+ */
+@InternalApi
+class BigQueryDaemonPollingTask extends Thread {
+
+  private static final BigQueryJdbcCustomLogger LOG =
+      new BigQueryJdbcCustomLogger(BigQueryDaemonPollingTask.class.getName());
+
+  static ReferenceQueue referenceQueueArrowRs;
+  static ReferenceQueue referenceQueueJsonRs;
+  static List arrowRsFinalizers;
+  static List jsonRsFinalizers;
+
+  private static BigQueryDaemonPollingTask arrowDaemon;
+  private static BigQueryDaemonPollingTask jsonDaemon;
+
+  private static final Object LOCK = new Object();
+
+  // Each daemon drains exactly one queue. The queue is captured per instance because
+  // both static queue fields are non-null by the time the second daemon runs, so
+  // dispatching on the statics inside run() would leave one queue unpolled.
+  private final ReferenceQueue queueToPoll;
+  private final boolean pollsArrowQueue;
+
+  private BigQueryDaemonPollingTask(
+      List arrowRsFinalizers,
+      ReferenceQueue referenceQueueArrowRs) {
+    BigQueryDaemonPollingTask.referenceQueueArrowRs = referenceQueueArrowRs;
+    BigQueryDaemonPollingTask.arrowRsFinalizers = arrowRsFinalizers;
+    this.queueToPoll = referenceQueueArrowRs;
+    this.pollsArrowQueue = true;
+    setDaemon(true);
+  }
+
+  private BigQueryDaemonPollingTask(
+      ReferenceQueue referenceQueueJsonRs,
+      List jsonRsFinalizers) {
+    BigQueryDaemonPollingTask.referenceQueueJsonRs = referenceQueueJsonRs;
+    BigQueryDaemonPollingTask.jsonRsFinalizers = jsonRsFinalizers;
+    this.queueToPoll = referenceQueueJsonRs;
+    this.pollsArrowQueue = false;
+    setDaemon(true);
+  }
+
+  /**
+   * Thread-safe method which creates two instances of the polling task, one for each type of
+   * ResultSet
+   *
+   * @param referenceQueueArrowRs ReferenceQueue for ArrowResultSet
+   * @param referenceQueueJsonRs ReferenceQueue for JsonResultSet
+   * @param arrowRsFinalizers Finalizer for ArrowResultSet
+   * @param jsonRsFinalizers Finalizer for JsonResultSet
+   * @return true if the tasks were created
+   */
+  public static boolean startGcDaemonTask(
+      ReferenceQueue referenceQueueArrowRs,
+      ReferenceQueue referenceQueueJsonRs,
+      List arrowRsFinalizers,
+      List jsonRsFinalizers) {
+    LOG.finest("++enter++");
+    synchronized (LOCK) {
+      // 2 Background threads will be required to monitor the respective queues
+      if (arrowDaemon == null && jsonDaemon == null) {
+        arrowDaemon = new BigQueryDaemonPollingTask(arrowRsFinalizers, referenceQueueArrowRs);
+        arrowDaemon.start();
+
+        jsonDaemon = new BigQueryDaemonPollingTask(referenceQueueJsonRs, jsonRsFinalizers);
+        jsonDaemon.start();
+
+        return true;
+      }
+    }
+    // Task(s) are already initialised
+    return false;
+  }
+
+  @Override
+  public void run() {
+    Reference reference;
+    try {
+      LOG.finest("++enter++");
+      if (queueToPoll == null) {
+        throw new BigQueryJdbcRuntimeException("Null Reference Queue");
+      }
+      // remove() blocks until the GC enqueues a reference, so this loop runs for the
+      // lifetime of the daemon.
+      while ((reference = queueToPoll.remove()) != null) {
+        if (pollsArrowQueue) {
+          LOG.fine("Clearing Arrow ResultSet reference " + reference);
+          ((BigQueryResultSetFinalizers.ArrowResultSetFinalizer) reference).finalizeResources();
+        } else {
+          LOG.fine("Clearing Json ResultSet reference " + reference);
+          ((BigQueryResultSetFinalizers.JsonResultSetFinalizer) reference).finalizeResources();
+        }
+        reference.clear();
+      }
+    } catch (InterruptedException ex) {
+      throw new BigQueryJdbcRuntimeException(ex);
+    }
+  }
+}
diff --git
a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java new file mode 100644 index 0000000000..66917ea889 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java @@ -0,0 +1,5346 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.gax.paging.Page; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQuery.DatasetListOption; +import com.google.cloud.bigquery.BigQuery.RoutineListOption; +import com.google.cloud.bigquery.BigQuery.TableListOption; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.Routine; +import com.google.cloud.bigquery.RoutineArgument; +import com.google.cloud.bigquery.RoutineId; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLDataType; +import com.google.cloud.bigquery.StandardSQLField; +import com.google.cloud.bigquery.StandardSQLTableType; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableDefinition; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.RowIdLifetime; +import java.sql.SQLException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Properties; +import java.util.Scanner; +import java.util.Set; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Callable; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.regex.Pattern; +import javax.annotation.Nullable; + +/** + * An implementation of {@link java.sql.DatabaseMetaData}. 
This interface is implemented by driver
+ * vendors to let users know the capabilities of a Database Management System (DBMS) in combination
+ * with the driver based on JDBC™ technology ("JDBC driver") that is used with it.
+ *
+ * @see BigQueryStatement
+ */
+// TODO(neenu): test and verify after post MVP implementation.
+class BigQueryDatabaseMetaData implements DatabaseMetaData {
+  final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+  private static final String DATABASE_PRODUCT_NAME = "Google BigQuery";
+  private static final String DATABASE_PRODUCT_VERSION = "2.0";
+  private static final String DRIVER_NAME = "GoogleJDBCDriverForGoogleBigQuery";
+  private static final String DRIVER_DEFAULT_VERSION = "0.0.0";
+  private static final String SCHEMA_TERM = "Dataset";
+  private static final String CATALOG_TERM = "Project";
+  private static final String PROCEDURE_TERM = "Procedure";
+  private static final String GET_PRIMARY_KEYS_SQL = "DatabaseMetaData_GetPrimaryKeys.sql";
+  private static final String GET_IMPORTED_KEYS_SQL = "DatabaseMetaData_GetImportedKeys.sql";
+  private static final String GET_EXPORTED_KEYS_SQL = "DatabaseMetaData_GetExportedKeys.sql";
+  private static final String GET_CROSS_REFERENCE_SQL = "DatabaseMetaData_GetCrossReference.sql";
+  private static final int API_EXECUTOR_POOL_SIZE = 50;
+  private static final int DEFAULT_PAGE_SIZE = 500;
+  private static final int DEFAULT_QUEUE_CAPACITY = 5000;
+  // Declared package-private for testing.
+  static final String GOOGLE_SQL_QUOTED_IDENTIFIER = "`";
+  // Does not include SQL:2003 Keywords as per JDBC spec.
+  // https://en.wikipedia.org/wiki/List_of_SQL_reserved_words
+  static final String GOOGLE_SQL_RESERVED_KEYWORDS =
+      "ASC,ASSERT_ROWS_MODIFIED,DESC,ENUM,EXCLUDE,FOLLOWING,HASH,IF,"
+          + "IGNORE,LIMIT,LOOKUP,NULLS,PRECEDING,PROTO,QUALIFY,RESPECT,STRUCT,UNBOUNDED";
+  static final String GOOGLE_SQL_NUMERIC_FNS =
+      "ABS,ACOS,ACOSH,ASIN,ASINH,ATAN,ATAN2,ATANH,CBRT,CEIL,CEILING,COS"
+          + ",COSH,COSINE_DISTANCE,COT,COTH,CSC,CSCH,DIV,EXP,EUCLIDEAN_DISTANCE,FLOOR"
+          + ",GREATEST,IS_INF,LEAST,LN,LOG,LOG10,MOD,POW,RAND,RANGE_BUCKET,ROUND"
+          + ",SAFE_ADD,SAFE_DIVIDE,SAFE_MULTIPLY,SAFE_NEGATE,SAFE_SUBTRACT,SEC,SECH,"
+          + "SIGN,SIN,SINH,SQRT,TAN,TANH,TRUNC";
+  static final String GOOGLE_SQL_STRING_FNS =
+      "ASCII,BYTE_LENGTH,CHAR_LENGTH,CHARACTER_LENGTH,CHR,CODE_POINTS_TO_BYTES,"
+          + "CODE_POINTS_TO_STRING,COLLATE,CONCAT,CONTAINS_SUBSTR,EDIT_DISTANCE,ENDS_WITH,"
+          + "FORMAT,FROM_BASE32,FROM_BASE64,FROM_HEX,INITCAP,INSTR,LEFT,LENGTH,LOWER,"
+          + "LPAD,LTRIM,NORMALIZE,NORMALIZE_AND_CASEFOLD,OCTET_LENGTH,REGEXP_CONTAINS,"
+          + "REGEXP_EXTRACT,REGEXP_EXTRACT_ALL,REGEXP_INSTR,REGEXP_REPLACE,REGEXP_SUBSTR,"
+          + "REPEAT,REPLACE,REVERSE,RIGHT,RPAD,RTRIM,SAFE_CONVERT_BYTES_TO_STRING,SOUNDEX,"
+          + "SPLIT,STARTS_WITH,STRPOS,SUBSTR,SUBSTRING,TO_BASE32,TO_BASE64,TO_CODE_POINTS,"
+          + "TO_HEX,TRANSLATE,TRIM,UNICODE,UPPER";
+  static final String GOOGLE_SQL_TIME_DATE_FNS =
+      "DATE,DATE_ADD,DATE_BUCKET,DATE_DIFF,DATE_FROM_UNIX_DATE,"
+          + "DATE_SUB,DATE_TRUNC,DATETIME,DATETIME_ADD,DATETIME_BUCKET,"
+          + "DATETIME_DIFF,DATETIME_SUB,DATETIME_TRUNC,CURRENT_DATE,CURRENT_DATETIME,"
+          + "CURRENT_TIME,CURRENT_TIMESTAMP,EXTRACT,FORMAT_TIME,PARSE_TIME,"
+          + "TIME,TIME_ADD,TIME_DIFF,TIME_SUB,TIME_TRUNC,"
+          + "FORMAT_TIMESTAMP,GENERATE_TIMESTAMP_ARRAY,PARSE_TIMESTAMP,TIMESTAMP,"
+          + "TIMESTAMP_ADD,TIMESTAMP_DIFF,TIMESTAMP_MICROS,TIMESTAMP_MILLIS,TIMESTAMP_SECONDS,"
"TIMESTAMP_SUB,TIMESTAMP_TRUNC,UNIX_MICROS,UNIX_MILLIS,UNIX_SECONDS"; + static final String GOOGLE_SQL_ESCAPE = "\\"; + static final String GOOGLE_SQL_CATALOG_SEPARATOR = "."; + static final int GOOGLE_SQL_MAX_COL_NAME_LEN = 300; + static final int GOOGLE_SQL_MAX_COLS_PER_TABLE = 10000; + + String URL; + BigQueryConnection connection; + private final BigQueryStatement statement; + private final BigQuery bigquery; + private final int metadataFetchThreadCount; + private static final AtomicReference parsedDriverVersion = new AtomicReference<>(null); + private static final AtomicReference parsedDriverMajorVersion = + new AtomicReference<>(null); + private static final AtomicReference parsedDriverMinorVersion = + new AtomicReference<>(null); + + BigQueryDatabaseMetaData(BigQueryConnection connection) throws SQLException { + this.URL = connection.getConnectionUrl(); + this.connection = connection; + this.statement = connection.createStatement().unwrap(BigQueryStatement.class); + this.bigquery = connection.getBigQuery(); + this.metadataFetchThreadCount = connection.getMetadataFetchThreadCount(); + loadDriverVersionProperties(); + } + + @Override + public boolean allProceduresAreCallable() { + // Returns false because BigQuery's IAM permissions can allow a user + // to discover a procedure's existence without having rights to execute it. + return false; + } + + @Override + public boolean allTablesAreSelectable() { + // Returns true to ensure maximum compatibility with client applications + // that expect a positive response to discover and list all available tables. + return true; + } + + @Override + public String getURL() { + return this.URL; + } + + @Override + public String getUserName() { + return null; + } + + @Override + public boolean isReadOnly() { + return false; + } + + @Override + public boolean nullsAreSortedHigh() { + return false; + } + + @Override + public boolean nullsAreSortedLow() { + return false; + } + + @Override + public boolean nullsAreSortedAtStart() { + return false; + } + + @Override + public boolean nullsAreSortedAtEnd() { + return false; + } + + @Override + public String getDatabaseProductName() { + return DATABASE_PRODUCT_NAME; + } + + @Override + public String getDatabaseProductVersion() { + return DATABASE_PRODUCT_VERSION; + } + + @Override + public String getDriverName() { + return DRIVER_NAME; + } + + @Override + public String getDriverVersion() { + return parsedDriverVersion.get() != null ? parsedDriverVersion.get() : DRIVER_DEFAULT_VERSION; + } + + @Override + public int getDriverMajorVersion() { + return parsedDriverMajorVersion.get() != null ? parsedDriverMajorVersion.get() : 0; + } + + @Override + public int getDriverMinorVersion() { + return parsedDriverMinorVersion.get() != null ? 
parsedDriverMinorVersion.get() : 0; + } + + @Override + public boolean usesLocalFiles() { + return false; + } + + @Override + public boolean usesLocalFilePerTable() { + return false; + } + + @Override + public boolean supportsMixedCaseIdentifiers() { + return false; + } + + @Override + public boolean storesUpperCaseIdentifiers() { + return false; + } + + @Override + public boolean storesLowerCaseIdentifiers() { + return false; + } + + @Override + public boolean storesMixedCaseIdentifiers() { + return false; + } + + @Override + public boolean supportsMixedCaseQuotedIdentifiers() { + return false; + } + + @Override + public boolean storesUpperCaseQuotedIdentifiers() { + return false; + } + + @Override + public boolean storesLowerCaseQuotedIdentifiers() { + return false; + } + + @Override + public boolean storesMixedCaseQuotedIdentifiers() { + return false; + } + + @Override + public String getIdentifierQuoteString() { + return GOOGLE_SQL_QUOTED_IDENTIFIER; + } + + @Override + public String getSQLKeywords() { + return GOOGLE_SQL_RESERVED_KEYWORDS; + } + + @Override + public String getNumericFunctions() { + return GOOGLE_SQL_NUMERIC_FNS; + } + + @Override + public String getStringFunctions() { + return GOOGLE_SQL_STRING_FNS; + } + + @Override + // GoogleSQL has UDF (user defined functions). + // System functions like DATABASE(), USER() are not supported. + public String getSystemFunctions() { + return null; + } + + @Override + public String getTimeDateFunctions() { + return GOOGLE_SQL_TIME_DATE_FNS; + } + + @Override + public String getSearchStringEscape() { + return GOOGLE_SQL_ESCAPE; + } + + @Override + // No extra characters beyond a-z, A-Z, 0-9 and _ + public String getExtraNameCharacters() { + return null; + } + + @Override + public boolean supportsAlterTableWithAddColumn() { + return true; + } + + @Override + public boolean supportsAlterTableWithDropColumn() { + return true; + } + + @Override + public boolean supportsColumnAliasing() { + return true; + } + + @Override + public boolean nullPlusNonNullIsNull() { + return true; + } + + @Override + public boolean supportsConvert() { + return false; + } + + @Override + public boolean supportsConvert(int fromType, int toType) { + return false; + } + + @Override + public boolean supportsTableCorrelationNames() { + return true; + } + + @Override + public boolean supportsDifferentTableCorrelationNames() { + return false; + } + + @Override + public boolean supportsExpressionsInOrderBy() { + return true; + } + + @Override + public boolean supportsOrderByUnrelated() { + return true; + } + + @Override + public boolean supportsGroupBy() { + return true; + } + + @Override + public boolean supportsGroupByUnrelated() { + return true; + } + + @Override + public boolean supportsGroupByBeyondSelect() { + return true; + } + + @Override + public boolean supportsLikeEscapeClause() { + return false; + } + + @Override + public boolean supportsMultipleResultSets() { + return false; + } + + @Override + public boolean supportsMultipleTransactions() { + return false; + } + + @Override + public boolean supportsNonNullableColumns() { + return false; + } + + @Override + public boolean supportsMinimumSQLGrammar() { + return false; + } + + @Override + public boolean supportsCoreSQLGrammar() { + return false; + } + + @Override + public boolean supportsExtendedSQLGrammar() { + return false; + } + + @Override + public boolean supportsANSI92EntryLevelSQL() { + return false; + } + + @Override + public boolean supportsANSI92IntermediateSQL() { + return false; + } + + @Override 
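+  // As with the entry-level and intermediate probes above, no ANSI-92 grammar
+  // conformance is claimed here; mapping GoogleSQL onto the JDBC conformance
+  // levels is left to the post-MVP verification called out in the class-level
+  // TODO.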
+ public boolean supportsANSI92FullSQL() { + return false; + } + + @Override + public boolean supportsIntegrityEnhancementFacility() { + return false; + } + + @Override + public boolean supportsOuterJoins() { + return false; + } + + @Override + public boolean supportsFullOuterJoins() { + return false; + } + + @Override + public boolean supportsLimitedOuterJoins() { + return false; + } + + @Override + public String getSchemaTerm() { + return SCHEMA_TERM; + } + + @Override + public String getProcedureTerm() { + return PROCEDURE_TERM; + } + + @Override + public String getCatalogTerm() { + return CATALOG_TERM; + } + + @Override + public boolean isCatalogAtStart() { + return true; + } + + @Override + public String getCatalogSeparator() { + return GOOGLE_SQL_CATALOG_SEPARATOR; + } + + @Override + public boolean supportsSchemasInDataManipulation() { + return false; + } + + @Override + public boolean supportsSchemasInProcedureCalls() { + return false; + } + + @Override + public boolean supportsSchemasInTableDefinitions() { + return false; + } + + @Override + public boolean supportsSchemasInIndexDefinitions() { + return false; + } + + @Override + public boolean supportsSchemasInPrivilegeDefinitions() { + return false; + } + + @Override + public boolean supportsCatalogsInDataManipulation() { + return false; + } + + @Override + public boolean supportsCatalogsInProcedureCalls() { + return false; + } + + @Override + public boolean supportsCatalogsInTableDefinitions() { + return false; + } + + @Override + public boolean supportsCatalogsInIndexDefinitions() { + return false; + } + + @Override + public boolean supportsCatalogsInPrivilegeDefinitions() { + return false; + } + + @Override + public boolean supportsPositionedDelete() { + return false; + } + + @Override + public boolean supportsPositionedUpdate() { + return false; + } + + @Override + public boolean supportsSelectForUpdate() { + return false; + } + + @Override + public boolean supportsStoredProcedures() { + return false; + } + + @Override + public boolean supportsSubqueriesInComparisons() { + return false; + } + + @Override + public boolean supportsSubqueriesInExists() { + return false; + } + + @Override + public boolean supportsSubqueriesInIns() { + return false; + } + + @Override + public boolean supportsSubqueriesInQuantifieds() { + return false; + } + + @Override + public boolean supportsCorrelatedSubqueries() { + return false; + } + + @Override + public boolean supportsUnion() { + return true; + } + + @Override + public boolean supportsUnionAll() { + return true; + } + + @Override + public boolean supportsOpenCursorsAcrossCommit() { + return false; + } + + @Override + public boolean supportsOpenCursorsAcrossRollback() { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossCommit() { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossRollback() { + return false; + } + + @Override + // No limit + public int getMaxBinaryLiteralLength() { + return 0; + } + + @Override + // No Limit + public int getMaxCharLiteralLength() { + return 0; + } + + @Override + // GoogleSQL documentation says 300. + // https://cloud.google.com/bigquery/quotas#all_tables + public int getMaxColumnNameLength() { + return GOOGLE_SQL_MAX_COL_NAME_LEN; + } + + @Override + // No specific limits for group by. + public int getMaxColumnsInGroupBy() { + return 0; + } + + @Override + // No specific limits for index. + public int getMaxColumnsInIndex() { + return 0; + } + + @Override + // No specific limit for Order By. 
+ public int getMaxColumnsInOrderBy() { + return 0; + } + + @Override + // All columns can be selected. No specific limits. + public int getMaxColumnsInSelect() { + return 0; + } + + @Override + public int getMaxColumnsInTable() { + return GOOGLE_SQL_MAX_COLS_PER_TABLE; + } + + @Override + public int getMaxConnections() { + // Per JDBC spec, returns 0 as there is no connection limit or is unknown. + return 0; + } + + @Override + public int getMaxCursorNameLength() { + // BigQuery does not support named cursors or positioned updates/deletes. + return 0; + } + + @Override + public int getMaxIndexLength() { + // Per the JDBC spec, 0 indicates this feature is not supported. + return 0; + } + + @Override + public int getMaxSchemaNameLength() { + // Dataset IDs can be up to 1024 characters long. + // See: https://cloud.google.com/bigquery/docs/datasets#dataset-naming + return 1024; + } + + @Override + public int getMaxProcedureNameLength() { + // Routine IDs can be up to 256 characters long. + // See: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#RoutineReference.FIELDS.routine_id + return 256; + } + + @Override + public int getMaxCatalogNameLength() { + // Corresponds to the BigQuery Project ID, which can be a maximum of 30 characters. + // See: + // https://cloud.google.com/resource-manager/docs/creating-managing-projects#before_you_begin + return 30; + } + + @Override + public int getMaxRowSize() { + // Per JDBC spec, returns 0 as there is no fixed limit or is unknown. + return 0; + } + + @Override + public boolean doesMaxRowSizeIncludeBlobs() { + return false; + } + + @Override + public int getMaxStatementLength() { + // Per JDBC spec, returns 0 as there is no fixed limit or is unknown. + // See: https://cloud.google.com/bigquery/quotas#query_jobs + return 0; + } + + @Override + public int getMaxStatements() { + // Per JDBC spec, returns 0 as there is no fixed limit or is unknown. + return 0; + } + + @Override + public int getMaxTableNameLength() { + // Table IDs can be up to 1024 characters long. + // See: https://cloud.google.com/bigquery/docs/tables#table-naming + return 1024; + } + + @Override + public int getMaxTablesInSelect() { + // BigQuery allows up to 1,000 tables to be referenced per query. 
+ // See: https://cloud.google.com/bigquery/quotas#query_jobs + return 1000; + } + + @Override + public int getMaxUserNameLength() { + return 0; + } + + @Override + public int getDefaultTransactionIsolation() { + return Connection.TRANSACTION_SERIALIZABLE; + } + + @Override + public boolean supportsTransactions() { + return true; + } + + @Override + public boolean supportsTransactionIsolationLevel(int level) { + return level == Connection.TRANSACTION_SERIALIZABLE; + } + + @Override + public boolean supportsDataDefinitionAndDataManipulationTransactions() { + return false; + } + + @Override + public boolean supportsDataManipulationTransactionsOnly() { + return false; + } + + @Override + public boolean dataDefinitionCausesTransactionCommit() { + return false; + } + + @Override + public boolean dataDefinitionIgnoredInTransactions() { + return false; + } + + @Override + public ResultSet getProcedures( + String catalog, String schemaPattern, String procedureNamePattern) { + if ((catalog == null || catalog.isEmpty()) + || (schemaPattern != null && schemaPattern.isEmpty()) + || (procedureNamePattern != null && procedureNamePattern.isEmpty())) { + LOG.warning("Returning empty ResultSet as catalog is null/empty or a pattern is empty."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getProcedures called for catalog: %s, schemaPattern: %s, procedureNamePattern: %s", + catalog, schemaPattern, procedureNamePattern)); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern procedureNameRegex = compileSqlLikePattern(procedureNamePattern); + final Schema resultSchema = defineGetProceduresSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable procedureFetcher = + () -> { + ExecutorService apiExecutor = null; + ExecutorService routineProcessorExecutor = null; + final FieldList localResultSchemaFields = resultSchemaFields; + final List>> apiFutures = new ArrayList<>(); + + try { + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsToScan.isEmpty()) { + LOG.info("Fetcher thread found no matching datasets. 
Finishing."); + return; + } + + apiExecutor = Executors.newFixedThreadPool(API_EXECUTOR_POOL_SIZE); + routineProcessorExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount); + + LOG.fine("Submitting parallel findMatchingRoutines tasks..."); + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Fetcher interrupted during dataset iteration submission."); + break; + } + + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> apiCallable = + () -> + findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), + currentDatasetId.getDataset(), + name)), + (rt) -> rt.getRoutineId().getRoutine(), + procedureNamePattern, + procedureNameRegex, + LOG); + Future> apiFuture = apiExecutor.submit(apiCallable); + apiFutures.add(apiFuture); + } + LOG.fine("Finished submitting " + apiFutures.size() + " findMatchingRoutines tasks."); + apiExecutor.shutdown(); + + LOG.fine("Processing results from findMatchingRoutines tasks..."); + for (Future> apiFuture : apiFutures) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Fetcher interrupted while processing API futures."); + break; + } + try { + List routinesResult = apiFuture.get(); + if (routinesResult != null) { + for (Routine routine : routinesResult) { + if (Thread.currentThread().isInterrupted()) break; + + if ("PROCEDURE".equalsIgnoreCase(routine.getRoutineType())) { + LOG.fine( + "Submitting processing task for procedure: " + routine.getRoutineId()); + final Routine finalRoutine = routine; + Future processFuture = + routineProcessorExecutor.submit( + () -> + processProcedureInfo( + finalRoutine, collectedResults, localResultSchemaFields)); + processingTaskFutures.add(processFuture); + } else { + LOG.finer("Skipping non-procedure routine: " + routine.getRoutineId()); + } + } + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Fetcher thread interrupted while waiting for API future result."); + break; + } catch (ExecutionException e) { + LOG.warning( + "Error executing findMatchingRoutines task: " + + e.getMessage() + + ". 
Cause: " + + e.getCause()); + } catch (CancellationException e) { + LOG.warning("A findMatchingRoutines task was cancelled."); + } + } + + LOG.fine( + "Finished submitting " + + processingTaskFutures.size() + + " processProcedureInfo tasks."); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher interrupted before waiting for processing tasks; cancelling remaining."); + processingTaskFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine("Waiting for processProcedureInfo tasks to complete..."); + waitForTasksCompletion(processingTaskFutures); + LOG.fine("All processProcedureInfo tasks completed or handled."); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetProceduresComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getProcedures", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in procedure fetcher runnable: " + t.getMessage()); + apiFutures.forEach(f -> f.cancel(true)); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, localResultSchemaFields); + shutdownExecutor(apiExecutor); + shutdownExecutor(routineProcessorExecutor); + LOG.info("Procedure fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(procedureFetcher, "getProcedures-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getProcedures"); + return resultSet; + } + + Schema defineGetProceduresSchema() { + List fields = new ArrayList<>(9); + fields.add( + Field.newBuilder("PROCEDURE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("reserved1", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("reserved2", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("reserved3", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + void processProcedureInfo( + Routine routine, List collectedResults, FieldList resultSchemaFields) { + + RoutineId routineId = routine.getRoutineId(); + LOG.fine("Processing procedure info for: " + routineId); + + try { + if (!"PROCEDURE".equalsIgnoreCase(routine.getRoutineType())) { + LOG.warning( + "processProcedureInfo called with non-procedure type: " + + routine.getRoutineType() + + " for " + + routineId); + return; + } + + String catalogName = routineId.getProject(); + String schemaName = routineId.getDataset(); + String procedureName = routineId.getRoutine(); + String remarks = routine.getDescription(); + + 
List values = new ArrayList<>(resultSchemaFields.size()); + + values.add(createStringFieldValue(catalogName)); // 1. PROCEDURE_CAT + values.add(createStringFieldValue(schemaName)); // 2. PROCEDURE_SCHEM + values.add(createStringFieldValue(procedureName)); // 3. PROCEDURE_NAME + values.add(createNullFieldValue()); // 4. reserved1 + values.add(createNullFieldValue()); // 5. reserved2 + values.add(createNullFieldValue()); // 6. reserved3 + values.add(createStringFieldValue(remarks)); // 7. REMARKS + values.add( + createLongFieldValue( + (long) DatabaseMetaData.procedureResultUnknown)); // 8. PROCEDURE_TYPE + values.add(createStringFieldValue(procedureName)); // 9. SPECIFIC_NAME + + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + + LOG.fine("Processed and added procedure info row for: " + routineId); + + } catch (Exception e) { + LOG.warning( + String.format( + "Error processing procedure info for %s: %s. Skipping this procedure.", + routineId, e.getMessage())); + } + } + + Comparator defineGetProceduresComparator(FieldList resultSchemaFields) { + final int PROC_CAT_IDX = resultSchemaFields.getIndex("PROCEDURE_CAT"); + final int PROC_SCHEM_IDX = resultSchemaFields.getIndex("PROCEDURE_SCHEM"); + final int PROC_NAME_IDX = resultSchemaFields.getIndex("PROCEDURE_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public ResultSet getProcedureColumns( + String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) { + + if (catalog == null || catalog.isEmpty()) { + LOG.warning("Returning empty ResultSet because catalog (project) is null or empty."); + return new BigQueryJsonResultSet(); + } + if ((schemaPattern != null && schemaPattern.isEmpty()) + || (procedureNamePattern != null && procedureNamePattern.isEmpty()) + || (columnNamePattern != null && columnNamePattern.isEmpty())) { + LOG.warning("Returning empty ResultSet because an explicit empty pattern was provided."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getProcedureColumns called for catalog: %s, schemaPattern: %s, procedureNamePattern:" + + " %s, columnNamePattern: %s", + catalog, schemaPattern, procedureNamePattern, columnNamePattern)); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern procedureNameRegex = compileSqlLikePattern(procedureNamePattern); + final Pattern columnNameRegex = compileSqlLikePattern(columnNamePattern); + + final Schema resultSchema = defineGetProcedureColumnsSchema(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable procedureColumnFetcher = + () -> { + ExecutorService listRoutinesExecutor = null; + ExecutorService getRoutineDetailsExecutor = null; + ExecutorService 
processArgsExecutor = null; + + final String fetcherThreadNameSuffix = + "-" + catalogParam.substring(0, Math.min(10, catalogParam.length())); + + try { + List datasetsToScan = + fetchMatchingDatasetsForProcedureColumns(catalogParam, schemaPattern, schemaRegex); + if (datasetsToScan.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No matching datasets or interrupted early. Catalog: " + catalogParam); + return; + } + + listRoutinesExecutor = + Executors.newFixedThreadPool( + API_EXECUTOR_POOL_SIZE, + runnable -> new Thread(runnable, "pcol-list-rout" + fetcherThreadNameSuffix)); + List procedureIdsToGet = + listMatchingProcedureIdsFromDatasets( + datasetsToScan, + procedureNamePattern, + procedureNameRegex, + listRoutinesExecutor, + catalogParam, + LOG); + shutdownExecutor(listRoutinesExecutor); + listRoutinesExecutor = null; + + if (procedureIdsToGet.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info("Fetcher: No procedure IDs found or interrupted. Catalog: " + catalogParam); + return; + } + + getRoutineDetailsExecutor = + Executors.newFixedThreadPool( + 100, + runnable -> new Thread(runnable, "pcol-get-details" + fetcherThreadNameSuffix)); + List fullRoutines = + fetchFullRoutineDetailsForIds(procedureIdsToGet, getRoutineDetailsExecutor, LOG); + shutdownExecutor(getRoutineDetailsExecutor); + getRoutineDetailsExecutor = null; + + if (fullRoutines.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No full routines fetched or interrupted. Catalog: " + catalogParam); + return; + } + + processArgsExecutor = + Executors.newFixedThreadPool( + this.metadataFetchThreadCount, + runnable -> new Thread(runnable, "pcol-proc-args" + fetcherThreadNameSuffix)); + submitProcedureArgumentProcessingJobs( + fullRoutines, + columnNameRegex, + collectedResults, + resultSchema.getFields(), + processArgsExecutor, + processingTaskFutures, + LOG); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher: Interrupted before waiting for argument processing. Catalog: " + + catalogParam); + processingTaskFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine( + "Fetcher: Waiting for " + + processingTaskFutures.size() + + " argument processing tasks. Catalog: " + + catalogParam); + waitForTasksCompletion(processingTaskFutures); + LOG.fine( + "Fetcher: All argument processing tasks completed or handled. Catalog: " + + catalogParam); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetProcedureColumnsComparator(resultSchema.getFields()); + sortResults(collectedResults, comparator, "getProcedureColumns", LOG); + populateQueue(collectedResults, queue, resultSchema.getFields()); + } + + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Fetcher: Interrupted in main try block for catalog " + + catalogParam + + ". Error: " + + e.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } catch (Throwable t) { + LOG.severe( + "Fetcher: Unexpected error in main try block for catalog " + + catalogParam + + ". 
Error: " + + t.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, resultSchema.getFields()); + if (listRoutinesExecutor != null) shutdownExecutor(listRoutinesExecutor); + if (getRoutineDetailsExecutor != null) shutdownExecutor(getRoutineDetailsExecutor); + if (processArgsExecutor != null) shutdownExecutor(processArgsExecutor); + LOG.info("Procedure column fetcher thread finished for catalog: " + catalogParam); + } + }; + + Thread fetcherThread = + new Thread(procedureColumnFetcher, "getProcedureColumns-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getProcedureColumns for catalog: " + catalog); + return resultSet; + } + + private List fetchMatchingDatasetsForProcedureColumns( + String catalogParam, String schemaPattern, Pattern schemaRegex) throws InterruptedException { + LOG.fine( + String.format( + "Fetching matching datasets for catalog '%s', schemaPattern '%s'", + catalogParam, schemaPattern)); + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets(catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + LOG.info( + String.format( + "Found %d datasets to scan for procedures in catalog '%s'.", + datasetsToScan.size(), catalogParam)); + return datasetsToScan; + } + + List listMatchingProcedureIdsFromDatasets( + List datasetsToScan, + String procedureNamePattern, + Pattern procedureNameRegex, + ExecutorService listRoutinesExecutor, + String catalogParam, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + + logger.fine( + String.format( + "Listing matching procedure IDs from %d datasets for catalog '%s'.", + datasetsToScan.size(), catalogParam)); + final List>> listRoutineFutures = new ArrayList<>(); + final List procedureIdsToGet = Collections.synchronizedList(new ArrayList<>()); + + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted during submission of routine listing tasks for catalog: " + catalogParam); + throw new InterruptedException("Interrupted while listing routines"); + } + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> listCallable = + () -> + findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), currentDatasetId.getDataset(), name)), + (rt) -> rt.getRoutineId().getRoutine(), + procedureNamePattern, + procedureNameRegex, + logger); + listRoutineFutures.add(listRoutinesExecutor.submit(listCallable)); + } + logger.fine( + "Submitted " + + listRoutineFutures.size() + + " routine list tasks for catalog: " + + catalogParam); + + for (Future> listFuture : listRoutineFutures) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted while collecting routine list results for catalog: " + catalogParam); + listRoutineFutures.forEach(f -> f.cancel(true)); + throw new InterruptedException("Interrupted while collecting routine lists"); + } + try { + List listedRoutines = listFuture.get(); + if (listedRoutines != null) { + for (Routine listedRoutine : 
listedRoutines) { + if (listedRoutine != null + && "PROCEDURE".equalsIgnoreCase(listedRoutine.getRoutineType())) { + if (listedRoutine.getRoutineId() != null) { + procedureIdsToGet.add(listedRoutine.getRoutineId()); + } else { + logger.warning( + "Found a procedure type routine with a null ID during listing phase for" + + " catalog: " + + catalogParam); + } + } + } + } + } catch (ExecutionException e) { + logger.warning( + "Error getting routine list result for catalog " + catalogParam + ": " + e.getCause()); + } catch (CancellationException e) { + logger.warning("Routine list task cancelled for catalog: " + catalogParam); + } + } + logger.info( + String.format( + "Found %d procedure IDs to fetch details for in catalog '%s'.", + procedureIdsToGet.size(), catalogParam)); + return procedureIdsToGet; + } + + List fetchFullRoutineDetailsForIds( + List procedureIdsToGet, + ExecutorService getRoutineDetailsExecutor, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + logger.fine( + String.format("Fetching full details for %d procedure IDs.", procedureIdsToGet.size())); + final List> getRoutineFutures = new ArrayList<>(); + final List fullRoutines = Collections.synchronizedList(new ArrayList<>()); + + for (RoutineId procId : procedureIdsToGet) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Interrupted during submission of getRoutine detail tasks."); + throw new InterruptedException("Interrupted while submitting getRoutine tasks"); + } + final RoutineId currentProcId = procId; + Callable getCallable = + () -> { + try { + return bigquery.getRoutine(currentProcId); + } catch (Exception e) { + logger.warning( + "Failed to get full details for routine " + + currentProcId + + ": " + + e.getMessage()); + return null; + } + }; + getRoutineFutures.add(getRoutineDetailsExecutor.submit(getCallable)); + } + logger.fine("Submitted " + getRoutineFutures.size() + " getRoutine detail tasks."); + + for (Future getFuture : getRoutineFutures) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Interrupted while collecting getRoutine detail results."); + getRoutineFutures.forEach(f -> f.cancel(true)); // Cancel remaining + throw new InterruptedException("Interrupted while collecting Routine details"); + } + try { + Routine fullRoutine = getFuture.get(); + if (fullRoutine != null) { + fullRoutines.add(fullRoutine); + } + } catch (ExecutionException e) { + logger.warning("Error processing getRoutine future result: " + e.getCause()); + } catch (CancellationException e) { + logger.warning("getRoutine detail task cancelled."); + } + } + logger.info( + String.format("Successfully fetched full details for %d routines.", fullRoutines.size())); + return fullRoutines; + } + + void submitProcedureArgumentProcessingJobs( + List fullRoutines, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields, + ExecutorService processArgsExecutor, + List> outArgumentProcessingFutures, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + logger.fine( + String.format("Submitting argument processing jobs for %d routines.", fullRoutines.size())); + + for (Routine fullRoutine : fullRoutines) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Interrupted during submission of argument processing tasks."); + throw new InterruptedException("Interrupted while submitting argument processing jobs"); + } + if (fullRoutine != null) { + if ("PROCEDURE".equalsIgnoreCase(fullRoutine.getRoutineType())) { + final Routine finalFullRoutine = 
fullRoutine; + Future processFuture = + processArgsExecutor.submit( + () -> + processProcedureArguments( + finalFullRoutine, columnNameRegex, collectedResults, resultSchemaFields)); + outArgumentProcessingFutures.add(processFuture); + } else { + logger.warning( + "Routine " + + (fullRoutine.getRoutineId() != null + ? fullRoutine.getRoutineId().toString() + : "UNKNOWN_ID") + + " fetched via getRoutine was not of type PROCEDURE (Type: " + + fullRoutine.getRoutineType() + + "). Skipping argument processing."); + } + } + } + logger.fine( + "Finished submitting " + + outArgumentProcessingFutures.size() + + " processProcedureArguments tasks."); + } + + Schema defineGetProcedureColumnsSchema() { + List fields = new ArrayList<>(20); + fields.add( + Field.newBuilder("PROCEDURE_CAT", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("COLUMN_TYPE", StandardSQLTypeName.INT64).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("PRECISION", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("LENGTH", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + fields.add(Field.newBuilder("SCALE", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + fields.add(Field.newBuilder("RADIX", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("COLUMN_DEF", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + void processProcedureArguments( + Routine routine, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields) { + + RoutineId routineId = routine.getRoutineId(); + List arguments; + try { + arguments = routine.getArguments(); + } catch (Exception e) { + LOG.warning( + String.format( + "Could not retrieve arguments list for procedure %s: %s. 
No arguments will be" + + " processed.", + routineId, e.getMessage())); + return; + } + + if (arguments == null || arguments.isEmpty()) { + LOG.fine("Procedure " + routineId + " has no arguments."); + return; + } + + String catalogName = routineId.getProject(); + String schemaName = routineId.getDataset(); + String procedureName = routineId.getRoutine(); + String specificName = procedureName; + + for (int i = 0; i < arguments.size(); i++) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Argument processing task interrupted for " + routineId); + break; + } + + int ordinalPosition = i + 1; + RoutineArgument arg; + String argName; + + try { + arg = arguments.get(i); + argName = arg.getName(); + } catch (Exception listAccessException) { + LOG.warning( + String.format( + "Exception during arguments.get(%d) for Proc: %s. Ordinal: %d. Message: %s." + + " Generating fallback row.", + i, routineId, ordinalPosition, listAccessException.getMessage())); + argName = "arg_retrieval_err_" + ordinalPosition; + arg = null; + } + + // Filter by columnNamePattern, but not by generated fallback name + if (columnNameRegex != null) { + assert argName != null; + if (!argName.startsWith("arg_")) { + if (!columnNameRegex.matcher(argName).matches()) { + continue; + } + } + } + + List values = + createProcedureColumnRow( + catalogName, schemaName, procedureName, specificName, arg, ordinalPosition, argName); + + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + } + } + + List createProcedureColumnRow( + String catalog, + String schemaName, + String procedureName, + String specificName, + @Nullable RoutineArgument argument, + int ordinalPosition, + String columnName) { + + List values = new ArrayList<>(20); + ColumnTypeInfo typeInfo; + + if (argument == null) { + LOG.warning( + String.format( + "Proc: %s, Arg: %s (Pos %d) - RoutineArgument object is null. Defaulting type to" + + " VARCHAR.", + procedureName, columnName, ordinalPosition)); + typeInfo = new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + } else { + try { + StandardSQLDataType argumentDataType = argument.getDataType(); + if (argumentDataType == null) { + LOG.warning( + String.format( + "Proc: %s, Arg: %s (Pos %d) - argument.getDataType() returned null. Defaulting" + + " type to VARCHAR.", + procedureName, columnName, ordinalPosition)); + typeInfo = new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + } else { + typeInfo = + determineTypeInfoFromDataType( + argumentDataType, procedureName, columnName, ordinalPosition); + } + } catch (Exception e) { + LOG.warning( + String.format( + "Proc: %s, Arg: %s (Pos %d) - Unexpected Exception during type processing." + + " Defaulting type to VARCHAR. Error: %s", + procedureName, columnName, ordinalPosition, e.getMessage())); + typeInfo = new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + } + } + + String argumentModeStr = null; + if (argument != null) { + try { + argumentModeStr = argument.getMode(); + } catch (Exception e) { + LOG.warning( + String.format( + "Proc: %s, Arg: %s (Pos %d) - Could not get argument mode. Error: %s", + procedureName, columnName, ordinalPosition, e.getMessage())); + } + } + + values.add(createStringFieldValue(catalog)); // 1. PROCEDURE_CAT + values.add(createStringFieldValue(schemaName)); // 2. PROCEDURE_SCHEM + values.add(createStringFieldValue(procedureName)); // 3. PROCEDURE_NAME + values.add(createStringFieldValue(columnName)); // 4. 
COLUMN_NAME + long columnTypeJdbc = DatabaseMetaData.procedureColumnUnknown; + if ("IN".equalsIgnoreCase(argumentModeStr)) { + columnTypeJdbc = DatabaseMetaData.procedureColumnIn; + } else if ("OUT".equalsIgnoreCase(argumentModeStr)) { + columnTypeJdbc = DatabaseMetaData.procedureColumnOut; + } else if ("INOUT".equalsIgnoreCase(argumentModeStr)) { + columnTypeJdbc = DatabaseMetaData.procedureColumnInOut; + } + values.add(createLongFieldValue(columnTypeJdbc)); // 5. COLUMN_TYPE + values.add(createLongFieldValue((long) typeInfo.jdbcType)); // 6. DATA_TYPE (java.sql.Types) + values.add(createStringFieldValue(typeInfo.typeName)); // 7. TYPE_NAME (DB type name) + values.add( + createLongFieldValue( + typeInfo.columnSize == null ? null : typeInfo.columnSize.longValue())); // 8. PRECISION + values.add( + createNullFieldValue()); // 9. LENGTH (length in bytes - often null for procedure params) + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 10. SCALE + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null ? null : typeInfo.numPrecRadix.longValue())); // 11. RADIX + values.add(createLongFieldValue((long) DatabaseMetaData.procedureNullable)); // 12. NULLABLE + values.add( + createNullFieldValue()); // 13. REMARKS (Can be argument.getDescription() if available and + // needed) + values.add(createNullFieldValue()); // 14. COLUMN_DEF (Default value - typically null) + values.add(createNullFieldValue()); // 15. SQL_DATA_TYPE (reserved) + values.add(createNullFieldValue()); // 16. SQL_DATETIME_SUB (reserved) + values.add(createNullFieldValue()); // 17. CHAR_OCTET_LENGTH (null for non-char/binary) + values.add(createLongFieldValue((long) ordinalPosition)); // 18. ORDINAL_POSITION + values.add(createStringFieldValue("YES")); // 19. IS_NULLABLE (Default to "YES") + values.add(createStringFieldValue(specificName)); // 20. SPECIFIC_NAME + + return values; + } + + ColumnTypeInfo determineTypeInfoFromDataType( + StandardSQLDataType argumentDataType, + String procedureName, + String columnName, + int ordinalPosition) { + + ColumnTypeInfo defaultVarcharTypeInfo = + new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + try { + String typeKind = argumentDataType.getTypeKind(); + if (typeKind != null && !typeKind.isEmpty()) { + if ("ARRAY".equalsIgnoreCase(typeKind)) { + return new ColumnTypeInfo(Types.ARRAY, "ARRAY", null, null, null); + } + StandardSQLTypeName determinedType = StandardSQLTypeName.valueOf(typeKind.toUpperCase()); + return getColumnTypeInfoForSqlType(determinedType); + } + } catch (Exception e) { + LOG.warning( + String.format( + "Proc: %s, Arg: %s (Pos %d) - Caught an unexpected Exception during type" + + " determination. Defaulting type to VARCHAR. 
Error: %s", + procedureName, columnName, ordinalPosition, e.getMessage())); + } + return defaultVarcharTypeInfo; + } + + Comparator defineGetProcedureColumnsComparator(FieldList resultSchemaFields) { + final int PROC_CAT_IDX = resultSchemaFields.getIndex("PROCEDURE_CAT"); + final int PROC_SCHEM_IDX = resultSchemaFields.getIndex("PROCEDURE_SCHEM"); + final int PROC_NAME_IDX = resultSchemaFields.getIndex("PROCEDURE_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + final int COL_NAME_IDX = resultSchemaFields.getIndex("COLUMN_NAME"); + + if (PROC_CAT_IDX < 0 + || PROC_SCHEM_IDX < 0 + || PROC_NAME_IDX < 0 + || SPEC_NAME_IDX < 0 + || COL_NAME_IDX < 0) { + LOG.severe( + "Could not find required columns (PROCEDURE_CAT, SCHEM, NAME, SPECIFIC_NAME, COLUMN_NAME)" + + " in getProcedureColumns schema for sorting. Returning null comparator."); + return null; + } + + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_CAT_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_SCHEM_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, COL_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)); + } + + @Override + public ResultSet getTables( + String catalog, String schemaPattern, String tableNamePattern, String[] types) { + + Tuple effectiveIdentifiers = + determineEffectiveCatalogAndSchema(catalog, schemaPattern); + String effectiveCatalog = effectiveIdentifiers.x(); + String effectiveSchemaPattern = effectiveIdentifiers.y(); + + if ((effectiveCatalog == null || effectiveCatalog.isEmpty()) + || (effectiveSchemaPattern != null && effectiveSchemaPattern.isEmpty()) + || (tableNamePattern != null && tableNamePattern.isEmpty())) { + LOG.warning( + "Returning empty ResultSet as one or more patterns are empty or catalog is null."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getTables called for catalog: %s, schemaPattern: %s, tableNamePattern: %s, types: %s", + effectiveCatalog, effectiveSchemaPattern, tableNamePattern, Arrays.toString(types))); + + final Pattern schemaRegex = compileSqlLikePattern(effectiveSchemaPattern); + final Pattern tableNameRegex = compileSqlLikePattern(tableNamePattern); + final Set requestedTypes = + (types == null || types.length == 0) ? 
+    final Schema resultSchema = defineGetTablesSchema();
+    final FieldList resultSchemaFields = resultSchema.getFields();
+
+    final BlockingQueue<FieldValueList> queue =
+        new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY);
+    final List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+    final String catalogParam = effectiveCatalog;
+    final String schemaParam = effectiveSchemaPattern;
+
+    Runnable tableFetcher =
+        () -> {
+          ExecutorService apiExecutor = null;
+          ExecutorService tableProcessorExecutor = null;
+          final FieldList localResultSchemaFields = resultSchemaFields;
+          final List<Future<List<Table>>> apiFutures = new ArrayList<>();
+          final List<Future<?>> processingFutures = new ArrayList<>();
+
+          try {
+            List<Dataset> datasetsToScan =
+                findMatchingBigQueryObjects(
+                    "Dataset",
+                    () ->
+                        bigquery.listDatasets(
+                            catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)),
+                    (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)),
+                    (ds) -> ds.getDatasetId().getDataset(),
+                    schemaParam,
+                    schemaRegex,
+                    LOG);
+
+            if (datasetsToScan.isEmpty()) {
+              LOG.info("Fetcher thread found no matching datasets. Returning empty resultset.");
+              return;
+            }
+
+            apiExecutor = Executors.newFixedThreadPool(API_EXECUTOR_POOL_SIZE);
+            tableProcessorExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount);
+
+            LOG.fine("Submitting parallel findMatchingTables tasks...");
+            for (Dataset dataset : datasetsToScan) {
+              if (Thread.currentThread().isInterrupted()) {
+                LOG.warning("Table fetcher interrupted during dataset iteration.");
+                break;
+              }
+
+              final DatasetId currentDatasetId = dataset.getDatasetId();
+              Callable<List<Table>> apiCallable =
+                  () ->
+                      findMatchingBigQueryObjects(
+                          "Table",
+                          () ->
+                              bigquery.listTables(
+                                  currentDatasetId, TableListOption.pageSize(DEFAULT_PAGE_SIZE)),
+                          (name) ->
+                              bigquery.getTable(
+                                  TableId.of(
+                                      currentDatasetId.getProject(),
+                                      currentDatasetId.getDataset(),
+                                      name)),
+                          (tbl) -> tbl.getTableId().getTable(),
+                          tableNamePattern,
+                          tableNameRegex,
+                          LOG);
+              Future<List<Table>> apiFuture = apiExecutor.submit(apiCallable);
+              apiFutures.add(apiFuture);
+            }
+            LOG.fine("Finished submitting " + apiFutures.size() + " findMatchingTables tasks.");
+            apiExecutor.shutdown();
+
+            LOG.fine("Processing results from findMatchingTables tasks...");
+            for (Future<List<Table>> apiFuture : apiFutures) {
+              if (Thread.currentThread().isInterrupted()) {
+                LOG.warning("Table fetcher interrupted while processing API futures.");
+                break;
+              }
+              try {
+                List<Table>
tablesResult = apiFuture.get(); + if (tablesResult != null) { + for (Table table : tablesResult) { + if (Thread.currentThread().isInterrupted()) break; + + final Table currentTable = table; + Future processFuture = + tableProcessorExecutor.submit( + () -> + processTableInfo( + currentTable, + requestedTypes, + collectedResults, + localResultSchemaFields)); + processingFutures.add(processFuture); + } + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Fetcher thread interrupted while waiting for API future result."); + break; + } catch (ExecutionException e) { + LOG.warning( + "Error executing findMatchingTables task: " + + e.getMessage() + + ". Cause: " + + e.getCause()); + } catch (CancellationException e) { + LOG.warning("A findMatchingTables task was cancelled."); + } + } + + LOG.fine( + "Finished submitting " + processingFutures.size() + " processTableInfo tasks."); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher interrupted before waiting for processing tasks; cancelling remaining."); + processingFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine("Waiting for processTableInfo tasks to complete..."); + waitForTasksCompletion(processingFutures); + LOG.fine("All processTableInfo tasks completed."); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetTablesComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getTables", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in table fetcher runnable: " + t.getMessage()); + apiFutures.forEach(f -> f.cancel(true)); + processingFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, localResultSchemaFields); + shutdownExecutor(apiExecutor); + shutdownExecutor(tableProcessorExecutor); + LOG.info("Table fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(tableFetcher, "getTables-fetcher-" + effectiveCatalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getTables"); + return resultSet; + } + + Schema defineGetTablesSchema() { + List fields = new ArrayList<>(10); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TABLE_TYPE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SELF_REFERENCING_COL_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + 
Field.newBuilder("REF_GENERATION", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + void processTableInfo( + Table table, + Set requestedTypes, + List collectedResults, + FieldList resultSchemaFields) { + + TableId tableId = table.getTableId(); + LOG.fine("Processing table info for: " + tableId); + + try { + String catalogName = tableId.getProject(); + String schemaName = tableId.getDataset(); + String tableName = tableId.getTable(); + TableDefinition definition = table.getDefinition(); + String bqTableType = definition.getType().toString(); + String remarks = table.getDescription(); + + if (requestedTypes != null && !requestedTypes.contains(bqTableType)) { + LOG.finer( + String.format( + "Skipping table %s as its type '%s' is not in the requested types %s", + tableId, bqTableType, requestedTypes)); + return; + } + + List values = new ArrayList<>(resultSchemaFields.size()); + values.add(createStringFieldValue(catalogName)); // 1. TABLE_CAT + values.add(createStringFieldValue(schemaName)); // 2. TABLE_SCHEM + values.add(createStringFieldValue(tableName)); // 3. TABLE_NAME + values.add(createStringFieldValue(bqTableType)); // 4. TABLE_TYPE + values.add(createStringFieldValue(remarks)); // 5. REMARKS + values.add(createNullFieldValue()); // 6. TYPE_CAT (always null) + values.add(createNullFieldValue()); // 7. TYPE_SCHEM (always null) + values.add(createNullFieldValue()); // 8. TYPE_NAME (always null) + values.add(createNullFieldValue()); // 9. SELF_REFERENCING_COL_NAME (always null) + values.add(createNullFieldValue()); // 10. REF_GENERATION (always null) + + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + + LOG.fine("Processed and added table info row for: " + tableId); + } catch (Exception e) { + LOG.warning( + String.format( + "Error processing table info for %s: %s. Skipping this table.", + tableId, e.getMessage())); + } + } + + Comparator defineGetTablesComparator(FieldList resultSchemaFields) { + final int TABLE_TYPE_IDX = resultSchemaFields.getIndex("TABLE_TYPE"); + final int TABLE_CAT_IDX = resultSchemaFields.getIndex("TABLE_CAT"); + final int TABLE_SCHEM_IDX = resultSchemaFields.getIndex("TABLE_SCHEM"); + final int TABLE_NAME_IDX = resultSchemaFields.getIndex("TABLE_NAME"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_TYPE_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_NAME_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public ResultSet getSchemas() { + LOG.info("getSchemas() called"); + + return getSchemas(null, null); + } + + @Override + public ResultSet getCatalogs() { + LOG.info("getCatalogs() called"); + + final List accessibleCatalogs = getAccessibleCatalogNames(); + final Schema catalogsSchema = defineGetCatalogsSchema(); + final FieldList schemaFields = catalogsSchema.getFields(); + final List catalogRows = + prepareGetCatalogsRows(schemaFields, accessibleCatalogs); + + final BlockingQueue queue = + new LinkedBlockingQueue<>(catalogRows.isEmpty() ? 
1 : catalogRows.size() + 1); + + populateQueue(catalogRows, queue, schemaFields); + signalEndOfData(queue, schemaFields); + + return BigQueryJsonResultSet.of( + catalogsSchema, catalogRows.size(), queue, this.statement, new Thread[0]); + } + + Schema defineGetCatalogsSchema() { + return Schema.of( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build()); + } + + List prepareGetCatalogsRows( + FieldList schemaFields, List accessibleCatalogs) { + List catalogRows = new ArrayList<>(); + for (String catalogName : accessibleCatalogs) { + FieldValue fieldValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, catalogName); + catalogRows.add(FieldValueList.of(Collections.singletonList(fieldValue), schemaFields)); + } + return catalogRows; + } + + @Override + public ResultSet getTableTypes() { + LOG.info("getTableTypes() called"); + + final Schema tableTypesSchema = defineGetTableTypesSchema(); + final List tableTypeRows = prepareGetTableTypesRows(tableTypesSchema); + + BlockingQueue queue = + new LinkedBlockingQueue<>(tableTypeRows.size() + 1); + + populateQueue(tableTypeRows, queue, tableTypesSchema.getFields()); + signalEndOfData(queue, tableTypesSchema.getFields()); + + return BigQueryJsonResultSet.of( + tableTypesSchema, tableTypeRows.size(), queue, this.statement, new Thread[0]); + } + + static Schema defineGetTableTypesSchema() { + return Schema.of( + Field.newBuilder("TABLE_TYPE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + } + + static List prepareGetTableTypesRows(Schema schema) { + final String[] tableTypes = {"EXTERNAL", "MATERIALIZED VIEW", "SNAPSHOT", "TABLE", "VIEW"}; + List rows = new ArrayList<>(tableTypes.length); + FieldList schemaFields = schema.getFields(); + + for (String typeName : tableTypes) { + FieldValue fieldValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, typeName); + rows.add(FieldValueList.of(Collections.singletonList(fieldValue), schemaFields)); + } + return rows; + } + + @Override + public ResultSet getColumns( + String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) { + + Tuple effectiveIdentifiers = + determineEffectiveCatalogAndSchema(catalog, schemaPattern); + String effectiveCatalog = effectiveIdentifiers.x(); + String effectiveSchemaPattern = effectiveIdentifiers.y(); + + if ((effectiveCatalog == null || effectiveCatalog.isEmpty()) + || (effectiveSchemaPattern != null && effectiveSchemaPattern.isEmpty()) + || (tableNamePattern != null && tableNamePattern.isEmpty()) + || (columnNamePattern != null && columnNamePattern.isEmpty())) { + LOG.warning( + "Returning empty ResultSet as one or more patterns are empty or catalog is null."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getColumns called for catalog: %s, schemaPattern: %s, tableNamePattern: %s," + + " columnNamePattern: %s", + effectiveCatalog, effectiveSchemaPattern, tableNamePattern, columnNamePattern)); + + Pattern schemaRegex = compileSqlLikePattern(effectiveSchemaPattern); + Pattern tableNameRegex = compileSqlLikePattern(tableNamePattern); + Pattern columnNameRegex = compileSqlLikePattern(columnNamePattern); + + final Schema resultSchema = defineGetColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final String catalogParam = effectiveCatalog; + final String schemaParam = 
effectiveSchemaPattern;
+
+    Runnable columnFetcher =
+        () -> {
+          ExecutorService columnExecutor = null;
+          final List<Future<?>> taskFutures = new ArrayList<>();
+          final FieldList localResultSchemaFields = resultSchemaFields;
+
+          try {
+            List<Dataset> datasetsToScan =
+                findMatchingBigQueryObjects(
+                    "Dataset",
+                    () ->
+                        bigquery.listDatasets(
+                            catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)),
+                    (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)),
+                    (ds) -> ds.getDatasetId().getDataset(),
+                    schemaParam,
+                    schemaRegex,
+                    LOG);
+
+            if (datasetsToScan.isEmpty()) {
+              LOG.info("Fetcher thread found no matching datasets. Returning empty resultset.");
+              return;
+            }
+
+            columnExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount);
+
+            for (Dataset dataset : datasetsToScan) {
+              if (Thread.currentThread().isInterrupted()) {
+                LOG.warning("Fetcher interrupted during dataset iteration.");
+                break;
+              }
+
+              DatasetId datasetId = dataset.getDatasetId();
+              LOG.info("Processing dataset: " + datasetId.getDataset());
+
+              List<Table>
tablesToScan = + findMatchingBigQueryObjects( + "Table", + () -> + bigquery.listTables( + datasetId, TableListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getTable( + TableId.of(datasetId.getProject(), datasetId.getDataset(), name)), + (tbl) -> tbl.getTableId().getTable(), + tableNamePattern, + tableNameRegex, + LOG); + + for (Table table : tablesToScan) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher interrupted during table iteration for dataset " + + datasetId.getDataset()); + break; + } + + TableId tableId = table.getTableId(); + LOG.fine("Submitting task for table: " + tableId); + final Table finalTable = table; + Future future = + columnExecutor.submit( + () -> + processTableColumns( + finalTable, + columnNameRegex, + collectedResults, + localResultSchemaFields)); + taskFutures.add(future); + } + if (Thread.currentThread().isInterrupted()) break; + } + + waitForTasksCompletion(taskFutures); + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetColumnsComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getColumns", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in column fetcher runnable: " + t.getMessage()); + taskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, localResultSchemaFields); + shutdownExecutor(columnExecutor); + LOG.info("Column fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(columnFetcher, "getColumns-fetcher-" + effectiveCatalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of(resultSchema, -1, queue, null, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getColumns"); + return resultSet; + } + + private void processTableColumns( + Table table, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields) { + TableId tableId = table.getTableId(); + LOG.fine("Processing columns for table: " + tableId); + TableDefinition definition = table.getDefinition(); + Schema tableSchema = (definition != null) ? definition.getSchema() : null; + + try { + if (tableSchema == null) { + LOG.fine( + "Schema not included in table object for " + + tableId + + ", fetching full table details..."); + Table fullTable = bigquery.getTable(tableId); + if (fullTable != null) { + definition = fullTable.getDefinition(); + tableSchema = (definition != null) ? definition.getSchema() : null; + } else { + LOG.warning( + "Table " + tableId + " not found when fetching full details for columns. Skipping."); + return; + } + } + + if (tableSchema == null + || tableSchema.getFields() == null + || tableSchema.getFields().isEmpty()) { + LOG.warning( + String.format( + "Schema not found or fields are null for table %s (Type: %s). 
Skipping columns.", + tableId, definition.getType())); + return; + } + + FieldList fields = tableSchema.getFields(); + String catalogName = tableId.getProject(); + String schemaName = tableId.getDataset(); + String tableName = tableId.getTable(); + + for (int i = 0; i < fields.size(); i++) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Task for table " + tableId + " interrupted during column iteration."); + break; + } + Field field = fields.get(i); + String currentColumnName = field.getName(); + if (columnNameRegex != null && !columnNameRegex.matcher(currentColumnName).matches()) + continue; + List values = createColumnRow(catalogName, schemaName, tableName, field, i + 1); + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + } + LOG.fine("Finished processing columns for table: " + tableId); + } catch (BigQueryException e) { + LOG.warning( + String.format( + "BigQueryException processing table %s: %s (Code: %d)", + tableId, e.getMessage(), e.getCode())); + } catch (Exception e) { + LOG.severe( + String.format("Unexpected error processing table %s: %s", tableId, e.getMessage())); + } + } + + private Schema defineGetColumnsSchema() { + List fields = new ArrayList<>(24); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 1 + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 2 + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 3 + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 4 + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); // 5 + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 6 + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 7 + fields.add( + Field.newBuilder("BUFFER_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 8 + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 9 + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 10 + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); // 11 + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 12 + fields.add( + Field.newBuilder("COLUMN_DEF", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 13 + fields.add( + Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 14 + fields.add( + Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 15 + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 16 + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); // 17 + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 18 + fields.add( + 
Field.newBuilder("SCOPE_CATALOG", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 19 + fields.add( + Field.newBuilder("SCOPE_SCHEMA", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 20 + fields.add( + Field.newBuilder("SCOPE_TABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 21 + fields.add( + Field.newBuilder("SOURCE_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 22 + fields.add( + Field.newBuilder("IS_AUTOINCREMENT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 23 + fields.add( + Field.newBuilder("IS_GENERATEDCOLUMN", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 24 + return Schema.of(fields); + } + + List createColumnRow( + String catalog, String schemaName, String tableName, Field field, int ordinalPosition) { + List values = new ArrayList<>(24); + Field.Mode mode = (field.getMode() == null) ? Field.Mode.NULLABLE : field.getMode(); + ColumnTypeInfo typeInfo = mapBigQueryTypeToJdbc(field); + + values.add(createStringFieldValue(catalog)); // 1. TABLE_CAT + values.add(createStringFieldValue(schemaName)); // 2. TABLE_SCHEM + values.add(createStringFieldValue(tableName)); // 3. TABLE_NAME + values.add(createStringFieldValue(field.getName())); // 4. COLUMN_NAME + values.add(createLongFieldValue((long) typeInfo.jdbcType)); // 5. DATA_TYPE + values.add(createStringFieldValue(typeInfo.typeName)); // 6. TYPE_NAME + values.add( + createLongFieldValue( + typeInfo.columnSize == null + ? null + : typeInfo.columnSize.longValue())); // 7. COLUMN_SIZE + values.add(createNullFieldValue()); // 8. BUFFER_LENGTH + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 9. DECIMAL_DIGITS + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null + ? null + : typeInfo.numPrecRadix.longValue())); // 10. NUM_PREC_RADIX + int nullable = + (mode == Field.Mode.REQUIRED) + ? DatabaseMetaData.columnNoNulls + : DatabaseMetaData.columnNullable; + values.add(createLongFieldValue((long) nullable)); // 11. NULLABLE + values.add(createStringFieldValue(field.getDescription())); // 12. REMARKS + values.add(createNullFieldValue()); // 13. COLUMN_DEF + values.add(createNullFieldValue()); // 14. SQL_DATA_TYPE + values.add(createNullFieldValue()); // 15. SQL_DATETIME_SUB + values.add(createNullFieldValue()); // 16. CHAR_OCTET_LENGTH + values.add(createLongFieldValue((long) ordinalPosition)); // 17. ORDINAL_POSITION + String isNullable = ""; + switch (mode) { + case REQUIRED: + isNullable = "NO"; + break; + case NULLABLE: + case REPEATED: + isNullable = "YES"; + break; + } + values.add(createStringFieldValue(isNullable)); // 18. IS_NULLABLE + values.add(createNullFieldValue()); // 19. SCOPE_CATALOG + values.add(createNullFieldValue()); // 20. SCOPE_SCHEMA + values.add(createNullFieldValue()); // 21. SCOPE_TABLE + values.add(createNullFieldValue()); // 22. SOURCE_DATA_TYPE + values.add(createStringFieldValue("NO")); // 23. IS_AUTOINCREMENT + values.add(createStringFieldValue("NO")); // 24. 
IS_GENERATEDCOLUMN + + return values; + } + + static class ColumnTypeInfo { + final int jdbcType; + final String typeName; + final Integer columnSize; + final Integer decimalDigits; + final Integer numPrecRadix; + + ColumnTypeInfo( + int jdbcType, + String typeName, + Integer columnSize, + Integer decimalDigits, + Integer numPrecRadix) { + this.jdbcType = jdbcType; + this.typeName = typeName; + this.columnSize = columnSize; + this.decimalDigits = decimalDigits; + this.numPrecRadix = numPrecRadix; + } + } + + ColumnTypeInfo mapBigQueryTypeToJdbc(Field field) { + Mode mode = (field.getMode() == null) ? Mode.NULLABLE : field.getMode(); + if (mode == Mode.REPEATED) { + return new ColumnTypeInfo(Types.ARRAY, "ARRAY", null, null, null); + } + + StandardSQLTypeName bqType = null; + if (field.getType() != null && field.getType().getStandardType() != null) { + bqType = field.getType().getStandardType(); + } + return getColumnTypeInfoForSqlType(bqType); + } + + private Comparator defineGetColumnsComparator(FieldList resultSchemaFields) { + final int TABLE_CAT_IDX = resultSchemaFields.getIndex("TABLE_CAT"); + final int TABLE_SCHEM_IDX = resultSchemaFields.getIndex("TABLE_SCHEM"); + final int TABLE_NAME_IDX = resultSchemaFields.getIndex("TABLE_NAME"); + final int ORDINAL_POS_IDX = resultSchemaFields.getIndex("ORDINAL_POSITION"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_NAME_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getLongValueOrNull(fvl, ORDINAL_POS_IDX), + Comparator.nullsFirst(Long::compareTo)); + } + + @Override + public ResultSet getColumnPrivileges( + String catalog, String schema, String table, String columnNamePattern) { + LOG.info( + String.format( + "getColumnPrivileges called for catalog: %s, schema: %s, table: %s, columnNamePattern:" + + " %s. BigQuery IAM model differs from SQL privileges; returning empty ResultSet.", + catalog, schema, table, columnNamePattern)); + + final Schema resultSchema = defineGetColumnPrivilegesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetColumnPrivilegesSchema() { + List fields = defineBasePrivilegeFields(); + + Field columnNameField = + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build(); + fields.add(3, columnNameField); + + return Schema.of(fields); + } + + @Override + public ResultSet getTablePrivileges( + String catalog, String schemaPattern, String tableNamePattern) { + LOG.info( + String.format( + "getTablePrivileges called for catalog: %s, schemaPattern: %s, tableNamePattern: %s. 
" + + "BigQuery IAM model differs from SQL privileges; returning empty ResultSet.", + catalog, schemaPattern, tableNamePattern)); + + final Schema resultSchema = defineGetTablePrivilegesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetTablePrivilegesSchema() { + List fields = defineBasePrivilegeFields(); + return Schema.of(fields); + } + + @Override + public ResultSet getBestRowIdentifier( + String catalog, String schema, String table, int scope, boolean nullable) { + LOG.info( + String.format( + "getBestRowIdentifier called for catalog: %s, schema: %s, table: %s, scope: %d," + + " nullable: %s. BigQuery does not support best row identifiers; returning empty" + + " ResultSet.", + catalog, schema, table, scope, nullable)); + + final Schema resultSchema = defineGetBestRowIdentifierSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetBestRowIdentifierSchema() { + List fields = new ArrayList<>(8); + fields.add( + Field.newBuilder("SCOPE", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("BUFFER_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PSEUDO_COLUMN", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + @Override + public ResultSet getVersionColumns(String catalog, String schema, String table) { + LOG.info( + String.format( + "getVersionColumns called for catalog: %s, schema: %s, table: %s. 
" + + "Automatic version columns not supported by BigQuery; returning empty ResultSet.", + catalog, schema, table)); + + final Schema resultSchema = defineGetVersionColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetVersionColumnsSchema() { + List fields = new ArrayList<>(8); + fields.add( + Field.newBuilder("SCOPE", StandardSQLTypeName.INT64).setMode(Field.Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("BUFFER_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PSEUDO_COLUMN", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + @Override + public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException { + String sql = readSqlFromFile(GET_PRIMARY_KEYS_SQL); + try { + String formattedSql = replaceSqlParameters(sql, catalog, schema, table); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getImportedKeys(String catalog, String schema, String table) + throws SQLException { + String sql = readSqlFromFile(GET_IMPORTED_KEYS_SQL); + try { + String formattedSql = replaceSqlParameters(sql, catalog, schema, table); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getExportedKeys(String catalog, String schema, String table) + throws SQLException { + String sql = readSqlFromFile(GET_EXPORTED_KEYS_SQL); + try { + String formattedSql = replaceSqlParameters(sql, catalog, schema, table); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getCrossReference( + String parentCatalog, + String parentSchema, + String parentTable, + String foreignCatalog, + String foreignSchema, + String foreignTable) + throws SQLException { + String sql = readSqlFromFile(GET_CROSS_REFERENCE_SQL); + try { + String formattedSql = + replaceSqlParameters( + sql, + parentCatalog, + parentSchema, + parentTable, + foreignCatalog, + foreignSchema, + foreignTable); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getTypeInfo() { + LOG.info("getTypeInfo() called"); + + final Schema typeInfoSchema = defineGetTypeInfoSchema(); + final FieldList schemaFields = typeInfoSchema.getFields(); + final List typeInfoRows = prepareGetTypeInfoRows(schemaFields); + + final Comparator comparator = 
defineGetTypeInfoComparator(schemaFields); + sortResults(typeInfoRows, comparator, "getTypeInfo", LOG); + final BlockingQueue queue = + new LinkedBlockingQueue<>(typeInfoRows.size() + 1); + + populateQueue(typeInfoRows, queue, schemaFields); + signalEndOfData(queue, schemaFields); + return BigQueryJsonResultSet.of( + typeInfoSchema, typeInfoRows.size(), queue, this.statement, new Thread[0]); + } + + Schema defineGetTypeInfoSchema() { + List fields = new ArrayList<>(18); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 1 + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 2 + fields.add( + Field.newBuilder("PRECISION", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 3 + fields.add( + Field.newBuilder("LITERAL_PREFIX", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 4 + fields.add( + Field.newBuilder("LITERAL_SUFFIX", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 5 + fields.add( + Field.newBuilder("CREATE_PARAMS", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 6 + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 7 + fields.add( + Field.newBuilder("CASE_SENSITIVE", StandardSQLTypeName.BOOL) + .setMode(Mode.REQUIRED) + .build()); // 8 + fields.add( + Field.newBuilder("SEARCHABLE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 9 + fields.add( + Field.newBuilder("UNSIGNED_ATTRIBUTE", StandardSQLTypeName.BOOL) + .setMode(Mode.NULLABLE) + .build()); // 10 + fields.add( + Field.newBuilder("FIXED_PREC_SCALE", StandardSQLTypeName.BOOL) + .setMode(Mode.REQUIRED) + .build()); // 11 + fields.add( + Field.newBuilder("AUTO_INCREMENT", StandardSQLTypeName.BOOL) + .setMode(Mode.REQUIRED) + .build()); // 12 + fields.add( + Field.newBuilder("LOCAL_TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 13 + fields.add( + Field.newBuilder("MINIMUM_SCALE", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 14 + fields.add( + Field.newBuilder("MAXIMUM_SCALE", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 15 + fields.add( + Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 16 + fields.add( + Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 17 + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 18 + return Schema.of(fields); + } + + List prepareGetTypeInfoRows(FieldList schemaFields) { + List rows = new ArrayList<>(); + + Function createRow = + (data) -> { + List values = new ArrayList<>(18); + values.add(createStringFieldValue(data.typeName)); // 1. TYPE_NAME + values.add(createLongFieldValue((long) data.jdbcType)); // 2. DATA_TYPE + values.add(createLongFieldValue(data.precision)); // 3. PRECISION + values.add(createStringFieldValue(data.literalPrefix)); // 4. LITERAL_PREFIX + values.add(createStringFieldValue(data.literalSuffix)); // 5. LITERAL_SUFFIX + values.add(createStringFieldValue(data.createParams)); // 6. CREATE_PARAMS + values.add(createLongFieldValue((long) data.nullable)); // 7. NULLABLE + values.add(createBooleanFieldValue(data.caseSensitive)); // 8. CASE_SENSITIVE + values.add(createLongFieldValue((long) data.searchable)); // 9. 
SEARCHABLE + values.add(createBooleanFieldValue(data.unsignedAttribute)); // 10. UNSIGNED_ATTRIBUTE + values.add(createBooleanFieldValue(data.fixedPrecScale)); // 11. FIXED_PREC_SCALE + values.add(createBooleanFieldValue(data.autoIncrement)); // 12. AUTO_INCREMENT + values.add(createStringFieldValue(data.localTypeName)); // 13. LOCAL_TYPE_NAME + values.add(createLongFieldValue(data.minimumScale)); // 14. MINIMUM_SCALE + values.add(createLongFieldValue(data.maximumScale)); // 15. MAXIMUM_SCALE + values.add(createNullFieldValue()); // 16. SQL_DATA_TYPE + values.add(createNullFieldValue()); // 17. SQL_DATETIME_SUB + values.add(createLongFieldValue(data.numPrecRadix)); // 18. NUM_PREC_RADIX + return FieldValueList.of(values, schemaFields); + }; + + rows.add( + createRow.apply( + new TypeInfoRowData( + "INT64", + Types.BIGINT, + 19L, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "INT64", + 0L, + 0L, + 10L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "BOOL", + Types.BOOLEAN, + 1L, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typePredBasic, + false, + false, + false, + "BOOL", + 0L, + 0L, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "FLOAT64", + Types.DOUBLE, + 15L, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "FLOAT64", + null, + null, + 2L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "NUMERIC", + Types.NUMERIC, + 38L, + null, + null, + "PRECISION,SCALE", + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + true, + false, + "NUMERIC", + 9L, + 9L, + 10L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "BIGNUMERIC", + Types.NUMERIC, + 77L, + null, + null, + "PRECISION,SCALE", + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + true, + false, + "BIGNUMERIC", + 38L, + 38L, + 10L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "STRING", + Types.NVARCHAR, + null, + "'", + "'", + "LENGTH", + DatabaseMetaData.typeNullable, + true, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "STRING", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "TIMESTAMP", + Types.TIMESTAMP, + 29L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "TIMESTAMP", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "DATE", + Types.DATE, + 10L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "DATE", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "TIME", + Types.TIME, + 15L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "TIME", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "DATETIME", + Types.TIMESTAMP, + 29L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "DATETIME", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "GEOGRAPHY", + Types.OTHER, + null, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + 
"GEOGRAPHY", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "JSON", + Types.OTHER, + null, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "JSON", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "INTERVAL", + Types.OTHER, + null, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "INTERVAL", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "BYTES", + Types.VARBINARY, + null, + "0x", + null, + "LENGTH", + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "BYTES", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "STRUCT", + Types.STRUCT, + null, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typePredNone, + false, + false, + false, + "STRUCT", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "ARRAY", + Types.ARRAY, + null, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typePredNone, + false, + false, + false, + "ARRAY", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "RANGE", + Types.OTHER, + null, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "RANGE", + null, + null, + null))); + + return rows; + } + + Comparator defineGetTypeInfoComparator(FieldList schemaFields) { + final int DATA_TYPE_IDX = schemaFields.getIndex("DATA_TYPE"); + if (DATA_TYPE_IDX < 0) { + LOG.severe( + "Could not find DATA_TYPE column in getTypeInfo schema for sorting. Returning null" + + " comparator."); + return null; + } + Comparator comparator = + Comparator.comparing( + (FieldValueList fvl) -> getLongValueOrNull(fvl, DATA_TYPE_IDX), + Comparator.nullsFirst(Long::compareTo)); + return comparator; + } + + @Override + public ResultSet getIndexInfo( + String catalog, String schema, String table, boolean unique, boolean approximate) { + LOG.info( + String.format( + "getIndexInfo called for catalog: %s, schema: %s, table: %s, unique: %s, approximate:" + + " %s. 
Traditional indexes not supported by BigQuery; returning empty ResultSet.",
+            catalog, schema, table, unique, approximate));
+
+    final Schema resultSchema = defineGetIndexInfoSchema();
+    final FieldList resultSchemaFields = resultSchema.getFields();
+    final BlockingQueue<FieldValueList> queue = new LinkedBlockingQueue<>(1);
+    signalEndOfData(queue, resultSchemaFields);
+
+    return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null);
+  }
+
+  Schema defineGetIndexInfoSchema() {
+    List<Field> fields = new ArrayList<>(13);
+    fields.add(
+        Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .build());
+    fields.add(
+        Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .build());
+    fields.add(
+        Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.REQUIRED)
+            .build());
+    fields.add(
+        Field.newBuilder("NON_UNIQUE", StandardSQLTypeName.BOOL)
+            .setMode(Field.Mode.REQUIRED)
+            .build());
+    fields.add(
+        Field.newBuilder("INDEX_QUALIFIER", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .build());
+    fields.add(
+        Field.newBuilder("INDEX_NAME", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .build());
+    fields.add(
+        Field.newBuilder("TYPE", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build());
+    fields.add(
+        Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64)
+            .setMode(Field.Mode.REQUIRED)
+            .build());
+    fields.add(
+        Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .build());
+    fields.add(
+        Field.newBuilder("ASC_OR_DESC", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .build());
+    fields.add(
+        Field.newBuilder("CARDINALITY", StandardSQLTypeName.INT64)
+            .setMode(Field.Mode.NULLABLE)
+            .build());
+    fields.add(
+        Field.newBuilder("PAGES", StandardSQLTypeName.INT64).setMode(Field.Mode.NULLABLE).build());
+    fields.add(
+        Field.newBuilder("FILTER_CONDITION", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .build());
+    return Schema.of(fields);
+  }
+
+  @Override
+  public boolean supportsResultSetType(int type) {
+    // BigQuery primarily supports forward-only result sets.
+    return type == ResultSet.TYPE_FORWARD_ONLY;
+  }
+
+  @Override
+  public boolean supportsResultSetConcurrency(int type, int concurrency) {
+    // BigQuery primarily supports forward-only, read-only result sets.
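+    // Illustrative check from client code (a sketch, not part of this patch): callers should
+    // verify support before requesting anything other than a forward-only, read-only cursor.
+    //
+    //   DatabaseMetaData md = connection.getMetaData();
+    //   if (md.supportsResultSetConcurrency(
+    //       ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
+    //     Statement stmt =
+    //         connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    //   }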
+ return type == ResultSet.TYPE_FORWARD_ONLY && concurrency == ResultSet.CONCUR_READ_ONLY; + } + + @Override + public boolean ownUpdatesAreVisible(int type) { + return false; + } + + @Override + public boolean ownDeletesAreVisible(int type) { + return false; + } + + @Override + public boolean ownInsertsAreVisible(int type) { + return false; + } + + @Override + public boolean othersUpdatesAreVisible(int type) { + return false; + } + + @Override + public boolean othersDeletesAreVisible(int type) { + return false; + } + + @Override + public boolean othersInsertsAreVisible(int type) { + return false; + } + + @Override + public boolean updatesAreDetected(int type) { + return false; + } + + @Override + public boolean deletesAreDetected(int type) { + return false; + } + + @Override + public boolean insertsAreDetected(int type) { + return false; + } + + @Override + public boolean supportsBatchUpdates() { + return false; + } + + @Override + public ResultSet getUDTs( + String catalog, String schemaPattern, String typeNamePattern, int[] types) { + LOG.info( + String.format( + "getUDTs called for catalog: %s, schemaPattern: %s, typeNamePattern: %s, types: %s. " + + "Feature not supported by BigQuery; returning empty ResultSet.", + catalog, + schemaPattern, + typeNamePattern, + (types == null ? "null" : Arrays.toString(types)))); + + final Schema resultSchema = defineGetUDTsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetUDTsSchema() { + List fields = new ArrayList<>(7); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("CLASS_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("BASE_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + @Override + public Connection getConnection() { + return connection; + } + + @Override + public boolean supportsSavepoints() { + return false; + } + + @Override + public boolean supportsNamedParameters() { + return false; + } + + @Override + public boolean supportsMultipleOpenResults() { + return false; + } + + @Override + public boolean supportsGetGeneratedKeys() { + return false; + } + + @Override + public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) { + LOG.info( + String.format( + "getSuperTables called for catalog: %s, schemaPattern: %s, tableNamePattern: %s. 
" + + "BigQuery does not support super tables; returning empty ResultSet.", + catalog, schemaPattern, tableNamePattern)); + + final Schema resultSchema = defineGetSuperTablesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetSuperTablesSchema() { + List fields = new ArrayList<>(4); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 1. TABLE_CAT + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 2. TABLE_SCHEM + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 3. TABLE_NAME + fields.add( + Field.newBuilder("SUPERTABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 4. SUPERTABLE_NAME + return Schema.of(fields); + } + + @Override + public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) { + LOG.info( + String.format( + "getSuperTypes called for catalog: %s, schemaPattern: %s, typeNamePattern: %s. BigQuery" + + " does not support user-defined type hierarchies; returning empty ResultSet.", + catalog, schemaPattern, typeNamePattern)); + + final Schema resultSchema = defineGetSuperTypesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetSuperTypesSchema() { + List fields = new ArrayList<>(6); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 1. TYPE_CAT + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 2. TYPE_SCHEM + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 3. TYPE_NAME + fields.add( + Field.newBuilder("SUPERTYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 4. SUPERTYPE_CAT + fields.add( + Field.newBuilder("SUPERTYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 5. SUPERTYPE_SCHEM + fields.add( + Field.newBuilder("SUPERTYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 6. SUPERTYPE_NAME + return Schema.of(fields); + } + + @Override + public ResultSet getAttributes( + String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) { + LOG.info( + String.format( + "getAttributes called for catalog: %s, schemaPattern: %s, typeNamePattern: %s," + + " attributeNamePattern: %s. 
Feature not supported by BigQuery; returning empty" + + " ResultSet.", + catalog, schemaPattern, typeNamePattern, attributeNamePattern)); + + final Schema resultSchema = defineGetAttributesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetAttributesSchema() { + List fields = new ArrayList<>(21); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("ATTR_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("ATTR_TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("ATTR_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ATTR_DEF", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("SCOPE_CATALOG", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SCOPE_SCHEMA", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SCOPE_TABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SOURCE_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + @Override + public boolean supportsResultSetHoldability(int holdability) { + if (holdability == ResultSet.CLOSE_CURSORS_AT_COMMIT) { + return true; + } + return false; + } + + @Override + public int getResultSetHoldability() { + return ResultSet.CLOSE_CURSORS_AT_COMMIT; + } + + @Override + // Obtained from java libraries pom + // https://github.com/googleapis/java-bigquery/blob/main/pom.xml + public int getDatabaseMajorVersion() { + 
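+    // Illustrative usage (a sketch, not part of this patch): clients can branch on the
+    // reported server version, e.g.
+    //
+    //   DatabaseMetaData md = connection.getMetaData();
+    //   String version = md.getDatabaseMajorVersion() + "." + md.getDatabaseMinorVersion();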
return 2; + } + + @Override + public int getDatabaseMinorVersion() { + return 0; + } + + @Override + public int getJDBCMajorVersion() { + return 4; + } + + @Override + public int getJDBCMinorVersion() { + return 2; + } + + @Override + public int getSQLStateType() { + return DatabaseMetaData.sqlStateSQL; + } + + @Override + public boolean locatorsUpdateCopy() { + return false; + } + + @Override + public boolean supportsStatementPooling() { + return false; + } + + @Override + public RowIdLifetime getRowIdLifetime() { + return null; + } + + @Override + public ResultSet getSchemas(String catalog, String schemaPattern) { + if ((catalog != null && catalog.isEmpty()) + || (schemaPattern != null && schemaPattern.isEmpty())) { + LOG.warning("Returning empty ResultSet as catalog or schemaPattern is an empty string."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getSchemas called for catalog: %s, schemaPattern: %s", catalog, schemaPattern)); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Schema resultSchema = defineGetSchemasSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final String catalogParam = catalog; + + Runnable schemaFetcher = + () -> { + final FieldList localResultSchemaFields = resultSchemaFields; + List projectsToScanList = new ArrayList<>(); + + if (catalogParam != null) { + projectsToScanList.add(catalogParam); + } else { + projectsToScanList.addAll(getAccessibleCatalogNames()); + } + + if (projectsToScanList.isEmpty()) { + LOG.info( + "No valid projects to scan (primary, specified, or additional). 
Returning empty" + + " resultset."); + return; + } + + try { + for (String currentProjectToScan : projectsToScanList) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Schema fetcher interrupted during project iteration for project: " + + currentProjectToScan); + break; + } + LOG.info("Fetching schemas for project: " + currentProjectToScan); + List datasetsInProject = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + currentProjectToScan, + BigQuery.DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(currentProjectToScan, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsInProject.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher thread found no matching datasets in project: " + + currentProjectToScan); + continue; + } + + LOG.fine("Processing found datasets for project: " + currentProjectToScan); + for (Dataset dataset : datasetsInProject) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Schema fetcher interrupted during dataset iteration for project: " + + currentProjectToScan); + break; + } + processSchemaInfo(dataset, collectedResults, localResultSchemaFields); + } + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetSchemasComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getSchemas", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in schema fetcher runnable: " + t.getMessage()); + } finally { + signalEndOfData(queue, localResultSchemaFields); + LOG.info("Schema fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(schemaFetcher, "getSchemas-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getSchemas"); + return resultSet; + } + + Schema defineGetSchemasSchema() { + List fields = new ArrayList<>(2); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TABLE_CATALOG", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + void processSchemaInfo( + Dataset dataset, List collectedResults, FieldList resultSchemaFields) { + DatasetId datasetId = dataset.getDatasetId(); + LOG.finer("Processing schema info for dataset: " + datasetId); + try { + String schemaName = datasetId.getDataset(); + String catalogName = datasetId.getProject(); + List values = new ArrayList<>(resultSchemaFields.size()); + values.add(createStringFieldValue(schemaName)); + values.add(createStringFieldValue(catalogName)); + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + LOG.finer("Processed and added schema info row for: " + datasetId); + } catch (Exception e) { + LOG.warning( + String.format( + "Error processing schema info for dataset %s: %s. 
Skipping this schema.", + datasetId, e.getMessage())); + } + } + + Comparator defineGetSchemasComparator(FieldList resultSchemaFields) { + final int TABLE_CATALOG_IDX = resultSchemaFields.getIndex("TABLE_CATALOG"); + final int TABLE_SCHEM_IDX = resultSchemaFields.getIndex("TABLE_SCHEM"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_CATALOG_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public boolean supportsStoredFunctionsUsingCallSyntax() { + return false; + } + + @Override + public boolean autoCommitFailureClosesAllResultSets() { + return false; + } + + @Override + public ResultSet getClientInfoProperties() { + LOG.info("getClientInfoProperties() called."); + + final Schema resultSchema = defineGetClientInfoPropertiesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(4); + final List collectedResults = new ArrayList<>(3); + + try { + collectedResults.add( + FieldValueList.of( + Arrays.asList( + createStringFieldValue("ApplicationName"), + createLongFieldValue(25L), + createNullFieldValue(), + createStringFieldValue( + "The name of the application currently utilizing the connection.")), + resultSchemaFields)); + + collectedResults.add( + FieldValueList.of( + Arrays.asList( + createStringFieldValue("ClientHostname"), + createLongFieldValue(25L), + createNullFieldValue(), + createStringFieldValue( + "The hostname of the computer the application using the connection is running" + + " on.")), + resultSchemaFields)); + + collectedResults.add( + FieldValueList.of( + Arrays.asList( + createStringFieldValue("ClientUser"), + createLongFieldValue(25L), + createNullFieldValue(), + createStringFieldValue( + "The name of the user that the application using the connection is performing" + + " work for.")), + resultSchemaFields)); + + Comparator comparator = + Comparator.comparing( + (FieldValueList fvl) -> + getStringValueOrNull(fvl, resultSchemaFields.getIndex("NAME")), + Comparator.nullsFirst(String::compareToIgnoreCase)); + sortResults(collectedResults, comparator, "getClientInfoProperties", LOG); + populateQueue(collectedResults, queue, resultSchemaFields); + + } catch (Exception e) { + LOG.warning("Unexpected error processing client info properties: " + e.getMessage()); + collectedResults.clear(); + queue.clear(); + } finally { + signalEndOfData(queue, resultSchemaFields); + } + return BigQueryJsonResultSet.of( + resultSchema, collectedResults.size(), queue, this.statement, new Thread[0]); + } + + Schema defineGetClientInfoPropertiesSchema() { + List fields = new ArrayList<>(4); + fields.add( + Field.newBuilder("NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 1 + fields.add( + Field.newBuilder("MAX_LEN", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 2 + fields.add( + Field.newBuilder("DEFAULT_VALUE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 3 + fields.add( + Field.newBuilder("DESCRIPTION", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 4 + return Schema.of(fields); + } + + @Override + public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) { + if ((catalog == null || catalog.isEmpty()) + || (schemaPattern != null && schemaPattern.isEmpty()) + || 
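+ // getFunctions requires a concrete catalog (project), and an explicit empty pattern can never match a BigQuery object, so return an empty ResultSet up front.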
(functionNamePattern != null && functionNamePattern.isEmpty())) { + LOG.warning( + "Returning empty ResultSet as catalog is null/empty or a pattern is empty for" + + " getFunctions."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getFunctions called for catalog: %s, schemaPattern: %s, functionNamePattern: %s", + catalog, schemaPattern, functionNamePattern)); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern functionNameRegex = compileSqlLikePattern(functionNamePattern); + final Schema resultSchema = defineGetFunctionsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable functionFetcher = + () -> { + ExecutorService apiExecutor = null; + ExecutorService routineProcessorExecutor = null; + final FieldList localResultSchemaFields = resultSchemaFields; + final List>> apiFutures = new ArrayList<>(); + + try { + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsToScan.isEmpty()) { + LOG.info("Fetcher thread found no matching datasets. Returning empty resultset."); + return; + } + + apiExecutor = Executors.newFixedThreadPool(API_EXECUTOR_POOL_SIZE); + routineProcessorExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount); + + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Function fetcher interrupted during dataset iteration submission."); + break; + } + + final DatasetId currentDatasetId = dataset.getDatasetId(); + + Callable> apiCallable = + () -> { + LOG.fine( + String.format( + "Fetching all routines for dataset: %s, pattern: %s", + currentDatasetId.getDataset(), functionNamePattern)); + return findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), + currentDatasetId.getDataset(), + name)), + (rt) -> rt.getRoutineId().getRoutine(), + functionNamePattern, + functionNameRegex, + LOG); + }; + Future> apiFuture = apiExecutor.submit(apiCallable); + apiFutures.add(apiFuture); + } + LOG.fine( + "Finished submitting " + + apiFutures.size() + + " findMatchingRoutines (for functions) tasks."); + apiExecutor.shutdown(); + + for (Future> apiFuture : apiFutures) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Function fetcher interrupted while processing API futures."); + break; + } + try { + List routinesResult = apiFuture.get(); + if (routinesResult != null) { + for (Routine routine : routinesResult) { + if (Thread.currentThread().isInterrupted()) { + break; + } + String routineType = routine.getRoutineType(); + if ("SCALAR_FUNCTION".equalsIgnoreCase(routineType) + || "TABLE_FUNCTION".equalsIgnoreCase(routineType)) { + LOG.fine( + "Submitting processing task for function: " + + routine.getRoutineId() + + " of type " + + routineType); + final Routine finalRoutine = routine; + Future processFuture = + 
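+ // Row construction runs on the routine-processor pool; each Future is tracked so waitForTasksCompletion() can join them all before sorting and queueing.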
routineProcessorExecutor.submit( + () -> + processFunctionInfo( + finalRoutine, collectedResults, localResultSchemaFields)); + processingTaskFutures.add(processFuture); + } + } + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Function fetcher thread interrupted while waiting for API future result."); + break; + } catch (ExecutionException | CancellationException e) { + LOG.warning( + "Error or cancellation in findMatchingRoutines (for functions) task: " + + e.getMessage()); + } + } + waitForTasksCompletion(processingTaskFutures); + Comparator comparator = + defineGetFunctionsComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getFunctions", LOG); + populateQueue(collectedResults, queue, localResultSchemaFields); + } catch (Throwable t) { + LOG.severe("Unexpected error in function fetcher runnable: " + t.getMessage()); + apiFutures.forEach(f -> f.cancel(true)); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, localResultSchemaFields); + shutdownExecutor(apiExecutor); + shutdownExecutor(routineProcessorExecutor); + LOG.info("Function fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(functionFetcher, "getFunctions-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getFunctions"); + return resultSet; + } + + Schema defineGetFunctionsSchema() { + List fields = new ArrayList<>(6); + fields.add( + Field.newBuilder("FUNCTION_CAT", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("FUNCTION_SCHEM", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("FUNCTION_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("FUNCTION_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + void processFunctionInfo( + Routine routine, List collectedResults, FieldList resultSchemaFields) { + RoutineId routineId = routine.getRoutineId(); + LOG.fine("Processing function info for: " + routineId); + + try { + String catalogName = routineId.getProject(); + String schemaName = routineId.getDataset(); + String functionName = routineId.getRoutine(); + String remarks = routine.getDescription(); + String specificName = functionName; + + short functionType; + String bqRoutineType = routine.getRoutineType(); + if ("SCALAR_FUNCTION".equalsIgnoreCase(bqRoutineType)) { + functionType = DatabaseMetaData.functionResultUnknown; + } else if ("TABLE_FUNCTION".equalsIgnoreCase(bqRoutineType)) { + functionType = DatabaseMetaData.functionReturnsTable; + } else { + functionType = DatabaseMetaData.functionResultUnknown; + } + + List values = new ArrayList<>(resultSchemaFields.size()); + values.add(createStringFieldValue(catalogName)); // 1. FUNCTION_CAT + values.add(createStringFieldValue(schemaName)); // 2. FUNCTION_SCHEM + values.add(createStringFieldValue(functionName)); // 3. FUNCTION_NAME + values.add(createStringFieldValue(remarks)); // 4. 
REMARKS + values.add(createLongFieldValue((long) functionType)); // 5. FUNCTION_TYPE + values.add(createStringFieldValue(specificName)); // 6. SPECIFIC_NAME + + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + LOG.fine("Processed and added function info row for: " + routineId); + + } catch (Exception e) { + LOG.warning( + String.format( + "Error processing function info for %s: %s. Skipping this function.", + routineId, e.getMessage())); + } + } + + Comparator defineGetFunctionsComparator(FieldList resultSchemaFields) { + final int FUNC_CAT_IDX = resultSchemaFields.getIndex("FUNCTION_CAT"); + final int FUNC_SCHEM_IDX = resultSchemaFields.getIndex("FUNCTION_SCHEM"); + final int FUNC_NAME_IDX = resultSchemaFields.getIndex("FUNCTION_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public ResultSet getFunctionColumns( + String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) { + if (catalog == null || catalog.isEmpty()) { + LOG.warning("Returning empty ResultSet catalog (project) is null or empty."); + return new BigQueryJsonResultSet(); + } + if ((schemaPattern != null && schemaPattern.isEmpty()) + || (functionNamePattern != null && functionNamePattern.isEmpty()) + || (columnNamePattern != null && columnNamePattern.isEmpty())) { + LOG.warning("Returning empty ResultSet because an explicit empty pattern was provided."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getFunctionColumns called for catalog: %s, schemaPattern: %s, functionNamePattern: %s," + + " columnNamePattern: %s", + catalog, schemaPattern, functionNamePattern, columnNamePattern)); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern functionNameRegex = compileSqlLikePattern(functionNamePattern); + final Pattern columnNameRegex = compileSqlLikePattern(columnNamePattern); + + final Schema resultSchema = defineGetFunctionColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable functionColumnFetcher = + () -> { + ExecutorService listRoutinesExecutor = null; + ExecutorService getRoutineDetailsExecutor = null; + ExecutorService processParamsExecutor = null; + final String fetcherThreadNameSuffix = + "-" + catalogParam.substring(0, Math.min(10, catalogParam.length())); + + try { + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if 
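+ // Stage 1 gate: no matching datasets (or an interrupt) ends the fetcher early; signalEndOfData in the finally block still unblocks the consumer.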
(datasetsToScan.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No matching datasets or interrupted early. Catalog: " + catalogParam); + return; + } + + listRoutinesExecutor = + Executors.newFixedThreadPool( + API_EXECUTOR_POOL_SIZE, + runnable -> new Thread(runnable, "funcol-list-rout" + fetcherThreadNameSuffix)); + List functionIdsToGet = + listMatchingFunctionIdsFromDatasets( + datasetsToScan, + functionNamePattern, + functionNameRegex, + listRoutinesExecutor, + catalogParam, + LOG); + shutdownExecutor(listRoutinesExecutor); + listRoutinesExecutor = null; + + if (functionIdsToGet.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info("Fetcher: No function IDs found or interrupted. Catalog: " + catalogParam); + return; + } + + getRoutineDetailsExecutor = + Executors.newFixedThreadPool( + this.metadataFetchThreadCount, + runnable -> + new Thread(runnable, "funcol-get-details" + fetcherThreadNameSuffix)); + List fullFunctions = + fetchFullRoutineDetailsForIds(functionIdsToGet, getRoutineDetailsExecutor, LOG); + shutdownExecutor(getRoutineDetailsExecutor); + getRoutineDetailsExecutor = null; + + if (fullFunctions.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No full functions fetched or interrupted. Catalog: " + catalogParam); + return; + } + + processParamsExecutor = + Executors.newFixedThreadPool( + this.metadataFetchThreadCount, + runnable -> + new Thread(runnable, "funcol-proc-params" + fetcherThreadNameSuffix)); + submitFunctionParameterProcessingJobs( + fullFunctions, + columnNameRegex, + collectedResults, + resultSchemaFields, + processParamsExecutor, + processingTaskFutures, + LOG); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher: Interrupted before waiting for parameter processing. Catalog: " + + catalogParam); + processingTaskFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine( + "Fetcher: Waiting for " + + processingTaskFutures.size() + + " parameter processing tasks. Catalog: " + + catalogParam); + waitForTasksCompletion(processingTaskFutures); + LOG.fine( + "Fetcher: All parameter processing tasks completed or handled. Catalog: " + + catalogParam); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetFunctionColumnsComparator(resultSchemaFields); + sortResults(collectedResults, comparator, "getFunctionColumns", LOG); + populateQueue(collectedResults, queue, resultSchemaFields); + } + + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Fetcher: Interrupted in main try block for catalog " + + catalogParam + + ". Error: " + + e.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } catch (Throwable t) { + LOG.severe( + "Fetcher: Unexpected error in main try block for catalog " + + catalogParam + + ". 
Error: " + + t.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, resultSchemaFields); + if (listRoutinesExecutor != null) shutdownExecutor(listRoutinesExecutor); + if (getRoutineDetailsExecutor != null) shutdownExecutor(getRoutineDetailsExecutor); + if (processParamsExecutor != null) shutdownExecutor(processParamsExecutor); + LOG.info("Function column fetcher thread finished for catalog: " + catalogParam); + } + }; + + Thread fetcherThread = + new Thread(functionColumnFetcher, "getFunctionColumns-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getFunctionColumns for catalog: " + catalog); + return resultSet; + } + + Schema defineGetFunctionColumnsSchema() { + List fields = new ArrayList<>(17); + fields.add( + Field.newBuilder("FUNCTION_CAT", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 1 + fields.add( + Field.newBuilder("FUNCTION_SCHEM", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 2 + fields.add( + Field.newBuilder("FUNCTION_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 3 + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 4 + fields.add( + Field.newBuilder("COLUMN_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 5 + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 6 + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 7 + fields.add( + Field.newBuilder("PRECISION", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 8 + fields.add( + Field.newBuilder("LENGTH", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); // 9 + fields.add( + Field.newBuilder("SCALE", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); // 10 + fields.add( + Field.newBuilder("RADIX", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); // 11 + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 12 + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 13 + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 14 + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 15 + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 16 + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 17 + return Schema.of(fields); + } + + List listMatchingFunctionIdsFromDatasets( + List datasetsToScan, + String functionNamePattern, + Pattern functionNameRegex, + ExecutorService listRoutinesExecutor, + String catalogParam, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + + logger.fine( + String.format( + "Listing matching function IDs from %d datasets for catalog '%s'.", + datasetsToScan.size(), catalogParam)); + final List>> listRoutineFutures = new ArrayList<>(); + final List functionIdsToGet = Collections.synchronizedList(new ArrayList<>()); + + for (Dataset dataset : 
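+ // Fan out one listRoutines task per dataset; the futures are drained below and only SCALAR_FUNCTION / TABLE_FUNCTION IDs are kept.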
datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted during submission of routine (function) listing tasks for catalog: " + + catalogParam); + throw new InterruptedException("Interrupted while listing functions"); + } + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> listCallable = + () -> + findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), currentDatasetId.getDataset(), name)), + (rt) -> rt.getRoutineId().getRoutine(), + functionNamePattern, + functionNameRegex, + logger); + listRoutineFutures.add(listRoutinesExecutor.submit(listCallable)); + } + logger.fine( + "Submitted " + + listRoutineFutures.size() + + " routine (function) list tasks for catalog: " + + catalogParam); + + for (Future> listFuture : listRoutineFutures) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted while collecting routine (function) list results for catalog: " + + catalogParam); + listRoutineFutures.forEach(f -> f.cancel(true)); + throw new InterruptedException("Interrupted while collecting function lists"); + } + try { + List listedRoutines = listFuture.get(); + if (listedRoutines != null) { + for (Routine listedRoutine : listedRoutines) { + if (listedRoutine != null + && ("SCALAR_FUNCTION".equalsIgnoreCase(listedRoutine.getRoutineType()) + || "TABLE_FUNCTION".equalsIgnoreCase(listedRoutine.getRoutineType()))) { + if (listedRoutine.getRoutineId() != null) { + functionIdsToGet.add(listedRoutine.getRoutineId()); + } else { + logger.warning( + "Found a function type routine with a null ID during listing phase for catalog:" + + " " + + catalogParam); + } + } + } + } + } catch (ExecutionException e) { + logger.warning( + "Error getting routine (function) list result for catalog " + + catalogParam + + ": " + + e.getCause()); + } catch (CancellationException e) { + logger.warning("Routine (function) list task cancelled for catalog: " + catalogParam); + } + } + logger.info( + String.format( + "Found %d function IDs to fetch details for in catalog '%s'.", + functionIdsToGet.size(), catalogParam)); + return functionIdsToGet; + } + + void submitFunctionParameterProcessingJobs( + List fullFunctions, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields, + ExecutorService processParamsExecutor, + List> outParameterProcessingFutures, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + logger.fine( + String.format( + "Submitting parameter processing jobs for %d functions.", fullFunctions.size())); + + for (Routine fullFunction : fullFunctions) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Interrupted during submission of function parameter processing tasks."); + throw new InterruptedException( + "Interrupted while submitting function parameter processing jobs"); + } + if (fullFunction != null) { + String routineType = fullFunction.getRoutineType(); + if ("SCALAR_FUNCTION".equalsIgnoreCase(routineType) + || "TABLE_FUNCTION".equalsIgnoreCase(routineType)) { + final Routine finalFullFunction = fullFunction; + Future processFuture = + processParamsExecutor.submit( + () -> + processFunctionParametersAndReturnValue( + finalFullFunction, + columnNameRegex, + collectedResults, + resultSchemaFields)); + outParameterProcessingFutures.add(processFuture); + } else { + logger.warning( + "Routine " + + 
(fullFunction.getRoutineId() != null + ? fullFunction.getRoutineId().toString() + : "UNKNOWN_ID") + + " fetched for getFunctionColumns was not of a function type (Type: " + + routineType + + "). Skipping parameter processing."); + } + } + } + logger.fine( + "Finished submitting " + + outParameterProcessingFutures.size() + + " processFunctionParametersAndReturnValue tasks."); + } + + void processFunctionParametersAndReturnValue( + Routine routine, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields) { + RoutineId routineId = routine.getRoutineId(); + if (routineId == null) { + LOG.warning("Processing a routine with a null ID. Skipping."); + return; + } + LOG.finer("Processing function parameters and return value for: " + routineId); + + String functionCatalog = routineId.getProject(); + String functionSchema = routineId.getDataset(); + String functionName = routineId.getRoutine(); + String specificName = functionName; + + if (routine.getReturnTableType() != null) { + StandardSQLTableType returnTableType = routine.getReturnTableType(); + if (returnTableType != null && returnTableType.getColumns() != null) { + List tableColumns = returnTableType.getColumns(); + for (int i = 0; i < tableColumns.size(); i++) { + StandardSQLField tableColumn = tableColumns.get(i); + String columnName = tableColumn.getName(); + if (columnNameRegex != null + && (columnName == null || !columnNameRegex.matcher(columnName).matches())) { + continue; + } + List rowValues = + createFunctionColumnRow( + functionCatalog, + functionSchema, + functionName, + specificName, + columnName, + DatabaseMetaData.functionColumnResult, + tableColumn.getDataType(), + i + 1); + collectedResults.add(FieldValueList.of(rowValues, resultSchemaFields)); + } + } + } + + List arguments = routine.getArguments(); + if (arguments != null) { + for (int i = 0; i < arguments.size(); i++) { + RoutineArgument arg = arguments.get(i); + String argName = arg.getName(); + + if (columnNameRegex != null + && (argName == null || !columnNameRegex.matcher(argName).matches())) { + continue; + } + + short columnType; + String originalMode = arg.getMode(); + + if ("IN".equalsIgnoreCase(originalMode)) { + columnType = DatabaseMetaData.functionColumnIn; + } else if ("OUT".equalsIgnoreCase(originalMode)) { + columnType = DatabaseMetaData.functionColumnOut; + } else if ("INOUT".equalsIgnoreCase(originalMode)) { + columnType = DatabaseMetaData.functionColumnInOut; + } else { + columnType = DatabaseMetaData.functionColumnUnknown; + } + + List rowValues = + createFunctionColumnRow( + functionCatalog, + functionSchema, + functionName, + specificName, + argName, + columnType, + arg.getDataType(), + i + 1); + collectedResults.add(FieldValueList.of(rowValues, resultSchemaFields)); + } + } + } + + List createFunctionColumnRow( + String functionCatalog, + String functionSchema, + String functionName, + String specificName, + String columnName, + int columnType, + StandardSQLDataType dataType, + int ordinalPosition) { + + List values = new ArrayList<>(17); + ColumnTypeInfo typeInfo = + determineTypeInfoFromDataType(dataType, functionName, columnName, ordinalPosition); + + values.add(createStringFieldValue(functionCatalog)); // 1. FUNCTION_CAT + values.add(createStringFieldValue(functionSchema)); // 2. FUNCTION_SCHEM + values.add(createStringFieldValue(functionName)); // 3. FUNCTION_NAME + values.add(createStringFieldValue(columnName)); // 4. COLUMN_NAME + values.add(createLongFieldValue((long) columnType)); // 5. 
COLUMN_TYPE + + values.add(createLongFieldValue((long) typeInfo.jdbcType)); // 6. DATA_TYPE + values.add(createStringFieldValue(typeInfo.typeName)); // 7. TYPE_NAME + values.add( + createLongFieldValue( + typeInfo.columnSize == null ? null : typeInfo.columnSize.longValue())); // 8. PRECISION + if (typeInfo.typeName != null + && (typeInfo.typeName.equalsIgnoreCase("STRING") + || typeInfo.typeName.equalsIgnoreCase("NVARCHAR") + || typeInfo.typeName.equalsIgnoreCase("BYTES") + || typeInfo.typeName.equalsIgnoreCase("VARBINARY"))) { + values.add( + createLongFieldValue( + typeInfo.columnSize == null ? null : typeInfo.columnSize.longValue())); // 9. LENGTH + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 10. SCALE + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null + ? null + : typeInfo.numPrecRadix.longValue())); // 11. RADIX + values.add( + createLongFieldValue((long) DatabaseMetaData.functionNullableUnknown)); // 12. NULLABLE + values.add(createStringFieldValue(null)); // 13. REMARKS + values.add( + createLongFieldValue( + typeInfo.columnSize == null + ? null + : typeInfo.columnSize.longValue())); // 14. CHAR_OCTET_LENGTH + } else { + values.add(createNullFieldValue()); // 9. LENGTH + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 10. SCALE + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null + ? null + : typeInfo.numPrecRadix.longValue())); // 11. RADIX + values.add( + createLongFieldValue((long) DatabaseMetaData.functionNullableUnknown)); // 12. NULLABLE + values.add(createStringFieldValue(null)); // 13. REMARKS + values.add(createNullFieldValue()); // 14. CHAR_OCTET_LENGTH + } + + values.add(createLongFieldValue((long) ordinalPosition)); // 15. ORDINAL_POSITION + values.add(createStringFieldValue("")); // 16. IS_NULLABLE + values.add(createStringFieldValue(specificName)); // 17. SPECIFIC_NAME + + return values; + } + + Comparator defineGetFunctionColumnsComparator(FieldList resultSchemaFields) { + final int FUNC_CAT_IDX = resultSchemaFields.getIndex("FUNCTION_CAT"); + final int FUNC_SCHEM_IDX = resultSchemaFields.getIndex("FUNCTION_SCHEM"); + final int FUNC_NAME_IDX = resultSchemaFields.getIndex("FUNCTION_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + final int ORDINAL_POS_IDX = resultSchemaFields.getIndex("ORDINAL_POSITION"); + + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_CAT_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_SCHEM_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getLongValueOrNull(fvl, ORDINAL_POS_IDX), + Comparator.nullsFirst(Long::compareTo)); + } + + @Override + public ResultSet getPseudoColumns( + String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) { + LOG.info( + String.format( + "getPseudoColumns called for catalog: %s, schemaPattern: %s, tableNamePattern: %s," + + " columnNamePattern: %s. 
Pseudo columns not supported by BigQuery; returning" + + " empty ResultSet.", + catalog, schemaPattern, tableNamePattern, columnNamePattern)); + + final Schema resultSchema = defineGetPseudoColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetPseudoColumnsSchema() { + List fields = new ArrayList<>(12); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("COLUMN_USAGE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + @Override + public boolean generatedKeyAlwaysReturned() { + return false; + } + + @Override + public T unwrap(Class iface) { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) { + return false; + } + + // --- Helper Methods --- + + /** + * Determines the effective catalog and schema pattern to use for metadata retrieval. + * + *
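+ * <p>Illustrative example (project and dataset names assumed): with {@code
+ * FilterTablesOnDefaultDataset} enabled, a default project {@code my-project} and a default
+ * dataset {@code my_dataset}, the call {@code determineEffectiveCatalogAndSchema(null, "%")}
+ * resolves to {@code ("my-project", "my_dataset")}; explicit, non-wildcard arguments pass
+ * through unchanged.
+ *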
<p>
This method applies the logic for the {@code FilterTablesOnDefaultDataset} connection + * property. If this property is enabled and the provided {@code catalog} or {@code schemaPattern} + * are null, empty, or wildcard ('%'), they may be overridden by the default catalog (project) and + * default dataset (schema) configured in the {@link BigQueryConnection}. + * + * @param catalog The catalog name provided by the user; may be {@code null}. + * @param schemaPattern The schema name pattern provided by the user; may be {@code null}. + * @return A {@link Tuple} where {@code Tuple.x()} is the effective catalog string and {@code + * Tuple.y()} is the effective schema pattern string. These are the values that should be used + * for querying BigQuery's metadata. + * @see BigQueryConnection#isFilterTablesOnDefaultDataset() + */ + private Tuple determineEffectiveCatalogAndSchema( + String catalog, String schemaPattern) { + String effectiveCatalog = catalog; + String effectiveSchemaPattern = schemaPattern; + + if (this.connection.isFilterTablesOnDefaultDataset() + && this.connection.getDefaultDataset() != null + && this.connection.getDefaultDataset().getDataset() != null + && !this.connection.getDefaultDataset().getDataset().isEmpty()) { + + String defaultProjectFromConnection = this.connection.getCatalog(); + // We only use the dataset part of the DefaultDataset for schema filtering + String defaultSchemaFromConnection = this.connection.getDefaultDataset().getDataset(); + + boolean catalogIsNullOrEmptyOrWildcard = + (catalog == null || catalog.isEmpty() || catalog.equals("%")); + boolean schemaPatternIsNullOrEmptyOrWildcard = + (schemaPattern == null || schemaPattern.isEmpty() || schemaPattern.equals("%")); + + final String logPrefix = "FilterTablesOnDefaultDatasetTrue: "; + if (catalogIsNullOrEmptyOrWildcard && schemaPatternIsNullOrEmptyOrWildcard) { + effectiveCatalog = defaultProjectFromConnection; + effectiveSchemaPattern = defaultSchemaFromConnection; + LOG.info( + String.format( + logPrefix + "Using default catalog '%s' and default dataset '%s'.", + effectiveCatalog, + effectiveSchemaPattern)); + } else if (catalogIsNullOrEmptyOrWildcard) { + effectiveCatalog = defaultProjectFromConnection; + LOG.info( + String.format( + logPrefix + + "Using default catalog '%s' with user dataset '%s'. Default dataset '%s' ignored.", + effectiveCatalog, + effectiveSchemaPattern, + defaultSchemaFromConnection)); + } else if (schemaPatternIsNullOrEmptyOrWildcard) { + effectiveSchemaPattern = defaultSchemaFromConnection; + LOG.info( + String.format( + logPrefix + "Using user catalog '%s' and default dataset '%s'.", + effectiveCatalog, + effectiveSchemaPattern)); + } else { + LOG.info( + String.format( + logPrefix + + "Using user catalog '%s' and schema '%s'. Default dataset '%s' ignored.", + effectiveCatalog, + effectiveSchemaPattern, + defaultSchemaFromConnection)); + } + } + return Tuple.of(effectiveCatalog, effectiveSchemaPattern); + } + + private ColumnTypeInfo getColumnTypeInfoForSqlType(StandardSQLTypeName bqType) { + if (bqType == null) { + LOG.warning("Null BigQuery type encountered: " + bqType.name() + ". 
Mapping to VARCHAR."); + return new ColumnTypeInfo(Types.VARCHAR, bqType.name(), null, null, null); + } + + switch (bqType) { + case INT64: + return new ColumnTypeInfo(Types.BIGINT, "BIGINT", 19, 0, 10); + case BOOL: + return new ColumnTypeInfo(Types.BOOLEAN, "BOOLEAN", 1, null, null); + case FLOAT64: + return new ColumnTypeInfo(Types.DOUBLE, "DOUBLE", 15, null, 10); + case NUMERIC: + return new ColumnTypeInfo(Types.NUMERIC, "NUMERIC", 38, 9, 10); + case BIGNUMERIC: + return new ColumnTypeInfo(Types.NUMERIC, "NUMERIC", 77, 38, 10); + case STRING: + return new ColumnTypeInfo(Types.NVARCHAR, "NVARCHAR", null, null, null); + case TIMESTAMP: + case DATETIME: + return new ColumnTypeInfo(Types.TIMESTAMP, "TIMESTAMP", 29, null, null); + case DATE: + return new ColumnTypeInfo(Types.DATE, "DATE", 10, null, null); + case TIME: + return new ColumnTypeInfo(Types.TIME, "TIME", 15, null, null); + case GEOGRAPHY: + case JSON: + case INTERVAL: + return new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + case BYTES: + return new ColumnTypeInfo(Types.VARBINARY, "VARBINARY", null, null, null); + case STRUCT: + return new ColumnTypeInfo(Types.STRUCT, "STRUCT", null, null, null); + default: + LOG.warning( + "Unknown BigQuery type encountered: " + bqType.name() + ". Mapping to VARCHAR."); + return new ColumnTypeInfo(Types.VARCHAR, bqType.name(), null, null, null); + } + } + + List findMatchingBigQueryObjects( + String objectTypeName, + Supplier> listAllOperation, + Function getSpecificOperation, + Function nameExtractor, + String pattern, + Pattern regex, + BigQueryJdbcCustomLogger logger) { + + boolean needsList = needsListing(pattern); + List resultList = new ArrayList<>(); + + try { + Iterable objects; + if (needsList) { + logger.info( + String.format( + "Listing all %ss (pattern: %s)...", + objectTypeName, pattern == null ? "" : pattern)); + Page firstPage = listAllOperation.get(); + objects = firstPage.iterateAll(); + logger.fine( + String.format( + "Retrieved initial %s list, iterating & filtering if needed...", objectTypeName)); + + } else { + logger.info(String.format("Getting specific %s: '%s'", objectTypeName, pattern)); + T specificObject = getSpecificOperation.apply(pattern); + objects = + (specificObject == null) + ? 
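+ // getSpecificOperation returning null means the exact-name lookup found nothing; a 404 thrown by the API is handled in the BigQueryException catch below.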
Collections.emptyList() + : Collections.singletonList(specificObject); + if (specificObject == null) { + logger.info(String.format("Specific %s not found: '%s'", objectTypeName, pattern)); + } + } + + boolean wasListing = needsList; + for (T obj : objects) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Thread interrupted during " + objectTypeName + " processing loop."); + throw new InterruptedException( + "Interrupted during " + objectTypeName + " processing loop"); + } + if (obj != null) { + if (wasListing && regex != null) { + String name = nameExtractor.apply(obj); + if (name != null && regex.matcher(name).matches()) { + resultList.add(obj); + } + } else { + resultList.add(obj); + } + } + } + + } catch (BigQueryException e) { + if (!needsList && e.getCode() == 404) { + logger.info(String.format("%s '%s' not found (API error 404).", objectTypeName, pattern)); + } else { + logger.warning( + String.format( + "BigQueryException finding %ss for pattern '%s': %s (Code: %d)", + objectTypeName, pattern, e.getMessage(), e.getCode())); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.warning("Interrupted while finding " + objectTypeName + "s."); + } catch (Exception e) { + logger.severe( + String.format( + "Unexpected exception finding %ss for pattern '%s': %s", + objectTypeName, pattern, e.getMessage())); + } + return resultList; + } + + private static class TypeInfoRowData { + String typeName; + int jdbcType; + Long precision; + String literalPrefix; + String literalSuffix; + String createParams; + int nullable; + boolean caseSensitive; + int searchable; + boolean unsignedAttribute; + boolean fixedPrecScale; + boolean autoIncrement; + String localTypeName; + Long minimumScale; + Long maximumScale; + Long numPrecRadix; + + TypeInfoRowData( + String typeName, + int jdbcType, + Long precision, + String literalPrefix, + String literalSuffix, + String createParams, + int nullable, + boolean caseSensitive, + int searchable, + boolean unsignedAttribute, + boolean fixedPrecScale, + boolean autoIncrement, + String localTypeName, + Long minimumScale, + Long maximumScale, + Long numPrecRadix) { + this.typeName = typeName; + this.jdbcType = jdbcType; + this.precision = precision; + this.literalPrefix = literalPrefix; + this.literalSuffix = literalSuffix; + this.createParams = createParams; + this.nullable = nullable; + this.caseSensitive = caseSensitive; + this.searchable = searchable; + this.unsignedAttribute = unsignedAttribute; + this.fixedPrecScale = fixedPrecScale; + this.autoIncrement = autoIncrement; + this.localTypeName = localTypeName; + this.minimumScale = minimumScale; + this.maximumScale = maximumScale; + this.numPrecRadix = numPrecRadix; + } + } + + void sortResults( + List collectedResults, + Comparator comparator, + String operationName, + BigQueryJdbcCustomLogger logger) { + + if (collectedResults == null || collectedResults.isEmpty()) { + logger.info(String.format("No results collected for %s, skipping sort.", operationName)); + return; + } + if (comparator == null) { + logger.info(String.format("No comparator provided for %s, skipping sort.", operationName)); + return; + } + + logger.info( + String.format( + "Sorting %d collected %s results...", collectedResults.size(), operationName)); + try { + collectedResults.sort(comparator); + logger.info(String.format("%s result sorting completed.", operationName)); + } catch (Exception e) { + logger.severe( + String.format("Error during sorting %s results: %s", operationName, 
e.getMessage())); + } + } + + private List defineBasePrivilegeFields() { + List fields = new ArrayList<>(7); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("GRANTOR", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("GRANTEE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("PRIVILEGE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("IS_GRANTABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return fields; + } + + Pattern compileSqlLikePattern(String sqlLikePattern) { + if (sqlLikePattern == null) { + return null; + } + if (sqlLikePattern.isEmpty()) { + return Pattern.compile("(?!)"); + } + StringBuilder regex = new StringBuilder(sqlLikePattern.length() * 2); + regex.append('^'); + for (int i = 0; i < sqlLikePattern.length(); i++) { + char c = sqlLikePattern.charAt(i); + switch (c) { + case '%': + regex.append(".*"); + break; + case '_': + regex.append('.'); + break; + case '\\': + case '.': + case '[': + case ']': + case '(': + case ')': + case '{': + case '}': + case '*': + case '+': + case '?': + case '^': + case '$': + case '|': + regex.append('\\').append(c); + break; + default: + regex.append(c); + break; + } + } + regex.append('$'); + return Pattern.compile(regex.toString(), Pattern.CASE_INSENSITIVE); + } + + boolean needsListing(String pattern) { + return pattern == null || pattern.contains("%") || pattern.contains("_"); + } + + FieldValue createStringFieldValue(String value) { + return FieldValue.of(FieldValue.Attribute.PRIMITIVE, value); + } + + FieldValue createLongFieldValue(Long value) { + return (value == null) + ? FieldValue.of(FieldValue.Attribute.PRIMITIVE, null) + : FieldValue.of(FieldValue.Attribute.PRIMITIVE, String.valueOf(value)); + } + + FieldValue createNullFieldValue() { + return FieldValue.of(FieldValue.Attribute.PRIMITIVE, null); + } + + FieldValue createBooleanFieldValue(Boolean value) { + return (value == null) + ? FieldValue.of(FieldValue.Attribute.PRIMITIVE, null) + : FieldValue.of(FieldValue.Attribute.PRIMITIVE, value ? "1" : "0"); + } + + private String getStringValueOrNull(FieldValueList fvl, int index) { + if (fvl == null || index < 0 || index >= fvl.size()) return null; + FieldValue fv = fvl.get(index); + return (fv == null || fv.isNull()) ? null : fv.getStringValue(); + } + + private Long getLongValueOrNull(FieldValueList fvl, int index) { + if (fvl == null || index < 0 || index >= fvl.size()) return null; + FieldValue fv = fvl.get(index); + try { + return (fv == null || fv.isNull()) ? 
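+ // FieldValue stores primitives as strings, so getLongValue() may throw NumberFormatException, which is mapped to null below.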
null : fv.getLongValue(); + } catch (NumberFormatException e) { + LOG.warning("Could not parse Long value for index " + index); + return null; + } + } + + private void waitForTasksCompletion(List> taskFutures) { + LOG.info(String.format("Waiting for %d submitted tasks to complete...", taskFutures.size())); + for (Future future : taskFutures) { + try { + if (!future.isCancelled()) { + future.get(); + } + } catch (CancellationException e) { + LOG.warning("A table processing task was cancelled."); + } catch (ExecutionException e) { + LOG.severe( + String.format( + "Error executing table processing task: %s", + (e.getCause() != null ? e.getCause().getMessage() : e.getMessage()))); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Fetcher thread interrupted while waiting for tasks. Attempting to cancel remaining" + + " tasks."); + taskFutures.forEach(f -> f.cancel(true)); + break; + } + } + LOG.info("Finished waiting for tasks."); + } + + private void populateQueue( + List collectedResults, + BlockingQueue queue, + FieldList resultSchemaFields) { + LOG.info(String.format("Populating queue with %d results...", collectedResults.size())); + try { + for (FieldValueList sortedRow : collectedResults) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Interrupted during queue population."); + break; + } + queue.put(BigQueryFieldValueListWrapper.of(resultSchemaFields, sortedRow)); + } + LOG.info("Finished populating queue."); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Interrupted while putting row onto queue."); + } catch (Exception e) { + LOG.severe("Unexpected error populating queue: " + e.getMessage()); + } + } + + private void signalEndOfData( + BlockingQueue queue, FieldList resultSchemaFields) { + try { + LOG.info("Adding end signal to queue."); + queue.put(BigQueryFieldValueListWrapper.of(resultSchemaFields, null, true)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Interrupted while sending end signal to queue."); + } catch (Exception e) { + LOG.severe("Exception while sending end signal to queue: " + e.getMessage()); + } + } + + private void shutdownExecutor(ExecutorService executor) { + if (executor == null || executor.isShutdown()) { + return; + } + LOG.info("Shutting down column executor service..."); + executor.shutdown(); + try { + if (!executor.awaitTermination(10, TimeUnit.SECONDS)) { + LOG.warning("Executor did not terminate gracefully after 10s, forcing shutdownNow()."); + List droppedTasks = executor.shutdownNow(); + LOG.warning( + "Executor shutdownNow() initiated. Dropped tasks count: " + droppedTasks.size()); + if (!executor.awaitTermination(10, TimeUnit.SECONDS)) { + LOG.severe("Executor did not terminate even after shutdownNow()."); + } + } + LOG.info("Executor shutdown complete."); + } catch (InterruptedException ie) { + LOG.warning( + "Interrupted while waiting for executor termination. 
Forcing shutdownNow() again."); + executor.shutdownNow(); + Thread.currentThread().interrupt(); + } + } + + private String getCurrentCatalogName() { + return this.connection.getCatalog(); + } + + private List getAccessibleCatalogNames() { + Set accessibleCatalogs = new HashSet<>(); + String primaryCatalog = getCurrentCatalogName(); + if (primaryCatalog != null && !primaryCatalog.isEmpty()) { + accessibleCatalogs.add(primaryCatalog); + } + + List additionalProjects = this.connection.getAdditionalProjects(); + if (additionalProjects != null) { + for (String project : additionalProjects) { + if (project != null && !project.isEmpty()) { + accessibleCatalogs.add(project); + } + } + } + + List sortedCatalogs = new ArrayList<>(accessibleCatalogs); + Collections.sort(sortedCatalogs); + return sortedCatalogs; + } + + static String readSqlFromFile(String filename) { + InputStream in; + in = BigQueryDatabaseMetaData.class.getResourceAsStream(filename); + BufferedReader reader = new BufferedReader(new InputStreamReader(in)); + StringBuilder builder = new StringBuilder(); + try (Scanner scanner = new Scanner(reader)) { + while (scanner.hasNextLine()) { + String line = scanner.nextLine(); + builder.append(line).append("\n"); + } + } + return builder.toString(); + } + + String replaceSqlParameters(String sql, String... params) throws SQLException { + return String.format(sql, (Object[]) params); + } + + private void loadDriverVersionProperties() { + if (parsedDriverVersion.get() != null) { + return; + } + Properties props = new Properties(); + try (InputStream input = + getClass().getResourceAsStream("/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (input == null) { + String errorMessage = + "Could not find dependencies.properties. Driver version information is unavailable."; + LOG.severe(errorMessage); + throw new IllegalStateException(errorMessage); + } + props.load(input); + String versionString = props.getProperty("version.jdbc"); + if (versionString == null || versionString.trim().isEmpty()) { + String errorMessage = + "The property version.jdbc not found or empty in dependencies.properties."; + LOG.severe(errorMessage); + throw new IllegalStateException(errorMessage); + } + parsedDriverVersion.compareAndSet(null, versionString.trim()); + String[] parts = versionString.split("\\."); + if (parts.length < 2) { + return; + } + parsedDriverMajorVersion.compareAndSet(null, Integer.parseInt(parts[0])); + String minorPart = parts[1]; + String numericMinor = minorPart.replaceAll("[^0-9].*", ""); + if (!numericMinor.isEmpty()) { + parsedDriverMinorVersion.compareAndSet(null, Integer.parseInt(numericMinor)); + } + } catch (IOException | NumberFormatException e) { + String errorMessage = + "Error reading dependencies.properties. Driver version information is" + + " unavailable. Error: " + + e.getMessage(); + LOG.severe(errorMessage); + throw new IllegalStateException(errorMessage, e); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java new file mode 100644 index 0000000000..324888982a --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java @@ -0,0 +1,102 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.math.RoundingMode; + +/** A registry of all the inbuilt {@link BigQueryCoercion}s that framework offers by default. */ +@InternalApi +class BigQueryDefaultCoercions { + + /** + * Creates a {@link BigQueryTypeCoercerBuilder} with all the inbuilt {@link BigQueryCoercion}s. + *
+ *   • {@link BigQueryTypeCoercer#INSTANCE} uses this builder to populate itself with all the
+ *     default {@link BigQueryCoercion}s.
+ *
  • A {@link BigQueryTypeCoercerBuilder} created through {@link BigQueryTypeCoercer#builder()} + * method also contains all these default {@link BigQueryCoercion}s + */ + static BigQueryTypeCoercerBuilder builder() { + BigQueryTypeCoercerBuilder builder = new BigQueryTypeCoercerBuilder(); + + // TODO: can we figure out the class parameters from coercion itself? + builder.registerTypeCoercion( + (String s) -> s != null && ("1".equals(s.trim()) || Boolean.parseBoolean(s)), + String.class, + Boolean.class); + builder.registerTypeCoercion(Integer::parseInt, String.class, Integer.class); + builder.registerTypeCoercion(BigInteger::new, String.class, BigInteger.class); + builder.registerTypeCoercion(Long::valueOf, String.class, Long.class); + builder.registerTypeCoercion(Double::valueOf, String.class, Double.class); + builder.registerTypeCoercion(BigDecimal::new, String.class, BigDecimal.class); + + builder.registerTypeCoercion((b) -> b ? 1 : 0, Boolean.class, Integer.class); + + builder.registerTypeCoercion(Integer::intValue, Integer.class, Integer.class); + builder.registerTypeCoercion(Integer::shortValue, Integer.class, Short.class); + builder.registerTypeCoercion(Integer::byteValue, Integer.class, Byte.class); + builder.registerTypeCoercion(Integer::doubleValue, Integer.class, Double.class); + builder.registerTypeCoercion(Integer::floatValue, Integer.class, Float.class); + + builder.registerTypeCoercion(Long::intValue, Long.class, Integer.class); + builder.registerTypeCoercion(Long::shortValue, Long.class, Short.class); + builder.registerTypeCoercion(Long::byteValue, Long.class, Byte.class); + builder.registerTypeCoercion(Long::doubleValue, Long.class, Double.class); + builder.registerTypeCoercion(Long::floatValue, Long.class, Float.class); + + builder.registerTypeCoercion(Double::floatValue, Double.class, Float.class); + builder.registerTypeCoercion(Double::longValue, Double.class, Long.class); + builder.registerTypeCoercion(Double::intValue, Double.class, Integer.class); + builder.registerTypeCoercion(Double::shortValue, Double.class, Short.class); + builder.registerTypeCoercion(Double::byteValue, Double.class, Byte.class); + builder.registerTypeCoercion(BigDecimal::valueOf, Double.class, BigDecimal.class); + + builder.registerTypeCoercion(Float::intValue, Float.class, Integer.class); + builder.registerTypeCoercion(Float::byteValue, Float.class, Byte.class); + builder.registerTypeCoercion(Float::shortValue, Float.class, Short.class); + builder.registerTypeCoercion(Float::doubleValue, Float.class, Double.class); + + builder.registerTypeCoercion(BigInteger::longValue, BigInteger.class, Long.class); + builder.registerTypeCoercion(BigDecimal::new, BigInteger.class, BigDecimal.class); + + builder.registerTypeCoercion(BigDecimal::doubleValue, BigDecimal.class, Double.class); + builder.registerTypeCoercion(BigDecimal::toBigInteger, BigDecimal.class, BigInteger.class); + builder.registerTypeCoercion( + bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).intValueExact(), + BigDecimal.class, + Integer.class); + builder.registerTypeCoercion( + bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).longValueExact(), + BigDecimal.class, + Long.class); + builder.registerTypeCoercion( + bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).shortValueExact(), + BigDecimal.class, + Short.class); + builder.registerTypeCoercion( + bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).byteValueExact(), + BigDecimal.class, + Byte.class); + builder.registerTypeCoercion(BigDecimal::floatValue, 
BigDecimal.class, Float.class); + + builder.registerTypeCoercion(unused -> false, Void.class, Boolean.class); + + return builder; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java new file mode 100644 index 0000000000..2c7eba16cd --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java @@ -0,0 +1,249 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import io.grpc.LoadBalancerRegistry; +import io.grpc.internal.PickFirstLoadBalancerProvider; +import java.io.IOException; +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * JDBC {@link Driver} implementation for BigQuery. + * + *
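+ * <p>The driver registers itself with {@link DriverManager} from a static initializer, so
+ * loading the class, e.g. via {@code Class.forName("com.google.cloud.bigquery.jdbc.BigQueryDriver")},
+ * is enough to make it available.
+ *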
+ * <p>Usage:
+ *
+ * <pre>
+ *  String CONNECTION_URL = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443"
+ *            + ";ProjectId=test;OAuthType=3";
+ *  Connection connection = DriverManager.getConnection(CONNECTION_URL);
+ * </pre>
+ *
+ * <p>Valid URLs take the form:
+ *
+ * <pre>
+ *  jdbc:bigquery://{host}:{port};ProjectId={projectId};OAuthType={oAuthType};
+ *  {property1}={value1};{property2}={value2};...
+ * </pre>
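+ *
+ * <p>Connection settings may also be supplied programmatically rather than inline in the URL; a
+ * minimal sketch (the project id {@code my-project} is a placeholder):
+ *
+ * <pre>
+ *  Properties info = new Properties();
+ *  info.setProperty("ProjectId", "my-project");
+ *  info.setProperty("OAuthType", "3"); // Application Default Credentials
+ *  Connection connection =
+ *      DriverManager.getConnection(
+ *          "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443", info);
+ * </pre>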
    + */ +public class BigQueryDriver implements Driver { + + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryDriver.class.getName()); + // TODO: update this when JDBC goes GA + private static final int JDBC_MAJOR_VERSION = 0; + private static final int JDBC_MINOR_VERSION = 1; + static BigQueryDriver registeredBigqueryJdbcDriver; + + static { + try { + register(); + } catch (SQLException e) { + throw new ExceptionInInitializerError("Registering driver failed: " + e.getMessage()); + } + LoadBalancerRegistry.getDefaultRegistry().register(new PickFirstLoadBalancerProvider()); + } + + public BigQueryDriver() {} + + static void register() throws SQLException { + LOG.finest("++enter++"); + if (isRegistered()) { + LOG.warning("Driver is already registered. It can only be registered once."); + } + + DriverManager.registerDriver(LazyHolder.INSTANCE); + BigQueryDriver.registeredBigqueryJdbcDriver = LazyHolder.INSTANCE; + } + + static boolean isRegistered() { + LOG.finest("++enter++"); + return registeredBigqueryJdbcDriver != null; + } + + /** + * @return the registered JDBC driver for BigQuery. + * @throws IllegalStateException if the driver has not been registered. + */ + public static BigQueryDriver getRegisteredDriver() throws IllegalStateException { + LOG.finest("++enter++"); + if (isRegistered()) { + return registeredBigqueryJdbcDriver; + } + throw new IllegalStateException( + "Driver is not registered (or it has not been registered using Driver.register() method)"); + } + + /** + * Attempts to establish a BigQuery connection to the given URL, using the provided connection + * Properties. + * + *
+   * <p>Valid URLs take the form:
+   *
+   * <pre>
+   *  jdbc:bigquery://{host}:{port};ProjectId={projectId};OAuthType={oAuthType};
+   *  {property1}={value1};{property2}={value2};...
+   * </pre>
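+   *
+   * <p>Entries in {@code info} are appended to the URL before parsing (see
+   * {@code BigQueryJdbcUrlUtility#appendPropertiesToURL}), so each setting may be given either
+   * inline in the URL or through the {@link Properties} object.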
    + * + * @param url the BigQuery URL to connect to + * @param info a list of arbitrary string tag/value pairs as connection arguments. + * @return A connection to the URL if it was established successfully, otherwise {@code null} + * @throws SQLException if driver fails to connect to clients. + * @see Driver#connect(String, Properties) + */ + @Override + public Connection connect(String url, Properties info) throws SQLException { + LOG.finest("++enter++"); + try { + if (acceptsURL(url)) { + // strip 'jdbc:' from the URL, add any extra properties + String connectionUri = + BigQueryJdbcUrlUtility.appendPropertiesToURL(url.substring(5), this.toString(), info); + + // LogLevel + String logLevelStr = + BigQueryJdbcUrlUtility.parseUriProperty( + connectionUri, BigQueryJdbcUrlUtility.LOG_LEVEL_PROPERTY_NAME); + if (logLevelStr == null) { + logLevelStr = System.getenv(BigQueryJdbcUrlUtility.LOG_LEVEL_ENV_VAR); + } + Level logLevel = BigQueryJdbcUrlUtility.parseLogLevel(logLevelStr); + + // LogPath + String logPath = + BigQueryJdbcUrlUtility.parseUriProperty( + connectionUri, BigQueryJdbcUrlUtility.LOG_PATH_PROPERTY_NAME); + if (logPath == null) { + logPath = System.getenv(BigQueryJdbcUrlUtility.LOG_PATH_ENV_VAR); + } + if (logPath == null) { + logPath = BigQueryJdbcUrlUtility.DEFAULT_LOG_PATH; + } + + BigQueryJdbcRootLogger.setLevel(logLevel, logPath); + // Logging starts from here. + BigQueryConnection connection = new BigQueryConnection(connectionUri); + LOG.info( + String.format( + "Driver info : { {Database Product Name : %s}, " + + "{Database Product Version : %s}, " + + "{Driver Name : %s}, " + + "{Driver Version : %s}, " + + "{LogLevel : %s}, " + + "{LogPath : %s}, " + + "{Driver Instance : %s} }", + connection.getMetaData().getDatabaseProductName(), + connection.getMetaData().getDatabaseProductVersion(), + connection.getMetaData().getDriverName(), + connection.getMetaData().getDriverVersion(), + logLevel, + logPath, + this.toString())); + return connection; + } else { + throw new IllegalArgumentException( + "Invalid URL provided, must start with \"jdbc:bigquery:\""); + } + } catch (IOException e) { + LOG.warning("Getting a warning: " + e.getMessage()); + } + return null; + } + + /** + * @param url a JDBC connection URL + * @return True if the URL is non-empty and starts with "jdbc:bigquery" + * @see Driver#acceptsURL(String) + */ + @Override + public boolean acceptsURL(String url) throws SQLException { + LOG.finest("++enter++"); + if (url == null || url.isEmpty()) { + throw new BigQueryJdbcException("Connection URL is null."); + } + return url.startsWith("jdbc:bigquery:"); + } + + /** + * Gets information about the possible BigQuery JDBC Connection Properties. + * + * @param url the BigQuery connection URL + * @param info a proposed list BigQuery connection properties + * @return an array of {@code DriverPropertyInfo} objects describing possible properties. 
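+ *     The returned array covers the declared connection properties plus any OAuth-related
+ *     properties parsed from the URL.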
+ * @see Driver#getPropertyInfo(String, Properties) + */ + @Override + public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) { + LOG.finest("++enter++"); + String connectionUri = + BigQueryJdbcUrlUtility.appendPropertiesToURL(url.substring(5), this.toString(), info); + List propertyInfoList = new ArrayList<>(); + + for (BigQueryConnectionProperty prop : BigQueryJdbcUrlUtility.VALID_PROPERTIES) { + DriverPropertyInfo driverProperty = + new DriverPropertyInfo( + prop.getName(), + BigQueryJdbcUrlUtility.parseUriProperty(connectionUri, prop.getName())); + driverProperty.description = prop.getDescription(); + propertyInfoList.add(driverProperty); + } + Map oAuthProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.toString()); + for (Map.Entry authProperty : oAuthProperties.entrySet()) { + propertyInfoList.add(new DriverPropertyInfo(authProperty.getKey(), authProperty.getValue())); + } + return propertyInfoList.toArray(new DriverPropertyInfo[0]); + } + + @Override + public int getMajorVersion() { + LOG.finest("++enter++"); + return JDBC_MAJOR_VERSION; + } + + @Override + public int getMinorVersion() { + LOG.finest("++enter++"); + return JDBC_MINOR_VERSION; + } + + @Override + public boolean jdbcCompliant() { + LOG.finest("++enter++"); + return false; + } + + @Override + public Logger getParentLogger() { + LOG.finest("++enter++"); + return BigQueryJdbcRootLogger.getRootLogger(); + } + + private static class LazyHolder { + static final BigQueryDriver INSTANCE = new BigQueryDriver(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java new file mode 100644 index 0000000000..9afa074522 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java @@ -0,0 +1,29 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; + +@InternalApi +class BigQueryErrorMessage { + + static final String CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED = + "Customized Types are not supported yet."; + static final String INVALID_ARRAY = "Invalid java.sql.Array instance."; + static final String METHOD_NOT_IMPLEMENTED = "This method is not implemented."; + static final String OAUTH_TYPE_ERROR_MESSAGE = "Invalid Auth type specified"; +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java new file mode 100644 index 0000000000..9d8b1b2f77 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java @@ -0,0 +1,81 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValueList; +import java.util.List; + +/** + * Package-private, This class acts as a facade layer and wraps the FieldList(schema) and + * FieldValueList + */ +class BigQueryFieldValueListWrapper { + + // This is a reference schema to the schema + private final FieldList fieldList; + + // POJO representation of the JSON response + private final FieldValueList fieldValueList; + + // This is very similar to the FieldValueList above, except we get the + // reference as a List in case of an Array + private final List arrayFieldValueList; + + // This flag marks the end of the stream for the ResultSet + private boolean isLast = false; + + static BigQueryFieldValueListWrapper of( + FieldList fieldList, FieldValueList fieldValueList, boolean... isLast) { + boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0]; + return new BigQueryFieldValueListWrapper(fieldList, fieldValueList, null, isLastFlag); + } + + static BigQueryFieldValueListWrapper getNestedFieldValueListWrapper( + FieldList fieldList, List arrayFieldValueList, boolean... 
isLast) { + boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0]; + return new BigQueryFieldValueListWrapper(fieldList, null, arrayFieldValueList, isLastFlag); + } + + private BigQueryFieldValueListWrapper( + FieldList fieldList, + FieldValueList fieldValueList, + List arrayFieldValueList, + boolean isLast) { + this.fieldList = fieldList; + this.fieldValueList = fieldValueList; + this.arrayFieldValueList = arrayFieldValueList; + this.isLast = isLast; + } + + public FieldList getFieldList() { + return this.fieldList; + } + + public FieldValueList getFieldValueList() { + return this.fieldValueList; + } + + public List getArrayFieldValueList() { + return this.arrayFieldValueList; + } + + public boolean isLast() { + return this.isLast; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java new file mode 100644 index 0000000000..d4e9702621 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java @@ -0,0 +1,130 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.ApiFuture; +import com.google.api.core.ApiFutureCallback; +import com.google.api.core.ApiFutures; +import com.google.api.gax.retrying.RetrySettings; +import com.google.cloud.bigquery.storage.v1.AppendRowsResponse; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient; +import com.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1.Exceptions; +import com.google.cloud.bigquery.storage.v1.Exceptions.StorageException; +import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; +import com.google.cloud.bigquery.storage.v1.JsonStreamWriter; +import com.google.cloud.bigquery.storage.v1.TableName; +import com.google.cloud.bigquery.storage.v1.WriteStream; +import com.google.common.util.concurrent.MoreExecutors; +import com.google.gson.JsonArray; +import com.google.protobuf.Descriptors.DescriptorValidationException; +import java.io.IOException; +import java.util.concurrent.Phaser; +import javax.annotation.concurrent.GuardedBy; + +class BigQueryJdbcBulkInsertWriter { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private JsonStreamWriter jsonStreamWriter; + private final Phaser openRequestCount = new Phaser(1); + private final Object streamLock = new Object(); + + @GuardedBy("lock") + private RuntimeException error = null; + + void initialize(TableName parentTable, BigQueryWriteClient client, RetrySettings retrySettings) + throws IOException, DescriptorValidationException, InterruptedException { + WriteStream stream = WriteStream.newBuilder().setType(WriteStream.Type.PENDING).build(); + + CreateWriteStreamRequest createWriteStreamRequest = + CreateWriteStreamRequest.newBuilder() + .setParent(parentTable.toString()) + .setWriteStream(stream) + .build(); + WriteStream writeStream = client.createWriteStream(createWriteStreamRequest); + + JsonStreamWriter.Builder jsonStreamWriterBuilder = + JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema()); + + if (retrySettings != null) { + jsonStreamWriterBuilder.setRetrySettings(retrySettings); + } + + this.jsonStreamWriter = jsonStreamWriterBuilder.build(); + } + + void append(JsonArray data, long offset) throws DescriptorValidationException, IOException { + synchronized (this.streamLock) { + if (this.error != null) { + throw this.error; + } + } + + ApiFuture future = jsonStreamWriter.append(data, offset); + ApiFutures.addCallback( + future, new AppendCompleteCallback(this), MoreExecutors.directExecutor()); + openRequestCount.register(); + } + + long cleanup(BigQueryWriteClient client) { + openRequestCount.arriveAndAwaitAdvance(); + jsonStreamWriter.close(); + + synchronized (this.streamLock) { + if (this.error != null) { + throw this.error; + } + } + + // Finalize the stream. 
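+ // For a WriteStream of Type.PENDING (as created in initialize() above), finalizing blocks any
+ // further appends; buffered rows only become visible once the stream is committed, which this
+ // class leaves to its caller (the stream name is exposed via getStreamName()).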
+ FinalizeWriteStreamResponse finalizeResponse = + client.finalizeWriteStream(jsonStreamWriter.getStreamName()); + LOG.finest("Rows written: " + finalizeResponse.getRowCount()); + return finalizeResponse.getRowCount(); + } + + String getStreamName() { + return jsonStreamWriter.getStreamName(); + } + + static class AppendCompleteCallback implements ApiFutureCallback { + + private final BigQueryJdbcBulkInsertWriter parent; + + AppendCompleteCallback(BigQueryJdbcBulkInsertWriter parent) { + this.parent = parent; + } + + public void onSuccess(AppendRowsResponse response) { + done(); + } + + public void onFailure(Throwable throwable) { + synchronized (this.parent.streamLock) { + if (this.parent.error == null) { + StorageException storageException = Exceptions.toStorageException(throwable); + this.parent.error = + (storageException != null) ? storageException : new RuntimeException(throwable); + } + } + done(); + } + + private void done() { + this.parent.openRequestCount.arriveAndDeregister(); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java new file mode 100644 index 0000000000..611b200ff6 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java @@ -0,0 +1,32 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import java.util.logging.Logger; + +class BigQueryJdbcCustomLogger extends Logger { + + protected BigQueryJdbcCustomLogger(String name, String resourceBundleName) { + super(name, resourceBundleName); + this.setParent(BigQueryJdbcRootLogger.getRootLogger()); + } + + BigQueryJdbcCustomLogger(String name) { + this(name, null); + this.setParent(BigQueryJdbcRootLogger.getRootLogger()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java new file mode 100644 index 0000000000..5f486f1e58 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java @@ -0,0 +1,759 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.api.client.util.PemReader.readFirstSectionAndClose; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.OAUTH_TYPE_ERROR_MESSAGE; + +import com.google.api.client.util.PemReader; +import com.google.api.client.util.SecurityUtils; +import com.google.auth.oauth2.AccessToken; +import com.google.auth.oauth2.ClientId; +import com.google.auth.oauth2.ExternalAccountCredentials; +import com.google.auth.oauth2.GoogleCredentials; +import com.google.auth.oauth2.ImpersonatedCredentials; +import com.google.auth.oauth2.ServiceAccountCredentials; +import com.google.auth.oauth2.UserAuthorizer; +import com.google.auth.oauth2.UserCredentials; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.gson.Strictness; +import com.google.gson.stream.JsonReader; +import java.awt.Desktop; +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.io.Reader; +import java.io.StringReader; +import java.net.ServerSocket; +import java.net.Socket; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.GeneralSecurityException; +import java.security.KeyFactory; +import java.security.NoSuchAlgorithmException; +import java.security.PrivateKey; +import java.security.spec.InvalidKeySpecException; +import java.security.spec.PKCS8EncodedKeySpec; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +final class BigQueryJdbcOAuthUtility { + + private static final String USER_AUTH_SUCCESS_HTTP_RESPONSE = + "HTTP/1.1 200 OK\n" + + "Content-Length: 300\n" + + "Connection: close\n" + + "Content-Type: text/html; charset=utf-8\n" + + "\n" + + "\n" + + "Thank you for using JDBC Driver for Google BigQuery!\n" + + "You may now close the window."; + + private static final int USER_AUTH_TIMEOUT_MS = 120000; + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJdbcOAuthUtility.class.getName()); + + private static final Map BYOID_NAME_MAP = + new HashMap() { + { + put(BigQueryJdbcUrlUtility.BYOID_AUDIENCE_URI_PROPERTY_NAME, "audience"); + put(BigQueryJdbcUrlUtility.BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME, "credential_source"); + put(BigQueryJdbcUrlUtility.BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME, "subject_token_type"); + put(BigQueryJdbcUrlUtility.BYOID_TOKEN_URI_PROPERTY_NAME, "token_url"); + put( + BigQueryJdbcUrlUtility.BYOID_POOL_USER_PROJECT_PROPERTY_NAME, + "workforce_pool_user_project"); + put( + BigQueryJdbcUrlUtility.BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME, + "service_account_impersonation_url"); + } + }; + + /** + * Parses the OAuth properties from the given URL. + * + * @param url The URL to parse. + * @return A map of OAuth properties. 
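+ * @param callerClassName name of the calling class, recorded in trace logging only.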
+ */ + static Map parseOAuthProperties(String url, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + Map oauthProperties = new HashMap<>(); + + AuthType authType; + try { + authType = + AuthType.fromValue( + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_TYPE_VALUE, + callerClassName)); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(OAUTH_TYPE_ERROR_MESSAGE); + } + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME, String.valueOf(authType)); + switch (authType) { + case GOOGLE_SERVICE_ACCOUNT: + // For using a Google Service Account (OAuth Type 0) + // need: project id, OAuthServiceAcctEmail and OAuthPvtKey or OAuthPvtKeyPath that can be + // .p12 or json. + // TODO: validation if .p12 or json file can be in getPropertyInfo can be handy for user + String serviceAccountEmail = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME); + String serviceAccountPK = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME); + String serviceAccountPrivateKeyPath = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME); + String p12Password = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_P12_PASSWORD_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_P12_PASSWORD_VALUE, + callerClassName); + + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME, serviceAccountEmail); + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_P12_PASSWORD_PROPERTY_NAME, p12Password); + if (serviceAccountEmail != null && serviceAccountPK != null) { + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, serviceAccountPK); + } else { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, + serviceAccountPrivateKeyPath); + } + break; + case GOOGLE_USER_ACCOUNT: + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + int reqGoogleDriveScope = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE, + callerClassName); + oauthProperties.put( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + String.valueOf(reqGoogleDriveScope)); + LOG.fine("RequestGoogleDriveScope parsed."); + break; + case PRE_GENERATED_TOKEN: + String refreshToken = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME); + if (refreshToken != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME, refreshToken); + LOG.fine("OAuthRefreshToken provided."); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)); + LOG.fine("OAuthClientId provided."); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + 
url, BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + LOG.fine("OAuthClientSecret provided."); + break; + } + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME)); + LOG.fine("OAuthAccessToken provided."); + break; + case APPLICATION_DEFAULT_CREDENTIALS: + // For Application Default Credentials (OAuth Type 3) + // need: project id + break; + case EXTERNAL_ACCOUNT_AUTH: + // For External account authentication (OAuth Type 4) + // need: project id, OAuthPvtKey or OAuthPvtKeyPath or BYOID_PROPERTIES + String pvtKey = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME); + String pvtKeyPath = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME); + if (pvtKey != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME)); + LOG.fine("OAuthPvtKey provided."); + } else if (pvtKeyPath != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME)); + LOG.fine("OAuthPvtKeyPath provided."); + } else { + for (String property : BigQueryJdbcUrlUtility.BYOID_PROPERTIES) { + String value = + BigQueryJdbcUrlUtility.parseBYOIDProperty(url, property, callerClassName); + if (value != null) { + oauthProperties.put(property, value); + LOG.fine(property + " provided."); + } + } + String universeDomainProp = BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME; + String universeDomain = BigQueryJdbcUrlUtility.parseUriProperty(url, universeDomainProp); + if (universeDomain != null) { + oauthProperties.put(universeDomainProp, universeDomain); + LOG.fine(universeDomainProp + " provided. Caller : " + callerClassName); + } + } + break; + } + + if (authType == AuthType.GOOGLE_SERVICE_ACCOUNT + || authType == AuthType.GOOGLE_USER_ACCOUNT + || authType == AuthType.PRE_GENERATED_TOKEN) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_EMAIL_VALUE, + callerClassName)); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_CHAIN_VALUE, + callerClassName)); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_SCOPES_VALUE, + callerClassName)); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_VALUE, + callerClassName)); + } + return oauthProperties; + } + + /** + * Gets the credentials for the given Auth properties. 
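+ *
+ * <p>Dispatches on the parsed {@code AuthType} (values 0-4); for service account, user account,
+ * and pre-generated token auth, the result may additionally be wrapped in
+ * {@link ImpersonatedCredentials} when impersonation properties are present.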
+ * + * @param authProperties A map of Auth properties. + * @return A GoogleCredentials object. + */ + static GoogleCredentials getCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + AuthType authType = + AuthType.valueOf(authProperties.get(BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME)); + + GoogleCredentials credentials; + switch (authType) { + case GOOGLE_SERVICE_ACCOUNT: + credentials = + getGoogleServiceAccountCredentials(authProperties, overrideProperties, callerClassName); + break; + case GOOGLE_USER_ACCOUNT: + credentials = + getGoogleUserAccountCredentials(authProperties, overrideProperties, callerClassName); + break; + case PRE_GENERATED_TOKEN: + credentials = + getPreGeneratedTokensCredentials(authProperties, overrideProperties, callerClassName); + break; + case APPLICATION_DEFAULT_CREDENTIALS: + // This auth method doesn't support service account impersonation + return getApplicationDefaultCredentials(callerClassName); + case EXTERNAL_ACCOUNT_AUTH: + // This auth method doesn't support service account impersonation + return getExternalAccountAuthCredentials(authProperties, callerClassName); + default: + throw new IllegalStateException(OAUTH_TYPE_ERROR_MESSAGE); + } + + return getServiceAccountImpersonatedCredentials(credentials, authProperties); + } + + private static boolean isFileExists(String filename) { + try { + return filename != null && !filename.isEmpty() && Files.exists(Paths.get(filename)); + } catch (Exception e) { + // Filename is invalid + return false; + } + } + + private static boolean isJson(String value) { + try { + // This is done this way to ensure strict Json parsing + // https://github.com/google/gson/issues/1208#issuecomment-2120764686 + InputStream stream = new ByteArrayInputStream(value.getBytes()); + InputStreamReader reader = new InputStreamReader(stream); + JsonReader jsonReader = new JsonReader(reader); + jsonReader.setStrictness(Strictness.STRICT); + JsonElement json = JsonParser.parseReader(jsonReader); + return json != null; + } catch (Exception e) { + // Unable to parse json string + return false; + } + } + + private static GoogleCredentials getGoogleServiceAccountCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + ServiceAccountCredentials.Builder builder; + try { + final String pvtKeyPath = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME); + final String pvtKey = authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME); + final String pvtEmail = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME); + final String p12Password = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_P12_PASSWORD_PROPERTY_NAME); + + final String keyPath = pvtKeyPath != null ? 
pvtKeyPath : pvtKey; + PrivateKey key = null; + InputStream stream = null; + + if (isFileExists(keyPath)) { + key = privateKeyFromP12File(keyPath, p12Password); + if (key == null) { + stream = Files.newInputStream(Paths.get(keyPath)); + } + } else if (isJson(pvtKey)) { + stream = new ByteArrayInputStream(pvtKey.getBytes()); + } else if (pvtKey != null) { + key = privateKeyFromPkcs8(pvtKey); + } + + if (stream != null) { + builder = ServiceAccountCredentials.fromStream(stream).toBuilder(); + } else if (pvtEmail != null && key != null) { + builder = + ServiceAccountCredentials.newBuilder().setClientEmail(pvtEmail).setPrivateKey(key); + } else { + LOG.severe("No valid Service Account credentials provided."); + throw new BigQueryJdbcRuntimeException("No valid credentials provided."); + } + + if (overrideProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME)) { + builder.setTokenServerUri( + new URI(overrideProperties.get(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME))); + } + if (overrideProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + builder.setUniverseDomain( + overrideProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + } catch (URISyntaxException | IOException e) { + LOG.severe("Validation failure for Service Account credentials."); + throw new BigQueryJdbcRuntimeException(e); + } + LOG.info("GoogleCredentials instantiated. Auth Method: Service Account."); + return builder.build(); + } + + static UserAuthorizer getUserAuthorizer( + Map authProperties, + Map overrideProperties, + int port, + String callerClassName) + throws URISyntaxException { + LOG.finest("++enter++\t" + callerClassName); + List scopes = new ArrayList<>(); + scopes.add("https://www.googleapis.com/auth/bigquery"); + + // Add Google Drive scope conditionally + if (authProperties.containsKey( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)) { + try { + int driveScopeValue = + Integer.parseInt( + authProperties.get( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + if (driveScopeValue == 1) { + scopes.add("https://www.googleapis.com/auth/drive.readonly"); + LOG.fine("Added Google Drive read-only scope. Caller: " + callerClassName); + } + } catch (NumberFormatException e) { + LOG.severe( + "Invalid value for RequestGoogleDriveScope, defaulting to not request Drive scope. 
Caller: " + + callerClassName); + } + } + + List responseTypes = new ArrayList<>(); + responseTypes.add("code"); + + ClientId clientId = + ClientId.of( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME), + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + UserAuthorizer.Builder userAuthorizerBuilder = + UserAuthorizer.newBuilder() + .setClientId(clientId) + .setScopes(scopes) + .setCallbackUri(URI.create("http://localhost:" + port)); + + if (overrideProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME)) { + userAuthorizerBuilder.setTokenServerUri( + new URI(overrideProperties.get(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME))); + } + + return userAuthorizerBuilder.build(); + } + + static UserCredentials getCredentialsFromCode( + UserAuthorizer userAuthorizer, String code, String callerClassName) throws IOException { + LOG.finest("++enter++\t" + callerClassName); + return userAuthorizer.getCredentialsFromCode(code, URI.create("")); + } + + private static GoogleCredentials getGoogleUserAccountCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + try { + ServerSocket serverSocket = new ServerSocket(0); + serverSocket.setSoTimeout(USER_AUTH_TIMEOUT_MS); + int port = serverSocket.getLocalPort(); + UserAuthorizer userAuthorizer = + getUserAuthorizer(authProperties, overrideProperties, port, callerClassName); + + URL authURL = userAuthorizer.getAuthorizationUrl("user", "", URI.create("")); + String code; + + if (Desktop.isDesktopSupported()) { + Desktop.getDesktop().browse(authURL.toURI()); + + Socket socket = serverSocket.accept(); + + OutputStream outputStream = socket.getOutputStream(); + PrintWriter printWriter = new PrintWriter(outputStream); + + BufferedReader bufferedReader = + new BufferedReader(new InputStreamReader(socket.getInputStream())); + String response = bufferedReader.readLine(); + + Pattern p = Pattern.compile("(?<=code=).*?(?=&|$)"); + Matcher m = p.matcher(response); + + if (!m.find()) { + throw new BigQueryJdbcRuntimeException("Could not retrieve the code for user auth"); + } + code = m.group(); + + printWriter.println(USER_AUTH_SUCCESS_HTTP_RESPONSE); + printWriter.flush(); + socket.close(); + serverSocket.close(); + } else { + throw new BigQueryJdbcRuntimeException("User auth only supported in desktop environments"); + } + + return getCredentialsFromCode(userAuthorizer, code, callerClassName); + } catch (IOException | URISyntaxException ex) { + LOG.severe( + String.format( + "Failed to establish connection using User Account authentication: %s", + ex.getMessage())); + throw new BigQueryJdbcRuntimeException(ex); + } + } + + private static GoogleCredentials getPreGeneratedAccessTokenCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + GoogleCredentials.Builder builder = GoogleCredentials.newBuilder(); + if (overrideProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + builder.setUniverseDomain( + overrideProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + LOG.info("Connection established. 
Auth Method: Pre-generated Access Token."); + return builder + .setAccessToken( + AccessToken.newBuilder() + .setTokenValue( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME)) + .build()) + .build(); + } + + static GoogleCredentials getPreGeneratedTokensCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + if (authProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME)) { + try { + return getPreGeneratedRefreshTokenCredentials( + authProperties, overrideProperties, callerClassName); + } catch (URISyntaxException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } else { + return getPreGeneratedAccessTokenCredentials( + authProperties, overrideProperties, callerClassName); + } + } + + static UserCredentials getPreGeneratedRefreshTokenCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) + throws URISyntaxException { + LOG.finest("++enter++\t" + callerClassName); + + UserCredentials.Builder userCredentialsBuilder = + UserCredentials.newBuilder() + .setRefreshToken( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME)) + .setClientId(authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)) + .setClientSecret( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + + if (overrideProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME)) { + userCredentialsBuilder.setTokenServerUri( + new URI(overrideProperties.get(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME))); + } + if (overrideProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + userCredentialsBuilder.setUniverseDomain( + overrideProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + LOG.info("Connection established. Auth Method: Pre-generated Refresh Token."); + return userCredentialsBuilder.build(); + } + + private static GoogleCredentials getApplicationDefaultCredentials(String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + try { + GoogleCredentials credentials = GoogleCredentials.getApplicationDefault(); + String principal = "unknown"; + if (credentials instanceof ServiceAccountCredentials) { + principal = ((ServiceAccountCredentials) credentials).getClientEmail(); + } else if (credentials instanceof UserCredentials) { + principal = "user credentials"; + } else if (credentials instanceof ExternalAccountCredentials) { + principal = "external account"; + } + LOG.info( + String.format( + "Connection established. 
Auth Method: Application Default Credentials, Principal: %s.", + principal)); + return credentials; + } catch (IOException exception) { + // TODO throw exception + throw new BigQueryJdbcRuntimeException("Application default credentials not found."); + } + } + + private static GoogleCredentials getExternalAccountAuthCredentials( + Map authProperties, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + try { + JsonObject jsonObject = null; + String credentialsPath = null; + if (authProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME)) { + String pvtKeyPath = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME).trim(); + if (pvtKeyPath.startsWith("{")) { + jsonObject = JsonParser.parseString(pvtKeyPath).getAsJsonObject(); + } else { + credentialsPath = pvtKeyPath; + } + } else if (authProperties.containsKey( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME)) { + credentialsPath = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME); + } else { + jsonObject = new JsonObject(); + for (String property : BigQueryJdbcUrlUtility.BYOID_PROPERTIES) { + if (Objects.equals( + property, BigQueryJdbcUrlUtility.BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME)) { + jsonObject.add( + BYOID_NAME_MAP.get(property), + JsonParser.parseString(authProperties.get(property)).getAsJsonObject()); + } else if (authProperties.containsKey(property)) { + jsonObject.addProperty(BYOID_NAME_MAP.get(property), authProperties.get(property)); + } + } + if (authProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + jsonObject.addProperty( + "universe_domain", + authProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + } + + if (credentialsPath != null) { + return ExternalAccountCredentials.fromStream( + Files.newInputStream(Paths.get(credentialsPath))); + } else if (jsonObject != null) { + return ExternalAccountCredentials.fromStream( + new ByteArrayInputStream(jsonObject.toString().getBytes())); + } else { + throw new IllegalArgumentException( + "Insufficient info provided for external authentication"); + } + } catch (IOException e) { + throw new BigQueryJdbcRuntimeException(e); + } + } + + // This function checks if connection string contains configuration for + // credentials impersonation. If not, it returns regular credentials object. + // If impersonated service account is provided, returns Credentials object + // accomodating this information. 
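+ // Illustrative only (constant names as defined in BigQueryJdbcUrlUtility): when the property
+ // behind OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME names a target service account such as
+ // "target-sa@my-project.iam.gserviceaccount.com" (a placeholder), the base credentials are
+ // wrapped via ImpersonatedCredentials.create(...) below.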
+ private static GoogleCredentials getServiceAccountImpersonatedCredentials( + GoogleCredentials credentials, Map authProperties) { + + String impersonationEmail = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME); + if (impersonationEmail == null || impersonationEmail.isEmpty()) { + return credentials; + } + + String impersonationChainString = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME); + List impersonationChain = null; + if (impersonationChainString != null && !impersonationChainString.isEmpty()) { + impersonationChain = Arrays.asList(impersonationChainString.split(",")); + } + + // Scopes has a default value, so it should never be null + List impersonationScopes = + Arrays.asList( + authProperties + .get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME) + .split(",")); + + // Token lifetime has a default value, so it should never be null + String impersonationLifetime = + authProperties.get( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME); + int impersonationLifetimeInt = 0; + try { + impersonationLifetimeInt = Integer.parseInt(impersonationLifetime); + } catch (NumberFormatException e) { + LOG.severe("Invalid value for ServiceAccountImpersonationTokenLifetime."); + throw new IllegalArgumentException( + "Invalid value for ServiceAccountImpersonationTokenLifetime: must be a positive integer.", + e); + } + + return ImpersonatedCredentials.create( + credentials, + impersonationEmail, + impersonationChain, + impersonationScopes, + impersonationLifetimeInt); + } + + static PrivateKey privateKeyFromP12File(String privateKeyFile, String password) { + try { + InputStream stream = Files.newInputStream(Paths.get(privateKeyFile)); + return SecurityUtils.loadPrivateKeyFromKeyStore( + SecurityUtils.getPkcs12KeyStore(), stream, "notasecret", "privatekey", password); + } catch (IOException | GeneralSecurityException e) { + LOG.warning("Unable to parse p12 file: " + e.getMessage()); + return null; + } + } + + static PrivateKey privateKeyFromPkcs8(String privateKeyPkcs8) { + try { + Reader reader = new StringReader(privateKeyPkcs8); + PemReader.Section section = readFirstSectionAndClose(reader, "PRIVATE KEY"); + if (section == null) { + throw new IOException("Invalid PKCS#8 data."); + } + byte[] bytes = section.getBase64DecodedBytes(); + PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes); + KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory(); + return keyFactory.generatePrivate(keySpec); + } catch (NoSuchAlgorithmException | InvalidKeySpecException | IOException e) { + LOG.warning("Unable to parse pkcs8 secret: " + e.getMessage()); + return null; + } + } + + enum AuthType { + GOOGLE_SERVICE_ACCOUNT(0), + GOOGLE_USER_ACCOUNT(1), + PRE_GENERATED_TOKEN(2), + APPLICATION_DEFAULT_CREDENTIALS(3), + EXTERNAL_ACCOUNT_AUTH(4); + + private final int value; + + AuthType(int value) { + this.value = value; + } + + static AuthType fromValue(int value) { + for (AuthType authType : values()) { + if (authType.value == value) { + return authType; + } + } + throw new IllegalStateException(OAUTH_TYPE_ERROR_MESSAGE + ": " + value); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java new file mode 100644 index 0000000000..cb11d14e46 --- /dev/null +++ 
b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java @@ -0,0 +1,117 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; + +class BigQueryJdbcParameter { + private int index; + private Object value; + private Class type; + private StandardSQLTypeName sqlType; + // Additional parameters needed for CallableStatement. + private String paramName; + private BigQueryStatementParameterType paramType; + private int scale; + + BigQueryJdbcParameter() {} + + BigQueryJdbcParameter(BigQueryJdbcParameter parameter) { + this.index = parameter.index; + this.value = parameter.value; + this.type = parameter.type; + this.sqlType = parameter.sqlType; + } + + int getIndex() { + return index; + } + + void setIndex(int index) { + this.index = index; + } + + Object getValue() { + return value; + } + + void setValue(Object value) { + this.value = value; + } + + Class getType() { + return type; + } + + public void setType(Class type) { + this.type = type; + } + + StandardSQLTypeName getSqlType() { + return sqlType; + } + + void setSqlType(StandardSQLTypeName sqlType) { + this.sqlType = sqlType; + } + + String getParamName() { + return paramName; + } + + void setParamName(String paramName) { + this.paramName = paramName; + } + + BigQueryStatementParameterType getParamType() { + return paramType; + } + + void setParamType(BigQueryStatementParameterType paramType) { + this.paramType = paramType; + } + + int getScale() { + return scale; + } + + void setScale(int scale) { + this.scale = scale; + } + + @Override + public String toString() { + return "BigQueryJdbcParameter{" + + "index=" + + index + + ", value=" + + value + + ", type=" + + type + + ", sqlType=" + + sqlType + + ", paramName='" + + paramName + + '\'' + + ", paramType=" + + paramType.name() + + ", scale=" + + scale + + '}'; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java new file mode 100644 index 0000000000..ebc5450b62 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java @@ -0,0 +1,291 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.storage.v1.stub.BigQueryReadStubSettings.defaultGrpcTransportProviderBuilder;
+
+import com.google.api.client.http.HttpTransport;
+import com.google.api.client.http.apache.v5.Apache5HttpTransport;
+import com.google.api.gax.rpc.TransportChannelProvider;
+import com.google.auth.http.HttpTransportFactory;
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import com.google.cloud.http.HttpTransportOptions;
+import io.grpc.HttpConnectProxiedSocketAddress;
+import io.grpc.ProxiedSocketAddress;
+import io.grpc.ProxyDetector;
+import io.grpc.netty.shaded.io.grpc.netty.GrpcSslContexts;
+import io.grpc.netty.shaded.io.netty.handler.ssl.SslContext;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.security.GeneralSecurityException;
+import java.security.KeyStore;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Pattern;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.TrustManagerFactory;
+import org.apache.hc.client5.http.auth.AuthScope;
+import org.apache.hc.client5.http.auth.UsernamePasswordCredentials;
+import org.apache.hc.client5.http.impl.DefaultAuthenticationStrategy;
+import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
+import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
+import org.apache.hc.client5.http.impl.classic.HttpClients;
+import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder;
+import org.apache.hc.client5.http.impl.routing.DefaultProxyRoutePlanner;
+import org.apache.hc.client5.http.routing.HttpRoutePlanner;
+import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory;
+import org.apache.hc.core5.http.HttpHost;
+
+final class BigQueryJdbcProxyUtility {
+  private static final BigQueryJdbcCustomLogger LOG =
+      new BigQueryJdbcCustomLogger(BigQueryJdbcProxyUtility.class.getName());
+  static final String validPortRegex =
+      "^([1-9][0-9]{0,3}|[1-5][0-9]{4}|6[0-4][0-9]{3}|65[0-4][0-9]{2}|655[0-2][0-9]|6553[0-5])$";
+
+  private BigQueryJdbcProxyUtility() {}
+
+  static Map<String, String> parseProxyProperties(String URL, String callerClassName) {
+    LOG.finest("++enter++\t" + callerClassName);
+    Map<String, String> proxyProperties = new HashMap<>();
+    String proxyHost =
+        BigQueryJdbcUrlUtility.parseUriProperty(
+            URL, BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME);
+    if (proxyHost != null) {
+      proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, proxyHost);
+    }
+    String proxyPort =
+        BigQueryJdbcUrlUtility.parseUriProperty(
+            URL, BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME);
+    if (proxyPort != null) {
+      if (!Pattern.compile(validPortRegex).matcher(proxyPort).find()) {
+        throw new IllegalArgumentException(
+            String.format(
+                "Illegal port number provided %s. Please provide a valid port number.",
+                proxyPort));
+      }
+      proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, proxyPort);
+    }
+    String proxyUid =
+        BigQueryJdbcUrlUtility.parseUriProperty(
+            URL, BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME);
+    if (proxyUid != null) {
+      proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME, proxyUid);
+    }
+    String proxyPwd =
+        BigQueryJdbcUrlUtility.parseUriProperty(
+            URL, BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME);
+    if (proxyPwd != null) {
+      proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME, proxyPwd);
+    }
+
+    boolean isMissingProxyHostOrPortWhenProxySet =
+        (proxyHost == null && proxyPort != null) || (proxyHost != null && proxyPort == null);
+    if (isMissingProxyHostOrPortWhenProxySet) {
+      throw new IllegalArgumentException(
+          "Both ProxyHost and ProxyPort parameters need to be specified. No defaulting behavior occurs.");
+    }
+    boolean isMissingProxyUidOrPwdWhenAuthSet =
+        (proxyUid == null && proxyPwd != null) || (proxyUid != null && proxyPwd == null);
+    if (isMissingProxyUidOrPwdWhenAuthSet) {
+      throw new IllegalArgumentException(
+          "Both ProxyUid and ProxyPwd parameters need to be specified for authentication.");
+    }
+    boolean isProxyAuthSetWithoutProxySettings = proxyUid != null && proxyHost == null;
+    if (isProxyAuthSetWithoutProxySettings) {
+      throw new IllegalArgumentException(
+          "Proxy authentication provided via connection string with no proxy host or port set.");
+    }
+    return proxyProperties;
+  }
+
+  static HttpTransportOptions getHttpTransportOptions(
+      Map<String, String> proxyProperties,
+      String sslTrustStorePath,
+      String sslTrustStorePassword,
+      String callerClassName) {
+    LOG.finest("++enter++\t" + callerClassName);
+
+    if (!proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME)
+        && sslTrustStorePath == null) {
+      return null;
+    }
+    return HttpTransportOptions.newBuilder()
+        .setHttpTransportFactory(
+            getHttpTransportFactory(
+                proxyProperties, sslTrustStorePath, sslTrustStorePassword, callerClassName))
+        .build();
+  }
+
+  private static HttpTransportFactory getHttpTransportFactory(
+      Map<String, String> proxyProperties,
+      String sslTrustStorePath,
+      String sslTrustStorePassword,
+      String callerClassName) {
+    LOG.finest("++enter++\t" + callerClassName);
+    HttpClientBuilder httpClientBuilder = HttpClients.custom();
+    boolean explicitProxySet =
+        proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME);
+
+    if (explicitProxySet) {
+      HttpHost proxyHostDetails =
+          new HttpHost(
+              proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME),
+              Integer.parseInt(
+                  proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME)));
+      HttpRoutePlanner httpRoutePlanner = new DefaultProxyRoutePlanner(proxyHostDetails);
+      httpClientBuilder.setRoutePlanner(httpRoutePlanner);
+      addAuthToProxyIfPresent(proxyProperties, httpClientBuilder, callerClassName);
+    } else {
+      httpClientBuilder.useSystemProperties();
+    }
+
+    if (sslTrustStorePath != null) {
+      try (FileInputStream trustStoreStream = new FileInputStream(sslTrustStorePath)) {
+        KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
+        char[] trustStorePasswordChars =
+            sslTrustStorePassword != null ? sslTrustStorePassword.toCharArray() : null;
+        trustStore.load(trustStoreStream, trustStorePasswordChars);
+
+        TrustManagerFactory trustManagerFactory =
+            TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
+        trustManagerFactory.init(trustStore);
+
+        SSLContext sslContext = SSLContext.getInstance("TLS");
+        sslContext.init(null, trustManagerFactory.getTrustManagers(), null);
+
+        SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext);
+        httpClientBuilder.setConnectionManager(
+            PoolingHttpClientConnectionManagerBuilder.create()
+                .setSSLSocketFactory(sslSocketFactory)
+                .build());
+      } catch (IOException | GeneralSecurityException e) {
+        throw new BigQueryJdbcRuntimeException(e);
+      }
+    }
+
+    // Proxy credentials, when present, were already attached in the explicitProxySet branch.
+    CloseableHttpClient httpClient = httpClientBuilder.build();
+    final HttpTransport httpTransport = new Apache5HttpTransport(httpClient);
+    return () -> httpTransport;
+  }
+
+  private static void addAuthToProxyIfPresent(
+      Map<String, String> proxyProperties,
+      HttpClientBuilder closeableHttpClientBuilder,
+      String callerClassName) {
+    LOG.finest("++enter++\t" + callerClassName);
+    if (proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME)
+        && proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)) {
+
+      AuthScope authScope =
+          new AuthScope(
+              proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME),
+              Integer.parseInt(
+                  proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME)));
+      UsernamePasswordCredentials usernamePasswordCredentials =
+          new UsernamePasswordCredentials(
+              proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME),
+              proxyProperties
+                  .get(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)
+                  .toCharArray());
+
+      BasicCredentialsProvider proxyCredentialsProvider = new BasicCredentialsProvider();
+      proxyCredentialsProvider.setCredentials(authScope, usernamePasswordCredentials);
+      closeableHttpClientBuilder.setDefaultCredentialsProvider(proxyCredentialsProvider);
+      // Use the default strategy so the proxy's authentication challenge is answered first.
+      closeableHttpClientBuilder.setProxyAuthenticationStrategy(
+          DefaultAuthenticationStrategy.INSTANCE);
+    }
+  }
+
+  static TransportChannelProvider getTransportChannelProvider(
+      Map<String, String> proxyProperties,
+      String sslTrustStorePath,
+      String sslTrustStorePassword,
+      String callerClassName) {
+    LOG.finest("++enter++\t" + callerClassName);
+    boolean hasProxy = proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME);
+    boolean hasSsl = sslTrustStorePath != null;
+
+    if (!hasProxy && !hasSsl) {
+      return null;
+    }
+
+    TransportChannelProvider transportChannelProvider =
+        defaultGrpcTransportProviderBuilder()
+            .setChannelConfigurator(
+                managedChannelBuilder -> {
+                  if (hasProxy) {
+                    managedChannelBuilder.proxyDetector(
+                        new ProxyDetector() {
+                          @Override
+                          public ProxiedSocketAddress proxyFor(SocketAddress socketAddress) {
+                            return getHttpConnectProxiedSocketAddress(
+                                (InetSocketAddress) socketAddress, proxyProperties);
+                          }
+                        });
+                  }
+                  if (hasSsl
+                      && managedChannelBuilder
+                          instanceof io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder) {
+                    try (FileInputStream trustStoreStream =
+                        new FileInputStream(sslTrustStorePath)) {
+                      KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
+                      char[] trustStorePasswordChars =
+                          sslTrustStorePassword != null
+                              ?
sslTrustStorePassword.toCharArray() + : null; + trustStore.load(trustStoreStream, trustStorePasswordChars); + + TrustManagerFactory trustManagerFactory = + TrustManagerFactory.getInstance( + TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(trustStore); + + SslContext grpcSslContext = + GrpcSslContexts.forClient().trustManager(trustManagerFactory).build(); + ((io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder) + managedChannelBuilder) + .sslContext(grpcSslContext); + + } catch (IOException | GeneralSecurityException e) { + throw new BigQueryJdbcRuntimeException(e); + } + } + return managedChannelBuilder; + }) + .build(); + return transportChannelProvider; + } + + private static HttpConnectProxiedSocketAddress getHttpConnectProxiedSocketAddress( + InetSocketAddress socketAddress, Map proxyProperties) { + String proxyHost = proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME); + int proxyPort = + Integer.parseInt(proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME)); + HttpConnectProxiedSocketAddress.Builder builder = + HttpConnectProxiedSocketAddress.newBuilder() + .setProxyAddress(new InetSocketAddress(proxyHost, proxyPort)) + .setTargetAddress(socketAddress); + if (proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME) + && proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)) { + builder.setUsername(proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME)); + builder.setPassword(proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java new file mode 100644 index 0000000000..ef963d87d3 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java @@ -0,0 +1,193 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Optional; +import java.util.logging.ConsoleHandler; +import java.util.logging.FileHandler; +import java.util.logging.Formatter; +import java.util.logging.Handler; +import java.util.logging.Level; +import java.util.logging.LogRecord; +import java.util.logging.Logger; + +/** This class is used to log messages from the BigQuery JDBC Driver. */ +class BigQueryJdbcRootLogger { + + /** + * Note: Each connection will have its own file handler with the level and logPath specified in + * the connection properties. But the logs will be driver logs and not connection specific. 
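+   *
+   * <p>Illustrative only (the URL below is a hypothetical example, not taken from this
+   * patch): logging is driven by the LogLevel and LogPath connection properties, so a URL
+   * ending in {@code ;LogLevel=6;LogPath=/tmp/bq-jdbc-logs} would route FINER-level driver
+   * logs into that directory via {@link #setLevel(java.util.logging.Level, String)}.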
+ */ + private static final Logger logger = Logger.getLogger("com.google.cloud.bigquery"); + + private static final Logger storageLogger = Logger.getLogger("com.google.cloud.bigquery.storage"); + + private static Handler fileHandler = null; + private static Path currentLogPath = null; + private static int fileCounter = 0; + + static { + logger.setUseParentHandlers(false); + storageLogger.setUseParentHandlers(true); + } + + public static Formatter getFormatter() { + return new Formatter() { + private static final String PATTERN = "yyyy-MM-dd HH:mm:ss.SSS"; + private static final String FORMAT = + "%1$s %2$5s %3$d --- [%4$-7.15s] %5$-50s %6$-20s: %7$s%8$s"; + private static final int MAX_THREAD_NAME_LENGTH = 15; + + /** + * Returns the thread for the given thread id. + * + * @param threadId ID for the thread being logged. + * @return returns the thread + */ + Optional getThread(long threadId) { + return Thread.getAllStackTraces().keySet().stream() + .filter(thread -> thread.getId() == threadId) + .findFirst(); + } + + @Override + public String format(LogRecord record) { + String date = new SimpleDateFormat(PATTERN).format(new Date(record.getMillis())); + String threadName = + getThread(record.getThreadID()) + .map(Thread::getName) + .map( + name -> + name.length() > MAX_THREAD_NAME_LENGTH + ? name.substring(name.length() - MAX_THREAD_NAME_LENGTH) + : name) + .orElse(""); + long processId = + Long.parseLong(ManagementFactory.getRuntimeMXBean().getName().split("@")[0]); + String sourceClassName = record.getLoggerName(); + String sourceMethodName = record.getSourceMethodName(); + return String.format( + FORMAT, + date, + record.getLevel().getName(), + processId, + threadName, + sourceClassName, + sourceMethodName, + record.getMessage(), + System.lineSeparator()); + } + }; + } + + public static Logger getRootLogger() { + return logger; + } + + private static void setHandler() throws IOException { + // If Console handler exists, remove it. + // If File handler exists, use it. Else create new one. 
+ for (Handler h : logger.getHandlers()) { + if (h instanceof ConsoleHandler) { + h.close(); + logger.removeHandler(h); + break; + } + if (h instanceof FileHandler) { + fileHandler = h; + break; + } + } + + if (fileHandler == null) { + String fileName = String.format("BigQueryJdbc%d", fileCounter); + fileCounter++; + + currentLogPath = Files.createTempFile(fileName, ".log"); + currentLogPath.toFile().deleteOnExit(); + + fileHandler = new FileHandler(currentLogPath.toString(), 0, 1, true); + logger.addHandler(fileHandler); + } + } + + public static void setLevel(Level level, String logPath) throws IOException { + if (level != Level.OFF) { + setPath(logPath); + if (logger.getHandlers().length == 0) { + setHandler(); + fileHandler.setFormatter(getFormatter()); + logger.setUseParentHandlers(false); + } + fileHandler.setLevel(level); + logger.setLevel(level); + } else { + for (Handler h : logger.getHandlers()) { + h.close(); + logger.removeHandler(h); + } + fileHandler = null; + currentLogPath = null; + } + } + + static void setPath(String logPath) { + try { + if (!logPath.isEmpty() && !logPath.endsWith("/")) { + logPath = logPath + "/"; + } + Path dir = Paths.get(logPath); + if (!Files.exists(dir)) { + Files.createDirectory(dir); + } + + String fileName = String.format("BigQueryJdbc%d.log", fileCounter); + fileCounter++; + Path destination = Paths.get(logPath + fileName).toAbsolutePath(); + + if (currentLogPath != null && !currentLogPath.equals(destination)) { + Path source = Paths.get(currentLogPath.toUri()); + Files.move(source, destination, StandardCopyOption.REPLACE_EXISTING); + } + + currentLogPath = destination; + fileHandler = new FileHandler(currentLogPath.toString(), 0, 1, true); + fileHandler.setFormatter(getFormatter()); + + for (Handler h : logger.getHandlers()) { + if (h instanceof FileHandler) { + h.close(); + logger.removeHandler(h); + break; + } + } + + logger.addHandler(fileHandler); + + } catch (IOException ex) { + logger.warning("Log File warning : " + ex); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java new file mode 100644 index 0000000000..b95ac02302 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java @@ -0,0 +1,159 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonObject; +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Date; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.AbstractMap.SimpleEntry; +import java.util.Map; + +@InternalApi +class BigQueryJdbcTypeMappings { + + static final Map> standardSQLToJavaTypeMapping = + ImmutableMap.ofEntries( + entry(StandardSQLTypeName.INT64, Long.class), + entry(StandardSQLTypeName.BOOL, Boolean.class), + entry(StandardSQLTypeName.FLOAT64, Double.class), + entry(StandardSQLTypeName.NUMERIC, BigDecimal.class), + entry(StandardSQLTypeName.BIGNUMERIC, BigDecimal.class), + entry(StandardSQLTypeName.STRING, String.class), + entry(StandardSQLTypeName.TIMESTAMP, Timestamp.class), + entry(StandardSQLTypeName.DATE, Date.class), + entry(StandardSQLTypeName.TIME, Time.class), + entry(StandardSQLTypeName.DATETIME, Timestamp.class), + entry(StandardSQLTypeName.GEOGRAPHY, String.class), + entry(StandardSQLTypeName.JSON, String.class), + entry(StandardSQLTypeName.INTERVAL, String.class), + entry(StandardSQLTypeName.RANGE, String.class), + entry(StandardSQLTypeName.BYTES, byte[].class), + entry(StandardSQLTypeName.STRUCT, Struct.class), + entry(StandardSQLTypeName.ARRAY, Array.class)); + + static final Map standardSQLToJavaSqlTypesMapping = + ImmutableMap.ofEntries( + entry(StandardSQLTypeName.INT64, Types.BIGINT), + entry(StandardSQLTypeName.BOOL, Types.BOOLEAN), + entry(StandardSQLTypeName.FLOAT64, Types.DOUBLE), + entry(StandardSQLTypeName.NUMERIC, Types.NUMERIC), + entry(StandardSQLTypeName.BIGNUMERIC, Types.NUMERIC), + entry(StandardSQLTypeName.STRING, Types.NVARCHAR), + entry(StandardSQLTypeName.TIMESTAMP, Types.TIMESTAMP), + entry(StandardSQLTypeName.DATE, Types.DATE), + entry(StandardSQLTypeName.TIME, Types.TIME), + entry(StandardSQLTypeName.DATETIME, Types.TIMESTAMP), + entry(StandardSQLTypeName.GEOGRAPHY, Types.OTHER), + entry(StandardSQLTypeName.JSON, Types.OTHER), + entry(StandardSQLTypeName.INTERVAL, Types.OTHER), + entry(StandardSQLTypeName.RANGE, Types.OTHER), + entry(StandardSQLTypeName.BYTES, Types.VARBINARY), + entry(StandardSQLTypeName.STRUCT, Types.STRUCT), + entry(StandardSQLTypeName.ARRAY, Types.ARRAY)); + + static final Map> javaSQLToJavaTypeMapping = + ImmutableMap.ofEntries( + entry(Types.BIGINT, Long.class), + entry(Types.INTEGER, Integer.class), + entry(Types.BOOLEAN, Boolean.class), + entry(Types.DOUBLE, Double.class), + entry(Types.FLOAT, Float.class), + entry(Types.NUMERIC, BigDecimal.class), + entry(Types.VARCHAR, String.class), + entry(Types.NVARCHAR, String.class), + entry(Types.TIMESTAMP, Timestamp.class), + entry(Types.DATE, Date.class), + entry(Types.TIME, Time.class), + entry(Types.OTHER, String.class), + entry(Types.BINARY, byte[].class), + entry(Types.VARBINARY, byte[].class), + entry(Types.STRUCT, Struct.class), + entry(Types.BIT, Boolean.class), + entry(Types.ARRAY, Array.class)); + + static StandardSQLTypeName classToType(Class type) + throws BigQueryJdbcSqlFeatureNotSupportedException { + if (Boolean.class.isAssignableFrom(type)) { + return StandardSQLTypeName.BOOL; + } else if (String.class.isAssignableFrom(type)) { + return StandardSQLTypeName.STRING; + } else if 
(String.class.isAssignableFrom(type)) { + return StandardSQLTypeName.GEOGRAPHY; + } else if (String.class.isAssignableFrom(type)) { + return StandardSQLTypeName.DATETIME; + } else if (Integer.class.isAssignableFrom(type)) { + return StandardSQLTypeName.INT64; + } else if (Long.class.isAssignableFrom(type)) { + return StandardSQLTypeName.INT64; + } else if (Double.class.isAssignableFrom(type)) { + return StandardSQLTypeName.FLOAT64; + } else if (Float.class.isAssignableFrom(type)) { + return StandardSQLTypeName.FLOAT64; + } else if (BigDecimal.class.isAssignableFrom(type)) { + return StandardSQLTypeName.NUMERIC; + } else if (BigDecimal.class.isAssignableFrom(type)) { + return StandardSQLTypeName.BIGNUMERIC; + } else if (Date.class.isAssignableFrom(type)) { + return StandardSQLTypeName.DATE; + } else if (Timestamp.class.isAssignableFrom(type)) { + return StandardSQLTypeName.TIMESTAMP; + } else if (Time.class.isAssignableFrom(type)) { + return StandardSQLTypeName.TIME; + } else if (String.class.isAssignableFrom(type)) { + return StandardSQLTypeName.JSON; + } else if (JsonObject.class.isAssignableFrom(type)) { + return StandardSQLTypeName.JSON; + } else if (Byte.class.isAssignableFrom(type)) { + return StandardSQLTypeName.BYTES; + } else if (Array.class.isAssignableFrom(type)) { + return StandardSQLTypeName.ARRAY; + } else if (Struct.class.isAssignableFrom(type)) { + return StandardSQLTypeName.STRUCT; + } else if (byte[].class.isAssignableFrom(type)) { + return StandardSQLTypeName.BYTES; + } + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported object type for QueryParameter: " + type); + } + + static Class getJavaType(int javaSQLType) throws BigQueryJdbcSqlFeatureNotSupportedException { + if (!javaSQLToJavaTypeMapping.containsKey(javaSQLType)) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported Java type for SQL type: " + javaSQLType); + } + Class javaType = javaSQLToJavaTypeMapping.get(javaSQLType); + if (javaType == null) { + // This should never happen unless the map was initialized with null values. + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported Java type for SQL type: " + javaSQLType); + } + return javaType; + } + + private static SimpleEntry entry(K key, V value) { + return new SimpleEntry<>(key, value); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java new file mode 100644 index 0000000000..3b26f7be54 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java @@ -0,0 +1,920 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.client.util.escape.CharEscapers; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Properties; +import java.util.Set; +import java.util.logging.Level; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * This class implements all the methods that parse Connection property values from the Connection + * String. + */ +final class BigQueryJdbcUrlUtility { + + // TODO: Add all Connection options + static final String ALLOW_LARGE_RESULTS_PROPERTY_NAME = "AllowLargeResults"; + static final String LARGE_RESULTS_TABLE_PROPERTY_NAME = "LargeResultTable"; + static final String LARGE_RESULTS_DATASET_PROPERTY_NAME = "LargeResultDataset"; + static final String UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME = "UnsupportedHTAPIFallback"; + static final boolean DEFAULT_UNSUPPORTED_HTAPI_FALLBACK_VALUE = true; + static final String DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME = + "LargeResultsDatasetExpirationTime"; + static final long DEFAULT_DESTINATION_DATASET_EXPIRATION_TIME_VALUE = 3600000L; + static final boolean DEFAULT_ALLOW_LARGE_RESULTS = true; + static final String QUERY_DIALECT_PROPERTY_NAME = "QueryDialect"; + static final String DEFAULT_QUERY_DIALECT_VALUE = "SQL"; + static final String UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME = "universeDomain"; + static final String DEFAULT_UNIVERSE_DOMAIN_VALUE = "googleapis.com"; + static final String PROJECT_ID_PROPERTY_NAME = "ProjectId"; + static final String DEFAULT_DATASET_PROPERTY_NAME = "DefaultDataset"; + static final String OAUTH_TYPE_PROPERTY_NAME = "OAuthType"; + static final String HTAPI_ACTIVATION_RATIO_PROPERTY_NAME = "HighThroughputActivationRatio"; + static final String KMS_KEY_NAME_PROPERTY_NAME = "KMSKeyName"; + static final String QUERY_PROPERTIES_NAME = "QueryProperties"; + static final int DEFAULT_HTAPI_ACTIVATION_RATIO_VALUE = + 2; // TODO: to adjust this value before private preview based on performance testing. 
+ static final String HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME = "HighThroughputMinTableSize"; + static final int DEFAULT_HTAPI_MIN_TABLE_SIZE_VALUE = 100; + static final int DEFAULT_OAUTH_TYPE_VALUE = -1; + static final String LOCATION_PROPERTY_NAME = "Location"; + static final String ENDPOINT_OVERRIDES_PROPERTY_NAME = "EndpointOverrides"; + static final String PRIVATE_SERVICE_CONNECT_PROPERTY_NAME = "PrivateServiceConnectUris"; + static final String OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME = + "ServiceAccountImpersonationEmail"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_EMAIL_VALUE = null; + static final String OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME = + "ServiceAccountImpersonationChain"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_CHAIN_VALUE = null; + static final String OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME = + "ServiceAccountImpersonationScopes"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_SCOPES_VALUE = + "https://www.googleapis.com/auth/bigquery"; + static final String OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME = + "ServiceAccountImpersonationTokenLifetime"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_VALUE = "3600"; + static final String OAUTH_SA_EMAIL_PROPERTY_NAME = "OAuthServiceAcctEmail"; + static final String OAUTH_PVT_KEY_PATH_PROPERTY_NAME = "OAuthPvtKeyPath"; + static final String OAUTH_P12_PASSWORD_PROPERTY_NAME = "OAuthP12Password"; + static final String DEFAULT_OAUTH_P12_PASSWORD_VALUE = "notasecret"; + static final String OAUTH_PVT_KEY_PROPERTY_NAME = "OAuthPvtKey"; + static final String OAUTH2_TOKEN_URI_PROPERTY_NAME = "OAUTH2"; + static final String HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME = "READ_API"; + static final String BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME = "BIGQUERY"; + static final String STS_ENDPOINT_OVERRIDE_PROPERTY_NAME = "STS"; + static final String OAUTH_ACCESS_TOKEN_PROPERTY_NAME = "OAuthAccessToken"; + static final String OAUTH_REFRESH_TOKEN_PROPERTY_NAME = "OAuthRefreshToken"; + static final String OAUTH_CLIENT_ID_PROPERTY_NAME = "OAuthClientId"; + static final String OAUTH_CLIENT_SECRET_PROPERTY_NAME = "OAuthClientSecret"; + static final String ENABLE_HTAPI_PROPERTY_NAME = "EnableHighThroughputAPI"; + static final String PROXY_HOST_PROPERTY_NAME = "ProxyHost"; + static final String PROXY_PORT_PROPERTY_NAME = "ProxyPort"; + static final String PROXY_USER_ID_PROPERTY_NAME = "ProxyUid"; + static final String PROXY_PASSWORD_PROPERTY_NAME = "ProxyPwd"; + static final boolean DEFAULT_ENABLE_HTAPI_VALUE = false; + static final boolean DEFAULT_ENABLE_SESSION_VALUE = false; + static final int DEFAULT_LOG_LEVEL = 0; + static final String LOG_LEVEL_PROPERTY_NAME = "LogLevel"; + static final String LOG_PATH_PROPERTY_NAME = "LogPath"; + static final String LOG_LEVEL_ENV_VAR = "BIGQUERY_JDBC_LOG_LEVEL"; + static final String LOG_PATH_ENV_VAR = "BIGQUERY_JDBC_LOG_PATH"; + static final String ENABLE_SESSION_PROPERTY_NAME = "EnableSession"; + static final String DEFAULT_LOG_PATH = ""; + static final String USE_QUERY_CACHE_PROPERTY_NAME = "UseQueryCache"; + static final boolean DEFAULT_USE_QUERY_CACHE = true; + static final String JOB_CREATION_MODE_PROPERTY_NAME = "JobCreationMode"; + static final int DEFAULT_JOB_CREATION_MODE = 2; + static final String MAX_RESULTS_PROPERTY_NAME = "MaxResults"; + static final long DEFAULT_MAX_RESULTS_VALUE = 10000; + static final String BYOID_AUDIENCE_URI_PROPERTY_NAME = "BYOID_AudienceUri"; + static final String BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME = 
"BYOID_CredentialSource"; + static final String BYOID_POOL_USER_PROJECT_PROPERTY_NAME = "BYOID_PoolUserProject"; + static final String BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME = "BYOID_SA_Impersonation_Uri"; + static final String BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME = "BYOID_SubjectTokenType"; + static final String BYOID_TOKEN_URI_PROPERTY_NAME = "BYOID_TokenUri"; + static final String PARTNER_TOKEN_PROPERTY_NAME = "PartnerToken"; + static final String METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME = "MetaDataFetchThreadCount"; + static final int DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE = 32; + static final String RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME = "Timeout"; + static final long DEFAULT_RETRY_TIMEOUT_IN_SECS_VALUE = 0L; + static final String JOB_TIMEOUT_PROPERTY_NAME = "JobTimeout"; + static final long DEFAULT_JOB_TIMEOUT_VALUE = 0L; + static final String RETRY_INITIAL_DELAY_PROPERTY_NAME = "RetryInitialDelay"; + static final long DEFAULT_RETRY_INITIAL_DELAY_VALUE = 0L; + static final String RETRY_MAX_DELAY_PROPERTY_NAME = "RetryMaxDelay"; + static final long DEFAULT_RETRY_MAX_DELAY_VALUE = 0L; + static final String ADDITIONAL_PROJECTS_PROPERTY_NAME = "AdditionalProjects"; + // Applicable only for connection pooling. + static final String CONNECTION_POOL_SIZE_PROPERTY_NAME = "ConnectionPoolSize"; + static final long DEFAULT_CONNECTION_POOL_SIZE_VALUE = 10L; + static final String LISTENER_POOL_SIZE_PROPERTY_NAME = "ListenerPoolSize"; + static final long DEFAULT_LISTENER_POOL_SIZE_VALUE = 10L; + static final String ENABLE_WRITE_API_PROPERTY_NAME = "EnableWriteAPI"; + static final boolean DEFAULT_ENABLE_WRITE_API_VALUE = false; + static final String SWA_APPEND_ROW_COUNT_PROPERTY_NAME = "SWA_AppendRowCount"; + static final int DEFAULT_SWA_APPEND_ROW_COUNT_VALUE = 1000; + static final String SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME = "SWA_ActivationRowCount"; + static final int DEFAULT_SWA_ACTIVATION_ROW_COUNT_VALUE = 3; + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJdbcUrlUtility.class.getName()); + static final String FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME = + "FilterTablesOnDefaultDataset"; + static final boolean DEFAULT_FILTER_TABLES_ON_DEFAULT_DATASET_VALUE = false; + static final String REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME = "RequestGoogleDriveScope"; + static final String SSL_TRUST_STORE_PROPERTY_NAME = "SSLTrustStore"; + static final String SSL_TRUST_STORE_PWD_PROPERTY_NAME = "SSLTrustStorePwd"; + static final int DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE = 0; + static final String MAX_BYTES_BILLED_PROPERTY_NAME = "MaximumBytesBilled"; + static final Long DEFAULT_MAX_BYTES_BILLED_VALUE = 0L; + static final String LABELS_PROPERTY_NAME = "Labels"; + static final List OVERRIDE_PROPERTIES = + Arrays.asList( + BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME, + OAUTH2_TOKEN_URI_PROPERTY_NAME, + HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME, + STS_ENDPOINT_OVERRIDE_PROPERTY_NAME); + static final List BYOID_PROPERTIES = + Arrays.asList( + BYOID_AUDIENCE_URI_PROPERTY_NAME, + BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME, + BYOID_POOL_USER_PROJECT_PROPERTY_NAME, + BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME, + BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME, + BYOID_TOKEN_URI_PROPERTY_NAME); + + static Set PROXY_PROPERTIES = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_HOST_PROPERTY_NAME) + .setDescription("The host name of the proxy server.") + .build(), + BigQueryConnectionProperty.newBuilder() 
+ .setName(PROXY_PORT_PROPERTY_NAME) + .setDescription( + "The port number of the proxy server to connect to. No defaulting" + + " behavior happens.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_USER_ID_PROPERTY_NAME) + .setDescription("The user name for an authenticated proxy server.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_PASSWORD_PROPERTY_NAME) + .setDescription("The password for an authenticated proxy server.") + .build()))); + + static Set AUTH_PROPERTIES = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_TYPE_PROPERTY_NAME) + .setDescription( + "This option specifies how the connector obtains or provides the" + + " credentials for OAuth\n" + + "2.0 authentication") + .setDefaultValue(String.valueOf(DEFAULT_OAUTH_TYPE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_EMAIL_PROPERTY_NAME) + .setDescription( + "The Service Account email use for Service Account Authentication.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_PVT_KEY_PATH_PROPERTY_NAME) + .setDescription( + "The location of the credentials file used for this connection.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_PVT_KEY_PROPERTY_NAME) + .setDescription("The OAuth private key used for this connection.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_REFRESH_TOKEN_PROPERTY_NAME) + .setDescription( + "The pre-generated refresh token to be used with BigQuery for" + + " authentication.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_ACCESS_TOKEN_PROPERTY_NAME) + .setDescription( + "The pre-generated access token to be used with BigQuery for" + + " authentication.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_CLIENT_ID_PROPERTY_NAME) + .setDescription( + "The client ID to be used for user authentication or to refresh" + + " pre-generated tokens.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_CLIENT_SECRET_PROPERTY_NAME) + .setDescription( + "The client secret to be used for user authentication or to refresh" + + " pre-generated tokens.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME) + .setDescription("The service account email to be impersonated.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME) + .setDescription( + "Comma separated list of service account emails in the impersonation chain.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME) + .setDescription( + "Comma separated list of OAuth2 scopes to use with impersonated account.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME) + .setDescription("Impersonated account token lifetime.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_P12_PASSWORD_PROPERTY_NAME) + .setDescription("Password for p12 secret file.") + .build()))); + + static Set VALID_PROPERTIES = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + BigQueryConnectionProperty.newBuilder() + .setName(MAX_BYTES_BILLED_PROPERTY_NAME) + .setDescription( + " Limits the bytes billed for this query. Queries with bytes billed above" + + " this limit will fail (without incurring a charge). 
If" + + " unspecified, the project default is used.") + .setDefaultValue(String.valueOf(DEFAULT_MAX_BYTES_BILLED_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(CONNECTION_POOL_SIZE_PROPERTY_NAME) + .setDescription("Connection pool size if connection pooling is enabled.") + .setDefaultValue(String.valueOf(DEFAULT_CONNECTION_POOL_SIZE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LISTENER_POOL_SIZE_PROPERTY_NAME) + .setDescription("Listener pool size if connection pooling is enabled.") + .setDefaultValue(String.valueOf(DEFAULT_LISTENER_POOL_SIZE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(RETRY_INITIAL_DELAY_PROPERTY_NAME) + .setDescription("Initial delay, in seconds, before the first retry.") + .setDefaultValue(String.valueOf(DEFAULT_RETRY_INITIAL_DELAY_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(RETRY_MAX_DELAY_PROPERTY_NAME) + .setDescription("Max limit for the retry delay, in seconds.") + .setDefaultValue(String.valueOf(DEFAULT_RETRY_MAX_DELAY_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME) + .setDescription( + "The length of time, in seconds, for which the connector retries a failed" + + " API call before timing out.") + .setDefaultValue(String.valueOf(DEFAULT_RETRY_TIMEOUT_IN_SECS_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(JOB_TIMEOUT_PROPERTY_NAME) + .setDescription( + "Job timeout (in seconds) after which the job is cancelled on the server") + .setDefaultValue(String.valueOf(DEFAULT_JOB_TIMEOUT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME) + .setDescription( + "This option determines whether the connector uses the REST API or" + + " returns an error when encountering fetch workflows unsupported by" + + " the High-Throughput API.") + .setDefaultValue(String.valueOf(DEFAULT_UNSUPPORTED_HTAPI_FALLBACK_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME) + .setDescription( + "The expiration time (in milliseconds) for tables in a user-specified" + + " large result dataset.") + .setDefaultValue( + String.valueOf(DEFAULT_DESTINATION_DATASET_EXPIRATION_TIME_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME) + .setDescription( + "The name of the partner-operated cloud which is a new instance of Google" + + " production, known as a Trusted Partner Cloud universe.") + .setDefaultValue(DEFAULT_UNIVERSE_DOMAIN_VALUE) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROJECT_ID_PROPERTY_NAME) + .setDescription("A globally unique identifier for your project.") + .setDefaultValue(BigQueryOptions.getDefaultProjectId()) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LOG_PATH_PROPERTY_NAME) + .setDescription( + "The directory where the connector saves log files (when logging is" + + " enabled).") + .setDefaultValue(DEFAULT_LOG_PATH) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(DEFAULT_DATASET_PROPERTY_NAME) + .setDescription( + "This default dataset for query execution. If this option is set, queries" + + " with unqualified \n" + + "table names will run against this dataset.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LOCATION_PROPERTY_NAME) + .setDescription( + "The location where datasets are created/queried. 
The location will be" + + " determined\n" + + " automatically by BigQuery if not specified.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ENABLE_HTAPI_PROPERTY_NAME) + .setDescription( + "Enables or disables Read API usage in the Driver. Disabled by default.") + .setDefaultValue(String.valueOf(DEFAULT_ENABLE_HTAPI_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(HTAPI_ACTIVATION_RATIO_PROPERTY_NAME) + .setDescription( + "Connector switches to BigQuery Storage API when the number of pages" + + " exceed this value.") + .setDefaultValue(String.valueOf(DEFAULT_HTAPI_ACTIVATION_RATIO_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(KMS_KEY_NAME_PROPERTY_NAME) + .setDescription( + "The KMS key name tells BigQuery which key to use when encrypting or" + + " decrypting your data.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(QUERY_PROPERTIES_NAME) + .setDescription( + "Connection-level properties to customize query behavior.") // TODO: + // Figure out + // a clean way + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LABELS_PROPERTY_NAME) + .setDescription( + "Labels associated with the query to organize and group query jobs.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME) + .setDescription( + "If the number of total rows exceeds this value, the connector switches" + + " to the BigQuery Storage API for faster processing.") + .setDefaultValue(String.valueOf(DEFAULT_HTAPI_MIN_TABLE_SIZE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ENABLE_SESSION_PROPERTY_NAME) + .setDescription( + "Enable to capture your SQL activities or enable multi statement" + + " transactions. Disabled by default.") + .setDefaultValue(String.valueOf(DEFAULT_ENABLE_SESSION_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LOG_LEVEL_PROPERTY_NAME) + .setDescription( + "Sets the Log Level for the Driver. Set to Level.OFF by default.") + .setDefaultValue(String.valueOf(DEFAULT_LOG_LEVEL)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(USE_QUERY_CACHE_PROPERTY_NAME) + .setDescription("Enables or disables Query caching. Set to true by default.") + .setDefaultValue(String.valueOf(DEFAULT_USE_QUERY_CACHE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(QUERY_DIALECT_PROPERTY_NAME) + .setDescription( + "Parameter for selecting if the queries should use standard or legacy SQL" + + " syntax.") + .setDefaultValue(DEFAULT_QUERY_DIALECT_VALUE) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ALLOW_LARGE_RESULTS_PROPERTY_NAME) + .setDescription( + "Enabled by default, must be used with legacy SQL. Used for setting" + + " destination table & dataset.") + .setDefaultValue(String.valueOf(DEFAULT_ALLOW_LARGE_RESULTS)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LARGE_RESULTS_TABLE_PROPERTY_NAME) + .setDescription("The destination table where queries are saved.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LARGE_RESULTS_DATASET_PROPERTY_NAME) + .setDescription("The destination dataset where queries are saved.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(JOB_CREATION_MODE_PROPERTY_NAME) + .setDescription( + "Enables or disables Stateless Query mode. 
Set to false by default.") + .setDefaultValue(String.valueOf(DEFAULT_JOB_CREATION_MODE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(MAX_RESULTS_PROPERTY_NAME) + .setDescription("Maximum number of results per page") + .setDefaultValue(String.valueOf(DEFAULT_MAX_RESULTS_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_AUDIENCE_URI_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. Corresponds to the audience" + + " property\n" + + " in the external account configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The file location or the URI" + + " of\n" + + " the subject token. Corresponds to the credential_source property" + + " in\n" + + " the external account configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_POOL_USER_PROJECT_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The project number associated" + + " with\n" + + " the workforce pool. Corresponds to the" + + " workforce_pool_user_project\n" + + " property in the external account configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The service account email." + + " Only\n" + + " present when service account impersonation is used. Corresponds" + + " to\n" + + " the service_account_impersonation_url property in the external" + + " account\n" + + " configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The subject token type." + + " Corresponds\n" + + " to the subject_token_type property in the external account" + + " configuration file.") + .setDefaultValue("urn:ietf:params:oauth:tokentype:id_token") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_TOKEN_URI_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The URI used to generate" + + " authentication\n" + + " tokens. Corresponds to the token_url property in the external" + + " account\n" + + " configuration file.") + .setDefaultValue("https://sts.googleapis.com/v1/token") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PARTNER_TOKEN_PROPERTY_NAME) + .setDescription("The partner name and environment.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME) + .setDescription( + "The number of threads used to call a DatabaseMetaData method.") + .setDefaultValue(String.valueOf(DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ENABLE_WRITE_API_PROPERTY_NAME) + .setDescription( + "Enables or disables Write API usage for bulk inserts in the Driver." + + " Disabled by default.") + .setDefaultValue(String.valueOf(DEFAULT_ENABLE_WRITE_API_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME) + .setDescription( + "Connector switches to BigQuery Storage Write API when the number of rows" + + " for executeBatch insert exceed this value. 
Do not change unless" + + " necessary.") + .setDefaultValue(String.valueOf(DEFAULT_SWA_ACTIVATION_ROW_COUNT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SWA_APPEND_ROW_COUNT_PROPERTY_NAME) + .setDescription("Size of the write stream. Do not change unless necessary.") + .setDefaultValue(String.valueOf(DEFAULT_SWA_APPEND_ROW_COUNT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ADDITIONAL_PROJECTS_PROPERTY_NAME) + .setDescription( + "A comma-separated list of Google Cloud project IDs that can be accessed" + + " for querying, in addition to the primary project specified in the" + + " connection.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME) + .setDescription( + "If true and DefaultDataset is set, DatabaseMetaData.getTables() and" + + " .getColumns() will filter results based on the DefaultDataset" + + " when catalog/schema patterns are null or wildcards.") + .setDefaultValue( + String.valueOf(DEFAULT_FILTER_TABLES_ON_DEFAULT_DATASET_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME) + .setDescription( + "Enables or disables whether the connector requests access to Google" + + " Drive. Set to false (0) by default.") + .setDefaultValue(String.valueOf(DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SSL_TRUST_STORE_PROPERTY_NAME) + .setDescription( + "The full path of the Java TrustStore containing the server certificate" + + " for one-way SSL authentication.\n" + + "If the trust store requires a password, provide it using the" + + " property SSLTrustStorePwd.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SSL_TRUST_STORE_PWD_PROPERTY_NAME) + .setDescription( + "The password for accessing the Java TrustStore that is specified using" + + " the property SSLTrustStore.") + .build()))); + + private BigQueryJdbcUrlUtility() {} + + /** + * Parses a URI property from the given URI. + * + * @param uri The URI to parse. + * @param property The name of the property to parse. + * @return The String value of the property, or the default value if the property is not found. + */ + static String parseUriProperty(String uri, String property) { + Pattern pattern = Pattern.compile(String.format("(?is)(?:;|\\?)%s=(.*?)(?:;|$)", property)); + Matcher matcher = pattern.matcher(uri); + if (matcher.find() && matcher.groupCount() == 1) { + return CharEscapers.decodeUriPath(matcher.group(1)); + } + return null; + } + + /** + * Appends the given properties to the given URL. + * + * @param url The URL to append the properties to. + * @param properties The properties to append. + * @return The string value of the updated URL. 
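+   *     <p>Example (hypothetical values): appending a property {@code MaxResults=500} to
+   *     {@code jdbc:bigquery://host/;ProjectId=my-project} yields
+   *     {@code jdbc:bigquery://host/;ProjectId=my-project;MaxResults=500}; each non-empty
+   *     entry is appended as {@code ;key=value}.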
+ */ + static String appendPropertiesToURL(String url, String callerClassName, Properties properties) { + LOG.finest("++enter++ " + callerClassName); + StringBuilder urlBuilder = new StringBuilder(url); + for (Entry entry : properties.entrySet()) { + if (entry.getValue() != null && !"".equals(entry.getValue())) { + LOG.info( + String.format("Appending %s with value %s to URL", entry.getKey(), entry.getValue())); + urlBuilder.append(";").append(entry.getKey()).append("=").append(entry.getValue()); + } + } + return urlBuilder.toString(); + } + + static boolean convertIntToBoolean(String value, String propertyName) { + int integerValue; + + try { + if (value.equalsIgnoreCase("true")) { + integerValue = 1; + } else if (value.equalsIgnoreCase("false")) { + integerValue = 0; + } else { + integerValue = Integer.parseInt(value); + } + + } catch (NumberFormatException ex) { + throw new IllegalArgumentException( + String.format( + "Invalid value for %s. For Boolean connection properties, use 0 for false and 1 for" + + " true.", + propertyName), + ex); + } + if (integerValue == 1) { + return true; + } else if (integerValue == 0) { + return false; + } else { + throw new IllegalArgumentException( + String.format( + "Invalid value for %s. For Boolean connection properties, use 0 for false and 1 for" + + " true.", + propertyName)); + } + } + + // todo just make it a map + static Map parseQueryProperties(String url, String callerClassName) { + return parsePropertiesMap(url, QUERY_PROPERTIES_NAME, callerClassName); + } + + static Map parseLabels(String url, String callerClassName) { + return parsePropertiesMap(url, LABELS_PROPERTY_NAME, callerClassName); + } + + static String parseStringProperty( + String url, String propertyName, String defaultValue, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String parsedValue = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName); + if (parsedValue != null) { + return parsedValue; + } + return defaultValue; + } + + static List parseStringListProperty( + String url, String propertyName, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String rawValue = parseStringProperty(url, propertyName, null, callerClassName); + if (rawValue == null || rawValue.trim().isEmpty()) { + return Collections.emptyList(); + } + return Arrays.stream(rawValue.split(",")) + .map(String::trim) + .filter(s -> !s.isEmpty()) + .collect(Collectors.toList()); + } + + public static String parsePartnerTokenProperty(String url, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + // This property is expected to be set by partners only. 
For more details on exact format + // supported, refer b/396086960 + String regex = + PARTNER_TOKEN_PROPERTY_NAME + "=\\s*\\(\\s*(GPN:[^;]*?)\\s*(?:;\\s*([^)]*?))?\\s*\\)"; + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(url); + + if (matcher.find()) { + String gpnPart = matcher.group(1); + String environmentPart = matcher.group(2); + StringBuilder partnerToken = new StringBuilder(" ("); + partnerToken.append(gpnPart); + if (environmentPart != null && !environmentPart.trim().isEmpty()) { + partnerToken.append("; "); + partnerToken.append(environmentPart); + } + partnerToken.append(")"); + return partnerToken.toString(); + } + return null; + } + + static Integer parseIntProperty( + String url, String propertyName, Integer defaultValue, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String parsedValue = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName); + if (parsedValue != null) { + try { + return Integer.parseInt(parsedValue); + } catch (NumberFormatException e) { + LOG.severe( + String.format( + "Invalid integer value '%s' for property '%s'. Please provide a valid integer.", + parsedValue, propertyName)); + throw new IllegalArgumentException( + String.format("Invalid integer value for property '%s': %s", propertyName, parsedValue), + e); + } + } + return defaultValue; + } + + static Long parseLongProperty( + String url, String propertyName, Long defaultValue, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String parsedValue = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName); + if (parsedValue != null) { + return Long.parseLong(parsedValue); + } + return defaultValue; + } + + static Boolean parseBooleanProperty( + String url, String propertyName, Boolean defaultValue, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String parsedValue = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName); + if (parsedValue != null) { + return convertIntToBoolean(parsedValue, propertyName); + } + return defaultValue; + } + + public static Level parseLogLevel(String logLevelString) { + int logLevel = logLevelString != null ? 
Integer.parseInt(logLevelString) : DEFAULT_LOG_LEVEL; + switch (logLevel) { + case 8: + return Level.ALL; + case 7: + return Level.FINEST; + case 6: + return Level.FINER; + case 5: + return Level.FINE; + case 4: + return Level.CONFIG; + case 3: + return Level.INFO; + case 2: + return Level.WARNING; + case 1: + return Level.SEVERE; + case 0: + default: + LOG.info( + String.format( + "%s value not provided, defaulting to %s.", LOG_LEVEL_PROPERTY_NAME, Level.OFF)); + return Level.OFF; + } + } + + static Map parseOverrideProperties(String url, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + Map overrideProps = new HashMap<>(); + Pattern pattern = + Pattern.compile( + String.format( + "(?is)(%s|%s)=([^;]+)", + ENDPOINT_OVERRIDES_PROPERTY_NAME, PRIVATE_SERVICE_CONNECT_PROPERTY_NAME)); + Matcher matcher = pattern.matcher(url); + String overridePropertiesString; + if (matcher.find() && matcher.groupCount() >= 1) { + overridePropertiesString = matcher.group(2); + } else { + return overrideProps; + } + for (String property : OVERRIDE_PROPERTIES) { + Pattern propertyPattern = Pattern.compile(String.format("(?i)%s=(.*?)(?:[,;]|$)", property)); + Matcher propertyMatcher = propertyPattern.matcher(overridePropertiesString); + if (propertyMatcher.find() && propertyMatcher.groupCount() >= 1) { + overrideProps.put(property, propertyMatcher.group(1)); + } + } + return overrideProps; + } + + public static boolean parseJobCreationMode(String url, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + String jobCreationMode = + BigQueryJdbcUrlUtility.parseUriProperty(url, JOB_CREATION_MODE_PROPERTY_NAME); + + if (jobCreationMode == null) { + LOG.fine( + String.format( + "%s value not provided, defaulting to %s. Caller: %s", + JOB_CREATION_MODE_PROPERTY_NAME, DEFAULT_JOB_CREATION_MODE, callerClassName)); + // Default Job creation mode is JOB_CREATION_OPTIONAL(2) + // which translates to options.setQueryPreviewEnabled(true) + return true; + } + if (jobCreationMode.equalsIgnoreCase("1")) { + return false; + } else if (jobCreationMode.equalsIgnoreCase("2")) { + return true; + } else { + throw new NumberFormatException( + String.format( + "Invalid value for %s. 
Use 1 for JOB_CREATION_REQUIRED and 2 for" + + " JOB_CREATION_OPTIONAL.", + JOB_CREATION_MODE_PROPERTY_NAME)); + } + } + + public static String parseBYOIDProperty(String url, String property, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + String value = BigQueryJdbcUrlUtility.parseUriProperty(url, property); + String defaultValue = BigQueryJdbcUrlUtility.getConnectionPropertyDefaultValue(property); + if (value != null) { + return value; + } else if (defaultValue != null) { + return defaultValue; + } + return null; + } + + public static String getConnectionPropertyDefaultValue(String propertyName) { + // TODO: change how we store properties because this method has to go through all of them + for (BigQueryConnectionProperty property : VALID_PROPERTIES) { + if (property.getName().equals(propertyName)) { + return property.getDefaultValue(); + } + } + return null; + } + + public static long parseRetryTimeoutInSecs(String url, String callerClassName) { + return BigQueryJdbcUrlUtility.parseLongProperty( + url, + RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME, + DEFAULT_RETRY_TIMEOUT_IN_SECS_VALUE, + callerClassName); + } + + public static long parseJobTimeout(String url, String callerClassName) { + return parseLongProperty( + url, JOB_TIMEOUT_PROPERTY_NAME, DEFAULT_JOB_TIMEOUT_VALUE, callerClassName); + } + + public static long parseRetryInitialDelayInSecs(String url, String callerClassName) { + return BigQueryJdbcUrlUtility.parseLongProperty( + url, RETRY_INITIAL_DELAY_PROPERTY_NAME, DEFAULT_RETRY_INITIAL_DELAY_VALUE, callerClassName); + } + + public static long parseRetryMaxDelayInSecs(String url, String callerClassName) { + return BigQueryJdbcUrlUtility.parseLongProperty( + url, RETRY_MAX_DELAY_PROPERTY_NAME, DEFAULT_RETRY_MAX_DELAY_VALUE, callerClassName); + } + + // Convenience Helper Methods + public static long parseConnectionPoolSize(String url, String callerClassName) { + if (url == null || url.isEmpty()) { + throw new BigQueryJdbcRuntimeException("Connection url is empty"); + } + return parseLongProperty( + url, + CONNECTION_POOL_SIZE_PROPERTY_NAME, + DEFAULT_CONNECTION_POOL_SIZE_VALUE, + callerClassName); + } + + public static long parseListenerPoolSize(String url, String callerClassName) { + if (url == null || url.isEmpty()) { + throw new BigQueryJdbcRuntimeException("Connection url is empty"); + } + return parseLongProperty( + url, LISTENER_POOL_SIZE_PROPERTY_NAME, DEFAULT_LISTENER_POOL_SIZE_VALUE, callerClassName); + } + + public static long parseMaximumBytesBilled(String url, String callerClassName) { + if (url == null || url.isEmpty()) { + throw new BigQueryJdbcRuntimeException("Connection url is empty"); + } + return parseLongProperty( + url, MAX_BYTES_BILLED_PROPERTY_NAME, DEFAULT_MAX_BYTES_BILLED_VALUE, callerClassName); + } + + private static Map parsePropertiesMap( + String url, String propertyName, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String propertiesString = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName); + if (propertiesString == null || propertiesString.isEmpty()) { + LOG.fine(String.format("Unable to parse property name: %s from url: %s", propertyName, url)); + return null; + } + Map propertiesMap = new HashMap<>(); + String[] keyValuePairs = propertiesString.split(","); + + for (String keyValuePair : keyValuePairs) { + String[] parts = keyValuePair.split("="); + if (parts.length == 2) { + propertiesMap.put(parts[0], parts[1]); + } else { + LOG.warning( + String.format( + "Invalid KeyValue pair: %s 
found in url: %s for property name: %s", + keyValuePair, url, propertyName)); + } + } + return propertiesMap; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java new file mode 100644 index 0000000000..3b557a15a7 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java @@ -0,0 +1,105 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryFieldValueListWrapper.getNestedFieldValueListWrapper; + +import com.google.api.core.InternalApi; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.Schema; +import java.sql.ResultSet; +import java.util.List; + +/** An implementation of {@link BigQueryBaseArray} used to represent Array values from Json data. */ +@InternalApi +class BigQueryJsonArray extends BigQueryBaseArray { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJsonArray.class.getName()); + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + private List values; + + BigQueryJsonArray(Field schema, FieldValue values) { + super(schema); + this.values = (values == null || values.isNull()) ? 
null : values.getRepeatedValue(); + } + + @Override + public Object getArray() { + ensureValid(); + LOG.finest("++enter++"); + if (this.values == null) { + return null; + } + return getArrayInternal(0, this.values.size()); + } + + @Override + public Object getArray(long index, int count) { + ensureValid(); + LOG.finest("++enter++"); + if (this.values == null) { + return null; + } + Tuple range = createRange(index, count, this.values.size()); + return getArrayInternal(range.x(), range.y()); + } + + @Override + public ResultSet getResultSet() { + ensureValid(); + LOG.finest("++enter++"); + if (this.values == null) { + return new BigQueryJsonResultSet(); + } + BigQueryFieldValueListWrapper bigQueryFieldValueListWrapper = + getNestedFieldValueListWrapper(FieldList.of(singleElementSchema()), this.values); + return BigQueryJsonResultSet.getNestedResultSet( + Schema.of(this.schema), bigQueryFieldValueListWrapper, 0, this.values.size()); + } + + @Override + public ResultSet getResultSet(long index, int count) { + ensureValid(); + LOG.finest("++enter++"); + if (this.values == null) { + return new BigQueryJsonResultSet(); + } + Tuple range = createRange(index, count, this.values.size()); + BigQueryFieldValueListWrapper bigQueryFieldValueListWrapper = + getNestedFieldValueListWrapper(FieldList.of(singleElementSchema()), this.values); + return BigQueryJsonResultSet.getNestedResultSet( + Schema.of(this.schema), bigQueryFieldValueListWrapper, range.x(), range.y()); + } + + @Override + public void free() { + this.values = null; + markInvalid(); + } + + @Override + Object getCoercedValue(int index) { + FieldValue fieldValue = this.values.get(index); + return this.arrayOfStruct + ? new BigQueryJsonStruct(this.schema.getSubFields(), fieldValue) + : BIGQUERY_TYPE_COERCER.coerceTo(getTargetClass(), fieldValue); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java new file mode 100644 index 0000000000..f9d7b11538 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java @@ -0,0 +1,317 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray; +import static com.google.cloud.bigquery.jdbc.BigQueryBaseStruct.isStruct; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.concurrent.BlockingQueue; + +/** {@link ResultSet} Implementation for JSON datasource (Using REST APIs) */ +class BigQueryJsonResultSet extends BigQueryBaseResultSet { + private final long totalRows; + private final BlockingQueue buffer; + private boolean hasReachedEnd = false; + // Points to the current record + private BigQueryFieldValueListWrapper cursor; + // Tracks the index of the nested element under process + private int nestedRowIndex; + private long rowCnt = 0; + private boolean afterLast = false; + private final int fromIndex; + private final int toIndexExclusive; + private final Thread[] ownedThreads; + + private BigQueryJsonResultSet( + Schema schema, + long totalRows, + BlockingQueue buffer, + BigQueryStatement statement, + boolean isNested, + BigQueryFieldValueListWrapper cursor, + int fromIndex, + int toIndexExclusive, + Thread[] ownedThreads, + BigQuery bigQuery) { + super(bigQuery, statement, schema, isNested); + this.totalRows = totalRows; + this.buffer = buffer; + this.cursor = cursor; + this.fromIndex = fromIndex; + this.toIndexExclusive = toIndexExclusive; + this.nestedRowIndex = fromIndex - 1; + this.ownedThreads = ownedThreads; + } + + /** + * This method returns an instance of BigQueryJsonResultSet after adding it in the list of + * JsonResultSetFinalizer + * + * @return BigQueryJsonResultSet + */ + static BigQueryJsonResultSet of( + Schema schema, + long totalRows, + BlockingQueue buffer, + BigQueryStatement statement, + Thread[] ownedThreads, + BigQuery bigQuery) { + + return new BigQueryJsonResultSet( + schema, totalRows, buffer, statement, false, null, -1, -1, ownedThreads, bigQuery); + } + + static BigQueryJsonResultSet of( + Schema schema, + long totalRows, + BlockingQueue buffer, + BigQueryStatement statement, + Thread[] ownedThreads) { + + return new BigQueryJsonResultSet( + schema, totalRows, buffer, statement, false, null, -1, -1, ownedThreads, null); + } + + BigQueryJsonResultSet() { + super(null, null, null, false); + totalRows = 0; + buffer = null; + fromIndex = 0; + ownedThreads = new Thread[0]; + toIndexExclusive = 0; + } + + // + + /** + * Wrapper method which can be used for initialising the instance of BigQueryJsonResultSet for the + * nested Records + * + * @param schema Table schema + * @param cursor Points to the current record + * @param fromIndex starting index under consideration + * @param toIndexExclusive last index under consideration + * @return The BigQueryJsonResultSet + */ + static BigQueryJsonResultSet getNestedResultSet( + Schema schema, BigQueryFieldValueListWrapper cursor, int fromIndex, int toIndexExclusive) { + return new BigQueryJsonResultSet( + schema, + -1, + null, + null, /* statement will be null in case of nested java.sql.Result. */ + true, + cursor, + fromIndex, + toIndexExclusive, + null, + null); + } + + /* Advances the result set to the next row, returning false if no such row exists. 
Potentially blocking operation */ + public boolean next() throws SQLException { + checkClosed(); + if (this.isNested) { + // We are working with the nested record, the cursor would have been + // populated. + if (this.cursor == null || this.cursor.getArrayFieldValueList() == null) { + throw new IllegalStateException( + "Cursor/ArrayFieldValueList can not be null working with the nested record"); + } + // Check if there's a next record in the array which can be read + if (this.nestedRowIndex < (this.toIndexExclusive - 1)) { + this.nestedRowIndex++; + return true; + } + this.afterLast = true; + return false; + + } else { + // If end of stream is reached or we are past the last row i.e + // rowcnt == totalRows (rowcnt starts at 0) + // then we can simply return false + if (this.hasReachedEnd || this.isLast()) { + this.afterLast = true; + return false; + } + try { + // Advance the cursor,Potentially blocking operation + this.cursor = this.buffer.take(); + this.rowCnt++; + // Check for end of stream + if (this.cursor.isLast()) { + this.cursor = null; + this.hasReachedEnd = true; + return false; + } + // Cursor has been advanced + return true; + + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException( + "Error occurred while advancing the cursor. This could happen when connection is closed while we call the next method", + ex); + } + } + } + + @Override + public Object getObject(int columnIndex) throws SQLException { + // columnIndex is SQL index starting at 1 + checkClosed(); + LOG.finest("++enter++"); + FieldValue value = getObjectInternal(columnIndex); + if (value == null || value.isNull()) { + return null; + } + + if (this.isNested && columnIndex == 1) { + return this.bigQueryTypeCoercer.coerceTo(Integer.class, value); + } + + if (this.isNested && columnIndex == 2) { + Field arrayField = this.schema.getFields().get(0); + if (isStruct(arrayField)) { + return new BigQueryJsonStruct(arrayField.getSubFields(), value); + } + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + arrayField.getType().getStandardType()); + return this.bigQueryTypeCoercer.coerceTo(targetClass, value); + } + + int extraIndex = this.isNested ? 2 : 1; + Field fieldSchema = this.schemaFieldList.get(columnIndex - extraIndex); + if (isArray(fieldSchema)) { + return new BigQueryJsonArray(fieldSchema, value); + } else if (isStruct(fieldSchema)) { + return new BigQueryJsonStruct(fieldSchema.getSubFields(), value); + } else { + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + fieldSchema.getType().getStandardType()); + return this.bigQueryTypeCoercer.coerceTo(targetClass, value); + } + } + + /** + * This method will be called by every other getter of this {@link java.sql.ResultSet}, including + * {@link #getObject(int)} to get the value in its rawest form i.e. {@link FieldValue} to coerce + * it further as required. + * + * @param columnIndex the first column is 1, the second is 2, ... + * @return an instance of {@link FieldValue} represents value at columnIndex column. 
+ */ + private FieldValue getObjectInternal(int columnIndex) throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + FieldValue value; + if (this.isNested) { + boolean validIndexForNestedResultSet = columnIndex == 1 || columnIndex == 2; + // BigQuery doesn't support multidimensional arrays, so just the default row + // num column (1) and the actual column (2) is supposed to be read + if (!validIndexForNestedResultSet) { + throw new IllegalArgumentException( + "Column index is required to be 1 or 2 for the nested arrays"); + } + if (this.cursor.getArrayFieldValueList() == null + || this.cursor.getArrayFieldValueList().get(this.nestedRowIndex) == null) { + throw new IllegalStateException("ArrayFieldValueList cannot be null"); + } + + // For Arrays the first column is Index, ref: + // https://docs.oracle.com/javase/7/docs/api/java/sql/Array.html#getResultSet() + if (columnIndex == 1) { + return FieldValue.of(Attribute.PRIMITIVE, Integer.toString(this.nestedRowIndex + 1)); + } else { + // columnIndex = 2 + // This ignores the columnIndex, as there's just one column, and we have already incremented + // the nestedRowIndex + value = this.cursor.getArrayFieldValueList().get(this.nestedRowIndex); + } + } + // non nested, return the value + else { + // SQL Index to 0 based index + value = this.cursor.getFieldValueList().get(columnIndex - 1); + } + setWasNull(value.getValue()); + return value; + } + + @Override + public void close() { + LOG.fine(String.format("Closing BigqueryJsonResultSet %s.", this)); + this.isClosed = true; + if (ownedThreads != null) { + for (Thread ownedThread : ownedThreads) { + if (!ownedThread.isInterrupted()) { + ownedThread.interrupt(); + } + } + } + super.close(); + } + + @Override + public boolean isBeforeFirst() throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + if (this.isNested) { + return this.nestedRowIndex < this.fromIndex; + } else { + return this.cursor == null && this.rowCnt == 0; + } + } + + @Override + public boolean isAfterLast() throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + return this.afterLast; + } + + @Override + public boolean isFirst() throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + if (this.isNested) { + return this.nestedRowIndex == this.fromIndex; + } else { + return this.rowCnt == 1; + } + } + + @Override + public boolean isLast() throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + if (this.isNested) { + return this.nestedRowIndex == this.toIndexExclusive - 1; + } else { + return this.rowCnt == this.totalRows; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java new file mode 100644 index 0000000000..35217f8e71 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java @@ -0,0 +1,80 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray; + +import com.google.api.core.InternalApi; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import java.lang.reflect.Array; +import java.util.List; + +/** + * An implementation of {@link BigQueryBaseStruct} used to represent Struct values from Json data. + */ +@InternalApi +class BigQueryJsonStruct extends BigQueryBaseStruct { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJsonStruct.class.getName()); + + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + + private final FieldList schema; + private final List values; + + public BigQueryJsonStruct(FieldList schema, FieldValue values) { + this.schema = schema; + this.values = (values == null || values.isNull()) ? null : values.getRecordValue(); + } + + @Override + FieldList getSchema() { + return this.schema; + } + + @Override + public Object[] getAttributes() { + LOG.finest("++enter++"); + int size = schema.size(); + Object[] attributes = (Object[]) Array.newInstance(Object.class, size); + + for (int index = 0; index < size; index++) { + Field currentSchema = schema.get(index); + FieldValue currentValue = values == null ? null : values.get(index); + Object coercedValue = getValue(currentSchema, currentValue); + Array.set(attributes, index, coercedValue); + } + return attributes; + } + + private Object getValue(Field currentSchema, FieldValue currentValue) { + LOG.finest("++enter++"); + if (isArray(currentSchema)) { + return new BigQueryJsonArray(currentSchema, currentValue); + } else if (isStruct(currentSchema)) { + return new BigQueryJsonStruct(currentSchema.getSubFields(), currentValue); + } else { + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + currentSchema.getType().getStandardType()); + return BIGQUERY_TYPE_COERCER.coerceTo(targetClass, currentValue); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java new file mode 100644 index 0000000000..1804cc14c4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java @@ -0,0 +1,191 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.METHOD_NOT_IMPLEMENTED; + +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.Array; +import java.sql.Blob; +import java.sql.CallableStatement; +import java.sql.Clob; +import java.sql.Connection; +import java.sql.NClob; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Savepoint; +import java.sql.Struct; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.Executor; + +/** NoOps Abstract base class for BigQuery JDBC Connection. */ +abstract class BigQueryNoOpsConnection implements Connection { + + @Override + public CallableStatement prepareCall(String sql) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String nativeSQL(String sql) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isWrapperFor(Class iface) { + return false; + } + + @Override + public boolean isReadOnly() { + return false; + } + + @Override + public void setReadOnly(boolean readOnly) {} + + @Override + public void setCatalog(String catalog) {} + + // TODO: post MVP feature + + @Override + public Map> getTypeMap() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setTypeMap(Map> map) {} + + @Override + public Savepoint setSavepoint() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Savepoint setSavepoint(String name) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void rollback(Savepoint savepoint) {} + + @Override + public void releaseSavepoint(Savepoint savepoint) {} + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Clob createClob() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Blob createBlob() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + 
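+  // Usage sketch (hedged): BigQueryJdbcSqlFeatureNotSupportedException is assumed
+  // here to map to java.sql.SQLFeatureNotSupportedException, as its name suggests,
+  // so portable callers can guard the unsupported members of this class like so:
+  //
+  //   try (Connection conn = DriverManager.getConnection(jdbcUrl)) {
+  //     Clob clob = conn.createClob(); // BigQuery has no CLOB/BLOB types
+  //   } catch (SQLFeatureNotSupportedException e) {
+  //     // fall back to STRING/BYTES columns via setString/setBytes instead
+  //   }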
@Override + public NClob createNClob() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLXML createSQLXML() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isValid(int timeout) throws SQLException { + return false; + } + + @Override + public void setClientInfo(String name, String value) {} + + @Override + public String getClientInfo(String name) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Properties getClientInfo() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setClientInfo(Properties properties) {} + + @Override + public Array createArrayOf(String typeName, Object[] elements) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Struct createStruct(String typeName, Object[] attributes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getSchema() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setSchema(String schema) {} + + @Override + public void setNetworkTimeout(Executor executor, int milliseconds) {} + + @Override + public int getNetworkTimeout() { + return 0; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java new file mode 100644 index 0000000000..e4b29f7cd5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java @@ -0,0 +1,693 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.METHOD_NOT_IMPLEMENTED; + +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Map; + +/** NoOps Abstract base class for BigQuery JDBC ResultSet(s). 
*/ +abstract class BigQueryNoOpsResultSet implements ResultSet { + + @Override + public int getFetchDirection() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setFetchSize(int rows) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int getFetchSize() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getCursorName() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean absolute(int row) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void beforeFirst() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void afterLast() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean first() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean last() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int getRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean relative(int rows) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean previous() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean rowUpdated() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean rowInserted() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean rowDeleted() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNull(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBoolean(int columnIndex, boolean x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateByte(int columnIndex, byte x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateShort(int columnIndex, short x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateInt(int columnIndex, int x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateLong(int columnIndex, long x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void 
updateFloat(int columnIndex, float x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDouble(int columnIndex, double x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateString(int columnIndex, String x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBytes(int columnIndex, byte[] x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDate(int columnIndex, Date x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTime(int columnIndex, Time x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(int columnIndex, Object x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNull(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBoolean(String columnLabel, boolean x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateByte(String columnLabel, byte x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateShort(String columnLabel, short x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateInt(String columnLabel, int x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateLong(String columnLabel, long x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateFloat(String columnLabel, float x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + 
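+  // Hedged sketch: every updateXxx overload in this class throws, i.e. these
+  // result sets behave as CONCUR_READ_ONLY. A caller that must mutate data would
+  // branch on the concurrency mode instead of updating in place:
+  //
+  //   if (rs.getConcurrency() == ResultSet.CONCUR_UPDATABLE) {
+  //     rs.updateDouble("price", 9.99); // never taken with this driver
+  //   } else {
+  //     stmt.executeUpdate("UPDATE dataset.table SET price = 9.99 WHERE id = 1");
+  //   }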
+ @Override + public void updateDouble(String columnLabel, double x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateString(String columnLabel, String x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBytes(String columnLabel, byte[] x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDate(String columnLabel, Date x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTime(String columnLabel, Time x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, int length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, int length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(String columnLabel, Object x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void insertRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void deleteRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void refreshRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void cancelRowUpdates() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void moveToInsertRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void moveToCurrentRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Object getObject(int columnIndex, Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Ref getRef(int columnIndex) throws SQLException { + throw new 
BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Object getObject(String columnLabel, Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Ref getRef(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public URL getURL(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public URL getURL(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRef(int columnIndex, Ref x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRef(String columnLabel, Ref x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(int columnIndex, Blob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(String columnLabel, Blob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(int columnIndex, Clob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(String columnLabel, Clob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateArray(int columnIndex, Array x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateArray(String columnLabel, Array x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public RowId getRowId(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public RowId getRowId(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRowId(int columnIndex, RowId x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRowId(String columnLabel, RowId x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNString(int columnIndex, String nString) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNString(String columnLabel, String nString) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(int columnIndex, NClob nClob) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(String columnLabel, NClob nClob) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public NClob getNClob(int 
columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public NClob getNClob(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLXML getSQLXML(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLXML getSQLXML(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getNString(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getNString(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Reader getNCharacterStream(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Reader getNCharacterStream(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(String columnLabel, 
InputStream inputStream, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(int columnIndex, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(int columnIndex, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T 
getObject(int columnIndex, Class type) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T getObject(String columnLabel, Class type) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLWarning getWarnings() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void clearWarnings() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + void checkClosed() throws SQLException { + if (isClosed()) { + throw new BigQueryJdbcException("This " + getClass().getName() + " has been closed"); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java new file mode 100644 index 0000000000..2e71bfaf75 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java @@ -0,0 +1,90 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.METHOD_NOT_IMPLEMENTED; + +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; + +abstract class BigQueryNoOpsStatement implements Statement { + + @Override + public void setCursorName(String name) throws SQLException { + // TODO: ResultSet Concurrency is read only(Not updatable) + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public ResultSet getGeneratedKeys() throws SQLException { + // TODO: Returns an empty resultset. 
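+    // (Hedged sketch for the TODO above: the no-arg BigQueryJsonResultSet
+    // constructor defined earlier in this patch builds an empty, zero-row result
+    // set, so `return new BigQueryJsonResultSet();` would satisfy the JDBC
+    // contract for statements that generate no keys.)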
+ // return empty ResultSet + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int executeUpdate(String sql, String[] columnNames) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean execute(String sql, int[] columnIndexes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean execute(String sql, String[] columnNames) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java new file mode 100644 index 0000000000..9644dd581d --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java @@ -0,0 +1,281 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.SQLException; +import java.util.ArrayList; + +class BigQueryParameterHandler { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + + public BigQueryParameterHandler(int parameterCount) { + this.parametersArraySize = parameterCount; + } + + BigQueryParameterHandler(int parameterCount, ArrayList parametersList) { + this.parametersArraySize = parameterCount; + this.parametersList = parametersList; + } + + // Indicates whether the parameter is input, output or both + // Default is UNSPECIFIED + // Used by CallableStatement + enum BigQueryStatementParameterType { + UNSPECIFIED, + IN, + OUT, + INOUT + }; + + private int parametersArraySize; + ArrayList parametersList = new ArrayList<>(parametersArraySize); + + private long highestIndex = 0; + + QueryJobConfiguration.Builder configureParameters( + QueryJobConfiguration.Builder jobConfigurationBuilder) throws SQLException { + LOG.finest("++enter++"); + try { + for (int i = 1; i <= this.parametersArraySize; i++) { + + Object parameterValue = getParameter(i); + StandardSQLTypeName sqlType = getSqlType(i); + LOG.info( + String.format( + "Parameter %s of type %s at index %s added to QueryJobConfiguration", + parameterValue, sqlType, i)); + jobConfigurationBuilder.addPositionalParameter( + QueryParameterValue.of(parameterValue, sqlType)); + } + } catch (NullPointerException e) { + if (e.getMessage().contains("Null type")) { + throw new BigQueryJdbcException("One or more parameters missing in Prepared statement.", e); + } + } + return jobConfigurationBuilder; + } + + void setParameter(int parameterIndex, Object value, Class type) + throws BigQueryJdbcSqlFeatureNotSupportedException { + LOG.finest("++enter++"); + LOG.finest(String.format("setParameter called by : %s", type.getName())); + checkValidIndex(parameterIndex); + + int arrayIndex = parameterIndex - 1; + if (parameterIndex >= this.highestIndex || this.parametersList.get(arrayIndex) == null) { + parametersList.ensureCapacity(parameterIndex); + while (parametersList.size() < parameterIndex) { + parametersList.add(null); + } + parametersList.set(arrayIndex, new BigQueryJdbcParameter()); + } + this.highestIndex = Math.max(parameterIndex, highestIndex); + BigQueryJdbcParameter parameter = parametersList.get(arrayIndex); + + parameter.setIndex(parameterIndex); + parameter.setValue(value); + parameter.setType(type); + parameter.setSqlType(BigQueryJdbcTypeMappings.classToType(type)); + parameter.setParamName(""); + parameter.setParamType(BigQueryStatementParameterType.UNSPECIFIED); + parameter.setScale(-1); + + LOG.finest(String.format("Parameter set { %s }", parameter.toString())); + } + + private void checkValidIndex(int parameterIndex) { + if (parameterIndex > this.parametersArraySize) { + throw new IndexOutOfBoundsException("All parameters already provided."); + } + } + + Object getParameter(int index) { + // Index is 1-based. Converting to 0 based for java. 
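+    // e.g. getParameter(1) reads parametersList.get(0); an index that was never
+    // set (or lies beyond the list) yields null rather than throwing, matching
+    // the getType/getSqlType accessors below.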
+ int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return null; + } + return parametersList.get(arrayIndex).getValue(); + } + + Class getType(int index) { + // Index is 1-based. Converting to 0 based for java. + int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return null; + } + return parametersList.get(arrayIndex).getType(); + } + + StandardSQLTypeName getSqlType(int index) { + // Index is 1-based. Converting to 0 based for java. + int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return null; + } + return parametersList.get(arrayIndex).getSqlType(); + } + + void clearParameters() { + LOG.finest("++enter++"); + parametersList.clear(); + highestIndex = 0; + } + + // set parameter by name and type + void setParameter( + String paramName, + Object value, + Class type, + BigQueryStatementParameterType paramType, + int scale) + throws BigQueryJdbcSqlFeatureNotSupportedException { + LOG.finest("++enter++"); + LOG.finest(String.format("setParameter called by : %s", type.getName())); + if (paramName == null || paramName.isEmpty()) { + throw new IllegalArgumentException("paramName cannot be null or empty"); + } + BigQueryJdbcParameter parameter = null; + for (BigQueryJdbcParameter p : parametersList) { + if (paramName.equals(p.getParamName())) { + parameter = p; + break; + } + } + if (parameter == null) { + // Add new parameter. + parameter = new BigQueryJdbcParameter(); + parameter.setIndex(-1); + } + parameter.setValue(value); + parameter.setType(type); + parameter.setSqlType(BigQueryJdbcTypeMappings.classToType(type)); + parameter.setParamName(paramName); + parameter.setParamType(paramType); + parameter.setScale(scale); + if (parameter.getIndex() == -1) { + parametersList.add(parameter); + } + LOG.finest(String.format("Parameter set { %s }", parameter.toString())); + } + + // set parameter by index and type + void setParameter( + int parameterIndex, + Object value, + Class type, + BigQueryStatementParameterType paramType, + int scale) + throws BigQueryJdbcSqlFeatureNotSupportedException { + LOG.finest("++enter++"); + LOG.finest(String.format("setParameter called by : %s", type.getName())); + checkValidIndex(parameterIndex); + int arrayIndex = parameterIndex - 1; + if (parameterIndex >= this.highestIndex || this.parametersList.get(arrayIndex) == null) { + parametersList.ensureCapacity(parameterIndex); + while (parametersList.size() < parameterIndex) { + parametersList.add(null); + } + parametersList.set(arrayIndex, new BigQueryJdbcParameter()); + } + this.highestIndex = Math.max(parameterIndex, highestIndex); + BigQueryJdbcParameter parameter = parametersList.get(arrayIndex); + + parameter.setIndex(parameterIndex); + parameter.setValue(value); + parameter.setType(type); + parameter.setSqlType(BigQueryJdbcTypeMappings.classToType(type)); + parameter.setParamName(""); + parameter.setParamType(paramType); + parameter.setScale(scale); + + LOG.finest(String.format("Parameter set { %s }", parameter.toString())); + } + + // Get Parameter by name + Object getParameter(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getValue(); + } + } + return null; + } + + // Get parameter type by index + BigQueryStatementParameterType getParameterType(int index) { + // Index is 1-based. Converting to 0 based for java. 
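+    // Usage sketch (hedged; mirrors setParameter/configureParameters above):
+    //   BigQueryParameterHandler handler = new BigQueryParameterHandler(2);
+    //   handler.setParameter(1, "US", String.class); // positional parameter ?1
+    //   handler.setParameter(2, 100L, Long.class);   // positional parameter ?2
+    //   handler.configureParameters(QueryJobConfiguration.newBuilder(sql));
+    //   handler.getParameterType(1); // UNSPECIFIED until set with a paramType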
+ int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return null; + } + return parametersList.get(arrayIndex).getParamType(); + } + + // Get parameter type by name + BigQueryStatementParameterType getParameterType(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getParamType(); + } + } + return null; + } + + // Get scale type by index + int getParameterScale(int index) { + // Index is 1-based. Converting to 0 based for java. + int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return -1; + } + return parametersList.get(arrayIndex).getScale(); + } + + // Get parameter scale by name + int getParameterScale(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getScale(); + } + } + return -1; + } + + Class getType(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getType(); + } + } + return null; + } + + StandardSQLTypeName getSqlType(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getSqlType(); + } + } + return null; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java new file mode 100644 index 0000000000..ebb07dc11f --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java @@ -0,0 +1,497 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.common.annotations.VisibleForTesting; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.UUID; +import java.util.concurrent.Executor; +import java.util.concurrent.LinkedBlockingDeque; +import javax.sql.ConnectionEvent; +import javax.sql.ConnectionEventListener; +import javax.sql.PooledConnection; +import javax.sql.StatementEventListener; + +class BigQueryPooledConnection implements PooledConnection { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + + private String id; // Mainly for internal use + private Connection bqConnection; + private boolean inUse = false; + private Long listenerPoolSize = 10L; + private LinkedBlockingDeque listeners; + + BigQueryPooledConnection(Connection bqConnection) { + this.bqConnection = bqConnection; + this.id = UUID.randomUUID().toString(); + String connectionUrl = ((BigQueryConnection) bqConnection).getConnectionUrl(); + if (connectionUrl != null && !connectionUrl.isEmpty()) { + this.listenerPoolSize = + BigQueryJdbcUrlUtility.parseListenerPoolSize(connectionUrl, this.toString()); + } + if (getListenerPoolSize() > 0L) { + listeners = new LinkedBlockingDeque<>(getListenerPoolSize().intValue()); + } else { + listeners = new LinkedBlockingDeque<>(); + } + } + + Long getListenerPoolSize() { + return listenerPoolSize; + } + + @VisibleForTesting + boolean inUse() { + return inUse; + } + + @VisibleForTesting + boolean isListenerPooled(ConnectionEventListener l) { + return listeners.contains(l); + } + + @Override + public synchronized Connection getConnection() throws SQLException { + LOG.finest("++enter++"); + if (inUse) { + throw new SQLException("PooledConnection is already in use."); + } + inUse = true; + // Return a wrapper around the underlying physical connection. + return new BigQueryPooledConnectionWrapper(bqConnection, this); + } + + @Override + public synchronized void close() throws SQLException { + LOG.finest("++enter++"); + // Notify listeners that the *PooledConnection* is being closed. + ConnectionEvent event = new ConnectionEvent(this); + for (ConnectionEventListener listener : listeners) { + listener.connectionClosed(event); // This is likely not the intended event for this action + } + // Marks the pooled connection to be not in use. + inUse = false; + } + + @Override + public synchronized void addConnectionEventListener(ConnectionEventListener listener) { + LOG.finest("++enter++"); + if (listener == null) { + return; + } + if (this.listeners.contains(listener)) { + return; + } + this.listeners.add(listener); + } + + @Override + public synchronized void removeConnectionEventListener(ConnectionEventListener listener) { + LOG.finest("++enter++"); + if (listener == null) { + return; + } + if (!this.listeners.contains(listener)) { + return; + } + this.listeners.remove(listener); + } + + // Method called by the BigQueryPooledConnectionWrapper when the logical + // Connection is closed. + public synchronized void connectionHandleClosed(BigQueryPooledConnectionWrapper handle) { + LOG.finest("++enter++"); + inUse = false; + ConnectionEvent event = new ConnectionEvent(this); + for (ConnectionEventListener listener : listeners) { + listener.connectionClosed(event); + } + LOG.finest("Connection handle returned to the pool."); + } + + // Method to notify listeners about a connection error. 
This can be called
+  // by the application if they are using PooledConnection directly or by the
+  // BigQueryPooledConnectionWrapper when a connection is aborted.
+  public synchronized void fireConnectionError(SQLException e) {
+    LOG.finest("++enter++");
+    inUse = false;
+    ConnectionEvent event = new ConnectionEvent(this, e);
+    for (ConnectionEventListener listener : listeners) {
+      listener.connectionErrorOccurred(event);
+    }
+    LOG.finest(
+        String.format("Connection handle removed from the pool due to error: %s", e.getMessage()));
+    // Listeners no longer need to be notified about this connection since it has been removed
+    // from the pool.
+    for (ConnectionEventListener listener : listeners) {
+      removeConnectionEventListener(listener);
+    }
+  }
+
+  @Override
+  public void addStatementEventListener(StatementEventListener arg0) {
+    throw new UnsupportedOperationException(
+        "Method 'addStatementEventListener' is not supported by the BQ Driver");
+  }
+
+  @Override
+  public void removeStatementEventListener(StatementEventListener arg0) {
+    throw new UnsupportedOperationException(
+        "Method 'removeStatementEventListener' is not supported by the BQ Driver");
+  }
+
+  // Inner class: Connection wrapper around the actual physical Connection.
+  // This class notifies the listeners or calls the listener notification methods
+  // provided by the pooled connection.
+  static class BigQueryPooledConnectionWrapper implements Connection {
+    private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+    private Connection bqConnectionDelegate;
+    private BigQueryPooledConnection pooledConnection;
+    private boolean closed = false;
+
+    public BigQueryPooledConnectionWrapper(
+        Connection bqConnectionDelegate, BigQueryPooledConnection pooledConnection) {
+      this.bqConnectionDelegate = bqConnectionDelegate;
+      this.pooledConnection = pooledConnection;
+    }
+
+    @Override
+    public void close() throws SQLException {
+      LOG.finest("++enter++");
+      if (!closed) {
+        // Instead of physically closing, we notify the PooledConnection
+        // that this handle is no longer in use.
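+        // A pool implementation would typically register a ConnectionEventListener on the
+        // PooledConnection and recycle the physical connection on this callback. A minimal
+        // sketch (hypothetical pool code, not part of this driver):
+        //
+        //   pooled.addConnectionEventListener(
+        //       new ConnectionEventListener() {
+        //         @Override
+        //         public void connectionClosed(ConnectionEvent ev) {
+        //           idle.offer((PooledConnection) ev.getSource()); // hand back for reuse
+        //         }
+        //
+        //         @Override
+        //         public void connectionErrorOccurred(ConnectionEvent ev) {
+        //           discard((PooledConnection) ev.getSource()); // drop the broken handle
+        //         }
+        //       });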
+ pooledConnection.connectionHandleClosed(this); + closed = true; + LOG.finest("Logical connection closed (returned to pool)."); + } + } + + @Override + public boolean isClosed() throws SQLException { + return closed || bqConnectionDelegate.isClosed(); + } + + @Override + public java.sql.Statement createStatement() throws SQLException { + return bqConnectionDelegate.createStatement(); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql) throws SQLException { + return bqConnectionDelegate.prepareStatement(sql); + } + + @Override + public java.sql.CallableStatement prepareCall(String sql) throws SQLException { + return bqConnectionDelegate.prepareCall(sql); + } + + @Override + public String nativeSQL(String sql) throws SQLException { + return bqConnectionDelegate.nativeSQL(sql); + } + + @Override + public void setAutoCommit(boolean autoCommit) throws SQLException { + bqConnectionDelegate.setAutoCommit(autoCommit); + } + + @Override + public boolean getAutoCommit() throws SQLException { + return bqConnectionDelegate.getAutoCommit(); + } + + @Override + public void commit() throws SQLException { + bqConnectionDelegate.commit(); + } + + @Override + public void rollback() throws SQLException { + bqConnectionDelegate.rollback(); + } + + @Override + public java.sql.DatabaseMetaData getMetaData() throws SQLException { + return bqConnectionDelegate.getMetaData(); + } + + @Override + public void setReadOnly(boolean readOnly) throws SQLException { + bqConnectionDelegate.setReadOnly(readOnly); + } + + @Override + public boolean isReadOnly() throws SQLException { + return bqConnectionDelegate.isReadOnly(); + } + + @Override + public void setCatalog(String catalog) throws SQLException { + bqConnectionDelegate.setCatalog(catalog); + } + + @Override + public String getCatalog() throws SQLException { + return bqConnectionDelegate.getCatalog(); + } + + @Override + public void setTransactionIsolation(int level) throws SQLException { + bqConnectionDelegate.setTransactionIsolation(level); + } + + @Override + public int getTransactionIsolation() throws SQLException { + return bqConnectionDelegate.getTransactionIsolation(); + } + + @Override + public java.sql.SQLWarning getWarnings() throws SQLException { + return bqConnectionDelegate.getWarnings(); + } + + @Override + public void clearWarnings() throws SQLException { + bqConnectionDelegate.clearWarnings(); + } + + @Override + public java.sql.Statement createStatement(int resultSetType, int resultSetConcurrency) + throws SQLException { + return bqConnectionDelegate.createStatement(resultSetType, resultSetConcurrency); + } + + @Override + public java.sql.PreparedStatement prepareStatement( + String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, resultSetType, resultSetConcurrency); + } + + @Override + public java.sql.CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + return bqConnectionDelegate.prepareCall(sql, resultSetType, resultSetConcurrency); + } + + @Override + public java.util.Map> getTypeMap() throws SQLException { + return bqConnectionDelegate.getTypeMap(); + } + + @Override + public void setTypeMap(java.util.Map> map) throws SQLException { + bqConnectionDelegate.setTypeMap(map); + } + + @Override + public void setHoldability(int holdability) throws SQLException { + bqConnectionDelegate.setHoldability(holdability); + } + + @Override + public int getHoldability() throws SQLException { + 
return bqConnectionDelegate.getHoldability(); + } + + @Override + public java.sql.Savepoint setSavepoint() throws SQLException { + return bqConnectionDelegate.setSavepoint(); + } + + @Override + public java.sql.Savepoint setSavepoint(String name) throws SQLException { + return bqConnectionDelegate.setSavepoint(name); + } + + @Override + public void rollback(java.sql.Savepoint savepoint) throws SQLException { + bqConnectionDelegate.rollback(savepoint); + } + + @Override + public void releaseSavepoint(java.sql.Savepoint savepoint) throws SQLException { + bqConnectionDelegate.releaseSavepoint(savepoint); + } + + @Override + public java.sql.Statement createStatement( + int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + return bqConnectionDelegate.createStatement( + resultSetType, resultSetConcurrency, resultSetHoldability); + } + + @Override + public java.sql.PreparedStatement prepareStatement( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + return bqConnectionDelegate.prepareStatement( + sql, resultSetType, resultSetConcurrency, resultSetHoldability); + } + + @Override + public java.sql.CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + return bqConnectionDelegate.prepareCall( + sql, resultSetType, resultSetConcurrency, resultSetHoldability); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) + throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, autoGeneratedKeys); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql, int[] columnIndices) + throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, columnIndices); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql, String[] columnNames) + throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, columnNames); + } + + @Override + public java.sql.Clob createClob() throws SQLException { + return bqConnectionDelegate.createClob(); + } + + @Override + public java.sql.Blob createBlob() throws SQLException { + return bqConnectionDelegate.createBlob(); + } + + @Override + public java.sql.NClob createNClob() throws SQLException { + return bqConnectionDelegate.createNClob(); + } + + @Override + public java.sql.SQLXML createSQLXML() throws SQLException { + return bqConnectionDelegate.createSQLXML(); + } + + @Override + public boolean isValid(int timeout) throws SQLException { + return bqConnectionDelegate.isValid(timeout); + } + + @Override + public void setClientInfo(String name, String value) throws java.sql.SQLClientInfoException { + bqConnectionDelegate.setClientInfo(name, value); + } + + @Override + public void setClientInfo(java.util.Properties properties) + throws java.sql.SQLClientInfoException { + bqConnectionDelegate.setClientInfo(properties); + } + + @Override + public String getClientInfo(String name) throws SQLException { + return bqConnectionDelegate.getClientInfo(name); + } + + @Override + public java.util.Properties getClientInfo() throws SQLException { + return bqConnectionDelegate.getClientInfo(); + } + + @Override + public java.sql.Array createArrayOf(String typeName, Object[] elements) throws SQLException { + return bqConnectionDelegate.createArrayOf(typeName, elements); + } + + @Override + public java.sql.Struct createStruct(String typeName, Object[] attributes) throws 
SQLException {
+      return bqConnectionDelegate.createStruct(typeName, attributes);
+    }
+
+    @Override
+    public <T> T unwrap(Class<T> iface) throws SQLException {
+      return bqConnectionDelegate.unwrap(iface);
+    }
+
+    @Override
+    public boolean isWrapperFor(Class<?> iface) throws SQLException {
+      return bqConnectionDelegate.isWrapperFor(iface);
+    }
+
+    @Override
+    public void setSchema(String schema) throws SQLException {
+      bqConnectionDelegate.setSchema(schema);
+    }
+
+    @Override
+    public String getSchema() throws SQLException {
+      return bqConnectionDelegate.getSchema();
+    }
+
+    @Override
+    public void setNetworkTimeout(java.util.concurrent.Executor executor, int milliseconds)
+        throws SQLException {
+      bqConnectionDelegate.setNetworkTimeout(executor, milliseconds);
+    }
+
+    @Override
+    public int getNetworkTimeout() throws SQLException {
+      return bqConnectionDelegate.getNetworkTimeout();
+    }
+
+    @Override
+    public void abort(Executor arg0) throws SQLException {
+      LOG.finest("++enter++");
+      if (!closed) {
+        // We notify the pooled connection that the physical connection
+        // is being aborted. We assume here that abort() is called for
+        // error cases.
+        SQLException e = new SQLException("Connection is being terminated and aborted");
+        pooledConnection.fireConnectionError(e);
+        closed = true;
+        LOG.finest("Logical connection aborted (removed from pool).");
+      }
+      // Call the delegate abort to actually close the underlying connection.
+      bqConnectionDelegate.abort(arg0);
+    }
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((id == null) ? 0 : id.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) return true;
+    if (obj == null) return false;
+    if (getClass() != obj.getClass()) return false;
+    BigQueryPooledConnection other = (BigQueryPooledConnection) obj;
+    if (id == null) {
+      if (other.id != null) return false;
+    } else if (!id.equals(other.id)) return false;
+    return true;
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPreparedStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPreparedStatement.java
new file mode 100644
index 0000000000..2410f6a58a
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPreparedStatement.java
@@ -0,0 +1,611 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.gax.retrying.RetrySettings; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient; +import com.google.cloud.bigquery.storage.v1.TableName; +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import com.google.protobuf.Descriptors.DescriptorValidationException; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.ParameterMetaData; +import java.sql.PreparedStatement; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.LinkedList; +import java.util.Queue; + +class BigQueryPreparedStatement extends BigQueryStatement implements PreparedStatement { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private static final char POSITIONAL_PARAMETER_CHAR = '?'; + // Making this protected so BigQueryCallableStatement subclass can access the parameters. 
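+  //
+  // Typical flow through this class (illustrative sketch only; the table and column
+  // names below are made up):
+  //
+  //   PreparedStatement ps =
+  //       conn.prepareStatement("SELECT name FROM dataset.users WHERE id = ?");
+  //   ps.setLong(1, 42L);               // recorded by the parameter handler, 1-based
+  //   ResultSet rs = ps.executeQuery(); // bound as POSITIONAL query parameters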
+ protected final BigQueryParameterHandler parameterHandler; + protected int parameterCount = 0; + protected String currentQuery; + private Queue> batchParameters = new LinkedList<>(); + private Schema insertSchema = null; + private TableName insertTableName = null; + + BigQueryPreparedStatement(BigQueryConnection connection, String query) { + super(connection); + setCurrentQuery(query); + this.parameterHandler = new BigQueryParameterHandler(this.parameterCount); + } + + void setCurrentQuery(String currentQuery) { + this.parameterCount = getParameterCount(currentQuery); + this.currentQuery = currentQuery; + } + + private int getParameterCount(String query) { + LOG.finest("++enter++"); + return (int) query.chars().filter(ch -> ch == POSITIONAL_PARAMETER_CHAR).count(); + } + + @Override + public ResultSet executeQuery() throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(this.currentQuery); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = this.parameterHandler.configureParameters(jobConfiguration); + runQuery(this.currentQuery, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return getCurrentResultSet(); + } + + @Override + public long executeLargeUpdate() throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(this.currentQuery); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = this.parameterHandler.configureParameters(jobConfiguration); + runQuery(this.currentQuery, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return this.currentUpdateCount; + } + + @Override + public int executeUpdate() throws SQLException { + LOG.finest("++enter++"); + return checkUpdateCount(executeLargeUpdate()); + } + + @Override + public boolean execute() throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(this.currentQuery); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = this.parameterHandler.configureParameters(jobConfiguration); + runQuery(this.currentQuery, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return getCurrentResultSet() != null; + } + + @Override + public void clearParameters() { + LOG.finest("++enter++"); + this.parameterHandler.clearParameters(); + this.parameterCount = 0; + } + + @Override + public void setNull(int parameterIndex, int sqlType) { + // TODO(neenu): implement null case + } + + @Override + public void setBoolean(int parameterIndex, boolean x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Boolean.class); + } + + @Override + public void setByte(int parameterIndex, byte x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Byte.class); + } + + @Override + public void setShort(int parameterIndex, short x) { + // TODO(neenu): implement Bytes conversion. 
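+    // A plausible implementation, once the TODO above is resolved, could widen the value
+    // and reuse the existing parameter plumbing (sketch only, not the committed behavior):
+    //
+    //   checkClosed();
+    //   this.parameterHandler.setParameter(parameterIndex, (long) x, Long.class);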
+ } + + @Override + public void setInt(int parameterIndex, int x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Integer.class); + } + + @Override + public void setLong(int parameterIndex, long x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Long.class); + } + + @Override + public void setFloat(int parameterIndex, float x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Float.class); + } + + @Override + public void setDouble(int parameterIndex, double x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Double.class); + } + + @Override + public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, BigDecimal.class); + } + + @Override + public void setString(int parameterIndex, String x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, String.class); + } + + @Override + public void setBytes(int parameterIndex, byte[] x) { + // TODO(neenu): implement Bytes conversion. + } + + @Override + public void setDate(int parameterIndex, Date x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x.toString(), String.class); + } + + @Override + public void setTime(int parameterIndex, Time x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x.toString(), String.class); + } + + @Override + public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x.toString(), String.class); + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setUnicodeStream(int parameterIndex, InputStream x, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setObject(int parameterIndex, Object x, int targetSqlType) {} + + @Override + public void setObject(int parameterIndex, Object x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void addBatch() { + LOG.finest("++enter++"); + ArrayList currentParameterList = + deepCopyParameterList(this.parameterHandler.parametersList); + this.batchParameters.add(currentParameterList); + } + + private ArrayList deepCopyParameterList( + ArrayList parametersList) { + ArrayList copiedParameterList = new ArrayList<>(); + for (BigQueryJdbcParameter parameter : parametersList) { + BigQueryJdbcParameter newParameter = new BigQueryJdbcParameter(parameter); + copiedParameterList.add(newParameter); + } + return copiedParameterList; + } + + @Override + public int[] executeBatch() throws SQLException { + LOG.finest("++enter++"); + int[] result = new int[this.batchParameters.size()]; + if (this.batchParameters.isEmpty()) { + return result; + } + if (useWriteAPI()) { + try (BigQueryWriteClient writeClient = this.connection.getBigQueryWriteClient()) { + LOG.info("Using Write API for bulk INSERT operation."); + ArrayList currentParameterList = this.batchParameters.peek(); + if (this.insertSchema == null && this.insertTableName == null) { + QueryStatistics insertJobQueryStatistics = + getQueryStatistics(getWriteBatchJobConfiguration(currentParameterList)); + 
setInsertMetadata(insertJobQueryStatistics);
+        }
+
+        long rowCount = bulkInsertWithWriteAPI(writeClient);
+        int[] insertArray = new int[Math.toIntExact(rowCount)];
+        Arrays.fill(insertArray, 1);
+        return insertArray;
+
+      } catch (DescriptorValidationException | IOException | InterruptedException e) {
+        throw new BigQueryJdbcRuntimeException(e);
+      }
+
+    } else {
+      try {
+        LOG.info("Using individual INSERT query runs.");
+        int count = this.batchParameters.size();
+        StringBuilder combinedQuery = new StringBuilder();
+        for (int i = 0; i < count; i++) {
+
+          if (this.currentQuery.trim().endsWith(";")) {
+            combinedQuery.append(this.currentQuery);
+          } else {
+            combinedQuery.append(this.currentQuery).append(";");
+          }
+        }
+        // executeBatch in PreparedStatement is used for BulkInsert/DML.
+        // If not correct Type, fails later.
+        runQuery(
+            combinedQuery.toString(), getStandardBatchJobConfiguration(combinedQuery.toString()));
+        int i = 0;
+        while (getUpdateCount() != -1 && i < count) {
+          result[i] = getUpdateCount();
+          getMoreResults();
+          i++;
+        }
+        return result;
+      } catch (InterruptedException ex) {
+        throw new BigQueryJdbcRuntimeException(ex);
+      } catch (SQLException e) {
+        throw new BigQueryJdbcException(e);
+      }
+    }
+  }
+
+  private long bulkInsertWithWriteAPI(BigQueryWriteClient bigQueryWriteClient)
+      throws DescriptorValidationException,
+          IOException,
+          InterruptedException,
+          BigQueryJdbcException {
+    LOG.finest("++enter++");
+    RetrySettings retrySettings = this.connection.getRetrySettings();
+
+    BigQueryJdbcBulkInsertWriter bulkInsertWriter = new BigQueryJdbcBulkInsertWriter();
+    bulkInsertWriter.initialize(this.insertTableName, bigQueryWriteClient, retrySettings);
+
+    long offset = 0;
+    JsonArray jsonArray = new JsonArray();
+    Gson gson = new Gson();
+    int count = this.batchParameters.size();
+    for (int i = 0; i < count; i++) {
+
+      ArrayList<BigQueryJdbcParameter> parameterList = this.batchParameters.poll();
+      FieldList fieldLists = this.insertSchema.getFields();
+      if (fieldLists.size() == parameterList.size()) {
+
+        JsonObject rowObject = new JsonObject();
+        for (int j = 0; j < parameterList.size(); j++) {
+          BigQueryJdbcParameter parameter = parameterList.get(j);
+          if (parameter.getSqlType() == StandardSQLTypeName.STRING) {
+            rowObject.addProperty(fieldLists.get(j).getName(), parameter.getValue().toString());
+          } else {
+            rowObject.addProperty(fieldLists.get(j).getName(), gson.toJson(parameter.getValue()));
+          }
+        }
+        jsonArray.add(rowObject);
+
+        if (jsonArray.size() == this.querySettings.getWriteAPIAppendRowCount()
+            || this.batchParameters.size() == 0) {
+          bulkInsertWriter.append(jsonArray, offset);
+          LOG.finest("Append called.");
+          offset += jsonArray.size();
+          jsonArray = new JsonArray();
+        }
+      } else {
+        throw new BigQueryJdbcException("Mismatch between field count and parameter count.");
+      }
+    }
+
+    long rowCount = bulkInsertWriter.cleanup(bigQueryWriteClient);
+
+    BatchCommitWriteStreamsRequest commitRequest =
+        BatchCommitWriteStreamsRequest.newBuilder()
+            .setParent(this.insertTableName.toString())
+            .addWriteStreams(bulkInsertWriter.getStreamName())
+            .build();
+    BatchCommitWriteStreamsResponse commitResponse =
+        bigQueryWriteClient.batchCommitWriteStreams(commitRequest);
+    if (!commitResponse.hasCommitTime()) {
+      throw new BigQueryJdbcException("Error committing the streams");
+    }
+    LOG.finest("Commit called.");
+    return rowCount;
+  }
+
+  private void setInsertMetadata(QueryStatistics statistics) throws
SQLException { + LOG.finest("++enter++"); + if (!statistics.getStatementType().equals(StatementType.INSERT) + || statistics.getSchema() == null + || statistics.getReferencedTables().stream().distinct().count() > 1) { + throw new BigQueryJdbcException( + "Use java.sql.Statement.executeBatch() for heterogeneous DML batches"); + } + + this.insertSchema = statistics.getSchema(); + TableId tableID = statistics.getReferencedTables().get(0); + this.insertTableName = + TableName.of(tableID.getProject(), tableID.getDataset(), tableID.getTable()); + LOG.finest( + String.format( + "this.insertTableName : %s, this.insertSchema : %s", + this.insertTableName, this.insertSchema.toString())); + } + + QueryJobConfiguration getWriteBatchJobConfiguration( + ArrayList currentParameterList) throws SQLException { + LOG.finest("++enter++"); + BigQueryParameterHandler batchHandler = + new BigQueryParameterHandler(this.parameterCount, currentParameterList); + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = batchHandler.configureParameters(jobConfiguration); + return jobConfiguration.build(); + } + + QueryJobConfiguration getStandardBatchJobConfiguration(String query) throws SQLException { + LOG.finest("++enter++"); + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(query); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration.setPriority(QueryJobConfiguration.Priority.BATCH); + int index = 0; + while (!this.batchParameters.isEmpty()) { + ArrayList parameterList = this.batchParameters.poll(); + + for (BigQueryJdbcParameter parameter : parameterList) { + Object parameterValue = parameter.getValue(); + StandardSQLTypeName sqlType = parameter.getSqlType(); + LOG.finest( + String.format( + "Parameter %s of type %s at index %s added to QueryJobConfiguration", + parameterValue, sqlType, index++)); + jobConfiguration.addPositionalParameter(QueryParameterValue.of(parameterValue, sqlType)); + } + } + return jobConfiguration.build(); + } + + Boolean useWriteAPI() { + LOG.finest("++enter++"); + if (this.querySettings.isUseWriteAPI()) { + if (this.batchParameters.size() >= this.querySettings.getWriteAPIActivationRowCount()) { + return true; + } + } + return false; + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setRef(int parameterIndex, Ref x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBlob(int parameterIndex, Blob x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setClob(int parameterIndex, Clob x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setArray(int parameterIndex, Array x) { + // TODO(neenu) :IMPLEMENT ARRAY + } + + @Override + public ResultSetMetaData getMetaData() { + // TODO(neenu) :IMPLEMENT metadata + return null; + } + + @Override + public void setDate(int parameterIndex, Date x, Calendar cal) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setTime(int parameterIndex, Time x, Calendar cal) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNull(int parameterIndex, int sqlType, String typeName) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setURL(int parameterIndex, URL x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public ParameterMetaData 
getParameterMetaData() { + // TODO(neenu) :IMPLEMENT + return null; + } + + @Override + public void setRowId(int parameterIndex, RowId x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNString(int parameterIndex, String value) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNCharacterStream(int parameterIndex, Reader value, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNClob(int parameterIndex, NClob value) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setClob(int parameterIndex, Reader reader, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBlob(int parameterIndex, InputStream inputStream, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNClob(int parameterIndex, Reader reader, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setSQLXML(int parameterIndex, SQLXML xmlObject) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) { + // TODO(neenu) : IMPLEMENT? + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNCharacterStream(int parameterIndex, Reader value) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setClob(int parameterIndex, Reader reader) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBlob(int parameterIndex, InputStream inputStream) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNClob(int parameterIndex, Reader reader) { + // TODO :NOT IMPLEMENTED + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java new file mode 100644 index 0000000000..c24e37abdd --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java @@ -0,0 +1,46 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; + +public interface BigQueryResultSet { + /* + * This function returns Job Id for the corresponding BQ Job that generated result. 
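+   * For example, a result set produced by this driver can be cast to read it
+   * (usage sketch, assuming the returned ResultSet implements this interface):
+   * ((BigQueryResultSet) rs).getJobId().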
+ * Note that it is not available for certain queries (low-latency queries) and for metadata results. + * + * @return JobId object or null. + */ + public JobId getJobId(); + + /* + * This function returns Query Id for the corresponding low-latency query produced results. + * It is null for regular (non-low latency) jobs and metadata results. + * + * @return Query Id string or null. + */ + public String getQueryId(); + + /* + * Returns com.google.cloud.bigquery.JobStatistics.QueryStatistics object with statistics for the + * completed Job for non-low latency queries. + * + * @return QueryStatistics object or null. + */ + public QueryStatistics getQueryStatistics(); +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java new file mode 100644 index 0000000000..15a1cca349 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java @@ -0,0 +1,73 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.lang.ref.PhantomReference; +import java.lang.ref.ReferenceQueue; + +@InternalApi +class BigQueryResultSetFinalizers { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryResultSetFinalizers.class.getName()); + + @InternalApi + static class ArrowResultSetFinalizer extends PhantomReference { + Thread ownedThread; + + public ArrowResultSetFinalizer( + BigQueryArrowResultSet referent, + ReferenceQueue q, + Thread ownedThread) { + super(referent, q); + this.ownedThread = ownedThread; + } + + // Free resources. Remove all the hard refs + public void finalizeResources() { + LOG.finest("++enter++"); + if (ownedThread != null && !ownedThread.isInterrupted()) { + ownedThread.interrupt(); + } + } + } + + @InternalApi + static class JsonResultSetFinalizer extends PhantomReference { + Thread[] ownedThreads; + + public JsonResultSetFinalizer( + BigQueryJsonResultSet referent, + ReferenceQueue q, + Thread[] ownedThreads) { + super(referent, q); + this.ownedThreads = ownedThreads; + } + + // Free resources. 
Remove all the hard refs + public void finalizeResources() { + LOG.finest("++enter++"); + if (ownedThreads != null) { + for (Thread ownedThread : ownedThreads) { + if (!ownedThread.isInterrupted()) { + ownedThread.interrupt(); + } + } + } + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java new file mode 100644 index 0000000000..d18c689333 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java @@ -0,0 +1,213 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; + +/** This class returns ResultSetMetadata for the JSON and the Arrow ResultSets */ +class BigQueryResultSetMetadata implements ResultSetMetaData { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private final FieldList schemaFieldList; + private final Statement statement; + private final int columnCount; + + private static final int DEFAULT_DISPLAY_SIZE = 50; + + private BigQueryResultSetMetadata(FieldList schemaFieldList, Statement statement) { + LOG.finest("++enter++"); + this.schemaFieldList = schemaFieldList; + this.columnCount = schemaFieldList.size(); + this.statement = statement; + } + + static BigQueryResultSetMetadata of(FieldList schemaFieldList, Statement statement) { + return new BigQueryResultSetMetadata(schemaFieldList, statement); + } + + private Field getField(int sqlColumn) { + return this.schemaFieldList.get(sqlColumn - 1); + } + + @Override + public int getColumnCount() { + return this.columnCount; + } + + @Override + public boolean isAutoIncrement(int column) { + // BQ doesn't support auto increment + return false; + } + + @Override + public boolean isCaseSensitive(int column) { + int colType = getColumnType(column); + return colType == Types.NVARCHAR; + } + + @Override + public boolean isSearchable(int column) { + int colType = getColumnType(column); + return colType != Types.OTHER; + } + + @Override + public boolean isCurrency(int column) { + return false; + } + + @Override + public int isNullable(int column) { + Mode colMode = getField(column).getMode(); + if (colMode == null) { + return ResultSetMetaData.columnNullableUnknown; + } + return colMode == Mode.NULLABLE + ? 
ResultSetMetaData.columnNullable + : ResultSetMetaData.columnNoNulls; + } + + @Override + public boolean isSigned(int column) { + int colType = getColumnType(column); + return colType == Types.FLOAT + || colType == Types.DOUBLE + || colType == Types.BIGINT + || colType == Types.NUMERIC; + } + + @Override + public int getColumnDisplaySize(int column) { + int colType = getColumnType(column); + switch (colType) { + case Types.BOOLEAN: + return 5; + case Types.DATE: + case Types.BIGINT: + return 10; + case Types.DOUBLE: + case Types.DECIMAL: + case Types.NUMERIC: + return 14; + case Types.TIMESTAMP: + return 16; + default: + return DEFAULT_DISPLAY_SIZE; + } + } + + @Override + public String getColumnLabel(int column) { + return getField(column).getName(); + } + + @Override + public String getColumnName(int column) { + return getField(column).getName(); + } + + @Override + public int getPrecision(int column) { + return (int) (getField(column).getPrecision() != null ? getField(column).getPrecision() : 0); + } + + @Override + public int getScale(int column) { + return (int) (getField(column).getScale() != null ? getField(column).getScale() : 0); + } + + @Override + public String getTableName(int column) { + // returning "" as per the specs as there might be multiple tables involved, or we + // might be reading from the temp table + return ""; + } + + @Override + public String getCatalogName(int column) { + return ""; // not applicable + } + + @Override + public String getSchemaName(int column) { + return ""; // not applicable + } + + private StandardSQLTypeName getStandardSQLTypeName(int column) { + Field field = getField(column); + if (field.getMode() == Mode.REPEATED) { + return StandardSQLTypeName.ARRAY; + } + return getField(column).getType().getStandardType(); + } + + @Override + public int getColumnType(int column) { + return BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get( + getStandardSQLTypeName(column)); + } + + @Override + public String getColumnTypeName(int column) { + return getStandardSQLTypeName(column).name(); + } + + @Override + public boolean isReadOnly(int column) { + return false; + } + + @Override + public boolean isWritable(int column) { + return !isReadOnly(column); + } + + @Override + public boolean isDefinitelyWritable(int column) { + return false; + } + + @Override + public String getColumnClassName(int column) { + Field field = getField(column); + if (field.getMode() == Mode.REPEATED) { + return java.sql.Array.class.getName(); + } + return BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping + .get(field.getType().getStandardType()) + .getName(); + } + + // Unsupported methods: + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException("unwrap is not implemented"); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException("isWrapperFor is not implemented"); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java new file mode 100644 index 0000000000..16f13a7784 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java @@ -0,0 +1,857 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.Clustering; +import com.google.cloud.bigquery.ConnectionProperty; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.EncryptionConfiguration; +import com.google.cloud.bigquery.ExternalTableDefinition; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryJobConfiguration.Priority; +import com.google.cloud.bigquery.RangePartitioning; +import com.google.cloud.bigquery.TimePartitioning; +import com.google.cloud.bigquery.UserDefinedFunction; +import java.util.List; +import java.util.Map; + +/** This class is used to pass user defined settings for execution of Queries. */ +// TODO: Expose this class as public once we decide on how to expose the slow +// query path to the end users. IMP: revisit the set of params to be exposed via BigQuerySettings +class BigQuerySettings { + + private final boolean useReadAPI; + private final int highThroughputActivationRatio; + private final int highThroughputMinTableSize; + private final boolean unsupportedHTAPIFallback; + + private final boolean enableSession; + + private final ConnectionProperty sessionInfoConnectionProperty; + + private final boolean useQueryCache; + private final String queryDialect; + private final List queryProperties; + private final Boolean allowLargeResults; + private final String kmsKeyName; + private final Clustering clustering; + + private final JobInfo.CreateDisposition createDisposition; + + private final EncryptionConfiguration destinationEncryptionConfiguration; + + private final String destinationTable; + private final String destinationDataset; + private final long destinationDatasetExpirationTime; + + private final long jobTimeoutMs; + + private final int maximumBillingTier; + + private final QueryJobConfiguration.Priority priority; + + private final RangePartitioning rangePartitioning; + + private final List schemaUpdateOptions; + + private final Map tableDefinitions; + + private final TimePartitioning timePartitioning; + + private final List userDefinedFunctions; + + private final JobInfo.WriteDisposition writeDisposition; + + private final int numBufferedRows; + + private final long maxResultPerPage; + + private final DatasetId defaultDataset; + + private final boolean useWriteAPI; + private final int writeAPIActivationRowCount; + private final int writeAPIAppendRowCount; + + private final long maxBytesBilled; + private final Map labels; + + private BigQuerySettings(Builder builder) { + this.useReadAPI = builder.useReadAPI; + this.highThroughputActivationRatio = builder.highThroughputActivationRatio; + this.highThroughputMinTableSize = builder.highThroughputMinTableSize; + this.useQueryCache = builder.useQueryCache; + this.queryDialect = builder.queryDialect; + this.queryProperties = builder.queryProperties; + this.allowLargeResults = builder.allowLargeResults; + this.kmsKeyName = builder.kmsKeyName; + this.clustering = builder.clustering; + this.createDisposition = builder.createDisposition; + 
this.destinationEncryptionConfiguration = builder.destinationEncryptionConfiguration;
+    this.destinationTable = builder.destinationTable;
+    this.destinationDataset = builder.destinationDataset;
+    this.destinationDatasetExpirationTime = builder.destinationDatasetExpirationTime;
+    this.jobTimeoutMs = builder.jobTimeoutMs;
+    this.maximumBillingTier = builder.maximumBillingTier;
+    this.priority = builder.priority;
+    this.rangePartitioning = builder.rangePartitioning;
+    this.schemaUpdateOptions = builder.schemaUpdateOptions;
+    this.tableDefinitions = builder.tableDefinitions;
+    this.timePartitioning = builder.timePartitioning;
+    this.userDefinedFunctions = builder.userDefinedFunctions;
+    this.writeDisposition = builder.writeDisposition;
+    this.numBufferedRows = builder.numBufferedRows;
+    this.maxResultPerPage = builder.maxResultPerPage;
+    this.defaultDataset = builder.defaultDataset;
+    this.enableSession = builder.enableSession;
+    this.unsupportedHTAPIFallback = builder.unsupportedHTAPIFallback;
+    this.sessionInfoConnectionProperty = builder.sessionInfoConnectionProperty;
+    this.useWriteAPI = builder.useWriteAPI;
+    this.writeAPIActivationRowCount = builder.writeAPIActivationRowCount;
+    this.writeAPIAppendRowCount = builder.writeAPIAppendRowCount;
+    this.maxBytesBilled = builder.maxBytesBilled;
+    this.labels = builder.labels;
+  }
+
+  /** Returns a builder for a BigQuerySettings object. */
+  static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Returns the useReadAPI flag, enabled by default. The Read API will be used if the underlying
+   * conditions are satisfied and this flag is enabled.
+   */
+  Boolean getUseReadAPI() {
+    return useReadAPI;
+  }
+
+  /**
+   * Returns the activation ratio: the connector switches to the BigQuery Storage API when the
+   * number of pages and rows in the query results exceed this value and
+   * HighThroughputMinTableSize, respectively.
+   */
+  int getHighThroughputActivationRatio() {
+    return highThroughputActivationRatio;
+  }
+
+  /**
+   * Returns the minimum result size, in rows, used together with the activation ratio: when query
+   * results are large, exceeding both the row and page limits, the connector switches to the
+   * BigQuery Storage API for faster processing.
+   */
+  int getHighThroughputMinTableSize() {
+    return highThroughputMinTableSize;
+  }
+
+  /**
+   * Determines if session features are enabled.
+   *
+   * <p>Enabling session-level features allows for capturing SQL activities or enabling
+   * multi-statement transactions. Session tracking is disabled by default.
+   *
+   * @return true if session is enabled, false otherwise.
+   */
+  boolean isEnableSession() {
+    return enableSession;
+  }
+
+  /**
+   * When the connector uses fetch workflows not supported on the High-Throughput API, this option
+   * specifies whether the connector falls back to the REST API or returns an error. By default it
+   * falls back to the standard API.
+   *
+   * @return true to fall back to the standard API, false to return an error.
+   */
+  boolean isUnsupportedHTAPIFallback() {
+    return unsupportedHTAPIFallback;
+  }
+
+  /**
+   * Returns information about the BigQuery session ConnectionProperty associated with this job.
+   *
+   * <p>

    BigQuery's sessions provide a way to link multiple jobs and maintain temporary data, such as + * temporary tables, between them. They are needed for using multi-statement transactions that + * span across multiple queries. + * + * @return An instance of {@link ConnectionProperty} containing session details, or {@code null} + * if this job is not part of a session. + */ + ConnectionProperty getSessionInfoConnectionProperty() { + return sessionInfoConnectionProperty; + } + + Boolean getUseQueryCache() { + return useQueryCache; + } + + String getQueryDialect() { + return queryDialect; + } + + List getQueryProperties() { + return this.queryProperties; + } + + /** + * Returns the KMS resource name which is the unique identifier you give to your encryption key in + * Google Cloud's Key Management Service (KMS). Tells BigQuery which key to use when encrypting or + * decrypting your data. + */ + String getKmsKeyName() { + return kmsKeyName; + } + + Boolean getAllowLargeResults() { + return allowLargeResults; + } + + /** Returns the clustering specification for the destination table. */ + Clustering getClustering() { + return clustering; + } + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + JobInfo.CreateDisposition getCreateDisposition() { + return createDisposition; + } + + /** Returns the custom encryption configuration (e.g., Cloud KMS keys) */ + EncryptionConfiguration getDestinationEncryptionConfiguration() { + return destinationEncryptionConfiguration; + } + + /** + * Returns the table where you want to store query results. If not provided a default temp table + * is created when needed. + */ + String getDestinationTable() { + return destinationTable; + } + + /** + * Returns the dataset where you want to store query results. If not provided a default dataset is + * created when needed. + */ + String getDestinationDataset() { + return destinationDataset; + } + + long getDestinationDatasetExpirationTime() { + return destinationDatasetExpirationTime; + } + + /** Returns the timeout associated with this job */ + Long getJobTimeoutMs() { + return jobTimeoutMs; + } + + /** Returns the optional billing tier limit for this job. */ + Integer getMaximumBillingTier() { + return maximumBillingTier; + } + + /** Returns the query priority. */ + QueryJobConfiguration.Priority getPriority() { + return priority; + } + + /** Returns the range partitioning specification for the table */ + RangePartitioning getRangePartitioning() { + return rangePartitioning; + } + + /** + * Returns options allowing the schema of the destination table to be updated as a side effect of + * the query job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a partition + * of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE will always + * overwrite the schema. + */ + List getSchemaUpdateOptions() { + return schemaUpdateOptions; + } + + /** + * Returns the external tables definitions. If querying external data sources outside BigQuery, + * this value describes the data format, location and other properties of the data sources. By + * defining these properties, the data sources can be queried as if they were standard BigQuery + * tables. + */ + Map getTableDefinitions() { + return tableDefinitions; + } + + /** Returns the time partitioning specification for the destination table. 
*/ + TimePartitioning getTimePartitioning() { + return timePartitioning; + } + + /** + * Returns user defined function resources that can be used by this query. Function resources can + * either be defined inline ({@link UserDefinedFunction.Type#INLINE}) or loaded from a Google + * Cloud Storage URI ({@link UserDefinedFunction.Type#FROM_URI}. + */ + List getUserDefinedFunctions() { + return userDefinedFunctions; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + JobInfo.WriteDisposition getWriteDisposition() { + return writeDisposition; + } + + /** Returns the number of rows of data to pre-fetch */ + Integer getNumBufferedRows() { + return numBufferedRows; + } + + Long getMaxResultPerPage() { + return maxResultPerPage; + } + + DatasetId getDefaultDataset() { + return defaultDataset; + } + + boolean isUseWriteAPI() { + return useWriteAPI; + } + + int getWriteAPIActivationRowCount() { + return writeAPIActivationRowCount; + } + + int getWriteAPIAppendRowCount() { + return writeAPIAppendRowCount; + } + + long getMaxBytesBilled() { + return maxBytesBilled; + } + + Map getLabels() { + return labels; + } + + @Override + public String toString() { + return "BigQuerySettings{" + + "enableSession=" + + enableSession + + ", " + + "unsupportedHTAPIFallback=" + + unsupportedHTAPIFallback + + ", " + + "sessionInfo=" + + sessionInfoConnectionProperty + + ", " + + "useReadAPI=" + + useReadAPI + + ", " + + "kmsKeyName=" + + kmsKeyName + + ", " + + "highThroughputMinTableSize=" + + highThroughputMinTableSize + + ", " + + "highThroughputActivationRatio=" + + highThroughputActivationRatio + + ", " + + "useQueryCache=" + + useQueryCache + + ", " + + "queryDialect=" + + queryDialect + + ", " + + "queryProperties=" + + queryProperties + + ", " + + "allowLargeResults=" + + allowLargeResults + + ", " + + "clustering=" + + clustering + + ", " + + "createDisposition=" + + createDisposition + + ", " + + "destinationEncryptionConfiguration=" + + destinationEncryptionConfiguration + + ", " + + "destinationTable=" + + destinationTable + + ", " + + "destinationDataset=" + + destinationDataset + + ", " + + "destinationDatasetExpirationTime=" + + destinationDatasetExpirationTime + + ", " + + "jobTimeoutMs=" + + jobTimeoutMs + + ", " + + "maximumBillingTier=" + + maximumBillingTier + + ", " + + "priority=" + + priority + + ", " + + "rangePartitioning=" + + rangePartitioning + + ", " + + "schemaUpdateOptions=" + + schemaUpdateOptions + + ", " + + "tableDefinitions=" + + tableDefinitions + + ", " + + "timePartitioning=" + + timePartitioning + + ", " + + "userDefinedFunctions=" + + userDefinedFunctions + + ", " + + "writeDisposition=" + + writeDisposition + + ", " + + "numBufferedRows=" + + numBufferedRows + + ", " + + "maxResultPerPage=" + + maxResultPerPage + + ", " + + "defaultDataset=" + + defaultDataset + + ", " + + "useWriteAPI=" + + useWriteAPI + + ", " + + "writeAPIActivationRowCount=" + + writeAPIActivationRowCount + + ", " + + "writeAPIAppendRowCount=" + + writeAPIAppendRowCount + + ", " + + "maxBytesBilled=" + + maxBytesBilled + + "}"; + } + + /** Returns a builder pre-populated using the current values of this field. 
*/ + Builder toBuilder() { + return new Builder(this); + } + + static final class Builder { + + private boolean useReadAPI; + private int highThroughputMinTableSize; + private int highThroughputActivationRatio; + private boolean enableSession; + private boolean unsupportedHTAPIFallback; + private ConnectionProperty sessionInfoConnectionProperty; + private boolean useQueryCache; + private String queryDialect; + private List queryProperties; + private Boolean allowLargeResults; + private String kmsKeyName; + private Clustering clustering; + private JobInfo.CreateDisposition createDisposition; + private EncryptionConfiguration destinationEncryptionConfiguration; + private String destinationTable; + private String destinationDataset; + private long destinationDatasetExpirationTime; + private long jobTimeoutMs; + private int maximumBillingTier; + private QueryJobConfiguration.Priority priority; + private RangePartitioning rangePartitioning; + private List schemaUpdateOptions; + private Map tableDefinitions; + private TimePartitioning timePartitioning; + private List userDefinedFunctions; + private JobInfo.WriteDisposition writeDisposition; + private int numBufferedRows; + private long maxResultPerPage; + private DatasetId defaultDataset; + private boolean useWriteAPI; + private int writeAPIActivationRowCount; + private int writeAPIAppendRowCount; + private long maxBytesBilled; + private Map labels; + + Builder() { + this.withDefaultValues(); + } + + private Builder(BigQuerySettings querySettings) { + this.useReadAPI = querySettings.getUseReadAPI(); + this.highThroughputMinTableSize = querySettings.getHighThroughputMinTableSize(); + this.highThroughputActivationRatio = querySettings.getHighThroughputActivationRatio(); + this.enableSession = querySettings.isEnableSession(); + this.unsupportedHTAPIFallback = querySettings.isUnsupportedHTAPIFallback(); + this.sessionInfoConnectionProperty = querySettings.getSessionInfoConnectionProperty(); + this.useQueryCache = querySettings.getUseQueryCache(); + this.queryDialect = querySettings.getQueryDialect(); + this.queryProperties = querySettings.getQueryProperties(); + this.allowLargeResults = querySettings.getAllowLargeResults(); + this.kmsKeyName = querySettings.getKmsKeyName(); + this.clustering = querySettings.getClustering(); + this.createDisposition = querySettings.getCreateDisposition(); + this.destinationEncryptionConfiguration = + querySettings.getDestinationEncryptionConfiguration(); + this.destinationTable = querySettings.getDestinationTable(); + this.destinationDataset = querySettings.getDestinationDataset(); + this.destinationDatasetExpirationTime = querySettings.destinationDatasetExpirationTime; + this.jobTimeoutMs = querySettings.getJobTimeoutMs(); + this.maximumBillingTier = querySettings.getMaximumBillingTier(); + this.priority = querySettings.getPriority(); + this.rangePartitioning = querySettings.getRangePartitioning(); + this.schemaUpdateOptions = querySettings.getSchemaUpdateOptions(); + this.tableDefinitions = querySettings.getTableDefinitions(); + this.timePartitioning = querySettings.getTimePartitioning(); + this.userDefinedFunctions = querySettings.getUserDefinedFunctions(); + this.writeDisposition = querySettings.getWriteDisposition(); + this.numBufferedRows = querySettings.getNumBufferedRows(); + this.maxResultPerPage = querySettings.getMaxResultPerPage(); + this.defaultDataset = querySettings.getDefaultDataset(); + this.useWriteAPI = querySettings.isUseWriteAPI(); + this.writeAPIActivationRowCount = 
querySettings.getWriteAPIActivationRowCount();
+      this.writeAPIAppendRowCount = querySettings.getWriteAPIAppendRowCount();
+      this.maxBytesBilled = querySettings.getMaxBytesBilled();
+      this.labels = querySettings.getLabels();
+    }
+
+    Builder withDefaultValues() {
+      return setUseReadAPI(false) // Read API is disabled by default
+          .setQueryDialect(BigQueryJdbcUrlUtility.DEFAULT_QUERY_DIALECT_VALUE)
+          .setNumBufferedRows(10000) // 10K records will be kept in the buffer (Blocking Queue)
+          .setMaxResultPerPage(BigQueryJdbcUrlUtility.DEFAULT_MAX_RESULTS_VALUE);
+    }
+
+    /**
+     * Sets the useReadAPI flag, which is disabled by default. The Read API will be used only if
+     * the underlying conditions are satisfied and this flag is enabled.
+     *
+     * @param useReadAPI whether the connector may use the Read API
+     */
+    Builder setUseReadAPI(boolean useReadAPI) {
+      this.useReadAPI = useReadAPI;
+      return this;
+    }
+
+    /**
+     * Sets the minimum table size for which the BigQuery Storage API will be used.
+     *
+     * <p>

When query results are large enough to exceed both the row and page limits, the connector
+     * automatically switches to the BigQuery Storage API for faster processing. This method
+     * configures the table-size threshold for that switch: the Storage API is used only when the
+     * table size exceeds the specified value.
+     *
+     * @param highThroughputMinTableSize the minimum table size that triggers the use of the
+     *     BigQuery Storage API
+     */
+    Builder setHighThroughputMinTableSize(int highThroughputMinTableSize) {
+      this.highThroughputMinTableSize = highThroughputMinTableSize;
+      return this;
+    }
+
+    /**
+     * Sets the activation ratio for switching to the BigQuery Storage API.
+     *
+     * <p>

The connector switches to the BigQuery Storage API when the number of pages in the query
+     * results exceeds this value AND the table size is greater than or equal to the value set via
+     * {@link #setHighThroughputMinTableSize(int)} (or its default).
+     *
+     * @param highThroughputActivationRatio the activation ratio for switching to the BigQuery
+     *     Storage API
+     */
+    Builder setHighThroughputActivationRatio(int highThroughputActivationRatio) {
+      this.highThroughputActivationRatio = highThroughputActivationRatio;
+      return this;
+    }
+
+    /**
+     * Setting {@code true} enables session-level features such as capturing SQL activity or using
+     * multi-statement transactions. Session tracking is disabled by default.
+     */
+    Builder setEnableSession(boolean enableSession) {
+      this.enableSession = enableSession;
+      return this;
+    }
+
+    /**
+     * When the connector uses fetch workflows that are not supported by the High-Throughput API,
+     * this option specifies whether the connector falls back to the REST API or returns an error.
+     * By default it falls back to the standard API.
+     *
+     * @param unsupportedHTAPIFallback {@code true} to fall back to the standard API, {@code false}
+     *     to return an error
+     */
+    Builder setUnsupportedHTAPIFallback(boolean unsupportedHTAPIFallback) {
+      this.unsupportedHTAPIFallback = unsupportedHTAPIFallback;
+      return this;
+    }
+
+    /**
+     * Sets the session information associated with the job.
+     *
+     * <p>

    BigQuery's sessions provide a way to link multiple jobs and maintain temporary data, such + * as temporary tables, between them. They are needed for using multi-statement transactions + * that span across multiple queries. + */ + Builder setSessionInfoConnectionProperty(ConnectionProperty sessionInfoConnectionProperty) { + this.sessionInfoConnectionProperty = sessionInfoConnectionProperty; + return this; + } + + Builder setUseQueryCache(boolean useQueryCache) { + this.useQueryCache = useQueryCache; + return this; + } + + Builder setAllowLargeResults(Boolean allowLargeResults) { + this.allowLargeResults = allowLargeResults; + return this; + } + + /** + * Set the KMS resource key name which is the unique identifier you give to your encryption key + * in Google Cloud's Key Management Service (KMS). Tells BigQuery which key to use when + * encrypting or decrypting your data. + */ + Builder setKmsKeyName(String kmsKeyName) { + this.kmsKeyName = kmsKeyName; + return this; + } + + Builder setQueryDialect(String queryDialect) { + this.queryDialect = queryDialect; + return this; + } + + Builder setQueryProperties(List queryProperties) { + this.queryProperties = queryProperties; + return this; + } + + /** Sets the clustering specification for the destination table. */ + Builder setClustering(Clustering clustering) { + this.clustering = clustering; + return this; + } + + /** + * Sets whether the job is allowed to create tables. + * + * @see + * Create Disposition + */ + Builder setCreateDisposition(JobInfo.CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + /** + * Sets the custom encryption configuration (e.g., Cloud KMS keys). + * + * @param destinationEncryptionConfiguration destinationEncryptionConfiguration or {@code null} + * for none + */ + Builder setDestinationEncryptionConfiguration( + EncryptionConfiguration destinationEncryptionConfiguration) { + this.destinationEncryptionConfiguration = destinationEncryptionConfiguration; + return this; + } + + Builder setDestinationTable(String destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + Builder setDestinationDataset(String destinationDataset) { + this.destinationDataset = destinationDataset; + return this; + } + + Builder setDestinationDatasetExpirationTime(long destinationDatasetExpirationTime) { + this.destinationDatasetExpirationTime = destinationDatasetExpirationTime; + return this; + } + + /** + * [Optional] Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt + * to terminate the job. + * + * @param jobTimeoutMs jobTimeoutMs or {@code null} for none + */ + Builder setJobTimeoutMs(long jobTimeoutMs) { + this.jobTimeoutMs = jobTimeoutMs; + return this; + } + + /** + * Limits the billing tier for this job. Queries that have resource usage beyond this tier will + * fail (without incurring a charge). If unspecified, this will be set to your project default. + * + * @param maximumBillingTier maximum billing tier for this job + */ + Builder setMaximumBillingTier(int maximumBillingTier) { + this.maximumBillingTier = maximumBillingTier; + return this; + } + + /** + * Sets a priority for the query. If not specified the priority is assumed to be {@link + * Priority#INTERACTIVE}. + */ + Builder setPriority(QueryJobConfiguration.Priority priority) { + this.priority = priority; + return this; + } + + /** + * Range partitioning specification for this table. Only one of timePartitioning and + * rangePartitioning should be specified. 
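+     *
+     * <p>A minimal, illustrative sketch (the field name and bounds here are hypothetical):
+     *
+     * <pre>
+     *  RangePartitioning.Range range =
+     *      RangePartitioning.Range.newBuilder().setStart(0L).setEnd(100L).setInterval(10L).build();
+     *  RangePartitioning rangePartitioning =
+     *      RangePartitioning.newBuilder().setField("customer_id").setRange(range).build();
+     * </pre>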
+ * + * @param rangePartitioning rangePartitioning or {@code null} for none + */ + Builder setRangePartitioning(RangePartitioning rangePartitioning) { + this.rangePartitioning = rangePartitioning; + return this; + } + + /** + * Sets options allowing the schema of the destination table to be updated as a side effect of + * the query job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a + * partition of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE + * will always overwrite the schema. + */ + Builder setSchemaUpdateOptions(List schemaUpdateOptions) { + this.schemaUpdateOptions = schemaUpdateOptions; + return this; + } + + /** + * Sets the external tables definitions. If querying external data sources outside BigQuery, + * this value describes the data format, location and other properties of the data sources. By + * defining these properties, the data sources can be queried as if they were standard BigQuery + * tables. + */ + Builder setTableDefinitions(Map tableDefinitions) { + this.tableDefinitions = tableDefinitions; + return this; + } + + /** Sets the time partitioning specification for the destination table. */ + Builder setTimePartitioning(TimePartitioning timePartitioning) { + this.timePartitioning = timePartitioning; + return this; + } + + /** + * Sets user defined function resources that can be used by this query. Function resources can + * either be defined inline ({@link UserDefinedFunction#inline(String)}) or loaded from a Google + * Cloud Storage URI ({@link UserDefinedFunction#fromUri(String)}. + */ + Builder setUserDefinedFunctions(List userDefinedFunctions) { + this.userDefinedFunctions = userDefinedFunctions; + return this; + } + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + Builder setWriteDisposition(JobInfo.WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + /** + * Sets the number of rows in the buffer (a blocking queue) that query results are consumed + * from. + * + * @param numBufferedRows numBufferedRows or {@code null} for none + */ + Builder setNumBufferedRows(int numBufferedRows) { + this.numBufferedRows = numBufferedRows; + return this; + } + + /** + * Sets the maximum records per page to be used for pagination. This is used as an input for the + * tabledata.list and jobs.getQueryResults RPC calls + * + * @param maxResultPerPage + */ + Builder setMaxResultPerPage(long maxResultPerPage) { + this.maxResultPerPage = maxResultPerPage; + return this; + } + + Builder setDefaultDataset(DatasetId defaultDataset) { + this.defaultDataset = defaultDataset; + return this; + } + + Builder setUseWriteAPI(boolean useWriteAPI) { + this.useWriteAPI = useWriteAPI; + return this; + } + + Builder setWriteAPIActivationRowCount(int writeAPIActivationRowCount) { + this.writeAPIActivationRowCount = writeAPIActivationRowCount; + return this; + } + + Builder setWriteAPIAppendRowCount(int writeAPIAppendRowCount) { + this.writeAPIAppendRowCount = writeAPIAppendRowCount; + return this; + } + + Builder setMaxBytesBilled(long maxBytesBilled) { + this.maxBytesBilled = maxBytesBilled; + return this; + } + + Builder setLabels(Map labels) { + this.labels = labels; + return this; + } + + /** Creates a {@code BigQuerySettings} object. 
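+     *
+     * <p>A minimal, illustrative sketch (the values shown are hypothetical, not defaults):
+     *
+     * <pre>
+     *  BigQuerySettings settings =
+     *      BigQuerySettings.newBuilder()
+     *          .setUseReadAPI(true)
+     *          .setMaxResultPerPage(10000L)
+     *          .build();
+     * </pre>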
*/ + BigQuerySettings build() { + return new BigQuerySettings(this); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java new file mode 100644 index 0000000000..cfdc64a14e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java @@ -0,0 +1,81 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.jdbc.BigQueryStatement.SqlType; + +class BigQuerySqlTypeConverter { + + static SqlType getSqlTypeFromStatementType(StatementType statementType) { + switch (statementType.toString()) { + case "SELECT": + return SqlType.SELECT; + + case "INSERT": + case "UPDATE": + case "DELETE": + case "MERGE": + return SqlType.DML; + case "CALL": + return SqlType.DML_EXTRA; + + case "CREATE_TABLE": + case "CREATE_TABLE_AS_SELECT": + case "CREATE_VIEW": + case "CREATE_MODEL": + case "CREATE_MATERIALIZED_VIEW": + case "CREATE_FUNCTION": + case "CREATE_TABLE_FUNCTION": + case "CREATE_PROCEDURE": + case "CREATE_ROW_ACCESS_POLICY": + case "CREATE_SCHEMA": + case "CREATE_SNAPSHOT_TABLE": + case "CREATE_SEARCH_INDEX": + case "DROP_TABLE": + case "DROP_EXTERNAL_TABLE": + case "DROP_VIEW": + case "DROP_MODEL": + case "DROP_MATERIALIZED_VIEW": + case "DROP_FUNCTION": + case "DROP_TABLE_FUNCTION": + case "DROP_PROCEDURE": + case "DROP_SEARCH_INDEX": + case "DROP_SCHEMA": + case "DROP_SNAPSHOT_TABLE": + case "DROP_ROW_ACCESS_POLICY": + case "ALTER_TABLE": + case "ALTER_VIEW": + case "ALTER_MATERIALIZED_VIEW": + case "ALTER_SCHEMA": + case "TRUNCATE_TABLE": + case "CREATE_EXTERNAL_TABLE": + return SqlType.DDL; + case "SCRIPT": + return SqlType.SCRIPT; + case "BEGIN_TRANSACTION": + case "COMMIT_TRANSACTION": + case "ROLLBACK_TRANSACTION": + return SqlType.TCL; + case "EXPORT_DATA": + case "EXPORT_MODEL": + case "LOAD_DATA": + default: + return SqlType.OTHER; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java new file mode 100644 index 0000000000..0da0868880 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java @@ -0,0 +1,1522 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import com.google.api.gax.paging.Page; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQuery.JobListOption; +import com.google.cloud.bigquery.BigQuery.QueryResultsOption; +import com.google.cloud.bigquery.BigQuery.TableDataListOption; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.EncryptionConfiguration; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobConfiguration; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.JobStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.JobStatistics.ScriptStatistics; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableResult; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlSyntaxErrorException; +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1.DataFormat; +import com.google.cloud.bigquery.storage.v1.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1.ReadSession; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterators; +import java.lang.ref.ReferenceQueue; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.UUID; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.ThreadFactory; +import java.util.logging.Level; + +/** + * An implementation of {@link java.sql.Statement} for executing BigQuery SQL statement and + * returning the results it produces. 
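+ *
+ * <p>A typical usage sketch, where {@code CONNECTION_URL} and {@code QUERY} are placeholders:
+ *
+ * <pre>
+ *  Connection connection = DriverManager.getConnection(CONNECTION_URL);
+ *  Statement statement = connection.createStatement();
+ *  ResultSet resultSet = statement.executeQuery(QUERY);
+ *  while (resultSet.next()) {
+ *    // process the current row
+ *  }
+ * </pre>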
+ * + * @see BigQueryConnection#createStatement + * @see ResultSet + */ +public class BigQueryStatement extends BigQueryNoOpsStatement { + + // TODO (obada): Update this after benchmarking + private static final int MAX_PROCESS_QUERY_THREADS_CNT = 50; + protected static ExecutorService queryTaskExecutor = + Executors.newFixedThreadPool(MAX_PROCESS_QUERY_THREADS_CNT); + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private static final String DEFAULT_DATASET_NAME = "_google_jdbc"; + private static final String DEFAULT_TABLE_NAME = "temp_table_"; + private static final String JDBC_JOB_PREFIX = "google-jdbc-"; + protected ResultSet currentResultSet; + protected long currentUpdateCount = -1; + protected List jobIds = new ArrayList<>(); + protected JobIdWrapper parentJobId = null; + protected int currentJobIdIndex = -1; + protected List batchQueries = new ArrayList<>(); + protected BigQueryConnection connection; + protected int maxFieldSize = 0; + protected int maxRows = 0; + protected boolean isClosed = false; + protected boolean closeOnCompletion = false; + protected Object cancelLock = new Object(); + protected boolean isCanceled = false; + protected boolean poolable; + protected int queryTimeout = 0; + protected SQLWarning warning; + private int fetchDirection = ResultSet.FETCH_FORWARD; + private int fetchSize; + private String scriptQuery; + private Map extraLabels = new HashMap<>(); + + private BigQueryReadClient bigQueryReadClient = null; + private final BigQuery bigQuery; + + final BigQuerySettings querySettings; + + private BlockingQueue bigQueryFieldValueListWrapperBlockingQueue; + + private BlockingQueue arrowBatchWrapperBlockingQueue; + + // Variables Required for the ReferenceQueue implementation + static ReferenceQueue referenceQueueArrowRs = new ReferenceQueue<>(); + static ReferenceQueue referenceQueueJsonRs = new ReferenceQueue<>(); + static List arrowResultSetFinalizers = + new ArrayList<>(); + static List jsonResultSetFinalizers = + new ArrayList<>(); + + private static final ThreadFactory JDBC_THREAD_FACTORY = + new BigQueryThreadFactory("BigQuery-Thread-"); + + static { + BigQueryDaemonPollingTask.startGcDaemonTask( + referenceQueueArrowRs, + referenceQueueJsonRs, + arrowResultSetFinalizers, + jsonResultSetFinalizers); + } + + @VisibleForTesting + public BigQueryStatement(BigQueryConnection connection) { + this.connection = connection; + this.bigQuery = connection.getBigQuery(); + this.querySettings = generateBigQuerySettings(); + } + + private void resetStatementFields() { + this.isCanceled = false; + this.scriptQuery = null; + this.parentJobId = null; + this.currentJobIdIndex = -1; + this.currentUpdateCount = -1; + } + + private BigQuerySettings generateBigQuerySettings() { + LOG.finest("++enter++"); + + BigQuerySettings.Builder querySettings = BigQuerySettings.newBuilder(); + DatasetId defaultDataset = this.connection.getDefaultDataset(); + if (defaultDataset != null) { + querySettings.setDefaultDataset(this.connection.defaultDataset); + } + Long maxBytesBilled = this.connection.getMaxBytesBilled(); + if (maxBytesBilled > 0) { + querySettings.setMaxBytesBilled(maxBytesBilled); + } + if (this.connection.getLabels() != null && !this.connection.getLabels().isEmpty()) { + querySettings.setLabels(this.connection.getLabels()); + } + querySettings.setMaxResultPerPage(this.connection.getMaxResults()); + querySettings.setUseReadAPI(this.connection.isEnableHighThroughputAPI()); + 
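+    // The settings below tune the High-Throughput (Storage Read) API: the connector only
+    // switches from the REST API to the Storage Read API when the table-size and activation
+    // ratio thresholds are met (see useReadAPI and meetsReadRatio).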
querySettings.setHighThroughputMinTableSize(this.connection.getHighThroughputMinTableSize());
+    querySettings.setHighThroughputActivationRatio(
+        this.connection.getHighThroughputActivationRatio());
+    querySettings.setUnsupportedHTAPIFallback(this.connection.isUnsupportedHTAPIFallback());
+    querySettings.setUseQueryCache(this.connection.isUseQueryCache());
+    querySettings.setQueryDialect(this.connection.getQueryDialect());
+    querySettings.setKmsKeyName(this.connection.getKmsKeyName());
+    querySettings.setQueryProperties(this.connection.getQueryProperties());
+    querySettings.setAllowLargeResults(this.connection.isAllowLargeResults());
+    if (this.connection.getJobTimeoutInSeconds() > 0) {
+      querySettings.setJobTimeoutMs(this.connection.getJobTimeoutInSeconds() * 1000L);
+    }
+    if (this.connection.getDestinationTable() != null) {
+      querySettings.setDestinationTable(this.connection.getDestinationTable());
+    }
+    if (this.connection.getDestinationDataset() != null) {
+      querySettings.setDestinationDataset(this.connection.getDestinationDataset());
+      querySettings.setDestinationDatasetExpirationTime(
+          this.connection.getDestinationDatasetExpirationTime());
+    }
+    // Only create a new session when sessions are enabled and no session info exists yet.
+    if (this.connection.enableSession) {
+      if (this.connection.sessionInfoConnectionProperty == null) {
+        querySettings.setEnableSession(this.connection.isSessionEnabled());
+      } else {
+        querySettings.setSessionInfoConnectionProperty(
+            this.connection.getSessionInfoConnectionProperty());
+      }
+    }
+    querySettings.setUseWriteAPI(this.connection.isEnableWriteAPI());
+    querySettings.setWriteAPIActivationRowCount(this.connection.getWriteAPIActivationRowCount());
+    querySettings.setWriteAPIAppendRowCount(this.connection.getWriteAPIAppendRowCount());
+
+    return querySettings.build();
+  }
+
+  /**
+   * Executes a BigQuery SQL query and returns a single {@code ResultSet} object.
+   *
+   * <p>Example of running a query:
+   *
+   * <pre>
+   *  Connection connection = DriverManager.getConnection(CONNECTION_URL);
+   *  Statement bigQueryStatement = connection.createStatement();
+   *  ResultSet result = bigQueryStatement.executeQuery(QUERY);
+   * </pre>
    + * + * @param sql BigQuery SQL query + * @return {@code ResultSet} containing the output of the query + * @throws SQLException if a BigQuery access error occurs, this method is called on a closed + * {@code Statement}, the given SQL statement produces multiple or no result sets. + * @see java.sql.Statement#executeQuery(String) + */ + @Override + public ResultSet executeQuery(String sql) throws SQLException { + // TODO: write method to return state variables to original state. + LOG.finest("++enter++"); + logQueryExecutionStart(sql); + try { + QueryJobConfiguration jobConfiguration = + setDestinationDatasetAndTableInJobConfig(getJobConfig(sql).build()); + runQuery(sql, jobConfiguration); + } catch (InterruptedException ex) { + throw new BigQueryJdbcException(ex); + } + + if (!isSingularResultSet()) { + throw new BigQueryJdbcException( + "Query returned more than one or didn't return any ResultSet."); + } + // This contains all the other assertions spec required on this method + return getCurrentResultSet(); + } + + @Override + public long executeLargeUpdate(String sql) throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(sql); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(sql); + runQuery(sql, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + if (this.currentUpdateCount == -1) { + throw new BigQueryJdbcException( + "Update query expected to return affected row count. Double check query type."); + } + return this.currentUpdateCount; + } + + @Override + public int executeUpdate(String sql) throws SQLException { + LOG.finest("++enter++"); + return checkUpdateCount(executeLargeUpdate(sql)); + } + + int checkUpdateCount(long updateCount) { + LOG.finest("++enter++"); + if (updateCount > Integer.MAX_VALUE) { + LOG.warning("Warning: Table update exceeded maximum limit!"); + // Update count is -2 if update is successful but the update count exceeds Integer.MAX_VALUE + return -2; + } + return (int) updateCount; + } + + @Override + public boolean execute(String sql) throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(sql); + try { + QueryJobConfiguration jobConfiguration = getJobConfig(sql).build(); + // If Large Results are enabled, ensure query type is SELECT + if (isLargeResultsEnabled() && getQueryType(jobConfiguration, null) == SqlType.SELECT) { + jobConfiguration = setDestinationDatasetAndTableInJobConfig(jobConfiguration); + } + runQuery(sql, jobConfiguration); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return getCurrentResultSet() != null; + } + + StatementType getStatementType(QueryJobConfiguration queryJobConfiguration) throws SQLException { + LOG.finest("++enter++"); + QueryJobConfiguration dryRunJobConfiguration = + queryJobConfiguration.toBuilder().setDryRun(true).build(); + Job job; + try { + job = bigQuery.create(JobInfo.of(dryRunJobConfiguration)); + } catch (BigQueryException ex) { + if (ex.getMessage().contains("Syntax error")) { + throw new BigQueryJdbcSqlSyntaxErrorException(ex); + } + throw new BigQueryJdbcException(ex); + } + QueryStatistics statistics = job.getStatistics(); + return statistics.getStatementType(); + } + + SqlType getQueryType(QueryJobConfiguration jobConfiguration, StatementType statementType) + throws SQLException { + LOG.finest("++enter++"); + if (statementType == null) { + statementType = getStatementType(jobConfiguration); + } + + SqlType sqlType = 
BigQuerySqlTypeConverter.getSqlTypeFromStatementType(statementType);
+    LOG.fine(
+        String.format(
+            "Query: %s, Statement Type: %s, SQL Type: %s",
+            jobConfiguration.getQuery(), statementType, sqlType));
+    return sqlType;
+  }
+
+  QueryStatistics getQueryStatistics(QueryJobConfiguration queryJobConfiguration)
+      throws BigQueryJdbcSqlSyntaxErrorException, BigQueryJdbcException {
+    LOG.finest("++enter++");
+    QueryJobConfiguration dryRunJobConfiguration =
+        queryJobConfiguration.toBuilder().setDryRun(true).build();
+    Job job;
+    try {
+      job = this.bigQuery.create(JobInfo.of(dryRunJobConfiguration));
+      return job.getStatistics();
+    } catch (BigQueryException ex) {
+      if (ex.getMessage().contains("Syntax error")) {
+        throw new BigQueryJdbcSqlSyntaxErrorException(ex);
+      }
+      throw new BigQueryJdbcException(ex);
+    }
+  }
+
+  /**
+   * Releases this Statement's BigQuery and JDBC resources immediately instead of waiting for this
+   * to happen when it is automatically closed. These resources include the {@code ResultSet}
+   * object, batch queries, job IDs, and the BigQuery connection.
+   *
+   * <p>Calling the method close on a {@code Statement} object that is already closed has no
+   * effect.
+   *
+   * @throws SQLException if a BigQuery access error occurs
+   */
+  @Override
+  public void close() throws SQLException {
+    LOG.fine(String.format("Closing Statement %s.", this));
+    if (isClosed()) {
+      return;
+    }
+
+    boolean cancelSucceeded = false;
+    try {
+      cancel(); // This attempts to cancel jobs and calls closeStatementResources()
+      cancelSucceeded = true;
+    } catch (SQLException e) {
+      LOG.log(Level.WARNING, "Failed to cancel statement during close().", e);
+    } finally {
+      if (!cancelSucceeded) {
+        closeStatementResources();
+      }
+      this.connection = null;
+      this.isClosed = true;
+    }
+  }
+
+  @Override
+  public int getMaxFieldSize() {
+    return this.maxFieldSize;
+  }
+
+  @Override
+  public void setMaxFieldSize(int max) {
+    this.maxFieldSize = max;
+  }
+
+  @Override
+  public int getMaxRows() {
+    return this.maxRows;
+  }
+
+  @Override
+  public void setMaxRows(int max) {
+    this.maxRows = max;
+  }
+
+  @Override
+  public void setEscapeProcessing(boolean enable) {
+    // TODO: verify how to implement this method
+  }
+
+  @Override
+  public int getQueryTimeout() {
+    return this.queryTimeout;
+  }
+
+  @Override
+  public void setQueryTimeout(int seconds) {
+    if (seconds < 0) {
+      throw new IllegalArgumentException("Query Timeout should be >= 0.");
+    }
+    this.queryTimeout = seconds;
+  }
+
+  /**
+   * Cancels this {@code Statement} object, the running threads, and BigQuery jobs.
+   *
+   * @throws SQLException if a BigQuery access error occurs or this method is called on a closed
+   *     {@code Statement}
+   */
+  @Override
+  public void cancel() throws SQLException {
+    LOG.finest(String.format("Statement %s cancelled", this));
+    synchronized (cancelLock) {
+      this.isCanceled = true;
+      for (JobId jobId : this.jobIds) {
+        try {
+          this.bigQuery.cancel(jobId);
+          LOG.info("Job " + jobId + " cancelled.");
+        } catch (BigQueryException e) {
+          if (e.getMessage() != null
+              && (e.getMessage().contains("Job is already in state DONE")
+                  || e.getMessage().contains("Error: 3848323"))) {
+            LOG.warning("Attempted to cancel a job that was already done: " + jobId);
+          } else {
+            throw new BigQueryJdbcException(e);
+          }
+        }
+      }
+      jobIds.clear();
+    }
+    // If a ResultSet exists, it will be closed as well, which also closes the owned threads.
+    closeStatementResources();
+  }
+
+  @Override
+  public SQLWarning getWarnings() {
+    return this.warning;
+  }
+
+  @Override
+  public void clearWarnings() {
+    this.warning = null;
+  }
+
+  @Override
+  public ResultSet getResultSet() {
+    return this.currentResultSet;
+  }
+
+  @VisibleForTesting
+  void setUpdateCount(long count) {
+    this.currentUpdateCount = count;
+  }
+
+  @Override
+  public int getUpdateCount() {
+    return (int) this.currentUpdateCount;
+  }
+
+  @Override
+  public long getLargeUpdateCount() {
+    return this.currentUpdateCount;
+  }
+
+  @Override
+  public boolean getMoreResults() throws SQLException {
+    return getMoreResults(CLOSE_CURRENT_RESULT);
+  }
+
+  private void closeStatementResources() throws SQLException {
+    LOG.finest("++enter++");
+    if (this.currentResultSet != null) {
+      // If the Statement has 'closeOnCompletion' set, the ResultSet might call back into this
+      // same method; to avoid a stack overflow we clear the ResultSet reference before
+      // calling 'close'.
+ ResultSet tmp = this.currentResultSet; + this.currentResultSet = null; + tmp.close(); + } + this.batchQueries.clear(); + this.currentUpdateCount = -1; + this.currentJobIdIndex = -1; + if (this.connection != null) { + if (this.connection.isTransactionStarted()) { + this.connection.rollback(); + } + this.connection.removeStatement(this); + } + } + + private boolean isSingularResultSet() { + return this.currentResultSet != null + && (this.parentJobId == null || this.parentJobId.getJobs().size() == 1); + } + + private String generateJobId() { + return JDBC_JOB_PREFIX + UUID.randomUUID().toString(); + } + + private class ExecuteResult { + public final TableResult tableResult; + public final Job job; + + ExecuteResult(TableResult tableResult, Job job) { + this.tableResult = tableResult; + this.job = job; + } + } + + @InternalApi + ExecuteResult executeJob(QueryJobConfiguration jobConfiguration) + throws InterruptedException, BigQueryException, BigQueryJdbcException { + LOG.finest("++enter++"); + Job job = null; + // Location is not properly passed from the connection, + // so we need to explicitly set it; + // Do not set custom JobId here or it will disable jobless queries. + JobId jobId = JobId.newBuilder().setLocation(connection.getLocation()).build(); + if (connection.getUseStatelessQueryMode()) { + Object result = bigQuery.queryWithTimeout(jobConfiguration, jobId, null); + if (result instanceof TableResult) { + TableResult tableResult = (TableResult) result; + if (tableResult.getJobId() != null) { + return new ExecuteResult(tableResult, bigQuery.getJob(tableResult.getJobId())); + } + return new ExecuteResult((TableResult) result, null); + } + + if (result instanceof Job) { + job = (Job) result; + } else { + throw new BigQueryJdbcException("Unexpected result type from queryWithTimeout"); + } + } else { + // Update jobId with custom JobId if jobless query is disabled. + jobId = jobId.toBuilder().setJob(generateJobId()).build(); + JobInfo jobInfo = JobInfo.newBuilder(jobConfiguration).setJobId(jobId).build(); + job = bigQuery.create(jobInfo); + } + + if (job == null) { + throw new BigQueryJdbcException("Failed to create BQ Job."); + } + synchronized (cancelLock) { + if (isCanceled) { + job.cancel(); + throw new BigQueryJdbcException("Query was cancelled."); + } + jobId = job.getJobId(); + jobIds.add(jobId); + } + LOG.info("Query submitted with Job ID: " + job.getJobId().getJob()); + TableResult result = + job.getQueryResults(QueryResultsOption.pageSize(querySettings.getMaxResultPerPage())); + synchronized (cancelLock) { + jobIds.remove(jobId); + } + return new ExecuteResult(result, job); + } + + /** + * Execute the SQL script and sets the reference of the underlying job, passing null querySettings + * will result in the FastQueryPath + */ + @InternalApi + void runQuery(String query, QueryJobConfiguration jobConfiguration) + throws SQLException, InterruptedException { + LOG.finest("++enter++"); + LOG.fine("Run Query started"); + + if (queryTimeout > 0) { + jobConfiguration = + jobConfiguration.toBuilder().setJobTimeoutMs(Long.valueOf(queryTimeout) * 1000).build(); + } + + try { + resetStatementFields(); + ExecuteResult executeResult = executeJob(jobConfiguration); + StatementType statementType = + executeResult.job == null + ? 
getStatementType(jobConfiguration) + : ((QueryStatistics) executeResult.job.getStatistics()).getStatementType(); + SqlType queryType = getQueryType(jobConfiguration, statementType); + handleQueryResult(query, executeResult.tableResult, queryType); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } catch (BigQueryException ex) { + if (ex.getMessage().contains("Syntax error")) { + throw new BigQueryJdbcSqlSyntaxErrorException(ex); + } + throw new BigQueryJdbcException(ex); + } + } + + private boolean isLargeResultsEnabled() { + String destinationTable = this.querySettings.getDestinationTable(); + String destinationDataset = this.querySettings.getDestinationDataset(); + return destinationDataset != null || destinationTable != null; + } + + private QueryJobConfiguration setDestinationDatasetAndTableInJobConfig( + QueryJobConfiguration jobConfiguration) { + String destinationTable = this.querySettings.getDestinationTable(); + String destinationDataset = this.querySettings.getDestinationDataset(); + if (destinationDataset != null || destinationTable != null) { + if (destinationDataset != null) { + checkIfDatasetExistElseCreate(destinationDataset); + } + if (jobConfiguration.useLegacySql() && destinationDataset == null) { + checkIfDatasetExistElseCreate(DEFAULT_DATASET_NAME); + destinationDataset = DEFAULT_DATASET_NAME; + } + if (destinationTable == null) { + destinationTable = getDefaultDestinationTable(); + } + return jobConfiguration.toBuilder() + .setAllowLargeResults(this.querySettings.getAllowLargeResults()) + .setDestinationTable(TableId.of(destinationDataset, destinationTable)) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setWriteDisposition(JobInfo.WriteDisposition.WRITE_TRUNCATE) + .build(); + } + return jobConfiguration; + } + + Job getNextJob() { + while (this.currentJobIdIndex + 1 < this.parentJobId.getJobs().size()) { + this.currentJobIdIndex += 1; + Job currentJob = this.parentJobId.getJobs().get(this.currentJobIdIndex); + QueryStatistics queryStatistics = currentJob.getStatistics(); + ScriptStatistics scriptStatistics = queryStatistics.getScriptStatistics(); + // EXPRESSION jobs are not relevant for customer query and can be + // created by BQ depending on various conditions. We will just ignore + // them when presenting results. 
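+      // Per the BigQuery API, a child job's evaluationKind is either STATEMENT or EXPRESSION;
+      // only STATEMENT jobs carry results that should be surfaced to the caller.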
if (!"expression".equalsIgnoreCase(scriptStatistics.getEvaluationKind())) {
+        return currentJob;
+      }
+    }
+    return null;
+  }
+
+  void handleQueryResult(String query, TableResult results, SqlType queryType)
+      throws SQLException, InterruptedException {
+    LOG.finest("++enter++");
+    switch (queryType) {
+      case SELECT:
+        processQueryResponse(query, results);
+        break;
+      case DML:
+      case DML_EXTRA:
+        try {
+          Job completedJob = this.bigQuery.getJob(results.getJobId()).waitFor();
+          JobStatistics.QueryStatistics statistics = completedJob.getStatistics();
+          updateAffectedRowCount(statistics.getNumDmlAffectedRows());
+        } catch (InterruptedException ex) {
+          throw new BigQueryJdbcRuntimeException(ex);
+        } catch (NullPointerException ex) {
+          throw new BigQueryJdbcException(ex);
+        }
+        break;
+      case TCL:
+      case DDL:
+        updateAffectedRowCount(results.getTotalRows());
+        break;
+      case SCRIPT:
+        try {
+          Page<Job> childJobs =
+              this.bigQuery.listJobs(JobListOption.parentJobId(results.getJobId().getJob()));
+
+          ArrayList<Job> childJobList = new ArrayList<>();
+          Iterator<Job> iterableJobs = childJobs.iterateAll().iterator();
+          iterableJobs.forEachRemaining(childJobList::add);
+          Collections.reverse(childJobList);
+
+          this.scriptQuery = query;
+          this.parentJobId = new JobIdWrapper(results.getJobId(), results, childJobList);
+          this.currentJobIdIndex = -1;
+
+          Job currentJob = getNextJob();
+          if (currentJob == null) {
+            return;
+          }
+          StatementType statementType =
+              ((QueryStatistics) (currentJob.getStatistics())).getStatementType();
+          SqlType sqlType = getQueryType(currentJob.getConfiguration(), statementType);
+          handleQueryResult(query, currentJob.getQueryResults(), sqlType);
+        } catch (NullPointerException ex) {
+          throw new BigQueryJdbcException(ex);
+        }
+        break;
+      case OTHER:
+        throw new BigQueryJdbcException(String.format("Unexpected value: %s", queryType));
+    }
+  }
+
+  private void updateAffectedRowCount(Long count) throws SQLException {
+    // TODO(neenu): check if this needs to be closed vs removed
+    if (this.currentResultSet != null) {
+      try {
+        this.currentResultSet.close();
+        this.currentResultSet = null;
+      } catch (SQLException ex) {
+        throw new BigQueryJdbcException(ex);
+      }
+    }
+    this.currentUpdateCount = count;
+  }
+
+  @InternalApi
+  BigQueryReadClient getBigQueryReadClient() {
+    if (this.bigQueryReadClient == null) {
+      this.bigQueryReadClient = this.connection.getBigQueryReadClient();
+    }
+    return this.bigQueryReadClient;
+  }
+
+  @InternalApi
+  ReadSession getReadSession(CreateReadSessionRequest readSessionRequest) {
+    LOG.finest("++enter++");
+    return getBigQueryReadClient().createReadSession(readSessionRequest);
+  }
+
+  @InternalApi
+  ArrowSchema getArrowSchema(ReadSession readSession) {
+    return readSession.getArrowSchema();
+  }
+
+  /** Uses the BigQuery Storage Read API and returns the stream as a {@code ResultSet}. */
+  @InternalApi
+  ResultSet processArrowResultSet(TableResult results) throws SQLException {
+    LOG.finest("++enter++");
+
+    // set the resultset
+    long totalRows = (getMaxRows() > 0) ? 
getMaxRows() : results.getTotalRows(); + JobId currentJobId = results.getJobId(); + TableId destinationTable = getDestinationTable(currentJobId); + Schema schema = results.getSchema(); + try { + String parent = String.format("projects/%s", destinationTable.getProject()); + String srcTable = + String.format( + "projects/%s/datasets/%s/tables/%s", + destinationTable.getProject(), + destinationTable.getDataset(), + destinationTable.getTable()); + + // Read all the columns if the source table (temp table) and stream the data back in Arrow + // format + ReadSession.Builder sessionBuilder = + ReadSession.newBuilder().setTable(srcTable).setDataFormat(DataFormat.ARROW); + + CreateReadSessionRequest.Builder builder = + CreateReadSessionRequest.newBuilder() + .setParent(parent) + .setReadSession(sessionBuilder) + .setMaxStreamCount(1); + + ReadSession readSession = getReadSession(builder.build()); + this.arrowBatchWrapperBlockingQueue = new LinkedBlockingDeque<>(getBufferSize()); + // deserialize and populate the buffer async, so that the client isn't blocked + Thread populateBufferWorker = + populateArrowBufferedQueue( + readSession, this.arrowBatchWrapperBlockingQueue, this.bigQueryReadClient); + + BigQueryArrowResultSet arrowResultSet = + BigQueryArrowResultSet.of( + schema, + getArrowSchema(readSession), + totalRows, + this, + this.arrowBatchWrapperBlockingQueue, + populateBufferWorker, + this.bigQuery); + arrowResultSetFinalizers.add( + new BigQueryResultSetFinalizers.ArrowResultSetFinalizer( + arrowResultSet, referenceQueueArrowRs, populateBufferWorker)); + arrowResultSet.setJobId(currentJobId); + return arrowResultSet; + + } catch (Exception ex) { + throw new BigQueryJdbcException(ex.getMessage(), ex); + } + } + + /** Asynchronously reads results and populates an arrow record queue */ + @InternalApi + Thread populateArrowBufferedQueue( + ReadSession readSession, + BlockingQueue arrowBatchWrapperBlockingQueue, + BigQueryReadClient bqReadClient) { + LOG.finest("++enter++"); + + Runnable arrowStreamProcessor = + () -> { + try { + // Use the first stream to perform reading. + String streamName = readSession.getStreams(0).getName(); + ReadRowsRequest readRowsRequest = + ReadRowsRequest.newBuilder().setReadStream(streamName).build(); + + // Process each block of rows as they arrive and decode using our simple row reader. 
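+          // Each ReadRowsResponse carries a serialized Arrow record batch; the batches are
+          // buffered in a bounded queue so this reader thread and the consuming ResultSet can
+          // make progress independently.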
+ com.google.api.gax.rpc.ServerStream stream = + bqReadClient.readRowsCallable().call(readRowsRequest); + for (ReadRowsResponse response : stream) { + if (Thread.currentThread().isInterrupted() + || queryTaskExecutor.isShutdown()) { // do not process and shutdown + break; + } + + ArrowRecordBatch currentBatch = response.getArrowRecordBatch(); + arrowBatchWrapperBlockingQueue.put(BigQueryArrowBatchWrapper.of(currentBatch)); + } + + } catch (RuntimeException | InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Interrupted @ arrowStreamProcessor", + e); + } finally { // logic needed for graceful shutdown + // marking end of stream + try { + arrowBatchWrapperBlockingQueue.put( + BigQueryArrowBatchWrapper.of(null, true)); // mark the end of the stream + } catch (InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Interrupted @ markLast", + e); + } + } + }; + + Thread populateBufferWorker = JDBC_THREAD_FACTORY.newThread(arrowStreamProcessor); + populateBufferWorker.start(); + return populateBufferWorker; + } + + /** Executes SQL query using either fast query path or read API */ + void processQueryResponse(String query, TableResult results) throws SQLException { + LOG.finest( + String.format( + "API call completed{Query=%s, Parent Job ID=%s, Total rows=%s} ", + query, results.getJobId(), results.getTotalRows())); + JobId currentJobId = results.getJobId(); + if (currentJobId == null) { + LOG.fine("Standard API with Stateless query used."); + this.currentResultSet = processJsonResultSet(results); + } else if (useReadAPI(results)) { + LOG.fine("HighThroughputAPI used."); + LOG.info("HTAPI job ID: " + currentJobId.getJob()); + this.currentResultSet = processArrowResultSet(results); + } else { + // read API cannot be used. + LOG.fine("Standard API used."); + this.currentResultSet = processJsonResultSet(results); + } + this.currentUpdateCount = -1; + } + + // The read Ratio should be met + // AND the User must not have disabled the Read API + @VisibleForTesting + boolean useReadAPI(TableResult results) throws BigQueryJdbcSqlFeatureNotSupportedException { + LOG.finest("++enter++"); + if (!meetsReadRatio(results)) { + return false; + } + LOG.fine("Read API threshold is met."); + return querySettings.getUseReadAPI(); + } + + private boolean meetsReadRatio(TableResult results) { + LOG.finest("++enter++"); + long totalRows = results.getTotalRows(); + + if (totalRows == 0 || totalRows < querySettings.getHighThroughputMinTableSize()) { + return false; + } + + // TODO(BQ Team): TableResult doesnt expose the number of records in the current page, hence the + // below log iterates and counts. This is inefficient and we may eventually want to expose + // PageSize with TableResults + // TODO(Obada): Scope for performance optimization. + int pageSize = Iterators.size(results.getValues().iterator()); + return totalRows / pageSize > querySettings.getHighThroughputActivationRatio(); + } + + BigQueryJsonResultSet processJsonResultSet(TableResult results) { + String jobIdOrQueryId = + results.getJobId() == null ? results.getQueryId() : results.getJobId().getJob(); + LOG.info(String.format("BigQuery Job %s completed. Fetching results.", jobIdOrQueryId)); + List threadList = new ArrayList(); + + Schema schema = results.getSchema(); + long totalRows = (getMaxRows() > 0) ? 
getMaxRows() : results.getTotalRows(); + this.bigQueryFieldValueListWrapperBlockingQueue = new LinkedBlockingDeque<>(getBufferSize()); + BlockingQueue> rpcResponseQueue = + new LinkedBlockingDeque<>(getPageCacheSize(getBufferSize(), schema)); + + JobId jobId = results.getJobId(); + if (jobId != null) { + // Thread to make rpc calls to fetch data from the server + Thread nextPageWorker = + runNextPageTaskAsync(results, results.getNextPageToken(), jobId, rpcResponseQueue); + threadList.add(nextPageWorker); + } else { + try { + populateFirstPage(results, rpcResponseQueue); + rpcResponseQueue.put(Tuple.of(null, false)); + } catch (InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + + Thread.currentThread().getName() + + " Interrupted @ processJsonQueryResponseResults"); + } + } + + // Thread to parse data received from the server to client library objects + Thread populateBufferWorker = + parseAndPopulateRpcDataAsync( + schema, this.bigQueryFieldValueListWrapperBlockingQueue, rpcResponseQueue); + threadList.add(populateBufferWorker); + + Thread[] jsonWorkers = threadList.toArray(new Thread[0]); + + BigQueryJsonResultSet jsonResultSet = + BigQueryJsonResultSet.of( + schema, + totalRows, + this.bigQueryFieldValueListWrapperBlockingQueue, + this, + jsonWorkers, + this.bigQuery); + jsonResultSet.setJobId(jobId); + jsonResultSet.setQueryId(results.getQueryId()); + jsonResultSetFinalizers.add( + new BigQueryResultSetFinalizers.JsonResultSetFinalizer( + jsonResultSet, referenceQueueJsonRs, jsonWorkers)); + return jsonResultSet; + } + + void populateFirstPage( + TableResult result, BlockingQueue> rpcResponseQueue) { + LOG.finest("++enter++"); + // parse and put the first page in the pageCache before the other pages are parsed from the RPC + // calls + try { + // this is the first page which we have received. 
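+      // A tuple whose second element is true means more data may follow; a (null, false) tuple
+      // is used as the end-of-stream marker.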
+ rpcResponseQueue.put(Tuple.of(result, true)); + } catch (InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Interrupted @ populateFirstPage"); + } + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + if (direction != ResultSet.FETCH_FORWARD) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Only FETCH_FORWARD is supported."); + } + this.fetchDirection = direction; + } + + @VisibleForTesting + Thread runNextPageTaskAsync( + TableResult result, + String firstPageToken, + JobId jobId, + BlockingQueue> rpcResponseQueue) { + LOG.finest("++enter++"); + // parse and put the first page in the pageCache before the other pages are parsed from the RPC + // calls + populateFirstPage(result, rpcResponseQueue); + + // This thread makes the RPC calls and paginates + Runnable nextPageTask = + () -> { + // results.getPageToken(); + String pageToken = firstPageToken; + TableId destinationTable = null; + if (firstPageToken != null) { + destinationTable = getDestinationTable(jobId); + } + try { + // paginate for non null token + while (pageToken != null) { + // do not process further pages and shutdown + if (Thread.currentThread().isInterrupted() || queryTaskExecutor.isShutdown()) { + LOG.log( + Level.WARNING, + "\n" + + Thread.currentThread().getName() + + " Interrupted @ runNextPageTaskAsync"); + break; + } + long startTime = System.nanoTime(); + TableResult results = + this.bigQuery.listTableData( + destinationTable, + TableDataListOption.pageSize(querySettings.getMaxResultPerPage()), + TableDataListOption.pageToken(pageToken)); + + pageToken = results.getNextPageToken(); + // this will be parsed asynchronously without blocking the current + // thread + rpcResponseQueue.put(Tuple.of(results, true)); + LOG.fine( + String.format( + "Fetched %d results from the server in %d ms.", + querySettings.getMaxResultPerPage(), + (int) ((System.nanoTime() - startTime) / 1000000))); + } + // this will stop the parseDataTask as well when the pagination + // completes + rpcResponseQueue.put(Tuple.of(null, false)); + } catch (Exception ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + // We cannot do queryTaskExecutor.shutdownNow() here as populate buffer method may not + // have finished processing the records and even that will be interrupted + }; + + Thread nextPageWorker = JDBC_THREAD_FACTORY.newThread(nextPageTask); + nextPageWorker.start(); + return nextPageWorker; + } + + /** + * Takes TableResult from rpcResponseQueue and populates + * bigQueryFieldValueListWrapperBlockingQueue with FieldValueList + */ + @VisibleForTesting + Thread parseAndPopulateRpcDataAsync( + Schema schema, + BlockingQueue bigQueryFieldValueListWrapperBlockingQueue, + BlockingQueue> rpcResponseQueue) { + LOG.finest("++enter++"); + + Runnable populateBufferRunnable = + () -> { // producer thread populating the buffer + Iterable fieldValueLists; + // as we have to process the first page + boolean hasRows = true; + while (hasRows) { + try { + Tuple nextPageTuple = rpcResponseQueue.take(); + if (nextPageTuple.x() != null) { + fieldValueLists = nextPageTuple.x().getValues(); + } else { + fieldValueLists = null; + } + hasRows = nextPageTuple.y(); + + } catch (InterruptedException e) { + LOG.log(Level.WARNING, "\n" + Thread.currentThread().getName() + " Interrupted", e); + // Thread might get interrupted while calling the Cancel method, which is + // expected, so logging this instead of throwing the exception back + break; + } + + if 
(Thread.currentThread().isInterrupted()
+                    || queryTaskExecutor.isShutdown()
+                    || fieldValueLists == null) {
+                  // do not process further pages and shut down (outer loop)
+                  break;
+                }
+
+                long startTime = System.nanoTime();
+                long results = 0;
+                for (FieldValueList fieldValueList : fieldValueLists) {
+                  try {
+                    if (Thread.currentThread().isInterrupted() || queryTaskExecutor.isShutdown()) {
+                      // do not process further pages and shut down (inner loop)
+                      break;
+                    }
+                    bigQueryFieldValueListWrapperBlockingQueue.put(
+                        BigQueryFieldValueListWrapper.of(schema.getFields(), fieldValueList));
+                    results += 1;
+                  } catch (InterruptedException ex) {
+                    throw new BigQueryJdbcRuntimeException(ex);
+                  }
+                }
+                LOG.fine(
+                    String.format(
+                        "Processed %d results in %d ms.",
+                        results, (int) ((System.nanoTime() - startTime) / 1000000)));
+              }
+              try {
+                // All the pages have been processed; put the end-of-stream marker
+                bigQueryFieldValueListWrapperBlockingQueue.put(
+                    BigQueryFieldValueListWrapper.of(null, null, true));
+              } catch (InterruptedException e) {
+                LOG.log(
+                    Level.WARNING,
+                    "\n" + Thread.currentThread().getName() + " Interrupted @ populateBufferAsync",
+                    e);
+              }
+            };
+
+    Thread populateBufferWorker = JDBC_THREAD_FACTORY.newThread(populateBufferRunnable);
+    populateBufferWorker.start();
+    return populateBufferWorker;
+  }
+
+  /**
+   * Helper method that determines the optimal number of cached pages to improve read performance.
+   */
+  @VisibleForTesting
+  int getPageCacheSize(Integer numBufferedRows, Schema schema) {
+    LOG.finest("++enter++");
+    // Min number of pages to cache
+    final int MIN_CACHE_SIZE = 3;
+    // Max number of pages to cache
+    final int MAX_CACHE_SIZE = 20;
+    int numColumns = schema.getFields().size();
+    int numCachedPages;
+    long numCachedRows = numBufferedRows == null ? 0 : numBufferedRows.longValue();
+
+    // TODO: Further enhance this logic depending on customer feedback on memory consumption
+    if (numCachedRows > 10000) {
+      // numBufferedRows is quite large; per our tests, a small page cache is
+      // sufficient at this buffer size
+      numCachedPages = 2;
+    }
+    // too many fields are being read, so keep the cache size on the lower end
+    else if (numColumns > 15 && numCachedRows > 5000) {
+      numCachedPages = 3;
+    }
+    // small page size with few columns, so we can cache more pages
+    else if (numCachedRows < 2000 && numColumns < 15) {
+      numCachedPages = 20;
+    }
+    // default - under 10K numCachedRows with any number of columns
+    else {
+      numCachedPages = 5;
+    }
+    return numCachedPages < MIN_CACHE_SIZE
+        ? MIN_CACHE_SIZE
+        : (Math.min(numCachedPages, MAX_CACHE_SIZE));
+  }
+
+  @Override
+  public int getFetchDirection() {
+    return this.fetchDirection;
+  }
+
+  // TODO(neenu): Fix this value
+  // getNumBufferedRows in querySettings is always the default value - a 20000 buffer size
+  // So, getBufferSize is also 20000.
+  private int getBufferSize() {
+    return (this.querySettings == null
+            || this.querySettings.getNumBufferedRows() == null
+            || this.querySettings.getNumBufferedRows() < 10000
+        ? 
20000 + : Math.min(this.querySettings.getNumBufferedRows() * 2, 100000)); + } + + /** Returns the destinationTable from jobId by calling `jobs.get` API */ + TableId getDestinationTable(JobId jobId) { + Job job = this.bigQuery.getJob(jobId); + LOG.finest(String.format("Destination Table retrieved from %s", job.getJobId())); + return ((QueryJobConfiguration) job.getConfiguration()).getDestinationTable(); + } + + QueryJobConfiguration.Builder getJobConfig(String query) { + LOG.finest("++enter++"); + QueryJobConfiguration.Builder queryConfigBuilder = QueryJobConfiguration.newBuilder(query); + if (this.querySettings.getJobTimeoutMs() > 0) { + queryConfigBuilder.setJobTimeoutMs(this.querySettings.getJobTimeoutMs()); + } + if (this.querySettings.getMaxBytesBilled() > 0) { + queryConfigBuilder.setMaximumBytesBilled(this.querySettings.getMaxBytesBilled()); + } + if (this.querySettings.getDefaultDataset() != null) { + queryConfigBuilder.setDefaultDataset(this.querySettings.getDefaultDataset()); + } + Map mergedLabels = new HashMap<>(); + if (this.querySettings.getLabels() != null) { + mergedLabels.putAll(this.querySettings.getLabels()); + } + if (this.extraLabels != null) { + mergedLabels.putAll(this.extraLabels); + } + queryConfigBuilder.setLabels(mergedLabels); + queryConfigBuilder.setUseQueryCache(this.querySettings.getUseQueryCache()); + queryConfigBuilder.setMaxResults(this.querySettings.getMaxResultPerPage()); + if (this.querySettings.getSessionInfoConnectionProperty() != null) { + queryConfigBuilder.setConnectionProperties( + ImmutableList.of(this.querySettings.getSessionInfoConnectionProperty())); + } else { + queryConfigBuilder.setCreateSession(querySettings.isEnableSession()); + } + if (this.querySettings.getKmsKeyName() != null) { + EncryptionConfiguration encryption = + EncryptionConfiguration.newBuilder() + .setKmsKeyName(this.querySettings.getKmsKeyName()) + .build(); + queryConfigBuilder.setDestinationEncryptionConfiguration(encryption); + } + if (this.querySettings.getQueryProperties() != null) { + queryConfigBuilder.setConnectionProperties(this.querySettings.getQueryProperties()); + } + boolean useLegacy = + QueryDialectType.BIG_QUERY.equals( + QueryDialectType.valueOf(this.querySettings.getQueryDialect())); + queryConfigBuilder.setUseLegacySql(useLegacy); + + return queryConfigBuilder; + } + + private void checkIfDatasetExistElseCreate(String datasetName) { + Dataset dataset = bigQuery.getDataset(DatasetId.of(datasetName)); + if (dataset == null) { + LOG.info(String.format("Creating a hidden dataset: %s ", datasetName)); + DatasetInfo datasetInfo = + DatasetInfo.newBuilder(datasetName) + .setDefaultTableLifetime(this.querySettings.getDestinationDatasetExpirationTime()) + .build(); + bigQuery.create(datasetInfo); + } + } + + private String getDefaultDestinationTable() { + String timeOfCreation = String.valueOf(Instant.now().toEpochMilli()); + String randomizedId = String.valueOf(new Random().nextInt(9999)); + return DEFAULT_TABLE_NAME + timeOfCreation + randomizedId; + } + + @InternalApi + JobIdWrapper insertJob(JobConfiguration jobConfiguration) throws SQLException { + Job job; + JobInfo jobInfo = JobInfo.of(jobConfiguration); + LOG.finest("++enter++"); + try { + job = this.bigQuery.create(jobInfo); + } catch (BigQueryException ex) { + throw new BigQueryJdbcException(ex); + } + return new JobIdWrapper(job.getJobId(), null, null); + } + + @Override + public void setFetchSize(int rows) { + this.fetchSize = rows; + } + + @Override + public int getFetchSize() { + return this.fetchSize; 
+ } + + /** + * Gets the extra labels for this statement. + * + * @return A map of the extra labels. + */ + public Map getExtraLabels() { + return this.extraLabels; + } + + /** + * Sets the extra labels for this statement. + * + * @param extraLabels A map of the extra labels. + */ + public void setExtraLabels(Map extraLabels) { + this.extraLabels = extraLabels; + } + + @Override + public int getResultSetConcurrency() { + return ResultSet.CONCUR_READ_ONLY; + } + + ResultSet getCurrentResultSet() { + return this.currentResultSet; + } + + @Override + public int getResultSetType() { + return ResultSet.TYPE_FORWARD_ONLY; + } + + /** + * Wraps jobId and the firstPage of QueryResponse, so that we can avoid RPC to fetch the first + * page again + */ + static class JobIdWrapper { + + private JobId jobId; + private TableResult firstPage; + private ArrayList jobs; + + public JobIdWrapper(JobId jobId, TableResult firstPage, ArrayList jobs) { + this.jobId = jobId; + this.firstPage = firstPage; + this.jobs = jobs; + } + + JobId getJobId() { + return this.jobId; + } + + void setJobId(JobId jobId) { + this.jobId = jobId; + } + + TableResult getResults() { + return this.firstPage; + } + + void setResults(TableResult firstPage) { + this.firstPage = firstPage; + } + + ArrayList getJobs() { + return jobs; + } + + void setJobs(ArrayList jobs) { + this.jobs = jobs; + } + } + + @Override + public void addBatch(String sql) throws SQLException { + if (sql == null || sql.isEmpty()) { + return; + } + LOG.finest("++enter++"); + sql = sql.trim(); + if (!sql.endsWith(";")) { + sql += "; "; + } + SqlType sqlType = getQueryType(QueryJobConfiguration.newBuilder(sql).build(), null); + if (!SqlType.DML.equals(sqlType)) { + throw new IllegalArgumentException("addBatch currently supports DML operations."); + } + this.batchQueries.add(sql); + } + + @Override + public void clearBatch() { + this.batchQueries.clear(); + } + + @Override + public int[] executeBatch() throws SQLException { + LOG.finest("++enter++"); + int[] result = new int[this.batchQueries.size()]; + if (this.batchQueries.isEmpty()) { + return result; + } + + try { + String combinedQueries = String.join("", this.batchQueries); + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(combinedQueries); + jobConfiguration.setPriority(QueryJobConfiguration.Priority.BATCH); + runQuery(combinedQueries, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + + int i = 0; + while (getUpdateCount() != -1 && i < this.batchQueries.size()) { + result[i] = getUpdateCount(); + getMoreResults(); + i++; + } + + clearBatch(); + return result; + } + + @Override + public Connection getConnection() { + return this.connection; + } + + public boolean hasMoreResults() { + if (this.parentJobId == null) { + return false; + } + return this.currentJobIdIndex + 1 < this.parentJobId.getJobs().size(); + } + + @Override + public boolean getMoreResults(int current) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (current != CLOSE_CURRENT_RESULT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "The JDBC driver only supports Statement.CLOSE_CURRENT_RESULT."); + } + + if (this.parentJobId == null) { + return false; + } + + try { + if (this.currentResultSet != null) { + this.currentResultSet.close(); + this.currentResultSet = null; + // Statement can be closed if it was the last result + if (isClosed) { + return false; + } + } + + Job currentJob = getNextJob(); + if (currentJob != null) { + 
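+        // Child jobs of a multi-statement script are surfaced in order; this returns
+        // true only when the next child produced a ResultSet, i.e. a SELECT statement
+        // (sqlType == SqlType.SELECT below).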
StatementType statementType = + ((QueryStatistics) (currentJob.getStatistics())).getStatementType(); + SqlType sqlType = getQueryType(currentJob.getConfiguration(), statementType); + handleQueryResult(this.scriptQuery, currentJob.getQueryResults(), sqlType); + + return sqlType == SqlType.SELECT; + } else { + resetStatementFields(); + return false; + } + } catch (InterruptedException | SQLException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } + + @Override + public boolean isWrapperFor(Class iface) { + return iface.isInstance(this); + } + + @Override + public T unwrap(Class iface) throws SQLException { + if (!isWrapperFor(iface)) { + throw new BigQueryJdbcException( + String.format("Unable to cast Statement to %s class.", iface.getName())); + } + return (T) this; + } + + @Override + public int getResultSetHoldability() { + return ResultSet.CLOSE_CURSORS_AT_COMMIT; + } + + @Override + public boolean isClosed() { + return this.isClosed; + } + + @Override + public void setPoolable(boolean poolable) { + this.poolable = poolable; + } + + @Override + public boolean isPoolable() { + return this.poolable; + } + + @Override + public void closeOnCompletion() { + this.closeOnCompletion = true; + } + + @Override + public boolean isCloseOnCompletion() { + return this.closeOnCompletion; + } + + protected void logQueryExecutionStart(String sql) { + if (sql == null) { + return; + } + String sanitizedSql = sql.trim().replaceAll("\\s+", " "); + String truncatedSql = + sanitizedSql.length() > 256 ? sanitizedSql.substring(0, 256) + "..." : sanitizedSql; + LOG.info("Executing query: " + truncatedSql); + LOG.info("Using query settings: " + this.querySettings.toString()); + } + + /** Throws a {@link BigQueryJdbcException} if this object is closed */ + void checkClosed() throws SQLException { + if (isClosed()) { + throw new BigQueryJdbcException("This " + getClass().getName() + " has been closed"); + } + } + + enum SqlType { + SELECT, + DML, + DML_EXTRA, + DDL, + SCRIPT, + TCL, + OTHER + } + + enum QueryDialectType { + SQL, + BIG_QUERY + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java new file mode 100644 index 0000000000..a5aa7a73a5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.core.InternalApi;
+import java.util.concurrent.ThreadFactory;
+
+@InternalApi
+class BigQueryThreadFactory implements ThreadFactory {
+  private static final BigQueryJdbcCustomLogger LOG =
+      new BigQueryJdbcCustomLogger(BigQueryThreadFactory.class.getName());
+  private String threadPrefix;
+  private int threadSerialNum = 0;
+
+  public BigQueryThreadFactory(String threadPrefix) {
+    this.threadPrefix = threadPrefix;
+  }
+
+  public BigQueryThreadFactory() {
+    this.threadPrefix = "DEFAULT_POOL_";
+  }
+
+  @Override
+  public synchronized Thread newThread(Runnable r) {
+    Thread t = new Thread(r, threadPrefix + (++threadSerialNum)); // synchronized to keep the increment thread safe
+    t.setDaemon(true);
+    LOG.finest(String.format("New thread %s created.", t.getName()));
+    return t;
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercer.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercer.java
new file mode 100644
index 0000000000..d156c2d662
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercer.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.core.InternalApi;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException;
+import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionNotFoundException;
+import java.util.Map;
+
+/**
+ * Provides a declarative mechanism for coercing an object from one type to another. For example,
+ * coercion of {@link String} to {@link Integer} can be achieved like this:
+ *
+ *

    + *   Integer value = BigQueryTypeCoercer.INSTANCE.coerceTo(Integer.class, "3452148");
    + *   System.out.println(value); // 3452148
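+ *   // If no coercion to the requested target type is registered, coerceTo throws
+ *   // a BigQueryJdbcCoercionNotFoundException.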
    + * 
    + * + * A {@link BigQueryTypeCoercer} is baked with all the default {@link BigQueryCoercion}s from {@link + * BigQueryDefaultCoercions} to coerce all the primitive types. + * + *

It is also possible to extend the behaviour of {@link BigQueryTypeCoercer} to other custom
 + * user-defined types by creating an implementation of {@link BigQueryCoercion} and registering it
 + * with {@link BigQueryTypeCoercerBuilder} using its {@link
 + * BigQueryTypeCoercerBuilder#registerTypeCoercion(BigQueryCoercion)} method.
 + *
 + *

+ *   public class TextToStringCoercion implements BigQueryCoercion<Text, String> {
+ *
+ *    // No constructor is needed; the source and target types are inferred from
+ *    // the BigQueryCoercion type parameters when the coercion is registered.
+ *
+ *    @Override
+ *    public String coerce(Text text) {
+ *       return text.toString();  // logic to coerce from Text type to String type
+ *    }
+ *  }
    + * 
    + * + * and use it like this + * + *
    + *    byte[] bytesArray = {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33};
    + *    Text text = new Text(bytesArray);
    + *
    + *    BigQueryTypeCoercer typeCoercer = new BigQueryTypeCoercerBuilder()
+ *         .registerTypeCoercion(new TextToStringCoercion())  // registering a custom coercion
    + *         .build();
    + *    System.out.println(typeCoercer.coerceTo(String.class, text));  //  Hello World!
    + * 
    + */ +@InternalApi +class BigQueryTypeCoercer { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryTypeCoercer.class.getName()); + + /** A {@link BigQueryTypeCoercer} instance with all the inbuilt {@link BigQueryCoercion}s */ + static BigQueryTypeCoercer INSTANCE; + + static { + INSTANCE = BigQueryDefaultCoercions.builder().build(); + } + + private final Map, Map, BigQueryCoercion>> allCoercions; + + BigQueryTypeCoercer(Map, Map, BigQueryCoercion>> allCoercions) { + this.allCoercions = allCoercions; + } + + /** + * Coerce an object to the type specified. + * + * @param value the object that needs to be coerced. + * @param targetClass the target class for the coercion + * @throws BigQueryJdbcCoercionNotFoundException when coercion can not be performed to the target + * type. + * @throws BigQueryJdbcCoercionException when an error is encountered while performing the + * coercion. + */ + T coerceTo(Class targetClass, Object value) { + Class sourceClass = value == null ? Void.class : value.getClass(); + // FieldValue object for null-values requires special check + if (sourceClass == FieldValue.class && ((FieldValue.class.cast(value)).isNull())) { + sourceClass = Void.class; + } + // No coercion needed + if (sourceClass.equals(targetClass)) { + return targetClass.cast(value); + } + BigQueryCoercion coercion = findCoercion(sourceClass, targetClass); + LOG.finest(() -> String.format("%s coercion for %s", coercion, value)); + // Value is null case & no explicit coercion + if (sourceClass == Void.class && coercion == null) { + return null; + } + if (coercion == null) { + if (targetClass.equals(String.class)) { + return (T) value.toString(); + } + throw new BigQueryJdbcCoercionNotFoundException(sourceClass, targetClass); + } + try { + return coercion.coerce(sourceClass != Void.class ? value : null); + } catch (Exception ex) { + throw new BigQueryJdbcCoercionException(ex); + } + } + + /** + * Creates a {@link BigQueryTypeCoercerBuilder} with all the default coercions from {@link + * BigQueryDefaultCoercions}. + */ + static BigQueryTypeCoercerBuilder builder() { + return BigQueryDefaultCoercions.builder(); + } + + private BigQueryCoercion findCoercion(Class sourceClass, Class targetClass) { + Map, BigQueryCoercion> bySourceMap = this.allCoercions.get(sourceClass); + // AutoValue generated concrete classes are registered with their abstract classes and not the + // concrete class. Lets make sure the we can find the registered abstract class for such + // classes. The abstract class in these cases would be the super class of the generated + // AutoValue concrete classes. + if (bySourceMap == null) { + Class registeredAbstractClass = sourceClass.getSuperclass(); + bySourceMap = this.allCoercions.get(registeredAbstractClass); + } + // If we still can't find the coercion source class entry then just return. 
+ if (bySourceMap == null) { + return null; + } + return (BigQueryCoercion) bySourceMap.get(targetClass); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerBuilder.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerBuilder.java new file mode 100644 index 0000000000..8539515ed1 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerBuilder.java @@ -0,0 +1,79 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; + +/** + * A builder to create {@link BigQueryTypeCoercer} to perform the coercion of custom user defined + * types. + */ +@InternalApi +class BigQueryTypeCoercerBuilder { + + private final Map, Map, BigQueryCoercion>> allCoercions; + + BigQueryTypeCoercerBuilder() { + this.allCoercions = new HashMap<>(); + } + + /** + * registers a {@link BigQueryCoercion} + * + * @param coercion A {@link BigQueryCoercion} to register with this builder. + */ + BigQueryTypeCoercerBuilder registerTypeCoercion(BigQueryCoercion coercion) { + Type[] typeArguments = + ((ParameterizedType) coercion.getClass().getGenericInterfaces()[0]) + .getActualTypeArguments(); + Class sourceClass = (Class) typeArguments[0]; + Class targetClass = (Class) typeArguments[1]; + this.registerInternal(coercion, sourceClass, targetClass); + return this; + } + + /** + * registers a {@link BigQueryCoercion} using an implementation of {@link Function} + * + * @param function A {@link Function} to register with the builder. + * @param sourceClass the source class + * @param targetClass the target class + */ + BigQueryTypeCoercerBuilder registerTypeCoercion( + Function function, Class sourceClass, Class targetClass) { + this.registerInternal((BigQueryCoercion) function::apply, sourceClass, targetClass); + return this; + } + + /** builds the {@link BigQueryTypeCoercer} with all the registered {@link BigQueryCoercion}s. 
*/ + BigQueryTypeCoercer build() { + return new BigQueryTypeCoercer(this.allCoercions); + } + + private void registerInternal( + BigQueryCoercion coercion, Class sourceClass, Class targetClass) { + Map, BigQueryCoercion> mapBySource = + this.allCoercions.getOrDefault(sourceClass, new HashMap<>()); + mapBySource.put(targetClass, coercion); + this.allCoercions.putIfAbsent(sourceClass, mapBySource); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercionUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercionUtility.java new file mode 100644 index 0000000000..9a4dc21304 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercionUtility.java @@ -0,0 +1,409 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.Range; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.Period; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.temporal.ChronoUnit; +import java.util.concurrent.TimeUnit; +import org.apache.arrow.vector.PeriodDuration; +import org.apache.arrow.vector.util.Text; + +@InternalApi +class BigQueryTypeCoercionUtility { + + static BigQueryTypeCoercer INSTANCE; + + static { + INSTANCE = + BigQueryTypeCoercer.builder() + .registerTypeCoercion(new FieldValueToString()) + .registerTypeCoercion(new FieldValueToInteger()) + .registerTypeCoercion(new FieldValueToFloat()) + .registerTypeCoercion(new FieldValueToShort()) + .registerTypeCoercion(new FieldValueToLong()) + .registerTypeCoercion(new FieldValueToDouble()) + .registerTypeCoercion(new FieldValueToBigDecimal()) + .registerTypeCoercion(new FieldValueToBoolean()) + .registerTypeCoercion(new FieldValueToBytesArray()) + .registerTypeCoercion(new FieldValueToTimestamp()) + .registerTypeCoercion(new FieldValueToTime()) + .registerTypeCoercion(new FieldValueToDate()) + .registerTypeCoercion(new FieldValueToObject()) + .registerTypeCoercion(new StringToBytesArray()) + .registerTypeCoercion(new RangeToString()) + .registerTypeCoercion(new IntegerToLong()) + .registerTypeCoercion(new BytesArrayToString()) + + // Read API Type coercions + .registerTypeCoercion(Timestamp::valueOf, LocalDateTime.class, Timestamp.class) + .registerTypeCoercion(Text::toString, Text.class, String.class) + .registerTypeCoercion(new TextToInteger()) + .registerTypeCoercion(new LongToTimestamp()) + .registerTypeCoercion(new LongToTime()) + .registerTypeCoercion(new IntegerToDate()) + .registerTypeCoercion( 
+ (Timestamp ts) -> Date.valueOf(ts.toLocalDateTime().toLocalDate()), + Timestamp.class, + Date.class) + .registerTypeCoercion( + (Timestamp ts) -> Time.valueOf(ts.toLocalDateTime().toLocalTime()), + Timestamp.class, + Time.class) + .registerTypeCoercion( + (Time time) -> // Per JDBC spec, the date component should be 1970-01-01 + Timestamp.valueOf(LocalDateTime.of(LocalDate.ofEpochDay(0), time.toLocalTime())), + Time.class, + Timestamp.class) + .registerTypeCoercion( + (Date date) -> new Timestamp(date.getTime()), Date.class, Timestamp.class) + .registerTypeCoercion(new TimestampToString()) + .registerTypeCoercion(new TimeToString()) + .registerTypeCoercion((Long l) -> l != 0L, Long.class, Boolean.class) + .registerTypeCoercion((Double d) -> d != 0.0d, Double.class, Boolean.class) + .registerTypeCoercion( + (BigDecimal bd) -> bd.compareTo(BigDecimal.ZERO) != 0, + BigDecimal.class, + Boolean.class) + .registerTypeCoercion((Integer i) -> i != 0, Integer.class, Boolean.class) + .registerTypeCoercion((Float f) -> f != 0.0f, Float.class, Boolean.class) + .registerTypeCoercion((Short s) -> s.shortValue() != 0, Short.class, Boolean.class) + .registerTypeCoercion((Boolean b) -> b ? 1L : 0L, Boolean.class, Long.class) + .registerTypeCoercion((Boolean b) -> b ? 1.0d : 0.0d, Boolean.class, Double.class) + .registerTypeCoercion((Boolean b) -> b ? 1.0f : 0.0f, Boolean.class, Float.class) + .registerTypeCoercion((Boolean b) -> (short) (b ? 1 : 0), Boolean.class, Short.class) + .registerTypeCoercion((Boolean b) -> (byte) (b ? 1 : 0), Boolean.class, Byte.class) + .registerTypeCoercion( + (Boolean b) -> b ? BigDecimal.ONE : BigDecimal.ZERO, + Boolean.class, + BigDecimal.class) + .registerTypeCoercion(new PeriodDurationToString()) + .registerTypeCoercion(unused -> (byte) 0, Void.class, Byte.class) + .registerTypeCoercion(unused -> 0, Void.class, Integer.class) + .registerTypeCoercion(unused -> 0L, Void.class, Long.class) + .registerTypeCoercion(unused -> 0D, Void.class, Double.class) + .registerTypeCoercion(unused -> 0f, Void.class, Float.class) + .registerTypeCoercion(unused -> (short) 0, Void.class, Short.class) + .build(); + } + + private static class TimestampToString implements BigQueryCoercion { + private static final DateTimeFormatter FORMATTER = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS"); + + @Override + public String coerce(Timestamp value) { + return FORMATTER.format(value.toLocalDateTime()); + } + } + + private static class TimeToString implements BigQueryCoercion { + private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSS"); + + @Override + public String coerce(Time value) { + return FORMATTER.format(value.toLocalTime()); + } + } + + private static class PeriodDurationToString implements BigQueryCoercion { + + @Override + public String coerce(PeriodDuration value) { + StringBuilder builder = new StringBuilder(); + + // Conversion of Period + Period period = value.getPeriod().normalized(); + + builder + .append(period.getYears()) + .append("-") + .append(period.getMonths()) + .append(" ") + .append(period.getDays()) + .append(" "); + + // Conversion of Duration + Duration duration = value.getDuration(); + if (duration.isNegative()) { + builder.append("-"); + duration = duration.negated(); + } + long hours = duration.toHours(); + duration = duration.minusHours(hours); + long minutes = duration.toMinutes(); + duration = duration.minusMinutes(minutes); + long seconds = duration.getSeconds(); + duration = duration.minusSeconds(seconds); + long 
microseconds = duration.toNanos() / 1000; + + builder + .append(hours) + .append(":") + .append(minutes) + .append(":") + .append(seconds) + .append(".") + .append(microseconds); + + String result = builder.toString(); + result = result.replaceFirst("--", "-"); + + return result; + } + } + + private static class IntegerToDate implements BigQueryCoercion { + + @Override + public Date coerce(Integer value) { + // For example int 18993 represents 2022-01-01 + // Using LocalDate here to avoid this date getting affected by local time zones. + LocalDate date = LocalDate.ofEpochDay(Long.valueOf(value)); + return Date.valueOf(date); + } + } + + private static class LongToTime implements BigQueryCoercion { + + @Override + public Time coerce(Long value) { + + int HH = (int) TimeUnit.MICROSECONDS.toHours(value); + int MM = (int) (TimeUnit.MICROSECONDS.toMinutes(value) % 60); + int SS = (int) (TimeUnit.MICROSECONDS.toSeconds(value) % 60); + + // Note: BQ Time has a precision of up to six fractional digits (microsecond precision) + // but java.sql.Time do not. So data after seconds is not returned. + return new Time(HH, MM, SS); + } + } + + private static class LongToTimestamp implements BigQueryCoercion { + + @Override + public Timestamp coerce(Long value) { + // Long value is in microseconds. All further calculations should account for the unit. + Instant instant = Instant.ofEpochMilli(value / 1000).plusNanos((value % 1000) * 1000); + // JDBC is defaulting to UTC because BQ UI defaults to UTC. + LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC")); + return Timestamp.valueOf(localDateTime); + } + } + + private static class TextToInteger implements BigQueryCoercion { + + @Override + public Integer coerce(Text value) { + return Integer.parseInt(value.toString()); + } + } + + private static class FieldValueToObject implements BigQueryCoercion { + + @Override + public Object coerce(FieldValue fieldValue) { + return fieldValue.getValue(); + } + } + + private static class FieldValueToDate implements BigQueryCoercion { + + @Override + public Date coerce(FieldValue fieldValue) { + return Date.valueOf(fieldValue.getStringValue()); + } + } + + private static class FieldValueToTime implements BigQueryCoercion { + + @Override + public Time coerce(FieldValue fieldValue) { + // Time ranges from 00:00:00 to 23:59:59.999999 in BigQuery + String strTime = fieldValue.getStringValue(); + try { + LocalTime localTime = LocalTime.parse(strTime); + // Convert LocalTime to milliseconds of the day. This correctly preserves millisecond + // precision and truncates anything smaller + long millis = TimeUnit.NANOSECONDS.toMillis(localTime.toNanoOfDay()); + return new Time(millis); + } catch (java.time.format.DateTimeParseException e) { + throw new IllegalArgumentException( + "Cannot parse the value " + strTime + " to java.sql.Time", e); + } + } + } + + private static class FieldValueToTimestamp implements BigQueryCoercion { + + @Override + public Timestamp coerce(FieldValue fieldValue) { + String rawValue = fieldValue.getStringValue(); + // BigQuery DATETIME strings are formatted like "YYYY-MM-DD'T'HH:MM:SS.fffffffff" + // BigQuery TIMESTAMP strings are numeric epoch seconds. + if (rawValue.contains("T")) { + // It's a DATETIME string. + // Timestamp.valueOf() expects "yyyy-mm-dd hh:mm:ss.fffffffff" format. + return Timestamp.valueOf(rawValue.replace('T', ' ')); + } else { + // It's a TIMESTAMP numeric string. 
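+        // e.g. (assumed value) 1704067200123456 microseconds since the epoch maps to
+        // Instant 2024-01-01T00:00:00.123456Z, which is then rendered as a UTC LocalDateTime.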
+ long microseconds = fieldValue.getTimestampValue(); + Instant instant = Instant.EPOCH.plus(microseconds, ChronoUnit.MICROS); + // JDBC is defaulting to UTC because BQ UI defaults to UTC. + LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC")); + return Timestamp.valueOf(localDateTime); + } + } + } + + private static class FieldValueToBytesArray implements BigQueryCoercion { + + @Override + public byte[] coerce(FieldValue fieldValue) { + return fieldValue.getBytesValue(); + } + } + + private static class StringToBytesArray implements BigQueryCoercion { + + @Override + public byte[] coerce(String value) { + return value.getBytes(); + } + } + + private static class BytesArrayToString implements BigQueryCoercion { + + @Override + public String coerce(byte[] value) { + return java.util.Base64.getEncoder().encodeToString(value); + } + } + + private static class FieldValueToBoolean implements BigQueryCoercion { + + @Override + public Boolean coerce(FieldValue fieldValue) { + return !fieldValue.isNull() && fieldValue.getBooleanValue(); + } + } + + private static class FieldValueToBigDecimal implements BigQueryCoercion { + + @Override + public BigDecimal coerce(FieldValue fieldValue) { + return fieldValue.getNumericValue(); + } + } + + private static class FieldValueToDouble implements BigQueryCoercion { + + @Override + public Double coerce(FieldValue fieldValue) { + return fieldValue.getDoubleValue(); + } + } + + private static class FieldValueToLong implements BigQueryCoercion { + + @Override + public Long coerce(FieldValue fieldValue) { + return fieldValue.getLongValue(); + } + } + + private static class FieldValueToInteger implements BigQueryCoercion { + + @Override + public Integer coerce(FieldValue fieldValue) { + return (int) fieldValue.getLongValue(); + } + } + + private static class FieldValueToFloat implements BigQueryCoercion { + + @Override + public Float coerce(FieldValue fieldValue) { + return (float) fieldValue.getDoubleValue(); + } + } + + private static class FieldValueToShort implements BigQueryCoercion { + + @Override + public Short coerce(FieldValue fieldValue) { + return (short) fieldValue.getLongValue(); + } + } + + private static class FieldValueToString implements BigQueryCoercion { + + @Override + public String coerce(FieldValue fieldValue) { + if (Attribute.REPEATED.equals(fieldValue.getAttribute())) { // Case for Arrays + return fieldValue.getValue().toString(); + } + if (Attribute.RANGE.equals(fieldValue.getAttribute())) { // Range values + Range rangeValue = fieldValue.getRangeValue(); + return INSTANCE.coerceTo(String.class, rangeValue); + } + if (Attribute.RECORD.equals(fieldValue.getAttribute())) { // Case for Structs + return fieldValue.getRecordValue().toString(); + } + return fieldValue.getStringValue(); + } + } + + private static class IntegerToLong implements BigQueryCoercion { + + @Override + public Long coerce(Integer intValue) { + if (intValue == null) { + return 0L; + } + return Long.valueOf(intValue); + } + } + + private static class RangeToString implements BigQueryCoercion { + + @Override + public String coerce(Range value) { + FieldValue startValue = value.getStart(); + FieldValue endValue = value.getEnd(); + + String start = startValue.isNull() ? "UNBOUNDED" : startValue.getStringValue(); + String end = endValue.isNull() ? "UNBOUNDED" : endValue.getStringValue(); + // The start of a range is inclusive, and the end is exclusive. 
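+      // e.g. a range with an unbounded end renders as "[2020-01-01, UNBOUNDED)".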
+ return String.format("[%s, %s)", start, end); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/DataSource.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/DataSource.java new file mode 100644 index 0000000000..b1501890bd --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/DataSource.java @@ -0,0 +1,664 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import java.io.PrintWriter; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Map; +import java.util.Properties; +import java.util.logging.Logger; + +/** + * BigQuery JDBC implementation of {@link javax.sql.DataSource} + * + *

A factory for connections to the physical data source that this DataSource object represents.
+ * An alternative to the DriverManager facility, a DataSource object is the preferred means of
+ * getting a connection. An object that implements the DataSource interface will typically be
+ * registered with a naming service based on the Java™ Naming and Directory (JNDI) API.
+ */
+public class DataSource implements javax.sql.DataSource {
+  private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+  private String URL;
+  private String projectId;
+  private String defaultDataset;
+  private String location;
+  private String userAgent;
+  private Boolean enableHighThroughputAPI;
+  private Integer highThroughputMinTableSize;
+  private Integer highThroughputActivationRatio;
+  private Boolean unsupportedHTAPIFallback;
+  private String kmsKeyName;
+  private Map queryProperties;
+  private String logLevel;
+  private Boolean enableSession;
+  private String logPath;
+  private Integer oAuthType;
+  private String oAuthServiceAcctEmail;
+  private String oAuthPvtKeyPath;
+  private String oAuthPvtKey;
+  private String oAuthAccessToken;
+  private String oAuthRefreshToken;
+  private Boolean useQueryCache;
+  private String queryDialect;
+  private Boolean allowLargeResults;
+  private String destinationTable;
+  private String destinationDataset;
+  private Long destinationDatasetExpirationTime;
+  private String universeDomain;
+  private String proxyHost;
+  private String proxyPort;
+  private String proxyUid;
+  private String proxyPwd;
+  private String oAuthClientId;
+  private String oAuthClientSecret;
+  private Integer jobCreationMode;
+  private Long maxResults;
+  private String partnerToken;
+  private Boolean enableWriteAPI;
+  private String additionalProjects;
+  private Boolean filterTablesOnDefaultDataset;
+  private Integer requestGoogleDriveScope;
+  private Integer metadataFetchThreadCount;
+  private String sslTrustStorePath;
+  private String sslTrustStorePassword;
+
+  // Make sure the JDBC driver class is loaded.
+  static {
+    try {
+      Class.forName("com.google.cloud.bigquery.jdbc.BigQueryDriver");
+    } catch (ClassNotFoundException ex) {
+      throw new IllegalStateException(
+          "DataSource failed to load com.google.cloud.bigquery.jdbc.BigQueryDriver", ex);
+    }
+  }
+
+  /** An implementation of DataSource must include a public no-arg constructor. */
+  public DataSource() {}
+
+  @Override
+  public Connection getConnection() throws SQLException {
+    if (getURL() == null) {
+      throw new BigQueryJdbcException(
+          "Connection URL is null. Please specify a valid Connection URL to get Connection.");
+    }
+    if (!BigQueryDriver.getRegisteredDriver().acceptsURL(getURL())) {
+      throw new BigQueryJdbcException(
+          "The URL " + getURL() + " is invalid. Please specify a valid Connection URL.
"); + } + return DriverManager.getConnection(getURL(), createProperties()); + } + + private Properties createProperties() { + Properties connectionProperties = new Properties(); + if (this.projectId != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROJECT_ID_PROPERTY_NAME, this.projectId); + } + if (this.defaultDataset != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.DEFAULT_DATASET_PROPERTY_NAME, this.defaultDataset); + } + if (this.location != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LOCATION_PROPERTY_NAME, this.location); + } + if (this.enableHighThroughputAPI != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ENABLE_HTAPI_PROPERTY_NAME, + String.valueOf(this.enableHighThroughputAPI)); + } + if (this.unsupportedHTAPIFallback != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME, + String.valueOf(this.unsupportedHTAPIFallback)); + } + if (this.highThroughputMinTableSize != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME, + String.valueOf(this.highThroughputMinTableSize)); + } + if (this.highThroughputActivationRatio != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.HTAPI_ACTIVATION_RATIO_PROPERTY_NAME, + String.valueOf(this.highThroughputActivationRatio)); + } + if (this.kmsKeyName != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.KMS_KEY_NAME_PROPERTY_NAME, this.kmsKeyName); + } + if (this.queryProperties != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.QUERY_PROPERTIES_NAME, this.queryProperties.toString()); + } + if (this.enableSession != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ENABLE_SESSION_PROPERTY_NAME, String.valueOf(this.enableSession)); + } + if (this.logLevel != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LOG_LEVEL_PROPERTY_NAME, this.logLevel); + } + if (this.logPath != null) { + connectionProperties.setProperty(BigQueryJdbcUrlUtility.LOG_PATH_PROPERTY_NAME, this.logPath); + } + if (this.oAuthType != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME, String.valueOf(this.oAuthType)); + } + if (this.oAuthServiceAcctEmail != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME, this.oAuthServiceAcctEmail); + } + if (this.oAuthPvtKeyPath != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, this.oAuthPvtKeyPath); + } + if (this.oAuthPvtKey != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, this.oAuthPvtKey); + } + if (this.oAuthAccessToken != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME, this.oAuthAccessToken); + } + if (this.oAuthRefreshToken != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME, this.oAuthRefreshToken); + } + if (this.useQueryCache != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.USE_QUERY_CACHE_PROPERTY_NAME, String.valueOf(this.useQueryCache)); + } + if (this.queryDialect != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.QUERY_DIALECT_PROPERTY_NAME, this.queryDialect); + } + if (this.allowLargeResults != null) { + connectionProperties.setProperty( + 
BigQueryJdbcUrlUtility.ALLOW_LARGE_RESULTS_PROPERTY_NAME, + String.valueOf(this.allowLargeResults)); + } + if (this.destinationTable != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LARGE_RESULTS_TABLE_PROPERTY_NAME, this.destinationTable); + } + if (this.destinationDataset != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LARGE_RESULTS_DATASET_PROPERTY_NAME, this.destinationDataset); + } + if (this.destinationDatasetExpirationTime != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME, + String.valueOf(this.destinationDatasetExpirationTime)); + } + if (this.universeDomain != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, this.universeDomain); + } + if (this.proxyHost != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, this.proxyHost); + } + if (this.proxyPort != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, this.proxyPort); + } + if (this.proxyUid != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME, this.proxyUid); + } + if (this.proxyPwd != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME, this.proxyPwd); + } + if (this.oAuthClientId != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, this.oAuthClientId); + } + if (this.oAuthClientSecret != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, this.oAuthClientSecret); + } + if (this.jobCreationMode != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.JOB_CREATION_MODE_PROPERTY_NAME, + String.valueOf(this.jobCreationMode)); + } + if (this.maxResults != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.MAX_RESULTS_PROPERTY_NAME, String.valueOf(this.maxResults)); + } + if (this.partnerToken != null && !this.partnerToken.isEmpty()) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PARTNER_TOKEN_PROPERTY_NAME, this.partnerToken); + } + if (this.enableWriteAPI != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ENABLE_WRITE_API_PROPERTY_NAME, + String.valueOf(this.enableWriteAPI)); + } + if (this.additionalProjects != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ADDITIONAL_PROJECTS_PROPERTY_NAME, this.additionalProjects); + } + if (this.filterTablesOnDefaultDataset != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME, + String.valueOf(this.filterTablesOnDefaultDataset)); + } + if (this.requestGoogleDriveScope != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + String.valueOf(this.requestGoogleDriveScope)); + } + if (this.metadataFetchThreadCount != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME, + String.valueOf(this.metadataFetchThreadCount)); + } + if (this.sslTrustStorePath != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PROPERTY_NAME, + String.valueOf(this.sslTrustStorePath)); + } + if (this.sslTrustStorePassword != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PWD_PROPERTY_NAME, + 
String.valueOf(this.sslTrustStorePassword)); + } + return connectionProperties; + } + + @Override + public Connection getConnection(String username, String password) throws SQLException { + LOG.warning( + "Username and Password is not supported in Bigquery JDBC Driver. Values discarded."); + return getConnection(); + } + + public String getURL() { + return URL; + } + + public void setURL(String URL) { + this.URL = URL; + } + + public String getProjectId() { + return projectId; + } + + public void setProjectId(String projectId) { + this.projectId = projectId; + } + + public String getDefaultDataset() { + return defaultDataset; + } + + public void setDefaultDataset(String defaultDataset) { + this.defaultDataset = defaultDataset; + } + + public String getLocation() { + return location; + } + + public void setLocation(String location) { + this.location = location; + } + + public String getUserAgent() { + return userAgent; + } + + public void setUserAgent(String userAgent) { + this.userAgent = userAgent; + } + + public String getPartnerToken() { + return partnerToken; + } + + public void setPartnerToken(String partnerToken) { + // This property is expected to be set by partners only. For more details on exact format + // supported, refer b/396086960 + this.partnerToken = partnerToken; + } + + public boolean getEnableHighThroughputAPI() { + return enableHighThroughputAPI; + } + + public void setEnableHighThroughputAPI(Boolean enableHighThroughputAPI) { + this.enableHighThroughputAPI = enableHighThroughputAPI; + } + + public int getHighThroughputMinTableSize() { + return highThroughputMinTableSize; + } + + public int getHighThroughputActivationRatio() { + return highThroughputActivationRatio; + } + + public void setHighThroughputMinTableSize(Integer highThroughputMinTableSize) { + this.highThroughputMinTableSize = highThroughputMinTableSize; + } + + public void setHighThroughputActivationRatio(Integer highThroughputActivationRatio) { + this.highThroughputActivationRatio = highThroughputActivationRatio; + } + + public void setKmsKeyName(String kmsKeyName) { + this.kmsKeyName = kmsKeyName; + } + + public String getKmsKeyName() { + return this.kmsKeyName; + } + + public void setQueryProperties(Map queryProperties) { + this.queryProperties = queryProperties; + } + + public Map getQueryProperties() { + return this.queryProperties; + } + + public void setUnsupportedHTAPIFallback(Boolean unsupportedHTAPIFallback) { + this.unsupportedHTAPIFallback = unsupportedHTAPIFallback; + } + + public boolean getUnsupportedHTAPIFallback() { + return this.unsupportedHTAPIFallback; + } + + public boolean getEnableSession() { + return enableSession; + } + + public void setEnableSession(Boolean enableSession) { + this.enableSession = enableSession; + } + + public String getLogLevel() { + return logLevel; + } + + public void setLogLevel(String logLevel) { + this.logLevel = logLevel; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public String getUniverseDomain() { + return universeDomain; + } + + public void setUniverseDomain(String universeDomain) { + this.universeDomain = universeDomain; + } + + public String getProxyHost() { + return proxyHost; + } + + public void setProxyHost(String proxyHost) { + this.proxyHost = proxyHost; + } + + public String getProxyPort() { + return proxyPort; + } + + public void setProxyPort(String proxyPort) { + this.proxyPort = proxyPort; + } + + public String getProxyUid() { + return proxyUid; + } + + 
public void setProxyUid(String proxyUid) { + this.proxyUid = proxyUid; + } + + public String getProxyPwd() { + return proxyPwd; + } + + public void setProxyPwd(String proxyPwd) { + this.proxyPwd = proxyPwd; + } + + public int getOAuthType() { + return oAuthType; + } + + public void setOAuthType(Integer oAuthType) { + this.oAuthType = oAuthType; + } + + public String getOAuthServiceAcctEmail() { + return oAuthServiceAcctEmail; + } + + public void setOAuthServiceAcctEmail(String oAuthServiceAcctEmail) { + this.oAuthServiceAcctEmail = oAuthServiceAcctEmail; + } + + public String getOAuthPvtKeyPath() { + return oAuthPvtKeyPath; + } + + public String getOAuthPvtKey() { + return oAuthPvtKey; + } + + public void setOAuthPvtKey(String oAuthPvtKey) { + this.oAuthPvtKey = oAuthPvtKey; + } + + public void setOAuthPvtKeyPath(String oAuthPvtKeyPath) { + this.oAuthPvtKeyPath = oAuthPvtKeyPath; + } + + public String getOAuthAccessToken() { + return oAuthAccessToken; + } + + public void setOAuthAccessToken(String oAuthAccessToken) { + this.oAuthAccessToken = oAuthAccessToken; + } + + public String getOAuthRefreshToken() { + return oAuthRefreshToken; + } + + public void setOAuthRefreshToken(String oAuthRefreshToken) { + this.oAuthRefreshToken = oAuthRefreshToken; + } + + public Boolean getUseQueryCache() { + return useQueryCache; + } + + public String getQueryDialect() { + return queryDialect; + } + + public Boolean getAllowLargeResults() { + return allowLargeResults; + } + + public String getDestinationTable() { + return destinationTable; + } + + public String getDestinationDataset() { + return destinationDataset; + } + + public Long getDestinationDatasetExpirationTime() { + return destinationDatasetExpirationTime; + } + + public void setUseQueryCache(Boolean useQueryCache) { + this.useQueryCache = useQueryCache; + } + + public void setQueryDialect(String queryDialect) { + this.queryDialect = queryDialect; + } + + public void setAllowLargeResults(Boolean allowLargeResults) { + this.allowLargeResults = allowLargeResults; + } + + public void setDestinationTable(String destinationTable) { + this.destinationTable = destinationTable; + } + + public void setDestinationDataset(String destinationDataset) { + this.destinationDataset = destinationDataset; + } + + public void setDestinationDatasetExpirationTime(long destinationDatasetExpirationTime) { + this.destinationDatasetExpirationTime = destinationDatasetExpirationTime; + } + + public String getOAuthClientId() { + return oAuthClientId; + } + + public void setOAuthClientId(String oAuthClientId) { + this.oAuthClientId = oAuthClientId; + } + + public String getOAuthClientSecret() { + return oAuthClientSecret; + } + + public void setOAuthClientSecret(String oAuthClientSecret) { + this.oAuthClientSecret = oAuthClientSecret; + } + + public Integer getJobCreationMode() { + return jobCreationMode; + } + + public void setJobCreationMode(Integer jobCreationMode) { + this.jobCreationMode = jobCreationMode; + } + + public Boolean getEnableWriteAPI() { + return enableWriteAPI; + } + + public void setEnableWriteAPI(Boolean enableWriteAPI) { + this.enableWriteAPI = enableWriteAPI; + } + + public String getAdditionalProjects() { + return additionalProjects; + } + + public void setAdditionalProjects(String additionalProjects) { + this.additionalProjects = additionalProjects; + } + + public Boolean getFilterTablesOnDefaultDataset() { + return filterTablesOnDefaultDataset; + } + + public void setFilterTablesOnDefaultDataset(Boolean filterTablesOnDefaultDataset) { + 
this.filterTablesOnDefaultDataset = filterTablesOnDefaultDataset; + } + + public Integer getRequestGoogleDriveScope() { + return requestGoogleDriveScope; + } + + public void setRequestGoogleDriveScope(Integer requestGoogleDriveScope) { + this.requestGoogleDriveScope = requestGoogleDriveScope; + } + + public Integer getMetadataFetchThreadCount() { + return metadataFetchThreadCount; + } + + public void setMetadataFetchThreadCount(Integer metadataFetchThreadCount) { + this.metadataFetchThreadCount = metadataFetchThreadCount; + } + + public String getSSLTrustStorePath() { + return sslTrustStorePath; + } + + public void setSSLTrustStorePath(String sslTrustStorePath) { + this.sslTrustStorePath = sslTrustStorePath; + } + + public String getSSLTrustStorePassword() { + return sslTrustStorePassword; + } + + public void setSSLTrustStorePassword(String sslTrustStorePassword) { + this.sslTrustStorePassword = sslTrustStorePassword; + } + + @Override + public PrintWriter getLogWriter() { + return null; + } + + @Override + public void setLogWriter(PrintWriter out) {} + + @Override + public void setLoginTimeout(int seconds) {} + + @Override + public int getLoginTimeout() { + return 0; + } + + @Override + public Logger getParentLogger() { + return BigQueryJdbcRootLogger.getRootLogger(); + } + + @Override + public T unwrap(Class iface) { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) { + return false; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java new file mode 100644 index 0000000000..412e4ca9ee --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java @@ -0,0 +1,68 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import com.google.common.annotations.VisibleForTesting;
+import java.sql.Connection;
+import java.sql.SQLException;
+import javax.sql.ConnectionPoolDataSource;
+import javax.sql.PooledConnection;
+
+public class PooledConnectionDataSource extends DataSource implements ConnectionPoolDataSource {
+  private PooledConnectionListener connectionPoolManager = null;
+  Connection bqConnection = null;
+
+  @Override
+  public PooledConnection getPooledConnection() throws SQLException {
+    if (connectionPoolManager != null && !connectionPoolManager.isConnectionPoolEmpty()) {
+      return connectionPoolManager.getPooledConnection();
+    }
+    // Create the underlying physical connection
+    if (bqConnection == null) {
+      bqConnection = super.getConnection();
+    }
+    if (bqConnection == null) {
+      throw new BigQueryJdbcRuntimeException(
+          "Cannot get pooled connection: unable to get underlying physical connection");
+    }
+    String connectionUrl = ((BigQueryConnection) bqConnection).getConnectionUrl();
+    Long connectionPoolSize =
+        BigQueryJdbcUrlUtility.parseConnectionPoolSize(connectionUrl, this.toString());
+    if (connectionPoolManager == null) {
+      connectionPoolManager = new PooledConnectionListener(connectionPoolSize);
+    }
+    BigQueryPooledConnection bqPooledConnection = new BigQueryPooledConnection(bqConnection);
+    bqPooledConnection.addConnectionEventListener(connectionPoolManager);
+    return bqPooledConnection;
+  }
+
+  @VisibleForTesting
+  void setConnection(Connection connection) {
+    this.bqConnection = connection;
+  }
+
+  @VisibleForTesting
+  public PooledConnectionListener getConnectionPoolManager() {
+    return this.connectionPoolManager;
+  }
+
+  @Override
+  public PooledConnection getPooledConnection(String arg0, String arg1) throws SQLException {
+    throw new UnsupportedOperationException("This operation is not supported by the driver");
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java
new file mode 100644
index 0000000000..c0f0820438
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
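[Review note: a sketch of how a pool manager might consume the
ConnectionPoolDataSource above. PooledConnection.getConnection() is standard
javax.sql API; that closing the logical handle fires connectionClosed and
returns the physical connection to the pool is inferred from the listener
wiring in this class, since BigQueryPooledConnection itself is not shown.]

    // Sketch only: hand out a pooled connection and return it on close.
    import java.sql.Connection;
    import java.sql.SQLException;
    import javax.sql.PooledConnection;

    public class PooledDataSourceSketch {
      public static void main(String[] args) throws SQLException {
        PooledConnectionDataSource ds = new PooledConnectionDataSource();
        // ...set URL/OAuth properties on the underlying DataSource first...
        PooledConnection pooled = ds.getPooledConnection();
        try (Connection logical = pooled.getConnection()) {
          // use the logical connection
        } // close() should trigger connectionClosed -> back into the pool
      }
    }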
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import java.util.UUID;
+import java.util.concurrent.LinkedBlockingDeque;
+import javax.sql.ConnectionEvent;
+import javax.sql.ConnectionEventListener;
+import javax.sql.PooledConnection;
+
+public class PooledConnectionListener implements ConnectionEventListener {
+  private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+  private String id; // Mainly for internal use
+  private LinkedBlockingDeque<PooledConnection> connectionPool;
+  private Long connectionPoolSize = 0L;
+
+  public PooledConnectionListener(Long connPoolSize) {
+    id = UUID.randomUUID().toString();
+    this.connectionPoolSize = connPoolSize;
+    if (getConnectionPoolSize() > 0L) {
+      connectionPool = new LinkedBlockingDeque<>(getConnectionPoolSize().intValue());
+    } else {
+      connectionPool = new LinkedBlockingDeque<>();
+    }
+  }
+
+  public Long getConnectionPoolSize() {
+    return this.connectionPoolSize;
+  }
+
+  public int getConnectionPoolCurrentCapacity() {
+    return this.connectionPool.size();
+  }
+
+  public boolean isConnectionPoolEmpty() {
+    return (connectionPool != null && connectionPool.isEmpty());
+  }
+
+  PooledConnection getPooledConnection() {
+    if (isConnectionPoolEmpty()) {
+      LOG.warning("Connection pool is empty");
+      return null;
+    }
+    // Return the first element without removing it from the deque.
+    return connectionPool.getFirst();
+  }
+
+  void addConnection(PooledConnection connection) {
+    LOG.finest("++enter++");
+    if (connection == null) {
+      LOG.warning("Connection passed in is null");
+      return;
+    }
+    if (connectionPool.contains(connection)) {
+      LOG.warning("Connection already in the pool");
+      return;
+    }
+    connectionPool.add(connection);
+  }
+
+  void removeConnection(PooledConnection connection) {
+    LOG.finest("++enter++");
+    if (connection == null) {
+      LOG.warning("Connection passed in is null");
+      return;
+    }
+    if (!connectionPool.contains(connection)) {
+      LOG.warning("Connection not in the pool");
+      return;
+    }
+    connectionPool.remove(connection);
+  }
+
+  @Override
+  public void connectionClosed(ConnectionEvent event) {
+    LOG.finest("++enter++");
+    Object eventSource = event.getSource();
+    if (eventSource == null
+        || !(eventSource instanceof BigQueryPooledConnection)
+        || !(eventSource.getClass().isAssignableFrom(BigQueryPooledConnection.class))) {
+      throw new IllegalArgumentException(
+          "Invalid ConnectionEvent source passed to connectionClosed. Expecting"
+              + " BigQueryPooledConnection.");
+    }
+    BigQueryPooledConnection bqPooledConnection = (BigQueryPooledConnection) eventSource;
+    addConnection(bqPooledConnection);
+    LOG.finest("Added pooled connection to connection pool");
+  }
+
+  @Override
+  public void connectionErrorOccurred(ConnectionEvent event) {
+    LOG.finest("++enter++");
+    Object eventSource = event.getSource();
+    if (eventSource == null
+        || !(eventSource instanceof BigQueryPooledConnection)
+        || !(eventSource.getClass().isAssignableFrom(BigQueryPooledConnection.class))) {
+      throw new IllegalArgumentException(
+          "Invalid ConnectionEvent source passed to connectionErrorOccurred. Expecting"
+              + " BigQueryPooledConnection.");
+    }
+    BigQueryPooledConnection bqPooledConnection = (BigQueryPooledConnection) eventSource;
+    removeConnection(bqPooledConnection);
+    String errorMessage =
+        (event.getSQLException() != null)
+            ? event.getSQLException().getMessage()
+            : "Connection error occurred";
+    LOG.finest(
+        String.format(
+            "Removed pooled connection from connection pool due to error: %s", errorMessage));
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((id == null) ? 0 : id.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) return true;
+    if (obj == null) return false;
+    if (getClass() != obj.getClass()) return false;
+    PooledConnectionListener other = (PooledConnectionListener) obj;
+    if (id == null) {
+      if (other.id != null) return false;
+    } else if (!id.equals(other.id)) return false;
+    return true;
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver b/google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver
new file mode 100644
index 0000000000..1ea35896be
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver
@@ -0,0 +1 @@
+com.google.cloud.bigquery.jdbc.BigQueryDriver
diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql
new file mode 100644
index 0000000000..da83862704
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
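[Review note: the listener above implements the two javax.sql callbacks with
opposite effects: connectionClosed re-adds the physical connection to the
pool, while connectionErrorOccurred evicts it. A compile-level sketch,
assuming the caller sits in the same package as the patch's classes;
ConnectionEvent's constructors are standard javax.sql API.]

    import java.sql.SQLException;
    import javax.sql.ConnectionEvent;

    class ListenerFlowSketch {
      static void demonstrate(PooledConnectionListener listener, BigQueryPooledConnection pooled) {
        // Logical close: the pooled connection goes back into the deque.
        listener.connectionClosed(new ConnectionEvent(pooled));
        // Fatal error: the pooled connection is removed from the deque.
        listener.connectionErrorOccurred(new ConnectionEvent(pooled, new SQLException("simulated")));
      }
    }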
+ */ + +SELECT PKTABLE_CAT, + PKTABLE_SCHEM, + PKTABLE_NAME, + PRIMARY.column_name AS PKCOLUMN_NAME, + FOREIGN.constraint_catalog AS FKTABLE_CAT, + FOREIGN.constraint_schema AS FKTABLE_SCHEM, + FOREIGN.table_name AS FKTABLE_NAME, + FOREIGN.column_name AS FKCOLUMN_NAME, + FOREIGN.ordinal_position AS KEY_SEQ, + NULL AS UPDATE_RULE, + NULL AS DELETE_RULE, + FOREIGN.constraint_name AS FK_NAME, + PRIMARY.constraint_name AS PK_NAME, + NULL AS DEFERRABILITY +FROM (SELECT DISTINCT CCU.table_catalog AS PKTABLE_CAT, + CCU.table_schema AS PKTABLE_SCHEM, + CCU.table_name AS PKTABLE_NAME, + TC.constraint_catalog, + TC.constraint_schema, + TC.constraint_name, + TC.table_catalog, + TC.table_schema, + TC.table_name, + TC.constraint_type, + KCU.column_name, + KCU.ordinal_position, + KCU.position_in_unique_constraint + FROM `%1$s.%2$s.INFORMATION_SCHEMA.TABLE_CONSTRAINTS` TC + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` KCU + USING + (constraint_catalog, + constraint_schema, + constraint_name, + table_catalog, + table_schema, + table_name) + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE` CCU + USING + (constraint_catalog, + constraint_schema, + constraint_name) + WHERE constraint_type = 'FOREIGN KEY' + AND TC.table_name = '%6$s') FOREIGN + INNER JOIN (SELECT * + FROM `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` + WHERE position_in_unique_constraint IS NULL + AND RTRIM(table_name) = '%3$s') PRIMARY +ON + FOREIGN.PKTABLE_CAT = PRIMARY.table_catalog + AND FOREIGN.PKTABLE_SCHEM = PRIMARY.table_schema + AND FOREIGN.PKTABLE_NAME = PRIMARY.table_name + AND FOREIGN.position_in_unique_constraint = + PRIMARY.ordinal_position +ORDER BY FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, KEY_SEQ \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql new file mode 100644 index 0000000000..4058f6bff6 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql @@ -0,0 +1,71 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
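[Review note: the cross-reference template above uses positional %n$s
placeholders: %1$s = project, %2$s = dataset, %3$s = primary-key table and
%6$s = foreign-key table, per its WHERE clauses. The exported- and
imported-keys templates that follow reuse the same join and differ only in
which table filters they apply. A binding sketch; the unused %4$s/%5$s slots
suggest a shared argument list, which is an assumption, and loading the
template from the .sql resource is left abstract.]

    // Sketch only: bind the positional placeholders with String.format.
    class CrossReferenceSqlSketch {
      static String buildSql(String template) { // template = contents of the .sql resource
        return String.format(
            template,
            "my-project",   // %1$s: project
            "my_dataset",   // %2$s: dataset
            "orders",       // %3$s: parent (primary-key) table
            null,           // %4$s: unused by this template
            null,           // %5$s: unused by this template
            "order_items"); // %6$s: child (foreign-key) table
      }
    }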
+ */ + +SELECT PKTABLE_CAT, + PKTABLE_SCHEM, + PKTABLE_NAME, + PRIMARY.column_name AS PKCOLUMN_NAME, + FOREIGN.constraint_catalog AS FKTABLE_CAT, + FOREIGN.constraint_schema AS FKTABLE_SCHEM, + FOREIGN.table_name AS FKTABLE_NAME, + FOREIGN.column_name AS FKCOLUMN_NAME, + FOREIGN.ordinal_position AS KEY_SEQ, + NULL AS UPDATE_RULE, + NULL AS DELETE_RULE, + FOREIGN.constraint_name AS FK_NAME, + PRIMARY.constraint_name AS PK_NAME, + NULL AS DEFERRABILITY +FROM (SELECT DISTINCT CCU.table_catalog AS PKTABLE_CAT, + CCU.table_schema AS PKTABLE_SCHEM, + CCU.table_name AS PKTABLE_NAME, + TC.constraint_catalog, + TC.constraint_schema, + TC.constraint_name, + TC.table_catalog, + TC.table_schema, + TC.table_name, + TC.constraint_type, + KCU.column_name, + KCU.ordinal_position, + KCU.position_in_unique_constraint + FROM `%1$s.%2$s.INFORMATION_SCHEMA.TABLE_CONSTRAINTS` TC + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` KCU + USING + (constraint_catalog, + constraint_schema, + constraint_name, + table_catalog, + table_schema, + table_name) + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE` CCU + USING + (constraint_catalog, + constraint_schema, + constraint_name) + WHERE constraint_type = 'FOREIGN KEY') FOREIGN + INNER JOIN (SELECT * + FROM `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` + WHERE position_in_unique_constraint IS NULL + AND RTRIM(table_name) = '%3$s') PRIMARY +ON + FOREIGN.PKTABLE_CAT = PRIMARY.table_catalog + AND FOREIGN.PKTABLE_SCHEM = PRIMARY.table_schema + AND FOREIGN.PKTABLE_NAME = PRIMARY.table_name + AND FOREIGN.position_in_unique_constraint = + PRIMARY.ordinal_position +ORDER BY FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, KEY_SEQ \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql new file mode 100644 index 0000000000..3f4142eb05 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql @@ -0,0 +1,71 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +SELECT PKTABLE_CAT, + PKTABLE_SCHEM, + PKTABLE_NAME, + PRIMARY.column_name AS PKCOLUMN_NAME, + FOREIGN.constraint_catalog AS FKTABLE_CAT, + FOREIGN.constraint_schema AS FKTABLE_SCHEM, + FOREIGN.table_name AS FKTABLE_NAME, + FOREIGN.column_name AS FKCOLUMN_NAME, + FOREIGN.ordinal_position AS KEY_SEQ, + NULL AS UPDATE_RULE, + NULL AS DELETE_RULE, + FOREIGN.constraint_name AS FK_NAME, + PRIMARY.constraint_name AS PK_NAME, + NULL AS DEFERRABILITY +FROM (SELECT DISTINCT CCU.table_catalog AS PKTABLE_CAT, + CCU.table_schema AS PKTABLE_SCHEM, + CCU.table_name AS PKTABLE_NAME, + TC.constraint_catalog, + TC.constraint_schema, + TC.constraint_name, + TC.table_catalog, + TC.table_schema, + TC.table_name, + TC.constraint_type, + KCU.column_name, + KCU.ordinal_position, + KCU.position_in_unique_constraint + FROM `%1$s.%2$s.INFORMATION_SCHEMA.TABLE_CONSTRAINTS` TC + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` KCU + USING + (constraint_catalog, + constraint_schema, + constraint_name, + table_catalog, + table_schema, + table_name) + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE` CCU + USING + (constraint_catalog, + constraint_schema, + constraint_name) + WHERE constraint_type = 'FOREIGN KEY' + AND TC.table_name = '%3$s') FOREIGN + INNER JOIN (SELECT * + FROM `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` + WHERE position_in_unique_constraint IS NULL) PRIMARY +ON + FOREIGN.PKTABLE_CAT = PRIMARY.table_catalog + AND FOREIGN.PKTABLE_SCHEM = PRIMARY.table_schema + AND FOREIGN.PKTABLE_NAME = PRIMARY.table_name + AND FOREIGN.position_in_unique_constraint = + PRIMARY.ordinal_position +ORDER BY PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME, KEY_SEQ \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql new file mode 100644 index 0000000000..282910fb97 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql @@ -0,0 +1,30 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +SELECT table_catalog AS TABLE_CAT, + table_schema AS TABLE_SCHEM, + table_name AS TABLE_NAME, + column_name AS COLUMN_NAME, + ordinal_position AS KEY_SEQ, + constraint_name AS PK_NAME +FROM + %s.%s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE +WHERE + table_name = '%s' + AND CONTAINS_SUBSTR(constraint_name + , 'pk$') +ORDER BY + COLUMN_NAME; diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties new file mode 100644 index 0000000000..6908cb6cc7 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties @@ -0,0 +1,4 @@ +# Versions of oneself +# {x-version-update-start:google-cloud-bigquery-jdbc:current} +version.jdbc=${project.version} +# {x-version-update-end} \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java new file mode 100644 index 0000000000..0524fc87d1 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java @@ -0,0 +1,234 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
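[Review note: unlike the three templates above, the primary-key query takes
plain ordered %s placeholders (project, dataset, table) and filters on
constraint names containing 'pk$'. A one-line binding sketch; the names are
illustrative.]

    // Sketch only: fill the three ordered %s placeholders.
    class PrimaryKeySqlSketch {
      static String buildSql(String template) {
        return String.format(template, "my-project", "my_dataset", "orders");
      }
    }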
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryTypeCoercionUtility.INSTANCE; +import static com.google.common.truth.Truth.assertThat; +import static java.time.Month.FEBRUARY; +import static java.time.Month.JANUARY; + +import com.google.cloud.bigquery.FieldElementType; +import com.google.cloud.bigquery.Range; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.Duration; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.Period; +import org.apache.arrow.vector.PeriodDuration; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; +import org.apache.arrow.vector.util.Text; +import org.junit.Rule; +import org.junit.Test; + +public class ArrowFormatTypeBigQueryCoercionUtilityTest { + + @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + private static final Range RANGE_DATE = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .setStart("1970-01-02") + .setEnd("1970-03-04") + .build(); + + private static final Range RANGE_DATETIME = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .setStart("2014-08-19 05:41:35.220000") + .setEnd("2015-09-20 06:41:35.220000") + .build(); + + private static final Range RANGE_TIMESTAMP = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd("2015-09-20 13:41:35.220000+01:00") + .build(); + + @Test + public void nullToString() { + assertThat(INSTANCE.coerceTo(String.class, null)).isNull(); + } + + @Test + public void JsonStringArrayListToString() { + JsonStringArrayList employeeList = new JsonStringArrayList<>(); + employeeList.add(1); + employeeList.add(2); + employeeList.add(3); + + assertThat(INSTANCE.coerceTo(String.class, employeeList)).isEqualTo("[1,2,3]"); + } + + @Test + public void localDateTimeToTimestamp() { + LocalDateTime localDatetime = LocalDateTime.of(1995, FEBRUARY, 23, 20, 15); + + assertThat(INSTANCE.coerceTo(Timestamp.class, localDatetime)) + .isEqualTo(Timestamp.valueOf(localDatetime)); + } + + @Test + public void textToString() { + Text text = new Text("Hello World!"); + + assertThat(INSTANCE.coerceTo(String.class, text)).isEqualTo("Hello World!"); + } + + @Test + public void nullToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, null)).isEqualTo(0); + } + + @Test + public void textToInteger() { + Text text = new Text("51423"); + + assertThat(INSTANCE.coerceTo(Integer.class, text)).isEqualTo(51423); + } + + @Test + public void longToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, 56L)).isEqualTo(56); + } + + @Test + public void bigDecimalToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, new BigDecimal("56"))).isEqualTo(56); + } + + @Test + public void nullToLong() { + assertThat(INSTANCE.coerceTo(Long.class, null)).isEqualTo(0L); + } + + @Test + public void bigDecimalToLong() { + assertThat(INSTANCE.coerceTo(Long.class, new BigDecimal("56"))).isEqualTo(56L); + } + + @Test + public void nullToDouble() { + assertThat(INSTANCE.coerceTo(Double.class, null)).isEqualTo(0D); + } + + @Test + public void bigDecimalToDouble() { + assertThat(INSTANCE.coerceTo(Double.class, new BigDecimal("56"))).isEqualTo(56D); + } + + @Test + public void nullToBoolean() { + 
assertThat(INSTANCE.coerceTo(Boolean.class, null)).isFalse(); + } + + @Test + public void nullToByteArray() { + assertThat(INSTANCE.coerceTo(byte[].class, null)).isNull(); + } + + @Test + public void nullToTimestamp() { + assertThat(INSTANCE.coerceTo(Timestamp.class, null)).isNull(); + } + + @Test + public void longToTimestamp() { + assertThat(INSTANCE.coerceTo(Timestamp.class, 1408452095220000L)) + .isEqualTo(new Timestamp(1408452095220L)); + } + + @Test + public void nullToTime() { + assertThat(INSTANCE.coerceTo(Time.class, null)).isNull(); + } + + @Test + public void longToTime() { + assertThat(INSTANCE.coerceTo(Time.class, 1408452095220000L)) + .isEqualTo(new Time(1408452095000L)); + } + + @Test + public void nullToDate() { + assertThat(INSTANCE.coerceTo(Date.class, null)).isNull(); + } + + @Test + public void integerToDate() { + LocalDate expectedDate = LocalDate.of(2022, JANUARY, 1); + assertThat(INSTANCE.coerceTo(Date.class, 18993).toLocalDate()).isEqualTo(expectedDate); + } + + @Test + public void periodDurationToString() { + Period period = Period.of(1, 3, 24); + Duration duration = Duration.ofHours(3).plusMinutes(45).plusSeconds(23).plusNanos(123456000); + PeriodDuration periodDuration = new PeriodDuration(period, duration); + assertThat(INSTANCE.coerceTo(String.class, periodDuration)).isEqualTo("1-3 24 3:45:23.123456"); + + Period period2 = Period.of(1, 6, -8); + Duration duration2 = Duration.ofHours(9).plusMinutes(43).plusSeconds(23).plusNanos(123456000); + PeriodDuration periodDuration2 = new PeriodDuration(period2, duration2); + assertThat(INSTANCE.coerceTo(String.class, periodDuration2)).isEqualTo("1-6 -8 9:43:23.123456"); + } + + // Range tests + + @Test + public void JsonStringHashMapToString() { + JsonStringHashMap employeeMap = new JsonStringHashMap<>(); + employeeMap.putIfAbsent("name1", "type1"); + employeeMap.putIfAbsent("name2", "type2"); + employeeMap.putIfAbsent("name3", "type3"); + + assertThat(INSTANCE.coerceTo(String.class, employeeMap)) + .isEqualTo("{\"name1\":\"type1\",\"name2\":\"type2\",\"name3\":\"type3\"}"); + } + + @Test + public void rangeDateToString() { + String expectedRangeDate = + String.format( + "[%s, %s)", + RANGE_DATE.getStart().getStringValue(), RANGE_DATE.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE)).isEqualTo(expectedRangeDate); + } + + @Test + public void rangeDatetimeToString() { + String expectedRangeDate = + String.format( + "[%s, %s)", + RANGE_DATETIME.getStart().getStringValue(), RANGE_DATETIME.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATETIME)).isEqualTo(expectedRangeDate); + } + + @Test + public void rangeTimestampToString() { + String expectedRangeTimestamp = + String.format( + "[%s, %s)", + RANGE_TIMESTAMP.getStart().getStringValue(), RANGE_TIMESTAMP.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_TIMESTAMP)).isEqualTo(expectedRangeTimestamp); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java new file mode 100644 index 0000000000..5b33fda788 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java @@ -0,0 +1,367 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATE; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowArraySchemaAndValue; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; +import static java.time.Month.MARCH; +import static java.util.Arrays.copyOfRange; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.stream.Stream; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.Text; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +@RunWith(Parameterized.class) +public class BigQueryArrowArrayOfPrimitivesTest { + + private final Field schema; + private final JsonStringArrayList arrayValues; + private final Object[] expected; + private final int javaSqlTypeCode; + private Array array; + private final StandardSQLTypeName currentType; + + @ClassRule public static final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + public BigQueryArrowArrayOfPrimitivesTest( + StandardSQLTypeName currentType, + Tuple> schemaAndValue, + Object[] 
expected, + int javaSqlTypeCode) { + this.currentType = currentType; + this.schema = schemaAndValue.x(); + this.arrayValues = schemaAndValue.y(); + this.expected = expected; + this.javaSqlTypeCode = javaSqlTypeCode; + } + + @Before + public void setUp() { + array = new BigQueryArrowArray(this.schema, this.arrayValues); + } + + @Parameters(name = "{index}: primitive array of {0}") + public static Collection data() { + timeZoneRule.enforce(); + LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000); + LocalDate aDate = LocalDate.of(2023, MARCH, 30); + LocalTime aTime = LocalTime.of(11, 14, 19, 820227); + return Arrays.asList( + new Object[][] { + { + INT64, + arrowArraySchemaAndValue(INT64, 10L, 20L, 30L, 40L), + new Long[] {10L, 20L, 30L, 40L}, + Types.BIGINT + }, + { + BOOL, + arrowArraySchemaAndValue(BOOL, TRUE, FALSE, FALSE, TRUE), + new Boolean[] {true, false, false, true}, + Types.BOOLEAN + }, + { + FLOAT64, + arrowArraySchemaAndValue( + FLOAT64, + Double.valueOf("11.2"), + Double.valueOf("33.4"), + Double.valueOf("55.6"), + Double.valueOf("77.8")), + new Double[] {11.2, 33.4, 55.6, 77.8}, + Types.DOUBLE + }, + { + NUMERIC, + arrowArraySchemaAndValue( + NUMERIC, + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657")), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + BIGNUMERIC, + arrowArraySchemaAndValue( + BIGNUMERIC, + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657")), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + STRING, + arrowArraySchemaAndValue( + STRING, new Text("one"), new Text("two"), new Text("three"), new Text("four")), + new String[] {"one", "two", "three", "four"}, + Types.NVARCHAR + }, + { + TIMESTAMP, + arrowArraySchemaAndValue( + TIMESTAMP, + Long.valueOf("1680174859820227"), + Long.valueOf("1680261259820227"), + Long.valueOf("1680347659820227"), + Long.valueOf("1680434059820227")), + new Timestamp[] { + Timestamp.valueOf(aTimeStamp), // 2023-03-30 16:44:19.82 + Timestamp.valueOf(aTimeStamp.plusDays(1)), + Timestamp.valueOf(aTimeStamp.plusDays(2)), + Timestamp.valueOf(aTimeStamp.plusDays(3)) + }, + Types.TIMESTAMP + }, + { + DATE, + arrowArraySchemaAndValue(DATE, 19446, 19447, 19448, 19449), + new Date[] { + Date.valueOf(aDate), + Date.valueOf(aDate.plusDays(1)), + Date.valueOf(aDate.plusDays(2)), + Date.valueOf(aDate.plusDays(3)) + }, + Types.DATE + }, + { + TIME, + arrowArraySchemaAndValue( + TIME, + Long.valueOf("40459820227"), // 11:14:19.820227 + Long.valueOf("40460820227"), + Long.valueOf("40461820227"), + Long.valueOf("40462820227")), + new Time[] { + Time.valueOf(aTime), + Time.valueOf(aTime.plusSeconds(1)), + Time.valueOf(aTime.plusSeconds(2)), + Time.valueOf(aTime.plusSeconds(3)) + }, + Types.TIME + }, + { + DATETIME, + arrowArraySchemaAndValue( + DATETIME, + LocalDateTime.parse("2023-03-30T11:14:19.820227"), + LocalDateTime.parse("2023-03-30T11:15:19.820227"), + LocalDateTime.parse("2023-03-30T11:16:19.820227"), + LocalDateTime.parse("2023-03-30T11:17:19.820227")), + new Timestamp[] { + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:14:19.820227")), + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:15:19.820227")), + 
Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:16:19.820227")), + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:17:19.820227")) + }, + Types.TIMESTAMP + }, + { + GEOGRAPHY, + arrowArraySchemaAndValue( + GEOGRAPHY, + new Text("POINT(-122 47)"), + new Text("POINT(-122 48)"), + new Text("POINT(-121 47)"), + new Text("POINT(-123 48)")), + new String[] {"POINT(-122 47)", "POINT(-122 48)", "POINT(-121 47)", "POINT(-123 48)"}, + Types.OTHER + }, + { + BYTES, + arrowArraySchemaAndValue( + BYTES, + Stream.of("one", "two", "three", "four") + .map(String::getBytes) + .toArray(byte[][]::new)), // array of bytes array + new byte[][] { + "one".getBytes(), "two".getBytes(), "three".getBytes(), "four".getBytes() + }, + Types.VARBINARY + } + }); + } + + @Test + public void getArray() throws SQLException { + assertThat(array.getArray()).isEqualTo(this.expected); + } + + @Test + public void getSlicedArray() throws SQLException { + int fromIndex = 1; + int toIndexExclusive = 3; + Object[] expectedSlicedArray = + copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2) + + // the first element is at index 1 + assertThat(array.getArray(fromIndex + 1, 2)).isEqualTo(expectedSlicedArray); + } + + @Test + public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getResultSet() throws SQLException { + ResultSet resultSet = this.array.getResultSet(); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + ArrayList indexList = indexAndValues.x(); + ArrayList columnValues = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4}); + assertThat(columnValues.toArray()).isEqualTo(this.expected); + } + + @Test + public void getSlicedResultSet() throws SQLException { + int fromIndex = 1; + int toIndexExclusive = 3; + Object[] expectedSlicedArray = + copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2) + + // the first element is at index 1 + ResultSet resultSet = array.getResultSet(fromIndex + 1, 2); + + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + ArrayList indexList = indexAndValues.x(); + ArrayList columnValues = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3}); + assertThat(columnValues.toArray()).isEqualTo(expectedSlicedArray); + } + + @Test + public void getSlicedResultSetWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(this.currentType.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(this.javaSqlTypeCode); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getResultSet()); + ensureArrayIsInvalid(() -> array.getResultSet(1, 2)); + 
ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getResultSetWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getResultSet(emptyMap())); + Exception exception2 = + assertThrows( + SQLFeatureNotSupportedException.class, () -> array.getResultSet(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java new file mode 100644 index 0000000000..7cb84e70ab --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java @@ -0,0 +1,205 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
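[Review note: a sketch of constructing the java.sql.Array implementation the
tests above exercise, outside the test helpers. The
BigQueryArrowArray(Field, JsonStringArrayList) constructor and the
REPEATED-mode Field come from this patch's tests; package-local visibility of
BigQueryArrowArray is assumed.]

    import com.google.cloud.bigquery.Field;
    import com.google.cloud.bigquery.StandardSQLTypeName;
    import java.sql.Array;
    import java.sql.SQLException;
    import org.apache.arrow.vector.util.JsonStringArrayList;

    class ArrowArraySketch {
      static Object[] readInt64Array() throws SQLException {
        Field schema =
            Field.newBuilder("ids", StandardSQLTypeName.INT64)
                .setMode(Field.Mode.REPEATED)
                .build();
        JsonStringArrayList<Long> values = new JsonStringArrayList<>();
        values.add(10L);
        values.add(20L);
        Array array = new BigQueryArrowArray(schema, values);
        return (Object[]) array.getArray(); // yields {10L, 20L}, per getArray() above
      }
    }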
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.LegacySQLTypeName.RECORD; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowArrayOf; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowStructOf; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.sql.Types; +import java.util.ArrayList; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; +import org.apache.arrow.vector.util.Text; +import org.junit.Before; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; + +public class BigQueryArrowArrayOfStructTest { + + private Array array; + + @Before + public void setUp() { + FieldList profileSchema = + FieldList.of( + Field.newBuilder("name", LegacySQLTypeName.STRING).build(), + Field.newBuilder("age", LegacySQLTypeName.INTEGER).build(), + Field.newBuilder("adult", LegacySQLTypeName.BOOLEAN).build()); + + JsonStringHashMap record1 = + arrowStructOf( + Tuple.of(STRING, new Text("Arya")), Tuple.of(INT64, 15L), Tuple.of(BOOL, FALSE)) + .y(); + JsonStringHashMap record2 = + arrowStructOf( + Tuple.of(STRING, new Text("Khal Drogo")), + Tuple.of(INT64, 35L), + Tuple.of(BOOL, TRUE)) + .y(); + JsonStringHashMap record3 = + arrowStructOf( + Tuple.of(STRING, new Text("Ned Stark")), Tuple.of(INT64, 45L), Tuple.of(BOOL, TRUE)) + .y(); + JsonStringHashMap record4 = + arrowStructOf( + Tuple.of(STRING, new Text("Jon Snow")), Tuple.of(INT64, 25L), Tuple.of(BOOL, TRUE)) + .y(); + + Field arrayOfStructSchema = + Field.newBuilder("profiles", RECORD, profileSchema).setMode(Mode.REPEATED).build(); + + JsonStringArrayList> arrayOfStructValue = + arrowArrayOf(record1, record2, record3, record4); + array = new BigQueryArrowArray(arrayOfStructSchema, arrayOfStructValue); + } + + @Test + public void getArray() throws SQLException { + Struct[] structArray = (Struct[]) array.getArray(); + + assertThat(structArray.length).isEqualTo(4); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[2].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + assertThat(structArray[3].getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void 
getSlicedArray() throws SQLException { + Struct[] structArray = (Struct[]) array.getArray(2, 2); + + assertThat(structArray.length).isEqualTo(2); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structs.get(2).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + assertThat(structs.get(3).getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void getSlicedResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(2, 2); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void getResultSetWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(StandardSQLTypeName.STRUCT.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(Types.STRUCT); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getResultSetWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> 
array.getResultSet(emptyMap())); + Exception exception2 = + assertThrows( + SQLFeatureNotSupportedException.class, () -> array.getResultSet(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java new file mode 100644 index 0000000000..efde49309d --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java @@ -0,0 +1,358 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.utils.ArrowUtilities.serializeSchema; +import static com.google.cloud.bigquery.jdbc.utils.ArrowUtilities.serializeVectorSchemaRoot; +import static com.google.common.truth.Truth.assertThat; +import static org.apache.arrow.vector.types.Types.MinorType.INT; +import static org.apache.arrow.vector.types.Types.MinorType.VARCHAR; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Struct; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeStampMicroVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.complex.impl.UnionListWriter; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.Text; 
+import org.junit.Before; +import org.junit.Test; + +public class BigQueryArrowResultSetTest { + + private static final FieldList fieldList = + FieldList.of( + Field.of("boolField", StandardSQLTypeName.BOOL), + Field.of("int64Filed", StandardSQLTypeName.INT64), + Field.of("float64Field", StandardSQLTypeName.FLOAT64), + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("timeStampField", StandardSQLTypeName.TIMESTAMP), + Field.of("bytesField", StandardSQLTypeName.BYTES), + Field.newBuilder("intArrayField", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REPEATED) + .build(), + Field.of( + "structField", + StandardSQLTypeName.STRUCT, + Field.of("name", StandardSQLTypeName.STRING), + Field.of("age", StandardSQLTypeName.INT64)), + Field.of("numericField", StandardSQLTypeName.BIGNUMERIC), + Field.of("timeField", StandardSQLTypeName.TIME), + Field.of("dateField", StandardSQLTypeName.DATE)); + + private BigQueryArrowBatchWrapper arrowBatchWrapper; + private BigQueryArrowBatchWrapper arrowBatchWrapperLast; + + private BigQueryStatement statement; + + private BlockingQueue buffer; + private BlockingQueue bufferWithTwoRows; + private static final Schema QUERY_SCHEMA = Schema.of(fieldList); + + private VectorSchemaRoot vectorSchemaRoot; + private BigQueryArrowResultSet bigQueryArrowResultSet; + private BigQueryArrowResultSet bigQueryArrowResultSetNested; + + private VectorSchemaRoot getTestVectorSchemaRoot() { + RootAllocator allocator = new RootAllocator(); + BitVector boolField = + new BitVector("boolField", allocator); // Mapped with StandardSQLTypeName.BOOL + boolField.allocateNew(2); + boolField.set(0, 0); + boolField.setValueCount(1); + IntVector int64Filed = + new IntVector("int64Filed", allocator); // Mapped with StandardSQLTypeName.INT64 + int64Filed.allocateNew(2); + int64Filed.set(0, 1); + int64Filed.setValueCount(1); + Float8Vector float64Field = + new Float8Vector("float64Field", allocator); // Mapped with StandardSQLTypeName.FLOAT64 + float64Field.allocateNew(2); + float64Field.set(0, 1.1f); + float64Field.setValueCount(1); + VarCharVector stringField = + new VarCharVector("stringField", allocator); // Mapped with StandardSQLTypeName.STRING + stringField.allocateNew(2); + stringField.set(0, new Text("text1")); + stringField.setValueCount(1); + TimeStampMicroVector timeStampField = + new TimeStampMicroVector( + "timeStampField", allocator); // Mapped with StandardSQLTypeName.TIMESTAMP + timeStampField.allocateNew(2); + timeStampField.set(0, 10000L); + timeStampField.setValueCount(1); + VarBinaryVector bytesField = + new VarBinaryVector("bytesField", allocator); // Mapped with StandardSQLTypeName.BYTES + bytesField.allocateNew(2); + bytesField.set(0, "text1".getBytes()); + bytesField.setValueCount(1); + + ListVector listVector = ListVector.empty("intArrayField", allocator); + UnionListWriter listWriter = listVector.getWriter(); + listWriter.setPosition(0); + listWriter.startList(); + listWriter.writeBigInt(10L); + listWriter.writeBigInt(20L); + listWriter.setValueCount(2); + listWriter.endList(); + listVector.setValueCount(1); + + StructVector structVector = StructVector.empty("structField", allocator); + VarCharVector nameVector = + structVector.addOrGet( + "name", FieldType.notNullable(VARCHAR.getType()), VarCharVector.class); + IntVector ageVector = + structVector.addOrGet("age", FieldType.notNullable(INT.getType()), IntVector.class); + structVector.allocateNew(); + + nameVector.set(0, new Text("Jon Doe")); + nameVector.setValueCount(1); + + ageVector.set(0, 29); + 
ageVector.setValueCount(1); + + structVector.setValueCount(1); + structVector.setIndexDefined(0); + + IntVector numericField = + new IntVector("numericField", allocator); // Mapped with StandardSQLTypeName.BIGNUMERIC + numericField.allocateNew(1000); + numericField.set(0, 1); + numericField.setValueCount(1); + TimeMilliVector timeField = + new TimeMilliVector("timeField", allocator); // Mapped with StandardSQLTypeName.TIME + timeField.allocateNew(2); + timeField.set(0, 1234); + timeField.setValueCount(1); + DateMilliVector dateField = + new DateMilliVector("dateField", allocator); // Mapped with StandardSQLTypeName.DATE + dateField.allocateNew(2); + dateField.set(0, 5000); + dateField.setValueCount(1); + + List fieldVectors = + ImmutableList.of( + boolField, + int64Filed, + float64Field, + stringField, + timeStampField, + bytesField, + listVector, + structVector, + numericField, + timeField, + dateField); + return new VectorSchemaRoot(fieldVectors); + } + + private JsonStringArrayList getJsonStringArrayList() { + JsonStringArrayList jsonStringArrayList = new JsonStringArrayList<>(); + jsonStringArrayList.addAll(Arrays.asList(10L, 20L)); + return jsonStringArrayList; + } + + @Before + public void setUp() throws SQLException, IOException { + buffer = new LinkedBlockingDeque<>(); + bufferWithTwoRows = new LinkedBlockingDeque<>(); + vectorSchemaRoot = getTestVectorSchemaRoot(); + ArrowRecordBatch batch = + ArrowRecordBatch.newBuilder() + .setSerializedRecordBatch(serializeVectorSchemaRoot(vectorSchemaRoot)) + .build(); + arrowBatchWrapper = BigQueryArrowBatchWrapper.of(batch); + arrowBatchWrapperLast = BigQueryArrowBatchWrapper.of(null, true); // last flag + buffer.add(arrowBatchWrapper); + buffer.add(arrowBatchWrapperLast); + bufferWithTwoRows.add(arrowBatchWrapper); + bufferWithTwoRows.add(arrowBatchWrapperLast); + + statement = mock(BigQueryStatement.class); + ArrowSchema arrowSchema = + ArrowSchema.newBuilder() + .setSerializedSchema(serializeSchema(vectorSchemaRoot.getSchema())) + .build(); + Thread workerThread = new Thread(); + bigQueryArrowResultSet = + BigQueryArrowResultSet.of( + QUERY_SCHEMA, arrowSchema, 1, statement, buffer, workerThread, null); + + // nested result set data setup + JsonStringArrayList jsonStringArrayList = getJsonStringArrayList(); + Schema arraySchema = + Schema.of( + Field.newBuilder("integerArray", StandardSQLTypeName.INT64) + .setMode(Mode.REPEATED) + .build()); + bigQueryArrowResultSetNested = + BigQueryArrowResultSet.getNestedResultSet( + arraySchema, + BigQueryArrowBatchWrapper.getNestedFieldValueListWrapper(jsonStringArrayList), + 0, + jsonStringArrayList.size()); + } + + @Test + public void testVectorSchemaRoot() { + assertThat(vectorSchemaRoot).isNotNull(); + assertThat(vectorSchemaRoot.getRowCount()).isEqualTo(1); + } + + @Test + public void testBufferSize() { + assertThat(buffer).isNotNull(); + assertThat(buffer.size()).isEqualTo(2); + } + + @Test + public void testRowCount() throws SQLException, IOException { + ArrowSchema arrowSchema = + ArrowSchema.newBuilder() + .setSerializedSchema(serializeSchema(vectorSchemaRoot.getSchema())) + .build(); + Thread workerThread = new Thread(); + // ResultSet with 1 row buffer and 1 total rows. + BigQueryArrowResultSet bigQueryArrowResultSet2 = + BigQueryArrowResultSet.of( + QUERY_SCHEMA, arrowSchema, 1, statement, buffer, workerThread, null); + + assertThat(resultSetRowCount(bigQueryArrowResultSet2)).isEqualTo(1); + // ResultSet with 2 rows buffer and 1 total rows. 
+ bigQueryArrowResultSet2 = + BigQueryArrowResultSet.of( + QUERY_SCHEMA, arrowSchema, 1, statement, bufferWithTwoRows, workerThread, null); + + assertThat(resultSetRowCount(bigQueryArrowResultSet2)).isEqualTo(1); + } + + @Test + // This method tests iteration and Resultset's type getters + public void testIteration() throws SQLException { + int cnt = 0; + assertThat(bigQueryArrowResultSet.isBeforeFirst()).isTrue(); + while (bigQueryArrowResultSet.next()) { + cnt++; + assertThat(bigQueryArrowResultSet.isLast()).isTrue(); // we have one test row + assertThat(bigQueryArrowResultSet.isFirst()).isTrue(); // we have one test row + + assertThat(bigQueryArrowResultSet.getString(4)).isEqualTo("text1"); + + // array + assertThat(bigQueryArrowResultSet.getArray("intArrayField").getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(bigQueryArrowResultSet.getArray(7).getArray()).isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryArrowResultSet.getObject("intArrayField")).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryArrowResultSet.getObject(7)).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + + // struct + assertThat(((Struct) bigQueryArrowResultSet.getObject("structField")).getAttributes()) + .isEqualTo(new Object[] {"Jon Doe", 29L}); + assertThat(((Struct) bigQueryArrowResultSet.getObject(8)).getAttributes()) + .isEqualTo(new Object[] {"Jon Doe", 29L}); + } + assertThat(cnt).isEqualTo(1); + assertThat(bigQueryArrowResultSet.next()).isFalse(); + assertThat(bigQueryArrowResultSet.isAfterLast()).isTrue(); + } + + @Test + public void testIsClosed() { + assertThat(bigQueryArrowResultSet.isClosed()).isFalse(); + } + + @Test + public void testResultSetHoldability() throws SQLException { + assertThat(bigQueryArrowResultSet.getHoldability()) + .isEqualTo(bigQueryArrowResultSet.HOLD_CURSORS_OVER_COMMIT); + } + + @Test + public void testStatement() throws SQLException { + assertThat(bigQueryArrowResultSet.getStatement()).isEqualTo(statement); + assertThat(bigQueryArrowResultSetNested.getStatement()).isNull(); + } + + @Test + public void testConcurrencyTypeColumn() throws SQLException { + assertThat(bigQueryArrowResultSet.getConcurrency()).isEqualTo(ResultSet.CONCUR_READ_ONLY); + assertThat(bigQueryArrowResultSet.getType()).isEqualTo(ResultSet.TYPE_FORWARD_ONLY); + assertThat(bigQueryArrowResultSet.findColumn("boolField")).isEqualTo(1); + } + + @Test + public void testIterationNested() throws SQLException { + int cnt = 0; + assertThat(bigQueryArrowResultSetNested.isBeforeFirst()).isTrue(); + while (bigQueryArrowResultSetNested.next()) { + cnt++; + if (cnt == 1) { + assertThat(bigQueryArrowResultSetNested.isFirst()).isTrue(); + + } else { // 2nd row is the last row + assertThat(bigQueryArrowResultSetNested.isLast()).isTrue(); + } + assertThat(bigQueryArrowResultSetNested.getInt(1)) + .isEqualTo(cnt); // the first column is index 1 + assertThat(bigQueryArrowResultSetNested.getInt(2)) + .isEqualTo(cnt * 10); // second column has values 10 and 20 + } + assertThat(cnt).isEqualTo(2); + assertThat(bigQueryArrowResultSetNested.next()).isFalse(); + assertThat(bigQueryArrowResultSetNested.isAfterLast()).isTrue(); + } + + private int resultSetRowCount(BigQueryArrowResultSet resultSet) throws SQLException { + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + return rowCount; + } + + // TODO: Unit Test for iteration and getters +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java 
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java new file mode 100644 index 0000000000..2c3bedcc4c --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java @@ -0,0 +1,245 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATE; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowArraySchemaAndValue; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowStructOf; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.toArrowStruct; +import static com.google.common.truth.Truth.assertThat; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; +import static java.time.Month.MARCH; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Date; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; +import org.apache.arrow.vector.util.Text; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; + +public class BigQueryArrowStructTest { + + @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + private Struct structWithPrimitiveValues; + + @Before + public void setUp() { + 
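+ // Build a struct whose fields cover the primitive BigQuery type mappings exercised below.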
Tuple<FieldList, JsonStringHashMap<String, Object>> schemaAndValues = + arrowStructOf( + Tuple.of(INT64, Long.valueOf("10")), + Tuple.of(BOOL, TRUE), + Tuple.of(FLOAT64, Double.valueOf("11.2")), + Tuple.of(NUMERIC, new BigDecimal("11.2657")), + Tuple.of(BIGNUMERIC, new BigDecimal("11.2657")), + Tuple.of(STRING, new Text("one")), + Tuple.of(TIMESTAMP, Long.valueOf("1680174859820227")), // 2023-03-30 16:44:19.82 + Tuple.of(DATE, 19446), // 2023-03-30 + Tuple.of(TIME, Long.valueOf("40459820227")), + Tuple.of(DATETIME, LocalDateTime.parse("2023-03-30T11:14:19.820227")), + Tuple.of(GEOGRAPHY, new Text("POINT(-122 47)")), + Tuple.of(BYTES, "one".getBytes())); + + structWithPrimitiveValues = new BigQueryArrowStruct(schemaAndValues.x(), schemaAndValues.y()); + } + + @Test + public void structOfPrimitives() throws SQLException { + assertThat(structWithPrimitiveValues.getAttributes()) + .isEqualTo( + asList( + 10L, + true, + 11.2, + new BigDecimal("11.2657"), + new BigDecimal("11.2657"), + "one", + Timestamp.valueOf(LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000)), + Date.valueOf(LocalDate.of(2023, MARCH, 30)), + Time.valueOf(LocalTime.of(11, 14, 19, 820227)), + Timestamp.valueOf("2023-03-30 11:14:19.820227"), + "POINT(-122 47)", + "one".getBytes()) + .toArray()); + } + + @Test + public void structOfArrays() throws SQLException { + LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000); + LocalDate aDate = LocalDate.of(2023, MARCH, 30); + LocalTime aTime = LocalTime.of(11, 14, 19, 820227); + List<Tuple<Field, List<Object>>> schemaAndValues = + Arrays.asList( + arrowArraySchemaAndValue(INT64, 10L, 20L), + arrowArraySchemaAndValue(BOOL, Boolean.TRUE, FALSE), + arrowArraySchemaAndValue(FLOAT64, Double.valueOf("11.2"), Double.valueOf("33.4")), + arrowArraySchemaAndValue(NUMERIC, new BigDecimal("11.2657"), new BigDecimal("33.4657")), + arrowArraySchemaAndValue( + BIGNUMERIC, new BigDecimal("11.2657"), new BigDecimal("33.4657")), + arrowArraySchemaAndValue(STRING, new Text("one"), new Text("two")), + arrowArraySchemaAndValue( + TIMESTAMP, Long.valueOf("1680174859820227"), Long.valueOf("1680261259820227")), + arrowArraySchemaAndValue(DATE, 19446, 19447), + arrowArraySchemaAndValue( + TIME, Long.valueOf("40459820227"), Long.valueOf("40460820227")), + arrowArraySchemaAndValue( + DATETIME, + LocalDateTime.parse("2023-03-30T11:14:19.820227"), + LocalDateTime.parse("2023-03-30T11:15:19.820227")), + arrowArraySchemaAndValue( + GEOGRAPHY, new Text("POINT(-122 47)"), new Text("POINT(-122 48)")), + arrowArraySchemaAndValue( + BYTES, Stream.of("one", "two").map(String::getBytes).toArray(byte[][]::new))); + + List<Field> orderedSchemas = + schemaAndValues.stream().map(Tuple::x).collect(Collectors.toList()); + JsonStringHashMap<String, Object> jsonStringHashMap = toArrowStruct(schemaAndValues); + + Struct struct = new BigQueryArrowStruct(FieldList.of(orderedSchemas), jsonStringHashMap); + + Object[] attributes = struct.getAttributes(); + assertThat(((Array) attributes[0]).getArray()).isEqualTo(new Long[] {10L, 20L}); + assertThat(((Array) attributes[1]).getArray()).isEqualTo(new Boolean[] {true, false}); + assertThat(((Array) attributes[2]).getArray()).isEqualTo(new Double[] {11.2, 33.4}); + assertThat(((Array) attributes[3]).getArray()) + .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")}); + assertThat(((Array) attributes[4]).getArray()) + .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")}); + assertThat(((Array) attributes[5]).getArray()).isEqualTo(new String[] {"one", "two"}); + assertThat(((Array)
attributes[6]).getArray()) + .isEqualTo( + new Timestamp[] { + Timestamp.valueOf(aTimeStamp), // 2023-03-30 16:44:19.82 + Timestamp.valueOf(aTimeStamp.plusDays(1)) + }); + assertThat(((Array) attributes[7]).getArray()) + .isEqualTo(new Date[] {Date.valueOf(aDate), Date.valueOf(aDate.plusDays(1))}); + assertThat(((Array) attributes[8]).getArray()) + .isEqualTo(new Time[] {Time.valueOf(aTime), Time.valueOf(aTime.plusSeconds(1))}); + assertThat(((Array) attributes[9]).getArray()) // DATETIME + .isEqualTo( + new Timestamp[] { + Timestamp.valueOf("2023-03-30 11:14:19.820227"), + Timestamp.valueOf("2023-03-30 11:15:19.820227") + }); + assertThat(((Array) attributes[10]).getArray()) + .isEqualTo(new String[] {"POINT(-122 47)", "POINT(-122 48)"}); + assertThat(((Array) attributes[11]).getArray()) + .isEqualTo(new byte[][] {"one".getBytes(), "two".getBytes()}); + } + + @Test + public void structOfStructs() throws SQLException { + FieldList profileSchema = + FieldList.of( + Field.of("name", LegacySQLTypeName.STRING), + Field.of("age", LegacySQLTypeName.INTEGER), + Field.of("adult", LegacySQLTypeName.BOOLEAN)); + FieldList addressSchema = + FieldList.of( + Field.of("state", LegacySQLTypeName.STRING), + Field.of("zip", LegacySQLTypeName.INTEGER)); + FieldList rootStructSchema = + FieldList.of( + Field.of("profile", LegacySQLTypeName.RECORD, profileSchema), + Field.of("address", LegacySQLTypeName.RECORD, addressSchema)); + + JsonStringHashMap<String, Object> profileValue = + new JsonStringHashMap<String, Object>() { + { + put("name", new Text("Arya")); + put("age", 15L); + put("adult", FALSE); + } + }; + JsonStringHashMap<String, Object> addressValue = + new JsonStringHashMap<String, Object>() { + { + put("state", new Text("Michigan")); + put("zip", 49086L); + } + }; + JsonStringHashMap<String, Object> rootStructValue = + new JsonStringHashMap<String, Object>() { + { + put("profile", profileValue); + put("address", addressValue); + } + }; + + Struct struct = new BigQueryArrowStruct(rootStructSchema, rootStructValue); + Object[] attributes = struct.getAttributes(); + Struct profileStruct = (Struct) attributes[0]; + Struct addressStruct = (Struct) attributes[1]; + + assertThat(profileStruct.getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(addressStruct.getAttributes()).isEqualTo(asList("Michigan", 49086L).toArray()); + } + + @Test + public void getSQLTypeNameIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, structWithPrimitiveValues::getSQLTypeName); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getAttributesWithCustomTypeMappingsIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, + () -> structWithPrimitiveValues.getAttributes(emptyMap())); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java new file mode 100644 index 0000000000..90dad9935a --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java @@ -0,0 +1,104 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.CALLS_REAL_METHODS; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import java.lang.reflect.Field; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryBaseResultSetTest { + private BigQuery bigQuery; + private BigQueryBaseResultSet resultSet; + private Job job; + private QueryStatistics statistics; + + @Before + public void setUp() { + // Use mock() for QueryStatistics because its Builder is not accessible + // from outside the package. + bigQuery = mock(BigQuery.class); + job = mock(Job.class); + doReturn(job).when(bigQuery).getJob(any(JobId.class)); + + statistics = mock(QueryStatistics.class); + doReturn(statistics).when(job).getStatistics(); + + resultSet = mock(BigQueryBaseResultSet.class, CALLS_REAL_METHODS); + try { + Field field = BigQueryBaseResultSet.class.getDeclaredField("bigQuery"); + field.setAccessible(true); + field.set(resultSet, bigQuery); + } catch (Exception e) { + fail(e.getMessage()); + } + } + + @Test + public void testGetQueryId() { + resultSet.setQueryId("queryId"); + assertThat(resultSet.getQueryId()).isEqualTo("queryId"); + } + + @Test + public void testGetJobId() { + resultSet.setJobId(JobId.of("jobId")); + assertThat(resultSet.getJobId()).isEqualTo(JobId.of("jobId")); + } + + @Test + public void testGetQueryStatistics() { + resultSet.setJobId(JobId.of("jobId")); + assertThat(resultSet.getQueryStatistics()).isInstanceOf(QueryStatistics.class); + } + + @Test + public void testGetQueryStatisticsCaching() { + resultSet.setJobId(JobId.of("jobId")); + assertThat(resultSet.getQueryStatistics()).isInstanceOf(QueryStatistics.class); + // Change return value to null to ensure lazy init saved the state + doReturn(null).when(job).getStatistics(); + assertThat(resultSet.getQueryStatistics()).isInstanceOf(QueryStatistics.class); + } + + @Test + public void testGetQueryStatistics_no_client() { + resultSet = mock(BigQueryBaseResultSet.class, CALLS_REAL_METHODS); + assertThat(resultSet.getQueryStatistics()).isNull(); + } + + @Test + public void testGetQueryStatistics_no_job_id() { + assertThat(resultSet.getQueryStatistics()).isNull(); + } + + @Test + public void testGetQueryStatistics_no_job() { + resultSet.setJobId(JobId.of("jobId")); + doReturn(null).when(bigQuery).getJob(any(JobId.class)); + assertThat(resultSet.getQueryStatistics()).isNull(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java new file mode 100644 index 0000000000..4af1632456 --- /dev/null +++
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java @@ -0,0 +1,43 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.cloud.bigquery.jdbc.TestType.Text; +import org.junit.Test; + +public class BigQueryBigQueryTypeCoercerBuilderTest { + + @Test + public void shouldBeAbleToConvertCustomTypes() { + byte[] bytesArray = {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33}; // "Hello World!" in ASCII + Text text = new Text(bytesArray); + + BigQueryTypeCoercer bigQueryTypeCoercer = + new BigQueryTypeCoercerBuilder().registerTypeCoercion(new TextToStringCoercion()).build(); + + assertThat(bigQueryTypeCoercer.coerceTo(String.class, text)).isEqualTo("Hello World!"); + } + + private static class TextToStringCoercion implements BigQueryCoercion<Text, String> { + @Override + public String coerce(Text value) { + return new String(value.getBytes()); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java new file mode 100644 index 0000000000..f9729bf21e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java @@ -0,0 +1,1118 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import java.io.IOException; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.sql.*; +import java.util.Calendar; +import java.util.HashMap; +import java.util.Map; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryCallableStatementTest { + + private BigQueryConnection bigQueryConnection; + private static final String GET_PARAM_KEY = "ParamKey"; + private static final String PARAM_KEY = GET_PARAM_KEY; + + @Before + public void setUp() throws IOException, SQLException { + bigQueryConnection = mock(BigQueryConnection.class); + } + + @Test + public void testCreateCallableStatement() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc"); + assertNotNull(statement); + + assertEquals("call testProc", statement.getCallableStatementSql()); + } + + @Test + public void testRegisterOutParamIndexVarchar() throws SQLException { + registerOutParamIndexHelper(1, Types.VARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamIndexNVarchar() throws SQLException { + registerOutParamIndexHelper(1, Types.NVARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamIndexBigInt() throws SQLException { + registerOutParamIndexHelper(1, Types.BIGINT, Long.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamIndexInteger() throws SQLException { + registerOutParamIndexHelper(1, Types.INTEGER, Integer.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamIndexBoolean() throws SQLException { + registerOutParamIndexHelper(1, Types.BOOLEAN, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamIndexDouble() throws SQLException { + registerOutParamIndexHelper(1, Types.DOUBLE, Double.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamIndexFloat() throws SQLException { + registerOutParamIndexHelper(1, Types.FLOAT, Float.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamIndexNumeric() throws SQLException { + registerOutParamIndexHelper(1, Types.NUMERIC, BigDecimal.class, StandardSQLTypeName.NUMERIC, 2); + } + + @Test + public void testRegisterOutParamIndexTimestamp() throws SQLException { + registerOutParamIndexHelper( + 1, Types.TIMESTAMP, Timestamp.class, StandardSQLTypeName.TIMESTAMP, -1); + } + + @Test + public void testRegisterOutParamIndexDate() throws SQLException { + registerOutParamIndexHelper(1, Types.DATE, Date.class, StandardSQLTypeName.DATE, -1); + } + + @Test + public void testRegisterOutParamIndexTime() throws SQLException { + registerOutParamIndexHelper(1, Types.TIME, Time.class, StandardSQLTypeName.TIME, -1); + } + + @Test + public void testRegisterOutParamIndexOther() throws SQLException { + registerOutParamIndexHelper(1, Types.OTHER, String.class, StandardSQLTypeName.STRING, -1); + } + + 
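+ // BINARY and VARBINARY both map to the BigQuery BYTES type.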
@Test + public void testRegisterOutParamIndexBinary() throws SQLException { + registerOutParamIndexHelper(1, Types.BINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamIndexVarBinary() throws SQLException { + registerOutParamIndexHelper(1, Types.VARBINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamIndexStruct() throws SQLException { + registerOutParamIndexHelper(1, Types.STRUCT, Struct.class, StandardSQLTypeName.STRUCT, -1); + } + + @Test + public void testRegisterOutParamIndexArray() throws SQLException { + registerOutParamIndexHelper(1, Types.ARRAY, Array.class, StandardSQLTypeName.ARRAY, -1); + } + + @Test + public void testRegisterOutParamIndexBit() throws SQLException { + registerOutParamIndexHelper(1, Types.BIT, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamNameVarchar() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.VARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamNameNVarchar() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.NVARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamNameBigInt() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.BIGINT, Long.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamNameInteger() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.INTEGER, Integer.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamNameBoolean() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.BOOLEAN, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamNameDouble() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.DOUBLE, Double.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamNameFloat() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.FLOAT, Float.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamNameNumeric() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.NUMERIC, BigDecimal.class, StandardSQLTypeName.NUMERIC, 2); + } + + @Test + public void testRegisterOutParamNameTimestamp() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.TIMESTAMP, Timestamp.class, StandardSQLTypeName.TIMESTAMP, -1); + } + + @Test + public void testRegisterOutParamNameDate() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.DATE, Date.class, StandardSQLTypeName.DATE, -1); + } + + @Test + public void testRegisterOutParamNameTime() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.TIME, Time.class, StandardSQLTypeName.TIME, -1); + } + + @Test + public void testRegisterOutParamNameOther() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.OTHER, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamNameBinary() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.BINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamNameVarBinary() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.VARBINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void 
testRegisterOutParamNameStruct() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.STRUCT, Struct.class, StandardSQLTypeName.STRUCT, -1); + } + + @Test + public void testRegisterOutParamNameArray() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.ARRAY, Array.class, StandardSQLTypeName.ARRAY, -1); + } + + @Test + public void testRegisterOutParamNameBit() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.BIT, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamIndexScaleFail() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + assertThrows( + IllegalArgumentException.class, () -> statement.registerOutParameter(1, Types.VARCHAR, 3)); + } + + @Test + public void testRegisterOutNameIndexScaleFail() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + assertThrows( + IllegalArgumentException.class, + () -> statement.registerOutParameter("ParamKey", Types.VARCHAR, 3)); + } + + @Test + public void testGetArrayParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Array expected = mock(Array.class); + + statement.getParameterHandler().setParameter(1, expected, Array.class); + Array actual = statement.getArray(1); + assertEquals(expected, actual); + } + + @Test + public void testGetArrayParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Array expected = mock(Array.class); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Array.class, BigQueryStatementParameterType.IN, 0); + Array actual = statement.getArray(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetBigDecimalParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + BigDecimal expected = mock(BigDecimal.class); + + statement.getParameterHandler().setParameter(1, expected, BigDecimal.class); + BigDecimal actual = statement.getBigDecimal(1); + assertEquals(expected, actual); + } + + @Test + public void testGetBigDecimalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + BigDecimal expected = mock(BigDecimal.class); + + statement + .getParameterHandler() + .setParameter( + GET_PARAM_KEY, expected, BigDecimal.class, BigQueryStatementParameterType.IN, 0); + BigDecimal actual = statement.getBigDecimal(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetBooleanParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Boolean expected = true; + + statement.getParameterHandler().setParameter(1, expected, Boolean.class); + Boolean actual = statement.getBoolean(1); + assertEquals(expected, actual); + } + + @Test + public void testGetBooleanParamByName() throws SQLException { + BigQueryCallableStatement statement =
new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Boolean expected = true; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Boolean.class, BigQueryStatementParameterType.IN, 0); + Boolean actual = statement.getBoolean(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetByteParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Byte expected = "hello".getBytes()[0]; + + statement.getParameterHandler().setParameter(1, expected, Byte.class); + Byte actual = statement.getByte(1); + assertEquals(expected, actual); + } + + @Test + public void testGetByteParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Byte expected = "hello".getBytes()[0]; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Byte.class, BigQueryStatementParameterType.IN, 0); + Byte actual = statement.getByte(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetBytesParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + byte[] expected = "hello".getBytes(); + + statement.getParameterHandler().setParameter(1, expected, byte[].class); + byte[] actual = statement.getBytes(1); + assertEquals(expected, actual); + } + + @Test + public void testGetBytesParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + byte[] expected = "hello".getBytes(); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, byte[].class, BigQueryStatementParameterType.IN, 0); + byte[] actual = statement.getBytes(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetCharacterStreamParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Reader actual = statement.getCharacterStream(1); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetCharacterStreamParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Reader actual = statement.getCharacterStream(GET_PARAM_KEY); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetDateParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expected = mock(Date.class); + + statement.getParameterHandler().setParameter(1, expected, Date.class); + Date actual = statement.getDate(1); + assertEquals(expected, actual); + } + + @Test + public void 
testGetDateParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expected = mock(Date.class); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Date.class, BigQueryStatementParameterType.IN, 0); + Date actual = statement.getDate(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetDateParamWithCalByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Date expected = new Date(cal.getTimeInMillis()); + + statement.getParameterHandler().setParameter(1, expected, Date.class); + Date actual = statement.getDate(1, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetDateParamWithCalByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Date expected = new Date(cal.getTimeInMillis()); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Date.class, BigQueryStatementParameterType.IN, 0); + Date actual = statement.getDate(GET_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetDoubleParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Double expected = 10.123; + + statement.getParameterHandler().setParameter(1, expected, Double.class); + Double actual = statement.getDouble(1); + assertEquals(expected, actual); + } + + @Test + public void testGetDoubleParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Double expected = 10.123; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Double.class, BigQueryStatementParameterType.IN, 0); + Double actual = statement.getDouble(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetFloatParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Float expected = 10.123F; + + statement.getParameterHandler().setParameter(1, expected, Float.class); + Float actual = statement.getFloat(1); + assertEquals(expected, actual); + } + + @Test + public void testGetFloatParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Float expected = 10.123F; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Float.class, BigQueryStatementParameterType.IN, 0); + Float actual = statement.getFloat(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetIntegerParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Integer expected = 10; + + statement.getParameterHandler().setParameter(1, expected, Integer.class); + Integer actual = statement.getInt(1); + 
assertEquals(expected, actual); + } + + @Test + public void testGetIntegerParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Integer expected = 10; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Integer.class, BigQueryStatementParameterType.IN, 0); + Integer actual = statement.getInt(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetLongParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 10L; + + statement.getParameterHandler().setParameter(1, expected, Long.class); + Long actual = statement.getLong(1); + assertEquals(expected, actual); + } + + @Test + public void testGetLongParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 10L; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Long.class, BigQueryStatementParameterType.IN, 0); + Long actual = statement.getLong(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetNCharacterStreamParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Reader actual = statement.getNCharacterStream(1); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetNCharacterStreamParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Reader actual = statement.getNCharacterStream(GET_PARAM_KEY); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetNStringParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + String actual = statement.getNString(1); + assertEquals(expected, actual); + } + + @Test + public void testGetNStringByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + String actual = statement.getNString(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetObjectParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Object actual = 
statement.getObject(1); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Object actual = statement.getObject(GET_PARAM_KEY); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithMapByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Map<String, Class<?>> map = new HashMap<>(); + map.putIfAbsent(StandardSQLTypeName.STRING.name(), String.class); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Object actual = statement.getObject(1, map); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithMapByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Map<String, Class<?>> map = new HashMap<>(); + map.putIfAbsent(StandardSQLTypeName.STRING.name(), String.class); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Object actual = statement.getObject(GET_PARAM_KEY, map); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithClassByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Object actual = statement.getObject(1, String.class); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithClassByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Object actual = statement.getObject(GET_PARAM_KEY, String.class); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetStringParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "test"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + String actual = statement.getString(1); + assertEquals(expected, actual); + } + + @Test + public void testGetStringParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "test"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + String actual = statement.getString(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamByIndex() throws SQLException {
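+ // getTime should return the exact Time instance stored for the parameter.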
BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expected = mock(Time.class); + + statement.getParameterHandler().setParameter(1, expected, Time.class); + Time actual = statement.getTime(1); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expected = mock(Time.class); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Time.class, BigQueryStatementParameterType.IN, 0); + Time actual = statement.getTime(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamWithCalByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Time expected = new Time(cal.getTimeInMillis()); + + statement.getParameterHandler().setParameter(1, expected, Time.class); + Time actual = statement.getTime(1, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamWithCalByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Time expected = new Time(cal.getTimeInMillis()); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Time.class, BigQueryStatementParameterType.IN, 0); + Time actual = statement.getTime(GET_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expected = mock(Timestamp.class); + + statement.getParameterHandler().setParameter(1, expected, Timestamp.class); + Timestamp actual = statement.getTimestamp(1); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expected = mock(Timestamp.class); + + statement + .getParameterHandler() + .setParameter( + GET_PARAM_KEY, expected, Timestamp.class, BigQueryStatementParameterType.IN, 0); + Timestamp actual = statement.getTimestamp(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamWithCalByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Timestamp expected = new Timestamp(cal.getTimeInMillis()); + + statement.getParameterHandler().setParameter(1, expected, Timestamp.class); + Timestamp actual = statement.getTimestamp(1, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamWithCalByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Timestamp expected = new 
Timestamp(cal.getTimeInMillis()); + + statement + .getParameterHandler() + .setParameter( + GET_PARAM_KEY, expected, Timestamp.class, BigQueryStatementParameterType.IN, 0); + Timestamp actual = statement.getTimestamp(GET_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testSetBigDecimalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + BigDecimal expected = mock(BigDecimal.class); + + statement.setBigDecimal(PARAM_KEY, expected); + BigDecimal actual = statement.getBigDecimal(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetBooleanParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Boolean expected = true; + + statement.setBoolean(PARAM_KEY, expected); + Boolean actual = statement.getBoolean(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetByteParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Byte expected = "hello".getBytes()[0]; + + statement.setByte(PARAM_KEY, expected); + Byte actual = statement.getByte(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetByteArrayParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + byte[] expected = "hello".getBytes(); + + statement.setBytes(PARAM_KEY, expected); + byte[] actual = statement.getBytes(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetDateParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expected = mock(Date.class); + statement.setDate(PARAM_KEY, expected); + Date actual = statement.getDate(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetDateCalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expectedDate = mock(Date.class); + Calendar expectedCal = mock(Calendar.class); + + doReturn(1L).when(expectedDate).getTime(); + doReturn(new Date(1L)).when(expectedCal).getTime(); + doReturn(1L).when(expectedCal).getTimeInMillis(); + statement.setDate(PARAM_KEY, expectedDate, expectedCal); + Date actual = statement.getDate(PARAM_KEY); + assertEquals(new Date(1L), actual); + actual = statement.getDate(PARAM_KEY, expectedCal); + assertEquals(new Date(1L), actual); + } + + @Test + public void testSetDoubleParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Double expected = 123.123; + statement.setDouble(PARAM_KEY, expected); + Double actual = statement.getDouble(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetFloatParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Float expected = 123.123F;
statement.setFloat(PARAM_KEY, expected); + Float actual = statement.getFloat(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetIntParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Integer expected = 1; + statement.setInt(PARAM_KEY, expected); + Integer actual = statement.getInt(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetLongParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setLong(PARAM_KEY, expected); + Long actual = statement.getLong(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetObjectParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setObject(PARAM_KEY, expected); + Long actual = statement.getObject(PARAM_KEY, Long.class); + assertEquals(expected, actual); + } + + @Test + public void testSetObjectSqlTypeParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setObject(PARAM_KEY, expected, java.sql.Types.BIGINT); + Long actual = statement.getObject(PARAM_KEY, Long.class); + assertEquals(expected, actual); + } + + @Test + public void testSetObjectSqlTypeScaleParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setObject(PARAM_KEY, expected, java.sql.Types.BIGINT, 5); + Long actual = statement.getObject(PARAM_KEY, Long.class); + assertEquals(expected, actual); + int scale = statement.getParameterHandler().getParameterScale(PARAM_KEY); + assertEquals(5, scale); + } + + @Test + public void testSetStringParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + statement.setString(PARAM_KEY, expected); + String actual = statement.getString(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetTimeParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expected = mock(Time.class); + statement.setTime(PARAM_KEY, expected); + Time actual = statement.getTime(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetTimeCalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expectedTime = mock(Time.class); + Calendar expectedCal = mock(Calendar.class); + + doReturn(1L).when(expectedTime).getTime(); + doReturn(new Date(1L)).when(expectedCal).getTime(); + doReturn(1L).when(expectedCal).getTimeInMillis(); + statement.setTime(PARAM_KEY, expectedTime, expectedCal); + Time actual = statement.getTime(PARAM_KEY); + assertEquals(new Time(1L), actual); + actual = statement.getTime(PARAM_KEY,
expectedCal); + assertEquals(new Time(1L), actual); + } + + @Test + public void testSetTimestampParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expected = mock(Timestamp.class); + statement.setTimestamp(PARAM_KEY, expected); + Timestamp actual = statement.getTimestamp(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetTimestampCalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expectedTimestamp = mock(Timestamp.class); + Calendar expectedCal = mock(Calendar.class); + + doReturn(1L).when(expectedTimestamp).getTime(); + doReturn(new Date(1L)).when(expectedCal).getTime(); + doReturn(1L).when(expectedCal).getTimeInMillis(); + statement.setTimestamp(PARAM_KEY, expectedTimestamp, expectedCal); + Timestamp actual = statement.getTimestamp(PARAM_KEY); + assertEquals(new Timestamp(1L), actual); + actual = statement.getTimestamp(PARAM_KEY, expectedCal); + assertEquals(new Timestamp(1L), actual); + } + + ////////// Private helper methods //////////////////////// + private void registerOutParamIndexHelper( + int paramIndex, int javaSqlType, Class<?> javaType, StandardSQLTypeName sqlType, int scale) + throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + + if (scale >= 0) { + statement.registerOutParameter(paramIndex, javaSqlType, scale); + } else { + statement.registerOutParameter(paramIndex, javaSqlType); + } + BigQueryParameterHandler paramHandler = statement.getParameterHandler(); + assertNotNull(paramHandler); + + assertNull(paramHandler.getParameter(paramIndex)); + assertEquals(BigQueryStatementParameterType.OUT, paramHandler.getParameterType(paramIndex)); + assertEquals(scale, paramHandler.getParameterScale(paramIndex)); + assertEquals(javaType, paramHandler.getType(paramIndex)); + assertEquals(sqlType, paramHandler.getSqlType(paramIndex)); + } + + private void registerOutParamNameHelper( + String paramName, int javaSqlType, Class<?> javaType, StandardSQLTypeName sqlType, int scale) + throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + + if (scale >= 0) { + statement.registerOutParameter(paramName, javaSqlType, scale); + } else { + statement.registerOutParameter(paramName, javaSqlType); + } + BigQueryParameterHandler paramHandler = statement.getParameterHandler(); + assertNotNull(paramHandler); + + assertNull(paramHandler.getParameter(paramName)); + assertEquals(BigQueryStatementParameterType.OUT, paramHandler.getParameterType(paramName)); + assertEquals(scale, paramHandler.getParameterScale(paramName)); + assertEquals(javaType, paramHandler.getType(paramName)); + assertEquals(sqlType, paramHandler.getSqlType(paramName)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java new file mode 100644 index 0000000000..22f98af07e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java @@ -0,0 +1,343 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed
under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.*; + +import com.google.api.gax.rpc.HeaderProvider; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient; +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Properties; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryConnectionTest { + + private static final String DEFAULT_VERSION = "0.0.0"; + private static final String DEFAULT_JDBC_TOKEN_VALUE = "Google-BigQuery-JDBC-Driver"; + private String expectedVersion; + + @Before + public void setUp() throws IOException { + // Read the expected version from the dependencies.properties file once. + expectedVersion = getExpectedVersion(); + } + + private String getExpectedVersion() { + Properties props = new Properties(); + try (InputStream in = + getClass().getResourceAsStream("/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (in != null) { + props.load(in); + String version = props.getProperty("version.jdbc"); + if (version != null) { + return version; + } + } + } catch (IOException e) { + System.err.println("Error reading dependencies.properties: " + e.getMessage()); + } + return DEFAULT_VERSION; + } + + @Test + public void testGetLibraryVersion() throws IOException, SQLException { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + String result = connection.getLibraryVersion(BigQueryConnection.class); + assertEquals(expectedVersion, result); + } + } + + @Test + public void testHeaderProvider() throws IOException, SQLException { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue(agent.startsWith(DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion)); + assertFalse(agent.contains("(GPN:")); + } + } + + @Test + public void testHeaderProviderWithPartnerToken() throws IOException, SQLException { + String partnerTokenString = "(GPN:MyPartner; staging)"; + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;PartnerToken=" + + partnerTokenString; + try (BigQueryConnection 
connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue( + agent.startsWith( + DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion + " " + partnerTokenString)); + assertTrue(agent.contains("(GPN:")); + assertTrue(agent.contains("MyPartner;")); + assertTrue(agent.contains("staging)")); + } + } + + @Test + public void testHeaderProviderWithEmptyPartnerToken() throws IOException, SQLException { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;PartnerToken="; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue(agent.startsWith(DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion)); + assertFalse(agent.contains("(GPN:")); + } + } + + @Test + public void testHeaderProviderWithPartnerTokenNoEnv() throws IOException, SQLException { + String partnerTokenString = "(GPN:MyPartner)"; + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;PartnerToken=" + + partnerTokenString; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue( + agent.startsWith( + DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion + " " + partnerTokenString)); + assertTrue(agent.contains("GPN:")); + assertTrue(agent.contains("MyPartner")); + } + } + + @Test + public void testHeaderProviderWithInvalidPartner() throws IOException, SQLException { + String partnerTokenString = "(MyPartner; staging)"; + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;PartnerToken=" + + partnerTokenString; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue(agent.startsWith(DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion)); + assertFalse(agent.contains("(MyPartner;")); + assertFalse(agent.contains("(GPN:")); + } + } + + @Test + public void testWriteAPIConnectionProperties() throws SQLException { + // Test without connection properties. Defaults to default values. 
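+    // Built-in defaults asserted below: Write API disabled, activation row count 3,
+    // append row count 1000.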
+ String connectionUriDefault = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connectionDefault = new BigQueryConnection(connectionUriDefault)) { + + assertFalse(connectionDefault.enableWriteAPI); + assertEquals(3, connectionDefault.writeAPIActivationRowCount); + assertEquals(1000, connectionDefault.writeAPIAppendRowCount); + } catch (IOException | SQLException e) { + throw new BigQueryJdbcException(e); + } + + // Test with connection properties + String connectionUri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "EnableWriteAPI=1;SWA_ActivationRowCount=6;SWA_AppendRowCount=500"; + try (BigQueryConnection connection = new BigQueryConnection(connectionUri)) { + assertTrue(connection.enableWriteAPI); + assertEquals(6, connection.writeAPIActivationRowCount); + assertEquals(500, connection.writeAPIAppendRowCount); + } catch (IOException | SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testGetWriteClient() throws SQLException { + // Test without connection properties. Defaults to default values. + String connectionUriDefault = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connectionDefault = new BigQueryConnection(connectionUriDefault)) { + assertNull(connectionDefault.bigQueryWriteClient); + // Lazy initialization + BigQueryWriteClient writeClient = connectionDefault.getBigQueryWriteClient(); + assertNotNull(writeClient); + assertFalse(writeClient.isShutdown()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testAdditionalProjects() throws IOException, BigQueryJdbcException { + String url1 = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "AdditionalProjects=projA,projB"; + try (BigQueryConnection conn1 = new BigQueryConnection(url1)) { + List additionalProjects1 = conn1.getAdditionalProjects(); + assertNotNull(additionalProjects1); + assertEquals(Arrays.asList("projA", "projB"), additionalProjects1); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + String url2 = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "AdditionalProjects=projX"; + try (BigQueryConnection conn2 = new BigQueryConnection(url2)) { + List additionalProjects2 = conn2.getAdditionalProjects(); + assertNotNull(additionalProjects2); + assertEquals(Collections.singletonList("projX"), additionalProjects2); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testFilterTablesOnDefaultDatasetProperty() throws SQLException, IOException { + // Test default value + String urlDefault = + 
"jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connectionDefault = new BigQueryConnection(urlDefault)) { + assertFalse( + "Default value for FilterTablesOnDefaultDataset should be false", + connectionDefault.isFilterTablesOnDefaultDataset()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + + // Test explicitly setting to true + String urlTrue = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "FilterTablesOnDefaultDataset=1;"; + try (BigQueryConnection connectionTrue = new BigQueryConnection(urlTrue)) { + assertTrue( + "FilterTablesOnDefaultDataset should be true when set to 1", + connectionTrue.isFilterTablesOnDefaultDataset()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testRequestGoogleDriveScopeProperty() throws IOException, SQLException { + // Test enabled + String urlEnabled = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "RequestGoogleDriveScope=1;"; + try (BigQueryConnection connectionEnabled = new BigQueryConnection(urlEnabled)) { + assertEquals( + "RequestGoogleDriveScope should be enabled when set to 1", + 1, + connectionEnabled.isRequestGoogleDriveScope()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + + // Test disabled + String urlDisabled = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "RequestGoogleDriveScope=0;"; + try (BigQueryConnection connectionDisabled = new BigQueryConnection(urlDisabled)) { + assertEquals( + "RequestGoogleDriveScope should be disabled when set to 0", + 0, + connectionDisabled.isRequestGoogleDriveScope()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testMetaDataFetchThreadCountProperty() throws SQLException, IOException { + // Test Case 1: Should use the default value when the property is not specified. + String urlDefault = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connectionDefault = new BigQueryConnection(urlDefault)) { + assertEquals( + "Should use the default value when the property is not set", + BigQueryJdbcUrlUtility.DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE, + connectionDefault.getMetadataFetchThreadCount()); + } + + // Test Case 2: Should use the custom value when a valid integer is provided. 
+    String urlCustom =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "OAuthType=2;ProjectId=MyBigQueryProject;"
+            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
+            + "OAuthClientSecret=redactedToken;"
+            + "MetaDataFetchThreadCount=16;";
+    try (BigQueryConnection connectionCustom = new BigQueryConnection(urlCustom)) {
+      assertEquals(
+          "Should use the custom value when a valid integer is provided",
+          16,
+          connectionCustom.getMetadataFetchThreadCount());
+    }
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java
new file mode 100644
index 0000000000..b99ff4bec5
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.cloud.bigquery.jdbc.BigQueryResultSetFinalizers.ArrowResultSetFinalizer;
+import java.lang.ref.ReferenceQueue;
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+
+public class BigQueryDaemonPollingTaskTest {
+
+  static ReferenceQueue referenceQueueArrowRs;
+  static ReferenceQueue referenceQueueJsonRs;
+  static List arrowResultSetFinalizers;
+  static List jsonResultSetFinalizers;
+
+  @Before
+  public void setUp() {
+    referenceQueueArrowRs = new ReferenceQueue<>();
+    referenceQueueJsonRs = new ReferenceQueue<>();
+    arrowResultSetFinalizers = new ArrayList<>();
+    jsonResultSetFinalizers = new ArrayList<>();
+  }
+
+  @Test
+  public void testStartGcDaemonTask() {
+
+    // Start the daemon first, then verify that a second start attempt is rejected.
+    // The daemon thread may already have been started by JUnit via BigQueryStatementTest,
+    // so the first return value is ignored; the assertion below checks the second call.
+    BigQueryDaemonPollingTask.startGcDaemonTask(
+        referenceQueueArrowRs,
+        referenceQueueJsonRs,
+        arrowResultSetFinalizers,
+        jsonResultSetFinalizers);
+
+    assertThat(
+            BigQueryDaemonPollingTask.startGcDaemonTask(
+                referenceQueueArrowRs,
+                referenceQueueJsonRs,
+                arrowResultSetFinalizers,
+                jsonResultSetFinalizers))
+        .isFalse();
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java
new file mode 100644
index 0000000000..536aae15bf
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java
@@ -0,0 +1,3209 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; + +import com.google.api.gax.paging.Page; +import com.google.cloud.bigquery.*; +import com.google.cloud.bigquery.BigQuery.RoutineListOption; +import java.io.IOException; +import java.io.InputStream; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.util.*; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.regex.Pattern; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryDatabaseMetaDataTest { + + private BigQueryConnection bigQueryConnection; + private BigQueryDatabaseMetaData dbMetadata; + private BigQuery bigqueryClient; + + @Before + public void setUp() throws SQLException { + bigQueryConnection = mock(BigQueryConnection.class); + bigqueryClient = mock(BigQuery.class); + Statement mockStatement = mock(Statement.class); + + when(bigQueryConnection.getConnectionUrl()).thenReturn("jdbc:bigquery://test-project"); + when(bigQueryConnection.getBigQuery()).thenReturn(bigqueryClient); + when(bigQueryConnection.createStatement()).thenReturn(mockStatement); + + dbMetadata = new BigQueryDatabaseMetaData(bigQueryConnection); + } + + private Table mockBigQueryTable( + String project, String dataset, String table, TableDefinition.Type type, String description) { + Table mockTable = mock(Table.class); + TableId mockTableId = TableId.of(project, dataset, table); + TableDefinition mockDefinition = mock(TableDefinition.class); + + when(mockTable.getTableId()).thenReturn(mockTableId); + when(mockTable.getDefinition()).thenReturn(mockDefinition); + when(mockDefinition.getType()).thenReturn(type); + when(mockTable.getDescription()).thenReturn(description); + + return mockTable; + } + + private StandardSQLDataType mockStandardSQLDataType(StandardSQLTypeName typeKind) { + StandardSQLDataType mockDataType = mock(StandardSQLDataType.class); + when(mockDataType.getTypeKind()).thenReturn(typeKind.name()); + return mockDataType; + } + + private RoutineArgument mockRoutineArgument(String name, StandardSQLTypeName type, String mode) { + RoutineArgument mockArg = mock(RoutineArgument.class); + when(mockArg.getName()).thenReturn(name); + StandardSQLDataType mockDataType = mockStandardSQLDataType(type); + when(mockArg.getDataType()).thenReturn(mockDataType); + when(mockArg.getMode()).thenReturn(mode); // "IN", "OUT", "INOUT", or null + return mockArg; + } + + private Routine mockBigQueryRoutineWithArgs( + String 
project,
+      String dataset,
+      String routineName,
+      String routineType,
+      String description,
+      List<RoutineArgument> arguments) {
+    Routine mockRoutine = mock(Routine.class);
+    RoutineId mockRoutineId = RoutineId.of(project, dataset, routineName);
+    when(mockRoutine.getRoutineId()).thenReturn(mockRoutineId);
+    when(mockRoutine.getRoutineType()).thenReturn(routineType);
+    when(mockRoutine.getDescription()).thenReturn(description);
+    if (arguments != null) {
+      when(mockRoutine.getArguments()).thenReturn(arguments);
+    } else {
+      when(mockRoutine.getArguments()).thenReturn(Collections.emptyList());
+    }
+    return mockRoutine;
+  }
+
+  private StandardSQLTableType mockStandardSQLTableType(List<StandardSQLField> columns) {
+    StandardSQLTableType mockTableType = mock(StandardSQLTableType.class);
+    when(mockTableType.getColumns()).thenReturn(columns);
+    return mockTableType;
+  }
+
+  private StandardSQLField mockStandardSQLField(String name, StandardSQLTypeName type) {
+    StandardSQLField mockField = mock(StandardSQLField.class);
+    StandardSQLDataType mockedDataType = mockStandardSQLDataType(type);
+    when(mockField.getName()).thenReturn(name);
+    when(mockField.getDataType()).thenReturn(mockedDataType);
+    return mockField;
+  }
+
+  @Test
+  public void testBigqueryDatabaseMetaDataGetters() throws SQLException {
+    BigQueryDatabaseMetaData dbMetadata = new BigQueryDatabaseMetaData(bigQueryConnection);
+    assertEquals("GoogleJDBCDriverForGoogleBigQuery", dbMetadata.getDriverName());
+    assertEquals("Google BigQuery", dbMetadata.getDatabaseProductName());
+    assertEquals("2.0", dbMetadata.getDatabaseProductVersion());
+    assertEquals("Dataset", dbMetadata.getSchemaTerm());
+    assertEquals("Procedure", dbMetadata.getProcedureTerm());
+    assertEquals("Project", dbMetadata.getCatalogTerm());
+  }
+
+  @Test
+  public void testReadSqlFromFile() throws SQLException {
+    BigQueryDatabaseMetaData dbMetadata = new BigQueryDatabaseMetaData(bigQueryConnection);
+
+    String primaryKeysQuery =
+        BigQueryDatabaseMetaData.readSqlFromFile("DatabaseMetaData_GetPrimaryKeys.sql");
+    assertTrue(primaryKeysQuery.contains("pk$"));
+
+    try {
+      when(bigQueryConnection.prepareStatement(primaryKeysQuery)).thenCallRealMethod();
+      String sql =
+          dbMetadata.replaceSqlParameters(
+              primaryKeysQuery, "project_name", "dataset_name", "table_name");
+      assertTrue(sql.contains("project_name.dataset_name.INFORMATION_SCHEMA.KEY_COLUMN_USAGE"));
+    } catch (SQLException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Test
+  public void testNeedsListing() {
+    assertTrue("Null pattern should require listing", dbMetadata.needsListing(null));
+    assertTrue("Pattern with % should require listing", dbMetadata.needsListing("abc%def"));
+    assertTrue("Pattern with _ should require listing", dbMetadata.needsListing("abc_def"));
+    assertTrue("Pattern with both wildcards", dbMetadata.needsListing("a%c_d%f"));
+    assertFalse("Empty pattern should not require listing", dbMetadata.needsListing(""));
+    assertFalse("Pattern without wildcards", dbMetadata.needsListing("exactName"));
+  }
+
+  @Test
+  public void testCompileSqlLikePattern() {
+    // Null input -> null pattern
+    assertNull(dbMetadata.compileSqlLikePattern(null));
+
+    // Empty input -> pattern that matches nothing ("(?!)")
+    Pattern emptyPattern = dbMetadata.compileSqlLikePattern("");
+    assertNotNull(emptyPattern);
+    assertFalse(emptyPattern.matcher("").matches());
+    assertFalse(emptyPattern.matcher("a").matches());
+    assertEquals("(?!)", emptyPattern.pattern());
+
+    // Exact match
+    Pattern exactPattern = dbMetadata.compileSqlLikePattern("tableName");
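+    // Matching is case-insensitive: "tableName" also matches "TABLENAME" below.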
assertNotNull(exactPattern); + assertTrue(exactPattern.matcher("tableName").matches()); + assertTrue(exactPattern.matcher("TABLENAME").matches()); + assertFalse(exactPattern.matcher("tableNameX").matches()); + assertFalse(exactPattern.matcher("XtableName").matches()); + + // Percent wildcard (%) -> .* + Pattern percentPattern = dbMetadata.compileSqlLikePattern("table%"); + assertNotNull(percentPattern); + assertTrue(percentPattern.matcher("table").matches()); + assertTrue(percentPattern.matcher("tableName").matches()); + assertTrue(percentPattern.matcher("TABLE_123").matches()); + assertFalse(percentPattern.matcher("myTable").matches()); + + Pattern percentPattern2 = dbMetadata.compileSqlLikePattern("%Name"); + assertNotNull(percentPattern2); + assertTrue(percentPattern2.matcher("Name").matches()); + assertTrue(percentPattern2.matcher("tableName").matches()); + assertTrue(percentPattern2.matcher("VIEW_NAME").matches()); + assertFalse(percentPattern2.matcher("NameSuffix").matches()); + + Pattern percentPattern3 = dbMetadata.compileSqlLikePattern("ta%le"); + assertNotNull(percentPattern3); + assertTrue(percentPattern3.matcher("table").matches()); + assertTrue(percentPattern3.matcher("TALLE").matches()); + assertTrue(percentPattern3.matcher("tale").matches()); + assertFalse(percentPattern3.matcher("table123").matches()); + + // Underscore wildcard (_) -> . + Pattern underscorePattern = dbMetadata.compileSqlLikePattern("t_ble"); + assertNotNull(underscorePattern); + assertTrue(underscorePattern.matcher("table").matches()); + assertTrue(underscorePattern.matcher("tAble").matches()); + assertTrue(underscorePattern.matcher("tXble").matches()); + assertFalse(underscorePattern.matcher("tble").matches()); + assertFalse(underscorePattern.matcher("taable").matches()); + + // Mixed wildcards + Pattern mixedPattern = dbMetadata.compileSqlLikePattern("data_%_set%"); + assertNotNull(mixedPattern); + assertTrue(mixedPattern.matcher("data_1_set").matches()); + assertTrue(mixedPattern.matcher("data_foo_set_bar").matches()); + assertTrue(mixedPattern.matcher("DATA_X_SET").matches()); + assertFalse(mixedPattern.matcher("dataset").matches()); + assertFalse(mixedPattern.matcher("data_set").matches()); + + // Escaping regex metacharacters + Pattern dotPattern = dbMetadata.compileSqlLikePattern("version_1.0"); + assertNotNull(dotPattern); + assertTrue(dotPattern.matcher("version_1.0").matches()); + assertFalse(dotPattern.matcher("version_1X0").matches()); + + Pattern bracketPattern = dbMetadata.compileSqlLikePattern("array[0]"); + assertNotNull(bracketPattern); + assertTrue(bracketPattern.matcher("array[0]").matches()); + assertFalse(bracketPattern.matcher("array_0_").matches()); + } + + @Test + public void testMapBigQueryTypeToJdbc_ScalarTypes() { + // INT64 -> BIGINT + Field fieldInt64 = + Field.newBuilder("test_int", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoInt64 = + dbMetadata.mapBigQueryTypeToJdbc(fieldInt64); + assertEquals(Types.BIGINT, infoInt64.jdbcType); + assertEquals("BIGINT", infoInt64.typeName); + assertEquals(Integer.valueOf(19), infoInt64.columnSize); + assertEquals(Integer.valueOf(0), infoInt64.decimalDigits); + assertEquals(Integer.valueOf(10), infoInt64.numPrecRadix); + + // STRING -> NVARCHAR + Field fieldString = + Field.newBuilder("test_string", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoString = + dbMetadata.mapBigQueryTypeToJdbc(fieldString); 
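+    // STRING has no bounded length in BigQuery, so size/precision metadata is null below.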
+ assertEquals(Types.NVARCHAR, infoString.jdbcType); + assertEquals("NVARCHAR", infoString.typeName); + assertNull(infoString.columnSize); + assertNull(infoString.decimalDigits); + assertNull(infoString.numPrecRadix); + + // BOOL -> BOOLEAN + Field fieldBool = + Field.newBuilder("test_bool", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoBool = dbMetadata.mapBigQueryTypeToJdbc(fieldBool); + assertEquals(Types.BOOLEAN, infoBool.jdbcType); + assertEquals("BOOLEAN", infoBool.typeName); + assertEquals(Integer.valueOf(1), infoBool.columnSize); + + // BYTES -> VARBINARY + Field fieldBytes = + Field.newBuilder("test_bytes", StandardSQLTypeName.BYTES) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoBytes = + dbMetadata.mapBigQueryTypeToJdbc(fieldBytes); + assertEquals(Types.VARBINARY, infoBytes.jdbcType); + assertEquals("VARBINARY", infoBytes.typeName); + assertNull(infoBytes.columnSize); + + // TIMESTAMP -> TIMESTAMP + Field fieldTimestamp = + Field.newBuilder("test_ts", StandardSQLTypeName.TIMESTAMP) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoTimestamp = + dbMetadata.mapBigQueryTypeToJdbc(fieldTimestamp); + assertEquals(Types.TIMESTAMP, infoTimestamp.jdbcType); + assertEquals("TIMESTAMP", infoTimestamp.typeName); + assertEquals(Integer.valueOf(29), infoTimestamp.columnSize); + assertNull(infoTimestamp.decimalDigits); + assertNull(infoTimestamp.numPrecRadix); + + // DATETIME -> TIMESTAMP + Field fieldDateTime = + Field.newBuilder("test_dt", StandardSQLTypeName.DATETIME) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoDateTime = + dbMetadata.mapBigQueryTypeToJdbc(fieldDateTime); + assertEquals(Types.TIMESTAMP, infoDateTime.jdbcType); + assertEquals("TIMESTAMP", infoDateTime.typeName); + assertEquals(Integer.valueOf(29), infoDateTime.columnSize); + assertNull(infoDateTime.decimalDigits); + assertNull(infoDateTime.numPrecRadix); + + // NUMERIC -> NUMERIC + Field fieldNumeric = + Field.newBuilder("test_num", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoNumeric = + dbMetadata.mapBigQueryTypeToJdbc(fieldNumeric); + assertEquals(Types.NUMERIC, infoNumeric.jdbcType); + assertEquals("NUMERIC", infoNumeric.typeName); + assertEquals(Integer.valueOf(38), infoNumeric.columnSize); + assertEquals(Integer.valueOf(9), infoNumeric.decimalDigits); + assertEquals(Integer.valueOf(10), infoNumeric.numPrecRadix); + + // BIGNUMERIC -> NUMERIC + Field fieldBigNumeric = + Field.newBuilder("test_bignum", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoBigNumeric = + dbMetadata.mapBigQueryTypeToJdbc(fieldBigNumeric); + assertEquals(Types.NUMERIC, infoBigNumeric.jdbcType); + assertEquals("NUMERIC", infoBigNumeric.typeName); + assertEquals(Integer.valueOf(77), infoBigNumeric.columnSize); + assertEquals(Integer.valueOf(38), infoBigNumeric.decimalDigits); + assertEquals(Integer.valueOf(10), infoBigNumeric.numPrecRadix); + + // GEOGRAPHY -> VARCHAR + Field fieldGeo = + Field.newBuilder("test_geo", StandardSQLTypeName.GEOGRAPHY) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoGeo = dbMetadata.mapBigQueryTypeToJdbc(fieldGeo); + assertEquals(Types.VARCHAR, infoGeo.jdbcType); + assertEquals("VARCHAR", infoGeo.typeName); + 
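+    // Like STRING, GEOGRAPHY is unsized (values are surfaced as text), hence null size metadata.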
assertNull(infoGeo.columnSize); + + // DATE + Field fieldDate = + Field.newBuilder("test_date", StandardSQLTypeName.DATE) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoDate = dbMetadata.mapBigQueryTypeToJdbc(fieldDate); + assertEquals(Types.DATE, infoDate.jdbcType); + assertEquals("DATE", infoDate.typeName); + + // TIME + Field fieldTime = + Field.newBuilder("test_time", StandardSQLTypeName.TIME) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoTime = dbMetadata.mapBigQueryTypeToJdbc(fieldTime); + assertEquals(Types.TIME, infoTime.jdbcType); + assertEquals("TIME", infoTime.typeName); + + // STRUCT + Field fieldStruct = + Field.newBuilder( + "test_struct", + StandardSQLTypeName.STRUCT, + Field.of("sub_field", StandardSQLTypeName.STRING)) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoStruct = + dbMetadata.mapBigQueryTypeToJdbc(fieldStruct); + assertEquals(Types.STRUCT, infoStruct.jdbcType); + assertEquals("STRUCT", infoStruct.typeName); + } + + @Test + public void testMapBigQueryTypeToJdbc_ArrayType() { + Field fieldArray = + Field.newBuilder("test_array", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REPEATED) + .build(); + + BigQueryDatabaseMetaData.ColumnTypeInfo infoArray = + dbMetadata.mapBigQueryTypeToJdbc(fieldArray); + assertEquals(Types.ARRAY, infoArray.jdbcType); + assertEquals("ARRAY", infoArray.typeName); + assertNull(infoArray.columnSize); + assertNull(infoArray.decimalDigits); + assertNull(infoArray.numPrecRadix); + } + + @Test + public void testCreateColumnRow() { + Field realField = + Field.newBuilder("user_name", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("Name of the user") + .build(); + + String catalog = "test-project"; + String schema = "test_dataset"; + String table = "users"; + int ordinalPos = 3; + + List row = + dbMetadata.createColumnRow(catalog, schema, table, realField, ordinalPos); + + assertNotNull(row); + assertEquals(24, row.size()); + + assertEquals(catalog, row.get(0).getStringValue()); // 1. TABLE_CAT + assertEquals(schema, row.get(1).getStringValue()); // 2. TABLE_SCHEM + assertEquals(table, row.get(2).getStringValue()); // 3. TABLE_NAME + assertEquals("user_name", row.get(3).getStringValue()); // 4. COLUMN_NAME + assertEquals(String.valueOf(Types.NVARCHAR), row.get(4).getStringValue()); // 5. DATA_TYPE + assertEquals("NVARCHAR", row.get(5).getStringValue()); // 6. TYPE_NAME + assertTrue(row.get(6).isNull()); // 7. COLUMN_SIZE (was null for STRING) + assertTrue(row.get(7).isNull()); // 8. BUFFER_LENGTH (always null) + assertTrue(row.get(8).isNull()); // 9. DECIMAL_DIGITS (null for STRING) + assertTrue(row.get(9).isNull()); // 10. NUM_PREC_RADIX (null for STRING) + assertEquals( + String.valueOf(DatabaseMetaData.columnNullable), + row.get(10).getStringValue()); // 11. NULLABLE + assertEquals("Name of the user", row.get(11).getStringValue()); // 12. REMARKS + assertTrue(row.get(12).isNull()); // 13. COLUMN_DEF (null) + // 14, 15 unused + assertTrue(row.get(15).isNull()); // 16. CHAR_OCTET_LENGTH (was null) + assertEquals(String.valueOf(ordinalPos), row.get(16).getStringValue()); // 17. ORDINAL_POSITION + assertEquals("YES", row.get(17).getStringValue()); // 18. IS_NULLABLE (YES for NULLABLE mode) + // 19-22 SCOPE/SOURCE (null) + assertEquals("NO", row.get(22).getStringValue()); // 23. IS_AUTOINCREMENT (NO) + assertEquals("NO", row.get(23).getStringValue()); // 24. 
IS_GENERATEDCOLUMN (NO) + } + + @Test + public void testCreateColumnRow_RequiredInt() { + Field realField = + Field.newBuilder("user_id", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build(); + + String catalog = "p"; + String schema = "d"; + String table = "t"; + int ordinalPos = 1; + + List row = + dbMetadata.createColumnRow(catalog, schema, table, realField, ordinalPos); + + assertEquals(24, row.size()); + assertEquals("user_id", row.get(3).getStringValue()); // COLUMN_NAME + assertEquals(String.valueOf(Types.BIGINT), row.get(4).getStringValue()); // DATA_TYPE + assertEquals("BIGINT", row.get(5).getStringValue()); // TYPE_NAME + assertEquals("19", row.get(6).getStringValue()); // COLUMN_SIZE + assertEquals("0", row.get(8).getStringValue()); // DECIMAL_DIGITS + assertEquals("10", row.get(9).getStringValue()); // NUM_PREC_RADIX + assertEquals( + String.valueOf(DatabaseMetaData.columnNoNulls), row.get(10).getStringValue()); // NULLABLE + assertTrue(row.get(11).isNull()); // REMARKS (null description) + assertEquals(String.valueOf(ordinalPos), row.get(16).getStringValue()); // ORDINAL_POSITION + assertEquals("NO", row.get(17).getStringValue()); // IS_NULLABLE (NO for REQUIRED mode) + } + + @Test + public void testDefineGetTablesSchema() { + Schema schema = dbMetadata.defineGetTablesSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(10, fields.size()); + + Field tableCat = fields.get("TABLE_CAT"); + assertEquals("TABLE_CAT", tableCat.getName()); + assertEquals(StandardSQLTypeName.STRING, tableCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, tableCat.getMode()); + + Field tableName = fields.get("TABLE_NAME"); + assertEquals("TABLE_NAME", tableName.getName()); + assertEquals(StandardSQLTypeName.STRING, tableName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, tableName.getMode()); + + Field tableType = fields.get("TABLE_TYPE"); + assertEquals("TABLE_TYPE", tableType.getName()); + assertEquals(StandardSQLTypeName.STRING, tableType.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, tableType.getMode()); + + Field remarks = fields.get("REMARKS"); + assertEquals("REMARKS", remarks.getName()); + assertEquals(StandardSQLTypeName.STRING, remarks.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, remarks.getMode()); + + Field refGeneration = fields.get("REF_GENERATION"); + assertEquals("REF_GENERATION", refGeneration.getName()); + assertEquals(StandardSQLTypeName.STRING, refGeneration.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, refGeneration.getMode()); + } + + @Test + public void testProcessTableInfo_Basic() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj"; + String schema = "ds"; + String name = "my_table"; + TableDefinition.Type type = TableDefinition.Type.TABLE; + String description = "My test table"; + + Table table = mockBigQueryTable(catalog, schema, name, type, description); + + dbMetadata.processTableInfo(table, null, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(10, row.size()); + assertEquals(catalog, row.get("TABLE_CAT").getStringValue()); + assertEquals(schema, row.get("TABLE_SCHEM").getStringValue()); + assertEquals(name, 
row.get("TABLE_NAME").getStringValue()); + assertEquals(type.toString(), row.get("TABLE_TYPE").getStringValue()); + assertEquals(description, row.get("REMARKS").getStringValue()); + assertTrue(row.get("TYPE_CAT").isNull()); + assertTrue(row.get("TYPE_SCHEM").isNull()); + assertTrue(row.get("TYPE_NAME").isNull()); + assertTrue(row.get("SELF_REFERENCING_COL_NAME").isNull()); + assertTrue(row.get("REF_GENERATION").isNull()); + } + + @Test + public void testProcessTableInfo_TypeFilterMatch() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Set requestedTypes = new HashSet<>(Arrays.asList("VIEW", "TABLE")); + + Table table = mockBigQueryTable("p", "d", "t", TableDefinition.Type.TABLE, "Desc"); + + dbMetadata.processTableInfo(table, requestedTypes, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + assertEquals("TABLE", collectedResults.get(0).get("TABLE_TYPE").getStringValue()); + } + + @Test + public void testProcessTableInfo_TypeFilterMismatch() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Set requestedTypes = new HashSet<>(Collections.singletonList("VIEW")); + + Table table = mockBigQueryTable("p", "d", "t", TableDefinition.Type.TABLE, "Desc"); + + dbMetadata.processTableInfo(table, requestedTypes, collectedResults, resultSchemaFields); + + assertEquals(0, collectedResults.size()); + } + + @Test + public void testProcessTableInfo_NullDescription() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + Table table = mockBigQueryTable("p", "d", "t_no_desc", TableDefinition.Type.TABLE, null); + + dbMetadata.processTableInfo(table, null, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertTrue(row.get("REMARKS").isNull()); + } + + private FieldValueList createTableRow( + String cat, String schem, String name, String type, FieldList schemaFields) { + List values = new ArrayList<>(); + values.add(dbMetadata.createStringFieldValue(cat)); // TABLE_CAT + values.add(dbMetadata.createStringFieldValue(schem)); // TABLE_SCHEM + values.add(dbMetadata.createStringFieldValue(name)); // TABLE_NAME + values.add(dbMetadata.createStringFieldValue(type)); // TABLE_TYPE + values.add(dbMetadata.createStringFieldValue("Remark for " + name)); // REMARKS + values.add(dbMetadata.createNullFieldValue()); // TYPE_CAT + values.add(dbMetadata.createNullFieldValue()); // TYPE_SCHEM + values.add(dbMetadata.createNullFieldValue()); // TYPE_NAME + values.add(dbMetadata.createNullFieldValue()); // SELF_REFERENCING_COL_NAME + values.add(dbMetadata.createNullFieldValue()); // REF_GENERATION + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Tables() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + // Add rows in unsorted order (based on TYPE, CAT, SCHEM, NAME) + results.add(createTableRow("cat_a", "sch_z", "table_1", "TABLE", schemaFields)); + results.add(createTableRow("cat_b", "sch_a", "view_1", "VIEW", schemaFields)); 
+ results.add(createTableRow("cat_a", "sch_c", "table_2", "TABLE", schemaFields)); + results.add(createTableRow(null, "sch_b", "table_0", "TABLE", schemaFields)); + results.add(createTableRow("cat_a", "sch_z", "table_0", "TABLE", schemaFields)); + results.add(createTableRow("cat_a", null, "view_0", "VIEW", schemaFields)); + + Comparator comparator = dbMetadata.defineGetTablesComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getTables", dbMetadata.LOG); + + // Expected order: TABLEs first, then VIEWs. Within type, sort by CAT, SCHEM, NAME + assertEquals(6, results.size()); + + // 1. Null cat, sch_b, table_0, TABLE + assertTrue("Row 0 TABLE_CAT should be null", results.get(0).get("TABLE_CAT").isNull()); + assertEquals("sch_b", results.get(0).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_0", results.get(0).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(0).get("TABLE_TYPE").getStringValue()); + + // 2. cat_a, sch_c, table_2, TABLE + assertEquals("cat_a", results.get(1).get("TABLE_CAT").getStringValue()); + assertEquals("sch_c", results.get(1).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_2", results.get(1).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(1).get("TABLE_TYPE").getStringValue()); + + // 3. cat_a, sch_z, table_0, TABLE + assertEquals("cat_a", results.get(2).get("TABLE_CAT").getStringValue()); + assertEquals("sch_z", results.get(2).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_0", results.get(2).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(2).get("TABLE_TYPE").getStringValue()); + + // 4. cat_a, sch_z, table_1, TABLE + assertEquals("cat_a", results.get(3).get("TABLE_CAT").getStringValue()); + assertEquals("sch_z", results.get(3).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_1", results.get(3).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(3).get("TABLE_TYPE").getStringValue()); + + // 5. cat_a, null, view_0, VIEW + assertEquals("cat_a", results.get(4).get("TABLE_CAT").getStringValue()); + assertTrue("Row 4 TABLE_SCHEM should be null", results.get(4).get("TABLE_SCHEM").isNull()); + assertEquals("view_0", results.get(4).get("TABLE_NAME").getStringValue()); + assertEquals("VIEW", results.get(4).get("TABLE_TYPE").getStringValue()); + + // 6. 
cat_b, sch_a, view_1, VIEW + assertEquals("cat_b", results.get(5).get("TABLE_CAT").getStringValue()); + assertEquals("sch_a", results.get(5).get("TABLE_SCHEM").getStringValue()); + assertEquals("view_1", results.get(5).get("TABLE_NAME").getStringValue()); + assertEquals("VIEW", results.get(5).get("TABLE_TYPE").getStringValue()); + } + + @Test + public void testSortResults_Tables_EmptyList() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + Comparator comparator = dbMetadata.defineGetTablesComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getTables", dbMetadata.LOG); + assertTrue(results.isEmpty()); + } + + @Test + public void testDefineGetSchemasSchema() { + Schema schema = dbMetadata.defineGetSchemasSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(2, fields.size()); + + Field tableSchem = fields.get("TABLE_SCHEM"); + assertEquals("TABLE_SCHEM", tableSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, tableSchem.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, tableSchem.getMode()); + + Field tableCatalog = fields.get("TABLE_CATALOG"); + assertEquals("TABLE_CATALOG", tableCatalog.getName()); + assertEquals(StandardSQLTypeName.STRING, tableCatalog.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, tableCatalog.getMode()); + } + + private Dataset mockBigQueryDataset(String project, String datasetName) { + Dataset mockDataset = mock(Dataset.class); + DatasetId mockDatasetId = DatasetId.of(project, datasetName); + when(mockDataset.getDatasetId()).thenReturn(mockDatasetId); + return mockDataset; + } + + @Test + public void testProcessSchemaInfo() { + Schema resultSchema = dbMetadata.defineGetSchemasSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "project-alpha"; + String schemaName = "dataset_beta"; + Dataset dataset = mockBigQueryDataset(catalog, schemaName); + + dbMetadata.processSchemaInfo(dataset, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(2, row.size()); + assertEquals(schemaName, row.get("TABLE_SCHEM").getStringValue()); + assertEquals(catalog, row.get("TABLE_CATALOG").getStringValue()); + } + + private FieldValueList createSchemaRow(String cat, String schem, FieldList schemaFields) { + List values = new ArrayList<>(); + values.add(dbMetadata.createStringFieldValue(schem)); // TABLE_SCHEM + values.add(dbMetadata.createStringFieldValue(cat)); // TABLE_CATALOG + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Schemas() { + Schema resultSchema = dbMetadata.defineGetSchemasSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + results.add(createSchemaRow("proj_b", "schema_z", schemaFields)); + results.add(createSchemaRow("proj_a", "schema_y", schemaFields)); + results.add(createSchemaRow(null, "schema_x", schemaFields)); + results.add(createSchemaRow("proj_b", "schema_a", schemaFields)); + results.add(createSchemaRow("proj_a", "schema_c", schemaFields)); + + Comparator comparator = dbMetadata.defineGetSchemasComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getSchemas", dbMetadata.LOG); + + // Expected order: Sort by TABLE_CATALOG 
(nulls first), then TABLE_SCHEM + assertEquals(5, results.size()); + + // 1. Null catalog, schema_x + assertTrue("Row 0 TABLE_CATALOG should be null", results.get(0).get("TABLE_CATALOG").isNull()); + assertEquals("schema_x", results.get(0).get("TABLE_SCHEM").getStringValue()); + + // 2. proj_a, schema_c + assertEquals("proj_a", results.get(1).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_c", results.get(1).get("TABLE_SCHEM").getStringValue()); + + // 3. proj_a, schema_y + assertEquals("proj_a", results.get(2).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_y", results.get(2).get("TABLE_SCHEM").getStringValue()); + + // 4. proj_b, schema_a + assertEquals("proj_b", results.get(3).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_a", results.get(3).get("TABLE_SCHEM").getStringValue()); + + // 5. proj_b, schema_z + assertEquals("proj_b", results.get(4).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_z", results.get(4).get("TABLE_SCHEM").getStringValue()); + } + + @Test + public void testSortResults_Schemas_EmptyList() { + Schema resultSchema = dbMetadata.defineGetSchemasSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + Comparator comparator = dbMetadata.defineGetSchemasComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getSchemas", dbMetadata.LOG); + assertTrue(results.isEmpty()); + } + + private Routine mockBigQueryRoutine( + String project, String dataset, String routineName, String routineType, String description) { + Routine mockRoutine = mock(Routine.class); + RoutineId mockRoutineId = RoutineId.of(project, dataset, routineName); + when(mockRoutine.getRoutineId()).thenReturn(mockRoutineId); + when(mockRoutine.getRoutineType()).thenReturn(routineType); + when(mockRoutine.getDescription()).thenReturn(description); + return mockRoutine; + } + + @Test + public void testDefineGetProceduresSchema() { + Schema schema = dbMetadata.defineGetProceduresSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(9, fields.size()); + + Field procCat = fields.get("PROCEDURE_CAT"); + assertEquals("PROCEDURE_CAT", procCat.getName()); + assertEquals(StandardSQLTypeName.STRING, procCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, procCat.getMode()); + + Field procName = fields.get("PROCEDURE_NAME"); + assertEquals("PROCEDURE_NAME", procName.getName()); + assertEquals(StandardSQLTypeName.STRING, procName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, procName.getMode()); + + Field remarks = fields.get("REMARKS"); + assertEquals("REMARKS", remarks.getName()); + assertEquals(StandardSQLTypeName.STRING, remarks.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, remarks.getMode()); + + Field procType = fields.get("PROCEDURE_TYPE"); + assertEquals("PROCEDURE_TYPE", procType.getName()); + assertEquals(StandardSQLTypeName.INT64, procType.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, procType.getMode()); + + Field specificName = fields.get("SPECIFIC_NAME"); + assertEquals("SPECIFIC_NAME", specificName.getName()); + assertEquals(StandardSQLTypeName.STRING, specificName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, specificName.getMode()); + } + + @Test + public void testProcessProcedureInfo_BasicProcedure() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = 
Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj-1"; + String schema = "dataset_a"; + String name = "my_proc"; + String description = "A test procedure"; + + Routine routine = mockBigQueryRoutine(catalog, schema, name, "PROCEDURE", description); + + dbMetadata.processProcedureInfo(routine, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(9, row.size()); + assertEquals(catalog, row.get("PROCEDURE_CAT").getStringValue()); + assertEquals(schema, row.get("PROCEDURE_SCHEM").getStringValue()); + assertEquals(name, row.get("PROCEDURE_NAME").getStringValue()); + assertTrue(row.get("reserved1").isNull()); + assertTrue(row.get("reserved2").isNull()); + assertTrue(row.get("reserved3").isNull()); + assertEquals(description, row.get("REMARKS").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.procedureResultUnknown), + row.get("PROCEDURE_TYPE").getStringValue()); + assertEquals(name, row.get("SPECIFIC_NAME").getStringValue()); + } + + @Test + public void testProcessProcedureInfo_NullDescription() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj-1"; + String schema = "dataset_a"; + String name = "proc_no_desc"; + + Routine routine = mockBigQueryRoutine(catalog, schema, name, "PROCEDURE", null); + + dbMetadata.processProcedureInfo(routine, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertTrue(row.get("REMARKS").isNull()); + } + + @Test + public void testProcessProcedureInfo_SkipsNonProcedure() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + Routine routine = mockBigQueryRoutine("p", "d", "my_func", "FUNCTION", "A function"); + + dbMetadata.processProcedureInfo(routine, collectedResults, resultSchemaFields); + + assertTrue(collectedResults.isEmpty()); + } + + private FieldValueList createProcedureRow( + String cat, String schem, String name, String specName, FieldList schemaFields) { + List values = new ArrayList<>(); + values.add(dbMetadata.createStringFieldValue(cat)); // PROCEDURE_CAT + values.add(dbMetadata.createStringFieldValue(schem)); // PROCEDURE_SCHEM + values.add(dbMetadata.createStringFieldValue(name)); // PROCEDURE_NAME + values.add(dbMetadata.createNullFieldValue()); // reserved1 + values.add(dbMetadata.createNullFieldValue()); // reserved2 + values.add(dbMetadata.createNullFieldValue()); // reserved3 + values.add(dbMetadata.createStringFieldValue("Remark for " + name)); // REMARKS + values.add( + dbMetadata.createLongFieldValue( + (long) DatabaseMetaData.procedureResultUnknown)); // PROCEDURE_TYPE + values.add(dbMetadata.createStringFieldValue(specName)); // SPECIFIC_NAME + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Procedures() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + // Add rows in unsorted order (Sort by CAT, SCHEM, NAME, SPECIFIC_NAME) + results.add(createProcedureRow("cat_b", "sch_c", "proc_1", "proc_1_spec", schemaFields)); + results.add( 
+        createProcedureRow("cat_a", "sch_z", "proc_alpha", "proc_alpha_spec", schemaFields));
+    results.add(createProcedureRow("cat_a", "sch_z", "proc_beta", "proc_beta_spec", schemaFields));
+    results.add(createProcedureRow(null, "sch_y", "proc_gamma", "proc_gamma_spec", schemaFields));
+    results.add(createProcedureRow("cat_a", null, "proc_delta", "proc_delta_spec", schemaFields));
+    results.add(
+        createProcedureRow("cat_a", "sch_z", "proc_alpha", "proc_alpha_spec_older", schemaFields));
+
+    Comparator<FieldValueList> comparator = dbMetadata.defineGetProceduresComparator(schemaFields);
+    dbMetadata.sortResults(results, comparator, "getProcedures", dbMetadata.LOG);
+
+    // Expected order: null catalog first, then cat_a (null schema before sch_z), then cat_b.
+    // Within that, rows sort by name, then by specific name.
+    assertEquals(6, results.size());
+
+    // 1. Null cat, sch_y, proc_gamma, proc_gamma_spec
+    assertTrue("Row 0 PROC_CAT should be null", results.get(0).get("PROCEDURE_CAT").isNull());
+    assertEquals("sch_y", results.get(0).get("PROCEDURE_SCHEM").getStringValue());
+    assertEquals("proc_gamma", results.get(0).get("PROCEDURE_NAME").getStringValue());
+    assertEquals("proc_gamma_spec", results.get(0).get("SPECIFIC_NAME").getStringValue());
+
+    // 2. cat_a, null schem, proc_delta, proc_delta_spec
+    assertEquals("cat_a", results.get(1).get("PROCEDURE_CAT").getStringValue());
+    assertTrue("Row 1 PROC_SCHEM should be null", results.get(1).get("PROCEDURE_SCHEM").isNull());
+    assertEquals("proc_delta", results.get(1).get("PROCEDURE_NAME").getStringValue());
+    assertEquals("proc_delta_spec", results.get(1).get("SPECIFIC_NAME").getStringValue());
+
+    // 3. cat_a, sch_z, proc_alpha, "proc_alpha_spec" (sorts first lexicographically)
+    assertEquals("cat_a", results.get(2).get("PROCEDURE_CAT").getStringValue());
+    assertEquals("sch_z", results.get(2).get("PROCEDURE_SCHEM").getStringValue());
+    assertEquals("proc_alpha", results.get(2).get("PROCEDURE_NAME").getStringValue());
+    assertEquals("proc_alpha_spec", results.get(2).get("SPECIFIC_NAME").getStringValue());
+
+    // 4. cat_a, sch_z, proc_alpha, "proc_alpha_spec_older" (sorts second lexicographically)
+    assertEquals("cat_a", results.get(3).get("PROCEDURE_CAT").getStringValue());
+    assertEquals("sch_z", results.get(3).get("PROCEDURE_SCHEM").getStringValue());
+    assertEquals("proc_alpha", results.get(3).get("PROCEDURE_NAME").getStringValue());
+    assertEquals(
+        "proc_alpha_spec_older",
+        results.get(3).get("SPECIFIC_NAME").getStringValue());
+
+    // 5. cat_a, sch_z, proc_beta, proc_beta_spec
+    assertEquals("cat_a", results.get(4).get("PROCEDURE_CAT").getStringValue());
+    assertEquals("sch_z", results.get(4).get("PROCEDURE_SCHEM").getStringValue());
+    assertEquals("proc_beta", results.get(4).get("PROCEDURE_NAME").getStringValue());
+    assertEquals("proc_beta_spec", results.get(4).get("SPECIFIC_NAME").getStringValue());
+
+    // 6.
cat_b, sch_c, proc_1, proc_1_spec + assertEquals("cat_b", results.get(5).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_c", results.get(5).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_1", results.get(5).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_1_spec", results.get(5).get("SPECIFIC_NAME").getStringValue()); + } + + @Test + public void testSortResults_Procedures_EmptyList() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + Comparator comparator = dbMetadata.defineGetProceduresComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getProcedures", dbMetadata.LOG); + assertTrue(results.isEmpty()); + } + + @Test + public void testFindMatchingBigQueryObjects_Routines_ListWithPattern() { + String catalog = "p-cat"; + String schema = "d-sch"; + String pattern = "proc_%"; + DatasetId datasetId = DatasetId.of(catalog, schema); + + Routine proc1 = mockBigQueryRoutine(catalog, schema, "proc_abc", "PROCEDURE", "p1"); + Routine proc2 = mockBigQueryRoutine(catalog, schema, "proc_xyz", "PROCEDURE", "p2"); + Routine func1 = mockBigQueryRoutine(catalog, schema, "func_123", "FUNCTION", "f1"); + Routine otherProc = mockBigQueryRoutine(catalog, schema, "another_proc", "PROCEDURE", "p3"); + + Page page = mock(Page.class); + when(page.iterateAll()).thenReturn(Arrays.asList(proc1, func1, proc2, otherProc)); + when(bigqueryClient.listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class))) + .thenReturn(page); + + Pattern regex = dbMetadata.compileSqlLikePattern(pattern); + assertNotNull(regex); + + List results = + dbMetadata.findMatchingBigQueryObjects( + "Routine", + () -> bigqueryClient.listRoutines(datasetId, BigQuery.RoutineListOption.pageSize(500)), + (name) -> + bigqueryClient.getRoutine( + RoutineId.of(datasetId.getProject(), datasetId.getDataset(), name)), + (rt) -> rt.getRoutineId().getRoutine(), + pattern, + regex, + dbMetadata.LOG); + + verify(bigqueryClient, times(1)) + .listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class)); + verify(bigqueryClient, never()).getRoutine(any(RoutineId.class)); + + assertNotNull(results); + List resultList = new ArrayList<>(results); + + assertEquals("Should contain only matching routines", 2, resultList.size()); + assertTrue("Should contain proc_abc", resultList.contains(proc1)); + assertTrue("Should contain proc_xyz", resultList.contains(proc2)); + assertFalse("Should not contain func_123", resultList.contains(func1)); + assertFalse("Should not contain another_proc", resultList.contains(otherProc)); + } + + @Test + public void testFindMatchingBigQueryObjects_Routines_ListNoPattern() { + String catalog = "p-cat"; + String schema = "d-sch"; + String pattern = null; + DatasetId datasetId = DatasetId.of(catalog, schema); + + Routine proc1 = mockBigQueryRoutine(catalog, schema, "proc_abc", "PROCEDURE", "p1"); + Routine func1 = mockBigQueryRoutine(catalog, schema, "func_123", "FUNCTION", "f1"); + + Page page = mock(Page.class); + when(page.iterateAll()).thenReturn(Arrays.asList(proc1, func1)); + when(bigqueryClient.listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class))) + .thenReturn(page); + + Pattern regex = dbMetadata.compileSqlLikePattern(pattern); + List results = + dbMetadata.findMatchingBigQueryObjects( + "Routine", + () -> bigqueryClient.listRoutines(datasetId, BigQuery.RoutineListOption.pageSize(500)), + (name) -> + bigqueryClient.getRoutine( + 
+ RoutineId.of(datasetId.getProject(), datasetId.getDataset(), name)),
+ (rt) -> rt.getRoutineId().getRoutine(),
+ pattern,
+ regex,
+ dbMetadata.LOG);
+
+ verify(bigqueryClient, times(1))
+ .listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class));
+
+ assertNotNull(results);
+ List<Routine> resultList = new ArrayList<>(results);
+
+ assertEquals("Should contain all routines when pattern is null", 2, resultList.size());
+ assertTrue("Should contain proc_abc", resultList.contains(proc1));
+ assertTrue("Should contain func_123", resultList.contains(func1));
+ }
+
+ @Test
+ public void testFindMatchingBigQueryObjects_Routines_GetSpecific() {
+ String catalog = "p-cat";
+ String schema = "d-sch";
+ String procNameExact = "exactprocname";
+ DatasetId datasetId = DatasetId.of(catalog, schema);
+ RoutineId routineId = RoutineId.of(catalog, schema, procNameExact);
+ Routine mockRoutine = mockBigQueryRoutine(catalog, schema, procNameExact, "PROCEDURE", "desc");
+
+ when(bigqueryClient.getRoutine(eq(routineId))).thenReturn(mockRoutine);
+ Pattern regex = dbMetadata.compileSqlLikePattern(procNameExact);
+
+ List<Routine> results =
+ dbMetadata.findMatchingBigQueryObjects(
+ "Routine",
+ () -> bigqueryClient.listRoutines(datasetId, BigQuery.RoutineListOption.pageSize(500)),
+ (name) ->
+ bigqueryClient.getRoutine(
+ RoutineId.of(datasetId.getProject(), datasetId.getDataset(), name)),
+ (rt) -> rt.getRoutineId().getRoutine(),
+ procNameExact,
+ regex,
+ dbMetadata.LOG);
+
+ verify(bigqueryClient, times(1)).getRoutine(eq(routineId));
+ verify(bigqueryClient, never())
+ .listRoutines(any(DatasetId.class), any(BigQuery.RoutineListOption.class));
+
+ assertNotNull(results);
+ List<Routine> resultList = new ArrayList<>(results);
+ assertEquals(1, resultList.size());
+ assertSame(mockRoutine, resultList.get(0));
+ }
+
+ @Test
+ public void testDefineGetProcedureColumnsSchema() {
+ Schema schema = dbMetadata.defineGetProcedureColumnsSchema();
+ assertNotNull(schema);
+ FieldList fields = schema.getFields();
+ assertEquals(20, fields.size());
+
+ Field procCat = fields.get("PROCEDURE_CAT");
+ assertEquals("PROCEDURE_CAT", procCat.getName());
+ assertEquals(StandardSQLTypeName.STRING, procCat.getType().getStandardType());
+ assertEquals(Field.Mode.NULLABLE, procCat.getMode());
+
+ Field colName = fields.get("COLUMN_NAME");
+ assertEquals("COLUMN_NAME", colName.getName());
+ assertEquals(StandardSQLTypeName.STRING, colName.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, colName.getMode());
+
+ Field colType = fields.get("COLUMN_TYPE");
+ assertEquals("COLUMN_TYPE", colType.getName());
+ assertEquals(StandardSQLTypeName.INT64, colType.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, colType.getMode());
+
+ Field dataType = fields.get("DATA_TYPE");
+ assertEquals("DATA_TYPE", dataType.getName());
+ assertEquals(StandardSQLTypeName.INT64, dataType.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, dataType.getMode());
+
+ Field typeName = fields.get("TYPE_NAME");
+ assertEquals("TYPE_NAME", typeName.getName());
+ assertEquals(StandardSQLTypeName.STRING, typeName.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, typeName.getMode());
+
+ Field ordinalPos = fields.get("ORDINAL_POSITION");
+ assertEquals("ORDINAL_POSITION", ordinalPos.getName());
+ assertEquals(StandardSQLTypeName.INT64, ordinalPos.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, ordinalPos.getMode());
+
+ Field isNullable = fields.get("IS_NULLABLE");
+ assertEquals("IS_NULLABLE", isNullable.getName());
+ assertEquals(StandardSQLTypeName.STRING, isNullable.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, isNullable.getMode());
+
+ Field specificName = fields.get("SPECIFIC_NAME");
+ assertEquals("SPECIFIC_NAME", specificName.getName());
+ assertEquals(StandardSQLTypeName.STRING, specificName.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, specificName.getMode());
+ }
+
+ @Test
+ public void testDetermineTypeInfoFromDataType() {
+ // INT64
+ StandardSQLDataType sqlInt64 = mockStandardSQLDataType(StandardSQLTypeName.INT64);
+ BigQueryDatabaseMetaData.ColumnTypeInfo infoInt64 =
+ dbMetadata.determineTypeInfoFromDataType(sqlInt64, "p", "c", 1);
+ assertEquals(Types.BIGINT, infoInt64.jdbcType);
+ assertEquals("BIGINT", infoInt64.typeName);
+
+ // STRING
+ StandardSQLDataType sqlString = mockStandardSQLDataType(StandardSQLTypeName.STRING);
+ BigQueryDatabaseMetaData.ColumnTypeInfo infoString =
+ dbMetadata.determineTypeInfoFromDataType(sqlString, "p", "c", 1);
+ assertEquals(Types.NVARCHAR, infoString.jdbcType);
+ assertEquals("NVARCHAR", infoString.typeName);
+
+ // BOOL
+ StandardSQLDataType sqlBool = mockStandardSQLDataType(StandardSQLTypeName.BOOL);
+ BigQueryDatabaseMetaData.ColumnTypeInfo infoBool =
+ dbMetadata.determineTypeInfoFromDataType(sqlBool, "p", "c", 1);
+ assertEquals(Types.BOOLEAN, infoBool.jdbcType);
+ assertEquals("BOOLEAN", infoBool.typeName);
+
+ // STRUCT
+ StandardSQLDataType sqlStruct = mockStandardSQLDataType(StandardSQLTypeName.STRUCT);
+ BigQueryDatabaseMetaData.ColumnTypeInfo infoStruct =
+ dbMetadata.determineTypeInfoFromDataType(sqlStruct, "p", "c", 1);
+ assertEquals(Types.STRUCT, infoStruct.jdbcType);
+ assertEquals("STRUCT", infoStruct.typeName);
+
+ // Case: null typeKind from StandardSQLDataType (should default to VARCHAR)
+ StandardSQLDataType sqlNullKind = mock(StandardSQLDataType.class);
+ when(sqlNullKind.getTypeKind()).thenReturn(null);
+ BigQueryDatabaseMetaData.ColumnTypeInfo infoNullKind =
+ dbMetadata.determineTypeInfoFromDataType(sqlNullKind, "p", "c", 1);
+ assertEquals(Types.VARCHAR, infoNullKind.jdbcType);
+ assertEquals("VARCHAR", infoNullKind.typeName);
+
+ // Case: unknown typeKind from StandardSQLDataType (should default to VARCHAR)
+ StandardSQLDataType sqlUnknownKind = mock(StandardSQLDataType.class);
+ when(sqlUnknownKind.getTypeKind()).thenReturn("SUPER_DOOPER_TYPE");
+ BigQueryDatabaseMetaData.ColumnTypeInfo infoUnknownKind =
+ dbMetadata.determineTypeInfoFromDataType(sqlUnknownKind, "p", "c", 1);
+ assertEquals(Types.VARCHAR, infoUnknownKind.jdbcType);
+ assertEquals("VARCHAR", infoUnknownKind.typeName);
+ }
+
+ @Test
+ public void testCreateProcedureColumnRow_BasicInParam() {
+ String catalog = "proj_x";
+ String schema = "data_y";
+ String procName = "proc_z";
+ String specificName = "proc_z_specific";
+
+ RoutineArgument arg = mockRoutineArgument("param_in", StandardSQLTypeName.STRING, "IN");
+ int ordinalPos = 1;
+
+ List<FieldValue> row =
+ dbMetadata.createProcedureColumnRow(
+ catalog, schema, procName, specificName, arg, ordinalPos, "param_in");
+
+ assertNotNull(row);
+ assertEquals(20, row.size());
+
+ assertEquals(catalog, row.get(0).getStringValue()); // 1. PROCEDURE_CAT
+ assertEquals(schema, row.get(1).getStringValue()); // 2. PROCEDURE_SCHEM
+ assertEquals(procName, row.get(2).getStringValue()); // 3. PROCEDURE_NAME
+ assertEquals("param_in", row.get(3).getStringValue()); // 4. COLUMN_NAME
+ assertEquals(
+ String.valueOf(DatabaseMetaData.procedureColumnIn),
+ row.get(4).getStringValue()); // 5. COLUMN_TYPE
+ assertEquals(String.valueOf(Types.NVARCHAR), row.get(5).getStringValue()); // 6. DATA_TYPE
+ assertEquals("NVARCHAR", row.get(6).getStringValue()); // 7. TYPE_NAME
+ assertTrue(row.get(7).isNull()); // 8. PRECISION
+ assertTrue(row.get(8).isNull()); // 9. LENGTH
+ assertTrue(row.get(9).isNull()); // 10. SCALE
+ assertTrue(row.get(10).isNull()); // 11. RADIX
+ assertEquals(
+ String.valueOf(DatabaseMetaData.procedureNullable),
+ row.get(11).getStringValue()); // 12. NULLABLE
+ assertTrue(row.get(12).isNull()); // 13. REMARKS
+ assertTrue(row.get(13).isNull()); // 14. COLUMN_DEF
+ assertTrue(row.get(14).isNull()); // 15. SQL_DATA_TYPE
+ assertTrue(row.get(15).isNull()); // 16. SQL_DATETIME_SUB
+ assertTrue(row.get(16).isNull()); // 17. CHAR_OCTET_LENGTH
+ assertEquals(String.valueOf(ordinalPos), row.get(17).getStringValue()); // 18. ORDINAL_POSITION
+ assertEquals("YES", row.get(18).getStringValue()); // 19. IS_NULLABLE
+ assertEquals(specificName, row.get(19).getStringValue()); // 20. SPECIFIC_NAME
+ }
+
+ @Test
+ public void testCreateProcedureColumnRow_NumericOutParam() {
+ RoutineArgument arg = mockRoutineArgument("param_out_num", StandardSQLTypeName.NUMERIC, "OUT");
+ int ordinalPos = 2;
+
+ List<FieldValue> row =
+ dbMetadata.createProcedureColumnRow(
+ "p", "d", "proc", "proc_spec", arg, ordinalPos, "param_out_num");
+
+ assertEquals(
+ String.valueOf(DatabaseMetaData.procedureColumnOut),
+ row.get(4).getStringValue()); // COLUMN_TYPE
+ assertEquals(String.valueOf(Types.NUMERIC), row.get(5).getStringValue()); // DATA_TYPE
+ assertEquals("NUMERIC", row.get(6).getStringValue()); // TYPE_NAME
+ assertEquals("38", row.get(7).getStringValue()); // PRECISION
+ assertEquals("9", row.get(9).getStringValue()); // SCALE
+ assertEquals("10", row.get(10).getStringValue()); // RADIX
+ }
+
+ @Test
+ public void testCreateProcedureColumnRow_InOutTimestampParam() {
+ RoutineArgument arg =
+ mockRoutineArgument("param_inout_ts", StandardSQLTypeName.TIMESTAMP, "INOUT");
+ List<FieldValue> row =
+ dbMetadata.createProcedureColumnRow(
+ "p", "d", "proc", "proc_spec", arg, 3, "param_inout_ts");
+
+ assertEquals(
+ String.valueOf(DatabaseMetaData.procedureColumnInOut), row.get(4).getStringValue());
+ assertEquals(String.valueOf(Types.TIMESTAMP), row.get(5).getStringValue());
+ assertEquals("TIMESTAMP", row.get(6).getStringValue());
+ assertEquals("29", row.get(7).getStringValue()); // PRECISION for TIMESTAMP
+ }
+
+ @Test
+ public void testCreateProcedureColumnRow_UnknownModeDefaultsToUnknownType() {
+ RoutineArgument arg =
+ mockRoutineArgument("param_unknown_mode", StandardSQLTypeName.BOOL, "UNKNOWN_MODE");
+ List<FieldValue> row =
+ dbMetadata.createProcedureColumnRow(
+ "p", "d", "proc", "proc_spec", arg, 1, "param_unknown_mode");
+ assertEquals(
+ String.valueOf(DatabaseMetaData.procedureColumnUnknown), row.get(4).getStringValue());
+ }
+
+ @Test
+ public void testCreateProcedureColumnRow_NullArgumentObject() {
+ List<FieldValue> row =
+ dbMetadata.createProcedureColumnRow(
+ "cat", "schem", "proc", "spec", null, 1, "fallback_arg_name");
+
+ assertNotNull(row);
+ assertEquals(20, row.size());
+ assertEquals("fallback_arg_name", row.get(3).getStringValue()); // COLUMN_NAME
+ assertEquals(String.valueOf(Types.VARCHAR), row.get(5).getStringValue()); // DATA_TYPE
+ assertEquals("VARCHAR", row.get(6).getStringValue()); // TYPE_NAME
+ assertEquals(
+ String.valueOf(DatabaseMetaData.procedureColumnUnknown), row.get(4).getStringValue());
+ }
+
+ @Test
+ public void testCreateProcedureColumnRow_NullArgumentDataType() {
+ RoutineArgument argWithNullDataType = mock(RoutineArgument.class);
+ when(argWithNullDataType.getName()).thenReturn("arg_null_type");
+ when(argWithNullDataType.getDataType()).thenReturn(null);
+ when(argWithNullDataType.getMode()).thenReturn("IN");
+
+ List<FieldValue> row =
+ dbMetadata.createProcedureColumnRow(
+ "cat", "schem", "proc", "spec", argWithNullDataType, 1, "arg_null_type");
+
+ assertNotNull(row);
+ assertEquals("arg_null_type", row.get(3).getStringValue());
+ assertEquals(
+ String.valueOf(Types.VARCHAR), row.get(5).getStringValue()); // DATA_TYPE should default
+ assertEquals("VARCHAR", row.get(6).getStringValue()); // TYPE_NAME should default
+ }
+
+ @Test
+ public void testProcessProcedureArguments_NoArguments() {
+ Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+ FieldList resultFields = resultSchema.getFields();
+ List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+ Pattern noColumnNamePattern = null; // Match all columns
+
+ Routine routine =
+ mockBigQueryRoutineWithArgs(
+ "p", "d", "proc_no_args", "PROCEDURE", "desc", Collections.emptyList()); // No arguments
+
+ dbMetadata.processProcedureArguments(
+ routine, noColumnNamePattern, collectedResults, resultFields);
+ assertTrue(collectedResults.isEmpty());
+ }
+
+ @Test
+ public void testProcessProcedureArguments_WithArgumentsNoFilter() {
+ Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+ FieldList resultFields = resultSchema.getFields();
+ List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+ Pattern noColumnNamePattern = null;
+
+ RoutineArgument arg1 = mockRoutineArgument("param1", StandardSQLTypeName.INT64, "IN");
+ RoutineArgument arg2 = mockRoutineArgument("param2", StandardSQLTypeName.STRING, "OUT");
+ Routine routine =
+ mockBigQueryRoutineWithArgs(
+ "p", "d", "proc_two_args", "PROCEDURE", "desc", Arrays.asList(arg1, arg2));
+
+ dbMetadata.processProcedureArguments(
+ routine, noColumnNamePattern, collectedResults, resultFields);
+ assertEquals(2, collectedResults.size());
+ assertEquals("param1", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
+ assertEquals("param2", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
+ assertEquals(
+ String.valueOf(1), collectedResults.get(0).get("ORDINAL_POSITION").getStringValue());
+ assertEquals(
+ String.valueOf(2), collectedResults.get(1).get("ORDINAL_POSITION").getStringValue());
+ }
+
+ @Test
+ public void testProcessProcedureArguments_WithColumnNameFilter() {
+ Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+ FieldList resultFields = resultSchema.getFields();
+ List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+ Pattern columnNamePattern = Pattern.compile(".*_id"); // Match columns ending with _id
+
+ RoutineArgument arg1 = mockRoutineArgument("user_id", StandardSQLTypeName.INT64, "IN");
+ RoutineArgument arg2 = mockRoutineArgument("user_name", StandardSQLTypeName.STRING, "IN");
+ RoutineArgument arg3 = mockRoutineArgument("session_id", StandardSQLTypeName.STRING, "INOUT");
+
+ Routine routine =
+ mockBigQueryRoutineWithArgs(
+ "p", "d", "proc_filtered_args", "PROCEDURE", "desc", Arrays.asList(arg1, arg2, arg3));
+
+ dbMetadata.processProcedureArguments(
+ routine, columnNamePattern, collectedResults, resultFields);
+ assertEquals(2, collectedResults.size());
+ assertEquals("user_id", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
collectedResults.get(0).get("COLUMN_NAME").getStringValue()); + assertEquals("session_id", collectedResults.get(1).get("COLUMN_NAME").getStringValue()); + } + + @Test + public void testProcessProcedureArguments_HandlesNullArgumentInList() { + Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema(); + FieldList resultFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Pattern noColumnNamePattern = null; + + RoutineArgument arg1 = mockRoutineArgument("valid_arg", StandardSQLTypeName.INT64, "IN"); + List argsWithNull = new ArrayList<>(); + argsWithNull.add(arg1); + argsWithNull.add(null); // Add a null argument + RoutineArgument arg3 = + mockRoutineArgument("another_valid_arg", StandardSQLTypeName.STRING, "OUT"); + argsWithNull.add(arg3); + + Routine routine = mock(Routine.class); + RoutineId routineId = RoutineId.of("p", "d", "proc_with_null_arg_in_list"); + when(routine.getRoutineId()).thenReturn(routineId); + when(routine.getRoutineType()).thenReturn("PROCEDURE"); + when(routine.getArguments()).thenReturn(argsWithNull); + + dbMetadata.processProcedureArguments( + routine, noColumnNamePattern, collectedResults, resultFields); + + assertEquals(3, collectedResults.size()); + assertEquals("valid_arg", collectedResults.get(0).get("COLUMN_NAME").getStringValue()); + // The fallback name generated by processProcedureArguments when arg is null in list + assertEquals( + "arg_retrieval_err_2", collectedResults.get(1).get("COLUMN_NAME").getStringValue()); + assertEquals( + String.valueOf(Types.VARCHAR), + collectedResults.get(1).get("DATA_TYPE").getStringValue()); // Default type for null arg + assertEquals("another_valid_arg", collectedResults.get(2).get("COLUMN_NAME").getStringValue()); + } + + private FieldValueList createProcedureColumnRowForSortTest( + String cat, + String schem, + String procName, + String specName, + String colName, + int ordinal, + FieldList schemaFields) { + List values = new ArrayList<>(20); + values.add(dbMetadata.createStringFieldValue(cat)); // 1. PROC_CAT + values.add(dbMetadata.createStringFieldValue(schem)); // 2. PROC_SCHEM + values.add(dbMetadata.createStringFieldValue(procName)); // 3. PROC_NAME + values.add(dbMetadata.createStringFieldValue(colName)); // 4. COLUMN_NAME + values.add( + dbMetadata.createLongFieldValue( + (long) DatabaseMetaData.procedureColumnIn)); // 5. COLUMN_TYPE + values.add(dbMetadata.createLongFieldValue((long) Types.VARCHAR)); // 6. DATA_TYPE + values.add(dbMetadata.createStringFieldValue("VARCHAR")); // 7. TYPE_NAME + values.add(dbMetadata.createNullFieldValue()); // 8. PRECISION + values.add(dbMetadata.createNullFieldValue()); // 9. LENGTH + values.add(dbMetadata.createNullFieldValue()); // 10. SCALE + values.add(dbMetadata.createNullFieldValue()); // 11. RADIX + values.add( + dbMetadata.createLongFieldValue((long) DatabaseMetaData.procedureNullable)); // 12. NULLABLE + values.add(dbMetadata.createStringFieldValue("Remark for " + colName)); // 13. REMARKS + values.add(dbMetadata.createNullFieldValue()); // 14. COLUMN_DEF + values.add(dbMetadata.createNullFieldValue()); // 15. SQL_DATA_TYPE + values.add(dbMetadata.createNullFieldValue()); // 16. SQL_DATETIME_SUB + values.add(dbMetadata.createNullFieldValue()); // 17. CHAR_OCTET_LENGTH + values.add(dbMetadata.createLongFieldValue((long) ordinal)); // 18. ORDINAL_POSITION + values.add(dbMetadata.createStringFieldValue("YES")); // 19. IS_NULLABLE + values.add(dbMetadata.createStringFieldValue(specName)); // 20. 
+ return FieldValueList.of(values, schemaFields);
+ }
+
+ @Test
+ public void testDefineGetProcedureColumnsComparator() {
+ Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+ FieldList schemaFields = resultSchema.getFields();
+ List<FieldValueList> results = new ArrayList<>();
+
+ // Order: PROC_CAT, PROC_SCHEM, PROC_NAME, SPECIFIC_NAME, COLUMN_NAME
+ results.add(
+ createProcedureColumnRowForSortTest(
+ "cat_b", "sch_y", "proc_1", "proc_1_spec", "param_a", 1, schemaFields));
+ results.add(
+ createProcedureColumnRowForSortTest(
+ "cat_a", "sch_z", "proc_alpha", "proc_alpha_spec", "arg_z", 2, schemaFields));
+ results.add(
+ createProcedureColumnRowForSortTest(
+ "cat_a",
+ "sch_z",
+ "proc_alpha",
+ "proc_alpha_spec",
+ "arg_m",
+ 1,
+ schemaFields)); // Same proc, different col
+ results.add(
+ createProcedureColumnRowForSortTest(
+ null, "sch_x", "proc_beta", "proc_beta_spec", "col_first", 1, schemaFields));
+ results.add(
+ createProcedureColumnRowForSortTest(
+ "cat_a", null, "proc_gamma", "proc_gamma_spec", "input1", 1, schemaFields));
+
+ Comparator<FieldValueList> comparator =
+ dbMetadata.defineGetProcedureColumnsComparator(schemaFields);
+ assertNotNull(comparator);
+ dbMetadata.sortResults(results, comparator, "getProcedureColumns", dbMetadata.LOG);
+
+ assertEquals(5, results.size());
+
+ // 1. Null cat, sch_x, proc_beta, proc_beta_spec, col_first
+ assertTrue(results.get(0).get("PROCEDURE_CAT").isNull());
+ assertEquals("sch_x", results.get(0).get("PROCEDURE_SCHEM").getStringValue());
+ assertEquals("proc_beta", results.get(0).get("PROCEDURE_NAME").getStringValue());
+ assertEquals("proc_beta_spec", results.get(0).get("SPECIFIC_NAME").getStringValue());
+ assertEquals("col_first", results.get(0).get("COLUMN_NAME").getStringValue());
+
+ // 2. cat_a, Null schem, proc_gamma, proc_gamma_spec, input1
+ assertEquals("cat_a", results.get(1).get("PROCEDURE_CAT").getStringValue());
+ assertTrue(results.get(1).get("PROCEDURE_SCHEM").isNull());
+ assertEquals("proc_gamma", results.get(1).get("PROCEDURE_NAME").getStringValue());
+ assertEquals("proc_gamma_spec", results.get(1).get("SPECIFIC_NAME").getStringValue());
+ assertEquals("input1", results.get(1).get("COLUMN_NAME").getStringValue());
+
+ // 3. cat_a, sch_z, proc_alpha, proc_alpha_spec, arg_m (m before z)
+ assertEquals("cat_a", results.get(2).get("PROCEDURE_CAT").getStringValue());
+ assertEquals("sch_z", results.get(2).get("PROCEDURE_SCHEM").getStringValue());
+ assertEquals("proc_alpha", results.get(2).get("PROCEDURE_NAME").getStringValue());
+ assertEquals("proc_alpha_spec", results.get(2).get("SPECIFIC_NAME").getStringValue());
+ assertEquals("arg_m", results.get(2).get("COLUMN_NAME").getStringValue());
+
+ // 4. cat_a, sch_z, proc_alpha, proc_alpha_spec, arg_z
+ assertEquals("cat_a", results.get(3).get("PROCEDURE_CAT").getStringValue());
+ assertEquals("sch_z", results.get(3).get("PROCEDURE_SCHEM").getStringValue());
+ assertEquals("proc_alpha", results.get(3).get("PROCEDURE_NAME").getStringValue());
+ assertEquals("proc_alpha_spec", results.get(3).get("SPECIFIC_NAME").getStringValue());
+ assertEquals("arg_z", results.get(3).get("COLUMN_NAME").getStringValue());
+
+ // 5. cat_b, sch_y, proc_1, proc_1_spec, param_a
+ assertEquals("cat_b", results.get(4).get("PROCEDURE_CAT").getStringValue());
+ assertEquals("sch_y", results.get(4).get("PROCEDURE_SCHEM").getStringValue());
+ assertEquals("proc_1", results.get(4).get("PROCEDURE_NAME").getStringValue());
+ assertEquals("proc_1_spec", results.get(4).get("SPECIFIC_NAME").getStringValue());
+ assertEquals("param_a", results.get(4).get("COLUMN_NAME").getStringValue());
+ }
+
+ @Test
+ public void testListMatchingProcedureIdsFromDatasets() throws Exception {
+ String catalog = "test-proj";
+ String schema1Name = "dataset1";
+ String schema2Name = "dataset2";
+ Dataset dataset1 = mockBigQueryDataset(catalog, schema1Name);
+ Dataset dataset2 = mockBigQueryDataset(catalog, schema2Name);
+ List<Dataset> datasetsToScan = Arrays.asList(dataset1, dataset2);
+
+ Routine proc1_ds1 = mockBigQueryRoutine(catalog, schema1Name, "proc_a", "PROCEDURE", "desc a");
+ Routine func1_ds1 = mockBigQueryRoutine(catalog, schema1Name, "func_b", "FUNCTION", "desc b");
+ Routine proc2_ds2 = mockBigQueryRoutine(catalog, schema2Name, "proc_c", "PROCEDURE", "desc c");
+
+ Page<Routine> page1 = mock(Page.class);
+ when(page1.iterateAll()).thenReturn(Arrays.asList(proc1_ds1, func1_ds1));
+ when(bigqueryClient.listRoutines(eq(dataset1.getDatasetId()), any(RoutineListOption.class)))
+ .thenReturn(page1);
+
+ Page<Routine> page2 = mock(Page.class);
+ when(page2.iterateAll()).thenReturn(Collections.singletonList(proc2_ds2));
+ when(bigqueryClient.listRoutines(eq(dataset2.getDatasetId()), any(RoutineListOption.class)))
+ .thenReturn(page2);
+
+ ExecutorService mockExecutor = mock(ExecutorService.class);
+ doAnswer(
+ invocation -> {
+ Callable<?> callable = invocation.getArgument(0);
+ @SuppressWarnings("unchecked") // Suppress warning for raw Future mock
+ Future mockedFuture = mock(Future.class);
+
+ try {
+ Object result = callable.call();
+ doReturn(result).when(mockedFuture).get();
+ } catch (InterruptedException interruptedException) {
+ doThrow(interruptedException).when(mockedFuture).get();
+ } catch (Exception e) {
+ doThrow(new ExecutionException(e)).when(mockedFuture).get();
+ }
+ return mockedFuture;
+ })
+ .when(mockExecutor)
+ .submit(any(Callable.class));
+
+ List<RoutineId> resultIds =
+ dbMetadata.listMatchingProcedureIdsFromDatasets(
+ datasetsToScan, null, null, mockExecutor, catalog, dbMetadata.LOG);
+
+ assertEquals(2, resultIds.size());
+ assertTrue(resultIds.contains(proc1_ds1.getRoutineId()));
+ assertTrue(resultIds.contains(proc2_ds2.getRoutineId()));
+ assertFalse(resultIds.contains(func1_ds1.getRoutineId())); // Should not contain functions
+
+ verify(mockExecutor, times(2)).submit(any(Callable.class));
+ }
+
+ @Test
+ public void testSubmitProcedureArgumentProcessingJobs_Basic() throws InterruptedException {
+ String catalog = "p";
+ String schemaName = "d";
+ RoutineArgument arg1 = mockRoutineArgument("arg1_name", StandardSQLTypeName.STRING, "IN");
+ Routine proc1 =
+ mockBigQueryRoutineWithArgs(
+ catalog, schemaName, "proc1", "PROCEDURE", "desc1", Collections.singletonList(arg1));
+ Routine func1 =
+ mockBigQueryRoutineWithArgs(
+ catalog,
+ schemaName,
+ "func1",
+ "FUNCTION",
+ "desc_func",
+ Collections.emptyList()); // Should be skipped
+ Routine proc2 =
+ mockBigQueryRoutineWithArgs(
+ catalog, schemaName, "proc2", "PROCEDURE", "desc2", Collections.emptyList());
+
+ List<Routine> fullRoutines = Arrays.asList(proc1, func1, proc2);
+ Pattern columnNameRegex = null;
+ List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+ Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+ FieldList resultSchemaFields = resultSchema.getFields();
+
+ ExecutorService mockExecutor = mock(ExecutorService.class);
+ List<Future<?>> processingTaskFutures = new ArrayList<>();
+
+ // Capture the runnable submitted to the executor
+ List<Runnable> submittedRunnables = new ArrayList<>();
+ doAnswer(
+ invocation -> {
+ Runnable runnable = invocation.getArgument(0);
+ submittedRunnables.add(runnable);
+ Future<?> future = mock(Future.class);
+ return future;
+ })
+ .when(mockExecutor)
+ .submit(any(Runnable.class));
+
+ dbMetadata.submitProcedureArgumentProcessingJobs(
+ fullRoutines,
+ columnNameRegex,
+ collectedResults,
+ resultSchemaFields,
+ mockExecutor,
+ processingTaskFutures,
+ dbMetadata.LOG);
+
+ verify(mockExecutor, times(2)).submit(any(Runnable.class));
+ assertEquals(2, processingTaskFutures.size());
+ }
+
+ @Test
+ public void testDefineGetTableTypesSchema() {
+ Schema schema = BigQueryDatabaseMetaData.defineGetTableTypesSchema();
+
+ assertNotNull("Schema should not be null", schema);
+ FieldList fields = schema.getFields();
+ assertEquals("Should have one column", 1, fields.size());
+
+ Field tableTypeField = fields.get("TABLE_TYPE");
+ assertNotNull("TABLE_TYPE field should exist", tableTypeField);
+ assertEquals("Field name should be TABLE_TYPE", "TABLE_TYPE", tableTypeField.getName());
+ assertEquals(
+ "Field type should be STRING",
+ StandardSQLTypeName.STRING,
+ tableTypeField.getType().getStandardType());
+ assertEquals("Field mode should be REQUIRED", Field.Mode.REQUIRED, tableTypeField.getMode());
+ }
+
+ @Test
+ public void testPrepareGetTableTypesRows() {
+ Schema schema = BigQueryDatabaseMetaData.defineGetTableTypesSchema();
+ List<FieldValueList> rows = BigQueryDatabaseMetaData.prepareGetTableTypesRows(schema);
+
+ assertNotNull("Rows list should not be null", rows);
+ String[] expectedTableTypes = {"EXTERNAL", "MATERIALIZED VIEW", "SNAPSHOT", "TABLE", "VIEW"};
+ assertEquals(
+ "Should have " + expectedTableTypes.length + " rows",
+ expectedTableTypes.length,
+ rows.size());
+
+ Set<String> foundTypes = new HashSet<>();
+ for (int i = 0; i < rows.size(); i++) {
+ FieldValueList row = rows.get(i);
+ assertEquals("Row " + i + " should have 1 field value", 1, row.size());
+ assertFalse("FieldValue in row " + i + " should not be SQL NULL", row.get(0).isNull());
+
+ String tableType = row.get(0).getStringValue();
+ foundTypes.add(tableType);
+ }
+
+ assertEquals(
+ "All expected table types should be present and correctly mapped",
+ new HashSet<>(Arrays.asList(expectedTableTypes)),
+ foundTypes);
+ }
+
+ @Test
+ public void testGetTableTypes() throws SQLException {
+ try (ResultSet rs = dbMetadata.getTableTypes()) {
+ assertNotNull("ResultSet from getTableTypes() should not be null", rs);
+
+ ResultSetMetaData rsmd = rs.getMetaData();
+ assertNotNull("ResultSetMetaData should not be null", rsmd);
+ assertEquals("Should have one column", 1, rsmd.getColumnCount());
+ assertEquals("Column name should be TABLE_TYPE", "TABLE_TYPE", rsmd.getColumnName(1));
+ assertEquals("Column type should be NVARCHAR", Types.NVARCHAR, rsmd.getColumnType(1));
+
+ List<String> actualTableTypes = new ArrayList<>();
+ while (rs.next()) {
+ actualTableTypes.add(rs.getString("TABLE_TYPE"));
+ }
+
+ String[] expectedTableTypes = {"EXTERNAL", "MATERIALIZED VIEW", "SNAPSHOT", "TABLE", "VIEW"};
+ assertEquals(
+ "Number of table types should match", expectedTableTypes.length, actualTableTypes.size());
+
+ Set<String> expectedSet = new HashSet<>(Arrays.asList(expectedTableTypes));
+ Set<String> actualSet = new
HashSet<>(actualTableTypes); + assertEquals( + "All expected table types should be present in the ResultSet", expectedSet, actualSet); + } + } + + @Test + public void testDefineGetSuperTablesSchema() { + Schema schema = dbMetadata.defineGetSuperTablesSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 4 fields", 4, fields.size()); + + Field tableCat = fields.get("TABLE_CAT"); + assertNotNull(tableCat); + assertEquals("TABLE_CAT", tableCat.getName()); + assertEquals(StandardSQLTypeName.STRING, tableCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, tableCat.getMode()); + + Field tableSchem = fields.get("TABLE_SCHEM"); + assertNotNull(tableSchem); + assertEquals("TABLE_SCHEM", tableSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, tableSchem.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, tableSchem.getMode()); + + Field tableName = fields.get("TABLE_NAME"); + assertNotNull(tableName); + assertEquals("TABLE_NAME", tableName.getName()); + assertEquals(StandardSQLTypeName.STRING, tableName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, tableName.getMode()); + + Field superTableName = fields.get("SUPERTABLE_NAME"); + assertNotNull(superTableName); + assertEquals("SUPERTABLE_NAME", superTableName.getName()); + assertEquals(StandardSQLTypeName.STRING, superTableName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, superTableName.getMode()); + } + + @Test + public void testGetSuperTables_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException { + try (ResultSet rs = + dbMetadata.getSuperTables("testCatalog", "testSchemaPattern", "testTableNamePattern")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty (next() should return false)", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", metaData); + assertEquals("ResultSetMetaData should have 4 columns", 4, metaData.getColumnCount()); + + // Column 1: TABLE_CAT + assertEquals("TABLE_CAT", metaData.getColumnName(1)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(1)); // Assuming STRING maps to VARCHAR + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1)); + + // Column 2: TABLE_SCHEM + assertEquals("TABLE_SCHEM", metaData.getColumnName(2)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(2)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(2)); + + // Column 3: TABLE_NAME + assertEquals("TABLE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); // REQUIRED + + // Column 4: SUPERTABLE_NAME + assertEquals("SUPERTABLE_NAME", metaData.getColumnName(4)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); // REQUIRED + } + } + + @Test + public void testDefineGetSuperTypesSchema() { + Schema schema = dbMetadata.defineGetSuperTypesSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 6 fields", 6, fields.size()); + + Field typeCat = fields.get("TYPE_CAT"); + assertNotNull(typeCat); + assertEquals("TYPE_CAT", typeCat.getName()); + assertEquals(StandardSQLTypeName.STRING, typeCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, 
typeCat.getMode()); + + Field typeSchem = fields.get("TYPE_SCHEM"); + assertNotNull(typeSchem); + assertEquals("TYPE_SCHEM", typeSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, typeSchem.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, typeSchem.getMode()); + + Field typeName = fields.get("TYPE_NAME"); + assertNotNull(typeName); + assertEquals("TYPE_NAME", typeName.getName()); + assertEquals(StandardSQLTypeName.STRING, typeName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, typeName.getMode()); + + Field superTypeCat = fields.get("SUPERTYPE_CAT"); + assertNotNull(superTypeCat); + assertEquals("SUPERTYPE_CAT", superTypeCat.getName()); + assertEquals(StandardSQLTypeName.STRING, superTypeCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, superTypeCat.getMode()); + + Field superTypeSchem = fields.get("SUPERTYPE_SCHEM"); + assertNotNull(superTypeSchem); + assertEquals("SUPERTYPE_SCHEM", superTypeSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, superTypeSchem.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, superTypeSchem.getMode()); + + Field superTypeName = fields.get("SUPERTYPE_NAME"); + assertNotNull(superTypeName); + assertEquals("SUPERTYPE_NAME", superTypeName.getName()); + assertEquals(StandardSQLTypeName.STRING, superTypeName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, superTypeName.getMode()); + } + + @Test + public void testGetSuperTypes_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException { + try (ResultSet rs = + dbMetadata.getSuperTypes("testCatalog", "testSchemaPattern", "testTypeNamePattern")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty (next() should return false)", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", metaData); + assertEquals("ResultSetMetaData should have 6 columns", 6, metaData.getColumnCount()); + + // Column 1: TYPE_CAT + assertEquals("TYPE_CAT", metaData.getColumnName(1)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(1)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1)); + + // Column 2: TYPE_SCHEM + assertEquals("TYPE_SCHEM", metaData.getColumnName(2)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(2)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(2)); + + // Column 3: TYPE_NAME + assertEquals("TYPE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + // Column 4: SUPERTYPE_CAT + assertEquals("SUPERTYPE_CAT", metaData.getColumnName(4)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(4)); + + // Column 5: SUPERTYPE_SCHEM + assertEquals("SUPERTYPE_SCHEM", metaData.getColumnName(5)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(5)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(5)); + + // Column 6: SUPERTYPE_NAME + assertEquals("SUPERTYPE_NAME", metaData.getColumnName(6)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(6)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6)); + } + } + + @Test + public void testDefineGetAttributesSchema() { + Schema schema = dbMetadata.defineGetAttributesSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + 
assertEquals("Schema should have 21 fields", 21, fields.size()); + + assertEquals("TYPE_CAT", fields.get(0).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(0).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode()); + + assertEquals("ATTR_NAME", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("DATA_TYPE", fields.get(4).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(4).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode()); + + assertEquals("ORDINAL_POSITION", fields.get(15).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(15).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(15).getMode()); + + assertEquals("IS_NULLABLE", fields.get(16).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(16).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(16).getMode()); + + assertEquals("SOURCE_DATA_TYPE", fields.get(20).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(20).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(20).getMode()); + } + + @Test + public void testGetAttributes_ReturnsEmptyResultSet() throws SQLException { + try (ResultSet rs = + dbMetadata.getAttributes("testCat", "testSchema", "testType", "testAttr%")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 21 columns", 21, metaData.getColumnCount()); + + assertEquals("TYPE_CAT", metaData.getColumnName(1)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(1)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1)); + + assertEquals("ATTR_NAME", metaData.getColumnName(4)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); + + assertEquals("DATA_TYPE", metaData.getColumnName(5)); + assertEquals(Types.BIGINT, metaData.getColumnType(5)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5)); + + assertEquals("ORDINAL_POSITION", metaData.getColumnName(16)); + assertEquals(Types.BIGINT, metaData.getColumnType(16)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(16)); + + assertEquals("IS_NULLABLE", metaData.getColumnName(17)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(17)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(17)); + + assertEquals("SOURCE_DATA_TYPE", metaData.getColumnName(21)); + assertEquals(Types.BIGINT, metaData.getColumnType(21)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(21)); + } + } + + @Test + public void testDefineGetBestRowIdentifierSchema() { + Schema schema = dbMetadata.defineGetBestRowIdentifierSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 8 fields", 8, fields.size()); + + assertEquals("SCOPE", fields.get(0).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(0).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(0).getMode()); + + assertEquals("COLUMN_NAME", fields.get(1).getName()); + assertEquals(StandardSQLTypeName.STRING, 
fields.get(1).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(1).getMode()); + + assertEquals("DATA_TYPE", fields.get(2).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(2).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode()); + + assertEquals("BUFFER_LENGTH", fields.get(5).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(5).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(5).getMode()); + + assertEquals("DECIMAL_DIGITS", fields.get(6).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(6).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(6).getMode()); + + assertEquals("PSEUDO_COLUMN", fields.get(7).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(7).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(7).getMode()); + } + + @Test + public void testGetBestRowIdentifier_ReturnsEmptyResultSetWithCorrectMetadata() + throws SQLException { + int testScope = DatabaseMetaData.bestRowSession; + boolean testNullable = true; + + try (ResultSet rs = + dbMetadata.getBestRowIdentifier( + "testCat", "testSchema", "testTable", testScope, testNullable)) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 8 columns", 8, metaData.getColumnCount()); + + assertEquals("SCOPE", metaData.getColumnName(1)); + assertEquals(Types.BIGINT, metaData.getColumnType(1)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(1)); + + assertEquals("COLUMN_NAME", metaData.getColumnName(2)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(2)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(2)); + + assertEquals("DATA_TYPE", metaData.getColumnName(3)); + assertEquals(Types.BIGINT, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("BUFFER_LENGTH", metaData.getColumnName(6)); + assertEquals(Types.BIGINT, metaData.getColumnType(6)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(6)); + + assertEquals("PSEUDO_COLUMN", metaData.getColumnName(8)); + assertEquals(Types.BIGINT, metaData.getColumnType(8)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(8)); + } + } + + @Test + public void testDefineGetUDTsSchema() { + Schema schema = dbMetadata.defineGetUDTsSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 7 fields", 7, fields.size()); + + assertEquals("TYPE_NAME", fields.get("TYPE_NAME").getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get("TYPE_NAME").getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get("TYPE_NAME").getMode()); + + assertEquals("CLASS_NAME", fields.get("CLASS_NAME").getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get("CLASS_NAME").getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get("CLASS_NAME").getMode()); + + assertEquals("DATA_TYPE", fields.get("DATA_TYPE").getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get("DATA_TYPE").getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get("DATA_TYPE").getMode()); + + assertEquals("BASE_TYPE", fields.get("BASE_TYPE").getName()); + assertEquals(StandardSQLTypeName.INT64, 
fields.get("BASE_TYPE").getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get("BASE_TYPE").getMode()); + } + + @Test + public void testGetUDTs_ReturnsEmptyResultSet() throws SQLException { + int[] types = {Types.STRUCT, Types.DISTINCT}; + try (ResultSet rs = dbMetadata.getUDTs("testCat", "testSchema%", "testType%", types)) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 7 columns", 7, metaData.getColumnCount()); + + assertEquals("TYPE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("DATA_TYPE", metaData.getColumnName(5)); + assertEquals(Types.BIGINT, metaData.getColumnType(5)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5)); + + assertEquals("BASE_TYPE", metaData.getColumnName(7)); + assertEquals(Types.BIGINT, metaData.getColumnType(7)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(7)); + } + } + + @Test + public void testDefineGetIndexInfoSchema() { + Schema schema = dbMetadata.defineGetIndexInfoSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 13 fields", 13, fields.size()); + + assertEquals("TABLE_NAME", fields.get(2).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(2).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode()); + + assertEquals("NON_UNIQUE", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.BOOL, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("TYPE", fields.get(6).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(6).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(6).getMode()); + + assertEquals("CARDINALITY", fields.get(10).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(10).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(10).getMode()); + } + + @Test + public void testGetIndexInfo_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException { + try (ResultSet rs = dbMetadata.getIndexInfo("testCat", "testSchema", "testTable", true, true)) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 13 columns", 13, metaData.getColumnCount()); + + assertEquals("TABLE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("NON_UNIQUE", metaData.getColumnName(4)); + assertEquals(Types.BOOLEAN, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); + + assertEquals("TYPE", metaData.getColumnName(7)); + assertEquals(Types.BIGINT, metaData.getColumnType(7)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(7)); + + assertEquals("CARDINALITY", metaData.getColumnName(11)); + assertEquals(Types.BIGINT, metaData.getColumnType(11)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(11)); + } + } + + @Test + public void testDefineGetTablePrivilegesSchema() { 
+ Schema schema = dbMetadata.defineGetTablePrivilegesSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 7 fields", 7, fields.size()); + + assertEquals("TABLE_CAT", fields.get(0).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(0).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode()); + + assertEquals("TABLE_NAME", fields.get(2).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(2).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode()); + + assertEquals("GRANTEE", fields.get(4).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(4).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode()); + + assertEquals("PRIVILEGE", fields.get(5).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(5).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(5).getMode()); + + assertEquals("IS_GRANTABLE", fields.get(6).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(6).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(6).getMode()); + } + + @Test + public void testGetTablePrivileges_ReturnsEmptyResultSetWithCorrectMetadata() + throws SQLException { + try (ResultSet rs = dbMetadata.getTablePrivileges("testCat", "testSchema%", "testTable%")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 7 columns", 7, metaData.getColumnCount()); + + assertEquals("TABLE_CAT", metaData.getColumnName(1)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(1)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1)); + + assertEquals("TABLE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("GRANTEE", metaData.getColumnName(5)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(5)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5)); + + assertEquals("PRIVILEGE", metaData.getColumnName(6)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(6)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6)); + + assertEquals("IS_GRANTABLE", metaData.getColumnName(7)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(7)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(7)); + } + } + + @Test + public void testDefineGetColumnPrivilegesSchema() { + Schema schema = dbMetadata.defineGetColumnPrivilegesSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 8 fields", 8, fields.size()); + + assertEquals("TABLE_SCHEM", fields.get(1).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(1).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(1).getMode()); + + assertEquals("COLUMN_NAME", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("GRANTOR", fields.get(4).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(4).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, 
fields.get(4).getMode()); + + assertEquals("PRIVILEGE", fields.get(6).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(6).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(6).getMode()); + + assertEquals("IS_GRANTABLE", fields.get(7).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(7).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(7).getMode()); + } + + @Test + public void testGetColumnPrivileges_ReturnsEmptyResultSetWithCorrectMetadata() + throws SQLException { + try (ResultSet rs = + dbMetadata.getColumnPrivileges("testCat", "testSchema", "testTable", "testCol%")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 8 columns", 8, metaData.getColumnCount()); + + assertEquals("TABLE_SCHEM", metaData.getColumnName(2)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(2)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(2)); + + assertEquals("COLUMN_NAME", metaData.getColumnName(4)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); + + assertEquals("GRANTOR", metaData.getColumnName(5)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(5)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(5)); + + assertEquals("PRIVILEGE", metaData.getColumnName(7)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(7)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(7)); + + assertEquals("IS_GRANTABLE", metaData.getColumnName(8)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(8)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(8)); + } + } + + @Test + public void testDefineGetVersionColumnsSchema() { + Schema schema = dbMetadata.defineGetVersionColumnsSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 8 fields", 8, fields.size()); + + assertEquals("SCOPE", fields.get(0).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(0).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode()); + + assertEquals("COLUMN_NAME", fields.get(1).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(1).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(1).getMode()); + + assertEquals("DATA_TYPE", fields.get(2).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(2).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode()); + + assertEquals("BUFFER_LENGTH", fields.get(5).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(5).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(5).getMode()); + + assertEquals("PSEUDO_COLUMN", fields.get(7).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(7).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(7).getMode()); + } + + @Test + public void testGetVersionColumns_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException { + try (ResultSet rs = dbMetadata.getVersionColumns("testCat", "testSchema", "testTable")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = 
rs.getMetaData(); + assertEquals("ResultSetMetaData should have 8 columns", 8, metaData.getColumnCount()); + + assertEquals("SCOPE", metaData.getColumnName(1)); + assertEquals(Types.BIGINT, metaData.getColumnType(1)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1)); + + assertEquals("COLUMN_NAME", metaData.getColumnName(2)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(2)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(2)); + + assertEquals("DATA_TYPE", metaData.getColumnName(3)); + assertEquals(Types.BIGINT, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("BUFFER_LENGTH", metaData.getColumnName(6)); + assertEquals(Types.BIGINT, metaData.getColumnType(6)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6)); + + assertEquals("PSEUDO_COLUMN", metaData.getColumnName(8)); + assertEquals(Types.BIGINT, metaData.getColumnType(8)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(8)); + } + } + + @Test + public void testDefineGetPseudoColumnsSchema() { + Schema schema = dbMetadata.defineGetPseudoColumnsSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 12 fields", 12, fields.size()); + + assertEquals("TABLE_NAME", fields.get(2).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(2).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode()); + + assertEquals("COLUMN_NAME", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("DATA_TYPE", fields.get(4).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(4).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode()); + + assertEquals("COLUMN_SIZE", fields.get(5).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(5).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(5).getMode()); + + assertEquals("COLUMN_USAGE", fields.get(8).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(8).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(8).getMode()); + + assertEquals("IS_NULLABLE", fields.get(11).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(11).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(11).getMode()); + } + + @Test + public void testGetPseudoColumns_ReturnsEmptyResultSet() throws SQLException { + try (ResultSet rs = dbMetadata.getPseudoColumns("testCat", "testSchema%", "testTable%", "%")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 12 columns", 12, metaData.getColumnCount()); + + assertEquals("TABLE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("COLUMN_NAME", metaData.getColumnName(4)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); + + assertEquals("DATA_TYPE", metaData.getColumnName(5)); + assertEquals(Types.BIGINT, metaData.getColumnType(5)); + 
assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5));
+
+ assertEquals("COLUMN_SIZE", metaData.getColumnName(6));
+ assertEquals(Types.BIGINT, metaData.getColumnType(6));
+ assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6));
+
+ assertEquals("COLUMN_USAGE", metaData.getColumnName(9));
+ assertEquals(Types.NVARCHAR, metaData.getColumnType(9));
+ assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(9));
+
+ assertEquals("IS_NULLABLE", metaData.getColumnName(12));
+ assertEquals(Types.NVARCHAR, metaData.getColumnType(12));
+ assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(12));
+ }
+ }
+
+ @Test
+ public void testDefineGetFunctionsSchema() {
+ Schema schema = dbMetadata.defineGetFunctionsSchema();
+ assertNotNull(schema);
+ FieldList fields = schema.getFields();
+ assertEquals(6, fields.size());
+
+ Field funcCat = fields.get("FUNCTION_CAT");
+ assertEquals("FUNCTION_CAT", funcCat.getName());
+ assertEquals(StandardSQLTypeName.STRING, funcCat.getType().getStandardType());
+ assertEquals(Field.Mode.NULLABLE, funcCat.getMode());
+
+ Field funcSchem = fields.get("FUNCTION_SCHEM");
+ assertEquals("FUNCTION_SCHEM", funcSchem.getName());
+ assertEquals(StandardSQLTypeName.STRING, funcSchem.getType().getStandardType());
+ assertEquals(Field.Mode.NULLABLE, funcSchem.getMode());
+
+ Field funcName = fields.get("FUNCTION_NAME");
+ assertEquals("FUNCTION_NAME", funcName.getName());
+ assertEquals(StandardSQLTypeName.STRING, funcName.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, funcName.getMode());
+
+ Field remarks = fields.get("REMARKS");
+ assertEquals("REMARKS", remarks.getName());
+ assertEquals(StandardSQLTypeName.STRING, remarks.getType().getStandardType());
+ assertEquals(Field.Mode.NULLABLE, remarks.getMode());
+
+ Field funcType = fields.get("FUNCTION_TYPE");
+ assertEquals("FUNCTION_TYPE", funcType.getName());
+ assertEquals(StandardSQLTypeName.INT64, funcType.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, funcType.getMode());
+
+ Field specificName = fields.get("SPECIFIC_NAME");
+ assertEquals("SPECIFIC_NAME", specificName.getName());
+ assertEquals(StandardSQLTypeName.STRING, specificName.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, specificName.getMode());
+ }
+
+ @Test
+ public void testProcessFunctionInfo_ScalarFunction() {
+ Schema resultSchema = dbMetadata.defineGetFunctionsSchema();
+ FieldList resultSchemaFields = resultSchema.getFields();
+ List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+
+ String catalog = "proj-func";
+ String schema = "dataset_func";
+ String name = "my_scalar_func";
+ String description = "A test scalar function";
+
+ Routine routine = mockBigQueryRoutine(catalog, schema, name, "SCALAR_FUNCTION", description);
+
+ dbMetadata.processFunctionInfo(routine, collectedResults, resultSchemaFields);
+
+ assertEquals(1, collectedResults.size());
+ FieldValueList row = collectedResults.get(0);
+ assertNotNull(row);
+ assertEquals(6, row.size());
+ assertEquals(catalog, row.get("FUNCTION_CAT").getStringValue());
+ assertEquals(schema, row.get("FUNCTION_SCHEM").getStringValue());
+ assertEquals(name, row.get("FUNCTION_NAME").getStringValue());
+ assertEquals(description, row.get("REMARKS").getStringValue());
+ assertEquals(
+ String.valueOf(DatabaseMetaData.functionResultUnknown),
+ row.get("FUNCTION_TYPE").getStringValue());
+ assertEquals(name, row.get("SPECIFIC_NAME").getStringValue());
+ }
+
+ @Test
+ public void testProcessFunctionInfo_TableFunction() {
+ @Test
+ public void testProcessFunctionInfo_TableFunction() {
+ Schema resultSchema = dbMetadata.defineGetFunctionsSchema();
+ FieldList resultSchemaFields = resultSchema.getFields();
+ List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+
+ String catalog = "proj-func";
+ String schema = "dataset_func";
+ String name = "my_table_func";
+ String description = "A test Table function";
+
+ Routine routine = mockBigQueryRoutine(catalog, schema, name, "TABLE_FUNCTION", description);
+
+ dbMetadata.processFunctionInfo(routine, collectedResults, resultSchemaFields);
+
+ assertEquals(1, collectedResults.size());
+ FieldValueList row = collectedResults.get(0);
+ assertNotNull(row);
+ assertEquals(6, row.size());
+ assertEquals(catalog, row.get("FUNCTION_CAT").getStringValue());
+ assertEquals(schema, row.get("FUNCTION_SCHEM").getStringValue());
+ assertEquals(name, row.get("FUNCTION_NAME").getStringValue());
+ assertEquals(description, row.get("REMARKS").getStringValue());
+ assertEquals(
+ String.valueOf(DatabaseMetaData.functionReturnsTable),
+ row.get("FUNCTION_TYPE").getStringValue());
+ assertEquals(name, row.get("SPECIFIC_NAME").getStringValue());
+ }
+
+ private FieldValueList createFunctionRow(
+ String cat,
+ String schem,
+ String name,
+ String specName,
+ int funcType,
+ FieldList schemaFields) {
+ List<FieldValue> values = new ArrayList<>();
+ values.add(dbMetadata.createStringFieldValue(cat)); // FUNCTION_CAT
+ values.add(dbMetadata.createStringFieldValue(schem)); // FUNCTION_SCHEM
+ values.add(dbMetadata.createStringFieldValue(name)); // FUNCTION_NAME
+ values.add(dbMetadata.createStringFieldValue("Remark for " + name)); // REMARKS
+ values.add(dbMetadata.createLongFieldValue((long) funcType)); // FUNCTION_TYPE
+ values.add(dbMetadata.createStringFieldValue(specName)); // SPECIFIC_NAME
+ return FieldValueList.of(values, schemaFields);
+ }
+
+ @Test
+ public void testSortResults_Functions() {
+ Schema resultSchema = dbMetadata.defineGetFunctionsSchema();
+ FieldList schemaFields = resultSchema.getFields();
+ List<FieldValueList> results = new ArrayList<>();
+
+ // Add rows in unsorted order
+ results.add(
+ createFunctionRow(
+ "cat_b",
+ "sch_c",
+ "func_1",
+ "func_1_spec",
+ DatabaseMetaData.functionResultUnknown,
+ schemaFields));
+ results.add(
+ createFunctionRow(
+ "cat_a",
+ "sch_z",
+ "func_alpha",
+ "func_alpha_spec",
+ DatabaseMetaData.functionReturnsTable,
+ schemaFields));
+ results.add(
+ createFunctionRow(
+ "cat_a",
+ "sch_z",
+ "func_beta",
+ "func_beta_spec",
+ DatabaseMetaData.functionResultUnknown,
+ schemaFields));
+ results.add(
+ createFunctionRow(
+ null,
+ "sch_y",
+ "func_gamma",
+ "func_gamma_spec",
+ DatabaseMetaData.functionReturnsTable,
+ schemaFields));
+ results.add(
+ createFunctionRow(
+ "cat_a",
+ null,
+ "func_delta",
+ "func_delta_spec",
+ DatabaseMetaData.functionResultUnknown,
+ schemaFields));
+ results.add(
+ createFunctionRow(
+ "cat_a",
+ "sch_z",
+ "func_alpha",
+ "func_alpha_spec_older",
+ DatabaseMetaData.functionReturnsTable,
+ schemaFields));
+
+ Comparator<FieldValueList> comparator = dbMetadata.defineGetFunctionsComparator(schemaFields);
+ dbMetadata.sortResults(results, comparator, "getFunctions", dbMetadata.LOG);
+
+ // Expected Order: Null Cat, then Cat A (Null Schem, then sch_z), then Cat B. Within that, Name,
+ // then Spec Name.
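+ // Nulls sort ahead of non-null values at each comparator key, which is why the
+ // null-catalog and null-schema rows lead the expected ordering below.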
+ assertEquals(6, results.size());
+
+ // Check order based on the comparator (CAT, SCHEM, NAME, SPECIFIC_NAME)
+ assertEquals("func_gamma", results.get(0).get("FUNCTION_NAME").getStringValue()); // null cat
+ assertEquals(
+ "func_delta", results.get(1).get("FUNCTION_NAME").getStringValue()); // cat_a, null schem
+ assertEquals(
+ "func_alpha",
+ results.get(2).get("FUNCTION_NAME").getStringValue()); // cat_a, sch_z, alpha, spec
+ assertEquals(
+ "func_alpha",
+ results.get(3).get("FUNCTION_NAME").getStringValue()); // cat_a, sch_z, alpha, spec_older
+ assertEquals(
+ "func_beta", results.get(4).get("FUNCTION_NAME").getStringValue()); // cat_a, sch_z, beta
+ assertEquals("func_1", results.get(5).get("FUNCTION_NAME").getStringValue()); // cat_b
+ }
+
+ @Test
+ public void testDefineGetTypeInfoSchema() {
+ Schema schema = dbMetadata.defineGetTypeInfoSchema();
+ assertNotNull("Schema should not be null", schema);
+ FieldList fields = schema.getFields();
+ assertEquals("Schema should have 18 fields", 18, fields.size());
+
+ Field typeName = fields.get("TYPE_NAME");
+ assertNotNull(typeName);
+ assertEquals("TYPE_NAME", typeName.getName());
+ assertEquals(StandardSQLTypeName.STRING, typeName.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, typeName.getMode());
+
+ Field dataType = fields.get("DATA_TYPE");
+ assertNotNull(dataType);
+ assertEquals("DATA_TYPE", dataType.getName());
+ assertEquals(StandardSQLTypeName.INT64, dataType.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, dataType.getMode());
+
+ Field precision = fields.get("PRECISION");
+ assertNotNull(precision);
+ assertEquals("PRECISION", precision.getName());
+ assertEquals(StandardSQLTypeName.INT64, precision.getType().getStandardType());
+ assertEquals(Field.Mode.NULLABLE, precision.getMode());
+
+ Field caseSensitive = fields.get("CASE_SENSITIVE");
+ assertNotNull(caseSensitive);
+ assertEquals("CASE_SENSITIVE", caseSensitive.getName());
+ assertEquals(StandardSQLTypeName.BOOL, caseSensitive.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, caseSensitive.getMode());
+
+ Field numPrecRadix = fields.get("NUM_PREC_RADIX");
+ assertNotNull(numPrecRadix);
+ assertEquals("NUM_PREC_RADIX", numPrecRadix.getName());
+ assertEquals(StandardSQLTypeName.INT64, numPrecRadix.getType().getStandardType());
+ assertEquals(Field.Mode.NULLABLE, numPrecRadix.getMode());
+ }
+
+ @Test
+ public void testPrepareGetTypeInfoRows() {
+ Schema typeInfoSchema = dbMetadata.defineGetTypeInfoSchema();
+ FieldList schemaFields = typeInfoSchema.getFields();
+ List<FieldValueList> rows = dbMetadata.prepareGetTypeInfoRows(schemaFields);
+
+ assertNotNull("Rows list should not be null", rows);
+ assertEquals("Should have 17 rows for 17 types", 17, rows.size());
+
+ // INT64 (should be BIGINT in JDBC)
+ Optional<FieldValueList> int64RowOpt =
+ rows.stream()
+ .filter(row -> "INT64".equals(row.get("TYPE_NAME").getStringValue()))
+ .findFirst();
+ assertTrue("INT64 type info row should exist", int64RowOpt.isPresent());
+ FieldValueList int64Row = int64RowOpt.get();
+ assertEquals(String.valueOf(Types.BIGINT), int64Row.get("DATA_TYPE").getStringValue());
+ assertEquals("19", int64Row.get("PRECISION").getStringValue());
+ assertEquals(
+ String.valueOf(DatabaseMetaData.typeNullable), int64Row.get("NULLABLE").getStringValue());
+ assertEquals("0", int64Row.get("CASE_SENSITIVE").getStringValue());
+ assertEquals("10", int64Row.get("NUM_PREC_RADIX").getStringValue());
+
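+ // JDBC maps INT64 to Types.BIGINT (precision 19, radix 10); types without a fixed
+ // numeric precision, such as BOOL and STRING, leave PRECISION and NUM_PREC_RADIX null.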
.filter(row -> "BOOL".equals(row.get("TYPE_NAME").getStringValue())) + .findFirst(); + assertTrue("BOOL type info row should exist", boolRowOpt.isPresent()); + FieldValueList boolRow = boolRowOpt.get(); + assertEquals(String.valueOf(Types.BOOLEAN), boolRow.get("DATA_TYPE").getStringValue()); + assertEquals("1", boolRow.get("PRECISION").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.typeNullable), boolRow.get("NULLABLE").getStringValue()); + assertEquals("0", boolRow.get("CASE_SENSITIVE").getStringValue()); // false + assertTrue(boolRow.get("NUM_PREC_RADIX").isNull()); + + // STRING (should be NVARCHAR in JDBC) + Optional stringRowOpt = + rows.stream() + .filter(row -> "STRING".equals(row.get("TYPE_NAME").getStringValue())) + .findFirst(); + assertTrue("STRING type info row should exist", stringRowOpt.isPresent()); + FieldValueList stringRow = stringRowOpt.get(); + assertEquals(String.valueOf(Types.NVARCHAR), stringRow.get("DATA_TYPE").getStringValue()); + assertTrue(stringRow.get("PRECISION").isNull()); // Precision is null for STRING + assertEquals("'", stringRow.get("LITERAL_PREFIX").getStringValue()); + assertEquals("'", stringRow.get("LITERAL_SUFFIX").getStringValue()); + assertEquals("LENGTH", stringRow.get("CREATE_PARAMS").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.typeNullable), stringRow.get("NULLABLE").getStringValue()); + assertEquals("1", stringRow.get("CASE_SENSITIVE").getStringValue()); // true + assertTrue(stringRow.get("NUM_PREC_RADIX").isNull()); + } + + @Test + public void testGetTypeInfo() throws SQLException { + try (ResultSet rs = dbMetadata.getTypeInfo()) { + assertNotNull("ResultSet from getTypeInfo() should not be null", rs); + + ResultSetMetaData rsmd = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", rsmd); + assertEquals("Should have 18 columns", 18, rsmd.getColumnCount()); + assertEquals("TYPE_NAME", rsmd.getColumnName(1)); + assertEquals("DATA_TYPE", rsmd.getColumnName(2)); + assertEquals("PRECISION", rsmd.getColumnName(3)); + + List dataTypes = new ArrayList<>(); + int rowCount = 0; + while (rs.next()) { + rowCount++; + dataTypes.add(rs.getInt("DATA_TYPE")); + if ("INT64".equals(rs.getString("TYPE_NAME"))) { + assertEquals(Types.BIGINT, rs.getInt("DATA_TYPE")); + assertEquals(19, rs.getInt("PRECISION")); + } + } + assertEquals("Should have 17 rows for 17 types", 17, rowCount); + + // Verify sorting by DATA_TYPE + List sortedDataTypes = new ArrayList<>(dataTypes); + Collections.sort(sortedDataTypes); + assertEquals("Results should be sorted by DATA_TYPE", sortedDataTypes, dataTypes); + } + } + + @Test + public void testDefineGetFunctionColumnsSchema() { + Schema schema = dbMetadata.defineGetFunctionColumnsSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(17, fields.size()); + + assertEquals("FUNCTION_CAT", fields.get(0).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(0).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode()); + + assertEquals("COLUMN_NAME", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("COLUMN_TYPE", fields.get(4).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(4).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode()); + + assertEquals("ORDINAL_POSITION", fields.get(14).getName()); + 
+ assertEquals(StandardSQLTypeName.INT64, fields.get(14).getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, fields.get(14).getMode());
+
+ assertEquals("SPECIFIC_NAME", fields.get(16).getName());
+ assertEquals(StandardSQLTypeName.STRING, fields.get(16).getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, fields.get(16).getMode());
+ }
+
+ @Test
+ public void testCreateFunctionColumnRow() {
+ StandardSQLDataType stringType = mockStandardSQLDataType(StandardSQLTypeName.STRING);
+ List<FieldValue> row =
+ dbMetadata.createFunctionColumnRow(
+ "cat",
+ "sch",
+ "func",
+ "func_spec",
+ "param_in",
+ DatabaseMetaData.functionColumnIn,
+ stringType,
+ 1);
+
+ assertEquals(17, row.size());
+ assertEquals("cat", row.get(0).getStringValue());
+ assertEquals("sch", row.get(1).getStringValue());
+ assertEquals("func", row.get(2).getStringValue());
+ assertEquals("param_in", row.get(3).getStringValue());
+ assertEquals(String.valueOf(DatabaseMetaData.functionColumnIn), row.get(4).getStringValue());
+ assertEquals(String.valueOf(Types.NVARCHAR), row.get(5).getStringValue()); // DATA_TYPE
+ assertEquals("NVARCHAR", row.get(6).getStringValue()); // TYPE_NAME
+ assertTrue(row.get(7).isNull()); // PRECISION
+ assertTrue(row.get(8).isNull()); // LENGTH
+ assertTrue(row.get(9).isNull()); // SCALE
+ assertTrue(row.get(10).isNull()); // RADIX
+ assertEquals(
+ String.valueOf(DatabaseMetaData.functionNullableUnknown),
+ row.get(11).getStringValue()); // NULLABLE
+ assertTrue(row.get(12).isNull()); // REMARKS
+ assertTrue(row.get(13).isNull()); // CHAR_OCTET_LENGTH (should be columnSize)
+ assertEquals("1", row.get(14).getStringValue()); // ORDINAL_POSITION
+ assertEquals("", row.get(15).getStringValue()); // IS_NULLABLE
+ assertEquals("func_spec", row.get(16).getStringValue()); // SPECIFIC_NAME
+ }
+
+ @Test
+ public void testProcessFunctionParametersAndReturnValue_ScalarFunctionWithArgs() {
+ Schema resultSchema = dbMetadata.defineGetFunctionColumnsSchema();
+ FieldList resultFields = resultSchema.getFields();
+ List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+
+ RoutineArgument arg1 = mockRoutineArgument("in_str", StandardSQLTypeName.STRING, "IN");
+ RoutineArgument arg2 = mockRoutineArgument("in_int", StandardSQLTypeName.INT64, "IN");
+ Routine scalarFunc =
+ mockBigQueryRoutineWithArgs(
+ "cat", "ds", "my_scalar", "SCALAR_FUNCTION", "desc", Arrays.asList(arg1, arg2));
+ when(scalarFunc.getReturnTableType()).thenReturn(null); // No return table for scalar
+
+ dbMetadata.processFunctionParametersAndReturnValue(
+ scalarFunc, null, collectedResults, resultFields);
+
+ assertEquals(2, collectedResults.size());
+ // First argument
+ assertEquals("in_str", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
+ assertEquals(
+ String.valueOf(DatabaseMetaData.functionColumnIn),
+ collectedResults.get(0).get("COLUMN_TYPE").getStringValue());
+ assertEquals("1", collectedResults.get(0).get("ORDINAL_POSITION").getStringValue());
+ // Second argument
+ assertEquals("in_int", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
+ assertEquals(
+ String.valueOf(DatabaseMetaData.functionColumnIn),
+ collectedResults.get(1).get("COLUMN_TYPE").getStringValue());
+ assertEquals("2", collectedResults.get(1).get("ORDINAL_POSITION").getStringValue());
+ }
+
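+ // COLUMN_TYPE distinguishes IN parameters (functionColumnIn) from result columns
+ // (functionColumnResult); ordinal positions start at 1 within each group.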
+ @Test
+ public void testProcessFunctionParametersAndReturnValue_TableFunctionWithReturnTable() {
+ Schema resultSchema = dbMetadata.defineGetFunctionColumnsSchema();
+ FieldList resultFields = resultSchema.getFields();
+ List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+
+ StandardSQLField returnCol1 = mockStandardSQLField("out_id", StandardSQLTypeName.INT64);
+ StandardSQLField returnCol2 = mockStandardSQLField("out_val", StandardSQLTypeName.STRING);
+ StandardSQLTableType returnTable =
+ mockStandardSQLTableType(Arrays.asList(returnCol1, returnCol2));
+
+ Routine tableFunc =
+ mockBigQueryRoutineWithArgs(
+ "cat", "ds", "my_table_func", "TABLE_FUNCTION", "desc", Collections.emptyList());
+ when(tableFunc.getReturnTableType()).thenReturn(returnTable);
+
+ dbMetadata.processFunctionParametersAndReturnValue(
+ tableFunc, null, collectedResults, resultFields);
+
+ assertEquals(2, collectedResults.size());
+ // First return column
+ assertEquals("out_id", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
+ assertEquals(
+ String.valueOf(DatabaseMetaData.functionColumnResult),
+ collectedResults.get(0).get("COLUMN_TYPE").getStringValue());
+ assertEquals("1", collectedResults.get(0).get("ORDINAL_POSITION").getStringValue());
+ // Second return column
+ assertEquals("out_val", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
+ assertEquals(
+ String.valueOf(DatabaseMetaData.functionColumnResult),
+ collectedResults.get(1).get("COLUMN_TYPE").getStringValue());
+ assertEquals("2", collectedResults.get(1).get("ORDINAL_POSITION").getStringValue());
+ }
+
+ @Test
+ public void testProcessFunctionParametersAndReturnValue_ColumnNameFilter() {
+ Schema resultSchema = dbMetadata.defineGetFunctionColumnsSchema();
+ FieldList resultFields = resultSchema.getFields();
+ List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+ Pattern columnNamePattern = Pattern.compile("id_.*"); // Match columns starting with "id_"
+
+ RoutineArgument arg1 = mockRoutineArgument("id_arg", StandardSQLTypeName.INT64, "IN");
+ RoutineArgument arg2 = mockRoutineArgument("name_arg", StandardSQLTypeName.STRING, "IN");
+ StandardSQLField returnCol1 = mockStandardSQLField("id_return", StandardSQLTypeName.BOOL);
+ StandardSQLField returnCol2 = mockStandardSQLField("value_return", StandardSQLTypeName.FLOAT64);
+ StandardSQLTableType returnTable =
+ mockStandardSQLTableType(Arrays.asList(returnCol1, returnCol2));
+
+ Routine tableFunc =
+ mockBigQueryRoutineWithArgs(
+ "cat", "ds", "filter_func", "TABLE_FUNCTION", "desc", Arrays.asList(arg1, arg2));
+ when(tableFunc.getReturnTableType()).thenReturn(returnTable);
+
+ dbMetadata.processFunctionParametersAndReturnValue(
+ tableFunc, columnNamePattern, collectedResults, resultFields);
+
+ assertEquals(2, collectedResults.size()); // Should match id_arg and id_return
+ assertEquals("id_return", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
+ assertEquals("id_arg", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
+ }
+
+ @Test
+ public void testDefineGetClientInfoPropertiesSchema() {
+ Schema schema = dbMetadata.defineGetClientInfoPropertiesSchema();
+ assertNotNull("Schema should not be null", schema);
+ FieldList fields = schema.getFields();
+ assertEquals("Schema should have 4 fields", 4, fields.size());
+
+ Field nameField = fields.get("NAME");
+ assertNotNull(nameField);
+ assertEquals("NAME", nameField.getName());
+ assertEquals(StandardSQLTypeName.STRING, nameField.getType().getStandardType());
+ assertEquals(Field.Mode.REQUIRED, nameField.getMode());
+
+ Field maxLenField = fields.get("MAX_LEN");
+ assertNotNull(maxLenField);
+ assertEquals("MAX_LEN", maxLenField.getName());
+ assertEquals(StandardSQLTypeName.INT64, maxLenField.getType().getStandardType());
+ assertEquals(Field.Mode.NULLABLE, maxLenField.getMode());
+
+ Field defaultValueField = fields.get("DEFAULT_VALUE");
+ assertNotNull(defaultValueField);
+ assertEquals("DEFAULT_VALUE", defaultValueField.getName());
+ assertEquals(StandardSQLTypeName.STRING, defaultValueField.getType().getStandardType());
+ assertEquals(Field.Mode.NULLABLE, defaultValueField.getMode());
+
+ Field descriptionField = fields.get("DESCRIPTION");
+ assertNotNull(descriptionField);
+ assertEquals("DESCRIPTION", descriptionField.getName());
+ assertEquals(StandardSQLTypeName.STRING, descriptionField.getType().getStandardType());
+ assertEquals(Field.Mode.NULLABLE, descriptionField.getMode());
+ }
+
+ @Test
+ public void testGetClientInfoProperties() throws SQLException {
+ try (ResultSet rs = dbMetadata.getClientInfoProperties()) {
+ assertNotNull("ResultSet from getClientInfoProperties() should not be null", rs);
+
+ ResultSetMetaData rsmd = rs.getMetaData();
+ assertNotNull("ResultSetMetaData should not be null", rsmd);
+ assertEquals("Should have 4 columns", 4, rsmd.getColumnCount());
+ assertEquals("NAME", rsmd.getColumnName(1));
+ assertEquals(Types.NVARCHAR, rsmd.getColumnType(1));
+ assertEquals("MAX_LEN", rsmd.getColumnName(2));
+ assertEquals(Types.BIGINT, rsmd.getColumnType(2));
+ assertEquals("DEFAULT_VALUE", rsmd.getColumnName(3));
+ assertEquals(Types.NVARCHAR, rsmd.getColumnType(3));
+ assertEquals("DESCRIPTION", rsmd.getColumnName(4));
+ assertEquals(Types.NVARCHAR, rsmd.getColumnType(4));
+
+ List<Map<String, Object>> actualRows = new ArrayList<>();
+ while (rs.next()) {
+ Map<String, Object> row = new HashMap<>();
+ row.put("NAME", rs.getString("NAME"));
+ row.put("MAX_LEN", rs.getLong("MAX_LEN"));
+ row.put("DEFAULT_VALUE", rs.getObject("DEFAULT_VALUE"));
+ row.put("DESCRIPTION", rs.getString("DESCRIPTION"));
+ actualRows.add(row);
+ }
+
+ assertEquals("Should return 3 client info properties", 3, actualRows.size());
+
+ Map<String, Object> appNameRow = actualRows.get(0);
+ assertEquals("ApplicationName", appNameRow.get("NAME"));
+ assertEquals(25L, appNameRow.get("MAX_LEN"));
+ assertNull(appNameRow.get("DEFAULT_VALUE"));
+ assertEquals(
+ "The name of the application currently utilizing the connection.",
+ appNameRow.get("DESCRIPTION"));
+
+ Map<String, Object> clientHostnameRow = actualRows.get(1);
+ assertEquals("ClientHostname", clientHostnameRow.get("NAME"));
+
+ Map<String, Object> clientUserRow = actualRows.get(2);
+ assertEquals("ClientUser", clientUserRow.get("NAME"));
+ }
+ }
+
+ @Test
+ public void testDefineGetCatalogsSchema() {
+ Schema schema = dbMetadata.defineGetCatalogsSchema();
+ assertNotNull("Schema should not be null", schema);
+ FieldList fields = schema.getFields();
+ assertEquals("Should have one column", 1, fields.size());
+
+ Field tableCatField = fields.get("TABLE_CAT");
+ assertNotNull("TABLE_CAT field should exist", tableCatField);
+ assertEquals("Field name should be TABLE_CAT", "TABLE_CAT", tableCatField.getName());
+ assertEquals(
+ "Field type should be STRING",
+ StandardSQLTypeName.STRING,
+ tableCatField.getType().getStandardType());
+ assertEquals("Field mode should be REQUIRED", Field.Mode.REQUIRED, tableCatField.getMode());
+ }
+
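+ // The connection's project appears to serve as the single TABLE_CAT entry; when no
+ // catalog name is resolved, getCatalogs() produces an empty result set (see below).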
+ @Test
+ public void testPrepareGetCatalogsRows() {
+ Schema catalogsSchema = dbMetadata.defineGetCatalogsSchema();
+ FieldList schemaFields = catalogsSchema.getFields();
+
+ // Test with a valid catalog name
+ List<String> testCatalogName = new ArrayList<>();
+ testCatalogName.add("test_catalog");
+
+ List<FieldValueList> rowsWithCatalog =
+ dbMetadata.prepareGetCatalogsRows(schemaFields, testCatalogName);
+
+ assertNotNull("Rows list should not be null when catalog name is provided", rowsWithCatalog);
+ assertEquals("Should have one row when a catalog name is provided", 1, rowsWithCatalog.size());
+ FieldValueList row = rowsWithCatalog.get(0);
+ assertEquals("Row should have 1 field value", 1, row.size());
+ assertFalse("FieldValue in row should not be SQL NULL", row.get(0).isNull());
+ assertEquals(
+ "TABLE_CAT should match the provided catalog name",
+ testCatalogName.get(0),
+ row.get(0).getStringValue());
+
+ // Test with empty catalog name list
+ List<String> testEmptyCatalogList = new ArrayList<>();
+ List<FieldValueList> rowsWithNullCatalog =
+ dbMetadata.prepareGetCatalogsRows(schemaFields, testEmptyCatalogList);
+ assertNotNull("Rows list should not be null when catalog name is null", rowsWithNullCatalog);
+ assertTrue("Should have zero rows when catalog name is null", rowsWithNullCatalog.isEmpty());
+ }
+
+ @Test
+ public void testGetSchemas_NoArgs_DelegatesCorrectly() {
+ BigQueryDatabaseMetaData spiedDbMetadata = spy(dbMetadata);
+ ResultSet mockResultSet = mock(ResultSet.class);
+ doReturn(mockResultSet).when(spiedDbMetadata).getSchemas(null, null);
+
+ ResultSet rs = spiedDbMetadata.getSchemas();
+
+ assertSame(
+ "The returned ResultSet should be the one from the two-argument method", mockResultSet, rs);
+ verify(spiedDbMetadata, times(1)).getSchemas(null, null);
+ }
+
+ // Non-Resultset DatabaseMetadata tests
+
+ @Test
+ public void testIdentifierQuoteString() {
+ String actual = dbMetadata.getIdentifierQuoteString();
+ assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_QUOTED_IDENTIFIER, actual);
+ }
+
+ @Test
+ public void testSQLKeyWords() {
+ String actual = dbMetadata.getSQLKeywords();
+ assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_RESERVED_KEYWORDS, actual);
+ }
+
+ @Test
+ public void testNumericFunctions() {
+ String actual = dbMetadata.getNumericFunctions();
+ assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_NUMERIC_FNS, actual);
+ }
+
+ @Test
+ public void testStringFunctions() {
+ String actual = dbMetadata.getStringFunctions();
+ assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_STRING_FNS, actual);
+ }
+
+ @Test
+ public void testTimeAndDateFunctions() {
+ String actual = dbMetadata.getTimeDateFunctions();
+ assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_TIME_DATE_FNS, actual);
+ }
+
+ @Test
+ public void testSystemFunctions() {
+ String actual = dbMetadata.getSystemFunctions();
+ assertNull(actual);
+ }
+
+ @Test
+ public void testSearchStringEscape() {
+ String actual = dbMetadata.getSearchStringEscape();
+ assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_ESCAPE, actual);
+ }
+
+ @Test
+ public void testExtraNameChars() {
+ String actual = dbMetadata.getExtraNameCharacters();
+ assertNull(actual);
+ }
+
+ @Test
+ public void testCatalogSeparator() {
+ String actual = dbMetadata.getCatalogSeparator();
+ assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_CATALOG_SEPARATOR, actual);
+ }
+
+ @Test
+ public void testMaxCharLiteralLength() {
+ int actual = dbMetadata.getMaxCharLiteralLength();
+ assertEquals(0, actual);
+ }
+
+ @Test
+ public void testMaxBinaryLiteralLength() {
+ int actual = dbMetadata.getMaxBinaryLiteralLength();
+ assertEquals(0, actual);
+ }
+
+ @Test
+ public void testMaxColumnNameLength() {
+ int actual = dbMetadata.getMaxColumnNameLength();
+ assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_MAX_COL_NAME_LEN, actual);
+ }
+
+ @Test
+ public void testMaxColumnsInTable() {
+ int actual =
dbMetadata.getMaxColumnsInTable(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_MAX_COLS_PER_TABLE, actual); + } + + @Test + public void testMaxColumnsInSelect() { + int actual = dbMetadata.getMaxColumnsInSelect(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnsInGroupBy() { + int actual = dbMetadata.getMaxColumnsInGroupBy(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnsInOrderBy() { + int actual = dbMetadata.getMaxColumnsInOrderBy(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnsInIndex() { + int actual = dbMetadata.getMaxColumnsInIndex(); + assertEquals(0, actual); + } + + @Test + public void testSupportsResultSetHoldabilitySupported() { + assertTrue(dbMetadata.supportsResultSetHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT)); + } + + @Test + public void testSupportsResultSetHoldabilityNotSupported() { + assertFalse(dbMetadata.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT)); + } + + @Test + public void testSupportsResultSetHoldabilityInvalid() { + assertFalse(dbMetadata.supportsResultSetHoldability(-1)); + } + + @Test + public void testResultSetHoldability() { + int actual = dbMetadata.getResultSetHoldability(); + assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, actual); + } + + @Test + public void testDatabaseMajorVersion() { + int actual = dbMetadata.getDatabaseMajorVersion(); + assertEquals(2, actual); + } + + @Test + public void testDatabaseMinorVersion() { + int actual = dbMetadata.getDatabaseMinorVersion(); + assertEquals(0, actual); + } + + @Test + public void testAllProceduresAreCallable() throws SQLException { + assertFalse(dbMetadata.allProceduresAreCallable()); + } + + @Test + public void testAllTablesAreSelectable() throws SQLException { + assertTrue(dbMetadata.allTablesAreSelectable()); + } + + @Test + public void testGetDriverVersionInfoFromProperties() { + Properties props = new Properties(); + String expectedVersionString = "0.0.0"; + int expectedMajor = 0; + int expectedMinor = 0; + + try (InputStream input = + getClass().getResourceAsStream("/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (input != null) { + props.load(input); + expectedVersionString = props.getProperty("version.jdbc"); + if (expectedVersionString != null) { + String[] parts = expectedVersionString.split("\\."); + expectedMajor = Integer.parseInt(parts[0]); + expectedMinor = Integer.parseInt(parts[1].replaceAll("[^0-9].*", "")); + } + } + } catch (IOException | NumberFormatException e) { + fail( + "Error reading or parsing dependencies.properties for testing driver version: " + + e.getMessage()); + } + assertEquals(expectedVersionString, dbMetadata.getDriverVersion()); + assertEquals(expectedMajor, dbMetadata.getDriverMajorVersion()); + assertEquals(expectedMinor, dbMetadata.getDriverMinorVersion()); + } + + @Test + public void testSupportsUnion() throws SQLException { + assertTrue(dbMetadata.supportsUnion()); + } + + @Test + public void testSupportsUnionAll() throws SQLException { + assertTrue(dbMetadata.supportsUnionAll()); + } + + @Test + public void testGetMaxConnections() throws SQLException { + assertEquals(0, dbMetadata.getMaxConnections()); + } + + @Test + public void testGetMaxCursorNameLength() throws SQLException { + assertEquals(0, dbMetadata.getMaxCursorNameLength()); + } + + @Test + public void testGetMaxIndexLength() throws SQLException { + assertEquals(0, dbMetadata.getMaxIndexLength()); + } + + @Test + public void testGetMaxSchemaNameLength() throws SQLException { + 
assertEquals(1024, dbMetadata.getMaxSchemaNameLength()); + } + + @Test + public void testGetMaxProcedureNameLength() throws SQLException { + assertEquals(256, dbMetadata.getMaxProcedureNameLength()); + } + + @Test + public void testGetMaxCatalogNameLength() throws SQLException { + assertEquals(30, dbMetadata.getMaxCatalogNameLength()); + } + + @Test + public void testGetMaxRowSize() throws SQLException { + assertEquals(0, dbMetadata.getMaxRowSize()); + } + + @Test + public void testDoesMaxRowSizeIncludeBlobs() { + assertFalse(dbMetadata.doesMaxRowSizeIncludeBlobs()); + } + + @Test + public void testGetMaxStatementLength() throws SQLException { + assertEquals(0, dbMetadata.getMaxStatementLength()); + } + + @Test + public void testGetMaxStatements() throws SQLException { + assertEquals(0, dbMetadata.getMaxStatements()); + } + + @Test + public void testGetMaxTableNameLength() throws SQLException { + assertEquals(1024, dbMetadata.getMaxTableNameLength()); + } + + @Test + public void testGetMaxTablesInSelect() throws SQLException { + assertEquals(1000, dbMetadata.getMaxTablesInSelect()); + } + + @Test + public void testGetDefaultTransactionIsolation() throws SQLException { + assertEquals(8, dbMetadata.getDefaultTransactionIsolation()); + } + + @Test + public void testSupportsResultSetType() throws SQLException { + assertTrue(dbMetadata.supportsResultSetType(ResultSet.TYPE_FORWARD_ONLY)); + assertFalse(dbMetadata.supportsResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)); + assertFalse(dbMetadata.supportsResultSetType(ResultSet.TYPE_SCROLL_SENSITIVE)); + } + + @Test + public void testSupportsResultSetConcurrency() throws SQLException { + assertTrue( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)); + assertFalse( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE)); + assertFalse( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)); + assertFalse( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE)); + } + + @Test + public void testGetSQLStateType() throws SQLException { + assertEquals(DatabaseMetaData.sqlStateSQL, dbMetadata.getSQLStateType()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java new file mode 100644 index 0000000000..d9cc0efb77 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java @@ -0,0 +1,221 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+ package com.google.cloud.bigquery.jdbc;
+
+ import static com.google.cloud.bigquery.jdbc.BigQueryTypeCoercer.INSTANCE;
+ import static com.google.common.truth.Truth.assertThat;
+
+ import java.math.BigDecimal;
+ import java.math.BigInteger;
+ import java.util.Arrays;
+ import java.util.Collection;
+ import org.junit.Test;
+ import org.junit.runner.RunWith;
+ import org.junit.runners.Parameterized;
+ import org.junit.runners.Parameterized.Parameters;
+
+ @RunWith(Parameterized.class)
+ public class BigQueryDefaultCoercionsTest {
+
+ private final BigQueryTypeCoercer bigQueryTypeCoercer;
+
+ public BigQueryDefaultCoercionsTest(String label, BigQueryTypeCoercer bigQueryTypeCoercer) {
+ this.bigQueryTypeCoercer = bigQueryTypeCoercer;
+ }
+
+ @Parameters(name = "{index}: {0}")
+ public static Collection<Object[]> data() {
+ return Arrays.asList(
+ new Object[][] {
+ {"default BigQueryTypeCoercer", INSTANCE},
+ {"customizable BigQueryTypeCoercer", BigQueryTypeCoercer.builder().build()}
+ });
+ }
+
+ @Test
+ public void stringToBoolean() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Boolean.class, "true")).isTrue();
+ assertThat(bigQueryTypeCoercer.coerceTo(Boolean.class, "false")).isFalse();
+ }
+
+ @Test
+ public void stringToInteger() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, "3452148")).isEqualTo(3452148);
+ }
+
+ @Test
+ public void stringToBigInteger() {
+ assertThat(bigQueryTypeCoercer.coerceTo(BigInteger.class, "2147483647456"))
+ .isEqualTo(new BigInteger("2147483647456"));
+ }
+
+ @Test
+ public void stringToLong() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Long.class, "2147483647456"))
+ .isEqualTo(Long.valueOf("2147483647456"));
+ }
+
+ @Test
+ public void stringToDouble() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Double.class, "2147483647456.56684593495"))
+ .isEqualTo(Double.valueOf("2147483647456.56684593495"));
+ }
+
+ @Test
+ public void stringToBigDecimal() {
+ assertThat(bigQueryTypeCoercer.coerceTo(BigDecimal.class, "2147483647456.56684593495"))
+ .isEqualTo(new BigDecimal("2147483647456.56684593495"));
+ }
+
+ @Test
+ public void booleanToString() {
+ assertThat(bigQueryTypeCoercer.coerceTo(String.class, true)).isEqualTo("true");
+ assertThat(bigQueryTypeCoercer.coerceTo(String.class, false)).isEqualTo("false");
+ }
+
+ @Test
+ public void booleanToInteger() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, true)).isEqualTo(1);
+ assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, false)).isEqualTo(0);
+ }
+
+ @Test
+ public void longToInteger() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, 2147483647L)).isEqualTo(2147483647);
+ }
+
+ @Test
+ public void longToShort() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Short.class, 32000L)).isEqualTo((short) 32000);
+ }
+
+ @Test
+ public void longToByte() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Byte.class, 127L)).isEqualTo((byte) 127);
+ }
+
+ @Test
+ public void longToDouble() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Double.class, 2147483647456L))
+ .isEqualTo(Double.valueOf("2147483647456"));
+ }
+
+ @Test
+ public void longToString() {
+ assertThat(bigQueryTypeCoercer.coerceTo(String.class, 2147483647456L))
+ .isEqualTo("2147483647456");
+ }
+
+ @Test
+ public void doubleToFloat() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Float.class, Double.valueOf("4567.213245")))
+ .isEqualTo(Float.valueOf("4567.213245"));
+ }
+
+ @Test
+ public void doubleToLong() {
+ assertThat(bigQueryTypeCoercer.coerceTo(Long.class, Double.valueOf("2147483647456.213245")))
+ .isEqualTo(2147483647456L);
+ }
+
+ @Test
+ 
public void doubleToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, Double.valueOf("21474836.213245"))) + .isEqualTo(21474836); + } + + @Test + public void doubleToBigDecimal() { + assertThat(bigQueryTypeCoercer.coerceTo(BigDecimal.class, Double.valueOf("21474836.213245"))) + .isEqualTo(new BigDecimal("21474836.213245")); + } + + @Test + public void doubleToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, Double.valueOf("21474836.213245"))) + .isEqualTo("2.1474836213245E7"); + } + + @Test + public void floatToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, 62356.45f)).isEqualTo(62356); + } + + @Test + public void floatToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, 62356.45f)) + .isEqualTo(Double.valueOf(62356.45f)); + } + + @Test + public void floatToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, 62356.45f)).isEqualTo("62356.45"); + } + + @Test + public void bigIntegerToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, new BigInteger("2147483647"))) + .isEqualTo(2147483647L); + } + + @Test + public void bigIntegerToBigDecimal() { + assertThat(bigQueryTypeCoercer.coerceTo(BigDecimal.class, new BigInteger("2147483647"))) + .isEqualTo(new BigDecimal("2147483647")); + } + + @Test + public void bigIntegerToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, new BigInteger("2147483647"))) + .isEqualTo("2147483647"); + } + + @Test + public void bigDecimalToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(2147483647.74356); + } + + @Test + public void bigDecimalToBigInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(BigInteger.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(new BigInteger("2147483647")); + } + + @Test + public void bigDecimalToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(2147483647); + } + + @Test + public void bigDecimalToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(2147483647L); + } + + @Test + public void bigDecimalToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, new BigDecimal("2147483647.74356"))) + .isEqualTo("2147483647.74356"); + } + + @Test + public void nullToBoolean() { + assertThat(bigQueryTypeCoercer.coerceTo(Boolean.class, null)).isFalse(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java new file mode 100644 index 0000000000..125de54dfa --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java @@ -0,0 +1,97 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertThrows; + +import java.sql.Connection; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.util.Properties; +import java.util.logging.Logger; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryDriverTest { + + static BigQueryDriver bigQueryDriver; + + @Before + public void setUp() { + bigQueryDriver = BigQueryDriver.getRegisteredDriver(); + } + + @Test + public void testInvalidURLDoesNotConnect() { + assertThrows(IllegalArgumentException.class, () -> bigQueryDriver.connect("badURL.com", null)); + } + + @Test + public void testValidURLDoesConnect() throws SQLException { + Connection connection = + bigQueryDriver.connect( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;", + new Properties()); + assertThat(connection.isClosed()).isFalse(); + } + + @Test + public void testInvalidURLInAcceptsURLReturnsFalse() throws SQLException { + assertThat(bigQueryDriver.acceptsURL("badURL.com")).isFalse(); + } + + @Test + public void testValidURLInAcceptsURLReturnsTrue() throws SQLException { + assertThat(bigQueryDriver.acceptsURL("jdbc:bigquery:https://google.com:8080;projectId=123456")) + .isTrue(); + } + + @Test + public void testGetPropertyInfoReturnsValidProperties() { + DriverPropertyInfo[] res = + bigQueryDriver.getPropertyInfo( + "jdbc:bigquery:https://google.com:8080;projectId=123456;OAuthType=3", new Properties()); + int i = 0; + for (BigQueryConnectionProperty prop : BigQueryJdbcUrlUtility.VALID_PROPERTIES) { + assertThat(res[i].name).isEqualTo(prop.getName()); + i++; + } + } + + @Test + public void testGetMajorVersionMatchesDriverMajorVersion() { + assertThat(bigQueryDriver.getMajorVersion()).isEqualTo(0); + } + + @Test + public void testGetMinorVersionMatchesDriverMinorVersion() { + assertThat(bigQueryDriver.getMinorVersion()).isEqualTo(1); + } + + @Test + public void testGetParentLoggerReturnsLogger() { + assertThat(bigQueryDriver.getParentLogger()).isInstanceOf(Logger.class); + } + + @Test + public void testJDBCCompliantReturnsFalse() { + assertThat(bigQueryDriver.jdbcCompliant()).isFalse(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java new file mode 100644 index 0000000000..616f3bab97 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java @@ -0,0 +1,82 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.jdbc.utils.URIBuilder; + +public class BigQueryJdbcBaseTest { + + // This is a fake pkcs8 key generated specifically for unittests + protected static final String fake_pkcs8_key = + "-----BEGIN PRIVATE KEY-----\n" + + // + "MIIBUwIBADANBgkqhkiG9w0BAQEFAASCAT0wggE5AgEAAkEAnt6w5AMZBvOecsJ9\n" + + // + "4TeVz+GpAtBnTqkxWfxLJykkvb+V/3IhXr5Zw40y47RdoRly/QDFJz3Ac+nmwCSP\n" + + // + "8QW3GQIDAQABAkBPmdrd1COFFSnN7F9wKg65QyMQ0uUAR8v/f2cUbwwGuhwdMuGZ\n" + + // + "DPwgVZySxFKort7TfPru6NzbACL3EFAl9y9RAiEA7XPq5Tu+LOw4/CZFABykguBV\n" + + // + "8rYC+F72+GqkhvlGhZUCIQCrR2/zGIKqJSTKfQhKOteP7cx5dWrumHYNuC5InOGC\n" + + // + "dQIgM6bzgcntJHh+LNtmRw/z+UQzbgiJvN1re7426+VtocECIE7ejFxicviqNfDP\n" + + // + "9ltIES8Dj152hRDtP589qoJhSy5pAiAJot/kBQD8yFYMU1X02oi+6f8QqXxcHwZX\n" + + // + "2wK1Zawz/A==\n" + + // + "-----END PRIVATE KEY-----"; + + protected static URIBuilder getBaseUri() { + return new URIBuilder("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"); + } + + protected static URIBuilder getBaseUri(int authType) { + return getBaseUri().append("OAuthType", authType); + } + + protected static URIBuilder getBaseUri(int authType, String projectId) { + return getBaseUri(authType).append("PROJECT_ID", projectId); + } + + protected static URIBuilder getUriOAuthServiceAccount() { + return getBaseUri() + .append("OAuthType", 0) + .append("OAuthServiceAcctEmail", "service@account") + .append("OAuthPvtKey", fake_pkcs8_key); + } + + protected static URIBuilder getUriOAuthUserAccount() { + return getBaseUri() + .append("OAuthType", 1) + .append("OAuthClientId", "client@id") + .append("OAuthClientSecret", "client_secret"); + } + + protected static URIBuilder getUriOAuthToken() { + return getBaseUri().append("OAuthType", 2).append("OAuthAccessToken", "RedactedToken"); + } + + protected static URIBuilder getUriOAuthApplicationDefault() { + return getBaseUri().append("OAuthType", 3); + } + + protected static URIBuilder getUriOAuthExternal() { + return getBaseUri().append("OAuthType", 4).append("OAuthPvtKey", fake_pkcs8_key); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java new file mode 100644 index 0000000000..b7fc031621 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java @@ -0,0 +1,505 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+ package com.google.cloud.bigquery.jdbc;
+
+ import static com.google.common.truth.Truth.assertThat;
+ import static org.junit.Assert.assertEquals;
+ import static org.junit.Assert.assertFalse;
+ import static org.junit.Assert.assertNotNull;
+ import static org.junit.Assert.assertNull;
+ import static org.junit.Assert.assertTrue;
+
+ import com.google.auth.oauth2.GoogleCredentials;
+ import com.google.auth.oauth2.ImpersonatedCredentials;
+ import com.google.auth.oauth2.UserAuthorizer;
+ import com.google.auth.oauth2.UserCredentials;
+ import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+ import java.io.IOException;
+ import java.net.URI;
+ import java.net.URISyntaxException;
+ import java.net.URL;
+ import java.nio.file.Paths;
+ import java.security.PrivateKey;
+ import java.util.Collections;
+ import java.util.HashMap;
+ import java.util.Map;
+ import org.junit.Assert;
+ import org.junit.Ignore;
+ import org.junit.Test;
+
+ public class BigQueryJdbcOAuthUtilityTest extends BigQueryJdbcBaseTest {
+
+ private static final int USER_AUTH_PORT = 53737;
+ private static final String EXPECTED_USER_AUTH_URL =
+ "https://accounts.google.com/o/oauth2/auth?response_type=code&client_id=client_id&redirect_uri=http://localhost:"
+ + USER_AUTH_PORT
+ + "&scope=https://www.googleapis.com/auth/bigquery&state=test_state&access_type=offline&prompt=consent&login_hint=test_user&include_granted_scopes=true";
+
+ @Test
+ public void testParseOAuthPropsForAuthType0KeyfileOnly() {
+ Map<String, String> result =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;OAuthType=0;"
+ + "OAuthPvtKeyPath=C:\\SecureFiles\\ServiceKeyFile.p12;",
+ null);
+
+ assertThat(result.get("OAuthType")).isEqualTo("GOOGLE_SERVICE_ACCOUNT");
+ assertThat(result.get("OAuthPvtKeyPath")).isEqualTo("C:\\SecureFiles\\ServiceKeyFile.p12");
+ }
+
+ @Test
+ public void testParseOAuthPropsForAuthType0ViaEmail() {
+ Map<String, String> result =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;OAuthType=0;"
+ + "OAuthServiceAcctEmail=dummytest@dummytest.iam.gserviceaccount.com;"
+ + "OAuthPvtKey=RedactedKey;",
+ null);
+
+ assertThat(result.get("OAuthType")).isEqualTo("GOOGLE_SERVICE_ACCOUNT");
+ assertThat(result.get("OAuthServiceAcctEmail"))
+ .isEqualTo("dummytest@dummytest.iam.gserviceaccount.com");
+ assertThat(result.get("OAuthPvtKey")).isEqualTo("RedactedKey");
+ }
+
+ @Test
+ public void testInvalidTokenUriForAuthType0() {
+ String connectionString =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;OAuthType=0;"
+ + "OAuthServiceAcctEmail=dummytest@dummytest.iam.gserviceaccount.com;"
+ + "OAuthPvtKey="
+ + fake_pkcs8_key
+ + ";"
+ + "EndpointOverrides=OAuth2=brokenuri{};";
+ Map<String, String> oauthProperties =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(connectionString, null);
+ Map<String, String> overrideProperties =
+ BigQueryJdbcUrlUtility.parseOverrideProperties(connectionString, null);
+
+ try {
+ BigQueryJdbcOAuthUtility.getCredentials(oauthProperties, overrideProperties, null);
+ Assert.fail();
+ } catch (BigQueryJdbcRuntimeException e) {
+ assertThat(e.getMessage()).contains("java.net.URISyntaxException");
+ }
+ }
+
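+ // OAuthType values exercised below: 0 = service account, 1 = user account,
+ // 2 = pre-generated token, 3 = application default credentials, 4 = workforce
+ // identity federation (BYOID).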
+ "OAuthAccessToken=RedactedToken;", + null); + + assertThat(result.get("OAuthType")).isEqualTo("PRE_GENERATED_TOKEN"); + assertThat(result.get("OAuthAccessToken")).isEqualTo("RedactedToken"); + } + + @Test + public void testParseOAuthPropsForAuthType3() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=MyBigQueryProject;", + null); + + assertThat(result.get("OAuthType")).isEqualTo("APPLICATION_DEFAULT_CREDENTIALS"); + } + + @Test + public void testParseOAuthPropsForDefaultAuthType() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3", + null); + + assertThat(result.get("OAuthType")).isEqualTo("APPLICATION_DEFAULT_CREDENTIALS"); + } + + @Test + public void testGetCredentialsForPreGeneratedToken() { + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken;", + null); + + GoogleCredentials credentials = + BigQueryJdbcOAuthUtility.getCredentials(authProperties, Collections.EMPTY_MAP, null); + assertThat(credentials).isNotNull(); + } + + @Test + public void testGetCredentialsForPreGeneratedTokenTPC() throws IOException { + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken;" + + "universeDomain=testDomain;", + null); + Map stringStringMap = new HashMap<>(); + stringStringMap.put( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, "testDomain"); + Map overrideProperties = new HashMap<>(stringStringMap); + + GoogleCredentials credentials = + BigQueryJdbcOAuthUtility.getCredentials(authProperties, overrideProperties, null); + assertThat(credentials.getUniverseDomain()).isEqualTo("testDomain"); + } + + @Test + @Ignore // For running locally only similar to our other JDBC tests. 
+ @Test
+ @Ignore // For running locally only similar to our other JDBC tests.
+ public void testGetCredentialsForApplicationDefault() {
+ Map<String, String> authProperties =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "OAuthType=3;ProjectId=MyBigQueryProject;",
+ null);
+
+ GoogleCredentials credentials =
+ BigQueryJdbcOAuthUtility.getCredentials(authProperties, null, null);
+ assertThat(credentials).isNotNull();
+ }
+
+ @Test
+ public void testParseOAuthPropsForUserAuth() {
+ Map<String, String> authProperties =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "OAuthType=1;ProjectId=MyBigQueryProject;"
+ + "OAuthClientId=client;OAuthClientSecret=secret;",
+ null);
+
+ assertThat(authProperties.get("OAuthType")).isEqualTo("GOOGLE_USER_ACCOUNT");
+ assertThat(authProperties.get("OAuthClientId")).isEqualTo("client");
+ assertThat(authProperties.get("OAuthClientSecret")).isEqualTo("secret");
+ }
+
+ @Test
+ public void testGenerateUserAuthURL() {
+ try {
+ HashMap<String, String> authProperties = new HashMap<>();
+ authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "client_id");
+ authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "client_secret");
+
+ UserAuthorizer userAuthorizer =
+ BigQueryJdbcOAuthUtility.getUserAuthorizer(
+ authProperties, new HashMap<>(), USER_AUTH_PORT, null);
+
+ String userId = "test_user";
+ String state = "test_state";
+ URI baseURI = URI.create("http://example.com/foo");
+
+ URL authURL = userAuthorizer.getAuthorizationUrl(userId, state, baseURI);
+
+ assertThat(authURL.toString()).isEqualTo(EXPECTED_USER_AUTH_URL);
+ } catch (URISyntaxException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Test
+ public void testGenerateUserAuthURLOverrideOauthEndpoint() {
+ try {
+
+ URI overrideTokenSeverURI = new URI("https://oauth2-gsprivateall.p.googleapis.com/token");
+ String connectionString =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;OAuthType=1;"
+ + "OAuthClientId=client;OAuthClientSecret=secret;"
+ + "EndpointOverrides=OAuth2="
+ + overrideTokenSeverURI
+ + ";";
+ Map<String, String> authProperties =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(connectionString, null);
+ Map<String, String> overrideProperties =
+ BigQueryJdbcUrlUtility.parseOverrideProperties(connectionString, null);
+
+ UserAuthorizer userAuthorizer =
+ BigQueryJdbcOAuthUtility.getUserAuthorizer(
+ authProperties, overrideProperties, USER_AUTH_PORT, null);
+
+ assertThat(overrideTokenSeverURI).isEqualTo(userAuthorizer.toBuilder().getTokenServerUri());
+ } catch (URISyntaxException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Test
+ public void testParseOAuthPropsForRefreshToken() {
+ Map<String, String> authProperties =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "OAuthType=2;ProjectId=MyBigQueryProject;OAuthRefreshToken=token;"
+ + "OAuthClientId=client;OAuthClientSecret=secret;",
+ null);
+
+ assertThat(authProperties.get("OAuthType")).isEqualTo("PRE_GENERATED_TOKEN");
+ assertThat(authProperties.get("OAuthRefreshToken")).isEqualTo("token");
+ assertThat(authProperties.get("OAuthClientId")).isEqualTo("client");
+ assertThat(authProperties.get("OAuthClientSecret")).isEqualTo("secret");
+ }
+
+ @Test
+ public void testParseOverridePropsForRefreshTokenAuth() {
+ try {
+
+ String connectionString =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;OAuthType=2;OAuthRefreshToken=token;"
"OAuthClientId=client;OAuthClientSecret=secret;" + + "EndpointOverrides=Oauth2=https://oauth2-private.p.googleapis.com/token;"; + + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(connectionString, null); + Map overrideProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connectionString, null); + + UserCredentials userCredentials = + BigQueryJdbcOAuthUtility.getPreGeneratedRefreshTokenCredentials( + authProperties, overrideProperties, null); + + assertThat(userCredentials.toBuilder().getTokenServerUri()) + .isEqualTo(URI.create("https://oauth2-private.p.googleapis.com/token")); + + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testParseBYOIDProps() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:433;OAuthType=4;" + + "ProjectId=MyBigQueryProject;" + + "BYOID_AudienceUri=//iam.googleapis.com/locations/global/workforcePools/pool-id/providers/provider-id;" + + "BYOID_PoolUserProject=workforceProjectNumber;" + + "BYOID_CredentialSource={\"file\": \"C:\\\\Token.txt\"};" + + "BYOID_SA_Impersonation_Uri=testSA;" + + "BYOID_SubjectTokenType=urn:ietf:params:oauth:tokentype:jwt;" + + "BYOID_TokenUri=https://testuri.com/v1/token", + null); + + assertThat(result.get("BYOID_AudienceUri")) + .isEqualTo( + "//iam.googleapis.com/locations/global/workforcePools/pool-id/providers/provider-id"); + assertThat(result.get("BYOID_PoolUserProject")).isEqualTo("workforceProjectNumber"); + assertThat(result.get("BYOID_CredentialSource")).isEqualTo("{\"file\": \"C:\\\\Token.txt\"}"); + assertThat(result.get("BYOID_SA_Impersonation_Uri")).isEqualTo("testSA"); + assertThat(result.get("BYOID_SubjectTokenType")) + .isEqualTo("urn:ietf:params:oauth:tokentype:jwt"); + assertThat(result.get("BYOID_TokenUri")).isEqualTo("https://testuri.com/v1/token"); + } + + @Test + public void testParseOAuthProperties_UserAccount_RequestDriveScopeEnabled() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=1;OAuthClientId=redactedClientId;OAuthClientSecret=redactedClientSecret;" + + "RequestGoogleDriveScope=1;"; + Map properties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.getClass().getName()); + assertEquals( + String.valueOf(BigQueryJdbcOAuthUtility.AuthType.GOOGLE_USER_ACCOUNT), + properties.get(BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME)); + assertEquals( + "redactedClientId", properties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)); + assertEquals( + "redactedClientSecret", + properties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + assertEquals( + "1", properties.get(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + } + + @Test + public void testParseOAuthProperties_UserAccount_RequestDriveScopeDisabled() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=1;OAuthClientId=redactedClientId;OAuthClientSecret=redactedClientSecret;" + + "RequestGoogleDriveScope=0;"; + Map properties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.getClass().getName()); + assertEquals( + "0", properties.get(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + } + + @Test + public void testParseOAuthProperties_UserAccount_RequestDriveScopeDefault() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + 
"OAuthType=1;OAuthClientId=redactedClientId;OAuthClientSecret=redactedClientSecret;"; + Map properties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.getClass().getName()); + assertEquals( + String.valueOf(BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE), + properties.get(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + } + + @Test + public void testGetUserAuthorizer_WithDriveScope() throws URISyntaxException { + Map authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "redactedClientId"); + authProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "redactedClientSecret"); + authProperties.put(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, "1"); + + UserAuthorizer authorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, Collections.emptyMap(), 12345, this.getClass().getName()); + + assertTrue(authorizer.getScopes().contains("https://www.googleapis.com/auth/bigquery")); + assertTrue(authorizer.getScopes().contains("https://www.googleapis.com/auth/drive.readonly")); + assertEquals(2, authorizer.getScopes().size()); + } + + @Test + public void testGetUserAuthorizer_WithoutDriveScope() throws URISyntaxException { + Map authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "redactedClientId"); + authProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "redactedClientSecret"); + authProperties.put(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, "0"); + + UserAuthorizer authorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, Collections.emptyMap(), 12345, this.getClass().getName()); + assertTrue(authorizer.getScopes().contains("https://www.googleapis.com/auth/bigquery")); + assertFalse(authorizer.getScopes().contains("https://www.googleapis.com/auth/drive.readonly")); + assertEquals(1, authorizer.getScopes().size()); + } + + @Test + public void testGetUserAuthorizer_InvalidDriveScopeValue() throws URISyntaxException { + Map authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "redactedClientId"); + authProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "redactedClientSecret"); + authProperties.put( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, "invalid_value"); + UserAuthorizer authorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, Collections.emptyMap(), 12345, this.getClass().getName()); + assertFalse(authorizer.getScopes().contains("https://www.googleapis.com/auth/drive.readonly")); + } + + @Test + public void testParseUserImpersonationDefault() { + String connectionUri = + getUriOAuthServiceAccount() + .append("ServiceAccountImpersonationEmail", "impersonated") + .toString(); + Map result = BigQueryJdbcOAuthUtility.parseOAuthProperties(connectionUri, ""); + assertEquals( + "impersonated", + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME)); + assertEquals( + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_SCOPES_VALUE, + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME)); + assertEquals( + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_VALUE, + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME)); + } + + @Test + public void testParseUserImpersonationNonDefault() { + Map 
result =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(
+ getUriOAuthServiceAccount()
+ .append("ServiceAccountImpersonationEmail", "impersonated")
+ .append("ServiceAccountImpersonationScopes", "scopes")
+ .append("ServiceAccountImpersonationTokenLifetime", 300)
+ .toString(),
+ "");
+ assertEquals(
+ "impersonated",
+ result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME));
+ assertEquals(
+ "scopes", result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME));
+ assertEquals(
+ "300",
+ result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME));
+ }
+
+ @Test
+ public void testGetServiceAccountImpersonatedCredentials() {
+ Map<String, String> authProperties =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(
+ getUriOAuthServiceAccount()
+ .append("ServiceAccountImpersonationEmail", "impersonated")
+ .toString(),
+ "");
+ GoogleCredentials credentials =
+ BigQueryJdbcOAuthUtility.getCredentials(authProperties, Collections.emptyMap(), null);
+ assertThat(credentials).isInstanceOf(ImpersonatedCredentials.class);
+ }
+
+ @Test
+ public void testPrivateKeyFromPkcs8() {
+ PrivateKey pk = BigQueryJdbcOAuthUtility.privateKeyFromPkcs8(fake_pkcs8_key);
+ assertNotNull(pk);
+ }
+
+ @Test
+ public void testPrivateKeyFromPkcs8_wrong() {
+ PrivateKey pk = BigQueryJdbcOAuthUtility.privateKeyFromPkcs8("");
+ assertNull(pk);
+ }
+
+ // Command to generate key:
+ // keytool -genkey -alias privatekey -keyalg RSA -keysize 2048 -storepass notasecret \
+ // -keypass notasecret -storetype pkcs12 -keystore ./fake.p12
+ @Test
+ public void testPrivateKeyFromP12File() {
+ URL resource = BigQueryJdbcOAuthUtilityTest.class.getResource("/fake.p12");
+ try {
+ PrivateKey pk =
+ BigQueryJdbcOAuthUtility.privateKeyFromP12File(
+ Paths.get(resource.toURI()).toAbsolutePath().toString(), "notasecret");
+ assertNotNull(pk);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Test
+ public void testPrivateKeyFromP12File_missing_file() {
+ PrivateKey pk = BigQueryJdbcOAuthUtility.privateKeyFromP12File("", "");
+ assertNull(pk);
+ }
+
+ @Test
+ public void testPrivateKeyFromP12File_wrong_password() {
+ URL resource = BigQueryJdbcOAuthUtilityTest.class.getResource("/fake.p12");
+ try {
+ PrivateKey pk =
+ BigQueryJdbcOAuthUtility.privateKeyFromP12File(
+ Paths.get(resource.toURI()).toAbsolutePath().toString(), "fake");
+ assertNull(pk);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java
new file mode 100644
index 0000000000..815759892b
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import org.junit.Test; + +public class BigQueryJdbcParameterTest { + + @Test + public void testSettersAndGetters() { + int expectedIndex = 3; + Object expectedValue = "String Object"; + Class expectedType = String.class; + StandardSQLTypeName expectedSqlType = StandardSQLTypeName.STRING; + String expectedParamName = "StringParameter"; + BigQueryStatementParameterType expectedParamType = BigQueryStatementParameterType.IN; + int expectedScale = -1; + + BigQueryJdbcParameter parameter = new BigQueryJdbcParameter(); + parameter.setIndex(3); + parameter.setValue("String Object"); + parameter.setType(String.class); + parameter.setSqlType(StandardSQLTypeName.STRING); + parameter.setParamName("StringParameter"); + parameter.setParamType(BigQueryStatementParameterType.IN); + parameter.setScale(-1); + + assertEquals(expectedIndex, parameter.getIndex()); + assertEquals(expectedValue, parameter.getValue()); + assertEquals(expectedType, parameter.getType()); + assertEquals(expectedSqlType, parameter.getSqlType()); + assertEquals(expectedParamName, parameter.getParamName()); + assertEquals(expectedParamType, parameter.getParamType()); + assertEquals(expectedScale, parameter.getScale()); + } + + @Test + public void testCopyConstructor() { + int expectedIndex = 3; + Object expectedValue = "String Object"; + Class expectedType = String.class; + StandardSQLTypeName expectedSqlType = StandardSQLTypeName.STRING; + + BigQueryJdbcParameter parameter = new BigQueryJdbcParameter(); + parameter.setIndex(3); + parameter.setValue("String Object"); + parameter.setType(String.class); + parameter.setSqlType(StandardSQLTypeName.STRING); + + BigQueryJdbcParameter copiedParameter = new BigQueryJdbcParameter(parameter); + + assertEquals(expectedIndex, copiedParameter.getIndex()); + assertEquals(expectedValue, copiedParameter.getValue()); + assertEquals(expectedType, copiedParameter.getType()); + assertEquals(expectedSqlType, copiedParameter.getSqlType()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java new file mode 100644 index 0000000000..203502cdac --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java @@ -0,0 +1,293 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; + +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.http.HttpTransportOptions; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.junit.Test; + +public class BigQueryJdbcProxyUtilityTest { + @Test + public void testParsingAllProxyProperties() { + Map result = + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=3128;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;", + null); + + assertThat(result.get("ProxyHost")).isEqualTo("34.94.167.18"); + assertThat(result.get("ProxyPort")).isEqualTo("3128"); + assertThat(result.get("ProxyUid")).isEqualTo("fahmz"); + assertThat(result.get("ProxyPwd")).isEqualTo("pass"); + } + + @Test + public void testParsingInvalidPortThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=portnumber;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;", + null)); + } + + @Test + public void testMissingHostThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyPort=3128;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;", + null)); + } + + @Test + public void testMissingPortThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;", + null)); + } + + @Test + public void testMissingUidWithPwdThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=3128;" + + "ProxyPwd=pass;", + null)); + } + + @Test + public void testMissingPwdWithUidThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=3128;" + + "ProxyUid=fahmz;", + null)); + } + + @Test + public void testGetHttpTransportOptionsWithAuthenticatedProxy() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=TestProject" + + ";OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=3128;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;"; + + Map proxyProperties = + 
BigQueryJdbcProxyUtility.parseProxyProperties(connection_uri, null); + HttpTransportOptions result = + BigQueryJdbcProxyUtility.getHttpTransportOptions(proxyProperties, null, null, null); + assertNotNull(result); + } + + @Test + public void testGetHttpTransportOptionsWithNonAuthenticatedProxy() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=TestProject" + + ";OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=3128;"; + + Map proxyProperties = + BigQueryJdbcProxyUtility.parseProxyProperties(connection_uri, null); + HttpTransportOptions result = + BigQueryJdbcProxyUtility.getHttpTransportOptions(proxyProperties, null, null, null); + assertNotNull(result); + } + + @Test + public void testGetHttpTransportOptionsWithNoProxySettingsReturnsNull() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=TestProject" + + ";OAuthType=3;"; + + Map proxyProperties = + BigQueryJdbcProxyUtility.parseProxyProperties(connection_uri, null); + HttpTransportOptions result = + BigQueryJdbcProxyUtility.getHttpTransportOptions(proxyProperties, null, null, null); + assertNull(result); + } + + private String getTestResourcePath(String resourceName) throws URISyntaxException { + URL resourceUrl = getClass().getClassLoader().getResource(resourceName); + if (resourceUrl == null) { + throw new RuntimeException("Test resource not found: " + resourceName); + } + return new File(resourceUrl.toURI()).getAbsolutePath(); + } + + @Test + public void testGetHttpTransportOptions_withSslTrustStore_noPassword() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_nopass.jks"); + HttpTransportOptions options = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), trustStorePath, null, "TestClass"); + assertNotNull(options); + assertNotNull(options.getHttpTransportFactory()); + } + + @Test + public void testGetHttpTransportOptions_withSslTrustStore_withCorrectPassword() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_withpass.jks"); + HttpTransportOptions options = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), trustStorePath, "testpassword", "TestClass"); + assertNotNull(options); + assertNotNull(options.getHttpTransportFactory()); + } + + @Test + public void testGetHttpTransportOptions_withSslTrustStore_withIncorrectPassword() + throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_withpass.jks"); + BigQueryJdbcRuntimeException exception = + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), + trustStorePath, + "wrongpassword", + "TestClass")); + assertThat(exception.getCause()).isInstanceOf(IOException.class); + } + + @Test + public void testGetHttpTransportOptions_withInvalidSslTrustStorePath() { + String invalidPath = "/path/to/nonexistent/truststore.jks"; + BigQueryJdbcRuntimeException exception = + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), invalidPath, null, "TestClass")); + + assertThat(exception.getCause()).isInstanceOf(FileNotFoundException.class); + } + + @Test + public void testGetHttpTransportOptions_withSslAndProxy() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_nopass.jks"); + Map proxyProperties = new HashMap<>(); + 
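+ // proxy.example.com:8080 below are placeholder coordinates; the point of this
+ // test is that trust-store and proxy settings combine into one transport factory.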
proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, "proxy.example.com"); + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, "8080"); + + HttpTransportOptions options = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + proxyProperties, trustStorePath, null, "TestClass"); + assertNotNull(options); + assertNotNull(options.getHttpTransportFactory()); + } + + @Test + public void testGetTransportChannelProvider_withSslTrustStore_noPassword() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_nopass.jks"); + TransportChannelProvider provider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + Collections.emptyMap(), trustStorePath, null, "TestClass"); + assertNotNull(provider); + } + + @Test + public void testGetTransportChannelProvider_withSslTrustStore_withCorrectPassword() + throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_withpass.jks"); + TransportChannelProvider provider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + Collections.emptyMap(), trustStorePath, "testpassword", "TestClass"); + assertNotNull(provider); + } + + @Test + public void testGetTransportChannelProvider_withSslAndProxy() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_nopass.jks"); + Map proxyProperties = new HashMap<>(); + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, "proxy.example.com"); + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, "8080"); + + TransportChannelProvider provider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + proxyProperties, trustStorePath, null, "TestClass"); + assertNotNull(provider); + } + + @Test + public void testGetTransportChannelProvider_noProxyNoSsl_returnsNull() { + TransportChannelProvider provider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + Collections.emptyMap(), null, null, "TestClass"); + assertNull(provider); + } + + @Test + public void testGetHttpTransportOptions_noProxyNoSsl_returnsNull() { + HttpTransportOptions options = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), null, null, "TestClass"); + assertNull(options); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java new file mode 100644 index 0000000000..86f087bf35 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java @@ -0,0 +1,803 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import com.google.common.collect.Maps;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import org.junit.Test;
+
+public class BigQueryJdbcUrlUtilityTest {
+
+ @Test
+ public void testParsePropertyWithNoDefault() {
+ String url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;"
+ + "OAuthAccessToken=RedactedToken";
+
+ String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "OAuthType");
+ assertThat(result).isNull();
+ }
+
+ @Test
+ public void testParsePropertyWithDefault() {
+ String url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;"
+ + "OAuthAccessToken=RedactedToken";
+
+ String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "OAuthType");
+ assertThat(result).isEqualTo(null);
+ }
+
+ @Test
+ public void testParsePropertyWithValue() {
+ String url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;"
+ + "OAuthAccessToken=RedactedToken";
+
+ String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "ProjectId");
+ assertThat(result).isEqualTo("MyBigQueryProject");
+ }
+
+ @Test
+ public void testParsePropertyWithValueCaseInsensitive() {
+ String url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "PROJECTID=MyBigQueryProject;"
+ + "OAuthAccessToken=RedactedToken";
+
+ String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "ProjectId");
+ assertThat(result).isEqualTo("MyBigQueryProject");
+ }
+
+ @Test
+ public void testAppendPropertiesToURL() {
+ String url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;"
+ + "OAuthAccessToken=RedactedToken";
+ Properties properties = new Properties();
+ properties.setProperty("OAuthType", "3");
+
+ String updatedUrl = BigQueryJdbcUrlUtility.appendPropertiesToURL(url, null, properties);
+ assertThat(updatedUrl).contains("OAuthType=3");
+ }
+
+ @Test
+ public void testConnectionPropertiesFromURI() {
+ String connection_uri =
+ "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;OAUTHTYPE=3;DEFAULTDATASET=testDataset;LOCATION=us-central1";
+
+ assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "OAUTHTYPE")).isEqualTo("3");
+ assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "LOCATION"))
+ .isEqualTo("us-central1");
+ }
+
+ @Test
+ public void testConnectionPropertiesFromURIMultiline() {
+ String connection_uri =
+ "bigquery://https://www.googleapis.com/bigquery/v2:443;Multiline=value1\nvalue2\n;";
+
+ assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "Multiline"))
+ .isEqualTo("value1\nvalue2\n");
+ }
+
+ @Test
+ public void testConnectionPropertiesFromURIMultilineNoSemicolon() {
+ String connection_uri =
+ "bigquery://https://www.googleapis.com/bigquery/v2:443;Multiline=value1\nvalue2";
+
+ assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "Multiline"))
+
.isEqualTo("value1\nvalue2"); + } + + @Test + public void testOverridePropertiesFromURICompatibility() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;PrivateServiceConnectUris=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + } + + @Test + public void testOverridePropertiesDoesNotAffectOriginalParsersAtEnd() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PrivateServiceConnectUris=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443," + + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com;PROJECTID=testProject;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")) + .isEqualTo("https://oauth2-myprivateserver.p.googleapis.com"); + } + + @Test + public void testOverridePropertiesDoesNotParseOutsideOfPrivateServiceConnectUris() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PrivateServiceConnectUris=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443;" + // Hard to see but semicolon ends it here. 
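+ // Everything after that semicolon, including the OAUTH2 pair below, sits
+ // outside PrivateServiceConnectUris and must not appear in the override map.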
+ + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com;PROJECTID=testProject;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")).isNull(); + } + + @Test + public void testOverridePropertiesDoesNotParserPropertiesInMiddle() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PrivateServiceConnectUris=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com,OAUTHTYPE=2," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443," + + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")) + .isEqualTo("https://oauth2-myprivateserver.p.googleapis.com"); + } + + @Test + public void testOverridePropertyBeforeProceedingOverrideParameterDoesNotParse() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com;" + + "PrivateServiceConnectUris=" + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443," + + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertNull(parsedPSCProperties.get("BIGQUERY")); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")) + .isEqualTo("https://oauth2-myprivateserver.p.googleapis.com"); + } + + @Test + public void testOverridePropertiesFromURIGoogleExperience() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;EndpointOverrides=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + } + + @Test + public void testAllOverridePropertiesFromURIGoogleExperience() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;EndpointOverrides=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443," + + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com," + + "STS=https://sts-myprivateserver.p.googleapis.com;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + 
.isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")) + .isEqualTo("https://oauth2-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("STS")) + .isEqualTo("https://sts-myprivateserver.p.googleapis.com"); + } + + @Test + public void testCaseSensitivityOverridePropertiesFromURI() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;endpointOverrides=" + + "bigQuery=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + } + + @Test + public void testParseJobCreationModeDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"; + + boolean jobCreationMode = BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null); + assertTrue(jobCreationMode); + } + + @Test + public void testParseJobCreationMode() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobCreationMode=1"; + + boolean jobCreationMode = BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null); + assertFalse(jobCreationMode); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobCreationMode=2"; + + jobCreationMode = BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null); + assertTrue(jobCreationMode); + } + + @Test + public void testParseJobCreationModeInvalidInteger() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobCreationMode=25"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null)); + } + + @Test + public void testParseJobCreationModeInvalidString() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobCreationMode=JOB_CREATION_OPTIONAL"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null)); + } + + @Test + public void testGetConnectionPropertyDefaultValue() { + assertEquals( + BigQueryJdbcUrlUtility.getConnectionPropertyDefaultValue("BYOID_TokenUri"), + "https://sts.googleapis.com/v1/token"); + } + + @Test + public void testParseRetryTimeoutInSecs() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "Timeout=10"; + + long retryTimeoutInSeconds = + BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(connection_uri, null); + assertEquals(10, retryTimeoutInSeconds); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "Timeout=20"; + + retryTimeoutInSeconds = BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(connection_uri, null); + assertEquals(20, 
retryTimeoutInSeconds);
+ }
+
+ @Test
+ public void testParseRetryTimeoutInSecsDefault() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject";
+
+ long retryTimeoutInSeconds =
+ BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(connection_uri, null);
+ assertEquals(0, retryTimeoutInSeconds);
+ }
+
+ @Test
+ public void testParseRetryTimeoutSecondsInvalidLong() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "Timeout=invalid";
+
+ assertThrows(
+ NumberFormatException.class,
+ () -> BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(connection_uri, null));
+ }
+
+ @Test
+ public void testParseJobTimeout() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "JobTimeout=10";
+
+ long jobTimeout = BigQueryJdbcUrlUtility.parseJobTimeout(connection_uri, null);
+ assertEquals(10, jobTimeout);
+
+ connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "JobTimeout=20";
+
+ jobTimeout = BigQueryJdbcUrlUtility.parseJobTimeout(connection_uri, null);
+ assertEquals(20, jobTimeout);
+ }
+
+ @Test
+ public void testParseJobTimeoutDefault() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject";
+
+ long jobTimeout = BigQueryJdbcUrlUtility.parseJobTimeout(connection_uri, null);
+ assertEquals(0L, jobTimeout);
+ }
+
+ @Test
+ public void testParseJobTimeoutInvalid() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "JobTimeout=invalid";
+
+ assertThrows(
+ NumberFormatException.class,
+ () -> BigQueryJdbcUrlUtility.parseJobTimeout(connection_uri, null));
+ }
+
+ @Test
+ public void testParsePartnerTokenProperty() {
+ // Case with partner name and environment
+ String url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "PartnerToken=(GPN:partner_company; dev);ProjectId=MyBigQueryProject;";
+ String expected = " (GPN:partner_company; dev)";
+ String result =
+ BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty");
+ assertThat(result).isEqualTo(expected);
+
+ // Case with only partner name
+ url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "PartnerToken=(GPN:another_partner);ProjectId=MyBigQueryProject;";
+ expected = " (GPN:another_partner)";
+ result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty");
+ assertThat(result).isEqualTo(expected);
+
+ // Case when PartnerToken property is not present
+ url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "ProjectId=MyBigQueryProject;";
+ result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty");
+ assertNull(result);
+
+ // Case when PartnerToken property is present but empty
+ url = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PartnerToken=();";
+ result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty");
+ assertNull(result);
+
+ // Case when PartnerToken property is present but without partner name
+ url = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PartnerToken=(env);";
+ result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty");
+ assertNull(result);
+
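+ // The expected values above keep a single leading space; the case below also
+ // checks that padding inside the parentheses is trimmed.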
+ // Case with extra spaces around the values
+ url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ + "PartnerToken= ( GPN: partner_name ; test_env ) ;";
+ expected = " (GPN: partner_name; test_env)";
+ result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty");
+ assertThat(result).isEqualTo(expected);
+ }
+
+ @Test
+ public void testParseRetryInitialDelayInSecs() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "RetryInitialDelay=10";
+
+ long retryInitialDelaySeconds =
+ BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(connection_uri, null);
+ assertEquals(10, retryInitialDelaySeconds);
+
+ connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "RetryInitialDelay=20";
+
+ retryInitialDelaySeconds =
+ BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(connection_uri, null);
+ assertEquals(20, retryInitialDelaySeconds);
+ }
+
+ @Test
+ public void testParseRetryInitialDelayInSecsDefault() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject";
+
+ long retryInitialDelaySeconds =
+ BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(connection_uri, null);
+ assertEquals(0, retryInitialDelaySeconds);
+ }
+
+ @Test
+ public void testParseRetryInitialDelaySecondsInvalidLong() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "RetryInitialDelay=invalid";
+
+ assertThrows(
+ NumberFormatException.class,
+ () -> BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(connection_uri, null));
+ }
+
+ @Test
+ public void testParseRetryMaxDelayInSecs() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "RetryMaxDelay=10";
+
+ long retryMaxDelaySeconds =
+ BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(connection_uri, null);
+ assertEquals(10, retryMaxDelaySeconds);
+
+ connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "RetryMaxDelay=20";
+
+ retryMaxDelaySeconds = BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(connection_uri, null);
+ assertEquals(20, retryMaxDelaySeconds);
+ }
+
+ @Test
+ public void testParseRetryMaxDelayInSecsDefault() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject";
+
+ long retryMaxDelaySeconds =
+ BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(connection_uri, null);
+ assertEquals(0, retryMaxDelaySeconds);
+ }
+
+ @Test
+ public void testParseRetryMaxDelaySecondsInvalidLong() {
+ String connection_uri =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+ + "RetryMaxDelay=invalid";
+
+ assertThrows(
+ NumberFormatException.class,
+ () -> BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(connection_uri, null));
+ }
+
+ @Test
+ public void testParseRequestGoogleDriveScope_Default() {
+ String url =
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;";
+ Integer value =
+ BigQueryJdbcUrlUtility.parseIntProperty(
+ url,
+ BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME,
+ BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE,
+ this.getClass().getName());
+ assertEquals(
+
Integer.valueOf(BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE), value); + } + + // Connection Pool Size + + @Test + public void testParseConnectionPoolSize() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ConnectionPoolSize=10"; + long connectionPoolSize = BigQueryJdbcUrlUtility.parseConnectionPoolSize(connection_uri, null); + assertEquals(10, connectionPoolSize); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ConnectionPoolSize=20"; + + connectionPoolSize = BigQueryJdbcUrlUtility.parseConnectionPoolSize(connection_uri, null); + assertEquals(20, connectionPoolSize); + } + + @Test + public void testParseConnectionPoolSizeDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long connectionPoolSize = BigQueryJdbcUrlUtility.parseConnectionPoolSize(connection_uri, null); + assertEquals(10, connectionPoolSize); + } + + @Test + public void testParseConnectionPoolSizeDefaultNullConnectionUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseConnectionPoolSize(null, null)); + } + + @Test + public void testParseConnectionPoolSizeDefaultEmptyConnectionUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseConnectionPoolSize("", null)); + } + + @Test + public void testParseConnectionPoolSizeInvalidLong() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ConnectionPoolSize=invalid"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseConnectionPoolSize(connection_uri, null)); + } + + // Listener Pool Size + + @Test + public void testParseListenerPoolSize() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ListenerPoolSize=10"; + long listenerPoolSize = BigQueryJdbcUrlUtility.parseListenerPoolSize(connection_uri, null); + assertEquals(10, listenerPoolSize); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ListenerPoolSize=20"; + + listenerPoolSize = BigQueryJdbcUrlUtility.parseListenerPoolSize(connection_uri, null); + assertEquals(20, listenerPoolSize); + } + + @Test + public void testParseListenerPoolSizeDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long listenerPoolSize = BigQueryJdbcUrlUtility.parseListenerPoolSize(connection_uri, null); + assertEquals(10, listenerPoolSize); + } + + @Test + public void testParseListenerPoolSizeDefaultNullConnectionUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseListenerPoolSize(null, null)); + } + + @Test + public void testParseListenerPoolSizeDefaultEmptyConnectionUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseListenerPoolSize("", null)); + } + + @Test + public void testParseListenerPoolSizeInvalidLong() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ListenerPoolSize=invalid"; + + assertThrows( + NumberFormatException.class, + () -> 
BigQueryJdbcUrlUtility.parseListenerPoolSize(connection_uri, null)); + } + + @Test + public void testParseStringListProperty_NullOrEmpty() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;SomeProp="; + List result = + BigQueryJdbcUrlUtility.parseStringListProperty(url, "NonExistentProp", "TestClass"); + assertEquals(Collections.emptyList(), result); + + result = BigQueryJdbcUrlUtility.parseStringListProperty(url, "SomeProp", "TestClass"); + assertEquals(Collections.emptyList(), result); + + String urlWithEmptyList = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;ListProp=,,"; + result = + BigQueryJdbcUrlUtility.parseStringListProperty(urlWithEmptyList, "ListProp", "TestClass"); + assertEquals(Collections.emptyList(), result); + } + + @Test + public void testParseStringListProperty_SingleValue() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;ListProp=project1"; + List result = + BigQueryJdbcUrlUtility.parseStringListProperty(url, "ListProp", "TestClass"); + assertEquals(Collections.singletonList("project1"), result); + } + + @Test + public void testParseStringListProperty_MultipleValues() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;ListProp=project1,project2,project3"; + List result = + BigQueryJdbcUrlUtility.parseStringListProperty(url, "ListProp", "TestClass"); + assertEquals(Arrays.asList("project1", "project2", "project3"), result); + } + + @Test + public void testParseIntProperty_ValidInteger() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;SomeIntProp=123"; + Integer defaultValue = 0; + Integer result = + BigQueryJdbcUrlUtility.parseIntProperty(url, "SomeIntProp", defaultValue, "TestClass"); + assertEquals(Integer.valueOf(123), result); + } + + @Test + public void testParseIntProperty_PropertyNotPresent() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;SomeIntProp=123"; + Integer defaultValue = 42; + Integer result = + BigQueryJdbcUrlUtility.parseIntProperty(url, "MissingIntProp", defaultValue, "TestClass"); + assertEquals(defaultValue, result); + } + + @Test + public void testParseIntProperty_InvalidIntegerValue() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;InvalidIntProp=abc"; + Integer defaultValue = 77; + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcUrlUtility.parseIntProperty( + url, "InvalidIntProp", defaultValue, "TestClass")); + } + + @Test + public void testParseIntProperty_EmptyStringValue() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;EmptyIntProp="; + Integer defaultValue = 88; + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcUrlUtility.parseIntProperty( + url, "EmptyIntProp", defaultValue, "TestClass")); + } + + @Test + public void testParseMaxBytesBilled() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "MaximumBytesBilled=10000"; + + long maxBytesBilled = BigQueryJdbcUrlUtility.parseMaximumBytesBilled(connection_uri, null); + 
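+ // MaximumBytesBilled should come back as the raw long from the URL; the tests
+ // that follow cover the 0 default, null/empty URLs, and non-numeric values.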
assertEquals(10000, maxBytesBilled); + } + + @Test + public void testParseMaxBytesBilledDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long maxBytesBilled = BigQueryJdbcUrlUtility.parseMaximumBytesBilled(connection_uri, null); + assertEquals(0, maxBytesBilled); + } + + @Test + public void testParseMaxBytesBilledNullUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseMaximumBytesBilled(null, null)); + } + + @Test + public void testParseMaxBytesBilledEmptyUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseMaximumBytesBilled("", null)); + } + + @Test + public void testParseMaxBytesBilledInvalidLong() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "MaximumBytesBilled=invalid"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseMaximumBytesBilled(connection_uri, null)); + } + + @Test + public void testParseLabels() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "Labels=k1=v1,k2=v2,k3=v3;"; + + Map labels = BigQueryJdbcUrlUtility.parseLabels(connection_uri, null); + assertNotNull(labels); + assertFalse(labels.isEmpty()); + assertEquals(3, labels.size()); + + Map expected = + new HashMap() { + { + put("k1", "v1"); + put("k2", "v2"); + put("k3", "v3"); + } + }; + + assertTrue(Maps.difference(expected, labels).areEqual()); + } + + @Test + public void testParseLabelsEmpty() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"; + + Map labels = BigQueryJdbcUrlUtility.parseLabels(connection_uri, null); + assertNull(labels); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java new file mode 100644 index 0000000000..6f10ae79a0 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java @@ -0,0 +1,342 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES;
+import static com.google.cloud.bigquery.StandardSQLTypeName.DATE;
+import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME;
+import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64;
+import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY;
+import static com.google.cloud.bigquery.StandardSQLTypeName.INT64;
+import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC;
+import static com.google.cloud.bigquery.StandardSQLTypeName.STRING;
+import static com.google.cloud.bigquery.StandardSQLTypeName.TIME;
+import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP;
+import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED;
+import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY;
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arraySchemaAndValue;
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists;
+import static com.google.common.truth.Truth.assertThat;
+import static java.time.Month.MARCH;
+import static java.util.Arrays.copyOfRange;
+import static java.util.Collections.emptyMap;
+import static org.junit.Assert.assertThrows;
+
+import com.google.cloud.Tuple;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule;
+import com.google.common.io.BaseEncoding;
+import java.math.BigDecimal;
+import java.sql.Array;
+import java.sql.Date;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.function.ThrowingRunnable;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+public class BigQueryJsonArrayOfPrimitivesTest {
+
+ private final Field schema;
+ private final FieldValue arrayValues;
+ private final Object[] expected;
+ private final int javaSqlTypeCode;
+ private Array array;
+ private final StandardSQLTypeName currentType;
+
+ @ClassRule public static final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC");
+
+ public BigQueryJsonArrayOfPrimitivesTest(
+ StandardSQLTypeName currentType,
+ Tuple<Field, FieldValue> schemaAndValue,
+ Object[] expected,
+ int javaSqlTypeCode) {
+ this.currentType = currentType;
+ this.schema = schemaAndValue.x();
+ this.arrayValues = schemaAndValue.y();
+ this.expected = expected;
+ this.javaSqlTypeCode = javaSqlTypeCode;
+ }
+
+ @Before
+ public void setUp() {
+ array = new BigQueryJsonArray(this.schema, this.arrayValues);
+ }
+
+ @Parameters(name = "{index}: primitive array of {0}")
+ public static Collection<Object[]> data() {
+ timeZoneRule.enforce();
+ LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19,
820227000); + LocalDate aDate = LocalDate.of(2023, MARCH, 30); + LocalTime aTime = LocalTime.of(11, 14, 19, 820227000); + return Arrays.asList( + new Object[][] { + { + INT64, + arraySchemaAndValue(INT64, "10", "20", "30", "40"), + new Long[] {10L, 20L, 30L, 40L}, + Types.BIGINT + }, + { + BOOL, + arraySchemaAndValue(BOOL, "true", "false", "false", "true"), + new Boolean[] {true, false, false, true}, + Types.BOOLEAN + }, + { + FLOAT64, + arraySchemaAndValue(FLOAT64, "11.2", "33.4", "55.6", "77.8"), + new Double[] {11.2, 33.4, 55.6, 77.8}, + Types.DOUBLE + }, + { + NUMERIC, + arraySchemaAndValue(NUMERIC, "11.2657", "33.4657", "55.6657", "77.8657"), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + BIGNUMERIC, + arraySchemaAndValue(BIGNUMERIC, "11.2657", "33.4657", "55.6657", "77.8657"), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + STRING, + arraySchemaAndValue(STRING, "one", "two", "three", "four"), + new String[] {"one", "two", "three", "four"}, + Types.NVARCHAR + }, + { + TIMESTAMP, + arraySchemaAndValue( + TIMESTAMP, + "1680174859.8202269", + "1680261259.8202269", + "1680347659.8202269", + "1680434059.8202269"), + new Timestamp[] { + Timestamp.valueOf(aTimeStamp), // 2023-03-30 16:44:19.82 + Timestamp.valueOf(aTimeStamp.plusDays(1)), + Timestamp.valueOf(aTimeStamp.plusDays(2)), + Timestamp.valueOf(aTimeStamp.plusDays(3)) + }, + Types.TIMESTAMP + }, + { + DATE, + arraySchemaAndValue(DATE, "2023-03-30", "2023-03-31", "2023-04-01", "2023-04-02"), + new Date[] { + Date.valueOf(aDate), + Date.valueOf(aDate.plusDays(1)), + Date.valueOf(aDate.plusDays(2)), + Date.valueOf(aDate.plusDays(3)) + }, + Types.DATE + }, + { + TIME, + arraySchemaAndValue( + TIME, "11:14:19.820227", "11:14:20.820227", "11:14:21.820227", "11:14:22.820227"), + new Time[] { + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())), + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(1).toNanoOfDay())), + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(2).toNanoOfDay())), + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(3).toNanoOfDay())) + }, + Types.TIME + }, + { + DATETIME, + arraySchemaAndValue( + DATETIME, + "2023-03-30T11:14:19.820227", + "2023-03-30T11:15:19.820227", + "2023-03-30T11:16:19.820227", + "2023-03-30T11:17:19.820227"), + new Timestamp[] { + Timestamp.valueOf("2023-03-30 11:14:19.820227"), + Timestamp.valueOf("2023-03-30 11:15:19.820227"), + Timestamp.valueOf("2023-03-30 11:16:19.820227"), + Timestamp.valueOf("2023-03-30 11:17:19.820227") + }, + Types.TIMESTAMP + }, + { + GEOGRAPHY, + arraySchemaAndValue( + GEOGRAPHY, "POINT(-122 47)", "POINT(-122 48)", "POINT(-121 47)", "POINT(-123 48)"), + new String[] {"POINT(-122 47)", "POINT(-122 48)", "POINT(-121 47)", "POINT(-123 48)"}, + Types.OTHER + }, + { + BYTES, + arraySchemaAndValue( + BYTES, + Stream.of("one", "two", "three", "four") + .map(s -> BaseEncoding.base64().encode(s.getBytes())) + .toArray(String[]::new)), + new byte[][] { + "one".getBytes(), "two".getBytes(), "three".getBytes(), "four".getBytes() + }, + Types.VARBINARY + } + }); + } + + @Test + public void getArray() throws SQLException { + assertThat(array.getArray()).isEqualTo(this.expected); + } + + @Test + public void getSlicedArray() throws SQLException { + int fromIndex = 1; + int toIndexExclusive = 3; + Object[] 
expectedSlicedArray =
+        copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2)
+
+    // the first element is at index 1
+    assertThat(array.getArray(fromIndex + 1, 2)).isEqualTo(expectedSlicedArray);
+  }
+
+  @Test
+  public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() {
+    IllegalArgumentException illegalArgumentException =
+        assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10));
+    assertThat(illegalArgumentException.getMessage())
+        .isEqualTo("The array index is out of range: 12, number of elements: 4.");
+  }
+
+  @Test
+  public void getResultSet() throws SQLException {
+    ResultSet resultSet = this.array.getResultSet();
+    Tuple<ArrayList<Integer>, ArrayList<Object>> indexAndValues =
+        nestedResultSetToColumnLists(resultSet);
+    ArrayList<Integer> indexList = indexAndValues.x();
+    ArrayList<Object> columnValues = indexAndValues.y();
+
+    assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4});
+    assertThat(columnValues.toArray()).isEqualTo(this.expected);
+  }
+
+  @Test
+  public void getSlicedResultSet() throws SQLException {
+    int fromIndex = 1;
+    int toIndexExclusive = 3;
+    Object[] expectedSlicedArray =
+        copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2)
+
+    // the first element is at index 1
+    ResultSet resultSet = array.getResultSet(fromIndex + 1, 2);
+
+    Tuple<ArrayList<Integer>, ArrayList<Object>> indexAndValues =
+        nestedResultSetToColumnLists(resultSet);
+    ArrayList<Integer> indexList = indexAndValues.x();
+    ArrayList<Object> columnValues = indexAndValues.y();
+
+    assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3});
+    assertThat(columnValues.toArray()).isEqualTo(expectedSlicedArray);
+  }
+
+  @Test
+  public void getSlicedResultSetWhenCountIsGreaterThanOriginalArrayLength() {
+    IllegalArgumentException illegalArgumentException =
+        assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10));
+    assertThat(illegalArgumentException.getMessage())
+        .isEqualTo("The array index is out of range: 12, number of elements: 4.");
+  }
+
+  @Test
+  public void getBaseTypeName() throws SQLException {
+    assertThat(array.getBaseTypeName()).isEqualTo(this.currentType.name());
+  }
+
+  @Test
+  public void getBaseType() throws SQLException {
+    assertThat(array.getBaseType()).isEqualTo(this.javaSqlTypeCode);
+  }
+
+  @Test
+  public void free() throws SQLException {
+    this.array.free();
+
+    ensureArrayIsInvalid(() -> array.getArray());
+    ensureArrayIsInvalid(() -> array.getArray(1, 2));
+    ensureArrayIsInvalid(() -> array.getResultSet());
+    ensureArrayIsInvalid(() -> array.getResultSet(1, 2));
+    ensureArrayIsInvalid(() -> array.getBaseTypeName());
+    ensureArrayIsInvalid(() -> array.getBaseType());
+  }
+
+  @Test
+  public void getArrayWithCustomTypeMappingsIsNotSupported() {
+    Exception exception1 =
+        assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap()));
+    Exception exception2 =
+        assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap()));
+    assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED);
+    assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED);
+  }
+
+  @Test
+  public void getResultSetWithCustomTypeMappingsIsNotSupported() {
+    Exception exception1 =
+        assertThrows(SQLFeatureNotSupportedException.class, () -> array.getResultSet(emptyMap()));
+    Exception exception2 =
+        assertThrows(
+            SQLFeatureNotSupportedException.class, () -> array.getResultSet(1, 2, emptyMap()));
+    
assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java new file mode 100644 index 0000000000..b390d642e4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java @@ -0,0 +1,204 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.FieldValue.Attribute.PRIMITIVE; +import static com.google.cloud.bigquery.LegacySQLTypeName.RECORD; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.sql.Types; +import java.util.ArrayList; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; + +public class BigQueryJsonArrayOfStructTest { + + private Array array; + + @Before + public void setUp() { + FieldList profileSchema = + FieldList.of( + Field.newBuilder("name", LegacySQLTypeName.STRING).build(), + Field.newBuilder("age", LegacySQLTypeName.INTEGER).build(), + Field.newBuilder("adult", LegacySQLTypeName.BOOLEAN).build()); + + FieldValue record1 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Arya"), + FieldValue.of(PRIMITIVE, "15"), + FieldValue.of(PRIMITIVE, "false")))); + FieldValue record2 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Khal Drogo"), + FieldValue.of(PRIMITIVE, "35"), + 
FieldValue.of(PRIMITIVE, "true"))));
+    FieldValue record3 =
+        FieldValue.of(
+            Attribute.RECORD,
+            FieldValueList.of(
+                asList(
+                    FieldValue.of(PRIMITIVE, "Ned Stark"),
+                    FieldValue.of(PRIMITIVE, "45"),
+                    FieldValue.of(PRIMITIVE, "true"))));
+    FieldValue record4 =
+        FieldValue.of(
+            Attribute.RECORD,
+            FieldValueList.of(
+                asList(
+                    FieldValue.of(PRIMITIVE, "Jon Snow"),
+                    FieldValue.of(PRIMITIVE, "25"),
+                    FieldValue.of(PRIMITIVE, "true"))));
+
+    Field arrayOfStructSchema =
+        Field.newBuilder("profiles", RECORD, profileSchema).setMode(Mode.REPEATED).build();
+
+    FieldValue arrayOfStructValue =
+        FieldValue.of(
+            Attribute.REPEATED, FieldValueList.of(asList(record1, record2, record3, record4)));
+    array = new BigQueryJsonArray(arrayOfStructSchema, arrayOfStructValue);
+  }
+
+  @Test
+  public void getArray() throws SQLException {
+    Struct[] structArray = (Struct[]) array.getArray();
+
+    assertThat(structArray.length).isEqualTo(4);
+    assertThat(structArray[0].getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray());
+    assertThat(structArray[1].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray());
+    assertThat(structArray[2].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray());
+    assertThat(structArray[3].getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray());
+  }
+
+  @Test
+  public void getSlicedArray() throws SQLException {
+    Struct[] structArray = (Struct[]) array.getArray(2, 2);
+
+    assertThat(structArray.length).isEqualTo(2);
+    assertThat(structArray[0].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray());
+    assertThat(structArray[1].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray());
+  }
+
+  @Test
+  public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() {
+    IllegalArgumentException illegalArgumentException =
+        assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10));
+    assertThat(illegalArgumentException.getMessage())
+        .isEqualTo("The array index is out of range: 12, number of elements: 4.");
+  }
+
+  @Test
+  public void getResultSet() throws SQLException {
+    ResultSet resultSet = array.getResultSet();
+    Tuple<ArrayList<Integer>, ArrayList<Struct>> indexAndValues =
+        nestedResultSetToColumnLists(resultSet);
+
+    ArrayList<Integer> indexList = indexAndValues.x();
+    ArrayList<Struct> structs = indexAndValues.y();
+
+    assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4});
+    assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray());
+    assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray());
+    assertThat(structs.get(2).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray());
+    assertThat(structs.get(3).getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray());
+  }
+
+  @Test
+  public void getSlicedResultSet() throws SQLException {
+    ResultSet resultSet = array.getResultSet(2, 2);
+    Tuple<ArrayList<Integer>, ArrayList<Struct>> indexAndValues =
+        nestedResultSetToColumnLists(resultSet);
+
+    ArrayList<Integer> indexList = indexAndValues.x();
+    ArrayList<Struct> structs = indexAndValues.y();
+
+    assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3});
+    assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray());
+    assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray());
+  }
+
+  @Test
+  public void getResultSetWhenCountIsGreaterThanOriginalArrayLength() {
+    IllegalArgumentException illegalArgumentException =
+        assertThrows(IllegalArgumentException.class, () -> 
array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(StandardSQLTypeName.STRUCT.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(Types.STRUCT); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = Assert.assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java new file mode 100644 index 0000000000..4c715833ff --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java @@ -0,0 +1,476 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static java.time.Month.MARCH; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import com.google.common.collect.ImmutableList; +import com.google.common.io.BaseEncoding; +import com.google.common.io.CharStreams; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.Calendar; +import java.util.TimeZone; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; + +public class BigQueryJsonResultSetTest { + + @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + private static final FieldList fieldList = + FieldList.of( + Field.of("first", StandardSQLTypeName.BOOL), + Field.of("second", StandardSQLTypeName.INT64), + Field.of("third", StandardSQLTypeName.FLOAT64), + Field.of("fourth", StandardSQLTypeName.STRING), + Field.of("fifth", StandardSQLTypeName.TIMESTAMP), + Field.of("sixth", StandardSQLTypeName.BYTES), + Field.of("seventh", StandardSQLTypeName.STRING), + Field.newBuilder("eight", StandardSQLTypeName.INT64).setMode(Field.Mode.REPEATED).build(), + Field.of( + "ninth", + StandardSQLTypeName.STRUCT, + Field.of("first", StandardSQLTypeName.FLOAT64), + Field.of("second", StandardSQLTypeName.TIMESTAMP)), + Field.of("tenth", StandardSQLTypeName.NUMERIC), + Field.of("eleventh", StandardSQLTypeName.BIGNUMERIC), + Field.of("twelfth", LegacySQLTypeName.TIME), + Field.of("thirteenth", LegacySQLTypeName.INTEGER), + Field.of("fourteenth", LegacySQLTypeName.DATE)); + + LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820000000); + LocalTime aTime = LocalTime.of(11, 14, 19, 820000000); + private static final String STRING_VAL = "STRING_VALUE"; + private static final Schema QUERY_SCHEMA = Schema.of(fieldList); + private final FieldValue booleanFv = FieldValue.of(Attribute.PRIMITIVE, "false"); + private final FieldValue integerFv = FieldValue.of(Attribute.PRIMITIVE, "1"); + private final FieldValue floatFv = FieldValue.of(Attribute.PRIMITIVE, "1.5"); + private final FieldValue stringFv = FieldValue.of(Attribute.PRIMITIVE, STRING_VAL); + private final FieldValue timestampFv = + FieldValue.of(Attribute.PRIMITIVE, "1680174859.820000"); // 2023-03-30 16:44:19.82 + + private final FieldValue bytesFv = + FieldValue.of( + Attribute.PRIMITIVE, + BaseEncoding.base64().encode(STRING_VAL.getBytes(StandardCharsets.UTF_8))); + + private final FieldValue nullFv = FieldValue.of(Attribute.PRIMITIVE, null); + private final FieldValue 
repeatedFv =
+      FieldValue.of(
+          Attribute.REPEATED,
+          FieldValueList.of(
+              ImmutableList.of(
+                  FieldValue.of(Attribute.PRIMITIVE, "10"),
+                  FieldValue.of(Attribute.PRIMITIVE, "20"))));
+  private final FieldValue recordFv =
+      FieldValue.of(
+          Attribute.RECORD,
+          FieldValueList.of(
+              ImmutableList.of(floatFv, timestampFv), fieldList.get("ninth").getSubFields()));
+  private final FieldValue numericFv = FieldValue.of(Attribute.PRIMITIVE, "12345678");
+  private final FieldValue bigNumericFv = FieldValue.of(Attribute.PRIMITIVE, "12345678.99");
+
+  private final FieldValue timeFv = FieldValue.of(Attribute.PRIMITIVE, "11:14:19.820000");
+
+  private final FieldValue shortFv = FieldValue.of(Attribute.PRIMITIVE, "10");
+  private final FieldValue dateFv = FieldValue.of(Attribute.PRIMITIVE, "2020-01-15");
+
+  private final FieldValueList fieldValues =
+      FieldValueList.of(
+          ImmutableList.of(
+              booleanFv, // 1
+              integerFv, // 2
+              floatFv, // 3
+              stringFv, // 4
+              timestampFv, // 5
+              bytesFv, // 6
+              nullFv, // 7
+              repeatedFv, // 8
+              recordFv, // 9
+              numericFv, // 10
+              bigNumericFv, // 11
+              timeFv, // 12
+              shortFv, // 13
+              dateFv // 14
+              ),
+          fieldList);
+
+  private BigQueryFieldValueListWrapper bigQueryFieldValueListWrapperNested;
+
+  private BigQueryStatement statement;
+  private BigQueryStatement statementForTwoRows;
+
+  private BigQueryJsonResultSet bigQueryJsonResultSet;
+  private BigQueryJsonResultSet bigQueryJsonResultSetNested;
+
+  private BlockingQueue<BigQueryFieldValueListWrapper> buffer;
+  private BlockingQueue<BigQueryFieldValueListWrapper> bufferWithTwoRows;
+
+  @Before
+  public void setUp() {
+    // Buffer with one row
+    buffer = new LinkedBlockingDeque<>(2);
+    statement = mock(BigQueryStatement.class);
+    buffer.add(BigQueryFieldValueListWrapper.of(fieldList, fieldValues));
+    buffer.add(BigQueryFieldValueListWrapper.of(null, null, true)); // last marker
+    Thread[] workerThreads = {new Thread()};
+    bigQueryJsonResultSet =
+        BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, buffer, statement, workerThreads);
+
+    // Buffer with 2 rows.
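+    // Capacity of 3 accommodates the two data rows plus the end-of-stream marker added below.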
+ bufferWithTwoRows = new LinkedBlockingDeque<>(3); + statementForTwoRows = mock(BigQueryStatement.class); + bufferWithTwoRows.add(BigQueryFieldValueListWrapper.of(fieldList, fieldValues)); + bufferWithTwoRows.add(BigQueryFieldValueListWrapper.of(fieldList, fieldValues)); + bufferWithTwoRows.add(BigQueryFieldValueListWrapper.of(null, null, true)); // last marker + + // values for nested types + Field fieldEight = fieldList.get("eight"); + FieldValue fieldEightValue = fieldValues.get("eight"); + FieldList nestedFieldList = Schema.of(fieldEight).getFields(); + bigQueryFieldValueListWrapperNested = + BigQueryFieldValueListWrapper.getNestedFieldValueListWrapper( + nestedFieldList, fieldEightValue.getRepeatedValue()); + bigQueryJsonResultSetNested = + BigQueryJsonResultSet.getNestedResultSet( + Schema.of(fieldEight), + bigQueryFieldValueListWrapperNested, + 0, + fieldEightValue.getRepeatedValue().size()); + } + + private boolean resetResultSet() + throws SQLException { // re-initialises the resultset and moves the cursor to the first row + Thread[] workerThreads = {new Thread()}; + bigQueryJsonResultSet = + BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, buffer, statement, workerThreads); + return bigQueryJsonResultSet.next(); // move to the first row + } + + @Test + public void testIsClosed() { + assertThat(bigQueryJsonResultSet.isClosed()).isFalse(); + } + + @Test + public void testClose() { + // TODO(prashant): Add test case after close method is implemented + } + + @Test + public void testRowCount() throws SQLException { + Thread[] workerThreads = {new Thread()}; + // ResultSet with 1 row buffer and 1 total rows. + BigQueryJsonResultSet bigQueryJsonResultSet2 = + BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, buffer, statement, workerThreads); + assertThat(resultSetRowCount(bigQueryJsonResultSet2)).isEqualTo(1); + // ResultSet with 2 rows buffer and 1 total rows. 
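+    // Even though this buffer holds two rows, the declared total row count (1L) should cap
+    // iteration at a single row, as asserted below.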
+ bigQueryJsonResultSet2 = + BigQueryJsonResultSet.of( + QUERY_SCHEMA, 1L, bufferWithTwoRows, statementForTwoRows, workerThreads); + assertThat(resultSetRowCount(bigQueryJsonResultSet2)).isEqualTo(1); + } + + @Test + // This method tests iteration and Resultset's type getters + public void testIteration() throws SQLException { + int cnt = 0; + assertThat(bigQueryJsonResultSet.isBeforeFirst()).isTrue(); + while (bigQueryJsonResultSet.next()) { + cnt++; + assertThat(bigQueryJsonResultSet.isLast()).isTrue(); // we have one test row + assertThat(bigQueryJsonResultSet.isFirst()).isTrue(); // we have one test row + assertThat(bigQueryJsonResultSet.getBoolean("first")).isFalse(); + assertThat(bigQueryJsonResultSet.getBoolean(1)).isFalse(); + assertThat(bigQueryJsonResultSet.getInt("second")).isEqualTo(1); + assertThat(bigQueryJsonResultSet.getInt(2)).isEqualTo(1); + assertThat(bigQueryJsonResultSet.getFloat("third")).isEqualTo(1.5f); + assertThat(bigQueryJsonResultSet.getFloat(3)).isEqualTo(1.5f); + assertThat(bigQueryJsonResultSet.getString("fourth")).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getString(4)).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getTimestamp("fifth")) + .isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.getTimestamp(5)).isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.wasNull()).isFalse(); + assertThat(bigQueryJsonResultSet.getObject("seventh")).isNull(); // test null + assertThat(bigQueryJsonResultSet.getObject(7)).isNull(); + assertThat(bigQueryJsonResultSet.wasNull()).isTrue(); + assertThat(bigQueryJsonResultSet.getArray("eight").getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(bigQueryJsonResultSet.getArray(8).getArray()).isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryJsonResultSet.getObject("eight")).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryJsonResultSet.getObject(8)).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(((Struct) bigQueryJsonResultSet.getObject("ninth")).getAttributes()) + .isEqualTo(new Object[] {1.5, Timestamp.valueOf(aTimeStamp)}); + assertThat(((Struct) bigQueryJsonResultSet.getObject(9)).getAttributes()) + .isEqualTo(new Object[] {1.5, Timestamp.valueOf(aTimeStamp)}); + assertThat(bigQueryJsonResultSet.getLong("tenth")).isEqualTo(12345678L); + assertThat(bigQueryJsonResultSet.getLong(10)).isEqualTo(12345678L); + assertThat(bigQueryJsonResultSet.getDouble("eleventh")).isEqualTo(12345678.99D); + assertThat(bigQueryJsonResultSet.getDouble(11)).isEqualTo(12345678.99D); + Time expectedTime = new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())); + assertThat(bigQueryJsonResultSet.getTime("twelfth")).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getTime(12)).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getShort("thirteenth")).isEqualTo((short) 10); + assertThat(bigQueryJsonResultSet.getShort(13)).isEqualTo((short) 10); + } + assertThat(cnt).isEqualTo(1); + assertThat(bigQueryJsonResultSet.next()).isFalse(); + assertThat(bigQueryJsonResultSet.isAfterLast()).isTrue(); + } + + @Test + public void testGetObjectWithPrimitives() throws SQLException { + bigQueryJsonResultSet.next(); + assertThat(bigQueryJsonResultSet.getObject("first")).isEqualTo(false); + assertThat(bigQueryJsonResultSet.getObject(1)).isEqualTo(false); + assertThat(bigQueryJsonResultSet.getObject("second")).isEqualTo(1); + assertThat(bigQueryJsonResultSet.getObject(2)).isEqualTo(1); + 
assertThat(bigQueryJsonResultSet.getObject("third")).isEqualTo(1.5); + assertThat(bigQueryJsonResultSet.getObject(3)).isEqualTo(1.5); + assertThat(bigQueryJsonResultSet.getObject("fourth")).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getObject(4)).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getObject("fifth")).isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.getObject(5)).isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.getObject("sixth")) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + assertThat(bigQueryJsonResultSet.getObject(6)) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + assertThat(bigQueryJsonResultSet.wasNull()).isFalse(); + assertThat(bigQueryJsonResultSet.getObject("seventh")).isNull(); // test null + assertThat(bigQueryJsonResultSet.getObject(7)).isNull(); + assertThat(bigQueryJsonResultSet.wasNull()).isTrue(); + + assertThat(bigQueryJsonResultSet.getObject("tenth")).isEqualTo(new BigDecimal("12345678")); + assertThat(bigQueryJsonResultSet.getObject(10)).isEqualTo(new BigDecimal("12345678")); + assertThat(bigQueryJsonResultSet.getObject("eleventh")) + .isEqualTo(new BigDecimal("12345678.99")); + assertThat(bigQueryJsonResultSet.getObject(11)).isEqualTo(new BigDecimal("12345678.99")); + Time expectedTime = new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())); + assertThat(bigQueryJsonResultSet.getObject("twelfth")).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getObject(12)).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getObject("thirteenth")).isEqualTo((short) 10); + assertThat(bigQueryJsonResultSet.getObject(13)).isEqualTo((short) 10); + } + + // validate the input streams + @Test + public void testCharacterStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + Reader charStream = bigQueryJsonResultSet.getCharacterStream("fourth"); + String expectedVal = CharStreams.toString(charStream); + assertThat(expectedVal).isEqualTo(STRING_VAL); + } + + @Test + public void testBinaryStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + StringBuilder textBuilder = new StringBuilder(); + InputStream binInputStream = bigQueryJsonResultSet.getBinaryStream(6); + Reader reader = new BufferedReader(new InputStreamReader(binInputStream)); + int c; + while ((c = reader.read()) != -1) { + textBuilder.append((char) c); + } + assertThat(textBuilder.toString()).isEqualTo(STRING_VAL); + reader.close(); + } + + @Test + public void testAsciiStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + StringBuilder textBuilder = new StringBuilder(); + InputStream binInputStream = bigQueryJsonResultSet.getAsciiStream(4); + Reader reader = new BufferedReader(new InputStreamReader(binInputStream)); + int c; + while ((c = reader.read()) != -1) { + textBuilder.append((char) c); + } + String expectedAsciiString = + new String(STRING_VAL.getBytes(), 0, STRING_VAL.length(), StandardCharsets.US_ASCII); + assertThat(textBuilder.length()).isEqualTo(expectedAsciiString.length()); + assertThat(textBuilder.toString()).isEqualTo(expectedAsciiString); + reader.close(); + } + + @Test + public void testUnicodeStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + InputStream binInputStream = bigQueryJsonResultSet.getUnicodeStream(4); + byte[] cbuf = new byte[100]; + int len = binInputStream.read(cbuf, 0, cbuf.length); + String colFourVal = new 
String(cbuf, 0, len, StandardCharsets.UTF_16LE); + assertThat(colFourVal).isEqualTo(STRING_VAL); + } + + @Test + public void testClob() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + java.sql.Clob clobVal = bigQueryJsonResultSet.getClob(4); + StringBuilder textBuilder = new StringBuilder(); + Reader charStream = clobVal.getCharacterStream(); + int intValueOfChar; + while ((intValueOfChar = charStream.read()) != -1) { + textBuilder.append((char) intValueOfChar); + } + charStream.close(); + assertThat(textBuilder.toString()).isEqualTo(STRING_VAL); + } + + @Test + public void testBlob() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + StringBuilder textBuilder = new StringBuilder(); + Blob blobVal = bigQueryJsonResultSet.getBlob(6); + InputStream binInputStream = blobVal.getBinaryStream(); + Reader reader = new BufferedReader(new InputStreamReader(binInputStream)); + int c; + while ((c = reader.read()) != -1) { + textBuilder.append((char) c); + } + assertThat(textBuilder.toString()).isEqualTo(STRING_VAL); + reader.close(); + } + + @Test + public void testBytes() throws SQLException { + assertThat(resetResultSet()).isTrue(); + assertThat(bigQueryJsonResultSet.getBytes("sixth")) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + assertThat(bigQueryJsonResultSet.getBytes(6)) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + } + + @Test + public void testResultSetHoldability() + throws SQLException { // TODO(prashant): Revisit this after Statement's commit is finalised + assertThat(bigQueryJsonResultSet.getHoldability()) + .isEqualTo(ResultSet.HOLD_CURSORS_OVER_COMMIT); + } + + @Test + public void testStatement() throws SQLException { + assertThat(bigQueryJsonResultSet.getStatement()).isEqualTo(statement); + assertThat(bigQueryJsonResultSetNested.getStatement()).isNull(); + } + + @Test + public void testConcurrency() throws SQLException { + assertThat(bigQueryJsonResultSet.getConcurrency()).isEqualTo(ResultSet.CONCUR_READ_ONLY); + assertThat(bigQueryJsonResultSet.getType()).isEqualTo(ResultSet.TYPE_FORWARD_ONLY); + assertThat(bigQueryJsonResultSet.findColumn("first")).isEqualTo(1); + } + + @Test + public void testIterationNested() throws SQLException { + int cnt = 0; + assertThat(bigQueryJsonResultSetNested.isBeforeFirst()).isTrue(); + while (bigQueryJsonResultSetNested.next()) { + cnt++; + if (cnt == 1) { + assertThat(bigQueryJsonResultSetNested.isFirst()).isTrue(); + + } else { // 2nd row is the last row + assertThat(bigQueryJsonResultSetNested.isLast()).isTrue(); + } + assertThat(bigQueryJsonResultSetNested.getInt(1)) + .isEqualTo(cnt); // the first column is index 1 + assertThat(bigQueryJsonResultSetNested.getInt(2)) + .isEqualTo(cnt * 10); // second column has values 10 and 20 + } + assertThat(cnt).isEqualTo(2); + assertThat(bigQueryJsonResultSetNested.next()).isFalse(); + assertThat(bigQueryJsonResultSetNested.isAfterLast()).isTrue(); + } + + @Test + public void testTime() throws SQLException { + assertThat(resetResultSet()).isTrue(); + Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("EST")); + Time expectedTime = new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())); + assertThat(bigQueryJsonResultSet.getTime(12)) + .isEqualTo(bigQueryJsonResultSet.getTime(12, calendar)); + assertThat(expectedTime).isEqualTo(bigQueryJsonResultSet.getTime(12, calendar)); + assertThat(bigQueryJsonResultSet.getTime("twelfth")) + .isEqualTo(bigQueryJsonResultSet.getTime("twelfth", calendar)); + } + + @Test + 
public void testTimestamp() throws SQLException {
+    assertThat(resetResultSet()).isTrue();
+    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("EST"));
+    Timestamp time = bigQueryJsonResultSet.getTimestamp(5);
+    Timestamp timeWithCal = bigQueryJsonResultSet.getTimestamp(5, calendar);
+    assertThat(time).isEqualTo(timeWithCal);
+    assertThat(bigQueryJsonResultSet.getTimestamp("fifth"))
+        .isEqualTo(bigQueryJsonResultSet.getTimestamp("fifth", calendar));
+  }
+
+  @Test
+  public void testDate() throws SQLException {
+    assertThat(resetResultSet()).isTrue();
+    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("EST"));
+    // epoch should match
+    assertThat(bigQueryJsonResultSet.getDate(14).getTime())
+        .isEqualTo(bigQueryJsonResultSet.getDate(14, calendar).getTime());
+    assertThat(Date.valueOf("2020-01-15").getTime())
+        .isEqualTo(bigQueryJsonResultSet.getDate(14, calendar).getTime());
+    assertThat(bigQueryJsonResultSet.getDate("fourteenth").getTime())
+        .isEqualTo(bigQueryJsonResultSet.getDate("fourteenth", calendar).getTime());
+  }
+
+  private int resultSetRowCount(BigQueryJsonResultSet resultSet) throws SQLException {
+    int rowCount = 0;
+    while (resultSet.next()) {
+      rowCount++;
+    }
+    return rowCount;
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java
new file mode 100644
index 0000000000..f07d8cad27
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.FieldValue.Attribute.PRIMITIVE;
+import static com.google.cloud.bigquery.FieldValue.Attribute.RECORD;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES;
+import static com.google.cloud.bigquery.StandardSQLTypeName.DATE;
+import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME;
+import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64;
+import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY;
+import static com.google.cloud.bigquery.StandardSQLTypeName.INT64;
+import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC;
+import static com.google.cloud.bigquery.StandardSQLTypeName.STRING;
+import static com.google.cloud.bigquery.StandardSQLTypeName.TIME;
+import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP;
+import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED;
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arraySchemaAndValue;
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.primitiveSchemaAndValue;
+import static com.google.common.io.BaseEncoding.base64;
+import static com.google.common.truth.Truth.assertThat;
+import static java.time.Month.MARCH;
+import static java.util.Arrays.asList;
+import static java.util.Collections.emptyMap;
+import static org.junit.Assert.assertThrows;
+
+import com.google.cloud.Tuple;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldList;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FieldValue.Attribute;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.LegacySQLTypeName;
+import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule;
+import com.google.common.io.BaseEncoding;
+import java.math.BigDecimal;
+import java.sql.Array;
+import java.sql.Date;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.Struct;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class BigQueryJsonStructTest {
+
+  @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC");
+
+  private Struct structWithPrimitiveValues;
+  private Struct structWithNullValue;
+
+  @Before
+  public void setUp() {
+    List<Tuple<Field, FieldValue>> schemaAndValues =
+        Arrays.asList(
+            primitiveSchemaAndValue(INT64, "10"),
+            primitiveSchemaAndValue(BOOL, "true"),
+            primitiveSchemaAndValue(FLOAT64, "11.2"),
+            primitiveSchemaAndValue(NUMERIC, "11.2657"),
+            primitiveSchemaAndValue(BIGNUMERIC, "11.2657"),
+            primitiveSchemaAndValue(STRING, "one"),
+            primitiveSchemaAndValue(TIMESTAMP, "1680174859.8200000"), // 2023-03-30 16:44:19.82
+            primitiveSchemaAndValue(DATE, "2023-03-30"),
+            primitiveSchemaAndValue(TIME, "11:14:19.820000"),
+            primitiveSchemaAndValue(DATETIME, "2023-03-30T11:14:19.8200000"),
+            primitiveSchemaAndValue(GEOGRAPHY, "POINT(-122 47)"),
+            primitiveSchemaAndValue(BYTES, base64().encode("one".getBytes())));
+    List<Field> orderedSchemas =
+        schemaAndValues.stream().map(Tuple::x).collect(Collectors.toList());
+    List<FieldValue> orderedValues =
+        schemaAndValues.stream().map(Tuple::y).collect(Collectors.toList());
+
+    structWithPrimitiveValues =
+        new BigQueryJsonStruct(
+            FieldList.of(orderedSchemas), FieldValue.of(RECORD, FieldValueList.of(orderedValues)));
+    structWithNullValue =
+        new BigQueryJsonStruct(FieldList.of(orderedSchemas), FieldValue.of(PRIMITIVE, null));
+  }
+
+  @Test
+  public void structOfPrimitives() throws SQLException {
+    assertThat(structWithPrimitiveValues.getAttributes())
+        .isEqualTo(
+            Arrays.asList(
+                    10L,
+                    true,
+                    11.2,
+                    new BigDecimal("11.2657"),
+                    new BigDecimal("11.2657"),
+                    "one",
+                    Timestamp.valueOf(LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820000000)),
+                    Date.valueOf(LocalDate.of(2023, MARCH, 30)),
+                    new Time(
+                        TimeUnit.NANOSECONDS.toMillis(
+                            LocalTime.parse("11:14:19.820").toNanoOfDay())),
+                    Timestamp.valueOf("2023-03-30 11:14:19.8200000"),
+                    "POINT(-122 47)",
+                    "one".getBytes())
+                .toArray());
+  }
+
+  @Test
+  public void structOfArrays() throws SQLException {
+    LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820000000);
+    LocalDate aDate = LocalDate.of(2023, MARCH, 30);
+    LocalTime aTime = LocalTime.of(11, 14, 19, 820000000);
+    List<Tuple<Field, FieldValue>> schemaAndValues =
+        Arrays.asList(
+            arraySchemaAndValue(INT64, "10", "20"),
+            arraySchemaAndValue(BOOL, "true", "false"),
+            arraySchemaAndValue(FLOAT64, "11.2", "33.4"),
+            arraySchemaAndValue(NUMERIC, "11.2657", "33.4657"),
+            arraySchemaAndValue(BIGNUMERIC, "11.2657", "33.4657"),
+            arraySchemaAndValue(STRING, "one", "two"),
+            arraySchemaAndValue(TIMESTAMP, "1680174859.820000", "1680261259.820000"),
+            arraySchemaAndValue(DATE, "2023-03-30", "2023-03-31"),
+            arraySchemaAndValue(TIME, "11:14:19.820000", "11:14:20.820000"),
+            arraySchemaAndValue(
+                DATETIME, "2023-03-30T11:14:19.820000", "2023-03-30T11:15:19.820000"),
+            arraySchemaAndValue(GEOGRAPHY, "POINT(-122 47)", "POINT(-122 48)"),
+            arraySchemaAndValue(
+                BYTES,
+                Stream.of("one", "two")
+                    .map(s -> BaseEncoding.base64().encode(s.getBytes()))
+                    .toArray(String[]::new)));
+
+    List<Field> orderedSchemas =
+        schemaAndValues.stream().map(Tuple::x).collect(Collectors.toList());
+    List<FieldValue> orderedValues =
+        schemaAndValues.stream().map(Tuple::y).collect(Collectors.toList());
+
+    Struct struct =
+        new BigQueryJsonStruct(
+            FieldList.of(orderedSchemas), FieldValue.of(RECORD, FieldValueList.of(orderedValues)));
+
+    Object[] attributes = struct.getAttributes();
+    assertThat(((Array) attributes[0]).getArray()).isEqualTo(new Long[] {10L, 20L});
+    assertThat(((Array) attributes[1]).getArray()).isEqualTo(new Boolean[] {true, false});
+    assertThat(((Array) attributes[2]).getArray()).isEqualTo(new Double[] {11.2, 33.4});
+    assertThat(((Array) attributes[3]).getArray())
+        .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")});
+    assertThat(((Array) attributes[4]).getArray())
+        .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")});
+    assertThat(((Array) attributes[5]).getArray()).isEqualTo(new String[] {"one", "two"});
+    assertThat(((Array) attributes[6]).getArray())
+        .isEqualTo(
+            new Timestamp[] {
+              Timestamp.valueOf(aTimeStamp), // 2023-03-30 16:44:19.82
+              Timestamp.valueOf(aTimeStamp.plusDays(1))
+            });
+    assertThat(((Array) attributes[7]).getArray())
+        .isEqualTo(new Date[] {Date.valueOf(aDate), Date.valueOf(aDate.plusDays(1))});
+    assertThat(((Array) attributes[8]).getArray())
+        .isEqualTo(
+            new Time[] {
+              new 
Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())), + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(1).toNanoOfDay())) + }); + assertThat(((Array) attributes[9]).getArray()) // DATETIME + .isEqualTo( + new Timestamp[] { + Timestamp.valueOf("2023-03-30 11:14:19.820000"), + Timestamp.valueOf("2023-03-30 11:15:19.820000") + }); + assertThat(((Array) attributes[10]).getArray()) + .isEqualTo(new String[] {"POINT(-122 47)", "POINT(-122 48)"}); + assertThat(((Array) attributes[11]).getArray()) + .isEqualTo(new byte[][] {"one".getBytes(), "two".getBytes()}); + } + + @Test + public void structOfStructs() throws SQLException { + FieldList profileSchema = + FieldList.of( + Field.of("name", LegacySQLTypeName.STRING), + Field.of("age", LegacySQLTypeName.INTEGER), + Field.of("adult", LegacySQLTypeName.BOOLEAN)); + FieldList addressSchema = + FieldList.of( + Field.of("state", LegacySQLTypeName.STRING), + Field.of("zip", LegacySQLTypeName.INTEGER)); + FieldList rootStructSchema = + FieldList.of( + Field.of("profile", LegacySQLTypeName.RECORD, profileSchema), + Field.of("address", LegacySQLTypeName.RECORD, addressSchema)); + + FieldValue profileValue = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Arya"), + FieldValue.of(PRIMITIVE, "15"), + FieldValue.of(PRIMITIVE, "false")))); + FieldValue addressValue = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList(FieldValue.of(PRIMITIVE, "Michigan"), FieldValue.of(PRIMITIVE, "49086")))); + + FieldValue rootStructValue = + FieldValue.of(RECORD, FieldValueList.of(asList(profileValue, addressValue))); + + Struct struct = new BigQueryJsonStruct(rootStructSchema, rootStructValue); + Object[] attributes = struct.getAttributes(); + Struct profileStruct = (Struct) attributes[0]; + Struct addressStruct = (Struct) attributes[1]; + + assertThat(profileStruct.getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(addressStruct.getAttributes()).isEqualTo(asList("Michigan", 49086L).toArray()); + } + + @Test + public void structWithNullValue() throws SQLException { + assertThat(structWithNullValue.getAttributes()) + .isEqualTo( + Arrays.asList(0L, false, 0.0, null, null, null, null, null, null, null, null, null) + .toArray()); + } + + @Test + public void getSQLTypeNameIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, structWithPrimitiveValues::getSQLTypeName); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getAttributesWithCustomTypeMappingsIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, + () -> structWithPrimitiveValues.getAttributes(emptyMap())); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java new file mode 100644 index 0000000000..0dc085b602 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java @@ -0,0 +1,142 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import org.junit.Test; + +public class BigQueryParameterHandlerTest { + + @Test + public void testGetSetParameterByName() throws Exception { + BigQueryParameterHandler paramHandler = new BigQueryParameterHandler(2); + // Add Param 1 + paramHandler.setParameter( + "ParamKey1", "ParamValue1", String.class, BigQueryStatementParameterType.IN, -1); + String paramValue = (String) paramHandler.getParameter("ParamKey1"); + assertNotNull(paramValue); + assertEquals("ParamValue1", paramValue); + BigQueryStatementParameterType paramType = paramHandler.getParameterType("ParamKey1"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.IN, paramType); + int scale = paramHandler.getParameterScale("ParamKey1"); + assertEquals(-1, scale); + assertEquals(String.class, paramHandler.getType("ParamKey1")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey1")); + + // Add Param 2 + paramHandler.setParameter( + "ParamKey2", "ParamValue2", String.class, BigQueryStatementParameterType.INOUT, 1); + paramValue = (String) paramHandler.getParameter("ParamKey2"); + assertNotNull(paramValue); + assertEquals("ParamValue2", paramValue); + paramType = paramHandler.getParameterType("ParamKey2"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.INOUT, paramType); + scale = paramHandler.getParameterScale("ParamKey2"); + assertEquals(1, scale); + assertEquals(String.class, paramHandler.getType("ParamKey2")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey2")); + + // Update Param 1 + paramHandler.setParameter( + "ParamKey1", "ParamValue1-UPD", String.class, BigQueryStatementParameterType.OUT, 1); + paramValue = (String) paramHandler.getParameter("ParamKey1"); + assertNotNull(paramValue); + assertEquals("ParamValue1-UPD", paramValue); + paramType = paramHandler.getParameterType("ParamKey1"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.OUT, paramType); + scale = paramHandler.getParameterScale("ParamKey1"); + assertEquals(1, scale); + assertEquals(String.class, paramHandler.getType("ParamKey1")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey1")); + + // Update Param 2 + paramHandler.setParameter( + "ParamKey2", "ParamValue2-UPD", String.class, BigQueryStatementParameterType.INOUT, 2); + paramValue = (String) paramHandler.getParameter("ParamKey2"); + assertNotNull(paramValue); + assertEquals("ParamValue2-UPD", paramValue); + paramType = paramHandler.getParameterType("ParamKey2"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.INOUT, paramType); + scale = paramHandler.getParameterScale("ParamKey2"); + assertEquals(2, scale); + assertEquals(String.class, paramHandler.getType("ParamKey2")); + assertEquals(StandardSQLTypeName.STRING, 
paramHandler.getSqlType("ParamKey2")); + } + + @Test + public void testGetSetParameterByIndex() throws Exception { + BigQueryParameterHandler paramHandler = new BigQueryParameterHandler(2); + + // Add Param 1 + paramHandler.setParameter(1, "ParamValue1", String.class); + String value = (String) paramHandler.getParameter(1); + assertNotNull(value); + assertEquals("ParamValue1", value); + BigQueryStatementParameterType paramType = paramHandler.getParameterType(1); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.UNSPECIFIED, paramType); + assertEquals(String.class, paramHandler.getType(1)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(1)); + + // Add Param 2 + paramHandler.setParameter( + 2, "ParamValue2", String.class, BigQueryStatementParameterType.IN, -1); + value = (String) paramHandler.getParameter(2); + assertNotNull(value); + assertEquals("ParamValue2", value); + paramType = paramHandler.getParameterType(2); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.IN, paramType); + int scale = paramHandler.getParameterScale(2); + assertEquals(-1, scale); + assertEquals(String.class, paramHandler.getType(2)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(2)); + + // Update Param 1 + paramHandler.setParameter(1, "ParamValue1-UPD", String.class); + value = (String) paramHandler.getParameter(1); + assertNotNull(value); + assertEquals("ParamValue1-UPD", value); + paramType = paramHandler.getParameterType(1); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.UNSPECIFIED, paramType); + assertEquals(String.class, paramHandler.getType(1)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(1)); + + // Update Param 2 + paramHandler.setParameter( + 2, "ParamValue2-UPD", String.class, BigQueryStatementParameterType.OUT, 2); + value = (String) paramHandler.getParameter(2); + assertNotNull(value); + assertEquals("ParamValue2-UPD", value); + paramType = paramHandler.getParameterType(2); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.OUT, paramType); + scale = paramHandler.getParameterScale(2); + assertEquals(2, scale); + assertEquals(String.class, paramHandler.getType(2)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(2)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java new file mode 100644 index 0000000000..ee9d63beb9 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java @@ -0,0 +1,174 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.TestConnectionListener;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+
+import java.io.IOException;
+import java.sql.*;
+import org.junit.Before;
+import org.junit.Test;
+
+public class BigQueryPooledConnectionTest {
+  private BigQueryConnection bigQueryConnection;
+  private static final Long LISTENER_POOL_SIZE = 10L;
+
+  @Before
+  public void setUp() throws IOException, SQLException {
+    bigQueryConnection = mock(BigQueryConnection.class);
+  }
+
+  @Test
+  public void testGetPooledConnection() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertFalse(pooledConnection.inUse());
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+  }
+
+  @Test
+  public void testPooledConnectionClose() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertFalse(pooledConnection.inUse());
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    connection.close();
+    assertFalse(pooledConnection.inUse());
+  }
+
+  @Test
+  public void testReuseConnectionAfterClose() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertFalse(pooledConnection.inUse());
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    connection.close();
+    assertFalse(pooledConnection.inUse());
+
+    connection = pooledConnection.getConnection();
+    assertTrue(pooledConnection.inUse());
+  }
+
+  @Test
+  public void testAddConnectionListener() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+
+    assertTrue(pooledConnection.isListenerPooled(listener));
+  }
+
+  @Test
+  public void testRemoveConnectionListener() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    assertTrue(pooledConnection.isListenerPooled(listener));
+    pooledConnection.removeConnectionEventListener(listener);
+    assertFalse(pooledConnection.isListenerPooled(listener));
+  }
+
+  @Test
+  public void testConnectionHandleClosedByConnection() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    assertFalse(pooledConnection.inUse());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    connection.close();
+    assertFalse(pooledConnection.inUse());
+    assertEquals(1, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    assertTrue(pooledConnection.isListenerPooled(listener));
+  }
+
+  @Test
+  public void testConnectionHandleClosedByPooledConnection() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    assertFalse(pooledConnection.inUse());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    pooledConnection.close();
+    assertFalse(pooledConnection.inUse());
+    assertEquals(1, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    assertTrue(pooledConnection.isListenerPooled(listener));
+  }
+
+  @Test
+  public void testFireConnectionError() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    assertFalse(pooledConnection.inUse());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    pooledConnection.fireConnectionError(new SQLException("test"));
+    assertFalse(pooledConnection.inUse());
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(1, listener.getConnectionErrorCount());
+
+    assertFalse(pooledConnection.isListenerPooled(listener));
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java
new file mode 100644
index 0000000000..7332dce936
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class BigQueryResultSetFinalizersTest {
+  Thread arrowWorker;
+  Thread[] jsonWorkers;
+
+  @Before
+  public void setUp() {
+    // Create and start the daemon worker threads; each spins until it is interrupted.
+    arrowWorker =
+        new Thread(
+            () -> {
+              while (true) {
+                if (Thread.currentThread().isInterrupted()) {
+                  break;
+                }
+              }
+            });
+    arrowWorker.setDaemon(true);
+    Thread jsonWorker =
+        new Thread(
+            () -> {
+              while (true) {
+                if (Thread.currentThread().isInterrupted()) {
+                  break;
+                }
+              }
+            });
+    jsonWorker.setDaemon(true);
+    jsonWorkers = new Thread[] {jsonWorker};
+    arrowWorker.start();
+    jsonWorker.start();
+  }
+
+  @Test
+  public void testFinalizeResources() {
+    BigQueryResultSetFinalizers.ArrowResultSetFinalizer arrowResultSetFinalizer =
+        new BigQueryResultSetFinalizers.ArrowResultSetFinalizer(null, null, arrowWorker);
+    arrowResultSetFinalizer.finalizeResources();
+    assertThat(arrowWorker.isInterrupted()).isTrue();
+    BigQueryResultSetFinalizers.JsonResultSetFinalizer jsonResultSetFinalizer =
+        new BigQueryResultSetFinalizers.JsonResultSetFinalizer(null, null, jsonWorkers);
+    jsonResultSetFinalizer.finalizeResources();
+    assertThat(jsonWorkers[0].isInterrupted()).isTrue();
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java
new file mode 100644
index 0000000000..b4d14296d1
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java
@@ -0,0 +1,277 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.Mockito.mock;
+
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldList;
+import com.google.cloud.bigquery.LegacySQLTypeName;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.common.collect.ImmutableList;
+import java.sql.Array;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.class)
+public class BigQueryResultSetMetadataTest {
+
+  private BigQueryStatement statement;
+
+  private static Field tenthField =
+      Field.newBuilder("tenth", LegacySQLTypeName.NUMERIC)
+          .setName("tenth")
+          .setType(StandardSQLTypeName.NUMERIC)
+          .setPrecision(12L)
+          .setScale(9L)
+          .build();
+  private static final FieldList fieldList =
+      FieldList.of(
+          Field.of("first", StandardSQLTypeName.BOOL),
+          Field.of("second", StandardSQLTypeName.INT64),
+          Field.of("third", StandardSQLTypeName.FLOAT64),
+          Field.of("fourth", StandardSQLTypeName.STRING),
+          Field.of("fifth", StandardSQLTypeName.TIMESTAMP),
+          Field.of("sixth", StandardSQLTypeName.BYTES),
+          Field.of("seventh", StandardSQLTypeName.STRING),
+          Field.newBuilder("eight", StandardSQLTypeName.STRING)
+              .setMode(Field.Mode.REPEATED)
+              .build(),
+          Field.of(
+              "ninth",
+              StandardSQLTypeName.STRUCT,
+              Field.of("first", StandardSQLTypeName.FLOAT64),
+              Field.of("second", StandardSQLTypeName.TIMESTAMP)),
+          tenthField,
+          Field.of("eleventh", StandardSQLTypeName.BIGNUMERIC),
+          Field.of("twelfth", LegacySQLTypeName.TIME),
+          Field.of("thirteenth", LegacySQLTypeName.DATE));
+
+  private static final List<Integer> fieldListSqlTypes =
+      ImmutableList.of(
+          Types.BOOLEAN,
+          Types.BIGINT,
+          Types.DOUBLE,
+          Types.NVARCHAR,
+          Types.TIMESTAMP,
+          Types.VARBINARY,
+          Types.NVARCHAR,
+          Types.ARRAY,
+          Types.STRUCT,
+          Types.NUMERIC,
+          Types.NUMERIC,
+          Types.TIME,
+          Types.DATE);
+
+  private static final List<String> fieldListClassNames =
+      ImmutableList.of(
+          "java.lang.Boolean",
+          "java.lang.Long",
+          "java.lang.Double",
+          "java.lang.String",
+          "java.sql.Timestamp",
+          byte[].class.getName(),
+          "java.lang.String",
+          Array.class.getName(),
+          "java.sql.Struct",
+          "java.math.BigDecimal",
+          "java.math.BigDecimal",
+          "java.sql.Time",
+          "java.sql.Date");
+  private static final Schema QUERY_SCHEMA = Schema.of(fieldList);
+
+  private ResultSetMetaData resultSetMetaData;
+
+  private ResultSetMetaData resultSetMetaDataNested;
+
+  @Before
+  public void setUp() throws SQLException {
+    statement = mock(BigQueryStatement.class);
+    Thread[] workerThreads = {new Thread()};
+    BigQueryJsonResultSet bigQueryJsonResultSet =
+        BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, null, statement, workerThreads);
+    // metadata for the flat, top-level result set
+    resultSetMetaData = bigQueryJsonResultSet.getMetaData();
+
+    // values for nested types
+    Field fieldEight = fieldList.get("eight");
+    // The schema for the nested result set should describe the elements of the array.
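+    // REPEATED applies to the column as a whole; each individual element is NULLABLE,
+    // which is exactly what the nested-metadata assertions below expect.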
+    Field elementField = fieldEight.toBuilder().setMode(Field.Mode.NULLABLE).build();
+    FieldList nestedFieldList = FieldList.of(elementField);
+    BigQueryFieldValueListWrapper bigQueryFieldValueListWrapperNested =
+        BigQueryFieldValueListWrapper.getNestedFieldValueListWrapper(nestedFieldList, null);
+    BigQueryJsonResultSet bigQueryJsonResultSetNested =
+        BigQueryJsonResultSet.getNestedResultSet(
+            Schema.of(nestedFieldList), bigQueryFieldValueListWrapperNested, -1, -1);
+    resultSetMetaDataNested = bigQueryJsonResultSetNested.getMetaData();
+  }
+
+  @Test
+  public void testGetColumnType() throws SQLException {
+    // match the mapping for all the types in the test dataset
+    for (int colIndex = 1; colIndex <= 13; colIndex++) {
+      assertThat(resultSetMetaData.getColumnType(colIndex))
+          .isEqualTo(fieldListSqlTypes.get(colIndex - 1));
+    }
+  }
+
+  @Test
+  public void testGetColumnTypeName() throws SQLException {
+    assertThat(resultSetMetaData.getColumnTypeName(1)).isEqualTo("BOOL");
+    assertThat(resultSetMetaData.getColumnTypeName(2)).isEqualTo("INT64");
+    assertThat(resultSetMetaData.getColumnTypeName(3)).isEqualTo("FLOAT64");
+    assertThat(resultSetMetaData.getColumnTypeName(4)).isEqualTo("STRING");
+    assertThat(resultSetMetaData.getColumnTypeName(5)).isEqualTo("TIMESTAMP");
+    assertThat(resultSetMetaData.getColumnTypeName(6)).isEqualTo("BYTES");
+    assertThat(resultSetMetaData.getColumnTypeName(7)).isEqualTo("STRING");
+    assertThat(resultSetMetaData.getColumnTypeName(8)).isEqualTo("ARRAY");
+    assertThat(resultSetMetaData.getColumnTypeName(9)).isEqualTo("STRUCT");
+    assertThat(resultSetMetaData.getColumnTypeName(10)).isEqualTo("NUMERIC");
+    assertThat(resultSetMetaData.getColumnTypeName(11)).isEqualTo("BIGNUMERIC");
+    assertThat(resultSetMetaData.getColumnTypeName(12)).isEqualTo("TIME");
+    assertThat(resultSetMetaData.getColumnTypeName(13)).isEqualTo("DATE");
+  }
+
+  @Test
+  public void testColumnClassName() throws SQLException {
+    // match the mapping for all the types in the test dataset
+    for (int colIndex = 1; colIndex <= 13; colIndex++) {
+      assertThat(resultSetMetaData.getColumnClassName(colIndex))
+          .isEqualTo(fieldListClassNames.get(colIndex - 1));
+    }
+  }
+
+  @Test
+  public void testResultSetMetadataProperties() throws SQLException {
+    assertThat(resultSetMetaData).isNotNull();
+    assertThat(resultSetMetaData.getColumnCount()).isEqualTo(13);
+    assertThat(resultSetMetaData.isAutoIncrement(1)).isFalse();
+    assertThat(resultSetMetaData.isSearchable(4)).isTrue();
+    assertThat(resultSetMetaData.isCurrency(4)).isFalse();
+    assertThat(resultSetMetaData.isReadOnly(4)).isFalse();
+    assertThat(resultSetMetaData.isDefinitelyWritable(4)).isFalse();
+    assertThat(resultSetMetaData.isWritable(4)).isTrue();
+    assertThat(resultSetMetaData.isNullable(4)).isEqualTo(ResultSetMetaData.columnNullableUnknown);
+  }
+
+  @Test
+  public void testPrecision() throws SQLException {
+    assertThat(resultSetMetaData.getPrecision(10)).isEqualTo(12L);
+    assertThat(resultSetMetaData.getPrecision(1))
+        .isEqualTo(0); // schema doesn't carry this info, so it defaults to 0
+  }
+
+  @Test
+  public void testSigned() throws SQLException {
+    assertThat(resultSetMetaData.isSigned(4)).isFalse();
+    assertThat(resultSetMetaData.isSigned(2)).isTrue();
+  }
+
+  @Test
+  public void testCheckNameLabelCatalog() throws SQLException {
+    assertThat(resultSetMetaData.getColumnLabel(1)).isEqualTo("first");
+    assertThat(resultSetMetaData.getColumnName(10)).isEqualTo("tenth");
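+    // Schema and catalog names are not part of BigQuery result metadata, so both come back empty: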
+    assertThat(resultSetMetaData.getSchemaName(10)).isEqualTo("");
+    assertThat(resultSetMetaData.getCatalogName(10)).isEqualTo("");
+  }
+
+  @Test
+  public void testCheckCaseSensitive() throws SQLException {
+    assertThat(resultSetMetaData.isCaseSensitive(2)).isFalse();
+    assertThat(resultSetMetaData.isCaseSensitive(4)).isTrue();
+  }
+
+  @Test
+  public void testScale() throws SQLException {
+    assertThat(resultSetMetaData.getScale(10)).isEqualTo(9L);
+    assertThat(resultSetMetaData.getScale(4)).isEqualTo(0L);
+  }
+
+  @Test
+  public void testColumnDisplaySize() throws SQLException {
+    assertThat(resultSetMetaData.getColumnDisplaySize(1)).isEqualTo(5);
+    assertThat(resultSetMetaData.getColumnDisplaySize(13)).isEqualTo(10);
+    assertThat(resultSetMetaData.getColumnDisplaySize(2)).isEqualTo(10);
+    assertThat(resultSetMetaData.getColumnDisplaySize(3)).isEqualTo(14);
+    assertThat(resultSetMetaData.getColumnDisplaySize(12)).isEqualTo(50);
+    assertThat(resultSetMetaData.getColumnDisplaySize(5)).isEqualTo(16);
+  }
+
+  // Nested Types
+
+  @Test
+  public void testResultSetMetaDataNestedColType() throws SQLException {
+    assertThat(resultSetMetaDataNested).isNotNull();
+    assertThat(resultSetMetaDataNested.getColumnType(1)).isEqualTo(Types.NVARCHAR);
+    assertThat(resultSetMetaDataNested.getColumnClassName(1)).isEqualTo("java.lang.String");
+  }
+
+  @Test
+  public void testNestedResultSetMetaDataProperties() throws SQLException {
+    assertThat(resultSetMetaDataNested.getColumnCount()).isEqualTo(1);
+    assertThat(resultSetMetaDataNested.isAutoIncrement(1)).isFalse();
+    assertThat(resultSetMetaDataNested.isSearchable(1)).isTrue();
+    assertThat(resultSetMetaDataNested.isCurrency(1)).isFalse();
+    assertThat(resultSetMetaDataNested.isReadOnly(1)).isFalse();
+    assertThat(resultSetMetaDataNested.isDefinitelyWritable(1)).isFalse();
+    assertThat(resultSetMetaDataNested.isWritable(1)).isTrue();
+    assertThat(resultSetMetaDataNested.isNullable(1)).isEqualTo(ResultSetMetaData.columnNullable);
+  }
+
+  @Test
+  public void testNestedPrecision() throws SQLException {
+    assertThat(resultSetMetaDataNested.getPrecision(1))
+        .isEqualTo(0); // schema doesn't carry this info, so it defaults to 0
+  }
+
+  @Test
+  public void testNestedSigned() throws SQLException {
+    assertThat(resultSetMetaDataNested.isSigned(1)).isFalse();
+  }
+
+  @Test
+  public void testNestedCheckNameLabelCatalog() throws SQLException {
+    assertThat(resultSetMetaDataNested.getColumnLabel(1)).isEqualTo("eight");
+    assertThat(resultSetMetaDataNested.getColumnName(1)).isEqualTo("eight");
+    assertThat(resultSetMetaDataNested.getSchemaName(1)).isEqualTo("");
+    assertThat(resultSetMetaDataNested.getCatalogName(1)).isEqualTo("");
+  }
+
+  @Test
+  public void testNestedCheckCaseSensitive() throws SQLException {
+    assertThat(resultSetMetaDataNested.isCaseSensitive(1)).isTrue();
+  }
+
+  @Test
+  public void testNestedScale() throws SQLException {
+    assertThat(resultSetMetaDataNested.getScale(1)).isEqualTo(0L);
+  }
+
+  @Test
+  public void testNestedColumnDisplaySize() throws SQLException {
+    assertThat(resultSetMetaDataNested.getColumnDisplaySize(1)).isEqualTo(50);
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryStatementTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryStatementTest.java
new file mode 100644
index 0000000000..22dc072192
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryStatementTest.java
@@ -0,0 +1,483 @@
+/*
* Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.utils.ArrowUtilities.serializeSchema; +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQuery.QueryResultsOption; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.JobStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryJobConfiguration.Priority; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableResult; +import com.google.cloud.bigquery.jdbc.BigQueryStatement.JobIdWrapper; +import com.google.cloud.bigquery.spi.BigQueryRpcFactory; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1.ReadSession; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Maps; +import java.io.IOException; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.BlockingQueue; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; + +public class BigQueryStatementTest { + + private BigQueryConnection bigQueryConnection; + private static final String PROJECT = "project"; + + private BigQueryRpcFactory rpcFactoryMock; + + private BigQueryReadClient storageReadClient; + + private BigQuery bigquery; + + private BigQueryStatement bigQueryStatement; + + private final String query = "select * from test"; + + private final String jobIdVal = UUID.randomUUID().toString(); + + private JobId jobId; + + 
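// Minimal two-column schema (BOOL, INT64), mirrored by the Arrow vectors built in
// getTestVectorSchemaRoot() below.
+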
  private static final FieldList fieldList =
+      FieldList.of(
+          Field.of("first", StandardSQLTypeName.BOOL),
+          Field.of("second", StandardSQLTypeName.INT64));
+
+  private static final String DEFAULT_TEST_DATASET = "bigquery_test_dataset";
+
+  private static final TableId TABLE_ID = TableId.of(DEFAULT_TEST_DATASET, PROJECT);
+
+  private static ArrowSchema arrowSchema;
+
+  private final Map<String, String> LABELS =
+      new HashMap<String, String>() {
+        {
+          put("key1", "val1");
+          put("key2", "val2");
+          put("key3", "val3");
+        }
+      };
+
+  private Job getJobMock(
+      TableResult result, QueryJobConfiguration configuration, StatementType type)
+      throws InterruptedException {
+    Job job = mock(Job.class);
+    JobStatistics.QueryStatistics statistics = mock(QueryStatistics.class);
+    JobId jobId = mock(JobId.class);
+    doReturn(result).when(job).getQueryResults(any(QueryResultsOption.class));
+    doReturn(jobId).when(job).getJobId();
+    doReturn(configuration).when(job).getConfiguration();
+    doReturn(statistics).when(job).getStatistics();
+    doReturn(type).when(statistics).getStatementType();
+    return job;
+  }
+
+  @Before
+  public void setUp() throws IOException, SQLException {
+    bigQueryConnection = mock(BigQueryConnection.class);
+    rpcFactoryMock = mock(BigQueryRpcFactory.class);
+    bigquery = mock(BigQuery.class);
+    bigQueryConnection.bigQuery = bigquery;
+    storageReadClient = mock(BigQueryReadClient.class);
+    jobId = JobId.newBuilder().setJob(jobIdVal).build();
+
+    doReturn(bigquery).when(bigQueryConnection).getBigQuery();
+    doReturn(10L).when(bigQueryConnection).getJobTimeoutInSeconds();
+    doReturn(10L).when(bigQueryConnection).getMaxBytesBilled();
+    doReturn(LABELS).when(bigQueryConnection).getLabels();
+    doReturn(BigQueryJdbcUrlUtility.DEFAULT_QUERY_DIALECT_VALUE)
+        .when(bigQueryConnection)
+        .getQueryDialect();
+    doReturn(1000L).when(bigQueryConnection).getMaxResults();
+    bigQueryStatement = new BigQueryStatement(bigQueryConnection);
+    VectorSchemaRoot vectorSchemaRoot = getTestVectorSchemaRoot();
+    arrowSchema =
+        ArrowSchema.newBuilder()
+            .setSerializedSchema(serializeSchema(vectorSchemaRoot.getSchema()))
+            .build();
+  }
+
+  private VectorSchemaRoot getTestVectorSchemaRoot() {
+    RootAllocator allocator = new RootAllocator();
+    BitVector boolField =
+        new BitVector("boolField", allocator); // Mapped with StandardSQLTypeName.BOOL
+    boolField.allocateNew(2);
+    boolField.set(0, 0);
+    boolField.setValueCount(1);
+    IntVector int64Field =
+        new IntVector("int64Field", allocator); // Mapped with StandardSQLTypeName.INT64
+    int64Field.allocateNew(2);
+    int64Field.set(0, 1);
+    int64Field.setValueCount(1);
+    List<FieldVector> fieldVectors = ImmutableList.of(boolField, int64Field);
+    return new VectorSchemaRoot(fieldVectors);
+  }
+
+  private BigQueryOptions createBigQueryOptionsForProject(
+      String project, BigQueryRpcFactory rpcFactory) {
+    return BigQueryOptions.newBuilder()
+        .setProjectId(project)
+        .setServiceRpcFactory(rpcFactory)
+        .setRetrySettings(ServiceOptions.getNoRetrySettings())
+        .build();
+  }
+
+  @Test
+  public void testStatementNonNull() {
+    assertThat(bigQueryStatement).isNotNull();
+  }
+
+  @Ignore
+  @Test
+  public void testExecFastQueryPath() throws SQLException, InterruptedException {
+    JobIdWrapper jobIdWrapper = new JobIdWrapper(jobId, null, null);
+    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);
+
+    TableResult result = Mockito.mock(TableResult.class);
+    BigQueryJsonResultSet jsonResultSet = mock(BigQueryJsonResultSet.class);
+    QueryJobConfiguration
jobConfiguration = QueryJobConfiguration.newBuilder(query).build(); + + doReturn(result).when(bigquery).query(jobConfiguration); + doReturn(jsonResultSet).when(bigQueryStatementSpy).processJsonResultSet(result); + + bigQueryStatementSpy.runQuery(query, jobConfiguration); + // verify the statement's state + assertThat(bigQueryStatementSpy.jobIds.size()).isEqualTo(1); // job id should be created + assertThat(bigQueryStatementSpy.jobIds.get(0)).isNotNull(); + // assertThat(bigQueryStatementSpy.currentResultSet).isNotNull(); + + } + + @Test + public void testExecSlowQueryPath() throws SQLException, InterruptedException { + JobIdWrapper jobIdWrapper = new JobIdWrapper(jobId, null, null); + BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement); + TableResult tableResult = mock(TableResult.class); + QueryJobConfiguration queryJobConfiguration = + QueryJobConfiguration.newBuilder(query) + .setPriority(Priority.BATCH) // query settings for slow query path + .build(); + Job job = getJobMock(tableResult, queryJobConfiguration, StatementType.SELECT); + + doReturn(job).when(bigquery).create(any(JobInfo.class)); + + doReturn(jobIdWrapper) + .when(bigQueryStatementSpy) + .insertJob(any(com.google.cloud.bigquery.JobConfiguration.class)); + doReturn(false).when(bigQueryStatementSpy).useReadAPI(eq(tableResult)); + doReturn(mock(JobId.class)).when(tableResult).getJobId(); + + ResultSet bigQueryJsonResultSet = mock(BigQueryJsonResultSet.class); + + doReturn(bigQueryJsonResultSet) + .when(bigQueryStatementSpy) + .processJsonResultSet(any(TableResult.class)); + + bigQueryStatementSpy.runQuery(query, queryJobConfiguration); + // verify the statement's state + // job id is created during runQuery, but cleaned up after function completes. + assertThat(bigQueryStatementSpy.jobIds.size()).isEqualTo(0); + assertThat(bigQueryStatementSpy.getResultSet()).isEqualTo(bigQueryJsonResultSet); + } + + @Test + public void getArrowResultSetTest() throws SQLException { + BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement); + BigQueryReadClient bigQueryReadClient = Mockito.spy(mock(BigQueryReadClient.class)); + Schema schema = Schema.of(fieldList); + ReadSession readSession = ReadSession.getDefaultInstance(); + doReturn(bigQueryReadClient).when(bigQueryStatementSpy).getBigQueryReadClient(); + doReturn(readSession) + .when(bigQueryStatementSpy) + .getReadSession(any(CreateReadSessionRequest.class)); + Thread mockWorker = new Thread(); + doReturn(mockWorker) + .when(bigQueryStatementSpy) + .populateArrowBufferedQueue( + any(ReadSession.class), any(BlockingQueue.class), any(BigQueryReadClient.class)); + + doReturn(arrowSchema).when(bigQueryStatementSpy).getArrowSchema(any(ReadSession.class)); + + JobId jobId = JobId.of("123"); + TableResult result = Mockito.mock(TableResult.class); + doReturn(schema).when(result).getSchema(); + doReturn(10L).when(result).getTotalRows(); + doReturn(TABLE_ID).when(bigQueryStatementSpy).getDestinationTable(any()); + doReturn(jobId).when(result).getJobId(); + Job job = mock(Job.class); + doReturn(mock(QueryStatistics.class)).when(job).getStatistics(); + doReturn(job).when(bigquery).getJob(jobId); + + ResultSet resultSet = bigQueryStatementSpy.processArrowResultSet(result); + assertThat(resultSet).isNotNull(); + assertThat(resultSet).isInstanceOf(BigQueryArrowResultSet.class); + assertThat(resultSet.isLast()).isFalse(); // as we have 10 rows + } + + @Test + public void getJobTimeoutTest() throws Exception { + QueryJobConfiguration jobConfig = 
        bigQueryStatement.getJobConfig("select 1").build();
+    assertEquals(10000L, jobConfig.getJobTimeoutMs().longValue());
+  }
+
+  @Test
+  public void getMaxBytesBilledTest() throws Exception {
+    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
+    assertEquals(10L, jobConfig.getMaximumBytesBilled().longValue());
+  }
+
+  @Test
+  public void testSetMaxRowsJson() throws SQLException {
+    assertEquals(0, bigQueryStatement.getMaxRows());
+    bigQueryStatement.setMaxRows(10);
+    assertEquals(10, bigQueryStatement.getMaxRows());
+  }
+
+  @Test
+  public void setQueryTimeoutTest() throws Exception {
+    bigQueryStatement.setQueryTimeout(3);
+    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);
+
+    TableResult result = Mockito.mock(TableResult.class);
+    BigQueryJsonResultSet jsonResultSet = mock(BigQueryJsonResultSet.class);
+    QueryJobConfiguration jobConfiguration =
+        QueryJobConfiguration.newBuilder(query).setJobTimeoutMs(10000L).build();
+
+    Job job = getJobMock(result, jobConfiguration, StatementType.SELECT);
+    doReturn(job).when(bigquery).create(any(JobInfo.class));
+
+    doReturn(jsonResultSet).when(bigQueryStatementSpy).processJsonResultSet(result);
+    ArgumentCaptor<JobInfo> captor = ArgumentCaptor.forClass(JobInfo.class);
+
+    bigQueryStatementSpy.runQuery(query, jobConfiguration);
+    verify(bigquery).create(captor.capture());
+    QueryJobConfiguration jobConfig = captor.getValue().getConfiguration();
+    assertEquals(3000L, jobConfig.getJobTimeoutMs().longValue());
+  }
+
+  @Test
+  public void getLabelsTest() throws Exception {
+    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
+    Map<String, String> expected =
+        new HashMap<String, String>() {
+          {
+            put("key1", "val1");
+            put("key2", "val2");
+            put("key3", "val3");
+          }
+        };
+
+    assertTrue(Maps.difference(expected, jobConfig.getLabels()).areEqual());
+  }
+
+  @Test
+  public void getUpdateCountTest() {
+    bigQueryStatement.setUpdateCount(100L);
+    assertEquals(100, bigQueryStatement.getUpdateCount());
+    assertEquals(100L, bigQueryStatement.getLargeUpdateCount());
+  }
+
+  @Test
+  public void testSetExtraLabels() {
+    Map<String, String> extraLabels = new HashMap<>();
+    extraLabels.put("extraKey1", "extraVal1");
+    bigQueryStatement.setExtraLabels(extraLabels);
+    assertEquals(extraLabels, bigQueryStatement.getExtraLabels());
+  }
+
+  @Test
+  public void testGetJobConfigWithExtraLabels() {
+    Map<String, String> extraLabels = new HashMap<>();
+    extraLabels.put("extraKey1", "extraVal1");
+    extraLabels.put("key1", "overrideVal1"); // Override connection label
+    bigQueryStatement.setExtraLabels(extraLabels);
+
+    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
+    Map<String, String> expectedLabels = new HashMap<>();
+    expectedLabels.put("key1", "overrideVal1");
+    expectedLabels.put("key2", "val2");
+    expectedLabels.put("key3", "val3");
+    expectedLabels.put("extraKey1", "extraVal1");
+
+    assertTrue(Maps.difference(expectedLabels, jobConfig.getLabels()).areEqual());
+  }
+
+  @Test
+  public void testJoblessQuery() throws SQLException, InterruptedException {
+    // 1. Test JobCreationMode=2 (jobless)
+    doReturn(true).when(bigQueryConnection).getUseStatelessQueryMode();
+    BigQueryStatement joblessStatement = new BigQueryStatement(bigQueryConnection);
+    BigQueryStatement joblessStatementSpy = Mockito.spy(joblessStatement);
+
+    TableResult tableResultMock = mock(TableResult.class);
+    doReturn("queryId").when(tableResultMock).getQueryId();
+    doReturn(null).when(tableResultMock).getJobId();
+    doReturn(tableResultMock)
+        .when(bigquery)
+        .queryWithTimeout(any(QueryJobConfiguration.class), any(), any());
+    doReturn(mock(BigQueryJsonResultSet.class))
+        .when(joblessStatementSpy)
+        .processJsonResultSet(tableResultMock);
+
+    Job dryRunJobMock = getJobMock(null, null, StatementType.SELECT);
+    ArgumentCaptor<JobInfo> dryRunCaptor = ArgumentCaptor.forClass(JobInfo.class);
+    doReturn(dryRunJobMock).when(bigquery).create(dryRunCaptor.capture());
+
+    joblessStatementSpy.executeQuery("SELECT 1");
+
+    verify(bigquery).queryWithTimeout(any(QueryJobConfiguration.class), any(), any());
+    verify(bigquery).create(any(JobInfo.class));
+    assertTrue(
+        Boolean.TRUE.equals(
+            ((QueryJobConfiguration) dryRunCaptor.getValue().getConfiguration()).dryRun()));
+
+    // 2. Test JobCreationMode=1 (jobful)
+    Mockito.reset(bigquery);
+    doReturn(false).when(bigQueryConnection).getUseStatelessQueryMode();
+    BigQueryStatement jobfulStatement = new BigQueryStatement(bigQueryConnection);
+    BigQueryStatement jobfulStatementSpy = Mockito.spy(jobfulStatement);
+
+    TableResult tableResultJobfulMock = mock(TableResult.class);
+    QueryJobConfiguration jobConf = QueryJobConfiguration.newBuilder("SELECT 1").build();
+    Job jobMock = getJobMock(tableResultJobfulMock, jobConf, StatementType.SELECT);
+    ArgumentCaptor<JobInfo> jobfulCaptor = ArgumentCaptor.forClass(JobInfo.class);
+    doReturn(jobMock).when(bigquery).create(jobfulCaptor.capture());
+    doReturn(mock(BigQueryJsonResultSet.class))
+        .when(jobfulStatementSpy)
+        .processJsonResultSet(tableResultJobfulMock);
+
+    jobfulStatementSpy.executeQuery("SELECT 1");
+
+    verify(bigquery).create(any(JobInfo.class));
+    assertTrue(
+        jobfulCaptor.getAllValues().stream()
+            .noneMatch(
+                jobInfo ->
+                    Boolean.TRUE.equals(
+                        ((QueryJobConfiguration) jobInfo.getConfiguration()).dryRun())));
+    verify(bigquery, Mockito.never())
+        .queryWithTimeout(any(QueryJobConfiguration.class), any(), any());
+  }
+
+  @Test
+  public void testCloseCancelsJob() throws SQLException, InterruptedException {
+    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);
+    TableResult tableResult = mock(TableResult.class);
+    Schema mockSchema = Schema.of(FieldList.of());
+    doReturn(mockSchema).when(tableResult).getSchema();
+    QueryJobConfiguration queryJobConfiguration =
+        QueryJobConfiguration.newBuilder(query).setPriority(Priority.BATCH).build();
+    Job job = getJobMock(tableResult, queryJobConfiguration, StatementType.SELECT);
+
+    doReturn(job).when(bigquery).create(any(JobInfo.class));
+    doReturn(false).when(bigQueryStatementSpy).useReadAPI(eq(tableResult));
+    doReturn(mock(JobId.class)).when(tableResult).getJobId();
+    Mockito.when(job.getQueryResults(any(QueryResultsOption.class)))
+        .thenAnswer(
+            invocation -> {
+              Thread.sleep(2000);
+              return null;
+            });
+    Thread t =
+        new Thread(
+            () -> {
+              try {
+                bigQueryStatementSpy.runQuery(query, queryJobConfiguration);
+              } catch (Exception e) {
+                // Ignored: the statement is closed from the main thread mid-query.
+              }
+            });
+
+    t.start();
+    // Sleep to allow background thread to call "create".
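+    // The fixed delay is best-effort synchronization with the worker thread, so this test
+    // can be timing-sensitive on slow machines.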
+ Thread.sleep(500); + bigQueryStatementSpy.close(); + t.join(); + verify(bigquery, Mockito.times(1)).cancel(any(JobId.class)); + } + + @Test + public void testCancelWithJoblessQuery() throws SQLException, InterruptedException { + doReturn(true).when(bigQueryConnection).getUseStatelessQueryMode(); + BigQueryStatement joblessStatement = new BigQueryStatement(bigQueryConnection); + BigQueryStatement joblessStatementSpy = Mockito.spy(joblessStatement); + + TableResult tableResultMock = mock(TableResult.class); + doReturn(null).when(tableResultMock).getJobId(); + + doReturn(tableResultMock) + .when(bigquery) + .queryWithTimeout(any(QueryJobConfiguration.class), any(), any()); + + Job dryRunJobMock = getJobMock(null, null, StatementType.SELECT); + doReturn(dryRunJobMock).when(bigquery).create(any(JobInfo.class)); + + BigQueryJsonResultSet resultSetMock = mock(BigQueryJsonResultSet.class); + doReturn(resultSetMock).when(joblessStatementSpy).processJsonResultSet(tableResultMock); + + joblessStatementSpy.executeQuery("SELECT 1"); + + // Pre-check: statement has a result set + assertTrue(joblessStatementSpy.currentResultSet != null); + + joblessStatementSpy.cancel(); + + // Post-check: The result set's close() method was called + verify(resultSetMock).close(); + + // And no backend cancellation was attempted + verify(bigquery, Mockito.never()).cancel(any(JobId.class)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactoryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactoryTest.java new file mode 100644 index 0000000000..d271df0e26 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactoryTest.java @@ -0,0 +1,41 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; + +import java.util.concurrent.ThreadFactory; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryThreadFactoryTest { + + private static ThreadFactory JDBC_THREAD_FACTORY; + + @Before + public void setUp() { + JDBC_THREAD_FACTORY = new BigQueryThreadFactory("BigQuery-Thread-"); + } + + @Test + public void testNewThread() { + assertThat(JDBC_THREAD_FACTORY).isNotNull(); + Thread thread = JDBC_THREAD_FACTORY.newThread(() -> {}); + assertThat(thread).isNotNull(); + assertThat(thread.getName()).startsWith("BigQuery-Thread-"); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerTest.java new file mode 100644 index 0000000000..a758cf15c0 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerTest.java @@ -0,0 +1,100 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException; +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionNotFoundException; +import com.google.cloud.bigquery.jdbc.TestType.Text; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.function.Function; +import org.junit.Test; + +public class BigQueryTypeCoercerTest { + + @Test + public void shouldReturnSameValueWhenTargetTypeIsSameAsSourceType() { + assertThat(BigQueryTypeCoercer.INSTANCE.coerceTo(Integer.class, 56)).isEqualTo(56); + assertThat(BigQueryTypeCoercer.INSTANCE.coerceTo(Long.class, 56L)).isEqualTo(56L); + } + + @Test + public void shouldBeAbleToComposeMultipleCoercions() { + StringToBigDecimal stringToBigDecimal = new StringToBigDecimal(); + BigDecimalToBigInteger bigDecimalToBigInteger = new BigDecimalToBigInteger(); + + Function composedCoercion = + stringToBigDecimal.andThen(bigDecimalToBigInteger); + + BigQueryTypeCoercer bigQueryTypeCoercer = + new BigQueryTypeCoercerBuilder() + .registerTypeCoercion(composedCoercion, String.class, BigInteger.class) + .build(); + + assertThat(bigQueryTypeCoercer.coerceTo(BigInteger.class, "123567.66884")) + .isEqualTo(BigInteger.valueOf(123567)); + } + + @Test + public void shouldThrowCoercionNotFoundException() { + byte[] bytesArray = {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33}; + Text text = new Text(bytesArray); + + BigQueryJdbcCoercionNotFoundException exception = + assertThrows( + BigQueryJdbcCoercionNotFoundException.class, + () -> BigQueryTypeCoercer.INSTANCE.coerceTo(Long.class, text)); + assertThat(exception.getMessage()) + .isEqualTo( + "Coercion not found for " + + "[com.google.cloud.bigquery.jdbc.TestType.Text -> 
java.lang.Long]" + + " conversion"); + } + + @Test + public void shouldThrowCoercionException() { + BigQueryTypeCoercer bigQueryTypeCoercer = + new BigQueryTypeCoercerBuilder() + .registerTypeCoercion(Math::toIntExact, Long.class, Integer.class) + .build(); + BigQueryJdbcCoercionException exception = + assertThrows( + BigQueryJdbcCoercionException.class, + () -> bigQueryTypeCoercer.coerceTo(Integer.class, 2147483648L)); + assertThat(exception.getMessage()).isEqualTo("Coercion error"); + assertThat(exception.getCause()).isInstanceOf(ArithmeticException.class); + } + + private static class StringToBigDecimal implements BigQueryCoercion { + + @Override + public BigDecimal coerce(String value) { + return new BigDecimal(value); + } + } + + private static class BigDecimalToBigInteger implements BigQueryCoercion { + + @Override + public BigInteger coerce(BigDecimal value) { + return value.toBigInteger(); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/FieldValueTypeBigQueryCoercionUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/FieldValueTypeBigQueryCoercionUtilityTest.java new file mode 100644 index 0000000000..456b750ca5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/FieldValueTypeBigQueryCoercionUtilityTest.java @@ -0,0 +1,378 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.FieldValue.Attribute.PRIMITIVE;
+import static com.google.cloud.bigquery.FieldValue.Attribute.RANGE;
+import static com.google.cloud.bigquery.FieldValue.Attribute.RECORD;
+import static com.google.cloud.bigquery.FieldValue.Attribute.REPEATED;
+import static com.google.cloud.bigquery.jdbc.BigQueryTypeCoercionUtility.INSTANCE;
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertThrows;
+
+import com.google.cloud.bigquery.FieldElementType;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.Range;
+import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException;
+import com.google.common.collect.ImmutableList;
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.ZoneId;
+import java.time.temporal.ChronoUnit;
+import java.util.concurrent.TimeUnit;
+import org.junit.Test;
+
+public class FieldValueTypeBigQueryCoercionUtilityTest {
+  private static final FieldValue STRING_VALUE = FieldValue.of(PRIMITIVE, "sample-string");
+  private static final FieldValue INTEGER_VALUE = FieldValue.of(PRIMITIVE, "345");
+  private static final FieldValue FLOAT_VALUE = FieldValue.of(PRIMITIVE, "345.21");
+  private static final FieldValue SHORT_VALUE = FieldValue.of(PRIMITIVE, "345");
+  private static final FieldValue LONG_VALUE = FieldValue.of(PRIMITIVE, "4374218905");
+  private static final FieldValue DOUBLE_VALUE = FieldValue.of(PRIMITIVE, "56453.458393");
+  private static final FieldValue BIG_DECIMAL_VALUE = FieldValue.of(PRIMITIVE, "56453.458393");
+  private static final FieldValue BASE64_ENCODED_VALUE =
+      FieldValue.of(PRIMITIVE, "SGVsbG8gV29ybGQK"); // decodes to "Hello World\n"
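+  // Epoch seconds with a fractional part; the coercion tests below read this via
+  // getTimestampValue() as microseconds since the epoch.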
+ private static final FieldValue TIMESTAMP_VALUE = FieldValue.of(PRIMITIVE, "1408452095.22"); + private static final FieldValue DATE_VALUE = FieldValue.of(PRIMITIVE, "2023-03-13"); + private static final FieldValue TIME_VALUE = FieldValue.of(PRIMITIVE, "23:59:59"); + private static final FieldValue TIME_WITH_NANOSECOND_VALUE = + FieldValue.of(PRIMITIVE, "23:59:59.99999"); + private static final FieldValue TRUE_VALUE = FieldValue.of(PRIMITIVE, "true"); + private static final FieldValue FALSE_VALUE = FieldValue.of(PRIMITIVE, "false"); + private static final FieldValue NULL_VALUE = FieldValue.of(PRIMITIVE, null); + private static final FieldValue INTEGER_ARRAY = + FieldValue.of( + REPEATED, + FieldValueList.of( + ImmutableList.of(FieldValue.of(PRIMITIVE, 1), FieldValue.of(PRIMITIVE, 2)))); + private static final FieldValue RECORD_VALUE = + FieldValue.of( + RECORD, ImmutableList.of(INTEGER_VALUE, STRING_VALUE, TIME_VALUE, INTEGER_ARRAY)); + + private static final Range RANGE_DATE = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .setStart("1970-01-02") + .setEnd("1970-03-04") + .build(); + + private static final Range RANGE_DATETIME = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .setStart("2014-08-19 05:41:35.220000") + .setEnd("2015-09-20 06:41:35.220000") + .build(); + + private static final Range RANGE_TIMESTAMP = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd("2015-09-20 13:41:35.220000+01:00") + .build(); + + private static final FieldValue RANGE_DATE_VALUE = FieldValue.of(RANGE, RANGE_DATE); + private static final FieldValue RANGE_DATE_TIME_VALUE = FieldValue.of(RANGE, RANGE_DATETIME); + private static final FieldValue RANGE_TIMESTAMP_VALUE = FieldValue.of(RANGE, RANGE_TIMESTAMP); + + @Test + public void fieldValueToStringRangeDate() { + String expectedRangeDate = + String.format( + "[%s, %s)", + RANGE_DATE.getStart().getStringValue(), RANGE_DATE.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE_VALUE)).isEqualTo(expectedRangeDate); + } + + @Test + public void rangeDateToString() { + String expectedRangeDate = + String.format( + "[%s, %s)", + RANGE_DATE.getStart().getStringValue(), RANGE_DATE.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE)).isEqualTo(expectedRangeDate); + } + + @Test + public void fieldValueToStringRangeDatetime() { + String expectedRangeDatetime = + String.format( + "[%s, %s)", + RANGE_DATETIME.getStart().getStringValue(), RANGE_DATETIME.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE_TIME_VALUE)) + .isEqualTo(expectedRangeDatetime); + } + + @Test + public void rangeDatetimeToString() { + String expectedRangeDate = + String.format( + "[%s, %s)", + RANGE_DATETIME.getStart().getStringValue(), RANGE_DATETIME.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATETIME)).isEqualTo(expectedRangeDate); + } + + @Test + public void fieldValueToStringRangeTimestamp() { + String expectedRangeTimestamp = + String.format( + "[%s, %s)", + RANGE_TIMESTAMP.getStart().getStringValue(), RANGE_TIMESTAMP.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_TIMESTAMP_VALUE)) + .isEqualTo(expectedRangeTimestamp); + } + + @Test + public void rangeTimestampToString() { + String expectedRangeTimestamp = + String.format( + "[%s, %s)", + 
RANGE_TIMESTAMP.getStart().getStringValue(), RANGE_TIMESTAMP.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_TIMESTAMP)).isEqualTo(expectedRangeTimestamp); + } + + @Test + public void fieldValueToString() { + assertThat(INSTANCE.coerceTo(String.class, STRING_VALUE)).isEqualTo("sample-string"); + } + + @Test + public void fieldValueToStringWhenNull() { + assertThat(INSTANCE.coerceTo(String.class, null)).isNull(); + } + + @Test + public void fieldValueToStringWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(String.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToStringWhenInnerValueIsAnArray() { + assertThat(INSTANCE.coerceTo(String.class, INTEGER_ARRAY)) + .isEqualTo( + "[FieldValue{attribute=PRIMITIVE, value=1, useInt64Timestamps=false}, FieldValue{attribute=PRIMITIVE, value=2, useInt64Timestamps=false}]"); + } + + @Test + public void fieldValueToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, INTEGER_VALUE)).isEqualTo(345); + } + + @Test + public void fieldValueToIntegerWhenNull() { + assertThat(INSTANCE.coerceTo(Integer.class, null)).isEqualTo(0); + } + + @Test + public void fieldValueToIntegerWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Integer.class, NULL_VALUE)).isEqualTo(0); + } + + @Test + public void fieldValueToFloat() { + assertThat(INSTANCE.coerceTo(Float.class, FLOAT_VALUE)).isEqualTo(345.21f); + } + + @Test + public void fieldValueToFloatWhenNull() { + assertThat(INSTANCE.coerceTo(Float.class, null)).isEqualTo(0f); + } + + @Test + public void fieldValueToFloatWhenInnerValueNull() { + assertThat(INSTANCE.coerceTo(Float.class, NULL_VALUE)).isEqualTo(0f); + } + + @Test + public void fieldValueToShort() { + assertThat(INSTANCE.coerceTo(Short.class, SHORT_VALUE)).isEqualTo((short) 345); + } + + @Test + public void fieldValueToShortWhenNull() { + assertThat(INSTANCE.coerceTo(Short.class, null)).isEqualTo((short) 0); + } + + @Test + public void fieldValueToShortWhenInnerValueNull() { + assertThat(INSTANCE.coerceTo(Short.class, NULL_VALUE)).isEqualTo((short) 0); + } + + @Test + public void fieldValueToLong() { + assertThat(INSTANCE.coerceTo(Long.class, LONG_VALUE)).isEqualTo(4374218905L); + } + + @Test + public void fieldValueToLongWhenNull() { + assertThat(INSTANCE.coerceTo(Long.class, null)).isEqualTo(0L); + } + + @Test + public void fieldValueToLongWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Long.class, NULL_VALUE)).isEqualTo(0L); + } + + @Test + public void fieldValueToDouble() { + assertThat(INSTANCE.coerceTo(Double.class, DOUBLE_VALUE)).isEqualTo(56453.458393D); + } + + @Test + public void fieldValueToDoubleWhenNull() { + assertThat(INSTANCE.coerceTo(Double.class, null)).isEqualTo(0D); + } + + @Test + public void fieldValueToDoubleWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Double.class, NULL_VALUE)).isEqualTo(0D); + } + + @Test + public void fieldValueToBigDecimal() { + assertThat(INSTANCE.coerceTo(BigDecimal.class, BIG_DECIMAL_VALUE)) + .isEqualTo(new BigDecimal("56453.458393")); + } + + @Test + public void fieldValueToBigDecimalWhenNull() { + assertThat(INSTANCE.coerceTo(BigDecimal.class, null)).isNull(); + } + + @Test + public void fieldValueToBigDecimalWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(BigDecimal.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToBoolean() { + assertThat(INSTANCE.coerceTo(Boolean.class, TRUE_VALUE)).isTrue(); + assertThat(INSTANCE.coerceTo(Boolean.class, FALSE_VALUE)).isFalse(); + } + + @Test + public void 
fieldValueToBooleanWhenNull() { + assertThat(INSTANCE.coerceTo(Boolean.class, null)).isFalse(); + } + + @Test + public void fieldValueToBooleanWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Boolean.class, NULL_VALUE)).isFalse(); + } + + @Test + public void fieldValueToBytesArray() { + assertThat(INSTANCE.coerceTo(byte[].class, BASE64_ENCODED_VALUE)) + .isEqualTo(new byte[] {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 10}); + } + + @Test + public void fieldValueToBytesArrayWhenNull() { + assertThat(INSTANCE.coerceTo(byte[].class, null)).isNull(); + } + + @Test + public void fieldValueToBytesArrayWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(byte[].class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToTimestamp() { + Instant instant = Instant.EPOCH.plus(TIMESTAMP_VALUE.getTimestampValue(), ChronoUnit.MICROS); + LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC")); + assertThat(INSTANCE.coerceTo(Timestamp.class, TIMESTAMP_VALUE)) + .isEqualTo(Timestamp.valueOf(localDateTime)); + } + + @Test + public void fieldValueToTimestampWhenNull() { + assertThat(INSTANCE.coerceTo(Timestamp.class, null)).isNull(); + } + + @Test + public void fieldValueToTimestampWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Timestamp.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToTime() { + LocalTime expectedTime = LocalTime.of(23, 59, 59); + assertThat(INSTANCE.coerceTo(Time.class, TIME_VALUE)) + .isEqualTo(new Time(TimeUnit.NANOSECONDS.toMillis(expectedTime.toNanoOfDay()))); + LocalTime expectedTimeWithNanos = LocalTime.parse("23:59:59.99999"); + assertThat(INSTANCE.coerceTo(Time.class, TIME_WITH_NANOSECOND_VALUE)) + .isEqualTo(new Time(TimeUnit.NANOSECONDS.toMillis(expectedTimeWithNanos.toNanoOfDay()))); + } + + @Test + public void fieldValueToTimeWhenNull() { + assertThat(INSTANCE.coerceTo(Time.class, null)).isNull(); + } + + @Test + public void fieldValueToTimeWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Time.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToTimeWithInvalidValue() { + FieldValue invalidTime = FieldValue.of(PRIMITIVE, "99:99:99"); + + BigQueryJdbcCoercionException coercionException = + assertThrows( + BigQueryJdbcCoercionException.class, () -> INSTANCE.coerceTo(Time.class, invalidTime)); + assertThat(coercionException.getCause()).isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void fieldValueToDate() { + LocalDate expectedDate = LocalDate.of(2023, 3, 13); + assertThat(INSTANCE.coerceTo(Date.class, DATE_VALUE)).isEqualTo(Date.valueOf(expectedDate)); + } + + @Test + public void fieldValueToDateWhenNull() { + assertThat(INSTANCE.coerceTo(Date.class, null)).isNull(); + } + + @Test + public void fieldValueToDateWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Date.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToObject() { + assertThat(INSTANCE.coerceTo(Object.class, RECORD_VALUE)) + .isEqualTo(ImmutableList.of(INTEGER_VALUE, STRING_VALUE, TIME_VALUE, INTEGER_ARRAY)); + } + + @Test + public void fieldValueToObjectWhenNull() { + assertThat(INSTANCE.coerceTo(Object.class, null)).isNull(); + } + + @Test + public void fieldValueToObjectWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Object.class, NULL_VALUE)).isNull(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java 
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java new file mode 100644 index 0000000000..ab274c8797 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; + +import org.junit.Test; + +public class NullHandlingTest { + + @Test + public void shouldReturnNullForNullByDefault() { + assertThat(BigQueryTypeCoercer.INSTANCE.coerceTo(Integer.class, null)).isNull(); + } + + @Test + public void shouldReturnCustomValueForNull() { + BigQueryTypeCoercer bigQueryTypeCoercer = + new BigQueryTypeCoercerBuilder().registerTypeCoercion(new NullToIntegerCoercion()).build(); + + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, null)).isEqualTo(0); + } + + private static class NullToIntegerCoercion implements BigQueryCoercion { + @Override + public Integer coerce(Void value) { + return 0; // returning zero as the default value + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java new file mode 100644 index 0000000000..bacfdae1da --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java @@ -0,0 +1,80 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import java.sql.SQLException;
+import javax.sql.PooledConnection;
+import org.junit.Test;
+
+public class PooledConnectionDataSourceTest {
+  private static final Long LISTENER_POOL_SIZE = 20L;
+  private static final Long CONNECTION_POOL_SIZE = 20L;
+  private static final Long DEFAULT_LISTENER_POOL_SIZE = 10L;
+  private static final Long DEFAULT_CONNECTION_POOL_SIZE = 10L;
+
+  @Test
+  public void testGetPooledConnection() throws SQLException {
+    String connectionUrl =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";
+
+    BigQueryConnection bqConnection = mock(BigQueryConnection.class);
+    doReturn(connectionUrl).when(bqConnection).getConnectionUrl();
+
+    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
+    pooledDataSource.setConnection(bqConnection);
+
+    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
+    assertNotNull(pooledConnection);
+    assertTrue(pooledConnection instanceof BigQueryPooledConnection);
+    BigQueryPooledConnection bqPooledConnection = (BigQueryPooledConnection) pooledConnection;
+    assertEquals(LISTENER_POOL_SIZE, bqPooledConnection.getListenerPoolSize());
+    assertNotNull(pooledDataSource.getConnectionPoolManager());
+    assertEquals(
+        CONNECTION_POOL_SIZE, pooledDataSource.getConnectionPoolManager().getConnectionPoolSize());
+  }
+
+  @Test
+  public void testGetPooledConnectionNoConnectionUrl() throws SQLException {
+    BigQueryConnection bqConnection = mock(BigQueryConnection.class);
+    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
+    pooledDataSource.setConnection(bqConnection);
+
+    assertThrows(BigQueryJdbcRuntimeException.class, () -> pooledDataSource.getPooledConnection());
+  }
+
+  @Test
+  public void testGetPooledConnectionFailInvalidConnectionUrl() {
+    String connectionUrl =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
+            + "ListenerPoolSize=invalid";
+    BigQueryConnection bqConnection = mock(BigQueryConnection.class);
+    doReturn(connectionUrl).when(bqConnection).getConnectionUrl();
+
+    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
+    pooledDataSource.setConnection(bqConnection);
+
+    assertThrows(NumberFormatException.class, () -> pooledDataSource.getPooledConnection());
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java
new file mode 100644
index 0000000000..d200709c43
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; +import org.junit.Before; +import org.junit.Test; + +public class PooledConnectionListenerTest { + private BigQueryConnection bigQueryConnection; + private static final Long LISTENER_POOL_SIZE = 10L; + private static final Long CONNECTION_POOL_SIZE = 10L; + + @Before + public void setUp() throws IOException, SQLException { + bigQueryConnection = mock(BigQueryConnection.class); + } + + @Test + public void testAddConnectionListener() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + assertNull(listener.getPooledConnection()); + } + + @Test + public void testRemoveConnectionListener() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + assertTrue(pooledConnection.isListenerPooled(listener)); + pooledConnection.removeConnectionEventListener(listener); + assertFalse(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testConnectionHandleClosedByConnection() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + connection.close(); + assertFalse(pooledConnection.inUse()); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertTrue(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testConnectionHandleClosedByPooledConnection() throws SQLException { + 
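// Closing via the PooledConnection itself must notify listeners and return the handle
// to the pool, just like closing the logical Connection does.
+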
BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + pooledConnection.close(); + assertFalse(pooledConnection.inUse()); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertTrue(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testFireConnectionError() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + pooledConnection.fireConnectionError(new SQLException("test")); + assertFalse(pooledConnection.inUse()); + assertTrue(listener.isConnectionPoolEmpty()); + assertFalse(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testGetConnectionWhenPoolEmpty() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + assertNull(listener.getPooledConnection()); + } + + @Test + public void testGetConnectionWhenPoolNonEmpty() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + connection.close(); + assertFalse(pooledConnection.inUse()); + assertFalse(listener.isConnectionPoolEmpty()); + assertTrue(pooledConnection.isListenerPooled(listener)); + + 
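// The connection handed back to the pool on close() should now be retrievable.
+    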
assertNotNull(listener.getPooledConnection()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java new file mode 100644 index 0000000000..3222525c5e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +public class TestType { + public static class Text { + private final byte[] bytes; + + public Text(byte[] bytes) { + this.bytes = bytes; + } + + public byte[] getBytes() { + return bytes; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java new file mode 100644 index 0000000000..5aa41b2975 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java @@ -0,0 +1,21 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.it; + +import com.google.cloud.bigquery.jdbc.BigQueryJdbcBaseTest; + +public class ITBase extends BigQueryJdbcBaseTest {} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java new file mode 100644 index 0000000000..411a488714 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java @@ -0,0 +1,4439 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryError; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlSyntaxErrorException; +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import com.google.cloud.bigquery.jdbc.BigQueryDriver; +import com.google.cloud.bigquery.jdbc.DataSource; +import com.google.cloud.bigquery.jdbc.PooledConnectionDataSource; +import com.google.cloud.bigquery.jdbc.PooledConnectionListener; +import com.google.cloud.bigquery.jdbc.utils.TestUtilities.TestConnectionListener; +import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.math.BigDecimal; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.sql.CallableStatement; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.Date; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.Instant; +import java.time.LocalTime; +import java.util.Arrays; +import java.util.Calendar; +import java.util.HashSet; +import java.util.Properties; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.function.BiFunction; +import javax.sql.PooledConnection; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +public class ITBigQueryJDBCTest extends ITBase { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + static Connection bigQueryConnection; + static BigQuery bigQuery; + static Statement bigQueryStatement; + static Connection bigQueryConnectionNoReadApi; + static Statement bigQueryStatementNoReadApi; + static final String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + static final String session_enabled_connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;EnableSession=1"; + private static final String BASE_QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 order by" + + " trip_distance asc LIMIT %s"; + private static final Random random 
= new Random(); + private static final int randomNumber = random.nextInt(9999); + private static final String DATASET = "JDBC_PRESUBMIT_INTEGRATION_DATASET"; + private static final String DATASET2 = "JDBC_PRESUBMIT_INTEGRATION_DATASET_2"; + private static final String CONSTRAINTS_DATASET = "JDBC_CONSTRAINTS_TEST_DATASET"; + private static final String CONSTRAINTS_TABLE_NAME = "JDBC_CONSTRAINTS_TEST_TABLE"; + private static final String CONSTRAINTS_TABLE_NAME2 = "JDBC_CONSTRAINTS_TEST_TABLE2"; + private static final String CONSTRAINTS_TABLE_NAME3 = "JDBC_CONSTRAINTS_TEST_TABLE3"; + private static final String CALLABLE_STMT_PROC_NAME = "IT_CALLABLE_STMT_PROC_TEST"; + private static final String CALLABLE_STMT_TABLE_NAME = "IT_CALLABLE_STMT_PROC_TABLE"; + private static final String CALLABLE_STMT_PARAM_KEY = "CALL_STMT_PARAM_KEY"; + private static final String CALLABLE_STMT_DML_INSERT_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_INSERT_TEST"; + private static final String CALLABLE_STMT_DML_UPDATE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_UPDATE_TEST"; + private static final String CALLABLE_STMT_DML_DELETE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_DELETE_TEST"; + private static final String CALLABLE_STMT_DML_TABLE_NAME = "IT_CALLABLE_STMT_PROC_DML_TABLE"; + private static final Long DEFAULT_CONN_POOL_SIZE = 10L; + private static final Long CUSTOM_CONN_POOL_SIZE = 5L; + private static final Object EXCEPTION_REPLACEMENT = "EXCEPTION-WAS-RAISED"; + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + return value; + } + + private JsonObject getAuthJson() throws IOException { + final String secret = requireEnvVar("SA_SECRET"); + JsonObject authJson; + // Supporting both formats of SA_SECRET: + // - Local runs can point to a json file + // - Cloud Build has JSON value + try { + InputStream stream = Files.newInputStream(Paths.get(secret)); + InputStreamReader reader = new InputStreamReader(stream); + authJson = JsonParser.parseReader(reader).getAsJsonObject(); + } catch (IOException e) { + authJson = JsonParser.parseString(secret).getAsJsonObject(); + } + assertTrue(authJson.has("client_email")); + assertTrue(authJson.has("private_key")); + assertTrue(authJson.has("project_id")); + return authJson; + } + + private void validateConnection(String connection_uri) throws SQLException { + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @BeforeClass + public static void beforeClass() throws SQLException { + bigQueryConnection = DriverManager.getConnection(connection_uri, new Properties()); + bigQueryStatement = bigQueryConnection.createStatement(); + + Properties noReadApi = new Properties(); + noReadApi.setProperty("EnableHighThroughputAPI", "0"); + bigQueryConnectionNoReadApi = DriverManager.getConnection(connection_uri, noReadApi); + bigQueryStatementNoReadApi = 
bigQueryConnectionNoReadApi.createStatement(); + bigQuery = BigQueryOptions.newBuilder().build().getService(); + } + + @AfterClass + public static void afterClass() throws SQLException { + bigQueryStatement.close(); + bigQueryConnection.close(); + bigQueryStatementNoReadApi.close(); + bigQueryConnectionNoReadApi.close(); + } + + @Test + public void testValidServiceAccountAuthentication() throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + File tempFile = File.createTempFile("auth", ".json"); + tempFile.deleteOnExit(); + Files.write(tempFile.toPath(), authJson.toString().getBytes()); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthPvtKeyPath=" + + tempFile.toPath() + + ";"; + + validateConnection(connection_uri); + } + + @Test + public void testServiceAccountAuthenticationMissingOAuthPvtKeyPath() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;"; + + try { + DriverManager.getConnection(connection_uri); + Assert.fail(); + } catch (BigQueryJdbcRuntimeException ex) { + assertTrue(ex.getMessage().contains("No valid credentials provided.")); + } + } + + @Test + public void testValidServiceAccountAuthenticationOAuthPvtKeyAsPath() + throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + File tempFile = File.createTempFile("auth", ".json"); + tempFile.deleteOnExit(); + Files.write(tempFile.toPath(), authJson.toString().getBytes()); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=;" + + ";OAuthPvtKey=" + + tempFile.toPath() + + ";"; + validateConnection(connection_uri); + } + + @Test + public void testValidServiceAccountAuthenticationViaEmailAndPkcs8Key() + throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=" + + authJson.get("client_email").getAsString() + + ";OAuthPvtKey=" + + authJson.get("private_key").getAsString() + + ";"; + validateConnection(connection_uri); + } + + @Test + public void testValidServiceAccountAuthenticationOAuthPvtKeyAsJson() + throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=;" + + ";OAuthPvtKey=" + + authJson.toString() + + ";"; + validateConnection(connection_uri); + } + + // TODO(kirl): Enable this test when pipeline has p12 secret available. 
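
+  // (The P12 variant below reuses the OAuthType=0 service-account flow, pointing
+  // OAuthPvtKeyPath at the .p12 file instead of a JSON key.)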
+ @Test + @Ignore + public void testValidServiceAccountAuthenticationP12() throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + final String p12_file = requireEnvVar("SA_SECRET_P12"); + + final String connectionUri = + getBaseUri(0, authJson.get("project_id").getAsString()) + .append("OAuthServiceAcctEmail", authJson.get("client_email").getAsString()) + .append("OAuthPvtKeyPath", p12_file) + .toString(); + validateConnection(connectionUri); + } + + @Test + @Ignore + public void testValidGoogleUserAccountAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAuthType=1;OAuthClientId=client_id;OAuthClientSecret=client_secret;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_USER_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + @Ignore + public void testValidExternalAccountAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;" + + "BYOID_AudienceUri=//iam.googleapis.com/projects//locations//workloadIdentityPools//providers/;" + + "BYOID_SubjectTokenType=;BYOID_CredentialSource={\"file\":\"/path/to/file\"};" + + "BYOID_SA_Impersonation_Uri=;BYOID_TokenUri=;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + @Ignore + public void testValidExternalAccountAuthenticationFromFile() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;" + + "OAuthPvtKeyPath=/path/to/file;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + @Ignore + public void testValidExternalAccountAuthenticationRawJson() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;OAuthPvtKey={\n" + + " \"universe_domain\": \"googleapis.com\",\n" + + " \"type\": \"external_account\",\n" + + " \"audience\":" + + " 
\"//iam.googleapis.com/projects//locations//workloadIdentityPools//providers/\",\n" + + " \"subject_token_type\": \"\",\n" + + " \"token_url\": \"\",\n" + + " \"credential_source\": {\n" + + " \"file\": \"/path/to/file\"\n" + + " },\n" + + " \"service_account_impersonation_url\": \"\"\n" + + "};"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + // TODO(farhan): figure out how to programmatically generate an access token and test + @Test + @Ignore + public void testValidPreGeneratedAccessTokenAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=2;OAuthAccessToken=access_token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "PRE_GENERATED_TOKEN", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + // TODO(obada): figure out how to programmatically generate a refresh token and test + @Test + @Ignore + public void testValidRefreshTokenAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=2;OAuthRefreshToken=refresh_token;" + + ";OAuthClientId=client;OAuthClientSecret=secret;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "PRE_GENERATED_TOKEN", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + public void testValidApplicationDefaultCredentialsAuthentication() throws SQLException { + String connection_uri = getBaseUri(3, PROJECT_ID).toString(); + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + connection.close(); + } + + // This test is useing the same client email as a main authorization & impersonation. 
+ // It requires account to have 'tokenCreator' permission, see + // https://cloud.google.com/docs/authentication/use-service-account-impersonation#required-roles + @Test + public void testServiceAccountAuthenticationWithImpersonation() throws IOException, SQLException { + final JsonObject authJson = getAuthJson(); + + String connection_uri = + getBaseUri(0, authJson.get("project_id").getAsString()) + .append("OAuthServiceAcctEmail", authJson.get("client_email").getAsString()) + .append("OAuthPvtKey", authJson.get("private_key").getAsString()) + .append("ServiceAccountImpersonationEmail", authJson.get("client_email").getAsString()) + .toString(); + validateConnection(connection_uri); + } + + // This test uses the same client email for the main authorization and a chain of impersonations. + // It requires the account to have 'tokenCreator' permission on itself. + @Test + public void testServiceAccountAuthenticationWithChainedImpersonation() + throws IOException, SQLException { + final JsonObject authJson = getAuthJson(); + String clientEmail = authJson.get("client_email").getAsString(); + + String connection_uri = + getBaseUri(0, authJson.get("project_id").getAsString()) + .append("OAuthServiceAcctEmail", clientEmail) + .append("OAuthPvtKey", authJson.get("private_key").getAsString()) + .append("ServiceAccountImpersonationEmail", clientEmail) + .append("ServiceAccountImpersonationChain", clientEmail + "," + clientEmail) + .toString(); + validateConnection(connection_uri); + } + + @Test + public void testFastQueryPathSmall() throws SQLException { + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(850, resultSetRowCount(jsonResultSet)); + } + + @Test + public void testSmallSelectAndVerifyResults() throws SQLException { + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` WHERE" + + " repository_name LIKE 'X%' LIMIT 10"; + + ResultSet resultSet = bigQueryStatement.executeQuery(query); + int rowCount = 0; + while (resultSet.next()) { + assertTrue(resultSet.getString(1).startsWith("X")); + rowCount++; + } + assertEquals(10, rowCount); + } + + @Test + // reads without using ReadAPI and makes sure that they are in order, which implies threads worked + // correctly + public void testIterateOrderJsonMultiThread_NoReadApi() throws SQLException { + int expectedCnt = 10000; + String query = String.format(BASE_QUERY, expectedCnt); + ResultSet rs = bigQueryStatementNoReadApi.executeQuery(query); + int cnt = 0; + double oldTriDis = 0.0d; + while (rs.next()) { + double tripDis = rs.getDouble("trip_distance"); + ++cnt; + assertTrue(oldTriDis <= tripDis); + oldTriDis = tripDis; + } + assertEquals(expectedCnt, cnt); // all the records were retrieved + } + + @Test + public void testInvalidQuery() throws SQLException { + String query = "SELECT *"; + + try { + bigQueryStatement.executeQuery(query); + Assert.fail(); + } catch (BigQueryJdbcException e) { + assertTrue(e.getMessage().contains("SELECT * must have a FROM clause")); + } + } + + @Test + public void testDriver() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + 
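// Driver#connect is exercised directly here, rather than through DriverManager.
+    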
Connection connection = driver.connect(connection_uri, new Properties()); + assertNotNull(connection); + Statement st = connection.createStatement(); + boolean rs = + st.execute("Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(rs); + connection.close(); + } + + @Test + public void testDefaultDataset() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;DEFAULTDATASET=testDataset"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertNotNull(connection); + assertEquals( + DatasetId.of("testDataset"), ((BigQueryConnection) connection).getDefaultDataset()); + + String connection_uri_null_default_dataset = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + assertTrue(driver.acceptsURL(connection_uri_null_default_dataset)); + + Connection connection2 = driver.connect(connection_uri_null_default_dataset, new Properties()); + assertNotNull(connection2); + assertNull(((BigQueryConnection) connection2).getDefaultDataset()); + connection.close(); + connection2.close(); + } + + @Test + public void testDefaultDatasetWithProject() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;DEFAULTDATASET=" + + PROJECT_ID + + ".testDataset"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertNotNull(connection); + assertEquals( + DatasetId.of(PROJECT_ID, "testDataset"), + ((BigQueryConnection) connection).getDefaultDataset()); + connection.close(); + } + + @Test + public void testLocation() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;LOCATION=EU"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertEquals(((BigQueryConnection) connection).getLocation(), "EU"); + + Statement statement = connection.createStatement(); + + // Query a dataset in the EU + String query = + "SELECT name FROM `bigquery-public-data.covid19_italy_eu.data_by_province` LIMIT 100"; + ResultSet resultSet = statement.executeQuery(query); + assertEquals(100, resultSetRowCount(resultSet)); + + String connection_uri_null_location = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + assertTrue(driver.acceptsURL(connection_uri_null_location)); + + Connection connection2 = driver.connect(connection_uri_null_location, new Properties()); + assertNotNull(connection2); + assertNull(((BigQueryConnection) connection2).getLocation()); + connection.close(); + connection2.close(); + } + + @Test + public void testIncorrectLocation() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;LOCATION=europe-west3"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + + Connection connection = driver.connect(connection_uri, new Properties()); + 
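// The mismatched location is accepted at connect time; the error only surfaces
+    // below when querying a dataset that lives outside europe-west3.
+    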
assertEquals(((BigQueryConnection) connection).getLocation(), "europe-west3"); + + // Query a dataset in the US + Statement statement = connection.createStatement(); + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + BigQueryJdbcException ex = + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(query)); + BigQueryError error = ex.getBigQueryException().getError(); + assertNotNull(error); + assertEquals("accessDenied", error.getReason()); + connection.close(); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityUnsupportedTypeForwardOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, 1, 1)); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityUnsupportedConcurReadOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(1, ResultSet.CONCUR_READ_ONLY, 1)); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityUnsupportedCloseCursorsAtCommit() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(1, 1, ResultSet.CLOSE_CURSORS_AT_COMMIT)); + } + + @Test + public void testCreateStatementWithResultSetConcurrencyUnsupportedTypeForwardOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, 1)); + } + + @Test + public void testCreateStatementWithResultSetConcurrencyUnsupportedConcurReadOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(1, ResultSet.CONCUR_READ_ONLY)); + } + + @Test + public void testSetTransactionIsolationToNotSerializableThrowsNotSupported() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE + 1)); + } + + @Test + public void testSetHoldabilityForNonCloseCursorsThrowsNotSupported() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> connection.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT + 1)); + connection.close(); + } + + @Test + public void testCreateStatementWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, connection::createStatement); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityWhenConnectionClosedThrows() + throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.createStatement(1, 1, 1)); + } + + @Test + public void testCreateStatementWithResultSetConcurrencyWhenConnectionClosedThrows() + throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.createStatement(1, 1)); + } + + @Test + public void 
testSetAutoCommitWithClosedConnectionThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.setAutoCommit(true)); + } + + @Test + public void testSetCommitToFalseWithoutSessionEnabledThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(connection_uri); + assertThrows(IllegalStateException.class, () -> connection.setAutoCommit(false)); + connection.close(); + } + + @Test + public void testCommitWithConnectionClosedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.close(); + assertThrows(IllegalStateException.class, connection::commit); + } + + @Test + public void testCommitToFalseWithoutSessionEnabledThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(connection_uri); + assertThrows(IllegalStateException.class, connection::commit); + connection.close(); + } + + @Test + public void testCommitWithNoTransactionStartedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + assertThrows(IllegalStateException.class, connection::commit); + connection.close(); + } + + @Test + public void testRollbackWithConnectionClosedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.close(); + assertThrows(IllegalStateException.class, connection::rollback); + } + + @Test + public void testRollbackToFalseWithoutSessionEnabledThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(connection_uri); + assertThrows(IllegalStateException.class, connection::rollback); + connection.close(); + } + + @Test + public void testRollbackWithoutTransactionStartedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + assertThrows(IllegalStateException.class, connection::rollback); + connection.close(); + } + + @Test + public void testGetLocationWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows( + IllegalStateException.class, () -> ((BigQueryConnection) connection).getLocation()); + connection.close(); + } + + @Test + public void testGetDefaultDatasetWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows( + IllegalStateException.class, () -> ((BigQueryConnection) connection).getDefaultDataset()); + } + + @Test + public void testGetAutocommitWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, connection::getAutoCommit); + } + + @Test + public 
void testSetAutocommitWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.setAutoCommit(true)); + } + + @Test + public void testExecuteUpdate() throws SQLException { + String TABLE_NAME = "JDBC_EXECUTE_UPDATE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) " + + "VALUES ('string1',111 ), ('string2',111 ), ('string3',222 ), ('string4',333 );", + DATASET, TABLE_NAME); + String updateQuery = + String.format( + "UPDATE %s.%s SET StringField='Jane Doe' WHERE IntegerField=111", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + int createStatus = bigQueryStatement.executeUpdate(createQuery); + assertEquals(0, createStatus); + + int insertStatus = bigQueryStatement.executeUpdate(insertQuery); + assertEquals(4, insertStatus); + + bigQueryStatement.executeQuery(selectQuery); + int selectStatus = bigQueryStatement.getUpdateCount(); + assertEquals(-1, selectStatus); + + int updateStatus = bigQueryStatement.executeUpdate(updateQuery); + assertEquals(2, updateStatus); + + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testExecuteQueryWithInsert() throws SQLException { + String TABLE_NAME = "JDBC_EXECUTE_UPDATE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + + assertEquals(0, bigQueryStatement.executeUpdate(createQuery)); + assertThrows(BigQueryJdbcException.class, () -> bigQueryStatement.executeQuery(dropQuery)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testExecuteQueryWithMultipleReturns() throws SQLException { + String query = + String.format("SELECT * FROM bigquery-public-data.samples.github_timeline LIMIT 1;"); + + assertThrows(BigQueryJdbcException.class, () -> bigQueryStatement.executeQuery(query + query)); + } + + @Test + public void testExecuteUpdateWithSelect() throws SQLException { + String selectQuery = + String.format("SELECT * FROM bigquery-public-data.samples.github_timeline LIMIT 1;"); + + assertThrows(BigQueryJdbcException.class, () -> bigQueryStatement.executeUpdate(selectQuery)); + } + + @Test + public void testExecuteMethod() throws SQLException { + + String TABLE_NAME = "JDBC_EXECUTE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) " + + "VALUES ('string1',111 ), ('string2',111 ), ('string3',222 ), ('string4',333 );", + DATASET, TABLE_NAME); + String updateQuery = + String.format( + "UPDATE %s.%s SET StringField='Jane Doe' WHERE IntegerField=111", DATASET, TABLE_NAME); + String 
dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + boolean createStatus = bigQueryStatement.execute(createQuery); + assertFalse(createStatus); + + boolean insertStatus = bigQueryStatement.execute(insertQuery); + assertFalse(insertStatus); + + boolean selectStatus = bigQueryStatement.execute(selectQuery); + assertTrue(selectStatus); + int selectCount = bigQueryStatement.getUpdateCount(); + assertEquals(-1, selectCount); + ResultSet resultSet = bigQueryStatement.getResultSet(); + assertNotNull(resultSet); + + boolean updateStatus = bigQueryStatement.execute(updateQuery); + assertFalse(updateStatus); + + boolean dropStatus = bigQueryStatement.execute(dropQuery); + assertFalse(dropStatus); + } + + @Test + public void testPreparedExecuteMethod() throws SQLException { + + String TABLE_NAME = "JDBC_PREPARED_EXECUTE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) VALUES (?,?), (?,?), (?,?), (?,?);", + DATASET, TABLE_NAME); + String updateQuery = + String.format("UPDATE %s.%s SET StringField=? WHERE IntegerField=?", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT ? FROM %s.%s", DATASET, TABLE_NAME); + + boolean createStatus = bigQueryStatement.execute(createQuery); + assertFalse(createStatus); + + PreparedStatement insertStmt = bigQueryConnection.prepareStatement(insertQuery); + insertStmt.setString(1, "String1"); + insertStmt.setInt(2, 111); + insertStmt.setString(3, "String2"); + insertStmt.setInt(4, 222); + insertStmt.setString(5, "String3"); + insertStmt.setInt(6, 333); + insertStmt.setString(7, "String4"); + insertStmt.setInt(8, 444); + + boolean insertStatus = insertStmt.execute(); + assertFalse(insertStatus); + + PreparedStatement selectStmt = bigQueryConnection.prepareStatement(selectQuery); + selectStmt.setString(1, "StringField"); + boolean selectStatus = selectStmt.execute(); + assertTrue(selectStatus); + + int selectCount = selectStmt.getUpdateCount(); + assertEquals(-1, selectCount); + ResultSet resultSet = selectStmt.getResultSet(); + assertNotNull(resultSet); + + PreparedStatement updateStmt = bigQueryConnection.prepareStatement(updateQuery); + updateStmt.setString(1, "Jane Doe"); + updateStmt.setInt(2, 222); + boolean updateStatus = updateStmt.execute(); + assertFalse(updateStatus); + + boolean dropStatus = bigQueryStatement.execute(dropQuery); + assertFalse(dropStatus); + } + + @Test + public void testPreparedStatementThrowsSyntaxError() throws SQLException { + String TABLE_NAME = "JDBC_PREPARED_SYNTAX_ERR_TABLE_" + randomNumber; + String createQuery = + String.format("CREATE OR REPLACE TABLE %s.%s (? STRING, ? 
INTEGER);", DATASET, TABLE_NAME); + + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(createQuery); + preparedStatement.setString(1, "StringField"); + preparedStatement.setString(2, "IntegerField"); + assertThrows(BigQueryJdbcSqlSyntaxErrorException.class, preparedStatement::execute); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testPreparedStatementThrowsJdbcException() throws SQLException { + String TABLE_NAME = "JDBC_PREPARED_MISSING_PARAM_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (StringField STRING, IntegerField INTEGER);", + DATASET, TABLE_NAME); + boolean createStatus = bigQueryStatement.execute(createQuery); + assertFalse(createStatus); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) " + "VALUES (?,?), (?,?);", + DATASET, TABLE_NAME); + PreparedStatement insertStmt = bigQueryConnection.prepareStatement(insertQuery); + insertStmt.setString(1, "String1"); + insertStmt.setInt(2, 111); + assertThrows(BigQueryJdbcException.class, insertStmt::execute); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testSetFetchDirectionFetchReverseThrowsUnsupported() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryStatement.setFetchDirection(ResultSet.FETCH_REVERSE)); + } + + @Test + public void testSetFetchDirectionFetchUnknownThrowsUnsupported() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryStatement.setFetchDirection(ResultSet.FETCH_UNKNOWN)); + } + + @Test + public void testExecuteBatchQueryTypeSelectThrowsUnsupported() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` WHERE" + + " repository_name LIKE 'X%' LIMIT 10"; + Statement statement = connection.createStatement(); + + assertThrows(IllegalArgumentException.class, () -> statement.addBatch(query)); + connection.close(); + } + + @Test + public void testValidExecuteBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + int[] results; + for (int i = 0; i < 3; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + assertEquals(3, results.length); + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testAddBatchWithoutSemicolon() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MISSING_SEMICOLON_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', 4)"; + statement.addBatch(insertQuery); + statement.addBatch(insertQuery); + int[] results = statement.executeBatch(); + + // assertions + assertEquals(2, results.length); + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testEmptySqlToAddBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EMPTY_EXECUTE_BATCH_TABLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + String emptySql = ""; + statement.addBatch(emptySql); + int[] results = statement.executeBatch(); + + // assertions + assertEquals(0, results.length); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testEmptyExecuteBatch() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + int[] result = statement.executeBatch(); + + assertEquals(0, result.length); + connection.close(); + } + + @Test + public void testNonValidStatementTypeForAddBatchThrows() { + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_NON_VALID_TYPE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + assertThrows( + IllegalArgumentException.class, () -> bigQueryStatement.addBatch(createBatchTable)); + } + + @Test + public void testAllValidStatementTypesForAddBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_ALL_VALID_TYPES_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + 
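// A single batch mixing INSERT, UPDATE and DELETE: each statement below should
+    // report exactly one affected row.
+    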
bigQueryStatement.execute(createBatchTable); + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + "); "; + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 13 WHERE age = %s;", DATASET, BATCH_TABLE, randomNumber); + String deleteQuery = + String.format("DELETE FROM %s.%s WHERE name='Farhan';", DATASET, BATCH_TABLE); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + statement.addBatch(insertQuery); + statement.addBatch(updateQuery); + statement.addBatch(deleteQuery); + int[] results = statement.executeBatch(); + + // assertion + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testUnsupportedHTAPIFallbacksToStandardQueriesWithRange() throws SQLException { + String selectQuery = "select * from `DATATYPERANGETEST.RangeIntervalTestTable` LIMIT 5000;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=1;UnsupportedHTAPIFallback=1;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet); + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(3, resultSetMetaData.getColumnCount()); + connection.close(); + } + + @Test + public void testIntervalDataTypeWithArrowResultSet() throws SQLException { + String selectQuery = + "select * from `DATATYPERANGETEST.RangeIntervalTestTable` order by intColumn limit 5000;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=1;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + + ResultSet resultSet = statement.executeQuery(selectQuery); + assertTrue(resultSet.getClass().getName().contains("BigQueryArrowResultSet")); + resultSet.next(); + assertEquals("0-0 10 -12:30:0.0", resultSet.getString("intervalField")); + + // cleanup + connection.close(); + } + + @Test + public void testIntervalDataTypeWithJsonResultSet() throws SQLException { + String selectQuery = + "select * from `DATATYPERANGETEST.RangeIntervalTestTable` order by intColumn limit 10 ;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=0;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + + ResultSet resultSet = statement.executeQuery(selectQuery); + 
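// EnableHighThroughputAPI=0 forces the JSON result path instead of the Storage Read API.
+    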
assertTrue(resultSet.getClass().getName().contains("BigQueryJsonResultSet")); + resultSet.next(); + assertEquals("0-0 10 -12:30:0", resultSet.getString("intervalField")); + + // cleanup + connection.close(); + } + + @Test + public void testValidLEPEndpointQuery() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://us-east4-bigquery.googleapis.com;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet.getMetaData()); + connection.close(); + } + + @Test + public void testValidEndpointWithInvalidBQPortThrows() throws SQLException { + String TABLE_NAME = "JDBC_REGIONAL_TABLE_" + randomNumber; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://us-east4-bigquery.googleapis.com:12312312;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(selectQuery)); + connection.close(); + } + + @Test + public void testLEPEndpointDataNotFoundThrows() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://us-east5-bigquery.googleapis.com;"; + + // Attempting read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(selectQuery)); + connection.close(); + } + + @Test + public void testValidREPEndpointQuery() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://bigquery.us-east4.rep.googleapis.com;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet.getMetaData()); + connection.close(); + } + + @Test + public void testREPEndpointDataNotFoundThrows() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://bigquery.us-east7.rep.googleapis.com;"; + + // Attempting read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(selectQuery)); + connection.close(); + } + + @Test + public void testCloseStatement() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10"; + Statement statement = bigQueryConnection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertEquals(10, resultSetRowCount(jsonResultSet)); + statement.close(); + assertTrue(statement.isClosed()); + } + + @Test + public void testCloseableStatementSingleResult() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10"; + Statement statement = bigQueryConnection.createStatement(); + statement.closeOnCompletion(); + assertTrue(statement.isCloseOnCompletion()); + ResultSet jsonResultSet = statement.executeQuery(query); + assertFalse(statement.isClosed()); + jsonResultSet.close(); + assertTrue(statement.isClosed()); + } + + @Test + public void testCloseableStatementMultiResult() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10;"; + Statement statement = bigQueryConnection.createStatement(); + statement.closeOnCompletion(); + assertTrue(statement.isCloseOnCompletion()); + statement.execute(query + query); + assertNotNull(statement.getResultSet()); + assertFalse(statement.isClosed()); + + assertTrue(statement.getMoreResults()); + assertNotNull(statement.getResultSet()); + assertFalse(statement.isClosed()); + + assertFalse(statement.getMoreResults()); + assertTrue(statement.isClosed()); + } + + @Test + public void testCloseableStatementMultiResultExplicitClose() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10;"; + Statement statement = bigQueryConnection.createStatement(); + statement.closeOnCompletion(); + assertTrue(statement.isCloseOnCompletion()); + statement.execute(query + query); + ResultSet result = statement.getResultSet(); + result.close(); + assertFalse(statement.isClosed()); + + assertTrue(statement.getMoreResults()); + result = statement.getResultSet(); + result.close(); + assertTrue(statement.isClosed()); + } + + @Test + public void testConnectionIsValid() throws SQLException { + assertTrue(bigQueryConnection.isValid(10)); + assertTrue(bigQueryConnectionNoReadApi.isValid(10)); + } + + @Test + public void testDataSource() throws SQLException { + DataSource ds = new DataSource(); + ds.setURL("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"); + ds.setOAuthType(3); + + try (Connection connection = ds.getConnection()) { + assertFalse(connection.isClosed()); + } + } + + @Test + public void testDataSourceOAuthPvtKeyPath() throws SQLException, IOException { + File tempFile = File.createTempFile("auth", ".json"); + tempFile.deleteOnExit(); + DataSource ds = new DataSource(); + ds.setURL("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"); + ds.setOAuthType(0); + ds.setOAuthPvtKeyPath(tempFile.toPath().toString()); + assertEquals(0, ds.getOAuthType()); + assertEquals(tempFile.toPath().toString(), 
ds.getOAuthPvtKeyPath()); + } + + @Test + public void testPreparedStatementSmallSelect() throws SQLException { + String query = + "SELECT * FROM `bigquery-public-data.samples.github_timeline` where repository_language=?" + + " LIMIT 1000"; + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(query); + preparedStatement.setString(1, "Java"); + + ResultSet jsonResultSet = preparedStatement.executeQuery(); + + int rowCount = resultSetRowCount(jsonResultSet); + assertEquals(1000, rowCount); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + } + + @Test + public void testPreparedStatementExecuteUpdate() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "Inventory" + random.nextInt(9999); + String TABLE_NAME2 = "DetailedInventory" + random.nextInt(9999); + + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER);", + DATASET, TABLE_NAME1); + + String createQuery2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER," + + " `supply_constrained` BOOLEAN, `comment` STRING);", + DATASET, TABLE_NAME2); + + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (product, quantity, supply_constrained, comment) " + + "VALUES ('countertop microwave', 20, NULL,'[]' )," + + " ('front load washer', 20, false,'[]' ), " + + " ('microwave', 20, false,'[]' ), " + + " ('refrigerator', 10, false,'[]' );", + DATASET, TABLE_NAME2); + + bigQueryStatement.execute(createQuery); + bigQueryStatement.execute(createQuery2); + bigQueryStatement.execute(insertQuery2); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (product, quantity) " + "VALUES (?,? ), (?,? );", + DATASET, TABLE_NAME1); + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 30); + insertPs.setString(3, "dryer"); + insertPs.setInt(4, 30); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(2, insertStatus); + + String updateQuery = + String.format("UPDATE %s.%s SET quantity=? WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement updatePs = bigQueryConnection.prepareStatement(updateQuery); + updatePs.setString(2, "dryer"); + updatePs.setInt(1, 35); + + int updateStatus = updatePs.executeUpdate(); + assertEquals(1, updateStatus); + + String deleteQuery = String.format("DELETE FROM %s.%s WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement deletePs = bigQueryConnection.prepareStatement(deleteQuery); + deletePs.setString(1, "dishwasher"); + + int deleteStatus = deletePs.executeUpdate(); + assertEquals(1, deleteStatus); + + String mergeQuery = + String.format( + "MERGE %s.%s T\n" + + "USING %s.%s S\n" + + "ON T.product = S.product\n" + + "WHEN NOT MATCHED AND quantity < ? 
THEN\n" + + " INSERT(product, quantity, supply_constrained, comment)\n" + + " VALUES(product, quantity, true, ?)\n" + + "WHEN NOT MATCHED THEN\n" + + " INSERT(product, quantity, supply_constrained)\n" + + " VALUES(product, quantity, false)", + DATASET, TABLE_NAME2, DATASET, TABLE_NAME1); + PreparedStatement mergePs = bigQueryConnection.prepareStatement(mergeQuery); + mergePs.setInt(1, 20); + mergePs.setString(2, "comment" + random.nextInt(999)); + + int mergeStatus = mergePs.executeUpdate(); + assertEquals(1, mergeStatus); + + ResultSet rs = + bigQueryStatement.executeQuery( + String.format("SELECT COUNT(*) AS row_count\n" + "FROM %s.%s", DATASET, TABLE_NAME2)); + rs.next(); + assertEquals(5, rs.getInt(1)); + + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1); + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME2)); + } + + @Test + public void testPreparedStatementDateTimeValues() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "DateTimeTestTable" + random.nextInt(9999); + + final String createTableQuery = + "CREATE OR REPLACE TABLE " + + " `%s.%s` " + + " (\n" + + "`StringField` STRING,\n" + + "`IntegerField` INTEGER,\n" + + "`TimestampField` TIMESTAMP,\n" + + "`TimeField` TIME,\n" + + "`DateField` DATE\n" + + ");"; + + String insertQuery = + String.format("INSERT INTO %s.%s VALUES (?,?,?,?,? );", DATASET, TABLE_NAME1); + + bigQueryStatement.execute(String.format(createTableQuery, DATASET, TABLE_NAME1)); + + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 1); + insertPs.setTimestamp(3, Timestamp.from(Instant.now())); + insertPs.setTime(4, Time.valueOf(LocalTime.NOON)); + insertPs.setDate(5, Date.valueOf("2025-12-3")); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(1, insertStatus); + + ResultSet rs = + bigQueryStatement.executeQuery( + String.format("SELECT COUNT(*) AS row_count\n" + "FROM %s.%s", DATASET, TABLE_NAME1)); + rs.next(); + assertEquals(1, rs.getInt(1)); + + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1); + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + } + + @Test + public void testValidDestinationTableSavesQueriesWithLegacySQL() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=destination_table_test_legacy;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM `INTEGRATION_TESTS.destination_table_test_legacy`;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertTrue(0 < resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM 
`INTEGRATION_TESTS.destination_table_test_legacy` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + connection.close(); + } + + @Test + public void testValidDestinationTableSavesQueriesWithStandardSQL() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=SQL;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + String selectLegacyQuery = + "SELECT * FROM `bigquery-public-data.deepmind_alphafold.metadata` LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM INTEGRATION_TESTS.destination_table_test;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(200, resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + connection.close(); + } + + @Test + public void testDestinationTableAndDestinationDatasetThatDoesNotExistsCreates() + throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=FakeTable;" + + "LargeResultDataset=FakeDataset;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String separateQuery = "SELECT * FROM FakeDataset.FakeTable;"; + boolean result = bigQueryStatement.execute(separateQuery); + assertTrue(result); + + // clean up + bigQueryStatement.execute("DROP SCHEMA FakeDataset CASCADE;"); + connection.close(); + } + + @Test + public void testDestinationTableWithMissingDestinationDatasetDefaults() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=FakeTable;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String separateQuery = "SELECT * FROM _google_jdbc.FakeTable;"; + boolean result = bigQueryStatement.execute(separateQuery); + assertTrue(result); + connection.close(); + } + + @Test + public void testNonSelectForLegacyDestinationTableThrows() throws SQLException { + // setup + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + 
String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows(BigQueryJdbcException.class, () -> statement.execute(createTransactionTable)); + connection.close(); + } + + @Test + public void testNonSelectForStandardDestinationTableDoesNotThrow() throws SQLException { + // setup + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=SQL;" + + "AllowLargeResults=1;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + statement.execute(createTransactionTable); + connection.close(); + } + + @Test + public void testTableConstraints() throws SQLException { + ResultSet primaryKey1 = + bigQueryConnection + .getMetaData() + .getPrimaryKeys(PROJECT_ID, CONSTRAINTS_DATASET, CONSTRAINTS_TABLE_NAME); + primaryKey1.next(); + assertEquals("id", primaryKey1.getString(4)); + assertFalse(primaryKey1.next()); + + ResultSet primaryKey2 = + bigQueryConnection + .getMetaData() + .getPrimaryKeys(PROJECT_ID, CONSTRAINTS_DATASET, CONSTRAINTS_TABLE_NAME2); + primaryKey2.next(); + assertEquals("first_name", primaryKey2.getString(4)); + primaryKey2.next(); + assertEquals("last_name", primaryKey2.getString(4)); + assertFalse(primaryKey2.next()); + + ResultSet foreignKeys = + bigQueryConnection + .getMetaData() + .getImportedKeys(PROJECT_ID, CONSTRAINTS_DATASET, CONSTRAINTS_TABLE_NAME); + foreignKeys.next(); + assertEquals(CONSTRAINTS_TABLE_NAME2, foreignKeys.getString(3)); + assertEquals("first_name", foreignKeys.getString(4)); + assertEquals("name", foreignKeys.getString(8)); + foreignKeys.next(); + assertEquals(CONSTRAINTS_TABLE_NAME2, foreignKeys.getString(3)); + assertEquals("last_name", foreignKeys.getString(4)); + assertEquals("second_name", foreignKeys.getString(8)); + foreignKeys.next(); + assertEquals(CONSTRAINTS_TABLE_NAME3, foreignKeys.getString(3)); + assertEquals("address", foreignKeys.getString(4)); + assertEquals("address", foreignKeys.getString(8)); + assertFalse(foreignKeys.next()); + + ResultSet crossReference = + bigQueryConnection + .getMetaData() + .getCrossReference( + PROJECT_ID, + CONSTRAINTS_DATASET, + CONSTRAINTS_TABLE_NAME2, + PROJECT_ID, + CONSTRAINTS_DATASET, + CONSTRAINTS_TABLE_NAME); + crossReference.next(); + assertEquals(CONSTRAINTS_TABLE_NAME2, crossReference.getString(3)); + assertEquals("first_name", crossReference.getString(4)); + assertEquals("name", crossReference.getString(8)); + crossReference.next(); + 
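+ // Second cross-reference row: the composite foreign key's last_name -> second_name column pair.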
assertEquals("last_name", crossReference.getString(4)); + assertEquals("second_name", crossReference.getString(8)); + assertFalse(crossReference.next()); + } + + @Test + public void testDatabaseMetadataGetCatalogs() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + try (ResultSet rs = databaseMetaData.getCatalogs()) { + assertNotNull("ResultSet from getCatalogs() should not be null", rs); + + ResultSetMetaData rsmd = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", rsmd); + assertEquals("Should have one column", 1, rsmd.getColumnCount()); + assertEquals("Column name should be TABLE_CAT", "TABLE_CAT", rsmd.getColumnName(1)); + + assertTrue("ResultSet should have one row", rs.next()); + assertEquals("Catalog name should match Project ID", PROJECT_ID, rs.getString("TABLE_CAT")); + assertFalse("ResultSet should have no more rows", rs.next()); + } + } + + @Test + public void testDatabaseMetadataGetProcedures() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String procedureName = "create_customer"; + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + ResultSet resultSet = databaseMetaData.getProcedures(PROJECT_ID, DATASET, procedureName); + while (resultSet.next()) { + assertEquals(PROJECT_ID, resultSet.getString("PROCEDURE_CAT")); + assertEquals(DATASET, resultSet.getString("PROCEDURE_SCHEM")); + assertEquals(procedureName, resultSet.getString("PROCEDURE_NAME")); + assertEquals(procedureName, resultSet.getString("SPECIFIC_NAME")); + assertEquals(DatabaseMetaData.procedureResultUnknown, resultSet.getInt("PROCEDURE_TYPE")); + } + } + + @Test + public void testDatabaseMetadataGetProcedureColumns() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + + // --- Test Case 1: Specific schema and procedure, null column name pattern --- + String specificSchema = "JDBC_INTEGRATION_DATASET"; + String specificProcedure = "create_customer"; + ResultSet resultSet = + databaseMetaData.getProcedureColumns(PROJECT_ID, specificSchema, specificProcedure, null); + int specificProcRows = 0; + boolean foundNameParam = false; + boolean foundIdParam = false; + while (resultSet.next()) { + specificProcRows++; + assertEquals(PROJECT_ID, resultSet.getString("PROCEDURE_CAT")); + assertEquals(specificSchema, resultSet.getString("PROCEDURE_SCHEM")); + assertEquals(specificProcedure, resultSet.getString("PROCEDURE_NAME")); + assertEquals(specificProcedure, resultSet.getString("SPECIFIC_NAME")); + if ("name".equals(resultSet.getString("COLUMN_NAME"))) { + foundNameParam = true; + assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); + } + if ("id".equals(resultSet.getString("COLUMN_NAME"))) { + foundIdParam = true; + assertEquals(2, resultSet.getInt("ORDINAL_POSITION")); + } + } + assertEquals("Should find 2 parameters for " + specificProcedure, 2, specificProcRows); + assertTrue("Parameter 'name' should be found", foundNameParam); + assertTrue("Parameter 'id' should be found", foundIdParam); + resultSet.close(); + + // --- Test Case 2: Specific schema, procedure, and column name pattern --- + String specificColumn = "name"; + resultSet = + databaseMetaData.getProcedureColumns( + PROJECT_ID, specificSchema, specificProcedure, specificColumn); + assertTrue("Should find the specific column 'name'", resultSet.next()); + assertEquals(PROJECT_ID, resultSet.getString("PROCEDURE_CAT")); + assertEquals(specificSchema, resultSet.getString("PROCEDURE_SCHEM")); + 
assertEquals(specificProcedure, resultSet.getString("PROCEDURE_NAME")); + assertEquals(specificColumn, resultSet.getString("COLUMN_NAME")); + assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); + assertEquals( + (short) DatabaseMetaData.procedureColumnUnknown, resultSet.getShort("COLUMN_TYPE")); + assertEquals(java.sql.Types.NVARCHAR, resultSet.getInt("DATA_TYPE")); + assertEquals("NVARCHAR", resultSet.getString("TYPE_NAME")); + assertFalse("Should only find one row for exact column match", resultSet.next()); + resultSet.close(); + + // --- Test Case 3: Non-existent procedure --- + resultSet = + databaseMetaData.getProcedureColumns( + PROJECT_ID, specificSchema, "non_existent_procedure_xyz", null); + assertFalse("Should not find columns for a non-existent procedure", resultSet.next()); + resultSet.close(); + } + + @Test + public void testDatabaseMetadataGetColumns() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_DATATYPES_INTEGRATION_TEST_TABLE"; + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + + // --- Test Case 1: Specific Column (StringField) --- + ResultSet resultSet = + databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "StringField"); + + assertTrue(resultSet.next()); + assertEquals(PROJECT_ID, resultSet.getString("TABLE_CAT")); + assertEquals(DATASET, resultSet.getString("TABLE_SCHEM")); + assertEquals(TABLE_NAME, resultSet.getString("TABLE_NAME")); + assertEquals("StringField", resultSet.getString("COLUMN_NAME")); + assertEquals("NVARCHAR", resultSet.getString("TYPE_NAME")); + resultSet.getObject("COLUMN_SIZE"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(6, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 2: All Columns --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, null); + assertTrue(resultSet.next()); + int count = 0; + do { + count++; + assertEquals(PROJECT_ID, resultSet.getString("TABLE_CAT")); + assertEquals(DATASET, resultSet.getString("TABLE_SCHEM")); + assertEquals(TABLE_NAME, resultSet.getString("TABLE_NAME")); + assertNotNull(resultSet.getString("COLUMN_NAME")); + } while (resultSet.next()); + assertEquals(16, count); + + // --- Test Case 3: Column Name Pattern Matching (%Time%) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "%Time%"); + assertTrue(resultSet.next()); + count = 0; + do { + count++; + String columnName = resultSet.getString("COLUMN_NAME"); + assertTrue(columnName.contains("Time")); + } while (resultSet.next()); + assertEquals(3, count); + + // --- Test Case 4: Column Name Pattern Matching (Integer%) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "Integer%"); + assertTrue(resultSet.next()); + assertEquals("IntegerField", resultSet.getString("COLUMN_NAME")); + assertEquals("BIGINT", resultSet.getString("TYPE_NAME")); + assertEquals(19, resultSet.getInt("COLUMN_SIZE")); + assertEquals(0, resultSet.getInt("DECIMAL_DIGITS")); + assertEquals(10, resultSet.getInt("NUM_PREC_RADIX")); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(2, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 5: Specific Column (BooleanField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "BooleanField"); + assertTrue(resultSet.next()); +
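+ // BOOLEAN reports COLUMN_SIZE 1; DECIMAL_DIGITS and NUM_PREC_RADIX are not meaningful for it and read back as NULL.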
assertEquals("BooleanField", resultSet.getString("COLUMN_NAME")); + assertEquals("BOOLEAN", resultSet.getString("TYPE_NAME")); + assertEquals(1, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 6: Specific Column (NumericField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "NumericField"); + assertTrue(resultSet.next()); + assertEquals("NumericField", resultSet.getString("COLUMN_NAME")); + assertEquals("NUMERIC", resultSet.getString("TYPE_NAME")); + assertEquals(38, resultSet.getInt("COLUMN_SIZE")); + assertEquals(9, resultSet.getInt("DECIMAL_DIGITS")); + assertEquals(10, resultSet.getInt("NUM_PREC_RADIX")); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(4, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 7: Specific Column (BytesField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "BytesField"); + assertTrue(resultSet.next()); + assertEquals("BytesField", resultSet.getString("COLUMN_NAME")); + assertEquals("VARBINARY", resultSet.getString("TYPE_NAME")); + resultSet.getObject("COLUMN_SIZE"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(7, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 8: Specific Column (ArrayField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "ArrayField"); + assertTrue(resultSet.next()); + assertEquals("ArrayField", resultSet.getString("COLUMN_NAME")); + assertEquals("ARRAY", resultSet.getString("TYPE_NAME")); + resultSet.getObject("COLUMN_SIZE"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(9, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 9: Specific Column (TimestampField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "TimestampField"); + assertTrue(resultSet.next()); + assertEquals("TimestampField", resultSet.getString("COLUMN_NAME")); + assertEquals("TIMESTAMP", resultSet.getString("TYPE_NAME")); + assertEquals(29, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(10, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 10: Specific Column (DateField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "DateField"); + assertTrue(resultSet.next()); + assertEquals("DateField", resultSet.getString("COLUMN_NAME")); + assertEquals("DATE", resultSet.getString("TYPE_NAME")); + assertEquals(10, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + 
resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(11, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 11: Specific Column (TimeField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "TimeField"); + assertTrue(resultSet.next()); + assertEquals("TimeField", resultSet.getString("COLUMN_NAME")); + assertEquals("TIME", resultSet.getString("TYPE_NAME")); + assertEquals(15, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(12, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 12: Specific Column (DateTimeField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "DateTimeField"); + assertTrue(resultSet.next()); + assertEquals("DateTimeField", resultSet.getString("COLUMN_NAME")); + assertEquals("TIMESTAMP", resultSet.getString("TYPE_NAME")); + assertEquals(29, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(13, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 13: Specific Column (GeographyField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "GeographyField"); + assertTrue(resultSet.next()); + assertEquals("GeographyField", resultSet.getString("COLUMN_NAME")); + assertEquals("VARCHAR", resultSet.getString("TYPE_NAME")); + resultSet.getObject("COLUMN_SIZE"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(14, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + } + + @Test + public void testDatabaseMetadataGetTables() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String DATASET = "JDBC_TABLE_TYPES_TEST"; + + // --- Test Case 1: Get all tables (types = null) --- + ResultSet rsAll = databaseMetaData.getTables(PROJECT_ID, DATASET, null, null); + Set allTableNames = new HashSet<>(); + while (rsAll.next()) { + allTableNames.add(rsAll.getString("TABLE_NAME")); + } + assertTrue(allTableNames.contains("base_table")); + assertTrue(allTableNames.contains("my_view")); + assertTrue(allTableNames.contains("external_table")); + assertTrue(allTableNames.contains("my_materialized_view")); + assertTrue(allTableNames.contains("base_table_clone")); + assertTrue(allTableNames.contains("base_table_snapshot")); + assertEquals(6, allTableNames.size()); + + // --- Test Case 2: Get only "TABLE" type --- + ResultSet rsTable = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"TABLE"}); + Set tableNames = new HashSet<>(); + while (rsTable.next()) { + tableNames.add(rsTable.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains("base_table")); + assertTrue(tableNames.contains("base_table_clone")); + assertEquals(2, tableNames.size()); + + // --- Test Case 3: Get "VIEW" type --- + ResultSet rsView = 
databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"VIEW"}); + assertTrue(rsView.next()); + assertEquals("my_view", rsView.getString("TABLE_NAME")); + assertEquals("VIEW", rsView.getString("TABLE_TYPE")); + assertFalse(rsView.next()); + + // --- Test Case 4: Get "EXTERNAL" type --- + ResultSet rsExternal = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"EXTERNAL"}); + assertTrue(rsExternal.next()); + assertEquals("external_table", rsExternal.getString("TABLE_NAME")); + assertEquals("EXTERNAL", rsExternal.getString("TABLE_TYPE")); + assertFalse(rsExternal.next()); + + // --- Test Case 5: Get "MATERIALIZED_VIEW" type --- + ResultSet rsMaterialized = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"MATERIALIZED_VIEW"}); + assertTrue(rsMaterialized.next()); + assertEquals("my_materialized_view", rsMaterialized.getString("TABLE_NAME")); + assertEquals("MATERIALIZED_VIEW", rsMaterialized.getString("TABLE_TYPE")); + assertFalse(rsMaterialized.next()); + + // --- Test Case 6: Get "SNAPSHOT" type --- + ResultSet rsSnapshot = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"SNAPSHOT"}); + assertTrue(rsSnapshot.next()); + assertEquals("base_table_snapshot", rsSnapshot.getString("TABLE_NAME")); + assertEquals("SNAPSHOT", rsSnapshot.getString("TABLE_TYPE")); + assertFalse(rsSnapshot.next()); + + // --- Test Case 7: Get multiple types ("TABLE" and "VIEW") --- + ResultSet rsMulti = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"TABLE", "VIEW"}); + Set<String> multiTableNames = new HashSet<>(); + while (rsMulti.next()) { + multiTableNames.add(rsMulti.getString("TABLE_NAME")); + } + assertTrue(multiTableNames.contains("base_table")); + assertTrue(multiTableNames.contains("base_table_clone")); + assertTrue(multiTableNames.contains("my_view")); + assertEquals(3, multiTableNames.size()); + + // --- Test Case 8: tableNamePattern --- + ResultSet rsNamePattern = databaseMetaData.getTables(PROJECT_ID, DATASET, "base%", null); + Set<String> baseTableNames = new HashSet<>(); + while (rsNamePattern.next()) { + baseTableNames.add(rsNamePattern.getString("TABLE_NAME")); + } + assertTrue(baseTableNames.contains("base_table")); + assertTrue(baseTableNames.contains("base_table_clone")); + assertTrue(baseTableNames.contains("base_table_snapshot")); + assertEquals(3, baseTableNames.size()); + + // --- Test Case 9: No matching table --- + ResultSet rsNoMatch = + databaseMetaData.getTables(PROJECT_ID, DATASET, "nonexistent_table", null); + assertFalse(rsNoMatch.next()); + + // --- Test Case 10: Null type in array --- + ResultSet rsNullType = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {null, "VIEW"}); + assertTrue(rsNullType.next()); + assertEquals("VIEW", rsNullType.getString("TABLE_TYPE")); + assertEquals("my_view", rsNullType.getString("TABLE_NAME")); + assertFalse(rsNullType.next()); + } + + @Test + public void testDatabaseMetadataGetSchemas() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + + // Test case 1: Get all schemas with catalog and check for the presence of specific schemas + ResultSet rsAll = databaseMetaData.getSchemas(PROJECT_ID, null); + Set<String> actualSchemas = new HashSet<>(); + while (rsAll.next()) { + assertEquals(PROJECT_ID, rsAll.getString("TABLE_CATALOG")); + actualSchemas.add(rsAll.getString("TABLE_SCHEM")); + } + assertTrue(actualSchemas.contains("JDBC_INTEGRATION_DATASET")); +
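+ // Membership spot-checks: these datasets are assumed to be provisioned in the integration project.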
assertTrue(actualSchemas.contains("JDBC_TABLE_TYPES_TEST")); + assertTrue(actualSchemas.contains("ODBC_TEST_DATASET")); + + // Test case 2: Get schemas with catalog and schemaPattern matching "JDBC_NIGHTLY_IT_DATASET" + ResultSet rsPattern = databaseMetaData.getSchemas(PROJECT_ID, "JDBC_NIGHTLY_IT_DATASET"); + Set actualSchemasPattern = new HashSet<>(); + while (rsPattern.next()) { + assertEquals(PROJECT_ID, rsPattern.getString("TABLE_CATALOG")); + actualSchemasPattern.add(rsPattern.getString("TABLE_SCHEM")); + } + assertTrue(actualSchemasPattern.contains("JDBC_NIGHTLY_IT_DATASET")); + assertEquals(1, actualSchemasPattern.size()); + + // Test case 3: Get schemas with catalog and schemaPattern matching "nonexistent" + ResultSet rsNoMatch = databaseMetaData.getSchemas(PROJECT_ID, "nonexistent"); + assertFalse(rsNoMatch.next()); + + // Test case 4: Get schemas with non-existent catalog + rsNoMatch = databaseMetaData.getSchemas("invalid-catalog", null); + assertFalse(rsNoMatch.next()); + } + + @Test + public void testDatabaseMetadataGetSchemasNoArgs() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String expectedCatalog = bigQueryConnection.getCatalog(); + assertNotNull("Project ID (catalog) from connection should not be null", expectedCatalog); + + // Test case: Get all schemas (datasets) for the current project + try (ResultSet rsAll = databaseMetaData.getSchemas()) { + assertNotNull("ResultSet from getSchemas() should not be null", rsAll); + boolean foundTestDataset = false; + int rowCount = 0; + while (rsAll.next()) { + rowCount++; + assertEquals( + "TABLE_CATALOG should match the connection's project ID", + expectedCatalog, + rsAll.getString("TABLE_CATALOG")); + String schemaName = rsAll.getString("TABLE_SCHEM"); + assertNotNull("TABLE_SCHEM should not be null", schemaName); + if (DATASET.equals(schemaName) + || DATASET2.equals(schemaName) + || CONSTRAINTS_DATASET.equals(schemaName) + || "JDBC_TABLE_TYPES_TEST".equals(schemaName) + || "JDBC_INTEGRATION_DATASET".equals(schemaName)) { + foundTestDataset = true; + } + } + assertTrue("At least one of the known test datasets should be found", foundTestDataset); + assertTrue("Should retrieve at least one schema/dataset", rowCount > 0); + } + } + + @Test + public void testDatabaseMetaDataGetFunctions() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String testSchema = "JDBC_TABLE_TYPES_TEST"; + String testCatalog = PROJECT_ID; + + Set expectedFunctionNames = + new HashSet<>( + Arrays.asList( + "complex_scalar_sql_udf", + "persistent_sql_udf_named_params", + "scalar_js_udf", + "scalar_sql_udf")); + + // Test 1: Get all functions from a specific schema + ResultSet rsAll = databaseMetaData.getFunctions(testCatalog, testSchema, null); + Set foundFunctionNames = new HashSet<>(); + int countAll = 0; + while (rsAll.next()) { + countAll++; + assertEquals(testCatalog, rsAll.getString("FUNCTION_CAT")); + assertEquals(testSchema, rsAll.getString("FUNCTION_SCHEM")); + String funcName = rsAll.getString("FUNCTION_NAME"); + foundFunctionNames.add(funcName); + assertNull(rsAll.getString("REMARKS")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsAll.getShort("FUNCTION_TYPE")); + assertEquals(funcName, rsAll.getString("SPECIFIC_NAME")); + } + assertEquals( + "Should find all " + expectedFunctionNames.size() + " functions in " + testSchema, + expectedFunctionNames.size(), + countAll); + assertEquals(expectedFunctionNames, foundFunctionNames); + 
rsAll.close(); + + // Test 2: Get a specific function using functionNamePattern + String specificFunctionName = "scalar_sql_udf"; + ResultSet rsSpecific = + databaseMetaData.getFunctions(testCatalog, testSchema, specificFunctionName); + assertTrue("Should find the specific function " + specificFunctionName, rsSpecific.next()); + assertEquals(testCatalog, rsSpecific.getString("FUNCTION_CAT")); + assertEquals(testSchema, rsSpecific.getString("FUNCTION_SCHEM")); + assertEquals(specificFunctionName, rsSpecific.getString("FUNCTION_NAME")); + assertNull(rsSpecific.getString("REMARKS")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsSpecific.getShort("FUNCTION_TYPE")); + assertEquals(specificFunctionName, rsSpecific.getString("SPECIFIC_NAME")); + assertFalse("Should only find one row for exact function match", rsSpecific.next()); + rsSpecific.close(); + + // Test 3: Get functions using a wildcard functionNamePattern "scalar%" + // Expected order due to sorting: scalar_js_udf, scalar_sql_udf + ResultSet rsWildcard = databaseMetaData.getFunctions(testCatalog, testSchema, "scalar%"); + assertTrue("Should find functions matching 'scalar%'", rsWildcard.next()); + assertEquals("scalar_js_udf", rsWildcard.getString("FUNCTION_NAME")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsWildcard.getShort("FUNCTION_TYPE")); + + assertTrue("Should find the second function matching 'scalar%'", rsWildcard.next()); + assertEquals("scalar_sql_udf", rsWildcard.getString("FUNCTION_NAME")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsWildcard.getShort("FUNCTION_TYPE")); + assertFalse("Should be no more functions matching 'scalar%'", rsWildcard.next()); + rsWildcard.close(); + + // Test 4: Schema pattern with wildcard + ResultSet rsSchemaWildcard = + databaseMetaData.getFunctions(testCatalog, "JDBC_TABLE_TYPES_T%", "complex_scalar_sql_udf"); + assertTrue("Should find function with schema wildcard", rsSchemaWildcard.next()); + assertEquals(testSchema, rsSchemaWildcard.getString("FUNCTION_SCHEM")); + assertEquals("complex_scalar_sql_udf", rsSchemaWildcard.getString("FUNCTION_NAME")); + assertFalse( + "Should only find one row for this schema wildcard and specific function", + rsSchemaWildcard.next()); + rsSchemaWildcard.close(); + + // Test 5: Non-existent function + ResultSet rsNonExistentFunc = + databaseMetaData.getFunctions(testCatalog, testSchema, "non_existent_function_xyz123"); + assertFalse("Should not find a non-existent function", rsNonExistentFunc.next()); + rsNonExistentFunc.close(); + + // Test 6: Non-existent schema + ResultSet rsNonExistentSchema = + databaseMetaData.getFunctions(testCatalog, "NON_EXISTENT_SCHEMA_XYZ123", null); + assertFalse("Should not find functions in a non-existent schema", rsNonExistentSchema.next()); + rsNonExistentSchema.close(); + + // Test 7: Empty schema pattern + ResultSet rsEmptySchema = databaseMetaData.getFunctions(testCatalog, "", null); + assertFalse("Empty schema pattern should return no results", rsEmptySchema.next()); + rsEmptySchema.close(); + + // Test 8: Empty function name pattern + ResultSet rsEmptyFunction = databaseMetaData.getFunctions(testCatalog, testSchema, ""); + assertFalse("Empty function name pattern should return no results", rsEmptyFunction.next()); + rsEmptyFunction.close(); + + // Test 9: Null catalog + ResultSet rsNullCatalog = databaseMetaData.getFunctions(null, testSchema, null); + assertFalse("Null catalog should return no results", rsNullCatalog.next()); + rsNullCatalog.close(); + } + + @Test + public void 
testDatabaseMetadataGetFunctionColumns() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String testCatalog = PROJECT_ID; + String testSchema = "JDBC_TABLE_TYPES_TEST"; + + // Test Case 1: Specific function 'scalar_sql_udf', specific column 'x' + String specificFunction1 = "scalar_sql_udf"; + String specificColumn1 = "x"; + ResultSet rs = + databaseMetaData.getFunctionColumns( + testCatalog, testSchema, specificFunction1, specificColumn1); + + assertTrue("Should find column 'x' for function 'scalar_sql_udf'", rs.next()); + assertEquals(testCatalog, rs.getString("FUNCTION_CAT")); + assertEquals(testSchema, rs.getString("FUNCTION_SCHEM")); + assertEquals(specificFunction1, rs.getString("FUNCTION_NAME")); + assertEquals(specificColumn1, rs.getString("COLUMN_NAME")); + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.BIGINT, rs.getInt("DATA_TYPE")); + assertEquals("BIGINT", rs.getString("TYPE_NAME")); + assertEquals(19, rs.getInt("PRECISION")); + assertEquals(null, rs.getObject("LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(0, rs.getShort("SCALE")); + assertEquals(10, rs.getShort("RADIX")); + assertEquals(DatabaseMetaData.functionNullableUnknown, rs.getShort("NULLABLE")); + assertNull(rs.getString("REMARKS")); + assertEquals(null, rs.getObject("CHAR_OCTET_LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + assertEquals("", rs.getString("IS_NULLABLE")); + assertEquals(specificFunction1, rs.getString("SPECIFIC_NAME")); + assertFalse("Should only find one row for exact column match", rs.next()); + rs.close(); + + // Test Case 2: Specific function 'complex_scalar_sql_udf', specific column 'arr' + String specificFunction2 = "complex_scalar_sql_udf"; + String specificColumn2 = "arr"; + rs = + databaseMetaData.getFunctionColumns( + testCatalog, testSchema, specificFunction2, specificColumn2); + assertTrue("Should find column 'arr' for function 'complex_scalar_sql_udf'", rs.next()); + assertEquals(testCatalog, rs.getString("FUNCTION_CAT")); + assertEquals(testSchema, rs.getString("FUNCTION_SCHEM")); + assertEquals(specificFunction2, rs.getString("FUNCTION_NAME")); + assertEquals(specificColumn2, rs.getString("COLUMN_NAME")); + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.ARRAY, rs.getInt("DATA_TYPE")); + assertEquals("ARRAY", rs.getString("TYPE_NAME")); + assertEquals(null, rs.getObject("PRECISION")); + assertTrue(rs.wasNull()); + assertEquals(null, rs.getObject("LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(null, rs.getObject("SCALE")); + assertTrue(rs.wasNull()); + assertEquals(null, rs.getObject("RADIX")); + assertTrue(rs.wasNull()); + assertEquals(DatabaseMetaData.functionNullableUnknown, rs.getShort("NULLABLE")); + assertNull(rs.getString("REMARKS")); + assertEquals(null, rs.getObject("CHAR_OCTET_LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + assertEquals("", rs.getString("IS_NULLABLE")); + assertEquals(specificFunction2, rs.getString("SPECIFIC_NAME")); + assertFalse("Should only find one row for exact column match", rs.next()); + rs.close(); + + // Test Case 3: All columns for 'persistent_sql_udf_named_params' (sorted by ordinal position) + String specificFunction3 = "persistent_sql_udf_named_params"; + rs = databaseMetaData.getFunctionColumns(testCatalog, testSchema, specificFunction3, null); + assertTrue("Should find columns for " + 
specificFunction3, rs.next()); + assertEquals(specificFunction3, rs.getString("FUNCTION_NAME")); + assertEquals("value1", rs.getString("COLUMN_NAME")); // Ordinal Position 1 + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.BIGINT, rs.getInt("DATA_TYPE")); + assertEquals("BIGINT", rs.getString("TYPE_NAME")); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + + assertTrue("Should find second column for " + specificFunction3, rs.next()); + assertEquals(specificFunction3, rs.getString("FUNCTION_NAME")); + assertEquals("value-two", rs.getString("COLUMN_NAME")); // Ordinal Position 2 + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.NVARCHAR, rs.getInt("DATA_TYPE")); + assertEquals("NVARCHAR", rs.getString("TYPE_NAME")); + assertEquals(2, rs.getInt("ORDINAL_POSITION")); + assertFalse("Should be no more columns for " + specificFunction3, rs.next()); + rs.close(); + + // Test Case 4: Wildcard for function name "scalar%", specific column name "x" + rs = databaseMetaData.getFunctionColumns(testCatalog, testSchema, "scalar%", "x"); + assertTrue("Should find column 'x' for functions matching 'scalar%'", rs.next()); + assertEquals("scalar_sql_udf", rs.getString("FUNCTION_NAME")); + assertEquals("x", rs.getString("COLUMN_NAME")); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + assertFalse("Should be no more columns named 'x' for functions matching 'scalar%'", rs.next()); + rs.close(); + + // Test Case 5: Wildcard for column name "%" for 'scalar_js_udf' + String specificFunction4 = "scalar_js_udf"; + rs = databaseMetaData.getFunctionColumns(testCatalog, testSchema, specificFunction4, "%"); + assertTrue("Should find columns for " + specificFunction4 + " with wildcard", rs.next()); + assertEquals(specificFunction4, rs.getString("FUNCTION_NAME")); + assertEquals("name", rs.getString("COLUMN_NAME")); // Ordinal Position 1 + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + + assertTrue("Should find second column for " + specificFunction4 + " with wildcard", rs.next()); + assertEquals(specificFunction4, rs.getString("FUNCTION_NAME")); + assertEquals("age", rs.getString("COLUMN_NAME")); // Ordinal Position 2 + assertEquals(2, rs.getInt("ORDINAL_POSITION")); + assertFalse("Should be no more columns for " + specificFunction4 + " with wildcard", rs.next()); + rs.close(); + + // Test Case 6: Non-existent function + rs = + databaseMetaData.getFunctionColumns( + testCatalog, testSchema, "non_existent_function_xyz", null); + assertFalse("Should not find columns for a non-existent function", rs.next()); + rs.close(); + } + + @Test + public void testRangeDataTypeWithJsonResultSet() throws SQLException { + String RANGE_DATA_TABLE = "JDBC_RANGE_DATA_TEST_TABLE_" + random.nextInt(99); + String range_date_literal = "RANGE '[2020-01-01, 2020-01-31)'"; + String range_datetime_literal = "RANGE '[2020-01-01 12:00:00, 2020-01-31 12:00:00)'"; + String range_timestamp_literal = + "RANGE '[2020-01-01 12:00:00+08, 2020-01-31 12:00:00+08)'"; + + String createRangeTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `range_date` RANGE," + + " `range_date_time` RANGE, `range_timestamp` RANGE);", + DATASET, RANGE_DATA_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, range_date, range_date_time, range_timestamp) VALUES (1, %s," + + " %s, %s);", + DATASET, + RANGE_DATA_TABLE, + range_date_literal, + range_datetime_literal, + range_timestamp_literal); + String selectQuery = + 
String.format( + "SELECT id, range_date, range_date_time, range_timestamp FROM %s.%s WHERE id = 1;", + DATASET, RANGE_DATA_TABLE); + + boolean status = bigQueryStatement.execute(createRangeTable); + assertFalse(status); + + status = bigQueryStatement.execute(insertQuery); + assertFalse(status); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + + Integer numRows = 0; + String actual_range_date = ""; + String actual_range_datetime = ""; + String actual_range_timestamp = ""; + + while (resultSet.next()) { + numRows++; + actual_range_date = resultSet.getString("range_date"); + actual_range_datetime = resultSet.getString("range_date_time"); + actual_range_timestamp = resultSet.getString("range_timestamp"); + } + + String expected_range_date = "[2020-01-01, 2020-01-31)"; + String expected_range_datetime = "[2020-01-01T12:00:00, 2020-01-31T12:00:00)"; + String expected_range_timestamp = "[1577851200.000000, 1580443200.000000)"; + + assertThat(numRows).isEqualTo(1); + assertThat(actual_range_date).isEqualTo(expected_range_date); + assertThat(actual_range_datetime).isEqualTo(expected_range_datetime); + assertThat(actual_range_timestamp).isEqualTo(expected_range_timestamp); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %s.%s", DATASET, RANGE_DATA_TABLE)); + } + + @Test + public void testRangeDataTypeWithArrowResultSet() throws SQLException { + String selectQuery = + "select * from `DATATYPERANGETEST.RangeIntervalTestTable` order by intColumn limit 5000;"; + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=1;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertTrue(resultSet.getClass().getName().contains("BigQueryArrowResultSet")); + resultSet.next(); + assertEquals("[2024-07-14, 2024-09-23)", resultSet.getString("rangeField")); + connection.close(); + } + + @Test + public void testPrepareCallSql() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc"); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamIndex() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamName() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamIndexScale() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamNameScale() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); +
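+ // Same CALL text as the index-based variants; the out parameter is registered by name with an explicit scale below.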
assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallSqlResultSetTypeConcurrency() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamIndex() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamName() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamIndexScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamNameScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallSqlResultSetTypeConcurrencyHoldability() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamIndex() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamName() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamIndexScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + 
ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamNameScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallFailureResultSetType() throws SQLException { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> + this.bigQueryConnection.prepareCall( + "call testProc", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY)); + } + + @Test + public void testPrepareCallFailureResultSetConcurrency() throws SQLException { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> + this.bigQueryConnection.prepareCall( + "call testProc", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE)); + } + + @Test + public void testPrepareCallFailureResultSetHoldability() throws SQLException { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> + this.bigQueryConnection.prepareCall( + "call testProc", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.HOLD_CURSORS_OVER_COMMIT)); + } + + // Integration tests for CallableStatement Setters and Getters + @Test + public void testSetterGetterBigDecimal() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + BigDecimal expected = new BigDecimal(12344); + callableStatement.setBigDecimal(CALLABLE_STMT_PARAM_KEY, expected); + BigDecimal actual = callableStatement.getBigDecimal(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterBoolean() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Boolean expected = true; + callableStatement.setBoolean(CALLABLE_STMT_PARAM_KEY, expected); + Boolean actual = callableStatement.getBoolean(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterByte() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Byte expected = "hello".getBytes()[0]; + callableStatement.setByte(CALLABLE_STMT_PARAM_KEY, expected); + Byte actual = callableStatement.getByte(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterBytes() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + byte[] expected = "hello".getBytes(); + callableStatement.setBytes(CALLABLE_STMT_PARAM_KEY, expected); + byte[] actual = callableStatement.getBytes(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterDate() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Date expected = new Date(1234567); + callableStatement.setDate(CALLABLE_STMT_PARAM_KEY, expected); + Date actual =
callableStatement.getDate(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterDateCal() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Date expected = new Date(1L); + Calendar cal = Calendar.getInstance(); + callableStatement.setDate(CALLABLE_STMT_PARAM_KEY, expected, cal); + Date actual = callableStatement.getDate(CALLABLE_STMT_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterDouble() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Double expected = 123.2345; + callableStatement.setDouble(CALLABLE_STMT_PARAM_KEY, expected); + Double actual = callableStatement.getDouble(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterFloat() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Float expected = 123.2345F; + callableStatement.setFloat(CALLABLE_STMT_PARAM_KEY, expected); + Float actual = callableStatement.getFloat(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterInt() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Integer expected = 123; + callableStatement.setInt(CALLABLE_STMT_PARAM_KEY, expected); + Integer actual = callableStatement.getInt(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterLong() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Long expected = 123L; + callableStatement.setLong(CALLABLE_STMT_PARAM_KEY, expected); + Long actual = callableStatement.getLong(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterNString() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "heelo"; + callableStatement.setNString(CALLABLE_STMT_PARAM_KEY, expected); + String actual = callableStatement.getNString(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterObject() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "heelo"; + callableStatement.setObject(CALLABLE_STMT_PARAM_KEY, expected); + Object actual = callableStatement.getObject(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterObjectWithSQLType() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "heelo"; + callableStatement.setObject(CALLABLE_STMT_PARAM_KEY, expected, Types.NVARCHAR); + Object actual = callableStatement.getObject(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterObjectWithSqlTypeAndScale() throws SQLException { + CallableStatement 
callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "heelo"; + callableStatement.setObject(CALLABLE_STMT_PARAM_KEY, expected, Types.NVARCHAR, 0); + Object actual = callableStatement.getObject(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterString() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "123"; + callableStatement.setString(CALLABLE_STMT_PARAM_KEY, expected); + String actual = callableStatement.getString(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterTime() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Time expected = new Time(1234567); + callableStatement.setTime(CALLABLE_STMT_PARAM_KEY, expected); + Time actual = callableStatement.getTime(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterTimeCal() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Time expected = new Time(1L); + Calendar cal = Calendar.getInstance(); + callableStatement.setTime(CALLABLE_STMT_PARAM_KEY, expected, cal); + Time actual = callableStatement.getTime(CALLABLE_STMT_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterTimestamp() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Timestamp expected = new Timestamp(1234567); + callableStatement.setTimestamp(CALLABLE_STMT_PARAM_KEY, expected); + Timestamp actual = callableStatement.getTimestamp(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterTimestampCal() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Timestamp expected = new Timestamp(1L); + Calendar cal = Calendar.getInstance(); + callableStatement.setTimestamp(CALLABLE_STMT_PARAM_KEY, expected, cal); + Timestamp actual = callableStatement.getTimestamp(CALLABLE_STMT_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testPooledConnectionDataSourceSuccess() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + } + + @Test + public void testPooledConnectionDataSourceFailNoConnectionURl() throws SQLException { + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + + assertThrows(BigQueryJdbcException.class, () -> pooledDataSource.getPooledConnection()); + } + + @Test + public void testPooledConnectionDataSourceFailInvalidConnectionURl() { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + 
"ListenerPoolSize=invalid"; + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + assertThrows(NumberFormatException.class, () -> pooledDataSource.getPooledConnection()); + } + + @Test + public void testPooledConnectionAddConnectionListener() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + } + + @Test + public void testPooledConnectionRemoveConnectionListener() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.removeConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + } + + @Test + public void testPooledConnectionConnectionClosed() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + + connection.close(); + assertEquals(1, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + } + + @Test + public void testPooledConnectionClose() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + + pooledConnection.close(); + assertEquals(1, listener.getConnectionClosedCount()); + assertEquals(0, 
listener.getConnectionErrorCount()); + } + + @Test + public void testPooledConnectionConnectionError() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + + ExecutorService executor = Executors.newFixedThreadPool(3); + connection.abort(executor); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(1, listener.getConnectionErrorCount()); + + executor.shutdown(); + connection.close(); + pooledConnection.close(); + } + + @Test + public void testPooledConnectionListenerAddListener() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + pooledConnection.close(); + } + + @Test + public void testPooledConnectionListenerRemoveListener() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + pooledConnection.removeConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + pooledConnection.close(); + } + + @Test + public void testPooledConnectionListenerCloseConnection() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + 
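// The logical handle wraps a pooled physical connection; closing it below should return it to the pool rather than sever it. +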
assertFalse(connection.isClosed());
+
+    connection.close();
+    assertFalse(listener.isConnectionPoolEmpty());
+    pooledConnection.close();
+  }
+
+  @Test
+  public void testPooledConnectionListenerClosePooledConnection() throws SQLException {
+    String connectionUrl =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";
+
+    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
+    pooledDataSource.setURL(connectionUrl);
+
+    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
+    assertNotNull(pooledConnection);
+    PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE);
+    pooledConnection.addConnectionEventListener(listener);
+    assertTrue(listener.isConnectionPoolEmpty());
+
+    pooledConnection.close();
+    assertFalse(listener.isConnectionPoolEmpty());
+  }
+
+  @Test
+  public void testPooledConnectionListenerConnectionError() throws SQLException {
+    String connectionUrl =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";
+
+    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
+    pooledDataSource.setURL(connectionUrl);
+
+    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
+    assertNotNull(pooledConnection);
+    PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE);
+    pooledConnection.addConnectionEventListener(listener);
+    assertTrue(listener.isConnectionPoolEmpty());
+
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+
+    ExecutorService executor = Executors.newFixedThreadPool(3);
+    connection.abort(executor);
+    assertTrue(listener.isConnectionPoolEmpty());
+
+    executor.shutdown();
+    connection.close();
+    pooledConnection.close();
+  }
+
+  @Test
+  public void testExecuteQueryWithConnectionPoolingEnabledDefaultPoolSize() throws SQLException {
+    String connectionURL =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "OAuthType=3;ProjectId="
+            + PROJECT_ID
+            + ";";
+    assertConnectionPoolingResults(connectionURL, DEFAULT_CONN_POOL_SIZE);
+  }
+
+  @Test
+  public void testExecuteQueryWithConnectionPoolingEnabledCustomPoolSize() throws SQLException {
+    String connectionURL =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "OAuthType=3;ProjectId="
+            + PROJECT_ID
+            + ";"
+            + "ConnectionPoolSize="
+            + CUSTOM_CONN_POOL_SIZE
+            + ";";
+    assertConnectionPoolingResults(connectionURL, CUSTOM_CONN_POOL_SIZE);
+  }
+
+  private void assertConnectionPoolingResults(String connectionURL, Long connectionPoolSize)
+      throws SQLException {
+    // Create Pooled Connection Datasource
+    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
+    pooledDataSource.setURL(connectionURL);
+
+    // Get the pooled connection and ensure the listener was added with the configured connection pool size.
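+    // The pool starts empty; the capacity assertions below track physical connections as they are returned via close().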
+ PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = pooledDataSource.getConnectionPoolManager(); + assertNotNull(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + // Get Underlying physical connection + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + + // Execute query with physical connection + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + + // Close physical connection + connection.close(); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertEquals(connectionPoolSize, listener.getConnectionPoolSize()); + + // Reuse same physical connection. + connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertEquals(connectionPoolSize, listener.getConnectionPoolSize()); + + // Execute query with reusable physical connection + jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + + // Return connection back to the pool. + connection.close(); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertEquals(connectionPoolSize, listener.getConnectionPoolSize()); + pooledConnection.close(); + } + + @Test + public void testAdditionalProjectsInMetadata() throws SQLException { + String additionalProjectsValue = "bigquery-public-data"; + String datasetInAdditionalProject = "baseball"; + + String urlWithAdditionalProjects = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=" + + PROJECT_ID + + ";OAuthType=3" + + ";AdditionalProjects=" + + additionalProjectsValue; + + try (Connection conn = DriverManager.getConnection(urlWithAdditionalProjects)) { + DatabaseMetaData dbMetaData = conn.getMetaData(); + + // 1. Test getCatalogs() + Set foundCatalogs = new HashSet<>(); + try (ResultSet catalogsRs = dbMetaData.getCatalogs()) { + while (catalogsRs.next()) { + foundCatalogs.add(catalogsRs.getString("TABLE_CAT")); + } + } + assertTrue( + "getCatalogs() should contain the primary project ID", + foundCatalogs.contains(PROJECT_ID)); + assertTrue( + "getCatalogs() should contain the additional project ID", + foundCatalogs.contains(additionalProjectsValue)); + + // 2. 
Test getSchemas() + Set catalogsForSchemasFromAll = new HashSet<>(); + boolean foundAdditionalDataset = false; + try (ResultSet schemasRs = dbMetaData.getSchemas()) { + while (schemasRs.next()) { + String schemaName = schemasRs.getString("TABLE_SCHEM"); + String catalogName = schemasRs.getString("TABLE_CATALOG"); + catalogsForSchemasFromAll.add(catalogName); + if (additionalProjectsValue.equals(catalogName) + && datasetInAdditionalProject.equals(schemaName)) { + foundAdditionalDataset = true; + } + } + } + assertTrue( + "getSchemas() should list datasets from the primary project", + catalogsForSchemasFromAll.contains(PROJECT_ID)); + assertTrue( + "getSchemas() should list datasets from the additional project", + catalogsForSchemasFromAll.contains(additionalProjectsValue)); + assertTrue( + "Known dataset from additional project not found in getSchemas()", + foundAdditionalDataset); + + } catch (SQLException e) { + System.err.println("SQL Error during AdditionalProjects test: " + e.getMessage()); + throw e; + } + } + + @Test + public void testFilterTablesOnDefaultDataset_getTables() throws SQLException { + String defaultDatasetValue = CONSTRAINTS_DATASET; + String table1InDefaultDataset = CONSTRAINTS_TABLE_NAME; + String table2InDefaultDataset = CONSTRAINTS_TABLE_NAME2; + + String specificDatasetValue = "JDBC_TABLE_TYPES_TEST"; + String table1InSpecificDataset = "base_table"; + String table2InSpecificDataset = "external_table"; + + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=" + + PROJECT_ID + + ";OAuthType=3" + + ";DefaultDataset=" + + defaultDatasetValue + + ";FilterTablesOnDefaultDataset=1"; + try (Connection conn = DriverManager.getConnection(connectionUrl)) { + DatabaseMetaData dbMetaData = conn.getMetaData(); + + // Case 1: Catalog and schemaPattern are null/wildcard, should use DefaultDataset + try (ResultSet rs = dbMetaData.getTables(null, null, null, null)) { + Set tableNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InDefaultDataset)); + assertTrue(tableNames.contains(table2InDefaultDataset)); + } + + // Case 2: Explicit schemaPattern overrides DefaultDataset + try (ResultSet rs = dbMetaData.getTables(null, specificDatasetValue, null, null)) { + Set tableNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InSpecificDataset)); + assertTrue(tableNames.contains(table2InSpecificDataset)); + } + + // Case 3: Explicit catalog, schemaPattern is null/wildcard, should use DefaultDataset within + // that catalog + try (ResultSet rs = dbMetaData.getTables(PROJECT_ID, null, null, null)) { + Set tableNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InDefaultDataset)); + assertTrue(tableNames.contains(table2InDefaultDataset)); + } + + // Case 4: Explicit catalog and schemaPattern override DefaultDataset + try (ResultSet rs = dbMetaData.getTables(PROJECT_ID, specificDatasetValue, null, null)) { + Set tableNames = new HashSet<>(); + 
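// Rows returned here must come from the explicitly requested dataset, confirming it wins over DefaultDataset even with FilterTablesOnDefaultDataset=1. +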
while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InSpecificDataset)); + assertTrue(tableNames.contains(table2InSpecificDataset)); + } + } + } + + @Test + public void testFilterTablesOnDefaultDataset_getColumns() throws SQLException { + String defaultDatasetValue = CONSTRAINTS_DATASET; + String tableInDefaultDataset = CONSTRAINTS_TABLE_NAME; + String[] columnsInDefaultTable = {"id", "name", "second_name", "address"}; + + String specificDatasetValue = "JDBC_TABLE_TYPES_TEST"; + String tableInSpecificDataset = "base_table"; + String[] columnsInSpecificTable = {"id", "name", "created_at"}; + + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=" + + PROJECT_ID + + ";OAuthType=3" + + ";DefaultDataset=" + + defaultDatasetValue + + ";FilterTablesOnDefaultDataset=1"; + + try (Connection conn = DriverManager.getConnection(connectionUrl)) { + DatabaseMetaData dbMetaData = conn.getMetaData(); + + // Case 1: Catalog and schemaPattern are null/wildcard, should use DefaultDataset + try (ResultSet rs = dbMetaData.getColumns(null, null, tableInDefaultDataset, null)) { + Set columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInDefaultDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInDefaultTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInDefaultTable.length, columnNames.size()); + } + + // Case 2: Explicit schemaPattern overrides DefaultDataset + try (ResultSet rs = + dbMetaData.getColumns(null, specificDatasetValue, tableInSpecificDataset, null)) { + Set columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInSpecificDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInSpecificTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInSpecificTable.length, columnNames.size()); + } + + // Case 3: Explicit catalog, schemaPattern is null/wildcard, should use DefaultDataset within + // that catalog + try (ResultSet rs = dbMetaData.getColumns(PROJECT_ID, null, tableInDefaultDataset, null)) { + Set columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInDefaultDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInDefaultTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInDefaultTable.length, columnNames.size()); + } + + // Case 4: Explicit catalog and schemaPattern override DefaultDataset + try (ResultSet rs = + dbMetaData.getColumns(PROJECT_ID, specificDatasetValue, tableInSpecificDataset, null)) { + Set columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInSpecificDataset, rs.getString("TABLE_NAME")); + 
columnNames.add(rs.getString("COLUMN_NAME"));
+        }
+        for (String expectedCol : columnsInSpecificTable) {
+          assertTrue(columnNames.contains(expectedCol));
+        }
+        assertEquals(columnsInSpecificTable.length, columnNames.size());
+      }
+    }
+  }
+
+  @Test
+  public void testAlterTable() throws SQLException {
+    String TABLE_NAME = "JDBC_ALTER_TABLE_" + randomNumber;
+    String createQuery =
+        String.format("CREATE OR REPLACE TABLE %s.%s (`StringField` STRING);", DATASET, TABLE_NAME);
+    String addColumnQuery =
+        String.format("ALTER TABLE %s.%s ADD COLUMN `IntegerField` INTEGER;", DATASET, TABLE_NAME);
+    String updateQuery =
+        String.format(
+            "UPDATE %s.%s SET StringField='Jane Doe' WHERE IntegerField=111", DATASET, TABLE_NAME);
+    String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME);
+    String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME);
+
+    int createStatus = bigQueryStatement.executeUpdate(createQuery);
+    assertEquals(0, createStatus);
+
+    int addColumnStatus = bigQueryStatement.executeUpdate(addColumnQuery);
+    assertEquals(0, addColumnStatus);
+
+    bigQueryStatement.executeQuery(selectQuery);
+    int selectStatus = bigQueryStatement.getUpdateCount();
+    assertEquals(-1, selectStatus);
+
+    int updateStatus = bigQueryStatement.executeUpdate(updateQuery);
+    assertEquals(0, updateStatus);
+
+    int dropStatus = bigQueryStatement.executeUpdate(dropQuery);
+    assertEquals(0, dropStatus);
+
+    bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %s.%s", DATASET, TABLE_NAME));
+  }
+
+  @Test
+  public void testQueryPropertyDataSetProjectIdQueriesToCorrectDataset() throws SQLException {
+    String connection_uri =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "OAuthType=3;"
+            + "ProjectId="
+            + PROJECT_ID
+            + ";QueryProperties=dataset_project_id="
+            + PROJECT_ID
+            + ";";
+    String insertQuery =
+        String.format(
+            "INSERT INTO %s.%s (id, name, age) VALUES (15, 'Farhan', 25);",
+            "INTEGRATION_TESTS", "Test_Table");
+    String selectQuery =
+        "SELECT * FROM `bigquery-devtools-drivers.INTEGRATION_TESTS.Test_Table` WHERE age=25;";
+    Driver driver = BigQueryDriver.getRegisteredDriver();
+    Connection connection = driver.connect(connection_uri, new Properties());
+    Statement statement = connection.createStatement();
+
+    // act
+    statement.execute(insertQuery);
+
+    // assertions
+    boolean result = statement.execute(selectQuery);
+    assertTrue(result);
+
+    // clean up
+    String deleteQuery =
+        String.format("DELETE FROM %s.%s WHERE age=25", "INTEGRATION_TESTS", "Test_Table");
+    statement.execute(deleteQuery);
+    connection.close();
+  }
+
+  @Test
+  public void testQueryPropertyDataSetProjectIdQueriesToIncorrectDatasetThrows()
+      throws SQLException {
+    String connection_uri =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "OAuthType=3;"
+            + "ProjectId="
+            + PROJECT_ID
+            + ";QueryProperties=dataset_project_id=bigquerytestdefault"
+            + ";";
+    String insertQuery =
+        String.format(
+            "INSERT INTO %s.%s (id, name, age) VALUES (15, 'Farhan', 25);",
+            "INTEGRATION_TESTS", "Test_Table");
+    Driver driver = BigQueryDriver.getRegisteredDriver();
+    Connection connection = driver.connect(connection_uri, new Properties());
+    Statement statement = connection.createStatement();
+
+    // act & assertion
+    assertThrows(BigQueryJdbcException.class, () -> statement.execute(insertQuery));
+    connection.close();
+  }
+
+  @Test
+  public void testQueryPropertyTimeZoneQueries() throws SQLException {
+    String connection_uri = +
"jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=time_zone=America/New_York;"; + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(query); + + // assertions + assertNotNull(resultSet); + assertTrue(resultSet.next()); + connection.close(); + } + + @Test + public void testQueryPropertySessionIdSetsStatementSession() + throws SQLException, InterruptedException { + String sessionId = getSessionId(); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=session_id=" + + sessionId + + ";"; + String selectQuery = + "INSERT INTO `bigquery-devtools-drivers.JDBC_INTEGRATION_DATASET.No_KMS_Test_table` (id," + + " name, age) VALUES (132, 'Batman', 531);"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + boolean resultSet = statement.execute(selectQuery); + + // assertions + assertFalse(resultSet); + + // clean up + String deleteQuery = + String.format("DELETE FROM %s.%s WHERE age=25", "INTEGRATION_TESTS", "Test_Table"); + statement.execute(deleteQuery); + connection.close(); + } + + @Test + public void testEncryptedTableWithKmsQueries() throws SQLException { + // setup + String KMSKeyName = requireEnvVar("KMS_RESOURCE_PATH"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";KMSKeyName=" + + KMSKeyName + + ";"; + String selectQuery = "SELECT * FROM `JDBC_INTEGRATION_DATASET.KMS_Test_table`;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectQuery); + + // assertions for data not encrypted + assertNotNull(resultSet); + assertTrue(resultSet.next()); + assertEquals("Farhan", resultSet.getString("name")); + connection.close(); + } + + @Test + public void testIncorrectKmsThrows() throws SQLException { + String KMSKeyName = requireEnvVar("KMS_RESOURCE_PATH"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";KMSKeyName=" + + KMSKeyName + + ";"; + String selectQuery = + "INSERT INTO `bigquery-devtools-drivers.JDBC_INTEGRATION_DATASET.No_KMS_Test_table` (id," + + " name, age) VALUES (132, 'Batman', 531);"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows(BigQueryJdbcException.class, () -> statement.execute(selectQuery)); + connection.close(); + } + + @Test + public void testQueryPropertyServiceAccountFollowsIamPermission() throws SQLException { + final String SERVICE_ACCOUNT_EMAIL = requireEnvVar("SA_EMAIL"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + 
+ PROJECT_ID + + ";QueryProperties=service_account=" + + SERVICE_ACCOUNT_EMAIL + + ";"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(String.format(BASE_QUERY, 100)); + + // assertions + assertNotNull(resultSet); + assertTrue(resultSet.next()); + connection.close(); + } + + @Test + public void testValidLegacySQLStatement() throws SQLException { + String legacyJoinQuery = + "SELECT\n" + + " repo_name\n" + + "FROM\n" + + " [bigquery-public-data.github_repos.commits],\n" + + " [bigquery-public-data.github_repos.sample_commits] LIMIT 10"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;"; + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + + boolean result = statement.execute(legacyJoinQuery); + assertTrue(result); + connection.close(); + } + + @Test + public void testMultipleTransactionsThrowsUnsupported() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.execute("BEGIN TRANSACTION;")); + connection.close(); + } + + @Test + public void testConnectionWithMultipleTransactionCommits() throws SQLException { + String TRANSACTION_TABLE = "JDBC_MULTI_COMMIT_TABLE" + randomNumber; + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'DwightShrute', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + + Connection connection = DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + + Statement statement = connection.createStatement(); + statement.execute(insertQuery); + statement.execute(updateQuery); + connection.commit(); // First transaction + + // After commit, a new transaction should have started. + // Executing another query and then rolling it back. + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (15, 'MichaelScott', 25);", + DATASET, TRANSACTION_TABLE); + statement.execute(insertQuery2); + connection.rollback(); // Second transaction + + // Verify state with the static bigQueryStatement + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int count = 0; + while (resultSet.next()) { + count++; + assertEquals(14, resultSet.getInt("age")); + } + assertEquals(1, count); // Only first transaction should be committed. 
+ + // Verify the second insert was rolled back + ResultSet rs2 = + bigQueryStatement.executeQuery( + String.format("SELECT * FROM %s.%s WHERE id=15", DATASET, TRANSACTION_TABLE)); + assertFalse(rs2.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %s.%s", DATASET, TRANSACTION_TABLE)); + + statement.close(); + connection.close(); + } + + // Private Helper functions + private String getSessionId() throws InterruptedException { + QueryJobConfiguration stubJobConfig = + QueryJobConfiguration.newBuilder("Select 1;").setCreateSession(true).build(); + Job job = bigQuery.create(JobInfo.of(stubJobConfig)); + job = job.waitFor(); + Job stubJob = bigQuery.getJob(job.getJobId()); + return stubJob.getStatistics().getSessionInfo().getSessionId(); + } + + @Test + public void testCallableStatementScriptExecuteUpdate() throws SQLException { + int randomNum = java.util.UUID.randomUUID().hashCode(); + String insertName = "callable-statement-dml-insert-test"; + String insertResult = String.format("%s-%d", insertName, randomNum); + String updateName = "callable-statement-dml-update-test"; + String updateResult = String.format("%s-%d", updateName, randomNum); + String selectStmtQuery = + String.format("SELECT * FROM %s.%s WHERE id = ?", DATASET, CALLABLE_STMT_DML_TABLE_NAME); + String insertCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_INSERT_PROC_NAME); + String updateCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_UPDATE_PROC_NAME); + String deleteCallStmtQuery = + String.format("CALL %s.%s(?);", DATASET, CALLABLE_STMT_DML_DELETE_PROC_NAME); + + // DML INSERT + CallableStatement callableStatement = bigQueryConnection.prepareCall(insertCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, insertName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, insertResult); + int rowsInserted = callableStatement.executeUpdate(); + assertEquals(1, rowsInserted); + + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + ResultSet rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(insertName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(insertResult, rs.getString(3)); + + // DML UPDATE + callableStatement = bigQueryConnection.prepareCall(updateCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, updateName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, updateResult); + int rowsUpdated = callableStatement.executeUpdate(); + assertEquals(1, rowsUpdated); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(updateName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(updateResult, rs.getString(3)); + + // DML DELETE + callableStatement = bigQueryConnection.prepareCall(deleteCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setInt(1, randomNum); + int rowsDeleted = callableStatement.executeUpdate(); + assertEquals(1, rowsDeleted); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, 
randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertFalse(rs.next()); + + callableStatement.close(); + } + + @Test + public void testCallableStatementScriptExecuteLargeUpdate() throws SQLException { + int randomNum = java.util.UUID.randomUUID().hashCode(); + String insertName = "callable-statement-dml-insert-test"; + String insertResult = String.format("%s-%d", insertName, randomNum); + String updateName = "callable-statement-dml-update-test"; + String updateResult = String.format("%s-%d", updateName, randomNum); + String selectStmtQuery = + String.format("SELECT * FROM %s.%s WHERE id = ?", DATASET, CALLABLE_STMT_DML_TABLE_NAME); + String insertCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_INSERT_PROC_NAME); + String updateCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_UPDATE_PROC_NAME); + String deleteCallStmtQuery = + String.format("CALL %s.%s(?);", DATASET, CALLABLE_STMT_DML_DELETE_PROC_NAME); + + // DML INSERT + CallableStatement callableStatement = bigQueryConnection.prepareCall(insertCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, insertName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, insertResult); + long rowsInserted = callableStatement.executeLargeUpdate(); + assertEquals(1L, rowsInserted); + + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + ResultSet rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(insertName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(insertResult, rs.getString(3)); + + // DML UPDATE + callableStatement = bigQueryConnection.prepareCall(updateCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, updateName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, updateResult); + long rowsUpdated = callableStatement.executeLargeUpdate(); + assertEquals(1L, rowsUpdated); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(updateName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(updateResult, rs.getString(3)); + + // DML DELETE + callableStatement = bigQueryConnection.prepareCall(deleteCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setInt(1, randomNum); + long rowsDeleted = callableStatement.executeLargeUpdate(); + assertEquals(1L, rowsDeleted); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertFalse(rs.next()); + + callableStatement.close(); + } + + @Test + public void testScript() throws SQLException { + String BASE_QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 order by" + + " trip_distance asc LIMIT %s;"; + String query1 = String.format(BASE_QUERY, 5000); + String query2 = String.format(BASE_QUERY, 7000); + String query3 = String.format(BASE_QUERY, 9000); + + bigQueryStatement.execute(query1 + query2 + query3); + ResultSet resultSet = bigQueryStatement.getResultSet(); + 
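// Each SELECT in the script produces its own result set; getMoreResults() below advances through them in submission order. +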
assertEquals(5000, resultSetRowCount(resultSet));
+
+    boolean hasMoreResult = bigQueryStatement.getMoreResults();
+    assertTrue(hasMoreResult);
+    resultSet = bigQueryStatement.getResultSet();
+    assertEquals(7000, resultSetRowCount(resultSet));
+
+    hasMoreResult = bigQueryStatement.getMoreResults();
+    assertTrue(hasMoreResult);
+    resultSet = bigQueryStatement.getResultSet();
+    assertEquals(9000, resultSetRowCount(resultSet));
+  }
+
+  @Test
+  public void testCallableStatementScriptExecute() throws SQLException {
+    int randomNum = random.nextInt(99);
+    String callableStmtQuery =
+        String.format(
+            "DECLARE call_result STRING;"
+                + "CALL %s.%s(?,?,call_result);"
+                + "SELECT * FROM %s.%s WHERE result = call_result;",
+            DATASET, CALLABLE_STMT_PROC_NAME, DATASET, CALLABLE_STMT_TABLE_NAME);
+    CallableStatement callableStatement = bigQueryConnection.prepareCall(callableStmtQuery);
+    callableStatement.setString(1, "callable-stmt-test");
+    callableStatement.setInt(2, randomNum);
+
+    assertFalse(callableStatement.execute());
+    assertEquals(1, callableStatement.getUpdateCount());
+
+    // This is an actual SELECT * from the above
+    assertTrue(callableStatement.getMoreResults());
+    ResultSet resultSet = callableStatement.getResultSet();
+    ResultSetMetaData rsMetadata = resultSet.getMetaData();
+    assertEquals(3, rsMetadata.getColumnCount());
+
+    assertTrue(resultSet.next());
+
+    String expected = String.format("callable-stmt-test-%d", randomNum);
+    String actual = resultSet.getString(3);
+
+    assertEquals(expected, actual);
+
+    // Validate there are no more results
+    assertFalse(callableStatement.getMoreResults());
+    assertEquals(-1, callableStatement.getUpdateCount());
+    callableStatement.close();
+  }
+
+  @Test
+  public void testExecuteScriptWithExpression() throws SQLException {
+    int randomNum = random.nextInt(99);
+    String query = String.format("DECLARE x INT64; SET x = (SELECT %s); SELECT x;", randomNum);
+
+    assertTrue(bigQueryStatement.execute(query));
+    ResultSet rs = bigQueryStatement.getResultSet();
+    assertTrue(rs.next());
+    assertEquals(randomNum, rs.getInt(1));
+    assertFalse(rs.next());
+    assertFalse(bigQueryStatement.getMoreResults());
+    assertEquals(-1, bigQueryStatement.getUpdateCount());
+  }
+
+  @Test
+  public void testInformationSchemaTables() throws SQLException {
+    String query = String.format("SELECT * FROM %s.INFORMATION_SCHEMA.TABLES", DATASET);
+    try (Statement statement = bigQueryConnection.createStatement();
+        ResultSet resultSet = statement.executeQuery(query)) {
+      ResultSetMetaData metaData = resultSet.getMetaData();
+      int columnCount = metaData.getColumnCount();
+      assertTrue(columnCount > 0);
+      int rowCount = 0;
+      while (resultSet.next()) {
+        rowCount++;
+        for (int i = 1; i <= columnCount; i++) {
+          Object obj = resultSet.getObject(i);
+          if (obj != null) {
+            assertNotNull(obj.toString());
+          }
+        }
+      }
+      assertTrue(rowCount > 0);
+    }
+  }
+
+  private void validate(
+      String method,
+      BiFunction<ResultSet, Integer, Object> getter,
+      ImmutableMap<String, Object> expectedResult)
+      throws Exception {
+
+    try (Connection connection = DriverManager.getConnection(connection_uri);
+        Connection connectionHTAPI =
+            DriverManager.getConnection(
+                connection_uri
+                    + ";HighThroughputMinTableSize=0;HighThroughputActivationRatio=0;EnableHighThroughputAPI=1;");
+        Statement statement = connection.createStatement();
+        Statement statementHTAPI = connectionHTAPI.createStatement()) {
+
+      String query =
+          "SELECT * FROM INTEGRATION_TEST_FORMAT.all_bq_types WHERE stringField is not null";
+      ResultSet resultSetRegular = statement.executeQuery(query);
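+      // Presumably the EnableHighThroughputAPI settings route this second statement through the Arrow-based read path, so each getter can be compared across both APIs.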
+      ResultSet resultSetArrow = statementHTAPI.executeQuery(query);
+      resultSetRegular.next();
+      resultSetArrow.next();
+
+      for (int i = 1; i <= resultSetRegular.getMetaData().getColumnCount(); i++) {
+        String columnName = resultSetRegular.getMetaData().getColumnName(i);
+
+        String regularApiLabel =
+            String.format("[Method: %s] [Column: %s] [API: Regular]", method, columnName);
+        String htapiApiLabel =
+            String.format("[Method: %s] [Column: %s] [API: HTAPI]", method, columnName);
+
+        if (expectedResult.containsKey(columnName)) {
+          Object expectedValue = expectedResult.get(columnName);
+
+          assertEquals(regularApiLabel, expectedValue, getter.apply(resultSetRegular, i));
+          assertEquals(htapiApiLabel, expectedValue, getter.apply(resultSetArrow, i));
+
+        } else {
+          String regularMsg = "Expected exception but got a value. " + regularApiLabel;
+          assertEquals(regularMsg, EXCEPTION_REPLACEMENT, getter.apply(resultSetRegular, i));
+
+          String htapiMsg = "Expected exception but got a value. " + htapiApiLabel;
+          assertEquals(htapiMsg, EXCEPTION_REPLACEMENT, getter.apply(resultSetArrow, i));
+        }
+      }
+    }
+  }
+
+  @Test
+  public void validateGetString() throws Exception {
+    final ImmutableMap<String, Object> stringResults =
+        new ImmutableMap.Builder<String, Object>()
+            .put("stringField", "StringValue")
+            .put("bytesField", "Qnl0ZXNWYWx1ZQ==")
+            .put("intField", "123")
+            .put("floatField", "10.5")
+            .put("numericField", "12345.67")
+            .put("bigNumericField", "98765432109876543210.123456789")
+            .put("booleanField", "true")
+            .put("timestampFiled", "2023-07-28 12:30:00.000000")
+            .put("dateField", "2023-07-28")
+            .put("timeField", "12:30:00.000")
+            .put("dateTimeField", "2023-07-28 12:30:00.000000")
+            .put("geographyField", "POINT(-74.006 40.7128)")
+            .put(
+                "recordField",
+                "{\"name\":\"NameValue\",\"recordNested\":{\"lastName\":\"LastNameValue\"}}")
+            .put("rangeField", "[2023-01-01, 2023-12-01)")
+            .put("jsonField", "{\"key\":\"value\"}")
+            .put("arrayString", "[abc, def, ghi]")
+            .put("arrayRecord", "[{\"value\":\"rec_val1\"}, {\"value\":\"rec_val2\"}]")
+            .put("arrayBytes", "[Ynl0ZTE=, Ynl0ZTI=]")
+            .put("arrayInteger", "[10, 20]")
+            .put("arrayNumeric", "[10.5, 20.5]")
+            .put("arrayBignumeric", "[100.1, 200.2]")
+            .put("arrayBoolean", "[true, false]")
+            .put("arrayTimestamp", "[2023-01-01 01:00:00.0, 2023-01-01 02:00:00.0]")
+            .put("arrayDate", "[2023-01-01, 2023-01-02]")
+            .put("arrayTime", "[01:00:00, 02:00:00]")
+            .put("arrayDatetime", "[2023-01-01 01:00:00.0, 2023-01-01 02:00:00.0]")
+            .put("arrayGeography", "[POINT(1 1), POINT(2 2)]")
+            .put("arrayRange", "[[2023-01-01, 2023-01-03), [2023-01-04, 2023-01-06)]")
+            .put("arrayJson", "[{\"a\":1}, {\"b\":2}]")
+            .put("arrayFloat", "[1.1, 2.2]")
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getString(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getString", getter, stringResults);
+  }
+
+  @Test
+  public void validateGetInt() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("intField", 123)
+            .put("floatField", 10)
+            .put("numericField", 12345)
+            .put("booleanField", 1)
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getInt(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getInt", getter, result);
+  }
+
+  @Test
+  public void validateGetLong() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("intField", 123L)
+            .put("floatField", 10L)
+            .put("numericField", 12345L)
+            .put("booleanField", 1L)
+            .build();
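+    // As in the other validateGet* tests, the lambda maps a failed conversion to EXCEPTION_REPLACEMENT so unsupported columns can be asserted uniformly by validate().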
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getLong(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getLong", getter, result);
+  }
+
+  @Test
+  public void validateGetBool() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("intField", true)
+            .put("floatField", true)
+            .put("numericField", true)
+            .put("booleanField", true)
+            .put("bigNumericField", true)
+            .put("stringField", false)
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getBoolean(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getBool", getter, result);
+  }
+
+  @Test
+  public void validateGetFloat() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("intField", (float) 123.0)
+            .put("floatField", (float) 10.5)
+            .put("numericField", (float) 12345.67)
+            .put("bigNumericField", (float) 98765432109876543210.123456789)
+            .put("booleanField", (float) 1.0)
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getFloat(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getFloat", getter, result);
+  }
+
+  @Test
+  public void validateGetDouble() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("intField", (double) 123.0)
+            .put("floatField", (double) 10.5)
+            .put("numericField", (double) 12345.67)
+            .put("bigNumericField", (double) 98765432109876543210.123456789)
+            .put("booleanField", (double) 1.0)
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getDouble(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getDouble", getter, result);
+  }
+
+  @Test
+  public void validateGetShort() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("intField", (short) 123)
+            .put("floatField", (short) 10)
+            .put("numericField", (short) 12345)
+            .put("booleanField", (short) 1)
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getShort(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getShort", getter, result);
+  }
+
+  @Test
+  public void validateGetTime() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("timeField", Time.valueOf("12:30:00"))
+            .put("dateTimeField", Time.valueOf("12:30:00"))
+            .put("timestampFiled", Time.valueOf("12:30:00"))
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getTime(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getTime", getter, result);
+  }
+
+  @Test
+  public void validateGetDate() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("dateField", Date.valueOf("2023-07-28"))
+            .put("dateTimeField", Date.valueOf("2023-07-28"))
+            .put("timestampFiled", Date.valueOf("2023-07-28"))
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getDate(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getDate", getter, result);
+  }
+
+  @Test
+  public void validateGetTimestamp() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("timeField", Timestamp.valueOf("1970-01-01 12:30:00"))
+            .put("dateField", Timestamp.valueOf("2023-07-28 00:00:00"))
+            .put("dateTimeField", Timestamp.valueOf("2023-07-28 12:30:00"))
+            .put("timestampFiled", Timestamp.valueOf("2023-07-28 12:30:00"))
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getTimestamp(i);
+          }
catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getTimestamp", getter, result);
+  }
+
+  @Test
+  public void validateGetByte() throws Exception {
+    final ImmutableMap<String, Object> result =
+        new ImmutableMap.Builder<String, Object>()
+            .put("intField", (byte) 123)
+            .put("booleanField", (byte) 1)
+            .put("floatField", (byte) 10)
+            .build();
+    BiFunction<ResultSet, Integer, Object> getter =
+        (s, i) -> {
+          try {
+            return s.getByte(i);
+          } catch (Exception e) {
+            return EXCEPTION_REPLACEMENT;
+          }
+        };
+    validate("getByte", getter, result);
+  }
+
+  @Test
+  public void validateGetObjectNullValues() throws Exception {
+    try (Connection connection = DriverManager.getConnection(connection_uri);
+        Connection connectionHTAPI =
+            DriverManager.getConnection(
+                connection_uri
+                    + ";HighThroughputMinTableSize=0;HighThroughputActivationRatio=0;EnableHighThroughputAPI=1;");
+        Statement statement = connection.createStatement();
+        Statement statementHTAPI = connectionHTAPI.createStatement()) {
+
+      String query =
+          "SELECT * FROM INTEGRATION_TEST_FORMAT.all_bq_types WHERE stringField is null;";
+      ResultSet resultSetRegular = statement.executeQuery(query);
+      ResultSet resultSetArrow = statementHTAPI.executeQuery(query);
+      resultSetRegular.next();
+      resultSetArrow.next();
+
+      for (int i = 1; i <= resultSetRegular.getMetaData().getColumnCount(); i++) {
+        String columnName = resultSetRegular.getMetaData().getColumnName(i);
+        if (!columnName.contains("array")) {
+          assertNull(resultSetRegular.getObject(i));
+          assertNull(resultSetArrow.getObject(i));
+        } else {
+          assertEquals("[]", resultSetRegular.getObject(i).toString());
+          assertEquals("[]", resultSetArrow.getObject(i).toString());
+        }
+      }
+    }
+  }
+
+  private int resultSetRowCount(ResultSet resultSet) throws SQLException {
+    int rowCount = 0;
+    while (resultSet.next()) {
+      rowCount++;
+    }
+    return rowCount;
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java
new file mode 100644
index 0000000000..30124b4a04
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java
@@ -0,0 +1,1713 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.asList; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlSyntaxErrorException; +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import com.google.cloud.bigquery.jdbc.BigQueryDriver; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.sql.Date; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Arrays; +import java.util.Properties; +import java.util.Random; +import java.util.concurrent.atomic.AtomicBoolean; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +public class ITNightlyBigQueryTest { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + static Connection bigQueryConnection; + static Statement bigQueryStatement; + static BigQuery bigQuery; + private static final Random random = new Random(); + private static final int randomNumber = random.nextInt(9999); + private static final String BASE_QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 order by" + + " trip_distance asc LIMIT %s"; + private static final String CONSTRAINTS_DATASET = "JDBC_CONSTRAINTS_TEST_DATASET"; + private static final String CONSTRAINTS_TABLE_NAME = "JDBC_CONSTRAINTS_TEST_TABLE"; + private static final String CONSTRAINTS_TABLE_NAME2 = "JDBC_CONSTRAINTS_TEST_TABLE2"; + private static final String CALLABLE_STMT_PROC_NAME = "IT_CALLABLE_STMT_PROC_TEST"; + private static final String CALLABLE_STMT_TABLE_NAME = "IT_CALLABLE_STMT_PROC_TABLE"; + private static final String CALLABLE_STMT_PARAM_KEY = "CALL_STMT_PARAM_KEY"; + private static final String CALLABLE_STMT_DML_INSERT_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_INSERT_TEST"; + private static final String CALLABLE_STMT_DML_UPDATE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_UPDATE_TEST"; + private static final String CALLABLE_STMT_DML_DELETE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_DELETE_TEST"; + private static final String CALLABLE_STMT_DML_TABLE_NAME = "IT_CALLABLE_STMT_PROC_DML_TABLE"; + private static final String DATASET = "JDBC_NIGHTLY_IT_DATASET"; + private static final String DATASET2 = "JDBC_PRESUBMIT_INTEGRATION_DATASET_2"; + static final String session_enabled_connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;EnableSession=1"; + + static final String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + @BeforeClass + public static void beforeClass() throws 
SQLException { + bigQueryConnection = DriverManager.getConnection(connection_uri, new Properties()); + bigQueryStatement = bigQueryConnection.createStatement(); + bigQuery = BigQueryOptions.newBuilder().build().getService(); + } + + @AfterClass + public static void afterClass() throws SQLException { + bigQueryStatement.close(); + bigQueryConnection.close(); + } + + @Test + public void testMergeInExecuteBatch() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "Inventory" + random.nextInt(9999); + String TABLE_NAME2 = "DetailedInventory" + random.nextInt(9999); + + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER);", + DATASET, TABLE_NAME1); + + String createQuery2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER," + + " `supply_constrained` BOOLEAN, `comment` STRING);", + DATASET, TABLE_NAME2); + + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (product, quantity, supply_constrained, comment) " + + "VALUES ('countertop microwave', 20, NULL,'[]' )," + + " ('front load washer', 20, false,'[]' ), " + + " ('microwave', 20, false,'[]' ), " + + " ('refrigerator', 10, false,'[]' );", + DATASET, TABLE_NAME2); + + bigQueryStatement.execute(createQuery); + bigQueryStatement.execute(createQuery2); + bigQueryStatement.execute(insertQuery2); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (product, quantity) " + + "VALUES (?,? ), (?,? ), (?,? ), (?,? ), (?,? ), (?,? );", + DATASET, TABLE_NAME1); + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 30); + insertPs.setString(3, "dryer"); + insertPs.setInt(4, 30); + insertPs.setString(5, "front load washer"); + insertPs.setInt(6, 20); + insertPs.setString(7, "microwave"); + insertPs.setInt(8, 20); + insertPs.setString(9, "oven"); + insertPs.setInt(10, 5); + insertPs.setString(11, "top load washer"); + insertPs.setInt(12, 10); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(6, insertStatus); + + String updateQuery = + String.format("UPDATE %s.%s SET quantity=? 
WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement updatePs = bigQueryConnection.prepareStatement(updateQuery); + updatePs.setString(2, "dryer"); + updatePs.setInt(1, 35); + + int updateStatus = updatePs.executeUpdate(); + assertEquals(1, updateStatus); + + String deleteQuery = String.format("DELETE FROM %s.%s WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement deletePs = bigQueryConnection.prepareStatement(deleteQuery); + deletePs.setString(1, "dishwasher"); + + int deleteStatus = deletePs.executeUpdate(); + assertEquals(1, deleteStatus); + + Statement statement = bigQueryConnection.createStatement(); + String mergeQuery = + String.format( + "MERGE %s.%s T\n" + + "USING %s.%s S\n" + + "ON T.product = S.product\n" + + "WHEN NOT MATCHED AND quantity < 100 THEN\n" + + " INSERT(product, quantity, supply_constrained, comment)\n" + + " VALUES(product, quantity, true, '[]')\n", + DATASET, TABLE_NAME2, DATASET, TABLE_NAME1); + statement.addBatch(mergeQuery); + int[] result = statement.executeBatch(); + + assertEquals(1, result.length); + assertEquals(3, result[0]); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1)); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME2)); + } + + @Test + public void testValidLongRunningQuery() throws SQLException { + // setup + String selectQuery = + "SELECT * FROM `bigquery-public-data.deepmind_alphafold.metadata` LIMIT 50000"; + + // Read data via JDBC + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + + for (int i = 0; i < 50000; i++) { + resultSet.next(); + assertFalse(resultSet.wasNull()); + assertNotNull(resultSet.getString(5)); + } + + // clean up + resultSet.close(); + } + + @Test + public void testQueryInterruptGracefullyStopsExplicitJob() + throws SQLException, InterruptedException { + AtomicBoolean threadException = new AtomicBoolean(true); + Connection bigQueryConnection = + DriverManager.getConnection(connection_uri + ";JobCreationMode=1", new Properties()); + Statement bigQueryStatement = bigQueryConnection.createStatement(); + + // This query takes 300 seconds to complete + String query300Seconds = + "DECLARE DELAY_TIME DATETIME; SET DELAY_TIME = DATETIME_ADD(CURRENT_DATETIME, INTERVAL 300" + + " SECOND); WHILE CURRENT_DATETIME < DELAY_TIME DO END WHILE;"; + + // Query will be started in the background thread & we will call cancel from current thread. 
+ Thread t = + new Thread( + () -> { + SQLException e = + assertThrows( + SQLException.class, () -> bigQueryStatement.execute(query300Seconds)); + assertTrue(e.getMessage().contains("User requested cancellation")); + threadException.set(false); + }); + t.start(); + // Allow thread to actually initiate the query + Thread.sleep(3000); + bigQueryStatement.cancel(); + // Wait until background thread is finished + t.join(); + assertFalse(threadException.get()); + // Ensure statement can be used again + assertFalse(bigQueryStatement.isClosed()); + bigQueryStatement.executeQuery("SELECT 1"); + } + + @Test + public void testQueryInterruptGracefullyStopsOptionalJob() + throws SQLException, InterruptedException { + AtomicBoolean threadException = new AtomicBoolean(true); + Connection bigQueryConnection = + DriverManager.getConnection(connection_uri + ";JobCreationMode=2", new Properties()); + Statement bigQueryStatement = bigQueryConnection.createStatement(); + + // This query takes 300 seconds to complete + String query300Seconds = + "DECLARE DELAY_TIME DATETIME; SET DELAY_TIME = DATETIME_ADD(CURRENT_DATETIME, INTERVAL 300" + + " SECOND); WHILE CURRENT_DATETIME < DELAY_TIME DO END WHILE;"; + + // Query will be started in the background thread & we will call cancel from current thread. + Thread t = + new Thread( + () -> { + SQLException e = + assertThrows( + SQLException.class, () -> bigQueryStatement.execute(query300Seconds)); + assertTrue(e.getMessage().contains("Query was cancelled.")); + threadException.set(false); + }); + t.start(); + // Allow thread to actually initiate the query + Thread.sleep(3000); + bigQueryStatement.cancel(); + // Wait until background thread is finished + t.join(); + assertFalse(threadException.get()); + // Ensure statement can be used again + assertFalse(bigQueryStatement.isClosed()); + bigQueryStatement.executeQuery("SELECT 1"); + } + + @Test + public void testWideColumnQueries() throws SQLException { + String selectQuery = + "SELECT * FROM `bigquery-public-data.covid19_open_data_eu.covid19_open_data` LIMIT 50000"; + + // Read data via JDBC + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + + for (int i = 0; i < 50000; i++) { + resultSet.next(); + assertFalse(resultSet.wasNull()); + } + + // clean up + resultSet.close(); + } + + @Test + public void testExecuteLargeUpdate() throws SQLException { + String tableName = "JDBC_LARGE_UPDATE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE TABLE %s.%s (" + + " gbifid STRING, scientificname STRING, " + + " individualcount INTEGER, isReviewed BOOLEAN)", + DATASET, tableName); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (gbifid, scientificname, individualcount) " + + "SELECT gbifid, scientificname, individualcount FROM " + + "bigquery-public-data.gbif.occurrences;", + DATASET, tableName); + String updateQuery = + String.format( + "UPDATE %s.%s SET isReviewed = false WHERE individualcount >= 0 OR individualcount IS" + + " NULL", + DATASET, tableName); + + String selectQuery = String.format("SELECT * FROM %s.%s LIMIT 10", DATASET, tableName); + + bigQueryStatement.execute(createQuery); + + long insertCount = bigQueryStatement.executeLargeUpdate(insertQuery); + assertTrue(insertCount > Integer.MAX_VALUE); + + long updateCount = bigQueryStatement.executeLargeUpdate(updateQuery); + assertTrue(updateCount > Integer.MAX_VALUE); + + ResultSet selectResult = bigQueryStatement.executeQuery(selectQuery); + assertTrue(selectResult.next()); + 
assertFalse(selectResult.getBoolean("isReviewed")); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, tableName)); + } + + @Test + public void testHTAPIWithValidDestinationTableSavesQueriesWithStandardSQL() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=SQL;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;" + + "EnableHighThroughputAPI=1;"; + String selectLegacyQuery = + "SELECT * FROM `bigquery-public-data.deepmind_alphafold.metadata` LIMIT 200000;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM INTEGRATION_TESTS.destination_table_test;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(200000, resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + } + + @Test + public void testBigQueryConcurrentLimitWithExecuteBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_CONCURRENT_LIMIT_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + int[] results; + for (int i = 0; i < 30; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testValidExecuteBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + int[] results; + for (int i = 0; i < 30; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + assertEquals(30, results.length); + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testValidExecuteBatchWithMultipleDatasets() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MULTIPLE_DATASET_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String createBatchTable2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET2, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + bigQueryStatement.execute(createBatchTable2); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + int[] results; + for (int i = 0; i < 15; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + for (int i = 0; i < 15; i++) { + String insertQuery = + "INSERT INTO " + + DATASET2 + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + for (int updateCount : results) { + assertEquals(1, updateCount); + } + + // do a select to validate row count on each + String selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(15, resultSetRowCount(resultSet)); + selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET2, BATCH_TABLE); + resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(15, resultSetRowCount(resultSet)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testValidExecuteBatchWithMultipleTables() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MULTI_TABLES_" + random.nextInt(99); + String BATCH_TABLE_2 = "JDBC_EXECUTE_BATCH_TABLE_MULTI_TABLES_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String createBatchTable2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE_2); + bigQueryStatement.execute(createBatchTable); + bigQueryStatement.execute(createBatchTable2); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + int[] results; + for (int i = 0; i < 5; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + for (int i = 0; i < 5; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE_2 + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + for (int updateCount : results) { + assertEquals(1, updateCount); + } + + // do a select to test row count on each + String selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(5, resultSetRowCount(resultSet)); + selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE_2); + resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(5, resultSetRowCount(resultSet)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE_2)); + } + + @Test + public void testPreparedStatementExecuteUpdate() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "Inventory" + random.nextInt(9999); + String TABLE_NAME2 = "DetailedInventory" + random.nextInt(9999); + + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER);", + DATASET, TABLE_NAME1); + + String createQuery2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER," + + " `supply_constrained` BOOLEAN, `comment` STRING);", + DATASET, TABLE_NAME2); + + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (product, quantity, supply_constrained, comment) " + + "VALUES ('countertop microwave', 20, NULL,'[]' )," + + " ('front load washer', 20, false,'[]' ), " + + " ('microwave', 20, false,'[]' ), " + + " ('refrigerator', 10, false,'[]' );", + DATASET, TABLE_NAME2); + + bigQueryStatement.execute(createQuery); + bigQueryStatement.execute(createQuery2); + bigQueryStatement.execute(insertQuery2); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (product, quantity) " + + "VALUES (?,? ), (?,? ), (?,? ), (?,? ), (?,? ), (?,? );", + DATASET, TABLE_NAME1); + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 30); + insertPs.setString(3, "dryer"); + insertPs.setInt(4, 30); + insertPs.setString(5, "front load washer"); + insertPs.setInt(6, 20); + insertPs.setString(7, "microwave"); + insertPs.setInt(8, 20); + insertPs.setString(9, "oven"); + insertPs.setInt(10, 5); + insertPs.setString(11, "top load washer"); + insertPs.setInt(12, 10); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(6, insertStatus); + + String updateQuery = + String.format("UPDATE %s.%s SET quantity=? 
WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement updatePs = bigQueryConnection.prepareStatement(updateQuery); + updatePs.setString(2, "dryer"); + updatePs.setInt(1, 35); + + int updateStatus = updatePs.executeUpdate(); + assertEquals(1, updateStatus); + + String deleteQuery = String.format("DELETE FROM %s.%s WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement deletePs = bigQueryConnection.prepareStatement(deleteQuery); + deletePs.setString(1, "dishwasher"); + + int deleteStatus = deletePs.executeUpdate(); + assertEquals(1, deleteStatus); + + String mergeQuery = + String.format( + "MERGE %s.%s T\n" + + "USING %s.%s S\n" + + "ON T.product = S.product\n" + + "WHEN NOT MATCHED AND quantity < ? THEN\n" + + " INSERT(product, quantity, supply_constrained, comment)\n" + + " VALUES(product, quantity, true, ?)\n" + + "WHEN NOT MATCHED THEN\n" + + " INSERT(product, quantity, supply_constrained)\n" + + " VALUES(product, quantity, false)", + DATASET, TABLE_NAME2, DATASET, TABLE_NAME1); + PreparedStatement mergePs = bigQueryConnection.prepareStatement(mergeQuery); + mergePs.setInt(1, 20); + mergePs.setString(2, "comment" + random.nextInt(999)); + + int mergeStatus = mergePs.executeUpdate(); + assertEquals(3, mergeStatus); + + ResultSet rs = + bigQueryStatement.executeQuery( + String.format("SELECT COUNT(*) AS row_count\n" + "FROM %s.%s", DATASET, TABLE_NAME2)); + rs.next(); + assertEquals(7, rs.getInt(1)); + + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1); + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME2)); + } + + @Test + public void testFailedStatementInTheMiddleOfExecuteBatchStopsExecuting() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_ERROR_IN_MIDDLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String createBatchTable2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET2, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + bigQueryStatement.execute(createBatchTable2); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET2, BATCH_TABLE); + + // act + for (int i = 0; i < 20; i++) { + if (i == 10) { + statement.addBatch( + "INSERT INTO " + + DATASET2 + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "); + } else { + statement.addBatch( + "INSERT INTO " + + DATASET + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "); + } + } + bigQueryStatement.execute(dropQuery); + + // assertions + assertThrows(BigQueryJdbcException.class, statement::executeBatch); + String selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(10, resultSetRowCount(resultSet)); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testHTAPIWithValidDestinationTableSavesQueriesWithLegacy() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;" + + "EnableHighThroughputAPI=1;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200000;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM INTEGRATION_TESTS.destination_table_test;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertTrue(0 < resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + } + + @Test + public void testMultiStatementTransactionRollbackByUser() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + connection.rollback(); + assertTrue( + "After rollback() in manual commit mode, a new transaction should be started.", + connection.isTransactionStarted()); + + // Separate query to check if transaction rollback worked + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testMultiStatementTransactionDoesNotCommitWithoutCommit() throws 
SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + + // Separate query to check nothing committed + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + statement.close(); + connection.close(); + } + + @Test + public void testValidMultiStatementTransactionCommits() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + status = statement.execute(updateQuery); + assertFalse(status); + status = statement.execute(selectQuery); + assertTrue(status); + connection.commit(); + + // Separate query to check inserted and updated data committed + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertTrue(resultSet.next()); + assertEquals(14, resultSet.getInt(3)); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + statement.close(); + connection.close(); + } + + @Test + public void testConnectionWithMultipleTransactionCommits() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', 
%s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + boolean status = statement.execute(insertQuery); + assertFalse(status); + status = statement.execute(updateQuery); + assertFalse(status); + status = statement.execute(selectQuery); + assertTrue(status); + connection.commit(); + + connection.setAutoCommit(false); + assertTrue(connection.isTransactionStarted()); + statement.execute(insertQuery); + connection.rollback(); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int count = 0; + while (resultSet.next()) { + count++; + } + assertEquals(1, count); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testTransactionRollbackOnError() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s ;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + String transactionOnError = + "BEGIN\n" + + "\n" + + " BEGIN TRANSACTION;\n" + + " INSERT INTO " + + DATASET + + "." 
+ + TRANSACTION_TABLE + + "\n" + + " VALUES (39, 'Drake', 123);\n" + + " SELECT 1/0;\n" + + " COMMIT TRANSACTION;\n" + + "\n" + + "EXCEPTION WHEN ERROR THEN\n" + + " SELECT @@error.message;\n" + + " ROLLBACK TRANSACTION;\n" + + "END;"; + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + Statement statement = connection.createStatement(); + statement.execute(transactionOnError); + + // do a check to see if no vals inserted + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testClearBatchClears() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_CLEAR_BATCH_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 13 WHERE age = %s;", DATASET, BATCH_TABLE, randomNumber); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + statement.addBatch(updateQuery); + statement.clearBatch(); + int[] results = statement.executeBatch(); + + // assertion + assertEquals(0, results.length); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testMultipleExecuteBatches() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MULTI_BATCHES_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + "); "; + String insertQuery2 = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', 123), " + + " (12, 'Farhan', 123); "; + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + statement.addBatch(insertQuery); + int[] firstResults = statement.executeBatch(); + statement.addBatch(insertQuery2); + int[] secondResults = statement.executeBatch(); + + // assertions + assertEquals(1, firstResults.length); + assertEquals(1, secondResults.length); + assertEquals(1, firstResults[0]); + assertEquals(2, secondResults[0]); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQuery() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_DATATYPES_INTEGRATION_TEST_TABLE"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME; + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(16, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10L).toArray()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356456"), resultSet.getObject(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Timestamp.valueOf("2019-02-17 11:24:00"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + assertEquals("123-7 -19 0:24:12.000006", resultSet.getString(16)); + } + + @Test + public void testRepeatedStructFromSelectQuery() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_REPEATED_STRUCT_INTEGRATION_TEST"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + resultSet.next(); + + Struct[] repeatedStruct = (Struct[]) resultSet.getArray(1).getArray(); + assertEquals(3, Arrays.stream(repeatedStruct).count()); + + Object[] alice = repeatedStruct[0].getAttributes(); + Object[] bob = repeatedStruct[1].getAttributes(); + Object[] charlie = repeatedStruct[2].getAttributes(); + assertEquals("Alice", alice[0]); + assertEquals("30", alice[1]); + assertEquals("Bob", bob[0]); + assertEquals("25", bob[1]); + assertEquals("Charlie", charlie[0]); + assertEquals("35", charlie[1]); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQueryArrowDataset() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_INTEGRATION_ARROW_TEST_TABLE"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME + " LIMIT 5000;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";EnableHighThroughputAPI=1;" + + "HighThroughputActivationRatio=2;" + + "HighThroughputMinTableSize=1000;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet); + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(15, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10L).toArray()); + assertEquals("{\"name\":\"Eric\",\"age\":10}", expectedStruct.toString()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getObject(10)); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getTimestamp(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getDate(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getTime(12)); + assertEquals(Timestamp.valueOf("2022-01-22 22:22:12.142265"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + connection.close(); + } + + /////////////// MARKER + /// + + @Test + public void testBulkInsertOperation() throws SQLException { + String TABLE_NAME = "JDBC_BULK_INSERT_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING,\n" + + " `IntegerField` INTEGER," + + " `FloatField` FLOAT64," + + " `NumericField` NUMERIC," + + " `BigNumericField` BIGNUMERIC," + + " `BooleanField` BOOLEAN" + + " );", + DATASET, TABLE_NAME); + String insertQuery = + String.format("INSERT INTO %s.%s VALUES(?, ?, ?,?, ?, ?);", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;" + + "EnableWriteAPI=1;SWA_ActivationRowCount=5;SWA_AppendRowCount=500"; + + try (Connection connection = DriverManager.getConnection(connection_uri)) { + bigQueryStatement.execute(createQuery); + PreparedStatement statement = connection.prepareStatement(insertQuery); + for (int i = 0; i < 20; ++i) { + statement.setString(1, i + "StringField"); + statement.setInt(2, i); + statement.setFloat(3, (float) (i + .6)); + statement.setInt(4, random.nextInt()); + statement.setInt(5, random.nextInt()); + statement.setBoolean(6, true); + + statement.addBatch(); + } + int[] result = statement.executeBatch(); + 
+ ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(result.length, resultSetRowCount(resultSet)); + + bigQueryStatement.execute(dropQuery); + + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testBulkInsertOperationStandard() throws SQLException { + String TABLE_NAME = "JDBC_BULK_INSERT_STANDARD_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING,\n" + + " `IntegerField` INTEGER," + + " `FloatField` FLOAT64," + + " `NumericField` NUMERIC," + + " `BigNumericField` BIGNUMERIC," + + " `BooleanField` BOOLEAN" + + " );", + DATASET, TABLE_NAME); + String insertQuery = + String.format("INSERT INTO %s.%s VALUES(?, ?, ?,?, ?, ?);", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;" + + "EnableWriteAPI=0;SWA_ActivationRowCount=50;SWA_AppendRowCount=500"; + + try (Connection connection = DriverManager.getConnection(connection_uri)) { + bigQueryStatement.execute(createQuery); + PreparedStatement statement = connection.prepareStatement(insertQuery); + for (int i = 0; i < 20; ++i) { + statement.setString(1, i + "StringField"); + statement.setInt(2, i); + statement.setFloat(3, (float) (i + .6)); + statement.setInt(4, random.nextInt()); + statement.setInt(5, random.nextInt()); + statement.setBoolean(6, true); + + statement.addBatch(); + } + int[] result = statement.executeBatch(); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(result.length, resultSetRowCount(resultSet)); + + bigQueryStatement.execute(dropQuery); + + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testExecuteQueryWithSetMaxRows() throws SQLException { + String TEST_MAX_ROWS_TABLE = "JDBC_TEST_MAX_ROWS_TABLE" + random.nextInt(99); + int id1 = random.nextInt(99); + int id2 = random.nextInt(99); + String createMaxRowsTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING);", + DATASET, TEST_MAX_ROWS_TABLE); + String insertQuery1 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-1');", + DATASET, TEST_MAX_ROWS_TABLE, id1); + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-2');", + DATASET, TEST_MAX_ROWS_TABLE, id2); + String selectQuery = String.format("SELECT id, name FROM %s.%s;", DATASET, TEST_MAX_ROWS_TABLE); + + boolean executeResult = bigQueryStatement.execute(createMaxRowsTable); + assertFalse(executeResult); + int rowsInserted = bigQueryStatement.executeUpdate(insertQuery1); + assertEquals(1, rowsInserted); + rowsInserted = bigQueryStatement.executeUpdate(insertQuery2); + assertEquals(1, rowsInserted); + + bigQueryStatement.setMaxRows(1); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + assertEquals(1, resultSetRowCount(resultSet)); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TEST_MAX_ROWS_TABLE)); + } + + @Test + public void testExecuteQueryWithoutSetMaxRows() throws SQLException { + String TEST_MAX_ROWS_TABLE = "JDBC_TEST_MAX_ROWS_TABLE" + random.nextInt(99); + int id1 = random.nextInt(99); + int id2 = random.nextInt(99); + String 
createMaxRowsTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING);", + DATASET, TEST_MAX_ROWS_TABLE); + String insertQuery1 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-1');", + DATASET, TEST_MAX_ROWS_TABLE, id1); + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-2');", + DATASET, TEST_MAX_ROWS_TABLE, id2); + String selectQuery = String.format("SELECT id, name FROM %s.%s;", DATASET, TEST_MAX_ROWS_TABLE); + + boolean executeResult = bigQueryStatement.execute(createMaxRowsTable); + assertFalse(executeResult); + int rowsInserted = bigQueryStatement.executeUpdate(insertQuery1); + assertEquals(1, rowsInserted); + rowsInserted = bigQueryStatement.executeUpdate(insertQuery2); + assertEquals(1, rowsInserted); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + assertEquals(2, resultSetRowCount(resultSet)); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TEST_MAX_ROWS_TABLE)); + } + + @Test + public void testQueryPropertySessionIdIsUsedWithTransaction() + throws SQLException, InterruptedException { + // setup + String sessionId = getSessionId(); + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String beginTransaction = "BEGIN TRANSACTION; "; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + String commitTransaction = "COMMIT TRANSACTION;"; + + String transactionQuery = + beginTransaction + + insertQuery + + insertQuery + + updateQuery + + selectQuery + + commitTransaction; + + bigQueryStatement.execute(createTransactionTable); + + // Run the transaction + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=session_id=" + + sessionId + + ";"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + statement.execute(transactionQuery); + + // Test each query's result with getMoreResults + int resultsCount = 0; + boolean hasMoreResult = statement.getMoreResults(); + while (hasMoreResult || statement.getUpdateCount() != -1) { + if (statement.getUpdateCount() == -1) { + ResultSet result = statement.getResultSet(); + assertTrue(result.next()); + assertEquals(-1, statement.getUpdateCount()); + } else { + assertTrue(statement.getUpdateCount() > -1); + } + hasMoreResult = statement.getMoreResults(); + resultsCount++; + } + assertEquals(5, resultsCount); + + // Check the transaction was actually committed. 
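+    // The transaction inserted two rows with the same age and the update set both to
+    // 14, so a committed session should surface exactly two rows, each with age 14.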
+ ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + assertEquals(14, resultSet.getInt(3)); + } + assertEquals(2, rowCount); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testRollbackOnConnectionClosed() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (15, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 12 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + connection.close(); + + // Separate query to check if transaction rollback worked + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + } + + @Test + public void testSingleStatementTransaction() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String beginTransaction = "BEGIN TRANSACTION; "; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + String commitTransaction = "COMMIT TRANSACTION;"; + + String transactionQuery = + beginTransaction + + insertQuery + + insertQuery + + updateQuery + + selectQuery + + commitTransaction; + + bigQueryStatement.execute(createTransactionTable); + + // Run the transaction + Connection connection = DriverManager.getConnection(session_enabled_connection_uri); + Statement statement = connection.createStatement(); + statement.execute(transactionQuery); + + // Test each query's result with getMoreResults + int resultsCount = 0; + boolean hasMoreResult = statement.getMoreResults(); + while (hasMoreResult || statement.getUpdateCount() != -1) { + if (statement.getUpdateCount() == -1) { + ResultSet result = statement.getResultSet(); + assertTrue(result.next()); + assertEquals(-1, statement.getUpdateCount()); + } else { + assertTrue(statement.getUpdateCount() > -1); + } + hasMoreResult = 
statement.getMoreResults(); + resultsCount++; + } + assertEquals(5, resultsCount); + + // Check the transaction was actually committed. + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + assertEquals(14, resultSet.getInt(3)); + } + assertEquals(2, rowCount); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testConnectionClosedRollsBackStartedTransactions() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + connection.close(); + + // Separate query to check if transaction rollback worked + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + } + + @Test + public void testStatelessQueryPathSmall() throws SQLException { + Properties jobCreationMode = new Properties(); + jobCreationMode.setProperty("JobCreationMode", "2"); + Connection bigQueryConnectionUseStateless = + DriverManager.getConnection(connection_uri, jobCreationMode); + + Statement statement = bigQueryConnectionUseStateless.createStatement(); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(850, resultSetRowCount(jsonResultSet)); + + String queryEmpty = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 0"; + ResultSet jsonResultSetEmpty = statement.executeQuery(queryEmpty); + assertTrue(jsonResultSetEmpty.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(0, resultSetRowCount(jsonResultSetEmpty)); + bigQueryConnectionUseStateless.close(); + } + + @Test + public void testFastQueryPathMedium() throws SQLException { + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 9000"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(9000, resultSetRowCount(jsonResultSet)); + } + + @Test + public void 
testFastQueryPathLarge() throws SQLException { + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 18000"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(18000, resultSetRowCount(jsonResultSet)); + } + + @Test + // reads using ReadAPI and makes sure that they are in order, which implies threads worked + // correctly + public void testIterateOrderArrowMultiThread() throws SQLException { + int expectedCnt = 200000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet rs = bigQueryStatement.executeQuery(longQuery); + int cnt = 0; + double oldTriDis = 0.0d; + while (rs.next()) { + double tripDis = rs.getDouble("trip_distance"); + ++cnt; + assertTrue(oldTriDis <= tripDis); + oldTriDis = tripDis; + } + assertEquals(expectedCnt, cnt); // all the records were retrieved + } + + @Test + public void testNonEnabledUseLegacySQLThrowsSyntaxError() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 20000000;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows( + BigQueryJdbcSqlSyntaxErrorException.class, () -> statement.execute(selectLegacyQuery)); + connection.close(); + } + + @Test + public void testFastQueryPathEmpty() throws SQLException { + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 0"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(0, resultSetRowCount(jsonResultSet)); + } + + @Test + public void testReadAPIPathLarge() throws SQLException { + Properties withReadApi = new Properties(); + withReadApi.setProperty("EnableHighThroughputAPI", "1"); + withReadApi.setProperty("HighThroughputActivationRatio", "2"); + withReadApi.setProperty("HighThroughputMinTableSize", "1000"); + withReadApi.setProperty("MaxResults", "300"); + + Connection connection = DriverManager.getConnection(connection_uri, withReadApi); + Statement statement = connection.createStatement(); + int expectedCnt = 5000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet arrowResultSet = statement.executeQuery(longQuery); + assertTrue(arrowResultSet.getClass().getName().contains("BigQueryArrowResultSet")); + assertEquals(expectedCnt, resultSetRowCount(arrowResultSet)); + arrowResultSet.close(); + connection.close(); + } + + @Test + public void testReadAPIPathLargeWithThresholdParameters() throws SQLException { + String connectionUri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;MaxResults=300;HighThroughputActivationRatio=2;" + + "HighThroughputMinTableSize=100;EnableHighThroughputAPI=1"; + Connection connection = DriverManager.getConnection(connectionUri); + Statement statement = connection.createStatement(); + int expectedCnt = 1000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet arrowResultSet = statement.executeQuery(longQuery); + 
assertTrue(arrowResultSet.getClass().getName().contains("BigQueryArrowResultSet")); + assertEquals(expectedCnt, resultSetRowCount(arrowResultSet)); + arrowResultSet.close(); + connection.close(); + } + + @Test + public void testReadAPIPathLargeWithThresholdNotMet() throws SQLException { + String connectionUri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;HighThroughputActivationRatio=4;" + + "HighThroughputMinTableSize=100;EnableHighThroughputAPI=1"; + Connection connection = DriverManager.getConnection(connectionUri); + Statement statement = connection.createStatement(); + int expectedCnt = 5000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet arrowResultSet = statement.executeQuery(longQuery); + assertTrue(arrowResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(expectedCnt, resultSetRowCount(arrowResultSet)); + arrowResultSet.close(); + connection.close(); + } + + private String getSessionId() throws InterruptedException { + QueryJobConfiguration stubJobConfig = + QueryJobConfiguration.newBuilder("Select 1;").setCreateSession(true).build(); + Job job = bigQuery.create(JobInfo.of(stubJobConfig)); + job = job.waitFor(); + Job stubJob = bigQuery.getJob(job.getJobId()); + return stubJob.getStatistics().getSessionInfo().getSessionId(); + } + + private int resultSetRowCount(ResultSet resultSet) throws SQLException { + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + return rowCount; + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java new file mode 100644 index 0000000000..1b73f84b66 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java @@ -0,0 +1,300 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.asList; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.sql.Date; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Properties; +import org.junit.Test; + +public class ITPSCBigQueryTest { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + private static final String SERVICE_ACCOUNT_EMAIL = requireEnvVar("SA_EMAIL"); + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + return value; + } + + @Test + public void testNoOverrideTimesOut() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + assertThrows(BigQueryException.class, () -> statement.executeQuery(query)); + } + + @Test + public void testValidADCAuthenticationInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidOAuthType2AuthenticationInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=2;" + + "OAuthAccessToken=RedactedToken;" // TODO(fahmz): see if there is a way to use SMS + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com;"; + + Connection connection = DriverManager.getConnection(connection_uri); + 
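+    // The EndpointOverrides entry above re-points the BigQuery REST client at the
+    // Private Service Connect endpoint, and OAuthType=2 sends the pre-generated access
+    // token as-is, which presumably is why no OAuth2 override is needed here; the
+    // redacted token stands in for a live credential.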
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertEquals(
+        "PRE_GENERATED_TOKEN",
+        ((BigQueryConnection) connection).getAuthProperties().get("OAuthType"));
+
+    String query =
+        "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850";
+    Statement statement = connection.createStatement();
+    ResultSet jsonResultSet = statement.executeQuery(query);
+    assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet"));
+    connection.close();
+  }
+
+  @Test
+  public void testValidServiceAccountAuthenticationKeyFileInPSC() throws SQLException, IOException {
+    final String SERVICE_ACCOUNT_KEY = requireEnvVar("SA_SECRET");
+    String connection_uri =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "ProjectId="
+            + PROJECT_ID
+            + ";OAuthType=0;"
+            + "OAuthPvtKeyPath="
+            + SERVICE_ACCOUNT_KEY
+            + ";EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com,"
+            + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;";
+
+    Connection connection = DriverManager.getConnection(connection_uri);
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertEquals(
+        "GOOGLE_SERVICE_ACCOUNT",
+        ((BigQueryConnection) connection).getAuthProperties().get("OAuthType"));
+    connection.close();
+  }
+
+  @Test
+  public void testValidServiceAccountAuthenticationViaEmailInPSC() throws SQLException {
+    final String SERVICE_ACCOUNT_KEY = requireEnvVar("SA_SECRET");
+    String connection_uri =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "ProjectId="
+            + PROJECT_ID
+            + ";OAuthType=0;"
+            + "OAuthServiceAcctEmail="
+            + SERVICE_ACCOUNT_EMAIL
+            + ";OAuthPvtKey="
+            + SERVICE_ACCOUNT_KEY
+            + ";EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com,"
+            + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;";
+
+    Connection connection = DriverManager.getConnection(connection_uri);
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertEquals(
+        "GOOGLE_SERVICE_ACCOUNT",
+        ((BigQueryConnection) connection).getAuthProperties().get("OAuthType"));
+    String query =
+        "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850";
+    Statement statement = connection.createStatement();
+    ResultSet jsonResultSet = statement.executeQuery(query);
+    assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet"));
+    connection.close();
+  }
+
+  @Test
+  public void testValidAllDataTypesSerializationFromSelectQueryInPSC() throws SQLException {
+    String connection_uri =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "ProjectId="
+            + PROJECT_ID
+            + ";OAuthType=0;"
+            + "OAuthPvtKeyPath=.\\google-cloud-bigquery-jdbc\\secret.json;"
+            + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com,"
+            + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;";
+    Connection bigQueryConnection = DriverManager.getConnection(connection_uri, new Properties());
+    Statement bigQueryStatement = bigQueryConnection.createStatement();
+    String DATASET = "JDBC_INTEGRATION_DATASET";
+    String TABLE_NAME = "JDBC_DATATYPES_INTEGRATION_TEST_TABLE";
+    String selectQuery = "select * from " + DATASET + "." + TABLE_NAME;
+
+    ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery);
+    assertNotNull(resultSet);
+    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
+    resultSet.next();
+    assertEquals(16, resultSetMetaData.getColumnCount());
+    assertTrue(resultSet.getBoolean(1));
+    assertEquals(33, resultSet.getInt(2));
+    assertEquals(50.05f, resultSet.getFloat(3), 0.0);
+    assertEquals(123.456, resultSet.getDouble(4), 0.0);
+    assertEquals(123.456789, resultSet.getDouble(5), 0.0);
+    assertEquals("testString", resultSet.getString(6));
+    assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8));
+    Struct expectedStruct = (Struct) resultSet.getObject(8);
+    assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10).toArray());
+    assertArrayEquals(
+        new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray());
+
+    assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356456"), resultSet.getObject(10));
+    assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11));
+    assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12));
+    assertEquals(Timestamp.valueOf("2019-02-17 11:24:00"), resultSet.getObject(13));
+    assertEquals("POINT(1 2)", resultSet.getString(14));
+    assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15));
+    assertEquals("123-7 -19 0:24:12.000006", resultSet.getString(16));
+  }
+
+  @Test
+  public void testValidAllDataTypesSerializationFromSelectQueryArrowDatasetInPSC()
+      throws SQLException {
+    String DATASET = "JDBC_INTEGRATION_DATASET";
+    String TABLE_NAME = "JDBC_INTEGRATION_ARROW_TEST_TABLE";
+    String selectQuery = "select * from " + DATASET + "." + TABLE_NAME;
+    String connection_uri =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "ProjectId="
+            + PROJECT_ID
+            + ";OAuthType=0;"
+            + "OAuthPvtKeyPath=.\\google-cloud-bigquery-jdbc\\secret.json;"
+            + "EnableHighThroughputAPI=1;"
+            + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com,"
+            + "READ_API=bigquerystorage-privateendpoint.p.googleapis.com:443,"
+            + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;";
+
+    // Read data via JDBC
+    Connection connection = DriverManager.getConnection(connection_uri);
+    Statement statement = connection.createStatement();
+    ResultSet resultSet = statement.executeQuery(selectQuery);
+    assertNotNull(resultSet);
+
+    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
+    resultSet.next();
+    assertEquals(15, resultSetMetaData.getColumnCount());
+    assertTrue(resultSet.getBoolean(1));
+    assertEquals(33, resultSet.getInt(2));
+    assertEquals(50.05f, resultSet.getFloat(3), 0.0);
+    assertEquals(123.456, resultSet.getDouble(4), 0.0);
+    assertEquals(123.456789, resultSet.getDouble(5), 0.0);
+    assertEquals("testString", resultSet.getString(6));
+    assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8));
+    Struct expectedStruct = (Struct) resultSet.getObject(8);
+    assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10).toArray());
+    assertEquals("{\"v\":{\"f\":[{\"v\":\"Eric\"},{\"v\":\"10\"}]}}", expectedStruct.toString());
+    assertArrayEquals(
+        new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray());
+    assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getObject(10));
+    assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getTimestamp(10));
+    assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11));
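+    // Note the precision difference from the JSON-path test above: the Arrow result set
+    // surfaces this TIMESTAMP at millisecond precision (.356), where the JSON path
+    // asserted microseconds (.356456); presumably an artifact of the Arrow conversion
+    // rather than of the stored value.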
assertEquals(Date.valueOf("2019-1-12"), resultSet.getDate(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getTime(12)); + assertEquals(Timestamp.valueOf("2022-01-22 22:22:12.142265"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + connection.close(); + } + + @Test + public void testValidExternalAccountAuthenticationInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;" + + "BYOID_AudienceUri=//iam.googleapis.com/projects//locations//workloadIdentityPools//providers/;" + + "BYOID_SubjectTokenType=;" + + "BYOID_CredentialSource={\"file\":\"/path/to/file\"};" + + "BYOID_SA_Impersonation_Uri=;" + + "BYOID_TokenUri=;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertNotNull(resultSet); + connection.close(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java new file mode 100644 index 0000000000..fbde5ecc67 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java @@ -0,0 +1,234 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Arrays; +import java.util.List; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.experimental.runners.Enclosed; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Enclosed.class) +public class ITProxyBigQueryTest { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + static final String PROXY_HOST = "34.94.167.18"; + static final String PROXY_PORT = "3128"; + static final String PROXY_UID = "fahmz"; + static final String PROXY_PWD = "fahmz"; + + public static class NonParameterizedProxyTests { + @Test + public void testValidAuthenticatedProxy() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";ProxyUid=" + + PROXY_UID + + ";ProxyPwd=" + + PROXY_PWD + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + boolean result = + statement.execute( + "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(result); + connection.close(); + } + + @Test + public void testAuthenticatedProxyWithOutAuthDetailsThrows() throws SQLException { + String query = "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";"; + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.execute(query)); + } + + @Test + public void testNonExistingProxyTimesOut() throws SQLException { + String query = "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=111.12.111.11;" // If the test fails you may have this ip address + // assigned + + "ProxyPort=1111;"; + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.execute(query)); + } + + @Test + @Ignore // Run this when Proxy server has no authentication otherwise you'll get a "407 Proxy + // Authentication Required". 
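+    // For orientation, the Proxy* properties used throughout this class map onto what a
+    // plain JVM HTTP client would be given via system properties plus an Authenticator;
+    // a minimal stand-alone sketch (an assumption for illustration, not the driver's
+    // actual wiring) would be:
+    //   System.setProperty("https.proxyHost", PROXY_HOST);
+    //   System.setProperty("https.proxyPort", PROXY_PORT);
+    //   java.net.Authenticator.setDefault(
+    //       new java.net.Authenticator() {
+    //         @Override
+    //         protected java.net.PasswordAuthentication getPasswordAuthentication() {
+    //           return new java.net.PasswordAuthentication(PROXY_UID, PROXY_PWD.toCharArray());
+    //         }
+    //       });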
+    public void testNonAuthenticatedProxy() throws SQLException {
+      String connection_uri =
+          "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+              + "ProjectId="
+              + PROJECT_ID
+              + ";OAuthType=3;"
+              + "ProxyHost="
+              + PROXY_HOST
+              + ";ProxyPort="
+              + PROXY_PORT
+              + ";";
+
+      Connection connection = DriverManager.getConnection(connection_uri);
+      assertNotNull(connection);
+      assertFalse(connection.isClosed());
+      Statement statement = connection.createStatement();
+      boolean result =
+          statement.execute(
+              "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180");
+      assertTrue(result);
+      connection.close();
+    }
+
+    @Test
+    public void testValidNonProxyConnectionQueries() throws SQLException {
+      String connection_uri =
+          "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+              + "ProjectId="
+              + PROJECT_ID
+              + ";OAuthType=3;";
+
+      Connection connection = DriverManager.getConnection(connection_uri);
+      assertNotNull(connection);
+      assertFalse(connection.isClosed());
+      Statement statement = connection.createStatement();
+      boolean result =
+          statement.execute(
+              "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180");
+      assertTrue(result);
+      connection.close();
+    }
+
+    @Test
+    public void testReadAPIEnabledWithProxySettings() throws SQLException {
+      String connection_uri =
+          "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+              + "ProjectId="
+              + PROJECT_ID
+              + ";OAuthType=3;"
+              + "ProxyHost="
+              + PROXY_HOST
+              + ";ProxyPort="
+              + PROXY_PORT
+              + ";EnableHighThroughputAPI=1;"
+              + "ProxyUid="
+              + PROXY_UID
+              + ";ProxyPwd="
+              + PROXY_PWD
+              + ";";
+
+      Connection connection = DriverManager.getConnection(connection_uri);
+      assertNotNull(connection);
+      assertFalse(connection.isClosed());
+      Statement statement = connection.createStatement();
+      boolean result =
+          statement.execute(
+              "SELECT * FROM `bigquery-public-data.covid19_open_data_eu.covid19_open_data` LIMIT 200000");
+      assertTrue(result);
+      connection.close();
+    }
+  }
+
+  @RunWith(Parameterized.class)
+  public static class ParametrizedMissingPropertiesTest {
+    private final String ProxyHost;
+    private final String ProxyPort;
+    private final String ProxyUid;
+    private final String ProxyPwd;
+
+    public ParametrizedMissingPropertiesTest(
+        String ProxyHost, String ProxyPort, String ProxyUid, String ProxyPwd) {
+      this.ProxyHost = ProxyHost;
+      this.ProxyPort = ProxyPort;
+      this.ProxyUid = ProxyUid;
+      this.ProxyPwd = ProxyPwd;
+    }
+
+    @Parameterized.Parameters
+    public static List<String[]> ProxyParameters() {
+      String proxyHost = "ProxyHost=" + PROXY_HOST + ";";
+      String proxyPort = "ProxyPort=" + PROXY_PORT + ";";
+      String proxyUid = "ProxyUid=" + PROXY_UID + ";";
+      String proxyPwd = "ProxyPwd=" + PROXY_PWD + ";";
+      return Arrays.asList(
+          new String[][] {
+            {"", proxyPort, proxyUid, proxyPwd},
+            {proxyHost, "", proxyUid, proxyPwd},
+            {proxyHost, proxyPort, "", proxyPwd},
+            {proxyHost, proxyPort, proxyUid, ""},
+            {"", "", proxyUid, proxyPwd}
+          });
+    }
+
+    @Test
+    public void testMissingProxyParameterThrowsIllegalArgument() {
+      String connection_uri =
+          "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+              + "ProjectId="
+              + PROJECT_ID
+              + ";OAuthType=3;"
+              + ProxyHost
+              + ProxyPort
+              + ProxyUid
+              + ProxyPwd;
+      assertThrows(
+          IllegalArgumentException.class, () -> DriverManager.getConnection(connection_uri));
+    }
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java 
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java new file mode 100644 index 0000000000..3fa2d7d7e4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java @@ -0,0 +1,220 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.it; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.Test; + +public class ITTPCBigQueryTest { + private static final String ENDPOINT_URL = System.getenv("ENDPOINT_URL"); + private static final String UNIVERSE_DOMAIN = System.getenv("UNIVERSE_DOMAIN"); + private static final String TPC_SERVICE_ACCOUNT = System.getenv("SERVICE_ACCOUNT"); + private static final String TPC_PVT_KEY = System.getenv("PRIVATE_KEY"); + private static final String TPC_ACCESS_TOKEN = System.getenv("ACCESS_TOKEN"); + private static final String TPC_PROJECT_ID = System.getenv("PROJECT_ID"); + + private static final String TPC_ENDPOINT = + (ENDPOINT_URL.isEmpty()) + ? "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443" + : "jdbc:bigquery://" + ENDPOINT_URL; + private static final String TPC_UNIVERSE_DOMAIN = + (UNIVERSE_DOMAIN.isEmpty()) ? "googleapis.com" : UNIVERSE_DOMAIN; + + // See here go/bq-cli-tpc for testing setup. + // Use the default test project. + // For the SA you will have to give it bigquery admin permissions cl/627813300 and will have to + // revert after testing. + // Plug in the values for the connection properties from the guide into the connection string. + @Test + public void testServiceAccountAuthenticationViaEmail() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthType=0;" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";" + + "OAuthServiceAcctEmail=" + + TPC_SERVICE_ACCOUNT + + ";" + + "OAuthPvtKey=" + + TPC_PVT_KEY + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT 1"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + // You will need to change the environment variable for GOOGLE_APPLICATION_CREDENTIALS to point to + // the SA key file. 
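+  // For example (the path is illustrative, not a checked-in file):
+  //   export GOOGLE_APPLICATION_CREDENTIALS=/path/to/tpc-sa-key.json
+  // OAuthType=3 defers to Application Default Credentials, so whichever identity ADC
+  // resolves (this environment variable, gcloud user credentials, or the metadata
+  // server) is the one exercised against the TPC universe below.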
+ @Test + public void testValidApplicationDefaultCredentialsAuthentication() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "OAuthType=3;" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidPreGeneratedAccessTokenAuthentication() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "OAuthType=2;" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthAccessToken=" + + TPC_ACCESS_TOKEN + + ";" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertNotNull(connection); + assertFalse(connection.isClosed()); + connection.close(); + } + + @Test + public void testSimpleQueryReturns() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthType=0;" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";" + + "OAuthServiceAcctEmail=" + + TPC_SERVICE_ACCOUNT + + ";" + + "OAuthPvtKey=" + + TPC_PVT_KEY + + ";"; // Plug in this value when testing from the key file + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testServiceAccountKeyFileReturns() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthType=0;" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";" + // Point the key path to where you have downloaded it to. 
+ + "OAuthPvtKeyPath=/Users/YourPathToSecretFile/SAKeyFile.json;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + private void validateTPCEnvironment() { + if (TPC_PROJECT_ID.isEmpty()) { + throw new IllegalArgumentException("TPC_PROJECT_ID is empty"); + } + if (TPC_SERVICE_ACCOUNT.isEmpty()) { + throw new IllegalArgumentException("TPC_SERVICE_ACCOUNT is empty"); + } + if (TPC_ENDPOINT.isEmpty()) { + throw new IllegalArgumentException("TPC_ENDPOINT is empty"); + } + if (TPC_PVT_KEY.isEmpty()) { + throw new IllegalArgumentException("TPC_PVT_KEY is empty"); + } + if (TPC_UNIVERSE_DOMAIN.isEmpty()) { + throw new IllegalArgumentException("TPC_UNIVERSE_DOMAIN is empty"); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java new file mode 100644 index 0000000000..ff5db108e8 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java @@ -0,0 +1,57 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.rules; + +import java.util.TimeZone; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +public class TimeZoneRule implements TestRule { + + private final String timeZoneId; + private final TimeZone defaultTimeZone; + + public TimeZoneRule(String timeZoneId) { + this.timeZoneId = timeZoneId; + defaultTimeZone = TimeZone.getDefault(); + } + + @Override + public Statement apply(Statement base, Description description) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + try { + TimeZone.setDefault(TimeZone.getTimeZone(timeZoneId)); + base.evaluate(); + } finally { + TimeZone.setDefault(defaultTimeZone); + } + } + }; + } + + /** + * Public method to enforce the rule from places like methods annotated with {@link + * org.junit.runners.Parameterized.Parameters} annotation which gets executed before this rule is + * applied. 
+ */ + public void enforce() { + TimeZone.setDefault(TimeZone.getTimeZone(timeZoneId)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java new file mode 100644 index 0000000000..13f3007667 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java @@ -0,0 +1,54 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.utils; + +import com.google.api.core.InternalApi; +import com.google.protobuf.ByteString; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.channels.Channels; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.VectorUnloader; +import org.apache.arrow.vector.ipc.WriteChannel; +import org.apache.arrow.vector.ipc.message.ArrowRecordBatch; +import org.apache.arrow.vector.ipc.message.MessageSerializer; +import org.apache.arrow.vector.types.pojo.Schema; + +@InternalApi("Used for testing purpose") +public class ArrowUtilities { + + public static ByteString serializeSchema(Schema schema) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + MessageSerializer.serialize(new WriteChannel(Channels.newChannel(out)), schema); + return ByteString.readFrom(new ByteArrayInputStream(out.toByteArray())); + } + + public static ByteString serializeVectorSchemaRoot(VectorSchemaRoot root) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + + ArrowRecordBatch recordBatch = new VectorUnloader(root).getRecordBatch(); + MessageSerializer.serialize(new WriteChannel(Channels.newChannel(out)), recordBatch); + return ByteString.readFrom(new ByteArrayInputStream(out.toByteArray())); + + // ArrowStreamWriter writer = new ArrowStreamWriter(root, null, Channels.newChannel(out)); + // writer.start(); + // writer.writeBatch(); + // writer.end(); + // return ByteString.readFrom(new ByteArrayInputStream(out.toByteArray())); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java new file mode 100644 index 0000000000..419cb9b0bc --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java @@ -0,0 +1,143 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *       https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc.utils;
+
+import com.google.cloud.Tuple;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Field.Mode;
+import com.google.cloud.bigquery.FieldList;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FieldValue.Attribute;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.stream.Collectors;
+import javax.sql.ConnectionEvent;
+import javax.sql.ConnectionEventListener;
+import org.apache.arrow.vector.util.JsonStringArrayList;
+import org.apache.arrow.vector.util.JsonStringHashMap;
+
+public class TestUtilities {
+
+  public static Tuple<Field, FieldValue> primitiveSchemaAndValue(
+      StandardSQLTypeName typeName, String value) {
+    Field schema =
+        Field.newBuilder(typeName.name() + "_column", typeName).setMode(Mode.NULLABLE).build();
+    FieldValue primitiveValue = FieldValue.of(Attribute.PRIMITIVE, value);
+
+    return Tuple.of(schema, primitiveValue);
+  }
+
+  public static Tuple<Field, FieldValue> arraySchemaAndValue(
+      StandardSQLTypeName typeName, String... values) {
+    Field schema =
+        Field.newBuilder(typeName.name() + "_arr_column", typeName).setMode(Mode.REPEATED).build();
+
+    FieldValue arrayValues =
+        FieldValue.of(
+            Attribute.REPEATED,
+            FieldValueList.of(
+                Arrays.stream(values)
+                    .map(value -> FieldValue.of(Attribute.PRIMITIVE, value))
+                    .collect(Collectors.toList())));
+
+    return Tuple.of(schema, arrayValues);
+  }
+
+  public static <T> Tuple<ArrayList<Object>, ArrayList<T>> nestedResultSetToColumnLists(
+      ResultSet resultSet) throws SQLException {
+    ArrayList<Object> indexes = new ArrayList<>();
+    ArrayList<T> columnValues = new ArrayList<>();
+    while (resultSet.next()) {
+      indexes.add(resultSet.getObject(1));
+      columnValues.add((T) resultSet.getObject(2));
+    }
+    return Tuple.of(indexes, columnValues);
+  }
+
+  @SafeVarargs
+  public static Tuple<FieldList, JsonStringHashMap<String, Object>> arrowStructOf(
+      Tuple<StandardSQLTypeName, Object>... tuples) {
+    ArrayList<Field> fields = new ArrayList<>();
+    JsonStringHashMap<String, Object> values = new JsonStringHashMap<>();
+
+    for (Tuple<StandardSQLTypeName, Object> tuple : tuples) {
+      StandardSQLTypeName typeName = tuple.x();
+      Object value = tuple.y();
+      String name = typeName.name() + "_column";
+      Field schema = Field.newBuilder(name, typeName).setMode(Mode.NULLABLE).build();
+      fields.add(schema);
+      values.put(name, value);
+    }
+    return Tuple.of(FieldList.of(fields), values);
+  }
+
+  public static <T> Tuple<Field, JsonStringArrayList<T>> arrowArraySchemaAndValue(
+      StandardSQLTypeName typeName, T... values) {
+    Field schema =
+        Field.newBuilder(typeName.name() + "_arr_column", typeName).setMode(Mode.REPEATED).build();
+
+    JsonStringArrayList<T> arrayValues = new JsonStringArrayList<>();
+    arrayValues.addAll(Arrays.asList(values));
+    return Tuple.of(schema, arrayValues);
+  }
+
+  @SafeVarargs
+  public static <T> JsonStringArrayList<T> arrowArrayOf(T... values) {
+    JsonStringArrayList<T> arrayValues = new JsonStringArrayList<>();
+    arrayValues.addAll(Arrays.asList(values));
+    return arrayValues;
+  }
+
+  // struct of arrays
+  public static <T> JsonStringHashMap<String, Object> toArrowStruct(
+      Iterable<Tuple<Field, JsonStringArrayList<T>>> schemaAndValues) {
+    JsonStringHashMap<String, Object> struct = new JsonStringHashMap<>();
+    for (Tuple<Field, JsonStringArrayList<T>> schemaAndValue : schemaAndValues) {
+      Field schema = schemaAndValue.x();
+      JsonStringArrayList<T> value = schemaAndValue.y();
+      struct.put(schema.getName(), value);
+    }
+    return struct;
+  }
+
+  public static class TestConnectionListener implements ConnectionEventListener {
+    private int connectionClosedCount = 0;
+    private int connectionErrorCount = 0;
+
+    @Override
+    public void connectionClosed(ConnectionEvent arg0) {
+      connectionClosedCount++;
+    }
+
+    @Override
+    public void connectionErrorOccurred(ConnectionEvent arg0) {
+      connectionErrorCount++;
+    }
+
+    public int getConnectionClosedCount() {
+      return connectionClosedCount;
+    }
+
+    public int getConnectionErrorCount() {
+      return connectionErrorCount;
+    }
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java
new file mode 100644
index 0000000000..ce34f42f54
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc.utils;
+
+public class URIBuilder {
+  StringBuilder builder;
+
+  public URIBuilder(String baseUri) {
+    builder = new StringBuilder(baseUri);
+  }
+
+  public URIBuilder append(String key, Object value) {
+    builder.append(String.format("%s=%s;", key, value.toString()));
+    return this;
+  }
+
+  public String toString() {
+    return builder.toString();
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/resources/fake.p12 b/google-cloud-bigquery-jdbc/src/test/resources/fake.p12
new file mode 100644
index 0000000000000000000000000000000000000000..d9e064411d4f7a6e831e96a468ebd3ec1c35abc3
GIT binary patch
literal 5223
[base85-encoded binary payload omitted]
From 8bc43a055f3ca1e0bd0eec8d7edf0bd906374648 Mon Sep 17 00:00:00 2001
From: Lawrence Qiu
Date: Mon, 12 Jan 2026 14:12:12 -0500
Subject: [PATCH 27/36] test: Use unique table names in tests (#4053)

* feat: Migrate to JUnit 5 and add parallel test execution

* feat: Migrate tests to JUnit5

* chore: Add surefire-junit-platform dep for ITs

* test: Enable parallel tests for JUnit5

* chore: Fix broken tests

* chore: Upgrade existing integration tests to JUnit 5 syntax and features

* chore: Upgrade ITNightlyBigQueryTest to JUnit 5 features and package-private

* chore: Make the tests package-private

* feat: migrate tests to JUnit 5 assertThrows and static imports

* chore: Remove wildcard imports

* chore: revert samples to use junit4

* chore: Address code comments

* chore: Close connection after test

* chore: Fix flaky tests

* chore: Fix flaky tests

* chore: Fix tests

* chore: Disable cache for query stats

* chore: Add unique id to each table

* chore: Use unique test table names

* chore: Remove parallel test execution

* chore: Add comment for cache

* chore: Fix broken test

---------

Co-authored-by: AbgarSim
---
 .../cloud/bigquery/it/ITBigQueryTest.java     | 146 ++++++++++--------
 .../bigquery/it/ITNightlyBigQueryTest.java    |   5 +-
 2 files changed, 83 insertions(+), 68 deletions(-)

diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
index fec7e55e05..76b358dae6 100644
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
@@ -621,14 +621,16 @@ class ITBigQueryTest {
   private static final String EXTRACT_FILE = "extract.csv";
   private static final String EXTRACT_MODEL_FILE = "extract_model.csv";
   private static final String BUCKET = RemoteStorageHelper.generateBucketName();
-  private static final TableId TABLE_ID = TableId.of(DATASET, "testing_table");
-  private static final TableId TABLE_ID_DDL = TableId.of(DATASET, "ddl_testing_table");
-  private static final TableId TABLE_ID_FASTQUERY = TableId.of(DATASET, "fastquery_testing_table");
-  private static final TableId TABLE_ID_FASTQUERY_UK =
-      TableId.of(UK_DATASET, "fastquery_testing_table");
-  private static final TableId TABLE_ID_LARGE = TableId.of(DATASET, "large_data_testing_table");
-  private static final TableId TABLE_ID_FASTQUERY_BQ_RESULTSET =
-      TableId.of(DATASET, "fastquery_testing_bq_resultset");
+  private static final TableId TABLE_ID = TableId.of(DATASET, generateTableName("testing_table"));
+  private static final TableId TABLE_ID_DDL =
+      TableId.of(DATASET, generateTableName("ddl_testing_table"));
+  private static final TableId TABLE_ID_FAST_QUERY =
+      TableId.of(DATASET, generateTableName("fast_query_testing_table"));
+  private static final TableId TABLE_ID_LARGE =
+      TableId.of(DATASET, 
generateTableName("large_data_testing_table")); + private static final TableId TABLE_ID_FAST_QUERY_BQ_RESULTSET = + TableId.of(DATASET, generateTableName("fast_query_testing_bq_resultset")); + private static final String CSV_CONTENT = "StringValue1\nStringValue2\n"; private static final String CSV_CONTENT_NULL = "String\0Value1\n"; private static final String CSV_CONTENT_FLEXIBLE_COLUMN = "name,&ersand\nrow_name,1"; @@ -1063,6 +1065,10 @@ public CompletableResultCode shutdown() { } } + private static String generateTableName(String prefix) { + return prefix + UUID.randomUUID().toString().substring(0, 8); + } + @BeforeAll static void beforeClass() throws InterruptedException, IOException { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); @@ -1133,7 +1139,7 @@ static void beforeClass() throws InterruptedException, IOException { LoadJobConfiguration configurationFastQuery = LoadJobConfiguration.newBuilder( - TABLE_ID_FASTQUERY, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) + TABLE_ID_FAST_QUERY, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) .setSchema(TABLE_SCHEMA) .setLabels(labels) @@ -1144,7 +1150,7 @@ static void beforeClass() throws InterruptedException, IOException { LoadJobConfiguration configFastQueryBQResultset = LoadJobConfiguration.newBuilder( - TABLE_ID_FASTQUERY_BQ_RESULTSET, + TABLE_ID_FAST_QUERY_BQ_RESULTSET, "gs://" + BUCKET + "/" + JSON_LOAD_FILE_BQ_RESULTSET, FormatOptions.json()) .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) @@ -2681,7 +2687,7 @@ void testListTablesWithRangePartitioning() { @Test void testListPartitions() throws InterruptedException { - String tableName = "test_table_partitions_" + UUID.randomUUID().toString().substring(0, 8); + String tableName = generateTableName("test_table_partitions_"); Date date = Date.fromJavaUtilDate(new java.util.Date()); String partitionDate = date.toString().replaceAll("-", ""); TableId tableId = TableId.of(DATASET, tableName + "$" + partitionDate); @@ -3503,8 +3509,15 @@ void testLosslessTimestamp() throws InterruptedException { void testQuery() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); QueryJobConfiguration config = - QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); + QueryJobConfiguration.newBuilder(query) + // Disable the cache as query plans do not exist from cached results + // This will force generation of execution plan + .setUseQueryCache(false) + .setDefaultDataset(DatasetId.of(DATASET)) + .build(); Job job = bigquery.create(JobInfo.of(JobId.of(), config)); + job = job.waitFor(); + assertNotNull(job); TableResult result = job.getQueryResults(); assertNotNull(result.getJobId()); @@ -3527,6 +3540,7 @@ void testQuery() throws InterruptedException { } assertEquals(2, rowCount); + // Query Plan will exist for a completed job Job job2 = bigquery.getJob(job.getJobId()); JobStatistics.QueryStatistics statistics = job2.getStatistics(); assertNotNull(statistics.getQueryPlan()); @@ -3822,7 +3836,7 @@ void testConnectionImplDryRun() throws SQLException { String query = String.format( "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from %s where StringField = ? 
order by TimestampField", - TABLE_ID_FASTQUERY_BQ_RESULTSET.getTable()); + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable()); ConnectionSettings connectionSettings = ConnectionSettings.newBuilder() .setDefaultDataset(DatasetId.of(DATASET)) @@ -3850,7 +3864,7 @@ void testConnectionImplDryRunNoQueryParameters() throws SQLException { + "TimestampField, FloatField, NumericField, TimeField, DateField, DateTimeField, " + "GeographyField, RecordField.BytesField, RecordField.BooleanField, " + "IntegerArrayField from %s order by TimestampField", - TABLE_ID_FASTQUERY_BQ_RESULTSET.getTable()); + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable()); ConnectionSettings connectionSettings = ConnectionSettings.newBuilder() .setDefaultDataset(DatasetId.of(DATASET)) @@ -3933,7 +3947,7 @@ void testExecuteSelectSinglePageTableRow() throws SQLException { String query = "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " - + TABLE_ID_FASTQUERY_BQ_RESULTSET.getTable() + + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable() + " order by TimestampField"; ConnectionSettings connectionSettings = ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); @@ -3997,7 +4011,7 @@ void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException { String query = "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " - + TABLE_ID_FASTQUERY_BQ_RESULTSET.getTable() + + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable() + " order by TimestampField"; ConnectionSettings connectionSettings = ConnectionSettings.newBuilder() @@ -4344,7 +4358,7 @@ void testExecuteSelectSinglePageTableRowColInd() throws SQLException { String query = "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " - + TABLE_ID_FASTQUERY_BQ_RESULTSET.getTable() + + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable() + " order by TimestampField"; /* Column Index mapping for ref: @@ -4543,7 +4557,7 @@ void testExecuteSelectArrayOfStruct() throws SQLException { @Test void testFastQueryMultipleRuns() throws InterruptedException { String query = - "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); + "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FAST_QUERY.getTable(); QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result = bigquery.query(config); @@ -4580,7 +4594,7 @@ void testFastQueryMultipleRuns() throws InterruptedException { @Test void testFastQuerySinglePageDuplicateRequestIds() throws InterruptedException { String query = - "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); + "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FAST_QUERY.getTable(); QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result = bigquery.query(config); @@ -4613,7 +4627,7 @@ void 
testFastQuerySinglePageDuplicateRequestIds() throws InterruptedException { @Test void testFastSQLQuery() throws InterruptedException { String query = - "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); + "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FAST_QUERY.getTable(); QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result = bigquery.query(config); @@ -4642,27 +4656,27 @@ void testFastSQLQuery() throws InterruptedException { @Test void testProjectIDFastSQLQueryWithJobId() throws InterruptedException { - String random_project_id = "RANDOM_PROJECT_" + UUID.randomUUID().toString().replace('-', '_'); - System.out.println(random_project_id); + String invalidProjectId = "RANDOM_PROJECT_" + UUID.randomUUID().toString().replace('-', '_'); String query = - "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); + "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FAST_QUERY.getTable(); // With incorrect projectID in jobid // The job will be created with the specified(incorrect) projectID // hence failing the operation - JobId jobIdWithProjectId = JobId.newBuilder().setProject(random_project_id).build(); + JobId jobIdWithProjectId = JobId.newBuilder().setProject(invalidProjectId).build(); QueryJobConfiguration configSelect = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); try { bigquery.query(configSelect, jobIdWithProjectId); } catch (Exception exception) { // error message for non-existent project - assertTrue(exception.getMessage().contains("Cannot parse as CloudRegion")); + assertEquals("Cannot parse as CloudRegion.", exception.getMessage()); assertEquals(BigQueryException.class, exception.getClass()); } } @Test void testLocationFastSQLQueryWithJobId() throws InterruptedException { + TableId tableIdFastQueryUk = TableId.of(UK_DATASET, "fastquery_testing_table"); DatasetInfo infoUK = DatasetInfo.newBuilder(UK_DATASET) .setDescription(DESCRIPTION) @@ -4672,11 +4686,11 @@ void testLocationFastSQLQueryWithJobId() throws InterruptedException { bigquery.create(infoUK); TableDefinition tableDefinition = StandardTableDefinition.of(SIMPLE_SCHEMA); - TableInfo tableInfo = TableInfo.newBuilder(TABLE_ID_FASTQUERY_UK, tableDefinition).build(); + TableInfo tableInfo = TableInfo.newBuilder(tableIdFastQueryUk, tableDefinition).build(); bigquery.create(tableInfo); String insert = - "INSERT " + UK_DATASET + "." + TABLE_ID_FASTQUERY_UK.getTable() + " VALUES('Anna');"; + "INSERT " + UK_DATASET + "." 
+ tableIdFastQueryUk.getTable() + " VALUES('Anna');"; QueryJobConfiguration config = QueryJobConfiguration.newBuilder(insert) @@ -4685,10 +4699,12 @@ void testLocationFastSQLQueryWithJobId() throws InterruptedException { TableResult result = bigquery.query(config); assertNotNull(result.getJobId()); assertEquals(SIMPLE_SCHEMA, result.getSchema()); - assertEquals(1, result.getTotalRows()); - assertNull(result.getNextPage()); - assertNull(result.getNextPageToken()); - assertFalse(result.hasNextPage()); + // Use `getNumDmlAffectedRows()` for DML operations + Job queryJob = bigquery.getJob(result.getJobId()); + queryJob = queryJob.waitFor(); + JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + assertEquals(1L, statistics.getNumDmlAffectedRows().longValue()); + // Verify correctness of table content for (FieldValueList row : result.getValues()) { FieldValue stringCell = row.get(0); @@ -4698,7 +4714,7 @@ void testLocationFastSQLQueryWithJobId() throws InterruptedException { // With incorrect location in jobid // The job will be created with the specified(incorrect) location // hence failing the operation - String query = "SELECT StringField FROM " + TABLE_ID_FASTQUERY_UK.getTable(); + String query = "SELECT StringField FROM " + tableIdFastQueryUk.getTable(); JobId jobIdWithLocation = JobId.newBuilder().setLocation("us-west1").build(); QueryJobConfiguration configSelect = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(UK_DATASET)).build(); @@ -4757,7 +4773,7 @@ void testFastSQLQueryMultiPage() throws InterruptedException { @Test void testFastDMLQuery() throws InterruptedException { - String tableName = TABLE_ID_FASTQUERY.getTable(); + String tableName = TABLE_ID_FAST_QUERY.getTable(); String dmlQuery = String.format("UPDATE %s.%s SET StringField = 'hello' WHERE TRUE", DATASET, tableName); QueryJobConfiguration dmlConfig = QueryJobConfiguration.newBuilder(dmlQuery).build(); @@ -4812,8 +4828,7 @@ void testFastDDLQuery() throws InterruptedException { @Test void testFastQuerySlowDDL() throws InterruptedException { - String tableName = - "test_table_fast_query_ddl_slow_" + UUID.randomUUID().toString().substring(0, 8); + String tableName = generateTableName("test_table_fast_query_ddl_slow_"); // This query take more than 10s to run and should fall back on the old query path String slowDdlQuery = String.format( @@ -4846,7 +4861,7 @@ void testFastQuerySlowDDL() throws InterruptedException { @Test void testFastQueryHTTPException() throws InterruptedException { String queryInvalid = - "CREATE OR REPLACE SELECT * FROM UPDATE TABLE SET " + TABLE_ID_FASTQUERY.getTable(); + "CREATE OR REPLACE SELECT * FROM UPDATE TABLE SET " + TABLE_ID_FAST_QUERY.getTable(); QueryJobConfiguration configInvalidQuery = QueryJobConfiguration.newBuilder(queryInvalid) .setDefaultDataset(DatasetId.of(DATASET)) @@ -5063,7 +5078,7 @@ void testExecuteSelectSessionSupport() throws BigQuerySQLException { @Test void testDmlStatistics() throws InterruptedException { - String tableName = TABLE_ID_FASTQUERY.getTable(); + String tableName = TABLE_ID_FAST_QUERY.getTable(); // Run a DML statement to UPDATE 2 rows of data String dmlQuery = String.format("UPDATE %s.%s SET StringField = 'hello' WHERE TRUE", DATASET, tableName); @@ -5085,7 +5100,7 @@ void testDmlStatistics() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test void testTransactionInfo() throws InterruptedException { - String tableName = TABLE_ID_FASTQUERY.getTable(); + String tableName 
= TABLE_ID_FAST_QUERY.getTable(); String transaction = String.format( "BEGIN TRANSACTION;\n" @@ -5726,8 +5741,8 @@ void testListJobsWithCreationBounding() { @Test void testCreateAndGetJob() throws InterruptedException, TimeoutException { - String sourceTableName = "test_create_and_get_job_source_table"; - String destinationTableName = "test_create_and_get_job_destination_table"; + String sourceTableName = generateTableName("test_create_and_get_job_source_table"); + String destinationTableName = generateTableName("test_create_and_get_job_destination_table"); TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); @@ -5784,8 +5799,10 @@ void testCreateJobAndWaitForWithRetryOptions() throws InterruptedException, Time @Test void testCreateAndGetJobWithSelectedFields() throws InterruptedException, TimeoutException { - String sourceTableName = "test_create_and_get_job_with_selected_fields_source_table"; - String destinationTableName = "test_create_and_get_job_with_selected_fields_destination_table"; + String sourceTableName = + generateTableName("test_create_and_get_job_with_selected_fields_source_table"); + String destinationTableName = + generateTableName("test_create_and_get_job_with_selected_fields_destination_table"); TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); @@ -5832,8 +5849,8 @@ void testCreateAndGetJobWithSelectedFields() throws InterruptedException, Timeou @Test void testCopyJob() throws InterruptedException, TimeoutException { - String sourceTableName = "test_copy_job_source_table"; - String destinationTableName = "test_copy_job_destination_table"; + String sourceTableName = generateTableName("test_copy_job_source_table"); + String destinationTableName = generateTableName("test_copy_job_destination_table"); TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); @@ -5864,8 +5881,8 @@ void testCopyJob() throws InterruptedException, TimeoutException { @Test void testCopyJobStatistics() throws InterruptedException, TimeoutException { - String sourceTableName = "test_copy_job_statistics_source_table"; - String destinationTableName = "test_copy_job_statistics_destination_table"; + String sourceTableName = generateTableName("test_copy_job_statistics_source_table"); + String destinationTableName = generateTableName("test_copy_job_statistics_destination_table"); QueryJobConfiguration createTable = QueryJobConfiguration.newBuilder( @@ -5974,10 +5991,8 @@ void testSnapshotTableCopyJob() throws InterruptedException { @Test void testCopyJobWithLabelsAndExpTime() throws InterruptedException { String destExpiryTime = "2099-12-31T23:59:59.999999999Z"; - String sourceTableName = - "test_copy_job_source_table_label" + UUID.randomUUID().toString().substring(0, 8); - String destinationTableName = - "test_copy_job_destination_table_label" + UUID.randomUUID().toString().substring(0, 8); + String sourceTableName = generateTableName("test_copy_job_source_table_label"); + String destinationTableName = generateTableName("test_copy_job_destination_table_label"); Map labels = ImmutableMap.of("test_job_name", "test_copy_job"); TableId sourceTable 
= TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); @@ -6006,7 +6021,7 @@ void testCopyJobWithLabelsAndExpTime() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test public void testQueryJob() throws InterruptedException, TimeoutException { - String tableName = "test_query_job_table"; + String tableName = generateTableName("test_query_job_table"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, tableName); QueryJobConfiguration configuration = @@ -6052,7 +6067,7 @@ public void testQueryJob() throws InterruptedException, TimeoutException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test void testQueryJobWithConnectionProperties() throws InterruptedException { - String tableName = "test_query_job_table_connection_properties"; + String tableName = generateTableName("test_query_job_table_connection_properties"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, tableName); QueryJobConfiguration configuration = @@ -6072,7 +6087,7 @@ void testQueryJobWithConnectionProperties() throws InterruptedException { /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test void testQueryJobWithLabels() throws InterruptedException, TimeoutException { - String tableName = "test_query_job_table"; + String tableName = generateTableName("test_query_job_table"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); Map labels = ImmutableMap.of("test-job-name", "test-query-job"); TableId destinationTable = TableId.of(DATASET, tableName); @@ -6095,11 +6110,9 @@ void testQueryJobWithLabels() throws InterruptedException, TimeoutException { @Test void testQueryJobWithSearchReturnsSearchStatisticsUnused() throws InterruptedException { - String tableName = "test_query_job_table"; + String tableName = generateTableName("test_query_job_table"); String query = - "SELECT * FROM " - + TABLE_ID.getTable() - + " WHERE search(StringField, \"stringValue\")"; + "SELECT * FROM " + TABLE_ID.getTable() + " WHERE search(StringField, \"stringValue\")"; TableId destinationTable = TableId.of(DATASET, tableName); try { QueryJobConfiguration configuration = @@ -6125,7 +6138,7 @@ void testQueryJobWithSearchReturnsSearchStatisticsUnused() throws InterruptedExc /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test void testQueryJobWithRangePartitioning() throws InterruptedException { - String tableName = "test_query_job_table_rangepartitioning"; + String tableName = generateTableName("test_query_job_table_rangepartitioning"); String query = "SELECT IntegerField, TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); @@ -6150,7 +6163,7 @@ void testQueryJobWithRangePartitioning() throws InterruptedException { @Test void testLoadJobWithRangePartitioning() throws InterruptedException { - String tableName = "test_load_job_table_rangepartitioning"; + String tableName = generateTableName("test_load_job_table_rangepartitioning"); TableId destinationTable = TableId.of(DATASET, tableName); try { LoadJobConfiguration configuration = @@ -6174,7 +6187,7 @@ void testLoadJobWithRangePartitioning() throws InterruptedException { @Test void testLoadJobWithDecimalTargetTypes() throws InterruptedException { - String tableName 
= "test_load_job_table_parquet_decimalTargetTypes"; + String tableName = generateTableName("test_load_job_table_parquet_decimalTargetTypes"); TableId destinationTable = TableId.of(DATASET, tableName); String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/numeric/numeric_38_12.parquet"; try { @@ -6202,7 +6215,7 @@ void testLoadJobWithDecimalTargetTypes() throws InterruptedException { @Test void testExternalTableWithDecimalTargetTypes() throws InterruptedException { - String tableName = "test_create_external_table_parquet_decimalTargetTypes"; + String tableName = generateTableName("test_create_external_table_parquet_decimalTargetTypes"); TableId destinationTable = TableId.of(DATASET, tableName); String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/numeric/numeric_38_12.parquet"; ExternalTableDefinition externalTableDefinition = @@ -6222,7 +6235,7 @@ void testExternalTableWithDecimalTargetTypes() throws InterruptedException { @Test void testQueryJobWithDryRun() throws InterruptedException, TimeoutException { - String tableName = "test_query_job_table"; + String tableName = generateTableName("test_query_job_table"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, tableName); QueryJobConfiguration configuration = @@ -6240,7 +6253,7 @@ void testQueryJobWithDryRun() throws InterruptedException, TimeoutException { @Test void testExtractJob() throws InterruptedException, TimeoutException { - String tableName = "test_export_job_table"; + String tableName = generateTableName("test_export_job_table"); TableId destinationTable = TableId.of(DATASET, tableName); Map labels = ImmutableMap.of("test-job-name", "test-load-extract-job"); LoadJobConfiguration configuration = @@ -6317,7 +6330,7 @@ void testExtractJobWithModel() throws InterruptedException { @Test void testExtractJobWithLabels() throws InterruptedException, TimeoutException { - String tableName = "test_export_job_table_label"; + String tableName = generateTableName("test_export_job_table_label"); Map labels = ImmutableMap.of("test_job_name", "test_export_job"); TableId destinationTable = TableId.of(DATASET, tableName); LoadJobConfiguration configuration = @@ -6343,7 +6356,7 @@ void testExtractJobWithLabels() throws InterruptedException, TimeoutException { @Test void testCancelJob() throws InterruptedException, TimeoutException { - String destinationTableName = "test_cancel_query_job_table"; + String destinationTableName = generateTableName("test_cancel_query_job_table"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, destinationTableName); QueryJobConfiguration configuration = @@ -6625,7 +6638,8 @@ void testWriteChannelPreserveAsciiControlCharacters() @Test void testLoadJobPreserveAsciiControlCharacters() throws InterruptedException { - String destinationTableName = "test_load_job_preserve_ascii_control_characters"; + String destinationTableName = + generateTableName("test_load_job_preserve_ascii_control_characters"); TableId destinationTable = TableId.of(DATASET, destinationTableName); try { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java index 84e355f9e6..6418682038 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java +++ 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java @@ -77,7 +77,8 @@ public class ITNightlyBigQueryTest { private static final Logger logger = Logger.getLogger(ITNightlyBigQueryTest.class.getName()); private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); - private static final String TABLE = "TEMP_RS_TEST_TABLE"; + private static final String TABLE = + "TEMP_RS_TEST_TABLE" + UUID.randomUUID().toString().substring(0, 8); private static final byte[] BYTES = "TestByteValue".getBytes(StandardCharsets.UTF_8); private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES); // Script will populate NUM_BATCHES*REC_PER_BATCHES number of records (eg: 100*10000 = 1M) @@ -355,7 +356,7 @@ void testConnectionClose() throws SQLException { while (rs.next()) { ++cnt; if (cnt == 50000) { // interrupt at 50K - assertTrue(connection.close()); + break; } } assertTrue(LIMIT_RECS > cnt); From c241d5ebc681e4cad32db37c2f53c906ab0e2dd9 Mon Sep 17 00:00:00 2001 From: Abgar Simonean Date: Tue, 13 Jan 2026 18:21:04 +0200 Subject: [PATCH 28/36] feat:Add JUnit 5 and add parallel test execution (#4058) --- .../src/test/resources/junit-platform.properties | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 google-cloud-bigquery/src/test/resources/junit-platform.properties diff --git a/google-cloud-bigquery/src/test/resources/junit-platform.properties b/google-cloud-bigquery/src/test/resources/junit-platform.properties new file mode 100644 index 0000000000..3b9a816b45 --- /dev/null +++ b/google-cloud-bigquery/src/test/resources/junit-platform.properties @@ -0,0 +1,3 @@ +junit.jupiter.execution.parallel.enabled=true + +junit.jupiter.execution.parallel.mode.default=concurrent \ No newline at end of file From 99058eada1918263db0816123af88f946a874093 Mon Sep 17 00:00:00 2001 From: Diego Marquez Date: Tue, 13 Jan 2026 16:00:20 -0500 Subject: [PATCH 29/36] chore: cleanup release-please config (#4013) * chore: cleanup release-please config - Remove redundant options already declared at the top level.\n- Remove bumpMinorPreMajor for repositories after the first major release. 
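The junit-platform.properties file added above is what flips the test suite to parallel execution: junit.jupiter.execution.parallel.enabled=true switches the JUnit 5 parallel engine on, and junit.jupiter.execution.parallel.mode.default=concurrent makes concurrent execution the default for every test class rather than opt-in. A minimal sketch of the per-class escape hatch for tests that mutate shared fixtures (the class name is illustrative, not part of this patch set):

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.parallel.Execution;
    import org.junit.jupiter.api.parallel.ExecutionMode;

    // Pins this one class to a single thread while the rest of the suite
    // keeps the concurrent default from junit-platform.properties.
    @Execution(ExecutionMode.SAME_THREAD)
    class SharedDatasetIT {

      @Test
      void mutatesSharedDataset() {
        // Tests that rewrite shared state cannot race against siblings here.
      }
    }

A class that only needs exclusive access to one resource could instead declare @ResourceLock with a shared key, which serializes just the tests holding that lock.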
* chore: format release-please.yml --- .github/release-please.yml | 67 ++++++++++++-------------------------- 1 file changed, 21 insertions(+), 46 deletions(-) diff --git a/.github/release-please.yml b/.github/release-please.yml index e78d84de07..412c1b796e 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,49 +1,24 @@ branches: - - branch: 1.127.12-sp - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-lts - - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-yoshi - branch: java7 - - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-backport - branch: 2.3.x - - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-backport - branch: 2.10.x - - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-backport - branch: 2.19.x - - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-backport - branch: 2.35.x - - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-backport - branch: 2.40.x - - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-backport - branch: 2.48.x - - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-backport - branch: 2.52.x - - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-backport - branch: 2.51.x - - branch: protobuf-4.x-rc - bumpMinorPreMajor: true - handleGHRelease: true - releaseType: java-yoshi - manifest: true -bumpMinorPreMajor: true + - branch: 1.127.12-sp + releaseType: java-lts + - branch: java7 + - branch: 2.3.x + releaseType: java-backport + - branch: 2.10.x + releaseType: java-backport + - branch: 2.19.x + releaseType: java-backport + - branch: 2.35.x + releaseType: java-backport + - branch: 2.40.x + releaseType: java-backport + - branch: 2.48.x + releaseType: java-backport + - branch: 2.52.x + releaseType: java-backport + - branch: 2.51.x + releaseType: java-backport + - branch: protobuf-4.x-rc + manifest: true handleGHRelease: true releaseType: java-yoshi From 176199f5d1a3e387558ada47747c3118b4182c77 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 15 Jan 2026 19:22:38 +0000 Subject: [PATCH 30/36] chore(deps): update dependency com.google.cloud:sdk-platform-java-config to v3.55.1 (#4060) --- .github/workflows/unmanaged_dependency_check.yaml | 2 +- .kokoro/continuous/graalvm-native-a.cfg | 2 +- .kokoro/continuous/graalvm-native-b.cfg | 2 +- .kokoro/continuous/graalvm-native-c.cfg | 2 +- .kokoro/presubmit/graalvm-native-a.cfg | 2 +- .kokoro/presubmit/graalvm-native-b.cfg | 2 +- .kokoro/presubmit/graalvm-native-c.cfg | 2 +- google-cloud-bigquery-bom/pom.xml | 2 +- pom.xml | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/unmanaged_dependency_check.yaml b/.github/workflows/unmanaged_dependency_check.yaml index 3c07fc2027..e9cacb9dc2 100644 --- a/.github/workflows/unmanaged_dependency_check.yaml +++ b/.github/workflows/unmanaged_dependency_check.yaml @@ -17,7 +17,7 @@ jobs: # repository .kokoro/build.sh - name: Unmanaged dependency check - uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.54.2 + uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.55.1 with: # java-bigquery does not produce a BOM. Fortunately the root pom.xml # defines google-cloud-bigquery in dependencyManagement section. 
So diff --git a/.kokoro/continuous/graalvm-native-a.cfg b/.kokoro/continuous/graalvm-native-a.cfg index 20c0ac4a52..a2798bba2c 100644 --- a/.kokoro/continuous/graalvm-native-a.cfg +++ b/.kokoro/continuous/graalvm-native-a.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.54.2" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.55.1" } env_vars: { diff --git a/.kokoro/continuous/graalvm-native-b.cfg b/.kokoro/continuous/graalvm-native-b.cfg index 3b7b14a650..1f804ecee9 100644 --- a/.kokoro/continuous/graalvm-native-b.cfg +++ b/.kokoro/continuous/graalvm-native-b.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.54.2" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.55.1" } env_vars: { diff --git a/.kokoro/continuous/graalvm-native-c.cfg b/.kokoro/continuous/graalvm-native-c.cfg index a41d88c092..641a3fb1db 100644 --- a/.kokoro/continuous/graalvm-native-c.cfg +++ b/.kokoro/continuous/graalvm-native-c.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.54.2" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.55.1" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-a.cfg b/.kokoro/presubmit/graalvm-native-a.cfg index 20c0ac4a52..a2798bba2c 100644 --- a/.kokoro/presubmit/graalvm-native-a.cfg +++ b/.kokoro/presubmit/graalvm-native-a.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.54.2" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.55.1" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-b.cfg b/.kokoro/presubmit/graalvm-native-b.cfg index 3b7b14a650..1f804ecee9 100644 --- a/.kokoro/presubmit/graalvm-native-b.cfg +++ b/.kokoro/presubmit/graalvm-native-b.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.54.2" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.55.1" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-c.cfg b/.kokoro/presubmit/graalvm-native-c.cfg index a41d88c092..641a3fb1db 100644 --- a/.kokoro/presubmit/graalvm-native-c.cfg +++ b/.kokoro/presubmit/graalvm-native-c.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. 
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.54.2" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.55.1" } env_vars: { diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 4cfd474143..52ea2c14be 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -8,7 +8,7 @@ com.google.cloud sdk-platform-java-config - 3.54.2 + 3.55.1 diff --git a/pom.xml b/pom.xml index f99bb3f907..d689261713 100644 --- a/pom.xml +++ b/pom.xml @@ -14,7 +14,7 @@ com.google.cloud sdk-platform-java-config - 3.54.2 + 3.55.1 From decd8e7bbc0f3550f2f27993d20581788bf7bcf7 Mon Sep 17 00:00:00 2001 From: Kirill Logachev Date: Thu, 15 Jan 2026 12:23:59 -0800 Subject: [PATCH 31/36] chore(main): update CODEOWNERS with JDBC ownership (#4063) --- .github/CODEOWNERS | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 7892b1d67b..d5cda28465 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -12,3 +12,6 @@ samples/**/*.java @googleapis/cloud-java-team-teamsync @googleapis/java-sa # Generated snippets should not be owned by samples reviewers samples/snippets/generated/ @googleapis/cloud-java-team-teamsync @googleapis/yoshi-java + +# JDBC Driver +google-cloud-bigquery-jdbc/** @googleapis/bq-developer-tools @googleapis/cloud-java-team-teamsync From ea0cd5776ff6035d5fa66281cc8eac6ac41ef70b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 15 Jan 2026 20:49:40 +0000 Subject: [PATCH 32/36] chore(deps): update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.19.1 (#4036) * chore(deps): update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.20.0-rc1 * Update google-cloud-bigquerystorage-bom version --------- Co-authored-by: Diego Marquez --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index d689261713..2929f8a9f5 100644 --- a/pom.xml +++ b/pom.xml @@ -71,7 +71,7 @@ com.google.cloud google-cloud-bigquerystorage-bom - 3.19.0 + 3.19.1 pom import From ae6f8bd516c8e73a6beccfe2833dbc0d70aaf4e0 Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Thu, 15 Jan 2026 17:21:12 -0500 Subject: [PATCH 33/36] chore: Enable exponential backoff for retries in tests (#4059) * chore: Enable exponential backoff for retries in tests * chore: Clone table for DML query tests * chore: Use GCA retry settings code suggestion --- .../testing/RemoteBigQueryHelper.java | 36 +++--- .../cloud/bigquery/it/ITBigQueryTest.java | 109 +++++++++++++----- .../testing/RemoteBigQueryHelperTest.java | 13 +-- 3 files changed, 106 insertions(+), 52 deletions(-) diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java index c32e1a0516..d195dc245e 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java @@ -45,7 +45,7 @@ public class RemoteBigQueryHelper { private static final String MODEL_NAME_PREFIX = "model_"; private static final String ROUTINE_NAME_PREFIX = "routine_"; private final BigQueryOptions options; - private static final int connectTimeout = 60000; + private static final int CONNECT_TIMEOUT_IN_MS = 60000; private RemoteBigQueryHelper(BigQueryOptions options) { this.options = 
options; @@ -96,8 +96,8 @@ public static RemoteBigQueryHelper create(String projectId, InputStream keyStrea HttpTransportOptions transportOptions = BigQueryOptions.getDefaultHttpTransportOptions(); transportOptions = transportOptions.toBuilder() - .setConnectTimeout(connectTimeout) - .setReadTimeout(connectTimeout) + .setConnectTimeout(CONNECT_TIMEOUT_IN_MS) + .setReadTimeout(CONNECT_TIMEOUT_IN_MS) .build(); BigQueryOptions bigqueryOptions = BigQueryOptions.newBuilder() @@ -133,8 +133,8 @@ public static RemoteBigQueryHelper create(BigQueryOptions.Builder bigqueryOption HttpTransportOptions transportOptions = BigQueryOptions.getDefaultHttpTransportOptions(); transportOptions = transportOptions.toBuilder() - .setConnectTimeout(connectTimeout) - .setReadTimeout(connectTimeout) + .setConnectTimeout(CONNECT_TIMEOUT_IN_MS) + .setReadTimeout(CONNECT_TIMEOUT_IN_MS) .build(); BigQueryOptions.Builder builder = bigqueryOptionsBuilder @@ -143,21 +143,25 @@ public static RemoteBigQueryHelper create(BigQueryOptions.Builder bigqueryOption return new RemoteBigQueryHelper(builder.build()); } + // Opt to keep these settings as small as possible to minimize the total test time. + // These values can be adjusted per test case, but these serve as default values. private static RetrySettings retrySettings() { - double retryDelayMultiplier = 1.0; + double backoffMultiplier = 1.5; int maxAttempts = 10; - long initialRetryDelay = 250L; - long maxRetryDelay = 30000L; - long totalTimeOut = 120000L; + long initialRetryDelayMs = 100L; // 0.1s initial retry delay + long maxRetryDelayMs = 1000L; // 1s max retry delay between retries + long initialRpcTimeoutMs = 1000L; // 1s initial rpc timeout + long maxRpcTimeoutMs = 2000L; // 2s max rpc timeout + long totalTimeoutMs = 3000L; // 3s total timeout return RetrySettings.newBuilder() .setMaxAttempts(maxAttempts) - .setMaxRetryDelayDuration(Duration.ofMillis(maxRetryDelay)) - .setTotalTimeoutDuration(Duration.ofMillis(totalTimeOut)) - .setInitialRetryDelayDuration(Duration.ofMillis(initialRetryDelay)) - .setRetryDelayMultiplier(retryDelayMultiplier) - .setInitialRpcTimeoutDuration(Duration.ofMillis(totalTimeOut)) - .setRpcTimeoutMultiplier(retryDelayMultiplier) - .setMaxRpcTimeoutDuration(Duration.ofMillis(totalTimeOut)) + .setTotalTimeoutDuration(Duration.ofMillis(totalTimeoutMs)) + .setInitialRetryDelayDuration(Duration.ofMillis(initialRetryDelayMs)) + .setMaxRetryDelayDuration(Duration.ofMillis(maxRetryDelayMs)) + .setRetryDelayMultiplier(backoffMultiplier) + .setInitialRpcTimeoutDuration(Duration.ofMillis(initialRpcTimeoutMs)) + .setMaxRpcTimeoutDuration(Duration.ofMillis(maxRpcTimeoutMs)) + .setRpcTimeoutMultiplier(backoffMultiplier) .build(); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index 76b358dae6..d41ccc0e12 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -477,7 +477,7 @@ class ITBigQueryTest { Field.newBuilder("BooleanField", LegacySQLTypeName.BOOLEAN) .setDescription("BooleanDescription") .build(); - private static final Schema DDL_TABLE_SCHEMA = + private static final Schema SIMPLE_TABLE_SCHEMA = Schema.of(DDL_TIMESTAMP_FIELD_SCHEMA, DDL_STRING_FIELD_SCHEMA, DDL_BOOLEAN_FIELD_SCHEMA); private static final Schema LARGE_TABLE_SCHEMA = Schema.of( @@ -622,7 +622,7 @@ class
ITBigQueryTest { private static final String EXTRACT_MODEL_FILE = "extract_model.csv"; private static final String BUCKET = RemoteStorageHelper.generateBucketName(); private static final TableId TABLE_ID = TableId.of(DATASET, generateTableName("testing_table")); - private static final TableId TABLE_ID_DDL = + private static final TableId TABLE_ID_SIMPLE = TableId.of(DATASET, generateTableName("ddl_testing_table")); private static final TableId TABLE_ID_FAST_QUERY = TableId.of(DATASET, generateTableName("fast_query_testing_table")); @@ -1163,9 +1163,11 @@ static void beforeClass() throws InterruptedException, IOException { LoadJobConfiguration configurationDDL = LoadJobConfiguration.newBuilder( - TABLE_ID_DDL, "gs://" + BUCKET + "/" + JSON_LOAD_FILE_SIMPLE, FormatOptions.json()) + TABLE_ID_SIMPLE, + "gs://" + BUCKET + "/" + JSON_LOAD_FILE_SIMPLE, + FormatOptions.json()) .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) - .setSchema(DDL_TABLE_SCHEMA) + .setSchema(SIMPLE_TABLE_SCHEMA) .setLabels(labels) .build(); Job jobDDL = bigquery.create(JobInfo.of(configurationDDL)); @@ -4773,24 +4775,41 @@ void testFastSQLQueryMultiPage() throws InterruptedException { @Test void testFastDMLQuery() throws InterruptedException { - String tableName = TABLE_ID_FAST_QUERY.getTable(); + // The test runs an update query. Clone the table to ensure that this doesn't impact + // other tests. + String tableName = generateTableName("test_table_fast_query_dml"); + String tableNameFastQuery = TABLE_ID_SIMPLE.getTable(); + String ddlQuery = + String.format( + "CREATE OR REPLACE TABLE %s (" + + "TimestampField TIMESTAMP OPTIONS(description='TimestampDescription'), " + + "StringField STRING OPTIONS(description='StringDescription'), " + + "BooleanField BOOLEAN OPTIONS(description='BooleanDescription') " + + ") AS SELECT DISTINCT * FROM %s", + tableName, tableNameFastQuery); + QueryJobConfiguration ddlConfig = + QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); + TableResult result = bigquery.query(ddlConfig); + assertNotNull(result.getJobId()); + String dmlQuery = String.format("UPDATE %s.%s SET StringField = 'hello' WHERE TRUE", DATASET, tableName); QueryJobConfiguration dmlConfig = QueryJobConfiguration.newBuilder(dmlQuery).build(); - TableResult result = bigquery.query(dmlConfig); - assertNotNull(result.getJobId()); - assertEquals(TABLE_SCHEMA, result.getSchema()); + TableResult resultAfterDML = bigquery.query(dmlConfig); + assertNotNull(resultAfterDML.getJobId()); + assertEquals(SIMPLE_TABLE_SCHEMA, resultAfterDML.getSchema()); // Using the job reference on the TableResult, lookup and verify DML statistics. 
- Job queryJob = bigquery.getJob(result.getJobId()); + Job queryJob = bigquery.getJob(resultAfterDML.getJobId()); + queryJob = queryJob.waitFor(); JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); - assertEquals(2L, statistics.getNumDmlAffectedRows().longValue()); - assertEquals(2L, statistics.getDmlStats().getUpdatedRowCount().longValue()); + assertEquals(1L, statistics.getNumDmlAffectedRows().longValue()); + assertEquals(1L, statistics.getDmlStats().getUpdatedRowCount().longValue()); } @Test void testFastDDLQuery() throws InterruptedException { - String tableName = "test_table_fast_query_ddl"; - String tableNameFastQuery = TABLE_ID_DDL.getTable(); + String tableName = generateTableName("test_table_fast_query_ddl"); + String tableNameFastQuery = TABLE_ID_SIMPLE.getTable(); String ddlQuery = String.format( "CREATE OR REPLACE TABLE %s (" @@ -4803,7 +4822,7 @@ void testFastDDLQuery() throws InterruptedException { QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result = bigquery.query(ddlConfig); assertNotNull(result.getJobId()); - assertEquals(DDL_TABLE_SCHEMA, result.getSchema()); + assertEquals(SIMPLE_TABLE_SCHEMA, result.getSchema()); assertEquals(0, result.getTotalRows()); // Verify correctness of table content String sqlQuery = String.format("SELECT * FROM %s.%s", DATASET, tableName); @@ -5078,8 +5097,23 @@ void testExecuteSelectSessionSupport() throws BigQuerySQLException { @Test void testDmlStatistics() throws InterruptedException { - String tableName = TABLE_ID_FAST_QUERY.getTable(); - // Run a DML statement to UPDATE 2 rows of data + // This runs an update SQL query. Clone the table to ensure that this doesn't impact + // other tests. + String tableName = generateTableName("test_table_dml_stats"); + String tableNameSimple = TABLE_ID_SIMPLE.getTable(); + String ddlQuery = + String.format( + "CREATE OR REPLACE TABLE %s (" + + "TimestampField TIMESTAMP OPTIONS(description='TimestampDescription'), " + + "StringField STRING OPTIONS(description='StringDescription'), " + + "BooleanField BOOLEAN OPTIONS(description='BooleanDescription') " + + ") AS SELECT DISTINCT * FROM %s", + tableName, tableNameSimple); + QueryJobConfiguration ddlConfig = + QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); + TableResult result = bigquery.query(ddlConfig); + assertNotNull(result.getJobId()); + String dmlQuery = String.format("UPDATE %s.%s SET StringField = 'hello' WHERE TRUE", DATASET, tableName); QueryJobConfiguration dmlConfig = QueryJobConfiguration.newBuilder(dmlQuery).build(); @@ -5087,20 +5121,37 @@ void testDmlStatistics() throws InterruptedException { remoteJob = remoteJob.waitFor(); assertNull(remoteJob.getStatus().getError()); - TableResult result = remoteJob.getQueryResults(); - assertNotNull(result.getJobId()); - assertEquals(TABLE_SCHEMA, result.getSchema()); + TableResult resultAfterUpdate = remoteJob.getQueryResults(); + assertNotNull(resultAfterUpdate.getJobId()); + assertEquals(SIMPLE_TABLE_SCHEMA, resultAfterUpdate.getSchema()); Job queryJob = bigquery.getJob(remoteJob.getJobId()); + queryJob = queryJob.waitFor(); JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); - assertEquals(2L, statistics.getNumDmlAffectedRows().longValue()); - assertEquals(2L, statistics.getDmlStats().getUpdatedRowCount().longValue()); + assertEquals(1L, statistics.getNumDmlAffectedRows().longValue()); + assertEquals(1L, statistics.getDmlStats().getUpdatedRowCount().longValue()); 
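// A minimal sketch of the statistics pattern the updated assertions rely on
// (variable names are illustrative, and the surrounding method is assumed to
// declare throws InterruptedException): a DML TableResult carries no
// affected-row counts, so the job is re-fetched and waited on first.
//   TableResult dmlResult = bigquery.query(QueryJobConfiguration.of(dmlQuery));
//   Job completedJob = bigquery.getJob(dmlResult.getJobId()).waitFor();
//   JobStatistics.QueryStatistics queryStats = completedJob.getStatistics();
//   long affectedRows = queryStats.getNumDmlAffectedRows(); // all rows the statement touched
//   long updatedRows = queryStats.getDmlStats().getUpdatedRowCount(); // per-verb breakdown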
} /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test void testTransactionInfo() throws InterruptedException { - String tableName = TABLE_ID_FAST_QUERY.getTable(); + // The transaction runs an update query. Clone the table to ensure that this doesn't impact + // other tests. + String tableName = generateTableName("test_table_transaction_info"); + String tableNameSimple = TABLE_ID_SIMPLE.getTable(); + String ddlQuery = + String.format( + "CREATE OR REPLACE TABLE %s (" + + "TimestampField TIMESTAMP OPTIONS(description='TimestampDescription'), " + + "StringField STRING OPTIONS(description='StringDescription'), " + + "BooleanField BOOLEAN OPTIONS(description='BooleanDescription') " + + ") AS SELECT DISTINCT * FROM %s", + tableName, tableNameSimple); + QueryJobConfiguration ddlConfig = + QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); + TableResult result = bigquery.query(ddlConfig); + assertNotNull(result.getJobId()); + String transaction = String.format( "BEGIN TRANSACTION;\n" @@ -5913,7 +5964,7 @@ void testCopyJobStatistics() throws InterruptedException, TimeoutException { @Test void testSnapshotTableCopyJob() throws InterruptedException { String sourceTableName = "test_copy_job_base_table"; - String ddlTableName = TABLE_ID_DDL.getTable(); + String ddlTableName = TABLE_ID_SIMPLE.getTable(); // this creates a snapshot table at specified snapshotTime String snapshotTableName = "test_snapshot_table"; // Create source table with some data in it @@ -5930,7 +5981,7 @@ void testSnapshotTableCopyJob() throws InterruptedException { TableId sourceTableId = TableId.of(DATASET, sourceTableName); TableResult result = bigquery.query(ddlConfig); assertNotNull(result.getJobId()); - assertEquals(DDL_TABLE_SCHEMA, result.getSchema()); + assertEquals(SIMPLE_TABLE_SCHEMA, result.getSchema()); Table remoteTable = bigquery.getTable(DATASET, sourceTableName); assertNotNull(remoteTable); @@ -5952,7 +6003,7 @@ void testSnapshotTableCopyJob() throws InterruptedException { assertEquals(snapshotTableId.getDataset(), snapshotTable.getTableId().getDataset()); assertEquals(snapshotTableName, snapshotTable.getTableId().getTable()); assertTrue(snapshotTable.getDefinition() instanceof SnapshotTableDefinition); - assertEquals(DDL_TABLE_SCHEMA, snapshotTable.getDefinition().getSchema()); + assertEquals(SIMPLE_TABLE_SCHEMA, snapshotTable.getDefinition().getSchema()); assertNotNull(((SnapshotTableDefinition) snapshotTable.getDefinition()).getSnapshotTime()); assertEquals( sourceTableName, @@ -5978,7 +6029,7 @@ void testSnapshotTableCopyJob() throws InterruptedException { assertNotNull(restoredTable); assertEquals(restoredTableId.getDataset(), restoredTable.getTableId().getDataset()); assertEquals(restoredTableName, restoredTable.getTableId().getTable()); - assertEquals(DDL_TABLE_SCHEMA, restoredTable.getDefinition().getSchema()); + assertEquals(SIMPLE_TABLE_SCHEMA, restoredTable.getDefinition().getSchema()); assertEquals(snapshotTable.getNumBytes(), restoredTable.getNumBytes()); assertEquals(snapshotTable.getNumRows(), restoredTable.getNumRows()); @@ -6857,7 +6908,7 @@ void testCreateExternalTableWithReferenceFileSchemaParquet() { @Test void testCloneTableCopyJob() throws InterruptedException { String sourceTableName = "test_copy_job_base_table"; - String ddlTableName = TABLE_ID_DDL.getTable(); + String ddlTableName = TABLE_ID_SIMPLE.getTable(); String cloneTableName = "test_clone_table"; // Create source table with some data in it String ddlQuery = @@ -6873,7 
+6924,7 @@ void testCloneTableCopyJob() throws InterruptedException { TableId sourceTableId = TableId.of(DATASET, sourceTableName); TableResult result = bigquery.query(ddlConfig); assertNotNull(result.getJobId()); - assertEquals(DDL_TABLE_SCHEMA, result.getSchema()); + assertEquals(SIMPLE_TABLE_SCHEMA, result.getSchema()); Table remoteTable = bigquery.getTable(DATASET, sourceTableName); assertNotNull(remoteTable); @@ -6897,7 +6948,7 @@ void testCloneTableCopyJob() throws InterruptedException { assertEquals(cloneTableName, cloneTable.getTableId().getTable()); assertEquals(TableDefinition.Type.TABLE, cloneTable.getDefinition().getType()); assertTrue(cloneTable.getDefinition() instanceof StandardTableDefinition); - assertEquals(DDL_TABLE_SCHEMA, cloneTable.getDefinition().getSchema()); + assertEquals(SIMPLE_TABLE_SCHEMA, cloneTable.getDefinition().getSchema()); assertTrue(cloneTable.getCloneDefinition() instanceof CloneDefinition); assertEquals(sourceTableName, cloneTable.getCloneDefinition().getBaseTableId().getTable()); assertNotNull(cloneTable.getCloneDefinition().getCloneTime()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java index 589f7ccda0..f66df17ab5 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java @@ -26,14 +26,13 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; import java.time.Duration; -import java.util.concurrent.ExecutionException; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -public class RemoteBigQueryHelperTest { +class RemoteBigQueryHelperTest { private static final String DATASET_NAME = "dataset-name"; private static final String PROJECT_ID = "project-id"; @@ -67,7 +66,7 @@ public class RemoteBigQueryHelperTest { private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes()); @Test - public void testForceDelete() throws InterruptedException, ExecutionException { + void testForceDelete() { BigQuery bigqueryMock = Mockito.mock(BigQuery.class); Mockito.when(bigqueryMock.delete(DATASET_NAME, DatasetDeleteOption.deleteContents())) .thenReturn(true); @@ -76,15 +75,15 @@ public void testForceDelete() throws InterruptedException, ExecutionException { } @Test - public void testCreateFromStream() { + void testCreateFromStream() { RemoteBigQueryHelper helper = RemoteBigQueryHelper.create(PROJECT_ID, JSON_KEY_STREAM); BigQueryOptions options = helper.getOptions(); assertEquals(PROJECT_ID, options.getProjectId()); assertEquals(60000, ((HttpTransportOptions) options.getTransportOptions()).getConnectTimeout()); assertEquals(60000, ((HttpTransportOptions) options.getTransportOptions()).getReadTimeout()); assertEquals(10, options.getRetrySettings().getMaxAttempts()); - assertEquals(Duration.ofMillis(30000), options.getRetrySettings().getMaxRetryDelayDuration()); - assertEquals(Duration.ofMillis(120000), options.getRetrySettings().getTotalTimeoutDuration()); - assertEquals(Duration.ofMillis(250), options.getRetrySettings().getInitialRetryDelayDuration()); + assertEquals(Duration.ofMillis(1000), options.getRetrySettings().getMaxRetryDelayDuration()); + 
assertEquals(Duration.ofMillis(3000), options.getRetrySettings().getTotalTimeoutDuration()); + assertEquals(Duration.ofMillis(100), options.getRetrySettings().getInitialRetryDelayDuration()); } } From fab86281af519a8294f3385dccf9e17cbb56d21c Mon Sep 17 00:00:00 2001 From: Kirill Logachev Date: Thu, 15 Jan 2026 18:35:29 -0800 Subject: [PATCH 34/36] chore(jdbc): moving helper scripts & simplify dependencies (#4062) --- .gitignore | 6 +- google-cloud-bigquery-jdbc/Dockerfile | 21 ++++ google-cloud-bigquery-jdbc/Makefile | 140 ++++++++++++++++++++++++++ google-cloud-bigquery-jdbc/pom.xml | 92 +---------------- 4 files changed, 169 insertions(+), 90 deletions(-) create mode 100644 google-cloud-bigquery-jdbc/Dockerfile create mode 100644 google-cloud-bigquery-jdbc/Makefile diff --git a/.gitignore b/.gitignore index c94c87d10b..d5bcd47b28 100644 --- a/.gitignore +++ b/.gitignore @@ -12,4 +12,8 @@ __pycache__ SimpleBenchmarkApp/src/main/java/com/google/cloud/App.java .flattened-pom.xml # Local Test files -*ITLocalTest.java \ No newline at end of file +*ITLocalTest.java + +# JDBC-specific files +google-cloud-bigquery-jdbc/drivers/ +google-cloud-bigquery-jdbc/jacoco* \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/Dockerfile b/google-cloud-bigquery-jdbc/Dockerfile new file mode 100644 index 0000000000..f88cc5a4bc --- /dev/null +++ b/google-cloud-bigquery-jdbc/Dockerfile @@ -0,0 +1,21 @@ +FROM gcr.io/cloud-devrel-public-resources/java11 + +ENV JDBC_DOCKER_ENV=true +RUN apt-get update && apt-get install -y zip && rm -rf /var/lib/apt/lists/* + +RUN mkdir /tst +COPY ./pom.xml /src/pom.xml +COPY ./java.header /src/java.header +COPY ./license-checks.xml /src/license-checks.xml +COPY ./google-cloud-bigquery-jdbc/pom.xml /src/google-cloud-bigquery-jdbc/pom.xml + +COPY ./google-cloud-bigquery /src/google-cloud-bigquery +COPY ./google-cloud-bigquery-bom /src/google-cloud-bigquery-bom + + +WORKDIR /src +RUN mvn install -DskipTests + +WORKDIR /src/google-cloud-bigquery-jdbc + +ENTRYPOINT [] diff --git a/google-cloud-bigquery-jdbc/Makefile b/google-cloud-bigquery-jdbc/Makefile new file mode 100644 index 0000000000..f196596870 --- /dev/null +++ b/google-cloud-bigquery-jdbc/Makefile @@ -0,0 +1,140 @@ +CONTAINER_NAME=jdbc +PACKAGE_DESTINATION=$(PWD)/drivers +SRC="$(PWD)/.." +skipSurefire ?= true + +# no indendation for ifndef\endif due to their evaluation before execution +.check-env: | +ifndef GOOGLE_APPLICATION_CREDENTIALS + $(error GOOGLE_APPLICATION_CREDENTIALS is required to run tests) +endif + +install: + mvn clean install + +clean: + mvn clean + +lint: + mvn com.spotify.fmt:fmt-maven-plugin:format + +unittest: | + mvn -B -ntp \ + -DtrimStackTrace=false \ + -Dclirr.skip=true \ + -Denforcer.skip=true \ + -Dtest=$(test) \ + test + +# Important: By default, this command will skip unittests. 
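Returning briefly to the retry change from #4059 above: the tightened RetrySettings only take effect because RemoteBigQueryHelper hands them to the options builder. A minimal sketch of applying the same exponential backoff to any client, using only APIs that already appear in this series (the literal values mirror those asserted in RemoteBigQueryHelperTest):

    import com.google.api.gax.retrying.RetrySettings;
    import com.google.cloud.bigquery.BigQuery;
    import com.google.cloud.bigquery.BigQueryOptions;
    import java.time.Duration;

    // Each retry delay grows by 1.5x from 100ms up to a 1s cap, and the whole
    // call, retries included, is bounded by a 3s total timeout.
    RetrySettings retrySettings =
        RetrySettings.newBuilder()
            .setMaxAttempts(10)
            .setInitialRetryDelayDuration(Duration.ofMillis(100))
            .setMaxRetryDelayDuration(Duration.ofMillis(1000))
            .setRetryDelayMultiplier(1.5)
            .setTotalTimeoutDuration(Duration.ofMillis(3000))
            .build();
    BigQuery bigquery =
        BigQueryOptions.newBuilder().setRetrySettings(retrySettings).build().getService();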
+# To include unit tests, run: make integration-test skipSurefire=false +integration-test: .check-env + mvn -B -ntp \ + -Penable-integration-tests \ + -DtrimStackTrace=false \ + -DskipSurefire=$(skipSurefire) \ + -Dclirr.skip=true \ + -Denforcer.skip=true \ + -Dit.failIfNoSpecifiedTests=false \ + -Dit.test=$(test) \ + integration-test + +unit-test-coverage: + $(MAKE) unittest + mvn -B -ntp jacoco:report + BUILD_DIR=$$(mvn -B -ntp help:evaluate -Dexpression=project.build.directory -q -DforceStdout); \ + cd $$BUILD_DIR/site && zip -r $$OLDPWD/jacoco-unittests.zip jacoco && cd $$OLDPWD + +full-coverage: .check-env + $(MAKE) integration-test skipSurefire=false test=ITBigQueryJDBCTest,ITNightlyBigQueryTest + mvn -B -ntp jacoco:report + BUILD_DIR=$$(mvn -B -ntp help:evaluate -Dexpression=project.build.directory -q -DforceStdout); \ + cd $$BUILD_DIR/site && zip -r $$OLDPWD/jacoco-full.zip jacoco && cd $$OLDPWD + +package: + mvn clean package \ + -DincludeScope=runtime \ + -Dmaven.test.skip=true + mvn dependency:copy-dependencies \ + -DincludeScope=runtime + ${MAKE} generate-dependency-list + +package-all-dependencies: + mvn package \ + -DincludeScope=runtime \ + -Dmaven.test.skip=true \ + -P=release-all-dependencies + +package-all-dependencies-shaded: + mvn package \ + -DincludeScope=runtime \ + -Dmaven.test.skip=true \ + -P=release-all-dependencies,release-all-dependencies-shaded + +generate-dependency-list: + mvn -B dependency:list \ + -f pom.xml \ + -DincludeScope=runtime | grep :jar: | sed -E "s/^.* ([^: ]+):([^:]+):([^:]+):([^:]+).*/<dependency><groupId>\1<\/groupId><artifactId>\2<\/artifactId><version>\4<\/version><\/dependency>/g" > dependencies.txt + +# Commands for dockerized environments +.docker-run: | + docker run -it \ + -v $(GOOGLE_APPLICATION_CREDENTIALS):/auth/application_creds.json \ + -v "$(GOOGLE_APPLICATION_CREDENTIALS).p12":/auth/application_creds.p12 \ + -e "GOOGLE_APPLICATION_CREDENTIALS=/auth/application_creds.json" \ + -v $(SRC):/src \ + -e "SA_EMAIL=test_email" \ + -e "SA_SECRET=/auth/application_creds.json" \ + -e "SA_SECRET_P12=/auth/application_creds.p12" \ + $(CONTAINER_NAME) $(args) + +docker-build: + docker build -t $(CONTAINER_NAME) -f Dockerfile ..
+ +docker-session: + $(MAKE) .docker-run args="bash" + +docker-package-all-dependencies: docker-build + mkdir -p $(PACKAGE_DESTINATION) + docker run \ + -v $(SRC):/src \ + -v $(PACKAGE_DESTINATION):/pkg \ + $(CONTAINER_NAME) \ + sh -c "make package-all-dependencies && \ + cp --no-preserve=ownership /mvn/test-target/google-cloud-bigquery-jdbc-*.jar /pkg && \ + rm -f /pkg/*tests.jar" + +docker-package-all-dependencies-shaded: docker-build + mkdir -p $(PACKAGE_DESTINATION) + docker run \ + -v $(SRC):/src \ + -v $(PACKAGE_DESTINATION):/pkg \ + $(CONTAINER_NAME) \ + sh -c "make package-all-dependencies-shaded && \ + cp --no-preserve=ownership /mvn/test-target/google-cloud-bigquery-jdbc-*.jar /pkg && \ + rm -f /pkg/*tests.jar" + +docker-package: docker-build + mkdir -p $(PACKAGE_DESTINATION) + docker run \ + -v $(SRC):/src \ + -v $(PACKAGE_DESTINATION):/pkg \ + $(CONTAINER_NAME) \ + sh -c "make package && \ + mkdir -p /tmp/package && \ + cp --no-preserve=ownership /mvn/test-target/google-cloud-bigquery-jdbc-*.jar /tmp/package && \ + rm -f /pkg/*tests.jar && \ + cp --no-preserve=ownership dependencies.txt /tmp/package && \ + rm dependencies.txt && \ + cp --no-preserve=ownership /mvn/test-target/dependency/*.jar /tmp/package && \ + zip -j -r /pkg/google-cloud-bigquery-jdbc-$$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout).zip /tmp/package" + +docker-unittest: | + $(MAKE) .docker-run args="make unittest test=$(test)" + +docker-integration-test: .check-env + $(MAKE) .docker-run args="make integration-test test=$(test) skipSurefire=$(skipSurefire)" + +docker-coverage: + $(MAKE) .docker-run args="make unit-test-coverage" + $(MAKE) .docker-run args="make full-coverage" \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/pom.xml b/google-cloud-bigquery-jdbc/pom.xml index a697ba1017..677437794b 100644 --- a/google-cloud-bigquery-jdbc/pom.xml +++ b/google-cloud-bigquery-jdbc/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-jdbc - 0.1.0 + 0.1.0-SNAPSHOT jar BigQuery JDBC https://github.com/googleapis/java-bigquery-jdbc @@ -60,8 +60,8 @@ com.google.cloud - google-cloud-bigquery-jdbc-parent - 0.1.0 + google-cloud-bigquery-parent + 2.57.2-SNAPSHOT @@ -72,72 +72,6 @@ com.google.cloud google-cloud-bigquerystorage - - com.google.api - api-common - - - org.apache.arrow - arrow-vector - - - com.google.guava - guava - - - com.google.cloud - google-cloud-core - - - com.google.api - gax - - - com.google.auth - google-auth-library-oauth2-http - - - com.google.auth - google-auth-library-credentials - - - - - org.apache.arrow - arrow-memory-core - - - org.apache.arrow - arrow-memory-netty - - - com.google.protobuf - protobuf-java - - - com.google.api.grpc - proto-google-cloud-bigquerystorage-v1 - - - com.google.code.gson - gson - - - com.google.code.findbugs - jsr305 - - - org.apache.httpcomponents.core5 - httpcore5 - - - org.apache.httpcomponents.client5 - httpclient5 - - - com.google.http-client - google-http-client - com.google.http-client google-http-client-apache-v5 @@ -152,26 +86,6 @@ - - com.google.cloud - google-cloud-core-http - - - com.google.api - gax-grpc - - - io.grpc - grpc-api - - - io.grpc - grpc-netty-shaded - - - io.grpc - grpc-core - com.google.truth From a8aef5ab71c93229cfed87a22df47d60e9aa3de0 Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Fri, 16 Jan 2026 13:31:19 -0500 Subject: [PATCH 35/36] chore: Fix flaky testListTablesWithPartitioning test (#4068) * chore: Fix flaky testListTablesWithPartitioning test * chore: Fix test comment * chore: 
Fix typo to partitionType --- .../cloud/bigquery/it/ITBigQueryTest.java | 33 ++++++++++++------- 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index d41ccc0e12..68079ddcaf 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -219,7 +219,6 @@ class ITBigQueryTest { private static final byte[] BYTES = {0xD, 0xE, 0xA, 0xD}; private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES); - private static final Long EXPIRATION_MS = 86400000L; private static final Logger LOG = Logger.getLogger(ITBigQueryTest.class.getName()); private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); private static final String UK_DATASET = RemoteBigQueryHelper.generateDatasetName(); @@ -2626,12 +2625,13 @@ void testListTables() { @Test void testListTablesWithPartitioning() { + long expirationMs = 86400000L; + Type partitionType = Type.DAY; String tableName = "test_list_tables_partitioning"; - TimePartitioning timePartitioning = TimePartitioning.of(Type.DAY, EXPIRATION_MS); StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder() .setSchema(TABLE_SCHEMA) - .setTimePartitioning(timePartitioning) + .setTimePartitioning(TimePartitioning.of(partitionType, expirationMs)) .build(); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); Table createdPartitioningTable = bigquery.create(tableInfo); @@ -2639,16 +2639,25 @@ void testListTablesWithPartitioning() { try { Page

<Table> tables = bigquery.listTables(DATASET); boolean found = false; - Iterator<Table>
    tableIterator = tables.getValues().iterator(); - while (tableIterator.hasNext() && !found) { - StandardTableDefinition standardTableDefinition = tableIterator.next().getDefinition(); - if (standardTableDefinition.getTimePartitioning() != null - && standardTableDefinition.getTimePartitioning().getType().equals(Type.DAY) - && standardTableDefinition - .getTimePartitioning() - .getExpirationMs() - .equals(EXPIRATION_MS)) { + for (Table table : tables.getValues()) { + // Look for the table that matches the newly partitioned table. Other tables in the + // dataset may not be partitioned or may be partitioned but may not be expiring + // (e.g. `null` expirationMs). + if (!table + .getTableId() + .getTable() + .equals(createdPartitioningTable.getTableId().getTable())) { + continue; + } + + StandardTableDefinition standardTableDefinition = table.getDefinition(); + TimePartitioning timePartitioning = standardTableDefinition.getTimePartitioning(); + assertNotNull(timePartitioning); + assertNotNull(timePartitioning.getExpirationMs()); + if (timePartitioning.getType().equals(partitionType) + && timePartitioning.getExpirationMs().equals(expirationMs)) { found = true; + break; } } assertTrue(found); From 2af2b2b769dbfa61068365e6cb7c4e0eea5fa530 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 19 Jan 2026 10:55:41 -0500 Subject: [PATCH 36/36] chore(main): release 2.57.2 (#4044) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 13 +++++++++++++ benchmark/pom.xml | 2 +- google-cloud-bigquery-bom/pom.xml | 4 ++-- google-cloud-bigquery-jdbc/pom.xml | 2 +- google-cloud-bigquery/pom.xml | 4 ++-- pom.xml | 4 ++-- samples/snapshot/pom.xml | 2 +- versions.txt | 2 +- 8 files changed, 23 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 469bc0b33d..f1ac8d1c60 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [2.57.2](https://github.com/googleapis/java-bigquery/compare/v2.57.1...v2.57.2) (2026-01-16) + + +### Bug Fixes + +* Gracefully handle thread interruption in ConnectionImpl to preve… ([#4047](https://github.com/googleapis/java-bigquery/issues/4047)) ([031deb0](https://github.com/googleapis/java-bigquery/commit/031deb00f153b47d37655b025fcef298a3db0e0b)) +* Job.isDone() uses Job.Status.State if available ([#4039](https://github.com/googleapis/java-bigquery/issues/4039)) ([fa0a12e](https://github.com/googleapis/java-bigquery/commit/fa0a12e3cf171abab528c318ba3f4260b69a5274)) + + +### Documentation + +* Add specific samples for creating and query timestamps ([#4051](https://github.com/googleapis/java-bigquery/issues/4051)) ([fac16a8](https://github.com/googleapis/java-bigquery/commit/fac16a8eb05a6e13e406feeb9761259cdbf8e674)) + ## [2.57.1](https://github.com/googleapis/java-bigquery/compare/v2.57.0...v2.57.1) (2025-12-12) diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 41e1a2fcb3..2e0e6f6bab 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -6,7 +6,7 @@ google-cloud-bigquery-parent com.google.cloud - 2.57.2-SNAPSHOT + 2.57.2 diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 52ea2c14be..7177694c62 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-bom - 2.57.2-SNAPSHOT + 2.57.2 pom com.google.cloud @@ -54,7 +54,7 @@ com.google.cloud google-cloud-bigquery - 2.57.2-SNAPSHOT + 
2.57.2 diff --git a/google-cloud-bigquery-jdbc/pom.xml b/google-cloud-bigquery-jdbc/pom.xml index 677437794b..8f2c9d7b49 100644 --- a/google-cloud-bigquery-jdbc/pom.xml +++ b/google-cloud-bigquery-jdbc/pom.xml @@ -61,7 +61,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.57.2-SNAPSHOT + 2.57.2 diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 174bb9cb36..6b4661fb60 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.57.2-SNAPSHOT + 2.57.2 jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.57.2-SNAPSHOT + 2.57.2 google-cloud-bigquery diff --git a/pom.xml b/pom.xml index 2929f8a9f5..cd13b20082 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 2.57.2-SNAPSHOT + 2.57.2 BigQuery Parent https://github.com/googleapis/java-bigquery @@ -93,7 +93,7 @@ com.google.cloud google-cloud-bigquery - 2.57.2-SNAPSHOT + 2.57.2 diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index e10b51dc7d..f7ff76844c 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -56,7 +56,7 @@ com.google.cloud google-cloud-bigquery - 2.57.2-SNAPSHOT + 2.57.2 diff --git a/versions.txt b/versions.txt index 507619351d..54f6252b10 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:2.57.1:2.57.2-SNAPSHOT \ No newline at end of file +google-cloud-bigquery:2.57.2:2.57.2 \ No newline at end of file
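A closing note on the flaky-test fix from #4068: the corrected loop is reliable because it matches the listed table by name before asserting on its partition spec, instead of accepting the first table that happens to carry a day partition. The same create-then-verify round trip as a compact sketch (the bigquery client, dataset, and schema are assumed to exist; names are illustrative):

    // Create a day-partitioned table whose partitions expire after one day,
    // then confirm the spec survives a round trip through listTables().
    TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY, 86400000L);
    StandardTableDefinition definition =
        StandardTableDefinition.newBuilder()
            .setSchema(schema)
            .setTimePartitioning(partitioning)
            .build();
    bigquery.create(TableInfo.of(TableId.of(dataset, "partition_roundtrip"), definition));

    for (Table table : bigquery.listTables(dataset).getValues()) {
      if (!table.getTableId().getTable().equals("partition_roundtrip")) {
        continue; // other tables may be unpartitioned or have no expiration
      }
      StandardTableDefinition listed = table.getDefinition();
      TimePartitioning roundTripped = listed.getTimePartitioning();
      assert roundTripped != null
          && roundTripped.getExpirationMs() != null
          && roundTripped.getExpirationMs() == 86400000L;
    }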