Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Set air.compiler.fail-warnings to true in BigQuery #22050

Merged
merged 3 commits into from
May 22, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions plugin/trino-bigquery/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

<properties>
<air.main.basedir>${project.parent.basedir}</air.main.basedir>
<air.compiler.fail-warnings>true</air.compiler.fail-warnings>
<air.test.jvm.additional-arguments>${air.test.jvm.additional-arguments.default} --add-opens=java.base/java.nio=ALL-UNNAMED</air.test.jvm.additional-arguments>
</properties>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import com.google.cloud.bigquery.DatasetId;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobException;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.JobStatistics;
import com.google.cloud.bigquery.JobStatistics.QueryStatistics;
Expand Down Expand Up @@ -59,13 +60,15 @@
import static com.google.cloud.bigquery.TableDefinition.Type.SNAPSHOT;
import static com.google.cloud.bigquery.TableDefinition.Type.TABLE;
import static com.google.cloud.bigquery.TableDefinition.Type.VIEW;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Streams.stream;
import static io.trino.plugin.bigquery.BigQueryErrorCode.BIGQUERY_AMBIGUOUS_OBJECT_NAME;
import static io.trino.plugin.bigquery.BigQueryErrorCode.BIGQUERY_FAILED_TO_EXECUTE_QUERY;
import static io.trino.plugin.bigquery.BigQueryErrorCode.BIGQUERY_INVALID_STATEMENT;
import static io.trino.plugin.bigquery.BigQueryErrorCode.BIGQUERY_LISTING_DATASET_ERROR;
import static io.trino.plugin.bigquery.BigQueryErrorCode.BIGQUERY_LISTING_TABLE_ERROR;
Expand Down Expand Up @@ -204,7 +207,13 @@ public DatasetInfo getDataset(DatasetId datasetId)

public Optional<TableInfo> getTable(TableId remoteTableId)
{
return Optional.ofNullable(bigQuery.getTable(remoteTableId));
try {
return Optional.ofNullable(bigQuery.getTable(remoteTableId));
}
catch (BigQueryException e) {
// getTable method throws an exception in some situations, e.g. wild card tables
return Optional.empty();
}
}

public TableInfo getCachedTable(Duration viewExpiration, TableInfo remoteTableId, List<String> requiredColumns, Optional<String> filter)
Expand Down Expand Up @@ -338,6 +347,9 @@ private TableResult execute(ConnectorSession session, QueryJobConfiguration job)
try {
return bigQuery.query(jobWithQueryLabel);
}
catch (BigQueryException | JobException e) {
throw new TrinoException(BIGQUERY_FAILED_TO_EXECUTE_QUERY, "Failed to run the query: " + firstNonNull(e.getMessage(), e), e);
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new BigQueryException(BaseHttpServiceException.UNKNOWN_CODE, format("Failed to run the query [%s]", job.getQuery()), e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ public enum BigQueryErrorCode
BIGQUERY_PROXY_SSL_INITIALIZATION_FAILED(7, EXTERNAL),
BIGQUERY_BAD_WRITE(8, EXTERNAL),
BIGQUERY_LISTING_TABLE_ERROR(9, EXTERNAL),
BIGQUERY_CREATE_READ_SESSION_ERROR(10, EXTERNAL),
/**/;

private final ErrorCode errorCode;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@
import io.trino.spi.connector.RecordCursor;
import io.trino.spi.connector.RelationCommentMetadata;
import io.trino.spi.connector.RetryMode;
import io.trino.spi.connector.SaveMode;
import io.trino.spi.connector.SchemaNotFoundException;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.SchemaTablePrefix;
Expand Down Expand Up @@ -122,6 +123,8 @@
import static io.trino.plugin.bigquery.BigQueryUtil.quoted;
import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.connector.SaveMode.IGNORE;
import static io.trino.spi.connector.SaveMode.REPLACE;
import static io.trino.spi.type.BigintType.BIGINT;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
Expand Down Expand Up @@ -433,6 +436,7 @@ public ColumnMetadata getColumnMetadata(
return ((BigQueryColumnHandle) columnHandle).getColumnMetadata();
}

@SuppressWarnings("deprecation") // TODO Remove this method when https://github.com/trinodb/trino/pull/21920 is merged
@Override
public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix)
{
Expand Down Expand Up @@ -504,22 +508,29 @@ public void rollback()
}

@Override
public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, boolean ignoreExisting)
public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, SaveMode saveMode)
{
if (saveMode == REPLACE) {
throw new TrinoException(NOT_SUPPORTED, "This connector does not support replacing tables");
}
try {
createTable(session, tableMetadata, Optional.empty());
}
catch (BigQueryException e) {
if (ignoreExisting && e.getCode() == 409) {
if (saveMode == IGNORE && e.getCode() == 409) {
return;
}
throw e;
}
}

@Override
public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, Optional<ConnectorTableLayout> layout, RetryMode retryMode)
public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, Optional<ConnectorTableLayout> layout, RetryMode retryMode, boolean replace)
{
if (replace) {
throw new TrinoException(NOT_SUPPORTED, "This connector does not support replacing tables");
}

ColumnMetadata pageSinkIdColumn = buildPageSinkIdColumn(tableMetadata.getColumns().stream()
.map(ColumnMetadata::getName)
.collect(toImmutableList()));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import io.trino.spi.type.VarcharType;
import org.apache.avro.Conversions.DecimalConversion;
import org.apache.avro.Schema;
import org.apache.avro.SchemaParseException;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
Expand All @@ -51,6 +52,7 @@
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
Expand Down Expand Up @@ -298,7 +300,13 @@ Iterable<GenericRecord> parse(ReadRowsResponse response)
byte[] buffer = response.getAvroRows().getSerializedBinaryRows().toByteArray();
readBytes.addAndGet(buffer.length);
log.debug("Read %d bytes (total %d) from %s", buffer.length, readBytes.get(), split.getStreamName());
Schema avroSchema = new Schema.Parser().parse(split.getSchemaString());
Schema avroSchema;
try {
avroSchema = new Schema.Parser().parse(split.getSchemaString());
}
catch (SchemaParseException e) {
throw new TrinoException(GENERIC_INTERNAL_ERROR, "Invalid Avro schema: " + firstNonNull(e.getMessage(), e), e);
}
return () -> new AvroBinaryIterator(avroSchema, buffer);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
*/
package io.trino.plugin.bigquery;

import com.google.api.gax.rpc.ApiException;
import com.google.cloud.bigquery.TableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
Expand All @@ -38,6 +39,8 @@
import static com.google.cloud.bigquery.TableDefinition.Type.TABLE;
import static com.google.cloud.bigquery.TableDefinition.Type.VIEW;
import static com.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec.ZSTD;
import static com.google.common.base.MoreObjects.firstNonNull;
import static io.trino.plugin.bigquery.BigQueryErrorCode.BIGQUERY_CREATE_READ_SESSION_ERROR;
import static io.trino.plugin.bigquery.BigQuerySessionProperties.isViewMaterializationWithFilter;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static java.lang.String.format;
Expand Down Expand Up @@ -110,7 +113,14 @@ public ReadSession create(ConnectorSession session, TableId remoteTable, List<St
.onRetry(event -> log.debug("Request failed, retrying: %s", event.getLastException()))
.abortOn(failure -> !BigQueryUtil.isRetryable(failure))
.build())
.get(() -> bigQueryReadClient.createReadSession(createReadSessionRequest));
.get(() -> {
try {
return bigQueryReadClient.createReadSession(createReadSessionRequest);
}
catch (ApiException e) {
throw new TrinoException(BIGQUERY_CREATE_READ_SESSION_ERROR, "Cannot create read session" + firstNonNull(e.getMessage(), e), e);
}
});
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import io.trino.plugin.bigquery.BigQueryQueryRelationHandle;
import io.trino.plugin.bigquery.BigQueryTableHandle;
import io.trino.plugin.bigquery.BigQueryTypeManager;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorAccessControl;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorTableHandle;
Expand All @@ -47,6 +48,7 @@
import java.util.Optional;

import static com.google.common.collect.Iterables.getOnlyElement;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.function.table.ReturnTypeSpecification.GenericTable.GENERIC_TABLE;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static java.util.Objects.requireNonNull;
Expand Down Expand Up @@ -113,7 +115,8 @@ public TableFunctionAnalysis analyze(
ImmutableList.Builder<BigQueryColumnHandle> columnsBuilder = ImmutableList.builderWithExpectedSize(schema.getFields().size());
for (com.google.cloud.bigquery.Field field : schema.getFields()) {
if (!typeManager.isSupportedType(field)) {
throw new UnsupportedOperationException("Unsupported type: " + field.getType());
// TODO: Skip unsupported type instead of throwing an exception
throw new TrinoException(NOT_SUPPORTED, "Unsupported type: " + field.getType());
}
columnsBuilder.add(typeManager.toColumnHandle(field));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -328,16 +328,6 @@ protected Optional<DataMappingTestSetup> filterCaseSensitiveDataMappingTestData(
return Optional.of(dataMappingTestSetup);
}

@Test
@Override
public void testNoDataSystemTable()
{
// TODO (https://github.com/trinodb/trino/issues/6515): Big Query throws an error when trying to read "some_table$data".
assertThatThrownBy(super::testNoDataSystemTable)
.hasMessageFindingMatch(".*Cannot read partition information from a table that is not partitioned.*");
abort("TODO");
}

@Override
protected boolean isColumnNameRejected(Exception exception, String columnName, boolean delimited)
{
Expand Down Expand Up @@ -666,7 +656,7 @@ public void testDateYearOfEraPredicate()
// Override because the connector throws an exception instead of an empty result when the value is out of supported range
assertQuery("SELECT orderdate FROM orders WHERE orderdate = DATE '1997-09-14'", "VALUES DATE '1997-09-14'");
assertThat(query("SELECT * FROM orders WHERE orderdate = DATE '-1996-09-14'"))
.nonTrinoExceptionFailure().hasMessageMatching(".*Could not cast literal \"-1996-09-14\" to type DATE.*");
.failure().hasMessageMatching(".*Could not cast literal \"-1996-09-14\" to type DATE.*");
}

@Test
Expand Down Expand Up @@ -825,8 +815,7 @@ public void testQueryCache()

// Verify query cache is empty
assertThat(query(createNeverDisposition, "SELECT * FROM test." + materializedView))
// TODO should be TrinoException, provide a better error message
.nonTrinoExceptionFailure().hasMessageContaining("Not found");
.failure().hasMessageMatching("Failed to run the query: Not found: Table .* was not found .*");
// Populate cache and verify it
assertQuery(queryResultsCacheSession, "SELECT * FROM test." + materializedView, "VALUES 5");
assertQuery(createNeverDisposition, "SELECT * FROM test." + materializedView, "VALUES 5");
Expand Down Expand Up @@ -878,8 +867,7 @@ public void testWildcardTableWithDifferentColumnDefinition()
assertQuery("DESCRIBE test.\"" + wildcardTable + "\"", "VALUES ('value', 'varchar', '', '')");

assertThat(query("SELECT * FROM test.\"" + wildcardTable + "\""))
// TODO should be TrinoException
.nonTrinoExceptionFailure().hasMessageContaining("Cannot read field of type INT64 as STRING Field: value");
.failure().hasMessageContaining("Cannot read field of type INT64 as STRING Field: value");
}
finally {
onBigQuery("DROP TABLE IF EXISTS test." + firstTable);
Expand All @@ -891,7 +879,7 @@ public void testWildcardTableWithDifferentColumnDefinition()
public void testMissingWildcardTable()
{
assertThat(query("SELECT * FROM test.\"test_missing_wildcard_table_*\""))
.nonTrinoExceptionFailure().hasMessageEndingWith("does not match any table.");
.failure().hasMessageMatching(".* Table .* does not exist");
}

@Override
Expand Down Expand Up @@ -1030,7 +1018,7 @@ public void testNativeQuerySelectUnsupportedType()
// Check that column 'two' is not supported.
assertQuery("SELECT column_name FROM information_schema.columns WHERE table_schema = 'test' AND table_name = '" + tableName + "'", "VALUES 'one', 'three'");
assertThat(query("SELECT * FROM TABLE(bigquery.system.query(query => 'SELECT * FROM test." + tableName + "'))"))
.nonTrinoExceptionFailure().hasMessageContaining("Unsupported type");
.failure().hasMessageContaining("Unsupported type");
}
finally {
onBigQuery("DROP TABLE IF EXISTS test." + tableName);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,9 +73,7 @@ public void testSelectFailsForColumnName()
assertUpdate("INSERT INTO " + tableName + " VALUES ('test value')", 1);
// The storage API can't read the table, but query based API can read it
assertThat(query("SELECT * FROM " + tableName))
// TODO should be TrinoException, provide better error message
.nonTrinoExceptionFailure().cause()
.hasMessageMatching(".*(Illegal initial character|Invalid name).*");
.failure().hasMessageMatching("(Cannot create read|Invalid Avro schema).*(Illegal initial character|Invalid name).*");
assertThat(bigQuerySqlExecutor.executeQuery("SELECT * FROM " + tableName).getValues())
.extracting(field -> field.get(0).getStringValue())
.containsExactly("test value");
Expand Down
Loading