diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 7b0680faf29f..052357da097f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -523,6 +523,7 @@ jobs:
&& ! (contains(matrix.modules, 'trino-bigquery') && contains(matrix.profile, 'cloud-tests-2'))
&& ! (contains(matrix.modules, 'trino-redshift') && contains(matrix.profile, 'cloud-tests'))
&& ! (contains(matrix.modules, 'trino-redshift') && contains(matrix.profile, 'fte-tests'))
+ && ! (contains(matrix.modules, 'trino-snowflake') && contains(matrix.profile, 'cloud-tests'))
&& ! (contains(matrix.modules, 'trino-filesystem-s3') && contains(matrix.profile, 'cloud-tests'))
&& ! (contains(matrix.modules, 'trino-filesystem-azure') && contains(matrix.profile, 'cloud-tests'))
&& ! (contains(matrix.modules, 'trino-hdfs') && contains(matrix.profile, 'cloud-tests'))
@@ -655,17 +656,17 @@ jobs:
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY}"
- name: Cloud Snowflake Tests
+ id: tests-snowflake
env:
- SNOWFLAKE_URL: ${{ secrets.SNOWFLAKE_URL }}
- SNOWFLAKE_USER: ${{ secrets.SNOWFLAKE_USER }}
+ SNOWFLAKE_URL: ${{ vars.SNOWFLAKE_URL }}
+ SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
- SNOWFLAKE_DATABASE: ${{ secrets.SNOWFLAKE_DATABASE }}
- SNOWFLAKE_ROLE: ${{ secrets.SNOWFLAKE_ROLE }}
- SNOWFLAKE_WAREHOUSE: ${{ secrets.SNOWFLAKE_WAREHOUSE }}
- if: matrix.modules == 'plugin/trino-snowflake' && !contains(matrix.profile, 'cloud-tests') && (env.SNOWFLAKE_URL != '' && env.SNOWFLAKE_USER != '' && env.SNOWFLAKE_PASSWORD != '')
+ SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
+ SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
+ SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
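+ # Run only for the trino-snowflake cloud-tests matrix entry, and only when SNOWFLAKE_URL is configured or the secrets presence check is explicitly skipped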
+ if: matrix.modules == 'plugin/trino-snowflake' && contains(matrix.profile, 'cloud-tests') && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.SNOWFLAKE_URL != '')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-snowflake -Pcloud-tests \
- -Dconnector.name="snowflake" \
-Dsnowflake.test.server.url="${SNOWFLAKE_URL}" \
-Dsnowflake.test.server.user="${SNOWFLAKE_USER}" \
-Dsnowflake.test.server.password="${SNOWFLAKE_PASSWORD}" \
@@ -952,6 +953,12 @@ jobs:
DATABRICKS_TOKEN:
GCP_CREDENTIALS_KEY:
GCP_STORAGE_BUCKET:
+ SNOWFLAKE_URL:
+ SNOWFLAKE_USER:
+ SNOWFLAKE_PASSWORD:
+ SNOWFLAKE_DATABASE:
+ SNOWFLAKE_ROLE:
+ SNOWFLAKE_WAREHOUSE:
TESTCONTAINERS_NEVER_PULL: true
run: |
# converts filtered YAML file into JSON
@@ -1019,6 +1026,12 @@ jobs:
DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }}
GCP_STORAGE_BUCKET: ${{ vars.GCP_STORAGE_BUCKET }}
+ SNOWFLAKE_URL: ${{ vars.SNOWFLAKE_URL }}
+ SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
+ SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
+ SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
+ SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
+ SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
run: |
exec testing/trino-product-tests-launcher/target/trino-product-tests-launcher-*-executable.jar suite run \
--suite ${{ matrix.suite }} \
diff --git a/plugin/trino-snowflake/pom.xml b/plugin/trino-snowflake/pom.xml
index 8b7c218c3291..70f5298e6a3e 100644
--- a/plugin/trino-snowflake/pom.xml
+++ b/plugin/trino-snowflake/pom.xml
@@ -15,7 +15,6 @@
     <properties>
         <air.main.basedir>${project.parent.basedir}</air.main.basedir>
-        <air.test.jvm.additional-arguments>--add-opens=java.base/java.nio=ALL-UNNAMED</air.test.jvm.additional-arguments>
     </properties>
@@ -34,11 +33,6 @@
             <artifactId>configuration</artifactId>
         </dependency>

-        <dependency>
-            <groupId>io.airlift</groupId>
-            <artifactId>log</artifactId>
-        </dependency>
-
         <dependency>
             <groupId>io.trino</groupId>
             <artifactId>trino-base-jdbc</artifactId>
@@ -52,7 +46,7 @@
         <dependency>
             <groupId>net.snowflake</groupId>
             <artifactId>snowflake-jdbc</artifactId>
-            <version>3.13.32</version>
+            <version>3.15.0</version>
         </dependency>
@@ -92,6 +86,12 @@
             <scope>provided</scope>
         </dependency>

+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>log</artifactId>
+            <scope>runtime</scope>
+        </dependency>
+
         <dependency>
             <groupId>io.airlift</groupId>
@@ -201,7 +201,6 @@
                         <exclude>**/TestSnowflakeConnectorTest.java</exclude>
-                        <exclude>**/TestSnowflakePlugin.java</exclude>
                         <exclude>**/TestSnowflakeTypeMapping.java</exclude>
@@ -216,6 +215,9 @@
                 <activeByDefault>false</activeByDefault>
             </activation>
+            <properties>
+                <air.test.jvm.additional-arguments>${air.test.jvm.additional-arguments.default} --add-opens=java.base/java.nio=ALL-UNNAMED</air.test.jvm.additional-arguments>
+            </properties>
@@ -223,10 +225,7 @@
                     <artifactId>maven-surefire-plugin</artifactId>
-                        <include>**/TestSnowflakeClient.java</include>
-                        <include>**/TestSnowflakeConfig.java</include>
                         <include>**/TestSnowflakeConnectorTest.java</include>
-                        <include>**/TestSnowflakePlugin.java</include>
                         <include>**/TestSnowflakeTypeMapping.java</include>
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
index 1c6b2da99354..7f0d7c1bd795 100644
--- a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
@@ -16,7 +16,6 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
-import io.airlift.log.Logger;
import io.trino.plugin.base.aggregation.AggregateFunctionRewriter;
import io.trino.plugin.base.aggregation.AggregateFunctionRule;
import io.trino.plugin.base.expression.ConnectorExpressionRewriter;
@@ -34,7 +33,6 @@
import io.trino.plugin.jdbc.ObjectWriteFunction;
import io.trino.plugin.jdbc.PredicatePushdownController;
import io.trino.plugin.jdbc.QueryBuilder;
-import io.trino.plugin.jdbc.SliceReadFunction;
import io.trino.plugin.jdbc.SliceWriteFunction;
import io.trino.plugin.jdbc.StandardColumnMappings;
import io.trino.plugin.jdbc.WriteMapping;
@@ -91,7 +89,6 @@
import java.util.function.BiFunction;
import static com.google.common.base.Preconditions.checkArgument;
-import static io.airlift.slice.Slices.utf8Slice;
import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND;
@@ -108,11 +105,12 @@ public class SnowflakeClient
All TIME values must be between 00:00:00 and 23:59:59.999999999. TIME internally stores “wallclock” time, and all operations on TIME values are performed without taking any time zone into consideration.
*/
private static final int MAX_SUPPORTED_TEMPORAL_PRECISION = 9;
- private static final Logger log = Logger.get(SnowflakeClient.class);
+
private static final DateTimeFormatter SNOWFLAKE_DATETIME_FORMATTER = DateTimeFormatter.ofPattern("u-MM-dd'T'HH:mm:ss.SSSSSSSSSXXX");
private static final DateTimeFormatter SNOWFLAKE_DATE_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd");
private static final DateTimeFormatter SNOWFLAKE_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("u-MM-dd'T'HH:mm:ss.SSSSSSSSS");
private static final DateTimeFormatter SNOWFLAKE_TIME_FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSSSSS");
+ private static final TimeZone UTC_TZ = TimeZone.getTimeZone(ZoneId.of("UTC"));
 private final AggregateFunctionRewriter<JdbcExpression, ?> aggregateFunctionRewriter;
private interface WriteMappingFunction
@@ -125,8 +123,6 @@ private interface ColumnMappingFunction
 Optional<ColumnMapping> convert(JdbcTypeHandle typeHandle);
}
- private static final TimeZone UTC_TZ = TimeZone.getTimeZone(ZoneId.of("UTC"));
-
@Inject
public SnowflakeClient(
BaseJdbcConfig config,
@@ -183,9 +179,11 @@ public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connect
}
 final Map<String, ColumnMappingFunction> snowflakeColumnMappings = ImmutableMap.<String, ColumnMappingFunction>builder()
- .put("time", handle -> { return Optional.of(timeColumnMapping(handle.getRequiredDecimalDigits())); })
- .put("date", handle -> { return Optional.of(ColumnMapping.longMapping(DateType.DATE, (resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(), snowFlakeDateWriter())); })
- .put("varchar", handle -> { return Optional.of(varcharColumnMapping(handle.getRequiredColumnSize())); })
+ .put("time", handle -> Optional.of(timeColumnMapping(handle.getRequiredDecimalDigits())))
+ .put("timestampntz", handle -> Optional.of(timestampColumnMapping(handle.getRequiredDecimalDigits())))
+ .put("timestamptz", handle -> Optional.of(timestampTZColumnMapping(handle.getRequiredDecimalDigits())))
+ .put("date", handle -> Optional.of(ColumnMapping.longMapping(DateType.DATE, (resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(), snowFlakeDateWriter())))
+ .put("varchar", handle -> Optional.of(varcharColumnMapping(handle.getRequiredColumnSize())))
.put("number", handle -> {
int decimalDigits = handle.getRequiredDecimalDigits();
int precision = handle.getRequiredColumnSize() + Math.max(-decimalDigits, 0);
@@ -231,41 +229,15 @@ public WriteMapping toWriteMapping(ConnectorSession session, Type type)
}
 final Map<String, WriteMappingFunction> snowflakeWriteMappings = ImmutableMap.<String, WriteMappingFunction>builder()
- .put("TimeType", writeType -> {
- return WriteMapping.longMapping("time", timeWriteFunction(((TimeType) writeType).getPrecision()));
- })
- .put("ShortTimestampType", writeType -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWriter(writeType);
- return myMap;
- })
- .put("ShortTimestampWithTimeZoneType", writeType -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType);
- return myMap;
- })
- .put("LongTimestampType", writeType -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType);
- return myMap;
- })
- .put("LongTimestampWithTimeZoneType", writeType -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType);
- return myMap;
- })
- .put("VarcharType", writeType -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeVarCharWriter(writeType);
- return myMap;
- })
- .put("CharType", writeType -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeCharWriter(writeType);
- return myMap;
- })
- .put("LongDecimalType", writeType -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeDecimalWriter(writeType);
- return myMap;
- })
- .put("ShortDecimalType", writeType -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeDecimalWriter(writeType);
- return myMap;
- })
+ .put("TimeType", writeType -> WriteMapping.longMapping("time", timeWriteFunction(((TimeType) writeType).getPrecision())))
+ .put("ShortTimestampType", writeType -> SnowflakeClient.snowFlakeTimestampWriter(writeType))
+ .put("ShortTimestampWithTimeZoneType", writeType -> SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType))
+ .put("LongTimestampType", writeType -> SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType))
+ .put("LongTimestampWithTimeZoneType", writeType -> SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType))
+ .put("VarcharType", writeType -> SnowflakeClient.snowFlakeVarCharWriter(writeType))
+ .put("CharType", writeType -> SnowflakeClient.snowFlakeCharWriter(writeType))
+ .put("LongDecimalType", writeType -> SnowflakeClient.snowFlakeDecimalWriter(writeType))
+ .put("ShortDecimalType", writeType -> SnowflakeClient.snowFlakeDecimalWriter(writeType))
.buildOrThrow();
WriteMappingFunction writeMappingFunction = snowflakeWriteMappings.get(simple);
@@ -306,11 +278,6 @@ public void setColumnType(ConnectorSession session, JdbcTableHandle handle, Jdbc
throw new TrinoException(NOT_SUPPORTED, "This connector does not support setting column types");
}
- private static SliceReadFunction variantReadFunction()
- {
- return (resultSet, columnIndex) -> utf8Slice(resultSet.getString(columnIndex).replaceAll("^\"|\"$", ""));
- }
-
private static ColumnMapping columnMappingPushdown(ColumnMapping mapping)
{
if (mapping.getPredicatePushdownController() == PredicatePushdownController.DISABLE_PUSHDOWN) {
@@ -333,6 +300,87 @@ private static ColumnMapping timeColumnMapping(int precision)
PredicatePushdownController.FULL_PUSHDOWN);
}
+ private static ColumnMapping timestampTZColumnMapping(int precision)
+ {
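+ // Trino's packed long encoding for timestamp with time zone only holds millisecond precision, so precisions above 3 fall back to LongTimestampWithTimeZone objects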
+ if (precision <= 3) {
+ return ColumnMapping.longMapping(TimestampWithTimeZoneType.createTimestampWithTimeZoneType(precision),
+ (resultSet, columnIndex) -> {
+ ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
+ return DateTimeEncoding.packDateTimeWithZone(timestamp.toInstant().toEpochMilli(), timestamp.getZone().getId());
+ },
+ timestampWithTZWriter(), PredicatePushdownController.FULL_PUSHDOWN);
+ }
+ else {
+ return ColumnMapping.objectMapping(TimestampWithTimeZoneType.createTimestampWithTimeZoneType(precision), longTimestampWithTimezoneReadFunction(), longTimestampWithTZWriteFunction());
+ }
+ }
+
+ private static LongWriteFunction timestampWithTZWriter()
+ {
+ return (statement, index, encodedTimeWithZone) -> {
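+ // Unpack the UTC millis and zone key from the packed long, then bind the value as text in the Snowflake datetime format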
+ Instant timeI = Instant.ofEpochMilli(DateTimeEncoding.unpackMillisUtc(encodedTimeWithZone));
+ ZoneId zone = ZoneId.of(DateTimeEncoding.unpackZoneKey(encodedTimeWithZone).getId());
+ statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(timeI.atZone(zone)));
+ };
+ }
+
+ private static ObjectReadFunction longTimestampWithTimezoneReadFunction()
+ {
+ return ObjectReadFunction.of(LongTimestampWithTimeZone.class, (resultSet, columnIndex) -> {
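+ // Parse the textual TIMESTAMP_TZ value and keep sub-millisecond precision by converting nanos of the second to picoseconds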
+ ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
+ return LongTimestampWithTimeZone.fromEpochSecondsAndFraction(timestamp.toEpochSecond(),
+ (long) timestamp.getNano() * Timestamps.PICOSECONDS_PER_NANOSECOND,
+ TimeZoneKey.getTimeZoneKey(timestamp.getZone().getId()));
+ });
+ }
+
+ private static ObjectWriteFunction longTimestampWithTZWriteFunction()
+ {
+ return ObjectWriteFunction.of(LongTimestampWithTimeZone.class, (statement, index, value) -> {
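+ // Rebuild an Instant from the epoch millis and picos-of-milli carried by the value, then format it in its original zone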
+ long epoMilli = value.getEpochMillis();
+ long epoSeconds = Math.floorDiv(epoMilli, Timestamps.MILLISECONDS_PER_SECOND);
+ long adjNano = (long) Math.floorMod(epoMilli, Timestamps.MILLISECONDS_PER_SECOND) * Timestamps.NANOSECONDS_PER_MILLISECOND + value.getPicosOfMilli() / Timestamps.PICOSECONDS_PER_NANOSECOND;
+ ZoneId zone = TimeZoneKey.getTimeZoneKey(value.getTimeZoneKey()).getZoneId();
+ Instant timeI = Instant.ofEpochSecond(epoSeconds, adjNano);
+ statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(ZonedDateTime.ofInstant(timeI, zone)));
+ });
+ }
+
+ private static ColumnMapping timestampColumnMapping(int precision)
+ {
+ // <= 6 fits into a long
+ if (precision <= 6) {
+ return ColumnMapping.longMapping(TimestampType.createTimestampType(precision), (resultSet, columnIndex) -> StandardColumnMappings.toTrinoTimestamp(TimestampType.createTimestampType(precision), toLocalDateTime(resultSet, columnIndex)), timestampWriteFunction());
+ }
+
+ // Too big. Put it in an object
+ return ColumnMapping.objectMapping(TimestampType.createTimestampType(precision), longTimestampReader(), longTimestampWriter(precision));
+ }
+
+ private static LocalDateTime toLocalDateTime(ResultSet resultSet, int columnIndex)
+ throws SQLException
+ {
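+ // Read the value with a UTC calendar so the JDBC driver does not shift the wall-clock timestamp into the JVM default zone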
+ Calendar calendar = new GregorianCalendar(UTC_TZ, Locale.ENGLISH);
+ calendar.setTime(new Date(0));
+ Timestamp ts = resultSet.getTimestamp(columnIndex, calendar);
+ return LocalDateTime.ofInstant(Instant.ofEpochMilli(ts.getTime()), ZoneOffset.UTC);
+ }
+
+ private static ObjectReadFunction longTimestampReader()
+ {
+ return ObjectReadFunction.of(LongTimestamp.class, (resultSet, columnIndex) -> {
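+ // Split the JDBC timestamp into epoch microseconds plus leftover picoseconds, as LongTimestamp expects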
+ Calendar calendar = new GregorianCalendar(UTC_TZ, Locale.ENGLISH);
+ calendar.setTime(new Date(0));
+ Timestamp ts = resultSet.getTimestamp(columnIndex, calendar);
+ long epochMillis = ts.getTime();
+ int nanosInTheSecond = ts.getNanos();
+ int nanosInTheMilli = nanosInTheSecond % Timestamps.NANOSECONDS_PER_MILLISECOND;
+ long micro = epochMillis * Timestamps.MICROSECONDS_PER_MILLISECOND + (nanosInTheMilli / Timestamps.NANOSECONDS_PER_MICROSECOND);
+ int picosOfMicro = nanosInTheMilli % 1000 * 1000;
+ return new LongTimestamp(micro, picosOfMicro);
+ });
+ }
+
private static LongWriteFunction timeWriteFunction(int precision)
{
checkArgument(precision <= MAX_SUPPORTED_TEMPORAL_PRECISION, "Unsupported precision: %s", precision);
@@ -368,17 +416,6 @@ private static ColumnMapping varcharColumnMapping(int varcharLength)
StandardColumnMappings.varcharWriteFunction());
}
- private static ObjectReadFunction longTimestampWithTimezoneReadFunction()
- {
- return ObjectReadFunction.of(LongTimestampWithTimeZone.class, (resultSet, columnIndex) -> {
- ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
- return LongTimestampWithTimeZone.fromEpochSecondsAndFraction(
- timestamp.toEpochSecond(),
- (long) timestamp.getNano() * PICOSECONDS_PER_NANOSECOND,
- TimeZoneKey.getTimeZoneKey(timestamp.getZone().getId()));
- });
- }
-
private static ObjectWriteFunction longTimestampWithTzWriteFunction()
{
return ObjectWriteFunction.of(LongTimestampWithTimeZone.class, (statement, index, value) -> {
@@ -476,51 +513,4 @@ private static LongWriteFunction timestampWithTimezoneWriteFunction()
statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(instant.atZone(zone)));
};
}
-
- private static ObjectReadFunction longTimestampReader()
- {
- return ObjectReadFunction.of(LongTimestamp.class, (resultSet, columnIndex) -> {
- Calendar calendar = new GregorianCalendar(UTC_TZ, Locale.ENGLISH);
- calendar.setTime(new Date(0));
- Timestamp ts = resultSet.getTimestamp(columnIndex, calendar);
- long epochMillis = ts.getTime();
- int nanosInTheSecond = ts.getNanos();
- int nanosInTheMilli = nanosInTheSecond % NANOSECONDS_PER_MILLISECOND;
- long micro = epochMillis * Timestamps.MICROSECONDS_PER_MILLISECOND + (nanosInTheMilli / Timestamps.NANOSECONDS_PER_MICROSECOND);
- int picosOfMicro = nanosInTheMilli % 1000 * 1000;
- return new LongTimestamp(micro, picosOfMicro);
- });
- }
-
- private static ColumnMapping timestampColumnMapping(JdbcTypeHandle typeHandle)
- {
- int precision = typeHandle.getRequiredDecimalDigits();
- String jdbcTypeName = typeHandle.getJdbcTypeName()
- .orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
- int type = typeHandle.getJdbcType();
- log.debug("timestampColumnMapping: jdbcTypeName(%s):%s precision:%s", type, jdbcTypeName, precision);
-
- // <= 6 fits into a long
- if (precision <= 6) {
- return ColumnMapping.longMapping(
- TimestampType.createTimestampType(precision),
- (resultSet, columnIndex) -> StandardColumnMappings.toTrinoTimestamp(TimestampType.createTimestampType(precision), toLocalDateTime(resultSet, columnIndex)),
- timestampWriteFunction());
- }
-
- // Too big. Put it in an object
- return ColumnMapping.objectMapping(
- TimestampType.createTimestampType(precision),
- longTimestampReader(),
- longTimestampWriter(precision));
- }
-
- private static LocalDateTime toLocalDateTime(ResultSet resultSet, int columnIndex)
- throws SQLException
- {
- Calendar calendar = new GregorianCalendar(UTC_TZ, Locale.ENGLISH);
- calendar.setTime(new Date(0));
- Timestamp ts = resultSet.getTimestamp(columnIndex, calendar);
- return LocalDateTime.ofInstant(Instant.ofEpochMilli(ts.getTime()), ZoneOffset.UTC);
- }
}
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java
index 587ca8d11faa..cbd44dc18d01 100644
--- a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java
@@ -65,11 +65,10 @@ public ConnectionFactory getConnectionFactory(BaseJdbcConfig baseJdbcConfig, Sno
properties.setProperty("TIMESTAMP_TZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM");
properties.setProperty("TIMESTAMP_LTZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM");
properties.setProperty("TIME_OUTPUT_FORMAT", "HH24:MI:SS.FF9");
- snowflakeConfig.getTimestampNoTimezoneAsUTC().ifPresent(as_utc -> properties.setProperty("JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC", as_utc ? "true" : "false"));
// Support for Corporate proxies
- if (snowflakeConfig.getHTTPProxy().isPresent()) {
- String proxy = snowflakeConfig.getHTTPProxy().get();
+ if (snowflakeConfig.getHttpProxy().isPresent()) {
+ String proxy = snowflakeConfig.getHttpProxy().get();
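+ // Parse the proxy URL so its host and port can be passed on to the Snowflake JDBC driver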
URL url = new URL(proxy);
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java
index c002728f85b7..c1f7f0a14804 100644
--- a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java
@@ -14,6 +14,7 @@
package io.trino.plugin.snowflake;
import io.airlift.configuration.Config;
+import io.airlift.configuration.ConfigSecuritySensitive;
import java.util.Optional;
@@ -23,7 +24,6 @@ public class SnowflakeConfig
private String database;
private String role;
private String warehouse;
- private Boolean timestampNoTimezoneAsUTC;
private String httpProxy;
 public Optional<String> getAccount()
@@ -74,18 +74,14 @@ public SnowflakeConfig setWarehouse(String warehouse)
return this;
}
- public Optional<Boolean> getTimestampNoTimezoneAsUTC()
- {
- return Optional.ofNullable(timestampNoTimezoneAsUTC);
- }
-
- public Optional<String> getHTTPProxy()
+ public Optional<String> getHttpProxy()
{
return Optional.ofNullable(httpProxy);
}
@Config("snowflake.http-proxy")
- public SnowflakeConfig setHTTPProxy(String httpProxy)
+ @ConfigSecuritySensitive
+ public SnowflakeConfig setHttpProxy(String httpProxy)
{
this.httpProxy = httpProxy;
return this;
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java
deleted file mode 100644
index 0b64ddd61ee1..000000000000
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java
+++ /dev/null
@@ -1,606 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.snowflake;
-
-import io.trino.Session;
-import io.trino.plugin.jdbc.BaseJdbcConnectorTest;
-import io.trino.testing.MaterializedResult;
-import io.trino.testing.TestingConnectorBehavior;
-import io.trino.testing.sql.TestTable;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.TestInstance;
-import org.junit.jupiter.api.parallel.Execution;
-
-import java.util.Optional;
-import java.util.OptionalInt;
-
-import static com.google.common.base.Strings.nullToEmpty;
-import static io.trino.plugin.snowflake.TestingSnowflakeServer.TEST_SCHEMA;
-import static io.trino.spi.connector.ConnectorMetadata.MODIFYING_ROWS_MESSAGE;
-import static io.trino.spi.type.VarcharType.VARCHAR;
-import static io.trino.testing.MaterializedResult.resultBuilder;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_CREATE_TABLE;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_CREATE_TABLE_WITH_DATA;
-import static io.trino.testing.TestingNames.randomNameSuffix;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.junit.jupiter.api.Assumptions.abort;
-import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
-import static org.junit.jupiter.api.parallel.ExecutionMode.CONCURRENT;
-
-@TestInstance(PER_CLASS)
-@Execution(CONCURRENT)
-public abstract class BaseSnowflakeConnectorTest
- extends BaseJdbcConnectorTest
-{
- protected TestingSnowflakeServer server;
-
- @Override
- protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
- {
- switch (connectorBehavior) {
- case SUPPORTS_AGGREGATION_PUSHDOWN:
- case SUPPORTS_TOPN_PUSHDOWN:
- case SUPPORTS_LIMIT_PUSHDOWN:
- return false;
- case SUPPORTS_COMMENT_ON_COLUMN:
- case SUPPORTS_ADD_COLUMN_WITH_COMMENT:
- case SUPPORTS_CREATE_TABLE_WITH_TABLE_COMMENT:
- case SUPPORTS_CREATE_TABLE_WITH_COLUMN_COMMENT:
- case SUPPORTS_SET_COLUMN_TYPE:
- return false;
- case SUPPORTS_ROW_TYPE:
- case SUPPORTS_ARRAY:
- return false;
- default:
- return super.hasBehavior(connectorBehavior);
- }
- }
-
- @Override
- protected TestTable createTableWithDefaultColumns()
- {
- return new TestTable(
- onRemoteDatabase(),
- TEST_SCHEMA,
- "(col_required BIGINT NOT NULL," +
- "col_nullable BIGINT," +
- "col_default BIGINT DEFAULT 43," +
- "col_nonnull_default BIGINT NOT NULL DEFAULT 42," +
- "col_required2 BIGINT NOT NULL)");
- }
-
- @Override
- protected TestTable createTableWithUnsupportedColumn()
- {
- return new TestTable(
- onRemoteDatabase(),
- TEST_SCHEMA,
- "(one bigint, two decimal(38,0), three varchar(10))");
- }
-
- @Override
- protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup)
- {
- String typeName = dataMappingTestSetup.getTrinoTypeName();
- // TODO: Test fails with these types
- // Error: No result for query: SELECT row_id FROM test_data_mapping_smoke_real_3u8xo6hp59 WHERE rand() = 42 OR value = REAL '567.123'
- // In the testDataMappingSmokeTestDataProvider(), the type sampleValueLiteral of type real should be "DOUBLE" rather than "REAL".
- if (typeName.equals("real")) {
- return Optional.empty();
- }
- // Error: Failed to insert data: SQL compilation error: error line 1 at position 130
- if (typeName.equals("time")
- || typeName.equals("time(6)")
- || typeName.equals("timestamp(6)")) {
- return Optional.empty();
- }
- // Error: not equal
- if (typeName.equals("char(3)")) {
- return Optional.empty();
- }
- return Optional.of(dataMappingTestSetup);
- }
-
- @Override
- protected boolean isColumnNameRejected(Exception exception, String columnName, boolean delimited)
- {
- return nullToEmpty(exception.getMessage()).matches(".*(Incorrect column name).*");
- }
-
- @Override
- protected MaterializedResult getDescribeOrdersResult()
- {
- // Override this test because the type of row "shippriority" should be bigint rather than integer for snowflake case
- return resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
- .row("orderkey", "bigint", "", "")
- .row("custkey", "bigint", "", "")
- .row("orderstatus", "varchar(1)", "", "")
- .row("totalprice", "double", "", "")
- .row("orderdate", "date", "", "")
- .row("orderpriority", "varchar(15)", "", "")
- .row("clerk", "varchar(15)", "", "")
- .row("shippriority", "bigint", "", "")
- .row("comment", "varchar(79)", "", "")
- .build();
- }
-
- @Test
- @Override
- public void testShowColumns()
- {
- assertThat(query("SHOW COLUMNS FROM orders")).matches(getDescribeOrdersResult());
- }
-
- @Test
- public void testViews()
- {
- String tableName = "test_view_" + randomNameSuffix();
- onRemoteDatabase().execute("CREATE OR REPLACE VIEW tpch." + tableName + " AS SELECT * FROM tpch.orders");
- assertQuery("SELECT orderkey FROM " + tableName, "SELECT orderkey FROM orders");
- onRemoteDatabase().execute("DROP VIEW IF EXISTS tpch." + tableName);
- }
-
- @Test
- @Override
- public void testShowCreateTable()
- {
- // Override this test because the type of row "shippriority" should be bigint rather than integer for snowflake case
- assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue())
- .isEqualTo("CREATE TABLE snowflake.tpch.orders (\n" +
- " orderkey bigint,\n" +
- " custkey bigint,\n" +
- " orderstatus varchar(1),\n" +
- " totalprice double,\n" +
- " orderdate date,\n" +
- " orderpriority varchar(15),\n" +
- " clerk varchar(15),\n" +
- " shippriority bigint,\n" +
- " comment varchar(79)\n" +
- ")\n" +
- "COMMENT ''");
- }
-
- @Test
- @Override
- public void testAddNotNullColumn()
- {
- assertThatThrownBy(super::testAddNotNullColumn)
- .isInstanceOf(AssertionError.class)
- .hasMessage("Unexpected failure when adding not null column");
- }
-
- @Test
- @Override
- public void testCharVarcharComparison()
- {
- assertThatThrownBy(super::testCharVarcharComparison)
- .hasMessageContaining("For query")
- .hasMessageContaining("Actual rows")
- .hasMessageContaining("Expected rows");
- }
-
- @Test
- @Override
- public void testCountDistinctWithStringTypes()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testInsertInPresenceOfNotSupportedColumn()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testAggregationPushdown()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testDistinctAggregationPushdown()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testNumericAggregationPushdown()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testLimitPushdown()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testInsertIntoNotNullColumn()
- {
- // TODO: java.lang.UnsupportedOperationException: This method should be overridden
- assertThatThrownBy(super::testInsertIntoNotNullColumn);
- }
-
- @Test
- @Override
- public void testDeleteWithLike()
- {
- assertThatThrownBy(super::testDeleteWithLike)
- .hasStackTraceContaining("TrinoException: " + MODIFYING_ROWS_MESSAGE);
- }
-
- @Test
- @Override
- public void testCreateTableAsSelect()
- {
- String tableName = "test_ctas" + randomNameSuffix();
- if (!hasBehavior(SUPPORTS_CREATE_TABLE_WITH_DATA)) {
- assertQueryFails("CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation", "This connector does not support creating tables with data");
- return;
- }
- assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation", "SELECT count(*) FROM nation");
- assertTableColumnNames(tableName, "name", "regionkey");
-
- assertEquals(getTableComment(getSession().getCatalog().orElseThrow(), getSession().getSchema().orElseThrow(), tableName), "");
- assertUpdate("DROP TABLE " + tableName);
-
- // Some connectors support CREATE TABLE AS but not the ordinary CREATE TABLE. Let's test CTAS IF NOT EXISTS with a table that is guaranteed to exist.
- assertUpdate("CREATE TABLE IF NOT EXISTS nation AS SELECT nationkey, regionkey FROM nation", 0);
- assertTableColumnNames("nation", "nationkey", "name", "regionkey", "comment");
-
- assertCreateTableAsSelect(
- "SELECT nationkey, name, regionkey FROM nation",
- "SELECT count(*) FROM nation");
-
- assertCreateTableAsSelect(
- "SELECT mktsegment, sum(acctbal) x FROM customer GROUP BY mktsegment",
- "SELECT count(DISTINCT mktsegment) FROM customer");
-
- assertCreateTableAsSelect(
- "SELECT count(*) x FROM nation JOIN region ON nation.regionkey = region.regionkey",
- "SELECT 1");
-
- assertCreateTableAsSelect(
- "SELECT nationkey FROM nation ORDER BY nationkey LIMIT 10",
- "SELECT 10");
-
- assertCreateTableAsSelect(
- "SELECT * FROM nation WITH DATA",
- "SELECT * FROM nation",
- "SELECT count(*) FROM nation");
-
- assertCreateTableAsSelect(
- "SELECT * FROM nation WITH NO DATA",
- "SELECT * FROM nation LIMIT 0",
- "SELECT 0");
-
- // Tests for CREATE TABLE with UNION ALL: exercises PushTableWriteThroughUnion optimizer
-
- assertCreateTableAsSelect(
- "SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 0 UNION ALL " +
- "SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 1",
- "SELECT name, nationkey, regionkey FROM nation",
- "SELECT count(*) FROM nation");
-
- assertCreateTableAsSelect(
- Session.builder(getSession()).setSystemProperty("redistribute_writes", "true").build(),
- "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey FROM nation UNION ALL " +
- "SELECT 1234567890, 123",
- "SELECT nationkey, regionkey FROM nation UNION ALL " +
- "SELECT 1234567890, 123",
- "SELECT count(*) + 1 FROM nation");
-
- assertCreateTableAsSelect(
- Session.builder(getSession()).setSystemProperty("redistribute_writes", "false").build(),
- "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey FROM nation UNION ALL " +
- "SELECT 1234567890, 123",
- "SELECT nationkey, regionkey FROM nation UNION ALL " +
- "SELECT 1234567890, 123",
- "SELECT count(*) + 1 FROM nation");
-
- tableName = "test_ctas" + randomNameSuffix();
- assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE " + tableName + " AS SELECT name FROM nation");
- assertQuery("SELECT * from " + tableName, "SELECT name FROM nation");
- assertUpdate("DROP TABLE " + tableName);
- }
-
- @Test
- @Override
- public void testCreateTable()
- {
- String tableName = "test_create_" + randomNameSuffix();
- if (!hasBehavior(SUPPORTS_CREATE_TABLE)) {
- assertQueryFails("CREATE TABLE " + tableName + " (a bigint, b double, c varchar(50))", "This connector does not support creating tables");
- return;
- }
-
- assertThat(computeActual("SHOW TABLES").getOnlyColumnAsSet()) // prime the cache, if any
- .doesNotContain(tableName);
- assertUpdate("CREATE TABLE " + tableName + " (a bigint, b double, c varchar(50))");
- assertTrue(getQueryRunner().tableExists(getSession(), tableName));
- assertThat(computeActual("SHOW TABLES").getOnlyColumnAsSet())
- .contains(tableName);
- assertTableColumnNames(tableName, "a", "b", "c");
- assertEquals(getTableComment(getSession().getCatalog().orElseThrow(), getSession().getSchema().orElseThrow(), tableName), "");
-
- assertUpdate("DROP TABLE " + tableName);
- assertFalse(getQueryRunner().tableExists(getSession(), tableName));
- assertThat(computeActual("SHOW TABLES").getOnlyColumnAsSet())
- .doesNotContain(tableName);
-
- assertQueryFails("CREATE TABLE " + tableName + " (a bad_type)", ".* Unknown type 'bad_type' for column 'a'");
- assertFalse(getQueryRunner().tableExists(getSession(), tableName));
-
- tableName = "test_cr_not_exists_" + randomNameSuffix();
- assertUpdate("CREATE TABLE " + tableName + " (a bigint, b varchar(50), c double)");
- assertTrue(getQueryRunner().tableExists(getSession(), tableName));
- assertTableColumnNames(tableName, "a", "b", "c");
-
- assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " (d bigint, e varchar(50))");
- assertTrue(getQueryRunner().tableExists(getSession(), tableName));
- assertTableColumnNames(tableName, "a", "b", "c");
-
- assertUpdate("DROP TABLE " + tableName);
- assertFalse(getQueryRunner().tableExists(getSession(), tableName));
-
- // Test CREATE TABLE LIKE
- tableName = "test_create_orig_" + randomNameSuffix();
- assertUpdate("CREATE TABLE " + tableName + " (a bigint, b double, c varchar(50))");
- assertTrue(getQueryRunner().tableExists(getSession(), tableName));
- assertTableColumnNames(tableName, "a", "b", "c");
-
- String tableNameLike = "test_create_like_" + randomNameSuffix();
- assertUpdate("CREATE TABLE " + tableNameLike + " (LIKE " + tableName + ", d bigint, e varchar(50))");
- assertTrue(getQueryRunner().tableExists(getSession(), tableNameLike));
- assertTableColumnNames(tableNameLike, "a", "b", "c", "d", "e");
-
- assertUpdate("DROP TABLE " + tableName);
- assertFalse(getQueryRunner().tableExists(getSession(), tableName));
-
- assertUpdate("DROP TABLE " + tableNameLike);
- assertFalse(getQueryRunner().tableExists(getSession(), tableNameLike));
- }
-
- @Test
- @Override
- public void testNativeQueryCreateStatement()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testNativeQueryInsertStatementTableExists()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testNativeQuerySelectUnsupportedType()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testCreateTableWithLongColumnName()
- {
- String tableName = "test_long_column" + randomNameSuffix();
- String baseColumnName = "col";
-
- int maxLength = maxColumnNameLength()
- // Assume 2^16 is enough for most use cases. Add a bit more to ensure 2^16 isn't actual limit.
- .orElse(65536 + 5);
-
- String validColumnName = baseColumnName + "z".repeat(maxLength - baseColumnName.length());
- assertUpdate("CREATE TABLE " + tableName + " (" + validColumnName + " bigint)");
- assertTrue(columnExists(tableName, validColumnName));
- assertUpdate("DROP TABLE " + tableName);
-
- if (maxColumnNameLength().isEmpty()) {
- return;
- }
- assertFalse(getQueryRunner().tableExists(getSession(), tableName));
- }
-
- @Test
- @Override
- public void testCreateTableWithLongTableName()
- {
- // TODO: Find the maximum table name length in Snowflake and enable this test.
- abort("TODO");
- }
-
- @Override
- protected OptionalInt maxColumnNameLength()
- {
- return OptionalInt.of(251);
- }
-
- @Test
- @Override
- public void testAlterTableAddLongColumnName()
- {
- String tableName = "test_long_column" + randomNameSuffix();
- assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
-
- String baseColumnName = "col";
- int maxLength = maxColumnNameLength()
- // Assume 2^16 is enough for most use cases. Add a bit more to ensure 2^16 isn't actual limit.
- .orElse(65536 + 5);
-
- String validTargetColumnName = baseColumnName + "z".repeat(maxLength - baseColumnName.length());
- assertUpdate("ALTER TABLE " + tableName + " ADD COLUMN " + validTargetColumnName + " int");
- assertTrue(getQueryRunner().tableExists(getSession(), tableName));
- assertQuery("SELECT x FROM " + tableName, "VALUES 123");
- assertUpdate("DROP TABLE " + tableName);
-
- if (maxColumnNameLength().isEmpty()) {
- return;
- }
-
- assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
- assertQuery("SELECT x FROM " + tableName, "VALUES 123");
- }
-
- @Test
- @Override
- public void testAlterTableRenameColumnToLongName()
- {
- String tableName = "test_long_column" + randomNameSuffix();
- assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
-
- String baseColumnName = "col";
- int maxLength = maxColumnNameLength()
- // Assume 2^16 is enough for most use cases. Add a bit more to ensure 2^16 isn't actual limit.
- .orElse(65536 + 5);
-
- String validTargetColumnName = baseColumnName + "z".repeat(maxLength - baseColumnName.length());
- assertUpdate("ALTER TABLE " + tableName + " RENAME COLUMN x TO " + validTargetColumnName);
- assertQuery("SELECT " + validTargetColumnName + " FROM " + tableName, "VALUES 123");
- assertUpdate("DROP TABLE " + tableName);
-
- if (maxColumnNameLength().isEmpty()) {
- return;
- }
-
- assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
- assertQuery("SELECT x FROM " + tableName, "VALUES 123");
- }
-
- @Test
- @Override
- public void testCreateSchemaWithLongName()
- {
- // TODO: Find the maximum table schema length in Snowflake and enable this test.
- abort("TODO");
- }
-
- @Test
- @Override
- public void testInsertArray()
- {
- // Snowflake does not support this feature.
- abort("Not supported");
- }
-
- @Test
- @Override
- public void testInsertRowConcurrently()
- {
- abort("TODO: Connection is already closed");
- }
-
- @Test
- @Override
- public void testNativeQueryColumnAlias()
- {
- abort("TODO: Table function system.query not registered");
- }
-
- @Test
- @Override
- public void testNativeQueryColumnAliasNotFound()
- {
- abort("TODO: Table function system.query not registered");
- }
-
- @Test
- @Override
- public void testNativeQueryIncorrectSyntax()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testNativeQueryInsertStatementTableDoesNotExist()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testNativeQueryParameters()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testNativeQuerySelectFromNation()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testNativeQuerySelectFromTestTable()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testNativeQuerySimple()
- {
- abort("TODO");
- }
-
- @Test
- @Override
- public void testRenameSchemaToLongName()
- {
- // TODO: Find the maximum table schema length in Snowflake and enable this test.
- abort("TODO");
- }
-
- @Test
- @Override
- public void testRenameTableToLongTableName()
- {
- // TODO: Find the maximum table length in Snowflake and enable this test.
- abort("TODO");
- }
-
- @Test
- @Override
- public void testCharTrailingSpace()
- {
- assertThatThrownBy(super::testCharVarcharComparison)
- .hasMessageContaining("For query")
- .hasMessageContaining("Actual rows")
- .hasMessageContaining("Expected rows");
- }
-
- @Test
- @Override
- public void testDescribeTable()
- {
- assertThat(query("DESCRIBE orders")).matches(getDescribeOrdersResult());
- }
-}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java
index 2f877068f88a..23e74bd6de3c 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java
@@ -55,13 +55,11 @@ public static DistributedQueryRunner createSnowflakeQueryRunner(
connectorProperties.putIfAbsent("snowflake.database", TestingSnowflakeServer.TEST_DATABASE);
connectorProperties.putIfAbsent("snowflake.role", TestingSnowflakeServer.TEST_ROLE);
connectorProperties.putIfAbsent("snowflake.warehouse", TestingSnowflakeServer.TEST_WAREHOUSE);
- if (TestingSnowflakeServer.TEST_PROXY != null) {
- connectorProperties.putIfAbsent("snowflake.httpProxy", TestingSnowflakeServer.TEST_PROXY);
- }
queryRunner.installPlugin(new SnowflakePlugin());
queryRunner.createCatalog("snowflake", "snowflake", connectorProperties);
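+ // Make sure the TPCH schema exists in Snowflake before copying the TPCH tables into it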
+ queryRunner.execute(createSession(), "CREATE SCHEMA IF NOT EXISTS " + TPCH_SCHEMA);
copyTpchTables(queryRunner, "tpch", TINY_SCHEMA_NAME, createSession(), tables);
return queryRunner;
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java
index ad4679d5566e..ef1344061a4f 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java
@@ -32,7 +32,7 @@ public void testDefaults()
.setDatabase(null)
.setRole(null)
.setWarehouse(null)
- .setHTTPProxy(null));
+ .setHttpProxy(null));
}
@Test
@@ -44,7 +44,6 @@ public void testExplicitPropertyMappings()
.put("snowflake.role", "MYROLE")
.put("snowflake.warehouse", "MYWAREHOUSE")
.put("snowflake.http-proxy", "MYPROXY")
- .put("snowflake.timestamp-no-timezone-as-utc", "true")
.buildOrThrow();
SnowflakeConfig expected = new SnowflakeConfig()
@@ -52,7 +51,7 @@ public void testExplicitPropertyMappings()
.setDatabase("MYDATABASE")
.setRole("MYROLE")
.setWarehouse("MYWAREHOUSE")
- .setHTTPProxy("MYPROXY");
+ .setHttpProxy("MYPROXY");
assertFullMapping(properties, expected);
}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java
index b448e5756c0b..84bbb18c4809 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java
@@ -14,13 +14,42 @@
package io.trino.plugin.snowflake;
import com.google.common.collect.ImmutableMap;
+import io.trino.Session;
+import io.trino.plugin.jdbc.BaseJdbcConnectorTest;
+import io.trino.testing.MaterializedResult;
import io.trino.testing.QueryRunner;
+import io.trino.testing.TestingConnectorBehavior;
import io.trino.testing.sql.SqlExecutor;
+import io.trino.testing.sql.TestTable;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.parallel.Execution;
+import java.util.Optional;
+import java.util.OptionalInt;
+
+import static com.google.common.base.Strings.nullToEmpty;
import static io.trino.plugin.snowflake.SnowflakeQueryRunner.createSnowflakeQueryRunner;
+import static io.trino.plugin.snowflake.TestingSnowflakeServer.TEST_SCHEMA;
+import static io.trino.spi.connector.ConnectorMetadata.MODIFYING_ROWS_MESSAGE;
+import static io.trino.spi.type.VarcharType.VARCHAR;
+import static io.trino.testing.MaterializedResult.resultBuilder;
+import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_CREATE_TABLE;
+import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_CREATE_TABLE_WITH_DATA;
+import static io.trino.testing.TestingNames.randomNameSuffix;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.abort;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
+import static org.junit.jupiter.api.parallel.ExecutionMode.CONCURRENT;
+@TestInstance(PER_CLASS)
+@Execution(CONCURRENT)
public class TestSnowflakeConnectorTest
- extends BaseSnowflakeConnectorTest
+ extends BaseJdbcConnectorTest
{
@Override
protected QueryRunner createQueryRunner()
@@ -32,6 +61,558 @@ protected QueryRunner createQueryRunner()
@Override
protected SqlExecutor onRemoteDatabase()
{
- return server::execute;
+ return TestingSnowflakeServer::execute;
+ }
+
+ @Override
+ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
+ {
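+ // Behaviors the Snowflake connector does not support; the base-class tests adapt accordingly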
+ return switch (connectorBehavior) {
+ case SUPPORTS_ADD_COLUMN_WITH_COMMENT,
+ SUPPORTS_AGGREGATION_PUSHDOWN,
+ SUPPORTS_ARRAY,
+ SUPPORTS_COMMENT_ON_COLUMN,
+ SUPPORTS_COMMENT_ON_TABLE,
+ SUPPORTS_CREATE_TABLE_WITH_COLUMN_COMMENT,
+ SUPPORTS_CREATE_TABLE_WITH_TABLE_COMMENT,
+ SUPPORTS_LIMIT_PUSHDOWN,
+ SUPPORTS_ROW_TYPE,
+ SUPPORTS_SET_COLUMN_TYPE,
+ SUPPORTS_TOPN_PUSHDOWN -> false;
+ default -> super.hasBehavior(connectorBehavior);
+ };
+ }
+
+ @Override
+ protected TestTable createTableWithDefaultColumns()
+ {
+ return new TestTable(
+ onRemoteDatabase(),
+ TEST_SCHEMA,
+ "(col_required BIGINT NOT NULL," +
+ "col_nullable BIGINT," +
+ "col_default BIGINT DEFAULT 43," +
+ "col_nonnull_default BIGINT NOT NULL DEFAULT 42," +
+ "col_required2 BIGINT NOT NULL)");
+ }
+
+ @Override
+ protected TestTable createTableWithUnsupportedColumn()
+ {
+ return new TestTable(
+ onRemoteDatabase(),
+ TEST_SCHEMA,
+ "(one bigint, two decimal(38,0), three varchar(10))");
+ }
+
+ @Override
+ protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup)
+ {
+ String typeName = dataMappingTestSetup.getTrinoTypeName();
+ // TODO: Test fails with these types
+ // Error: No result for query: SELECT row_id FROM test_data_mapping_smoke_real_3u8xo6hp59 WHERE rand() = 42 OR value = REAL '567.123'
+ // In testDataMappingSmokeTestDataProvider(), the sampleValueLiteral for the real type should be "DOUBLE" rather than "REAL".
+ if (typeName.equals("real")) {
+ return Optional.empty();
+ }
+ // Error: Failed to insert data: SQL compilation error: error line 1 at position 130
+ if (typeName.equals("time")
+ || typeName.equals("time(6)")
+ || typeName.equals("timestamp(6)")) {
+ return Optional.empty();
+ }
+ // Error: not equal
+ if (typeName.equals("char(3)")) {
+ return Optional.empty();
+ }
+ return Optional.of(dataMappingTestSetup);
+ }
+
+ @Override
+ protected boolean isColumnNameRejected(Exception exception, String columnName, boolean delimited)
+ {
+ return nullToEmpty(exception.getMessage()).matches(".*(Incorrect column name).*");
+ }
+
+ @Override
+ protected MaterializedResult getDescribeOrdersResult()
+ {
+ // Override because Snowflake maps the "shippriority" column to bigint rather than integer
+ return resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
+ .row("orderkey", "bigint", "", "")
+ .row("custkey", "bigint", "", "")
+ .row("orderstatus", "varchar(1)", "", "")
+ .row("totalprice", "double", "", "")
+ .row("orderdate", "date", "", "")
+ .row("orderpriority", "varchar(15)", "", "")
+ .row("clerk", "varchar(15)", "", "")
+ .row("shippriority", "bigint", "", "")
+ .row("comment", "varchar(79)", "", "")
+ .build();
+ }
+
+ @Test
+ @Override
+ public void testShowColumns()
+ {
+ assertThat(query("SHOW COLUMNS FROM orders")).result().matches(getDescribeOrdersResult());
+ }
+
+ @Test
+ public void testViews()
+ {
+ String tableName = "test_view_" + randomNameSuffix();
+ onRemoteDatabase().execute("CREATE OR REPLACE VIEW tpch." + tableName + " AS SELECT * FROM tpch.orders");
+ assertQuery("SELECT orderkey FROM " + tableName, "SELECT orderkey FROM orders");
+ onRemoteDatabase().execute("DROP VIEW IF EXISTS tpch." + tableName);
+ }
+
+ @Test
+ @Override
+ public void testShowCreateTable()
+ {
+ // Override this test because Snowflake maps the "shippriority" column to bigint rather than integer
+ assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue())
+ .isEqualTo("CREATE TABLE snowflake.tpch.orders (\n" +
+ " orderkey bigint,\n" +
+ " custkey bigint,\n" +
+ " orderstatus varchar(1),\n" +
+ " totalprice double,\n" +
+ " orderdate date,\n" +
+ " orderpriority varchar(15),\n" +
+ " clerk varchar(15),\n" +
+ " shippriority bigint,\n" +
+ " comment varchar(79)\n" +
+ ")\n" +
+ "COMMENT ''");
+ }
+
+ @Test
+ @Override
+ public void testAddNotNullColumn()
+ {
+ assertThatThrownBy(super::testAddNotNullColumn)
+ .isInstanceOf(AssertionError.class)
+ .hasMessage("Unexpected failure when adding not null column");
+ }
+
+ @Test
+ @Override
+ public void testCharVarcharComparison()
+ {
+ assertThatThrownBy(super::testCharVarcharComparison)
+ .hasMessageContaining("For query")
+ .hasMessageContaining("Actual rows")
+ .hasMessageContaining("Expected rows");
+ }
+
+ @Test
+ @Override
+ public void testCountDistinctWithStringTypes()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testInsertInPresenceOfNotSupportedColumn()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testAggregationPushdown()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testDistinctAggregationPushdown()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNumericAggregationPushdown()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testLimitPushdown()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testInsertIntoNotNullColumn()
+ {
+ // TODO: java.lang.UnsupportedOperationException: This method should be overridden
+ assertThatThrownBy(super::testInsertIntoNotNullColumn);
+ }
+
+ @Test
+ @Override
+ public void testDeleteWithLike()
+ {
+ assertThatThrownBy(super::testDeleteWithLike)
+ .hasStackTraceContaining("TrinoException: " + MODIFYING_ROWS_MESSAGE);
+ }
+
+ @Test
+ @Override
+ public void testCreateTableAsSelect()
+ {
+ String tableName = "test_ctas" + randomNameSuffix();
+ if (!hasBehavior(SUPPORTS_CREATE_TABLE_WITH_DATA)) {
+ assertQueryFails("CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation", "This connector does not support creating tables with data");
+ return;
+ }
+ assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation", "SELECT count(*) FROM nation");
+ assertTableColumnNames(tableName, "name", "regionkey");
+
+ assertEquals(getTableComment(getSession().getCatalog().orElseThrow(), getSession().getSchema().orElseThrow(), tableName), "");
+ assertUpdate("DROP TABLE " + tableName);
+
+ // Some connectors support CREATE TABLE AS but not the ordinary CREATE TABLE. Let's test CTAS IF NOT EXISTS with a table that is guaranteed to exist.
+ assertUpdate("CREATE TABLE IF NOT EXISTS nation AS SELECT nationkey, regionkey FROM nation", 0);
+ assertTableColumnNames("nation", "nationkey", "name", "regionkey", "comment");
+
+ assertCreateTableAsSelect(
+ "SELECT nationkey, name, regionkey FROM nation",
+ "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect(
+ "SELECT mktsegment, sum(acctbal) x FROM customer GROUP BY mktsegment",
+ "SELECT count(DISTINCT mktsegment) FROM customer");
+
+ assertCreateTableAsSelect(
+ "SELECT count(*) x FROM nation JOIN region ON nation.regionkey = region.regionkey",
+ "SELECT 1");
+
+ assertCreateTableAsSelect(
+ "SELECT nationkey FROM nation ORDER BY nationkey LIMIT 10",
+ "SELECT 10");
+
+ assertCreateTableAsSelect(
+ "SELECT * FROM nation WITH DATA",
+ "SELECT * FROM nation",
+ "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect(
+ "SELECT * FROM nation WITH NO DATA",
+ "SELECT * FROM nation LIMIT 0",
+ "SELECT 0");
+
+ // Tests for CREATE TABLE with UNION ALL: exercises PushTableWriteThroughUnion optimizer
+
+ assertCreateTableAsSelect(
+ "SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 0 UNION ALL " +
+ "SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 1",
+ "SELECT name, nationkey, regionkey FROM nation",
+ "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect(
+ Session.builder(getSession()).setSystemProperty("redistribute_writes", "true").build(),
+ "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT count(*) + 1 FROM nation");
+
+ assertCreateTableAsSelect(
+ Session.builder(getSession()).setSystemProperty("redistribute_writes", "false").build(),
+ "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT count(*) + 1 FROM nation");
+
+ tableName = "test_ctas" + randomNameSuffix();
+ assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE " + tableName + " AS SELECT name FROM nation");
+ assertQuery("SELECT * from " + tableName, "SELECT name FROM nation");
+ assertUpdate("DROP TABLE " + tableName);
+ }
+
+ @Test
+ @Override
+ public void testCreateTable()
+ {
+ String tableName = "test_create_" + randomNameSuffix();
+ if (!hasBehavior(SUPPORTS_CREATE_TABLE)) {
+ assertQueryFails("CREATE TABLE " + tableName + " (a bigint, b double, c varchar(50))", "This connector does not support creating tables");
+ return;
+ }
+
+ assertThat(computeActual("SHOW TABLES").getOnlyColumnAsSet()) // prime the cache, if any
+ .doesNotContain(tableName);
+ assertUpdate("CREATE TABLE " + tableName + " (a bigint, b double, c varchar(50))");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertThat(computeActual("SHOW TABLES").getOnlyColumnAsSet())
+ .contains(tableName);
+ assertTableColumnNames(tableName, "a", "b", "c");
+ assertEquals(getTableComment(getSession().getCatalog().orElseThrow(), getSession().getSchema().orElseThrow(), tableName), "");
+
+ assertUpdate("DROP TABLE " + tableName);
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+ assertThat(computeActual("SHOW TABLES").getOnlyColumnAsSet())
+ .doesNotContain(tableName);
+
+ assertQueryFails("CREATE TABLE " + tableName + " (a bad_type)", ".* Unknown type 'bad_type' for column 'a'");
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+
+ tableName = "test_cr_not_exists_" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableName + " (a bigint, b varchar(50), c double)");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertTableColumnNames(tableName, "a", "b", "c");
+
+ assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " (d bigint, e varchar(50))");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertTableColumnNames(tableName, "a", "b", "c");
+
+ assertUpdate("DROP TABLE " + tableName);
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+
+ // Test CREATE TABLE LIKE
+ tableName = "test_create_orig_" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableName + " (a bigint, b double, c varchar(50))");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertTableColumnNames(tableName, "a", "b", "c");
+
+ String tableNameLike = "test_create_like_" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableNameLike + " (LIKE " + tableName + ", d bigint, e varchar(50))");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableNameLike));
+ assertTableColumnNames(tableNameLike, "a", "b", "c", "d", "e");
+
+ assertUpdate("DROP TABLE " + tableName);
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+
+ assertUpdate("DROP TABLE " + tableNameLike);
+ assertFalse(getQueryRunner().tableExists(getSession(), tableNameLike));
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryCreateStatement()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryInsertStatementTableExists()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQuerySelectUnsupportedType()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testCreateTableWithLongColumnName()
+ {
+ String tableName = "test_long_column" + randomNameSuffix();
+ String baseColumnName = "col";
+
+ int maxLength = maxColumnNameLength()
+                // Assume 2^16 is enough for most use cases. Add a bit more to ensure 2^16 isn't the actual limit.
+ .orElse(65536 + 5);
+
+ String validColumnName = baseColumnName + "z".repeat(maxLength - baseColumnName.length());
+ assertUpdate("CREATE TABLE " + tableName + " (" + validColumnName + " bigint)");
+ assertTrue(columnExists(tableName, validColumnName));
+ assertUpdate("DROP TABLE " + tableName);
+
+ if (maxColumnNameLength().isEmpty()) {
+ return;
+ }
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+ }
+
+ @Test
+ @Override
+ public void testCreateTableWithLongTableName()
+ {
+ // TODO: Find the maximum table name length in Snowflake and enable this test.
+ abort("TODO");
+ }
+
+ @Override
+ protected OptionalInt maxColumnNameLength()
+ {
+ return OptionalInt.of(251);
+ }
+
+ @Test
+ @Override
+ public void testAlterTableAddLongColumnName()
+ {
+ String tableName = "test_long_column" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
+
+ String baseColumnName = "col";
+ int maxLength = maxColumnNameLength()
+                // Assume 2^16 is enough for most use cases. Add a bit more to ensure 2^16 isn't the actual limit.
+ .orElse(65536 + 5);
+
+ String validTargetColumnName = baseColumnName + "z".repeat(maxLength - baseColumnName.length());
+ assertUpdate("ALTER TABLE " + tableName + " ADD COLUMN " + validTargetColumnName + " int");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertQuery("SELECT x FROM " + tableName, "VALUES 123");
+ assertUpdate("DROP TABLE " + tableName);
+
+ if (maxColumnNameLength().isEmpty()) {
+ return;
+ }
+
+ assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
+ assertQuery("SELECT x FROM " + tableName, "VALUES 123");
+ }
+
+ @Test
+ @Override
+ public void testAlterTableRenameColumnToLongName()
+ {
+ String tableName = "test_long_column" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
+
+ String baseColumnName = "col";
+ int maxLength = maxColumnNameLength()
+                // Assume 2^16 is enough for most use cases. Add a bit more to ensure 2^16 isn't the actual limit.
+ .orElse(65536 + 5);
+
+ String validTargetColumnName = baseColumnName + "z".repeat(maxLength - baseColumnName.length());
+ assertUpdate("ALTER TABLE " + tableName + " RENAME COLUMN x TO " + validTargetColumnName);
+ assertQuery("SELECT " + validTargetColumnName + " FROM " + tableName, "VALUES 123");
+ assertUpdate("DROP TABLE " + tableName);
+
+ if (maxColumnNameLength().isEmpty()) {
+ return;
+ }
+
+ assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
+ assertQuery("SELECT x FROM " + tableName, "VALUES 123");
+ }
+
+ @Test
+ @Override
+ public void testCreateSchemaWithLongName()
+ {
+        // TODO: Find the maximum schema name length in Snowflake and enable this test.
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testInsertArray()
+ {
+        // The Snowflake connector does not support the array type.
+ abort("Not supported");
+ }
+
+ @Test
+ @Override
+ public void testInsertRowConcurrently()
+ {
+ abort("TODO: Connection is already closed");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryColumnAlias()
+ {
+ abort("TODO: Table function system.query not registered");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryColumnAliasNotFound()
+ {
+ abort("TODO: Table function system.query not registered");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryIncorrectSyntax()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryInsertStatementTableDoesNotExist()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryParameters()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQuerySelectFromNation()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQuerySelectFromTestTable()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQuerySimple()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testRenameSchemaToLongName()
+ {
+        // TODO: Find the maximum schema name length in Snowflake and enable this test.
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testRenameTableToLongTableName()
+ {
+        // TODO: Find the maximum table name length in Snowflake and enable this test.
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testCharTrailingSpace()
+ {
+        assertThatThrownBy(super::testCharTrailingSpace)
+ .hasMessageContaining("For query")
+ .hasMessageContaining("Actual rows")
+ .hasMessageContaining("Expected rows");
+ }
+
+ @Test
+ @Override
+ public void testDescribeTable()
+ {
+ assertThat(query("DESCRIBE orders")).result().matches(getDescribeOrdersResult());
}
}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java
index 1e7a28572b6e..ef552a9fa5ff 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java
@@ -54,8 +54,6 @@
public class TestSnowflakeTypeMapping
extends AbstractTestQueryFramework
{
- protected TestingSnowflakeServer snowflakeServer;
-
private final ZoneId jvmZone = ZoneId.systemDefault();
// no DST in 1970, but has DST in later years (e.g. 2018)
private final ZoneId vilnius = ZoneId.of("Europe/Vilnius");
@@ -372,7 +370,7 @@ private DataSetup trinoCreateAndInsert(Session session, String tableNamePrefix)
private DataSetup snowflakeCreateAndInsert(String tableNamePrefix)
{
- return new CreateAndInsertDataSetup(snowflakeServer::execute, tableNamePrefix);
+ return new CreateAndInsertDataSetup(TestingSnowflakeServer::execute, tableNamePrefix);
}
private static void checkIsGap(ZoneId zone, LocalDate date)
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java
index 74fb6ed0f42a..5c3b99d5f51f 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java
@@ -23,8 +23,7 @@
import static java.util.Objects.requireNonNull;
-public class TestingSnowflakeServer
- implements AutoCloseable
+public final class TestingSnowflakeServer
{
public static final String TEST_URL = requireNonNull(System.getProperty("snowflake.test.server.url"), "snowflake.test.server.url is not set");
public static final String TEST_USER = requireNonNull(System.getProperty("snowflake.test.server.user"), "snowflake.test.server.user is not set");
@@ -32,15 +31,11 @@ public class TestingSnowflakeServer
public static final String TEST_DATABASE = requireNonNull(System.getProperty("snowflake.test.server.database"), "snowflake.test.server.database is not set");
public static final String TEST_WAREHOUSE = requireNonNull(System.getProperty("snowflake.test.server.warehouse"), "snowflake.test.server.warehouse is not set");
public static final String TEST_ROLE = requireNonNull(System.getProperty("snowflake.test.server.role"), "snowflake.test.server.role is not set");
- public static final String TEST_PROXY = System.getProperty("snowflake.test.http_proxy");
public static final String TEST_SCHEMA = "tpch";
- public TestingSnowflakeServer()
- {
- execute("CREATE SCHEMA IF NOT EXISTS tpch");
- }
+ private TestingSnowflakeServer() {}
- public void execute(@Language("SQL") String sql)
+ public static void execute(@Language("SQL") String sql)
{
execute(TEST_URL, getProperties(), sql);
}
@@ -56,7 +51,7 @@ private static void execute(String url, Properties properties, String sql)
}
}
- public Properties getProperties()
+ private static Properties getProperties()
{
Properties properties = new Properties();
properties.setProperty("user", TEST_USER);
@@ -67,10 +62,4 @@ public Properties getProperties()
properties.setProperty("role", TEST_ROLE);
return properties;
}
-
- @Override
- public void close()
- throws Exception
- {
- }
}
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeSnowflake.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeSnowflake.java
index 7f4ab574084a..ac5694a0b835 100644
--- a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeSnowflake.java
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeSnowflake.java
@@ -20,14 +20,8 @@
import io.trino.tests.product.launcher.env.common.Standard;
import io.trino.tests.product.launcher.env.common.TestsEnvironment;
-import java.io.File;
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.attribute.PosixFilePermissions;
-
-import static java.nio.file.attribute.PosixFilePermissions.fromString;
+import static io.trino.tests.product.launcher.env.EnvironmentContainers.isTrinoContainer;
+import static io.trino.tests.product.launcher.env.common.Standard.CONTAINER_TRINO_JVM_CONFIG;
import static java.util.Objects.requireNonNull;
import static org.testcontainers.utility.MountableFile.forHostPath;
@@ -47,27 +41,21 @@ public EnvMultinodeSnowflake(DockerFiles dockerFiles, Standard standard)
@Override
public void extendEnvironment(Environment.Builder builder)
{
- builder.addConnector("snowflake", forHostPath(getEnvProperties()));
- }
+ builder.configureContainers(container -> {
+ if (isTrinoContainer(container.getLogicalName())) {
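+                // Pass the credentials as environment variables so that the ${ENV:...} placeholders in snowflake.properties resolve inside the Trino containers.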
+ container
+ .withEnv("SNOWFLAKE_URL", requireEnv("SNOWFLAKE_URL"))
+ .withEnv("SNOWFLAKE_USER", requireEnv("SNOWFLAKE_USER"))
+ .withEnv("SNOWFLAKE_PASSWORD", requireEnv("SNOWFLAKE_PASSWORD"))
+ .withEnv("SNOWFLAKE_DATABASE", requireEnv("SNOWFLAKE_DATABASE"))
+ .withEnv("SNOWFLAKE_ROLE", requireEnv("SNOWFLAKE_ROLE"))
+ .withEnv("SNOWFLAKE_WAREHOUSE", requireEnv("SNOWFLAKE_WAREHOUSE"));
- private Path getEnvProperties()
- {
- try {
- String properties = Files.readString(configDir.getPath("snowflake.properties"))
- .replace("${ENV:SNOWFLAKE_URL}", requireEnv("SNOWFLAKE_URL"))
- .replace("${ENV:SNOWFLAKE_USER}", requireEnv("SNOWFLAKE_USER"))
- .replace("${ENV:SNOWFLAKE_PASSWORD}", requireEnv("SNOWFLAKE_PASSWORD"))
- .replace("${ENV:SNOWFLAKE_DATABASE}", requireEnv("SNOWFLAKE_DATABASE"))
- .replace("${ENV:SNOWFLAKE_ROLE}", requireEnv("SNOWFLAKE_ROLE"))
- .replace("${ENV:SNOWFLAKE_WAREHOUSE}", requireEnv("SNOWFLAKE_WAREHOUSE"));
- File newProperties = Files.createTempFile("snowflake-replaced", ".properties", PosixFilePermissions.asFileAttribute(fromString("rwxrwxrwx"))).toFile();
- newProperties.deleteOnExit();
- Files.writeString(newProperties.toPath(), properties);
- return newProperties.toPath();
- }
- catch (IOException e) {
- throw new UncheckedIOException(e);
- }
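+                // Replace the default Trino jvm.config with the Snowflake-specific one from this environment's configuration directory.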
+ container.withCopyFileToContainer(forHostPath(configDir.getPath("jvm.config")), CONTAINER_TRINO_JVM_CONFIG);
+ }
+ });
+
+ builder.addConnector("snowflake", forHostPath(configDir.getPath("snowflake.properties")));
}
private static String requireEnv(String variable)
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-all/snowflake.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-all/snowflake.properties
index 669489ea4363..a841dbe3339c 100644
--- a/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-all/snowflake.properties
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-all/snowflake.properties
@@ -1,4 +1,4 @@
connector.name=snowflake
-connection-url=${ENV:SNOWFLAKE_URL}
-connection-user=${ENV:SNOWFLAKE_USER}
-connection-password=${ENV:SNOWFLAKE_PASSWORD}
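+# Static placeholder values; this catalog does not point at a real Snowflake account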
+connection-url=jdbc:snowflake://example.snowflakecomputing.com
+connection-user=root
+connection-password=secret
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/jvm.config b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/jvm.config
new file mode 100644
index 000000000000..491f05d67917
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/jvm.config
@@ -0,0 +1,18 @@
+-server
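+# Opened for the Snowflake JDBC driver, whose Arrow-based result handling accesses java.nio internals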
+--add-opens=java.base/java.nio=ALL-UNNAMED
+-Xmx2G
+-XX:G1HeapRegionSize=32M
+-XX:+ExplicitGCInvokesConcurrent
+-XX:+ExitOnOutOfMemoryError
+-XX:+HeapDumpOnOutOfMemoryError
+-XX:-OmitStackTraceInFastThrow
+-XX:ReservedCodeCacheSize=150M
+-XX:PerMethodRecompilationCutoff=10000
+-XX:PerBytecodeRecompilationCutoff=10000
+-Djdk.attach.allowAttachSelf=true
+# jdk.nio.maxCachedBufferSize controls what buffers can be allocated in per-thread "temporary buffer cache" (sun.nio.ch.Util). Value of 0 disables the cache.
+-Djdk.nio.maxCachedBufferSize=0
+-Duser.timezone=Asia/Kathmandu
+-XX:ErrorFile=/docker/logs/product-tests-presto-jvm-error-file.log
+# Allow loading dynamic agent used by JOL
+-XX:+EnableDynamicAgentLoading
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/snowflake.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/snowflake.properties
index 669489ea4363..a3a4002b9661 100644
--- a/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/snowflake.properties
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/snowflake.properties
@@ -2,3 +2,6 @@ connector.name=snowflake
connection-url=${ENV:SNOWFLAKE_URL}
connection-user=${ENV:SNOWFLAKE_USER}
connection-password=${ENV:SNOWFLAKE_PASSWORD}
+snowflake.database=${ENV:SNOWFLAKE_DATABASE}
+snowflake.role=${ENV:SNOWFLAKE_ROLE}
+snowflake.warehouse=${ENV:SNOWFLAKE_WAREHOUSE}