Skip to content

Commit

Permalink
Support type mapping configs in Snowflake Connector
Browse files Browse the repository at this point in the history
Support the config properties `unsupported-type-handling` and
`jdbc-types-mapped-to-varchar` in the Snowflake connector.
  • Loading branch information
lxynov authored and ebyhr committed Apr 25, 2024
1 parent 373dccf commit 6c67281
Show file tree
Hide file tree
Showing 3 changed files with 68 additions and 2 deletions.
3 changes: 3 additions & 0 deletions docs/src/main/sphinx/connector/snowflake.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,9 @@ Trino supports the following Snowflake data types:

Complete list of [Snowflake data types](https://docs.snowflake.com/en/sql-reference/intro-summary-data-types.html).

```{include} jdbc-type-mapping.fragment
```

(snowflake-sql-support)=

## SQL support
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,8 @@
import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryWriteFunction;
import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction;
import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling;
import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
Expand Down Expand Up @@ -206,6 +208,10 @@ public SnowflakeClient(
@Override
public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
{
Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
if (mapping.isPresent()) {
return mapping;
}
String jdbcTypeName = typeHandle.jdbcTypeName()
.orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
jdbcTypeName = jdbcTypeName.toLowerCase(Locale.ENGLISH);
Expand Down Expand Up @@ -262,7 +268,9 @@ public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connect
case Types.TIMESTAMP_WITH_TIMEZONE:
return Optional.of(timestampWithTimeZoneColumnMapping(typeHandle.requiredDecimalDigits()));
}

if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) {
return mapToUnboundedVarchar(typeHandle);
}
return Optional.empty();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.trino.Session;
import io.trino.plugin.jdbc.UnsupportedTypeHandling;
import io.trino.spi.type.TimeZoneKey;
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.QueryRunner;
Expand All @@ -24,6 +25,7 @@
import io.trino.testing.datatype.CreateAsSelectDataSetup;
import io.trino.testing.datatype.DataSetup;
import io.trino.testing.datatype.SqlDataTypeTest;
import io.trino.testing.sql.TestTable;
import io.trino.testing.sql.TrinoSqlExecutor;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
Expand All @@ -35,6 +37,9 @@

import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.UNSUPPORTED_TYPE_HANDLING;
import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR;
import static io.trino.plugin.jdbc.UnsupportedTypeHandling.IGNORE;
import static io.trino.plugin.snowflake.SnowflakeQueryRunner.createSnowflakeQueryRunner;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
Expand Down Expand Up @@ -76,7 +81,7 @@ protected QueryRunner createQueryRunner()
{
return createSnowflakeQueryRunner(
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableMap.of("jdbc-types-mapped-to-varchar", "ARRAY"),
ImmutableList.of());
}

Expand Down Expand Up @@ -392,6 +397,56 @@ private void testTimestamp(ZoneId sessionZone)
.execute(getQueryRunner(), session, trinoCreateAndInsert("test_timestamp"));
}

// ARRAY is listed in jdbc-types-mapped-to-varchar (see createQueryRunner above),
// so a Snowflake ARRAY column must surface to Trino as an unbounded VARCHAR.
@Test
public void testForcedMappingToVarchar()
{
try (TestTable table = new TestTable(
TestingSnowflakeServer::execute,
"tpch.test_forced_varchar_mapping",
"AS SELECT ARRAY_CONSTRUCT(1, 2, 3) x")) {
// Reading the forced column yields Snowflake's JSON text rendering of the array.
assertQuery(
"SELECT * FROM " + table.getName(),
"""
VALUES ('[
1,
2,
3
]')
""");

// Writing through a forced-to-VARCHAR mapping is rejected, since the
// underlying Snowflake type cannot accept an arbitrary VARCHAR value.
assertQueryFails(
"INSERT INTO " + table.getName() + " VALUES 'some value'",
"Underlying type that is mapped to VARCHAR is not supported for INSERT: .*");
}
}

// GEOMETRY has no Trino mapping; verify both unsupported-type-handling modes:
// IGNORE drops the unsupported column, CONVERT_TO_VARCHAR exposes it as text.
@Test
public void testUnsupportedDataType()
{
try (TestTable table = new TestTable(
TestingSnowflakeServer::execute,
"tpch.test_unsupported_data_type",
"AS SELECT TRUE x, TO_GEOMETRY('POINT(1820.12 890.56)') y")) {
// IGNORE: only the supported BOOLEAN column x is visible.
assertQuery(unsupportedTypeHandling(IGNORE), "SELECT * FROM " + table.getName(), "VALUES TRUE");
// CONVERT_TO_VARCHAR: the GEOMETRY column is read back as its JSON text form.
assertQuery(unsupportedTypeHandling(CONVERT_TO_VARCHAR), "SELECT * FROM " + table.getName(), """
VALUES (TRUE, '{
"coordinates": [
1.820120000000000e+03,
8.905599999999999e+02
],
"type": "Point"
}')
""");
}
}

/**
 * Derives a session from the current test session with the given
 * unsupported-type-handling mode set on the "snowflake" catalog.
 */
private Session unsupportedTypeHandling(UnsupportedTypeHandling unsupportedTypeHandling)
{
var sessionBuilder = Session.builder(getSession());
sessionBuilder.setCatalogSessionProperty("snowflake", UNSUPPORTED_TYPE_HANDLING, unsupportedTypeHandling.name());
return sessionBuilder.build();
}

private DataSetup trinoCreateAsSelect(String tableNamePrefix)
{
return trinoCreateAsSelect(getSession(), tableNamePrefix);
Expand Down

0 comments on commit 6c67281

Please sign in to comment.