diff --git a/docs/src/main/sphinx/connector/snowflake.md b/docs/src/main/sphinx/connector/snowflake.md index 99aeb8d8c4e3..bba55f9d5db1 100644 --- a/docs/src/main/sphinx/connector/snowflake.md +++ b/docs/src/main/sphinx/connector/snowflake.md @@ -49,30 +49,163 @@ multiple instances of the Snowflake connector. ## Type mapping -Trino supports the following Snowflake data types: - -| Snowflake Type | Trino Type | -| -------------- | -------------- | -| `boolean` | `boolean` | -| `tinyint` | `bigint` | -| `smallint` | `bigint` | -| `byteint` | `bigint` | -| `int` | `bigint` | -| `integer` | `bigint` | -| `bigint` | `bigint` | -| `float` | `real` | -| `real` | `real` | -| `double` | `double` | -| `decimal` | `decimal(P,S)` | -| `varchar(n)` | `varchar(n)` | -| `char(n)` | `varchar(n)` | -| `binary(n)` | `varbinary` | -| `varbinary` | `varbinary` | -| `date` | `date` | -| `time` | `time` | -| `timestampntz` | `timestamp` | - -Complete list of [Snowflake data types](https://docs.snowflake.com/en/sql-reference/intro-summary-data-types.html). +Because Trino and Snowflake each support types that the other does not, this +connector {ref}`modifies some types ` when reading or +writing data. Data types may not map the same way in both directions between +Trino and the data source. Refer to the following sections for type mapping in +each direction. + +List of [Snowflake data types](https://docs.snowflake.com/en/sql-reference/intro-summary-data-types.html). + +### Snowflake type to Trino type mapping + +The connector maps Snowflake types to the corresponding Trino types following +this table: + +:::{list-table} Snowflake type to Trino type mapping +:widths: 30, 30, 40 +:header-rows: 1 + +* - Snowflake type + - Trino type + - Notes +* - `NUMBER` + - `DECIMAL` + - Default precision and scale are (38,0). +* - `DECIMAL`, `NUMERIC` + - `DECIMAL` + - Synonymous with `NUMBER`. 
See Snowflake + [data types for fixed point numbers](https://docs.snowflake.com/en/sql-reference/data-types-numeric#data-types-for-fixed-point-numbers) + for more information. +* - `INT`, `INTEGER`, `BIGINT`, `SMALLINT`, `TINYINT`, `BYTEINT` + - `DECIMAL(38,0)` + - Synonymous with `NUMBER(38,0)`. See Snowflake + [data types for fixed point numbers](https://docs.snowflake.com/en/sql-reference/data-types-numeric#data-types-for-fixed-point-numbers) + for more information. +* - `FLOAT`, `FLOAT4`, `FLOAT8` + - `DOUBLE` + - The names `FLOAT`, `FLOAT4`, and `FLOAT8` are for compatibility with other systems; Snowflake treats all three as + 64-bit floating-point numbers. See Snowflake + [data types for floating point numbers](https://docs.snowflake.com/en/sql-reference/data-types-numeric#data-types-for-floating-point-numbers) + for more information. +* - `DOUBLE`, `DOUBLE PRECISION`, `REAL` + - `DOUBLE` + - Synonymous with `FLOAT`. See Snowflake + [data types for floating point numbers](https://docs.snowflake.com/en/sql-reference/data-types-numeric#data-types-for-floating-point-numbers) + for more information. +* - `VARCHAR` + - `VARCHAR` + - +* - `CHAR`, `CHARACTER` + - `VARCHAR` + - Synonymous with `VARCHAR` except default length is `VARCHAR(1)`. See Snowflake + [String & Binary Data Types](https://docs.snowflake.com/en/sql-reference/data-types-text) + for more information. +* - `STRING`, `TEXT` + - `VARCHAR` + - Synonymous with `VARCHAR`. See Snowflake + [String & Binary Data Types](https://docs.snowflake.com/en/sql-reference/data-types-text) + for more information. +* - `BINARY` + - `VARBINARY` + - +* - `VARBINARY` + - `VARBINARY` + - Synonymous with `BINARY`. See Snowflake + [String & Binary Data Types](https://docs.snowflake.com/en/sql-reference/data-types-text) + for more information. 
+* - `BOOLEAN`
+  - `BOOLEAN`
+  -
+* - `DATE`
+  - `DATE`
+  -
+* - `TIME`
+  - `TIME`
+  -
+* - `TIMESTAMP_NTZ`
+  - `TIMESTAMP`
+  - TIMESTAMP with no time zone; time zone, if provided, is not stored. See Snowflake
+    [Date & Time Data Types](https://docs.snowflake.com/en/sql-reference/data-types-datetime)
+    for more information.
+* - `DATETIME`
+  - `TIMESTAMP`
+  - Alias for `TIMESTAMP_NTZ`. See Snowflake
+    [Date & Time Data Types](https://docs.snowflake.com/en/sql-reference/data-types-datetime)
+    for more information.
+* - `TIMESTAMP`
+  - `TIMESTAMP`
+  - Alias for one of the `TIMESTAMP` variations (`TIMESTAMP_NTZ` by default). This connector always sets `TIMESTAMP_NTZ` as the variant.
+* - `TIMESTAMP_TZ`
+  - `TIMESTAMP WITH TIME ZONE`
+  - TIMESTAMP with time zone.
+:::
+
+No other types are supported.
+
+### Trino type to Snowflake type mapping
+
+The connector maps Trino types to the corresponding Snowflake types following
+this table:
+
+:::{list-table} Trino type to Snowflake type mapping
+:widths: 30, 30, 40
+:header-rows: 1
+
+* - Trino type
+  - Snowflake type
+  - Notes
+* - `TINYINT`
+  - `NUMBER(3, 0)`
+  -
+* - `SMALLINT`
+  - `NUMBER(5, 0)`
+  -
+* - `INTEGER`
+  - `NUMBER(10, 0)`
+  -
+* - `BIGINT`
+  - `NUMBER(19, 0)`
+  -
+* - `DECIMAL`
+  - `NUMBER`
+  -
+* - `REAL`
+  - `DOUBLE`
+  -
+* - `DOUBLE`
+  - `DOUBLE`
+  -
+* - `VARCHAR`
+  - `VARCHAR`
+  -
+* - `CHAR`
+  - `VARCHAR`
+  -
+* - `VARBINARY`
+  - `VARBINARY`
+  - Synonymous with `BINARY`; Snowflake stores both as binary data.
+    See Snowflake
+    [String & Binary Data Types](https://docs.snowflake.com/en/sql-reference/data-types-text)
+    for more information.
+* - `BOOLEAN`
+  - `BOOLEAN`
+  -
+* - `DATE`
+  - `DATE`
+  -
+* - `TIME`
+  - `TIME`
+  -
+* - `TIMESTAMP`
+  - `TIMESTAMP_NTZ`
+  -
+* - `TIMESTAMP WITH TIME ZONE`
+  - `TIMESTAMP_TZ`
+  -
+:::
+
+No other types are supported.
(snowflake-sql-support)= diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java index fca53986736a..0b2026498d95 100644 --- a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java +++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java @@ -13,7 +13,6 @@ */ package io.trino.plugin.snowflake; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.inject.Inject; import io.trino.plugin.base.aggregation.AggregateFunctionRewriter; @@ -66,7 +65,6 @@ import io.trino.spi.type.Type; import io.trino.spi.type.VarcharType; -import java.math.RoundingMode; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -95,13 +93,45 @@ import static io.trino.plugin.jdbc.CaseSensitivity.CASE_INSENSITIVE; import static io.trino.plugin.jdbc.CaseSensitivity.CASE_SENSITIVE; import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR; +import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.booleanColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.booleanWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction; 
+import static io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.toTrinoTimestamp; +import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction; +import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling; +import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR; import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; +import static io.trino.spi.type.BigintType.BIGINT; +import static io.trino.spi.type.BooleanType.BOOLEAN; +import static io.trino.spi.type.DateType.DATE; +import static io.trino.spi.type.DecimalType.createDecimalType; +import static io.trino.spi.type.DoubleType.DOUBLE; +import static io.trino.spi.type.IntegerType.INTEGER; +import static io.trino.spi.type.RealType.REAL; +import static io.trino.spi.type.SmallintType.SMALLINT; +import static io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType; import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND; import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND; import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND; +import static io.trino.spi.type.TinyintType.TINYINT; +import static io.trino.spi.type.VarbinaryType.VARBINARY; import static io.trino.spi.type.VarcharType.createUnboundedVarcharType; import static io.trino.spi.type.VarcharType.createVarcharType; import static java.lang.String.format; +import static java.math.RoundingMode.UNNECESSARY; +import static java.sql.Types.TIMESTAMP_WITH_TIMEZONE; public class SnowflakeClient extends BaseJdbcClient @@ -118,16 +148,6 @@ public class SnowflakeClient private static final TimeZone UTC_TZ = 
TimeZone.getTimeZone(ZoneId.of("UTC")); private final AggregateFunctionRewriter aggregateFunctionRewriter; - private interface WriteMappingFunction - { - WriteMapping convert(Type type); - } - - private interface ColumnMappingFunction - { - Optional convert(JdbcTypeHandle typeHandle); - } - @Inject public SnowflakeClient( BaseJdbcConfig config, @@ -159,98 +179,124 @@ public SnowflakeClient( @Override public Optional toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle) { + Optional mapping = getForcedMappingToVarchar(typeHandle); + if (mapping.isPresent()) { + return mapping; + } String jdbcTypeName = typeHandle.getJdbcTypeName() .orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle)); jdbcTypeName = jdbcTypeName.toLowerCase(Locale.ENGLISH); int type = typeHandle.getJdbcType(); - // Mappings for JDBC column types to internal Trino types - final Map standardColumnMappings = ImmutableMap.builder() - .put(Types.BOOLEAN, StandardColumnMappings.booleanColumnMapping()) - .put(Types.TINYINT, StandardColumnMappings.tinyintColumnMapping()) - .put(Types.SMALLINT, StandardColumnMappings.smallintColumnMapping()) - .put(Types.INTEGER, StandardColumnMappings.integerColumnMapping()) - .put(Types.BIGINT, StandardColumnMappings.bigintColumnMapping()) - .put(Types.REAL, StandardColumnMappings.realColumnMapping()) - .put(Types.DOUBLE, StandardColumnMappings.doubleColumnMapping()) - .put(Types.FLOAT, StandardColumnMappings.doubleColumnMapping()) - .put(Types.BINARY, StandardColumnMappings.varbinaryColumnMapping()) - .put(Types.VARBINARY, StandardColumnMappings.varbinaryColumnMapping()) - .put(Types.LONGVARBINARY, StandardColumnMappings.varbinaryColumnMapping()) - .buildOrThrow(); - - ColumnMapping columnMap = standardColumnMappings.get(type); - if (columnMap != null) { - return Optional.of(columnMap); + switch (type) { + case Types.BOOLEAN: + return Optional.of(booleanColumnMapping()); + // This is used for 
synthetic columns generated by count() aggregation pushdown + case Types.BIGINT: + return Optional.of(bigintColumnMapping()); + case Types.DOUBLE: + return Optional.of(doubleColumnMapping()); + // In Snowflake all fixed-point numeric types are decimals. It always returns a DECIMAL type when + // JDBC_TREAT_DECIMAL_AS_INT is set to False. + case Types.DECIMAL: + case Types.NUMERIC: { + int precision = typeHandle.getRequiredColumnSize(); + int scale = typeHandle.getRequiredDecimalDigits(); + DecimalType decimalType = createDecimalType(precision, scale); + return Optional.of(decimalColumnMapping(decimalType, UNNECESSARY)); + } + case Types.VARCHAR: + if (jdbcTypeName.equals("varchar")) { + return Optional.of(varcharColumnMapping(typeHandle.getRequiredColumnSize(), typeHandle.getCaseSensitivity())); + } + // Some other Snowflake types (ARRAY, VARIANT, GEOMETRY, etc.) are also mapped to Types.VARCHAR, but + // they're unsupported. + return Optional.empty(); + case Types.BINARY: + // Multiple Snowflake types are mapped into Types.BINARY + if (jdbcTypeName.equals("binary")) { + return Optional.of(varbinaryColumnMapping()); + } + // Some other Snowflake types (GEOMETRY in some cases, etc.) are also mapped to Types.BINARY, but + // they're unsupported. 
+ return Optional.empty(); + case Types.DATE: + return Optional.of(ColumnMapping.longMapping(DateType.DATE, ResultSet::getLong, snowFlakeDateWriter())); + case Types.TIME: + return Optional.of(timeColumnMapping(typeHandle.getRequiredDecimalDigits())); + case Types.TIMESTAMP: + return Optional.of(timestampColumnMapping(typeHandle.getRequiredDecimalDigits())); + case TIMESTAMP_WITH_TIMEZONE: + return Optional.of(timestampTimeZoneColumnMapping(typeHandle.getRequiredDecimalDigits())); } - final Map snowflakeColumnMappings = ImmutableMap.builder() - .put("time", handle -> Optional.of(timeColumnMapping(handle.getRequiredDecimalDigits()))) - .put("timestampntz", handle -> Optional.of(timestampColumnMapping(handle.getRequiredDecimalDigits()))) - .put("timestamptz", handle -> Optional.of(timestampTZColumnMapping(handle.getRequiredDecimalDigits()))) - .put("date", handle -> Optional.of(ColumnMapping.longMapping(DateType.DATE, (resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(), snowFlakeDateWriter()))) - .put("varchar", handle -> Optional.of(varcharColumnMapping(handle.getRequiredColumnSize(), typeHandle.getCaseSensitivity()))) - .put("number", handle -> { - int decimalDigits = handle.getRequiredDecimalDigits(); - int precision = handle.getRequiredColumnSize() + Math.max(-decimalDigits, 0); - if (precision > 38) { - return Optional.empty(); - } - return Optional.of(columnMappingPushdown( - StandardColumnMappings.decimalColumnMapping(DecimalType.createDecimalType(precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY))); - }) - .buildOrThrow(); - - ColumnMappingFunction columnMappingFunction = snowflakeColumnMappings.get(jdbcTypeName); - if (columnMappingFunction != null) { - return columnMappingFunction.convert(typeHandle); + if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) { + return mapToUnboundedVarchar(typeHandle); } - return Optional.empty(); } @Override public WriteMapping toWriteMapping(ConnectorSession 
session, Type type) { - Class myClass = type.getClass(); - String simple = myClass.getSimpleName(); - - // Mappings for internal Trino types to JDBC column types - final Map standardWriteMappings = ImmutableMap.builder() - .put("BooleanType", WriteMapping.booleanMapping("boolean", StandardColumnMappings.booleanWriteFunction())) - .put("BigintType", WriteMapping.longMapping("number(19)", StandardColumnMappings.bigintWriteFunction())) - .put("IntegerType", WriteMapping.longMapping("number(10)", StandardColumnMappings.integerWriteFunction())) - .put("SmallintType", WriteMapping.longMapping("number(5)", StandardColumnMappings.smallintWriteFunction())) - .put("TinyintType", WriteMapping.longMapping("number(3)", StandardColumnMappings.tinyintWriteFunction())) - .put("DoubleType", WriteMapping.doubleMapping("double precision", StandardColumnMappings.doubleWriteFunction())) - .put("RealType", WriteMapping.longMapping("real", StandardColumnMappings.realWriteFunction())) - .put("VarbinaryType", WriteMapping.sliceMapping("varbinary", StandardColumnMappings.varbinaryWriteFunction())) - .put("DateType", WriteMapping.longMapping("date", snowFlakeDateWriter())) - .buildOrThrow(); - - WriteMapping writeMapping = standardWriteMappings.get(simple); - if (writeMapping != null) { - return writeMapping; + if (type == BOOLEAN) { + return WriteMapping.booleanMapping("BOOLEAN", booleanWriteFunction()); } - final Map snowflakeWriteMappings = ImmutableMap.builder() - .put("TimeType", writeType -> WriteMapping.longMapping("time", timeWriteFunction(((TimeType) writeType).getPrecision()))) - .put("ShortTimestampType", writeType -> SnowflakeClient.snowFlakeTimestampWriter(writeType)) - .put("ShortTimestampWithTimeZoneType", writeType -> SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType)) - .put("LongTimestampType", writeType -> SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType)) - .put("LongTimestampWithTimeZoneType", writeType -> 
SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType)) - .put("VarcharType", writeType -> SnowflakeClient.snowFlakeVarCharWriter(writeType)) - .put("CharType", writeType -> SnowflakeClient.snowFlakeCharWriter(writeType)) - .put("LongDecimalType", writeType -> SnowflakeClient.snowFlakeDecimalWriter(writeType)) - .put("ShortDecimalType", writeType -> SnowflakeClient.snowFlakeDecimalWriter(writeType)) - .buildOrThrow(); - - WriteMappingFunction writeMappingFunction = snowflakeWriteMappings.get(simple); - if (writeMappingFunction != null) { - return writeMappingFunction.convert(type); + if (type == TINYINT) { + return WriteMapping.longMapping("NUMBER(3, 0)", tinyintWriteFunction()); } - - throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName() + ", simple:" + simple); + if (type == SMALLINT) { + return WriteMapping.longMapping("NUMBER(5, 0)", smallintWriteFunction()); + } + if (type == INTEGER) { + return WriteMapping.longMapping("NUMBER(10, 0)", integerWriteFunction()); + } + if (type == BIGINT) { + return WriteMapping.longMapping("NUMBER(19, 0)", bigintWriteFunction()); + } + if (type == REAL) { + return WriteMapping.longMapping("DOUBLE", realWriteFunction()); + } + if (type == DOUBLE) { + return WriteMapping.doubleMapping("DOUBLE", doubleWriteFunction()); + } + if (type instanceof DecimalType decimalType) { + String dataType = format("NUMBER(%s, %s)", decimalType.getPrecision(), decimalType.getScale()); + if (decimalType.isShort()) { + return WriteMapping.longMapping(dataType, shortDecimalWriteFunction(decimalType)); + } + return WriteMapping.objectMapping(dataType, longDecimalWriteFunction(decimalType)); + } + if (type instanceof CharType charType) { + return WriteMapping.sliceMapping("VARCHAR(" + charType.getLength() + ")", charWriteFunction(charType)); + } + if (type instanceof VarcharType varcharType) { + String dataType; + if (varcharType.isUnbounded()) { + dataType = "VARCHAR"; + } + else { + dataType = "VARCHAR(" + 
varcharType.getBoundedLength() + ")"; + } + return WriteMapping.sliceMapping(dataType, varcharWriteFunction()); + } + if (type == VARBINARY) { + return WriteMapping.sliceMapping("VARBINARY", varbinaryWriteFunction()); + } + if (type == DATE) { + return WriteMapping.longMapping("date", snowFlakeDateWriter()); + } + if (type instanceof TimeType timeType) { + return WriteMapping.longMapping("time", timeWriteFunction(timeType.getPrecision())); + } + if (type instanceof TimestampType timestampType) { + return snowflakeTimestampWriteMapping(timestampType); + } + if (type instanceof TimestampWithTimeZoneType timestampWithTimeZoneType) { + return snowflakeTimestampWithTimeZoneWriteMapping(timestampWithTimeZoneType); + } + throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName()); } @Override @@ -322,15 +368,6 @@ public void setColumnType(ConnectorSession session, JdbcTableHandle handle, Jdbc throw new TrinoException(NOT_SUPPORTED, "This connector does not support setting column types"); } - private static ColumnMapping columnMappingPushdown(ColumnMapping mapping) - { - if (mapping.getPredicatePushdownController() == PredicatePushdownController.DISABLE_PUSHDOWN) { - throw new TrinoException(NOT_SUPPORTED, "mapping.getPredicatePushdownController() is DISABLE_PUSHDOWN. 
Type was " + mapping.getType()); - } - - return new ColumnMapping(mapping.getType(), mapping.getReadFunction(), mapping.getWriteFunction(), PredicatePushdownController.FULL_PUSHDOWN); - } - private static ColumnMapping timeColumnMapping(int precision) { checkArgument(precision <= MAX_SUPPORTED_TEMPORAL_PRECISION, "The max timestamp precision in Snowflake is " + MAX_SUPPORTED_TEMPORAL_PRECISION); @@ -344,22 +381,21 @@ private static ColumnMapping timeColumnMapping(int precision) PredicatePushdownController.FULL_PUSHDOWN); } - private static ColumnMapping timestampTZColumnMapping(int precision) + private static ColumnMapping timestampTimeZoneColumnMapping(int precision) { - if (precision <= 3) { - return ColumnMapping.longMapping(TimestampWithTimeZoneType.createTimestampWithTimeZoneType(precision), + TimestampWithTimeZoneType trinoType = createTimestampWithTimeZoneType(precision); + if (precision <= TimestampWithTimeZoneType.MAX_SHORT_PRECISION) { + return ColumnMapping.longMapping(trinoType, (resultSet, columnIndex) -> { ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from); return DateTimeEncoding.packDateTimeWithZone(timestamp.toInstant().toEpochMilli(), timestamp.getZone().getId()); }, - timestampWithTZWriter(), PredicatePushdownController.FULL_PUSHDOWN); - } - else { - return ColumnMapping.objectMapping(TimestampWithTimeZoneType.createTimestampWithTimeZoneType(precision), longTimestampWithTimezoneReadFunction(), longTimestampWithTZWriteFunction()); + shortTimestampWithTimeZoneWriteFunction(), PredicatePushdownController.FULL_PUSHDOWN); } + return ColumnMapping.objectMapping(trinoType, longTimestampWithTimeZoneReadFunction(), longTimestampWithTimeZoneWriteFunction()); } - private static LongWriteFunction timestampWithTZWriter() + private static LongWriteFunction shortTimestampWithTimeZoneWriteFunction() { return (statement, index, encodedTimeWithZone) -> { Instant timeI = 
Instant.ofEpochMilli(DateTimeEncoding.unpackMillisUtc(encodedTimeWithZone)); @@ -368,7 +404,7 @@ private static LongWriteFunction timestampWithTZWriter() }; } - private static ObjectReadFunction longTimestampWithTimezoneReadFunction() + private static ObjectReadFunction longTimestampWithTimeZoneReadFunction() { return ObjectReadFunction.of(LongTimestampWithTimeZone.class, (resultSet, columnIndex) -> { ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from); @@ -378,7 +414,7 @@ private static ObjectReadFunction longTimestampWithTimezoneReadFunction() }); } - private static ObjectWriteFunction longTimestampWithTZWriteFunction() + private static ObjectWriteFunction longTimestampWithTimeZoneWriteFunction() { return ObjectWriteFunction.of(LongTimestampWithTimeZone.class, (statement, index, value) -> { long epoMilli = value.getEpochMillis(); @@ -392,13 +428,15 @@ private static ObjectWriteFunction longTimestampWithTZWriteFunction() private static ColumnMapping timestampColumnMapping(int precision) { - // <= 6 fits into a long - if (precision <= 6) { - return ColumnMapping.longMapping(TimestampType.createTimestampType(precision), (resultSet, columnIndex) -> StandardColumnMappings.toTrinoTimestamp(TimestampType.createTimestampType(precision), toLocalDateTime(resultSet, columnIndex)), timestampWriteFunction()); + TimestampType trinoType = TimestampType.createTimestampType(precision); + if (precision <= TimestampType.MAX_SHORT_PRECISION) { + return ColumnMapping.longMapping( + trinoType, + (resultSet, columnIndex) -> toTrinoTimestamp(trinoType, toLocalDateTime(resultSet, columnIndex)), + shortTimestampWriteFunction()); } - // Too big. 
Put it in an object - return ColumnMapping.objectMapping(TimestampType.createTimestampType(precision), longTimestampReader(), longTimestampWriter(precision)); + return ColumnMapping.objectMapping(trinoType, longTimestampReader(), longTimestampWriteFunction(precision)); } private static LocalDateTime toLocalDateTime(ResultSet resultSet, int columnIndex) @@ -469,48 +507,17 @@ private static ObjectWriteFunction longTimestampWithTzWriteFunction() }); } - private static WriteMapping snowFlakeDecimalWriter(Type type) - { - DecimalType decimalType = (DecimalType) type; - String dataType = format("decimal(%s, %s)", decimalType.getPrecision(), decimalType.getScale()); - - if (decimalType.isShort()) { - return WriteMapping.longMapping(dataType, StandardColumnMappings.shortDecimalWriteFunction(decimalType)); - } - return WriteMapping.objectMapping(dataType, StandardColumnMappings.longDecimalWriteFunction(decimalType)); - } - private static LongWriteFunction snowFlakeDateWriter() { return (statement, index, day) -> statement.setString(index, SNOWFLAKE_DATE_FORMATTER.format(LocalDate.ofEpochDay(day))); } - private static WriteMapping snowFlakeCharWriter(Type type) - { - CharType charType = (CharType) type; - return WriteMapping.sliceMapping("char(" + charType.getLength() + ")", charWriteFunction(charType)); - } - - private static WriteMapping snowFlakeVarCharWriter(Type type) - { - String dataType; - VarcharType varcharType = (VarcharType) type; - - if (varcharType.isUnbounded()) { - dataType = "varchar"; - } - else { - dataType = "varchar(" + varcharType.getBoundedLength() + ")"; - } - return WriteMapping.sliceMapping(dataType, StandardColumnMappings.varcharWriteFunction()); - } - private static SliceWriteFunction charWriteFunction(CharType charType) { return (statement, index, value) -> statement.setString(index, Chars.padSpaces(value, charType).toStringUtf8()); } - private static WriteMapping snowFlakeTimestampWriter(Type type) + private static WriteMapping 
snowflakeTimestampWriteMapping(Type type) { TimestampType timestampType = (TimestampType) type; checkArgument( @@ -518,35 +525,35 @@ private static WriteMapping snowFlakeTimestampWriter(Type type) "The max timestamp precision in Snowflake is " + MAX_SUPPORTED_TEMPORAL_PRECISION); if (timestampType.isShort()) { - return WriteMapping.longMapping(format("timestamp_ntz(%d)", timestampType.getPrecision()), timestampWriteFunction()); + return WriteMapping.longMapping(format("timestamp_ntz(%d)", timestampType.getPrecision()), shortTimestampWriteFunction()); } - return WriteMapping.objectMapping(format("timestamp_ntz(%d)", timestampType.getPrecision()), longTimestampWriter(timestampType.getPrecision())); + return WriteMapping.objectMapping(format("timestamp_ntz(%d)", timestampType.getPrecision()), longTimestampWriteFunction(timestampType.getPrecision())); } - private static LongWriteFunction timestampWriteFunction() + private static LongWriteFunction shortTimestampWriteFunction() { return (statement, index, value) -> statement.setString(index, StandardColumnMappings.fromTrinoTimestamp(value).toString()); } - private static ObjectWriteFunction longTimestampWriter(int precision) + private static ObjectWriteFunction longTimestampWriteFunction(int precision) { return ObjectWriteFunction.of( LongTimestamp.class, (statement, index, value) -> statement.setString(index, SNOWFLAKE_TIMESTAMP_FORMATTER.format(StandardColumnMappings.fromLongTrinoTimestamp(value, precision)))); } - private static WriteMapping snowFlakeTimestampWithTZWriter(Type type) + private static WriteMapping snowflakeTimestampWithTimeZoneWriteMapping(Type type) { TimestampWithTimeZoneType timeTZType = (TimestampWithTimeZoneType) type; checkArgument(timeTZType.getPrecision() <= MAX_SUPPORTED_TEMPORAL_PRECISION, "Max Snowflake precision is is " + MAX_SUPPORTED_TEMPORAL_PRECISION); if (timeTZType.isShort()) { - return WriteMapping.longMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), 
timestampWithTimezoneWriteFunction()); + return WriteMapping.longMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), timestampWithTimeZoneWriteFunction()); } return WriteMapping.objectMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), longTimestampWithTzWriteFunction()); } - private static LongWriteFunction timestampWithTimezoneWriteFunction() + private static LongWriteFunction timestampWithTimeZoneWriteFunction() { return (statement, index, encodedTimeWithZone) -> { Instant instant = Instant.ofEpochMilli(DateTimeEncoding.unpackMillisUtc(encodedTimeWithZone)); diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java index 52c2161581f0..808e34813919 100644 --- a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java +++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java @@ -69,6 +69,7 @@ public ConnectionFactory getConnectionFactory(BaseJdbcConfig baseJdbcConfig, Sno properties.setProperty("TIMESTAMP_TZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM"); properties.setProperty("TIMESTAMP_LTZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM"); properties.setProperty("TIME_OUTPUT_FORMAT", "HH24:MI:SS.FF9"); + properties.setProperty("JDBC_TREAT_DECIMAL_AS_INT", "FALSE"); // Support for Corporate proxies if (snowflakeConfig.getHttpProxy().isPresent()) { diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java index 16e827958f35..62d4de736b02 100644 --- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java +++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java @@ -142,14 +142,14 @@ protected 
MaterializedResult getDescribeOrdersResult() { // Override this test because the type of row "shippriority" should be bigint rather than integer for snowflake case return resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR) - .row("orderkey", "bigint", "", "") - .row("custkey", "bigint", "", "") + .row("orderkey", "decimal(19,0)", "", "") + .row("custkey", "decimal(19,0)", "", "") .row("orderstatus", "varchar(1)", "", "") .row("totalprice", "double", "", "") .row("orderdate", "date", "", "") .row("orderpriority", "varchar(15)", "", "") .row("clerk", "varchar(15)", "", "") - .row("shippriority", "bigint", "", "") + .row("shippriority", "decimal(10,0)", "", "") .row("comment", "varchar(79)", "", "") .build(); } @@ -161,6 +161,20 @@ public void testShowColumns() assertThat(query("SHOW COLUMNS FROM orders")).result().matches(getDescribeOrdersResult()); } + @Test + @Override + public void testInformationSchemaFiltering() + { + assertQuery( + "SELECT table_name FROM information_schema.tables WHERE table_name = 'orders' LIMIT 1", + "SELECT 'orders' table_name"); + // Overriding this method to change "data_type" from "bigint" to "decimal(19,0)". When creating a Trino BIGINT + // column using Trino Snowflake Connector, a "decimal(19,0)" column is created under the hood. 
+ assertQuery( + "SELECT table_name FROM information_schema.columns WHERE data_type = 'decimal(19,0)' AND table_name = 'nation' and column_name = 'nationkey' LIMIT 1", + "SELECT 'nation' table_name"); + } + @Test public void testViews() { @@ -177,14 +191,14 @@ public void testShowCreateTable() // Override this test because the type of row "shippriority" should be bigint rather than integer for snowflake case assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) .isEqualTo("CREATE TABLE snowflake.tpch.orders (\n" + - " orderkey bigint,\n" + - " custkey bigint,\n" + + " orderkey decimal(19, 0),\n" + + " custkey decimal(19, 0),\n" + " orderstatus varchar(1),\n" + " totalprice double,\n" + " orderdate date,\n" + " orderpriority varchar(15),\n" + " clerk varchar(15),\n" + - " shippriority bigint,\n" + + " shippriority decimal(10, 0),\n" + " comment varchar(79)\n" + ")"); } diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java index ef552a9fa5ff..db9cc182515c 100644 --- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java +++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java @@ -36,13 +36,13 @@ import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Verify.verify; import static io.trino.plugin.snowflake.SnowflakeQueryRunner.createSnowflakeQueryRunner; -import static io.trino.spi.type.BigintType.BIGINT; -import static io.trino.spi.type.BooleanType.BOOLEAN; + import static io.trino.spi.type.BooleanType.BOOLEAN; import static io.trino.spi.type.DateType.DATE; import static io.trino.spi.type.DecimalType.createDecimalType; import static io.trino.spi.type.DoubleType.DOUBLE; import static io.trino.spi.type.TimeZoneKey.getTimeZoneKey; import static 
io.trino.spi.type.TimestampType.createTimestampType; +import static io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType; import static io.trino.spi.type.VarbinaryType.VARBINARY; import static io.trino.spi.type.VarcharType.createVarcharType; import static java.time.ZoneOffset.UTC; @@ -107,10 +107,12 @@ public void testInteger() private void testInteger(String inputType) { SqlDataTypeTest.create() - .addRoundTrip(inputType, "-9223372036854775808", BIGINT, "-9223372036854775808") - .addRoundTrip(inputType, "9223372036854775807", BIGINT, "9223372036854775807") - .addRoundTrip(inputType, "0", BIGINT, "CAST(0 AS BIGINT)") - .addRoundTrip(inputType, "NULL", BIGINT, "CAST(NULL AS BIGINT)") + .addRoundTrip(inputType, "'-9223372036854775808'", createDecimalType(38, 0), "CAST('-9223372036854775808' AS decimal(38, 0))") + .addRoundTrip(inputType, "'9223372036854775807'", createDecimalType(38, 0), "CAST('9223372036854775807' AS decimal(38, 0))") + .addRoundTrip(inputType, "'-99999999999999999999999999999999999999'", createDecimalType(38, 0), "CAST('-99999999999999999999999999999999999999' AS decimal(38, 0))") + .addRoundTrip(inputType, "'99999999999999999999999999999999999999'", createDecimalType(38, 0), "CAST('99999999999999999999999999999999999999' AS decimal(38, 0))") + .addRoundTrip(inputType, "0", createDecimalType(38, 0), "CAST(0 AS decimal(38, 0))") + .addRoundTrip(inputType, "NULL", createDecimalType(38, 0), "CAST(NULL AS decimal(38, 0))") .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.integer")); } @@ -118,10 +120,10 @@ private void testInteger(String inputType) public void testDecimal() { SqlDataTypeTest.create() - .addRoundTrip("decimal(3, 0)", "NULL", BIGINT, "CAST(NULL AS BIGINT)") - .addRoundTrip("decimal(3, 0)", "CAST('193' AS decimal(3, 0))", BIGINT, "CAST('193' AS BIGINT)") - .addRoundTrip("decimal(3, 0)", "CAST('19' AS decimal(3, 0))", BIGINT, "CAST('19' AS BIGINT)") - .addRoundTrip("decimal(3, 0)", "CAST('-193' AS decimal(3, 
0))", BIGINT, "CAST('-193' AS BIGINT)") + .addRoundTrip("decimal(3, 0)", "NULL", createDecimalType(3, 0), "CAST(NULL AS decimal(3, 0))") + .addRoundTrip("decimal(3, 0)", "CAST('193' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('193' AS decimal(3, 0))") + .addRoundTrip("decimal(3, 0)", "CAST('19' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('19' AS decimal(3, 0))") + .addRoundTrip("decimal(3, 0)", "CAST('-193' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('-193' AS decimal(3, 0))") .addRoundTrip("decimal(3, 1)", "CAST('10.0' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))") .addRoundTrip("decimal(3, 1)", "CAST('10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.1' AS decimal(3, 1))") .addRoundTrip("decimal(3, 1)", "CAST('-10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('-10.1' AS decimal(3, 1))") @@ -133,7 +135,7 @@ public void testDecimal() .addRoundTrip("decimal(24, 4)", "CAST('12345678901234567890.31' AS decimal(24, 4))", createDecimalType(24, 4), "CAST('12345678901234567890.31' AS decimal(24, 4))") .addRoundTrip("decimal(30, 5)", "CAST('3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('3141592653589793238462643.38327' AS decimal(30, 5))") .addRoundTrip("decimal(30, 5)", "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))") - .addRoundTrip("decimal(38, 0)", "CAST(NULL AS decimal(38, 0))", BIGINT, "CAST(NULL AS BIGINT)") + .addRoundTrip("decimal(38, 0)", "CAST(NULL AS decimal(38, 0))", createDecimalType(38, 0), "CAST(NULL AS decimal(38, 0))") .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.test_decimal")) .execute(getQueryRunner(), trinoCreateAsSelect("test_decimal")) .execute(getQueryRunner(), trinoCreateAndInsert("test_decimal")); @@ -348,6 +350,34 @@ private void testTimestamp(ZoneId sessionZone) .execute(getQueryRunner(), session, 
trinoCreateAndInsert("test_timestamp")); } + @Test + public void testTimestampWithTimeZone() + { + testTimestampWithTimeZone(UTC); + testTimestampWithTimeZone(jvmZone); + testTimestampWithTimeZone(vilnius); + testTimestampWithTimeZone(kathmandu); + testTimestampWithTimeZone(TestingSession.DEFAULT_TIME_ZONE_KEY.getZoneId()); + } + + private void testTimestampWithTimeZone(ZoneId sessionZone) + { + Session session = Session.builder(getSession()) + .setTimeZoneKey(TimeZoneKey.getTimeZoneKey(sessionZone.getId())) + .build(); + SqlDataTypeTest.create() + .addRoundTrip("TIMESTAMP_TZ(0)", "'2020-09-27 12:34:56 -08:00'::TIMESTAMP_TZ", createTimestampWithTimeZoneType(0), "TIMESTAMP '2020-09-27 12:34:56 -08:00'") + .addRoundTrip("TIMESTAMP_TZ(3)", "'2020-09-27 12:34:56.333 -08:00'::TIMESTAMP_TZ", createTimestampWithTimeZoneType(3), "TIMESTAMP '2020-09-27 12:34:56.333 -08:00'") + .addRoundTrip("TIMESTAMP_TZ(9)", "'2020-09-27 12:34:56.999999999 -08:00'::TIMESTAMP_TZ", createTimestampWithTimeZoneType(9), "TIMESTAMP '2020-09-27 12:34:56.999999999 -08:00'") + .execute(getQueryRunner(), session, snowflakeCreateAndInsert("tpch.test_timestamptz_sf_insert")); + SqlDataTypeTest.create() + .addRoundTrip("TIMESTAMP(0) WITH TIME ZONE", "TIMESTAMP '2020-09-27 12:34:56 -08:00'", createTimestampWithTimeZoneType(0), "TIMESTAMP '2020-09-27 12:34:56 -08:00'") + .addRoundTrip("TIMESTAMP(3) WITH TIME ZONE", "TIMESTAMP '2020-09-27 12:34:56.333 -08:00'", createTimestampWithTimeZoneType(3), "TIMESTAMP '2020-09-27 12:34:56.333 -08:00'") + .addRoundTrip("TIMESTAMP(9) WITH TIME ZONE", "TIMESTAMP '2020-09-27 12:34:56.999999999 -08:00'", createTimestampWithTimeZoneType(9), "TIMESTAMP '2020-09-27 12:34:56.999999999 -08:00'") + .execute(getQueryRunner(), session, trinoCreateAsSelect("test_timestamptz_trino_insert")) + .execute(getQueryRunner(), session, trinoCreateAndInsert("test_timestamptz_trino_insert")); + } + private DataSetup trinoCreateAsSelect(String tableNamePrefix) { return 
trinoCreateAsSelect(getSession(), tableNamePrefix); diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java index 5c3b99d5f51f..3bbcb17b6e75 100644 --- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java +++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java @@ -60,6 +60,12 @@ private static Properties getProperties() properties.setProperty("schema", TEST_SCHEMA); properties.setProperty("warehouse", TEST_WAREHOUSE); properties.setProperty("role", TEST_ROLE); + properties.setProperty("TIMESTAMP_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM"); + properties.setProperty("TIMESTAMP_NTZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM"); + properties.setProperty("TIMESTAMP_TZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM"); + properties.setProperty("TIMESTAMP_LTZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM"); + properties.setProperty("TIME_OUTPUT_FORMAT", "HH24:MI:SS.FF9"); + properties.setProperty("JDBC_TREAT_DECIMAL_AS_INT", "FALSE"); return properties; } }