diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index c2283c9c4b543..bc0df89d98b44 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -614,6 +614,7 @@ jobs:
- { modules: plugin/trino-resource-group-managers }
- { modules: plugin/trino-singlestore }
- { modules: plugin/trino-snowflake }
+ - { modules: plugin/trino-snowflake, profile: cloud-tests }
- { modules: plugin/trino-sqlserver }
- { modules: testing/trino-faulttolerant-tests, profile: default }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta }
@@ -781,6 +782,24 @@ jobs:
if: matrix.modules == 'plugin/trino-bigquery' && !contains(matrix.profile, 'cloud-tests-arrow-and-fte') && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY != '')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY}"
+ - name: Cloud Snowflake Tests
+ env:
+ SNOWFLAKE_TEST_SERVER_URL: ${{ secrets.SNOWFLAKE_TEST_SERVER_URL }}
+ SNOWFLAKE_TEST_SERVER_USER: ${{ secrets.SNOWFLAKE_TEST_SERVER_USER }}
+ SNOWFLAKE_TEST_SERVER_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_SERVER_PASSWORD }}
+ SNOWFLAKE_TEST_SERVER_DATABASE: ${{ secrets.SNOWFLAKE_TEST_SERVER_DATABASE }}
+ SNOWFLAKE_TEST_SERVER_ROLE: ${{ secrets.SNOWFLAKE_TEST_SERVER_ROLE }}
+ SNOWFLAKE_TEST_SERVER_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_SERVER_WAREHOUSE }}
+ if: matrix.modules == 'plugin/trino-snowflake' && contains(matrix.profile, 'cloud-tests') && (env.SNOWFLAKE_TEST_SERVER_URL != '' && env.SNOWFLAKE_TEST_SERVER_USER != '' && env.SNOWFLAKE_TEST_SERVER_PASSWORD != '')
+ run: |
+ $MAVEN test ${MAVEN_TEST} -pl :trino-snowflake -Pcloud-tests \
+ -Dconnector.name="snowflake" \
+ -Dsnowflake.test.server.url="${SNOWFLAKE_TEST_SERVER_URL}" \
+ -Dsnowflake.test.server.user="${SNOWFLAKE_TEST_SERVER_USER}" \
+ -Dsnowflake.test.server.password="${SNOWFLAKE_TEST_SERVER_PASSWORD}" \
+ -Dsnowflake.test.server.database="${SNOWFLAKE_TEST_SERVER_DATABASE}" \
+ -Dsnowflake.test.server.role="${SNOWFLAKE_TEST_SERVER_ROLE}" \
+ -Dsnowflake.test.server.warehouse="${SNOWFLAKE_TEST_SERVER_WAREHOUSE}"
- name: Iceberg Cloud Tests
env:
AWS_ACCESS_KEY_ID: ${{ secrets.TRINO_AWS_ACCESS_KEY_ID }}
@@ -976,6 +995,7 @@ jobs:
- suite-clickhouse
- suite-mysql
- suite-iceberg
+ - suite-snowflake
- suite-hudi
- suite-ignite
exclude:
diff --git a/plugin/trino-snowflake/pom.xml b/plugin/trino-snowflake/pom.xml
index 2a61433fe13b5..33f14d2a93990 100644
--- a/plugin/trino-snowflake/pom.xml
+++ b/plugin/trino-snowflake/pom.xml
@@ -15,6 +15,7 @@
<air.main.basedir>${project.parent.basedir}</air.main.basedir>
+ <air.test.jvm.additional-arguments>--add-opens=java.base/java.nio=ALL-UNNAMED</air.test.jvm.additional-arguments>
@@ -185,18 +186,6 @@
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <configuration>
- <argLine>--add-opens=java.base/java.nio=ALL-UNNAMED</argLine>
- </configuration>
- </plugin>
- </plugins>
- </build>
-
<id>default</id>
@@ -210,8 +199,6 @@
<artifactId>maven-surefire-plugin</artifactId>
- <exclude>**/TestSnowflakeClient.java</exclude>
- <exclude>**/TestSnowflakeConfig.java</exclude>
<exclude>**/TestSnowflakeConnectorTest.java</exclude>
<exclude>**/TestSnowflakePlugin.java</exclude>
<exclude>**/TestSnowflakeTypeMapping.java</exclude>
@@ -225,6 +212,9 @@
<id>cloud-tests</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
@@ -232,11 +222,11 @@
<artifactId>maven-surefire-plugin</artifactId>
- <exclude>**/TestSnowflakeClient.java</exclude>
- <exclude>**/TestSnowflakeConfig.java</exclude>
- <exclude>**/TestSnowflakeConnectorTest.java</exclude>
- <exclude>**/TestSnowflakePlugin.java</exclude>
- <exclude>**/TestSnowflakeTypeMapping.java</exclude>
+ <include>**/TestSnowflakeClient.java</include>
+ <include>**/TestSnowflakeConfig.java</include>
+ <include>**/TestSnowflakeConnectorTest.java</include>
+ <include>**/TestSnowflakePlugin.java</include>
+ <include>**/TestSnowflakeTypeMapping.java</include>
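Note on the pom change above: the `--add-opens=java.base/java.nio=ALL-UNNAMED` flag moves from a per-plugin surefire `<argLine>` to the shared `air.test.jvm.additional-arguments` property. The flag is presumably needed because the Snowflake JDBC driver's Arrow-based result sets reflectively access `java.nio` internals, which the module system blocks on JDK 17+. A minimal standalone probe (illustrative only, not part of this change; the class name is hypothetical) reproduces the failure mode:

import java.lang.reflect.Field;
import java.nio.Buffer;
import java.nio.ByteBuffer;

public class AddOpensProbe
{
    public static void main(String[] args)
            throws Exception
    {
        ByteBuffer buffer = ByteBuffer.allocateDirect(16);
        // Arrow-style access to the direct buffer's native address field; without
        // --add-opens=java.base/java.nio=ALL-UNNAMED, setAccessible throws
        // InaccessibleObjectException on JDK 9+ because java.base does not open java.nio
        Field address = Buffer.class.getDeclaredField("address");
        address.setAccessible(true);
        System.out.println("direct buffer address: " + address.getLong(buffer));
    }
}

Running this with and without the flag shows the printed address versus the exception, which is the difference the surefire/property configuration papers over.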
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
index 56d41e6d2464a..c68e786e2efeb 100644
--- a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
@@ -59,15 +59,12 @@
import io.trino.spi.type.DecimalType;
import io.trino.spi.type.LongTimestamp;
import io.trino.spi.type.LongTimestampWithTimeZone;
-import io.trino.spi.type.StandardTypes;
import io.trino.spi.type.TimeType;
import io.trino.spi.type.TimeZoneKey;
import io.trino.spi.type.TimestampType;
import io.trino.spi.type.TimestampWithTimeZoneType;
import io.trino.spi.type.Timestamps;
import io.trino.spi.type.Type;
-import io.trino.spi.type.TypeManager;
-import io.trino.spi.type.TypeSignature;
import io.trino.spi.type.VarcharType;
import java.math.RoundingMode;
@@ -75,7 +72,6 @@
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
-import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.Instant;
@@ -97,11 +93,16 @@
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
-import static io.airlift.slice.Slices.utf8Slice;
-import static io.trino.plugin.base.util.JsonTypeUtil.jsonParse;
import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
-import static io.trino.plugin.jdbc.PredicatePushdownController.DISABLE_PUSHDOWN;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType;
+import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
+import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
+import static io.trino.spi.type.VarcharType.createUnboundedVarcharType;
+import static io.trino.spi.type.VarcharType.createVarcharType;
+import static java.lang.String.format;
public class SnowflakeClient
extends BaseJdbcClient
@@ -115,8 +116,7 @@ public class SnowflakeClient
private static final DateTimeFormatter SNOWFLAKE_DATE_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd");
private static final DateTimeFormatter SNOWFLAKE_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("y-MM-dd'T'HH:mm:ss.SSSSSSSSS");
private static final DateTimeFormatter SNOWFLAKE_TIME_FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSSSSS");
- private final Type jsonType;
- private final AggregateFunctionRewriter aggregateFunctionRewriter;
+ private final AggregateFunctionRewriter<JdbcExpression, ?> aggregateFunctionRewriter;
private interface WriteMappingFunction
{
@@ -145,47 +145,30 @@ private interface ColumnMappingFunction
.buildOrThrow();
private static final Map<String, ColumnMappingFunction> SHOWFLAKE_COLUMN_MAPPINGS = ImmutableMap.<String, ColumnMappingFunction>builder()
- .put("time", typeHandle -> {
- //return Optional.of(columnMappingPushdown(timeColumnMapping(typeHandle)));
- return Optional.of(timeColumnMapping(typeHandle));
- })
- .put("timestampntz", typeHandle -> {
- return Optional.of(timestampColumnMapping(typeHandle));
- })
- .put("timestamptz", typeHandle -> {
- return Optional.of(timestampTZColumnMapping(typeHandle));
- })
- .put("timestampltz", typeHandle -> {
- return Optional.of(timestampTZColumnMapping(typeHandle));
- })
- .put("date", typeHandle -> {
- return Optional.of(ColumnMapping.longMapping(
- DateType.DATE, (resultSet, columnIndex) ->
- LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(),
- snowFlakeDateWriter()));
- })
- .put("object", typeHandle -> {
- return Optional.of(ColumnMapping.sliceMapping(
- VarcharType.createUnboundedVarcharType(),
- StandardColumnMappings.varcharReadFunction(VarcharType.createUnboundedVarcharType()),
- StandardColumnMappings.varcharWriteFunction(),
- PredicatePushdownController.DISABLE_PUSHDOWN));
- })
- .put("array", typeHandle -> {
- return Optional.of(ColumnMapping.sliceMapping(
- VarcharType.createUnboundedVarcharType(),
- StandardColumnMappings.varcharReadFunction(VarcharType.createUnboundedVarcharType()),
- StandardColumnMappings.varcharWriteFunction(),
- PredicatePushdownController.DISABLE_PUSHDOWN));
- })
- .put("variant", typeHandle -> {
- return Optional.of(ColumnMapping.sliceMapping(
- VarcharType.createUnboundedVarcharType(), variantReadFunction(), StandardColumnMappings.varcharWriteFunction(),
- PredicatePushdownController.FULL_PUSHDOWN));
- })
- .put("varchar", typeHandle -> {
- return Optional.of(varcharColumnMapping(typeHandle.getRequiredColumnSize()));
- })
+ .put("time", typeHandle -> Optional.of(timeColumnMapping(typeHandle)))
+ .put("timestampntz", typeHandle -> Optional.of(timestampColumnMapping(typeHandle)))
+ .put("timestamptz", typeHandle -> Optional.of(timestampTzColumnMapping(typeHandle)))
+ .put("timestampltz", typeHandle -> Optional.of(timestampTzColumnMapping(typeHandle)))
+ .put("date", typeHandle -> Optional.of(ColumnMapping.longMapping(
+ DateType.DATE,
+ (resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(),
+ snowFlakeDateWriter())))
+ .put("object", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
+ createUnboundedVarcharType(),
+ StandardColumnMappings.varcharReadFunction(createUnboundedVarcharType()),
+ StandardColumnMappings.varcharWriteFunction(),
+ PredicatePushdownController.DISABLE_PUSHDOWN)))
+ .put("array", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
+ createUnboundedVarcharType(),
+ StandardColumnMappings.varcharReadFunction(createUnboundedVarcharType()),
+ StandardColumnMappings.varcharWriteFunction(),
+ PredicatePushdownController.DISABLE_PUSHDOWN)))
+ .put("variant", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
+ createUnboundedVarcharType(),
+ variantReadFunction(),
+ StandardColumnMappings.varcharWriteFunction(),
+ PredicatePushdownController.FULL_PUSHDOWN)))
+ .put("varchar", typeHandle -> Optional.of(varcharColumnMapping(typeHandle.getRequiredColumnSize())))
.put("number", typeHandle -> {
int decimalDigits = typeHandle.getRequiredDecimalDigits();
int precision = typeHandle.getRequiredColumnSize() + Math.max(-decimalDigits, 0);
@@ -193,8 +176,7 @@ private interface ColumnMappingFunction
return Optional.empty();
}
return Optional.of(columnMappingPushdown(
- StandardColumnMappings.decimalColumnMapping(DecimalType.createDecimalType(
- precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
+ StandardColumnMappings.decimalColumnMapping(createDecimalType(precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
})
.buildOrThrow();
@@ -212,50 +194,26 @@ private interface ColumnMappingFunction
.buildOrThrow();
private static final Map<String, WriteMappingFunction> SNOWFLAKE_WRITE_MAPPINGS = ImmutableMap.<String, WriteMappingFunction>builder()
- .put("TimeType", type -> {
- return WriteMapping.longMapping("time", SnowflakeClient.snowFlaketimeWriter(type));
- })
- .put("ShortTimestampType", type -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWriter(type);
- return myMap;
- })
- .put("ShortTimestampWithTimeZoneType", type -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWithTZWriter(type);
- return myMap;
- })
- .put("LongTimestampType", type -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWithTZWriter(type);
- return myMap;
- })
- .put("LongTimestampWithTimeZoneType", type -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWithTZWriter(type);
- return myMap;
- })
- .put("VarcharType", type -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeVarCharWriter(type);
- return myMap;
- })
- .put("CharType", type -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeCharWriter(type);
- return myMap;
- })
- .put("LongDecimalType", type -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeDecimalWriter(type);
- return myMap;
- })
- .put("ShortDecimalType", type -> {
- WriteMapping myMap = SnowflakeClient.snowFlakeDecimalWriter(type);
- return myMap;
- })
+ .put("TimeType", type -> WriteMapping.longMapping("time", SnowflakeClient.snowFlaketimeWriter(type)))
+ .put("ShortTimestampType", SnowflakeClient::snowFlakeTimestampWriter)
+ .put("ShortTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
+ .put("LongTimestampType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
+ .put("LongTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
+ .put("VarcharType", SnowflakeClient::snowFlakeVarCharWriter)
+ .put("CharType", SnowflakeClient::snowFlakeCharWriter)
+ .put("LongDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
+ .put("ShortDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
.buildOrThrow();
@Inject
- public SnowflakeClient(BaseJdbcConfig config, ConnectionFactory connectionFactory, QueryBuilder queryBuilder,
- TypeManager typeManager, IdentifierMapping identifierMapping,
- RemoteQueryModifier remoteQueryModifier)
+ public SnowflakeClient(
+ BaseJdbcConfig config,
+ ConnectionFactory connectionFactory,
+ QueryBuilder queryBuilder,
+ IdentifierMapping identifierMapping,
+ RemoteQueryModifier remoteQueryModifier)
{
super("\"", connectionFactory, queryBuilder, config.getJdbcTypesMappedToVarchar(), identifierMapping, remoteQueryModifier, false);
- this.jsonType = typeManager.getType(new TypeSignature(StandardTypes.JSON));
JdbcTypeHandle bigintTypeHandle = new JdbcTypeHandle(Types.BIGINT, Optional.of("bigint"), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty());
ConnectorExpressionRewriter connectorExpressionRewriter = JdbcConnectorExpressionRewriterBuilder.newBuilder()
@@ -286,44 +244,137 @@ public void abortReadConnection(Connection connection, ResultSet resultSet)
@Override
public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
{
+ Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
+ if (mapping.isPresent()) {
+ return mapping;
+ }
+
String jdbcTypeName = typeHandle.getJdbcTypeName()
.orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
jdbcTypeName = jdbcTypeName.toLowerCase(Locale.ENGLISH);
int type = typeHandle.getJdbcType();
- ColumnMapping columnMap = STANDARD_COLUMN_MAPPINGS.get(type);
+ // Mappings for JDBC column types to internal Trino types
+ final Map<Integer, ColumnMapping> standardColumnMappings = ImmutableMap.<Integer, ColumnMapping>builder()
+ .put(Types.BOOLEAN, StandardColumnMappings.booleanColumnMapping())
+ .put(Types.TINYINT, StandardColumnMappings.tinyintColumnMapping())
+ .put(Types.SMALLINT, StandardColumnMappings.smallintColumnMapping())
+ .put(Types.INTEGER, StandardColumnMappings.integerColumnMapping())
+ .put(Types.BIGINT, StandardColumnMappings.bigintColumnMapping())
+ .put(Types.REAL, StandardColumnMappings.realColumnMapping())
+ .put(Types.DOUBLE, StandardColumnMappings.doubleColumnMapping())
+ .put(Types.FLOAT, StandardColumnMappings.doubleColumnMapping())
+ .put(Types.BINARY, StandardColumnMappings.varbinaryColumnMapping())
+ .put(Types.VARBINARY, StandardColumnMappings.varbinaryColumnMapping())
+ .put(Types.LONGVARBINARY, StandardColumnMappings.varbinaryColumnMapping())
+ .buildOrThrow();
+
+ ColumnMapping columnMap = standardColumnMappings.get(type);
if (columnMap != null) {
return Optional.of(columnMap);
}
- ColumnMappingFunction columnMappingFunction = SHOWFLAKE_COLUMN_MAPPINGS.get(jdbcTypeName);
+ final Map<String, ColumnMappingFunction> snowflakeColumnMappings = ImmutableMap.<String, ColumnMappingFunction>builder()
+ .put("time", handle -> Optional.of(timeColumnMapping(handle)))
+ .put("date", handle -> Optional.of(ColumnMapping.longMapping(
+ DateType.DATE,
+ (resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(),
+ snowFlakeDateWriter())))
+ .put("varchar", handle -> Optional.of(varcharColumnMapping(handle.getRequiredColumnSize())))
+ .put("number", handle -> {
+ int decimalDigits = handle.getRequiredDecimalDigits();
+ int precision = handle.getRequiredColumnSize() + Math.max(-decimalDigits, 0);
+ if (precision > 38) {
+ return Optional.empty();
+ }
+ return Optional.of(columnMappingPushdown(
+ StandardColumnMappings.decimalColumnMapping(createDecimalType(precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
+ })
+ .buildOrThrow();
+
+ ColumnMappingFunction columnMappingFunction = snowflakeColumnMappings.get(jdbcTypeName);
if (columnMappingFunction != null) {
return columnMappingFunction.convert(typeHandle);
}
// Code should never reach here so throw an error.
- throw new TrinoException(NOT_SUPPORTED, "SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type(" + type +
- "):" + jdbcTypeName);
+ throw new TrinoException(NOT_SUPPORTED, "SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type(" + type + "):" + jdbcTypeName);
}
@Override
public WriteMapping toWriteMapping(ConnectorSession session, Type type)
{
- Class myClass = type.getClass();
+ Class<?> myClass = type.getClass();
String simple = myClass.getSimpleName();
- WriteMapping writeMapping = STANDARD_WRITE_MAPPINGS.get(simple);
+ // Mappings for internal Trino types to JDBC column types
+ final Map<String, WriteMapping> standardWriteMappings = ImmutableMap.<String, WriteMapping>builder()
+ .put("BooleanType", WriteMapping.booleanMapping("boolean", StandardColumnMappings.booleanWriteFunction()))
+ .put("BigintType", WriteMapping.longMapping("number(19)", StandardColumnMappings.bigintWriteFunction()))
+ .put("IntegerType", WriteMapping.longMapping("number(10)", StandardColumnMappings.integerWriteFunction()))
+ .put("SmallintType", WriteMapping.longMapping("number(5)", StandardColumnMappings.smallintWriteFunction()))
+ .put("TinyintType", WriteMapping.longMapping("number(3)", StandardColumnMappings.tinyintWriteFunction()))
+ .put("DoubleType", WriteMapping.doubleMapping("double precision", StandardColumnMappings.doubleWriteFunction()))
+ .put("RealType", WriteMapping.longMapping("real", StandardColumnMappings.realWriteFunction()))
+ .put("VarbinaryType", WriteMapping.sliceMapping("varbinary", StandardColumnMappings.varbinaryWriteFunction()))
+ .put("DateType", WriteMapping.longMapping("date", snowFlakeDateWriter()))
+ .buildOrThrow();
+
+ WriteMapping writeMapping = standardWriteMappings.get(simple);
if (writeMapping != null) {
return writeMapping;
}
- WriteMappingFunction writeMappingFunction = SNOWFLAKE_WRITE_MAPPINGS.get(simple);
+ final Map<String, WriteMappingFunction> snowflakeWriteMappings = ImmutableMap.<String, WriteMappingFunction>builder()
+ .put("TimeType", writeType -> WriteMapping.longMapping("time", SnowflakeClient.snowFlaketimeWriter(writeType)))
+ .put("ShortTimestampType", SnowflakeClient::snowFlakeTimestampWriter)
+ .put("ShortTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
+ .put("LongTimestampType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
+ .put("LongTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
+ .put("VarcharType", SnowflakeClient::snowFlakeVarCharWriter)
+ .put("CharType", SnowflakeClient::snowFlakeCharWriter)
+ .put("LongDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
+ .put("ShortDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
+ .buildOrThrow();
+
+ WriteMappingFunction writeMappingFunction = snowflakeWriteMappings.get(simple);
if (writeMappingFunction != null) {
return writeMappingFunction.convert(type);
}
- log.debug("SnowflakeClient.toWriteMapping: SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type: " + type.getDisplayName() + ", simple:" + simple);
-
throw new TrinoException(NOT_SUPPORTED, "SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type: " + type.getDisplayName() + ", simple:" + simple);
}
@@ -351,15 +402,6 @@ public boolean isLimitGuaranteed(ConnectorSession session)
return true;
}
- private ColumnMapping jsonColumnMapping()
- {
- return ColumnMapping.sliceMapping(
- jsonType,
- (resultSet, columnIndex) -> jsonParse(utf8Slice(resultSet.getString(columnIndex))),
- StandardColumnMappings.varcharWriteFunction(),
- DISABLE_PUSHDOWN);
- }
-
@Override
public void setColumnType(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column, Type type)
{
@@ -374,26 +416,21 @@ private static SliceReadFunction variantReadFunction()
private static ColumnMapping columnMappingPushdown(ColumnMapping mapping)
{
if (mapping.getPredicatePushdownController() == PredicatePushdownController.DISABLE_PUSHDOWN) {
- log.debug("SnowflakeClient.columnMappingPushdown: NOT_SUPPORTED mapping.getPredicatePushdownController() is DISABLE_PUSHDOWN. Type was " + mapping.getType());
throw new TrinoException(NOT_SUPPORTED, "mapping.getPredicatePushdownController() is DISABLE_PUSHDOWN. Type was " + mapping.getType());
}
- return new ColumnMapping(mapping.getType(), mapping.getReadFunction(), mapping.getWriteFunction(),
- PredicatePushdownController.FULL_PUSHDOWN);
+ return new ColumnMapping(mapping.getType(), mapping.getReadFunction(), mapping.getWriteFunction(), PredicatePushdownController.FULL_PUSHDOWN);
}
private static ColumnMapping timeColumnMapping(JdbcTypeHandle typeHandle)
{
int precision = typeHandle.getRequiredDecimalDigits();
- checkArgument((precision <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION),
- "The max timestamp precision in Snowflake is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
+ checkArgument(precision <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION, "The max timestamp precision in Snowflake is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
return ColumnMapping.longMapping(
TimeType.createTimeType(precision),
(resultSet, columnIndex) -> {
LocalTime time = SNOWFLAKE_TIME_FORMATTER.parse(resultSet.getString(columnIndex), LocalTime::from);
- long nanosOfDay = time.toNanoOfDay();
- long picosOfDay = nanosOfDay * Timestamps.PICOSECONDS_PER_NANOSECOND;
- return Timestamps.round(picosOfDay, 12 - precision);
+ return Timestamps.round(time.toNanoOfDay() * PICOSECONDS_PER_NANOSECOND, 12 - precision);
},
timeWriteFunction(precision),
PredicatePushdownController.FULL_PUSHDOWN);
@@ -401,15 +438,13 @@ private static ColumnMapping timeColumnMapping(JdbcTypeHandle typeHandle)
private static LongWriteFunction snowFlaketimeWriter(Type type)
{
- TimeType timeType = (TimeType) type;
- int precision = timeType.getPrecision();
- return timeWriteFunction(precision);
+ return timeWriteFunction(((TimeType) type).getPrecision());
}
private static LongWriteFunction timeWriteFunction(int precision)
{
checkArgument(precision <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION, "Unsupported precision: %s", precision);
- String bindExpression = String.format("CAST(? AS time(%s))", precision);
+ String bindExpression = format("CAST(? AS time(%s))", precision);
return new LongWriteFunction()
{
@Override
@@ -426,19 +461,14 @@ public void set(PreparedStatement statement, int index, long picosOfDay)
if (picosOfDay == Timestamps.PICOSECONDS_PER_DAY) {
picosOfDay = 0;
}
- LocalTime localTime = LocalTime.ofNanoOfDay(picosOfDay / Timestamps.PICOSECONDS_PER_NANOSECOND);
+ LocalTime localTime = LocalTime.ofNanoOfDay(picosOfDay / PICOSECONDS_PER_NANOSECOND);
// statement.setObject(.., localTime) would yield incorrect end result for 23:59:59.999000
statement.setString(index, SNOWFLAKE_TIME_FORMATTER.format(localTime));
}
};
}
- private static long toTrinoTime(Time sqlTime)
- {
- return Timestamps.PICOSECONDS_PER_SECOND * sqlTime.getTime();
- }
-
- private static ColumnMapping timestampTZColumnMapping(JdbcTypeHandle typeHandle)
+ private static ColumnMapping timestampTzColumnMapping(JdbcTypeHandle typeHandle)
{
int precision = typeHandle.getRequiredDecimalDigits();
String jdbcTypeName = typeHandle.getJdbcTypeName()
@@ -447,23 +477,23 @@ private static ColumnMapping timestampTZColumnMapping(JdbcTypeHandle typeHandle)
log.debug("timestampTZColumnMapping: jdbcTypeName(%s):%s precision:%s", type, jdbcTypeName, precision);
if (precision <= 3) {
- return ColumnMapping.longMapping(TimestampWithTimeZoneType.createTimestampWithTimeZoneType(precision),
+ return ColumnMapping.longMapping(
+ createTimestampWithTimeZoneType(precision),
(resultSet, columnIndex) -> {
- ZonedDateTime timestamp = (ZonedDateTime) SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
+ ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
return DateTimeEncoding.packDateTimeWithZone(timestamp.toInstant().toEpochMilli(), timestamp.getZone().getId());
},
- timestampWithTZWriter(), PredicatePushdownController.FULL_PUSHDOWN);
+ timestampWithTZWriter(),
+ PredicatePushdownController.FULL_PUSHDOWN);
}
else {
- return ColumnMapping.objectMapping(TimestampWithTimeZoneType.createTimestampWithTimeZoneType(precision), longTimestampWithTimezoneReadFunction(), longTimestampWithTZWriteFunction());
+ return ColumnMapping.objectMapping(createTimestampWithTimeZoneType(precision), longTimestampWithTimezoneReadFunction(), longTimestampWithTzWriteFunction());
}
}
private static ColumnMapping varcharColumnMapping(int varcharLength)
{
- VarcharType varcharType = varcharLength <= VarcharType.MAX_LENGTH
- ? VarcharType.createVarcharType(varcharLength)
- : VarcharType.createUnboundedVarcharType();
+ VarcharType varcharType = varcharLength <= VarcharType.MAX_LENGTH ? createVarcharType(varcharLength) : createUnboundedVarcharType();
return ColumnMapping.sliceMapping(
varcharType,
StandardColumnMappings.varcharReadFunction(varcharType),
@@ -473,41 +503,30 @@ private static ColumnMapping varcharColumnMapping(int varcharLength)
private static ObjectReadFunction longTimestampWithTimezoneReadFunction()
{
return ObjectReadFunction.of(LongTimestampWithTimeZone.class, (resultSet, columnIndex) -> {
- ZonedDateTime timestamp = (ZonedDateTime) SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
- return LongTimestampWithTimeZone.fromEpochSecondsAndFraction(timestamp.toEpochSecond(),
- (long) timestamp.getNano() * Timestamps.PICOSECONDS_PER_NANOSECOND,
+ ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
+ return LongTimestampWithTimeZone.fromEpochSecondsAndFraction(
+ timestamp.toEpochSecond(),
+ (long) timestamp.getNano() * PICOSECONDS_PER_NANOSECOND,
TimeZoneKey.getTimeZoneKey(timestamp.getZone().getId()));
});
}
- private static ObjectWriteFunction longTimestampWithTZWriteFunction()
+ private static ObjectWriteFunction longTimestampWithTzWriteFunction()
{
return ObjectWriteFunction.of(LongTimestampWithTimeZone.class, (statement, index, value) -> {
- long epoMilli = value.getEpochMillis();
- long epoSeconds = Math.floorDiv(epoMilli, Timestamps.MILLISECONDS_PER_SECOND);
- long adjNano = Math.floorMod(epoMilli, Timestamps.MILLISECONDS_PER_SECOND) *
- Timestamps.NANOSECONDS_PER_MILLISECOND + value.getPicosOfMilli() / Timestamps.PICOSECONDS_PER_NANOSECOND;
+ long epochMilli = value.getEpochMillis();
+ long epochSecond = Math.floorDiv(epochMilli, MILLISECONDS_PER_SECOND);
+ int nanosOfSecond = Math.floorMod(epochMilli, MILLISECONDS_PER_SECOND) * NANOSECONDS_PER_MILLISECOND + value.getPicosOfMilli() / PICOSECONDS_PER_NANOSECOND;
ZoneId zone = TimeZoneKey.getTimeZoneKey(value.getTimeZoneKey()).getZoneId();
- Instant timeI = Instant.ofEpochSecond(epoSeconds, adjNano);
- statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(ZonedDateTime.ofInstant(timeI, zone)));
+ Instant instant = Instant.ofEpochSecond(epochSecond, nanosOfSecond);
+ statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(ZonedDateTime.ofInstant(instant, zone)));
});
}
- private static LongWriteFunction snowFlakeDateTimeWriter()
- {
- return (statement, index, encodedTimeWithZone) -> {
- Instant time = Instant.ofEpochMilli(DateTimeEncoding.unpackMillisUtc(encodedTimeWithZone));
- ZoneId zone = ZoneId.of(DateTimeEncoding.unpackZoneKey(encodedTimeWithZone).getId());
- statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(time.atZone(zone)));
- };
- }
-
private static WriteMapping snowFlakeDecimalWriter(Type type)
{
DecimalType decimalType = (DecimalType) type;
- String dataType = String.format("decimal(%s, %s)", new Object[] {
- Integer.valueOf(decimalType.getPrecision()), Integer.valueOf(decimalType.getScale())
- });
+ String dataType = format("decimal(%s, %s)", decimalType.getPrecision(), decimalType.getScale());
if (decimalType.isShort()) {
return WriteMapping.longMapping(dataType, StandardColumnMappings.shortDecimalWriteFunction(decimalType));
@@ -523,8 +542,7 @@ private static LongWriteFunction snowFlakeDateWriter()
private static WriteMapping snowFlakeCharWriter(Type type)
{
CharType charType = (CharType) type;
- return WriteMapping.sliceMapping("char(" + charType.getLength() + ")",
- charWriteFunction(charType));
+ return WriteMapping.sliceMapping("char(" + charType.getLength() + ")", charWriteFunction(charType));
}
private static WriteMapping snowFlakeVarCharWriter(Type type)
@@ -549,55 +567,45 @@ private static SliceWriteFunction charWriteFunction(CharType charType)
private static WriteMapping snowFlakeTimestampWriter(Type type)
{
TimestampType timestampType = (TimestampType) type;
- checkArgument((timestampType.getPrecision() <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION),
+ checkArgument(
+ timestampType.getPrecision() <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION,
"The max timestamp precision in Snowflake is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
if (timestampType.isShort()) {
- return WriteMapping.longMapping(
- String.format("timestamp_ntz(%d)", new Object[] {Integer.valueOf(timestampType.getPrecision()) }),
- timestampWriteFunction());
+ return WriteMapping.longMapping(format("timestamp_ntz(%d)", timestampType.getPrecision()), timestampWriteFunction());
}
- return WriteMapping.objectMapping(
- String.format("timestamp_ntz(%d)", new Object[] {Integer.valueOf(timestampType.getPrecision()) }),
- longTimestampWriter(timestampType.getPrecision()));
+ return WriteMapping.objectMapping(format("timestamp_ntz(%d)", timestampType.getPrecision()), longTimestampWriter(timestampType.getPrecision()));
}
private static LongWriteFunction timestampWriteFunction()
{
- return (statement, index, value) -> statement.setString(index,
- StandardColumnMappings.fromTrinoTimestamp(value).toString());
+ return (statement, index, value) -> statement.setString(index, StandardColumnMappings.fromTrinoTimestamp(value).toString());
}
private static ObjectWriteFunction longTimestampWriter(int precision)
{
- return ObjectWriteFunction.of(LongTimestamp.class,
- (statement, index, value) -> statement.setString(index,
- SNOWFLAKE_TIMESTAMP_FORMATTER.format(StandardColumnMappings.fromLongTrinoTimestamp(value,
- precision))));
+ return ObjectWriteFunction.of(
+ LongTimestamp.class,
+ (statement, index, value) -> statement.setString(index, SNOWFLAKE_TIMESTAMP_FORMATTER.format(StandardColumnMappings.fromLongTrinoTimestamp(value, precision))));
}
private static WriteMapping snowFlakeTimestampWithTZWriter(Type type)
{
TimestampWithTimeZoneType timeTZType = (TimestampWithTimeZoneType) type;
- checkArgument((timeTZType.getPrecision() <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION),
- "Max Snowflake precision is is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
+ checkArgument(timeTZType.getPrecision() <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION, "Max Snowflake precision is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
if (timeTZType.isShort()) {
- return WriteMapping.longMapping(String.format("timestamp_tz(%d)",
- new Object[] {Integer.valueOf(timeTZType.getPrecision()) }),
- timestampWithTZWriter());
+ return WriteMapping.longMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), timestampWithTZWriter());
}
- return WriteMapping.objectMapping(
- String.format("timestamp_tz(%d)", new Object[] {Integer.valueOf(timeTZType.getPrecision()) }),
- longTimestampWithTZWriteFunction());
+ return WriteMapping.objectMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), longTimestampWithTzWriteFunction());
}
private static LongWriteFunction timestampWithTZWriter()
{
return (statement, index, encodedTimeWithZone) -> {
- Instant timeI = Instant.ofEpochMilli(DateTimeEncoding.unpackMillisUtc(encodedTimeWithZone));
+ Instant instant = Instant.ofEpochMilli(DateTimeEncoding.unpackMillisUtc(encodedTimeWithZone));
ZoneId zone = ZoneId.of(DateTimeEncoding.unpackZoneKey(encodedTimeWithZone).getId());
- statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(timeI.atZone(zone)));
+ statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(instant.atZone(zone)));
};
}
@@ -609,7 +617,7 @@ private static ObjectReadFunction longTimestampReader()
Timestamp ts = resultSet.getTimestamp(columnIndex, calendar);
long epochMillis = ts.getTime();
int nanosInTheSecond = ts.getNanos();
- int nanosInTheMilli = nanosInTheSecond % Timestamps.NANOSECONDS_PER_MILLISECOND;
+ int nanosInTheMilli = nanosInTheSecond % NANOSECONDS_PER_MILLISECOND;
long micro = epochMillis * Timestamps.MICROSECONDS_PER_MILLISECOND + (nanosInTheMilli / Timestamps.NANOSECONDS_PER_MICROSECOND);
int picosOfMicro = nanosInTheMilli % 1000 * 1000;
return new LongTimestamp(micro, picosOfMicro);
@@ -627,15 +635,14 @@ private static ColumnMapping timestampColumnMapping(JdbcTypeHandle typeHandle)
// <= 6 fits into a long
if (precision <= 6) {
return ColumnMapping.longMapping(
- (Type) TimestampType.createTimestampType(precision), (resultSet, columnIndex) ->
- StandardColumnMappings.toTrinoTimestamp(TimestampType.createTimestampType(precision),
- toLocalDateTime(resultSet, columnIndex)),
+ TimestampType.createTimestampType(precision),
+ (resultSet, columnIndex) -> StandardColumnMappings.toTrinoTimestamp(TimestampType.createTimestampType(precision), toLocalDateTime(resultSet, columnIndex)),
timestampWriteFunction());
}
// Too big. Put it in an object
return ColumnMapping.objectMapping(
- (Type) TimestampType.createTimestampType(precision),
+ TimestampType.createTimestampType(precision),
longTimestampReader(),
longTimestampWriter(precision));
}
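For reference, the pico-of-day rounding that `timeColumnMapping` performs can be reproduced outside the connector. The following is a self-contained sketch, not Trino SPI code: `round` stands in for `io.trino.spi.type.Timestamps.round` (half-up rounding that drops `magnitude` decimal digits), and the formatter mirrors `SNOWFLAKE_TIME_FORMATTER`:

import java.time.LocalTime;
import java.time.format.DateTimeFormatter;

public class TimeRoundingSketch
{
    private static final long PICOSECONDS_PER_NANOSECOND = 1_000L;
    private static final long PICOSECONDS_PER_DAY = 86_400L * 1_000_000_000_000L;
    private static final DateTimeFormatter TIME_FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSSSSS");

    // Stand-in for io.trino.spi.type.Timestamps.round: drop `magnitude` decimal
    // digits from picosOfDay, rounding half up
    private static long round(long picosOfDay, int magnitude)
    {
        long factor = (long) Math.pow(10, magnitude); // exact for magnitude <= 12
        return Math.floorDiv(picosOfDay + factor / 2, factor) * factor;
    }

    public static void main(String[] args)
    {
        int precision = 3;
        LocalTime time = TIME_FORMATTER.parse("23:59:59.999999999", LocalTime::from);
        long picosOfDay = round(time.toNanoOfDay() * PICOSECONDS_PER_NANOSECOND, 12 - precision);
        System.out.println(picosOfDay == PICOSECONDS_PER_DAY); // true
    }
}

The 23:59:59.999999999 input rounds up to a full day at precision 3, which is exactly why `timeWriteFunction` in the diff normalizes `picosOfDay == Timestamps.PICOSECONDS_PER_DAY` back to zero before formatting.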
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeClient.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeClient.java
index b743314af7633..3e92cfcb6f6ce 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeClient.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeClient.java
@@ -37,7 +37,6 @@
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.testing.TestingConnectorSession.SESSION;
-import static io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER;
import static org.assertj.core.api.Assertions.assertThat;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
@@ -62,7 +61,6 @@ public class TestSnowflakeClient
new BaseJdbcConfig(),
session -> { throw new UnsupportedOperationException(); },
new DefaultQueryBuilder(RemoteQueryModifier.NONE),
- TESTING_TYPE_MANAGER,
new DefaultIdentifierMapping(),
RemoteQueryModifier.NONE);
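The floorDiv/floorMod decomposition introduced in `longTimestampWithTzWriteFunction` is easiest to check with a pre-epoch value, where plain `/` and `%` would produce a negative fraction. A small sketch under the same constants (plain Java, no Trino types; the class name is illustrative):

import java.time.Instant;

public class TimestampDecompositionSketch
{
    private static final long MILLISECONDS_PER_SECOND = 1_000;
    private static final int NANOSECONDS_PER_MILLISECOND = 1_000_000;
    private static final int PICOSECONDS_PER_NANOSECOND = 1_000;

    public static void main(String[] args)
    {
        long epochMilli = -1_001;    // 1969-12-31T23:59:58.999Z, i.e. before the epoch
        int picosOfMilli = 500_000;  // 500 nanoseconds below the millisecond

        // floorDiv/floorMod keep the fractional part non-negative for negative
        // epoch values, matching what Instant.ofEpochSecond expects
        long epochSecond = Math.floorDiv(epochMilli, MILLISECONDS_PER_SECOND);
        int nanosOfSecond = (int) Math.floorMod(epochMilli, MILLISECONDS_PER_SECOND) * NANOSECONDS_PER_MILLISECOND
                + picosOfMilli / PICOSECONDS_PER_NANOSECOND;

        System.out.println(Instant.ofEpochSecond(epochSecond, nanosOfSecond));
        // prints 1969-12-31T23:59:58.999000500Z
    }
}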