diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index dff34be25a3242..49c17d2bb2df7e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -378,6 +378,7 @@ jobs:
!:trino-server,
!:trino-server-rpm,
!:trino-singlestore,
+ !:trino-snowflake,
!:trino-sqlserver,
!:trino-test-jdbc-compatibility-old-server,
!:trino-tests,
@@ -480,6 +481,8 @@ jobs:
- { modules: plugin/trino-redshift, profile: fte-tests }
- { modules: plugin/trino-resource-group-managers }
- { modules: plugin/trino-singlestore }
+ - { modules: plugin/trino-snowflake }
+ - { modules: plugin/trino-snowflake, profile: cloud-tests }
- { modules: plugin/trino-sqlserver }
- { modules: testing/trino-faulttolerant-tests, profile: default }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta }
@@ -658,6 +661,24 @@ jobs:
if: matrix.modules == 'plugin/trino-bigquery' && !contains(matrix.profile, 'cloud-tests-2') && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY != '')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY}"
+ - name: Cloud Snowflake Tests
+ env:
+ SNOWFLAKE_TEST_SERVER_URL: ${{ secrets.SNOWFLAKE_TEST_SERVER_URL }}
+ SNOWFLAKE_TEST_SERVER_USER: ${{ secrets.SNOWFLAKE_TEST_SERVER_USER }}
+ SNOWFLAKE_TEST_SERVER_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_SERVER_PASSWORD }}
+ SNOWFLAKE_TEST_SERVER_DATABASE: ${{ secrets.SNOWFLAKE_TEST_SERVER_DATABASE }}
+ SNOWFLAKE_TEST_SERVER_ROLE: ${{ secrets.SNOWFLAKE_TEST_SERVER_ROLE }}
+ SNOWFLAKE_TEST_SERVER_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_SERVER_WAREHOUSE }}
+ if: matrix.modules == 'plugin/trino-snowflake' && !contains(matrix.profile, 'cloud-tests') && (env.SNOWFLAKE_TEST_SERVER_URL != '' && env.SNOWFLAKE_TEST_SERVER_USER != '' && env.SNOWFLAKE_TEST_SERVER_PASSWORD != '')
+ run: |
+ $MAVEN test ${MAVEN_TEST} -pl :trino-snowflake -Pcloud-tests \
+ -Dconnector.name="snowflake" \
+ -Dsnowflake.test.server.url="${SNOWFLAKE_TEST_SERVER_URL}" \
+ -Dsnowflake.test.server.user="${SNOWFLAKE_TEST_SERVER_USER}" \
+ -Dsnowflake.test.server.password="${SNOWFLAKE_TEST_SERVER_PASSWORD}" \
+ -Dsnowflake.test.server.database="${SNOWFLAKE_TEST_SERVER_DATABASE}" \
+ -Dsnowflake.test.server.role="${SNOWFLAKE_TEST_SERVER_ROLE}" \
+ -Dsnowflake.test.server.warehouse="${SNOWFLAKE_TEST_SERVER_WAREHOUSE}"
- name: Iceberg Cloud Tests
id: tests-iceberg
env:
@@ -848,6 +869,7 @@ jobs:
- suite-clickhouse
- suite-mysql
- suite-iceberg
+ - suite-snowflake
- suite-hudi
- suite-ignite
exclude:
diff --git a/core/trino-server/src/main/provisio/trino.xml b/core/trino-server/src/main/provisio/trino.xml
index 8a5fd01c0d0d1a..7a2e0a6fe8f25c 100644
--- a/core/trino-server/src/main/provisio/trino.xml
+++ b/core/trino-server/src/main/provisio/trino.xml
@@ -296,6 +296,12 @@
+
+    <artifactSet to="plugin/snowflake">
+        <artifact id="${project.groupId}:trino-snowflake-zip:zip:${project.version}">
+            <unpack />
+        </artifact>
+    </artifactSet>
diff --git a/docs/src/main/sphinx/connector.md b/docs/src/main/sphinx/connector.md
index 39d47eace388a8..a44c2834a73dda 100644
--- a/docs/src/main/sphinx/connector.md
+++ b/docs/src/main/sphinx/connector.md
@@ -38,6 +38,7 @@ Prometheus
Redis
Redshift
SingleStore
+Snowflake
SQL Server
System
Thrift
diff --git a/docs/src/main/sphinx/connector/snowflake.md b/docs/src/main/sphinx/connector/snowflake.md
new file mode 100644
index 00000000000000..579764534e230d
--- /dev/null
+++ b/docs/src/main/sphinx/connector/snowflake.md
@@ -0,0 +1,96 @@
+# Snowflake connector
+
+```{raw} html
+<img src="../_static/img/snowflake.png" class="connector-logo">
+```
+
+The Snowflake connector allows querying and creating tables in an
+external [Snowflake](https://www.snowflake.com/) account. This can be used to join data between
+different systems like Snowflake and Hive, or between two different
+Snowflake accounts.
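+
+For example, assuming a Snowflake catalog named `example` and a Hive catalog
+named `hive` are configured, a single query can combine data from both
+systems. The table and column names below are purely illustrative:
+
+```sql
+SELECT o.orderkey, c.name
+FROM example.tpch.orders o
+JOIN hive.default.customer c ON o.custkey = c.custkey;
+```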
+
+## Configuration
+
+To configure the Snowflake connector, create a catalog properties file
+in `etc/catalog` named, for example, `example.properties`, to
+mount the Snowflake connector as the `snowflake` catalog.
+Create the file with the following contents, replacing the
+connection properties as appropriate for your setup:
+
+```none
+connector.name=snowflake
+connection-url=jdbc:snowflake://<account>.snowflakecomputing.com
+connection-user=root
+connection-password=secret
+snowflake.account=account
+snowflake.database=database
+snowflake.role=role
+snowflake.warehouse=warehouse
+```
+
+### Arrow serialization support
+
+This is an experimental feature which adds support for using Apache Arrow
+as the serialization format when reading from Snowflake. Note the following
+caveat:
+
+- Using Apache Arrow serialization is disabled by default. In order to enable
+ it, add `--add-opens=java.base/java.nio=ALL-UNNAMED` to the Trino
+ {ref}`jvm-config`.
+
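+For example, with the flag in place, an `etc/jvm.config` file contains the
+option alongside the existing JVM settings. The other lines shown here are
+illustrative defaults only:
+
+```none
+-server
+-Xmx16G
+--add-opens=java.base/java.nio=ALL-UNNAMED
+```
+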
+### Multiple Snowflake databases or accounts
+
+The Snowflake connector can only access a single database within
+a Snowflake account. Thus, if you have multiple Snowflake databases,
+or want to connect to multiple Snowflake accounts, you must configure
+multiple instances of the Snowflake connector.
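+
+For example, the following two catalog files, named `sales.properties` and
+`analytics.properties` purely for illustration, expose two Snowflake databases
+as separate catalogs. The remaining connection properties are the same as in
+the configuration example above:
+
+```none
+# etc/catalog/sales.properties
+connector.name=snowflake
+connection-url=jdbc:snowflake://<account>.snowflakecomputing.com
+snowflake.database=sales
+
+# etc/catalog/analytics.properties
+connector.name=snowflake
+connection-url=jdbc:snowflake://<account>.snowflakecomputing.com
+snowflake.database=analytics
+```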
+
+% snowflake-type-mapping:
+
+## Type mapping
+
+Trino supports the following Snowflake data types:
+
+| Snowflake Type | Trino Type |
+| -------------- | -------------- |
+| `boolean` | `boolean` |
+| `tinyint` | `bigint` |
+| `smallint` | `bigint` |
+| `byteint` | `bigint` |
+| `int` | `bigint` |
+| `integer` | `bigint` |
+| `bigint` | `bigint` |
+| `float` | `real` |
+| `real` | `real` |
+| `double` | `double` |
+| `decimal` | `decimal(P,S)` |
+| `varchar(n)` | `varchar(n)` |
+| `char(n)` | `varchar(n)` |
+| `binary(n)` | `varbinary` |
+| `varbinary` | `varbinary` |
+| `date` | `date` |
+| `time` | `time` |
+| `timestampntz` | `timestamp` |
+| `timestamptz`  | `timestamp with time zone` |
+| `timestampltz` | `timestamp with time zone` |
+
+See the [Snowflake documentation](https://docs.snowflake.com/en/sql-reference/intro-summary-data-types.html) for a complete list of Snowflake data types.
+
+(snowflake-sql-support)=
+
+## SQL support
+
+The connector provides read access and write access to data and metadata in
+a Snowflake database. In addition to the
+{ref}`globally available <sql-globally-available>` and
+{ref}`read operation <sql-read-operations>` statements, the connector supports
+the following features:
+
+- {doc}`/sql/insert`
+- {doc}`/sql/delete`
+- {doc}`/sql/truncate`
+- {doc}`/sql/create-table`
+- {doc}`/sql/create-table-as`
+- {doc}`/sql/drop-table`
+- {doc}`/sql/alter-table`
+- {doc}`/sql/create-schema`
+- {doc}`/sql/drop-schema`
diff --git a/docs/src/main/sphinx/static/img/snowflake.png b/docs/src/main/sphinx/static/img/snowflake.png
new file mode 100644
index 00000000000000..b337bc4d5a779c
Binary files /dev/null and b/docs/src/main/sphinx/static/img/snowflake.png differ
diff --git a/plugin/trino-snowflake/pom.xml b/plugin/trino-snowflake/pom.xml
new file mode 100644
index 00000000000000..bd3d8d88c4ba26
--- /dev/null
+++ b/plugin/trino-snowflake/pom.xml
@@ -0,0 +1,237 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>io.trino</groupId>
+        <artifactId>trino-root</artifactId>
+        <version>439-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>trino-snowflake</artifactId>
+    <packaging>trino-plugin</packaging>
+    <description>Trino - Snowflake Connector</description>
+
+    <properties>
+        <air.main.basedir>${project.parent.basedir}</air.main.basedir>
+        <air.test.jvm.additional-arguments>--add-opens=java.base/java.nio=ALL-UNNAMED</air.test.jvm.additional-arguments>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.inject</groupId>
+            <artifactId>guice</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>configuration</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>log</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-base-jdbc</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-plugin-toolkit</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>net.snowflake</groupId>
+            <artifactId>snowflake-jdbc</artifactId>
+            <version>3.13.32</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-annotations</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>slice</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.opentelemetry</groupId>
+            <artifactId>opentelemetry-api</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.opentelemetry</groupId>
+            <artifactId>opentelemetry-context</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-spi</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.openjdk.jol</groupId>
+            <artifactId>jol-core</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>testing</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-base-jdbc</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-main</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-main</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-testing</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-testing-services</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-tpch</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino.tpch</groupId>
+            <artifactId>tpch</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.assertj</groupId>
+            <artifactId>assertj-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.jetbrains</groupId>
+            <artifactId>annotations</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-api</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-engine</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>jdbc</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>testcontainers</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <profiles>
+        <profile>
+            <id>default</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <configuration>
+                            <excludes>
+                                <exclude>**/TestSnowflakeConnectorTest.java</exclude>
+                                <exclude>**/TestSnowflakePlugin.java</exclude>
+                                <exclude>**/TestSnowflakeTypeMapping.java</exclude>
+                            </excludes>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+
+        <profile>
+            <id>cloud-tests</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <configuration>
+                            <includes>
+                                <include>**/TestSnowflakeClient.java</include>
+                                <include>**/TestSnowflakeConfig.java</include>
+                                <include>**/TestSnowflakeConnectorTest.java</include>
+                                <include>**/TestSnowflakePlugin.java</include>
+                                <include>**/TestSnowflakeTypeMapping.java</include>
+                            </includes>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+</project>
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
new file mode 100644
index 00000000000000..c68e786e2efeb0
--- /dev/null
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
@@ -0,0 +1,658 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.inject.Inject;
+import io.airlift.log.Logger;
+import io.airlift.slice.Slices;
+import io.trino.plugin.base.aggregation.AggregateFunctionRewriter;
+import io.trino.plugin.base.aggregation.AggregateFunctionRule;
+import io.trino.plugin.base.expression.ConnectorExpressionRewriter;
+import io.trino.plugin.base.mapping.IdentifierMapping;
+import io.trino.plugin.jdbc.BaseJdbcClient;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.ColumnMapping;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.JdbcColumnHandle;
+import io.trino.plugin.jdbc.JdbcExpression;
+import io.trino.plugin.jdbc.JdbcTableHandle;
+import io.trino.plugin.jdbc.JdbcTypeHandle;
+import io.trino.plugin.jdbc.LongWriteFunction;
+import io.trino.plugin.jdbc.ObjectReadFunction;
+import io.trino.plugin.jdbc.ObjectWriteFunction;
+import io.trino.plugin.jdbc.PredicatePushdownController;
+import io.trino.plugin.jdbc.QueryBuilder;
+import io.trino.plugin.jdbc.SliceReadFunction;
+import io.trino.plugin.jdbc.SliceWriteFunction;
+import io.trino.plugin.jdbc.StandardColumnMappings;
+import io.trino.plugin.jdbc.WriteMapping;
+import io.trino.plugin.jdbc.aggregation.ImplementAvgDecimal;
+import io.trino.plugin.jdbc.aggregation.ImplementAvgFloatingPoint;
+import io.trino.plugin.jdbc.aggregation.ImplementCount;
+import io.trino.plugin.jdbc.aggregation.ImplementCountAll;
+import io.trino.plugin.jdbc.aggregation.ImplementMinMax;
+import io.trino.plugin.jdbc.aggregation.ImplementSum;
+import io.trino.plugin.jdbc.expression.JdbcConnectorExpressionRewriterBuilder;
+import io.trino.plugin.jdbc.expression.ParameterizedExpression;
+import io.trino.plugin.jdbc.logging.RemoteQueryModifier;
+import io.trino.spi.TrinoException;
+import io.trino.spi.connector.AggregateFunction;
+import io.trino.spi.connector.ColumnHandle;
+import io.trino.spi.connector.ConnectorSession;
+import io.trino.spi.type.CharType;
+import io.trino.spi.type.Chars;
+import io.trino.spi.type.DateTimeEncoding;
+import io.trino.spi.type.DateType;
+import io.trino.spi.type.DecimalType;
+import io.trino.spi.type.LongTimestamp;
+import io.trino.spi.type.LongTimestampWithTimeZone;
+import io.trino.spi.type.TimeType;
+import io.trino.spi.type.TimeZoneKey;
+import io.trino.spi.type.TimestampType;
+import io.trino.spi.type.TimestampWithTimeZoneType;
+import io.trino.spi.type.Timestamps;
+import io.trino.spi.type.Type;
+import io.trino.spi.type.VarcharType;
+
+import java.math.RoundingMode;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Optional;
+import java.util.TimeZone;
+import java.util.function.BiFunction;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
+import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
+import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType;
+import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
+import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
+import static io.trino.spi.type.VarcharType.createUnboundedVarcharType;
+import static io.trino.spi.type.VarcharType.createVarcharType;
+import static java.lang.String.format;
+
+public class SnowflakeClient
+ extends BaseJdbcClient
+{
+ /* TIME supports an optional precision parameter for fractional seconds, e.g. TIME(3). Time precision can range from 0 (seconds) to 9 (nanoseconds). The default precision is 9.
+ All TIME values must be between 00:00:00 and 23:59:59.999999999. TIME internally stores “wallclock” time, and all operations on TIME values are performed without taking any time zone into consideration.
+ */
+ private static final int SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION = 9;
+ private static final Logger log = Logger.get(SnowflakeClient.class);
+ private static final DateTimeFormatter SNOWFLAKE_DATETIME_FORMATTER = DateTimeFormatter.ofPattern("y-MM-dd'T'HH:mm:ss.SSSSSSSSSXXX");
+ private static final DateTimeFormatter SNOWFLAKE_DATE_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd");
+ private static final DateTimeFormatter SNOWFLAKE_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("y-MM-dd'T'HH:mm:ss.SSSSSSSSS");
+ private static final DateTimeFormatter SNOWFLAKE_TIME_FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSSSSS");
+ private final AggregateFunctionRewriter<JdbcExpression, ?> aggregateFunctionRewriter;
+
+ private interface WriteMappingFunction
+ {
+ WriteMapping convert(Type type);
+ }
+
+ private interface ColumnMappingFunction
+ {
+ Optional<ColumnMapping> convert(JdbcTypeHandle typeHandle);
+ }
+
+ private static final TimeZone UTC_TZ = TimeZone.getTimeZone(ZoneId.of("UTC"));
+ // Mappings for JDBC column types to internal Trino types
+ private static final Map<Integer, ColumnMapping> STANDARD_COLUMN_MAPPINGS = ImmutableMap.<Integer, ColumnMapping>builder()
+ .put(Types.BOOLEAN, StandardColumnMappings.booleanColumnMapping())
+ .put(Types.TINYINT, StandardColumnMappings.tinyintColumnMapping())
+ .put(Types.SMALLINT, StandardColumnMappings.smallintColumnMapping())
+ .put(Types.INTEGER, StandardColumnMappings.integerColumnMapping())
+ .put(Types.BIGINT, StandardColumnMappings.bigintColumnMapping())
+ .put(Types.REAL, StandardColumnMappings.realColumnMapping())
+ .put(Types.DOUBLE, StandardColumnMappings.doubleColumnMapping())
+ .put(Types.FLOAT, StandardColumnMappings.doubleColumnMapping())
+ .put(Types.BINARY, StandardColumnMappings.varbinaryColumnMapping())
+ .put(Types.VARBINARY, StandardColumnMappings.varbinaryColumnMapping())
+ .put(Types.LONGVARBINARY, StandardColumnMappings.varbinaryColumnMapping())
+ .buildOrThrow();
+
+ private static final Map<String, ColumnMappingFunction> SNOWFLAKE_COLUMN_MAPPINGS = ImmutableMap.<String, ColumnMappingFunction>builder()
+ .put("time", typeHandle -> Optional.of(timeColumnMapping(typeHandle)))
+ .put("timestampntz", typeHandle -> Optional.of(timestampColumnMapping(typeHandle)))
+ .put("timestamptz", typeHandle -> Optional.of(timestampTzColumnMapping(typeHandle)))
+ .put("timestampltz", typeHandle -> Optional.of(timestampTzColumnMapping(typeHandle)))
+ .put("date", typeHandle -> Optional.of(ColumnMapping.longMapping(
+ DateType.DATE,
+ (resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(),
+ snowFlakeDateWriter())))
+ .put("object", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
+ createUnboundedVarcharType(),
+ StandardColumnMappings.varcharReadFunction(createUnboundedVarcharType()),
+ StandardColumnMappings.varcharWriteFunction(),
+ PredicatePushdownController.DISABLE_PUSHDOWN)))
+ .put("array", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
+ createUnboundedVarcharType(),
+ StandardColumnMappings.varcharReadFunction(createUnboundedVarcharType()),
+ StandardColumnMappings.varcharWriteFunction(),
+ PredicatePushdownController.DISABLE_PUSHDOWN)))
+ .put("variant", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
+ createUnboundedVarcharType(),
+ variantReadFunction(),
+ StandardColumnMappings.varcharWriteFunction(),
+ PredicatePushdownController.FULL_PUSHDOWN)))
+ .put("varchar", typeHandle -> Optional.of(varcharColumnMapping(typeHandle.getRequiredColumnSize())))
+ .put("number", typeHandle -> {
+ int decimalDigits = typeHandle.getRequiredDecimalDigits();
+ int precision = typeHandle.getRequiredColumnSize() + Math.max(-decimalDigits, 0);
+ if (precision > 38) {
+ return Optional.empty();
+ }
+ return Optional.of(columnMappingPushdown(
+ StandardColumnMappings.decimalColumnMapping(createDecimalType(precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
+ })
+ .buildOrThrow();
+
+ // Mappings for internal Trino types to JDBC column types
+ private static final Map<String, WriteMapping> STANDARD_WRITE_MAPPINGS = ImmutableMap.<String, WriteMapping>builder()
+ .put("BooleanType", WriteMapping.booleanMapping("boolean", StandardColumnMappings.booleanWriteFunction()))
+ .put("BigintType", WriteMapping.longMapping("number(19)", StandardColumnMappings.bigintWriteFunction()))
+ .put("IntegerType", WriteMapping.longMapping("number(10)", StandardColumnMappings.integerWriteFunction()))
+ .put("SmallintType", WriteMapping.longMapping("number(5)", StandardColumnMappings.smallintWriteFunction()))
+ .put("TinyintType", WriteMapping.longMapping("number(3)", StandardColumnMappings.tinyintWriteFunction()))
+ .put("DoubleType", WriteMapping.doubleMapping("double precision", StandardColumnMappings.doubleWriteFunction()))
+ .put("RealType", WriteMapping.longMapping("real", StandardColumnMappings.realWriteFunction()))
+ .put("VarbinaryType", WriteMapping.sliceMapping("varbinary", StandardColumnMappings.varbinaryWriteFunction()))
+ .put("DateType", WriteMapping.longMapping("date", snowFlakeDateWriter()))
+ .buildOrThrow();
+
+ private static final Map<String, WriteMappingFunction> SNOWFLAKE_WRITE_MAPPINGS = ImmutableMap.<String, WriteMappingFunction>builder()
+ .put("TimeType", type -> WriteMapping.longMapping("time", SnowflakeClient.snowFlaketimeWriter(type)))
+ .put("ShortTimestampType", SnowflakeClient::snowFlakeTimestampWriter)
+ .put("ShortTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
+ .put("LongTimestampType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
+ .put("LongTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
+ .put("VarcharType", SnowflakeClient::snowFlakeVarCharWriter)
+ .put("CharType", SnowflakeClient::snowFlakeCharWriter)
+ .put("LongDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
+ .put("ShortDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
+ .buildOrThrow();
+
+ @Inject
+ public SnowflakeClient(
+ BaseJdbcConfig config,
+ ConnectionFactory connectionFactory,
+ QueryBuilder queryBuilder,
+ IdentifierMapping identifierMapping,
+ RemoteQueryModifier remoteQueryModifier)
+ {
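+ // Snowflake quotes SQL identifiers with double quotes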
+ super("\"", connectionFactory, queryBuilder, config.getJdbcTypesMappedToVarchar(), identifierMapping, remoteQueryModifier, false);
+
+ JdbcTypeHandle bigintTypeHandle = new JdbcTypeHandle(Types.BIGINT, Optional.of("bigint"), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty());
+ ConnectorExpressionRewriter<ParameterizedExpression> connectorExpressionRewriter = JdbcConnectorExpressionRewriterBuilder.newBuilder()
+ .addStandardRules(this::quoted)
+ .build();
+
+ this.aggregateFunctionRewriter = new AggregateFunctionRewriter<>(
+ connectorExpressionRewriter,
+ ImmutableSet.<AggregateFunctionRule<JdbcExpression, ParameterizedExpression>>builder()
+ .add(new ImplementCountAll(bigintTypeHandle))
+ .add(new ImplementCount(bigintTypeHandle))
+ .add(new ImplementMinMax(false))
+ .add(new ImplementSum(SnowflakeClient::toTypeHandle))
+ .add(new ImplementAvgFloatingPoint())
+ .add(new ImplementAvgDecimal())
+ .build());
+ }
+
+ @Override
+ public void abortReadConnection(Connection connection, ResultSet resultSet)
+ throws SQLException
+ {
+ // Abort connection before closing. Without this, the Snowflake driver
+ // attempts to drain the connection by reading all the results.
+ connection.abort(directExecutor());
+ }
+
+ @Override
+ public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
+ {
+ Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
+ if (mapping.isPresent()) {
+ return mapping;
+ }
+
+ String jdbcTypeName = typeHandle.getJdbcTypeName()
+ .orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
+ jdbcTypeName = jdbcTypeName.toLowerCase(Locale.ENGLISH);
+ int type = typeHandle.getJdbcType();
+
+ ColumnMapping columnMap = STANDARD_COLUMN_MAPPINGS.get(type);
+ if (columnMap != null) {
+ return Optional.of(columnMap);
+ }
+
+ ColumnMappingFunction columnMappingFunction = SNOWFLAKE_COLUMN_MAPPINGS.get(jdbcTypeName);
+ if (columnMappingFunction != null) {
+ return columnMappingFunction.convert(typeHandle);
+ }
+
+ // Unsupported remote type; fail explicitly instead of silently mis-mapping the column.
+ throw new TrinoException(NOT_SUPPORTED, "SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type(" + type + "):" + jdbcTypeName);
+ }
+
+ @Override
+ public WriteMapping toWriteMapping(ConnectorSession session, Type type)
+ {
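+ // Write mappings are keyed by the simple class name of the Trino Type implementation, for example "BigintType" or "VarcharType"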
+ Class<?> myClass = type.getClass();
+ String simple = myClass.getSimpleName();
+
+ WriteMapping writeMapping = STANDARD_WRITE_MAPPINGS.get(simple);
+ if (writeMapping != null) {
+ return writeMapping;
+ }
+
+ WriteMappingFunction writeMappingFunction = SNOWFLAKE_WRITE_MAPPINGS.get(simple);
+ if (writeMappingFunction != null) {
+ return writeMappingFunction.convert(type);
+ }
+
+ throw new TrinoException(NOT_SUPPORTED, "SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type: " + type.getDisplayName() + ", simple:" + simple);
+ }
+
+ @Override
+ public Optional<JdbcExpression> implementAggregation(ConnectorSession session, AggregateFunction aggregate, Map<String, ColumnHandle> assignments)
+ {
+ // TODO support complex ConnectorExpressions
+ return aggregateFunctionRewriter.rewrite(session, aggregate, assignments);
+ }
+
+ private static Optional<JdbcTypeHandle> toTypeHandle(DecimalType decimalType)
+ {
+ return Optional.of(new JdbcTypeHandle(Types.NUMERIC, Optional.of("decimal"), Optional.of(decimalType.getPrecision()), Optional.of(decimalType.getScale()), Optional.empty(), Optional.empty()));
+ }
+
+ @Override
+ protected Optional<BiFunction<String, Long, String>> limitFunction()
+ {
+ return Optional.of((sql, limit) -> sql + " LIMIT " + limit);
+ }
+
+ @Override
+ public boolean isLimitGuaranteed(ConnectorSession session)
+ {
+ return true;
+ }
+
+ @Override
+ public void setColumnType(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column, Type type)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support setting column types");
+ }
+
+ private static SliceReadFunction variantReadFunction()
+ {
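+ // The JDBC driver returns VARIANT values as JSON text; strip the surrounding double quotes so plain strings read cleanly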
+ return (resultSet, columnIndex) -> Slices.utf8Slice(resultSet.getString(columnIndex).replaceAll("^\"|\"$", ""));
+ }
+
+ private static ColumnMapping columnMappingPushdown(ColumnMapping mapping)
+ {
+ if (mapping.getPredicatePushdownController() == PredicatePushdownController.DISABLE_PUSHDOWN) {
+ throw new TrinoException(NOT_SUPPORTED, "mapping.getPredicatePushdownController() is DISABLE_PUSHDOWN. Type was " + mapping.getType());
+ }
+
+ return new ColumnMapping(mapping.getType(), mapping.getReadFunction(), mapping.getWriteFunction(), PredicatePushdownController.FULL_PUSHDOWN);
+ }
+
+ private static ColumnMapping timeColumnMapping(JdbcTypeHandle typeHandle)
+ {
+ int precision = typeHandle.getRequiredDecimalDigits();
+ checkArgument(precision <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION, "The max timestamp precision in Snowflake is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
+ return ColumnMapping.longMapping(
+ TimeType.createTimeType(precision),
+ (resultSet, columnIndex) -> {
+ LocalTime time = SNOWFLAKE_TIME_FORMATTER.parse(resultSet.getString(columnIndex), LocalTime::from);
+ return Timestamps.round(time.toNanoOfDay() * PICOSECONDS_PER_NANOSECOND, 12 - precision);
+ },
+ timeWriteFunction(precision),
+ PredicatePushdownController.FULL_PUSHDOWN);
+ }
+
+ private static LongWriteFunction snowFlaketimeWriter(Type type)
+ {
+ return timeWriteFunction(((TimeType) type).getPrecision());
+ }
+
+ private static LongWriteFunction timeWriteFunction(int precision)
+ {
+ checkArgument(precision <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION, "Unsupported precision: %s", precision);
+ String bindExpression = format("CAST(? AS time(%s))", precision);
+ return new LongWriteFunction()
+ {
+ @Override
+ public String getBindExpression()
+ {
+ return bindExpression;
+ }
+
+ @Override
+ public void set(PreparedStatement statement, int index, long picosOfDay)
+ throws SQLException
+ {
+ picosOfDay = Timestamps.round(picosOfDay, 12 - precision);
+ if (picosOfDay == Timestamps.PICOSECONDS_PER_DAY) {
+ picosOfDay = 0;
+ }
+ LocalTime localTime = LocalTime.ofNanoOfDay(picosOfDay / PICOSECONDS_PER_NANOSECOND);
+ // statement.setObject(.., localTime) would yield incorrect end result for 23:59:59.999000
+ statement.setString(index, SNOWFLAKE_TIME_FORMATTER.format(localTime));
+ }
+ };
+ }
+
+ private static ColumnMapping timestampTzColumnMapping(JdbcTypeHandle typeHandle)
+ {
+ int precision = typeHandle.getRequiredDecimalDigits();
+ String jdbcTypeName = typeHandle.getJdbcTypeName()
+ .orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
+ int type = typeHandle.getJdbcType();
+ log.debug("timestampTZColumnMapping: jdbcTypeName(%s):%s precision:%s", type, jdbcTypeName, precision);
+
+ if (precision <= 3) {
+ return ColumnMapping.longMapping(
+ createTimestampWithTimeZoneType(precision),
+ (resultSet, columnIndex) -> {
+ ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
+ return DateTimeEncoding.packDateTimeWithZone(timestamp.toInstant().toEpochMilli(), timestamp.getZone().getId());
+ },
+ timestampWithTZWriter(),
+ PredicatePushdownController.FULL_PUSHDOWN);
+ }
+ else {
+ return ColumnMapping.objectMapping(createTimestampWithTimeZoneType(precision), longTimestampWithTimezoneReadFunction(), longTimestampWithTzWriteFunction());
+ }
+ }
+
+ private static ColumnMapping varcharColumnMapping(int varcharLength)
+ {
+ VarcharType varcharType = varcharLength <= VarcharType.MAX_LENGTH ? createVarcharType(varcharLength) : createUnboundedVarcharType();
+ return ColumnMapping.sliceMapping(
+ varcharType,
+ StandardColumnMappings.varcharReadFunction(varcharType),
+ StandardColumnMappings.varcharWriteFunction());
+ }
+
+ private static ObjectReadFunction longTimestampWithTimezoneReadFunction()
+ {
+ return ObjectReadFunction.of(LongTimestampWithTimeZone.class, (resultSet, columnIndex) -> {
+ ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
+ return LongTimestampWithTimeZone.fromEpochSecondsAndFraction(
+ timestamp.toEpochSecond(),
+ (long) timestamp.getNano() * PICOSECONDS_PER_NANOSECOND,
+ TimeZoneKey.getTimeZoneKey(timestamp.getZone().getId()));
+ });
+ }
+
+ private static ObjectWriteFunction longTimestampWithTzWriteFunction()
+ {
+ return ObjectWriteFunction.of(LongTimestampWithTimeZone.class, (statement, index, value) -> {
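+ // Reassemble the packed epoch millis and picos into an Instant, then bind it as text formatted in the value's original time zone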
+ long epochMilli = value.getEpochMillis();
+ long epochSecond = Math.floorDiv(epochMilli, MILLISECONDS_PER_SECOND);
+ int nanosOfSecond = Math.floorMod(epochMilli, MILLISECONDS_PER_SECOND) * NANOSECONDS_PER_MILLISECOND + value.getPicosOfMilli() / PICOSECONDS_PER_NANOSECOND;
+ ZoneId zone = TimeZoneKey.getTimeZoneKey(value.getTimeZoneKey()).getZoneId();
+ Instant instant = Instant.ofEpochSecond(epochSecond, nanosOfSecond);
+ statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(ZonedDateTime.ofInstant(instant, zone)));
+ });
+ }
+
+ private static WriteMapping snowFlakeDecimalWriter(Type type)
+ {
+ DecimalType decimalType = (DecimalType) type;
+ String dataType = format("decimal(%s, %s)", decimalType.getPrecision(), decimalType.getScale());
+
+ if (decimalType.isShort()) {
+ return WriteMapping.longMapping(dataType, StandardColumnMappings.shortDecimalWriteFunction(decimalType));
+ }
+ return WriteMapping.objectMapping(dataType, StandardColumnMappings.longDecimalWriteFunction(decimalType));
+ }
+
+ private static LongWriteFunction snowFlakeDateWriter()
+ {
+ return (statement, index, day) -> statement.setString(index, SNOWFLAKE_DATE_FORMATTER.format(LocalDate.ofEpochDay(day)));
+ }
+
+ private static WriteMapping snowFlakeCharWriter(Type type)
+ {
+ CharType charType = (CharType) type;
+ return WriteMapping.sliceMapping("char(" + charType.getLength() + ")", charWriteFunction(charType));
+ }
+
+ private static WriteMapping snowFlakeVarCharWriter(Type type)
+ {
+ String dataType;
+ VarcharType varcharType = (VarcharType) type;
+
+ if (varcharType.isUnbounded()) {
+ dataType = "varchar";
+ }
+ else {
+ dataType = "varchar(" + varcharType.getBoundedLength() + ")";
+ }
+ return WriteMapping.sliceMapping(dataType, StandardColumnMappings.varcharWriteFunction());
+ }
+
+ private static SliceWriteFunction charWriteFunction(CharType charType)
+ {
+ return (statement, index, value) -> statement.setString(index, Chars.padSpaces(value, charType).toStringUtf8());
+ }
+
+ private static WriteMapping snowFlakeTimestampWriter(Type type)
+ {
+ TimestampType timestampType = (TimestampType) type;
+ checkArgument(
+ timestampType.getPrecision() <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION,
+ "The max timestamp precision in Snowflake is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
+
+ if (timestampType.isShort()) {
+ return WriteMapping.longMapping(format("timestamp_ntz(%d)", timestampType.getPrecision()), timestampWriteFunction());
+ }
+ return WriteMapping.objectMapping(format("timestamp_ntz(%d)", timestampType.getPrecision()), longTimestampWriter(timestampType.getPrecision()));
+ }
+
+ private static LongWriteFunction timestampWriteFunction()
+ {
+ return (statement, index, value) -> statement.setString(index, StandardColumnMappings.fromTrinoTimestamp(value).toString());
+ }
+
+ private static ObjectWriteFunction longTimestampWriter(int precision)
+ {
+ return ObjectWriteFunction.of(
+ LongTimestamp.class,
+ (statement, index, value) -> statement.setString(index, SNOWFLAKE_TIMESTAMP_FORMATTER.format(StandardColumnMappings.fromLongTrinoTimestamp(value, precision))));
+ }
+
+ private static WriteMapping snowFlakeTimestampWithTZWriter(Type type)
+ {
+ TimestampWithTimeZoneType timeTZType = (TimestampWithTimeZoneType) type;
+
+ checkArgument(timeTZType.getPrecision() <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION, "Max Snowflake precision is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
+ if (timeTZType.isShort()) {
+ return WriteMapping.longMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), timestampWithTZWriter());
+ }
+ return WriteMapping.objectMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), longTimestampWithTzWriteFunction());
+ }
+
+ private static LongWriteFunction timestampWithTZWriter()
+ {
+ return (statement, index, encodedTimeWithZone) -> {
+ Instant instant = Instant.ofEpochMilli(DateTimeEncoding.unpackMillisUtc(encodedTimeWithZone));
+ ZoneId zone = ZoneId.of(DateTimeEncoding.unpackZoneKey(encodedTimeWithZone).getId());
+ statement.setString(index, SNOWFLAKE_DATETIME_FORMATTER.format(instant.atZone(zone)));
+ };
+ }
+
+ private static ObjectReadFunction longTimestampReader()
+ {
+ return ObjectReadFunction.of(LongTimestamp.class, (resultSet, columnIndex) -> {
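+ // Read the value with a UTC calendar, then split it into microseconds and a picosecond remainder for LongTimestamp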
+ Calendar calendar = new GregorianCalendar(UTC_TZ, Locale.ENGLISH);
+ calendar.setTime(new Date(0));
+ Timestamp ts = resultSet.getTimestamp(columnIndex, calendar);
+ long epochMillis = ts.getTime();
+ int nanosInTheSecond = ts.getNanos();
+ int nanosInTheMilli = nanosInTheSecond % NANOSECONDS_PER_MILLISECOND;
+ long micro = epochMillis * Timestamps.MICROSECONDS_PER_MILLISECOND + (nanosInTheMilli / Timestamps.NANOSECONDS_PER_MICROSECOND);
+ int picosOfMicro = nanosInTheMilli % 1000 * 1000;
+ return new LongTimestamp(micro, picosOfMicro);
+ });
+ }
+
+ private static ColumnMapping timestampColumnMapping(JdbcTypeHandle typeHandle)
+ {
+ int precision = typeHandle.getRequiredDecimalDigits();
+ String jdbcTypeName = typeHandle.getJdbcTypeName()
+ .orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
+ int type = typeHandle.getJdbcType();
+ log.debug("timestampColumnMapping: jdbcTypeName(%s):%s precision:%s", type, jdbcTypeName, precision);
+
+ // <= 6 fits into a long
+ if (precision <= 6) {
+ return ColumnMapping.longMapping(
+ TimestampType.createTimestampType(precision),
+ (resultSet, columnIndex) -> StandardColumnMappings.toTrinoTimestamp(TimestampType.createTimestampType(precision), toLocalDateTime(resultSet, columnIndex)),
+ timestampWriteFunction());
+ }
+
+ // Too big. Put it in an object
+ return ColumnMapping.objectMapping(
+ TimestampType.createTimestampType(precision),
+ longTimestampReader(),
+ longTimestampWriter(precision));
+ }
+
+ private static LocalDateTime toLocalDateTime(ResultSet resultSet, int columnIndex)
+ throws SQLException
+ {
+ Calendar calendar = new GregorianCalendar(UTC_TZ, Locale.ENGLISH);
+ calendar.setTime(new Date(0));
+ Timestamp ts = resultSet.getTimestamp(columnIndex, calendar);
+ return LocalDateTime.ofInstant(Instant.ofEpochMilli(ts.getTime()), ZoneOffset.UTC);
+ }
+}
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java
new file mode 100644
index 00000000000000..19fc358471915c
--- /dev/null
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import com.google.inject.Binder;
+import com.google.inject.Module;
+import com.google.inject.Provides;
+import com.google.inject.Scopes;
+import com.google.inject.Singleton;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.DriverConnectionFactory;
+import io.trino.plugin.jdbc.ForBaseJdbc;
+import io.trino.plugin.jdbc.JdbcClient;
+import io.trino.plugin.jdbc.TypeHandlingJdbcConfig;
+import io.trino.plugin.jdbc.credential.CredentialProvider;
+import io.trino.spi.TrinoException;
+import net.snowflake.client.jdbc.SnowflakeDriver;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Properties;
+
+import static io.airlift.configuration.ConfigBinder.configBinder;
+import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
+
+public class SnowflakeClientModule
+ implements Module
+{
+ @Override
+ public void configure(Binder binder)
+ {
+ binder.bind(JdbcClient.class).annotatedWith(ForBaseJdbc.class).to(SnowflakeClient.class).in(Scopes.SINGLETON);
+ configBinder(binder).bindConfig(SnowflakeConfig.class);
+ configBinder(binder).bindConfig(TypeHandlingJdbcConfig.class);
+ }
+
+ @Singleton
+ @Provides
+ @ForBaseJdbc
+ public ConnectionFactory getConnectionFactory(BaseJdbcConfig baseJdbcConfig, SnowflakeConfig snowflakeConfig, CredentialProvider credentialProvider)
+ throws MalformedURLException
+ {
+ Properties properties = new Properties();
+ snowflakeConfig.getAccount().ifPresent(account -> properties.setProperty("account", account));
+ snowflakeConfig.getDatabase().ifPresent(database -> properties.setProperty("db", database));
+ snowflakeConfig.getRole().ifPresent(role -> properties.setProperty("role", role));
+ snowflakeConfig.getWarehouse().ifPresent(warehouse -> properties.setProperty("warehouse", warehouse));
+
+ // Set the expected date/time formatting we expect for our plugin to parse
+ properties.setProperty("TIMESTAMP_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM");
+ properties.setProperty("TIMESTAMP_NTZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM");
+ properties.setProperty("TIMESTAMP_TZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM");
+ properties.setProperty("TIMESTAMP_LTZ_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS.FF9TZH:TZM");
+ properties.setProperty("TIME_OUTPUT_FORMAT", "HH24:MI:SS.FF9");
+ snowflakeConfig.getTimestampNoTimezoneAsUTC().ifPresent(as_utc -> properties.setProperty("JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC", as_utc ? "true" : "false"));
+
+ // Support for Corporate proxies
+ if (snowflakeConfig.getHTTPProxy().isPresent()) {
+ String proxy = snowflakeConfig.getHTTPProxy().get();
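+ // Expected proxy format: protocol://[username:password@]host:port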
+
+ URL url = new URL(proxy);
+
+ properties.setProperty("useProxy", "true");
+ properties.setProperty("proxyHost", url.getHost());
+ properties.setProperty("proxyPort", Integer.toString(url.getPort()));
+ properties.setProperty("proxyProtocol", url.getProtocol());
+
+ String userInfo = url.getUserInfo();
+ if (userInfo != null) {
+ String[] usernamePassword = userInfo.split(":", 2);
+
+ if (usernamePassword.length != 2) {
+ throw new TrinoException(NOT_SUPPORTED, "Improper snowflake.http_proxy. username:password@ is optional but what was entered was not correct");
+ }
+
+ properties.setProperty("proxyUser", usernamePassword[0]);
+ properties.setProperty("proxyPassword", usernamePassword[1]);
+ }
+ }
+
+ return new DriverConnectionFactory(new SnowflakeDriver(), baseJdbcConfig.getConnectionUrl(), properties, credentialProvider);
+ }
+}
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java
new file mode 100644
index 00000000000000..6dbf1252017745
--- /dev/null
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import io.airlift.configuration.Config;
+
+import java.util.Optional;
+
+public class SnowflakeConfig
+{
+ private String account;
+ private String database;
+ private String role;
+ private String warehouse;
+ private Boolean timestampNoTimezoneAsUTC;
+ private String httpProxy;
+
+ public Optional<String> getAccount()
+ {
+ return Optional.ofNullable(account);
+ }
+
+ @Config("snowflake.account")
+ public SnowflakeConfig setAccount(String account)
+ {
+ this.account = account;
+ return this;
+ }
+
+ public Optional<String> getDatabase()
+ {
+ return Optional.ofNullable(database);
+ }
+
+ @Config("snowflake.database")
+ public SnowflakeConfig setDatabase(String database)
+ {
+ this.database = database;
+ return this;
+ }
+
+ public Optional<String> getRole()
+ {
+ return Optional.ofNullable(role);
+ }
+
+ @Config("snowflake.role")
+ public SnowflakeConfig setRole(String role)
+ {
+ this.role = role;
+ return this;
+ }
+
+ public Optional<String> getWarehouse()
+ {
+ return Optional.ofNullable(warehouse);
+ }
+
+ @Config("snowflake.warehouse")
+ public SnowflakeConfig setWarehouse(String warehouse)
+ {
+ this.warehouse = warehouse;
+ return this;
+ }
+
+ public Optional<Boolean> getTimestampNoTimezoneAsUTC()
+ {
+ return Optional.ofNullable(timestampNoTimezoneAsUTC);
+ }
+
+ @Config("snowflake.timestamp-no-timezone-as-utc")
+ public SnowflakeConfig setTimestampNoTimezoneAsUTC(Boolean timestampNoTimezoneAsUTC)
+ {
+ this.timestampNoTimezoneAsUTC = timestampNoTimezoneAsUTC;
+ return this;
+ }
+
+ public Optional<String> getHTTPProxy()
+ {
+ return Optional.ofNullable(httpProxy);
+ }
+
+ @Config("snowflake.http-proxy")
+ public SnowflakeConfig setHTTPProxy(String httpProxy)
+ {
+ this.httpProxy = httpProxy;
+ return this;
+ }
+}
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakePlugin.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakePlugin.java
new file mode 100644
index 00000000000000..728264d297784d
--- /dev/null
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakePlugin.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import io.trino.plugin.jdbc.JdbcPlugin;
+
+public class SnowflakePlugin
+ extends JdbcPlugin
+{
+ public SnowflakePlugin()
+ {
+ super("snowflake", new SnowflakeClientModule());
+ }
+}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java
new file mode 100644
index 00000000000000..b547624cebeacd
--- /dev/null
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java
@@ -0,0 +1,610 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import io.trino.Session;
+import io.trino.plugin.jdbc.BaseJdbcConnectorTest;
+import io.trino.testing.MaterializedResult;
+import io.trino.testing.TestingConnectorBehavior;
+import io.trino.testing.sql.TestTable;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.parallel.Execution;
+
+import java.util.Optional;
+import java.util.OptionalInt;
+
+import static com.google.common.base.Strings.nullToEmpty;
+import static io.trino.plugin.snowflake.TestingSnowflakeServer.TEST_SCHEMA;
+import static io.trino.spi.connector.ConnectorMetadata.MODIFYING_ROWS_MESSAGE;
+import static io.trino.spi.type.VarcharType.VARCHAR;
+import static io.trino.testing.MaterializedResult.resultBuilder;
+import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_CREATE_TABLE;
+import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_CREATE_TABLE_WITH_DATA;
+import static io.trino.testing.TestingNames.randomNameSuffix;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.abort;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
+import static org.junit.jupiter.api.parallel.ExecutionMode.CONCURRENT;
+
+@TestInstance(PER_CLASS)
+@Execution(CONCURRENT)
+public abstract class BaseSnowflakeConnectorTest
+ extends BaseJdbcConnectorTest
+{
+ protected TestingSnowflakeServer server;
+
+ @Override
+ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
+ {
+ switch (connectorBehavior) {
+ case SUPPORTS_AGGREGATION_PUSHDOWN:
+ case SUPPORTS_TOPN_PUSHDOWN:
+ case SUPPORTS_LIMIT_PUSHDOWN:
+ return false;
+ case SUPPORTS_COMMENT_ON_COLUMN:
+ case SUPPORTS_ADD_COLUMN_WITH_COMMENT:
+ case SUPPORTS_COMMENT_ON_TABLE:
+ case SUPPORTS_CREATE_TABLE_WITH_TABLE_COMMENT:
+ case SUPPORTS_CREATE_TABLE_WITH_COLUMN_COMMENT:
+ case SUPPORTS_SET_COLUMN_TYPE:
+ return false;
+ case SUPPORTS_DROP_FIELD:
+ case SUPPORTS_ROW_TYPE:
+ case SUPPORTS_ARRAY:
+ return false;
+ default:
+ return super.hasBehavior(connectorBehavior);
+ }
+ }
+
+ @Override
+ protected TestTable createTableWithDefaultColumns()
+ {
+ return new TestTable(
+ onRemoteDatabase(),
+ TEST_SCHEMA,
+ "(col_required BIGINT NOT NULL," +
+ "col_nullable BIGINT," +
+ "col_default BIGINT DEFAULT 43," +
+ "col_nonnull_default BIGINT NOT NULL DEFAULT 42," +
+ "col_required2 BIGINT NOT NULL)");
+ }
+
+ @Override
+ protected TestTable createTableWithUnsupportedColumn()
+ {
+ return new TestTable(
+ onRemoteDatabase(),
+ TEST_SCHEMA,
+ "(one bigint, two decimal(38,0), three varchar(10))");
+ }
+
+ @Override
+ protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup)
+ {
+ String typeName = dataMappingTestSetup.getTrinoTypeName();
+ // TODO: Test fails with these types
+ // Error: No result for query: SELECT row_id FROM test_data_mapping_smoke_real_3u8xo6hp59 WHERE rand() = 42 OR value = REAL '567.123'
+ // In the testDataMappingSmokeTestDataProvider(), the type sampleValueLiteral of type real should be "DOUBLE" rather than "REAL".
+ if (typeName.equals("real")) {
+ return Optional.empty();
+ }
+ // Error: Failed to insert data: SQL compilation error: error line 1 at position 130
+ if (typeName.equals("time")
+ || typeName.equals("time(6)")
+ || typeName.equals("timestamp(6)")) {
+ return Optional.empty();
+ }
+ // Error: not equal
+ if (typeName.equals("char(3)")) {
+ return Optional.empty();
+ }
+ return Optional.of(dataMappingTestSetup);
+ }
+
+ @Override
+ protected boolean isColumnNameRejected(Exception exception, String columnName, boolean delimited)
+ {
+ return nullToEmpty(exception.getMessage()).matches(".*(Incorrect column name).*");
+ }
+
+ @Override
+ protected MaterializedResult getDescribeOrdersResult()
+ {
+ // Override this test because the type of row "shippriority" should be bigint rather than integer for snowflake case
+ return resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
+ .row("orderkey", "bigint", "", "")
+ .row("custkey", "bigint", "", "")
+ .row("orderstatus", "varchar(1)", "", "")
+ .row("totalprice", "double", "", "")
+ .row("orderdate", "date", "", "")
+ .row("orderpriority", "varchar(15)", "", "")
+ .row("clerk", "varchar(15)", "", "")
+ .row("shippriority", "bigint", "", "")
+ .row("comment", "varchar(79)", "", "")
+ .build();
+ }
+
+ @Test
+ @Override
+ public void testShowColumns()
+ {
+ assertThat(query("SHOW COLUMNS FROM orders")).matches(getDescribeOrdersResult());
+ }
+
+ @Test
+ public void testViews()
+ {
+ String tableName = "test_view_" + randomNameSuffix();
+ onRemoteDatabase().execute("CREATE OR REPLACE VIEW tpch." + tableName + " AS SELECT * FROM tpch.orders");
+ assertQuery("SELECT orderkey FROM " + tableName, "SELECT orderkey FROM orders");
+ onRemoteDatabase().execute("DROP VIEW IF EXISTS tpch." + tableName);
+ }
+
+ @Test
+ @Override
+ public void testShowCreateTable()
+ {
+ // Override this test because the type of row "shippriority" should be bigint rather than integer for snowflake case
+ assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue())
+ .isEqualTo("CREATE TABLE snowflake.tpch.orders (\n" +
+ " orderkey bigint,\n" +
+ " custkey bigint,\n" +
+ " orderstatus varchar(1),\n" +
+ " totalprice double,\n" +
+ " orderdate date,\n" +
+ " orderpriority varchar(15),\n" +
+ " clerk varchar(15),\n" +
+ " shippriority bigint,\n" +
+ " comment varchar(79)\n" +
+ ")\n" +
+ "COMMENT ''");
+ }
+
+ @Test
+ @Override
+ public void testAddNotNullColumn()
+ {
+ assertThatThrownBy(super::testAddNotNullColumn)
+ .isInstanceOf(AssertionError.class)
+ .hasMessage("Unexpected failure when adding not null column");
+ }
+
+ @Test
+ @Override
+ public void testCharVarcharComparison()
+ {
+ assertThatThrownBy(super::testCharVarcharComparison)
+ .hasMessageContaining("For query")
+ .hasMessageContaining("Actual rows")
+ .hasMessageContaining("Expected rows");
+ }
+
+ @Test
+ @Override
+ public void testCountDistinctWithStringTypes()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testInsertInPresenceOfNotSupportedColumn()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testAggregationPushdown()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testDistinctAggregationPushdown()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNumericAggregationPushdown()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testLimitPushdown()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testInsertIntoNotNullColumn()
+ {
+ // TODO: java.lang.UnsupportedOperationException: This method should be overridden
+ assertThatThrownBy(super::testInsertIntoNotNullColumn);
+ }
+
+ @Test
+ @Override
+ public void testDeleteWithLike()
+ {
+ assertThatThrownBy(super::testDeleteWithLike)
+ .hasStackTraceContaining("TrinoException: " + MODIFYING_ROWS_MESSAGE);
+ }
+
+ @Test
+ @Override
+ public void testCreateTableAsSelect()
+ {
+ String tableName = "test_ctas" + randomNameSuffix();
+ if (!hasBehavior(SUPPORTS_CREATE_TABLE_WITH_DATA)) {
+ assertQueryFails("CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation", "This connector does not support creating tables with data");
+ return;
+ }
+ assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation", "SELECT count(*) FROM nation");
+ assertTableColumnNames(tableName, "name", "regionkey");
+
+ assertEquals(getTableComment(getSession().getCatalog().orElseThrow(), getSession().getSchema().orElseThrow(), tableName), "");
+ assertUpdate("DROP TABLE " + tableName);
+
+ // Some connectors support CREATE TABLE AS but not the ordinary CREATE TABLE. Let's test CTAS IF NOT EXISTS with a table that is guaranteed to exist.
+ assertUpdate("CREATE TABLE IF NOT EXISTS nation AS SELECT nationkey, regionkey FROM nation", 0);
+ assertTableColumnNames("nation", "nationkey", "name", "regionkey", "comment");
+
+ assertCreateTableAsSelect(
+ "SELECT nationkey, name, regionkey FROM nation",
+ "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect(
+ "SELECT mktsegment, sum(acctbal) x FROM customer GROUP BY mktsegment",
+ "SELECT count(DISTINCT mktsegment) FROM customer");
+
+ assertCreateTableAsSelect(
+ "SELECT count(*) x FROM nation JOIN region ON nation.regionkey = region.regionkey",
+ "SELECT 1");
+
+ assertCreateTableAsSelect(
+ "SELECT nationkey FROM nation ORDER BY nationkey LIMIT 10",
+ "SELECT 10");
+
+ assertCreateTableAsSelect(
+ "SELECT * FROM nation WITH DATA",
+ "SELECT * FROM nation",
+ "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect(
+ "SELECT * FROM nation WITH NO DATA",
+ "SELECT * FROM nation LIMIT 0",
+ "SELECT 0");
+
+ // Tests for CREATE TABLE with UNION ALL: exercises PushTableWriteThroughUnion optimizer
+
+ assertCreateTableAsSelect(
+ "SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 0 UNION ALL " +
+ "SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 1",
+ "SELECT name, nationkey, regionkey FROM nation",
+ "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect(
+ Session.builder(getSession()).setSystemProperty("redistribute_writes", "true").build(),
+ "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT count(*) + 1 FROM nation");
+
+ assertCreateTableAsSelect(
+ Session.builder(getSession()).setSystemProperty("redistribute_writes", "false").build(),
+ "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT count(*) + 1 FROM nation");
+
+ // TODO: BigQuery throws table not found at BigQueryClient.insert if we reuse the same table name
+ tableName = "test_ctas" + randomNameSuffix();
+ assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE " + tableName + " AS SELECT name FROM nation");
+ assertQuery("SELECT * from " + tableName, "SELECT name FROM nation");
+ assertUpdate("DROP TABLE " + tableName);
+ }
+
+ @Test
+ @Override
+ public void testCreateTable()
+ {
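+ // Overridden for the same reason as testCreateTableAsSelect: a newly created table is expected to report an empty comment.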
+ String tableName = "test_create_" + randomNameSuffix();
+ if (!hasBehavior(SUPPORTS_CREATE_TABLE)) {
+ assertQueryFails("CREATE TABLE " + tableName + " (a bigint, b double, c varchar(50))", "This connector does not support creating tables");
+ return;
+ }
+
+ assertThat(computeActual("SHOW TABLES").getOnlyColumnAsSet()) // prime the cache, if any
+ .doesNotContain(tableName);
+ assertUpdate("CREATE TABLE " + tableName + " (a bigint, b double, c varchar(50))");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertThat(computeActual("SHOW TABLES").getOnlyColumnAsSet())
+ .contains(tableName);
+ assertTableColumnNames(tableName, "a", "b", "c");
+ assertEquals(getTableComment(getSession().getCatalog().orElseThrow(), getSession().getSchema().orElseThrow(), tableName), "");
+
+ assertUpdate("DROP TABLE " + tableName);
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+ assertThat(computeActual("SHOW TABLES").getOnlyColumnAsSet())
+ .doesNotContain(tableName);
+
+ assertQueryFails("CREATE TABLE " + tableName + " (a bad_type)", ".* Unknown type 'bad_type' for column 'a'");
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+
+ // TODO (https://github.com/trinodb/trino/issues/5901) revert to longer name when Oracle version is updated
+ tableName = "test_cr_not_exists_" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableName + " (a bigint, b varchar(50), c double)");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertTableColumnNames(tableName, "a", "b", "c");
+
+ assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " (d bigint, e varchar(50))");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertTableColumnNames(tableName, "a", "b", "c");
+
+ assertUpdate("DROP TABLE " + tableName);
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+
+ // Test CREATE TABLE LIKE
+ tableName = "test_create_orig_" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableName + " (a bigint, b double, c varchar(50))");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertTableColumnNames(tableName, "a", "b", "c");
+
+ String tableNameLike = "test_create_like_" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableNameLike + " (LIKE " + tableName + ", d bigint, e varchar(50))");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableNameLike));
+ assertTableColumnNames(tableNameLike, "a", "b", "c", "d", "e");
+
+ assertUpdate("DROP TABLE " + tableName);
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+
+ assertUpdate("DROP TABLE " + tableNameLike);
+ assertFalse(getQueryRunner().tableExists(getSession(), tableNameLike));
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryCreateStatement()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryInsertStatementTableExists()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQuerySelectUnsupportedType()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testCreateTableWithLongColumnName()
+ {
+ String tableName = "test_long_column" + randomNameSuffix();
+ String baseColumnName = "col";
+
+ int maxLength = maxColumnNameLength()
+ // Assume 2^16 is enough for most use cases. Add a bit more to ensure 2^16 isn't the actual limit.
+ .orElse(65536 + 5);
+
+ String validColumnName = baseColumnName + "z".repeat(maxLength - baseColumnName.length());
+ assertUpdate("CREATE TABLE " + tableName + " (" + validColumnName + " bigint)");
+ assertTrue(columnExists(tableName, validColumnName));
+ assertUpdate("DROP TABLE " + tableName);
+
+ if (maxColumnNameLength().isEmpty()) {
+ return;
+ }
+ assertFalse(getQueryRunner().tableExists(getSession(), tableName));
+ }
+
+ @Test
+ @Override
+ public void testCreateTableWithLongTableName()
+ {
+ // TODO: Find the maximum table name length in Snowflake and enable this test.
+ abort("TODO");
+ }
+
+ @Override
+ protected OptionalInt maxColumnNameLength()
+ {
+ return OptionalInt.of(251);
+ }
+
+ @Test
+ @Override
+ public void testAlterTableAddLongColumnName()
+ {
+ String tableName = "test_long_column" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
+
+ String baseColumnName = "col";
+ int maxLength = maxColumnNameLength()
+ // Assume 2^16 is enough for most use cases. Add a bit more to ensure 2^16 isn't the actual limit.
+ .orElse(65536 + 5);
+
+ String validTargetColumnName = baseColumnName + "z".repeat(maxLength - baseColumnName.length());
+ assertUpdate("ALTER TABLE " + tableName + " ADD COLUMN " + validTargetColumnName + " int");
+ assertTrue(getQueryRunner().tableExists(getSession(), tableName));
+ assertQuery("SELECT x FROM " + tableName, "VALUES 123");
+ assertUpdate("DROP TABLE " + tableName);
+
+ if (maxColumnNameLength().isEmpty()) {
+ return;
+ }
+
+ assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
+ assertQuery("SELECT x FROM " + tableName, "VALUES 123");
+ }
+
+ @Test
+ @Override
+ public void testAlterTableRenameColumnToLongName()
+ {
+ String tableName = "test_long_column" + randomNameSuffix();
+ assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
+
+ String baseColumnName = "col";
+ int maxLength = maxColumnNameLength()
+ // Assume 2^16 is enough for most use cases. Add a bit more to ensure 2^16 isn't the actual limit.
+ .orElse(65536 + 5);
+
+ String validTargetColumnName = baseColumnName + "z".repeat(maxLength - baseColumnName.length());
+ assertUpdate("ALTER TABLE " + tableName + " RENAME COLUMN x TO " + validTargetColumnName);
+ assertQuery("SELECT " + validTargetColumnName + " FROM " + tableName, "VALUES 123");
+ assertUpdate("DROP TABLE " + tableName);
+
+ if (maxColumnNameLength().isEmpty()) {
+ return;
+ }
+
+ assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);
+ assertQuery("SELECT x FROM " + tableName, "VALUES 123");
+ }
+
+ @Test
+ @Override
+ public void testCreateSchemaWithLongName()
+ {
+ // TODO: Find the maximum table schema length in Snowflake and enable this test.
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testInsertArray()
+ {
+ // Snowflake does not support this feature.
+ abort("Not supported");
+ }
+
+ @Test
+ @Override
+ public void testInsertRowConcurrently()
+ {
+ abort("TODO: Connection is already closed");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryColumnAlias()
+ {
+ abort("TODO: Table function system.query not registered");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryColumnAliasNotFound()
+ {
+ abort("TODO: Table function system.query not registered");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryIncorrectSyntax()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryInsertStatementTableDoesNotExist()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQueryParameters()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQuerySelectFromNation()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQuerySelectFromTestTable()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testNativeQuerySimple()
+ {
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testRenameSchemaToLongName()
+ {
+ // TODO: Find the maximum table schema length in Snowflake and enable this test.
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testRenameTableToLongTableName()
+ {
+ // TODO: Find the maximum table length in Snowflake and enable this test.
+ abort("TODO");
+ }
+
+ @Test
+ @Override
+ public void testCharTrailingSpace()
+ {
+ assertThatThrownBy(super::testCharTrailingSpace)
+ .hasMessageContaining("For query")
+ .hasMessageContaining("Actual rows")
+ .hasMessageContaining("Expected rows");
+ }
+
+ @Test
+ @Override
+ public void testDescribeTable()
+ {
+ assertThat(query("DESCRIBE orders")).matches(getDescribeOrdersResult());
+ }
+}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java
new file mode 100644
index 00000000000000..a50debaf003b6e
--- /dev/null
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import io.airlift.log.Logger;
+import io.trino.Session;
+import io.trino.plugin.tpch.TpchPlugin;
+import io.trino.testing.DistributedQueryRunner;
+import io.trino.tpch.TpchTable;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static io.airlift.testing.Closeables.closeAllSuppress;
+import static io.trino.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME;
+import static io.trino.testing.QueryAssertions.copyTpchTables;
+import static io.trino.testing.TestingSession.testSessionBuilder;
+
+public final class SnowflakeQueryRunner
+{
+ public static final String TPCH_SCHEMA = "tpch";
+
+ private SnowflakeQueryRunner() {}
+
+ public static DistributedQueryRunner createSnowflakeQueryRunner(
+ TestingSnowflakeServer server,
+ Map<String, String> extraProperties,
+ Map<String, String> connectorProperties,
+ Iterable<TpchTable<?>> tables)
+ throws Exception
+ {
+ DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(createSession())
+ .setExtraProperties(extraProperties)
+ .build();
+ try {
+ queryRunner.installPlugin(new TpchPlugin());
+ queryRunner.createCatalog("tpch", "tpch");
+
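+ // Connection defaults come from the snowflake.test.server.* system properties (via TestingSnowflakeServer); values supplied in connectorProperties take precedence.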
+ connectorProperties = new HashMap<>(ImmutableMap.copyOf(connectorProperties));
+ connectorProperties.putIfAbsent("connection-url", TestingSnowflakeServer.TEST_URL);
+ connectorProperties.putIfAbsent("connection-user", TestingSnowflakeServer.TEST_USER);
+ connectorProperties.putIfAbsent("connection-password", TestingSnowflakeServer.TEST_PASSWORD);
+ connectorProperties.putIfAbsent("snowflake.database", TestingSnowflakeServer.TEST_DATABASE);
+ connectorProperties.putIfAbsent("snowflake.role", TestingSnowflakeServer.TEST_ROLE);
+ connectorProperties.putIfAbsent("snowflake.warehouse", TestingSnowflakeServer.TEST_WAREHOUSE);
+ if (TestingSnowflakeServer.TEST_PROXY != null) {
+ connectorProperties.putIfAbsent("snowflake.httpProxy", TestingSnowflakeServer.TEST_PROXY);
+ }
+
+ queryRunner.installPlugin(new SnowflakePlugin());
+ queryRunner.createCatalog("snowflake", "snowflake", connectorProperties);
+
+ copyTpchTables(queryRunner, "tpch", TINY_SCHEMA_NAME, createSession(), tables);
+
+ return queryRunner;
+ }
+ catch (Throwable e) {
+ closeAllSuppress(e, queryRunner);
+ throw e;
+ }
+ }
+
+ public static Session createSession()
+ {
+ return testSessionBuilder()
+ .setCatalog("snowflake")
+ .setSchema(TPCH_SCHEMA)
+ .build();
+ }
+
+ public static void main(String[] args)
+ throws Exception
+ {
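+ // Convenience entry point for manual testing: starts a standalone coordinator on port 8080 against the Snowflake account configured via system properties.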
+ DistributedQueryRunner queryRunner = createSnowflakeQueryRunner(
+ new TestingSnowflakeServer(),
+ ImmutableMap.of("http-server.http.port", "8080"),
+ ImmutableMap.of(),
+ ImmutableList.of());
+
+ Logger log = Logger.get(SnowflakeQueryRunner.class);
+ log.info("======== SERVER STARTED ========");
+ log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
+ }
+}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeClient.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeClient.java
new file mode 100644
index 00000000000000..3e92cfcb6f6cee
--- /dev/null
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeClient.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import io.trino.plugin.base.mapping.DefaultIdentifierMapping;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.ColumnMapping;
+import io.trino.plugin.jdbc.DefaultQueryBuilder;
+import io.trino.plugin.jdbc.JdbcClient;
+import io.trino.plugin.jdbc.JdbcColumnHandle;
+import io.trino.plugin.jdbc.JdbcExpression;
+import io.trino.plugin.jdbc.JdbcTypeHandle;
+import io.trino.plugin.jdbc.logging.RemoteQueryModifier;
+import io.trino.spi.connector.AggregateFunction;
+import io.trino.spi.connector.ColumnHandle;
+import io.trino.spi.expression.ConnectorExpression;
+import io.trino.spi.expression.Variable;
+import org.testng.annotations.Test;
+
+import java.sql.Types;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.BooleanType.BOOLEAN;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.testing.TestingConnectorSession.SESSION;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+public class TestSnowflakeClient
+{
+ private static final JdbcColumnHandle BIGINT_COLUMN =
+ JdbcColumnHandle.builder()
+ .setColumnName("c_bigint")
+ .setColumnType(BIGINT)
+ .setJdbcTypeHandle(new JdbcTypeHandle(Types.BIGINT, Optional.of("int8"), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty()))
+ .build();
+
+ private static final JdbcColumnHandle DOUBLE_COLUMN =
+ JdbcColumnHandle.builder()
+ .setColumnName("c_double")
+ .setColumnType(DOUBLE)
+ .setJdbcTypeHandle(new JdbcTypeHandle(Types.DOUBLE, Optional.of("double"), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty()))
+ .build();
+
+ private static final JdbcClient JDBC_CLIENT = new SnowflakeClient(
+ new BaseJdbcConfig(),
+ session -> { throw new UnsupportedOperationException(); },
+ new DefaultQueryBuilder(RemoteQueryModifier.NONE),
+ new DefaultIdentifierMapping(),
+ RemoteQueryModifier.NONE);
+
+ @Test
+ public void testImplementCount()
+ {
+ Variable bigintVariable = new Variable("v_bigint", BIGINT);
+ Variable doubleVariable = new Variable("v_double", DOUBLE);
+ Optional<ConnectorExpression> filter = Optional.of(new Variable("a_filter", BOOLEAN));
+
+ // count(*)
+ testImplementAggregation(
+ new AggregateFunction("count", BIGINT, List.of(), List.of(), false, Optional.empty()),
+ Map.of(),
+ Optional.of("count(*)"));
+
+ // count(bigint)
+ testImplementAggregation(
+ new AggregateFunction("count", BIGINT, List.of(bigintVariable), List.of(), false, Optional.empty()),
+ Map.of(bigintVariable.getName(), BIGINT_COLUMN),
+ Optional.of("count(\"c_bigint\")"));
+
+ // count(double)
+ testImplementAggregation(
+ new AggregateFunction("count", BIGINT, List.of(doubleVariable), List.of(), false, Optional.empty()),
+ Map.of(doubleVariable.getName(), DOUBLE_COLUMN),
+ Optional.of("count(\"c_double\")"));
+
+ // count() FILTER (WHERE ...)
+ testImplementAggregation(
+ new AggregateFunction("count", BIGINT, List.of(), List.of(), false, filter),
+ Map.of(),
+ Optional.empty());
+
+ // count(bigint) FILTER (WHERE ...)
+ testImplementAggregation(
+ new AggregateFunction("count", BIGINT, List.of(bigintVariable), List.of(), false, filter),
+ Map.of(bigintVariable.getName(), BIGINT_COLUMN),
+ Optional.empty());
+ }
+
+ @Test
+ public void testImplementSum()
+ {
+ Variable bigintVariable = new Variable("v_bigint", BIGINT);
+ Variable doubleVariable = new Variable("v_double", DOUBLE);
+ Optional<ConnectorExpression> filter = Optional.of(new Variable("a_filter", BOOLEAN));
+
+ // sum(bigint)
+ testImplementAggregation(
+ new AggregateFunction("sum", BIGINT, List.of(bigintVariable), List.of(), false, Optional.empty()),
+ Map.of(bigintVariable.getName(), BIGINT_COLUMN),
+ Optional.of("sum(\"c_bigint\")"));
+
+ // sum(double)
+ testImplementAggregation(
+ new AggregateFunction("sum", DOUBLE, List.of(doubleVariable), List.of(), false, Optional.empty()),
+ Map.of(doubleVariable.getName(), DOUBLE_COLUMN),
+ Optional.of("sum(\"c_double\")"));
+
+ // sum(DISTINCT bigint)
+ testImplementAggregation(
+ new AggregateFunction("sum", BIGINT, List.of(bigintVariable), List.of(), true, Optional.empty()),
+ Map.of(bigintVariable.getName(), BIGINT_COLUMN),
+ Optional.of("sum(DISTINCT \"c_bigint\")"));
+
+ // sum(bigint) FILTER (WHERE ...)
+ testImplementAggregation(
+ new AggregateFunction("sum", BIGINT, List.of(bigintVariable), List.of(), false, filter),
+ Map.of(bigintVariable.getName(), BIGINT_COLUMN),
+ Optional.empty()); // filter not supported
+ }
+
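+ // Checks aggregate pushdown: expects either no rewrite (empty) or the given SQL expression, whose JDBC type must map back to the aggregate's output type.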
+ private static void testImplementAggregation(AggregateFunction aggregateFunction, Map<String, ColumnHandle> assignments, Optional<String> expectedExpression)
+ {
+ Optional<JdbcExpression> result = JDBC_CLIENT.implementAggregation(SESSION, aggregateFunction, assignments);
+ if (expectedExpression.isEmpty()) {
+ assertThat(result).isEmpty();
+ }
+ else {
+ assertThat(result).isPresent();
+ assertEquals(result.get().getExpression(), expectedExpression.get());
+ Optional<ColumnMapping> columnMapping = JDBC_CLIENT.toColumnMapping(SESSION, null, result.get().getJdbcTypeHandle());
+ assertTrue(columnMapping.isPresent(), "No mapping for: " + result.get().getJdbcTypeHandle());
+ assertEquals(columnMapping.get().getType(), aggregateFunction.getOutputType());
+ }
+ }
+}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java
new file mode 100644
index 00000000000000..eb5c32a3d063c6
--- /dev/null
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import com.google.common.collect.ImmutableMap;
+import org.testng.annotations.Test;
+
+import java.util.Map;
+
+import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping;
+import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults;
+import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults;
+
+public class TestSnowflakeConfig
+{
+ @Test
+ public void testDefaults()
+ {
+ assertRecordedDefaults(recordDefaults(SnowflakeConfig.class)
+ .setAccount(null)
+ .setDatabase(null)
+ .setRole(null)
+ .setWarehouse(null)
+ .setHTTPProxy(null)
+ .setTimestampNoTimezoneAsUTC(null));
+ }
+
+ @Test
+ public void testExplicitPropertyMappings()
+ {
+ Map<String, String> properties = ImmutableMap.<String, String>builder()
+ .put("snowflake.account", "MYACCOUNT")
+ .put("snowflake.database", "MYDATABASE")
+ .put("snowflake.role", "MYROLE")
+ .put("snowflake.warehouse", "MYWAREHOUSE")
+ .put("snowflake.http-proxy", "MYPROXY")
+ .put("snowflake.timestamp-no-timezone-as-utc", "true")
+ .buildOrThrow();
+
+ SnowflakeConfig expected = new SnowflakeConfig()
+ .setAccount("MYACCOUNT")
+ .setDatabase("MYDATABASE")
+ .setRole("MYROLE")
+ .setWarehouse("MYWAREHOUSE")
+ .setHTTPProxy("MYPROXY")
+ .setTimestampNoTimezoneAsUTC(true);
+
+ assertFullMapping(properties, expected);
+ }
+}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java
new file mode 100644
index 00000000000000..8b9b0c78c73b54
--- /dev/null
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import com.google.common.collect.ImmutableMap;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.sql.SqlExecutor;
+
+import static io.trino.plugin.snowflake.SnowflakeQueryRunner.createSnowflakeQueryRunner;
+
+public class TestSnowflakeConnectorTest
+ extends BaseSnowflakeConnectorTest
+{
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ server = closeAfterClass(new TestingSnowflakeServer());
+ return createSnowflakeQueryRunner(server, ImmutableMap.of(), ImmutableMap.of(), REQUIRED_TPCH_TABLES);
+ }
+
+ @Override
+ protected SqlExecutor onRemoteDatabase()
+ {
+ return server::execute;
+ }
+}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakePlugin.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakePlugin.java
new file mode 100644
index 00000000000000..26165c3f018cb3
--- /dev/null
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakePlugin.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import com.google.common.collect.ImmutableMap;
+import io.trino.spi.Plugin;
+import io.trino.spi.connector.ConnectorFactory;
+import io.trino.testing.TestingConnectorContext;
+import org.testng.annotations.Test;
+
+import static com.google.common.collect.Iterables.getOnlyElement;
+
+public class TestSnowflakePlugin
+{
+ @Test
+ public void testCreateConnector()
+ {
+ Plugin plugin = new SnowflakePlugin();
+ ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
+ factory.create("test", ImmutableMap.of("connection-url", "jdbc:snowflake://test"), new TestingConnectorContext()).shutdown();
+ }
+}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java
new file mode 100644
index 00000000000000..5377e013a6cd8c
--- /dev/null
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java
@@ -0,0 +1,391 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import io.trino.Session;
+import io.trino.spi.type.TimeZoneKey;
+import io.trino.testing.AbstractTestQueryFramework;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.TestingSession;
+import io.trino.testing.datatype.CreateAndInsertDataSetup;
+import io.trino.testing.datatype.CreateAsSelectDataSetup;
+import io.trino.testing.datatype.DataSetup;
+import io.trino.testing.datatype.SqlDataTypeTest;
+import io.trino.testing.sql.TrinoSqlExecutor;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.parallel.Execution;
+
+import java.time.LocalDate;
+import java.time.ZoneId;
+
+import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.base.Verify.verify;
+import static io.trino.plugin.snowflake.SnowflakeQueryRunner.createSnowflakeQueryRunner;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.BooleanType.BOOLEAN;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.spi.type.TimeZoneKey.getTimeZoneKey;
+import static io.trino.spi.type.TimestampType.createTimestampType;
+import static io.trino.spi.type.VarbinaryType.VARBINARY;
+import static io.trino.spi.type.VarcharType.createVarcharType;
+import static java.time.ZoneOffset.UTC;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
+import static org.junit.jupiter.api.parallel.ExecutionMode.CONCURRENT;
+
+@TestInstance(PER_CLASS)
+@Execution(CONCURRENT)
+public class TestSnowflakeTypeMapping
+ extends AbstractTestQueryFramework
+{
+ protected TestingSnowflakeServer snowflakeServer;
+
+ private final ZoneId jvmZone = ZoneId.systemDefault();
+ // no DST in 1970, but has DST in later years (e.g. 2018)
+ private final ZoneId vilnius = ZoneId.of("Europe/Vilnius");
+ // minutes offset change since 1970-01-01, no DST
+ private final ZoneId kathmandu = ZoneId.of("Asia/Kathmandu");
+
+ @BeforeAll
+ public void setUp()
+ {
+ String zone = jvmZone.getId();
+ checkState(zone.equals("America/Bahia_Banderas"), "Timezone not configured correctly. Add -Duser.timezone=America/Bahia_Banderas to your JVM arguments");
+ checkIsGap(jvmZone, LocalDate.of(1970, 1, 1));
+ checkIsGap(vilnius, LocalDate.of(1983, 4, 1));
+ verify(vilnius.getRules().getValidOffsets(LocalDate.of(1983, 10, 1).atStartOfDay().minusMinutes(1)).size() == 2);
+ }
+
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ snowflakeServer = new TestingSnowflakeServer();
+ return createSnowflakeQueryRunner(
+ snowflakeServer,
+ ImmutableMap.of(),
+ ImmutableMap.of(),
+ ImmutableList.of());
+ }
+
+ @Test
+ public void testBoolean()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("boolean", "true", BOOLEAN, "BOOLEAN '1'")
+ .addRoundTrip("boolean", "false", BOOLEAN, "BOOLEAN '0'")
+ .addRoundTrip("boolean", "NULL", BOOLEAN, "CAST(NULL AS BOOLEAN)")
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.test_boolean"))
+ .execute(getQueryRunner(), trinoCreateAsSelect("tpch.test_boolean"))
+ .execute(getQueryRunner(), trinoCreateAndInsert("tpch.test_boolean"));
+ }
+
+ @Test
+ public void testInteger()
+ {
+ // INT, INTEGER, BIGINT, SMALLINT, TINYINT, and BYTEINT are aliases for NUMBER(38, 0) in Snowflake
+ // https://docs.snowflake.com/en/sql-reference/data-types-numeric.html#int-integer-bigint-smallint-tinyint-byteint
+ testInteger("INT");
+ testInteger("INTEGER");
+ testInteger("BIGINT");
+ testInteger("SMALLINT");
+ testInteger("TINYINT");
+ testInteger("BYTEINT");
+ }
+
+ private void testInteger(String inputType)
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip(inputType, "-9223372036854775808", BIGINT, "-9223372036854775808")
+ .addRoundTrip(inputType, "9223372036854775807", BIGINT, "9223372036854775807")
+ .addRoundTrip(inputType, "0", BIGINT, "CAST(0 AS BIGINT)")
+ .addRoundTrip(inputType, "NULL", BIGINT, "CAST(NULL AS BIGINT)")
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.integer"));
+ }
+
+ @Test
+ public void testDecimal()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("decimal(3, 0)", "NULL", BIGINT, "CAST(NULL AS BIGINT)")
+ .addRoundTrip("decimal(3, 0)", "CAST('193' AS decimal(3, 0))", BIGINT, "CAST('193' AS BIGINT)")
+ .addRoundTrip("decimal(3, 0)", "CAST('19' AS decimal(3, 0))", BIGINT, "CAST('19' AS BIGINT)")
+ .addRoundTrip("decimal(3, 0)", "CAST('-193' AS decimal(3, 0))", BIGINT, "CAST('-193' AS BIGINT)")
+ .addRoundTrip("decimal(3, 1)", "CAST('10.0' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))")
+ .addRoundTrip("decimal(3, 1)", "CAST('10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.1' AS decimal(3, 1))")
+ .addRoundTrip("decimal(3, 1)", "CAST('-10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('-10.1' AS decimal(3, 1))")
+ .addRoundTrip("decimal(4, 2)", "CAST('2' AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2' AS decimal(4, 2))")
+ .addRoundTrip("decimal(4, 2)", "CAST('2.3' AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2.3' AS decimal(4, 2))")
+ .addRoundTrip("decimal(24, 2)", "CAST('2' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2' AS decimal(24, 2))")
+ .addRoundTrip("decimal(24, 2)", "CAST('2.3' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2.3' AS decimal(24, 2))")
+ .addRoundTrip("decimal(24, 2)", "CAST('123456789.3' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('123456789.3' AS decimal(24, 2))")
+ .addRoundTrip("decimal(24, 4)", "CAST('12345678901234567890.31' AS decimal(24, 4))", createDecimalType(24, 4), "CAST('12345678901234567890.31' AS decimal(24, 4))")
+ .addRoundTrip("decimal(30, 5)", "CAST('3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('3141592653589793238462643.38327' AS decimal(30, 5))")
+ .addRoundTrip("decimal(30, 5)", "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))")
+ .addRoundTrip("decimal(38, 0)", "CAST(NULL AS decimal(38, 0))", BIGINT, "CAST(NULL AS BIGINT)")
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.test_decimal"))
+ .execute(getQueryRunner(), trinoCreateAsSelect("test_decimal"))
+ .execute(getQueryRunner(), trinoCreateAndInsert("test_decimal"));
+ }
+
+ @Test
+ public void testFloat()
+ {
+ // https://docs.snowflake.com/en/sql-reference/data-types-numeric.html#float-float4-float8
+ SqlDataTypeTest.create()
+ .addRoundTrip("real", "3.14", DOUBLE, "DOUBLE '3.14'")
+ .addRoundTrip("real", "10.3e0", DOUBLE, "DOUBLE '10.3e0'")
+ .addRoundTrip("real", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
+ .addRoundTrip("real", "CAST('NaN' AS DOUBLE)", DOUBLE, "nan()")
+ .addRoundTrip("real", "CAST('Infinity' AS DOUBLE)", DOUBLE, "+infinity()")
+ .addRoundTrip("real", "CAST('-Infinity' AS DOUBLE)", DOUBLE, "-infinity()")
+ .execute(getQueryRunner(), trinoCreateAsSelect("tpch.test_real"))
+ .execute(getQueryRunner(), trinoCreateAndInsert("tpch.test_real"));
+
+ SqlDataTypeTest.create()
+ .addRoundTrip("float", "3.14", DOUBLE, "DOUBLE '3.14'")
+ .addRoundTrip("float", "10.3e0", DOUBLE, "DOUBLE '10.3e0'")
+ .addRoundTrip("float", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
+ .addRoundTrip("float", "CAST('NaN' AS float)", DOUBLE, "nan()")
+ .addRoundTrip("float", "CAST('Infinity' AS float)", DOUBLE, "+infinity()")
+ .addRoundTrip("float", "CAST('-Infinity' AS float)", DOUBLE, "-infinity()")
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.test_float"));
+ }
+
+ @Test
+ public void testDouble()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("double", "3.14", DOUBLE, "CAST(3.14 AS DOUBLE)")
+ .addRoundTrip("double", "1.0E100", DOUBLE, "1.0E100")
+ .addRoundTrip("double", "1.23456E12", DOUBLE, "1.23456E12")
+ .addRoundTrip("double", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
+ .addRoundTrip("double", "CAST('NaN' AS DOUBLE)", DOUBLE, "nan()")
+ .addRoundTrip("double", "CAST('Infinity' AS DOUBLE)", DOUBLE, "+infinity()")
+ .addRoundTrip("double", "CAST('-Infinity' AS DOUBLE)", DOUBLE, "-infinity()")
+ .execute(getQueryRunner(), trinoCreateAsSelect("trino_test_double"))
+ .execute(getQueryRunner(), trinoCreateAndInsert("trino_test_double"))
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.test_double"));
+ }
+
+ @Test
+ public void testSnowflakeCreatedParameterizedVarchar()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("text", "'b'", createVarcharType(16777216), "CAST('b' AS VARCHAR(16777216))")
+ .addRoundTrip("varchar(32)", "'e'", createVarcharType(32), "CAST('e' AS VARCHAR(32))")
+ .addRoundTrip("varchar(15000)", "'f'", createVarcharType(15000), "CAST('f' AS VARCHAR(15000))")
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.snowflake_test_parameterized_varchar"));
+ }
+
+ @Test
+ public void testSnowflakeCreatedParameterizedVarcharUnicode()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("text collate \'utf8\'", "'攻殻機動隊'", createVarcharType(16777216), "CAST('攻殻機動隊' AS VARCHAR(16777216))")
+ .addRoundTrip("varchar(5) collate \'utf8\'", "'攻殻機動隊'", createVarcharType(5), "CAST('攻殻機動隊' AS VARCHAR(5))")
+ .addRoundTrip("varchar(32) collate \'utf8\'", "'攻殻機動隊'", createVarcharType(32), "CAST('攻殻機動隊' AS VARCHAR(32))")
+ .addRoundTrip("varchar(20000) collate \'utf8\'", "'攻殻機動隊'", createVarcharType(20000), "CAST('攻殻機動隊' AS VARCHAR(20000))")
+ .addRoundTrip("varchar(1) collate \'utf8mb4\'", "'😂'", createVarcharType(1), "CAST('😂' AS VARCHAR(1))")
+ .addRoundTrip("varchar(77) collate \'utf8mb4\'", "'Ну, погоди!'", createVarcharType(77), "CAST('Ну, погоди!' AS VARCHAR(77))")
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.snowflake_test_parameterized_varchar_unicode"));
+ }
+
+ @Test
+ public void testParameterizedChar()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("char", "''", createVarcharType(1), "CAST(' ' AS varchar(1))")
+ .addRoundTrip("char", "'a'", createVarcharType(1), "CAST('a' AS varchar(1))")
+ .addRoundTrip("char(1)", "''", createVarcharType(1), "CAST(' ' AS varchar(1))")
+ .addRoundTrip("char(1)", "'a'", createVarcharType(1), "CAST('a' AS varchar(1))")
+ .addRoundTrip("char(8)", "'abc'", createVarcharType(8), "CAST('abc ' AS varchar(8))")
+ .addRoundTrip("char(8)", "'12345678'", createVarcharType(8), "CAST('12345678' AS varchar(8))")
+ .execute(getQueryRunner(), trinoCreateAsSelect("snowflake_test_parameterized_char"));
+
+ SqlDataTypeTest.create()
+ .addRoundTrip("char", "''", createVarcharType(1), "CAST('' AS varchar(1))")
+ .addRoundTrip("char", "'a'", createVarcharType(1), "CAST('a' AS varchar(1))")
+ .addRoundTrip("char(1)", "''", createVarcharType(1), "CAST('' AS varchar(1))")
+ .addRoundTrip("char(1)", "'a'", createVarcharType(1), "CAST('a' AS varchar(1))")
+ .addRoundTrip("char(8)", "'abc'", createVarcharType(8), "CAST('abc' AS varchar(8))")
+ .addRoundTrip("char(8)", "'12345678'", createVarcharType(8), "CAST('12345678' AS varchar(8))")
+ .execute(getQueryRunner(), trinoCreateAndInsert("snowflake_test_parameterized_char"))
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.snowflake_test_parameterized_char"));
+ }
+
+ @Test
+ public void testSnowflakeParameterizedCharUnicode()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("char(1) collate \'utf8\'", "'攻'", createVarcharType(1), "CAST('攻' AS VARCHAR(1))")
+ .addRoundTrip("char(5) collate \'utf8\'", "'攻殻'", createVarcharType(5), "CAST('攻殻' AS VARCHAR(5))")
+ .addRoundTrip("char(5) collate \'utf8\'", "'攻殻機動隊'", createVarcharType(5), "CAST('攻殻機動隊' AS VARCHAR(5))")
+ .addRoundTrip("char(1)", "'😂'", createVarcharType(1), "CAST('😂' AS VARCHAR(1))")
+ .addRoundTrip("char(77)", "'Ну, погоди!'", createVarcharType(77), "CAST('Ну, погоди!' AS VARCHAR(77))")
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.snowflake_test_parameterized_char"));
+ }
+
+ @Test
+ public void testBinary()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("binary(18)", "NULL", VARBINARY, "CAST(NULL AS varbinary)")
+ .addRoundTrip("binary(18)", "X''", VARBINARY, "X''")
+ .addRoundTrip("binary(18)", "X'68656C6C6F'", VARBINARY, "to_utf8('hello')")
+ .addRoundTrip("binary(18)", "X'C582C4856B61207720E69DB1E4BAACE983BD'", VARBINARY, "to_utf8('łąka w 東京都')") // no trailing zeros
+ .addRoundTrip("binary(18)", "X'4261672066756C6C206F6620F09F92B0'", VARBINARY, "to_utf8('Bag full of 💰')")
+ .addRoundTrip("binary(18)", "X'0001020304050607080DF9367AA7000000'", VARBINARY, "X'0001020304050607080DF9367AA7000000'") // non-text prefix
+ .addRoundTrip("binary(18)", "X'000000000000'", VARBINARY, "X'000000000000'")
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.test_binary"));
+ }
+
+ @Test
+ public void testVarbinary()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("varbinary", "NULL", VARBINARY, "CAST(NULL AS varbinary)")
+ .addRoundTrip("varbinary", "X''", VARBINARY, "X''")
+ .addRoundTrip("varbinary", "X'68656C6C6F'", VARBINARY, "to_utf8('hello')")
+ .addRoundTrip("varbinary", "X'5069C4996B6E6120C582C4856B61207720E69DB1E4BAACE983BD'", VARBINARY, "to_utf8('Piękna łąka w 東京都')")
+ .addRoundTrip("varbinary", "X'4261672066756C6C206F6620F09F92B0'", VARBINARY, "to_utf8('Bag full of 💰')")
+ .addRoundTrip("varbinary", "X'0001020304050607080DF9367AA7000000'", VARBINARY, "X'0001020304050607080DF9367AA7000000'") // non-text
+ .addRoundTrip("varbinary", "X'000000000000'", VARBINARY, "X'000000000000'")
+ .execute(getQueryRunner(), trinoCreateAsSelect("test_varbinary"))
+ .execute(getQueryRunner(), trinoCreateAndInsert("test_varbinary"))
+ .execute(getQueryRunner(), snowflakeCreateAndInsert("tpch.test_varbinary"));
+ }
+
+ @Test
+ public void testDate()
+ {
+ testDate(UTC);
+ testDate(jvmZone);
+ testDate(vilnius);
+ testDate(kathmandu);
+ testDate(TestingSession.DEFAULT_TIME_ZONE_KEY.getZoneId());
+ }
+
+ private void testDate(ZoneId sessionZone)
+ {
+ Session session = Session.builder(getSession())
+ .setTimeZoneKey(getTimeZoneKey(sessionZone.getId()))
+ .build();
+
+ SqlDataTypeTest.create()
+ .addRoundTrip("date", "NULL", DATE, "CAST(NULL AS DATE)")
+ .addRoundTrip("date", "'-5877641-06-23'", DATE, "DATE '-5877641-06-23'") // min value in Trino
+ .addRoundTrip("date", "'0000-01-01'", DATE, "DATE '0000-01-01'")
+ .addRoundTrip("date", "DATE '0001-01-01'", DATE, "DATE '0001-01-01'") // Min value for the function Date.
+ .addRoundTrip("date", "DATE '1582-10-05'", DATE, "DATE '1582-10-05'") // begin julian->gregorian switch
+ .addRoundTrip("date", "DATE '1582-10-14'", DATE, "DATE '1582-10-14'") // end julian->gregorian switch
+ .addRoundTrip("date", "DATE '1983-04-01'", DATE, "DATE '1983-04-01'")
+ .addRoundTrip("date", "DATE '1983-10-01'", DATE, "DATE '1983-10-01'")
+ .addRoundTrip("date", "DATE '2017-07-01'", DATE, "DATE '2017-07-01'") // summer on northern hemisphere (possible DST)
+ .addRoundTrip("date", "DATE '2017-01-01'", DATE, "DATE '2017-01-01'") // winter on northern hemisphere (possible DST on southern hemisphere)
+ .addRoundTrip("date", "DATE '99999-12-31'", DATE, "DATE '99999-12-31'")
+ .addRoundTrip("date", "'5881580-07-11'", DATE, "DATE '5881580-07-11'") // max value in Trino
+ .execute(getQueryRunner(), session, trinoCreateAsSelect("test_date"))
+ .execute(getQueryRunner(), session, snowflakeCreateAndInsert("tpch.test_date"));
+ }
+
+ @Test
+ public void testTimestamp()
+ {
+ testTimestamp(UTC);
+ testTimestamp(jvmZone);
+ testTimestamp(vilnius);
+ testTimestamp(kathmandu);
+ testTimestamp(TestingSession.DEFAULT_TIME_ZONE_KEY.getZoneId());
+ }
+
+ private void testTimestamp(ZoneId sessionZone)
+ {
+ Session session = Session.builder(getSession())
+ .setTimeZoneKey(TimeZoneKey.getTimeZoneKey(sessionZone.getId()))
+ .build();
+
+ SqlDataTypeTest.create()
+ // after epoch (MariaDb's timestamp type doesn't support values <= epoch)
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '2019-03-18 10:01:17.987'", createTimestampType(3), "TIMESTAMP '2019-03-18 10:01:17.987'")
+ // time doubled in JVM zone
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '2018-10-28 01:33:17.456'", createTimestampType(3), "TIMESTAMP '2018-10-28 01:33:17.456'")
+ // time doubled in Vilnius
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '2018-10-28 03:33:33.333'", createTimestampType(3), "TIMESTAMP '2018-10-28 03:33:33.333'")
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '1970-01-01 00:13:42.000'", createTimestampType(3), "TIMESTAMP '1970-01-01 00:13:42.000'")
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '2018-04-01 02:13:55.123'", createTimestampType(3), "TIMESTAMP '2018-04-01 02:13:55.123'")
+ // time gap in Vilnius
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '2018-03-25 03:17:17.000'", createTimestampType(3), "TIMESTAMP '2018-03-25 03:17:17.000'")
+ // time gap in Kathmandu
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '1986-01-01 00:13:07.000'", createTimestampType(3), "TIMESTAMP '1986-01-01 00:13:07.000'")
+ // max value 2038-01-19 03:14:07
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '2038-01-19 03:14:07.000'", createTimestampType(3), "TIMESTAMP '2038-01-19 03:14:07.000'")
+ // test arbitrary time for all supported precisions
+ .addRoundTrip("timestamp(0)", "TIMESTAMP '1970-01-01 00:00:01'", createTimestampType(0), "TIMESTAMP '1970-01-01 00:00:01'")
+ .addRoundTrip("timestamp(1)", "TIMESTAMP '1970-01-01 00:00:01.1'", createTimestampType(1), "TIMESTAMP '1970-01-01 00:00:01.1'")
+ .addRoundTrip("timestamp(1)", "TIMESTAMP '1970-01-01 00:00:01.9'", createTimestampType(1), "TIMESTAMP '1970-01-01 00:00:01.9'")
+ .addRoundTrip("timestamp(2)", "TIMESTAMP '1970-01-01 00:00:01.12'", createTimestampType(2), "TIMESTAMP '1970-01-01 00:00:01.12'")
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '1970-01-01 00:00:01.123'", createTimestampType(3), "TIMESTAMP '1970-01-01 00:00:01.123'")
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '1970-01-01 00:00:01.999'", createTimestampType(3), "TIMESTAMP '1970-01-01 00:00:01.999'")
+ .addRoundTrip("timestamp(1)", "TIMESTAMP '2020-09-27 12:34:56.1'", createTimestampType(1), "TIMESTAMP '2020-09-27 12:34:56.1'")
+ .addRoundTrip("timestamp(1)", "TIMESTAMP '2020-09-27 12:34:56.9'", createTimestampType(1), "TIMESTAMP '2020-09-27 12:34:56.9'")
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '2020-09-27 12:34:56.123'", createTimestampType(3), "TIMESTAMP '2020-09-27 12:34:56.123'")
+ .addRoundTrip("timestamp(3)", "TIMESTAMP '2020-09-27 12:34:56.999'", createTimestampType(3), "TIMESTAMP '2020-09-27 12:34:56.999'")
+ .execute(getQueryRunner(), session, snowflakeCreateAndInsert("tpch.test_timestamp"))
+ .execute(getQueryRunner(), session, trinoCreateAsSelect(session, "test_timestamp"))
+ .execute(getQueryRunner(), session, trinoCreateAsSelect("test_timestamp"))
+ .execute(getQueryRunner(), session, trinoCreateAndInsert(session, "test_timestamp"))
+ .execute(getQueryRunner(), session, trinoCreateAndInsert("test_timestamp"));
+ }
+
+ private DataSetup trinoCreateAsSelect(String tableNamePrefix)
+ {
+ return trinoCreateAsSelect(getSession(), tableNamePrefix);
+ }
+
+ private DataSetup trinoCreateAsSelect(Session session, String tableNamePrefix)
+ {
+ return new CreateAsSelectDataSetup(new TrinoSqlExecutor(getQueryRunner(), session), tableNamePrefix);
+ }
+
+ private DataSetup trinoCreateAndInsert(String tableNamePrefix)
+ {
+ return trinoCreateAndInsert(getSession(), tableNamePrefix);
+ }
+
+ private DataSetup trinoCreateAndInsert(Session session, String tableNamePrefix)
+ {
+ return new CreateAndInsertDataSetup(new TrinoSqlExecutor(getQueryRunner(), session), tableNamePrefix);
+ }
+
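+ // Creates and populates the test table directly in Snowflake, so values are stored using Snowflake's own types before being read back through the connector.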
+ private DataSetup snowflakeCreateAndInsert(String tableNamePrefix)
+ {
+ return new CreateAndInsertDataSetup(snowflakeServer::execute, tableNamePrefix);
+ }
+
+ private static void checkIsGap(ZoneId zone, LocalDate date)
+ {
+ verify(isGap(zone, date), "Expected %s to be a gap in %s", date, zone);
+ }
+
+ private static boolean isGap(ZoneId zone, LocalDate date)
+ {
+ return zone.getRules().getValidOffsets(date.atStartOfDay()).isEmpty();
+ }
+}
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java
new file mode 100644
index 00000000000000..bd64f30307597c
--- /dev/null
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestingSnowflakeServer.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.snowflake;
+
+import org.intellij.lang.annotations.Language;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Properties;
+
+import static java.util.Objects.requireNonNull;
+
+public class TestingSnowflakeServer
+ implements AutoCloseable
+{
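+ // Thin wrapper around a real Snowflake account; all connection parameters must be provided as -Dsnowflake.test.server.* system properties.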
+ public static final String TEST_URL = requireNonNull(System.getProperty("snowflake.test.server.url"), "snowflake.test.server.url is not set");
+ public static final String TEST_USER = requireNonNull(System.getProperty("snowflake.test.server.user"), "snowflake.test.server.user is not set");
+ public static final String TEST_PASSWORD = requireNonNull(System.getProperty("snowflake.test.server.password"), "snowflake.test.server.password is not set");
+ public static final String TEST_DATABASE = requireNonNull(System.getProperty("snowflake.test.server.database"), "snowflake.test.server.database is not set");
+ public static final String TEST_WAREHOUSE = requireNonNull(System.getProperty("snowflake.test.server.warehouse"), "snowflake.test.server.warehouse is not set");
+ public static final String TEST_ROLE = requireNonNull(System.getProperty("snowflake.test.server.role"), "snowflake.test.server.role is not set");
+ public static final String TEST_PROXY = System.getProperty("snowflake.test.http_proxy");
+ public static final String TEST_SCHEMA = "tpch";
+
+ public TestingSnowflakeServer()
+ {
+ execute("CREATE SCHEMA IF NOT EXISTS tpch");
+ }
+
+ public void execute(@Language("SQL") String sql)
+ {
+ execute(TEST_URL, getProperties(), sql);
+ }
+
+ private static void execute(String url, Properties properties, String sql)
+ {
+ try (Connection connection = DriverManager.getConnection(url, properties);
+ Statement statement = connection.createStatement()) {
+ statement.execute(sql);
+ }
+ catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public Properties getProperties()
+ {
+ Properties properties = new Properties();
+ properties.setProperty("user", TEST_USER);
+ properties.setProperty("password", TEST_PASSWORD);
+ properties.setProperty("db", TEST_DATABASE);
+ properties.setProperty("schema", TEST_SCHEMA);
+ properties.setProperty("warehouse", TEST_WAREHOUSE);
+ properties.setProperty("role", TEST_ROLE);
+ return properties;
+ }
+
+ @Override
+ public void close()
+ throws Exception
+ {
+ execute("DROP SCHEMA IF EXISTS tpch");
+ }
+}
diff --git a/pom.xml b/pom.xml
index db16e5799fd8e4..bdcc9edbd04d2e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -107,6 +107,7 @@
<module>plugin/trino-resource-group-managers</module>
<module>plugin/trino-session-property-managers</module>
<module>plugin/trino-singlestore</module>
+ <module>plugin/trino-snowflake</module>
<module>plugin/trino-sqlserver</module>
<module>plugin/trino-teradata-functions</module>
<module>plugin/trino-thrift</module>
@@ -1439,6 +1440,12 @@
<type>test-jar</type>
</dependency>
+ <dependency>
+ <groupId>io.trino</groupId>
+ <artifactId>trino-snowflake</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+
<dependency>
<groupId>io.trino</groupId>
<artifactId>trino-spi</artifactId>
diff --git a/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java b/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java
index a7f5e7e0f2df9a..7ed2978dc99d1f 100644
--- a/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java
+++ b/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java
@@ -83,6 +83,7 @@ public final class TestGroups
public static final String CLICKHOUSE = "clickhouse";
public static final String KUDU = "kudu";
public static final String MARIADB = "mariadb";
+ public static final String SNOWFLAKE = "snowflake";
public static final String DELTA_LAKE_OSS = "delta-lake-oss";
public static final String DELTA_LAKE_HDFS = "delta-lake-hdfs";
public static final String DELTA_LAKE_MINIO = "delta-lake-minio";
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
index 12c958581ee875..c44f85b98437a8 100644
--- a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
@@ -78,6 +78,7 @@ public void extendEnvironment(Environment.Builder builder)
"raptor_legacy",
"redis",
"redshift",
+ "snowflake",
"sqlserver",
"trino_thrift",
"tpcds")
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeSnowflake.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeSnowflake.java
new file mode 100644
index 00000000000000..7f4ab574084a1d
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeSnowflake.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.launcher.env.environment;
+
+import com.google.inject.Inject;
+import io.trino.tests.product.launcher.docker.DockerFiles;
+import io.trino.tests.product.launcher.env.Environment;
+import io.trino.tests.product.launcher.env.EnvironmentProvider;
+import io.trino.tests.product.launcher.env.common.Standard;
+import io.trino.tests.product.launcher.env.common.TestsEnvironment;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.attribute.PosixFilePermissions;
+
+import static java.nio.file.attribute.PosixFilePermissions.fromString;
+import static java.util.Objects.requireNonNull;
+import static org.testcontainers.utility.MountableFile.forHostPath;
+
+@TestsEnvironment
+public class EnvMultinodeSnowflake
+ extends EnvironmentProvider
+{
+ private final DockerFiles.ResourceProvider configDir;
+
+ @Inject
+ public EnvMultinodeSnowflake(DockerFiles dockerFiles, Standard standard)
+ {
+ super(standard);
+ configDir = requireNonNull(dockerFiles, "dockerFiles is null").getDockerFilesHostDirectory("conf/environment/multinode-snowflake");
+ }
+
+ @Override
+ public void extendEnvironment(Environment.Builder builder)
+ {
+ builder.addConnector("snowflake", forHostPath(getEnvProperties()));
+ }
+
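+ // Resolves the ${ENV:...} placeholders in snowflake.properties from the launcher's environment and writes the result to a temporary file mounted into the container.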
+ private Path getEnvProperties()
+ {
+ try {
+ String properties = Files.readString(configDir.getPath("snowflake.properties"))
+ .replace("${ENV:SNOWFLAKE_URL}", requireEnv("SNOWFLAKE_URL"))
+ .replace("${ENV:SNOWFLAKE_USER}", requireEnv("SNOWFLAKE_USER"))
+ .replace("${ENV:SNOWFLAKE_PASSWORD}", requireEnv("SNOWFLAKE_PASSWORD"))
+ .replace("${ENV:SNOWFLAKE_DATABASE}", requireEnv("SNOWFLAKE_DATABASE"))
+ .replace("${ENV:SNOWFLAKE_ROLE}", requireEnv("SNOWFLAKE_ROLE"))
+ .replace("${ENV:SNOWFLAKE_WAREHOUSE}", requireEnv("SNOWFLAKE_WAREHOUSE"));
+ File newProperties = Files.createTempFile("snowflake-replaced", ".properties", PosixFilePermissions.asFileAttribute(fromString("rwxrwxrwx"))).toFile();
+ newProperties.deleteOnExit();
+ Files.writeString(newProperties.toPath(), properties);
+ return newProperties.toPath();
+ }
+ catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+
+ private static String requireEnv(String variable)
+ {
+ return requireNonNull(System.getenv(variable), () -> "environment variable not set: " + variable);
+ }
+}
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteSnowflake.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteSnowflake.java
new file mode 100644
index 00000000000000..317d3481723651
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteSnowflake.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.launcher.suite.suites;
+
+import com.google.common.collect.ImmutableList;
+import io.trino.tests.product.launcher.env.EnvironmentConfig;
+import io.trino.tests.product.launcher.env.environment.EnvMultinodeSnowflake;
+import io.trino.tests.product.launcher.suite.Suite;
+import io.trino.tests.product.launcher.suite.SuiteTestRun;
+
+import java.util.List;
+
+import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;
+
+public class SuiteSnowflake
+ extends Suite
+{
+ @Override
+    public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
+ {
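+        // Run the "snowflake" product tests (plus the shared "configured_features" checks)
+        // against the multinode-snowflake environment.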
+ return ImmutableList.of(
+ testOnEnvironment(EnvMultinodeSnowflake.class)
+ .withGroups("configured_features", "snowflake")
+ .build());
+ }
+}
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-all/snowflake.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-all/snowflake.properties
new file mode 100644
index 00000000000000..669489ea4363ec
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-all/snowflake.properties
@@ -0,0 +1,4 @@
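+# Same catalog definition as multinode-snowflake, for environments that include all connectors.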
+connector.name=snowflake
+connection-url=${ENV:SNOWFLAKE_URL}
+connection-user=${ENV:SNOWFLAKE_USER}
+connection-password=${ENV:SNOWFLAKE_PASSWORD}
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/snowflake.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/snowflake.properties
new file mode 100644
index 00000000000000..669489ea4363ec
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-snowflake/snowflake.properties
@@ -0,0 +1,4 @@
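+# The ${ENV:...} placeholders are replaced with real values by EnvMultinodeSnowflake before this file is mounted.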
+connector.name=snowflake
+connection-url=${ENV:SNOWFLAKE_URL}
+connection-user=${ENV:SNOWFLAKE_USER}
+connection-password=${ENV:SNOWFLAKE_PASSWORD}
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/snowflake/TestSnowflake.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/snowflake/TestSnowflake.java
new file mode 100644
index 00000000000000..8850ca21655086
--- /dev/null
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/snowflake/TestSnowflake.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.snowflake;
+
+import io.trino.tempto.ProductTest;
+import io.trino.tempto.query.QueryResult;
+import org.testng.annotations.Test;
+
+import static io.trino.tempto.assertions.QueryAssert.Row.row;
+import static io.trino.tempto.assertions.QueryAssert.assertThat;
+import static io.trino.testing.TestingNames.randomNameSuffix;
+import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
+import static io.trino.tests.product.TestGroups.SNOWFLAKE;
+import static io.trino.tests.product.utils.QueryExecutors.onTrino;
+
+public class TestSnowflake
+ extends ProductTest
+{
+ @Test(groups = {SNOWFLAKE, PROFILE_SPECIFIC_TESTS})
+ public void testCreateTableAsSelect()
+ {
+ String tableName = "snowflake.tpch.nation_" + randomNameSuffix();
+
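+        // CTAS from the TPCH tiny nation table (25 rows) into Snowflake, then verify both the
+        // row count reported by the CTAS and the count visible through a follow-up query.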
+ onTrino().executeQuery("DROP TABLE IF EXISTS " + tableName);
+ QueryResult result = onTrino().executeQuery("CREATE TABLE " + tableName + " AS SELECT * FROM tpch.tiny.nation");
+ try {
+ assertThat(result).updatedRowsCountIsEqualTo(25);
+ assertThat(onTrino().executeQuery("SELECT COUNT(*) FROM " + tableName))
+ .containsOnly(row(25));
+ }
+ finally {
+ onTrino().executeQuery("DROP TABLE " + tableName);
+ }
+ }
+}