Add missing fixes
yuuteng committed Dec 22, 2023
2 parents 36920e0 + d82035a commit 330162b
Showing 3 changed files with 91 additions and 101 deletions.
20 changes: 20 additions & 0 deletions .github/workflows/ci.yml
@@ -614,6 +614,7 @@ jobs:
- { modules: plugin/trino-resource-group-managers }
- { modules: plugin/trino-singlestore }
- { modules: plugin/trino-snowflake }
- { modules: plugin/trino-snowflake, profile: cloud-tests }
- { modules: plugin/trino-sqlserver }
- { modules: testing/trino-faulttolerant-tests, profile: default }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta }
@@ -781,6 +782,24 @@ jobs:
if: matrix.modules == 'plugin/trino-bigquery' && !contains(matrix.profile, 'cloud-tests-arrow-and-fte') && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY != '')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY}"
- name: Cloud Snowflake Tests
env:
SNOWFLAKE_TEST_SERVER_URL: ${{ secrets.SNOWFLAKE_TEST_SERVER_URL }}
SNOWFLAKE_TEST_SERVER_USER: ${{ secrets.SNOWFLAKE_TEST_SERVER_USER }}
SNOWFLAKE_TEST_SERVER_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_SERVER_PASSWORD }}
SNOWFLAKE_TEST_SERVER_DATABASE: ${{ secrets.SNOWFLAKE_TEST_SERVER_DATABASE }}
SNOWFLAKE_TEST_SERVER_ROLE: ${{ secrets.SNOWFLAKE_TEST_SERVER_ROLE }}
SNOWFLAKE_TEST_SERVER_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_SERVER_WAREHOUSE }}
if: matrix.modules == 'plugin/trino-snowflake' && contains(matrix.profile, 'cloud-tests') && (env.SNOWFLAKE_TEST_SERVER_URL != '' && env.SNOWFLAKE_TEST_SERVER_USER != '' && env.SNOWFLAKE_TEST_SERVER_PASSWORD != '')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-snowflake -Pcloud-tests \
-Dconnector.name="snowflake" \
-Dsnowflake.test.server.url="${SNOWFLAKE_TEST_SERVER_URL}" \
-Dsnowflake.test.server.user="${SNOWFLAKE_TEST_SERVER_USER}" \
-Dsnowflake.test.server.password="${SNOWFLAKE_TEST_SERVER_PASSWORD}" \
-Dsnowflake.test.server.database="${SNOWFLAKE_TEST_SERVER_DATABASE}" \
-Dsnowflake.test.server.role="${SNOWFLAKE_TEST_SERVER_ROLE}" \
-Dsnowflake.test.server.warehouse="${SNOWFLAKE_TEST_SERVER_WAREHOUSE}"
- name: Iceberg Cloud Tests
env:
AWS_ACCESS_KEY_ID: ${{ secrets.TRINO_AWS_ACCESS_KEY_ID }}
@@ -976,6 +995,7 @@ jobs:
- suite-clickhouse
- suite-mysql
- suite-iceberg
- suite-snowflake
- suite-hudi
- suite-ignite
exclude:
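A usage note on the Cloud Snowflake Tests step above: the step is gated on the connection secrets being present, so forks without those secrets skip it silently. Assuming the six SNOWFLAKE_TEST_SERVER_* variables are exported in the shell, the same suite should be reproducible locally with the repository's Maven wrapper, e.g. ./mvnw test -pl :trino-snowflake -Pcloud-tests -Dsnowflake.test.server.url="$SNOWFLAKE_TEST_SERVER_URL" with the remaining -Dsnowflake.test.server.* properties filled in exactly as in the run block above.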
28 changes: 9 additions & 19 deletions plugin/trino-snowflake/pom.xml
@@ -15,6 +15,7 @@

<properties>
<air.main.basedir>${project.parent.basedir}</air.main.basedir>
<air.test.jvm.additional-arguments>--add-opens=java.base/java.nio=ALL-UNNAMED</air.test.jvm.additional-arguments>
</properties>

<dependencies>
@@ -185,18 +186,6 @@
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<argLine>--add-opens=java.base/java.nio=ALL-UNNAMED</argLine>
</configuration>
</plugin>
</plugins>
</build>

<profiles>
<profile>
<id>default</id>
@@ -210,8 +199,6 @@
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/TestSnowflakeClient.java</exclude>
<exclude>**/TestSnowflakeConfig.java</exclude>
<exclude>**/TestSnowflakeConnectorTest.java</exclude>
<exclude>**/TestSnowflakePlugin.java</exclude>
<exclude>**/TestSnowflakeTypeMapping.java</exclude>
@@ -225,18 +212,21 @@
<profile>
<!-- Tests which require third party cloud services are separated from the main test profile -->
<id>cloud-tests</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<includes>
<exclude>**/TestSnowflakeClient.java</exclude>
<exclude>**/TestSnowflakeConfig.java</exclude>
<exclude>**/TestSnowflakeConnectorTest.java</exclude>
<exclude>**/TestSnowflakePlugin.java</exclude>
<exclude>**/TestSnowflakeTypeMapping.java</exclude>
<include>**/TestSnowflakeClient.java</include>
<include>**/TestSnowflakeConfig.java</include>
<include>**/TestSnowflakeConnectorTest.java</include>
<include>**/TestSnowflakePlugin.java</include>
<include>**/TestSnowflakeTypeMapping.java</include>
</includes>
</configuration>
</plugin>
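A note on the pom.xml changes above: the --add-opens flag for java.nio now flows through Airbase's air.test.jvm.additional-arguments property, which the parent pom appends to the Surefire argLine, instead of a module-local Surefire <argLine> override. TestSnowflakeClient and TestSnowflakeConfig, which need no cloud account, are no longer excluded from the default profile. The cloud-tests profile now lists the five Snowflake test classes as proper <include> entries (previously they were mis-declared as <exclude> tags inside <includes>) and declares activeByDefault=false, so they run only when the profile is requested explicitly, e.g. mvn test -pl :trino-snowflake -Pcloud-tests — which is what the new CI matrix entry does.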
144 changes: 62 additions & 82 deletions plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
@@ -129,81 +129,6 @@ private interface ColumnMappingFunction
}

private static final TimeZone UTC_TZ = TimeZone.getTimeZone(ZoneId.of("UTC"));
// Mappings for JDBC column types to internal Trino types
private static final Map<Integer, ColumnMapping> STANDARD_COLUMN_MAPPINGS = ImmutableMap.<Integer, ColumnMapping>builder()
.put(Types.BOOLEAN, StandardColumnMappings.booleanColumnMapping())
.put(Types.TINYINT, StandardColumnMappings.tinyintColumnMapping())
.put(Types.SMALLINT, StandardColumnMappings.smallintColumnMapping())
.put(Types.INTEGER, StandardColumnMappings.integerColumnMapping())
.put(Types.BIGINT, StandardColumnMappings.bigintColumnMapping())
.put(Types.REAL, StandardColumnMappings.realColumnMapping())
.put(Types.DOUBLE, StandardColumnMappings.doubleColumnMapping())
.put(Types.FLOAT, StandardColumnMappings.doubleColumnMapping())
.put(Types.BINARY, StandardColumnMappings.varbinaryColumnMapping())
.put(Types.VARBINARY, StandardColumnMappings.varbinaryColumnMapping())
.put(Types.LONGVARBINARY, StandardColumnMappings.varbinaryColumnMapping())
.buildOrThrow();

private static final Map<String, ColumnMappingFunction> SHOWFLAKE_COLUMN_MAPPINGS = ImmutableMap.<String, ColumnMappingFunction>builder()
.put("time", typeHandle -> Optional.of(timeColumnMapping(typeHandle)))
.put("timestampntz", typeHandle -> Optional.of(timestampColumnMapping(typeHandle)))
.put("timestamptz", typeHandle -> Optional.of(timestampTzColumnMapping(typeHandle)))
.put("timestampltz", typeHandle -> Optional.of(timestampTzColumnMapping(typeHandle)))
.put("date", typeHandle -> Optional.of(ColumnMapping.longMapping(
DateType.DATE,
(resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(),
snowFlakeDateWriter())))
.put("object", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
createUnboundedVarcharType(),
StandardColumnMappings.varcharReadFunction(createUnboundedVarcharType()),
StandardColumnMappings.varcharWriteFunction(),
PredicatePushdownController.DISABLE_PUSHDOWN)))
.put("array", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
createUnboundedVarcharType(),
StandardColumnMappings.varcharReadFunction(createUnboundedVarcharType()),
StandardColumnMappings.varcharWriteFunction(),
PredicatePushdownController.DISABLE_PUSHDOWN)))
.put("variant", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
createUnboundedVarcharType(),
variantReadFunction(),
StandardColumnMappings.varcharWriteFunction(),
PredicatePushdownController.FULL_PUSHDOWN)))
.put("varchar", typeHandle -> Optional.of(varcharColumnMapping(typeHandle.getRequiredColumnSize())))
.put("number", typeHandle -> {
int decimalDigits = typeHandle.getRequiredDecimalDigits();
int precision = typeHandle.getRequiredColumnSize() + Math.max(-decimalDigits, 0);
if (precision > 38) {
return Optional.empty();
}
return Optional.of(columnMappingPushdown(
StandardColumnMappings.decimalColumnMapping(createDecimalType(precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
})
.buildOrThrow();

// Mappings for internal Trino types to JDBC column types
private static final Map<String, WriteMapping> STANDARD_WRITE_MAPPINGS = ImmutableMap.<String, WriteMapping>builder()
.put("BooleanType", WriteMapping.booleanMapping("boolean", StandardColumnMappings.booleanWriteFunction()))
.put("BigintType", WriteMapping.longMapping("number(19)", StandardColumnMappings.bigintWriteFunction()))
.put("IntegerType", WriteMapping.longMapping("number(10)", StandardColumnMappings.integerWriteFunction()))
.put("SmallintType", WriteMapping.longMapping("number(5)", StandardColumnMappings.smallintWriteFunction()))
.put("TinyintType", WriteMapping.longMapping("number(3)", StandardColumnMappings.tinyintWriteFunction()))
.put("DoubleType", WriteMapping.doubleMapping("double precision", StandardColumnMappings.doubleWriteFunction()))
.put("RealType", WriteMapping.longMapping("real", StandardColumnMappings.realWriteFunction()))
.put("VarbinaryType", WriteMapping.sliceMapping("varbinary", StandardColumnMappings.varbinaryWriteFunction()))
.put("DateType", WriteMapping.longMapping("date", snowFlakeDateWriter()))
.buildOrThrow();

private static final Map<String, WriteMappingFunction> SNOWFLAKE_WRITE_MAPPINGS = ImmutableMap.<String, WriteMappingFunction>builder()
.put("TimeType", type -> WriteMapping.longMapping("time", SnowflakeClient.snowFlaketimeWriter(type)))
.put("ShortTimestampType", SnowflakeClient::snowFlakeTimestampWriter)
.put("ShortTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
.put("LongTimestampType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
.put("LongTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
.put("VarcharType", SnowflakeClient::snowFlakeVarCharWriter)
.put("CharType", SnowflakeClient::snowFlakeCharWriter)
.put("LongDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
.put("ShortDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
.buildOrThrow();

@Inject
public SnowflakeClient(
@@ -249,12 +174,45 @@ public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connect
jdbcTypeName = jdbcTypeName.toLowerCase(Locale.ENGLISH);
int type = typeHandle.getJdbcType();

ColumnMapping columnMap = STANDARD_COLUMN_MAPPINGS.get(type);
// Mappings for JDBC column types to internal Trino types
final Map<Integer, ColumnMapping> standardColumnMappings = ImmutableMap.<Integer, ColumnMapping>builder()
.put(Types.BOOLEAN, StandardColumnMappings.booleanColumnMapping())
.put(Types.TINYINT, StandardColumnMappings.tinyintColumnMapping())
.put(Types.SMALLINT, StandardColumnMappings.smallintColumnMapping())
.put(Types.INTEGER, StandardColumnMappings.integerColumnMapping())
.put(Types.BIGINT, StandardColumnMappings.bigintColumnMapping())
.put(Types.REAL, StandardColumnMappings.realColumnMapping())
.put(Types.DOUBLE, StandardColumnMappings.doubleColumnMapping())
.put(Types.FLOAT, StandardColumnMappings.doubleColumnMapping())
.put(Types.BINARY, StandardColumnMappings.varbinaryColumnMapping())
.put(Types.VARBINARY, StandardColumnMappings.varbinaryColumnMapping())
.put(Types.LONGVARBINARY, StandardColumnMappings.varbinaryColumnMapping())
.buildOrThrow();

ColumnMapping columnMap = standardColumnMappings.get(type);
if (columnMap != null) {
return Optional.of(columnMap);
}

ColumnMappingFunction columnMappingFunction = SHOWFLAKE_COLUMN_MAPPINGS.get(jdbcTypeName);
final Map<String, ColumnMappingFunction> snowflakeColumnMappings = ImmutableMap.<String, ColumnMappingFunction>builder()
.put("time", handle -> Optional.of(timeColumnMapping(handle)))
.put("date", handle -> Optional.of(ColumnMapping.longMapping(
DateType.DATE,
(resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(),
snowFlakeDateWriter())))
.put("varchar", handle -> Optional.of(varcharColumnMapping(handle.getRequiredColumnSize())))
.put("number", handle -> {
int decimalDigits = handle.getRequiredDecimalDigits();
int precision = handle.getRequiredColumnSize() + Math.max(-decimalDigits, 0);
if (precision > 38) {
return Optional.empty();
}
return Optional.of(columnMappingPushdown(
StandardColumnMappings.decimalColumnMapping(createDecimalType(precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
})
.buildOrThrow();

ColumnMappingFunction columnMappingFunction = snowflakeColumnMappings.get(jdbcTypeName);
if (columnMappingFunction != null) {
return columnMappingFunction.convert(typeHandle);
}
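To make the "number" rule above concrete: a negative reported scale widens the precision, the scale is clamped at zero, and anything past Trino's decimal(38) cap is rejected. A minimal, hypothetical sketch of the same arithmetic (plain Java, not connector code; names are illustrative):

public class NumberMappingRule
{
    public static void main(String[] args)
    {
        System.out.println(target(10, 2));   // decimal(10, 2)
        System.out.println(target(10, -2));  // decimal(12, 0): negative scale widens precision
        System.out.println(target(38, -1));  // unsupported: precision 39 exceeds Trino's cap
    }

    // Mirrors the "number" mapping rule: precision = columnSize + max(-decimalDigits, 0),
    // scale = max(decimalDigits, 0), rejecting precision > 38.
    static String target(int columnSize, int decimalDigits)
    {
        int precision = columnSize + Math.max(-decimalDigits, 0);
        int scale = Math.max(decimalDigits, 0);
        if (precision > 38) {
            return "unsupported (precision " + precision + " > 38)";
        }
        return "decimal(" + precision + ", " + scale + ")";
    }
}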
@@ -269,18 +227,41 @@ public WriteMapping toWriteMapping(ConnectorSession session, Type type)
Class<?> myClass = type.getClass();
String simple = myClass.getSimpleName();

WriteMapping writeMapping = STANDARD_WRITE_MAPPINGS.get(simple);
// Mappings for internal Trino types to JDBC column types
final Map<String, WriteMapping> standardWriteMappings = ImmutableMap.<String, WriteMapping>builder()
.put("BooleanType", WriteMapping.booleanMapping("boolean", StandardColumnMappings.booleanWriteFunction()))
.put("BigintType", WriteMapping.longMapping("number(19)", StandardColumnMappings.bigintWriteFunction()))
.put("IntegerType", WriteMapping.longMapping("number(10)", StandardColumnMappings.integerWriteFunction()))
.put("SmallintType", WriteMapping.longMapping("number(5)", StandardColumnMappings.smallintWriteFunction()))
.put("TinyintType", WriteMapping.longMapping("number(3)", StandardColumnMappings.tinyintWriteFunction()))
.put("DoubleType", WriteMapping.doubleMapping("double precision", StandardColumnMappings.doubleWriteFunction()))
.put("RealType", WriteMapping.longMapping("real", StandardColumnMappings.realWriteFunction()))
.put("VarbinaryType", WriteMapping.sliceMapping("varbinary", StandardColumnMappings.varbinaryWriteFunction()))
.put("DateType", WriteMapping.longMapping("date", snowFlakeDateWriter()))
.buildOrThrow();

WriteMapping writeMapping = standardWriteMappings.get(simple);
if (writeMapping != null) {
return writeMapping;
}

WriteMappingFunction writeMappingFunction = SNOWFLAKE_WRITE_MAPPINGS.get(simple);
final Map<String, WriteMappingFunction> snowflakeWriteMappings = ImmutableMap.<String, WriteMappingFunction>builder()
.put("TimeType", writeType -> WriteMapping.longMapping("time", SnowflakeClient.snowFlaketimeWriter(writeType)))
.put("ShortTimestampType", SnowflakeClient::snowFlakeTimestampWriter)
.put("ShortTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
.put("LongTimestampType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
.put("LongTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
.put("VarcharType", SnowflakeClient::snowFlakeVarCharWriter)
.put("CharType", SnowflakeClient::snowFlakeCharWriter)
.put("LongDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
.put("ShortDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
.buildOrThrow();

WriteMappingFunction writeMappingFunction = snowflakeWriteMappings.get(simple);
if (writeMappingFunction != null) {
return writeMappingFunction.convert(type);
}

log.debug("SnowflakeClient.toWriteMapping: SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type: " + type.getDisplayName() + ", simple:" + simple);

throw new TrinoException(NOT_SUPPORTED, "SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type: " + type.getDisplayName() + ", simple:" + simple);
}
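The lookup key in toWriteMapping is just the runtime class's simple name, which is why decimals and timestamps each contribute two entries: Trino splits parametric types into short and long implementations depending on precision. A hypothetical probe (assuming trino-spi on the classpath) showing the keys a few common types produce:

import io.trino.spi.type.DecimalType;
import io.trino.spi.type.TimestampType;
import io.trino.spi.type.VarcharType;

public class WriteMappingKeys
{
    public static void main(String[] args)
    {
        // The simple names printed below are the keys toWriteMapping looks up.
        System.out.println(DecimalType.createDecimalType(10, 2).getClass().getSimpleName());     // ShortDecimalType (precision <= 18)
        System.out.println(DecimalType.createDecimalType(38, 0).getClass().getSimpleName());     // LongDecimalType
        System.out.println(TimestampType.createTimestampType(3).getClass().getSimpleName());     // ShortTimestampType (precision <= 6)
        System.out.println(VarcharType.createUnboundedVarcharType().getClass().getSimpleName()); // VarcharType
    }
}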

Expand Down Expand Up @@ -322,7 +303,6 @@ private static SliceReadFunction variantReadFunction()
private static ColumnMapping columnMappingPushdown(ColumnMapping mapping)
{
if (mapping.getPredicatePushdownController() == PredicatePushdownController.DISABLE_PUSHDOWN) {
log.debug("SnowflakeClient.columnMappingPushdown: NOT_SUPPORTED mapping.getPredicatePushdownController() is DISABLE_PUSHDOWN. Type was " + mapping.getType());
throw new TrinoException(NOT_SUPPORTED, "mapping.getPredicatePushdownController() is DISABLE_PUSHDOWN. Type was " + mapping.getType());
}

