Update CI and delete unused lines
yuuteng committed Mar 4, 2024
1 parent 9dd8e39 commit 63efdf1
Showing 7 changed files with 21 additions and 50 deletions.
26 changes: 13 additions & 13 deletions .github/workflows/ci.yml
@@ -656,22 +656,22 @@ jobs:
$MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY}"
- name: Cloud Snowflake Tests
env:
- SNOWFLAKE_TEST_SERVER_URL: ${{ secrets.SNOWFLAKE_TEST_SERVER_URL }}
- SNOWFLAKE_TEST_SERVER_USER: ${{ secrets.SNOWFLAKE_TEST_SERVER_USER }}
- SNOWFLAKE_TEST_SERVER_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_SERVER_PASSWORD }}
- SNOWFLAKE_TEST_SERVER_DATABASE: ${{ secrets.SNOWFLAKE_TEST_SERVER_DATABASE }}
- SNOWFLAKE_TEST_SERVER_ROLE: ${{ secrets.SNOWFLAKE_TEST_SERVER_ROLE }}
- SNOWFLAKE_TEST_SERVER_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_SERVER_WAREHOUSE }}
- if: matrix.modules == 'plugin/trino-snowflake' && !contains(matrix.profile, 'cloud-tests') && (env.SNOWFLAKE_TEST_SERVER_URL != '' && env.SNOWFLAKE_TEST_SERVER_USER != '' && env.SNOWFLAKE_TEST_SERVER_PASSWORD != '')
+ SNOWFLAKE_URL: ${{ secrets.SNOWFLAKE_URL }}
+ SNOWFLAKE_USER: ${{ secrets.SNOWFLAKE_USER }}
+ SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
+ SNOWFLAKE_DATABASE: ${{ secrets.SNOWFLAKE_DATABASE }}
+ SNOWFLAKE_ROLE: ${{ secrets.SNOWFLAKE_ROLE }}
+ SNOWFLAKE_WAREHOUSE: ${{ secrets.SNOWFLAKE_WAREHOUSE }}
+ if: matrix.modules == 'plugin/trino-snowflake' && !contains(matrix.profile, 'cloud-tests') && (env.SNOWFLAKE_URL != '' && env.SNOWFLAKE_USER != '' && env.SNOWFLAKE_PASSWORD != '')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-snowflake -Pcloud-tests \
-Dconnector.name="snowflake" \
-Dsnowflake.test.server.url="${SNOWFLAKE_TEST_SERVER_URL}" \
-Dsnowflake.test.server.user="${SNOWFLAKE_TEST_SERVER_USER}" \
-Dsnowflake.test.server.password="${SNOWFLAKE_TEST_SERVER_PASSWORD}" \
-Dsnowflake.test.server.database="${SNOWFLAKE_TEST_SERVER_DATABASE}" \
-Dsnowflake.test.server.role="${SNOWFLAKE_TEST_SERVER_ROLE}" \
-Dsnowflake.test.server.warehouse="${SNOWFLAKE_TEST_SERVER_WAREHOUSE}"
-Dsnowflake.test.server.url="${SNOWFLAKE_URL}" \
-Dsnowflake.test.server.user="${SNOWFLAKE_USER}" \
-Dsnowflake.test.server.password="${SNOWFLAKE_PASSWORD}" \
-Dsnowflake.test.server.database="${SNOWFLAKE_DATABASE}" \
-Dsnowflake.test.server.role="${SNOWFLAKE_ROLE}" \
-Dsnowflake.test.server.warehouse="${SNOWFLAKE_WAREHOUSE}"
- name: Iceberg Cloud Tests
id: tests-iceberg
env:
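Reassembled from the added (+) lines above, the updated Cloud Snowflake Tests step reads roughly as follows. This is a sketch only: indentation, the surrounding job and matrix definitions, and the MAVEN/MAVEN_TEST variables are assumed from the rest of the workflow rather than shown in this hunk.

- name: Cloud Snowflake Tests
  env:
    SNOWFLAKE_URL: ${{ secrets.SNOWFLAKE_URL }}
    SNOWFLAKE_USER: ${{ secrets.SNOWFLAKE_USER }}
    SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
    SNOWFLAKE_DATABASE: ${{ secrets.SNOWFLAKE_DATABASE }}
    SNOWFLAKE_ROLE: ${{ secrets.SNOWFLAKE_ROLE }}
    SNOWFLAKE_WAREHOUSE: ${{ secrets.SNOWFLAKE_WAREHOUSE }}
  # Run only for the Snowflake module and only when the required secrets are
  # non-empty (they are empty, for example, in builds without repository secrets).
  if: matrix.modules == 'plugin/trino-snowflake' && !contains(matrix.profile, 'cloud-tests') && (env.SNOWFLAKE_URL != '' && env.SNOWFLAKE_USER != '' && env.SNOWFLAKE_PASSWORD != '')
  run: |
    $MAVEN test ${MAVEN_TEST} -pl :trino-snowflake -Pcloud-tests \
      -Dconnector.name="snowflake" \
      -Dsnowflake.test.server.url="${SNOWFLAKE_URL}" \
      -Dsnowflake.test.server.user="${SNOWFLAKE_USER}" \
      -Dsnowflake.test.server.password="${SNOWFLAKE_PASSWORD}" \
      -Dsnowflake.test.server.database="${SNOWFLAKE_DATABASE}" \
      -Dsnowflake.test.server.role="${SNOWFLAKE_ROLE}" \
      -Dsnowflake.test.server.warehouse="${SNOWFLAKE_WAREHOUSE}"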
5 changes: 0 additions & 5 deletions plugin/trino-snowflake/pom.xml
@@ -185,11 +185,6 @@
<scope>test</scope>
</dependency>

- <dependency>
- <groupId>org.testng</groupId>
- <artifactId>testng</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>

<profiles>
@@ -94,8 +94,6 @@
import static io.airlift.slice.Slices.utf8Slice;
import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
- import static io.trino.spi.type.TimestampWithTimeZoneType.MAX_SHORT_PRECISION;
- import static io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType;
import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND;
import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
@@ -361,27 +359,6 @@ public void set(PreparedStatement statement, int index, long picosOfDay)
};
}

- private static ColumnMapping timestampTzColumnMapping(JdbcTypeHandle typeHandle)
- {
- int precision = typeHandle.getRequiredDecimalDigits();
- String jdbcTypeName = typeHandle.getJdbcTypeName()
- .orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
- int type = typeHandle.getJdbcType();
- if (precision <= MAX_SHORT_PRECISION) {
- return ColumnMapping.longMapping(
- createTimestampWithTimeZoneType(precision),
- (resultSet, columnIndex) -> {
- ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
- return DateTimeEncoding.packDateTimeWithZone(timestamp.toInstant().toEpochMilli(), timestamp.getZone().getId());
- },
- timestampWithTimezoneWriteFunction(),
- PredicatePushdownController.FULL_PUSHDOWN);
- }
- else {
- return ColumnMapping.objectMapping(createTimestampWithTimeZoneType(precision), longTimestampWithTimezoneReadFunction(), longTimestampWithTzWriteFunction());
- }
- }

private static ColumnMapping varcharColumnMapping(int varcharLength)
{
VarcharType varcharType = varcharLength <= VarcharType.MAX_LENGTH ? createVarcharType(varcharLength) : createUnboundedVarcharType();
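For context on the short-precision branch of the removed timestampTzColumnMapping: Trino represents timestamp with time zone values at or below MAX_SHORT_PRECISION as a single packed long (epoch millis plus the zone key), which is why that branch returned ColumnMapping.longMapping and called DateTimeEncoding.packDateTimeWithZone. A small self-contained sketch of that encoding, using only the trino-spi helpers the removed code already referenced; the timestamp literal and class name are made up, and SNOWFLAKE_DATETIME_FORMATTER is not reproduced here.

import static io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone;
import static io.trino.spi.type.DateTimeEncoding.unpackMillisUtc;

import java.time.ZonedDateTime;

public class PackedTimestampTzSketch
{
    public static void main(String[] args)
    {
        // Stand-in for the value the removed read function produced by parsing
        // the JDBC string with SNOWFLAKE_DATETIME_FORMATTER.
        ZonedDateTime timestamp = ZonedDateTime.parse("2024-03-04T10:15:30.123+09:00[Asia/Tokyo]");

        // Short-precision values are packed into one long: epoch millis plus the zone,
        // which is the representation ColumnMapping.longMapping hands to the engine.
        long packed = packDateTimeWithZone(timestamp.toInstant().toEpochMilli(), timestamp.getZone().getId());

        // Round-trip check on the millis part; prints true.
        System.out.println(unpackMillisUtc(packed) == timestamp.toInstant().toEpochMilli());
    }
}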
@@ -26,7 +26,7 @@
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.expression.ConnectorExpression;
import io.trino.spi.expression.Variable;
- import org.testng.annotations.Test;
+ import org.junit.jupiter.api.Test;

import java.sql.Types;
import java.util.List;
@@ -38,8 +38,6 @@
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.testing.TestingConnectorSession.SESSION;
import static org.assertj.core.api.Assertions.assertThat;
- import static org.testng.Assert.assertEquals;
- import static org.testng.Assert.assertTrue;

public class TestSnowflakeClient
{
@@ -142,10 +140,12 @@ private static void testImplementAggregation(AggregateFunction aggregateFunction
}
else {
assertThat(result).isPresent();
- assertEquals(result.get().getExpression(), expectedExpression.get());
+ assertThat(result.get().getExpression()).isEqualTo(expectedExpression.get());
Optional<ColumnMapping> columnMapping = JDBC_CLIENT.toColumnMapping(SESSION, null, result.get().getJdbcTypeHandle());
- assertTrue(columnMapping.isPresent(), "No mapping for: " + result.get().getJdbcTypeHandle());
- assertEquals(columnMapping.get().getType(), aggregateFunction.getOutputType());
+ assertThat(columnMapping.isPresent())
+ .describedAs("No mapping for: " + result.get().getJdbcTypeHandle())
+ .isTrue();
+ assertThat(columnMapping.get().getType()).isEqualTo(aggregateFunction.getOutputType());
}
}
}
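The assertion changes in this file replace TestNG's assertEquals and assertTrue with AssertJ's fluent assertThat, alongside the switch to org.junit.jupiter.api.Test. A minimal, self-contained sketch of the migration pattern; the class, method, and Optional value below are illustrative, not taken from the test.

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Optional;

import org.junit.jupiter.api.Test;

class AssertionStyleSketch
{
    @Test
    void migratedAssertions()
    {
        Optional<String> columnMapping = Optional.of("bigint");

        // Formerly: assertTrue(columnMapping.isPresent(), "No mapping");
        assertThat(columnMapping.isPresent())
                .describedAs("No mapping")
                .isTrue();

        // Formerly: assertEquals(columnMapping.get(), "bigint");
        assertThat(columnMapping.get()).isEqualTo("bigint");
    }
}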
@@ -14,7 +14,7 @@
package io.trino.plugin.snowflake;

import com.google.common.collect.ImmutableMap;
- import org.testng.annotations.Test;
+ import org.junit.jupiter.api.Test;

import java.util.Map;

@@ -17,7 +17,7 @@
import io.trino.spi.Plugin;
import io.trino.spi.connector.ConnectorFactory;
import io.trino.testing.TestingConnectorContext;
- import org.testng.annotations.Test;
+ import org.junit.jupiter.api.Test;

import static com.google.common.collect.Iterables.getOnlyElement;

@@ -72,6 +72,5 @@ public Properties getProperties()
public void close()
throws Exception
{
execute("DROP SCHEMA IF EXISTS tpch");
}
}
