Skip to content

Commit

Permalink
Throw TrinoException when user selects to LZ4 for Iceberg Avro table
Browse files Browse the repository at this point in the history
  • Loading branch information
findepi committed Jan 30, 2024
1 parent 76bb60b commit 42961bc
Show file tree
Hide file tree
Showing 3 changed files with 11 additions and 16 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
import static io.trino.plugin.iceberg.IcebergAvroDataConversion.toIcebergRecords;
import static io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_WRITER_CLOSE_ERROR;
import static io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_WRITER_OPEN_ERROR;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static java.util.Objects.requireNonNull;
import static org.apache.iceberg.TableProperties.AVRO_COMPRESSION;

Expand Down Expand Up @@ -137,7 +138,7 @@ private static String toIcebergAvroCompressionName(HiveCompressionCodec hiveComp
return switch (hiveCompressionCodec) {
case NONE -> "UNCOMPRESSED";
case SNAPPY -> "SNAPPY";
case LZ4 -> "LZ4";
case LZ4 -> throw new TrinoException(NOT_SUPPORTED, "Compression codec LZ4 not supported for Avro");
case ZSTD -> "ZSTD";
case GZIP -> "GZIP";
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@
import io.trino.testing.MaterializedResult;
import io.trino.testing.MaterializedResultWithQueryId;
import io.trino.testing.MaterializedRow;
import io.trino.testing.QueryFailedException;
import io.trino.testing.QueryRunner;
import io.trino.testing.TestingConnectorBehavior;
import io.trino.testing.sql.TestTable;
Expand Down Expand Up @@ -7450,20 +7449,10 @@ private void testCreateTableWithCompressionCodec(HiveCompressionCodec compressio
.build();
String tableName = "test_table_with_compression_" + compressionCodec;
String createTableSql = format("CREATE TABLE %s AS TABLE tpch.tiny.nation", tableName);
if (format == IcebergFileFormat.PARQUET && compressionCodec == HiveCompressionCodec.LZ4) {
// TODO (https://github.com/trinodb/trino/issues/9142) Support LZ4 compression with native Parquet writer
if ((format == IcebergFileFormat.PARQUET || format == AVRO) && compressionCodec == HiveCompressionCodec.LZ4) {
assertTrinoExceptionThrownBy(() -> computeActual(session, createTableSql))
.hasMessage("Compression codec LZ4 not supported for Parquet");
return;
}

if (format == AVRO && compressionCodec == HiveCompressionCodec.LZ4) {
// TODO (https://github.com/trinodb/trino/issues/9142) Support LZ4 compression with native Parquet writer
assertThatThrownBy(() -> computeActual(session, createTableSql))
.hasMessage("Unsupported compression codec: LZ4")
.isInstanceOf(QueryFailedException.class)
// TODO this should be TrinoException
.cause().hasToString("java.lang.IllegalArgumentException: Unsupported compression codec: LZ4");
.hasMessage("Compression codec LZ4 not supported for " + capitalize(format.name().toLowerCase(ENGLISH)));
return;
}
assertUpdate(session, createTableSql, 25);
Expand Down Expand Up @@ -7872,4 +7861,9 @@ private void assertQueryIdStored(String tableName, QueryId queryId)
assertThat(getFieldFromLatestSnapshotSummary(tableName, TRINO_QUERY_ID_NAME))
.isEqualTo(queryId.toString());
}

/**
 * Returns {@code value} with its first character upper-cased (e.g. {@code "avro"} -> {@code "Avro"}).
 * Used to turn a lower-cased file-format name into the capitalized form expected in error messages.
 *
 * @param value the string to capitalize; may be empty
 * @return the capitalized string, or {@code value} unchanged when it is empty
 */
private static String capitalize(String value)
{
    // Guard against empty input: charAt(0) would throw StringIndexOutOfBoundsException
    if (value.isEmpty()) {
        return value;
    }
    return Character.toUpperCase(value.charAt(0)) + value.substring(1);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1479,7 +1479,7 @@ public void testSparkReadingTrinoCompressedData(StorageFormat storageFormat, Str
}
if (storageFormat == StorageFormat.AVRO && compressionCodec.equals("LZ4")) {
assertQueryFailure(() -> onTrino().executeQuery(createTable))
.hasMessageMatching("\\QQuery failed (#\\E\\S+\\Q): Unsupported compression codec: " + compressionCodec);
.hasMessageMatching("\\QQuery failed (#\\E\\S+\\Q): Compression codec LZ4 not supported for Avro");
return;
}
onTrino().executeQuery(createTable);
Expand Down

0 comments on commit 42961bc

Please sign in to comment.