SNOW-1553624 - structured types bindings test all types #1841

Draft: wants to merge 7 commits into base: master
13 changes: 11 additions & 2 deletions src/main/java/net/snowflake/client/core/FieldSchemaCreator.java
@@ -58,11 +58,16 @@ public static BindingParameterMetadata buildSchemaWithScaleAndPrecision(

public static BindingParameterMetadata buildBindingSchemaForType(int baseType)
throws SQLException {
- return buildBindingSchemaForType(baseType, true);
+ return buildBindingSchemaForType(baseType, true, null);
}

public static BindingParameterMetadata buildBindingSchemaForType(int baseType, boolean addName)
throws SQLException {
+ return buildBindingSchemaForType(baseType, addName, null);
+ }

+ public static BindingParameterMetadata buildBindingSchemaForType(
+ int baseType, boolean addName, String typeName) throws SQLException {
String name = addName ? SnowflakeType.javaTypeToSFType(baseType, null).name() : null;
switch (baseType) {
case Types.VARCHAR:
@@ -85,9 +90,13 @@ public static BindingParameterMetadata buildBindingSchemaForType(int baseType, b
case Types.DATE:
return FieldSchemaCreator.buildSchemaTypeAndNameOnly(name, "date", Optional.empty());
case Types.TIMESTAMP:
+ return FieldSchemaCreator.buildSchemaWithScaleAndPrecision(
+ name, Optional.ofNullable(typeName).orElse("timestamp"), 0, 0, Optional.empty());
case Types.TIME:
return FieldSchemaCreator.buildSchemaWithScaleAndPrecision(
name, "timestamp", 9, 0, Optional.empty());
name, "time", 9, 0, Optional.empty());
sfc-gh-dprzybysz marked this conversation as resolved.
case Types.BINARY:
return FieldSchemaCreator.buildSchemaForBytesType(name, Optional.empty());
default:
logger.error("Could not create schema for type : " + baseType);
throw new SQLException("Could not create schema for type : " + baseType);
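After this change, `Types.TIMESTAMP` gets its own schema branch whose type name defaults to `"timestamp"` but can be overridden per binding via the new three-argument overload. A minimal caller-side sketch, assuming the driver-internal classes shown above; the `"timestamp_tz"` override value is an assumed example, not taken from the diff:

```java
import java.sql.SQLException;
import java.sql.Types;
import net.snowflake.client.core.FieldSchemaCreator;
import net.snowflake.client.jdbc.BindingParameterMetadata;

public class BindingSchemaSketch {
  public static void main(String[] args) throws SQLException {
    // Two-argument overload: delegates with typeName = null, so TIMESTAMP
    // falls back to the generic "timestamp" type name.
    BindingParameterMetadata plain =
        FieldSchemaCreator.buildBindingSchemaForType(Types.TIMESTAMP, true);

    // Three-argument overload: a non-null typeName replaces the default.
    // "timestamp_tz" is a hypothetical value chosen for illustration.
    BindingParameterMetadata tz =
        FieldSchemaCreator.buildBindingSchemaForType(Types.TIMESTAMP, true, "timestamp_tz");
  }
}
```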
18 changes: 5 additions & 13 deletions src/main/java/net/snowflake/client/core/JsonSqlOutput.java
@@ -28,6 +28,7 @@
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
+ import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
@@ -74,6 +75,9 @@ private TimeZone getSessionTimezone(SFBaseSession sfBaseSession) {
}

private static List<Field> getClassFields(SQLData original) {
+ if (original == null) {
+ return Collections.emptyList();
+ }
return Arrays.stream(original.getClass().getDeclaredFields())
.filter(
field ->
@@ -230,7 +234,7 @@ public void writeTimestamp(Timestamp value) throws SQLException {
.filter(str -> !str.isEmpty())
.orElse(timestampSessionType));
int columnType = snowflakeTypeToJavaType(snowflakeType);
- TimeZone timeZone = timeZoneDependOnType(snowflakeType, session, null);
+ TimeZone timeZone = SnowflakeUtil.timeZoneDependOnType(snowflakeType, session, null);
String timestampAsString =
SnowflakeUtil.mapSFExceptionToSQLException(
() ->
@@ -374,18 +378,6 @@ public BindingParameterMetadata getSchema() {
return schema;
}

- private TimeZone timeZoneDependOnType(
- SnowflakeType snowflakeType, SFBaseSession session, TimeZone tz) {
- if (snowflakeType == SnowflakeType.TIMESTAMP_NTZ) {
- return null;
- } else if (snowflakeType == SnowflakeType.TIMESTAMP_LTZ) {
- return getSessionTimezone(session);
- } else if (snowflakeType == SnowflakeType.TIMESTAMP_TZ) {
- return Optional.ofNullable(tz).orElse(sessionTimezone);
- }
- return TimeZone.getDefault();
- }

private int snowflakeTypeToJavaType(SnowflakeType snowflakeType) {
if (snowflakeType == SnowflakeType.TIMESTAMP_NTZ) {
return SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ;
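The deleted helper is not gone: `writeTimestamp` now calls the shared `SnowflakeUtil.timeZoneDependOnType` instead of this private copy. Its selection logic, reproduced below as a standalone sketch with simplified names (the real method takes `SnowflakeType` and an `SFBaseSession`):

```java
import java.util.Optional;
import java.util.TimeZone;

class TimeZoneSelectionSketch {
  enum TsType { TIMESTAMP_NTZ, TIMESTAMP_LTZ, TIMESTAMP_TZ, OTHER }

  // Mirrors the removed helper: NTZ values carry no zone, LTZ uses the
  // session zone, TZ prefers an explicitly supplied zone and falls back to
  // the session zone, and anything else uses the JVM default.
  static TimeZone select(TsType type, TimeZone sessionTz, TimeZone explicitTz) {
    switch (type) {
      case TIMESTAMP_NTZ:
        return null; // wallclock timestamp, no zone applied
      case TIMESTAMP_LTZ:
        return sessionTz;
      case TIMESTAMP_TZ:
        return Optional.ofNullable(explicitTz).orElse(sessionTz);
      default:
        return TimeZone.getDefault();
    }
  }
}
```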
src/main/java/net/snowflake/client/core/ParameterBindingDTO.java
@@ -4,7 +4,9 @@

package net.snowflake.client.core;

+ import com.fasterxml.jackson.core.JsonProcessingException;
import net.snowflake.client.jdbc.BindingParameterMetadata;
+ import net.snowflake.client.jdbc.SnowflakeUtil;

/** This class represents a binding object passed to server side Created by hyu on 6/15/17. */
public class ParameterBindingDTO {
@@ -19,6 +21,12 @@ public class ParameterBindingDTO {

public ParameterBindingDTO(
String fmt, String type, Object value, BindingParameterMetadata schema) {
System.out.println("VALUE " + value);
try {
System.out.println("SCHEMA " + SnowflakeUtil.mapJson(schema));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
this.fmt = fmt;
this.type = type;
this.value = value;
2 changes: 1 addition & 1 deletion src/main/java/net/snowflake/client/core/ResultUtil.java
@@ -293,7 +293,7 @@ public static String getSFTimestampAsString(

try {
Timestamp adjustedTimestamp = ResultUtil.adjustTimestamp(sfTS.getTimestamp());

System.out.println("Using formatter " + formatter.getSqlFormat());
return formatter.format(adjustedTimestamp, sfTS.getTimeZone(), scale);
} catch (SFTimestamp.TimestampOperationNotAvailableException e) {
// this timestamp doesn't fit into a Java timestamp, and therefore we
@@ -379,11 +379,11 @@ public Converters getConverters() {
@SnowflakeJdbcInternalApi
public SQLInput createSqlInputForColumn(
Object input,
- Class<?> parentObjectClass,
+ boolean isJsonMapping,
int columnIndex,
SFBaseSession session,
List<FieldMetadata> fields) {
- if (parentObjectClass.equals(JsonSqlInput.class)) {
+ if (isJsonMapping) {
return createJsonSqlInputForColumn(input, session, fields);
} else {
return new ArrowSqlInput((Map<String, Object>) input, session, converters, fields);
@@ -705,6 +705,7 @@ private SfSqlArray getArrowArray(List<Object> elements, int columnIndex) throws
columnSubType,
mapAndConvert(elements, converters.timeFromIntConverter(scale)).toArray(Time[]::new));
case Types.TIMESTAMP:
+ case Types.TIMESTAMP_WITH_TIMEZONE:
return new SfSqlArray(
columnSubType,
mapAndConvert(
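The added `TIMESTAMP_WITH_TIMEZONE` case simply falls through to the existing `TIMESTAMP` conversion, so timestamp-with-time-zone arrays reuse the same element converter. A self-contained sketch of just that dispatch shape, with the converter call stubbed out:

```java
import java.sql.Types;

class TimestampArrayDispatchSketch {
  static String describe(int columnType) {
    switch (columnType) {
      case Types.TIMESTAMP: // 93
      case Types.TIMESTAMP_WITH_TIMEZONE: // 2014, the newly added case
        return "convert elements with the timestamp converter";
      default:
        return "unhandled type " + columnType;
    }
  }

  public static void main(String[] args) {
    // Both constants now take the same path.
    System.out.println(describe(Types.TIMESTAMP));
    System.out.println(describe(Types.TIMESTAMP_WITH_TIMEZONE));
  }
}
```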
219 changes: 125 additions & 94 deletions src/main/java/net/snowflake/client/core/SFBaseResultSet.java
@@ -236,7 +236,7 @@ public TimeZone getSessionTimeZone() {
@SnowflakeJdbcInternalApi
public SQLInput createSqlInputForColumn(
Object input,
- Class<?> parentObjectClass,
+ boolean isJsonMapping,
int columnIndex,
SFBaseSession session,
List<FieldMetadata> fields) {
@@ -263,7 +263,11 @@ public Timestamp convertToTimestamp(
protected SQLInput createJsonSqlInputForColumn(
Object input, SFBaseSession session, List<FieldMetadata> fields) {
JsonNode inputNode;

if (input instanceof JsonNode) {
+ if (((JsonNode) input).isNull()) {
+ return null;
+ }
inputNode = (JsonNode) input;
} else {
inputNode = OBJECT_MAPPER.convertValue(input, JsonNode.class);
@@ -288,99 +292,126 @@ protected SfSqlArray getJsonArray(String obj, int columnIndex) throws SFExceptio
int columnType = ColumnTypeHelper.getColumnType(columnSubType, session);
int scale = fieldMetadata.getScale();

- ArrayNode arrayNode = (ArrayNode) OBJECT_MAPPER.readTree(obj);
- Iterator<JsonNode> nodeElements = arrayNode.elements();
-
- switch (columnType) {
- case Types.INTEGER:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().integerConverter(columnType))
- .toArray(Integer[]::new));
- case Types.SMALLINT:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().smallIntConverter(columnType))
- .toArray(Short[]::new));
- case Types.TINYINT:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().tinyIntConverter(columnType))
- .toArray(Byte[]::new));
- case Types.BIGINT:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().bigIntConverter(columnType))
- .toArray(Long[]::new));
- case Types.DECIMAL:
- case Types.NUMERIC:
- return new SfSqlArray(
- columnSubType,
- convertToFixedArray(
- getStream(nodeElements, getConverters().bigDecimalConverter(columnType))));
- case Types.CHAR:
- case Types.VARCHAR:
- case Types.LONGNVARCHAR:
- return new SfSqlArray(
- columnSubType,
- getStream(
- nodeElements,
- getConverters().varcharConverter(columnType, columnSubType, scale))
- .toArray(String[]::new));
- case Types.BINARY:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().bytesConverter(columnType, scale))
- .toArray(Byte[][]::new));
- case Types.FLOAT:
- case Types.REAL:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().floatConverter(columnType))
- .toArray(Float[]::new));
- case Types.DOUBLE:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().doubleConverter(columnType))
- .toArray(Double[]::new));
- case Types.DATE:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().dateStringConverter(session))
- .toArray(Date[]::new));
- case Types.TIME:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().timeFromStringConverter(session))
- .toArray(Time[]::new));
- case Types.TIMESTAMP:
- return new SfSqlArray(
- columnSubType,
- getStream(
- nodeElements,
- getConverters()
- .timestampFromStringConverter(
- columnSubType, columnType, scale, session, null, sessionTimeZone))
- .toArray(Timestamp[]::new));
- case Types.BOOLEAN:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().booleanConverter(columnType))
- .toArray(Boolean[]::new));
- case Types.STRUCT:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().structConverter(OBJECT_MAPPER))
- .toArray(Map[]::new));
- case Types.ARRAY:
- return new SfSqlArray(
- columnSubType,
- getStream(nodeElements, getConverters().arrayConverter(OBJECT_MAPPER))
- .toArray(Map[][]::new));
- default:
- throw new SFException(
- ErrorCode.FEATURE_UNSUPPORTED,
- "Can't construct array for data type: " + columnSubType);
+ JsonNode data = OBJECT_MAPPER.readTree(obj);
+
+ if (data.isNull()) {
+ return null;
+ }
+
+ if (data.isArray()) {
+ ArrayNode arrayNode = (ArrayNode) OBJECT_MAPPER.readTree(obj);
+ Iterator<JsonNode> nodeElements = arrayNode.elements();
+
+ switch (columnType) {
+ case Types.INTEGER:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().integerConverter(columnType))
+ .toArray(Integer[]::new));
+ case Types.SMALLINT:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().smallIntConverter(columnType))
+ .toArray(Short[]::new));
+ case Types.TINYINT:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().tinyIntConverter(columnType))
+ .toArray(Byte[]::new));
+ case Types.BIGINT:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().bigIntConverter(columnType))
+ .toArray(Long[]::new));
+ case Types.DECIMAL:
+ case Types.NUMERIC:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ convertToFixedArray(
+ getStream(nodeElements, getConverters().bigDecimalConverter(columnType))));
+ case Types.CHAR:
+ case Types.VARCHAR:
+ case Types.LONGNVARCHAR:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(
+ nodeElements,
+ getConverters().varcharConverter(columnType, columnSubType, scale))
+ .toArray(String[]::new));
+ case Types.BINARY:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().bytesConverter(columnType, scale))
+ .toArray(Byte[][]::new));
+ case Types.FLOAT:
+ case Types.REAL:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().floatConverter(columnType))
+ .toArray(Float[]::new));
+ case Types.DOUBLE:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().doubleConverter(columnType))
+ .toArray(Double[]::new));
+ case Types.DATE:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().dateStringConverter(session))
+ .toArray(Date[]::new));
+ case Types.TIME:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().timeFromStringConverter(session))
+ .toArray(Time[]::new));
+ case Types.TIMESTAMP:
+ case Types.TIMESTAMP_WITH_TIMEZONE:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(
+ nodeElements,
+ getConverters()
+ .timestampFromStringConverter(
+ columnSubType, columnType, scale, session, null, sessionTimeZone))
+ .toArray(Timestamp[]::new));
+ case Types.BOOLEAN:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().booleanConverter(columnType))
+ .toArray(Boolean[]::new));
+ case Types.STRUCT:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().structConverter(OBJECT_MAPPER))
+ .toArray(Map[]::new));
+ case Types.ARRAY:
+ return new SfSqlArray(
+ data,
+ columnSubType,
+ getStream(nodeElements, getConverters().arrayConverter(OBJECT_MAPPER))
+ .toArray(Map[][]::new));
+ default:
+ throw new SFException(
+ ErrorCode.FEATURE_UNSUPPORTED,
+ "Can't construct array for data type: " + columnSubType);
+ }
+ } else {
Collaborator: can we invert the condition and skip the else to make fewer changes here?
+ throw new SFException(
+ ErrorCode.INVALID_VALUE_CONVERT, "Can't construct array from delivered data");
+ }
} catch (JsonProcessingException e) {
throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
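Stripped of the driver-specific converters, the new guard order in `getJsonArray` is: parse once, map JSON `null` to a null array, convert only when the payload really is a JSON array, and raise a conversion error otherwise. A simplified, self-contained Jackson sketch of that shape, with element conversion reduced to strings and the condition inverted as the review comment suggests:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

class JsonArrayGuardSketch {
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  static List<String> toStringList(String json) throws Exception {
    JsonNode data = OBJECT_MAPPER.readTree(json);
    if (data.isNull()) {
      return null; // a NULL array column yields a null array
    }
    if (!data.isArray()) {
      throw new IllegalArgumentException("Can't construct array from delivered data");
    }
    List<String> out = new ArrayList<>();
    Iterator<JsonNode> elements = data.elements();
    while (elements.hasNext()) {
      out.add(elements.next().asText());
    }
    return out;
  }

  public static void main(String[] args) throws Exception {
    System.out.println(toStringList("[\"a\", \"b\"]")); // [a, b]
    System.out.println(toStringList("null")); // null
  }
}
```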
@@ -255,7 +255,7 @@ public Date getDate(int columnIndex, TimeZone tz) throws SFException {
@SnowflakeJdbcInternalApi
public SQLInput createSqlInputForColumn(
Object input,
- Class<?> parentObjectClass,
+ boolean isJsonMapping,
int columnIndex,
SFBaseSession session,
List<FieldMetadata> fields) {