refactor: optimize
zhou-hao committed Sep 19, 2024
1 parent 33494e4 commit fee0f73
Showing 10 changed files with 113 additions and 50 deletions.
----------------------------------------
@@ -1,6 +1,11 @@
package org.hswebframework.ezorm.core;

public interface ValueCodec<E, D> extends Encoder<E>, Decoder<D> {

default E encodeNull(){
return null;
}

E encode(Object value);

D decode(Object data);
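
The new encodeNull() hook lets a codec decide what should be persisted when the value to encode is null (the default keeps the old behaviour and returns null). A minimal sketch of a custom codec using it (hypothetical, not part of this commit):

import org.hswebframework.ezorm.core.ValueCodec;

// Hypothetical codec: persists Boolean values as "Y"/"N" and, via the new
// encodeNull() hook, stores "N" when the value being encoded is null.
public class YesNoCodec implements ValueCodec<String, Boolean> {

    @Override
    public String encodeNull() {
        return "N"; // assumption: null should be stored as "N"
    }

    @Override
    public String encode(Object value) {
        return Boolean.TRUE.equals(value) ? "Y" : "N";
    }

    @Override
    public Boolean decode(Object data) {
        return "Y".equals(String.valueOf(data));
    }
}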
----------------------------------------
@@ -57,6 +57,9 @@ public Object decode(Object data) {

public Object encode(Object data) {
if (data == null) {
if (valueCodec != null) {
return valueCodec.encodeNull();
}
return null;
}
if (valueCodec != null) {
----------------------------------------
@@ -118,6 +118,8 @@ public Object toDate(Object data) {
} else if (data instanceof ZonedDateTime) {
ZonedDateTime dateTime = ((ZonedDateTime) data);
data = Date.from(dateTime.toInstant());
}else if(data instanceof OffsetDateTime){
data = Date.from(((OffsetDateTime) data).toInstant());
} else if (data instanceof String) {
String stringData = ((String) data);
if ((stringData).contains(",")) {
----------------------------------------
@@ -219,7 +219,13 @@ public RDBColumnMetadata clone() {

@Override
public Object encode(Object data) {
if (data instanceof NullValue) {
if (data == null || data instanceof NullValue) {
if (valueCodec != null) {
Object newVal = valueCodec.encodeNull();
if (newVal != null) {
return newVal;
}
}
return data;
}
return super.encode(data);
----------------------------------------
@@ -42,10 +42,10 @@ public RDBIndexMetadata(String name) {
@Override
public String toString() {
StringBuilder builder = new StringBuilder(name)
.append(" ")
.append(unique ? "unique index" : "index")
.append(" on ")
.append(tableName);
.append(" ")
.append(unique ? "unique index" : "index")
.append(" on ")
.append(tableName);
builder.append("(");
int index = 0;
for (IndexColumn column : columns) {
@@ -74,12 +74,11 @@ public ObjectType getObjectType() {
@SneakyThrows
public RDBIndexMetadata clone() {
RDBIndexMetadata metadata = (RDBIndexMetadata) super.clone();
metadata.columns = new CopyOnWriteArrayList<>();

metadata.columns.clear();

columns.stream()
.map(IndexColumn::clone)
.forEach(metadata.columns::add);
for (IndexColumn column : this.columns) {
metadata.columns.add(column.clone());
}

return metadata;
}
@@ -94,17 +93,17 @@ public boolean isChanged(RDBTableMetadata metadata, RDBIndexMetadata old) {
}

Map<String, IndexColumn> nameMapping = getColumns()
.stream()
.collect(Collectors.toMap(c -> metadata
.getColumn(c.column)
.map(RDBColumnMetadata::getName)
.orElseGet(RDBIndexMetadata.class::getName), Function.identity()));
.stream()
.collect(Collectors.toMap(c -> metadata
.getColumn(c.column)
.map(RDBColumnMetadata::getName)
.orElseGet(RDBIndexMetadata.class::getName), Function.identity()));

for (IndexColumn oldColumn : old.getColumns()) {
String columnName = metadata
.getColumn(oldColumn.column)
.map(RDBColumnMetadata::getName)
.orElse(null);
.getColumn(oldColumn.column)
.map(RDBColumnMetadata::getName)
.orElse(null);


if (columnName == null || !nameMapping.containsKey(columnName)) {
----------------------------------------
@@ -48,8 +48,8 @@ public SqlRequest build(InsertOperatorParameter parameter) {

fragments.add(SqlFragments.LEFT_BRACKET);

LinkedHashMap<Integer, RDBColumnMetadata> indexMapping = Maps.newLinkedHashMapWithExpectedSize(columns.size());
LinkedHashMap<Integer, SqlFragments> functionValues = Maps.newLinkedHashMapWithExpectedSize(columns.size());
LinkedHashMap<Integer, RDBColumnMetadata> indexMapping = Maps.newLinkedHashMapWithExpectedSize(columns.size());
LinkedHashMap<Integer, SqlFragments> functionValues = Maps.newLinkedHashMapWithExpectedSize(columns.size());

int index = 0;
int primaryIndex = -1;
@@ -140,8 +140,16 @@ public SqlRequest build(InsertOperatorParameter parameter) {
if (value == null) {
value = NullValue.of(column.getType());
}
fragments.add(SqlFragments.QUESTION_MARK)
.addParameter(column.encode(value));
value = column.encode(value);
if (value instanceof NativeSql) {
fragments
.addSql(((NativeSql) value).getSql())
.addParameter(((NativeSql) value).getParameters());

} else {
fragments.add(SqlFragments.QUESTION_MARK)
.addParameter(value);
}
}
}
}
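
With this change, a value returned by column.encode(...) may itself be a NativeSql, in which case the builder inlines its SQL and parameters instead of binding a plain "?" placeholder. A rough sketch of a codec that relies on this (hypothetical; the NativeSql.of(...) factory and its import path are assumptions and may differ in your ezorm version):

import org.hswebframework.ezorm.core.ValueCodec;
import org.hswebframework.ezorm.rdb.operator.builder.fragments.NativeSql; // assumed location

// Hypothetical codec: encodes a value as a parameterized cast to jsonb.
// Because encode(...) returns a NativeSql, the insert builder above emits
// "?::jsonb" plus the bound parameter instead of a bare "?" placeholder.
public class JsonbCastCodec implements ValueCodec<Object, Object> {

    @Override
    public Object encode(Object value) {
        return NativeSql.of("?::jsonb", value); // NativeSql.of(...) is assumed
    }

    @Override
    public Object decode(Object data) {
        return data;
    }
}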
----------------------------------------
@@ -90,9 +90,18 @@ else if (value instanceof NativeSql) {
}
// = ?
else {
columnFragments = SimpleSqlFragments
.of(Arrays.asList(columnMetadata.getQuoteName(), "= ?"),
Collections.singletonList(columnMetadata.encode(value)));
value = columnMetadata.encode(value);
if (value instanceof NativeSql) {
    columnFragments = SimpleSqlFragments
        .of(
            Arrays.asList(columnMetadata.getQuoteName(), "=", ((NativeSql) value).getSql()),
            Arrays.asList(((NativeSql) value).getParameters())
        );
} else {
    columnFragments = SimpleSqlFragments
        .of(Arrays.asList(columnMetadata.getQuoteName(), "= ?"),
            Collections.singletonList(value));
}
}

}
----------------------------------------
@@ -8,6 +8,7 @@
import org.hswebframework.ezorm.rdb.executor.reactive.ReactiveSqlExecutor;
import org.hswebframework.ezorm.rdb.mapping.defaults.SaveResult;
import org.hswebframework.ezorm.rdb.metadata.RDBColumnMetadata;
import org.hswebframework.ezorm.rdb.metadata.RDBIndexMetadata;
import org.hswebframework.ezorm.rdb.metadata.RDBTableMetadata;
import org.hswebframework.ezorm.rdb.operator.builder.fragments.*;
import org.hswebframework.ezorm.rdb.operator.builder.fragments.insert.BatchInsertSqlBuilder;
@@ -19,8 +20,10 @@

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;

@SuppressWarnings("all")
public class PostgresqlBatchUpsertOperator implements SaveOrUpdateOperator {
@@ -29,36 +32,60 @@ public class PostgresqlBatchUpsertOperator implements SaveOrUpdateOperator {

private PostgresqlUpsertBatchInsertSqlBuilder builder;

private RDBColumnMetadata idColumn;
private SqlFragments prefix;

private SaveOrUpdateOperator fallback;

public PostgresqlBatchUpsertOperator(RDBTableMetadata table) {
this.table = table;
this.idColumn = table.getColumns()
.stream().filter(RDBColumnMetadata::isPrimaryKey)
.findFirst()
.orElse(null);
this.fallback = new DefaultSaveOrUpdateOperator(table);
this.builder = new PostgresqlUpsertBatchInsertSqlBuilder(table);
}

@Override
public SaveResultOperator execute(UpsertOperatorParameter parameter) {
if (idColumn == null) {
this.idColumn = table
if (getOrCreateOnConflict().isEmpty()) {
return fallback.execute(parameter);
}
return new PostgresqlSaveResultOperator(() -> builder.build(new PostgresqlUpsertOperatorParameter(parameter)));
}

SqlFragments getOrCreateOnConflict() {
if (prefix == null) {
prefix = createOnConflict();
}
return prefix;
}

SqlFragments createOnConflict() {
RDBColumnMetadata idColumn = table
.getColumns()
.stream()
.filter(RDBColumnMetadata::isPrimaryKey)
.findFirst()
.orElse(null);
if (idColumn != null) {
return SqlFragments.of("on conflict (", idColumn.getName(), ") do ");
}
RDBIndexMetadata indexMetadata = table
.getIndexes()
.stream()
.filter(index -> index.isUnique())
.findFirst()
.orElse(null);

if (indexMetadata != null) {
String columns = indexMetadata
.getColumns()
.stream()
.filter(RDBColumnMetadata::isPrimaryKey)
.findFirst()
.orElse(null);
.map(c -> table.getColumn(c.getColumn()).orElse(null))
.filter(Objects::nonNull)
.map(RDBColumnMetadata::getQuoteName)
.collect(Collectors.joining(","));

if (this.idColumn == null) {
return fallback.execute(parameter);
}
return SqlFragments.of("on conflict( ", columns, ") do ");
}

return new PostgresqlSaveResultOperator(() -> builder.build(new PostgresqlUpsertOperatorParameter(parameter)));
return EmptySqlFragments.INSTANCE;
}
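
For illustration only (approximate shapes, not verbatim builder output): when the table has a primary key column id, createOnConflict() produces the prefix on conflict (id) do, so the generated upsert ends in on conflict (id) do nothing when doNoThingOnConflict is set. When there is no primary key but a unique index over columns a and b, the prefix is built from the quoted column names, roughly on conflict( "a","b") do. With neither a primary key nor a unique index, getOrCreateOnConflict() stays empty and execute(...) falls back to DefaultSaveOrUpdateOperator.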

class PostgresqlUpsertOperatorParameter extends InsertOperatorParameter {
@@ -104,7 +131,6 @@ public Mono<SaveResult> reactive() {

private class PostgresqlUpsertBatchInsertSqlBuilder extends BatchInsertSqlBuilder {

SqlFragments PREFIX = null;

public PostgresqlUpsertBatchInsertSqlBuilder(RDBTableMetadata table) {
super(table);
@@ -117,10 +143,7 @@ protected int computeSqlSize(int columnSize, int valueSize) {

@Override
protected AppendableSqlFragments afterBuild(Set<InsertColumn> columns, InsertOperatorParameter parameter, AppendableSqlFragments sql) {
if (PREFIX == null) {
PREFIX = SqlFragments.of("on conflict (", idColumn.getName(), ") do ");
}
sql.add(PREFIX);
sql.add(createOnConflict());

if (((PostgresqlUpsertOperatorParameter) parameter).doNoThingOnConflict) {
sql.addSql("nothing");
----------------------------------------
@@ -7,6 +7,8 @@

import java.math.BigDecimal;
import java.sql.JDBCType;
import java.sql.Timestamp;
import java.time.ZonedDateTime;

/**
* @author zhouhao
@@ -20,6 +22,7 @@ public PostgresqlDialect() {
addDataTypeBuilder(JDBCType.CHAR, (meta) -> StringUtils.concat("char(", meta.getLength(255), ")"));
addDataTypeBuilder(JDBCType.VARCHAR, (meta) -> StringUtils.concat("varchar(", meta.getLength(255), ")"));
addDataTypeBuilder(JDBCType.TIMESTAMP, (meta) -> "timestamp");
addDataTypeBuilder(JDBCType.TIMESTAMP_WITH_TIMEZONE, (meta) -> "timestamptz");
addDataTypeBuilder(JDBCType.TIME, (meta) -> "time");
addDataTypeBuilder(JDBCType.DATE, (meta) -> "date");
addDataTypeBuilder(JDBCType.CLOB, (meta) -> "text");
@@ -36,6 +39,7 @@ public PostgresqlDialect() {

addDataTypeBuilder(JDBCType.BIGINT, (meta) -> "bigint");
addDataTypeBuilder(JDBCType.OTHER, (meta) -> "other");

addDataTypeBuilder("json", meta -> "json");
addDataTypeBuilder("jsonb", meta -> "jsonb");

@@ -46,6 +50,10 @@ public PostgresqlDialect() {
registerDataType("blob", DataType.builder(JdbcDataType.of(JDBCType.BLOB, String.class), (c) -> "bytea"));
registerDataType("longnvarchar", DataType.builder(JdbcDataType.of(JDBCType.LONGNVARCHAR, String.class), c -> "text"));
registerDataType("longvarchar", DataType.builder(JdbcDataType.of(JDBCType.LONGVARCHAR, String.class), c -> "text"));
registerDataType("timestamptz", DataType
.builder(JdbcDataType
.of(JDBCType.TIMESTAMP_WITH_TIMEZONE, ZonedDateTime.class),
c -> "timestamptz"));

registerDataType("int8", JdbcDataType.of(JDBCType.BIGINT, Long.class));
registerDataType("int4", JdbcDataType.of(JDBCType.INTEGER, Integer.class));
----------------------------------------
@@ -22,6 +22,7 @@
import reactor.util.function.Tuples;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

@AllArgsConstructor
@Slf4j
@@ -110,7 +111,7 @@ public Flux<RDBIndexMetadata> parseAllReactive() {
}

class PostgresqlIndexMetadataWrapper implements ResultWrapper<Map<String, Object>, List<RDBIndexMetadata>> {
Map<Tuple2<String,String>, RDBIndexMetadata> group = new HashMap<>();
Map<Tuple2<String,String>, RDBIndexMetadata> group = new ConcurrentHashMap<>();

@Override
public Map<String, Object> newRowInstance() {
@@ -127,16 +128,15 @@ public void wrapColumn(ColumnWrapperContext<Map<String, Object>> context) {
@Override
public boolean completedWrapRow(Map<String, Object> result) {
String name = (String) result.get("indexname");
String tableName = ((String) result.get("tablename")).toLowerCase();
String tableName = ((String) result.get("tablename"));

RDBIndexMetadata index = group.computeIfAbsent(Tuples.of(name,tableName), __ -> new RDBIndexMetadata());
index.setName(name.toLowerCase());
index.setTableName(tableName);
index.setPrimaryKey(Boolean.TRUE.equals(result.get("indisprimary")));
index.setUnique(Boolean.FALSE.equals(result.get("indisunique")));
index.setUnique(Boolean.TRUE.equals(result.get("indisunique")));
RDBIndexMetadata.IndexColumn indexColumn = new RDBIndexMetadata.IndexColumn();
indexColumn.setColumn(((String) result.get("attname")).toLowerCase());
// TODO: 2019-10-22 how to get the sort order...
indexColumn.setColumn(((String) result.get("attname")));
indexColumn.setSort(RDBIndexMetadata.IndexSort.asc);
indexColumn.setSortIndex(((Number) result.get("attnum")).intValue());
index.getColumns().add(indexColumn);