Skip to content

Commit

Permalink
Update to version v3.18.0
Browse files Browse the repository at this point in the history
  • Loading branch information
graveart committed Jul 29, 2023
1 parent ec109ea commit 6d4504f
Show file tree
Hide file tree
Showing 122 changed files with 5,449 additions and 1,168 deletions.
2 changes: 1 addition & 1 deletion bindings/consts.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ package bindings

const CInt32Max = int(^uint32(0) >> 1)

const ReindexerVersion = "v3.17.0"
const ReindexerVersion = "v3.18.0"

// public go consts from type_consts.h and reindexer_ctypes.h
const (
Expand Down
35 changes: 35 additions & 0 deletions changelog.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,38 @@
# Version 3.18.0 (29.07.2023)
## Core
- [fea] Increased max indexes count for each namespace up to 255 user-defined indexes (previously it was 63)
- [fea] Added more info to the [slow logger's](readme.md#slow-actions-logging) output: mutexes timing for transactions and basic `explain` info for `select`-queries
- [fea] Improved logic of the cost evaluation for the btree indexes usage in situations, when background indexes ordering was not completed (right after write operations). Expecting more optimal execution plan in those cases
- [fix] Changed logic of the `ALLSET` operator. Now `ALLSET` condition returns `false` for empty values sets and the result behavior is similar to MongoDB [$all](https://www.mongodb.com/docs/manual/reference/operator/query/all/)
- [fix] Fixed automatic conversion for numeric strings with leading or trailing spaces (i.e. ' 1234' or '1234 ') into integers/floats in `WHERE`/`ORDER BY`
- [fix] Allowed non-unique values in forced sort (`ORDER BY (id,4,2,2,5)`). If forced sort order contains the same values at different positions (i.e. `ORDER BY (id,1,2,1,5)`), then the first occurrence of the value will be used for sorting
- [fix] Added limits for the large values sets in the composite indexes substitution algorithm, introduced in v3.15.0 (due to performance issues in some cases). If the result size of the set is exceeding corresponding limit, reindexer will try to find another composite index or skip the substitution

## Go connector
- [fea] Added support for JOINs and brackets into [JSON DSL wrapper](dsl/dsl.go)

## Build
- [fea] Added support and deploy for Debian 12 (bookworm). Debian 10 (buster) build was deprecated
- [fea] Enabled SSE4.2 for the default reindexer's builds and for the prebuilt packages. SSE may still be disabled by passing `-DENABLE_SSE=OFF` to `cmake` command

## Face
- [fea] Changed the scale window icon for textareas
- [fea] Added the background color to the Close icon in the search history on the Namespace page
- [fea] Improved the buttons' behavior on the Query builder page
- [fea] Added the database name size limit.
- [fea] Improved the drop-down section behavior on the Query builder page
- [fea] Added new proc settings to the Index config
- [fix] Fixed the columns' settings resetting after the Perfstats page reloading
- [fix] Removed the double requests on the Perfstats page
- [fix] Fixed the JSON Paths tooltip description
- [fix] Fixed the pie chart position in Safari
- [fix] Fixed the popup window size for the long text
- [fix] Fixed the bottom padding on the statistics legend window
- [fix] Fixed the modal window to inform about disabled memory statistics
- [fix] Fixed the filter removal
- [fix] Fixed the filter result page when the filter is removed
- [fix] Fixed the redirect to the wrong page after all items were removed

# Version 3.17.0 (06.07.2023)
## Core
- [fea] Optimized namespaces' locks for queries to the system namespaces, containing explicit list of names (for example, `SELECT * FROM #memstats WHERE "name" IN ('ns1', 'nsx', 'ns19')` now requires shared locks for the listed namespaces only)
Expand Down
4 changes: 3 additions & 1 deletion cjson/decoder.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ type Decoder struct {
logger Logger
}

const MaxIndexes = 256

func fieldByTag(t reflect.Type, tag string) (result reflect.StructField, ok bool) {
if t.Kind() == reflect.Ptr {
t = t.Elem()
Expand Down Expand Up @@ -674,7 +676,7 @@ func (dec *Decoder) DecodeCPtr(cptr uintptr, dest interface{}) (err error) {
}
}()

fieldsoutcnt := make([]int, 64, 64)
fieldsoutcnt := make([]int, MaxIndexes)
ctagsPath := make([]int, 0, 8)

dec.decodeValue(pl, ser, reflect.ValueOf(dest), fieldsoutcnt, ctagsPath)
Expand Down
18 changes: 15 additions & 3 deletions cpp_src/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ option (ENABLE_TCMALLOC "Enable tcmalloc extensions" ON)
option (ENABLE_JEMALLOC "Enable jemalloc extensions" ON)
option (ENABLE_ROCKSDB "Enable rocksdb storage" ON)
option (ENABLE_GRPC "Enable GRPC service" OFF)
option (ENABLE_SSE "Enable SSE instructions" ON)

if (NOT GRPC_PACKAGE_PROVIDER)
set (GRPC_PACKAGE_PROVIDER "CONFIG")
Expand All @@ -35,7 +36,7 @@ else()
option (LINK_RESOURCES "Link web resources as binary data" ON)
endif()

set (REINDEXER_VERSION_DEFAULT "3.17.0")
set (REINDEXER_VERSION_DEFAULT "3.18.0")

if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE "RelWithDebInfo")
Expand All @@ -52,12 +53,13 @@ include (TargetArch)
target_architecture(COMPILER_TARGET_ARCH)

# Configure compile options
string( REPLACE "-DNDEBUG" "" CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO}")
string(REPLACE "-DNDEBUG" "" CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO}")
string(REPLACE "-O2" "-O3" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
string(REPLACE "-O2" "-O3" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
if (NOT ${COMPILER_TARGET_ARCH} STREQUAL "e2k")
string(REPLACE "-g" "-g1" CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO}")
else()
string(REPLACE "-g" "-g0" CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO}")
string(REPLACE "-O2" "-O3" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
endif()

if (${COMPILER_TARGET_ARCH} STREQUAL "e2k")
Expand Down Expand Up @@ -224,6 +226,16 @@ else ()
endif ()
list(APPEND SRCS ${CONTEXT_ASM_SRCS})

if (ENABLE_SSE)
if (NOT MSVC AND NOT APPLE AND (${COMPILER_TARGET_ARCH} STREQUAL "x86_64" OR ${COMPILER_TARGET_ARCH} STREQUAL "i386"))
add_definitions(-DREINDEXER_WITH_SSE=1)
message ("Building with SSE support...")
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -msse -msse2 -msse3 -mssse3 -msse4 -msse4.1 -msse4.2 -mpopcnt")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -msse -msse2 -msse3 -mssse3 -msse4 -msse4.1 -msse4.2 -mpopcnt")
else ()
message ("SSE compiler flags were disabled for the current platform")
endif ()
endif ()

include_directories(${REINDEXER_SOURCE_PATH})
include_directories(${REINDEXER_SOURCE_PATH}/vendor)
Expand Down
5 changes: 4 additions & 1 deletion cpp_src/client/cororeindexer.cc
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
#include "client/cororeindexer.h"
#include "client/cororpcclient.h"
#include "tools/cpucheck.h"
#include "tools/logger.h"

namespace reindexer {
namespace client {

CoroReindexer::CoroReindexer(const ReindexerConfig& config) : impl_(new CoroRPCClient(config)), owner_(true), ctx_() {}
CoroReindexer::CoroReindexer(const ReindexerConfig& config) : impl_(new CoroRPCClient(config)), owner_(true), ctx_() {
reindexer::CheckRequiredSSESupport();
}
CoroReindexer::~CoroReindexer() {
if (owner_) {
delete impl_;
Expand Down
6 changes: 3 additions & 3 deletions cpp_src/client/itemimpl.cc
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ void ItemImpl::FromCJSON(std::string_view slice) {
throw Error(errParseJson, "Internal error - left unparsed data %d", rdser.Pos());
}
tupleData_.assign(ser_.Slice().data(), ser_.Slice().size());
pl.Set(0, {Variant(p_string(&tupleData_))});
pl.Set(0, Variant(p_string(&tupleData_)));
}

Error ItemImpl::FromJSON(std::string_view slice, char **endp, bool /*pkOnly*/) {
Expand Down Expand Up @@ -88,7 +88,7 @@ Error ItemImpl::FromJSON(std::string_view slice, char **endp, bool /*pkOnly*/) {
if (err.ok()) {
// Put tuple to field[0]
tupleData_.assign(ser_.Slice().data(), ser_.Slice().size());
pl.Set(0, {Variant(p_string(&tupleData_))});
pl.Set(0, Variant(p_string(&tupleData_)));
ser_ = WrSerializer();
}
return err;
Expand All @@ -102,7 +102,7 @@ Error ItemImpl::FromMsgPack(std::string_view buf, size_t &offset) {
Error err = decoder.Decode(buf, pl, ser_, offset);
if (err.ok()) {
tupleData_.assign(ser_.Slice().data(), ser_.Slice().size());
pl.Set(0, {Variant(p_string(&tupleData_))});
pl.Set(0, Variant(p_string(&tupleData_)));
}
return err;
}
Expand Down
3 changes: 3 additions & 0 deletions cpp_src/client/rpcclient.cc
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
#include "client/itemimpl.h"
#include "core/namespacedef.h"
#include "gason/gason.h"
#include "tools/cpucheck.h"
#include "tools/errors.h"
#include "tools/logger.h"
#include "vendor/gason/gason.h"
Expand All @@ -14,6 +15,8 @@ namespace client {
using reindexer::net::cproto::RPCAnswer;

RPCClient::RPCClient(const ReindexerConfig& config) : workers_(config.WorkerThreads), config_(config), updatesConn_(nullptr) {
reindexer::CheckRequiredSSESupport();

if (config_.ConnectTimeout > config_.RequestTimeout) {
config_.RequestTimeout = config_.ConnectTimeout;
}
Expand Down
2 changes: 1 addition & 1 deletion cpp_src/cmd/reindexer_server/contrib/Dockerfile.deb
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ RUN cd /src && \
FROM debian:stable-slim
COPY --from=build /usr/local /usr/local
COPY --from=build /entrypoint.sh /entrypoint.sh
RUN apt update -y && apt install -y libleveldb1d libunwind8 libjemalloc2 libgrpc++1 && rm -rf /var/lib/apt
RUN apt update -y && apt install -y libleveldb1d libunwind8 libjemalloc2 libgrpc++1.51 && rm -rf /var/lib/apt

ENV RX_DATABASE /db
ENV RX_CORELOG stdout
Expand Down
4 changes: 4 additions & 0 deletions cpp_src/cmd/reindexer_server/main.cc
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,13 @@
#include "debug/backtrace.h"
#include "server/server.h"
#include "spdlog/spdlog.h"
#include "tools/cpucheck.h"

int main(int argc, char* argv[]) {
reindexer::debug::backtrace_init();

reindexer::CheckRequiredSSESupport();

reindexer_server::Server svc(reindexer_server::ServerMode::Standalone);
auto err = svc.InitFromCLI(argc, argv);
if (!err.ok()) {
Expand Down
3 changes: 3 additions & 0 deletions cpp_src/cmd/reindexer_tool/reindexer_tool.cc
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
#include "debug/backtrace.h"
#include "reindexer_version.h"
#include "repair_tool.h"
#include "tools/cpucheck.h"
#include "tools/logger.h"
#include "tools/stringstools.h"

Expand Down Expand Up @@ -42,6 +43,8 @@ int main(int argc, char* argv[]) {
using namespace reindexer_tool;
reindexer::debug::backtrace_init();

reindexer::CheckRequiredSSESupport();

args::ArgumentParser parser("Reindexer client tool");
args::HelpFlag help(parser, "help", "show this message", {'h', "help"});

Expand Down
15 changes: 6 additions & 9 deletions cpp_src/core/cjson/baseencoder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,7 @@
namespace reindexer {

template <typename Builder>
BaseEncoder<Builder>::BaseEncoder(const TagsMatcher* tagsMatcher, const FieldsSet* filter) : tagsMatcher_(tagsMatcher), filter_(filter) {
static_assert(std::numeric_limits<decltype(objectScalarIndexes_)>::digits >= maxIndexes,
"objectScalarIndexes_ needs to provide 'maxIndexes' bits or more");
}
BaseEncoder<Builder>::BaseEncoder(const TagsMatcher* tagsMatcher, const FieldsSet* filter) : tagsMatcher_(tagsMatcher), filter_(filter) {}

template <typename Builder>
void BaseEncoder<Builder>::Encode(std::string_view tuple, Builder& builder, IAdditionalDatasource<Builder>* ds) {
Expand Down Expand Up @@ -45,7 +42,7 @@ void BaseEncoder<Builder>::Encode(ConstPayload& pl, Builder& builder, IAdditiona
return;
}

objectScalarIndexes_ = 0;
objectScalarIndexes_.reset();
std::fill_n(std::begin(fieldsoutcnt_), pl.NumFields(), 0);
builder.SetTagsMatcher(tagsMatcher_);
if constexpr (kWithTagsPathTracking) {
Expand Down Expand Up @@ -75,7 +72,7 @@ const TagsLengths& BaseEncoder<Builder>::GetTagsMeasures(ConstPayload& pl, IEnco
[[maybe_unused]] const ctag beginTag = rdser.GetCTag();
assertrx(beginTag.Type() == TAG_OBJECT);

tagsLengths_.reserve(maxIndexes);
tagsLengths_.reserve(kMaxIndexes);
tagsLengths_.push_back(StartObject);

while (collectTagsSizes(pl, rdser)) {
Expand Down Expand Up @@ -149,14 +146,14 @@ bool BaseEncoder<Builder>::encode(ConstPayload* pl, Serializer& rdser, Builder&
// get field from indexed field
if (tagField >= 0) {
if (!pl) throw Error(errParams, "Trying to encode index field %d without payload", tagField);
if ((objectScalarIndexes_ & (1ULL << tagField)) && (tagType != TAG_ARRAY)) {
if (objectScalarIndexes_.test(tagField) && (tagType != TAG_ARRAY)) {
std::string fieldName;
if (tagName && tagsMatcher_) {
fieldName = tagsMatcher_->tag2name(tagName);
}
throw Error(errParams, "Non-array field '%s' [%d] from '%s' can only be encoded once.", fieldName, tagField, pl->Type().Name());
}
objectScalarIndexes_ |= (1ULL << tagField);
objectScalarIndexes_.set(tagField);
assertrx(tagField < pl->NumFields());
int* cnt = &fieldsoutcnt_[tagField];
switch (tagType) {
Expand Down Expand Up @@ -220,7 +217,7 @@ bool BaseEncoder<Builder>::encode(ConstPayload* pl, Serializer& rdser, Builder&
break;
}
case TAG_OBJECT: {
objectScalarIndexes_ = 0;
objectScalarIndexes_.reset();
if (visible) {
auto objNode = builder.Object(tagName);
while (encode(pl, rdser, objNode, true))
Expand Down
4 changes: 2 additions & 2 deletions cpp_src/core/cjson/baseencoder.h
Original file line number Diff line number Diff line change
Expand Up @@ -63,13 +63,13 @@ class BaseEncoder {
std::string_view getPlTuple(ConstPayload &pl);

const TagsMatcher *tagsMatcher_;
int fieldsoutcnt_[maxIndexes];
int fieldsoutcnt_[kMaxIndexes];
const FieldsSet *filter_;
WrSerializer tmpPlTuple_;
TagsPath curTagsPath_;
IndexedTagsPathInternalT indexedTagsPath_;
TagsLengths tagsLengths_;
uint64_t objectScalarIndexes_ = 0;
std::bitset<kMaxIndexes> objectScalarIndexes_;
};

using JsonEncoder = BaseEncoder<JsonBuilder>;
Expand Down
2 changes: 1 addition & 1 deletion cpp_src/core/cjson/cjsondecoder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ bool CJsonDecoder::decodeCJson(Payload &pl, Serializer &rdser, WrSerializer &wrs
throw Error(errLogic, "Error parsing cjson field '%s' - got value in the nested array, but expected scalar %s",
fieldRef.Name(), fieldType.Name());
} else {
pl.Set(field, {cjsonValueToVariant(tagType, rdser, fieldType)}, true);
pl.Set(field, cjsonValueToVariant(tagType, rdser, fieldType), true);
fieldType.EvaluateOneOf(
[&](OneOf<KeyValueType::Int, KeyValueType::Int64>) {
wrser.PutCTag(ctag{TAG_VARINT, tagName, field});
Expand Down
4 changes: 2 additions & 2 deletions cpp_src/core/cjson/cjsonmodifier.cc
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,10 @@ class CJsonModifier::Context {
TagsPath jsonPath;
IndexedTagsPath currObjPath;
FieldModifyMode mode;
const Payload *payload = nullptr;
bool fieldUpdated = false;
bool updateArrayElements = false;
std::array<unsigned, maxIndexes> fieldsArrayOffsets;
const Payload *payload = nullptr;
std::array<unsigned, kMaxIndexes> fieldsArrayOffsets;

private:
bool isForAllItems_ = false;
Expand Down
2 changes: 1 addition & 1 deletion cpp_src/core/cjson/cjsontools.cc
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ void copyCJsonValue(TagType tagType, Serializer &rdser, WrSerializer &wrser) {
}
}

void skipCjsonTag(ctag tag, Serializer &rdser, std::array<unsigned, maxIndexes> *fieldsArrayOffsets) {
void skipCjsonTag(ctag tag, Serializer &rdser, std::array<unsigned, kMaxIndexes> *fieldsArrayOffsets) {
const auto field = tag.Field();
const bool embeddedField = (field < 0);
switch (tag.Type()) {
Expand Down
2 changes: 1 addition & 1 deletion cpp_src/core/cjson/cjsontools.h
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ void putCJsonRef(TagType tagType, int tagName, int tagField, const VariantArray
void putCJsonValue(TagType tagType, int tagName, const VariantArray &values, WrSerializer &wrser);

[[nodiscard]] TagType kvType2Tag(KeyValueType kvType) noexcept;
void skipCjsonTag(ctag tag, Serializer &rdser, std::array<unsigned, maxIndexes> *fieldsArrayOffsets = nullptr);
void skipCjsonTag(ctag tag, Serializer &rdser, std::array<unsigned, kMaxIndexes> *fieldsArrayOffsets = nullptr);
[[nodiscard]] Variant cjsonValueToVariant(TagType tag, Serializer &rdser, KeyValueType dstType);

} // namespace reindexer
2 changes: 1 addition & 1 deletion cpp_src/core/cjson/jsondecoder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ void JsonDecoder::decodeJsonObject(Payload &pl, CJsonBuilder &builder, const gas
f.Name(), f.Type().Name());
}
Variant v = jsonValue2Variant(elem.value, f.Type(), f.Name());
pl.Set(field, {v}, true);
pl.Set(field, v, true);
builder.Ref(tagName, v, field);
} break;
}
Expand Down
2 changes: 1 addition & 1 deletion cpp_src/core/cjson/msgpackdecoder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ void MsgPackDecoder::setValue(Payload& pl, CJsonBuilder& builder, const T& value
throw Error(errLogic, "Error parsing msgpack field '%s' - got array, expected scalar %s", f.Name(), f.Type().Name());
}
Variant val(value);
pl.Set(field, {val}, true);
pl.Set(field, val, true);
builder.Ref(tagName, val, field);
} else {
builder.Put(tagName, value);
Expand Down
4 changes: 2 additions & 2 deletions cpp_src/core/cjson/protobufdecoder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ void ProtobufDecoder::setValue(Payload& pl, CJsonBuilder& builder, ProtobufValue
int field = tm_.tags2field(tagsPath_.data(), tagsPath_.size());
auto value = item.value.convert(item.itemType);
if (field > 0) {
pl.Set(field, {value}, true);
pl.Set(field, value, true);
if (item.isArray) {
arraysStorage_.UpdateArraySize(item.tagName, field);
} else {
Expand All @@ -76,7 +76,7 @@ Error ProtobufDecoder::decodeArray(Payload& pl, CJsonBuilder& builder, const Pro
if (packed) {
int count = 0;
while (!parser.IsEof()) {
pl.Set(field, {parser.ReadArrayItem(item.itemType)}, true);
pl.Set(field, parser.ReadArrayItem(item.itemType), true);
++count;
}
builder.ArrayRef(item.tagName, field, count);
Expand Down
4 changes: 2 additions & 2 deletions cpp_src/core/cjson/uuid_recoders.h
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ class RecoderStringToUuidArray : public Recoder {
void Recode(Serializer &, WrSerializer &) const override final { assertrx(0); }
void Recode(Serializer &rdser, Payload &pl, int tagName, WrSerializer &wrser) override final {
if (fromNotArrayField_) {
pl.Set(field_, {Variant{rdser.GetStrUuid()}}, true);
pl.Set(field_, Variant{rdser.GetStrUuid()}, true);
wrser.PutCTag(ctag{TAG_ARRAY, tagName, field_});
wrser.PutVarUint(1);
} else {
Expand Down Expand Up @@ -98,7 +98,7 @@ class RecoderStringToUuid : public Recoder {
[[nodiscard]] bool Match(const TagsPath &) const noexcept override final { return false; }
void Recode(Serializer &, WrSerializer &) const override final { assertrx(0); }
void Recode(Serializer &rdser, Payload &pl, int tagName, WrSerializer &wrser) override final {
pl.Set(field_, {Variant{rdser.GetStrUuid()}}, true);
pl.Set(field_, Variant{rdser.GetStrUuid()}, true);
wrser.PutCTag(ctag{TAG_UUID, tagName, field_});
}

Expand Down
Loading

0 comments on commit 6d4504f

Please sign in to comment.