Skip to content

Commit

Permalink
Merge branch 'ClickHouse:master' into add-alias
Browse files Browse the repository at this point in the history
  • Loading branch information
xiedeyantu authored Jul 26, 2023
2 parents 265e143 + a4a8c73 commit 6bb1a30
Show file tree
Hide file tree
Showing 283 changed files with 3,669 additions and 1,836 deletions.
8 changes: 7 additions & 1 deletion base/poco/Foundation/include/Poco/URI.h
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ class Foundation_API URI
URI();
/// Creates an empty URI.

explicit URI(const std::string & uri);
explicit URI(const std::string & uri, bool disable_url_encoding = false);
/// Parses an URI from the given string. Throws a
/// SyntaxException if the uri is not valid.

Expand Down Expand Up @@ -350,13 +350,19 @@ class Foundation_API URI
static const std::string ILLEGAL;

private:
void encodePath(std::string & encodedStr) const;
void decodePath(const std::string & encodedStr);


std::string _scheme;
std::string _userInfo;
std::string _host;
unsigned short _port;
std::string _path;
std::string _query;
std::string _fragment;

bool _disable_url_encoding = false;
};


Expand Down
39 changes: 29 additions & 10 deletions base/poco/Foundation/src/URI.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,8 @@ URI::URI():
}


URI::URI(const std::string& uri):
_port(0)
URI::URI(const std::string& uri, bool decode_and_encode_path):
_port(0), _disable_url_encoding(decode_and_encode_path)
{
parse(uri);
}
Expand Down Expand Up @@ -107,7 +107,8 @@ URI::URI(const URI& uri):
_port(uri._port),
_path(uri._path),
_query(uri._query),
_fragment(uri._fragment)
_fragment(uri._fragment),
_disable_url_encoding(uri._disable_url_encoding)
{
}

Expand All @@ -119,7 +120,8 @@ URI::URI(const URI& baseURI, const std::string& relativeURI):
_port(baseURI._port),
_path(baseURI._path),
_query(baseURI._query),
_fragment(baseURI._fragment)
_fragment(baseURI._fragment),
_disable_url_encoding(baseURI._disable_url_encoding)
{
resolve(relativeURI);
}
Expand Down Expand Up @@ -151,6 +153,7 @@ URI& URI::operator = (const URI& uri)
_path = uri._path;
_query = uri._query;
_fragment = uri._fragment;
_disable_url_encoding = uri._disable_url_encoding;
}
return *this;
}
Expand Down Expand Up @@ -181,6 +184,7 @@ void URI::swap(URI& uri)
std::swap(_path, uri._path);
std::swap(_query, uri._query);
std::swap(_fragment, uri._fragment);
std::swap(_disable_url_encoding, uri._disable_url_encoding);
}


Expand All @@ -201,7 +205,7 @@ std::string URI::toString() const
std::string uri;
if (isRelative())
{
encode(_path, RESERVED_PATH, uri);
encodePath(uri);
}
else
{
Expand All @@ -217,7 +221,7 @@ std::string URI::toString() const
{
if (!auth.empty() && _path[0] != '/')
uri += '/';
encode(_path, RESERVED_PATH, uri);
encodePath(uri);
}
else if (!_query.empty() || !_fragment.empty())
{
Expand Down Expand Up @@ -313,7 +317,7 @@ void URI::setAuthority(const std::string& authority)
void URI::setPath(const std::string& path)
{
_path.clear();
decode(path, _path);
decodePath(path);
}


Expand Down Expand Up @@ -418,7 +422,7 @@ void URI::setPathEtc(const std::string& pathEtc)
std::string URI::getPathEtc() const
{
std::string pathEtc;
encode(_path, RESERVED_PATH, pathEtc);
encodePath(pathEtc);
if (!_query.empty())
{
pathEtc += '?';
Expand All @@ -436,7 +440,7 @@ std::string URI::getPathEtc() const
std::string URI::getPathAndQuery() const
{
std::string pathAndQuery;
encode(_path, RESERVED_PATH, pathAndQuery);
encodePath(pathAndQuery);
if (!_query.empty())
{
pathAndQuery += '?';
Expand Down Expand Up @@ -681,6 +685,21 @@ void URI::decode(const std::string& str, std::string& decodedStr, bool plusAsSpa
}
}

/// Appends the (possibly percent-encoded) path to encodedStr.
///
/// Poco's URI::encode() *appends* its output to the target string; the
/// disabled-encoding branch must do the same. Using plain assignment here
/// would clobber anything already written into encodedStr — in particular,
/// toString() builds "scheme://authority" first and then calls
/// encodePath(uri), so assignment would silently drop the scheme and
/// authority whenever _disable_url_encoding is set.
void URI::encodePath(std::string & encodedStr) const
{
	if (_disable_url_encoding)
		encodedStr += _path; // pass the raw path through untouched, preserving any prefix
	else
		encode(_path, RESERVED_PATH, encodedStr); // percent-encode reserved path characters
}

/// Stores the given path into _path, percent-decoding it unless URL
/// encoding/decoding has been disabled for this URI.
///
/// NOTE(review): decode() appends into _path while the disabled branch
/// assigns; the visible callers (setPath clears _path first, parsePath
/// decodes a freshly collected path) make the two equivalent — confirm if
/// new call sites are added.
void URI::decodePath(const std::string & encodedStr)
{
	if (!_disable_url_encoding)
	{
		decode(encodedStr, _path); // resolve %XX escapes into _path
		return;
	}
	_path = encodedStr; // keep the path exactly as given
}

bool URI::isWellKnownPort() const
{
Expand Down Expand Up @@ -820,7 +839,7 @@ void URI::parsePath(std::string::const_iterator& it, const std::string::const_it
{
std::string path;
while (it != end && *it != '?' && *it != '#') path += *it++;
decode(path, _path);
decodePath(path);
}


Expand Down
2 changes: 1 addition & 1 deletion contrib/idxd-config
Submodule idxd-config updated 54 files
+5 −0 Documentation/accfg/accel-config-config-device.txt
+14 −0 Documentation/accfg/accel-config-config-group.txt
+11 −0 Documentation/accfg/accel-config-config-wq.txt
+31 −22 Documentation/accfg/accel-config-list.txt
+5 −9 README.md
+1 −1 accfg-test.spec.in
+69 −1 accfg/config.c
+98 −5 accfg/config_attr.c
+187 −1 accfg/idxd.h
+0 −7 accfg/lib/Makefile.am
+12 −8 accfg/lib/libaccel-config.sym
+180 −9 accfg/lib/libaccfg.c
+5 −1 accfg/lib/private.h
+25 −2 accfg/libaccel_config.h
+20 −0 accfg/list.c
+0 −12 contrib/accel-config.conf.sample
+0 −6 contrib/configs/app_profile.conf
+0 −12 contrib/configs/net_profile.conf
+0 −3 contrib/configs/os_profile.conf
+0 −4 contrib/configs/profilenote.txt
+0 −6 contrib/configs/storage_profile.conf
+8 −3 debdch.sh
+1 −1 debian/accel-config-test.install
+114 −0 debian/changelog
+18 −13 debian/control
+4 −4 debian/copyright
+2 −2 debian/libaccel-config-dev.install
+1 −1 debian/libaccel-config1.install
+7 −1 debian/rules
+1 −1 git-version
+7 −5 test/Makefile.am
+32 −6 test/accel_test.c
+49 −0 test/accel_test.h
+111 −0 test/algorithms/iaa_crypto.c
+37 −0 test/algorithms/iaa_crypto.h
+89 −10 test/algorithms/iaa_zcompress.c
+5 −0 test/algorithms/iaa_zcompress.h
+9 −5 test/common
+2 −12 test/configs/2g2q_user_2.conf
+190 −29 test/dsa.c
+12 −2 test/dsa.h
+232 −0 test/dsa_config_test_runner.sh
+0 −1 test/dsa_memmove.sh
+92 −0 test/dsa_memmove.sh
+47 −0 test/dsa_prep.c
+213 −4 test/dsa_test.c
+881 −10 test/iaa.c
+15 −0 test/iaa.h
+59 −0 test/iaa_prep.c
+178 −1 test/iaa_test.c
+124 −1 test/iaa_user_test_runner.sh
+103 −21 test/libaccfg.c
+102 −0 test_all.sh
+7 −2 util/help.c
+58 −22 util/json.c
2 changes: 1 addition & 1 deletion contrib/qpl
Submodule qpl updated 55 files
+1 −1 CMakeLists.txt
+1 −1 Doxyfile
+1 −1 doc/source/conf.py
+1 −1 doc/source/documentation/get_started_docs/installation.rst
+1 −1 doc/source/documentation/introduction_docs/introduction.rst
+1 −1 examples/low-level-api/CMakeLists.txt
+26 −27 examples/low-level-api/compression_multi_chunk_example.cpp
+2 −1 sources/c_api/compression_operations/compressor.cpp
+14 −3 sources/c_api/compression_operations/decompressor.cpp
+2 −0 sources/c_api/legacy_hw_path/hardware_defs.h
+3 −2 sources/c_api/legacy_hw_path/hardware_state.h
+17 −2 sources/c_api/legacy_hw_path/qpl_hw_check_job.cpp
+78 −7 sources/c_api/legacy_hw_path/qpl_hw_compress_job.cpp
+33 −115 sources/c_api/legacy_hw_path/qpl_hw_inflate_job.cpp
+46 −2 sources/c_api/legacy_hw_path/qpl_hw_submit_job.cpp
+9 −0 sources/c_api/serialization/huffman_table_serialization.cpp
+86 −33 sources/core-iaa/include/hw_aecs_api.h
+2 −0 sources/core-iaa/include/hw_configuration_driver.h
+37 −0 sources/core-iaa/include/hw_descriptors_api.h
+11 −0 sources/core-iaa/include/hw_devices.h
+2 −2 sources/core-iaa/include/hw_status.h
+82 −41 sources/core-iaa/include/libaccel_config.h
+1 −1 sources/core-iaa/sources/aecs/hw_aecs_compress.c
+280 −12 sources/core-iaa/sources/aecs/hw_aecs_decompress.c
+1 −1 sources/core-iaa/sources/descriptors/hw_analytic_descriptor_operations.c
+101 −2 sources/core-iaa/sources/descriptors/hw_compress_descriptor.c
+3 −3 sources/core-iaa/sources/descriptors/hw_decompress_descriptor.c
+20 −2 sources/core-iaa/sources/driver_loader/hw_configuration_driver.c
+8 −6 sources/core-iaa/sources/include/own_hw_definitions.h
+6 −6 sources/core-sw/include/qplc_compression_consts.h
+23 −6 sources/core-sw/include/qplc_huffman_table.h
+93 −25 sources/middle-layer/compression/deflate/deflate.cpp
+3 −2 sources/middle-layer/compression/deflate/streams/deflate_state_builder.hpp
+17 −6 sources/middle-layer/compression/deflate/streams/hw_deflate_state.hpp
+13 −1 sources/middle-layer/compression/huffman_only/huffman_only_decompression_state.hpp
+14 −14 sources/middle-layer/compression/huffman_table/huffman_table.hpp
+92 −36 sources/middle-layer/compression/huffman_table/huffman_table_utils.cpp
+3 −3 sources/middle-layer/compression/huffman_table/inflate_huffman_table.cpp
+1 −1 sources/middle-layer/compression/huffman_table/inflate_huffman_table.hpp
+21 −3 sources/middle-layer/compression/huffman_table/serialization_utils.cpp
+6 −0 sources/middle-layer/compression/inflate/inflate_defs.hpp
+43 −10 sources/middle-layer/compression/inflate/inflate_state.hpp
+17 −10 sources/middle-layer/compression/stream_decorators/gzip_decorator.cpp
+9 −5 sources/middle-layer/compression/stream_decorators/gzip_decorator.hpp
+26 −0 sources/middle-layer/dispatcher/hw_device.cpp
+8 −1 sources/middle-layer/dispatcher/hw_device.hpp
+36 −0 sources/middle-layer/util/aecs_format_checker.hpp
+6 −1 tools/benchmarks/include/utility.hpp
+9 −12 tools/tests/functional/algorithmic_tests/low_level_api/deflate_huffman_only.cpp
+1 −0 tools/utils/common/command_line.hpp
+1 −0 tools/utils/common/dataset.hpp
+14 −0 tools/utils/common/util.hpp
+0 −2 tools/utils/generators/configurators/base_configurator.hpp
+3 −3 tools/utils/generators/prle_generator.cpp
+1 −0 tools/utils/generators/random_generator.h
3 changes: 2 additions & 1 deletion docker/test/stress/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ ln -s /usr/share/clickhouse-test/clickhouse-test /usr/bin/clickhouse-test

# Stress tests and upgrade check uses similar code that was placed
# in a separate bash library. See tests/ci/stress_tests.lib
source /usr/share/clickhouse-test/ci/attach_gdb.lib
source /usr/share/clickhouse-test/ci/stress_tests.lib

install_packages package_folder
Expand Down Expand Up @@ -52,7 +53,7 @@ azurite-blob --blobHost 0.0.0.0 --blobPort 10000 --debug /azurite_log &

start

shellcheck disable=SC2086 # No quotes because I want to split it into words.
# shellcheck disable=SC2086 # No quotes because I want to split it into words.
/s3downloader --url-prefix "$S3_URL" --dataset-names $DATASETS
chmod 777 -R /var/lib/clickhouse
clickhouse-client --query "ATTACH DATABASE IF NOT EXISTS datasets ENGINE = Ordinary"
Expand Down
3 changes: 3 additions & 0 deletions docker/test/upgrade/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ ln -s /usr/share/clickhouse-test/ci/get_previous_release_tag.py /usr/bin/get_pre

# Stress tests and upgrade check uses similar code that was placed
# in a separate bash library. See tests/ci/stress_tests.lib
source /usr/share/clickhouse-test/ci/attach_gdb.lib
source /usr/share/clickhouse-test/ci/stress_tests.lib

azurite-blob --blobHost 0.0.0.0 --blobPort 10000 --debug /azurite_log &
Expand Down Expand Up @@ -61,6 +62,7 @@ configure

# it contains some new settings, but we can safely remove it
rm /etc/clickhouse-server/config.d/merge_tree.xml
rm /etc/clickhouse-server/config.d/enable_wait_for_shutdown_replicated_tables.xml
rm /etc/clickhouse-server/users.d/nonconst_timezone.xml

start
Expand Down Expand Up @@ -90,6 +92,7 @@ sudo chgrp clickhouse /etc/clickhouse-server/config.d/s3_storage_policy_by_defau

# it contains some new settings, but we can safely remove it
rm /etc/clickhouse-server/config.d/merge_tree.xml
rm /etc/clickhouse-server/config.d/enable_wait_for_shutdown_replicated_tables.xml
rm /etc/clickhouse-server/users.d/nonconst_timezone.xml

start
Expand Down
8 changes: 2 additions & 6 deletions docs/en/development/building_and_benchmarking_deflate_qpl.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,8 @@ description: How to build Clickhouse and run benchmark with DEFLATE_QPL Codec

# Build Clickhouse with DEFLATE_QPL

- Make sure your target machine meet the QPL required [prerequisites](https://intel.github.io/qpl/documentation/get_started_docs/installation.html#prerequisites)
- Pass the following flag to CMake when building ClickHouse:

``` bash
cmake -DENABLE_QPL=1 ..
```
- Make sure your host machine meets the QPL required [prerequisites](https://intel.github.io/qpl/documentation/get_started_docs/installation.html#prerequisites)
- deflate_qpl is enabled by default during cmake build. In case you accidentally change it, please double-check build flag: ENABLE_QPL=1

- For generic requirements, please refer to Clickhouse generic [build instructions](/docs/en/development/build.md)

Expand Down
21 changes: 3 additions & 18 deletions docs/en/engines/table-engines/integrations/s3.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,8 @@ Notice that the S3 endpoint in the `ENGINE` configuration uses the parameter tok

:::note
As shown in the example, querying from S3 tables that are partitioned is
not directly supported at this time, but can be accomplished by querying the bucket contents with a wildcard.
not directly supported at this time, but can be accomplished by querying the individual partitions
using the S3 table function.

The primary use-case for writing
partitioned data in S3 is to enable transferring that data into another
Expand Down Expand Up @@ -127,23 +128,7 @@ FROM s3('http://minio:10000/clickhouse//test_45.csv', 'minioadmin', 'minioadminp
└────┴────┴────┘
```

#### Select from all partitions

```sql
SELECT *
FROM s3('http://minio:10000/clickhouse//**', 'minioadmin', 'minioadminpassword', 'CSV')
```
```response
┌─c1─┬─c2─┬─c3─┐
│ 3 │ 2 │ 1 │
└────┴────┴────┘
┌─c1─┬─c2─┬─c3─┐
│ 1 │ 2 │ 3 │
└────┴────┴────┘
┌─c1─┬─c2─┬─c3─┐
│ 78 │ 43 │ 45 │
└────┴────┴────┘
```
#### Limitation

You may naturally try to `Select * from p`, but as noted above, this query will fail; use the preceding query.

Expand Down
1 change: 1 addition & 0 deletions docs/en/engines/table-engines/special/url.md
Original file line number Diff line number Diff line change
Expand Up @@ -106,3 +106,4 @@ For partitioning by month, use the `toYYYYMM(date_column)` expression, where `da
## Storage Settings {#storage-settings}

- [engine_url_skip_empty_files](/docs/en/operations/settings/settings.md#engine_url_skip_empty_files) - allows to skip empty files while reading. Disabled by default.
- [disable_url_encoding](/docs/en/operations/settings/settings.md#disable_url_encoding) - allows to disable decoding/encoding of the path in the URI. Disabled by default.
18 changes: 9 additions & 9 deletions docs/en/interfaces/http.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ Connection: Close
Content-Type: text/tab-separated-values; charset=UTF-8
X-ClickHouse-Server-Display-Name: clickhouse.ru-central1.internal
X-ClickHouse-Query-Id: 5abe861c-239c-467f-b955-8a201abb8b7f
X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0"}
X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","peak_memory_usage":"0"}

1
```
Expand Down Expand Up @@ -286,9 +286,9 @@ Similarly, you can use ClickHouse sessions in the HTTP protocol. To do this, you
You can receive information about the progress of a query in `X-ClickHouse-Progress` response headers. To do this, enable [send_progress_in_http_headers](../operations/settings/settings.md#settings-send_progress_in_http_headers). Example of the header sequence:

``` text
X-ClickHouse-Progress: {"read_rows":"2752512","read_bytes":"240570816","total_rows_to_read":"8880128"}
X-ClickHouse-Progress: {"read_rows":"5439488","read_bytes":"482285394","total_rows_to_read":"8880128"}
X-ClickHouse-Progress: {"read_rows":"8783786","read_bytes":"819092887","total_rows_to_read":"8880128"}
X-ClickHouse-Progress: {"read_rows":"2752512","read_bytes":"240570816","total_rows_to_read":"8880128","peak_memory_usage":"4371480"}
X-ClickHouse-Progress: {"read_rows":"5439488","read_bytes":"482285394","total_rows_to_read":"8880128","peak_memory_usage":"13621616"}
X-ClickHouse-Progress: {"read_rows":"8783786","read_bytes":"819092887","total_rows_to_read":"8880128","peak_memory_usage":"23155600"}
```

Possible header fields:
Expand Down Expand Up @@ -416,7 +416,7 @@ $ curl -v 'http://localhost:8123/predefined_query'
< X-ClickHouse-Format: Template
< X-ClickHouse-Timezone: Asia/Shanghai
< Keep-Alive: timeout=3
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0"}
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","peak_memory_usage":"0"}
<
# HELP "Query" "Number of executing queries"
# TYPE "Query" counter
Expand Down Expand Up @@ -581,7 +581,7 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/hi'
< Content-Type: text/html; charset=UTF-8
< Transfer-Encoding: chunked
< Keep-Alive: timeout=3
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0"}
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","peak_memory_usage":"0"}
<
* Connection #0 to host localhost left intact
Say Hi!%
Expand Down Expand Up @@ -621,7 +621,7 @@ $ curl -v -H 'XXX:xxx' 'http://localhost:8123/get_config_static_handler'
< Content-Type: text/plain; charset=UTF-8
< Transfer-Encoding: chunked
< Keep-Alive: timeout=3
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0"}
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","peak_memory_usage":"0"}
<
* Connection #0 to host localhost left intact
<html ng-app="SMI2"><head><base href="http://ui.tabix.io/"></head><body><div ui-view="" class="content-ui"></div><script src="http://loader.tabix.io/master.js"></script></body></html>%
Expand Down Expand Up @@ -673,7 +673,7 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/get_absolute_path_static_handler'
< Content-Type: text/html; charset=UTF-8
< Transfer-Encoding: chunked
< Keep-Alive: timeout=3
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0"}
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","peak_memory_usage":"0"}
<
<html><body>Absolute Path File</body></html>
* Connection #0 to host localhost left intact
Expand All @@ -692,7 +692,7 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/get_relative_path_static_handler'
< Content-Type: text/html; charset=UTF-8
< Transfer-Encoding: chunked
< Keep-Alive: timeout=3
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0"}
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","peak_memory_usage":"0"}
<
<html><body>Relative Path File</body></html>
* Connection #0 to host localhost left intact
Expand Down
34 changes: 34 additions & 0 deletions docs/en/operations/configuration-files.md
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,40 @@ XML substitution example:

Substitutions can also be performed from ZooKeeper. To do this, specify the attribute `from_zk = "/path/to/node"`. The element value is replaced with the contents of the node at `/path/to/node` in ZooKeeper. You can also put an entire XML subtree on the ZooKeeper node and it will be fully inserted into the source element.

## Encrypting Configuration {#encryption}

You can use symmetric encryption to encrypt a configuration element, for example, a password field. To do so, first configure the [encryption codec](../sql-reference/statements/create/table.md#encryption-codecs), then add attribute `encryption_codec` with the name of the encryption codec as value to the element to encrypt.

Unlike attributes `from_zk`, `from_env` and `incl` (or element `include`), no substitution, i.e. decryption of the encrypted value, is performed in the preprocessed file. Decryption happens only at runtime in the server process.

Example:

```xml
<clickhouse>
<encryption_codecs>
<aes_128_gcm_siv>
<key_hex>00112233445566778899aabbccddeeff</key_hex>
</aes_128_gcm_siv>
</encryption_codecs>
<interserver_http_credentials>
<user>admin</user>
<password encryption_codec="AES_128_GCM_SIV">961F000000040000000000EEDDEF4F453CFE6457C4234BD7C09258BD651D85</password>
</interserver_http_credentials>
</clickhouse>
```

To get the encrypted value `encrypt_decrypt` example application may be used.

Example:

``` bash
./encrypt_decrypt /etc/clickhouse-server/config.xml -e AES_128_GCM_SIV abcd
```

``` text
961F000000040000000000EEDDEF4F453CFE6457C4234BD7C09258BD651D85
```

## User Settings {#user-settings}

The `config.xml` file can specify a separate config with user settings, profiles, and quotas. The relative path to this config is set in the `users_config` element. By default, it is `users.xml`. If `users_config` is omitted, the user settings, profiles, and quotas are specified directly in `config.xml`.
Expand Down
6 changes: 6 additions & 0 deletions docs/en/operations/settings/settings.md
Original file line number Diff line number Diff line change
Expand Up @@ -3468,6 +3468,12 @@ Possible values:

Default value: `0`.

## disable_url_encoding {#disable_url_encoding}

Allows to disable decoding/encoding of the path in the URI for [URL](../../engines/table-engines/special/url.md) engine tables.

Disabled by default.

## database_atomic_wait_for_drop_and_detach_synchronously {#database_atomic_wait_for_drop_and_detach_synchronously}

Adds a modifier `SYNC` to all `DROP` and `DETACH` queries.
Expand Down
2 changes: 2 additions & 0 deletions docs/en/sql-reference/aggregate-functions/reference/any.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,5 @@ To get a determinate result, you can use the ‘min’ or ‘max’ function ins
In some cases, you can rely on the order of execution. This applies to cases when SELECT comes from a subquery that uses ORDER BY.

When a `SELECT` query has the `GROUP BY` clause or at least one aggregate function, ClickHouse (in contrast to MySQL) requires that all expressions in the `SELECT`, `HAVING`, and `ORDER BY` clauses be calculated from keys or from aggregate functions. In other words, each column selected from the table must be used either in keys or inside aggregate functions. To get behavior like in MySQL, you can put the other columns in the `any` aggregate function.

- Alias: `any_value`
Loading

0 comments on commit 6bb1a30

Please sign in to comment.