-
Notifications
You must be signed in to change notification settings - Fork 117
/
go.metrictest.sh
executable file
·60 lines (48 loc) · 2.98 KB
/
go.metrictest.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
#!/usr/bin/env bash
# End-to-end test of clickhouse_sinker's Prometheus-metric ingestion:
#   1. (re)create the metric/series test tables on ClickHouse cluster "abc",
#   2. push pre-generated metric data into Kafka (via the "kafka" container),
#   3. run clickhouse_sinker for up to 30s to consume the topic,
#   4. verify the resulting table schemas and row counts.
# Exits 1 on the first failed check.
#
# Requirements: a local ClickHouse HTTP endpoint on :58123, a docker
# container named "kafka", and ./bin/clickhouse_sinker built beforehand.

# ClickHouse HTTP endpoint used by every curl below.
ch="localhost:58123"

echo "create database"
curl "$ch" -d "CREATE DATABASE IF NOT EXISTS gauge ON CLUSTER abc"
echo "create metric test tables"
curl "$ch" -d "DROP TABLE IF EXISTS test_prom_metric ON CLUSTER abc SYNC"
curl "$ch" -d "CREATE TABLE test_prom_metric ON CLUSTER abc
(
__series_id__ Int64,
timestamp DateTime CODEC(DoubleDelta, LZ4),
value Float32 CODEC(ZSTD(15))
) ENGINE=ReplicatedReplacingMergeTree()
PARTITION BY toYYYYMMDD(timestamp)
ORDER BY (__series_id__, timestamp);"
curl "$ch" -d "DROP TABLE IF EXISTS dist_test_prom_metric ON CLUSTER abc SYNC"
curl "$ch" -d "CREATE TABLE dist_test_prom_metric ON CLUSTER abc AS test_prom_metric ENGINE = Distributed(abc, default, test_prom_metric);"
curl "$ch" -d "DROP TABLE IF EXISTS test_prom_series ON CLUSTER abc SYNC"
curl "$ch" -d "CREATE TABLE test_prom_series ON CLUSTER abc
(
__series_id__ Int64,
__mgmt_id__ Int64,
labels String,
__name__ String
) ENGINE=ReplicatedReplacingMergeTree()
ORDER BY (__name__, __series_id__);"
curl "$ch" -d "DROP TABLE IF EXISTS dist_test_prom_series ON CLUSTER abc SYNC"
curl "$ch" -d "CREATE TABLE dist_test_prom_series ON CLUSTER abc AS test_prom_series ENGINE = Distributed(abc, default, test_prom_series);"

echo "send messages to kafka"
echo "cat /tmp/test_prom_metric.data | kafka-console-producer --topic test_metric_topic --broker-list localhost:9092" > send.sh
# data generated by "./kafka_gen_prom 192.168.110.10:19092 TestMetric"
sudo docker cp ./docker/test_prom_metric.data kafka:/tmp/
sudo docker cp send.sh kafka:/tmp/
# delete the topic first so each run starts from a clean offset
sudo docker exec kafka kafka-topics --bootstrap-server localhost:9093 --topic test_metric_topic --delete
sudo docker exec kafka sh /tmp/send.sh

echo "start clickhouse_sinker to consume"
timeout 30 ./bin/clickhouse_sinker --local-cfg-file docker/test_prom_metric.hjson

# Flatten DESC output into a single sorted, whitespace-free, comma-joined
# line so it can be compared against the expected schema string.
# NOTE: quoting "$schema"/"$count" below matters — if a curl fails and the
# variable is empty, an unquoted operand turns the test into a '[' syntax
# error instead of a clean failure.
schema=$(curl "$ch" -d 'DESC test_prom_metric' 2>/dev/null | sort | tr -d '\t' | tr -d ' ' | tr '\n' ',')
echo "Got test_prom_metric schema => $schema"
[ "$schema" = "__series_id__Int64,timestampDateTimeDoubleDelta,LZ4,value1Nullable(Float64),value2Nullable(Int64),value3Nullable(Bool),valueFloat32ZSTD(15)," ] || exit 1
schema=$(curl "$ch" -d 'DESC test_prom_series' 2>/dev/null | sort | tr -d '\t' | tr -d ' ' | tr '\n' ',')
echo "Got test_prom_series schema => $schema"
[ "$schema" = "key_0Nullable(String),key_1Nullable(String),key_2Nullable(String),key_4Nullable(String),key_5Nullable(String),key_6Nullable(String),key_7Nullable(String),key_8Nullable(String),key_9Nullable(String),labelsString,__mgmt_id__Int64,__name__String,__series_id__Int64," ] || exit 1

echo "check result 1"
count=$(curl "$ch" -d 'select count() from dist_test_prom_metric')
echo "Got test_prom_metric count => $count"
# Only an upper bound is checked here — presumably because ReplacingMergeTree
# dedup may not have merged yet; TODO confirm against the data generator.
[ "$count" -le 10000 ] || exit 1
count=$(curl "$ch" -d 'select count() from dist_test_prom_series')
echo "Got test_prom_series count => $count"
[ "$count" -eq 1000 ] || exit 1