Merge pull request #48 from anasoid/multiline-file
Multiline file
anasoid authored Jun 9, 2023
2 parents e85425d + 976f439 commit 90f03be
Showing 16 changed files with 232 additions and 74 deletions.
40 changes: 5 additions & 35 deletions .github/workflows/docker.yml
@@ -46,40 +46,6 @@ jobs:
prefix: ""
tags-latest: "8.4"

- output: "influxdb"
logstash-version: 7.17.9
logstash-branch: 7.17
elasticsearch-version: 7
prefix: "influxdb-"
tags-latest: "influxdb"

- output: "influxdb"
logstash-version: 8.1.3
logstash-branch: 8.1
elasticsearch-version: 8
prefix: "influxdb-"
tags-latest: "influxdb-8.1"

- output: "influxdb"
logstash-version: 8.2.3
logstash-branch: 8.2
elasticsearch-version: 8
prefix: "influxdb-"
tags-latest: "influxdb"

- output: "influxdb"
logstash-version: 8.3.3
logstash-branch: 8.3
elasticsearch-version: 8
prefix: "influxdb-"
tags-latest: "influxdb-8.3"

- output: "influxdb"
logstash-version: 8.4.2
logstash-branch: 8.4
elasticsearch-version: 8
prefix: "influxdb-"
tags-latest: "influxdb-8.4"


# v2 https://github.com/docker/build-push-action/blob/master/UPGRADE.md
@@ -146,7 +112,11 @@ jobs:
- name: Result 📦
id: result
run: |
echo ::set-output "currentImage=${{ steps.prep.outputs.image }}:${{ matrix.logstash-branch }}" >> $GITHUB_OUTPUT
echo "currentImage=${{ steps.prep.outputs.image }}:${{ matrix.logstash-branch }}" >> $GITHUB_OUTPUT
- name: Validate config ☁️

run: docker run --rm -e "FILE_EXIT_AFTER_READ=false" ${{ steps.result.outputs.currentImage }} -t

#Publish on master
- name: Login to DockerHub 📦
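
The validation step added above runs the freshly built image with a trailing `-t`, which, assuming the custom entrypoint forwards its arguments to Logstash, maps to `--config.test_and_exit`, so a broken pipeline definition fails the workflow before anything is published. A local equivalent (image name illustrative):

    docker run --rm -e "FILE_EXIT_AFTER_READ=false" anasoid/jmeter-logstash:8.4 -t
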
1 change: 1 addition & 0 deletions .github/workflows/test.yml
@@ -46,6 +46,7 @@ jobs:
run: |
mkdir build
cp -r config build
cp -r docker build
cp -r testing/config/* build/config/
rm build/config/pipeline/output-elastic-logstash.conf
mkdir -p $PATH_OUT_BASIC
2 changes: 1 addition & 1 deletion README.md
@@ -186,7 +186,7 @@ docker run --rm -it -e "INFLUXDB_PORT=9090" -e "INFLUXDB_HOST=localhost" -v ${PW

| Environment variables | Description | Default |
| -------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------- |
| `ELASTICSEARCH_HOSTS` | Elasticsearch output configuration [hosts](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-hosts) | <http://elasticsearch:9200> |
| `ELASTICSEARCH_HOSTS` | Elasticsearch output configuration [hosts](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-hosts) (ex: http://elasticsearch:9200 ) | |
| `ELASTICSEARCH_INDEX` | Elasticsearch output configuration [index](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-index) | jmeter-jtl-%{+YYYY.MM.dd} |
| `ELASTICSEARCH_USER` | Elasticsearch output configuration [user](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-user) | |
| `ELASTICSEARCH_PASSWORD` | Elasticsearch output configuration [password](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-password) | |
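
Note that `ELASTICSEARCH_HOSTS` no longer ships with a baked-in default: the Default column is now empty, and the output configs below read `${ELASTICSEARCH_HOSTS:}`, so the variable must be set explicitly. A minimal sketch (image name and host URL illustrative):

    docker run --rm -e "ELASTICSEARCH_HOSTS=http://elasticsearch:9200" -v ${PWD}/input:/input anasoid/jmeter-logstash
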
15 changes: 15 additions & 0 deletions config/pipeline-in-csv/filter-wait-logstash.conf
@@ -0,0 +1,15 @@
filter {
if !([message] =~ /^timeStamp/) {
ruby {
init => "$count_event=0"
code => "
if $count_event < 1
sleep(5)
$count_event += 1
end
"
tag_on_exception => "_rubyexception_filtred_sleep"
}
}

}
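
This new filter guards the hand-off to the main pipeline: header lines (anything matching `^timeStamp`) pass straight through, while the first data event is held for five seconds, presumably to give the downstream pipeline time to come up before results start flowing; every later event passes undelayed. Sample lines (field values illustrative):

    timeStamp,elapsed,label,responseCode,...   header, passes immediately
    1686300000000,123,Home Page,200,...        first data row, delayed ~5s
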
22 changes: 22 additions & 0 deletions config/pipeline-in-csv/input-csv-logstash.conf
@@ -0,0 +1,22 @@
#start_position => string, one of ["beginning", "end"]
#mode => string, one of ["tail", "read"]
#file_completed_action : delete, log, log_and_delete
input {
file {
id => "jtl-input"
path => ["${INPUT_PATH:/input}/**.jtl","${INPUT_PATH_JTL:/input}/**.jtl"]
mode => "${FILE_READ_MODE:tail}"
start_position => "${FILE_START_POSITION:beginning}"
exit_after_read => "${FILE_EXIT_AFTER_READ:false}"
file_completed_action => "${FILE_COMPLETED_ACTION:log}"
file_completed_log_path => "file_completed_log_path.log"
codec => multiline {
pattern => "^\d{4}"
negate => true
what => "previous"
}
}


}

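
This input carries the multiline fix the PR is named for: with `negate => true` and `what => "previous"`, any physical line that does not begin with four digits is appended to the preceding line, so a JTL row whose response message contains embedded newlines reaches the downstream csv filter as one logical record (JTL rows begin with an epoch-millisecond timestamp). For example (sample rows, values illustrative):

    1686300000000,123,Login,200,OK,...                 <- one event
    1686300000456,456,Search,500,"Assertion failed:    <- next event starts here
    expected <200>                                     <- appended to it
    but was <500>",...                                 <- appended to it
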
5 changes: 5 additions & 0 deletions config/pipeline-in-csv/output-next-pipeline-logstash.conf
@@ -0,0 +1,5 @@
output {
pipeline {
send_to => ["main-jtl"]
}
}
3 changes: 2 additions & 1 deletion config/pipeline/filter-a-config-fields-logstash.conf
@@ -18,8 +18,9 @@ filter {
"[@metadata][remove_sampler]" => "${PARSE_REMOVE_SAMPLER:false}"
"[@metadata][with_subresult]" => "${PARSE_WITH_FLAG_SUBRESULT:true}"
"[@metadata][metadata]" => "${TEST_METADATA:undefined}"
"[@metadata][standalone]" => "${STANDALONE:false}"

}

}
}
}
22 changes: 22 additions & 0 deletions config/pipeline/filter-a-jtl-csv-filter-logstash.conf
@@ -0,0 +1,22 @@
filter {
if [origin] == "jtl" {

csv {
autodetect_column_names => true
separator => "${CSV_SEPARATOR:,}"
convert => {
"Connect" => "integer"
"bytes" => "integer"
"IdleTime" => "integer"
"Latency" => "integer"
"sentBytes" => "integer"
"elapsed" => "integer"
"allThreads" => "integer"
"grpThreads" => "integer"
"success" => "boolean"
"SampleCount" => "integer"
"ErrorCount" => "integer"
}
}
}
}
28 changes: 20 additions & 8 deletions config/pipeline/filter-oo-jtl-thread-logstash.conf
@@ -4,16 +4,28 @@ filter {

#parse ThreadGroup
if [threadName] {
grok {
match => {
"threadName" => [
'\A%{IPORHOST:workerNode}-%{DATA:threadGrpName} %{INT:threadGrpId}-%{INT:threadNumber}\Z' ,
'\A%{HOSTPORT:workerNode}-%{DATA:threadGrpName} %{INT:threadGrpId}-%{INT:threadNumber}\Z' ,
'\A%{DATA:threadGrpName} %{INT:threadGrpId}-%{INT:threadNumber}\Z']
if [@metadata][standalone] != "false" {
grok {
match => {
"threadName" => ['\A%{DATA:threadGrpName} %{INT:threadGrpId}-%{INT:threadNumber}\Z']
}
tag_on_failure => ["_grokparsefailure","_grokparsefailure_threadName"]

}
tag_on_failure => ["_grokparsefailure","_grokparsefailure_threadName"]
}
else {
grok {
match => {
"threadName" => [
'\A%{IPORHOST:workerNode}-%{DATA:threadGrpName} %{INT:threadGrpId}-%{INT:threadNumber}\Z' ,
'\A%{HOSTPORT:workerNode}-%{DATA:threadGrpName} %{INT:threadGrpId}-%{INT:threadNumber}\Z' ,
'\A%{DATA:threadGrpName} %{INT:threadGrpId}-%{INT:threadNumber}\Z']
}
tag_on_failure => ["_grokparsefailure","_grokparsefailure_threadName"]

}
}

mutate {
convert => {
"threadGrpId" => "integer"
@@ -31,4 +43,4 @@ filter {

}

}
}
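
The grok now branches on the `STANDALONE` flag introduced above: standalone runs get only the plain pattern, while distributed runs keep the patterns that try to peel an injector-host prefix off the thread name. Sample `threadName` values each branch is meant to match (hostname illustrative):

    Thread Group 1-3                       standalone: threadGrpName, threadGrpId=1, threadNumber=3
    host-1.example.com-Thread Group 1-3    distributed: workerNode=host-1.example.com
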
2 changes: 1 addition & 1 deletion config/pipeline/filter-oo-path-logstash.conf
@@ -12,7 +12,7 @@ filter {
}

grok {
patterns_dir => ["//usr/share/logstash/pipeline//pattern/grok"]
patterns_dir => ["/usr/share/logstash/pipeline/pattern/grok"]
match => {
"filename" => ['\A%{TESTNAME:testname}-%{ENVIRONMENT:environment}-%{EXECUTIOND:executionid}\Z',
'\A%{TESTNAME:testname}-%{EXECUTIOND:executionid}\Z','\A%{USERNAME:testname}\Z' ]
29 changes: 5 additions & 24 deletions config/pipeline/input-file-logstash.conf
@@ -1,36 +1,16 @@
#start_position => string, one of ["beginning", "end"]
#mode => string, one of ["tail", "read"]
#file_completed_action : delete, log, log_and_delete

input {
file {
pipeline {
id => "jtl-input"
path => ["${INPUT_PATH:/input}/**.jtl","${INPUT_PATH_JTL:/input}/**.jtl"]
mode => "${FILE_READ_MODE:tail}"
start_position => "${FILE_START_POSITION:beginning}"
exit_after_read => "${FILE_EXIT_AFTER_READ:false}"
file_completed_action => "${FILE_COMPLETED_ACTION:log}"
file_completed_log_path => "file_completed_log_path.log"
address => "main-jtl"
add_field => {
"origin" => "jtl"

}
codec => csv {
autodetect_column_names => true
convert => {
"Connect" => "integer"
"bytes" => "integer"
"IdleTime" => "integer"
"Latency" => "integer"
"sentBytes" => "integer"
"elapsed" => "integer"
"allThreads" => "integer"
"grpThreads" => "integer"
"success" => "boolean"
"SampleCount" => "integer"
"ErrorCount" => "integer"
}
}
}

file {
id => "statistics-input"
path => ["${INPUT_PATH:/input}/**.json","${INPUT_PATH_STAT:/input}/**.json"]
@@ -52,3 +32,4 @@ input {


}

4 changes: 2 additions & 2 deletions config/pipeline/output-elastic-logstash.conf
@@ -4,7 +4,7 @@ output {
if [origin] == "jtl" {
elasticsearch {
id => "jmeter-elasticsearch-output-jtl"
hosts => "${ELASTICSEARCH_HOSTS:elasticsearch:9200}"
hosts => "${ELASTICSEARCH_HOSTS:}"
index => "${ELASTICSEARCH_INDEX:jmeter-jtl-%{+YYYY.MM.dd}}"
http_compression => "${ELASTICSEARCH_HTTP_COMPRESSION:false}"
user => "${ELASTICSEARCH_USER:}"
@@ -19,7 +19,7 @@ output {
if [origin] == "stat" {
elasticsearch {
id => "jmeter-elasticsearch-output-stat"
hosts => "${ELASTICSEARCH_HOSTS:elasticsearch:9200}"
hosts => "${ELASTICSEARCH_HOSTS:}"
index => "${ELASTICSEARCH_INDEX_STAT:jmeter-jtl-%{+YYYY.MM.dd}}"
http_compression => "${ELASTICSEARCH_HTTP_COMPRESSION:false}"
user => "${ELASTICSEARCH_USER:}"
7 changes: 7 additions & 0 deletions config/settings/pipelines.yml
@@ -0,0 +1,7 @@
- pipeline.id: main
path.config: "/usr/share/logstash/pipeline"
- pipeline.id: jtl-csv
path.config: "/usr/share/logstash/pipeline-in-csv"
pipeline.workers: 1
pipeline.batch.size: 1
pipeline.ordered: "true"
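
This new pipelines.yml wires the two stages together: `jtl-csv` tails the raw files and reassembles multiline rows with a single ordered worker at batch size 1 (keeping reassembly deterministic), then forwards events to `main` over the inter-pipeline address `main-jtl`. The two ends of that wire, from the configs above:

    # jtl-csv side (output-next-pipeline-logstash.conf)
    output { pipeline { send_to => ["main-jtl"] } }

    # main side (input-file-logstash.conf); add_field is what the [origin] == "jtl" conditionals key on
    input { pipeline { address => "main-jtl" add_field => { "origin" => "jtl" } } }
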
11 changes: 10 additions & 1 deletion docker/elasticsearch/Dockerfile
@@ -4,12 +4,21 @@ FROM docker.elastic.co/logstash/logstash-oss:${LOGSTASH_VERSION}
ARG ELASTICSEARCH_VERSION=7
ENV ELASTICSEARCH_VERSION ${ELASTICSEARCH_VERSION}

RUN bin/logstash-plugin install logstash-codec-csv
ENV CONF_EXEC_TIMEOUT 86400
ENV CONF_WAIT_FIRST_DATA 60
ENV CONF_WAIT_INACTIVITY 30


RUN rm -f /usr/share/logstash/pipeline/logstash.conf
ADD config/pipeline/ /usr/share/logstash/pipeline/
ADD config/pipeline-in-csv/ /usr/share/logstash/pipeline-in-csv/
ADD config/settings/ /usr/share/logstash/config/
ADD docker/entrypoint.sh /usr/local/bin/jmeter-logstash-entrypoint.sh


RUN mkdir -p /usr/share/logstash/data/plugins/inputs/file
VOLUME /usr/share/logstash/data/plugins/inputs/file



ENTRYPOINT ["jmeter-logstash-entrypoint.sh"]
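
Two details worth noting here: the `logstash-codec-csv` install is gone because CSV parsing moved from an input codec to the stock csv filter, and `/usr/share/logstash/data/plugins/inputs/file` is now a declared volume, since that is where the file input keeps its sincedb read positions; mounting it preserves progress across container restarts. A sketch (image name and host path illustrative):

    docker run --rm -v ${PWD}/sincedb:/usr/share/logstash/data/plugins/inputs/file -v ${PWD}/input:/input anasoid/jmeter-logstash
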