Skip to content

Commit

Permalink
Merge pull request #49 from anasoid/develop
Browse files Browse the repository at this point in the history
multiline
  • Loading branch information
anasoid authored Jun 9, 2023
2 parents 24f40bf + 90f03be commit 257de48
Show file tree
Hide file tree
Showing 48 changed files with 1,216 additions and 376 deletions.
40 changes: 5 additions & 35 deletions .github/workflows/docker.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,40 +46,6 @@ jobs:
prefix: ""
tags-latest: "8.4"

- output: "influxdb"
logstash-version: 7.17.9
logstash-branch: 7.17
elasticsearch-version: 7
prefix: "influxdb-"
tags-latest: "influxdb"

- output: "influxdb"
logstash-version: 8.1.3
logstash-branch: 8.1
elasticsearch-version: 8
prefix: "influxdb-"
tags-latest: "influxdb-8.1"

- output: "influxdb"
logstash-version: 8.2.3
logstash-branch: 8.2
elasticsearch-version: 8
prefix: "influxdb-"
tags-latest: "influxdb"

- output: "influxdb"
logstash-version: 8.3.3
logstash-branch: 8.3
elasticsearch-version: 8
prefix: "influxdb-"
tags-latest: "influxdb-8.3"

- output: "influxdb"
logstash-version: 8.4.2
logstash-branch: 8.4
elasticsearch-version: 8
prefix: "influxdb-"
tags-latest: "influxdb-8.4"


# v2 https://github.com/docker/build-push-action/blob/master/UPGRADE.md
Expand Down Expand Up @@ -146,7 +112,11 @@ jobs:
- name: Result 📦
id: result
run: |
echo ::set-output "currentImage=${{ steps.prep.outputs.image }}:${{ matrix.logstash-branch }}" >> $GITHUB_OUTPUT
echo "currentImage=${{ steps.prep.outputs.image }}:${{ matrix.logstash-branch }}" >> $GITHUB_OUTPUT
- name: Validate config ☁️

run: docker run --rm -e "FILE_EXIT_AFTER_READ=false" ${{ steps.result.outputs.currentImage }} -t

#Publish on master
- name: Login to DockerHub 📦
Expand Down
7 changes: 4 additions & 3 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ jobs:
run: |
mkdir build
cp -r config build
cp -r docker build
cp -r testing/config/* build/config/
rm build/config/pipeline/output-elastic-logstash.conf
mkdir -p $PATH_OUT_BASIC
Expand Down Expand Up @@ -97,12 +98,12 @@ jobs:
EXPECTED_FILE=$PATH_EXPECTED/$FILE_NAME
OUT_FILE=$PATH_OUT/$FILE_NAME
echo "####OUT###############$FILE_NAME#####################"
jq --sort-keys 'del(.host) | del(."@timestamp")' $OUT_FILE
jq --sort-keys 'del(.host) | del(."@timestamp") | del(.timestamp)' $OUT_FILE
echo "####OUT###############$FILE_NAME#####################"
echo "####EXPECTED##########$FILE_NAME#####################"
jq --sort-keys 'del(.host) | del(."@timestamp")' $EXPECTED_FILE
jq --sort-keys 'del(.host) | del(."@timestamp") | del(.timestamp)' $EXPECTED_FILE
echo "####EXPECTED##########$FILE_NAME#####################"
diff <(jq --sort-keys 'del(.host) | del(."@timestamp")' $EXPECTED_FILE) <(jq --sort-keys 'del(.host) | del(."@timestamp")' $OUT_FILE)
diff <(jq --sort-keys 'del(.host) | del(."@timestamp") | del(.timestamp)' $EXPECTED_FILE) <(jq --sort-keys 'del(.host) | del(."@timestamp") | del(.timestamp)' $OUT_FILE)
echo "####END###############################$FILE_NAME#####################################"
done
done
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,5 @@ build
test
input
.history
.sincedb
.sincedb
.idea
77 changes: 61 additions & 16 deletions README.md

Large diffs are not rendered by default.

15 changes: 15 additions & 0 deletions config/pipeline-in-csv/filter-wait-logstash.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Delays the first data event of a run by 5 seconds so that downstream
# pipelines/outputs have time to come up before results start flowing.
filter {
# JTL CSV header rows start with "timeStamp"; leave those untouched.
if !([message] =~ /^timeStamp/) {
ruby {
# $count_event is a Ruby *global*, shared by all pipeline worker threads.
init => "$count_event=0"
code => "
if $count_event < 1
sleep(5)
$count_event += 1
end
"
# NOTE(review): the check-then-increment above is not atomic, so with
# several workers more than one event may sleep — confirm this
# best-effort one-shot delay is the intended behavior.
tag_on_exception => "_rubyexception_filtred_sleep"
}
}

}
22 changes: 22 additions & 0 deletions config/pipeline-in-csv/input-csv-logstash.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
#start_position => string, one of ["beginning", "end"]
#mode => string, one of ["tail", "read"]
#file_completed_action : delete, log, log_and_delete
# Reads JMeter .jtl result files. Every option is overridable through an
# environment variable using the ${VAR:default} syntax.
input {
file {
id => "jtl-input"
# Two roots are watched; both default to /input, so by default the same
# glob is listed twice (harmless duplicate).
path => ["${INPUT_PATH:/input}/**.jtl","${INPUT_PATH_JTL:/input}/**.jtl"]
mode => "${FILE_READ_MODE:tail}"
start_position => "${FILE_START_POSITION:beginning}"
# NOTE(review): exit_after_read only takes effect when mode is "read";
# it is a no-op under the default "tail" mode — confirm intended.
exit_after_read => "${FILE_EXIT_AFTER_READ:false}"
file_completed_action => "${FILE_COMPLETED_ACTION:log}"
# NOTE(review): relative path, resolved against Logstash's working
# directory — verify where this lands inside the container.
file_completed_log_path => "file_completed_log_path.log"
# Lines that do NOT begin with four digits are treated as continuations
# of the previous line, re-joining samples whose payload contains newlines.
codec => multiline {
pattern => "^\d{4}"
negate => true
what => "previous"
}
}


}
5 changes: 5 additions & 0 deletions config/pipeline-in-csv/output-next-pipeline-logstash.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Hand every event off to the downstream pipeline listening on the
# "main-jtl" virtual address (Logstash pipeline-to-pipeline communication).
output {
pipeline {
send_to => ["main-jtl"]
}
}
26 changes: 26 additions & 0 deletions config/pipeline/filter-a-config-fields-logstash.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@

# Materialize run configuration from environment variables, with
# ${VAR:default} fallbacks. "project" becomes a real event field; everything
# under [@metadata] is visible to later filters/outputs in this pipeline but
# is never shipped with the event itself.
filter {

mutate {
add_field => {
"project" => "${PROJECT_NAME:undefined}"
"[@metadata][executionid]" => "${EXECUTION_ID:undefined}"
"[@metadata][environment]" => "${ENVIRONMENT_NAME:undefined}"
"[@metadata][testname]" => "${TEST_NAME:undefined}"
"[@metadata][testtags]" => "${TEST_TAGS:undefined}"
"[@metadata][dropmessage]" => "${PARSE_REMOVE_MESSAGE_FIELD:true}"
"[@metadata][cleanup]" => "${PARSE_CLEANUP_FIELDS:true}"
"[@metadata][transaction_regex]" => "${PARSE_TRANSACTION_REGEX:_.+_}"
"[@metadata][transaction_auto]" => "${PARSE_TRANSACTION_AUTO:true}"
# Empty defaults: downstream filters treat an empty regex as "no filtering".
"[@metadata][filter_include_sampler_regex]" => "${PARSE_FILTER_INCLUDE_SAMPLER_REGEX:}"
"[@metadata][filter_exclude_sampler_regex]" => "${PARSE_FILTER_EXCLUDE_SAMPLER_REGEX:}"
"[@metadata][remove_transaction]" => "${PARSE_REMOVE_TRANSACTION:false}"
"[@metadata][remove_sampler]" => "${PARSE_REMOVE_SAMPLER:false}"
"[@metadata][with_subresult]" => "${PARSE_WITH_FLAG_SUBRESULT:true}"
"[@metadata][metadata]" => "${TEST_METADATA:undefined}"
"[@metadata][standalone]" => "${STANDALONE:false}"

}

}
}
22 changes: 22 additions & 0 deletions config/pipeline/filter-a-jtl-csv-filter-logstash.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Parse JTL CSV lines for events tagged with origin == "jtl".
filter {
if [origin] == "jtl" {

# Column names are taken from the CSV header row; the separator
# defaults to "," and can be overridden via CSV_SEPARATOR.
csv {
autodetect_column_names => true
separator => "${CSV_SEPARATOR:,}"
# Cast the well-known JMeter numeric/boolean columns so later
# aggregations and outputs see typed values instead of strings.
convert => {
"Connect" => "integer"
"bytes" => "integer"
"IdleTime" => "integer"
"Latency" => "integer"
"sentBytes" => "integer"
"elapsed" => "integer"
"allThreads" => "integer"
"grpThreads" => "integer"
"success" => "boolean"
"SampleCount" => "integer"
"ErrorCount" => "integer"
}
}
}
}
25 changes: 0 additions & 25 deletions config/pipeline/filter-aa-fields-logstash.conf

This file was deleted.

25 changes: 25 additions & 0 deletions config/pipeline/filter-aa-jtl-fields-logstash.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Derive a numeric responseStatus from the JMeter responseCode field.
filter {
if [origin] == "jtl" {

# Copy responseCode when it is purely numeric; otherwise (e.g. JMeter
# pseudo-codes such as "Non HTTP response code: ...") substitute the
# MISSED_RESPONSE_CODE default (510).
if [responseCode] =~ /^[0-9]+$/ {
mutate {
copy => {
"responseCode" => "responseStatus"
}
}
}
else {
mutate {
add_field => [ "responseStatus", "${MISSED_RESPONSE_CODE:510}" ]

}
}

# Either way, expose responseStatus as an integer.
mutate {
convert => {
"responseStatus" => "integer"
}
}
}
}
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
filter {

#parse Transaction
if [origin] == "jtl" {
#parse Transaction

ruby {
code => "
ruby {
code => "
regex = event.get('[@metadata][filter_exclude_sampler_regex]')
if (regex.to_java().isEmpty() )
event.set('[@metadata][filtred_exclude]', 'false')
Expand All @@ -15,35 +16,36 @@ filter {
event.set('[@metadata][filtred_exclude]', 'false')
end
end
"
tag_on_exception => "_rubyexception_filtred_exclude"
}
"
tag_on_exception => "_rubyexception_filtred_exclude"
}

if [@metadata][filtred_exclude] == "true" {
drop {
}
}
else {
if [@metadata][filtred_exclude] == "true" {
drop {
}
}
else {

ruby {
code => "
regex = event.get('[@metadata][filter_include_sampler_regex]')
if (regex.to_java().isEmpty() )
ruby {
code => "
regex = event.get('[@metadata][filter_include_sampler_regex]')
if (regex.to_java().isEmpty() )
event.set('[@metadata][filtred_include]', 'true')
else
else
patern = java::util::regex::Pattern.compile(regex)
if patern.match(event.get('label'))
event.set('[@metadata][filtred_include]', 'true')
else
event.set('[@metadata][filtred_include]', 'false')
end
end
"
tag_on_exception => "_rubyexception_filtred_include"
}
if [@metadata][filtred_include] == "false" {
drop {
end
"
tag_on_exception => "_rubyexception_filtred_include"
}
if [@metadata][filtred_include] == "false" {
drop {
}
}
}
}
}
}
}
}
79 changes: 79 additions & 0 deletions config/pipeline/filter-aa-jtl-transaction-logstash.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
# Classify each JTL event as a JMeter transaction controller result or a
# plain sampler, then keep/drop it according to the remove_transaction /
# remove_sampler configuration set earlier in [@metadata].
filter {

if [origin] == "jtl" {
#parse Transaction

# Set transaction=true when the label matches the configured
# [@metadata][transaction_regex]; an empty regex means "never".
ruby {
code => "
regex = event.get('[@metadata][transaction_regex]')
if (regex.to_java().isEmpty() )
event.set('transaction', 'false')
else
patern = java::util::regex::Pattern.compile(regex)
if patern.match(event.get('label'))
event.set('transaction', 'true')
else
event.set('transaction', 'false')
end
end
"
# NOTE(review): java.util.regex.Pattern exposes matcher()/static
# matches(), not an instance match() — confirm this call resolves under
# JRuby rather than always raising and being swallowed by the tag below.
tag_on_exception => "_rubyexception_transaction"
}

# Auto-detection fallback: a transaction controller result has no URL and
# a "Number of samples in transaction ..." responseMessage.
if [@metadata][transaction_auto] == "true" {
if [URL] == "null" {
if [responseMessage] =~ /^Number of samples in transaction (.)+/ {
mutate {
replace => {
"transaction" => "true"
}
}
}
}
}


if [transaction] == "true" {

if [@metadata][remove_transaction] == "false" {
#parse transaction Message
# Extract total/failing sampler counts from the controller's message.
if [responseMessage] {

grok {
match => {
"responseMessage" => ['\ANumber of samples in transaction : %{INT:transactionTotalSampler}, number of failing samples : %{INT:transactionFailingSampler}\Z']
}
tag_on_failure => ["_grokparsefailure","_grokparsefailure_Transaction"]

}

mutate {
convert => {
"transactionTotalSampler" => "integer"
"transactionFailingSampler" => "integer"
}
}

}
}
else {
# Transactions are configured to be removed entirely.
drop {
}
}
}
else {
# Plain sampler: drop unless remove_sampler is "false".
if [@metadata][remove_sampler] != "false" {
drop {
}
}
}


# Surviving events carry transaction as a real boolean.
mutate {
convert => {
"transaction" => "boolean"
}
}
}

}
Loading

0 comments on commit 257de48

Please sign in to comment.