diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index d8d8e85..fc70285 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -12,7 +12,7 @@ jobs:
matrix:
include:
- output: "elasticsearch"
- logstash-version: 7.17.9
+ logstash-version: 7.17.14
logstash-branch: 7.17
elasticsearch-version: 7
prefix: ""
@@ -51,7 +51,7 @@ jobs:
# v2 https://github.com/docker/build-push-action/blob/master/UPGRADE.md
steps:
- name: Checkout ✅
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Config 📦
run: |
diff --git a/README.md b/README.md
index fb884dd..995781b 100644
--- a/README.md
+++ b/README.md
@@ -1,81 +1,78 @@
-# jmeter-logstash
+# 1. jmeter-logstash
-Jmeter JTL ans statiscts file parsing with Logstash and elasticsearch, you can find image on [Docker Hub](https://hub.docker.com/r/anasoid/jmeter-logstash), statistics.json file is generated with jmeter html report.
+JMeter JTL and statistics file parsing with Logstash and Elasticsearch. You can find the image
+on [Docker Hub](https://hub.docker.com/r/anasoid/jmeter-logstash). The statistics.json file is generated by the JMeter
+HTML report.
-# Quick reference
+# 2. Quick reference
- **Where to get help**:
- - [Issues](https://github.com/anasoid/jmeter-logstash/issues)
- - [Discussions](https://github.com/anasoid/jmeter-logstash/discussions)
- - [Documentation](https://github.com/anasoid/jmeter-logstash)
+ - [Issues](https://github.com/anasoid/jmeter-logstash/issues)
+ - [Discussions](https://github.com/anasoid/jmeter-logstash/discussions)
+ - [Documentation](https://github.com/anasoid/jmeter-logstash)
-## Image version
+## 2.1. Image version
-- [`latest`, `7.17`, `7.17.6`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/elasticsearch/Dockerfile)
+- [`latest`, `7.17`, `7.17.14`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/elasticsearch/Dockerfile)
- [`8.4`, `8.4.2`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/elasticsearch/Dockerfile)
- [`8.3`, `8.3.3`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/elasticsearch/Dockerfile)
- [`8.2`, `8.2.3`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/elasticsearch/Dockerfile)
- [`8.1`, `8.1.3`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/elasticsearch/Dockerfile)
-- [`influxdb`, `influxdb-7.17`, `influxdb-7.17.6`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/influxdb/Dockerfile)
-- [`influxdb-8.4`, `influxdb-8.4.2`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/influxdb/Dockerfile)
-- [`influxdb-8.3`, `influxdb-8.3.3`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/influxdb/Dockerfile)
-- [`influxdb-8.2`, `influxdb-8.2.3`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/influxdb/Dockerfile)
-- [`influxdb-8.1`, `influxdb-8.1.3`](https://github.com/anasoid/jmeter-logstash/blob/master/docker/influxdb/Dockerfile)
-
-## Features
+## 2.2. Features
1. Parse Standard JTL (CSV Format).
2. Possibility to filter requests based on regex filter (include and exclude filter) .
3. Flag samplers generated by TransactionController based on regex (by default '_.+_').
4. For TransactionController, calculate number of failing sampler and total.
-5. Add relative time to compare different Executions, variable TESTSTART.MS should be logged with property [sample_variables](https://jmeter.apache.org/usermanual/properties_reference.html#results_file_config)
+5. Add relative time to compare different executions; the variable TESTSTART.MS should be logged with the
+   property [sample_variables](https://jmeter.apache.org/usermanual/properties_reference.html#results_file_config)
6. Add Project name, test name, environment and executionId to organize results and compare different execution.
7. Split Label name to have multi tags by request (by default split by '/').
-8. Flag subresult, when there is a redirection 302,.. Subrequest has a have a suffix like "-xx" when xx is the order number
-9. Supporting ElasticSearch , influxDB, and can be adapted for other tools.
-10. can also index custom field logged in file with property : [sample_variables](https://jmeter.apache.org/usermanual/properties_reference.html#results_file_config)
-
-## Content
-
-- [jmeter-logstash](#jmeter-logstash)
-- [Quick reference](#quick-reference)
- - [Image version](#image-version)
- - [Features](#features)
- - [Content](#content)
- - [Image Variants](#image-variants)
-- [Getting Started](#getting-started)
- - [Create ElasticSearch stack (_Only if using ElasticSearch \& Kibana_)](#create-elasticsearch-stack-only-if-using-elasticsearch--kibana)
- - [Run Logstash](#run-logstash)
- - [Run With image from docker hub for Elasticsearch (way 1, preferred)](#run-with-image-from-docker-hub-for-elasticsearch-way-1-preferred)
- - [Run With image from docker hub for InfluxDB](#run-with-image-from-docker-hub-for-influxdb)
- - [Dashboards](#dashboards)
- - [Kibana](#kibana)
- - [HOW-TO](#how-to)
- - [Example](#example)
- - [Parameters](#parameters)
- - [ElasticSearch configuration](#elasticsearch-configuration)
- - [InfluxDB configuration](#influxdb-configuration)
- - [Logstash](#logstash)
- - [Fields](#fields)
- - [Common](#common)
- - [JTL](#jtl)
- - [Statistics (Only ElasticSearch)](#statistics-only-elasticsearch)
-- [Troubleshooting \& Limitation](#troubleshooting--limitation)
-
-## Image Variants
+8. Flag subresults: when there is a redirection (302, ...), subrequests have a suffix like "-xx" where xx is the order
+   number.
+9. Supports Elasticsearch and can be adapted for other tools.
+10. Can also index custom fields logged in the file with the
+    property [sample_variables](https://jmeter.apache.org/usermanual/properties_reference.html#results_file_config) (see the sketch below).
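+
+As an illustration only (placeholder file and variable names, not from this project), logging TESTSTART.MS plus a custom variable could look like this on the JMeter side:
+
+```shell
+# Hypothetical JMeter run: log TESTSTART.MS (and a custom variable) so they can be indexed by Logstash
+jmeter -n -t testplan.jmx -l result.jtl -Jsample_variables=TESTSTART.MS,myvar
+```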
+
+## 2.3. Content
+
+- [1. jmeter-logstash](#1-jmeter-logstash)
+- [2. Quick reference](#2-quick-reference)
+ - [2.1. Image version](#21-image-version)
+ - [2.2. Features](#22-features)
+ - [2.3. Content](#23-content)
+ - [2.4. Image Variants](#24-image-variants)
+- [3. Getting Started](#3-getting-started)
+ - [3.1. Create ElasticSearch stack (_Only if using ElasticSearch \& Kibana_)](#31-create-elasticsearch-stack-only-if-using-elasticsearch--kibana)
+ - [3.2. Run Logstash](#32-run-logstash)
+ - [3.2.1. Run With image from docker hub for Elasticsearch (way 1, preferred)](#321-run-with-image-from-docker-hub-for-elasticsearch-way-1-preferred)
+ - [3.3. Dashboards](#33-dashboards)
+ - [3.3.1. Kibana](#331-kibana)
+ - [3.4. HOW-TO](#34-how-to)
+ - [3.4.1. Example](#341-example)
+ - [3.5. Parameters](#35-parameters)
+ - [3.5.1. ElasticSearch configuration](#351-elasticsearch-configuration)
+ - [3.5.2. Logstash](#352-logstash)
+ - [3.6. Fields](#36-fields)
+ - [3.6.1. Common](#361-common)
+ - [3.6.2. JTL](#362-jtl)
+ - [3.6.3. Statistics (Only ElasticSearch)](#363-statistics-only-elasticsearch)
+- [4. Troubleshooting \& Limitation](#4-troubleshooting--limitation)
+
+## 2.4. Image Variants
The `jmeter-logstash` images come in many flavors, each designed for a specific use case.
-The images version are based on component used to build image, default use elasticsearch output:
-1. **Logstash Version**: 7.17.9 -> default for 7.17.
+The image versions are based on the components used to build the image; by default the Elasticsearch output is used:
+1. **Logstash Version**: 7.17.14 -> default for 7.17.
-1. **influxdb** : Pre-configured image with influxdb output.
-# Getting Started
+# 3. Getting Started
-## Create ElasticSearch stack (_Only if using ElasticSearch & Kibana_)
+## 3.1. Create ElasticSearch stack (_Only if using ElasticSearch & Kibana_)
-1. Create Optional docker network (Called jmeter). If not used remove "--net jmeter " from all following docker command and adapt Elasticsearch url.
+1. Create an optional Docker network (called jmeter). If not used, remove "--net jmeter" from all the following docker
+   commands and adapt the Elasticsearch URL.
```shell
docker network create jmeter
@@ -98,12 +95,13 @@ docker.elastic.co/elasticsearch/elasticsearch:8.4.1
docker run --name jmeter-kibana --net jmeter -p 5601:5601 -e "ELASTICSEARCH_HOSTS=http://jmeter-elastic:9200" docker.elastic.co/kibana/kibana:8.4.1
```
-## Run Logstash
+## 3.2. Run Logstash
1. In the project folder create a folder named 'input' or you can use any input folder in your machine.
-1. If you choose to use a different input folder, you should change **"${PWD}/input"** on the following command by your input folder.
+1. If you choose to use a different input folder, replace **"${PWD}/input"** in the following commands with your
+   input folder.
-### Run With image from docker hub for Elasticsearch (way 1, preferred)
+### 3.2.1. Run With image from docker hub for Elasticsearch (way 1, preferred)
```shell
@@ -112,35 +110,32 @@ docker run --rm -it --net jmeter -e "ELASTICSEARCH_HOSTS=http://jmeter-elastic:9
```
-### Run With image from docker hub for InfluxDB
+## 3.3. Dashboards
-Adapt parameters for your Influxdb like (INFLUXDB_HOST ..) See [InfluxDB configuration](#influxdb-configuration).
+### 3.3.1. Kibana
-```shell
-#Run Image
-docker run --rm -it -e "INFLUXDB_HOST=localhost" -v ${PWD}/input:/input/ anasoid/jmeter-logstash:influxdb
-
-```
-
-## Dashboards
-
-### Kibana
-
-Download Dashboards from [Kibana Dashboards](/docs/kibana/jmeter-jtl-kibana.ndjson) and go to Stack management kibana -> saved object for import.
+Download the dashboards from [Kibana Dashboards](/docs/kibana/jmeter-jtl-kibana.ndjson) and import them in Kibana via
+Stack Management -> Saved Objects.
| Main Dashboard | Compare Dashboard |
-| ------------------------------------------------------------- | --------------------------------------------------------------------- |
+|---------------------------------------------------------------|-----------------------------------------------------------------------|
| | |
-## HOW-TO
+## 3.4. HOW-TO
-1. To exit after all files parsed use (**-e "FILE_EXIT_AFTER_READ=true"**) should be used with ( **-e "FILE_READ_MODE=read"**) ..
+1. To exit after all files are parsed, use (**-e "FILE_EXIT_AFTER_READ=true"**) together with
+   (**-e "FILE_READ_MODE=read"**).
2. To not remove container logstash after execution not use --rm from arguments.
-3. Logstash keep information on position f last line parsed in a file sincedb, this file by default is on the path _/usr/share/logstash/data/plugins/inputs/file_, if you use the same container this file will be persisted even you restart logstash cotainer, and if you need to maintain this file even you remove container you can mount volume folder in the path (_/usr/share/logstash/data/plugins/inputs/file_)
+3. Logstash keeps the position of the last line parsed in each file in a sincedb file, located by default at
+   _/usr/share/logstash/data/plugins/inputs/file_. If you reuse the same container, this file is persisted even after
+   you restart the Logstash container; if you need to keep it even after removing the container, mount a volume at
+   that path (_/usr/share/logstash/data/plugins/inputs/file_), as shown in the sketch after this list.
4. To have relative time for comparison test start time should be logged :
- in user.properties file (sample_variables=TESTSTART.MS,...) or add properties with file using -q argument or directly in command line with (-Jsample_variables=TESTSTART.MS,..) see [Full list of command-line options](https://jmeter.apache.org/usermanual/get-started.html#options)
+   in the user.properties file (sample_variables=TESTSTART.MS,...), in a properties file passed with the -q argument,
+   or directly on the command line with (-Jsample_variables=TESTSTART.MS,..);
+   see [Full list of command-line options](https://jmeter.apache.org/usermanual/get-started.html#options)
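+
+A minimal sketch of point 3, assuming the Elasticsearch setup above; the host folder `${PWD}/sincedb` and the container name are illustrative, only the container path comes from the image:
+
+```shell
+# Keep the container (no --rm) and persist the sincedb on the host so parsing can resume
+# even if the container is removed; "${PWD}/sincedb" is a hypothetical host folder.
+docker run -it --name jmeter-logstash --net jmeter \
+  -e "ELASTICSEARCH_HOSTS=http://jmeter-elastic:9200" \
+  -v ${PWD}/input:/input/ \
+  -v ${PWD}/sincedb:/usr/share/logstash/data/plugins/inputs/file \
+  anasoid/jmeter-logstash
+```
+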
-### Example
+### 3.4.1. Example
Run Logstash without remove container after stop.
@@ -171,46 +166,27 @@ anasoid/jmeter-logstash
````
-Run Logstash with influxDB custom port.
+## 3.5. Parameters
-```shell
-#Run Image
-docker run --rm -it -e "INFLUXDB_PORT=9090" -e "INFLUXDB_HOST=localhost" -v ${PWD}/input:/input/ anasoid/jmeter-logstash:influxdb
-
-
-```
-
-## Parameters
-
-### ElasticSearch configuration
+### 3.5.1. ElasticSearch configuration
| Environment variables | Description | Default |
-| -------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------- |
-| `ELASTICSEARCH_HOSTS` | Elasticsearch output configuration [hosts](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-hosts) (ex: http://elasticsearch:9200 ) | |
+|----------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------|
+| `ELASTICSEARCH_HOSTS` | Elasticsearch output configuration [hosts](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-hosts) (ex: http://elasticsearch:9200 ) | |
| `ELASTICSEARCH_INDEX` | Elasticsearch output configuration [index](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-index) | jmeter-jtl-%{+YYYY.MM.dd} |
| `ELASTICSEARCH_USER` | Elasticsearch output configuration [user](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-user) | |
| `ELASTICSEARCH_PASSWORD` | Elasticsearch output configuration [password](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-password) | |
+| `ELASTICSEARCH_SSL_VERIFICATION` | Elasticsearch output configuration [ssl_certificate_verification](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-ssl_certificate_verification) | true |
| `ELASTICSEARCH_HTTP_COMPRESSION` | Elasticsearch output configuration [http_compression](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-http_compression) | false |
| `ELASTICSEARCH_VERSION`          | Elasticsearch template version, should be the same as the Elasticsearch version (not the Logstash version); valid values are 7 and 8. Only Logstash 7 can work with both Elasticsearch 7 and 8; Logstash 8 works only with Elasticsearch 8. | 7 for logstash 7.x and 8 for logstash 8.x |
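+
+A minimal sketch combining the variables above for a secured cluster; the user, password and URL values are placeholders, not defaults shipped with the image:
+
+```shell
+# Hypothetical run against a secured Elasticsearch; disable certificate verification
+# only for self-signed test setups.
+docker run --rm -it --net jmeter \
+  -e "ELASTICSEARCH_HOSTS=https://jmeter-elastic:9200" \
+  -e "ELASTICSEARCH_USER=elastic" -e "ELASTICSEARCH_PASSWORD=changeme" \
+  -e "ELASTICSEARCH_SSL_VERIFICATION=false" \
+  -v ${PWD}/input:/input/ anasoid/jmeter-logstash
+```
+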
-### InfluxDB configuration
-
-| Environment variables | Description | Default |
-| ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------- |
-| `INFLUXDB_HOST` | InfluxDB output configuration [host](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-influxdb.html#plugins-outputs-influxdb-host) | localhost |
-| `INFLUXDB_PORT` | InfluxDB output configuration [port](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-influxdb.html#plugins-outputs-influxdb-port) | 8086 |
-| `INFLUXDB_USER` | InfluxDB output configuration [user](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-influxdb.html#plugins-outputs-influxdb-user) | |
-| `INFLUXDB_PASSWORD` | InfluxDB output configuration [password](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-influxdb.html#plugins-outputs-influxdb-password) | |
-| `INFLUXDB_DB` | InfluxDB output configuration [DB](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-influxdb.html#plugins-outputs-influxdb-db) | jmeter |
-| `INFLUXDB_MEASUREMENT` | InfluxDB output configuration [measurement](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-influxdb.html#plugins-outputs-influxdb-measurement) | jtl |
-
-### Logstash
+### 3.5.2. Logstash
| Environment variables | Description | Default |
-| ------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------- |
-| `INPUT_PATH` | Default folder input used for JTL and statistics file. | /input |
-| `INPUT_PATH_JTL` | Default folder input used for JTL, pattern : (`["${INPUT_PATH:/input}/**.jtl","${INPUT_PATH_JTL:/input}/**.jtl"]`) | /input |
-| `INPUT_PATH_STAT` | Default folder input used statistics , pattern : (`["${INPUT_PATH:/input}/**.json","${INPUT_PATH_STAT:/input}/**.json"]`) | /input |
+|--------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------|
+| `INPUT_PATH`                          | Default input folder used for JTL and statistics files.                                                                                                                  | /input    |
+| `INPUT_PATH_JTL`                      | Default input folder used for JTL files, pattern : (`["${INPUT_PATH:/input}/**.jtl","${INPUT_PATH_JTL:/input}/**.jtl"]`)                                                 | /input    |
+| `INPUT_PATH_STAT`                     | Default input folder used for statistics files, pattern : (`["${INPUT_PATH:/input}/**.json","${INPUT_PATH_STAT:/input}/**.json"]`)                                       | /input    |
| `PROJECT_NAME` | Project name | undefined |
| `ENVIRONMENT_NAME` | Environment name, if not provided will try to extract value from file name ( {test_name}-{environment-name}-{execution_id} ) | undefined |
| `TEST_NAME` | Test name, if not provided will try to extract value from file name ( {test_name}-{environment-name}-{execution_id} or {test_name}-{execution_id} or {test_name}) | undefined |
@@ -232,100 +208,104 @@ docker run --rm -it -e "INFLUXDB_PORT=9090" -e "INFLUXDB_HOST=localhost" -v ${PW
| `PARSE_REMOVE_MESSAGE_FIELD` | Remove field message. | true |
| `PARSE_CLEANUP_FIELDS` | Remove fields : host, path. | true |
| `PARSE_WITH_FLAG_SUBRESULT` | Flag results that are subresults, which have a suffix like "-xx" where xx is the order number | true |
-| `PCT1` | percent 1 value for statistics report | 90 |
-| `PCT2` | percent 1 value for statistics report | 95 |
-| `PCT3` | percent 1 value for statistics report | 99 |
+| `PCT1`                                | Percentile 1 value for statistics report                                                                                                                                  | 90        |
+| `PCT2`                                | Percentile 2 value for statistics report                                                                                                                                  | 95        |
+| `PCT3`                                | Percentile 3 value for statistics report                                                                                                                                  | 99        |
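+
+A sketch of tagging results with the metadata variables above; the project, test and environment names are placeholders, and the file name follows the documented {test_name}-{environment-name}-{execution_id} convention:
+
+```shell
+# Hypothetical run with explicit metadata; alternatively, a file named input/checkout-staging-run42.jtl
+# would let testname, environment and executionid be extracted from the file name.
+docker run --rm -it --net jmeter \
+  -e "ELASTICSEARCH_HOSTS=http://jmeter-elastic:9200" \
+  -e "PROJECT_NAME=myproject" -e "TEST_NAME=checkout" -e "ENVIRONMENT_NAME=staging" \
+  -v ${PWD}/input:/input/ anasoid/jmeter-logstash
+```
+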
-## Fields
+## 3.6. Fields
-### Common
+### 3.6.1. Common
-Common field are integrated during file parsing for both JTL ans statistics file.
+Common fields are integrated during file parsing for both JTL and statistics files.
-| Fields | Type InfluxDB | Type ELK | source | Description |
-| --------------------------- | ------------- | -------- | ------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-| `origin` | - | string | - | Origin of message **jtl** or **stat** |
-| `@timestamp` | - | date | elk | Insertion time in Elastic search |
-| `environment` | string (tag) | string | input/parsing | Target environment (Ex: dev, stage ..), as input using environment variable or extracted from filename. |
-| `executionid` | string (tag) | string | input/parsing | Unique id to identify data for a test, as input using environment variable or extracted from filename. |
-| `filename` | string (tag) | string | parsing | file name without extension. |
-| `path` | - | string | logstash | Path of file. |
-| `project` | string (tag) | string | input | Project name. |
-| `testname` | string (tag) | string | parsing | Test name, as input using environment variable or extracted from filename. |
-| `testtags` | - | string | parsing | List of keywords extracted by splitting environnement variable "TEST_TAGS" from environment variable |
-| `timestamp` | - | date | csv | Request time. Accept timestamp format in ms or "yyyy/MM/dd HH:mm:ss.SSS" . |
-| `label` | string (tag) | string | csv | sampler label |
-| `labels` | - | string | parsing | List of keywords extracted by splitting label using the char "PARSE_LABELS_SPLIT_CHAR" from environment variable , default to "/" |
-| `globalLabel` | string (tag) | string | parsing | Normalized label for subresults, when there redirection (ex 302) jmeter log all redirects requests and the parent one by default (_jmeter.save.saveservice.subresults=false_ to disable), the parent will have the normal label and other subresut will have a suffix like "-xx" when xx is the order number, in this field you will find the original label for all subresult without the number suffix (see field : subresult, redirectLevel ) |
-
-### JTL
+| Fields | Type ELK | source | Description |
+|---------------|----------|---------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `origin` | string | - | Origin of message **jtl** or **stat** |
+| `@timestamp`  | date     | elk           | Insertion time in Elasticsearch                                                                                                                                                                                                                                                                                                                                                                                                                      |
+| `environment` | string | input/parsing | Target environment (Ex: dev, stage ..), as input using environment variable or extracted from filename. |
+| `executionid` | string | input/parsing | Unique id to identify data for a test, as input using environment variable or extracted from filename. |
+| `filename` | string | parsing | file name without extension. |
+| `path` | string | logstash | Path of file. |
+| `project` | string | input | Project name. |
+| `testname` | string | parsing | Test name, as input using environment variable or extracted from filename. |
+| `testtags`    | string   | parsing       | List of keywords extracted by splitting the environment variable "TEST_TAGS"                                                                                                                                                                                                                                                                                                                                                                         |
+| `timestamp`   | date     | csv           | Request time. Accepts a timestamp in ms or in "yyyy/MM/dd HH:mm:ss.SSS" format.                                                                                                                                                                                                                                                                                                                                                                      |
+| `label`       | string   | csv           | Sampler label                                                                                                                                                                                                                                                                                                                                                                                                                                        |
+| `labels`      | string   | parsing       | List of keywords extracted by splitting the label using the character from the environment variable "PARSE_LABELS_SPLIT_CHAR", default "/"                                                                                                                                                                                                                                                                                                          |
+| `globalLabel` | string   | parsing       | Normalized label for subresults: when there is a redirection (e.g. 302), JMeter logs all redirect requests and the parent one by default (_jmeter.save.saveservice.subresults=false_ to disable). The parent keeps the normal label and each subresult gets a suffix like "-xx" where xx is the order number; this field holds the original label for all subresults without the number suffix (see fields: subresult, redirectLevel).               |
+
+### 3.6.2. JTL
Fields for JTL File.
For csv field see documentation on [CSV Log format](https://jmeter.apache.org/usermanual/listeners.html#csvlogformat).
-For additional fields see documentation on [Results file configuration](https://jmeter.apache.org/usermanual/properties_reference.html#results_file_config).
-
-| Fields | Type InfluxDB | Type ELK | source | Description |
-| --------------------------- | ------------- | -------- | ------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-| `Connect` | long | long | csv | time to establish connection |
-| `IdleTime` | long | long | csv | number of milliseconds of 'Idle' time (normally 0) |
-| `Latency` | long | long | csv | time to first response |
-| `URL` | string (tag) | string | csv | |
-| `allThreads` | long | long | csv | total number of active threads in all groups |
-| `bytes` | long | long | csv | number of bytes in the sample |
-| `dataType` | string | string | csv | e.g. text |
-| `domain` | string (tag) | string | parsing | domain name or ip which is extracted from url. |
-| `elapsed` | long | long | csv | elapsed - in milliseconds |
-| `failureMessage` | string | string | csv | |
-| `grpThreads` | long | long | csv | number of active threads in this thread group |
-| `host` | - | string | elk | hostname of logstash node. |
-| `redirectLevel` | long (tag) | long | parsing | redirect number (see field: _globalLabel_) |
-| `relativetime` | float | float | parsing | Number of milliseconds from test started. Useful to compare test. this field need to have started test time logged to csv (add this variable name _TESTSTART.MS_ to property _sample_variables_) |
-| `request` | string (tag) | string | parsing | Request path if Http/s request. |
-| `responseCode` | string (tag) | string | csv | |
-| `responseMessage` | string (tag) | string | csv | |
-| `responseStatus` | long (tag) | long | parsing | Numeric responseCode , if responseCode is not numeric (case on timeout) using value "MISSED_RESPONSE_CODE" from environment variable , default to 510. |
-| `sentBytes` | long | long | csv | number of bytes sent for the sample. |
-| `subresult` | boolean (tag) | boolean | parsing | true if sample is a sub result (see field: _globalLabel_) |
-| `success` | boolean (tag) | boolean | csv | true or false. |
-| `teststart` | - | date | csv | Test start time. This field need to have started test time logged to csv (add this variable name _TESTSTART.MS_ to property _sample_variables_) |
-| `threadGrpId` | long | long | parsing | The number of thread group. (Extract from threadName) . |
-| `threadGrpName` | string (tag) | string | parsing | The name of thread group.(Extract from threadName) . |
-| `workerNode` | string (tag) | string | parsing | host port of worker node. (Extract from threadName) . |
-| `threadNumber` | long | long | parsing | The number of thread (unique in thread group). (Extract from threadName) . |
-| `threadName` | string (tag) | string | csv | Thread name, unique on test. |
-| `transaction` | boolean (tag) | boolean | parsing | IS Sampler transaction , generated by transaction controller, to identify transaction label should start and and with "\_", the regex used to this is "\_.+\_" |
-| `transactionFailingSampler` | long | long | parsing | If sample is transaction, this value represent number of failing sampler. |
-| `transactionTotalSampler` | long | long | parsing | If sample is transaction, this value represent count of total sampler. |
-
-### Statistics (Only ElasticSearch)
+For additional fields see documentation
+on [Results file configuration](https://jmeter.apache.org/usermanual/properties_reference.html#results_file_config).
+
+| Fields | Type ELK | source | Description |
+|-----------------------------|----------|---------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `Connect` | long | csv | time to establish connection |
+| `IdleTime` | long | csv | number of milliseconds of 'Idle' time (normally 0) |
+| `Latency` | long | csv | time to first response |
+| `URL` | string | csv | |
+| `allThreads` | long | csv | total number of active threads in all groups |
+| `bytes` | long | csv | number of bytes in the sample |
+| `dataType` | string | csv | e.g. text |
+| `domain` | string | parsing | domain name or ip which is extracted from url. |
+| `elapsed` | long | csv | elapsed - in milliseconds |
+| `failureMessage` | string | csv | |
+| `grpThreads` | long | csv | number of active threads in this thread group |
+| `host` | string | elk | hostname of logstash node. |
+| `redirectLevel` | long | parsing | redirect number (see field: _globalLabel_) |
+| `relativetime`              | float    | parsing | Number of milliseconds since the test started. Useful to compare tests. This field requires the test start time to be logged to the CSV (add the variable _TESTSTART.MS_ to the property _sample_variables_)       |
+| `request`                   | string   | parsing | Request path if HTTP/S request.                                                                                                                                                                    |
+| `responseCode`              | string   | csv     |                                                                                                                                                                                                    |
+| `responseMessage`           | string   | csv     |                                                                                                                                                                                                    |
+| `responseStatus`            | long     | parsing | Numeric responseCode; if responseCode is not numeric (e.g. on timeout), the value of the environment variable "MISSED_RESPONSE_CODE" is used, default 510.                                         |
+| `sentBytes`                 | long     | csv     | number of bytes sent for the sample.                                                                                                                                                               |
+| `subresult`                 | boolean  | parsing | true if sample is a sub result (see field: _globalLabel_)                                                                                                                                          |
+| `success`                   | boolean  | csv     | true or false.                                                                                                                                                                                     |
+| `teststart`                 | date     | csv     | Test start time. This field requires the test start time to be logged to the CSV (add the variable _TESTSTART.MS_ to the property _sample_variables_)                                             |
+| `threadGrpId`               | long     | parsing | The thread group number (extracted from threadName).                                                                                                                                              |
+| `threadGrpName`             | string   | parsing | The thread group name (extracted from threadName).                                                                                                                                                |
+| `workerNode`                | string   | parsing | Host and port of the worker node (extracted from threadName).                                                                                                                                     |
+| `threadNumber`              | long     | parsing | The thread number (unique within the thread group, extracted from threadName).                                                                                                                    |
+| `threadName`                | string   | csv     | Thread name, unique within the test.                                                                                                                                                              |
+| `transaction`               | boolean  | parsing | Is the sampler a transaction generated by a TransactionController; to be identified as a transaction the label should start and end with "\_" (the regex used is "\_.+\_")                        |
+| `transactionFailingSampler` | long     | parsing | If the sample is a transaction, the number of failing samplers.                                                                                                                                    |
+| `transactionTotalSampler`   | long     | parsing | If the sample is a transaction, the total number of samplers.                                                                                                                                      |
+
+### 3.6.3. Statistics (Only ElasticSearch)
Fields for Statistics
-For percentiles configuration (pc1,pc2,pc3) see [Percentiles configuration](https://jmeter.apache.org/usermanual/properties_reference.html#aggregate_report_graph).
-
-| Fields | Type ELK | source |Description |
-| ---------------------- | -------- | ------------- | ------------------ |
-| `isTotal` | boolean | Parsing | Is total line, label will be "Total" |
-| `sampleCount` | long | Parsing | Sample count |
-| `errorPct` | long | Parsing | Percent of error |
-| `errorCount` | long | Parsing | Error count |
-| `receivedKBytesPerSec` | long | Parsing | Received Kilo Bytes per seconds |
-| `sentKBytesPerSec` | long | Parsing | Sent Kilo Bytes per seconds |
-| `throughput` | long | Parsing | Throughput |
-| `pct1ResTime` | long | Parsing | Percentile 1 response time (aggregate_rpt_pct1) |
-| `pct90` | long | Parsing | percentile 1 value, field name can be changed with PCT1 see [configuration](#logstash) |
-| `pct2ResTime` | long | Parsing | Percentile 2 response time (aggregate_rpt_pct2) |
-| `pct95` | long | Parsing | percentile 2 value, field name can be changed with PCT1 see [configuration](#logstash) |
-| `pct3ResTime` | long | Parsing | Percentile 3 response time (aggregate_rpt_pct3) |
-| `pct99` | long | Parsing | percentile 3 value, field name can be changed with PCT1 see [configuration](#logstash) |
-| `minResTime` | long | Parsing | Minimum response time. |
-| `meanResTime` | long | Parsing | Mean response time. |
-| `medianResTime` | long | Parsing | Median response time. |
-| `maxResTime` | long | Parsing | MAximum response time. |
-
-# Troubleshooting & Limitation
-
-1. Logstash instance can't parse CSV file with different header Format, as first header will be used for all file, if you have files with different format you should use each time a new instance or restart the instance.
+For percentiles configuration (pc1,pc2,pc3)
+see [Percentiles configuration](https://jmeter.apache.org/usermanual/properties_reference.html#aggregate_report_graph).
+
+| Fields | Type ELK | source | Description |
+|------------------------|----------|---------|----------------------------------------------------------------------------------------|
+| `isTotal` | boolean | Parsing | Is total line, label will be "Total" |
+| `sampleCount` | long | Parsing | Sample count |
+| `errorPct` | long | Parsing | Percent of error |
+| `errorCount` | long | Parsing | Error count |
+| `receivedKBytesPerSec` | long     | Parsing | Received kilobytes per second                                                                    |
+| `sentKBytesPerSec`     | long     | Parsing | Sent kilobytes per second                                                                        |
+| `throughput`           | long     | Parsing | Throughput                                                                                       |
+| `pct1ResTime`          | long     | Parsing | Percentile 1 response time (aggregate_rpt_pct1)                                                  |
+| `pct90`                | long     | Parsing | Percentile 1 value; the field name can be changed with PCT1, see [configuration](#352-logstash)  |
+| `pct2ResTime`          | long     | Parsing | Percentile 2 response time (aggregate_rpt_pct2)                                                  |
+| `pct95`                | long     | Parsing | Percentile 2 value; the field name can be changed with PCT2, see [configuration](#352-logstash)  |
+| `pct3ResTime`          | long     | Parsing | Percentile 3 response time (aggregate_rpt_pct3)                                                  |
+| `pct99`                | long     | Parsing | Percentile 3 value; the field name can be changed with PCT3, see [configuration](#352-logstash)  |
+| `minResTime`           | long     | Parsing | Minimum response time.                                                                           |
+| `meanResTime`          | long     | Parsing | Mean response time.                                                                              |
+| `medianResTime`        | long     | Parsing | Median response time.                                                                            |
+| `maxResTime`           | long     | Parsing | Maximum response time.                                                                           |
+
+# 4. Troubleshooting & Limitation
+
+1. A Logstash instance can't parse CSV files with different header formats, as the first header is used for all files;
+   if you have files with different formats, use a new instance each time or restart the instance.
-1. Change sincedb file can't done on logstash with Elasticsearch without building image.
+1. Changing the sincedb file can't be done on Logstash with Elasticsearch without rebuilding the image.
-1. Label with suffix '-{number}' will be considered as subresult, so don't prefix label with '-{number}' or disable subresult flag with PARSE_WITH_FLAG_SUBRESULT.
+1. Labels with the suffix '-{number}' will be considered subresults, so don't give labels a '-{number}' suffix, or
+   disable the subresult flag with PARSE_WITH_FLAG_SUBRESULT.
diff --git a/config/pipeline/output-elastic-logstash.conf b/config/pipeline/output-elastic-logstash.conf
index 9076fe3..feb209c 100644
--- a/config/pipeline/output-elastic-logstash.conf
+++ b/config/pipeline/output-elastic-logstash.conf
@@ -9,6 +9,7 @@ output {
http_compression => "${ELASTICSEARCH_HTTP_COMPRESSION:false}"
user => "${ELASTICSEARCH_USER:}"
password => "${ELASTICSEARCH_PASSWORD:}"
+ ssl_certificate_verification => "${ELASTICSEARCH_SSL_VERIFICATION:true}"
template_name => "jmeter-jtl"
template => "/usr/share/logstash/pipeline/template/jmeter-jtl-${ELASTICSEARCH_VERSION:7}x.json"
template_overwrite => true
@@ -24,6 +25,7 @@ output {
http_compression => "${ELASTICSEARCH_HTTP_COMPRESSION:false}"
user => "${ELASTICSEARCH_USER:}"
password => "${ELASTICSEARCH_PASSWORD:}"
+ ssl_certificate_verification => "${ELASTICSEARCH_SSL_VERIFICATION:true}"
template_name => "jmeter-jtl"
template => "/usr/share/logstash/pipeline/template/jmeter-jtl-${ELASTICSEARCH_VERSION:7}x.json"
template_overwrite => true
diff --git a/docker/elasticsearch/Dockerfile b/docker/elasticsearch/Dockerfile
index 116c9cf..d8a82eb 100644
--- a/docker/elasticsearch/Dockerfile
+++ b/docker/elasticsearch/Dockerfile
@@ -1,4 +1,4 @@
-ARG LOGSTASH_VERSION=7.17.10
+ARG LOGSTASH_VERSION=7.17.14
FROM docker.elastic.co/logstash/logstash-oss:${LOGSTASH_VERSION}
ARG ELASTICSEARCH_VERSION=7
diff --git a/docker/influxdb/Dockerfile b/docker/influxdb/Dockerfile
deleted file mode 100644
index 4bb1a62..0000000
--- a/docker/influxdb/Dockerfile
+++ /dev/null
@@ -1,19 +0,0 @@
-ARG LOGSTASH_VERSION=7.17.10
-FROM docker.elastic.co/logstash/logstash-oss:${LOGSTASH_VERSION}
-
-
-
-
-
-
-RUN rm -f /usr/share/logstash/pipeline/logstash.conf
-ADD config/pipeline/ /usr/share/logstash/pipeline/
-ADD config/settings/ /usr/share/logstash/config/
-
-
-RUN mkdir -p /usr/share/logstash/data/plugins/inputs/file
-VOLUME /usr/share/logstash/data/plugins/inputs/file
-
-RUN rm -f /usr/share/logstash/pipeline/output-elastic-logstash.conf
-ADD docker/influxdb/config/pipeline/ /usr/share/logstash/pipeline/
-RUN bin/logstash-plugin install logstash-output-influxdb
\ No newline at end of file
diff --git a/docker/influxdb/config/pipeline/output-influxdb-logstash.conf b/docker/influxdb/config/pipeline/output-influxdb-logstash.conf
deleted file mode 100644
index 4acd129..0000000
--- a/docker/influxdb/config/pipeline/output-influxdb-logstash.conf
+++ /dev/null
@@ -1,52 +0,0 @@
-
-filter {
- mutate {
- copy => {
- "[@metadata][timestamp]" => "time"
- }
- }
-}
-output {
-
- influxdb {
-
- id => "jmeter-influxdb-output"
- host => "${INFLUXDB_HOST:localhost}"
- port => "${INFLUXDB_PORT:8086}"
- user => "${INFLUXDB_USER:}"
- password => "${INFLUXDB_PASSWORD:}"
- db => "${INFLUXDB_DB:jmeter}"
- measurement => "${INFLUXDB_MEASUREMENT:jtl}"
- allow_time_override => true
-
-
- use_event_fields_for_data_points => true
- send_as_tags => ["project",
- "environment",
- "testname",
- "executionid",
- "filename",
- "URL",
- "domain",
- "Hostname",
- "globalLabel",
- "label",
- "request",
- "responseCode",
- "responseStatus",
- "responseMessage",
- "threadGrpId",
- "workerNode",
- "threadGrpName",
- "threadName",
- "threadNumber",
- "subresult",
- "success",
- "transaction",
- "redirectLevel"
- ]
- exclude_fields => ["@timestamp", "@version", "sequence", "message", "type", "timestamp","teststart","labels","host","path"]
-
- }
-
-}
diff --git a/testing/Dockerfile b/testing/Dockerfile
index 197a961..def4968 100644
--- a/testing/Dockerfile
+++ b/testing/Dockerfile
@@ -1,4 +1,4 @@
-ARG LOGSTASH_VERSION=7.17.10
+ARG LOGSTASH_VERSION=7.17.14
FROM docker.elastic.co/logstash/logstash-oss:${LOGSTASH_VERSION}
ARG ELASTICSEARCH_VERSION=7