From f1988bb71642c0dcc61739467836fce371f9f4b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Thu, 3 Aug 2023 16:46:39 +0200 Subject: [PATCH 001/120] Init wazuh-indexer (#3) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update CODEOWNERS * Update README.md and SECURITY.md * Add Wazuh configuration files * Update README.md Signed-off-by: Álex Ruiz --- .github/CODEOWNERS | 26 +-- README.md | 39 +++-- SECURITY.md | 9 +- distribution/src/config/jvm.prod.options | 86 ++++++++++ distribution/src/config/opensearch.prod.yml | 42 +++++ .../src/config/security/internal_users.yml | 56 +++++++ distribution/src/config/security/roles.yml | 149 ++++++++++++++++++ .../src/config/security/roles_mapping.yml | 87 ++++++++++ 8 files changed, 456 insertions(+), 38 deletions(-) create mode 100644 distribution/src/config/jvm.prod.options create mode 100644 distribution/src/config/opensearch.prod.yml create mode 100644 distribution/src/config/security/internal_users.yml create mode 100644 distribution/src/config/security/roles.yml create mode 100644 distribution/src/config/security/roles_mapping.yml diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 18a310862dfbb..d4f049c6e2c09 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -10,28 +10,4 @@ # 2. Go to a file # 3. Use the command palette to run the CODEOWNERS: Show owners of current file command, which will display all code owners for the current file. -# Default ownership for all repo files -* @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jainankitk @kotwanikunal @linuxpi @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah - -/modules/lang-painless/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah -/modules/parent-join/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah -/modules/transport-netty4/ @peternied - -/plugins/identity-shiro/ @peternied - -/server/src/internalClusterTest/java/org/opensearch/index/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah -/server/src/internalClusterTest/java/org/opensearch/search/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah - -/server/src/main/java/org/opensearch/extensions/ @peternied -/server/src/main/java/org/opensearch/identity/ @peternied -/server/src/main/java/org/opensearch/index/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah -/server/src/main/java/org/opensearch/search/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja 
@sohami @VachaShah -/server/src/main/java/org/opensearch/threadpool/ @jed326 @peternied -/server/src/main/java/org/opensearch/transport/ @peternied - -/server/src/test/java/org/opensearch/index/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah -/server/src/test/java/org/opensearch/search/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah - -/.github/ @jed326 @peternied - -/MAINTAINERS.md @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gaobinlong @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @peternied @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah +* @wazuh/devel-indexer diff --git a/README.md b/README.md index 802817ec9cff3..257ecd1386a3c 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,10 @@ - +
-[![Chat](https://img.shields.io/badge/chat-on%20forums-blue)](https://forum.opensearch.org/c/opensearch/)
-[![Documentation](https://img.shields.io/badge/documentation-reference-blue)](https://opensearch.org/docs/latest/opensearch/index/)
+[![Chat](https://img.shields.io/badge/chat-on%20forums-blue)](https://groups.google.com/forum/#!forum/wazuh)
+[![Slack](https://img.shields.io/badge/slack-join-blue.svg)](https://wazuh.com/community/join-us-on-slack)
+[![Documentation](https://img.shields.io/badge/documentation-reference-blue)](https://documentation.wazuh.com)
 [![codecov](https://codecov.io/gh/opensearch-project/OpenSearch/branch/2.x/graph/badge.svg)](https://codecov.io/gh/opensearch-project/OpenSearch)
 [![GHA gradle check](https://github.com/opensearch-project/OpenSearch/actions/workflows/gradle-check.yml/badge.svg)](https://github.com/opensearch-project/OpenSearch/actions/workflows/gradle-check.yml)
 [![GHA validate pull request](https://github.com/opensearch-project/OpenSearch/actions/workflows/wrapper.yml/badge.svg)](https://github.com/opensearch-project/OpenSearch/actions/workflows/wrapper.yml)
@@ -19,16 +22,17 @@

 ## Welcome!

-**OpenSearch** is [a community-driven, open source fork](https://aws.amazon.com/blogs/opensource/introducing-opensearch/) of [Elasticsearch](https://en.wikipedia.org/wiki/Elasticsearch) and [Kibana](https://en.wikipedia.org/wiki/Kibana) following the [license change](https://blog.opensource.org/the-sspl-is-not-an-open-source-license/) in early 2021. We're looking to sustain (and evolve!) a search and analytics suite for the multitude of businesses who are dependent on the rights granted by the original, [Apache v2.0 License](LICENSE.txt).
+The Wazuh indexer is a highly scalable, full-text search and analytics engine. This Wazuh central component indexes and stores alerts generated by the Wazuh server and provides near real-time data search and analytics capabilities.
+
+Wazuh indexer is an open source fork of [OpenSearch](https://github.com/opensearch-project/opensearch).

 ## Project Resources

-* [Project Website](https://opensearch.org/)
-* [Downloads](https://opensearch.org/downloads.html)
-* [Documentation](https://opensearch.org/docs/)
-* Need help? Try [Forums](https://discuss.opendistrocommunity.dev/)
-* [Project Principles](https://opensearch.org/#principles)
-* [Contributing to OpenSearch](CONTRIBUTING.md)
+* [Project Website](https://wazuh.com)
+* [Quickstart](https://documentation.wazuh.com/current/quickstart.html)
+* [Documentation](https://documentation.wazuh.com)
+* Need help? Try [Slack](https://wazuh.com/community/join-us-on-slack)
+* [Contributing to Wazuh indexer](CONTRIBUTING.md)
 * [Maintainer Responsibilities](MAINTAINERS.md)
 * [Release Management](RELEASING.md)
 * [Admin Responsibilities](ADMINS.md)
@@ -40,7 +44,15 @@

 This project has adopted the [Amazon Open Source Code of Conduct](CODE_OF_CONDUCT.md). For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq), or contact [opensource-codeofconduct@amazon.com](mailto:opensource-codeofconduct@amazon.com) with any additional questions or comments.

 ## Security
-If you discover a potential security issue in this project we ask that you notify OpenSearch Security directly via email to security@opensearch.org. Please do **not** create a public GitHub issue.
+
+To report a possible vulnerability or security issue you can:
+- Send us an email to security@wazuh.com.
+- Open a new security report under the security tab on this repository. 
+ +**PLEASE DO NOT OPEN A PUBLIC ISSUE ABOUT SECURITY** + +We want to protect our community, so please give us time to fix a vulnerability +before publishing it. ## License @@ -48,10 +60,13 @@ This project is licensed under the [Apache v2.0 License](LICENSE.txt). ## Copyright -Copyright OpenSearch Contributors. See [NOTICE](NOTICE.txt) for details. +- Copyright OpenSearch Contributors. See [NOTICE](NOTICE.txt) for details. +- Copyright Wazuh, Inc. ## Trademark OpenSearch is a registered trademark of Amazon Web Services. OpenSearch includes certain Apache-licensed Elasticsearch code from Elasticsearch B.V. and other source code. Elasticsearch B.V. is not the source of that other source code. ELASTICSEARCH is a registered trademark of Elasticsearch B.V. + +Check Wazuh's [trademark and Brand policy](https://wazuh.com/trademark-and-brand-policy/). diff --git a/SECURITY.md b/SECURITY.md index be4ac7463864a..adefc3e1b9dbc 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,3 +1,10 @@ ## Reporting a Vulnerability -If you discover a potential security issue in this project we ask that you notify OpenSearch Security directly via email to security@opensearch.org. Please do **not** create a public GitHub issue. +To report a possible vulnerability or security issue you can: +- Send us an email to security@wazuh.com. +- Open a new security report under the security tab on this repository. + +**PLEASE DO NOT OPEN A PUBLIC ISSUE ABOUT SECURITY** + +We want to protect our community, so please give us time to fix a vulnerability +before publishing it. diff --git a/distribution/src/config/jvm.prod.options b/distribution/src/config/jvm.prod.options new file mode 100644 index 0000000000000..a9949d48cf3be --- /dev/null +++ b/distribution/src/config/jvm.prod.options @@ -0,0 +1,86 @@ +## JVM configuration + +################################################################ +## IMPORTANT: JVM heap size +################################################################ +## +## You should always set the min and max JVM heap +## size to the same value. For example, to set +## the heap to 4 GB, set: +## +## -Xms4g +## -Xmx4g +## +## See https://opensearch.org/docs/opensearch/install/important-settings/ +## for more information +## +################################################################ + +# Xms represents the initial size of total heap space +# Xmx represents the maximum size of total heap space + +-Xms1g +-Xmx1g + +################################################################ +## Expert settings +################################################################ +## +## All settings below this section are considered +## expert settings. 
Don't tamper with them unless +## you understand what you are doing +## +################################################################ + +## GC configuration +8-10:-XX:+UseConcMarkSweepGC +8-10:-XX:CMSInitiatingOccupancyFraction=75 +8-10:-XX:+UseCMSInitiatingOccupancyOnly + +## G1GC Configuration +# NOTE: G1 GC is only supported on JDK version 10 or later +# to use G1GC, uncomment the next two lines and update the version on the +# following three lines to your version of the JDK +# 10:-XX:-UseConcMarkSweepGC +# 10:-XX:-UseCMSInitiatingOccupancyOnly +11-:-XX:+UseG1GC +11-:-XX:G1ReservePercent=25 +11-:-XX:InitiatingHeapOccupancyPercent=30 + +## JVM temporary directory +-Djava.io.tmpdir=${OPENSEARCH_TMPDIR} + +## heap dumps + +# generate a heap dump when an allocation from the Java heap fails +# heap dumps are created in the working directory of the JVM +-XX:+HeapDumpOnOutOfMemoryError + +# specify an alternative path for heap dumps; ensure the directory exists and +# has sufficient space +-XX:HeapDumpPath=/var/lib/wazuh-indexer + +# specify an alternative path for JVM fatal error logs +-XX:ErrorFile=/var/log/wazuh-indexer/hs_err_pid%p.log + +## JDK 8 GC logging +8:-XX:+PrintGCDetails +8:-XX:+PrintGCDateStamps +8:-XX:+PrintTenuringDistribution +8:-XX:+PrintGCApplicationStoppedTime +8:-Xloggc:/var/log/wazuh-indexer/gc.log +8:-XX:+UseGCLogFileRotation +8:-XX:NumberOfGCLogFiles=32 +8:-XX:GCLogFileSize=64m + +# JDK 9+ GC logging +9-:-Xlog:gc*,gc+age=trace,safepoint:file=/var/log/wazuh-indexer/gc.log:utctime,pid,tags:filecount=32,filesize=64m + +# Explicitly allow security manager (https://bugs.openjdk.java.net/browse/JDK-8270380) +18-:-Djava.security.manager=allow + +## OpenSearch Performance Analyzer +-Dclk.tck=100 +-Djdk.attach.allowAttachSelf=true +-Djava.security.policy=file:///etc/wazuh-indexer/opensearch-performance-analyzer/opensearch_security.policy +--add-opens=jdk.attach/sun.tools.attach=ALL-UNNAMED \ No newline at end of file diff --git a/distribution/src/config/opensearch.prod.yml b/distribution/src/config/opensearch.prod.yml new file mode 100644 index 0000000000000..e81528cea5efe --- /dev/null +++ b/distribution/src/config/opensearch.prod.yml @@ -0,0 +1,42 @@ +network.host: "0.0.0.0" +node.name: "node-1" +cluster.initial_master_nodes: +- "node-1" +#- "node-2" +#- "node-3" +cluster.name: "wazuh-cluster" +#discovery.seed_hosts: +# - "node-1-ip" +# - "node-2-ip" +# - "node-3-ip" +node.max_local_storage_nodes: "3" +path.data: /var/lib/wazuh-indexer +path.logs: /var/log/wazuh-indexer + +plugins.security.ssl.http.pemcert_filepath: /etc/wazuh-indexer/certs/indexer.pem +plugins.security.ssl.http.pemkey_filepath: /etc/wazuh-indexer/certs/indexer-key.pem +plugins.security.ssl.http.pemtrustedcas_filepath: /etc/wazuh-indexer/certs/root-ca.pem +plugins.security.ssl.transport.pemcert_filepath: /etc/wazuh-indexer/certs/indexer.pem +plugins.security.ssl.transport.pemkey_filepath: /etc/wazuh-indexer/certs/indexer-key.pem +plugins.security.ssl.transport.pemtrustedcas_filepath: /etc/wazuh-indexer/certs/root-ca.pem +plugins.security.ssl.http.enabled: true +plugins.security.ssl.transport.enforce_hostname_verification: false +plugins.security.ssl.transport.resolve_hostname: false + +plugins.security.authcz.admin_dn: +- "CN=admin,OU=Wazuh,O=Wazuh,L=California,C=US" +plugins.security.check_snapshot_restore_write_privileges: true +plugins.security.enable_snapshot_restore_privilege: true +plugins.security.nodes_dn: +- "CN=node-1,OU=Wazuh,O=Wazuh,L=California,C=US" +#- 
"CN=node-2,OU=Wazuh,O=Wazuh,L=California,C=US" +#- "CN=node-3,OU=Wazuh,O=Wazuh,L=California,C=US" +plugins.security.restapi.roles_enabled: +- "all_access" +- "security_rest_api_access" + +plugins.security.system_indices.enabled: true +plugins.security.system_indices.indices: [".plugins-ml-model", ".plugins-ml-task", ".opendistro-alerting-config", ".opendistro-alerting-alert*", ".opendistro-anomaly-results*", ".opendistro-anomaly-detector*", ".opendistro-anomaly-checkpoints", ".opendistro-anomaly-detection-state", ".opendistro-reports-*", ".opensearch-notifications-*", ".opensearch-notebooks", ".opensearch-observability", ".opendistro-asynchronous-search-response*", ".replication-metadata-store"] + +### Option to allow Filebeat-oss 7.10.2 to work ### +compatibility.override_main_response_version: true \ No newline at end of file diff --git a/distribution/src/config/security/internal_users.yml b/distribution/src/config/security/internal_users.yml new file mode 100644 index 0000000000000..52069500a9b5b --- /dev/null +++ b/distribution/src/config/security/internal_users.yml @@ -0,0 +1,56 @@ +--- +# This is the internal user database +# The hash value is a bcrypt hash and can be generated with plugin/tools/hash.sh + +_meta: + type: "internalusers" + config_version: 2 + +# Define your internal users here + +## Demo users + +admin: + hash: "$2a$12$VcCDgh2NDk07JGN0rjGbM.Ad41qVR/YFJcgHp0UGns5JDymv..TOG" + reserved: true + backend_roles: + - "admin" + description: "Demo admin user" + +kibanaserver: + hash: "$2a$12$4AcgAt3xwOWadA5s5blL6ev39OXDNhmOesEoo33eZtrq2N0YrU3H." + reserved: true + description: "Demo kibanaserver user" + +kibanaro: + hash: "$2a$12$JJSXNfTowz7Uu5ttXfeYpeYE0arACvcwlPBStB1F.MI7f0U9Z4DGC" + reserved: false + backend_roles: + - "kibanauser" + - "readall" + attributes: + attribute1: "value1" + attribute2: "value2" + attribute3: "value3" + description: "Demo kibanaro user" + +logstash: + hash: "$2a$12$u1ShR4l4uBS3Uv59Pa2y5.1uQuZBrZtmNfqB3iM/.jL0XoV9sghS2" + reserved: false + backend_roles: + - "logstash" + description: "Demo logstash user" + +readall: + hash: "$2a$12$ae4ycwzwvLtZxwZ82RmiEunBbIPiAmGZduBAjKN0TXdwQFtCwARz2" + reserved: false + backend_roles: + - "readall" + description: "Demo readall user" + +snapshotrestore: + hash: "$2y$12$DpwmetHKwgYnorbgdvORCenv4NAK8cPUg8AI6pxLCuWf/ALc0.v7W" + reserved: false + backend_roles: + - "snapshotrestore" + description: "Demo snapshotrestore user" \ No newline at end of file diff --git a/distribution/src/config/security/roles.yml b/distribution/src/config/security/roles.yml new file mode 100644 index 0000000000000..ec669b2fe2c97 --- /dev/null +++ b/distribution/src/config/security/roles.yml @@ -0,0 +1,149 @@ +_meta: + type: "roles" + config_version: 2 + +# Restrict users so they can only view visualization and dashboard on kibana +kibana_read_only: + reserved: true + +# The security REST API access role is used to assign specific users access to change the security settings through the REST API. 
+security_rest_api_access: + reserved: true + +# Allows users to view monitors, destinations and alerts +alerting_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/alerting/alerts/get' + - 'cluster:admin/opendistro/alerting/destination/get' + - 'cluster:admin/opendistro/alerting/monitor/get' + - 'cluster:admin/opendistro/alerting/monitor/search' + +# Allows users to view and acknowledge alerts +alerting_ack_alerts: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/alerting/alerts/*' + +# Allows users to use all alerting functionality +alerting_full_access: + reserved: true + cluster_permissions: + - 'cluster_monitor' + - 'cluster:admin/opendistro/alerting/*' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices_monitor' + - 'indices:admin/aliases/get' + - 'indices:admin/mappings/get' + +# Allow users to read Anomaly Detection detectors and results +anomaly_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/ad/detector/info' + - 'cluster:admin/opendistro/ad/detector/search' + - 'cluster:admin/opendistro/ad/detectors/get' + - 'cluster:admin/opendistro/ad/result/search' + - 'cluster:admin/opendistro/ad/tasks/search' + +# Allows users to use all Anomaly Detection functionality +anomaly_full_access: + reserved: true + cluster_permissions: + - 'cluster_monitor' + - 'cluster:admin/opendistro/ad/*' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices_monitor' + - 'indices:admin/aliases/get' + - 'indices:admin/mappings/get' + +# Allows users to read Notebooks +notebooks_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/notebooks/list' + - 'cluster:admin/opendistro/notebooks/get' + +# Allows users to all Notebooks functionality +notebooks_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/notebooks/create' + - 'cluster:admin/opendistro/notebooks/update' + - 'cluster:admin/opendistro/notebooks/delete' + - 'cluster:admin/opendistro/notebooks/get' + - 'cluster:admin/opendistro/notebooks/list' + +# Allows users to read and download Reports +reports_instances_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/reports/instance/list' + - 'cluster:admin/opendistro/reports/instance/get' + - 'cluster:admin/opendistro/reports/menu/download' + +# Allows users to read and download Reports and Report-definitions +reports_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/reports/definition/get' + - 'cluster:admin/opendistro/reports/definition/list' + - 'cluster:admin/opendistro/reports/instance/list' + - 'cluster:admin/opendistro/reports/instance/get' + - 'cluster:admin/opendistro/reports/menu/download' + +# Allows users to all Reports functionality +reports_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/reports/definition/create' + - 'cluster:admin/opendistro/reports/definition/update' + - 'cluster:admin/opendistro/reports/definition/on_demand' + - 'cluster:admin/opendistro/reports/definition/delete' + - 'cluster:admin/opendistro/reports/definition/get' + - 'cluster:admin/opendistro/reports/definition/list' + - 'cluster:admin/opendistro/reports/instance/list' + - 'cluster:admin/opendistro/reports/instance/get' + - 'cluster:admin/opendistro/reports/menu/download' + +# Allows users to use all asynchronous-search functionality +asynchronous_search_full_access: + reserved: true + cluster_permissions: + - 
'cluster:admin/opendistro/asynchronous_search/*' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices:data/read/search*' + +# Allows users to read stored asynchronous-search results +asynchronous_search_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/asynchronous_search/get' + +# Wazuh monitoring and statistics index permissions +manage_wazuh_index: + reserved: true + hidden: false + cluster_permissions: [] + index_permissions: + - index_patterns: + - "wazuh-*" + dls: "" + fls: [] + masked_fields: [] + allowed_actions: + - "read" + - "delete" + - "manage" + - "index" + tenant_permissions: [] + static: false \ No newline at end of file diff --git a/distribution/src/config/security/roles_mapping.yml b/distribution/src/config/security/roles_mapping.yml new file mode 100644 index 0000000000000..66d530d8e0dbb --- /dev/null +++ b/distribution/src/config/security/roles_mapping.yml @@ -0,0 +1,87 @@ +--- +# In this file users, backendroles and hosts can be mapped to Open Distro Security roles. +# Permissions for Opendistro roles are configured in roles.yml + +_meta: + type: "rolesmapping" + config_version: 2 + +# Define your roles mapping here + +## Default roles mapping + +all_access: + reserved: true + hidden: false + backend_roles: + - "admin" + hosts: [] + users: [] + and_backend_roles: [] + description: "Maps admin to all_access" + +own_index: + reserved: false + hidden: false + backend_roles: [] + hosts: [] + users: + - "*" + and_backend_roles: [] + description: "Allow full access to an index named like the username" + +logstash: + reserved: false + hidden: false + backend_roles: + - "logstash" + hosts: [] + users: [] + and_backend_roles: [] + +readall: + reserved: true + hidden: false + backend_roles: + - "readall" + hosts: [] + users: [] + and_backend_roles: [] + +manage_snapshots: + reserved: true + hidden: false + backend_roles: + - "snapshotrestore" + hosts: [] + users: [] + and_backend_roles: [] + +kibana_server: + reserved: true + hidden: false + backend_roles: [] + hosts: [] + users: + - "kibanaserver" + and_backend_roles: [] + +kibana_user: + reserved: false + hidden: false + backend_roles: + - "kibanauser" + hosts: [] + users: [] + and_backend_roles: [] + description: "Maps kibanauser to kibana_user" + + # Wazuh monitoring and statistics index permissions +manage_wazuh_index: + reserved: true + hidden: false + backend_roles: [] + hosts: [] + users: + - "kibanaserver" + and_backend_roles: [] \ No newline at end of file From 7bd2ffa356164155a18f3e80ea4144fe526bbc30 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Thu, 3 Aug 2023 17:05:30 +0200 Subject: [PATCH 002/120] Create codeql.yml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álex Ruiz --- .github/workflows/codeql.yml | 78 ++++++++++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 .github/workflows/codeql.yml diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000000000..151be09ec95e6 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,78 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. 
Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + schedule: + - cron: '00 8 * * 5' + workflow_dispatch: + +jobs: + analyze: + name: Analyze + # Runner size impacts CodeQL analysis time. To learn more, please see: + # - https://gh.io/recommended-hardware-resources-for-running-codeql + # - https://gh.io/supported-runners-and-hardware-resources + # - https://gh.io/using-larger-runners + # Consider using larger runners for possible analysis time improvements. + runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} + timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'java' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby', 'swift' ] + # Use only 'java' to analyze code written in Java, Kotlin or both + # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
+ + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{matrix.language}}" From 22c9099f0ee3b2a36e165f6cf11619a928cd0897 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Thu, 3 Aug 2023 17:32:06 +0200 Subject: [PATCH 003/120] Update dependabot.yml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álex Ruiz --- .github/dependabot.yml | 175 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 175 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 00dc16d3c36a3..5213d0f36f83b 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,601 +4,721 @@ updates: package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /benchmarks/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/reaper/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/darwin-tar/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/oss-darwin-tar/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/bwc/minor/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/testKit/opensearch-build-resources/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/testKit/opensearch.build/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/testKit/reaper/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/testKit/symbolic-link-preserving-tar/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/testKit/testingConventions/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /buildSrc/src/testKit/thirdPartyAudit/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: 
/buildSrc/src/testKit/thirdPartyAudit/sample_jars/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /client/benchmark/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /client/client-benchmark-noop-api-plugin/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /client/rest/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /client/rest-high-level/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /client/sniffer/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /client/test/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/archives/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/archives/darwin-tar/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/archives/integ-test-zip/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/archives/linux-arm64-tar/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/archives/linux-tar/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/archives/no-jdk-darwin-tar/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/archives/no-jdk-linux-tar/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/archives/no-jdk-windows-zip/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/archives/windows-zip/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/bwc/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/bwc/bugfix/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/bwc/maintenance/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/bwc/minor/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/bwc/staged/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/docker/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/docker/docker-arm64-export/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/docker/docker-build-context/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/docker/docker-export/ 
open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/packages/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/packages/arm64-deb/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/packages/arm64-rpm/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/packages/deb/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/packages/no-jdk-deb/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/packages/no-jdk-rpm/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/packages/rpm/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/tools/java-version-checker/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/tools/keystore-cli/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/tools/launchers/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/tools/plugin-cli/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /distribution/tools/upgrade-cli/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /doc-tools/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /doc-tools/missing-doclet/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/cli/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/core/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/dissect/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/geo/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/grok/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/nio/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/plugin-classloader/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/secure-sm/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/ssl-config/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /libs/x-content/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - 
directory: /modules/aggs-matrix-stats/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/analysis-common/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/geo/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/ingest-common/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/ingest-geoip/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/ingest-user-agent/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/lang-expression/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/lang-mustache/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/lang-painless/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/lang-painless/spi/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/mapper-extras/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/opensearch-dashboards/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/parent-join/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/percolator/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/rank-eval/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/reindex/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/repository-url/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/systemd/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/transport-netty4/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /modules/crypto/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/analysis-icu/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/analysis-kuromoji/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/analysis-nori/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/analysis-phonetic/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/analysis-smartcn/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/analysis-stempel/ open-pull-requests-limit: 1 package-ecosystem: 
gradle schedule: interval: weekly + day: "friday" - directory: /plugins/analysis-ukrainian/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/discovery-azure-classic/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/discovery-ec2/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/discovery-ec2/qa/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/discovery-ec2/qa/amazon-ec2/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/discovery-gce/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/discovery-gce/qa/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/discovery-gce/qa/gce/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/examples/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/examples/custom-settings/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/examples/custom-significance-heuristic/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/examples/custom-suggester/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/examples/painless-whitelist/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/examples/rescore/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/examples/rest-handler/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/examples/script-expert-scoring/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/ingest-attachment/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/mapper-annotated-text/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/mapper-murmur3/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/mapper-size/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/repository-azure/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/repository-gcs/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/repository-hdfs/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/repository-s3/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /plugins/store-smb/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: 
/plugins/transport-nio/ open-pull-requests-limit: 1 package-ecosystem: gradle @@ -609,274 +729,329 @@ updates: package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/ccs-unavailable-clusters/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/die-with-dignity/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/evil-tests/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/full-cluster-restart/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/logging-config/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/mixed-cluster/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/multi-cluster-search/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/no-bootstrap-tests/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/centos-6/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/centos-7/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/debian-8/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/debian-9/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/fedora-28/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/fedora-29/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/oel-6/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/oel-7/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/sles-12/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/ubuntu-1604/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/ubuntu-1804/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/windows-2012r2/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/os/windows-2016/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/remote-clusters/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/repository-multi-version/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/rolling-upgrade/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: 
/qa/smoke-test-http/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/smoke-test-ingest-disabled/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/smoke-test-ingest-with-all-dependencies/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/smoke-test-multinode/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/smoke-test-plugins/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/translog-policy/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/unconfigured-node-name/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/verify-version-constants/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /qa/wildfly/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /rest-api-spec/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /sandbox/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /sandbox/libs/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /sandbox/modules/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /sandbox/plugins/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /server/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/external-modules/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/external-modules/delayed-aggs/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/fixtures/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/fixtures/azure-fixture/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/fixtures/gcs-fixture/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/fixtures/hdfs-fixture/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/fixtures/krb5kdc-fixture/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/fixtures/minio-fixture/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/fixtures/old-elasticsearch/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/fixtures/s3-fixture/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" - directory: /test/framework/ open-pull-requests-limit: 1 package-ecosystem: gradle 
schedule: interval: weekly + day: "friday" - directory: /test/logger-usage/ open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly + day: "friday" version: 2 From 6d25d21127b1f887055231aa012e0fc2358b08c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Tue, 19 Sep 2023 13:34:45 +0200 Subject: [PATCH 004/120] Update SECURITY.md (#30) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álex Ruiz --- SECURITY.md | 53 ++++++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 46 insertions(+), 7 deletions(-) diff --git a/SECURITY.md b/SECURITY.md index adefc3e1b9dbc..b4dec98fdd968 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,10 +1,49 @@ -## Reporting a Vulnerability +# Wazuh Open Source Project Security Policy -To report a possible vulnerability or security issue you can: -- Send us an email to security@wazuh.com. -- Open a new security report under the security tab on this repository. +Version: 2023-06-12 -**PLEASE DO NOT OPEN A PUBLIC ISSUE ABOUT SECURITY** +## Introduction +This document outlines the Security Policy for Wazuh's open source projects. It emphasizes our commitment to maintain a secure environment for our users and contributors, and reflects our belief in the power of collaboration to identify and resolve security vulnerabilities. -We want to protect our community, so please give us time to fix a vulnerability -before publishing it. +## Scope +This policy applies to all open source projects developed, maintained, or hosted by Wazuh. In this case, as this project is a fork, we may forward the reported vulnerability to the upstream. + +## Reporting Security Vulnerabilities +If you believe you've discovered a potential security vulnerability in one of our open source projects, we strongly encourage you to report it to us responsibly. + +Please submit your findings as security advisories under the "Security" tab in the relevant GitHub repository. Alternatively, you may send the details of your findings to security@wazuh.com. + +## Vulnerability Disclosure Policy +Upon receiving a report of a potential vulnerability, our team will initiate an investigation. If the reported issue is confirmed as a vulnerability, we will take the following steps: + +1. Acknowledgment: We will acknowledge the receipt of your vulnerability report and begin our investigation. + +2. Validation: We will validate the issue and work on reproducing it in our environment. + +3. Remediation: We will work on a fix and thoroughly test it + +4. Release & Disclosure: After 90 days from the discovery of the vulnerability, or as soon as a fix is ready and thoroughly tested (whichever comes first), we will release a security update for the affected project. We will also publicly disclose the vulnerability by publishing a CVE (Common Vulnerabilities and Exposures) and acknowledging the discovering party. + +5. Exceptions: In order to preserve the security of the Wazuh community at large, we might extend the disclosure period to allow users to patch their deployments. + +This 90-day period allows for end-users to update their systems and minimizes the risk of widespread exploitation of the vulnerability. + +## Automatic Scanning +We leverage GitHub Actions to perform automated scans of our supply chain. These scans assist us in identifying vulnerabilities and outdated dependencies in a proactive and timely manner. + +## Credit +We believe in giving credit where credit is due. 
If you report a security vulnerability to us, and we determine that it is a valid vulnerability, we will publicly credit you for the discovery when we disclose the vulnerability. If you wish to remain anonymous, please indicate so in your initial report. + +We do appreciate and encourage feedback from our community, but currently we do not have a bounty program. We might start bounty programs in the future. + +## Compliance with this Policy +We consider the discovery and reporting of security vulnerabilities an important public service. We encourage responsible reporting of any vulnerabilities that may be found in our site or applications. + +Furthermore, we will not take legal action against or suspend or terminate access to the site or services of those who discover and report security vulnerabilities in accordance with this policy because of the fact. + +We ask that all users and contributors respect this policy and the security of our community's users by disclosing vulnerabilities to us in accordance with this policy. + +## Changes to this Security Policy +This policy may be revised from time to time. Each version of the policy will be identified at the top of the page by its effective date. + +If you have any questions about this Security Policy, please contact us at security@wazuh.com From 90e8a8d961a3f9014932754cb174eda2471fccc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Mon, 9 Oct 2023 12:48:08 +0200 Subject: [PATCH 005/120] Add ECS mappings generator (#36) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add ECS mappings generator, documentation and files for vulnerability detector * Add event generator script * Update template settings --------- Signed-off-by: Álex Ruiz --- ecs/.gitignore | 3 + ecs/README.md | 104 ++++++++ ecs/generate.sh | 83 +++++++ .../event-generator/event_generator.py | 235 ++++++++++++++++++ ecs/vulnerability-detector/fields/subset.yml | 19 ++ .../fields/template-settings-legacy.json | 19 ++ .../fields/template-settings.json | 21 ++ 7 files changed, 484 insertions(+) create mode 100644 ecs/.gitignore create mode 100644 ecs/README.md create mode 100755 ecs/generate.sh create mode 100755 ecs/vulnerability-detector/event-generator/event_generator.py create mode 100644 ecs/vulnerability-detector/fields/subset.yml create mode 100644 ecs/vulnerability-detector/fields/template-settings-legacy.json create mode 100644 ecs/vulnerability-detector/fields/template-settings.json diff --git a/ecs/.gitignore b/ecs/.gitignore new file mode 100644 index 0000000000000..a8047fcd2d67d --- /dev/null +++ b/ecs/.gitignore @@ -0,0 +1,3 @@ +**/mappings +*.log +generatedData.json \ No newline at end of file diff --git a/ecs/README.md b/ecs/README.md new file mode 100644 index 0000000000000..a2d353d245c81 --- /dev/null +++ b/ecs/README.md @@ -0,0 +1,104 @@ +## ECS mappings generator + +This script generates the ECS mappings for the Wazuh indices. + +### Requirements + +- ECS repository clone. The script is meant to be launched from the root level of that repository. +- Python 3.6 or higher +- jq + +### Folder structure + +There is a folder for each module. Inside each folder, there is a `fields` folder with the required +files to generate the mappings. These are the inputs for the ECS generator. + +### Usage + +**Copy the `generate.sh` script to the root level of the ECS repository.** + +Use the `generate.sh` script to generate the mappings for a module. 
The script takes 3 arguments, +plus 2 optional arguments to upload the mappings to the Wazuh indexer (using **composable** indexes). + +```plaintext +Usage: ./generate.sh [--upload ] + * ECS_VERSION: ECS version to generate mappings for + * INDEXER_SRC: Path to the wazuh-indexer repository + * MODULE: Module to generate mappings for + * --upload : Upload generated index template to the OpenSearch cluster. Defaults to https://localhost:9200 +Example: ./generate.sh v8.10.0 ~/wazuh-indexer vulnerability-detector --upload https://indexer:9200 +``` + +For example, to generate the mappings for the `vulnerability-detector` module using the +ECS version `v8.10.0` and the Wazuh indexer in path `~/wazuh/wazuh-indexer`: + +```bash +./generate.sh v8.10.0 ~/wazuh/wazuh-indexer vulnerability-detector +``` + +### Output + +A new `mappings` folder will be created inside the module folder, containing all the generated files. +The files are versioned using the ECS version, so different versions of the same module can be generated. +For our use case, the most important files are under `mappings//generated/elasticsearch/legacy/`: + +- `template.json`: Elasticsearch compatible index template for the module +- `opensearch-template.json`: OpenSearch compatible index template for the module + +The original output is `template.json`, which is not compatible with OpenSearch by default. In order +to make this template compatible with OpenSearch, the following changes are made: + +- the `order` property is renamed to `priority`. +- the `mappings` and `settings` properties are nested under the `template` property. + +The script takes care of these changes automatically, generating the `opensearch-template.json` file as a result. + +### Upload + +You can either upload the index template using cURL or the UI (dev tools). + +```bash +curl -u admin:admin -k -X PUT "https://indexer:9200/_index_template/wazuh-vulnerability-detector" -H "Content-Type: application/json" -d @opensearch-template.json +``` + +Notes: +- PUT and POST are interchangeable. +- The name of the index template does not matter. Any name can be used. +- Adjust credentials and URL accordingly. + +### Adding new mappings + +The easiest way to create mappings for a new module is to take a previous one as a base. +Copy a folder and rename it to the new module name. Then, edit the `fields` files to +match the new module fields. + +The name of the folder will be the name of the module to be passed to the script. All 3 files +are required. + +- `fields/subset.yml`: This file contains the subset of ECS fields to be used for the module. +- `fields/template-settings-legacy.json`: This file contains the legacy template settings for the module. +- `fields/template-settings.json`: This file contains the composable template settings for the module. + +### Event generator + +For testing purposes, the script `generate_events.py` can be used to generate events for a given module. +Currently, it is only able to generate events for the `vulnerability-detector` module. To support other +modules, please extend or refactor the script. + +The script prompts for the required parameters, so it can be launched without arguments: + +```bash +./event_generator.py +``` + +The script will generate a JSON file with the events, and will also ask whether to upload them to the +indexer. If the upload option is selected, the script will ask for the indexer URL and port, credentials, +and index name. + +The script uses a log file. Check it out for debugging or additional information. 
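+
+The generator writes one JSON event per line and, if you choose to inject them, posts them to the indexer one by one. If you would rather inspect the saved file and load it in a single request afterwards, a minimal sketch along these lines should work. This is not part of the tooling itself, and the index name, credentials and URL are placeholders to adjust to your environment:
+
+```bash
+# Peek at a couple of generated events (jq is already a requirement of this tooling).
+head -n 2 generatedData.json | jq '.vulnerability | {id, severity}'
+
+# Convert the file into a bulk request body (one action line per event) and upload it.
+# The index name, credentials and URL below are examples only.
+awk '{print "{\"index\":{}}"; print}' generatedData.json > bulk.ndjson
+curl -s -k -u admin:admin \
+  -X POST "https://localhost:9200/wazuh-states-vulnerabilities/_bulk" \
+  -H 'Content-Type: application/x-ndjson' \
+  --data-binary @bulk.ndjson | jq '.errors'
+```
+
+If every event was indexed, the `errors` field of the bulk response is `false`.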
+ +#### References + +- [ECS repository](https://github.com/elastic/ecs) +- [ECS usage](https://github.com/elastic/ecs/blob/main/USAGE.md) +- [ECS field reference](https://www.elastic.co/guide/en/ecs/current/ecs-field-reference.html) diff --git a/ecs/generate.sh b/ecs/generate.sh new file mode 100755 index 0000000000000..b618bb5e97fd6 --- /dev/null +++ b/ecs/generate.sh @@ -0,0 +1,83 @@ +#!/bin/bash + +# Function to display usage information +show_usage() { + echo "Usage: $0 [--upload ]" + echo " * ECS_VERSION: ECS version to generate mappings for" + echo " * INDEXER_SRC: Path to the wazuh-indexer repository" + echo " * MODULE: Module to generate mappings for" + echo " * --upload : Upload generated index template to the OpenSearch cluster. Defaults to https://localhost:9200" + echo "Example: $0 v8.10.0 ~/wazuh-indexer vulnerability-detector --upload https://indexer:9200" +} + +# Function to generate mappings +generate_mappings() { + ECS_VERSION="$1" + INDEXER_SRC="$2" + MODULE="$3" + UPLOAD="$4" + URL="$5" + + IN_FILES_DIR="$INDEXER_SRC/ecs/$MODULE/fields" + OUT_DIR="$INDEXER_SRC/ecs/$MODULE/mappings/$ECS_VERSION" + + # Ensure the output directory exists + mkdir -p "$OUT_DIR" || exit 1 + + # Generate mappings + python scripts/generator.py --strict --ref "$ECS_VERSION" \ + --subset "$IN_FILES_DIR/subset.yml" \ + --template-settings "$IN_FILES_DIR/template-settings.json" \ + --template-settings-legacy "$IN_FILES_DIR/template-settings-legacy.json" \ + --out "$OUT_DIR" || exit 1 + + # Replace "match_only_text" type (not supported by OpenSearch) with "text" + echo "Replacing \"match_only_text\" type with \"text\"" + find "$OUT_DIR" -type f -exec sed -i 's/match_only_text/text/g' {} \; + + # Transform legacy index template for OpenSearch compatibility + cat "$OUT_DIR/generated/elasticsearch/legacy/template.json" | jq '{ + "index_patterns": .index_patterns, + "priority": .order, + "template": { + "settings": .settings, + "mappings": .mappings + } + }' >"$OUT_DIR/generated/elasticsearch/legacy/opensearch-template.json" + + # Check if the --upload flag has been provided + if [ "$UPLOAD" == "--upload" ]; then + upload_mappings "$OUT_DIR" "$URL" || exit 1 + fi + + echo "Mappings saved to $OUT_DIR" +} + +# Function to upload generated composable index template to the OpenSearch cluster +upload_mappings() { + OUT_DIR="$1" + URL="$2" + + echo "Uploading index template to the OpenSearch cluster" + for file in "$OUT_DIR/generated/elasticsearch/composable/component"/*.json; do + component_name=$(basename "$file" .json) + echo "Uploading $component_name" + curl -u admin:admin -X PUT "$URL/_component_template/$component_name?pretty" -H 'Content-Type: application/json' -d@"$file" || exit 1 + done +} + +# Check if the minimum required arguments have been provided +if [ $# -lt 3 ]; then + show_usage + exit 1 +fi + +# Parse command line arguments +ECS_VERSION="$1" +INDEXER_SRC="$2" +MODULE="$3" +UPLOAD="${4:-false}" +URL="${5:-https://localhost:9200}" + +# Generate mappings +generate_mappings "$ECS_VERSION" "$INDEXER_SRC" "$MODULE" "$UPLOAD" "$URL" diff --git a/ecs/vulnerability-detector/event-generator/event_generator.py b/ecs/vulnerability-detector/event-generator/event_generator.py new file mode 100755 index 0000000000000..9cbc0efc44f92 --- /dev/null +++ b/ecs/vulnerability-detector/event-generator/event_generator.py @@ -0,0 +1,235 @@ +#!/bin/python3 + +# This script generates sample events and injects them into the Wazuh Indexer. 
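+# It is interactive: run it directly (./event_generator.py) and it prompts for the number
+# of events to generate and, optionally, for the connection details of the indexer where
+# the events will be injected.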
+# The events follow the Elastic Common Schema (ECS) format, and contains the following fields: +# - ecs +# - base +# - event +# - agent +# - package +# - host +# - vulnerability +# +# This is an ad-hoc script for the vulnearbility module. Extend to support other modules. + +import datetime +import random +import json +import requests +import warnings +import logging + +# Constants and Configuration +LOG_FILE = 'generate_data.log' +GENERATED_DATA_FILE = 'generatedData.json' +DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" + +# Configure logging +logging.basicConfig(filename=LOG_FILE, level=logging.INFO) + +# Suppress warnings +warnings.filterwarnings("ignore") + + +def generate_random_date(): + start_date = datetime.datetime.now() + end_date = start_date - datetime.timedelta(days=10) + random_date = start_date + (end_date - start_date) * random.random() + return random_date.strftime(DATE_FORMAT) + + +def generate_random_agent(): + agent = { + 'build': {'original': f'build{random.randint(0, 9999)}'}, + 'id': f'agent{random.randint(0, 99)}', + 'name': f'Agent{random.randint(0, 99)}', + 'version': f'v{random.randint(0, 9)}-stable', + 'ephemeral_id': f'{random.randint(0, 99999)}', + 'type': random.choice(['filebeat', 'windows', 'linux', 'macos']) + } + return agent + + +def generate_random_event(): + event = { + 'action': random.choice(['login', 'logout', 'create', 'delete', 'modify', 'read', 'write', 'upload', 'download', + 'copy', 'paste', 'cut', 'move', 'rename', 'open', 'close', 'execute', 'run', 'install', + 'uninstall', 'start', 'stop', 'kill', 'suspend', 'resume', 'sleep', 'wake', 'lock', + 'unlock', 'encrypt', 'decrypt', 'compress', 'decompress', 'archive', 'unarchive', + 'mount', 'unmount', 'eject', 'connect', 'disconnect', 'send', 'receive']), + 'agent_id_status': random.choice(['verified', 'mismatch', 'missing', 'auth_metadata_missing']), + 'category': random.choice(['authentication', 'authorization', 'configuration', 'communication', 'file', + 'network', 'process', 'registry', 'storage', 'system', 'web']), + 'code': f'{random.randint(0, 99999)}', + 'created': generate_random_date(), + 'dataset': random.choice(['process', 'file', 'registry', 'socket', 'dns', 'http', 'tls', 'alert', + 'authentication', 'authorization', 'configuration', 'communication', 'file', + 'network', 'process', 'registry', 'storage', 'system', 'web']), + 'duration': random.randint(0, 99999), + 'end': generate_random_date(), + 'hash': str(hash(f'hash{random.randint(0, 99999)}')), + 'id': f'{random.randint(0, 99999)}', + 'ingested': generate_random_date(), + 'kind': random.choice(['alert', 'asset', 'enrichment', 'event', 'metric', + 'state', 'pipeline_error', 'signal']), + 'module': random.choice(['process', 'file', 'registry', 'socket', 'dns', 'http', 'tls', 'alert', + 'authentication', 'authorization', 'configuration', 'communication', 'file', + 'network', 'process', 'registry', 'storage', 'system', 'web']), + 'original': f'original{random.randint(0, 99999)}', + 'outcome': random.choice(['success', 'failure', 'unknown']), + 'provider': random.choice(['process', 'file', 'registry', 'socket', 'dns', 'http', 'tls', 'alert', + 'authentication', 'authorization', 'configuration', 'communication', 'file', + 'network', 'process', 'registry', 'storage', 'system', 'web']), + 'reason': f'This event happened due to reason{random.randint(0, 99999)}', + 'reference': f'https://system.example.com/event/#{random.randint(0, 99999)}', + 'risk_score': round(random.uniform(0, 10), 1), + 'risk_score_norm': round(random.uniform(0, 10), 1), + 
'sequence': random.randint(0, 10), + 'severity': random.randint(0, 10), + 'start': generate_random_date(), + 'timezone': random.choice(['UTC', 'GMT', 'PST', 'EST', 'CST', 'MST', 'PDT', 'EDT', 'CDT', 'MDT']), + 'type': random.choice(['access', 'admin', 'allowed', 'change', 'connection', 'creation', 'deletion', + 'denied', 'end', 'error', 'group', 'indicator', 'info', 'installation', 'protocol', + 'start', 'user']), + 'url': f'http://mysystem.example.com/alert/{random.randint(0, 99999)}' + } + return event + + +def generate_random_host(): + family = random.choice(['debian', 'ubuntu', 'macos', 'ios', 'android', 'RHEL']) + version = f'{random.randint(0, 99)}.{random.randint(0, 99)}' + host = { + 'os': { + 'family': family, + 'full': f'{family} {version}', + 'kernel': f'{version}kernel{random.randint(0, 99)}', + 'name': f'{family} {version}', + 'platform': family, + 'type': random.choice(['windows', 'linux', 'macos', 'ios', 'android', 'unix']), + 'version': version + } + } + return host + + +def generate_random_labels(): + labels = {'label1': f'label{random.randint(0, 99)}', 'label2': f'label{random.randint(0, 99)}'} + return labels + + +def generate_random_package(): + package = { + 'architecture': random.choice(['x86', 'x64', 'arm', 'arm64']), + 'build_version': f'build{random.randint(0, 9999)}', + 'checksum': f'checksum{random.randint(0, 9999)}', + 'description': f'description{random.randint(0, 9999)}', + 'install_scope': random.choice(['user', 'system']), + 'installed': generate_random_date(), + 'license': f'license{random.randint(0, 9)}', + 'name': f'name{random.randint(0, 99)}', + 'path': f'/path/to/package{random.randint(0, 99)}', + 'reference': f'package-reference-{random.randint(0, 99)}', + 'size': random.randint(0, 99999), + 'type': random.choice(['deb', 'rpm', 'msi', 'pkg', 'app', 'apk', 'exe', 'zip', 'tar', 'gz', '7z', + 'rar', 'cab', 'iso', 'dmg', 'tar.gz', 'tar.bz2', 'tar.xz', 'tar.Z', 'tar.lz4', + 'tar.sz', 'tar.zst']), + 'version': f'v{random.randint(0, 9)}-stable' + } + return package + + +def generate_random_tags(): + tags = [f'tag{random.randint(0, 99)}' for _ in range(random.randint(0, 9))] + return tags + + +def generate_random_vulnerability(): + id = random.randint(0, 9999) + vulnerability = { + 'category': random.choice(['security', 'config', 'os', 'package', 'custom']), + 'classification': [f'classification{random.randint(0, 9999)}'], + 'description': f'description{random.randint(0, 9999)}', + 'enumeration': 'CVE', + 'id': f'CVE-{id}', + 'reference': f'https://mycve.test.org/cgi-bin/cvename.cgi?name={id}', + 'report_id': f'report-{random.randint(0, 9999)}', + 'scanner': {'vendor': f'vendor-{random.randint(0, 9)}'}, + 'score': { + 'base': round(random.uniform(0, 10), 1), + 'environmental': round(random.uniform(0, 10), 1), + 'temporal': round(random.uniform(0, 10), 1), + 'version': round(random.uniform(0, 10), 1) + }, + 'severity': random.choice(['low', 'medium', 'high', 'critical']) + } + return vulnerability + + +def generate_random_data(number): + data = [] + for _ in range(number): + event_data = { + '@timestamp': generate_random_date(), + 'agent': generate_random_agent(), + 'ecs': {'version': '1.7.0'}, + 'event': generate_random_event(), + 'host': generate_random_host(), + 'labels': generate_random_labels(), + 'message': f'message{random.randint(0, 99999)}', + 'package': generate_random_package(), + 'tags': generate_random_tags(), + 'vulnerability': generate_random_vulnerability() + } + data.append(event_data) + return data + + +def inject_events(ip, port, index, 
username, password, data): + url = f'https://{ip}:{port}/{index}/_doc' + session = requests.Session() + session.auth = (username, password) + session.verify = False + headers = {'Content-Type': 'application/json'} + + try: + for event_data in data: + response = session.post(url, json=event_data, headers=headers) + if response.status_code != 201: + logging.error(f'Error: {response.status_code}') + logging.error(response.text) + break + logging.info('Data injection completed successfully.') + except Exception as e: + logging.error(f'Error: {str(e)}') + + +def main(): + try: + number = int(input("How many events do you want to generate? ")) + except ValueError: + logging.error("Invalid input. Please enter a valid number.") + return + + logging.info(f"Generating {number} events...") + data = generate_random_data(number) + + with open(GENERATED_DATA_FILE, 'a') as outfile: + for event_data in data: + json.dump(event_data, outfile) + outfile.write('\n') + + logging.info('Data generation completed.') + + inject = input("Do you want to inject the generated data into your indexer? (y/n) ").strip().lower() + if inject == 'y': + ip = input("Enter the IP of your Indexer: ") + port = input("Enter the port of your Indexer: ") + index = input("Enter the index name: ") + username = input("Username: ") + password = input("Password: ") + inject_events(ip, port, index, username, password, data) + + +if __name__ == "__main__": + main() diff --git a/ecs/vulnerability-detector/fields/subset.yml b/ecs/vulnerability-detector/fields/subset.yml new file mode 100644 index 0000000000000..2c8dc0ca3b30f --- /dev/null +++ b/ecs/vulnerability-detector/fields/subset.yml @@ -0,0 +1,19 @@ +--- +name: vulnerability_detector +fields: + base: + fields: "*" + agent: + fields: "*" + ecs: + fields: "*" + event: + fields: "*" + package: + fields: "*" + host: + fields: + os: + fields: "*" + vulnerability: + fields: "*" diff --git a/ecs/vulnerability-detector/fields/template-settings-legacy.json b/ecs/vulnerability-detector/fields/template-settings-legacy.json new file mode 100644 index 0000000000000..21ee9e7a850fc --- /dev/null +++ b/ecs/vulnerability-detector/fields/template-settings-legacy.json @@ -0,0 +1,19 @@ +{ + "index_patterns": [ + "wazuh-states-vulnerabilities" + ], + "order": 1, + "settings": { + "index": { + "codec": "best_compression", + "mapping": { + "total_fields": { + "limit": 1000 + } + }, + "number_of_shards": "1", + "number_of_replicas": "0", + "refresh_interval": "2s" + } + } +} \ No newline at end of file diff --git a/ecs/vulnerability-detector/fields/template-settings.json b/ecs/vulnerability-detector/fields/template-settings.json new file mode 100644 index 0000000000000..bf2dcb4216aff --- /dev/null +++ b/ecs/vulnerability-detector/fields/template-settings.json @@ -0,0 +1,21 @@ +{ + "index_patterns": [ + "wazuh-states-vulnerabilities" + ], + "priority": 1, + "template": { + "settings": { + "index": { + "codec": "best_compression", + "mapping": { + "total_fields": { + "limit": 2000 + } + }, + "number_of_shards": "1", + "number_of_replicas": "0", + "refresh_interval": "2s" + } + } + } +} From a98539c5787663f13f782667432a5d6808f56fe8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Fri, 20 Oct 2023 13:00:58 +0200 Subject: [PATCH 006/120] Add default query fields to vulnerability detector index (#40) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add ECS mappings generator, documentation and files for vulnerability detector * Add event generator script * 
Add default query fields --------- Signed-off-by: Álex Ruiz --- ecs/README.md | 8 ++++---- .../fields/template-settings-legacy.json | 18 +++++++++++++++++- .../fields/template-settings.json | 18 +++++++++++++++++- 3 files changed, 38 insertions(+), 6 deletions(-) diff --git a/ecs/README.md b/ecs/README.md index a2d353d245c81..d16301fa9bdff 100644 --- a/ecs/README.md +++ b/ecs/README.md @@ -8,7 +8,7 @@ This script generates the ECS mappings for the Wazuh indices. - Python 3.6 or higher - jq -### Folder structure +### Folder structrue There is a folder for each module. Inside each folder, there is a `fields` folder with the required files to generate the mappings. These are the inputs for the ECS generator. @@ -62,7 +62,7 @@ curl -u admin:admin -k -X PUT "https://indexer:9200/_index_template/wazuh-vulner ``` Notes: -- PUT and POST are interchangeable. +- PUT and POST are interchangable. - The name of the index template does not matter. Any name can be used. - Adjust credentials and URL accordingly. @@ -83,7 +83,7 @@ are required. For testing purposes, the script `generate_events.py` can be used to generate events for a given module. Currently, it is only able to generate events for the `vulnerability-detector` module. To support other -modules, please extend or refactor the script. +modules, please extend of refactor the script. The script prompts for the required parameters, so it can be launched without arguments: @@ -95,7 +95,7 @@ The script will generate a JSON file with the events, and will also ask whether indexer. If the upload option is selected, the script will ask for the indexer URL and port, credentials, and index name. -The script uses a log file. Check it out for debugging or additional information. +The script uses log file. Check it out for debugging or additonal information. 
#### References diff --git a/ecs/vulnerability-detector/fields/template-settings-legacy.json b/ecs/vulnerability-detector/fields/template-settings-legacy.json index 21ee9e7a850fc..5f3135175c9af 100644 --- a/ecs/vulnerability-detector/fields/template-settings-legacy.json +++ b/ecs/vulnerability-detector/fields/template-settings-legacy.json @@ -13,7 +13,23 @@ }, "number_of_shards": "1", "number_of_replicas": "0", - "refresh_interval": "2s" + "refresh_interval": "2s", + "query.default_field": [ + "base.tags", + "agent.id", + "ecs.version", + "event.id", + "event.module", + "event.severity", + "host.os.family", + "host.os.full.text", + "host.os.version", + "package.name", + "package.version", + "vulnerability.id", + "vulnerability.description.text", + "vulnerability.severity" + ] } } } \ No newline at end of file diff --git a/ecs/vulnerability-detector/fields/template-settings.json b/ecs/vulnerability-detector/fields/template-settings.json index bf2dcb4216aff..48e2b051599e8 100644 --- a/ecs/vulnerability-detector/fields/template-settings.json +++ b/ecs/vulnerability-detector/fields/template-settings.json @@ -14,7 +14,23 @@ }, "number_of_shards": "1", "number_of_replicas": "0", - "refresh_interval": "2s" + "refresh_interval": "2s", + "query.default_field": [ + "base.tags", + "agent.id", + "ecs.version", + "event.id", + "event.module", + "event.severity", + "host.os.family", + "host.os.full.text", + "host.os.version", + "package.name", + "package.version", + "vulnerability.id", + "vulnerability.description.text", + "vulnerability.severity" + ] } } } From 1fe63cd7c514008f1266c46ec12eb528582ce914 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Fri, 20 Oct 2023 15:13:05 +0200 Subject: [PATCH 007/120] Create gradle_build.yml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álex Ruiz --- .github/workflows/gradle_build.yml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 .github/workflows/gradle_build.yml diff --git a/.github/workflows/gradle_build.yml b/.github/workflows/gradle_build.yml new file mode 100644 index 0000000000000..cf2c9cda8a5b5 --- /dev/null +++ b/.github/workflows/gradle_build.yml @@ -0,0 +1,22 @@ +name: Run Gradle Build +on: + schedule: + - cron: '00 8 * * 5' +jobs: + gradle: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + distribution: temurin + java-version: 11 + + - name: Setup Gradle + uses: gradle/gradle-build-action@v2 + + - name: Execute Gradle build + run: ./gradlew build From 545dc8e60405723e0006aeb6d92125b429ee4f13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Fri, 20 Oct 2023 15:15:15 +0200 Subject: [PATCH 008/120] Update gradle_build.yml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álex Ruiz --- .github/workflows/gradle_build.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/gradle_build.yml b/.github/workflows/gradle_build.yml index cf2c9cda8a5b5..d7916f852abf1 100644 --- a/.github/workflows/gradle_build.yml +++ b/.github/workflows/gradle_build.yml @@ -4,10 +4,7 @@ on: - cron: '00 8 * * 5' jobs: gradle: - strategy: - matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - runs-on: ${{ matrix.os }} + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: actions/setup-java@v3 @@ -16,7 +13,7 @@ 
jobs: java-version: 11 - name: Setup Gradle - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@v2.9.0 - name: Execute Gradle build run: ./gradlew build From c95fcebba0d67f58d55feb6eb229ecc61d2de602 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Thu, 2 Nov 2023 19:42:56 +0100 Subject: [PATCH 009/120] Add a script to configure the rollover policy (#49) --- scripts/indexer-ism-init.sh | 208 ++++++++++++++++++++++++++++++++++++ 1 file changed, 208 insertions(+) create mode 100644 scripts/indexer-ism-init.sh diff --git a/scripts/indexer-ism-init.sh b/scripts/indexer-ism-init.sh new file mode 100644 index 0000000000000..5b77addc44e16 --- /dev/null +++ b/scripts/indexer-ism-init.sh @@ -0,0 +1,208 @@ +#!/bin/bash +# Wazuh Copyright (C) 2023 Wazuh Inc. (License GPLv2) +# Wazuh - Indexer set rollover policy and templates + +# Policy settings +MIN_SHARD_SIZE=${MIN_SHARD_SIZE:-25} +MIN_INDEX_AGE=${MIN_INDEX_AGE:-"7d"} +MIN_DOC_COUNT=${MIN_DOC_COUNT:-200000000} +ISM_INDEX_PATTERNS=${ISM_INDEX_PATTERNS:-'["wazuh-alerts-*", "wazuh-archives-*", "-wazuh-alerts-4.x-sample*"]'} +ISM_PRIORITY=${ISM_PRIORITY:-50} + +POLICY_NAME="rollover_policy" + +INDEXER_URL="https://localhost:9200" + +# curl settings shortcuts +C_AUTH="-u admin:admin" + +######################################################################### +# Creates the rollover_policy ISM policy. +# Globals: +# MIN_SHARD_SIZE: The minimum shard size in GB. +# MIN_INDEX_AGE: The minimum index age. +# MIN_DOC_COUNT: The minimum document count. +# ISM_INDEX_PATTERNS: The index patterns to apply the policy. +# ISM_PRIORITY: The policy priority. +# Arguments: +# None. +# Returns: +# The rollover policy as a JSON string +######################################################################### +function generate_rollover_policy() { + cat < Date: Tue, 7 Nov 2023 13:48:29 +0100 Subject: [PATCH 010/120] Update ISM init script (#50) --- scripts/indexer-ism-init.sh | 298 +++++++++++++++++++++++------------- 1 file changed, 192 insertions(+), 106 deletions(-) diff --git a/scripts/indexer-ism-init.sh b/scripts/indexer-ism-init.sh index 5b77addc44e16..eabdb81ca041a 100644 --- a/scripts/indexer-ism-init.sh +++ b/scripts/indexer-ism-init.sh @@ -3,18 +3,20 @@ # Wazuh - Indexer set rollover policy and templates # Policy settings -MIN_SHARD_SIZE=${MIN_SHARD_SIZE:-25} -MIN_INDEX_AGE=${MIN_INDEX_AGE:-"7d"} -MIN_DOC_COUNT=${MIN_DOC_COUNT:-200000000} -ISM_INDEX_PATTERNS=${ISM_INDEX_PATTERNS:-'["wazuh-alerts-*", "wazuh-archives-*", "-wazuh-alerts-4.x-sample*"]'} -ISM_PRIORITY=${ISM_PRIORITY:-50} +MIN_SHARD_SIZE="25" +MIN_INDEX_AGE="7d" +MIN_DOC_COUNT="200000000" +ISM_INDEX_PATTERNS='["wazuh-alerts-*", "wazuh-archives-*", "-wazuh-alerts-4.x-sample*"]' +ISM_PRIORITY="50" +INDEXER_PASSWORD="admin" +INDEXER_HOSTNAME="localhost" POLICY_NAME="rollover_policy" -INDEXER_URL="https://localhost:9200" +INDEXER_URL="https://${INDEXER_HOSTNAME}:9200" # curl settings shortcuts -C_AUTH="-u admin:admin" +C_AUTH="-u admin:${INDEXER_PASSWORD}" ######################################################################### # Creates the rollover_policy ISM policy. @@ -30,35 +32,34 @@ C_AUTH="-u admin:admin" # The rollover policy as a JSON string ######################################################################### function generate_rollover_policy() { - cat <" + echo -e " Set the minimum index age. By default 7d." + echo -e "" + echo -e " -d, --min-doc-count " + echo -e " Set the minimum document count. By default 200000000." 
+ echo -e "" + echo -e " -h, --help" + echo -e " Shows help." + echo -e "" + echo -e " -i, --indexer-hostname " + echo -e " Specifies the Wazuh indexer hostname or IP." + echo -e "" + echo -e " -p, --indexer-password " + echo -e " Specifies the Wazuh indexer admin user password." + echo -e "" + echo -e " -s, --min-shard-size " + echo -e " Set the minimum shard size in GB. By default 25." + echo -e "" + exit 1 +} + ######################################################################### # Main function. ######################################################################### function main() { - # The list should contain every alias which indices implement the - # rollover policy - aliases=("wazuh-alerts" "wazuh-archives") + # The list should contain every alias which indices implement the + # rollover policy + aliases=("wazuh-alerts" "wazuh-archives") + + while [ -n "${1}" ]; do + case "${1}" in + "-a" | "--min-index-age") + if [ -z "${2}" ]; then + echo "Error on arguments. Probably missing after -a|--min-index-age" + show_help + else + MIN_INDEX_AGE="${2}" + shift 2 + fi + ;; + "-d" | "--min-doc-count") + if [ -z "${2}" ]; then + echo "Error on arguments. Probably missing after -d|--min-doc-count" + show_help + else + MIN_DOC_COUNT="${2}" + shift 2 + fi + ;; + "-h" | "--help") + show_help + ;; + "-i" | "--indexer-hostname") + if [ -z "${2}" ]; then + echo "Error on arguments. Probably missing after -i|--indexer-hostname" + show_help + else + INDEXER_HOSTNAME="${2}" + shift 2 + fi + ;; + "-p" | "--indexer-password") + if [ -z "${2}" ]; then + echo "Error on arguments. Probably missing after -p|--indexer-password" + show_help + else + INDEXER_PASSWORD="${2}" + C_AUTH="-u admin:${INDEXER_PASSWORD}" + shift 2 + fi + ;; + "-s" | "--min-shard-size") + if [ -z "${2}" ]; then + echo "Error on arguments. Probably missing after -s|--min-shard-size" + show_help + else + MIN_SHARD_SIZE="${2}" + shift 2 + fi + ;; + *) + echo "Unknow option: ${1}" + show_help + ;; + esac + done - # Load the Wazuh Indexer templates - load_templates + # Load the Wazuh Indexer templates + load_templates - # Upload the rollover policy - upload_rollover_policy + # Upload the rollover policy + upload_rollover_policy - # Create the initial write indices - create_indices "${aliases[@]}" + # Create the initial write indices + create_indices "${aliases[@]}" } -main "$@" \ No newline at end of file +main "$@" From 375cebf4925bfb0a53a54b5d71a48e828cf8b8d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Wed, 8 Nov 2023 18:29:03 +0100 Subject: [PATCH 011/120] Fix bug with -i option (#51) * Fix bug with -i option * Improve error handling --- scripts/indexer-ism-init.sh | 72 +++++++++++++++++++++++++++---------- 1 file changed, 54 insertions(+), 18 deletions(-) diff --git a/scripts/indexer-ism-init.sh b/scripts/indexer-ism-init.sh index eabdb81ca041a..15998ed84fdaf 100644 --- a/scripts/indexer-ism-init.sh +++ b/scripts/indexer-ism-init.sh @@ -85,10 +85,15 @@ function generate_rollover_template() { function load_templates() { # Note: the wazuh-template.json could also be loaded here. for alias in "${aliases[@]}"; do - echo "TEMPLATES AND POLICIES - Uploading ${alias} template" - generate_rollover_template "${alias}" | curl -s -k ${C_AUTH} \ - -X PUT "${INDEXER_URL}/_template/${alias}-rollover" -o /dev/null \ - -H 'Content-Type: application/json' -d @- + generate_rollover_template "${alias}" | + if ! 
curl -s -k ${C_AUTH} \ + -X PUT "${INDEXER_URL}/_template/${alias}-rollover" -o /dev/null \ + -H 'Content-Type: application/json' -d @-; then + echo "Error uploading ${alias} template" + return 1 + else + echo "${alias} template uploaded" + fi done } @@ -110,15 +115,21 @@ function upload_rollover_policy() { # Check if the ${POLICY_NAME} ISM policy was loaded (404 error if not found) if [[ "${policy_exists}" == "404" ]]; then - echo "TEMPLATES AND POLICIES - Uploading ${POLICY_NAME} ISM policy" - curl -s -k ${C_AUTH} -o /dev/null \ + if ! curl -s -k ${C_AUTH} -o /dev/null \ -X PUT "${INDEXER_URL}/_plugins/_ism/policies/${POLICY_NAME}" \ - -H 'Content-Type: application/json' -d "$(generate_rollover_policy)" + -H 'Content-Type: application/json' \ + -d "$(generate_rollover_policy)"; then + echo "Error uploading ${POLICY_NAME} policy" + return 1 + else + echo "${POLICY_NAME} policy uploaded" + fi else if [[ "${policy_exists}" == "200" ]]; then - echo "TEMPLATES AND POLICIES - ${POLICY_NAME} policy already exists" + echo "${POLICY_NAME} policy already exists" else - echo "TEMPLATES AND POLICIES - Error uploading ${POLICY_NAME} policy" + echo "Error checking if ${POLICY_NAME} exists" + return 1 fi fi } @@ -158,9 +169,15 @@ function generate_write_index_alias() { # 1. The alias. String. ######################################################################### function create_write_index() { - curl -s -k ${C_AUTH} -o /dev/null \ + if ! curl -s -k ${C_AUTH} -o /dev/null \ -X PUT "$INDEXER_URL/%3C${1}-4.x-%7Bnow%2Fd%7D-000001%3E?pretty" \ - -H 'Content-Type: application/json' -d "$(generate_write_index_alias "${1}")" + -H 'Content-Type: application/json' \ + -d "$(generate_write_index_alias "${1}")"; then + echo "Error creating ${1} write index" + exit 1 + else + echo "${1} write index created" + fi } ######################################################################### @@ -169,7 +186,6 @@ function create_write_index() { # 1. List of aliases to initialize. ######################################################################### function create_indices() { - echo "TEMPLATES AND POLICIES - Creating write indices" for alias in "${aliases[@]}"; do # Check if there are any write indices for the current alias write_index_exists=$(check_for_write_index "${alias}") @@ -181,7 +197,6 @@ function create_indices() { done } - ######################################################################### # Shows usage help. ######################################################################### @@ -209,9 +224,15 @@ function show_help() { echo -e " -p, --indexer-password " echo -e " Specifies the Wazuh indexer admin user password." echo -e "" + echo -e " -P, --priority " + echo -e " Specifies the policy's priority." + echo -e "" echo -e " -s, --min-shard-size " echo -e " Set the minimum shard size in GB. By default 25." echo -e "" + echo -e " -v, --verbose" + echo -e " Set verbose mode. Prints more information." + echo -e "" exit 1 } @@ -252,6 +273,7 @@ function main() { show_help else INDEXER_HOSTNAME="${2}" + INDEXER_URL="https://${INDEXER_HOSTNAME}:9200" shift 2 fi ;; @@ -274,6 +296,19 @@ function main() { shift 2 fi ;; + "-P" | "--priority") + if [ -z "${2}" ]; then + echo "Error on arguments. 
Probably missing after -P|--priority" + show_help + else + ISM_PRIORITY="${2}" + shift 2 + fi + ;; + "-v" | "--verbose") + set -x + shift + ;; *) echo "Unknow option: ${1}" show_help @@ -282,13 +317,14 @@ function main() { done # Load the Wazuh Indexer templates - load_templates - # Upload the rollover policy - upload_rollover_policy - # Create the initial write indices - create_indices "${aliases[@]}" + if load_templates && upload_rollover_policy && create_indices "${aliases[@]}"; then + echo "Indexer ISM initialization finished successfully" + else + echo "Indexer ISM initialization failed" + exit 1 + fi } main "$@" From 4650ed8bec40020f867464acd97adf2ef0d5eca5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Tue, 14 Nov 2023 13:09:03 +0100 Subject: [PATCH 012/120] Update min_doc_count value (#52) --- scripts/indexer-ism-init.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/indexer-ism-init.sh b/scripts/indexer-ism-init.sh index 15998ed84fdaf..1db55b9ae7450 100644 --- a/scripts/indexer-ism-init.sh +++ b/scripts/indexer-ism-init.sh @@ -5,7 +5,7 @@ # Policy settings MIN_SHARD_SIZE="25" MIN_INDEX_AGE="7d" -MIN_DOC_COUNT="200000000" +MIN_DOC_COUNT="600000000" ISM_INDEX_PATTERNS='["wazuh-alerts-*", "wazuh-archives-*", "-wazuh-alerts-4.x-sample*"]' ISM_PRIORITY="50" INDEXER_PASSWORD="admin" From 845f66c69d51e84394487b8189047e95d788439c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Wed, 15 Nov 2023 19:19:20 +0100 Subject: [PATCH 013/120] Improve ISM init script (#57) * Improve ISM init script * Change log file path --- scripts/indexer-ism-init.sh | 57 +++++++++++++++++++++++-------------- 1 file changed, 35 insertions(+), 22 deletions(-) diff --git a/scripts/indexer-ism-init.sh b/scripts/indexer-ism-init.sh index 1db55b9ae7450..3f2edc541a4ad 100644 --- a/scripts/indexer-ism-init.sh +++ b/scripts/indexer-ism-init.sh @@ -12,6 +12,7 @@ INDEXER_PASSWORD="admin" INDEXER_HOSTNAME="localhost" POLICY_NAME="rollover_policy" +LOG_FILE="/tmp/wazuh-indexer/ism-init.log" INDEXER_URL="https://${INDEXER_HOSTNAME}:9200" @@ -84,15 +85,17 @@ function generate_rollover_template() { ######################################################################### function load_templates() { # Note: the wazuh-template.json could also be loaded here. + echo "Will create index templates to configure the alias" for alias in "${aliases[@]}"; do generate_rollover_template "${alias}" | if ! curl -s -k ${C_AUTH} \ - -X PUT "${INDEXER_URL}/_template/${alias}-rollover" -o /dev/null \ + -X PUT "${INDEXER_URL}/_template/${alias}-rollover" \ + -o "${LOG_FILE}" --create-dirs \ -H 'Content-Type: application/json' -d @-; then - echo "Error uploading ${alias} template" + echo " ERROR: '${alias}' template creation failed" return 1 else - echo "${alias} template uploaded" + echo " SUCC: '${alias}' template created or updated" fi done } @@ -106,29 +109,36 @@ function load_templates() { # None. ######################################################################### function upload_rollover_policy() { + echo "Will create the '${POLICY_NAME}' policy" policy_exists=$( curl -s -k ${C_AUTH} \ -X GET "${INDEXER_URL}/_plugins/_ism/policies/${POLICY_NAME}" \ - -o /dev/null \ + -o "${LOG_FILE}" --create-dirs \ -w "%{http_code}" ) # Check if the ${POLICY_NAME} ISM policy was loaded (404 error if not found) if [[ "${policy_exists}" == "404" ]]; then - if ! 
curl -s -k ${C_AUTH} -o /dev/null \ - -X PUT "${INDEXER_URL}/_plugins/_ism/policies/${POLICY_NAME}" \ - -H 'Content-Type: application/json' \ - -d "$(generate_rollover_policy)"; then - echo "Error uploading ${POLICY_NAME} policy" - return 1 + policy_uploaded=$( + curl -s -k ${C_AUTH} \ + -X PUT "${INDEXER_URL}/_plugins/_ism/policies/${POLICY_NAME}" \ + -o "${LOG_FILE}" --create-dirs \ + -H 'Content-Type: application/json' \ + -d "$(generate_rollover_policy)" \ + -w "%{http_code}" + ) + + if [[ "${policy_uploaded}" == "201" ]]; then + echo " SUCC: '${POLICY_NAME}' policy created" else - echo "${POLICY_NAME} policy uploaded" + echo " ERROR: '${POLICY_NAME}' policy not created => ${policy_uploaded}" + return 1 fi else if [[ "${policy_exists}" == "200" ]]; then - echo "${POLICY_NAME} policy already exists" + echo " INFO: policy '${POLICY_NAME}' already exists. Skipping policy creation" else - echo "Error checking if ${POLICY_NAME} exists" + echo " ERROR: could not check if the policy '${POLICY_NAME}' exists => ${policy_exists}" return 1 fi fi @@ -155,9 +165,9 @@ function generate_write_index_alias() { cat <<-EOF { "aliases": { - "$1": { - "is_write_index": true - } + "$1": { + "is_write_index": true + } } } EOF @@ -169,14 +179,14 @@ function generate_write_index_alias() { # 1. The alias. String. ######################################################################### function create_write_index() { - if ! curl -s -k ${C_AUTH} -o /dev/null \ - -X PUT "$INDEXER_URL/%3C${1}-4.x-%7Bnow%2Fd%7D-000001%3E?pretty" \ + if ! curl -s -k ${C_AUTH} -o "${LOG_FILE}" --create-dirs \ + -X PUT "$INDEXER_URL/%3C${1}-4.x-%7Bnow%2Fd%7D-000001%3E" \ -H 'Content-Type: application/json' \ -d "$(generate_write_index_alias "${1}")"; then - echo "Error creating ${1} write index" + echo " ERROR: creating '${1}' write index" exit 1 else - echo "${1} write index created" + echo " SUCC: '${1}' write index created" fi } @@ -186,6 +196,7 @@ function create_write_index() { # 1. List of aliases to initialize. ######################################################################### function create_indices() { + echo "Will create initial indices for the aliases" for alias in "${aliases[@]}"; do # Check if there are any write indices for the current alias write_index_exists=$(check_for_write_index "${alias}") @@ -193,6 +204,8 @@ function create_indices() { # Create the write index if it does not exist if [[ -z $write_index_exists ]]; then create_write_index "${alias}" + else + echo " INFO: '${alias}' write index already exists. Skipping write index creation" fi done } @@ -320,9 +333,9 @@ function main() { # Upload the rollover policy # Create the initial write indices if load_templates && upload_rollover_policy && create_indices "${aliases[@]}"; then - echo "Indexer ISM initialization finished successfully" + echo "SUCC: Indexer ISM initialization finished successfully." else - echo "Indexer ISM initialization failed" + echo "ERROR: Indexer ISM initialization failed. Check ${LOG_FILE} for more information." 
exit 1 fi } From f37fd2cd4e1fab0fb83c51e2674008aa63caf609 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Wed, 22 Nov 2023 20:02:14 +0100 Subject: [PATCH 014/120] Update distribution files (#59) * Update config files * Add VERSION file --- VERSION | 1 + distribution/src/bin/indexer-init.sh | 138 +++++++++++++ .../src/bin}/indexer-ism-init.sh | 0 distribution/src/bin/indexer-security-init.sh | 189 ++++++++++++++++++ .../src/config/security/action_groups.yml | 12 ++ .../src/config/security/internal_users.yml | 2 +- distribution/src/config/security/roles.yml | 10 +- .../src/config/security/roles_mapping.yml | 11 +- 8 files changed, 359 insertions(+), 4 deletions(-) create mode 100644 VERSION create mode 100644 distribution/src/bin/indexer-init.sh rename {scripts => distribution/src/bin}/indexer-ism-init.sh (100%) create mode 100644 distribution/src/bin/indexer-security-init.sh create mode 100644 distribution/src/config/security/action_groups.yml diff --git a/VERSION b/VERSION new file mode 100644 index 0000000000000..6ed7776bf3219 --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +4.9.0 diff --git a/distribution/src/bin/indexer-init.sh b/distribution/src/bin/indexer-init.sh new file mode 100644 index 0000000000000..80882e290e4e2 --- /dev/null +++ b/distribution/src/bin/indexer-init.sh @@ -0,0 +1,138 @@ +#!/bin/bash +# Wazuh Copyright (C) 2023 Wazuh Inc. (License GPLv2) +# Wazuh - indexer initialization script + +INSTALL_PATH="/usr/share/wazuh-indexer" +BIN_PATH="${INSTALL_PATH}/bin" + + +######################################################################### +# Parse arguments for security init script. +######################################################################### +function parse_security_args() { + security_args=() + + while [ -n "$1" ]; do + case "$1" in + "-h" | "--help") + security_args+=("${1}") + shift + ;; + "-ho" | "--host") + if [ -n "$2" ]; then + security_args+=("${1}" "${2}") + shift 2 + fi + ;; + "--port") + if [ -n "$2" ]; then + security_args+=("${1}" "${2}") + shift 2 + fi + ;; + "--options") + if [ -n "$2" ]; then + security_args+=("${1}" "${2}") + shift 2 + fi + ;; + *) + shift + ;; + esac + done +} + + +######################################################################### +# Run the security init script. +######################################################################### +function run_security_init() { + echo "Executing Wazuh indexer security init script..." + parse_security_args "$@" + /bin/bash "${BIN_PATH}/indexer-security-init.sh" "${security_args[@]}" +} + + +######################################################################### +# Parse arguments for ISM init script. 
+######################################################################### +function parse_ism_args() { + ism_args=() + + while [ -n "${1}" ]; do + case "${1}" in + "-a" | "--min-index-age") + if [ -n "${2}" ]; then + ism_args+=("${1}" "${2}") + shift 2 + fi + ;; + "-d" | "--min-doc-count") + if [ -n "${2}" ]; then + ism_args+=("${1}" "${2}") + shift 2 + fi + ;; + "-h" | "--help") + ism_args+=("${1}") + shift + ;; + "-i" | "--indexer-hostname") + if [ -n "${2}" ]; then + ism_args+=("${1}" "${2}") + shift 2 + fi + ;; + "-p" | "--indexer-password") + if [ -n "${2}" ]; then + ism_args+=("${1}" "${2}") + shift 2 + fi + ;; + "-s" | "--min-shard-size") + if [ -n "${2}" ]; then + ism_args+=("${1}" "${2}") + shift 2 + fi + ;; + "-P" | "--priority") + if [ -n "${2}" ]; then + ism_args+=("${1}" "${2}") + shift 2 + fi + ;; + "-v" | "--verbose") + ism_args+=("${1}") + shift + ;; + *) + shift + ;; + esac + done +} + + +######################################################################### +# Run the ISM init script. +######################################################################### +function run_ism_init() { + echo "Executing Wazuh indexer ISM init script..." + parse_ism_args "$@" + /bin/bash "${BIN_PATH}/indexer-ism-init.sh" "${ism_args[@]}"; +} + + +######################################################################### +# Main function. +######################################################################### +function main() { + # If run_security_init returns 0, then run_ism_init + if run_security_init "$@" -gt 0; then + run_ism_init "$@" + fi +} + + +main "$@" diff --git a/scripts/indexer-ism-init.sh b/distribution/src/bin/indexer-ism-init.sh similarity index 100% rename from scripts/indexer-ism-init.sh rename to distribution/src/bin/indexer-ism-init.sh diff --git a/distribution/src/bin/indexer-security-init.sh b/distribution/src/bin/indexer-security-init.sh new file mode 100644 index 0000000000000..b46eb3e47dabd --- /dev/null +++ b/distribution/src/bin/indexer-security-init.sh @@ -0,0 +1,189 @@ +#!/bin/bash + +# Wazuh-indexer securityadmin wrapper +# Copyright (C) 2022, Wazuh Inc. +# +# This program is a free software; you can redistribute it +# and/or modify it under the terms of the GNU General Public +# License (version 2) as published by the FSF - Free Software +# Foundation. + +CONFIG_PATH="/etc/wazuh-indexer" + +if [ ! -d "${CONFIG_PATH}" ]; then + echo "ERROR: it was not possible to find ${CONFIG_PATH}" + exit 1 +fi + +CONFIG_FILE="${CONFIG_PATH}/opensearch.yml" + +if [ ! -f "${CONFIG_FILE}" ]; then + echo "ERROR: it was not possible to find ${CONFIG_FILE}" + exit 1 +fi + +INSTALL_PATH="/usr/share/wazuh-indexer" + +if [ ! 
-d "${INSTALL_PATH}" ]; then + echo "ERROR: it was not possible to find ${INSTALL_PATH}" + exit 1 +fi + +HOST="" +OPTIONS="-icl -nhnv" +WAZUH_INDEXER_ROOT_CA="$(cat ${CONFIG_FILE} 2>&1 | grep http.pemtrustedcas | sed 's/.*: //' | tr -d "[\"\']")" +WAZUH_INDEXER_ADMIN_PATH="$(dirname "${WAZUH_INDEXER_ROOT_CA}" 2>&1)" +SECURITY_PATH="${INSTALL_PATH}/plugins/opensearch-security" +SECURITY_CONFIG_PATH="${CONFIG_PATH}/opensearch-security" + +# ----------------------------------------------------------------------------- + +trap ctrl_c INT + +clean(){ + + exit_code=$1 + indexer_process_id=$(pgrep -f wazuh-indexer -c) + if [ "${indexer_process_id}" -gt 1 ]; then + pkill -n -f wazuh-indexer + fi + exit "${exit_code}" + +} + +ctrl_c() { + clean 1 +} + +# ----------------------------------------------------------------------------- + +getNetworkHost() { + + HOST=$(grep -hr "network.host:" "${CONFIG_FILE}" 2>&1) + NH="network.host: " + HOST="${HOST//$NH}" + HOST=$(echo "${HOST}" | tr -d "[\"\']") + + isIP=$(echo "${HOST}" | grep -P "^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$") + isDNS=$(echo "${HOST}" | grep -P "^[a-zA-Z0-9][a-zA-Z0-9-]{1,61}[a-zA-Z0-9](?:\.[a-zA-Z]{2,})+$") + + # Allow to find ip with an interface + if [ -z "${isIP}" ] && [ -z "${isDNS}" ]; then + interface="${HOST//_}" + HOST=$(ip -o -4 addr list "${interface}" | awk '{print $4}' | cut -d/ -f1) + fi + + if [ "${HOST}" = "0.0.0.0" ]; then + HOST="127.0.0.1" + fi + + if [ -z "${HOST}" ]; then + echo "ERROR: network host not valid, check ${CONFIG_FILE}" + exit 1 + fi + +} + +# ----------------------------------------------------------------------------- +getPort() { + + PORT=$(grep -hr 'transport.tcp.port' "${CONFIG_FILE}" 2>&1) + if [ "${PORT}" ]; then + PORT=$(echo "${PORT}" | cut -d' ' -f2 | cut -d'-' -f1) + else + PORT="9200" + fi + PORT=$(echo "${PORT}" | tr -d "[\"\']") + +} +# ----------------------------------------------------------------------------- + +securityadmin() { + + if [ ! -d "${SECURITY_PATH}" ]; then + echo "ERROR: it was not possible to find ${SECURITY_PATH}" + exit 1 + elif [ ! -d "${INSTALL_PATH}/jdk" ]; then + echo "ERROR: it was not possible to find ${INSTALL_PATH}/jdk" + exit 1 + fi + + if [ -f "${WAZUH_INDEXER_ADMIN_PATH}/admin.pem" ] && [ -f "${WAZUH_INDEXER_ADMIN_PATH}/admin-key.pem" ] && [ -f "${WAZUH_INDEXER_ROOT_CA}" ]; then + OPENSEARCH_CONF_DIR="${CONFIG_PATH}" JAVA_HOME="${INSTALL_PATH}/jdk" runuser wazuh-indexer --shell="/bin/bash" --command="${SECURITY_PATH}/tools/securityadmin.sh -cd ${SECURITY_CONFIG_PATH} -cacert ${WAZUH_INDEXER_ROOT_CA} -cert ${WAZUH_INDEXER_ADMIN_PATH}/admin.pem -key ${WAZUH_INDEXER_ADMIN_PATH}/admin-key.pem -h ${HOST} -p ${PORT} ${OPTIONS}" + else + echo "ERROR: this tool try to find admin.pem and admin-key.pem in ${WAZUH_INDEXER_ADMIN_PATH} but it couldn't. In this case, you must run manually the Indexer security initializer by running the command: JAVA_HOME="/usr/share/wazuh-indexer/jdk" runuser wazuh-indexer --shell="/bin/bash" --command="/usr/share/wazuh-indexer/plugins/opensearch-security/tools/securityadmin.sh -cd /etc/wazuh-indexer/opensearch-security -cacert /path/to/root-ca.pem -cert /path/to/admin.pem -key /path/to/admin-key.pem -h ${HOST} -p ${PORT} ${OPTIONS}" replacing /path/to/ by your certificates path." + exit 1 + fi + +} + +help() { + echo + echo "Usage: $0 [OPTIONS]" + echo + echo " -ho, --host [Optional] Target IP or DNS to configure security." + echo " --port [Optional] wazuh-indexer security port." 
+ echo " --options [Optional] Custom securityadmin options." + echo " -h, --help Show this help." + echo + exit "$1" +} + + +main() { + + getNetworkHost + getPort + + while [ -n "$1" ] + do + case "$1" in + "-h"|"--help") + help 0 + ;; + "-ho"|"--host") + if [ -n "$2" ]; then + HOST="$2" + HOST=$(echo "${HOST}" | tr -d "[\"\']") + isIP=$(echo "${2}" | grep -P "^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$") + isDNS=$(echo "${2}" | grep -P "^[a-zA-Z0-9][a-zA-Z0-9-]{1,61}[a-zA-Z0-9](?:\.[a-zA-Z]{2,})+$") + if [[ -z "${isIP}" ]] && [[ -z "${isDNS}" ]]; then + echo "The given information does not match with an IP address or a DNS." + exit 1 + fi + shift 2 + else + help 1 + fi + ;; + "--port") + if [ -n "$2" ]; then + PORT="$2" + PORT=$(echo "${PORT}" | tr -d "[\"\']") + if [[ -z $(echo "${2}" | grep -P "^([1-9][0-9]{0,3}|[1-5][0-9]{4}|6[0-4][0-9]{3}|65[0-4][0-9]{2}|655[0-2][0-9]|6553[0-5])$") ]]; then + echo "The given information does not match with a valid PORT number." + exit 1 + fi + shift 2 + else + help 1 + fi + ;; + "--options") + if [ -n "$2" ]; then + OPTIONS="$2" + shift 2 + else + help 1 + fi + ;; + *) + help 1 + esac + done + + securityadmin + +} + +main "$@" diff --git a/distribution/src/config/security/action_groups.yml b/distribution/src/config/security/action_groups.yml new file mode 100644 index 0000000000000..04119c8a23667 --- /dev/null +++ b/distribution/src/config/security/action_groups.yml @@ -0,0 +1,12 @@ +--- +_meta: + type: "actiongroups" + config_version: 2 + +# ISM API permissions group +manage_ism: + reserved: true + hidden: false + allowed_actions: + - "cluster:admin/opendistro/ism/*" + static: false \ No newline at end of file diff --git a/distribution/src/config/security/internal_users.yml b/distribution/src/config/security/internal_users.yml index 52069500a9b5b..1ff2c8c23a151 100644 --- a/distribution/src/config/security/internal_users.yml +++ b/distribution/src/config/security/internal_users.yml @@ -53,4 +53,4 @@ snapshotrestore: reserved: false backend_roles: - "snapshotrestore" - description: "Demo snapshotrestore user" \ No newline at end of file + description: "Demo snapshotrestore user" diff --git a/distribution/src/config/security/roles.yml b/distribution/src/config/security/roles.yml index ec669b2fe2c97..d64d6228ec29e 100644 --- a/distribution/src/config/security/roles.yml +++ b/distribution/src/config/security/roles.yml @@ -146,4 +146,12 @@ manage_wazuh_index: - "manage" - "index" tenant_permissions: [] - static: false \ No newline at end of file + static: false + +# ISM API permissions role +manage_ism: + reserved: true + hidden: false + cluster_permissions: + - "manage_ism" + static: false diff --git a/distribution/src/config/security/roles_mapping.yml b/distribution/src/config/security/roles_mapping.yml index 66d530d8e0dbb..dc4e78e0b5dbb 100644 --- a/distribution/src/config/security/roles_mapping.yml +++ b/distribution/src/config/security/roles_mapping.yml @@ -76,7 +76,7 @@ kibana_user: and_backend_roles: [] description: "Maps kibanauser to kibana_user" - # Wazuh monitoring and statistics index permissions +# Wazuh monitoring and statistics index permissions manage_wazuh_index: reserved: true hidden: false @@ -84,4 +84,11 @@ manage_wazuh_index: hosts: [] users: - "kibanaserver" - and_backend_roles: [] \ No newline at end of file + and_backend_roles: [] + +# ISM API permissions role mapping +manage_ism: + reserved: true + hidden: false + users: + - "kibanaserver" From fc75637c64b7301a704f0f77f79acec3efae10e9 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=C3=81lex=20Ruiz?= Date: Tue, 28 Nov 2023 14:17:04 +0100 Subject: [PATCH 015/120] Update documentation of the ECS tooling (#67) --- ecs/README.md | 82 ++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 59 insertions(+), 23 deletions(-) diff --git a/ecs/README.md b/ecs/README.md index d16301fa9bdff..d4897318585e2 100644 --- a/ecs/README.md +++ b/ecs/README.md @@ -5,36 +5,72 @@ This script generates the ECS mappings for the Wazuh indices. ### Requirements - ECS repository clone. The script is meant to be launched from the root level of that repository. -- Python 3.6 or higher -- jq +- `Python` 3.6 or higher + `venv` module +- `jq` -### Folder structrue +### Folder structure There is a folder for each module. Inside each folder, there is a `fields` folder with the required files to generate the mappings. These are the inputs for the ECS generator. ### Usage -**Copy the `generate.sh` script to the root level of the ECS repository.** +1. Get a copy of the ECS repository at the same level as the `wazuh-indexer` repo: -Use the `generate.sh` script to generate the mappings for a module. The script takes 3 arguments, -plus 2 optional arguments to upload the mappings to the Wazuh indexer (using **composable** indexes). + ```console + git clone git@github.com:elastic/ecs.git + ``` -```plaintext -Usage: ./generate.sh [--upload ] - * ECS_VERSION: ECS version to generate mappings for - * INDEXER_SRC: Path to the wazuh-indexer repository - * MODULE: Module to generate mappings for - * --upload : Upload generated index template to the OpenSearch cluster. Defaults to https://localhost:9200 -Example: ./generate.sh v8.10.0 ~/wazuh-indexer vulnerability-detector --upload https://indexer:9200 -``` +2. Install the dependencies: -For example, to generate the mappings for the `vulnerability-detector` module using the -ECS version `v8.10.0` and the Wazuh indexer in path `~/wazuh/wazuh-indexer`: + ```console + cd ecs + python3 -m venv env + source env/bin/activate + pip install -r scripts/requirements.txt + ``` -```bash -./generate.sh v8.10.0 ~/wazuh/wazuh-indexer vulnerability-detector -``` +2. Copy the `generate.sh` script to the root level of the ECS repository. + + ```console + cp generate.sh ../../ecs + cd ../../ecs + bash generate.sh + ``` + + Expected output: + ``` + Usage: generate.sh [--upload ] + * ECS_VERSION: ECS version to generate mappings for + * INDEXER_SRC: Path to the wazuh-indexer repository + * MODULE: Module to generate mappings for + * --upload : Upload generated index template to the OpenSearch cluster. Defaults to https://localhost:9200 + Example: generate.sh v8.10.0 ~/wazuh-indexer vulnerability-detector --upload https://indexer:9200 + ``` + +3. Use the `generate.sh` script to generate the mappings for a module. The script takes 3 arguments, +plus 2 optional arguments to upload the mappings to the `wazuh-indexer`. Both, composable and legacy mappings +are generated. For example, to generate the mappings for the `vulnerability-detector` module using the + ECS version `v8.10.0` and assuming that path of this repository is `~/wazuh/wazuh-indexer`: + + ```bash + ./generate.sh v8.10.0 ~/wazuh/wazuh-indexer vulnerability-detector + ``` + + The tool will output the folder where they have been generated. + + ```console + Loading schemas from git ref v8.10.0 + Running generator. ECS version 8.10.0 + Replacing "match_only_text" type with "text" + Mappings saved to ~/wazuh/wazuh-indexer/ecs/vulnerability-detector/mappings/v8.10.0 + ``` + +4. When you are done. 
Exit the virtual environment. + + ```console + deactivate + ``` ### Output @@ -48,8 +84,8 @@ For our use case, the most important files are under `mappings//gen The original output is `template.json`, which is not compatible with OpenSearch by default. In order to make this template compatible with OpenSearch, the following changes are made: -- the `order` property is renamed to `priority`. -- the `mappings` and `settings` properties are nested under the `template` property. +- The `order` property is renamed to `priority`. +- The `mappings` and `settings` properties are nested under the `template` property. The script takes care of these changes automatically, generating the `opensearch-template.json` file as a result. @@ -62,7 +98,7 @@ curl -u admin:admin -k -X PUT "https://indexer:9200/_index_template/wazuh-vulner ``` Notes: -- PUT and POST are interchangable. +- PUT and POST are interchangeable. - The name of the index template does not matter. Any name can be used. - Adjust credentials and URL accordingly. @@ -95,7 +131,7 @@ The script will generate a JSON file with the events, and will also ask whether indexer. If the upload option is selected, the script will ask for the indexer URL and port, credentials, and index name. -The script uses log file. Check it out for debugging or additonal information. +The script uses log file. Check it out for debugging or additional information. #### References From 04089012ac2cd9b21e74af6f87002e76fe2aaac1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Tue, 28 Nov 2023 14:33:34 +0100 Subject: [PATCH 016/120] Add workflow for package generation (#65) * Ignore artifacts folder * Update build script - Updated to v2.11.0 version. - Skipped compilation of the plugins - The artifact nameis sent to a text file, to access it easily in GitHub Actions. * Add GH action to build min packages * Remove commented code * Remove unused code --- .github/workflows/build.yml | 62 ++++++++++++++++++++++++++++++ .github/workflows/gradle_build.yml | 19 --------- .gitignore | 2 + scripts/build.sh | 34 +++++++++------- 4 files changed, 85 insertions(+), 32 deletions(-) create mode 100644 .github/workflows/build.yml delete mode 100644 .github/workflows/gradle_build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000000000..a80b025134de5 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,62 @@ +name: Build slim packages + +# This workflow runs when any of the following occur: + # - Run manually +on: + workflow_dispatch: + + +# Used to run locally using https://github.com/nektos/act +env: + ACT: + VERSION: 2.11.0 + SNAPSHOT: false + PLATFORM: linux + BUILD: bash scripts/build.sh + + +jobs: + build: + runs-on: ubuntu-latest + # Permissions to upload the package + permissions: + packages: write + contents: read + strategy: + matrix: + # act is resource-heavy. 
Avoid running parallel builds with it: + # DISTRIBUTION: [ rpm ] + # ARCHITECTURE: [ x64 ] + DISTRIBUTION: [ tar, rpm, deb ] + ARCHITECTURE: [ x64, arm64 ] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + distribution: temurin + java-version: 11 + + - name: Setup Gradle + uses: gradle/gradle-build-action@v2.9.0 + + - name: Execute build script + run: | + $BUILD -v $VERSION -s $SNAPSHOT -p $PLATFORM -a ${{ matrix.ARCHITECTURE }} -d ${{ matrix.DISTRIBUTION }} + + # The package name is stored in the artifacts/artifact_name.txt file + - name: Read package name + id: package_name + run: | + echo $(ls -la) + echo "package_name=$(cat artifacts/artifact_name.txt)" >> $GITHUB_OUTPUT + echo "$(cat artifacts/artifact_name.txt)" + + - name: Upload artifact + uses: actions/upload-artifact@v3 + with: + name: ${{ steps.package_name.outputs.package_name }} + path: artifacts/dist/${{ steps.package_name.outputs.package_name }} + if-no-files-found: error + + # assemble: + # release: diff --git a/.github/workflows/gradle_build.yml b/.github/workflows/gradle_build.yml deleted file mode 100644 index d7916f852abf1..0000000000000 --- a/.github/workflows/gradle_build.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Run Gradle Build -on: - schedule: - - cron: '00 8 * * 5' -jobs: - gradle: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-java@v3 - with: - distribution: temurin - java-version: 11 - - - name: Setup Gradle - uses: gradle/gradle-build-action@v2.9.0 - - - name: Execute Gradle build - run: ./gradlew build diff --git a/.gitignore b/.gitignore index 82914fb4fc1e7..36172e2b263aa 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +# build files +artifacts/ # intellij files .idea/ diff --git a/scripts/build.sh b/scripts/build.sh index a0917776507be..48075ea6bf566 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -70,15 +70,19 @@ fi [ -z "$OUTPUT" ] && OUTPUT=artifacts +echo "Creating output directory $OUTPUT/maven/org/opensearch if it doesn't already exist" mkdir -p $OUTPUT/maven/org/opensearch # Build project and publish to maven local. +echo "Building and publishing OpenSearch project to Maven Local" ./gradlew publishToMavenLocal -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER # Publish to existing test repo, using this to stage release versions of the artifacts that can be released from the same build. 
+echo "Publishing OpenSearch to Test Repository" ./gradlew publishNebulaPublicationToTestRepository -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER # Copy maven publications to be promoted +echo "Copying Maven publications to $OUTPUT/maven/org" cp -r ./build/local-test-repo/org/opensearch "${OUTPUT}"/maven/org # Assemble distribution artifact @@ -103,6 +107,20 @@ case $PLATFORM-$DISTRIBUTION-$ARCHITECTURE in TARGET="$PLATFORM-arm64-$PACKAGE" SUFFIX="$PLATFORM-arm64" ;; + linux-deb-x64) + PACKAGE="deb" + EXT="deb" + TYPE="packages" + TARGET="deb" + SUFFIX="amd64" + ;; + linux-deb-arm64) + PACKAGE="deb" + EXT="deb" + TYPE="packages" + TARGET="arm64-deb" + SUFFIX="arm64" + ;; linux-rpm-x64) PACKAGE="rpm" EXT="rpm" @@ -142,20 +160,10 @@ echo "Building OpenSearch for $PLATFORM-$DISTRIBUTION-$ARCHITECTURE" ./gradlew :distribution:$TYPE:$TARGET:assemble -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER # Copy artifact to dist folder in bundle build output +echo "Copying artifact to ${OUTPUT}/dist" [[ "$SNAPSHOT" == "true" ]] && IDENTIFIER="-SNAPSHOT" ARTIFACT_BUILD_NAME=`ls distribution/$TYPE/$TARGET/build/distributions/ | grep "opensearch-min.*$SUFFIX.$EXT"` +# [WAZUH] Used by the GH workflow to upload the artifact +echo "$ARTIFACT_BUILD_NAME" > "$OUTPUT/artifact_name.txt" mkdir -p "${OUTPUT}/dist" cp distribution/$TYPE/$TARGET/build/distributions/$ARTIFACT_BUILD_NAME "${OUTPUT}"/dist/$ARTIFACT_BUILD_NAME - -echo "Building core plugins..." -mkdir -p "${OUTPUT}/core-plugins" -cd plugins -../gradlew assemble -Dbuild.snapshot="$SNAPSHOT" -Dbuild.version_qualifier=$QUALIFIER -cd .. -for plugin in plugins/*; do - PLUGIN_NAME=$(basename "$plugin") - if [ -d "$plugin" ] && [ "examples" != "$PLUGIN_NAME" ]; then - PLUGIN_ARTIFACT_BUILD_NAME=`ls "$plugin"/build/distributions/ | grep "$PLUGIN_NAME.*$IDENTIFIER.zip"` - cp "$plugin"/build/distributions/"$PLUGIN_ARTIFACT_BUILD_NAME" "${OUTPUT}"/core-plugins/"$PLUGIN_ARTIFACT_BUILD_NAME" - fi -done From 2e0701f238fa72af15e38b6ebaefcde80e216522 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Wed, 29 Nov 2023 17:18:11 +0100 Subject: [PATCH 017/120] Add docker compose environment (#66) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add very basic Docker environment That will do for now * Add latest changes * Update Docker environment - Remove build.md which was included by mistake. - Improve dev.sh script. - Update .gitignore to exclude artifacts folder. - Create .dockerignore file. - Replace get_version.sh script with inline command. - Reduce image size by using alpine as base image. --------- Signed-off-by: Álex Ruiz --- docker/README.md | 71 +++++++++++++++++++++++++++++++++ docker/dev.sh | 58 +++++++++++++++++++++++++++ docker/dev.yml | 26 ++++++++++++ docker/images/.dockerignore | 67 +++++++++++++++++++++++++++++++ docker/images/wi-dev.Dockerfile | 16 ++++++++ 5 files changed, 238 insertions(+) create mode 100644 docker/README.md create mode 100755 docker/dev.sh create mode 100644 docker/dev.yml create mode 100644 docker/images/.dockerignore create mode 100644 docker/images/wi-dev.Dockerfile diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 0000000000000..28a58329ab7d7 --- /dev/null +++ b/docker/README.md @@ -0,0 +1,71 @@ +# Frontend development environments + +Install [Docker Desktop][docker-desktop] as per its instructions, available for Windows, Mac +and Linux (Ubuntu, Debian & Fedora). 
+This ensures that the development experience between Linux, Mac and Windows is as +similar as possible. + +> IMPORTANT: be methodic during the installation of Docker Desktop, and proceed +> step by step as described in their documentation. Make sure that your system +> meets the system requirements before installing Docker Desktop, and read any +> post-installation note, specially on Linux: [Differences between +> Docker Desktop for Linux and Docker Engine][docker-variant]. + +## Pre-requisites + +1. Assign resources to [Docker Desktop][docker-desktop]. The requirements for the + environments are: + + - 8 GB of RAM (minimum) + - 4 cores + + The more resources the better ☺ + +2. Clone the [wazuh-indexer][wi-repo]. + +3. Set up user permissions + + The Docker volumes will be created by the internal Docker user, making them + read-only. To prevent this, a new group named `docker-desktop` and GUID 100999 + needs to be created, then added to your user and the source code folder: + + ```bash + sudo groupadd -g 100999 docker-desktop + sudo useradd -u 100999 -g 100999 -M docker-desktop + sudo chown -R docker-desktop:docker-desktop $WZD_HOME + sudo usermod -aG docker-desktop $USER + ``` + +## Understanding Docker contexts + +Before we begin starting Docker containers, we need to understand the +differences between Docker Engine and Docker Desktop, more precisely, that the +use different contexts. + +Carefully read these two sections of the Docker documentation: + +- [Differences between Docker Desktop for Linux and Docker Engine][docker-variant]. +- [Switch between Docker Desktop and Docker Engine][docker-context]. + +Docker Desktop will change to its context automatically at start, so be sure +that any existing Docker container using the default context is **stopped** +before starting Docker Desktop and any of the environments in this folder. + +## Starting up the environments + +Use the sh script to up the environment. + +Example: + +```bash +Usage: ./dev.sh {up|down|stop} [security] +``` + +Once the `wazuh-indexer` container is up, attach a shell to it and run `./gradlew run` +to start the application. + + +[docker-desktop]: https://docs.docker.com/get-docker +[docker-variant]: https://docs.docker.com/desktop/install/linux-install/#differences-between-docker-desktop-for-linux-and-docker-engine +[docker-context]: https://docs.docker.com/desktop/install/linux-install/#context +[wi-repo]: https://github.com/wazuh/wazuh-indexer diff --git a/docker/dev.sh b/docker/dev.sh new file mode 100755 index 0000000000000..991ed42de29e7 --- /dev/null +++ b/docker/dev.sh @@ -0,0 +1,58 @@ +#!/bin/bash + +# Attaches the project as a volume to a JDK 17 container +# Requires Docker +# Script usage: bash ./dev.sh + +set -e + +# ==== +# Checks that the script is run from the intended location +# ==== +function check_project_root_folder () { + if [[ "$0" != "./dev.sh" && "$0" != "dev.sh" ]]; then + echo "Run the script from its location" + usage + exit 1 + fi + # Change working directory to the root of the repository + cd .. 
+} + +# ==== +# Displays usage +# ==== +function usage() { + echo "Usage: ./dev.sh {up|down|stop}" +} + +# ==== +# Main function +# ==== +function main() { + check_project_root_folder "$@" + compose_file=docker/dev.yml + compose_cmd="docker compose -f $compose_file" + REPO_PATH=$(pwd) + VERSION=$(cat VERSION) + export REPO_PATH + export VERSION + + case $1 in + up) + $compose_cmd up -d + ;; + down) + $compose_cmd down + ;; + stop) + $compose_cmd stop + ;; + *) + usage + exit 1 + ;; + esac +} + +main "$@" \ No newline at end of file diff --git a/docker/dev.yml b/docker/dev.yml new file mode 100644 index 0000000000000..7e3b6202c2ee9 --- /dev/null +++ b/docker/dev.yml @@ -0,0 +1,26 @@ +version: "3.9" + +services: + + wazuh-indexer: + image: wazuh-indexer-dev:${VERSION} + container_name: wazuh-indexer-dev-${VERSION} + build: + context: ./.. + dockerfile: ${REPO_PATH}/docker/images/wi-dev.Dockerfile + ports: + # OpenSearch REST API + - 9200:9200 + # Cross-cluster search + # - 9250:9250 + # Node communication and transport + # - 9300:9300 + # Performance Analyzer + # - 9600:9600 + expose: + - 9200 + volumes: + - ${REPO_PATH}:/home/wazuh-indexer/app + entrypoint: ['tail', '-f', '/dev/null'] + user: "1000:1000" + working_dir: /home/wazuh-indexer/app diff --git a/docker/images/.dockerignore b/docker/images/.dockerignore new file mode 100644 index 0000000000000..058a889d5f239 --- /dev/null +++ b/docker/images/.dockerignore @@ -0,0 +1,67 @@ +artifacts/ + +# intellij files +.idea/ +*.iml +*.ipr +*.iws +build-idea/ +out/ + +# include shared intellij config +!.idea/inspectionProfiles/Project_Default.xml +!.idea/runConfigurations/Debug_OpenSearch.xml +!.idea/vcs.xml + +# These files are generated in the main tree by annotation processors +benchmarks/src/main/generated/* +benchmarks/bin/* +benchmarks/build-eclipse-default/* +server/bin/* +server/build-eclipse-default/* +test/framework/build-eclipse-default/* + +# eclipse files +.project +.classpath +.settings +build-eclipse/ + +# netbeans files +nb-configuration.xml +nbactions.xml + +# gradle stuff +.gradle/ +build/ + +# vscode stuff +.vscode/ + +# testing stuff +**/.local* +.vagrant/ +/logs/ + +# osx stuff +.DS_Store + +# default folders in which the create_bwc_index.py expects to find old es versions in +/backwards +/dev-tools/backwards + +# needed in case docs build is run...maybe we can configure doc build to generate files under build? +html_docs + +# random old stuff that we should look at the necessity of... +/tmp/ +eclipse-build + +# projects using testfixtures +testfixtures_shared/ + +# These are generated from .ci/jobs.t +.ci/jobs/ + +# build files generated +doc-tools/missing-doclet/bin/ \ No newline at end of file diff --git a/docker/images/wi-dev.Dockerfile b/docker/images/wi-dev.Dockerfile new file mode 100644 index 0000000000000..7e7f16fbba42c --- /dev/null +++ b/docker/images/wi-dev.Dockerfile @@ -0,0 +1,16 @@ +FROM gradle:jdk17-alpine AS builder +USER gradle +WORKDIR /home/wazuh-indexer/app +COPY --chown=gradle:gradle . 
/home/wazuh-indexer/app +RUN gradle clean + + +FROM eclipse-temurin:17-jdk-alpine +RUN addgroup -g 1000 wazuh-indexer && \ + adduser -u 1000 -G wazuh-indexer -D -h /home/wazuh-indexer wazuh-indexer && \ + chmod 0775 /home/wazuh-indexer && \ + chown -R 1000:0 /home/wazuh-indexer +USER wazuh-indexer +COPY --from=builder --chown=1000:0 /home/wazuh-indexer/app /home/wazuh-indexer/app +WORKDIR /home/wazuh-indexer/app +EXPOSE 9200 9300 From 80561b18cde48a8dff7be041b766d0cc164da740 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Tue, 5 Dec 2023 18:57:16 +0100 Subject: [PATCH 018/120] Rename packages to wazuh-indexer (#69) * Rename packages to wazuh-indexer * Include VERSION file into packages * Apply Wazuh version to packages names * Improve build.sh script Apply suggestions from ShellCheck --- VERSION | 2 +- ...nternalDistributionArchiveSetupPlugin.java | 2 +- .../InternalDistributionBwcSetupPlugin.java | 8 +- distribution/archives/build.gradle | 6 +- distribution/build.gradle | 39 +++++---- distribution/docker/build.gradle | 2 +- distribution/docker/docker-compose.yml | 4 +- distribution/docker/docker-test-entrypoint.sh | 4 +- distribution/docker/src/docker/Dockerfile | 24 +++--- .../src/docker/bin/docker-entrypoint.sh | 8 +- distribution/packages/build.gradle | 82 ++++++++++--------- .../common/env/{opensearch => wazuh-indexer} | 24 +++--- .../packages/src/common/scripts/postinst | 52 ++++++------ .../packages/src/common/scripts/postrm | 30 +++---- .../packages/src/common/scripts/posttrans | 8 +- .../packages/src/common/scripts/preinst | 38 ++++----- .../packages/src/common/scripts/prerm | 22 ++--- .../src/common/systemd/opensearch.conf | 1 - .../{opensearch.conf => wazuh-indexer.conf} | 0 .../src/common/systemd/systemd-entrypoint | 4 +- .../src/common/systemd/wazuh-indexer.conf | 1 + ...ensearch.service => wazuh-indexer.service} | 22 ++--- .../deb/init.d/{opensearch => wazuh-indexer} | 28 +++---- .../packages/src/deb/lintian/opensearch | 46 ----------- .../packages/src/deb/lintian/wazuh-indexer | 46 +++++++++++ .../rpm/init.d/{opensearch => wazuh-indexer} | 24 +++--- scripts/build.sh | 16 ++-- settings.gradle | 2 +- 28 files changed, 280 insertions(+), 265 deletions(-) rename distribution/packages/src/common/env/{opensearch => wazuh-indexer} (68%) delete mode 100644 distribution/packages/src/common/systemd/opensearch.conf rename distribution/packages/src/common/systemd/sysctl/{opensearch.conf => wazuh-indexer.conf} (100%) create mode 100644 distribution/packages/src/common/systemd/wazuh-indexer.conf rename distribution/packages/src/common/systemd/{opensearch.service => wazuh-indexer.service} (73%) rename distribution/packages/src/deb/init.d/{opensearch => wazuh-indexer} (79%) delete mode 100644 distribution/packages/src/deb/lintian/opensearch create mode 100644 distribution/packages/src/deb/lintian/wazuh-indexer rename distribution/packages/src/rpm/init.d/{opensearch => wazuh-indexer} (80%) diff --git a/VERSION b/VERSION index 6ed7776bf3219..b617d997d7701 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -4.9.0 +4.9.0 \ No newline at end of file diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java index a7f720855951a..e6cb325537a30 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java +++ 
b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java @@ -156,7 +156,7 @@ private void configureGeneralTaskDefaults(Project project) { project.getTasks().withType(AbstractArchiveTask.class).configureEach(t -> { String subdir = archiveTaskToSubprojectName(t.getName()); t.getDestinationDirectory().set(project.file(subdir + "/build/distributions")); - t.getArchiveBaseName().set("opensearch-min"); + t.getArchiveBaseName().set("wazuh-indexer-min"); }); } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index 0502280cb69ad..e1dc53e9f9fc4 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -123,7 +123,7 @@ private void registerBwcArtifacts(Project bwcProject, DistributionProject distri String expandedDistConfiguration = "expanded-" + projectName; bwcProject.getConfigurations().create(expandedDistConfiguration); bwcProject.getArtifacts().add(expandedDistConfiguration, distributionProject.getExpandedDistDirectory(), artifact -> { - artifact.setName("opensearch"); + artifact.setName("wazuh-indexer"); artifact.builtBy(buildBwcTask); artifact.setType("directory"); }); @@ -132,7 +132,7 @@ private void registerBwcArtifacts(Project bwcProject, DistributionProject distri private void registerDistributionArchiveArtifact(Project bwcProject, DistributionProject distributionProject, String buildBwcTask) { String artifactFileName = distributionProject.getDistFile().getName(); - String artifactName = "opensearch"; + String artifactName = "wazuh-indexer"; String suffix = artifactFileName.endsWith("tar.gz") ? "tar.gz" : artifactFileName.substring(artifactFileName.length() - 3); int archIndex = artifactFileName.indexOf("x64"); @@ -258,12 +258,12 @@ private static class DistributionProject { if (version.onOrAfter("1.1.0")) { this.distFile = new File( checkoutDir, - baseDir + "/" + name + "/build/distributions/opensearch-min-" + version + "-SNAPSHOT" + classifier + "." + extension + baseDir + "/" + name + "/build/distributions/wazuh-indexer-min-" + version + "-SNAPSHOT" + classifier + "." + extension ); } else { this.distFile = new File( checkoutDir, - baseDir + "/" + name + "/build/distributions/opensearch-" + version + "-SNAPSHOT" + classifier + "." + extension + baseDir + "/" + name + "/build/distributions/wazuh-indexer-" + version + "-SNAPSHOT" + classifier + "." + extension ); } // we only ported this down to the 7.x branch. 
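
As a quick local check of the renaming above (a sketch only, assuming the build prerequisites described in the repository documentation are installed), the same `scripts/build.sh` entry point used by the `build.yml` workflow can be run by hand. The exact artifact name depends on the version, platform, architecture and distribution flags passed:

```console
# Flags mirror the GitHub Actions workflow: version, snapshot, platform, architecture, distribution.
bash scripts/build.sh -v 2.11.0 -s false -p linux -a x64 -d tar

# build.sh records the generated package name for later steps (output shown is illustrative).
cat artifacts/artifact_name.txt   # e.g. wazuh-indexer-min-2.11.0-linux-x64.tar.gz
ls artifacts/dist/
```
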
diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 792b1ab57ddbc..034ac1528a6d9 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -33,8 +33,9 @@ import org.opensearch.gradle.JavaPackageType apply plugin: 'opensearch.internal-distribution-archive-setup' CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, String architecture, JavaPackageType java) { + version = rootProject.file('VERSION').getText() return copySpec { - into("opensearch-${version}") { + into("wazuh-indexer-${version}") { into('lib') { with libFiles() } @@ -81,6 +82,9 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla pluginsDir.getParent() } } + into('') { + with versionFile() + } from(rootProject.projectDir) { include 'README.md' } diff --git a/distribution/build.gradle b/distribution/build.gradle index a323dd15ed9cf..835538ba3733c 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -357,6 +357,13 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } } + versionFile = { + copySpec { + from(rootProject.file('VERSION')) + fileMode 0644 + } + } + modulesFiles = { platform -> copySpec { eachFile { @@ -523,18 +530,18 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { *
 *  path.conf
 *  The default directory from which to load configuration. This is used in
 *  the packaging scripts, but in that context it is always
- *  /etc/opensearch. Its also used in bin/opensearch-plugin, where it is
- *  /etc/opensearch for the os packages but $OPENSEARCH_HOME/config otherwise.
+ *  /etc/wazuh-indexer. Its also used in bin/opensearch-plugin, where it is
+ *  /etc/wazuh-indexer for the os packages but $OPENSEARCH_HOME/config otherwise.
 *  path.env
 *  The env file sourced before bin/opensearch to set environment
- *  variables. Think /etc/defaults/opensearch.
+ *  variables. Think /etc/defaults/wazuh-indexer.
 *  heap.min and heap.max
 *  Default min and max heap
 *  scripts.footer
 *  Footer appended to control scripts embedded in the distribution that is
 *  (almost) entirely there for cosmetic reasons.
 *  stopping.timeout
- *  RPM's init script needs to wait for opensearch to stop before
+ *  RPM's init script needs to wait for wazuh-indexer to stop before
 *  returning from stop and it needs a maximum time to wait. This is it. One
 *  day. DEB retries forever.
* @@ -542,8 +549,8 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { subprojects { ext.expansionsForDistribution = { distributionType, jdk -> final String defaultHeapSize = "1g" - final String packagingPathData = "path.data: /var/lib/opensearch" - final String pathLogs = "/var/log/opensearch" + final String packagingPathData = "path.data: /var/lib/wazuh-indexer" + final String pathLogs = "/var/log/wazuh-indexer" final String packagingPathLogs = "path.logs: ${pathLogs}" final String packagingLoggc = "${pathLogs}/gc.log" @@ -558,8 +565,8 @@ subprojects { 'project.version': version, 'path.conf': [ - 'deb': '/etc/opensearch', - 'rpm': '/etc/opensearch', + 'deb': '/etc/wazuh-indexer', + 'rpm': '/etc/wazuh-indexer', 'def': '"$OPENSEARCH_HOME"/config' ], 'path.data': [ @@ -568,15 +575,15 @@ subprojects { 'def': '#path.data: /path/to/data' ], 'path.env': [ - 'deb': '/etc/default/opensearch', - 'rpm': '/etc/sysconfig/opensearch', + 'deb': '/etc/default/wazuh-indexer', + 'rpm': '/etc/sysconfig/wazuh-indexer', /* There isn't one of these files for tar or zip but its important to make an empty string here so the script can properly skip it. */ 'def': 'if [ -z "$OPENSEARCH_PATH_CONF" ]; then OPENSEARCH_PATH_CONF="$OPENSEARCH_HOME"/config; done', ], 'source.path.env': [ - 'deb': 'source /etc/default/opensearch', - 'rpm': 'source /etc/sysconfig/opensearch', + 'deb': 'source /etc/default/wazuh-indexer', + 'rpm': 'source /etc/sysconfig/wazuh-indexer', 'def': 'if [ -z "$OPENSEARCH_PATH_CONF" ]; then OPENSEARCH_PATH_CONF="$OPENSEARCH_HOME"/config; fi', ], 'path.logs': [ @@ -594,14 +601,14 @@ subprojects { 'heap.max': defaultHeapSize, 'heap.dump.path': [ - 'deb': "-XX:HeapDumpPath=/var/lib/opensearch", - 'rpm': "-XX:HeapDumpPath=/var/lib/opensearch", + 'deb': "-XX:HeapDumpPath=/var/lib/wazuh-indexer", + 'rpm': "-XX:HeapDumpPath=/var/lib/wazuh-indexer", 'def': "-XX:HeapDumpPath=data" ], 'error.file': [ - 'deb': "-XX:ErrorFile=/var/log/opensearch/hs_err_pid%p.log", - 'rpm': "-XX:ErrorFile=/var/log/opensearch/hs_err_pid%p.log", + 'deb': "-XX:ErrorFile=/usr/share/wazuh-indexer/hs_err_pid%p.log", + 'rpm': "-XX:ErrorFile=/usr/share/wazuh-indexer/hs_err_pid%p.log", 'def': "-XX:ErrorFile=logs/hs_err_pid%p.log" ], diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index ad8678c608b54..f0641aa78d617 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -60,7 +60,7 @@ ext.expansions = { Architecture architecture, DockerBase base, boolean local -> classifier = "linux-\$(arch)" } - final String opensearch = "opensearch-min-${VersionProperties.getOpenSearch()}-${classifier}.tar.gz" + final String opensearch = "wazuh-indexer-min-${VersionProperties.getOpenSearch()}-${classifier}.tar.gz" /* Both the following Dockerfile commands put the resulting artifact at * the same location, regardless of classifier, so that the commands that diff --git a/distribution/docker/docker-compose.yml b/distribution/docker/docker-compose.yml index 5ed2b159ffe2b..bb4eb53cd49d2 100644 --- a/distribution/docker/docker-compose.yml +++ b/distribution/docker/docker-compose.yml @@ -18,7 +18,7 @@ services: - node.store.allow_mmap=false volumes: - ./build/repo:/tmp/opensearch-repo - - ./build/logs/1:/usr/share/opensearch/logs + - ./build/logs/1:/usr/share/wazuh-indexer/logs ports: - "9200" ulimits: @@ -42,7 +42,7 @@ services: - node.store.allow_mmap=false volumes: - ./build/repo:/tmp/opensearch-repo - - ./build/logs/2:/usr/share/opensearch/logs + - 
./build/logs/2:/usr/share/wazuh-indexer/logs ports: - "9200" ulimits: diff --git a/distribution/docker/docker-test-entrypoint.sh b/distribution/docker/docker-test-entrypoint.sh index 1cfc62f6b02b0..6ff8306868a47 100755 --- a/distribution/docker/docker-test-entrypoint.sh +++ b/distribution/docker/docker-test-entrypoint.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash set -e -o pipefail -cd /usr/share/opensearch/bin/ +cd /usr/share/wazuh-indexer/bin/ -/usr/local/bin/docker-entrypoint.sh | tee > /usr/share/opensearch/logs/console.log +/usr/local/bin/docker-entrypoint.sh | tee > /usr/share/wazuh-indexer/logs/console.log diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index c980217b0b8dc..268af870ebae9 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -40,13 +40,13 @@ RUN set -eux ; \\ mv \${tini_bin} /tini ; \\ chmod +x /tini -RUN mkdir /usr/share/opensearch -WORKDIR /usr/share/opensearch +RUN mkdir /usr/share/wazuh-indexer +WORKDIR /usr/share/wazuh-indexer ${source_opensearch} RUN tar zxf /opt/opensearch.tar.gz --strip-components=1 -RUN sed -i -e 's/OPENSEARCH_DISTRIBUTION_TYPE=tar/OPENSEARCH_DISTRIBUTION_TYPE=docker/' /usr/share/opensearch/bin/opensearch-env +RUN sed -i -e 's/OPENSEARCH_DISTRIBUTION_TYPE=tar/OPENSEARCH_DISTRIBUTION_TYPE=docker/' /usr/share/wazuh-indexer/bin/opensearch-env RUN mkdir -p config config/jvm.options.d data logs RUN chmod 0775 config config/jvm.options.d data logs COPY config/opensearch.yml config/log4j2.properties config/ @@ -74,27 +74,27 @@ RUN sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-Linux-* && \\ done; \\ (exit \$exit_code) -RUN groupadd -g 1000 opensearch && \\ - adduser -u 1000 -g 1000 -G 0 -d /usr/share/opensearch opensearch && \\ - chmod 0775 /usr/share/opensearch && \\ - chown -R 1000:0 /usr/share/opensearch +RUN groupadd -g 1000 wazuh-indexer && \\ + adduser -u 1000 -g 1000 -G 0 -d /usr/share/wazuh-indexer wazuh-indexer && \\ + chmod 0775 /usr/share/wazuh-indexer && \\ + chown -R 1000:0 /usr/share/wazuh-indexer -WORKDIR /usr/share/opensearch -COPY --from=builder --chown=1000:0 /usr/share/opensearch /usr/share/opensearch +WORKDIR /usr/share/wazuh-indexer +COPY --from=builder --chown=1000:0 /usr/share/wazuh-indexer /usr/share/wazuh-indexer COPY --from=builder --chown=0:0 /tini /tini # Replace OpenJDK's built-in CA certificate keystore with the one from the OS # vendor. The latter is superior in several ways. # REF: https://github.com/elastic/elasticsearch-docker/issues/171 -RUN ln -sf /etc/pki/ca-trust/extracted/java/cacerts /usr/share/opensearch/jdk/lib/security/cacerts +RUN ln -sf /etc/pki/ca-trust/extracted/java/cacerts /usr/share/wazuh-indexer/jdk/lib/security/cacerts -ENV PATH /usr/share/opensearch/bin:\$PATH +ENV PATH /usr/share/wazuh-indexer/bin:\$PATH COPY bin/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh # The JDK's directories' permissions don't allow `java` to be executed under a different # group to the default. Fix this. 
-RUN find /usr/share/opensearch/jdk -type d -exec chmod 0755 '{}' \\; && \\ +RUN find /usr/share/wazuh-indexer/jdk -type d -exec chmod 0755 '{}' \\; && \\ chmod g=u /etc/passwd && \\ chmod 0775 /usr/local/bin/docker-entrypoint.sh diff --git a/distribution/docker/src/docker/bin/docker-entrypoint.sh b/distribution/docker/src/docker/bin/docker-entrypoint.sh index 33c68afce0bfc..e24c5cb6a7436 100644 --- a/distribution/docker/src/docker/bin/docker-entrypoint.sh +++ b/distribution/docker/src/docker/bin/docker-entrypoint.sh @@ -46,7 +46,7 @@ fi # This is also sourced in opensearch-env, and is only needed here # as well because we use ELASTIC_PASSWORD below. Sourcing this script # is idempotent. -source /usr/share/opensearch/bin/opensearch-env-from-file +source /usr/share/wazuh-indexer/bin/opensearch-env-from-file if [[ -f bin/opensearch-users ]]; then # Check for the ELASTIC_PASSWORD environment variable to set the @@ -56,7 +56,7 @@ if [[ -f bin/opensearch-users ]]; then # enabled, but we have no way of knowing which node we are yet. We'll just # honor the variable if it's present. if [[ -n "$ELASTIC_PASSWORD" ]]; then - [[ -f /usr/share/opensearch/config/opensearch.keystore ]] || (run_as_other_user_if_needed opensearch-keystore create) + [[ -f /usr/share/wazuh-indexer/config/opensearch.keystore ]] || (run_as_other_user_if_needed opensearch-keystore create) if ! (run_as_other_user_if_needed opensearch-keystore has-passwd --silent) ; then # keystore is unencrypted if ! (run_as_other_user_if_needed opensearch-keystore list | grep -q '^bootstrap.password$'); then @@ -76,8 +76,8 @@ fi if [[ "$(id -u)" == "0" ]]; then # If requested and running as root, mutate the ownership of bind-mounts if [[ -n "$TAKE_FILE_OWNERSHIP" ]]; then - chown -R 1000:0 /usr/share/opensearch/{data,logs} + chown -R 1000:0 /usr/share/wazuh-indexer/{data,logs} fi fi -run_as_other_user_if_needed /usr/share/opensearch/bin/opensearch <<<"$KEYSTORE_PASSWORD" +run_as_other_user_if_needed /usr/share/wazuh-indexer/bin/opensearch <<<"$KEYSTORE_PASSWORD" diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 659b25129b23c..9c99a41f34e56 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -58,8 +58,8 @@ import java.util.regex.Pattern * The following commands are useful when it comes to check the user/group * and files permissions set within the RPM and DEB packages: * - * rpm -qlp --dump path/to/opensearch.rpm - * dpkg -c path/to/opensearch.deb + * rpm -qlp --dump path/to/wazuh-indexer.rpm + * dpkg -c path/to/wazuh-indexer.deb */ plugins { @@ -79,20 +79,20 @@ void addProcessFilesTask(String type, boolean jdk) { MavenFilteringHack.filter(it, expansionsForDistribution(type, jdk)) } - into('etc/opensearch') { + into('etc/wazuh-indexer') { with configFiles(type, jdk) } MavenFilteringHack.filter(it, expansionsForDistribution(type, jdk)) doLast { // create empty dirs, we set the permissions when configuring the packages - mkdir "${packagingFiles}/var/log/opensearch" - mkdir "${packagingFiles}/var/lib/opensearch" - mkdir "${packagingFiles}/usr/share/opensearch/plugins" + mkdir "${packagingFiles}/var/log/wazuh-indexer" + mkdir "${packagingFiles}/var/lib/wazuh-indexer" + mkdir "${packagingFiles}/usr/share/wazuh-indexer/plugins" - // bare empty dir for /etc/opensearch and /etc/opensearch/jvm.options.d - mkdir "${packagingFiles}/opensearch" - mkdir "${packagingFiles}/opensearch/jvm.options.d" + // bare empty dir for /etc/wazuh-indexer and /etc/wazuh-indexer/jvm.options.d + 
mkdir "${packagingFiles}/wazuh-indexer" + mkdir "${packagingFiles}/wazuh-indexer/jvm.options.d" } } } @@ -106,12 +106,13 @@ addProcessFilesTask('rpm', false) // since we have different templated files that need to be consumed, but the structure // is the same Closure commonPackageConfig(String type, boolean jdk, String architecture) { + project.version = rootProject.file('VERSION').getText() return { onlyIf { OS.current().equals(OS.WINDOWS) == false } dependsOn "process'${jdk ? '' : 'NoJdk'}${type.capitalize()}Files" - packageName "opensearch" + packageName "wazuh-indexer" if (type == 'deb') { if (architecture == 'x64') { arch('amd64') @@ -154,7 +155,7 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) { // top level "into" directive is not inherited from ospackage for some reason, so we must // specify it again explicitly for copying common files - into('/usr/share/opensearch') { + into('/usr/share/wazuh-indexer') { into('bin') { with binFiles(type, jdk) } @@ -175,6 +176,9 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) { with jdkFiles(project, 'linux', architecture) } } + into ('') { + with versionFile() + } // we need to specify every intermediate directory in these paths so the package managers know they are explicitly // intended to manage them; otherwise they may be left behind on uninstallation. duplicate calls of the same // directory are fine @@ -203,7 +207,7 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) { } } else { assert type == 'rpm' - into('/usr/share/opensearch') { + into('/usr/share/wazuh-indexer') { from(rootProject.file('licenses')) { include 'APACHE-LICENSE-2.0.txt' rename { 'LICENSE.txt' } @@ -215,29 +219,29 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) { } // ========= config files ========= - configurationFile '/etc/opensearch/opensearch.yml' - configurationFile '/etc/opensearch/jvm.options' - configurationFile '/etc/opensearch/log4j2.properties' + configurationFile '/etc/wazuh-indexer/opensearch.yml' + configurationFile '/etc/wazuh-indexer/jvm.options' + configurationFile '/etc/wazuh-indexer/log4j2.properties' from("${packagingFiles}") { dirPermissions { unix 0750 } into('/etc') - permissionGroup 'opensearch' + permissionGroup 'wazuh-indexer' includeEmptyDirs true createDirectoryEntry true - include("opensearch") // empty dir, just to add directory entry - include("opensearch/jvm.options.d") // empty dir, just to add directory entry + include("wazuh-indexer") // empty dir, just to add directory entry + include("wazuh-indexer/jvm.options.d") // empty dir, just to add directory entry } - from("${packagingFiles}/etc/opensearch") { - into('/etc/opensearch') + from("${packagingFiles}/etc/wazuh-indexer") { + into('/etc/wazuh-indexer') dirPermissions { unix 0750 } filePermissions{ unix 0660 } - permissionGroup 'opensearch' + permissionGroup 'wazuh-indexer' includeEmptyDirs true createDirectoryEntry true fileType CONFIG | NOREPLACE @@ -246,35 +250,35 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) { configurationFile envFile into(new File(envFile).getParent()) { fileType CONFIG | NOREPLACE - permissionGroup 'opensearch' + permissionGroup 'wazuh-indexer' filePermissions { unix 0660 } - from "${packagingFiles}/env/opensearch" + from "${packagingFiles}/env/wazuh-indexer" } // ========= systemd ========= into('/usr/lib/tmpfiles.d') { - from "${packagingFiles}/systemd/opensearch.conf" + from 
"${packagingFiles}/systemd/wazuh-indexer.conf" filePermissions { unix 0644 } } into('/usr/lib/systemd/system') { fileType CONFIG | NOREPLACE - from "${packagingFiles}/systemd/opensearch.service" + from "${packagingFiles}/systemd/wazuh-indexer.service" filePermissions { unix 0644 } } into('/usr/lib/sysctl.d') { fileType CONFIG | NOREPLACE - from "${packagingFiles}/systemd/sysctl/opensearch.conf" + from "${packagingFiles}/systemd/sysctl/wazuh-indexer.conf" filePermissions { unix 0644 } } - into('/usr/share/opensearch/bin') { + into('/usr/share/wazuh-indexer/bin') { from "${packagingFiles}/systemd/systemd-entrypoint" filePermissions { unix 0755 @@ -282,13 +286,13 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) { } // ========= sysV init ========= - configurationFile '/etc/init.d/opensearch' + configurationFile '/etc/init.d/wazuh-indexer' into('/etc/init.d') { filePermissions { unix 0750 } fileType CONFIG | NOREPLACE - from "${packagingFiles}/init.d/opensearch" + from "${packagingFiles}/init.d/wazuh-indexer" } // ========= empty dirs ========= @@ -307,11 +311,11 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) { } } } - copyEmptyDir('/var/log/opensearch', 'opensearch', 'opensearch', 0750) - copyEmptyDir('/var/lib/opensearch', 'opensearch', 'opensearch', 0750) - copyEmptyDir('/usr/share/opensearch/plugins', 'root', 'root', 0755) + copyEmptyDir('/var/log/wazuh-indexer', 'wazuh-indexer', 'wazuh-indexer', 0750) + copyEmptyDir('/var/lib/wazuh-indexer', 'wazuh-indexer', 'wazuh-indexer', 0750) + copyEmptyDir('/usr/share/wazuh-indexer/plugins', 'root', 'root', 0755) - into '/usr/share/opensearch' + into '/usr/share/wazuh-indexer' with noticeFile(jdk) } } @@ -345,7 +349,7 @@ ospackage { user 'root' permissionGroup 'root' - into '/usr/share/opensearch' + into '/usr/share/wazuh-indexer' } Closure commonDebConfig(boolean jdk, String architecture) { @@ -366,7 +370,7 @@ Closure commonDebConfig(boolean jdk, String architecture) { requires 'adduser' into('/usr/share/lintian/overrides') { - from('src/deb/lintian/opensearch') + from('src/deb/lintian/wazuh-indexer') filePermissions { unix 0644 } @@ -410,7 +414,7 @@ Closure commonRpmConfig(boolean jdk, String architecture) { vendor 'OpenSearch' // TODO ospackage doesn't support icon but we used to have one - // without this the rpm will have parent dirs of any files we copy in, eg /etc/opensearch + // without this the rpm will have parent dirs of any files we copy in, eg /etc/wazuh-indexer addParentDirs false } } @@ -505,7 +509,7 @@ subprojects { Path copyrightPath String expectedLicense String licenseFilename - copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/opensearch/copyright") + copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/wazuh-indexer/copyright") expectedLicense = "ASL-2.0" licenseFilename = "APACHE-LICENSE-2.0.txt" final List header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/", @@ -524,7 +528,7 @@ subprojects { doLast { String licenseFilename = "APACHE-LICENSE-2.0.txt" final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) - final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/opensearch/LICENSE.txt") + final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/wazuh-indexer/LICENSE.txt") assertLinesInFile(licensePath, licenseLines) } } @@ -537,7 +541,7 @@ subprojects { } doLast { final List noticeLines = 
Arrays.asList("OpenSearch (https://opensearch.org/)", "Copyright OpenSearch Contributors") - final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/opensearch/NOTICE.txt") + final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/wazuh-indexer/NOTICE.txt") assertLinesInFile(noticePath, noticeLines) } } diff --git a/distribution/packages/src/common/env/opensearch b/distribution/packages/src/common/env/wazuh-indexer similarity index 68% rename from distribution/packages/src/common/env/opensearch rename to distribution/packages/src/common/env/wazuh-indexer index 198bcfde90c4c..1b6f33db8a34b 100644 --- a/distribution/packages/src/common/env/opensearch +++ b/distribution/packages/src/common/env/wazuh-indexer @@ -1,19 +1,19 @@ ################################ -# OpenSearch +# wazuh-indexer ################################ -# OpenSearch home directory -#OPENSEARCH_HOME=/usr/share/opensearch +# wazuh-indexer home directory +#OPENSEARCH_HOME=/usr/share/wazuh-indexer -# OpenSearch Java path +# wazuh-indexer Java path #OPENSEARCH_JAVA_HOME= -# OpenSearch configuration directory +# wazuh-indexer configuration directory # Note: this setting will be shared with command-line tools OPENSEARCH_PATH_CONF=${path.conf} -# OpenSearch PID directory -#PID_DIR=/var/run/opensearch +# wazuh-indexer PID directory +#PID_DIR=/var/run/wazuh-indexer # Additional Java OPTS #OPENSEARCH_JAVA_OPTS= @@ -22,12 +22,12 @@ OPENSEARCH_PATH_CONF=${path.conf} #RESTART_ON_UPGRADE=true ################################ -# OpenSearch service +# wazuh-indexer service ################################ # SysV init.d # -# The number of seconds to wait before checking if OpenSearch started successfully as a daemon process +# The number of seconds to wait before checking if wazuh-indexer started successfully as a daemon process OPENSEARCH_STARTUP_SLEEP_TIME=5 ################################ @@ -36,17 +36,17 @@ OPENSEARCH_STARTUP_SLEEP_TIME=5 # Specifies the maximum file descriptor number that can be opened by this process # When using Systemd, this setting is ignored and the LimitNOFILE defined in -# /usr/lib/systemd/system/opensearch.service takes precedence +# /usr/lib/systemd/system/wazuh-indexer.service takes precedence #MAX_OPEN_FILES=65535 # The maximum number of bytes of memory that may be locked into RAM # Set to "unlimited" if you use the 'bootstrap.memory_lock: true' option # in opensearch.yml. # When using systemd, LimitMEMLOCK must be set in a unit file such as -# /etc/systemd/system/opensearch.service.d/override.conf. +# /etc/systemd/system/wazuh-indexer.service.d/override.conf. 
#MAX_LOCKED_MEMORY=unlimited # Maximum number of VMA (Virtual Memory Areas) a process can own # When using Systemd, this setting is ignored and the 'vm.max_map_count' -# property is set at boot time in /usr/lib/sysctl.d/opensearch.conf +# property is set at boot time in /usr/lib/sysctl.d/wazuh-indexer.conf #MAX_MAP_COUNT=262144 diff --git a/distribution/packages/src/common/scripts/postinst b/distribution/packages/src/common/scripts/postinst index 308e86b850247..5c1e942f87e18 100644 --- a/distribution/packages/src/common/scripts/postinst +++ b/distribution/packages/src/common/scripts/postinst @@ -50,52 +50,52 @@ case "$1" in ;; esac -# to pick up /usr/lib/sysctl.d/opensearch.conf +# to pick up /usr/lib/sysctl.d/wazuh-indexer.conf if command -v systemctl > /dev/null; then systemctl restart systemd-sysctl.service || true fi if [ "x$IS_UPGRADE" != "xtrue" ]; then if command -v systemctl >/dev/null; then - echo "### NOT starting on installation, please execute the following statements to configure opensearch service to start automatically using systemd" + echo "### NOT starting on installation, please execute the following statements to configure wazuh-indexer service to start automatically using systemd" echo " sudo systemctl daemon-reload" - echo " sudo systemctl enable opensearch.service" - echo "### You can start opensearch service by executing" - echo " sudo systemctl start opensearch.service" + echo " sudo systemctl enable wazuh-indexer.service" + echo "### You can start wazuh-indexer service by executing" + echo " sudo systemctl start wazuh-indexer.service" elif command -v chkconfig >/dev/null; then - echo "### NOT starting on installation, please execute the following statements to configure opensearch service to start automatically using chkconfig" - echo " sudo chkconfig --add opensearch" - echo "### You can start opensearch service by executing" - echo " sudo service opensearch start" + echo "### NOT starting on installation, please execute the following statements to configure wazuh-indexer service to start automatically using chkconfig" + echo " sudo chkconfig --add wazuh-indexer" + echo "### You can start wazuh-indexer service by executing" + echo " sudo service wazuh-indexer start" elif command -v update-rc.d >/dev/null; then - echo "### NOT starting on installation, please execute the following statements to configure opensearch service to start automatically using chkconfig" - echo " sudo update-rc.d opensearch defaults 95 10" - echo "### You can start opensearch service by executing" - echo " sudo /etc/init.d/opensearch start" + echo "### NOT starting on installation, please execute the following statements to configure wazuh-indexer service to start automatically using chkconfig" + echo " sudo update-rc.d wazuh-indexer defaults 95 10" + echo "### You can start wazuh-indexer service by executing" + echo " sudo /etc/init.d/wazuh-indexer start" fi elif [ "$RESTART_ON_UPGRADE" = "true" ]; then - echo -n "Restarting opensearch service..." + echo -n "Restarting wazuh-indexer service..." 
if command -v systemctl >/dev/null; then systemctl daemon-reload - systemctl restart opensearch.service || true + systemctl restart wazuh-indexer.service || true - elif [ -x /etc/init.d/opensearch ]; then + elif [ -x /etc/init.d/wazuh-indexer ]; then if command -v invoke-rc.d >/dev/null; then - invoke-rc.d opensearch stop || true - invoke-rc.d opensearch start || true + invoke-rc.d wazuh-indexer stop || true + invoke-rc.d wazuh-indexer start || true else - /etc/init.d/opensearch restart || true + /etc/init.d/wazuh-indexer restart || true fi # older suse linux distributions do not ship with systemd # but do not have an /etc/init.d/ directory - # this tries to start the opensearch service on these + # this tries to start the wazuh-indexer service on these # as well without failing this script - elif [ -x /etc/rc.d/init.d/opensearch ] ; then - /etc/rc.d/init.d/opensearch restart || true + elif [ -x /etc/rc.d/init.d/wazuh-indexer ] ; then + /etc/rc.d/init.d/wazuh-indexer restart || true fi echo " OK" fi @@ -103,16 +103,16 @@ fi # the equivalent code for rpm is in posttrans if [ "$PACKAGE" = "deb" ]; then if [ ! -f "${OPENSEARCH_PATH_CONF}"/opensearch.keystore ]; then - /usr/share/opensearch/bin/opensearch-keystore create - chown root:opensearch "${OPENSEARCH_PATH_CONF}"/opensearch.keystore + /usr/share/wazuh-indexer/bin/opensearch-keystore create + chown root:wazuh-indexer "${OPENSEARCH_PATH_CONF}"/opensearch.keystore chmod 660 "${OPENSEARCH_PATH_CONF}"/opensearch.keystore md5sum "${OPENSEARCH_PATH_CONF}"/opensearch.keystore > "${OPENSEARCH_PATH_CONF}"/.opensearch.keystore.initial_md5sum else - if /usr/share/opensearch/bin/opensearch-keystore has-passwd --silent ; then + if /usr/share/wazuh-indexer/bin/opensearch-keystore has-passwd --silent ; then echo "### Warning: unable to upgrade encrypted keystore" 1>&2 echo " Please run opensearch-keystore upgrade and enter password" 1>&2 else - /usr/share/opensearch/bin/opensearch-keystore upgrade + /usr/share/wazuh-indexer/bin/opensearch-keystore upgrade fi fi fi diff --git a/distribution/packages/src/common/scripts/postrm b/distribution/packages/src/common/scripts/postrm index 75eded92a8e41..70871f9ae3004 100644 --- a/distribution/packages/src/common/scripts/postrm +++ b/distribution/packages/src/common/scripts/postrm @@ -32,7 +32,7 @@ case "$1" in REMOVE_JVM_OPTIONS_DIRECTORY=true REMOVE_USER_AND_GROUP=true ;; - failed-upgrade|abort-install|abort-upgrade|disappear|upgrade|disappear) + failed-upgrade|abort-install|abort-upgrade|disappear|upgrade) ;; # RedHat #################################################### @@ -53,34 +53,34 @@ esac if [ "$REMOVE_DIRS" = "true" ]; then - if [ -d /var/log/opensearch ]; then + if [ -d /var/log/wazuh-indexer ]; then echo -n "Deleting log directory..." - rm -rf /var/log/opensearch + rm -rf /var/log/wazuh-indexer echo " OK" fi - if [ -d /usr/share/opensearch/plugins ]; then + if [ -d /usr/share/wazuh-indexer/plugins ]; then echo -n "Deleting plugins directory..." - rm -rf /usr/share/opensearch/plugins + rm -rf /usr/share/wazuh-indexer/plugins echo " OK" fi # plugins may have contained bin files - if [ -d /usr/share/opensearch/bin ]; then + if [ -d /usr/share/wazuh-indexer/bin ]; then echo -n "Deleting plugin bin directories..." - rm -rf /usr/share/opensearch/bin + rm -rf /usr/share/wazuh-indexer/bin echo " OK" fi - if [ -d /var/run/opensearch ]; then + if [ -d /var/run/wazuh-indexer ]; then echo -n "Deleting PID directory..." 
- rm -rf /var/run/opensearch + rm -rf /var/run/wazuh-indexer echo " OK" fi # Delete the data directory if and only if empty - if [ -d /var/lib/opensearch ]; then - rmdir --ignore-fail-on-non-empty /var/lib/opensearch + if [ -d /var/lib/wazuh-indexer ]; then + rmdir --ignore-fail-on-non-empty /var/lib/wazuh-indexer fi # delete the jvm.options.d directory if and only if empty @@ -105,12 +105,12 @@ if [ "$REMOVE_DIRS" = "true" ]; then fi if [ "$REMOVE_USER_AND_GROUP" = "true" ]; then - if id opensearch > /dev/null 2>&1 ; then - userdel opensearch + if id wazuh-indexer > /dev/null 2>&1 ; then + userdel wazuh-indexer fi - if getent group opensearch > /dev/null 2>&1 ; then - groupdel opensearch + if getent group wazuh-indexer > /dev/null 2>&1 ; then + groupdel wazuh-indexer fi fi diff --git a/distribution/packages/src/common/scripts/posttrans b/distribution/packages/src/common/scripts/posttrans index 3b3d4faa766ee..bf6c844ab82bb 100644 --- a/distribution/packages/src/common/scripts/posttrans +++ b/distribution/packages/src/common/scripts/posttrans @@ -6,16 +6,16 @@ fi export OPENSEARCH_PATH_CONF=${OPENSEARCH_PATH_CONF:-${path.conf}} if [ ! -f "${OPENSEARCH_PATH_CONF}"/opensearch.keystore ]; then - /usr/share/opensearch/bin/opensearch-keystore create - chown root:opensearch "${OPENSEARCH_PATH_CONF}"/opensearch.keystore + /usr/share/wazuh-indexer/bin/opensearch-keystore create + chown root:wazuh-indexer "${OPENSEARCH_PATH_CONF}"/opensearch.keystore chmod 660 "${OPENSEARCH_PATH_CONF}"/opensearch.keystore md5sum "${OPENSEARCH_PATH_CONF}"/opensearch.keystore > "${OPENSEARCH_PATH_CONF}"/.opensearch.keystore.initial_md5sum else - if /usr/share/opensearch/bin/opensearch-keystore has-passwd --silent ; then + if /usr/share/wazuh-indexer/bin/opensearch-keystore has-passwd --silent ; then echo "### Warning: unable to upgrade encrypted keystore" 1>&2 echo " Please run opensearch-keystore upgrade and enter password" 1>&2 else - /usr/share/opensearch/bin/opensearch-keystore upgrade + /usr/share/wazuh-indexer/bin/opensearch-keystore upgrade fi fi diff --git a/distribution/packages/src/common/scripts/preinst b/distribution/packages/src/common/scripts/preinst index 31e5b803b1604..75d93c73fccf1 100644 --- a/distribution/packages/src/common/scripts/preinst +++ b/distribution/packages/src/common/scripts/preinst @@ -29,24 +29,24 @@ case "$1" in # Debian #################################################### install|upgrade) - # Create opensearch group if not existing - if ! getent group opensearch > /dev/null 2>&1 ; then - echo -n "Creating opensearch group..." - addgroup --quiet --system opensearch + # Create wazuh-indexer group if not existing + if ! getent group wazuh-indexer > /dev/null 2>&1 ; then + echo -n "Creating wazuh-indexer group..." + addgroup --quiet --system wazuh-indexer echo " OK" fi - # Create opensearch user if not existing - if ! id opensearch > /dev/null 2>&1 ; then - echo -n "Creating opensearch user..." + # Create wazuh-indexer user if not existing + if ! id wazuh-indexer > /dev/null 2>&1 ; then + echo -n "Creating wazuh-indexer user..." adduser --quiet \ --system \ --no-create-home \ --home /nonexistent \ - --ingroup opensearch \ + --ingroup wazuh-indexer \ --disabled-password \ --shell /bin/false \ - opensearch + wazuh-indexer echo " OK" fi ;; @@ -56,23 +56,23 @@ case "$1" in # RedHat #################################################### 1|2) - # Create opensearch group if not existing - if ! getent group opensearch > /dev/null 2>&1 ; then - echo -n "Creating opensearch group..." 
- groupadd -r opensearch + # Create wazuh-indexer group if not existing + if ! getent group wazuh-indexer > /dev/null 2>&1 ; then + echo -n "Creating wazuh-indexer group..." + groupadd -r wazuh-indexer echo " OK" fi - # Create opensearch user if not existing - if ! id opensearch > /dev/null 2>&1 ; then - echo -n "Creating opensearch user..." + # Create wazuh-indexer user if not existing + if ! id wazuh-indexer > /dev/null 2>&1 ; then + echo -n "Creating wazuh-indexer user..." useradd --system \ --no-create-home \ --home-dir /nonexistent \ - --gid opensearch \ + --gid wazuh-indexer \ --shell /sbin/nologin \ - --comment "opensearch user" \ - opensearch + --comment "wazuh-indexer user" \ + wazuh-indexer echo " OK" fi ;; diff --git a/distribution/packages/src/common/scripts/prerm b/distribution/packages/src/common/scripts/prerm index dd3cadd383dbe..7609abb950339 100644 --- a/distribution/packages/src/common/scripts/prerm +++ b/distribution/packages/src/common/scripts/prerm @@ -51,23 +51,23 @@ esac # Stops the service if [ "$STOP_REQUIRED" = "true" ]; then - echo -n "Stopping opensearch service..." + echo -n "Stopping wazuh-indexer service..." if command -v systemctl >/dev/null; then - systemctl --no-reload stop opensearch.service + systemctl --no-reload stop wazuh-indexer.service - elif [ -x /etc/init.d/opensearch ]; then + elif [ -x /etc/init.d/wazuh-indexer ]; then if command -v invoke-rc.d >/dev/null; then - invoke-rc.d opensearch stop + invoke-rc.d wazuh-indexer stop else - /etc/init.d/opensearch stop + /etc/init.d/wazuh-indexer stop fi # older suse linux distributions do not ship with systemd # but do not have an /etc/init.d/ directory - # this tries to start the opensearch service on these + # this tries to start the wazuh-indexer service on these # as well without failing this script - elif [ -x /etc/rc.d/init.d/opensearch ] ; then - /etc/rc.d/init.d/opensearch stop + elif [ -x /etc/rc.d/init.d/wazuh-indexer ] ; then + /etc/rc.d/init.d/wazuh-indexer stop fi echo " OK" fi @@ -80,15 +80,15 @@ fi if [ "$REMOVE_SERVICE" = "true" ]; then if command -v systemctl >/dev/null; then - systemctl disable opensearch.service > /dev/null 2>&1 || true + systemctl disable wazuh-indexer.service > /dev/null 2>&1 || true fi if command -v chkconfig >/dev/null; then - chkconfig --del opensearch 2> /dev/null || true + chkconfig --del wazuh-indexer 2> /dev/null || true fi if command -v update-rc.d >/dev/null; then - update-rc.d opensearch remove >/dev/null || true + update-rc.d wazuh-indexer remove >/dev/null || true fi fi diff --git a/distribution/packages/src/common/systemd/opensearch.conf b/distribution/packages/src/common/systemd/opensearch.conf deleted file mode 100644 index 1245c11a6b7e8..0000000000000 --- a/distribution/packages/src/common/systemd/opensearch.conf +++ /dev/null @@ -1 +0,0 @@ -d /var/run/opensearch 0755 opensearch opensearch - - diff --git a/distribution/packages/src/common/systemd/sysctl/opensearch.conf b/distribution/packages/src/common/systemd/sysctl/wazuh-indexer.conf similarity index 100% rename from distribution/packages/src/common/systemd/sysctl/opensearch.conf rename to distribution/packages/src/common/systemd/sysctl/wazuh-indexer.conf diff --git a/distribution/packages/src/common/systemd/systemd-entrypoint b/distribution/packages/src/common/systemd/systemd-entrypoint index de59b4573f79a..cc24e7b3b5b5f 100644 --- a/distribution/packages/src/common/systemd/systemd-entrypoint +++ b/distribution/packages/src/common/systemd/systemd-entrypoint @@ -6,7 +6,7 @@ set -e -o pipefail if [ -n 
"$OPENSEARCH_KEYSTORE_PASSPHRASE_FILE" ] ; then - exec /usr/share/opensearch/bin/opensearch "$@" < "$OPENSEARCH_KEYSTORE_PASSPHRASE_FILE" + exec /usr/share/wazuh-indexer/bin/opensearch "$@" < "$OPENSEARCH_KEYSTORE_PASSPHRASE_FILE" else - exec /usr/share/opensearch/bin/opensearch "$@" + exec /usr/share/wazuh-indexer/bin/opensearch "$@" fi diff --git a/distribution/packages/src/common/systemd/wazuh-indexer.conf b/distribution/packages/src/common/systemd/wazuh-indexer.conf new file mode 100644 index 0000000000000..f9aa920e837b6 --- /dev/null +++ b/distribution/packages/src/common/systemd/wazuh-indexer.conf @@ -0,0 +1 @@ +d /var/run/wazuh-indexer 0750 wazuh-indexer wazuh-indexer - - diff --git a/distribution/packages/src/common/systemd/opensearch.service b/distribution/packages/src/common/systemd/wazuh-indexer.service similarity index 73% rename from distribution/packages/src/common/systemd/opensearch.service rename to distribution/packages/src/common/systemd/wazuh-indexer.service index 962dc5d2aae72..e396257a9d5cf 100644 --- a/distribution/packages/src/common/systemd/opensearch.service +++ b/distribution/packages/src/common/systemd/wazuh-indexer.service @@ -1,30 +1,30 @@ [Unit] -Description=OpenSearch -Documentation=https://www.elastic.co +Description=wazuh-indexer +Documentation=https://documentation.wazuh.com Wants=network-online.target After=network-online.target [Service] Type=notify -RuntimeDirectory=opensearch +RuntimeDirectory=wazuh-indexer PrivateTmp=true -Environment=OPENSEARCH_HOME=/usr/share/opensearch +Environment=OPENSEARCH_HOME=/usr/share/wazuh-indexer Environment=OPENSEARCH_PATH_CONF=${path.conf} -Environment=PID_DIR=/var/run/opensearch +Environment=PID_DIR=/var/run/wazuh-indexer Environment=OPENSEARCH_SD_NOTIFY=true EnvironmentFile=-${path.env} -WorkingDirectory=/usr/share/opensearch +WorkingDirectory=/usr/share/wazuh-indexer -User=opensearch -Group=opensearch +User=wazuh-indexer +Group=wazuh-indexer -ExecStart=/usr/share/opensearch/bin/systemd-entrypoint -p ${PID_DIR}/opensearch.pid --quiet +ExecStart=/usr/share/wazuh-indexer/bin/systemd-entrypoint -p ${PID_DIR}/wazuh-indexer.pid --quiet # StandardOutput is configured to redirect to journalctl since # some error messages may be logged in standard output before -# opensearch logging system is initialized. OpenSearch -# stores its logs in /var/log/opensearch and does not use +# wazuh-indexer logging system is initialized. Wazuh-indexer +# stores its logs in /var/log/wazuh-indexer and does not use # journalctl by default. If you also want to enable journalctl # logging, you can simply remove the "quiet" option from ExecStart. 
StandardOutput=journal diff --git a/distribution/packages/src/deb/init.d/opensearch b/distribution/packages/src/deb/init.d/wazuh-indexer similarity index 79% rename from distribution/packages/src/deb/init.d/opensearch rename to distribution/packages/src/deb/init.d/wazuh-indexer index 681d87df1d356..5843a982adc5e 100755 --- a/distribution/packages/src/deb/init.d/opensearch +++ b/distribution/packages/src/deb/init.d/wazuh-indexer @@ -1,22 +1,22 @@ #!/usr/bin/env bash # -# /etc/init.d/opensearch -- startup script for OpenSearch +# /etc/init.d/wazuh-indexer -- startup script for Wazuh indexer # ### BEGIN INIT INFO -# Provides: opensearch +# Provides: wazuh-indexer # Required-Start: $network $remote_fs $named # Required-Stop: $network $remote_fs $named # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 -# Short-Description: Starts opensearch -# Description: Starts opensearch using start-stop-daemon +# Short-Description: Starts wazuh-indexer +# Description: Starts wazuh-indexer using start-stop-daemon ### END INIT INFO set -e -o pipefail PATH=/bin:/usr/bin:/sbin:/usr/sbin -NAME=opensearch -DESC="OpenSearch Server" +NAME=wazuh-indexer +DESC=$NAME DEFAULT=/etc/default/$NAME if [ `id -u` -ne 0 ]; then @@ -53,7 +53,7 @@ OPENSEARCH_PATH_CONF=/etc/$NAME MAX_MAP_COUNT=262144 # OpenSearch PID file directory -PID_DIR="/var/run/opensearch" +PID_DIR="/var/run/$NAME" # End of variables that can be overwritten in $DEFAULT @@ -73,7 +73,7 @@ export JAVA_HOME export OPENSEARCH_JAVA_HOME if [ ! -x "$DAEMON" ]; then - echo "The opensearch startup script does not exists or it is not executable, tried: $DAEMON" + echo "The wazuh-indexer startup script does not exists or it is not executable, tried: $DAEMON" exit 1 fi @@ -82,7 +82,7 @@ case "$1" in log_daemon_msg "Starting $DESC" - pid=`pidofproc -p $PID_FILE opensearch` + pid=`pidofproc -p $PID_FILE wazuh-indexer` if [ -n "$pid" ] ; then log_begin_msg "Already running." log_end_msg 0 @@ -91,10 +91,10 @@ case "$1" in # Ensure that the PID_DIR exists (it is cleaned at OS startup time) if [ -n "$PID_DIR" ] && [ ! -e "$PID_DIR" ]; then - mkdir -p "$PID_DIR" && chown opensearch:opensearch "$PID_DIR" + mkdir -p "$PID_DIR" && chown wazuh-indexer:wazuh-indexer "$PID_DIR" fi if [ -n "$PID_FILE" ] && [ ! -e "$PID_FILE" ]; then - touch "$PID_FILE" && chown opensearch:opensearch "$PID_FILE" + touch "$PID_FILE" && chown wazuh-indexer:wazuh-indexer "$PID_FILE" fi if [ -n "$MAX_OPEN_FILES" ]; then @@ -110,7 +110,7 @@ case "$1" in fi # Start Daemon - start-stop-daemon -d $OPENSEARCH_HOME --start --user opensearch -c opensearch --pidfile "$PID_FILE" --exec $DAEMON -- $DAEMON_OPTS + start-stop-daemon -d $OPENSEARCH_HOME --start --user wazuh-indexer -c wazuh-indexer --pidfile "$PID_FILE" --exec $DAEMON -- $DAEMON_OPTS return=$? if [ $return -eq 0 ]; then i=0 @@ -134,7 +134,7 @@ case "$1" in if [ -f "$PID_FILE" ]; then start-stop-daemon --stop --pidfile "$PID_FILE" \ - --user opensearch \ + --user wazuh-indexer \ --quiet \ --retry forever/TERM/20 > /dev/null if [ $? -eq 1 ]; then @@ -151,7 +151,7 @@ case "$1" in log_end_msg 0 ;; status) - status_of_proc -p $PID_FILE opensearch opensearch && exit 0 || exit $? + status_of_proc -p $PID_FILE wazuh-indexer wazuh-indexer && exit 0 || exit $? 
;; restart|force-reload) if [ -f "$PID_FILE" ]; then diff --git a/distribution/packages/src/deb/lintian/opensearch b/distribution/packages/src/deb/lintian/opensearch deleted file mode 100644 index e6db8e8c6b322..0000000000000 --- a/distribution/packages/src/deb/lintian/opensearch +++ /dev/null @@ -1,46 +0,0 @@ -# we don't have a changelog, but we put our copyright file -# under /usr/share/doc/opensearch, which triggers this warning -changelog-file-missing-in-native-package - -# we intentionally copy our copyright file for all deb packages -copyright-file-contains-full-apache-2-license -copyright-should-refer-to-common-license-file-for-apache-2 -copyright-without-copyright-notice - -# we still put all our files under /usr/share/opensearch even after transition to platform dependent packages -arch-dependent-file-in-usr-share - -# we have a bundled jdk, so don't use jarwrapper -missing-dep-on-jarwrapper - -# we prefer to not make our config and log files world readable -non-standard-file-perm etc/default/opensearch 0660 != 0644 -non-standard-dir-perm etc/opensearch/ 0750 != 0755 -non-standard-dir-perm etc/opensearch/jvm.options.d/ 0750 != 0755 -non-standard-file-perm etc/opensearch/* -non-standard-dir-perm var/lib/opensearch/ 0750 != 0755 -non-standard-dir-perm var/log/opensearch/ 0750 != 0755 -executable-is-not-world-readable etc/init.d/opensearch 0750 -non-standard-file-permissions-for-etc-init.d-script etc/init.d/opensearch 0750 != 0755 - -# this lintian tag is simply wrong; contrary to the explanation, debian systemd -# does actually look at /usr/lib/systemd/system -systemd-service-file-outside-lib usr/lib/systemd/system/opensearch.service - -# we do not automatically enable the service in init.d or systemd -script-in-etc-init.d-not-registered-via-update-rc.d etc/init.d/opensearch - -# the package scripts handle init.d/systemd directly and don't need to use deb helpers -maintainer-script-calls-systemctl -prerm-calls-updaterc.d opensearch - -# bundled JDK -embedded-library -arch-dependent-file-in-usr-share usr/share/opensearch/jdk/* -unstripped-binary-or-object usr/share/opensearch/jdk/* -extra-license-file usr/share/opensearch/jdk/legal/* -hardening-no-pie usr/share/opensearch/jdk/bin/* -hardening-no-pie usr/share/opensearch/jdk/lib/* - -# the system java version that lintian assumes is far behind what opensearch uses -unknown-java-class-version diff --git a/distribution/packages/src/deb/lintian/wazuh-indexer b/distribution/packages/src/deb/lintian/wazuh-indexer new file mode 100644 index 0000000000000..6d98dc7a7b879 --- /dev/null +++ b/distribution/packages/src/deb/lintian/wazuh-indexer @@ -0,0 +1,46 @@ +# we don't have a changelog, but we put our copyright file +# under /usr/share/doc/wazuh-indexer, which triggers this warning +changelog-file-missing-in-native-package + +# we intentionally copy our copyright file for all deb packages +copyright-file-contains-full-apache-2-license +copyright-should-refer-to-common-license-file-for-apache-2 +copyright-without-copyright-notice + +# we still put all our files under /usr/share/wazuh-indexer even after transition to platform dependent packages +arch-dependent-file-in-usr-share + +# we have a bundled jdk, so don't use jarwrapper +missing-dep-on-jarwrapper + +# we prefer to not make our config and log files world readable +non-standard-file-perm etc/default/wazuh-indexer 0660 != 0644 +non-standard-dir-perm etc/wazuh-indexer/ 0750 != 0755 +non-standard-dir-perm etc/wazuh-indexer/jvm.options.d/ 0750 != 0755 +non-standard-file-perm 
etc/wazuh-indexer/* +non-standard-dir-perm var/lib/wazuh-indexer/ 0750 != 0755 +non-standard-dir-perm var/log/wazuh-indexer/ 0750 != 0755 +executable-is-not-world-readable etc/init.d/wazuh-indexer 0750 +non-standard-file-permissions-for-etc-init.d-script etc/init.d/wazuh-indexer 0750 != 0755 + +# this lintian tag is simply wrong; contrary to the explanation, debian systemd +# does actually look at /usr/lib/systemd/system +systemd-service-file-outside-lib usr/lib/systemd/system/wazuh-indexer.service + +# we do not automatically enable the service in init.d or systemd +script-in-etc-init.d-not-registered-via-update-rc.d etc/init.d/wazuh-indexer + +# the package scripts handle init.d/systemd directly and don't need to use deb helpers +maintainer-script-calls-systemctl +prerm-calls-updaterc.d wazuh-indexer + +# bundled JDK +embedded-library +arch-dependent-file-in-usr-share usr/share/wazuh-indexer/jdk/* +unstripped-binary-or-object usr/share/wazuh-indexer/jdk/* +extra-license-file usr/share/wazuh-indexer/jdk/legal/* +hardening-no-pie usr/share/wazuh-indexer/jdk/bin/* +hardening-no-pie usr/share/wazuh-indexer/jdk/lib/* + +# the system java version that lintian assumes is far behind what wazuh-indexer uses +unknown-java-class-version diff --git a/distribution/packages/src/rpm/init.d/opensearch b/distribution/packages/src/rpm/init.d/wazuh-indexer similarity index 80% rename from distribution/packages/src/rpm/init.d/opensearch rename to distribution/packages/src/rpm/init.d/wazuh-indexer index 0cb9bf65796ad..75b7b7ac9756f 100644 --- a/distribution/packages/src/rpm/init.d/opensearch +++ b/distribution/packages/src/rpm/init.d/wazuh-indexer @@ -1,9 +1,9 @@ #!/usr/bin/env bash # -# opensearch +# wazuh-indexer # # chkconfig: 2345 80 20 -# description: Starts and stops a single opensearch instance on this system +# description: Starts and stops a single wazuh-indexer instance on this system # ### BEGIN INIT INFO @@ -12,7 +12,7 @@ # Required-Stop: $network $named # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 -# Short-Description: This service manages the opensearch daemon +# Short-Description: This service manages the wazuh-indexer daemon # Description: OpenSearch is a very scalable, schema-free and high-performance search solution supporting multi-tenancy and near realtime search. ### END INIT INFO @@ -33,13 +33,13 @@ if [ -f /etc/rc.d/init.d/functions ]; then . /etc/rc.d/init.d/functions fi -# Sets the default values for opensearch variables used in this script -OPENSEARCH_HOME="/usr/share/opensearch" +# Sets the default values for wazuh-indexer variables used in this script +OPENSEARCH_HOME="/usr/share/wazuh-indexer" MAX_OPEN_FILES=65535 MAX_MAP_COUNT=262144 OPENSEARCH_PATH_CONF="${path.conf}" -PID_DIR="/var/run/opensearch" +PID_DIR="/var/run/wazuh-indexer" # Source the default env file OPENSEARCH_ENV_FILE="${path.env}" @@ -47,8 +47,8 @@ if [ -f "$OPENSEARCH_ENV_FILE" ]; then . "$OPENSEARCH_ENV_FILE" fi -exec="$OPENSEARCH_HOME/bin/opensearch" -prog="opensearch" +exec="$OPENSEARCH_HOME/bin/wazuh-indexer" +prog="wazuh-indexer" pidfile="$PID_DIR/${prog}.pid" export OPENSEARCH_JAVA_OPTS @@ -60,7 +60,7 @@ export OPENSEARCH_JAVA_HOME lockfile=/var/lock/subsys/$prog if [ ! 
-x "$exec" ]; then - echo "The opensearch startup script does not exists or it is not executable, tried: $exec" + echo "The wazuh-indexer startup script does not exists or it is not executable, tried: $exec" exit 1 fi @@ -79,16 +79,16 @@ start() { # Ensure that the PID_DIR exists (it is cleaned at OS startup time) if [ -n "$PID_DIR" ] && [ ! -e "$PID_DIR" ]; then - mkdir -p "$PID_DIR" && chown opensearch:opensearch "$PID_DIR" + mkdir -p "$PID_DIR" && chown wazuh-indexer:wazuh-indexer "$PID_DIR" fi if [ -n "$pidfile" ] && [ ! -e "$pidfile" ]; then - touch "$pidfile" && chown opensearch:opensearch "$pidfile" + touch "$pidfile" && chown wazuh-indexer:wazuh-indexer "$pidfile" fi cd $OPENSEARCH_HOME echo -n $"Starting $prog: " # if not running, start it up here, usually something like "daemon $exec" - daemon --user opensearch --pidfile $pidfile $exec -p $pidfile -d + daemon --user wazuh-indexer --pidfile $pidfile $exec -p $pidfile -d retval=$? echo [ $retval -eq 0 ] && touch $lockfile diff --git a/scripts/build.sh b/scripts/build.sh index 48075ea6bf566..55d9f96c1e838 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -71,15 +71,15 @@ fi [ -z "$OUTPUT" ] && OUTPUT=artifacts echo "Creating output directory $OUTPUT/maven/org/opensearch if it doesn't already exist" -mkdir -p $OUTPUT/maven/org/opensearch +mkdir -p "$OUTPUT/maven/org/opensearch" # Build project and publish to maven local. echo "Building and publishing OpenSearch project to Maven Local" -./gradlew publishToMavenLocal -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +./gradlew publishToMavenLocal -Dbuild.snapshot="$SNAPSHOT" -Dbuild.version_qualifier="$QUALIFIER" # Publish to existing test repo, using this to stage release versions of the artifacts that can be released from the same build. 
echo "Publishing OpenSearch to Test Repository" -./gradlew publishNebulaPublicationToTestRepository -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +./gradlew publishNebulaPublicationToTestRepository -Dbuild.snapshot="$SNAPSHOT" -Dbuild.version_qualifier="$QUALIFIER" # Copy maven publications to be promoted echo "Copying Maven publications to $OUTPUT/maven/org" @@ -89,7 +89,7 @@ cp -r ./build/local-test-repo/org/opensearch "${OUTPUT}"/maven/org # see https://github.com/opensearch-project/OpenSearch/blob/main/settings.gradle#L34 for other distribution targets [ -z "$PLATFORM" ] && PLATFORM=$(uname -s | awk '{print tolower($0)}') -[ -z "$ARCHITECTURE" ] && ARCHITECTURE=`uname -m` +[ -z "$ARCHITECTURE" ] && ARCHITECTURE=$(uname -m) [ -z "$DISTRIBUTION" ] && DISTRIBUTION="tar" case $PLATFORM-$DISTRIBUTION-$ARCHITECTURE in @@ -157,13 +157,13 @@ esac echo "Building OpenSearch for $PLATFORM-$DISTRIBUTION-$ARCHITECTURE" -./gradlew :distribution:$TYPE:$TARGET:assemble -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +./gradlew ":distribution:$TYPE:$TARGET:assemble" -Dbuild.snapshot="$SNAPSHOT" -Dbuild.version_qualifier="$QUALIFIER" # Copy artifact to dist folder in bundle build output echo "Copying artifact to ${OUTPUT}/dist" -[[ "$SNAPSHOT" == "true" ]] && IDENTIFIER="-SNAPSHOT" -ARTIFACT_BUILD_NAME=`ls distribution/$TYPE/$TARGET/build/distributions/ | grep "opensearch-min.*$SUFFIX.$EXT"` +# [[ "$SNAPSHOT" == "true" ]] && IDENTIFIER="-SNAPSHOT" +ARTIFACT_BUILD_NAME=$(ls "distribution/$TYPE/$TARGET/build/distributions/" | grep "wazuh-indexer-min.*$SUFFIX.$EXT") # [WAZUH] Used by the GH workflow to upload the artifact echo "$ARTIFACT_BUILD_NAME" > "$OUTPUT/artifact_name.txt" mkdir -p "${OUTPUT}/dist" -cp distribution/$TYPE/$TARGET/build/distributions/$ARTIFACT_BUILD_NAME "${OUTPUT}"/dist/$ARTIFACT_BUILD_NAME +cp "distribution/$TYPE/$TARGET/build/distributions/$ARTIFACT_BUILD_NAME" "${OUTPUT}/dist/$ARTIFACT_BUILD_NAME" diff --git a/settings.gradle b/settings.gradle index b79c2aee135fc..7cbc3694c03a6 100644 --- a/settings.gradle +++ b/settings.gradle @@ -21,7 +21,7 @@ buildCache { } } -rootProject.name = "OpenSearch" +rootProject.name = "Wazuh indexer" include 'doc-tools' includeBuild("doc-tools/missing-doclet") From 803cef9aa0afaa113c8f2d1b2950b51dc9e7ce76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Thu, 7 Dec 2023 18:08:10 +0100 Subject: [PATCH 019/120] Update vulnerability index mappings (#75) * Remove 'events' ECS field * Add 'wazuh' custom field * Update event_generator.py for vulnerability detector --- ecs/generate.sh | 18 +++++----- .../event-generator/event_generator.py | 35 +++++++++++++------ .../fields/custom/wazuh.yml | 11 ++++++ ecs/vulnerability-detector/fields/subset.yml | 4 +-- .../fields/template-settings-legacy.json | 6 ++-- .../fields/template-settings.json | 6 ++-- 6 files changed, 49 insertions(+), 31 deletions(-) create mode 100644 ecs/vulnerability-detector/fields/custom/wazuh.yml diff --git a/ecs/generate.sh b/ecs/generate.sh index b618bb5e97fd6..14c691d517cf8 100755 --- a/ecs/generate.sh +++ b/ecs/generate.sh @@ -1,5 +1,8 @@ #!/bin/bash +set -e +set -u + # Function to display usage information show_usage() { echo "Usage: $0 [--upload ]" @@ -12,20 +15,15 @@ show_usage() { # Function to generate mappings generate_mappings() { - ECS_VERSION="$1" - INDEXER_SRC="$2" - MODULE="$3" - UPLOAD="$4" - URL="$5" - - IN_FILES_DIR="$INDEXER_SRC/ecs/$MODULE/fields" - OUT_DIR="$INDEXER_SRC/ecs/$MODULE/mappings/$ECS_VERSION" + local 
IN_FILES_DIR="$INDEXER_SRC/ecs/$MODULE/fields" + local OUT_DIR="$INDEXER_SRC/ecs/$MODULE/mappings/$ECS_VERSION" # Ensure the output directory exists mkdir -p "$OUT_DIR" || exit 1 # Generate mappings python scripts/generator.py --strict --ref "$ECS_VERSION" \ + --include "$IN_FILES_DIR/custom/wazuh.yml" \ --subset "$IN_FILES_DIR/subset.yml" \ --template-settings "$IN_FILES_DIR/template-settings.json" \ --template-settings-legacy "$IN_FILES_DIR/template-settings-legacy.json" \ @@ -55,8 +53,8 @@ generate_mappings() { # Function to upload generated composable index template to the OpenSearch cluster upload_mappings() { - OUT_DIR="$1" - URL="$2" + local OUT_DIR="$1" + local URL="$2" echo "Uploading index template to the OpenSearch cluster" for file in "$OUT_DIR/generated/elasticsearch/composable/component"/*.json; do diff --git a/ecs/vulnerability-detector/event-generator/event_generator.py b/ecs/vulnerability-detector/event-generator/event_generator.py index 9cbc0efc44f92..0b8c71ec5295b 100755 --- a/ecs/vulnerability-detector/event-generator/event_generator.py +++ b/ecs/vulnerability-detector/event-generator/event_generator.py @@ -64,7 +64,7 @@ def generate_random_event(): 'created': generate_random_date(), 'dataset': random.choice(['process', 'file', 'registry', 'socket', 'dns', 'http', 'tls', 'alert', 'authentication', 'authorization', 'configuration', 'communication', 'file', - 'network', 'process', 'registry', 'storage', 'system', 'web']), + 'network', 'process', 'registry', 'storage', 'system', 'web']), 'duration': random.randint(0, 99999), 'end': generate_random_date(), 'hash': str(hash(f'hash{random.randint(0, 99999)}')), @@ -74,12 +74,12 @@ def generate_random_event(): 'state', 'pipeline_error', 'signal']), 'module': random.choice(['process', 'file', 'registry', 'socket', 'dns', 'http', 'tls', 'alert', 'authentication', 'authorization', 'configuration', 'communication', 'file', - 'network', 'process', 'registry', 'storage', 'system', 'web']), + 'network', 'process', 'registry', 'storage', 'system', 'web']), 'original': f'original{random.randint(0, 99999)}', 'outcome': random.choice(['success', 'failure', 'unknown']), 'provider': random.choice(['process', 'file', 'registry', 'socket', 'dns', 'http', 'tls', 'alert', 'authentication', 'authorization', 'configuration', 'communication', 'file', - 'network', 'process', 'registry', 'storage', 'system', 'web']), + 'network', 'process', 'registry', 'storage', 'system', 'web']), 'reason': f'This event happened due to reason{random.randint(0, 99999)}', 'reference': f'https://system.example.com/event/#{random.randint(0, 99999)}', 'risk_score': round(random.uniform(0, 10), 1), @@ -89,15 +89,16 @@ def generate_random_event(): 'start': generate_random_date(), 'timezone': random.choice(['UTC', 'GMT', 'PST', 'EST', 'CST', 'MST', 'PDT', 'EDT', 'CDT', 'MDT']), 'type': random.choice(['access', 'admin', 'allowed', 'change', 'connection', 'creation', 'deletion', - 'denied', 'end', 'error', 'group', 'indicator', 'info', 'installation', 'protocol', - 'start', 'user']), + 'denied', 'end', 'error', 'group', 'indicator', 'info', 'installation', 'protocol', + 'start', 'user']), 'url': f'http://mysystem.example.com/alert/{random.randint(0, 99999)}' } return event def generate_random_host(): - family = random.choice(['debian', 'ubuntu', 'macos', 'ios', 'android', 'RHEL']) + family = random.choice( + ['debian', 'ubuntu', 'macos', 'ios', 'android', 'RHEL']) version = f'{random.randint(0, 99)}.{random.randint(0, 99)}' host = { 'os': { @@ -114,7 +115,8 @@ def 
generate_random_host(): def generate_random_labels(): - labels = {'label1': f'label{random.randint(0, 99)}', 'label2': f'label{random.randint(0, 99)}'} + labels = { + 'label1': f'label{random.randint(0, 99)}', 'label2': f'label{random.randint(0, 99)}'} return labels @@ -133,7 +135,7 @@ def generate_random_package(): 'size': random.randint(0, 99999), 'type': random.choice(['deb', 'rpm', 'msi', 'pkg', 'app', 'apk', 'exe', 'zip', 'tar', 'gz', '7z', 'rar', 'cab', 'iso', 'dmg', 'tar.gz', 'tar.bz2', 'tar.xz', 'tar.Z', 'tar.lz4', - 'tar.sz', 'tar.zst']), + 'tar.sz', 'tar.zst']), 'version': f'v{random.randint(0, 9)}-stable' } return package @@ -166,6 +168,15 @@ def generate_random_vulnerability(): return vulnerability +def generate_random_wazuh(): + wazuh = { + 'cluster': { + 'name': f'wazuh-cluster-{random.randint(0,10)}' + } + } + return wazuh + + def generate_random_data(number): data = [] for _ in range(number): @@ -173,13 +184,14 @@ def generate_random_data(number): '@timestamp': generate_random_date(), 'agent': generate_random_agent(), 'ecs': {'version': '1.7.0'}, - 'event': generate_random_event(), + # 'event': generate_random_event(), 'host': generate_random_host(), 'labels': generate_random_labels(), 'message': f'message{random.randint(0, 99999)}', 'package': generate_random_package(), 'tags': generate_random_tags(), - 'vulnerability': generate_random_vulnerability() + 'vulnerability': generate_random_vulnerability(), + 'wazuh': generate_random_wazuh() } data.append(event_data) return data @@ -221,7 +233,8 @@ def main(): logging.info('Data generation completed.') - inject = input("Do you want to inject the generated data into your indexer? (y/n) ").strip().lower() + inject = input( + "Do you want to inject the generated data into your indexer? (y/n) ").strip().lower() if inject == 'y': ip = input("Enter the IP of your Indexer: ") port = input("Enter the port of your Indexer: ") diff --git a/ecs/vulnerability-detector/fields/custom/wazuh.yml b/ecs/vulnerability-detector/fields/custom/wazuh.yml new file mode 100644 index 0000000000000..6975a19690e6b --- /dev/null +++ b/ecs/vulnerability-detector/fields/custom/wazuh.yml @@ -0,0 +1,11 @@ +--- +- name: wazuh + title: Wazuh + description: > + Wazuh Inc. custom fields + fields: + - name: cluster.name + type: keyword + level: custom + description: > + Wazuh cluster name. 
\ No newline at end of file diff --git a/ecs/vulnerability-detector/fields/subset.yml b/ecs/vulnerability-detector/fields/subset.yml index 2c8dc0ca3b30f..bf1b579fde563 100644 --- a/ecs/vulnerability-detector/fields/subset.yml +++ b/ecs/vulnerability-detector/fields/subset.yml @@ -7,8 +7,6 @@ fields: fields: "*" ecs: fields: "*" - event: - fields: "*" package: fields: "*" host: @@ -17,3 +15,5 @@ fields: fields: "*" vulnerability: fields: "*" + wazuh: + fields: "*" diff --git a/ecs/vulnerability-detector/fields/template-settings-legacy.json b/ecs/vulnerability-detector/fields/template-settings-legacy.json index 5f3135175c9af..c85123eaf0a5f 100644 --- a/ecs/vulnerability-detector/fields/template-settings-legacy.json +++ b/ecs/vulnerability-detector/fields/template-settings-legacy.json @@ -18,9 +18,6 @@ "base.tags", "agent.id", "ecs.version", - "event.id", - "event.module", - "event.severity", "host.os.family", "host.os.full.text", "host.os.version", @@ -28,7 +25,8 @@ "package.version", "vulnerability.id", "vulnerability.description.text", - "vulnerability.severity" + "vulnerability.severity", + "wazuh.cluster.name" ] } } diff --git a/ecs/vulnerability-detector/fields/template-settings.json b/ecs/vulnerability-detector/fields/template-settings.json index 48e2b051599e8..89f03eed0284c 100644 --- a/ecs/vulnerability-detector/fields/template-settings.json +++ b/ecs/vulnerability-detector/fields/template-settings.json @@ -19,9 +19,6 @@ "base.tags", "agent.id", "ecs.version", - "event.id", - "event.module", - "event.severity", "host.os.family", "host.os.full.text", "host.os.version", @@ -29,7 +26,8 @@ "package.version", "vulnerability.id", "vulnerability.description.text", - "vulnerability.severity" + "vulnerability.severity", + "wazuh.cluster.name" ] } } From 4eb8d2635199fb8a04726709cb48a24cad18a06c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Mon, 18 Dec 2023 19:07:10 +0100 Subject: [PATCH 020/120] Update `indexer-ism-init.sh` (#81) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Updates the script to upload the wazuh-template.json to the indexer. Signed-off-by: Álex Ruiz --- distribution/src/bin/indexer-ism-init.sh | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/distribution/src/bin/indexer-ism-init.sh b/distribution/src/bin/indexer-ism-init.sh index 3f2edc541a4ad..4217979624bc7 100644 --- a/distribution/src/bin/indexer-ism-init.sh +++ b/distribution/src/bin/indexer-ism-init.sh @@ -84,7 +84,24 @@ function generate_rollover_template() { # Loads the index templates for the rollover policy to the indexer. ######################################################################### function load_templates() { - # Note: the wazuh-template.json could also be loaded here. + # Load wazuh-template.json, needed for initial indices creation. + local wazuh_template_path="/etc/wazuh-indexer/wazuh-template.json" + echo "Will create 'wazuh' index template" + if [ -f $wazuh_template_path ]; then + cat $wazuh_template_path | + if ! 
curl -s -k ${C_AUTH} \ + -X PUT "${INDEXER_URL}/_template/wazuh" \ + -o "${LOG_FILE}" --create-dirs \ + -H 'Content-Type: application/json' -d @-; then + echo " ERROR: 'wazuh' template creation failed" + exit 1 + else + echo " SUCC: 'wazuh' template created or updated" + fi + else + echo " ERROR: $wazuh_template_path not found" + fi + echo "Will create index templates to configure the alias" for alias in "${aliases[@]}"; do generate_rollover_template "${alias}" | From b2548f967a46cad30b9f0ca38d38713a41e5036f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Tue, 26 Dec 2023 17:13:59 +0100 Subject: [PATCH 021/120] Add workflow to assemble packages (#85) * Add script to assemble arm64 and x64 archives (tar) * Cleanup * Update config file with latest upstream changes * Change packages maintainer information * Fix wrong substitution of config files * Update dockerignore to ignore git folder * Update wazuh-indexer.rpm.spec Remove unnecessary echo commands * Add wazuh-indexer-performance-analyzer.service Required to assembly RPM. The plugin does not install this file, so it needs to be added manually. * Update assemble.sh Successfully assemble RPM x64. Runner needed to arm64 * Update `build.yml` * Add WIP documentation for packages' generation * Test new approach using reusable workflows * Fix errors * Restructure reusable workflow * Fix upload and download paths * New try - Adds a reusable workflow to return the version of Wazuh set in source code. - Attempt to dynamically generate artifacts name to normalize them for usage between jobs. - Adds revision as input for the workflow. - Cleanup * Emulate assemble to test upload of the reusable assembly workflow * Add Caching Gradle dependencies * Remove extra '-' in the packages names on the assembly job * Final cleanup * Enable RPM package assemble Remove unused code * Fix regex to get package name * Fix download-artifact destination path * Exclude unimplemented deb assembly Extend example to run with Act --- .github/workflows/build.yml | 106 ++- .github/workflows/r_assemble.yml | 58 ++ .github/workflows/r_build.yml | 53 ++ .github/workflows/r_version.yml | 22 + distribution/packages/build.gradle | 6 +- ...wazuh-indexer-performance-analyzer.service | 21 + .../packages/src/rpm/wazuh-indexer.cicd.spec | 755 ++++++++++++++++++ .../packages/src/rpm/wazuh-indexer.rpm.spec | 274 +++++++ distribution/src/config/jvm.prod.options | 7 + .../src/config/security/internal_users.yml | 17 +- distribution/src/config/security/roles.yml | 246 +++++- docker/images/.dockerignore | 1 + scripts/README.md | 124 +++ scripts/assemble.sh | 283 +++++++ 14 files changed, 1910 insertions(+), 63 deletions(-) create mode 100644 .github/workflows/r_assemble.yml create mode 100644 .github/workflows/r_build.yml create mode 100644 .github/workflows/r_version.yml create mode 100644 distribution/packages/src/rpm/wazuh-indexer-performance-analyzer.service create mode 100644 distribution/packages/src/rpm/wazuh-indexer.cicd.spec create mode 100644 distribution/packages/src/rpm/wazuh-indexer.rpm.spec create mode 100644 scripts/README.md create mode 100755 scripts/assemble.sh diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a80b025134de5..1483793de3f68 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,62 +1,60 @@ -name: Build slim packages +name: Build packages # This workflow runs when any of the following occur: - # - Run manually +# - Run manually on: workflow_dispatch: - - -# Used to run locally using 
https://github.com/nektos/act -env: - ACT: - VERSION: 2.11.0 - SNAPSHOT: false - PLATFORM: linux - BUILD: bash scripts/build.sh - + inputs: + revision: + # description: + default: "1" + required: false + type: string + +# ========================== +# Bibliography +# ========================== +# +# * Reusable workflows: limitations +# | https://docs.github.com/en/actions/using-workflows/reusing-workflows#limitations +# * Using matrix in reusable workflows: +# | https://docs.github.com/en/actions/using-workflows/reusing-workflows#using-a-matrix-strategy-with-a-reusable-workflow +# * Reading input from the called workflow +# | https://docs.github.com/en/enterprise-cloud@latest/actions/using-workflows/workflow-syntax-for-github-actions#onworkflow_callinputs jobs: + version: + uses: ./.github/workflows/r_version.yml + build: - runs-on: ubuntu-latest - # Permissions to upload the package - permissions: - packages: write - contents: read + needs: version strategy: - matrix: - # act is resource-heavy. Avoid running parallel builds with it: - # DISTRIBUTION: [ rpm ] - # ARCHITECTURE: [ x64 ] - DISTRIBUTION: [ tar, rpm, deb ] - ARCHITECTURE: [ x64, arm64 ] - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-java@v3 - with: - distribution: temurin - java-version: 11 - - - name: Setup Gradle - uses: gradle/gradle-build-action@v2.9.0 - - - name: Execute build script - run: | - $BUILD -v $VERSION -s $SNAPSHOT -p $PLATFORM -a ${{ matrix.ARCHITECTURE }} -d ${{ matrix.DISTRIBUTION }} - - # The package name is stored in the artifacts/artifact_name.txt file - - name: Read package name - id: package_name - run: | - echo $(ls -la) - echo "package_name=$(cat artifacts/artifact_name.txt)" >> $GITHUB_OUTPUT - echo "$(cat artifacts/artifact_name.txt)" - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: ${{ steps.package_name.outputs.package_name }} - path: artifacts/dist/${{ steps.package_name.outputs.package_name }} - if-no-files-found: error - - # assemble: - # release: + matrix: + distribution: [tar, rpm, deb] + architecture: [x64, arm64] + uses: ./.github/workflows/r_build.yml + with: + architecture: ${{ matrix.architecture }} + distribution: ${{ matrix.distribution }} + name: wazuh-indexer-min_${{ needs.version.outputs.version }}-${{ inputs.revision }}-${{ matrix.architecture }}_${{ github.sha }}.${{ matrix.distribution }} + # wazuh-indexer-min_4.8.0-rc1_x64_ff98475f.deb + # TODO arm64 != amd64 (deb), x64 != x86_64 (rpm) + # TODO use short SHA https://stackoverflow.com/a/59819441/13918537 + + assemble: + needs: [version, build] + strategy: + matrix: + distribution: [tar, rpm, deb] + architecture: [x64, arm64] + exclude: + # skip arm64 until we have arm runners + - architecture: arm64 + - distribution: [tar, deb] # Exclude deb assembly until it's implemented + + uses: ./.github/workflows/r_assemble.yml + with: + architecture: ${{ matrix.architecture }} + distribution: ${{ matrix.distribution }} + min: wazuh-indexer-min_${{ needs.version.outputs.version }}-${{ inputs.revision }}-${{ matrix.architecture }}_${{ github.sha }}.${{ matrix.distribution }} + name: wazuh-indexer_${{ needs.version.outputs.version }}-${{ inputs.revision }}-${{ matrix.architecture }}_${{ github.sha }}.${{ matrix.distribution }} diff --git a/.github/workflows/r_assemble.yml b/.github/workflows/r_assemble.yml new file mode 100644 index 0000000000000..c807a58a4f781 --- /dev/null +++ b/.github/workflows/r_assemble.yml @@ -0,0 +1,58 @@ +name: Assemble (reusable) + +# This workflow runs when any of the 
following occur: +# - Run from another workflow +on: + workflow_call: + inputs: + distribution: + description: 'One of [ "tar", "rpm", "deb" ]' + default: "rpm" + required: false + type: string + architecture: + description: 'One of [ "x64", "arm64" ]' + default: "x64" + required: false + type: string + min: + description: The name of the package to download. + required: true + type: string + name: + description: The name of the package to upload. + required: true + type: string + +jobs: + r_assemble: + runs-on: ubuntu-latest + # Permissions to upload the package + permissions: + packages: write + contents: read + steps: + - uses: actions/checkout@v4 + - name: Download artifact + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.min }} + path: artifacts/dist + + - name: Run `assemble.sh` + run: | + bash scripts/assemble.sh -v ${{ vars.OPENSEARCH_VERSION }} -p linux -a ${{ inputs.architecture }} -d ${{ inputs.distribution }} + + # The package's name is stored in artifacts/artifact_name.txt. + - name: Set package name + id: get_name + run: | + echo "name=$(cat artifacts/artifact_name.txt)" >> $GITHUB_OUTPUT + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.name }} + path: artifacts/dist/${{ steps.get_name.outputs.name }} + if-no-files-found: error + diff --git a/.github/workflows/r_build.yml b/.github/workflows/r_build.yml new file mode 100644 index 0000000000000..50ddb9a75ae76 --- /dev/null +++ b/.github/workflows/r_build.yml @@ -0,0 +1,53 @@ +name: Build (reusable) + +# This workflow runs when any of the following occur: +# - Run from another workflow +on: + workflow_call: + inputs: + distribution: + description: 'One of [ "tar", "rpm", "deb" ]' + default: "rpm" + required: false + type: string + architecture: + description: 'One of [ "x64", "arm64" ]' + default: "x64" + required: false + type: string + name: + type: string + +jobs: + r_build: + runs-on: ubuntu-latest + # Permissions to upload the package + permissions: + packages: write + contents: read + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: 11 + + - name: Setup Gradle + uses: gradle/gradle-build-action@v2.9.0 + + - name: Run `build.sh` + run: | + bash scripts/build.sh -v ${{ vars.OPENSEARCH_VERSION }} -s false -p linux -a ${{ inputs.architecture }} -d ${{ inputs.distribution }} + + # The package's name is stored in artifacts/artifact_name.txt. 
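      # As noted above, `build.sh` writes the package file name to
      # artifacts/artifact_name.txt so later steps can reference it. A minimal
      # local equivalent of that handoff (example arguments only; the workflow
      # passes ${{ vars.OPENSEARCH_VERSION }} and the matrix values):
      #   bash scripts/build.sh -v 2.11.0 -s false -p linux -a x64 -d rpm
      #   name=$(cat artifacts/artifact_name.txt)
      #   echo "Built package: artifacts/dist/$name"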
+ - name: Set package name + id: get_name + run: | + echo "name=$(cat artifacts/artifact_name.txt)" >> $GITHUB_OUTPUT + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.name }} + path: artifacts/dist/${{ steps.get_name.outputs.name }} + if-no-files-found: error diff --git a/.github/workflows/r_version.yml b/.github/workflows/r_version.yml new file mode 100644 index 0000000000000..d3c01135a6c35 --- /dev/null +++ b/.github/workflows/r_version.yml @@ -0,0 +1,22 @@ +name: Version (reusable) + +# This workflow runs when any of the following occur: +# - Run from another workflow +on: + workflow_call: + outputs: + version: + description: "Returns the version of Wazuh" + value: ${{ jobs.r_version.outputs.version }} + +jobs: + r_version: + runs-on: ubuntu-latest + outputs: + version: ${{ steps.get_version.outputs.version }} + steps: + - uses: actions/checkout@v4 + - name: Read 'VERSION' + id: get_version + run: | + echo "version=$(cat VERSION)" >> $GITHUB_OUTPUT diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 9c99a41f34e56..298b568244164 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -324,13 +324,13 @@ apply plugin: 'com.netflix.nebula.ospackage-base' // this is package indepdendent configuration ospackage { - maintainer 'OpenSearch Team ' + maintainer 'Wazuh, Inc ' summary 'Distributed RESTful search engine built for the cloud' packageDescription ''' Reference documentation can be found at - https://github.com/opensearch-project/OpenSearch + https://documentation.wazuh.com/current/getting-started/components/wazuh-indexer.html '''.stripIndent().trim() - url 'https://github.com/opensearch-project/OpenSearch' + url 'https://documentation.wazuh.com/current/getting-started/components/wazuh-indexer.html' // signing setup if (project.hasProperty('signing.password') && BuildParams.isSnapshotBuild() == false) { diff --git a/distribution/packages/src/rpm/wazuh-indexer-performance-analyzer.service b/distribution/packages/src/rpm/wazuh-indexer-performance-analyzer.service new file mode 100644 index 0000000000000..dfd3770f8300c --- /dev/null +++ b/distribution/packages/src/rpm/wazuh-indexer-performance-analyzer.service @@ -0,0 +1,21 @@ +# Copyright OpenSearch Contributors +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. + +[Unit] +Description=wazuh-indexer Performance Analyzer + +[Service] +Type=simple +ExecStart=/usr/share/wazuh-indexer/bin/wazuh-indexer-performance-analyzer/performance-analyzer-agent-cli +Restart=on-failure +User=wazuh-indexer +Group=wazuh-indexer +EnvironmentFile=-/etc/sysconfig/wazuh-indexer +WorkingDirectory=/usr/share/wazuh-indexer + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/distribution/packages/src/rpm/wazuh-indexer.cicd.spec b/distribution/packages/src/rpm/wazuh-indexer.cicd.spec new file mode 100644 index 0000000000000..f17e6f7413aab --- /dev/null +++ b/distribution/packages/src/rpm/wazuh-indexer.cicd.spec @@ -0,0 +1,755 @@ +# Wazuh package SPEC +# Copyright (C) 2021, Wazuh Inc. +# +# This program is a free software; you can redistribute it +# and/or modify it under the terms of the GNU General Public +# License (version 2) as published by the FSF - Free Software +# Foundation. +Summary: Wazuh indexer is a search and analytics engine for security-related data. 
Documentation can be found at https://documentation.wazuh.com/current/getting-started/components/wazuh-indexer.html +Name: wazuh-indexer +Version: %{_version} +Release: %{_release} +License: GPL +Group: System Environment/Daemons +Source0: %{name}-%{version}.tar.gz +URL: https://www.wazuh.com/ +BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) +Vendor: Wazuh, Inc +Packager: Wazuh, Inc +AutoReqProv: no +Requires: coreutils +ExclusiveOS: linux +BuildRequires: tar shadow-utils + +# ----------------------------------------------------------------------------- + +%global USER %{name} +%global GROUP %{name} +%global CONFIG_DIR /etc/%{name} +%global LOG_DIR /var/log/%{name} +%global LIB_DIR /var/lib/%{name} +%global SYS_DIR /usr/lib +%global INSTALL_DIR /usr/share/%{name} +%global REPO_DIR /root/unattended_installer +%global INDEXER_FILE wazuh-indexer-base-%{version}-%{release}-linux-x64.tar.xz + +# ----------------------------------------------------------------------------- + +%description +Wazuh indexer is a near real-time full-text search and analytics engine that gathers security-related data into one platform. This Wazuh central component indexes and stores alerts generated by the Wazuh server. Wazuh indexer can be configured as a single-node or multi-node cluster, providing scalability and high availability. Documentation can be found at https://documentation.wazuh.com/current/getting-started/components/wazuh-indexer.html + +# ----------------------------------------------------------------------------- + +%prep +# Clean BUILDROOT +rm -fr %{buildroot} + +# Create package group +getent group %{GROUP} || groupadd -r %{GROUP} + +# Create package user +if ! id %{USER} &> /dev/null; then + useradd --system \ + --no-create-home \ + --home-dir %{INSTALL_DIR} \ + --gid %{GROUP} \ + --shell /sbin/nologin \ + --comment "%{USER} user" \ + %{USER} +fi + +# ----------------------------------------------------------------------------- + +%install +# Create directories +mkdir -p ${RPM_BUILD_ROOT}%{INSTALL_DIR} +mkdir -p ${RPM_BUILD_ROOT}/etc +mkdir -p ${RPM_BUILD_ROOT}%{LOG_DIR} +mkdir -p ${RPM_BUILD_ROOT}%{LIB_DIR} +mkdir -p ${RPM_BUILD_ROOT}%{SYS_DIR} + +# Set up required files +cp /tmp/%{INDEXER_FILE} ./ + +tar -xf %{INDEXER_FILE} && rm -f %{INDEXER_FILE} +chown -R %{USER}:%{GROUP} wazuh-indexer-*/* + +# Copy base files into RPM_BUILD_ROOT directory +mv wazuh-indexer-*/etc ${RPM_BUILD_ROOT}/ +mv wazuh-indexer-*%{SYS_DIR}/* ${RPM_BUILD_ROOT}%{SYS_DIR}/ +rm -rf wazuh-indexer-*/etc +rm -rf wazuh-indexer-*/usr +cp -pr wazuh-indexer-*/* ${RPM_BUILD_ROOT}%{INSTALL_DIR}/ + +# Build wazuh-certs-tool +%{REPO_DIR}/builder.sh -c + +# Build wazuh-passwords-tool +%{REPO_DIR}/builder.sh -p + +# Copy the security tools +cp %{REPO_DIR}/wazuh-certs-tool.sh ${RPM_BUILD_ROOT}%{INSTALL_DIR}/plugins/opensearch-security/tools/ +cp %{REPO_DIR}/wazuh-passwords-tool.sh ${RPM_BUILD_ROOT}%{INSTALL_DIR}/plugins/opensearch-security/tools/ +cp /root/documentation-templates/wazuh/config.yml ${RPM_BUILD_ROOT}%{INSTALL_DIR}/plugins/opensearch-security/tools/config.yml + +# Copy Wazuh's config files for the security plugin +cp %{REPO_DIR}/config/indexer/roles/action_groups.yml ${RPM_BUILD_ROOT}%{CONFIG_DIR}/opensearch-security +cp %{REPO_DIR}/config/indexer/roles/internal_users.yml ${RPM_BUILD_ROOT}%{CONFIG_DIR}/opensearch-security +cp %{REPO_DIR}/config/indexer/roles/roles.yml ${RPM_BUILD_ROOT}%{CONFIG_DIR}/opensearch-security +cp %{REPO_DIR}/config/indexer/roles/roles_mapping.yml 
${RPM_BUILD_ROOT}%{CONFIG_DIR}/opensearch-security + +cp /root/stack/indexer/indexer-security-init.sh ${RPM_BUILD_ROOT}%{INSTALL_DIR}/bin/ + +chmod 750 ${RPM_BUILD_ROOT}/etc/init.d/wazuh-indexer + +# ----------------------------------------------------------------------------- + +%pre +if [ $1 = 1 ];then # Install + # Create package group + getent group %{GROUP} > /dev/null 2>&1 || groupadd -r %{GROUP} + + if ! id %{USER} &> /dev/null; then + useradd --system \ + --no-create-home \ + --home-dir %{INSTALL_DIR} \ + --gid %{GROUP} \ + --shell /sbin/nologin \ + --comment "%{USER} user" \ + %{USER} > /dev/null 2>&1 + fi +fi + +# Stop the services to upgrade the package +if [ $1 = 2 ]; then + if command -v systemctl > /dev/null 2>&1 && systemctl > /dev/null 2>&1 && systemctl is-active --quiet %{name} > /dev/null 2>&1; then + systemctl stop %{name}.service > /dev/null 2>&1 + touch %{INSTALL_DIR}/%{name}.restart + # Check for SysV + elif command -v service > /dev/null 2>&1 && service %{name} status 2>/dev/null | grep "is running" > /dev/null 2>&1; then + service %{name} stop > /dev/null 2>&1 + touch %{INSTALL_DIR}/%{name}.restart + elif [ -x /etc/init.d/%{name} ]; then + if command -v invoke-rc.d >/dev/null && invoke-rc.d --quiet wazuh-indexer status > /dev/null 2>&1; then + invoke-rc.d %{name} stop > /dev/null 2>&1 + touch %{INSTALL_DIR}/%{name}.restart + fi + + # Older Suse linux distributions do not ship with systemd + # but do not have an /etc/init.d/ directory + # this tries to stop the %{name} service on these + # as well without failing this script + elif [ -x /etc/rc.d/init.d/%{name} ] ; then + /etc/rc.d/init.d/%{name} stop > /dev/null 2>&1 + touch %{INSTALL_DIR}/%{name}.restart + fi +fi + +# ----------------------------------------------------------------------------- + +%post + +export OPENSEARCH_PATH_CONF=${OPENSEARCH_PATH_CONF:-%{CONFIG_DIR}} + +if [ $1 = 1 ];then # Install + echo "%{USER} hard nproc 4096" >> /etc/security/limits.conf + echo "%{USER} soft nproc 4096" >> /etc/security/limits.conf + echo "%{USER} hard nofile 65535" >> /etc/security/limits.conf + echo "%{USER} soft nofile 65535" >> /etc/security/limits.conf + + # To pick up /usr/lib/sysctl.d/wazuh-indexer.conf + if command -v systemctl > /dev/null 2>&1; then + systemctl restart systemd-sysctl > /dev/null 2>&1 || true + fi + +fi + + +if [[ -d /run/systemd/system ]] ; then + rm -f /etc/init.d/%{name} +fi + +# If is an upgrade, move the securityconfig files if they exist (4.3.x versions) +if [ ${1} = 2 ]; then + if [ -d "%{INSTALL_DIR}"/plugins/opensearch-security/securityconfig ]; then + + if [ ! -d "%{CONFIG_DIR}"/opensearch-security ]; then + mkdir "%{CONFIG_DIR}"/opensearch-security + fi + + cp -r "%{INSTALL_DIR}"/plugins/opensearch-security/securityconfig/* "%{CONFIG_DIR}"/opensearch-security + fi +fi + +# If is an upgrade, move the securityconfig files if they exist (4.3.x versions) +if [ ${1} = 2 ]; then + if [ -d "%{INSTALL_DIR}"/plugins/opensearch-security/securityconfig ]; then + + if [ ! -d "%{CONFIG_DIR}"/opensearch-security ]; then + mkdir "%{CONFIG_DIR}"/opensearch-security + fi + + cp -r "%{INSTALL_DIR}"/plugins/opensearch-security/securityconfig/* "%{CONFIG_DIR}"/opensearch-security + fi +fi + +# ----------------------------------------------------------------------------- + +%preun + +export OPENSEARCH_PATH_CONF=${OPENSEARCH_PATH_CONF:-%{CONFIG_DIR}} + +if [ $1 = 0 ];then # Remove + echo -n "Stopping wazuh-indexer service..." 
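    # Note on the scriptlet argument checked throughout this spec: in %pre and
    # %post, $1 is 1 on a fresh install and 2 on an upgrade; in %preun and
    # %postun, $1 is 0 on a full removal and 1 on an upgrade. For example
    # (hypothetical invocations):
    #   rpm -U wazuh-indexer-*.rpm   # upgrade: %pre/%post see $1 = 2
    #   rpm -e wazuh-indexer         # removal: %preun/%postun see $1 = 0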
+ if command -v systemctl > /dev/null 2>&1 && systemctl is-active --quiet %{name} > /dev/null 2>&1; then + systemctl --no-reload stop %{name}.service > /dev/null 2>&1 + + # Check for SysV + elif command -v service > /dev/null 2>&1; then + service %{name} stop > /dev/null 2>&1 + elif [ -x /etc/init.d/%{name} ]; then + if command -v invoke-rc.d >/dev/null; then + invoke-rc.d %{name} stop > /dev/null 2>&1 + else + /etc/init.d/%{name} stop > /dev/null 2>&1 + fi + elif [ -x /etc/rc.d/init.d/%{name} ] ; then + /etc/rc.d/init.d/%{name} stop > /dev/null 2>&1 + else # Anything else + kill -15 `pgrep -f opensearch` > /dev/null 2>&1 + fi + echo " OK" + + # Check for systemd + if command -v systemctl > /dev/null 2>&1 && systemctl > /dev/null 2>&1; then + systemctl disable %{name} > /dev/null 2>&1 + systemctl daemon-reload > /dev/null 2>&1 + # Check for SysV + elif command -v service > /dev/null 2>&1 && command -v chkconfig > /dev/null 2>&1; then + chkconfig %{name} off > /dev/null 2>&1 + chkconfig --del %{name} > /dev/null 2>&1 + fi +fi + +# ----------------------------------------------------------------------------- + +%postun + +export OPENSEARCH_PATH_CONF=${OPENSEARCH_PATH_CONF:-%{CONFIG_DIR}} + +if [ $1 = 0 ];then + # Cleaning limits file + sed -i '/%{USER}/d' /etc/security/limits.conf + + # Remove the user if it exists + if getent passwd %{USER} > /dev/null 2>&1; then + userdel %{USER} >/dev/null 2>&1 + fi + + # Remove the group if it exists + if command -v getent > /dev/null 2>&1 && getent group %{GROUP} > /dev/null 2>&1; then + groupdel %{GROUP} >/dev/null 2>&1 + elif getent group %{GROUP} > /dev/null 2>&1; then + groupdel %{GROUP} >/dev/null 2>&1 + fi + + # Remove lingering folders and files + if [ -d /dev/shm/performanceanalyzer ]; then + rm -rf /dev/shm/performanceanalyzer + fi + rm -rf %{INSTALL_DIR} +fi + +# ----------------------------------------------------------------------------- + +%posttrans + +export OPENSEARCH_PATH_CONF=${OPENSEARCH_PATH_CONF:-%{CONFIG_DIR}} + +if [ -f %{INSTALL_DIR}/%{name}.restart ]; then + echo -n "Starting wazuh-indexer service..." + rm -f %{INSTALL_DIR}/%{name}.restart + if command -v systemctl > /dev/null 2>&1; then + systemctl daemon-reload > /dev/null 2>&1 + systemctl restart %{name}.service > /dev/null 2>&1 + + # Check for SysV + elif command -v service > /dev/null 2>&1; then + service %{name} restart > /dev/null 2>&1 + elif [ -x /etc/init.d/%{name} ]; then + if command -v invoke-rc.d >/dev/null; then + invoke-rc.d %{name} restart > /dev/null 2>&1 + else + /etc/init.d/%{name} restart > /dev/null 2>&1 + fi + elif [ -x /etc/rc.d/init.d/%{name} ] ; then + /etc/rc.d/init.d/%{name} restart > /dev/null 2>&1 + fi + echo " OK" +fi + +if [ ! 
-f "%{CONFIG_DIR}"/opensearch.keystore ]; then + "%{INSTALL_DIR}"/bin/opensearch-keystore create + chown %{USER}:%{GROUP} "%{CONFIG_DIR}"/opensearch.keystore + chmod 660 "%{CONFIG_DIR}"/opensearch.keystore + md5sum "%{CONFIG_DIR}"/opensearch.keystore > "%{CONFIG_DIR}"/.opensearch.keystore.initial_md5sum + chown %{USER}:%{GROUP} "%{CONFIG_DIR}"/.opensearch.keystore.initial_md5sum + chmod 600 "%{CONFIG_DIR}"/.opensearch.keystore.initial_md5sum +else + chown %{USER}:%{GROUP} "%{CONFIG_DIR}"/opensearch.keystore + chmod 660 "%{CONFIG_DIR}"/opensearch.keystore + if "%{INSTALL_DIR}"/bin/opensearch-keystore has-passwd --silent ; then + echo "### Warning: unable to upgrade encrypted keystore" 1>&2 + echo " Please run opensearch-keystore upgrade and enter password" 1>&2 + else + "%{INSTALL_DIR}"/bin/opensearch-keystore upgrade + fi +fi + +# ----------------------------------------------------------------------------- + +%clean +rm -fr %{buildroot} + +# ----------------------------------------------------------------------------- + +%files +%defattr(-, %{USER}, %{GROUP}) +%dir %attr(750, %{USER}, %{GROUP}) %{CONFIG_DIR} +%dir %attr(750, %{USER}, %{GROUP}) %{LIB_DIR} +%dir %attr(750, %{USER}, %{GROUP}) %{LOG_DIR} + +%config(noreplace) %attr(0660, root, %{GROUP}) "/etc/sysconfig/%{name}" + +%config(missingok) /etc/init.d/%{name} +%attr(0640, root, root) %{SYS_DIR}/sysctl.d/%{name}.conf +%attr(0640, root, root) %{SYS_DIR}/systemd/system/%{name}.service +%attr(0640, root, root) %{SYS_DIR}/systemd/system/%{name}-performance-analyzer.service +%attr(0640, root, root) %{SYS_DIR}/tmpfiles.d/%{name}.conf + + +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/*.txt +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-job-scheduler/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-job-scheduler/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-job-scheduler/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-ml/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-ml/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-ml/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-ml/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/*.jar +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/tools/ +%attr(740, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/tools/*.sh +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/tools/*.md +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/tools/*.yml +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-index-management/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-index-management/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-index-management/*.txt +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-index-management/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-index-management/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-neural-search/ 
+%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-neural-search/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-neural-search/*.txt +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-neural-search/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-neural-search/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-notifications/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-notifications/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-notifications/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-notifications/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-asynchronous-search/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-asynchronous-search/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-asynchronous-search/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-asynchronous-search/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-alerting/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-alerting/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-alerting/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-alerting/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-sql/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-sql/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-sql/*.txt +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-sql/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-sql/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-geospatial/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-geospatial/*.txt +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-geospatial/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-geospatial/*.properties +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-geospatial/*.policy +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-reports-scheduler/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-reports-scheduler/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-reports-scheduler/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-reports-scheduler/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-cross-cluster-replication/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-cross-cluster-replication/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-cross-cluster-replication/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-custom-codecs/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-custom-codecs/*.txt +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-custom-codecs/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-custom-codecs/*.properties +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-custom-codecs/*.policy +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-knn/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-knn/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-knn/*.policy +%attr(640, 
%{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-knn/*.properties +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-knn/*.txt +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-knn/lib/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-knn/lib/*.so.1 +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-knn/lib/*.so +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-anomaly-detection/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-anomaly-detection/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-anomaly-detection/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-anomaly-detection/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-notifications-core/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-notifications-core/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-notifications-core/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-notifications-core/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security-analytics/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security-analytics/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security-analytics/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security-analytics/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-observability/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-observability/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-observability/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-observability/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-performance-analyzer/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-performance-analyzer/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-performance-analyzer/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-performance-analyzer/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-common/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-common/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-common/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/repository-url/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/repository-url/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/repository-url/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/repository-url/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/percolator/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/percolator/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/percolator/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/search-pipeline-common/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/search-pipeline-common/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/search-pipeline-common/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/opensearch-dashboards/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/opensearch-dashboards/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/opensearch-dashboards/*.properties +%dir 
%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-painless/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-painless/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-painless/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-painless/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/analysis-common/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/analysis-common/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/analysis-common/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/rank-eval/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/rank-eval/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/rank-eval/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/aggs-matrix-stats/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/aggs-matrix-stats/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/aggs-matrix-stats/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/geo/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/geo/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/geo/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/mapper-extras/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/mapper-extras/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/mapper-extras/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-user-agent/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-user-agent/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-user-agent/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/systemd/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/systemd/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/systemd/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/systemd/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/transport-netty4/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/transport-netty4/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/transport-netty4/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/transport-netty4/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/parent-join/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/parent-join/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/parent-join/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-mustache/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-mustache/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-mustache/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-mustache/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-geoip/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-geoip/*.mmdb +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-geoip/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-geoip/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/ingest-geoip/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/reindex/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/reindex/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/reindex/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/reindex/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/reindex/transport-netty4/ +%attr(640, %{USER}, 
%{GROUP}) %{INSTALL_DIR}/modules/reindex/transport-netty4/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/reindex/transport-netty4/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/reindex/parent-join/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/reindex/parent-join/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-expression/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-expression/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-expression/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/modules/lang-expression/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/lib/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/lib/tools/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/lib/tools/upgrade-cli/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/lib/tools/upgrade-cli/*.jar +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/lib/tools/plugin-cli/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/lib/tools/plugin-cli/*.jar +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/lib/tools/keystore-cli/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/lib/tools/keystore-cli/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/lib/*.jar +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/man/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/man/man1/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/man/man1/*.1 +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/jmods/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/jmods/*.jmod +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/NOTICE +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/include/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/include/*.h +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/include/linux/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/include/linux/*.h +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/*.cfg +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/*.so +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/jspawnhelper +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/*.properties.ja +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/*.dat +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/*.properties +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/*.sym +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/classlist +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/*.jar +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/jexec +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/security/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/security/*.certs +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/security/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/security/cacerts +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/security/*.dat +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/modules +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/server/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/server/*.so +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/server/*.jsa +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/jfr/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/lib/jfr/*.jfc +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/bin/ +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/bin/* +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/release 
+%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.instrument/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.net/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.crypto.cryptoki/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.crypto.cryptoki/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.security.auth/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.smartcardio/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.smartcardio/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.zipfs/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.localedata/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.localedata/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.prefs/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.dynalink/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.dynalink/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jpackage/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.management/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.internal.jvmstat/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.xml.crypto/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.xml.crypto/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.net.http/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.unsupported/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.datatransfer/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jdi/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.incubator.vector/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.charsets/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.management.jfr/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.accessibility/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jartool/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.security.sasl/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.internal.ed/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.editpad/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.httpserver/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.base/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.base/*.md +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.base/ASSEMBLY_EXCEPTION +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.base/ADDITIONAL_LICENSE_INFO +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.base/LICENSE +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jcmd/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.internal.opt/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.internal.opt/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.scripting/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.xml/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.xml/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jdeps/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jstatd/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.management.agent/ +%dir %attr(750, %{USER}, 
%{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.random/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.sctp/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.sql/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.nio.mapmode/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.hotspot.agent/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.attach/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.naming/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.management/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.sql.rowset/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.rmi/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.internal.vm.compiler/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.unsupported.desktop/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.logging/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.security.jgss/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.internal.vm.compiler.management/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jfr/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.transaction.xa/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.crypto.ec/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.internal.vm.ci/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.naming.rmi/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.xml.dom/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.internal.le/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.internal.le/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jsobject/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jdwp.agent/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.se/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.compiler/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.incubator.foreign/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jshell/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.javadoc/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.javadoc/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.compiler/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.security.jgss/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jconsole/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.naming.dns/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.management.rmi/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.desktop/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/java.desktop/*.md +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/legal/jdk.jlink/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/security/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/security/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/security/*.security +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/security/policy/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/security/policy/limited/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/security/policy/limited/*.policy +%dir %attr(750, %{USER}, %{GROUP}) 
%{INSTALL_DIR}/jdk/conf/security/policy/unlimited/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/security/policy/unlimited/*.policy +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/security/policy/*.txt +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/sdp/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/sdp/*.template +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/management/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/management/*.access +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/management/*.properties +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/jdk/conf/management/*.template +%dir %attr(750, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-security/ +%config(noreplace) %attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-security/*.yml +%config(noreplace) %attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-security/*.example +%dir %attr(750, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-notifications/ +%attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-notifications/*.yml +%config(noreplace) %attr(660, %{USER}, %{GROUP}) %{CONFIG_DIR}/jvm.options +%dir %attr(750, %{USER}, %{GROUP}) %{CONFIG_DIR}/jvm.options.d/ +%dir %attr(750, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-reports-scheduler/ +%attr(660, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-reports-scheduler/*.yml +%config(noreplace) %attr(660, %{USER}, %{GROUP}) %{CONFIG_DIR}/*.properties +%dir %attr(750, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-notifications-core/ +%attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-notifications-core/*.yml +%config(noreplace) %attr(660, %{USER}, %{GROUP}) %{CONFIG_DIR}/*.yml +%dir %attr(750, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-observability/ +%attr(660, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-observability/*.yml +%dir %attr(750, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-performance-analyzer/ +%attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-performance-analyzer/agent-stats-metadata +%attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-performance-analyzer/*.conf +%attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-performance-analyzer/*.xml +%attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-performance-analyzer/*.properties +%attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-performance-analyzer/plugin-stats-metadata +%attr(640, %{USER}, %{GROUP}) %{CONFIG_DIR}/opensearch-performance-analyzer/*.policy +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/ +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/config/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/config/agent-stats-metadata +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/config/*.conf +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/config/*.xml +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/config/*.properties +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/config/plugin-stats-metadata +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/config/*.policy +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/lib/ +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/lib/*.jar +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/bin/ +%attr(750, %{USER}, %{GROUP}) 
%{INSTALL_DIR}/performance-analyzer-rca/bin/performance-analyzer-agent +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/performance-analyzer-rca/bin/performance-analyzer-rca +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/ +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-cli +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/systemd-entrypoint +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-upgrade +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-shard +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-plugin +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-node +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-env +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-env-from-file +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/indexer-security-init.sh +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-keystore +%dir %attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-performance-analyzer/ +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-performance-analyzer/performance-analyzer-agent-cli +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/opensearch-performance-analyzer/performance-analyzer-agent +%attr(440, %{USER}, %{GROUP}) %{INSTALL_DIR}/VERSION +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/indexer-security-init.sh +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/indexer-ism-init.sh +%attr(750, %{USER}, %{GROUP}) %{INSTALL_DIR}/bin/indexer-init.sh +%attr(640, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/tools/config.yml +%attr(740, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/tools/wazuh-certs-tool.sh +%attr(740, %{USER}, %{GROUP}) %{INSTALL_DIR}/plugins/opensearch-security/tools/wazuh-passwords-tool.sh + + +%changelog +* Thu Mar 28 2024 support - 4.9.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-9-0.html +* Tue Jan 30 2024 support - 4.8.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-8-1.html +* Fri Dec 15 2023 support - 4.8.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-8-0.html +* Tue Dec 05 2023 support - 4.7.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-7-1.html +* Tue Nov 21 2023 support - 4.7.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-7-0.html +* Tue Oct 31 2023 support - 4.6.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-6-0.html +* Tue Oct 24 2023 support - 4.5.4 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5-4.html +* Tue Oct 10 2023 support - 4.5.3 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5-3.html +* Thu Aug 31 2023 support - 4.5.2 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5-2.html +* Thu Aug 24 2023 support - 4.5.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5.1.html +* Thu Aug 10 2023 support - 4.5.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5-0.html +* Mon Jul 10 2023 support - 4.4.5 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-5.html +* Tue Jun 13 2023 support - 4.4.4 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-4.html +* Thu May 25 2023 support - 4.4.3 +- More info: 
https://documentation.wazuh.com/current/release-notes/release-4-4-3.html +* Mon May 08 2023 support - 4.4.2 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-2.html +* Mon Apr 17 2023 support - 4.4.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-1.html +* Wed Jan 18 2023 support - 4.4.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-0.html +* Thu Nov 10 2022 support - 4.3.10 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-10.html +* Mon Oct 03 2022 support - 4.3.9 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-9.html +* Mon Sep 19 2022 support - 4.3.8 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-8.html +* Mon Aug 08 2022 support - 4.3.7 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-7.html +* Thu Jul 07 2022 support - 4.3.6 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-6.html +* Wed Jun 29 2022 support - 4.3.5 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-5.html +* Tue Jun 07 2022 support - 4.3.4 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-4.html +* Tue May 31 2022 support - 4.3.3 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-3.html +* Mon May 30 2022 support - 4.3.2 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-2.html +* Wed May 18 2022 support - 4.3.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-1.html +* Thu May 05 2022 support - 4.3.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-0.html \ No newline at end of file diff --git a/distribution/packages/src/rpm/wazuh-indexer.rpm.spec b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec new file mode 100644 index 0000000000000..c1f196577212a --- /dev/null +++ b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec @@ -0,0 +1,274 @@ +# Copyright OpenSearch Contributors +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. 
+ +# No build, no debuginfo +%define debug_package %{nil} + +# Disable brp-java-repack-jars, so jars will not be decompressed and repackaged +%define __jar_repack 0 + +# Generate digests, 8 means algorithm of sha256 +# This is different from rpm sig algorithm +# Requires rpm version 4.12 + to generate but b/c run on older versions +%define _source_filedigest_algorithm 8 +%define _binary_filedigest_algorithm 8 + +# Fixed in Fedora: +# https://www.endpointdev.com/blog/2011/10/rpm-building-fedoras-sharedstatedir/ +%define _sharedstatedir /var/lib + +# User Define Variables +%define product_dir %{_datadir}/%{name} +%define config_dir %{_sysconfdir}/%{name} +%define data_dir %{_sharedstatedir}/%{name} +%define log_dir %{_localstatedir}/log/%{name} +%define pid_dir %{_localstatedir}/run/%{name} +%{!?_version: %define _version 0.0.0 } +%{!?_architecture: %define _architecture x86_64 } + +Name: wazuh-indexer +Version: %{_version} +Release: 1 +License: Apache-2.0 +Summary: An open source distributed and RESTful search engine +URL: https://www.wazuh.com/ +Vendor: Wazuh, Inc +Packager: Wazuh, Inc +Group: Application/Internet +ExclusiveArch: %{_architecture} +AutoReqProv: no + +%description +Wazuh indexer is a near real-time full-text search and analytics engine that +gathers security-related data into one platform. This Wazuh central component +indexes and stores alerts generated by the Wazuh server. Wazuh indexer can be +configured as a single-node or multi-node cluster, providing scalability and +high availability. +For more information, see: https://www.wazuh.com/ + +%prep +# No-op. We are using dir so no need to setup. + +%build +# No-op. This is all pre-built Java. Nothing to do here. + +%install +set -e +cd %{_topdir} && pwd +# Create necessary directories +mkdir -p %{buildroot}%{pid_dir} +mkdir -p %{buildroot}%{product_dir}/plugins +# Install directories/files +cp -a etc usr var %{buildroot} +chmod 0755 %{buildroot}%{product_dir}/bin/* +if [ -d %{buildroot}%{product_dir}/plugins/opensearch-security ]; then + chmod 0755 %{buildroot}%{product_dir}/plugins/opensearch-security/tools/* +fi +# Pre-populate the folders to ensure rpm build success even without all plugins +mkdir -p %{buildroot}%{config_dir}/opensearch-observability +mkdir -p %{buildroot}%{config_dir}/opensearch-reports-scheduler +mkdir -p %{buildroot}%{product_dir}/performance-analyzer-rca +# Symlinks (do not symlink config dir as security demo installer has dependency, if no presense it will switch to rpm/deb mode) +ln -s %{data_dir} %{buildroot}%{product_dir}/data +ln -s %{log_dir} %{buildroot}%{product_dir}/logs +# Pre-populate PA configs if not present +if [ ! -f %{buildroot}%{data_dir}/rca_enabled.conf ]; then + echo 'true' > %{buildroot}%{data_dir}/rca_enabled.conf +fi +if [ ! -f %{buildroot}%{data_dir}/performance_analyzer_enabled.conf ]; then + echo 'true' > %{buildroot}%{data_dir}/performance_analyzer_enabled.conf +fi +# Change Permissions +chmod -Rf a+rX,u+w,g-w,o-w %{buildroot}/* +exit 0 + +%pre +set -e +# Stop existing service +if command -v systemctl >/dev/null && systemctl is-active %{name}.service >/dev/null; then + echo "Stop existing %{name}.service" + systemctl --no-reload stop %{name}.service +fi +if command -v systemctl >/dev/null && systemctl is-active %{name}-performance-analyzer.service >/dev/null; then + echo "Stop existing %{name}-performance-analyzer.service" + systemctl --no-reload stop %{name}-performance-analyzer.service +fi +# Create user and group if they do not already exist. 
+getent group %{name} > /dev/null 2>&1 || groupadd -r %{name} +getent passwd %{name} > /dev/null 2>&1 || \ + useradd -r -g %{name} -M -s /sbin/nologin \ + -c "%{name} user/group" %{name} +exit 0 + +%post +set -e +# Apply Security Settings +if [ -d %{product_dir}/plugins/opensearch-security ]; then + sh %{product_dir}/plugins/opensearch-security/tools/install_demo_configuration.sh -y -i -s > %{log_dir}/install_demo_configuration.log 2>&1 +fi +chown -R %{name}.%{name} %{config_dir} +chown -R %{name}.%{name} %{log_dir} +# Apply PerformanceAnalyzer Settings +chmod a+rw /tmp +if ! grep -q '## OpenSearch Performance Analyzer' %{config_dir}/jvm.options; then + # Add Performance Analyzer settings in %{config_dir}/jvm.options + CLK_TCK=`/usr/bin/getconf CLK_TCK` + echo >> %{config_dir}/jvm.options + echo '## OpenSearch Performance Analyzer' >> %{config_dir}/jvm.options + echo "-Dclk.tck=$CLK_TCK" >> %{config_dir}/jvm.options + echo "-Djdk.attach.allowAttachSelf=true" >> %{config_dir}/jvm.options + echo "-Djava.security.policy=file://%{config_dir}/opensearch-performance-analyzer/opensearch_security.policy" >> %{config_dir}/jvm.options + echo "--add-opens=jdk.attach/sun.tools.attach=ALL-UNNAMED" >> %{config_dir}/jvm.options +fi +# Reload systemctl daemon +if command -v systemctl > /dev/null; then + systemctl daemon-reload +fi +# Reload other configs +if command -v systemctl > /dev/null; then + systemctl restart systemd-sysctl.service || true +fi + +if command -v systemd-tmpfiles > /dev/null; then + systemd-tmpfiles --create %{name}.conf +fi + +# Messages +echo "### NOT starting on installation, please execute the following statements to configure opensearch service to start automatically using systemd" +echo " sudo systemctl daemon-reload" +echo " sudo systemctl enable opensearch.service" +echo "### You can start opensearch service by executing" +echo " sudo systemctl start opensearch.service" +if [ -d %{product_dir}/plugins/opensearch-security ]; then + echo "### Create opensearch demo certificates in %{config_dir}/" + echo " See demo certs creation log in %{log_dir}/install_demo_configuration.log" +fi +echo "### Upcoming breaking change in packaging" +echo " In a future release of OpenSearch, we plan to change the permissions associated with access to installed files" +echo " If you are configuring tools that require read access to the OpenSearch configuration files, we recommend you add the user that runs these tools to the 'opensearch' group" +echo " For more information, see https://github.com/opensearch-project/opensearch-build/pull/4043" +exit 0 + +%preun +set -e +if command -v systemctl >/dev/null && systemctl is-active %{name}.service >/dev/null; then + echo "Stop existing %{name}.service" + systemctl --no-reload stop %{name}.service +fi +if command -v systemctl >/dev/null && systemctl is-active %{name}-performance-analyzer.service >/dev/null; then + echo "Stop existing %{name}-performance-analyzer.service" + systemctl --no-reload stop %{name}-performance-analyzer.service +fi +exit 0 + +%files +# Permissions +%defattr(-, %{name}, %{name}) + +# Root dirs/docs/licenses +%dir %{product_dir} +%doc %{product_dir}/NOTICE.txt +%doc %{product_dir}/README.md +%license %{product_dir}/LICENSE.txt + +# Config dirs/files +%dir %{config_dir} +%{config_dir}/jvm.options.d +%{config_dir}/opensearch-* +%config(noreplace) %{config_dir}/opensearch.yml +%config(noreplace) %{config_dir}/jvm.options +%config(noreplace) %{config_dir}/log4j2.properties +%config(noreplace) %{data_dir}/rca_enabled.conf 
+%config(noreplace) %{data_dir}/performance_analyzer_enabled.conf + +# Service files +%attr(0644, root, root) %{_prefix}/lib/systemd/system/%{name}.service +%attr(0644, root, root) %{_prefix}/lib/systemd/system/%{name}-performance-analyzer.service +%attr(0644, root, root) %{_sysconfdir}/init.d/%{name} +%attr(0644, root, root) %config(noreplace) %{_sysconfdir}/sysconfig/%{name} +%attr(0644, root, root) %config(noreplace) %{_prefix}/lib/sysctl.d/%{name}.conf +%attr(0644, root, root) %config(noreplace) %{_prefix}/lib/tmpfiles.d/%{name}.conf + +# Main dirs +%{product_dir}/bin +%{product_dir}/jdk +%{product_dir}/lib +%{product_dir}/modules +%{product_dir}/performance-analyzer-rca +%{product_dir}/plugins +%{log_dir} +%{pid_dir} +%dir %{data_dir} + +# Symlinks +%{product_dir}/data +%{product_dir}/logs + +# Wazuh additional files +%attr(440, %{name}, %{name}) %{product_dir}/VERSION +%attr(750, %{name}, %{name}) %{product_dir}/bin/indexer-security-init.sh +%attr(750, %{name}, %{name}) %{product_dir}/bin/indexer-ism-init.sh +%attr(750, %{name}, %{name}) %{product_dir}/bin/indexer-init.sh + +%changelog +* Thu Mar 28 2024 support - 4.9.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-9-0.html +* Tue Jan 30 2024 support - 4.8.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-8-1.html +* Fri Dec 15 2023 support - 4.8.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-8-0.html +* Tue Dec 05 2023 support - 4.7.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-7-1.html +* Tue Nov 21 2023 support - 4.7.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-7-0.html +* Tue Oct 31 2023 support - 4.6.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-6-0.html +* Tue Oct 24 2023 support - 4.5.4 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5-4.html +* Tue Oct 10 2023 support - 4.5.3 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5-3.html +* Thu Aug 31 2023 support - 4.5.2 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5-2.html +* Thu Aug 24 2023 support - 4.5.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5.1.html +* Thu Aug 10 2023 support - 4.5.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-5-0.html +* Mon Jul 10 2023 support - 4.4.5 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-5.html +* Tue Jun 13 2023 support - 4.4.4 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-4.html +* Thu May 25 2023 support - 4.4.3 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-3.html +* Mon May 08 2023 support - 4.4.2 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-2.html +* Mon Apr 17 2023 support - 4.4.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-1.html +* Wed Jan 18 2023 support - 4.4.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-4-0.html +* Thu Nov 10 2022 support - 4.3.10 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-10.html +* Mon Oct 03 2022 support - 4.3.9 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-9.html +* Mon Sep 19 2022 support - 4.3.8 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-8.html +* 
Mon Aug 08 2022 support - 4.3.7 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-7.html +* Thu Jul 07 2022 support - 4.3.6 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-6.html +* Wed Jun 29 2022 support - 4.3.5 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-5.html +* Tue Jun 07 2022 support - 4.3.4 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-4.html +* Tue May 31 2022 support - 4.3.3 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-3.html +* Mon May 30 2022 support - 4.3.2 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-2.html +* Wed May 18 2022 support - 4.3.1 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-1.html +* Thu May 05 2022 support - 4.3.0 +- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-0.html +- Initial package \ No newline at end of file diff --git a/distribution/src/config/jvm.prod.options b/distribution/src/config/jvm.prod.options index a9949d48cf3be..9a116b52d314c 100644 --- a/distribution/src/config/jvm.prod.options +++ b/distribution/src/config/jvm.prod.options @@ -79,6 +79,13 @@ # Explicitly allow security manager (https://bugs.openjdk.java.net/browse/JDK-8270380) 18-:-Djava.security.manager=allow +# JDK 20+ Incubating Vector Module for SIMD optimizations; +# disabling may reduce performance on vector optimized lucene +20:--add-modules=jdk.incubator.vector + +# HDFS ForkJoinPool.common() support by SecurityManager +-Djava.util.concurrent.ForkJoinPool.common.threadFactory=org.opensearch.secure_sm.SecuredForkJoinWorkerThreadFactory + ## OpenSearch Performance Analyzer -Dclk.tck=100 -Djdk.attach.allowAttachSelf=true diff --git a/distribution/src/config/security/internal_users.yml b/distribution/src/config/security/internal_users.yml index 1ff2c8c23a151..44ae613e8bb19 100644 --- a/distribution/src/config/security/internal_users.yml +++ b/distribution/src/config/security/internal_users.yml @@ -17,10 +17,17 @@ admin: - "admin" description: "Demo admin user" +anomalyadmin: + hash: "$2y$12$TRwAAJgnNo67w3rVUz4FIeLx9Dy/llB79zf9I15CKJ9vkM4ZzAd3." + reserved: false + opendistro_security_roles: + - "anomaly_full_access" + description: "Demo anomaly admin user, using internal role" + kibanaserver: hash: "$2a$12$4AcgAt3xwOWadA5s5blL6ev39OXDNhmOesEoo33eZtrq2N0YrU3H." 
reserved: true - description: "Demo kibanaserver user" + description: "Demo OpenSearch Dashboards user" kibanaro: hash: "$2a$12$JJSXNfTowz7Uu5ttXfeYpeYE0arACvcwlPBStB1F.MI7f0U9Z4DGC" @@ -32,25 +39,25 @@ kibanaro: attribute1: "value1" attribute2: "value2" attribute3: "value3" - description: "Demo kibanaro user" + description: "Demo read only user, using external role mapping" logstash: hash: "$2a$12$u1ShR4l4uBS3Uv59Pa2y5.1uQuZBrZtmNfqB3iM/.jL0XoV9sghS2" reserved: false backend_roles: - "logstash" - description: "Demo logstash user" + description: "Demo logstash user, using external role mapping" readall: hash: "$2a$12$ae4ycwzwvLtZxwZ82RmiEunBbIPiAmGZduBAjKN0TXdwQFtCwARz2" reserved: false backend_roles: - "readall" - description: "Demo readall user" + description: "Demo readall user, using external role mapping" snapshotrestore: hash: "$2y$12$DpwmetHKwgYnorbgdvORCenv4NAK8cPUg8AI6pxLCuWf/ALc0.v7W" reserved: false backend_roles: - "snapshotrestore" - description: "Demo snapshotrestore user" + description: "Demo snapshotrestore user, using external role mapping" diff --git a/distribution/src/config/security/roles.yml b/distribution/src/config/security/roles.yml index d64d6228ec29e..de6c91906fd1d 100644 --- a/distribution/src/config/security/roles.yml +++ b/distribution/src/config/security/roles.yml @@ -2,7 +2,7 @@ _meta: type: "roles" config_version: 2 -# Restrict users so they can only view visualization and dashboard on kibana +# Restrict users so they can only view visualization and dashboard on OpenSearchDashboards kibana_read_only: reserved: true @@ -10,6 +10,20 @@ kibana_read_only: security_rest_api_access: reserved: true +security_rest_api_full_access: + reserved: true + cluster_permissions: + - 'restapi:admin/actiongroups' + - 'restapi:admin/allowlist' + - 'restapi:admin/config/update' + - 'restapi:admin/internalusers' + - 'restapi:admin/nodesdn' + - 'restapi:admin/roles' + - 'restapi:admin/rolesmapping' + - 'restapi:admin/ssl/certs/info' + - 'restapi:admin/ssl/certs/reload' + - 'restapi:admin/tenants' + # Allows users to view monitors, destinations and alerts alerting_read_access: reserved: true @@ -18,12 +32,17 @@ alerting_read_access: - 'cluster:admin/opendistro/alerting/destination/get' - 'cluster:admin/opendistro/alerting/monitor/get' - 'cluster:admin/opendistro/alerting/monitor/search' + - 'cluster:admin/opensearch/alerting/findings/get' + - 'cluster:admin/opensearch/alerting/workflow/get' + - 'cluster:admin/opensearch/alerting/workflow_alerts/get' # Allows users to view and acknowledge alerts alerting_ack_alerts: reserved: true cluster_permissions: - 'cluster:admin/opendistro/alerting/alerts/*' + - 'cluster:admin/opendistro/alerting/chained_alerts/*' + - 'cluster:admin/opendistro/alerting/workflow_alerts/*' # Allows users to use all alerting functionality alerting_full_access: @@ -31,6 +50,8 @@ alerting_full_access: cluster_permissions: - 'cluster_monitor' - 'cluster:admin/opendistro/alerting/*' + - 'cluster:admin/opensearch/alerting/*' + - 'cluster:admin/opensearch/notifications/feature/publish' index_permissions: - index_patterns: - '*' @@ -48,6 +69,8 @@ anomaly_read_access: - 'cluster:admin/opendistro/ad/detectors/get' - 'cluster:admin/opendistro/ad/result/search' - 'cluster:admin/opendistro/ad/tasks/search' + - 'cluster:admin/opendistro/ad/detector/validate' + - 'cluster:admin/opendistro/ad/result/topAnomalies' # Allows users to use all Anomaly Detection functionality anomaly_full_access: @@ -63,6 +86,41 @@ anomaly_full_access: - 'indices:admin/aliases/get' - 
'indices:admin/mappings/get' +# Allow users to execute read only k-NN actions +knn_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/knn_search_model_action' + - 'cluster:admin/knn_get_model_action' + - 'cluster:admin/knn_stats_action' + +# Allow users to use all k-NN functionality +knn_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/knn_training_model_action' + - 'cluster:admin/knn_training_job_router_action' + - 'cluster:admin/knn_training_job_route_decision_info_action' + - 'cluster:admin/knn_warmup_action' + - 'cluster:admin/knn_delete_model_action' + - 'cluster:admin/knn_remove_model_from_cache_action' + - 'cluster:admin/knn_update_model_graveyard_action' + - 'cluster:admin/knn_search_model_action' + - 'cluster:admin/knn_get_model_action' + - 'cluster:admin/knn_stats_action' + +# Allow users to execute read only ip2geo datasource action +ip2geo_datasource_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/geospatial/datasource/get' + +# Allow users to use all ip2geo datasource action +ip2geo_datasource_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/geospatial/datasource/*' + # Allows users to read Notebooks notebooks_read_access: reserved: true @@ -80,6 +138,34 @@ notebooks_full_access: - 'cluster:admin/opendistro/notebooks/get' - 'cluster:admin/opendistro/notebooks/list' +# Allows users to read observability objects +observability_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/observability/get' + +# Allows users to all Observability functionality +observability_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/observability/create' + - 'cluster:admin/opensearch/observability/update' + - 'cluster:admin/opensearch/observability/delete' + - 'cluster:admin/opensearch/observability/get' + +# Allows users to all PPL functionality +ppl_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/ppl' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices:admin/mappings/get' + - 'indices:data/read/search*' + - 'indices:monitor/settings/get' + # Allows users to read and download Reports reports_instances_read_access: reserved: true @@ -129,6 +215,164 @@ asynchronous_search_read_access: cluster_permissions: - 'cluster:admin/opendistro/asynchronous_search/get' +# Allows user to use all index_management actions - ism policies, rollups, transforms +index_management_full_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/ism/*" + - "cluster:admin/opendistro/rollup/*" + - "cluster:admin/opendistro/transform/*" + - "cluster:admin/opensearch/controlcenter/lron/*" + - "cluster:admin/opensearch/notifications/channels/get" + - "cluster:admin/opensearch/notifications/feature/publish" + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices:admin/opensearch/ism/*' + +# Allows users to use all cross cluster replication functionality at leader cluster +cross_cluster_replication_leader_full_access: + reserved: true + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - "indices:admin/plugins/replication/index/setup/validate" + - "indices:data/read/plugins/replication/changes" + - "indices:data/read/plugins/replication/file_chunk" + +# Allows users to use all cross cluster replication functionality at follower cluster +cross_cluster_replication_follower_full_access: + reserved: true + cluster_permissions: + - 
"cluster:admin/plugins/replication/autofollow/update" + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - "indices:admin/plugins/replication/index/setup/validate" + - "indices:data/write/plugins/replication/changes" + - "indices:admin/plugins/replication/index/start" + - "indices:admin/plugins/replication/index/pause" + - "indices:admin/plugins/replication/index/resume" + - "indices:admin/plugins/replication/index/stop" + - "indices:admin/plugins/replication/index/update" + - "indices:admin/plugins/replication/index/status_check" + +# Allows users to use all cross cluster search functionality at remote cluster +cross_cluster_search_remote_full_access: + reserved: true + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices:admin/shards/search_shards' + - 'indices:data/read/search' + +# Allow users to read ML stats/models/tasks +ml_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/ml/stats/nodes' + - 'cluster:admin/opensearch/ml/model_groups/search' + - 'cluster:admin/opensearch/ml/models/get' + - 'cluster:admin/opensearch/ml/models/search' + - 'cluster:admin/opensearch/ml/tasks/get' + - 'cluster:admin/opensearch/ml/tasks/search' + +# Allows users to use all ML functionality +ml_full_access: + reserved: true + cluster_permissions: + - 'cluster_monitor' + - 'cluster:admin/opensearch/ml/*' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices_monitor' + +# Allows users to use all Notifications functionality +notifications_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/notifications/*' + +# Allows users to read Notifications config/channels +notifications_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/notifications/configs/get' + - 'cluster:admin/opensearch/notifications/features' + - 'cluster:admin/opensearch/notifications/channels/get' + +# Allows users to use all snapshot management functionality +snapshot_management_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/snapshot_management/*' + - 'cluster:admin/opensearch/notifications/feature/publish' + - 'cluster:admin/repository/*' + - 'cluster:admin/snapshot/*' + +# Allows users to see snapshots, repositories, and snapshot management policies +snapshot_management_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/snapshot_management/policy/get' + - 'cluster:admin/opensearch/snapshot_management/policy/search' + - 'cluster:admin/opensearch/snapshot_management/policy/explain' + - 'cluster:admin/repository/get' + - 'cluster:admin/snapshot/get' + +# Allows user to use point in time functionality +point_in_time_full_access: + reserved: true + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'manage_point_in_time' + +# Allows users to see security analytics detectors and others +security_analytics_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/securityanalytics/alerts/get' + - 'cluster:admin/opensearch/securityanalytics/correlations/findings' + - 'cluster:admin/opensearch/securityanalytics/correlations/list' + - 'cluster:admin/opensearch/securityanalytics/detector/get' + - 'cluster:admin/opensearch/securityanalytics/detector/search' + - 'cluster:admin/opensearch/securityanalytics/findings/get' + - 'cluster:admin/opensearch/securityanalytics/mapping/get' + - 'cluster:admin/opensearch/securityanalytics/mapping/view/get' + - 
'cluster:admin/opensearch/securityanalytics/rule/get' + - 'cluster:admin/opensearch/securityanalytics/rule/search' + +# Allows users to use all security analytics functionality +security_analytics_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/securityanalytics/alerts/*' + - 'cluster:admin/opensearch/securityanalytics/correlations/*' + - 'cluster:admin/opensearch/securityanalytics/detector/*' + - 'cluster:admin/opensearch/securityanalytics/findings/*' + - 'cluster:admin/opensearch/securityanalytics/mapping/*' + - 'cluster:admin/opensearch/securityanalytics/rule/*' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices:admin/mapping/put' + - 'indices:admin/mappings/get' + +# Allows users to view and acknowledge alerts +security_analytics_ack_alerts: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/securityanalytics/alerts/*' + # Wazuh monitoring and statistics index permissions manage_wazuh_index: reserved: true diff --git a/docker/images/.dockerignore b/docker/images/.dockerignore index 058a889d5f239..96d12ad527ea5 100644 --- a/docker/images/.dockerignore +++ b/docker/images/.dockerignore @@ -1,4 +1,5 @@ artifacts/ +.git/ # intellij files .idea/ diff --git a/scripts/README.md b/scripts/README.md new file mode 100644 index 0000000000000..e71941bed4085 --- /dev/null +++ b/scripts/README.md @@ -0,0 +1,124 @@ +# `wazuh-indexer` packages generation guide + +The package generation process consists of two steps: + +* **Build**: compiles the Java application and bundles it into a package. +* **Assembly**: uses the package from the previous step and inflates it with plugins and +configuration files, ready for production deployment. + +We usually generate the packages using GitHub Actions; however, the process is designed to +be independent enough for maximum portability. GitHub Actions provides the infrastructure, while +the building process is self-contained in the application code. + +Each section includes instructions to generate packages locally, using Act or Docker. + +- [Install Act](https://github.com/nektos/act) + +## Build + +... +... + +#### Act (GitHub Workflow locally) + +```console +act -j build -W .github/workflows/build.yml --artifact-server-path ./artifacts + +[Build slim packages/build] 🏁 Job succeeded +``` + + +#### Running in Docker + +Within the [Docker environment](../docker): + +```console +bash scripts/build.sh -v 2.11.0 -s false -p linux -a {x64|arm64} -d {rpm|deb|tar} +``` + +The generated package is placed in `artifacts/`. + + +## Assemble + + + +### RPM + +The `assemble.sh` script takes the output of the `build.sh` script and uses it as the +base to bundle a final package containing the plugins, the production configuration +and the service files. + +The script will: + +- Extract the rpm package using the `rpm2cpio` and `cpio` tools. + + > By default, the `rpm2cpio` and `cpio` tools expect the package to be in `wazuh-indexer/artifacts/tmp/rpm`. The script takes care of creating the required folder structure, and also copies the min package and the SPEC file. + + Current folder layout at this stage: + ``` + /rpm/$ARCH + /etc + /usr + /var + wazuh-indexer-min-*.rpm + wazuh-indexer.rpm.spec + ``` + + The `usr`, `etc` and `var` folders contain the `wazuh-indexer` files, extracted from `wazuh-indexer-min-*.rpm`. + `wazuh-indexer.rpm.spec` is copied over from `wazuh-indexer/distribution/packages/src/rpm/wazuh-indexer.rpm.spec`.
+ The `wazuh-indexer-performance-analyzer.service` file is also copied from the same folder. It is a dependency of the SPEC file. + +- Install the plugins using the `opensearch-plugin` CLI tool. +- Set up configuration files. + + > Included in `min-package`. Default files are overwritten. + +- Bundle an RPM file with `rpmbuild` and the SPEC file `wazuh-indexer.rpm.spec`. + - `rpmbuild` is part of the `rpm` OS package. + + > `rpmbuild` is invoked from `wazuh-indexer/artifacts/tmp/rpm`. It creates the {BUILD,RPMS,SOURCES,SRPMS,SPECS,TMP} folders and applies the rules in the SPEC file. If successful, `rpmbuild` will generate the package in the `RPMS/` folder. The script will copy it to `wazuh-indexer/artifacts/dist` and then clean up, removing the `tmp/` folder and its contents. + + Current folder layout at this stage: + ``` + /rpm/$ARCH + /{BUILD,RPMS,SOURCES,SRPMS,SPECS,TMP} + /etc + /usr + /var + wazuh-indexer-min-*.rpm + wazuh-indexer.rpm.spec + ``` + +#### Running in Act + +```console +act -j assemble -W .github/workflows/build.yml --artifact-server-path ./artifacts --matrix distribution:rpm --matrix architecture:x64 --var OPENSEARCH_VERSION=2.11.0 + +[Build slim packages/build] 🏁 Job succeeded +``` + +#### Running in Docker + +Prerequisites: + +* Current directory: `wazuh-indexer/` +* Existing rpm package in `wazuh-indexer/artifacts/dist/rpm`, as a result of the _Build_ stage. + +```console +MIN_PKG_PATH="./artifacts" +docker run --rm \ + -v ./scripts/:/home/wazuh-indexer/scripts \ + -v $MIN_PKG_PATH:/home/wazuh-indexer/artifacts \ + -v ./distribution/packages/src:/home/wazuh-indexer/distribution/packages/src \ + -w /home/wazuh-indexer \ + -it ubuntu:jammy /bin/bash + +apt-get update +apt-get install -y rpm2cpio rpm cpio +bash scripts/assemble.sh -v 2.11.0 -p linux -a x64 -d rpm +``` + diff --git a/scripts/assemble.sh b/scripts/assemble.sh new file mode 100755 index 0000000000000..dba52e4181d8c --- /dev/null +++ b/scripts/assemble.sh @@ -0,0 +1,283 @@ +#!/bin/bash + +# Copyright OpenSearch Contributors +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. + +set -ex + +plugins=( + "alerting" # "opensearch-alerting" + "opensearch-job-scheduler" + "opensearch-anomaly-detection" # requires "opensearch-job-scheduler" + "asynchronous-search" # "opensearch-asynchronous-search" + "opensearch-cross-cluster-replication" + "geospatial" # "opensearch-geospatial" + "opensearch-index-management" + "opensearch-knn" + "opensearch-ml-plugin" # "opensearch-ml" + "neural-search" # "opensearch-neural-search" + "opensearch-notifications-core" + "notifications" # "opensearch-notifications" requires "opensearch-notifications-core" + "opensearch-observability" + "performance-analyzer" # "opensearch-performance-analyzer" + "opensearch-reports-scheduler" + "opensearch-security" + "opensearch-security-analytics" + "opensearch-sql-plugin" # "opensearch-sql" +) + +function usage() { + echo "Usage: $0 [args]" + echo "" + echo "Arguments:" + echo -e "-v VERSION\t[Required] OpenSearch version." + echo -e "-q QUALIFIER\t[Optional] Version qualifier." + echo -e "-p PLATFORM\t[Optional] Platform, default is 'uname -s'." + echo -e "-a ARCHITECTURE\t[Optional] Build architecture, default is 'uname -m'." + echo -e "-d DISTRIBUTION\t[Optional] Distribution, default is 'tar'." + echo -e "-o OUTPUT\t[Optional] Output path, default is 'artifacts'."
+ echo -e "-h help" +} + +while getopts ":h:v:q:o:p:a:d:" arg; do + case $arg in + h) + usage + exit 1 + ;; + v) + VERSION=$OPTARG + ;; + q) + QUALIFIER=$OPTARG + ;; + o) + OUTPUT=$OPTARG + ;; + p) + PLATFORM=$OPTARG + ;; + a) + ARCHITECTURE=$OPTARG + ;; + d) + DISTRIBUTION=$OPTARG + ;; + :) + echo "Error: -${OPTARG} requires an argument" + usage + exit 1 + ;; + ?) + echo "Invalid option: -${arg}" + exit 1 + ;; + esac +done + +if [ -z "$VERSION" ]; then + echo "Error: You must specify the OpenSearch version" + usage + exit 1 +fi + +[ -z "$OUTPUT" ] && OUTPUT=artifacts + +# Assemble distribution artifact +# see https://github.com/opensearch-project/OpenSearch/blob/main/settings.gradle#L34 for other distribution targets + +[ -z "$PLATFORM" ] && PLATFORM=$(uname -s | awk '{print tolower($0)}') +[ -z "$ARCHITECTURE" ] && ARCHITECTURE=$(uname -m) +[ -z "$DISTRIBUTION" ] && DISTRIBUTION="tar" + +case $PLATFORM-$DISTRIBUTION-$ARCHITECTURE in + linux-tar-x64 | darwin-tar-x64) + PACKAGE="tar" + EXT="tar.gz" + # TYPE="archives" + TARGET="$PLATFORM-$PACKAGE" + SUFFIX="$PLATFORM-x64" + ;; + linux-tar-arm64 | darwin-tar-arm64) + PACKAGE="tar" + EXT="tar.gz" + # TYPE="archives" + TARGET="$PLATFORM-arm64-$PACKAGE" + SUFFIX="$PLATFORM-arm64" + ;; + linux-deb-x64) + PACKAGE="deb" + EXT="deb" + # TYPE="packages" + TARGET="deb" + SUFFIX="amd64" + ;; + linux-deb-arm64) + PACKAGE="deb" + EXT="deb" + # TYPE="packages" + TARGET="arm64-deb" + SUFFIX="arm64" + ;; + linux-rpm-x64) + PACKAGE="rpm" + EXT="rpm" + # TYPE="packages" + TARGET="rpm" + SUFFIX="x86_64" + ;; + linux-rpm-arm64) + PACKAGE="rpm" + EXT="rpm" + # TYPE="packages" + TARGET="arm64-rpm" + SUFFIX="aarch64" + ;; + # windows-zip-x64) + # PACKAGE="zip" + # EXT="zip" + # # TYPE="archives" + # TARGET="$PLATFORM-$PACKAGE" + # SUFFIX="$PLATFORM-x64" + # ;; + # windows-zip-arm64) + # PACKAGE="zip" + # EXT="zip" + # # TYPE="archives" + # TARGET="$PLATFORM-arm64-$PACKAGE" + # SUFFIX="$PLATFORM-arm64" + # ;; + *) + echo "Unsupported platform-distribution-architecture combination: $PLATFORM-$DISTRIBUTION-$ARCHITECTURE" + exit 1 + ;; +esac + +echo "Assembling OpenSearch for $PLATFORM-$DISTRIBUTION-$ARCHITECTURE" +# wazuh-indexer-min_4.9.0-1-x64_78fcc3db6a5b470294319e48b58c3d715bee39d1.rpm +ARTIFACT_BUILD_NAME=$(ls "${OUTPUT}/dist/" | grep "wazuh-indexer-min.*.$EXT") + +# Create temporal directory and copy the min package there for extraction +TMP_DIR="${OUTPUT}/tmp/${TARGET}" +mkdir -p "$TMP_DIR" +cp "${OUTPUT}/dist/$ARTIFACT_BUILD_NAME" "${TMP_DIR}" + +function assemble_tar() { + cd "${TMP_DIR}" + PATH_CONF="./config" + PATH_BIN="./bin" + + # Step 1: extract + echo "Extract ${ARTIFACT_BUILD_NAME} archive" + tar -zvxf "${ARTIFACT_BUILD_NAME}" + cd "$(ls -d wazuh-indexer-*/)" + + # Step 2: install plugins + echo "Install plugins" + for plugin in "${plugins[@]}"; do + plugin_from_maven="org.opensearch.plugin:${plugin}:$VERSION.0" + "${PATH_BIN}/opensearch-plugin" install --batch --verbose "${plugin_from_maven}" + done + + # Step 3: swap configuration files + cp $PATH_CONF/security/* $PATH_CONF/opensearch-security/ + cp $PATH_CONF/jvm.prod.options $PATH_CONF/jvm.options + cp $PATH_CONF/opensearch.prod.yml $PATH_CONF/opensearch.yml + + rm -r $PATH_CONF/security + rm $PATH_CONF/jvm.prod.options $PATH_CONF/opensearch.prod.yml + + # Step 4: pack + archive_name="wazuh-indexer-$(cat VERSION)" + cd .. + tar -cvf "${archive_name}-${SUFFIX}.${EXT}" "${archive_name}" + cd ../../.. 
+ cp "${TMP_DIR}/${archive_name}-${SUFFIX}.${EXT}" "${OUTPUT}/dist/" + + echo "Cleaning temporary ${TMP_DIR} folder" + rm -r "${TMP_DIR}" + echo "After execution, shell path is $(pwd)" +} + + +function assemble_rpm() { + # Copy spec + cp "distribution/packages/src/rpm/wazuh-indexer.rpm.spec" "${TMP_DIR}" + # Copy performance analyzer service file + mkdir -p "${TMP_DIR}"/usr/lib/systemd/system + cp "distribution/packages/src/rpm/wazuh-indexer-performance-analyzer.service" "${TMP_DIR}"/usr/lib/systemd/system + + cd "${TMP_DIR}" + PATH_CONF="./etc/wazuh-indexer" + PATH_BIN="./usr/share/wazuh-indexer/bin" + + # Extract min-package. Creates usr/, etc/ and var/ in the current directory + echo "Extract ${ARTIFACT_BUILD_NAME} archive" + rpm2cpio "${ARTIFACT_BUILD_NAME}" | cpio -imdv + + # Install plugins from Maven repository + echo "Install plugins" + for plugin in "${plugins[@]}"; do + plugin_from_maven="org.opensearch.plugin:${plugin}:$VERSION.0" + OPENSEARCH_PATH_CONF=$PATH_CONF "${PATH_BIN}/opensearch-plugin" install --batch --verbose "${plugin_from_maven}" + done + + # Set up configuration files + cp $PATH_CONF/security/* $PATH_CONF/opensearch-security/ + cp $PATH_CONF/jvm.prod.options $PATH_CONF/jvm.options + cp $PATH_CONF/opensearch.prod.yml $PATH_CONF/opensearch.yml + + rm -r $PATH_CONF/security + rm $PATH_CONF/jvm.prod.options $PATH_CONF/opensearch.prod.yml + + # Remove symbolic links and bat files + find . -type l -exec rm -rf {} \; + find . -name "*.bat" -exec rm -rf {} \; + + # Generate final package + local topdir + local version + local spec_file="wazuh-indexer.rpm.spec" + topdir=$(pwd) + version=$(cat ./usr/share/wazuh-indexer/VERSION) + # TODO validate architecture + rpmbuild --bb \ + --define "_topdir ${topdir}" \ + --define "_version ${version}" \ + --define "_architecture ${SUFFIX}" \ + ${spec_file} + + # Move to the root folder, copy the package and clean. + cd ../../.. + package_name="wazuh-indexer-${version}-1.${SUFFIX}.${EXT}" + cp "${TMP_DIR}/RPMS/${SUFFIX}/${package_name}" "${OUTPUT}/dist/" + + echo "Cleaning temporary ${TMP_DIR} folder" + rm -r "${TMP_DIR}" + echo "After execution, shell path is $(pwd)" + # Store package's name to file. Used by GH Action. + echo "${package_name}" > "${OUTPUT}/artifact_name.txt" +} + +case $SUFFIX.$EXT in + linux-arm64.tar.gz) + assemble_tar + ;; + linux-x64.tar.gz) + assemble_tar + ;; + aarch64.rpm) + assemble_rpm + ;; + x86_64.rpm) + assemble_rpm + ;; + amd64.deb) + ;; + arm64.deb) + ;; +esac From 69ecbcfb1efd7e5fc73702b8f6af3eb2da1bdd98 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Wed, 27 Dec 2023 16:09:42 +0100 Subject: [PATCH 022/120] Fix yellow cluster state (#95) * Add template and settings to disable replicas on ISM plugin internal indices * Fix documentation Replaces exit 1 statements with return 1 * Fix uncommented comment line --- distribution/src/bin/indexer-ism-init.sh | 88 +++++++++++++++++++++--- 1 file changed, 78 insertions(+), 10 deletions(-) diff --git a/distribution/src/bin/indexer-ism-init.sh b/distribution/src/bin/indexer-ism-init.sh index 4217979624bc7..b30531cb0a713 100644 --- a/distribution/src/bin/indexer-ism-init.sh +++ b/distribution/src/bin/indexer-ism-init.sh @@ -80,6 +80,48 @@ function generate_rollover_template() { EOF } +######################################################################### +# Creates an index template to disable replicas on ISM configurastion indices. +# Returns: +# The index template as a JSON string. 
+#########################################################################
+function generate_ism_config_template() {
+    cat <<-EOF
+	{
+	  "order": 1,
+	  "index_patterns": [
+	    ".opendistro-ism-managed-index-history-*",
+	    ".opendistro-ism-config",
+	    ".opendistro-job-scheduler-lock"
+	  ],
+	  "settings": {
+	    "number_of_replicas": 0
+	  }
+	}
+	EOF
+}
+
+#########################################################################
+# Creates persistent cluster settings to disable replicas for the ISM history indices.
+# Returns:
+#   The setting as a JSON string.
+#########################################################################
+function generate_ism_config() {
+    cat <<-EOF
+	{
+	  "persistent": {
+	    "plugins": {
+	      "index_state_management": {
+	        "history": {
+	          "number_of_replicas": "0"
+	        }
+	      }
+	    }
+	  }
+	}
+	EOF
+}
+
 #########################################################################
 # Loads the index templates for the rollover policy to the indexer.
 #########################################################################
@@ -89,18 +131,44 @@ function load_templates() {
     echo "Will create 'wazuh' index template"
     if [ -f $wazuh_template_path ]; then
         cat $wazuh_template_path |
+        if ! curl -s -k ${C_AUTH} \
+            -X PUT "${INDEXER_URL}/_template/wazuh" \
+            -o "${LOG_FILE}" --create-dirs \
+            -H 'Content-Type: application/json' -d @-; then
+            echo " ERROR: 'wazuh' template creation failed"
+            return 1
+        else
+            echo " SUCC: 'wazuh' template created or updated"
+        fi
+    else
+        echo " ERROR: $wazuh_template_path not found"
+    fi
+
+    # Load template for ISM configuration indices
+    echo "Will create 'ism_history_indices' index template"
+    generate_ism_config_template |
     if ! curl -s -k ${C_AUTH} \
-        -X PUT "${INDEXER_URL}/_template/wazuh" \
+        -X PUT "${INDEXER_URL}/_template/ism_history_indices" \
         -o "${LOG_FILE}" --create-dirs \
         -H 'Content-Type: application/json' -d @-; then
-        echo " ERROR: 'wazuh' template creation failed"
-        exit 1
+        echo " ERROR: 'ism_history_indices' template creation failed"
+        return 1
     else
-        echo " SUCC: 'wazuh' template created or updated"
+        echo " SUCC: 'ism_history_indices' template created or updated"
+    fi
+
+    # Make settings persistent
+    echo "Will disable replicas for 'plugins.index_state_management.history' indices"
+    generate_ism_config |
+    if ! curl -s -k ${C_AUTH} \
+        -X PUT "${INDEXER_URL}/_cluster/settings" \
+        -o "${LOG_FILE}" --create-dirs \
+        -H 'Content-Type: application/json' -d @-; then
+        echo " ERROR: cluster's settings update failed"
+        return 1
+    else
+        echo " SUCC: cluster's settings saved"
     fi
-    else
-        echo " ERROR: $wazuh_template_path not found"
-    fi
 
     echo "Will create index templates to configure the alias"
     for alias in "${aliases[@]}"; do
@@ -201,7 +269,7 @@ function create_write_index() {
         -H 'Content-Type: application/json' \
         -d "$(generate_write_index_alias "${1}")"; then
         echo " ERROR: creating '${1}' write index"
-        exit 1
+        return 1
     else
         echo " SUCC: '${1}' write index created"
     fi
@@ -263,7 +331,7 @@ function show_help() {
     echo -e " -v, --verbose"
     echo -e " Set verbose mode. Prints more information."
     echo -e ""
-    exit 1
+    return 1
 }
 
 #########################################################################
@@ -353,7 +421,7 @@ function main() {
         echo "SUCC: Indexer ISM initialization finished successfully."
     else
         echo "ERROR: Indexer ISM initialization failed. Check ${LOG_FILE} for more information."
-        exit 1
+        return 1
     fi
 }
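
Once the updated indexer-ism-init.sh has run against a cluster, the effect of the patch above can be sanity-checked with two standard OpenSearch API calls; the localhost address and the <admin-password> placeholder are illustrative values, not something defined by the patch itself:

  curl -s -k -u admin:<admin-password> "https://localhost:9200/_cluster/settings?pretty"
  curl -s -k -u admin:<admin-password> "https://localhost:9200/_cat/indices/.opendistro-ism-*?v&h=index,health,rep"

The first call shows whether plugins.index_state_management.history.number_of_replicas is present under the persistent settings; the second lists the health and replica count of the ISM plugin's internal indices.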
From 237bf5fd6395b6684cd1c5dca83c5fbd9d5ad21e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=81lex=20Ruiz?=
Date: Wed, 3 Jan 2024 17:56:41 +0100
Subject: [PATCH 023/120] Update ism-init script (#97)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Update ism-init script to parametrize the path of the wazuh-template

---------

Signed-off-by: Álex Ruiz
---
 distribution/src/bin/indexer-ism-init.sh | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/distribution/src/bin/indexer-ism-init.sh b/distribution/src/bin/indexer-ism-init.sh
index b30531cb0a713..3e9e4a3f278f3 100644
--- a/distribution/src/bin/indexer-ism-init.sh
+++ b/distribution/src/bin/indexer-ism-init.sh
@@ -19,6 +19,8 @@ INDEXER_URL="https://${INDEXER_HOSTNAME}:9200"
 # curl settings shortcuts
 C_AUTH="-u admin:${INDEXER_PASSWORD}"
 
+ALERTS_TEMPLATE="/etc/wazuh-indexer/wazuh-template.json"
+
 #########################################################################
 # Creates the rollover_policy ISM policy.
 # Globals:
@@ -127,10 +129,9 @@ function generate_ism_config() {
 #########################################################################
 function load_templates() {
     # Load wazuh-template.json, needed for initial indices creation.
-    local wazuh_template_path="/etc/wazuh-indexer/wazuh-template.json"
     echo "Will create 'wazuh' index template"
-    if [ -f $wazuh_template_path ]; then
-        cat $wazuh_template_path |
+    if [ -f "${ALERTS_TEMPLATE}" ]; then
+        cat "${ALERTS_TEMPLATE}" |
         if ! curl -s -k ${C_AUTH} \
             -X PUT "${INDEXER_URL}/_template/wazuh" \
             -o "${LOG_FILE}" --create-dirs \
@@ -141,7 +142,7 @@ function load_templates() {
             echo " SUCC: 'wazuh' template created or updated"
         fi
     else
-        echo " ERROR: $wazuh_template_path not found"
+        echo " ERROR: ${ALERTS_TEMPLATE} not found"
     fi
 
     # Load template for ISM configuration indices
@@ -403,6 +404,15 @@ function main() {
             shift 2
         fi
         ;;
+        "-t" | "--template")
+            if [ -z "${2}" ]; then
+                echo "Error on arguments. Probably missing