Add index definition for stateless indices (#554)

* Add index definition for stateless indices

Backported from 6.0.0

* Add module to the automation list

Extend event generator with SCA fields

* Force automation run

* Revert

* Update ECS templates for modified modules: stateless

* Add back ISM settings and automate the creation of the index template for the wazuh-archives indices

* Update ECS templates for modified modules: stateless

* Update script

* Trigger automation

* Update ECS templates for modified modules: stateless

* Fix output path for the generated archives template

* Update ECS templates for modified modules: stateless

* Increase mappings limit

* Add trailing new lines

---------

Co-authored-by: Wazuh Indexer Bot <github_devel_xdrsiem_indexer@wazuh.com>
Álex Ruiz Becerra 2025-08-19 09:59:27 +02:00 committed by GitHub
parent 67ab3ec7b9
commit c26e75f505
23 changed files with 19652 additions and 5430 deletions


@@ -16,6 +16,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add documentation for default users and roles (RBAC) [(#535)](https://github.com/wazuh/wazuh-indexer-plugins/pull/535)
- Implement retry mechanism to the initialization plugin [(#541)](https://github.com/wazuh/wazuh-indexer-plugins/pull/541)
- Add documentation for reporting plugin [(#544)](https://github.com/wazuh/wazuh-indexer-plugins/pull/544)
- Add index definition for stateless indices [(#554)](https://github.com/wazuh/wazuh-indexer-plugins/pull/554)
### Dependencies
-


@@ -80,24 +80,32 @@ generate_mappings() {
local out_file="$out_dir/generated/elasticsearch/legacy/template-tmp.json"
local csv_file="$out_dir/generated/csv/fields.csv"
# Delete the "tags" field from the index template
echo "Deleting the \"tags\" field from the index template"
jq 'del(.mappings.properties.tags)' "$in_file" > "$out_file"
mv "$out_file" "$in_file"
# Remove multi-fields from the generated index template
echo "Removing multi-fields from the index template"
remove_multi_fields "$in_file" "$out_file"
mv "$out_file" "$in_file"
if [ "$ECS_MODULE" != "stateless" ]; then
# Delete the "tags" field from the index template
echo "Deleting the \"tags\" field from the index template"
jq 'del(.mappings.properties.tags)' "$in_file" > "$out_file"
mv "$out_file" "$in_file"
# Delete the "@timestamp" field from the index template
echo "Deleting the \"@timestamp\" field from the index template"
jq 'del(.mappings.properties."@timestamp")' "$in_file" > "$out_file"
mv "$out_file" "$in_file"
# Delete the "@timestamp" and "tags" fields from the csv file
# Delete the "@timestamp" field from the csv file
echo "Deleting the \"@timestamp\" and \"tags\" fields from the CSV file"
sed -i '/@timestamp/d; /tags/d' "$csv_file"
else
# Generate the template for `wazuh-archives`
echo "Generating template for 'wazuh-archives'"
archives_file="$out_dir/generated/elasticsearch/legacy/template-archives.json"
cp "$in_file" "$archives_file"
sed -i 's/wazuh-alerts/wazuh-archives/g' "$archives_file"
fi
# Transform legacy index template for OpenSearch compatibility
jq '{


@@ -69,6 +69,7 @@ detect_modified_modules() {
# Mapping section
module_to_file=(
[stateless]="index-template-alerts.json"
[states-fim-files]="index-template-fim-files.json"
[states-fim-registry-keys]="index-template-fim-registry-keys.json"
[states-fim-registry-values]="index-template-fim-registry-values.json"
@@ -166,6 +167,14 @@ commit_and_push_changes() {
mkdir -p "$documentation_dir"
echo " - Copy the updated csv definitions for module '$ecs_module' to '$documentation_dir'"
cp "$CURRENT_PATH/ecs/$ecs_module/$CSV_SUBPATH" "$documentation_dir"
# Generate archives index template from the alerts one
if [ "$ecs_module" == "stateless" ]; then
target_file="$TEMPLATES_PATH/index-template-archives.json"
echo " - Generate template for module '$ecs_module/archives' to '$target_file'"
cp "$CURRENT_PATH/ecs/$ecs_module/$MAPPINGS_SUBPATH" "$target_file"
sed -i 's/wazuh-alerts/wazuh-archives/g' "$target_file"
fi
done
git status --short
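
The archives template is therefore never edited by hand: both hunks above derive it from the generated `wazuh-alerts` template by copying the file and rewriting every `wazuh-alerts` reference. A minimal Python sketch of the same transformation, using illustrative file names rather than the repository's real paths:

```python
import json

# Illustrative paths; the shell scripts above receive the real ones as variables.
ALERTS_TEMPLATE = "index-template-alerts.json"
ARCHIVES_TEMPLATE = "index-template-archives.json"

with open(ALERTS_TEMPLATE) as f:
    template = f.read()

# Equivalent of `sed -i 's/wazuh-alerts/wazuh-archives/g'`: index patterns and
# the rollover alias are rewritten to point at the archives indices.
archives = template.replace("wazuh-alerts", "wazuh-archives")

# Sanity-check that the result is still valid JSON before writing it out.
json.loads(archives)

with open(ARCHIVES_TEMPLATE, "w") as f:
    f.write(archives)
```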


@@ -0,0 +1,13 @@
## `wazuh-alerts-5.x` time series index
The `wazuh-alerts-*` indices store events received from monitored endpoints that match a detection rule and trigger an alert.
This is a time-based (stateless) index. The `wazuh-archives-5.x` index uses the same mappings and settings; its template is generated programmatically from the `wazuh-alerts-5.x` one.
### Fields summary
At this stage, we use all of the ECS fields. Dynamic mode is temporarily set to `false` to prevent the creation of new fields while still allowing the indexing of events that contain fields not present in the schema. Those fields can be retrieved from the original event (`_source`), as the sketch below illustrates.
- [ECS main mappings](https://github.com/elastic/ecs/blob/v8.11.0/schemas/subsets/main.yml)
The details of each field can be found in the CSV file [Stateless Fields](fields.csv).
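
As a minimal illustration of the `dynamic: false` behaviour, the sketch below indexes an event that carries a field outside the schema and reads it back from `_source`. It assumes a local indexer reachable over HTTPS with the default `admin:admin` credentials and an existing `wazuh-alerts-5.x-000001` index created from this template.

```python
import requests
import urllib3

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

URL = "https://127.0.0.1:9200"        # assumed local indexer
AUTH = ("admin", "admin")             # assumed default credentials
INDEX = "wazuh-alerts-5.x-000001"     # assumed initial write index

# "custom.not_in_schema" is not part of the ECS-based mappings. With
# "dynamic": "false", the document is accepted but no new field is mapped.
doc = {
    "@timestamp": "2025-08-19T09:59:27.000Z",
    "agent": {"id": "001", "name": "Agent1"},
    "custom": {"not_in_schema": "kept only in _source"},
}

resp = requests.post(f"{URL}/{INDEX}/_doc?refresh=true", json=doc, auth=AUTH, verify=False)
resp.raise_for_status()
doc_id = resp.json()["_id"]

# The unmapped field is not searchable, but the original event is preserved
# and the value can still be read back from _source.
stored = requests.get(f"{URL}/{INDEX}/_doc/{doc_id}", auth=AUTH, verify=False).json()
print(stored["_source"]["custom"]["not_in_schema"])
```

Queries against `custom.not_in_schema` will not match, since no mapping exists for it; only the stored source retains the value.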

File diff suppressed because it is too large.


@@ -0,0 +1,164 @@
#!/bin/python3
import argparse
import datetime
import json
import logging
import random
import requests
import urllib3
# Constants and Configuration
LOG_FILE = "generate_data.log"
GENERATED_DATA_FILE = "generatedData.json"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
# Default values
INDEX_NAME = "wazuh-alerts-5.x-000001"
USERNAME = "admin"
PASSWORD = "admin"
IP = "127.0.0.1"
PORT = "9200"
# Configure logging
logging.basicConfig(level=logging.INFO)
# Suppress warnings
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def generate_random_data(number):
data = []
for _ in range(number):
event_data = {
"@timestamp": generate_random_date(),
"agent": generate_random_agent(),
'policy': generate_random_policy(),
'check': generate_random_check(),
"wazuh": generate_random_wazuh(),
}
data.append(event_data)
return data
def generate_random_date():
start_date = datetime.datetime.now()
end_date = start_date - datetime.timedelta(days=10)
random_date = start_date + (end_date - start_date) * random.random()
return random_date.strftime(DATE_FORMAT)
def generate_random_agent():
return {
"id": f"{random.randint(0, 99):03d}",
"name": f"Agent{random.randint(0, 99)}",
"version": f"v{random.randint(0, 9)}-stable",
"host": generate_random_host(),
}
def generate_random_host():
return {
"architecture": random.choice(["x86_64", "arm64"]),
"ip": f"{random.randint(1, 255)}.{random.randint(0, 255)}.{random.randint(0, 255)}.{random.randint(0, 255)}",
}
def generate_random_policy():
policy = {
'id': f'policy{random.randint(0, 999)}',
'name': f'Policy {random.randint(0, 999)}',
'file': f'policy{random.randint(0, 999)}.yml',
'description': 'Generated policy description.',
'references': [f'https://example.com/policy{random.randint(0, 999)}']
}
return policy
def generate_random_check():
check = {
'id': f'check{random.randint(0, 9999)}',
'name': 'Check Example',
'description': 'Generated check description.',
'rationale': 'Generated rationale.',
'remediation': 'Generated remediation.',
'references': [f'https://example.com/check{random.randint(0, 9999)}'],
'condition': 'all',
'compliance': [f'cis:{random.randint(1, 10)}.{random.randint(1, 10)}.{random.randint(1, 10)}'],
'rules': [f'Rule {random.randint(1, 100)}', f'Rule {random.randint(1, 100)}'],
'result': 'pass',
'reason': 'Randomly passed.'
}
return check
def generate_random_wazuh():
return {
"decoders": [f"decoder-{random.randint(0, 5)}" for _ in range(random.randint(1, 3))],
"rules": [f"rule-{random.randint(0, 5)}" for _ in range(random.randint(1, 3))],
"cluster": {
"name": f"wazuh-cluster-{random.randint(0, 10)}",
"node": f"wazuh-cluster-node-{random.randint(0, 10)}",
},
"schema": {"version": "1.7.0"},
}
def inject_events(data, ip, port, username, password, index, protocol):
url = f"{protocol}://{ip}:{port}/{index}/_doc"
session = requests.Session()
session.auth = (username, password)
session.verify = False
headers = {"Content-Type": "application/json"}
try:
for event_data in data:
response = session.post(url, json=event_data, headers=headers)
if response.status_code != 201:
logging.error(f"Error: {response.status_code}")
logging.error(response.text)
break
logging.info("Data injection completed successfully.")
except Exception as e:
logging.error(f"Error: {str(e)}")
def main():
parser = argparse.ArgumentParser(
description="Generate and optionally inject documents into a Wazuh Indexer cluster."
)
parser.add_argument(
"--protocol",
choices=['http', 'https'],
default='https',
help="Specify the protocol to use: http or https. Default is 'https'."
)
args = parser.parse_args()
try:
number = int(input("How many events do you want to generate? "))
except ValueError:
logging.error("Invalid input. Please enter a valid number.")
return
logging.info(f"Generating {number} events...")
data = generate_random_data(number)
with open(GENERATED_DATA_FILE, "a") as outfile:
for event_data in data:
json.dump(event_data, outfile)
outfile.write("\n")
logging.info("Data generation completed.")
inject = (
input("Do you want to inject the generated data into your indexer? (y/n) ")
.strip()
.lower()
)
if inject == "y":
ip = input(f"Enter the IP of your Indexer (default: '{IP}'): ") or IP
port = input(f"Enter the port of your Indexer (default: '{PORT}'): ") or PORT
index = input(f"Enter the index name (default: '{INDEX_NAME}'): ") or INDEX_NAME
username = input(f"Username (default: '{USERNAME}'): ") or USERNAME
password = input(f"Password (default: '{PASSWORD}'): ") or PASSWORD
inject_events(data, ip, port, username, password, index, args.protocol)
if __name__ == "__main__":
main()


@@ -0,0 +1,15 @@
---
- name: agent
title: Wazuh Agents
short: Wazuh Inc. custom fields.
type: group
group: 2
fields:
- name: groups
type: keyword
level: custom
description: >
List of groups the agent belongs to.
normalize:
- array
example: "[\"group1\", \"group2\"]"


@@ -0,0 +1,99 @@
---
- name: check
title: SCA policy check
description: >
Custom fields for SCA policy check.
type: group
group: 2
fields:
- name: id
type: keyword
level: custom
description: The ID of the SCA policy check.
example: "26000"
- name: name
type: keyword
level: custom
description: The name of the SCA policy check.
example: "Ensure 'Enforce password history' is set to '24 or more password(s)'."
- name: description
type: keyword
level: custom
description: Extended description of the check.
example: >
"The password history setting determines the number of unique new passwords a user must use before an old password can be reused."
- name: rationale
type: keyword
level: custom
description: The reason for the check. Why it is important.
example: >
"The longer a user uses the same password, the more likely it is that the password will be compromised."
- name: remediation
type: keyword
level: custom
description: Actions to take to remediate the check.
example: >
"To establish the recommended configuration, set the following registry value to 24 or more password(s):"
- name: references
type: keyword
level: custom
short: References for the check.
description: >
References for the check. This can include links to documentation, articles, or other resources that provide additional information about the check, such as Common Configuration Enumeration (CCE).
Note: this field should contain an array of values.
normalize:
- array
example: '["https://workbench.cisecurity.org"]'
- name: condition
type: keyword
level: custom
short: Relationship between the rules.
description: >
Describes the relationship between the rules. This field indicates how the rules should be evaluated to determine the overall result of the check.
The allowed values are:
- `all`: All rules must be satisfied.
- `any`: Any of the rules is sufficient.
- `none`: None of the rules must be satisfied.
example: "all"
- name: compliance
type: keyword
level: custom
short: CIS compliance standard.
description: >
CIS compliance standard under which the check is defined. This field indicates the specific compliance standard that the check is associated with, such as CIS benchmarks or other compliance frameworks.
Note: this field should contain an array of values.
normalize:
- array
example: '["cis:1.1.1","cis_csc:5.2"]'
- name: rules
type: keyword
level: custom
short: Rules to be evaluated.
description: >
Expression to be evaluated. This field contains the specific rules or expressions that need to be evaluated to determine the result of the check. The rules are typically defined using a specific syntax or format that allows for logical comparisons and evaluations.
The rules can include various conditions, operators, and values that are used to assess the compliance status of the system or configuration being checked.
Note: this field should contain an array of values.
normalize:
- array
example: >
"[\"c:net.exe accounts -> n:Maximum password age \(days\):\s+(\d+) compare > 0\"," >
"\"c:net.exe accounts -> n:Length of password history maintained:\s+(\d+) compare >= 24\"]"
- name: result
type: keyword
level: custom
short: Result of the check.
description: >
The result of the check. This field indicates whether the check passed or failed based on the evaluation of the rules. The result is typically represented as a boolean value, where "passed" indicates that the check was successful and "failed" indicates that the check did not meet the specified criteria.
example: "failed"
- name: reason
type: keyword
level: custom
short: Reason for the check result.
description: >
The reason for the check result. This field provides additional information or context about the result of the check. It may include details about why the check passed or failed, any specific conditions that were not met, or any other relevant information that helps to understand the outcome of the check.
example: >
"The password history setting is not set to 24 or more password(s)."


@@ -0,0 +1,21 @@
---
- name: event
title: Context information about the log or metric event itself
short: Context information about the log or metric event itself
type: group
group: 2
fields:
- name: changed_fields
type: keyword
level: custom
description: >
Fields that were updated since last scan.
normalize:
- array
example: "[\"foo\", \"bar\"]"
- name: collector
type: keyword
level: custom
description: >
Collector used to retrieve the event.
example: file


@@ -0,0 +1,97 @@
---
- name: host
reusable:
top_level: true
expected:
- { at: agent, as: host }
fields:
- name: memory
description: >
Memory-related data.
type: object
level: custom
example: "\"total\": 100000, \"free\": 90000, \"used\": {\"percentage\": 10}"
- name: memory.total
description: >
Total memory in MB.
type: long
level: custom
example: 1024
- name: memory.free
description: >
Free memory in MB.
type: long
level: custom
example: 1024
- name: memory.used
description: >
Used memory-related data.
type: object
level: custom
example: "\"percentage\": 10"
- name: memory.used.percentage
description: >
Used memory percentage.
type: long
level: custom
example: 10
- name: cpu
description: >
CPU-related data.
type: object
level: custom
example: "\"name\": \"Intel(R) Core(TM) i7-7700HQ CPU\", \"cores\": 4, \"speed\": 2800"
- name: cpu.name
description: >
CPU Model name.
type: keyword
level: custom
example: "Intel(R) Core(TM) i7-7700HQ CPU"
- name: cpu.cores
description: >
Number of CPU cores.
type: long
level: custom
example: 4
- name: cpu.speed
description: >
CPU clock speed.
type: long
level: custom
example: 2800
- name: network.ingress.queue
type: long
level: custom
description: >
Receive queue length.
example: 10
- name: network.egress.queue
type: long
level: custom
description: >
Transmit queue length.
example: 10
- name: network.egress.drops
type: long
level: custom
description: >
Number of dropped transmitted packets.
example: 10
- name: network.egress.errors
type: long
level: custom
description: >
Number of transmission errors.
example: 10
- name: network.ingress.drops
type: long
level: custom
description: >
Number of dropped received packets.
example: 10
- name: network.ingress.errors
type: long
level: custom
description: >
Number of reception errors.
example: 10


@@ -0,0 +1,30 @@
---
- name: interface
reusable:
top_level: true
expected:
- { at: observer.egress.interface, as: observer.ingress.interface }
title: Interface
type: group
group: 2
description: >
Network interface related data.
fields:
- name: mtu
type: long
level: custom
description: >
Maximum transmission unit size.
example: 1500
- name: state
type: keyword
level: custom
description: >
State of the network interface.
example: "up"
- name: type
type: keyword
level: custom
description: >
Interface type.
example: "ethernet"


@@ -0,0 +1,38 @@
---
- name: network
title: Network
type: group
group: 2
description: >
Network related data.
fields:
- name: broadcast
type: ip
level: custom
description: >
Broadcast address.
example: "192.168.0.255"
- name: dhcp
type: keyword
level: custom
description: >
DHCP status (enabled, disabled, unknown, BOOTP).
example: "enabled"
- name: gateway
type: ip
level: custom
description: >
Gateway address.
example: "192.168.0.1"
- name: metric
type: long
level: custom
description: >
Metric of the network protocol.
example: 15
- name: netmask
type: ip
level: custom
description: >
Network mask.
example: "255.255.255.0"


@@ -0,0 +1,6 @@
---
- name: os
reusable:
top_level: false
expected:
- agent.host


@@ -0,0 +1,40 @@
---
- name: policy
title: SCA policies
description: >
Custom fields for SCA policies.
type: group
group: 2
fields:
- name: id
type: keyword
level: custom
description: The ID of the SCA policy.
example: "cis_win11_enterprise_21H2"
- name: name
type: keyword
level: custom
description: The name of the SCA policy.
example: "CIS Microsoft Windows 11 Enterprise Benchmark v1.0.0"
- name: file
type: keyword
level: custom
description: The file name of the SCA policy.
example: "cis_win11_enterprise.yml"
- name: description
type: keyword
level: custom
description: Extended description of the policy.
example: >
"The CIS Microsoft Windows 11 Enterprise Benchmark v1.0.0 is a comprehensive security configuration guide that provides prescriptive guidance for establishing a secure baseline configuration for Microsoft Windows 11 Enterprise."
- name: references
type: keyword
level: custom
short: References for the policy.
description: >
References for the policy. This can include links to documentation, articles, or other resources that provide additional information about the policy, such as Common Configuration Enumeration (CCE).
Note: this field should contain an array of values.
normalize:
- array
example: '["https://www.cisecurity.org/cis-benchmarks/"]'


@@ -0,0 +1,6 @@
---
- name: risk
reusable:
top_level: false
expected:
- agent.host


@@ -0,0 +1,15 @@
- name: vulnerability
title: Vulnerability
group: 2
short: Fields to describe the vulnerability relevant to an event.
description: >
The vulnerability fields describe information about a vulnerability that is
relevant to an event.
type: group
fields:
- name: scanner.reference
type: keyword
level: custom
description: >
Scanner's resource that provides additional information, context, and mitigations for the identified vulnerability.
example: "https://www.example.com/vulnerability/12345"


@@ -0,0 +1,38 @@
---
- name: wazuh
title: Wazuh
description: >
Wazuh Inc. custom fields
fields:
- name: decoders
type: keyword
level: custom
normalize:
- array
description: >
Wazuh decoders that matched on this event.
example: "[ 'decoder-1', 'decoder-2' ]"
- name: rules
type: keyword
level: custom
normalize:
- array
description: >
Wazuh rules that matched on this event.
example: "[ 'rule-1', 'rule-2' ]"
- name: cluster.name
type: keyword
level: custom
description: >
Wazuh cluster name.
example: "wazuh-cluster-1"
- name: cluster.node
type: keyword
level: custom
description: >
Wazuh cluster node name.
example: "wazuh-cluster-node-1"
- name: schema.version
type: keyword
level: custom
description: >
Wazuh schema version.
example: "1.7.0"


@@ -0,0 +1,4 @@
{
"dynamic": "false",
"date_detection": false
}


@@ -0,0 +1,602 @@
---
name: wazuh-alerts
fields:
base:
fields: "*"
agent:
fields: "*"
as:
fields: "*"
client:
fields:
address: {}
as:
fields: "*"
bytes: {}
domain: {}
geo:
fields: "*"
ip: {}
mac: {}
nat:
fields:
ip: {}
port: {}
packets: {}
port: {}
subdomain: {}
registered_domain: {}
top_level_domain: {}
user:
fields:
domain: {}
email: {}
full_name: {}
group:
fields: "*"
hash: {}
id: {}
name: {}
roles: {}
cloud:
fields: "*"
code_signature:
fields: "*"
container:
fields: "*"
data_stream:
fields: "*"
destination:
fields:
address: {}
as:
fields: "*"
bytes: {}
domain: {}
geo:
fields: "*"
ip: {}
mac: {}
nat:
fields:
ip: {}
port: {}
packets: {}
port: {}
subdomain: {}
registered_domain: {}
top_level_domain: {}
user:
fields:
domain: {}
email: {}
full_name: {}
group:
fields: "*"
hash: {}
id: {}
name: {}
roles: {}
device:
fields: "*"
dll:
fields: "*"
dns:
fields: "*"
ecs:
fields: "*"
elf:
fields: "*"
email:
fields: "*"
error:
fields: "*"
event:
fields: "*"
faas:
fields: "*"
file:
fields: "*"
geo:
fields: "*"
group:
fields: "*"
hash:
fields: "*"
host:
fields: "*"
http:
fields: "*"
interface:
fields: "*"
log:
fields: "*"
macho:
fields: "*"
network:
fields: "*"
observer:
fields: "*"
orchestrator:
fields: "*"
organization:
fields: "*"
os:
fields: "*"
package:
fields: "*"
pe:
fields: "*"
process:
fields:
args: {}
args_count: {}
code_signature:
fields: "*"
command_line: {}
elf:
fields: "*"
end: {}
entity_id: {}
entry_leader:
fields:
args: {}
args_count: {}
command_line: {}
entity_id: {}
entry_meta:
fields:
type: {}
source:
fields:
ip: {}
executable: {}
interactive: {}
name: {}
parent:
fields:
entity_id: {}
pid: {}
vpid: {}
start: {}
session_leader:
fields:
entity_id: {}
pid: {}
vpid: {}
start: {}
pid: {}
vpid: {}
same_as_process: {}
start: {}
tty:
fields:
char_device:
fields:
major: {}
minor: {}
working_directory: {}
user:
fields:
id: {}
name: {}
real_user:
fields:
id: {}
name: {}
saved_user:
fields:
id: {}
name: {}
group:
fields:
id: {}
name: {}
real_group:
fields:
id: {}
name: {}
saved_group:
fields:
id: {}
name: {}
supplemental_groups:
fields:
id: {}
name: {}
attested_user:
fields:
id: {}
name: {}
attested_groups:
fields:
name: {}
entry_meta:
fields:
type:
docs_only: True
env_vars: {}
executable: {}
exit_code: {}
group_leader:
fields:
args: {}
args_count: {}
command_line: {}
entity_id: {}
executable: {}
interactive: {}
name: {}
pid: {}
vpid: {}
same_as_process: {}
start: {}
tty:
fields:
char_device:
fields:
major: {}
minor: {}
working_directory: {}
user:
fields:
id: {}
name: {}
real_user:
fields:
id: {}
name: {}
saved_user:
fields:
id: {}
name: {}
group:
fields:
id: {}
name: {}
real_group:
fields:
id: {}
name: {}
saved_group:
fields:
id: {}
name: {}
supplemental_groups:
fields:
id: {}
name: {}
hash:
fields: "*"
interactive: {}
io:
fields: "*"
macho:
fields: "*"
name: {}
parent:
fields:
args: {}
args_count: {}
code_signature:
fields: "*"
command_line: {}
elf:
fields: "*"
end: {}
entity_id: {}
executable: {}
exit_code: {}
group_leader:
fields:
entity_id: {}
pid: {}
vpid: {}
start: {}
hash:
fields: "*"
interactive: {}
macho:
fields: "*"
name: {}
pe:
fields: "*"
pgid: {}
pid: {}
vpid: {}
start: {}
thread:
fields:
id: {}
name: {}
capabilities:
fields:
effective: {}
permitted: {}
title: {}
tty:
fields:
char_device:
fields:
major: {}
minor: {}
uptime: {}
working_directory: {}
user:
fields:
id: {}
name: {}
real_user:
fields:
id: {}
name: {}
saved_user:
fields:
id: {}
name: {}
group:
fields:
id: {}
name: {}
real_group:
fields:
id: {}
name: {}
saved_group:
fields:
id: {}
name: {}
supplemental_groups:
fields:
id: {}
name: {}
pe:
fields: "*"
pgid: {}
pid: {}
vpid: {}
previous:
fields:
args: {}
args_count: {}
executable: {}
real_group:
fields:
id: {}
name: {}
real_user:
fields:
id: {}
name: {}
same_as_process:
docs_only: True
saved_group:
fields:
id: {}
name: {}
saved_user:
fields:
id: {}
name: {}
start: {}
supplemental_groups:
fields:
id: {}
name: {}
session_leader:
fields:
args: {}
args_count: {}
command_line: {}
entity_id: {}
executable: {}
interactive: {}
name: {}
pid: {}
vpid: {}
same_as_process: {}
start: {}
tty:
fields:
char_device:
fields:
major: {}
minor: {}
working_directory: {}
parent:
fields:
entity_id: {}
pid: {}
vpid: {}
start: {}
session_leader:
fields:
entity_id: {}
pid: {}
vpid: {}
start: {}
user:
fields:
id: {}
name: {}
real_user:
fields:
id: {}
name: {}
saved_user:
fields:
id: {}
name: {}
group:
fields:
id: {}
name: {}
real_group:
fields:
id: {}
name: {}
saved_group:
fields:
id: {}
name: {}
supplemental_groups:
fields:
id: {}
name: {}
thread:
fields:
id: {}
name: {}
capabilities:
fields:
effective: {}
permitted: {}
title: {}
tty:
fields: "*"
uptime: {}
user:
fields:
id: {}
name: {}
working_directory: {}
registry:
fields: "*"
related:
fields: "*"
risk:
fields: "*"
rule:
fields: "*"
server:
fields:
address: {}
as:
fields: "*"
bytes: {}
domain: {}
geo:
fields: "*"
ip: {}
mac: {}
nat:
fields:
ip: {}
port: {}
packets: {}
port: {}
subdomain: {}
registered_domain: {}
top_level_domain: {}
user:
fields:
domain: {}
email: {}
full_name: {}
group:
fields: "*"
hash: {}
id: {}
name: {}
roles: {}
service:
fields: "*"
source:
fields:
address: {}
as:
fields: "*"
bytes: {}
domain: {}
geo:
fields: "*"
ip: {}
mac: {}
nat:
fields:
ip: {}
port: {}
packets: {}
port: {}
subdomain: {}
registered_domain: {}
top_level_domain: {}
user:
fields:
domain: {}
email: {}
full_name: {}
group:
fields: "*"
hash: {}
id: {}
name: {}
roles: {}
threat:
fields: "*"
tls:
fields: "*"
tracing:
fields: "*"
url:
fields: "*"
user_agent:
fields: "*"
user:
fields:
changes:
fields:
domain: {}
email: {}
group:
fields: "*"
full_name: {}
hash: {}
id: {}
name: {}
roles: {}
domain: {}
effective:
fields:
domain: {}
email: {}
group:
fields: "*"
full_name: {}
hash: {}
id: {}
name: {}
roles: {}
email: {}
group:
fields: "*"
full_name: {}
hash: {}
id: {}
name: {}
risk:
fields: "*"
roles: {}
target:
fields:
domain: {}
email: {}
group:
fields: "*"
full_name: {}
hash: {}
id: {}
name: {}
roles: {}
vlan:
fields: "*"
vulnerability:
fields: "*"
x509:
fields: "*"
wazuh:
fields: "*"
check:
fields: "*"
policy:
fields: "*"


@@ -0,0 +1,24 @@
{
"index_patterns": ["wazuh-alerts-5.x-*"],
"order": 1,
"settings": {
"plugins.index_state_management.rollover_alias": "wazuh-alerts",
"mapping.total_fields.limit": 2500,
"index": {
"number_of_shards": "3",
"number_of_replicas": "0",
"auto_expand_replicas": "0-1",
"refresh_interval": "2s",
"query.default_field": [
"agent.host.architecture",
"agent.host.ip",
"agent.id",
"agent.name",
"agent.version",
"wazuh.cluster.name",
"wazuh.cluster.node",
"wazuh.schema.version"
]
}
}
}


@@ -0,0 +1,26 @@
{
"index_patterns": ["wazuh-alerts-5.x-*"],
"priority": 1,
"template": {
"settings": {
"plugins.index_state_management.rollover_alias": "wazuh-alerts",
"mapping.total_fields.limit": 2500,
"index": {
"number_of_shards": "3",
"number_of_replicas": "0",
"auto_expand_replicas": "0-1",
"refresh_interval": "2s",
"query.default_field": [
"agent.host.architecture",
"agent.host.ip",
"agent.id",
"agent.name",
"agent.version",
"wazuh.cluster.name",
"wazuh.cluster.node",
"wazuh.schema.version"
]
}
}
}
}
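
The settings above include `plugins.index_state_management.rollover_alias`, which tells ISM which alias to roll over. Below is a hedged sketch of how such a composable template could be registered and its first write index bootstrapped; it assumes a local indexer with the default `admin:admin` credentials and reads the template body from an illustrative file name, not the repository's real path.

```python
import json
import requests
import urllib3

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

URL = "https://127.0.0.1:9200"     # assumed local indexer
AUTH = ("admin", "admin")          # assumed default credentials

# Load the composable template shown above (illustrative file name).
with open("index-template-alerts.json") as f:
    template = json.load(f)

# Composable templates are registered through the _index_template API.
resp = requests.put(f"{URL}/_index_template/wazuh-alerts",
                    json=template, auth=AUTH, verify=False)
resp.raise_for_status()

# Bootstrap the first index and mark it as the write index of the rollover
# alias so ISM can later roll it over to wazuh-alerts-5.x-000002, and so on.
bootstrap = {"aliases": {"wazuh-alerts": {"is_write_index": True}}}
resp = requests.put(f"{URL}/wazuh-alerts-5.x-000001",
                    json=bootstrap, auth=AUTH, verify=False)
resp.raise_for_status()
```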

File diff suppressed because it is too large.

File diff suppressed because it is too large.