Deleted Logstash and added new Filebeat.yml template
This commit is contained in:
parent 874a05b3ca · commit 3ac36b05f8

@@ -1,25 +0,0 @@
version: 2
jobs:
  test:
    machine:
      python:
        version: 2.7
      services:
        - docker
    working_directory: ~/wazuh-ansible
    steps:
      - checkout
      - run:
          name: Install pipenv
          command: pip install pipenv
      - run:
          name: Install molecule
          command: pipenv install --dev --system
      - run:
          name: Run molecule
          command: pipenv run test
workflows:
  version: 2
  test_molecule:
    jobs:
      - test

.gitignore (vendored)
@@ -4,6 +4,5 @@ wazuh-elastic_stack-distributed.yml
wazuh-elastic_stack-single.yml
wazuh-elastic.yml
wazuh-kibana.yml
wazuh-logstash.yml
wazuh-manager.yml
*.pyc

@@ -1,8 +0,0 @@
language: python
services: docker
before_script:
  - pip install pipenv
  - pipenv install --dev --system
script:
  - pipenv run test
  - pipenv run agent

@@ -126,7 +126,6 @@ Ansible starting point.
Roles:
- Elastic Stack:
    - ansible-elasticsearch: Installs Elasticsearch on the host that runs it.
    - ansible-logstash: Installs Logstash on the host that runs it.
    - ansible-kibana: Installs Kibana on the host that runs it.
- Wazuh:
    - ansible-filebeat: Installs Filebeat on the host that runs it.
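
Taken together, the roles above can be combined in a single play. A minimal sketch (the host group name is a placeholder; the role paths assume the repository's default layout, as used by the playbooks later in this diff):

```
---
- hosts: wazuh-server
  roles:
    - role: wazuh/ansible-wazuh-manager
    - role: wazuh/ansible-filebeat
      filebeat_output_elasticsearch_hosts: ['localhost:9200']
    - role: elastic-stack/ansible-elasticsearch
    - role: elastic-stack/ansible-kibana
```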

@@ -18,7 +18,6 @@ These playbooks install and configure Wazuh agent, manager and Elastic Stack.
│ ├── roles
│ │ ├── elastic-stack
│ │ │ ├── ansible-elasticsearch
│ │ │ ├── ansible-logstash
│ │ │ ├── ansible-kibana
│ │
│ │ ├── wazuh
@@ -35,7 +34,6 @@ These playbooks install and configure Wazuh agent, manager and Elastic Stack.
│ │ ├── wazuh-elastic_stack-distributed.yml
│ │ ├── wazuh-elastic_stack-single.yml
│ │ ├── wazuh-kibana.yml
│ │ ├── wazuh-logstash.yml
│ │ ├── wazuh-manager.yml
│
│ ├── README.md

@@ -4,8 +4,7 @@
  roles:
    - role: wazuh/ansible-wazuh-manager

    # - {role: wazuh/ansible-filebeat} #, filebeat_output_logstash_hosts: 'your elastic stack server IP'
    # - {role: wazuh/ansible-filebeat} #, filebeat_output_elasticsearch_hosts: 'your elastic stack server IP'
    # Elasticsearch requires too much memory to test multiple containers concurrently - To Fix
    # - {role: elastic-stack/ansible-elasticsearch, elasticsearch_network_host: 'localhost'}
    # - {role: elastic-stack/ansible-logstash, logstash_input_beats: true, elasticsearch_network_host: 'localhost'}
    # - {role: elastic-stack/ansible-kibana, elasticsearch_network_host: 'localhost'}

@@ -1,4 +0,0 @@
---
- hosts: <your logstash host>
  roles:
    - {role: /etc/ansible/roles/wazuh-ansible/roles/elastic-stack/ansible-logstash, elasticsearch_network_host: ["localhost"]}

@@ -2,4 +2,4 @@
- hosts: <your wazuh server host>
  roles:
    - role: /etc/ansible/roles/wazuh-ansible/roles/wazuh/ansible-wazuh-manager
    - {role: /etc/ansible/roles/wazuh-ansible/roles/wazuh/ansible-filebeat, filebeat_output_logstash_hosts: 'your logstash IP'}
    - {role: /etc/ansible/roles/wazuh-ansible/roles/wazuh/ansible-filebeat, filebeat_output_elasticsearch_hosts: 'your elasticsearch IP'}
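
To apply this playbook, a hedged sketch of a matching inventory (the group name and address are placeholders, not part of the commit):

```
# hypothetical inventory.yml — group name and IP are illustrative only
all:
  children:
    wazuh_server:
      hosts:
        192.168.0.10:

# typical invocation: ansible-playbook -i inventory.yml wazuh-manager.yml
```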

@@ -1,53 +0,0 @@
Ansible Role: Logstash
----------------------

An Ansible role that installs [Logstash](https://www.elastic.co/products/logstash).

Requirements
------------

This role will work on:
* Red Hat
* CentOS
* Fedora
* Debian
* Ubuntu

Role Variables
--------------

```
---
logstash_create_config: true
logstash_input_beats: false

elasticsearch_network_host: "127.0.0.1"
elasticsearch_http_port: "9200"
elastic_stack_version: 5.5.0

logstash_ssl: false
logstash_ssl_dir: /etc/pki/logstash
logstash_ssl_certificate_file: ""
logstash_ssl_key_file: ""
```

Example Playbook
----------------

```
- hosts: logstash
  roles:
    - { role: ansible-role-logstash, elasticsearch_network_host: '192.168.33.182' }
```

License and copyright
---------------------

WAZUH Copyright (C) 2017 Wazuh Inc. (License GPLv3)

### Based on previous work from geerlingguy

- https://github.com/geerlingguy/ansible-role-elasticsearch

### Modified by Wazuh

The playbooks have been modified by Wazuh, adding specific requirements, templates and configuration to improve integration with the Wazuh ecosystem.

@@ -1,19 +0,0 @@
---
logstash_create_config: true
logstash_input_beats: false

# You can introduce multiple IPs
# elasticsearch_network_host: ["Localhost1", "Localhost2", "Localhost3", ...]
elasticsearch_network_host: ["Localhost"]

elasticsearch_http_port: "9200"
elasticsearch_shards: 5
elasticsearch_replicas: 1
elastic_stack_version: 7.1.1

logstash_ssl: false
logstash_ssl_dir: /etc/pki/logstash
logstash_ssl_certificate_file: ""
logstash_ssl_key_file: ""

logstash_install_java: true

@@ -1,3 +0,0 @@
---
- name: restart logstash
  service: name=logstash state=restarted

@@ -1,24 +0,0 @@
---
galaxy_info:
  author: Wazuh
  description: Installing and maintaining Logstash server.
  company: wazuh.com
  license: license (GPLv3)
  min_ansible_version: 2.0
  platforms:
    - name: EL
      versions:
        - all
    - name: Fedora
      versions:
        - all
    - name: Debian
      versions:
        - all
    - name: Ubuntu
      versions:
        - all
  galaxy_tags:
    - web
    - system
    - monitoring

@@ -1,45 +0,0 @@
---
- name: Debian/Ubuntu | Install apt-transport-https and ca-certificates
  apt:
    name: ['apt-transport-https', 'ca-certificates']
    state: present

- when: logstash_install_java
  block:
    - name: Debian/Ubuntu | Install OpenJDK 1.8
      apt: name=openjdk-8-jre state=present cache_valid_time=3600
      tags: install

- name: Debian/Ubuntu | Add Elasticsearch GPG key
  apt_key:
    url: "https://artifacts.elastic.co/GPG-KEY-elasticsearch"
    state: present

- name: Debian/Ubuntu | Install Elasticsearch repo
  apt_repository:
    repo: 'deb https://artifacts.elastic.co/packages/7.x/apt stable main'
    state: present
    filename: 'elastic_repo'

- name: Debian/Ubuntu | Install Logstash
  apt:
    name: "logstash=1:{{ elastic_stack_version }}-1"
    state: present
    update_cache: true
  tags: install

- name: Debian/Ubuntu | Checking if wazuh-manager is installed
  command: dpkg -l wazuh-manager
  register: wazuh_manager_check_deb
  when: logstash_input_beats == false
  args:
    warn: false

- name: Debian/Ubuntu | Add user logstash to group ossec
  user:
    name: logstash
    groups: ossec
    append: true
  when:
    - logstash_input_beats == false
    - wazuh_manager_check_deb.rc == 0

@@ -1,5 +0,0 @@
---
- name: Debian/Ubuntu | Removing Elasticsearch repository
  apt_repository:
    repo: deb https://artifacts.elastic.co/packages/5.x/apt stable main
    state: absent

@@ -1,5 +0,0 @@
---
- name: RedHat/CentOS/Fedora | Remove logstash repository (and clean up left-over metadata)
  yum_repository:
    name: elastic_repo
    state: absent

@@ -1,43 +0,0 @@
---
- when: logstash_install_java
  block:
    - name: RedHat/CentOS/Fedora | Install OpenJDK 1.8
      yum: name=java-1.8.0-openjdk state=present
      register: oracle_java_task_rpm_installed
      tags: install

- name: RedHat/CentOS/Fedora | Install Logstash repo
  yum_repository:
    name: elastic_repo
    description: Elastic repository for 7.x packages
    baseurl: https://artifacts.elastic.co/packages/7.x/yum
    gpgkey: https://artifacts.elastic.co/GPG-KEY-elasticsearch
    gpgcheck: true

- name: RedHat/CentOS/Fedora | Install Logstash
  package: name=logstash-{{ elastic_stack_version }} state=present
  when: not logstash_install_java or oracle_java_task_rpm_installed is defined
  tags: install

- name: RedHat/CentOS/Fedora | Checking if wazuh-manager is installed
  command: rpm -q wazuh-manager
  register: wazuh_manager_check_rpm
  when: logstash_input_beats == false
  args:
    warn: false

- name: RedHat/CentOS/Fedora | Add user logstash to group ossec
  user:
    name: logstash
    groups: ossec
    append: true
  when:
    - logstash_input_beats == false
    - wazuh_manager_check_rpm.rc == 0

- name: Amazon Linux change startup group
  shell: sed -i 's/.*LS_GROUP=logstash.*/LS_GROUP=ossec/' /etc/logstash/startup.options
  when:
    - logstash_input_beats == false
    - wazuh_manager_check_rpm.rc == 0
    - ansible_distribution == "Amazon" and ansible_distribution_major_version == "NA"

@@ -1,27 +0,0 @@
---
- name: Ensure Logstash SSL key pair directory exists.
  file:
    path: "{{ logstash_ssl_dir }}"
    state: directory
  when: logstash_ssl
  tags: configure

- name: Copy SSL key and cert for logstash.
  copy:
    src: "{{ item }}"
    dest: "{{ logstash_ssl_dir }}/{{ item | basename }}"
    mode: 0644
  with_items:
    - "{{ logstash_ssl_key_file }}"
    - "{{ logstash_ssl_certificate_file }}"
  when: logstash_ssl
  tags: configure

- name: Logstash configuration
  template:
    src: 01-wazuh.conf.j2
    dest: /etc/logstash/conf.d/01-wazuh.conf
    owner: root
    group: root
  notify: restart logstash
  tags: configure

@@ -1,40 +0,0 @@
---
- import_tasks: RedHat.yml
  when: ansible_os_family == 'RedHat'

- import_tasks: Debian.yml
  when: ansible_os_family == "Debian"

- import_tasks: config.yml
  when: logstash_create_config

- name: Reload systemd
  systemd: daemon_reload=yes
  ignore_errors: true
  when:
    - not (ansible_distribution == "Amazon" and ansible_distribution_major_version == "NA")
    - not (ansible_distribution == "Ubuntu" and ansible_distribution_version is version('15.04', '<'))
    - not (ansible_distribution == "Debian" and ansible_distribution_version is version('8', '<'))

- name: Amazon Linux create service
  shell: /usr/share/logstash/bin/system-install /etc/logstash/startup.options
  when: ansible_distribution == "Amazon" and ansible_distribution_major_version == "NA"

- name: Ensure Logstash started and enabled
  service:
    name: logstash
    enabled: true
    state: started

- name: Amazon Linux start Logstash
  service:
    name: logstash
    enabled: true
    state: started
  when: ansible_distribution == "Amazon" and ansible_distribution_major_version == "NA"

- import_tasks: "RMRedHat.yml"
  when: ansible_os_family == "RedHat"

- import_tasks: "RMDebian.yml"
  when: ansible_os_family == "Debian"

@@ -1,73 +0,0 @@
#jinja2: trim_blocks:False
# {{ ansible_managed }}
# Wazuh - Logstash configuration file

{% if logstash_input_beats == true %}
## Remote Wazuh Manager - Filebeat input
input {
  beats {
    port => 5000
    codec => "json_lines"
{% if logstash_ssl == true %}
    ssl => true
    ssl_certificate => "{{ logstash_ssl_dir }}/{{ logstash_ssl_certificate_file | basename }}"
    ssl_key => "{{ logstash_ssl_dir }}/{{ logstash_ssl_key_file | basename }}"
{% endif %}
  }
}
{% else %}
## Local Wazuh Manager - JSON file input
input {
  file {
    type => "wazuh-alerts"
    path => "/var/ossec/logs/alerts/alerts.json"
    codec => "json"
  }
}
{% endif %}
filter {
  if [data][srcip] {
    mutate {
      add_field => [ "@src_ip", "%{[data][srcip]}" ]
    }
  }
  if [data][aws][sourceIPAddress] {
    mutate {
      add_field => [ "@src_ip", "%{[data][aws][sourceIPAddress]}" ]
    }
  }
}
filter {
  geoip {
    source => "@src_ip"
    target => "GeoLocation"
    fields => ["city_name", "country_name", "region_name", "location"]
  }
  date {
    match => ["timestamp", "ISO8601"]
    target => "@timestamp"
  }
  mutate {
    remove_field => [ "timestamp", "beat", "input_type", "tags", "count", "@version", "log", "offset", "type", "@src_ip", "host"]
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch {
    hosts => {{ elasticsearch_network_host | to_json }}
    index => "wazuh-alerts-3.x-%{+YYYY.MM.dd}"
    document_type => "wazuh"
  }
}

@@ -19,34 +19,10 @@ Role Variables
Available variables are listed below, along with default values (see `defaults/main.yml`):

```
filebeat_create_config: true

filebeat_prospectors:
  - input_type: log
    paths:
      - "/var/ossec/logs/alerts/alerts.json"
    document_type: json
    json.message_key: log
    json.keys_under_root: true
    json.overwrite_keys: true

filebeat_output_elasticsearch_enabled: false
filebeat_output_elasticsearch_hosts:
  - "localhost:9200"

filebeat_output_logstash_enabled: true
filebeat_output_logstash_hosts:
  - "192.168.212.158:5000"

filebeat_enable_logging: true
filebeat_log_level: debug
filebeat_log_dir: /var/log/mybeat
filebeat_log_filename: mybeat.log

filebeat_ssl_dir: /etc/pki/logstash
filebeat_ssl_certificate_file: ""
filebeat_ssl_key_file: ""
filebeat_ssl_insecure: "false"
```
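
A minimal usage sketch for this role with the Elasticsearch output enabled (the host pattern is a placeholder; variable names are taken from the defaults above):

```
---
- hosts: wazuh-manager
  roles:
    - role: wazuh/ansible-filebeat
      filebeat_output_elasticsearch_enabled: true
      filebeat_output_elasticsearch_hosts:
        - "localhost:9200"
```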

License and copyright

@@ -14,16 +14,12 @@ filebeat_output_elasticsearch_enabled: false
filebeat_output_elasticsearch_hosts:
  - "localhost:9200"

filebeat_output_logstash_enabled: true
filebeat_output_logstash_hosts:
  - "192.168.212.158:5000"

filebeat_enable_logging: true
filebeat_log_level: debug
filebeat_log_dir: /var/log/mybeat
filebeat_log_filename: mybeat.log

filebeat_ssl_dir: /etc/pki/logstash
filebeat_ssl_dir: /etc/pki/filebeat
filebeat_ssl_certificate_file: ""
filebeat_ssl_key_file: ""
filebeat_ssl_insecure: "false"

@@ -1,150 +1,58 @@
filebeat:
  # List of prospectors to fetch data.
  prospectors:
{{ filebeat_prospectors | to_json }}
# Wazuh - Filebeat configuration file

# Configure what outputs to use when sending the data collected by the beat.
# Multiple outputs may be used.
output:
filebeat.inputs:
  - type: log
    paths:
      - '/var/ossec/logs/alerts/alerts.json'

{% if filebeat_output_elasticsearch_enabled %}
  ### Elasticsearch as output
  elasticsearch:
    # Array of hosts to connect to.
setup.template.json.enabled: true
setup.template.json.path: "/etc/filebeat/wazuh-template.json"
setup.template.json.name: "wazuh"
setup.template.overwrite: true

processors:
  - decode_json_fields:
      fields: ['message']
      process_array: true
      max_depth: 200
      target: ''
      overwrite_keys: true
  - drop_fields:
      fields: ['message', 'ecs', 'beat', 'input_type', 'tags', 'count', '@version', 'log', 'offset', 'type', 'host']
  - rename:
      fields:
        - from: "data.aws.sourceIPAddress"
          to: "@src_ip"
      ignore_missing: true
      fail_on_error: false
      when:
        regexp:
          data.aws.sourceIPAddress: \b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b
  - rename:
      fields:
        - from: "data.srcip"
          to: "@src_ip"
      ignore_missing: true
      fail_on_error: false
      when:
        regexp:
          data.srcip: \b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b
  - rename:
      fields:
        - from: "data.win.eventdata.ipAddress"
          to: "@src_ip"
      ignore_missing: true
      fail_on_error: false
      when:
        regexp:
          data.win.eventdata.ipAddress: \b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b

# Send events directly to Elasticsearch
output.elasticsearch:
  hosts: {{ filebeat_output_elasticsearch_hosts | to_json }}
  #pipeline: geoip
  indices:
    - index: 'wazuh-alerts-3.x-%{+yyyy.MM.dd}'

    # Optional protocol and basic auth credentials. These are deprecated.
    #protocol: "https"
    #username: "admin"
    #password: "s3cr3t"

    # Number of workers per Elasticsearch host.
    #worker: 1

    # Optional index name. The default is "filebeat" and generates
    # [filebeat-]YYYY.MM.DD keys.
    #index: "filebeat"

    # Optional HTTP Path
    #path: "/elasticsearch"

    # Proxy server URL
    # proxy_url: http://proxy:3128

    # The number of times a particular Elasticsearch index operation is attempted. If
    # the indexing operation doesn't succeed after this many retries, the events are
    # dropped. The default is 3.
    #max_retries: 3

    # The maximum number of events to bulk in a single Elasticsearch bulk API index request.
    # The default is 50.
    #bulk_max_size: 50

    # Configure HTTP request timeout before failing a request to Elasticsearch.
    #timeout: 90

    # The number of seconds to wait for new events between two bulk API index requests.
    # If `bulk_max_size` is reached before this interval expires, additional bulk index
    # requests are made.
    #flush_interval: 1

    # Boolean that sets whether the topology is kept in Elasticsearch. The default is
    # false. This option makes sense only for Packetbeat.
    #save_topology: false

    # The time to live in seconds for the topology information that is stored in
    # Elasticsearch. The default is 15 seconds.
    #topology_expire: 15

{% if filebeat_ssl_certificate_file and filebeat_ssl_key_file %}
    # TLS configuration. By default it is off.
    tls:
      # List of root certificates for HTTPS server verifications
      #certificate_authorities: ["/etc/pki/root/ca.pem"]

      # Certificate for TLS client authentication
      certificate: "{{ filebeat_ssl_dir }}/{{ filebeat_ssl_certificate_file | basename }}"

      # Client certificate key
      certificate_key: "{{ filebeat_ssl_dir }}/{{ filebeat_ssl_key_file | basename }}"

      # Controls whether the client verifies server certificates and host names.
      # If insecure is set to true, all server host names and certificates will be
      # accepted. In this mode TLS-based connections are susceptible to
      # man-in-the-middle attacks. Use only for testing.
      insecure: {{ filebeat_ssl_insecure }}

      # Configure cipher suites to be used for TLS connections
      #cipher_suites: []

      # Configure curve types for ECDHE-based cipher suites
      #curve_types: []

      # Configure minimum TLS version allowed for connection to logstash
      #min_version: 1.0

      # Configure maximum TLS version allowed for connection to logstash
      #max_version: 1.2
{% endif %}
{% endif %}

{% if filebeat_output_logstash_enabled %}
  ### Logstash as output
  logstash:
    # The Logstash hosts
    hosts: {{ filebeat_output_logstash_hosts | to_json }}

    # Number of workers per Logstash host.
    #worker: 1

    # Optionally load-balance the events between the Logstash hosts
    #loadbalance: true

    # Optional index name. The default index name depends on each beat.
    # For Packetbeat, the default is set to packetbeat, for Topbeat
    # to topbeat and for Filebeat to filebeat.
    #index: filebeat

{% if filebeat_ssl_certificate_file and filebeat_ssl_key_file %}
    # Optional TLS. By default it is off.
    tls:
      # List of root certificates for HTTPS server verifications
      #certificate_authorities: ["/etc/pki/root/ca.pem"]

      # Certificate for TLS client authentication
      certificate: "{{ filebeat_ssl_dir }}/{{ filebeat_ssl_certificate_file | basename }}"

      # Client certificate key
      certificate_key: "{{ filebeat_ssl_dir }}/{{ filebeat_ssl_key_file | basename }}"

      # Controls whether the client verifies server certificates and host names.
      # If insecure is set to true, all server host names and certificates will be
      # accepted. In this mode TLS-based connections are susceptible to
      # man-in-the-middle attacks. Use only for testing.
      #insecure: true
      insecure: {{ filebeat_ssl_insecure }}

      # Configure cipher suites to be used for TLS connections
      #cipher_suites: []

      # Configure curve types for ECDHE-based cipher suites
      #curve_types: []
{% endif %}

{% if filebeat_enable_logging %}
logging:
  ### Filebeat log
  level: {{ filebeat_log_level }}

  # Enable file rotation with default configuration
  to_files: true

  # Do not log to syslog
  to_syslog: false

  files:
    path: {{ filebeat_log_dir }}
    name: {{ filebeat_log_filename }}
    keepfiles: 7
{% endif %}
{% endif %}
# Optional. Send events to Logstash instead of Elasticsearch
#output.logstash.hosts: ["YOUR_LOGSTASH_SERVER_IP:5000"]
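
For orientation, a sketch of roughly what the new template renders to with the role defaults shown earlier (Elasticsearch output assumed enabled; processors, logging and TLS omitted for brevity — this is an illustration, not output taken from the commit):

```
# Wazuh - Filebeat configuration file
filebeat.inputs:
  - type: log
    paths:
      - '/var/ossec/logs/alerts/alerts.json'

setup.template.json.enabled: true
setup.template.json.path: "/etc/filebeat/wazuh-template.json"
setup.template.json.name: "wazuh"
setup.template.overwrite: true

output.elasticsearch:
  hosts: ["localhost:9200"]
  indices:
    - index: 'wazuh-alerts-3.x-%{+yyyy.MM.dd}'
```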

@@ -1,4 +1,3 @@
---
- src: geerlingguy.java
- src: geerlingguy.elasticsearch
- src: geerlingguy.logstash

@@ -17,5 +17,4 @@
  roles:
    - geerlingguy.java
    - geerlingguy.elasticsearch
    - geerlingguy.logstash
    - role_under_test