Merge pull request #468 from wazuh/feature-420-ansible-odfe-documentation
ODFE documentation
This commit is contained in:
commit
0621572b26
294
README.md
294
README.md
@ -7,6 +7,10 @@
|
||||
|
||||
These playbooks install and configure Wazuh agent, manager and Elastic Stack.
|
||||
|
||||
## Branches
|
||||
* `master` branch corresponds to the latest Wazuh Ansible changes. It might be unstable.
|
||||
* `3.13` branch corresponds to the latest Wazuh Ansible stable version.
|
||||
|
||||
## Documentation
|
||||
|
||||
* [Wazuh Ansible documentation](https://documentation.wazuh.com/current/deploying-with-ansible/index.html)
|
||||
@ -20,8 +24,13 @@ These playbooks install and configure Wazuh agent, manager and Elastic Stack.
|
||||
│ │ │ ├── ansible-elasticsearch
|
||||
│ │ │ ├── ansible-kibana
|
||||
│ │
|
||||
│ │ ├── opendistro
|
||||
│ │ │ ├── opendistro-elasticsearch
|
||||
│ │ │ ├── opendistro-kibana
|
||||
│ │
|
||||
│ │ ├── wazuh
|
||||
│ │ │ ├── ansible-filebeat
|
||||
│ │ │ ├── ansible-filebeat-oss
|
||||
│ │ │ ├── ansible-wazuh-manager
|
||||
│ │ │ ├── ansible-wazuh-agent
|
||||
│ │
|
||||
@ -35,40 +44,293 @@ These playbooks install and configure Wazuh agent, manager and Elastic Stack.
|
||||
│ │ ├── wazuh-elastic_stack-single.yml
|
||||
│ │ ├── wazuh-kibana.yml
|
||||
│ │ ├── wazuh-manager.yml
|
||||
│ │ ├── wazuh-manager-oss.yml
|
||||
│ │ ├── wazuh-opendistro.yml
|
||||
│ │ ├── wazuh-opendistro-kibana.yml
|
||||
│
|
||||
│ ├── README.md
|
||||
│ ├── VERSION
|
||||
│ ├── CHANGELOG.md
|
||||
|
||||
|
||||
## Branches
|
||||
## Example: production-ready distributed environment
|
||||
|
||||
* `stable` branch corresponds to the latest Wazuh-Ansible stable version.
|
||||
* `master` branch contains the latest code, be aware of possible bugs on this branch.
|
||||
### Playbook
|
||||
The following example playbook uses the `wazuh-ansible` role to provision a production-ready Wazuh environment. The architecture includes 2 Wazuh nodes, 3 ODFE nodes and a mixed ODFE-Kibana node.
|
||||
|
||||
## Testing
|
||||
```yaml
|
||||
---
|
||||
# Certificates generation
|
||||
- hosts: es1
|
||||
roles:
|
||||
- role: ../roles/opendistro/opendistro-elasticsearch
|
||||
elasticsearch_network_host: "{{ private_ip }}"
|
||||
elasticsearch_cluster_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
elasticsearch_discovery_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
perform_installation: false
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
elasticsearch_node_master: true
|
||||
instances:
|
||||
node1:
|
||||
name: node-1 # Important: must be equal to elasticsearch_node_name.
|
||||
ip: "{{ hostvars.es1.private_ip }}" # When unzipping, the node will search for its node name folder to get the cert.
|
||||
node2:
|
||||
name: node-2
|
||||
ip: "{{ hostvars.es2.private_ip }}"
|
||||
node3:
|
||||
name: node-3
|
||||
ip: "{{ hostvars.es3.private_ip }}"
|
||||
node4:
|
||||
name: node-4
|
||||
ip: "{{ hostvars.manager.private_ip }}"
|
||||
node5:
|
||||
name: node-5
|
||||
ip: "{{ hostvars.worker.private_ip }}"
|
||||
node6:
|
||||
name: node-6
|
||||
ip: "{{ hostvars.kibana.private_ip }}"
|
||||
tags:
|
||||
- generate-certs
|
||||
|
||||
1. Get the `wazuh-ansible` folder from the `wazuh-qa` [repository](https://github.com/wazuh/wazuh-qa/tree/master/ansible/wazuh-ansible).
|
||||
#ODFE Cluster
|
||||
- hosts: odfe_cluster
|
||||
strategy: free
|
||||
roles:
|
||||
- role: ../roles/opendistro/opendistro-elasticsearch
|
||||
elasticsearch_network_host: "{{ private_ip }}"
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
elasticsearch_cluster_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
elasticsearch_discovery_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
elasticsearch_node_master: true
|
||||
instances:
|
||||
node1:
|
||||
name: node-1 # Important: must be equal to elasticsearch_node_name.
|
||||
ip: "{{ hostvars.es1.private_ip }}" # When unzipping, the node will search for its node name folder to get the cert.
|
||||
node2:
|
||||
name: node-2
|
||||
ip: "{{ hostvars.es2.private_ip }}"
|
||||
node3:
|
||||
name: node-3
|
||||
ip: "{{ hostvars.es3.private_ip }}"
|
||||
node4:
|
||||
name: node-4
|
||||
ip: "{{ hostvars.manager.private_ip }}"
|
||||
node5:
|
||||
name: node-5
|
||||
ip: "{{ hostvars.worker.private_ip }}"
|
||||
node6:
|
||||
name: node-6
|
||||
ip: "{{ hostvars.kibana.private_ip }}"
|
||||
|
||||
```
|
||||
git clone https://github.com/wazuh/wazuh-qa
|
||||
#Wazuh cluster
|
||||
- hosts: manager
|
||||
roles:
|
||||
- role: "../roles/wazuh/ansible-wazuh-manager"
|
||||
- role: "../roles/wazuh/ansible-filebeat-oss"
|
||||
filebeat_node_name: node-4
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
wazuh_manager_config:
|
||||
connection:
|
||||
- type: 'secure'
|
||||
port: '1514'
|
||||
protocol: 'tcp'
|
||||
queue_size: 131072
|
||||
api:
|
||||
https: 'yes'
|
||||
cluster:
|
||||
disable: 'no'
|
||||
node_name: 'master'
|
||||
node_type: 'master'
|
||||
nodes:
|
||||
- '"{{ hostvars.manager.private_ip }}"'
|
||||
hidden: 'no'
|
||||
filebeat_output_elasticsearch_hosts:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
|
||||
- hosts: worker
|
||||
roles:
|
||||
- role: "../roles/wazuh/ansible-wazuh-manager"
|
||||
- role: "../roles/wazuh/ansible-filebeat-oss"
|
||||
filebeat_node_name: node-5
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
wazuh_manager_config:
|
||||
connection:
|
||||
- type: 'secure'
|
||||
port: '1514'
|
||||
protocol: 'tcp'
|
||||
queue_size: 131072
|
||||
api:
|
||||
https: 'yes'
|
||||
cluster:
|
||||
disable: 'no'
|
||||
node_name: 'worker_01'
|
||||
node_type: 'worker'
|
||||
key: 'c98b62a9b6169ac5f67dae55ae4a9088'
|
||||
nodes:
|
||||
- '"{{ hostvars.manager.private_ip }}"'
|
||||
hidden: 'no'
|
||||
filebeat_output_elasticsearch_hosts:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
|
||||
#ODFE+Kibana node
|
||||
- hosts: kibana
|
||||
roles:
|
||||
- role: "../roles/opendistro/opendistro-elasticsearch"
|
||||
- role: "../roles/opendistro/opendistro-kibana"
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
elasticsearch_network_host: "{{ hostvars.kibana.private_ip }}"
|
||||
elasticsearch_node_name: node-6
|
||||
elasticsearch_node_master: false
|
||||
elasticsearch_node_ingest: false
|
||||
elasticsearch_node_data: false
|
||||
elasticsearch_cluster_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
elasticsearch_discovery_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
kibana_node_name: node-6
|
||||
wazuh_api_credentials:
|
||||
- id: default
|
||||
url: https://{{ hostvars.manager.private_ip }}
|
||||
port: 55000
|
||||
user: foo
|
||||
password: bar
|
||||
instances:
|
||||
node1:
|
||||
name: node-1 # Important: must be equal to elasticsearch_node_name.
|
||||
ip: "{{ hostvars.es1.private_ip }}" # When unzipping, the node will search for its node name folder to get the cert.
|
||||
node2:
|
||||
name: node-2
|
||||
ip: "{{ hostvars.es2.private_ip }}"
|
||||
node3:
|
||||
name: node-3
|
||||
ip: "{{ hostvars.es3.private_ip }}"
|
||||
node4:
|
||||
name: node-4
|
||||
ip: "{{ hostvars.manager.private_ip }}"
|
||||
node5:
|
||||
name: node-5
|
||||
ip: "{{ hostvars.worker.private_ip }}"
|
||||
node6:
|
||||
name: node-6
|
||||
ip: "{{ hostvars.kibana.private_ip }}"
|
||||
```
|
||||
|
||||
2. Copy the `Pipfile` and the `molecule` folder into the root wazuh-ansible directory:
|
||||
### Inventory file
|
||||
|
||||
```
|
||||
cp wazuh-qa/ansible/wazuh-ansible/* . -R
|
||||
- The `ansible_host` variable should contain the `address/FQDN` used to gather facts and provision each node.
|
||||
- The `private_ip` variable should contain the `address/FQDN` used for the internal cluster communications.
|
||||
- If the environment is located in a local subnet, the `ansible_host` and `private_ip` variables should match.
|
||||
- The ssh credentials used by Ansible during the provision can be specified in this file too. Another option is including them directly on the playbook.
|
||||
|
||||
```ini
|
||||
es1 ansible_host=<es1_ec2_public_ip> private_ip=<es1_ec2_private_ip> elasticsearch_node_name=node-1
|
||||
es2 ansible_host=<es2_ec2_public_ip> private_ip=<es2_ec2_private_ip> elasticsearch_node_name=node-2
|
||||
es3 ansible_host=<es3_ec2_public_ip> private_ip=<es3_ec2_private_ip> elasticsearch_node_name=node-3
|
||||
kibana ansible_host=<kibana_node_public_ip> private_ip=<kibana_ec2_private_ip>
|
||||
manager ansible_host=<manager_node_public_ip> private_ip=<manager_ec2_private_ip>
|
||||
worker ansible_host=<worker_node_public_ip> private_ip=<worker_ec2_private_ip>
|
||||
|
||||
[odfe_cluster]
|
||||
es1
|
||||
es2
|
||||
es3
|
||||
|
||||
[all:vars]
|
||||
ansible_ssh_user=vagrant
|
||||
ansible_ssh_private_key_file=/path/to/ssh/key.pem
|
||||
ansible_ssh_extra_args='-o StrictHostKeyChecking=no'
|
||||
```
|
||||
|
||||
3. Follow these steps for launching the tests. Check the Pipfile for running different scenarios:
|
||||
### Launching the playbook
|
||||
|
||||
```bash
|
||||
ansible-playbook wazuh-odfe-production-ready.yml -i inventory
|
||||
```
|
||||
pip install pipenv
|
||||
sudo pipenv install
|
||||
pipenv run test
|
||||
pipenv run agent
|
||||
|
||||
After the playbook execution, the Wazuh UI should be reachable through `https://<kibana_host>:5601`
|
||||
|
||||
## Example: single-host environment
|
||||
|
||||
### Playbook
|
||||
The following example playbook uses the `wazuh-ansible` role to provision a single-host Wazuh environment. This architecture includes all the Wazuh and ODFE components in a single node.
|
||||
|
||||
```yaml
|
||||
---
|
||||
# Single node
|
||||
- hosts: server
|
||||
become: yes
|
||||
become_user: root
|
||||
roles:
|
||||
- role: ../roles/opendistro/opendistro-elasticsearch
|
||||
- role: "../roles/wazuh/ansible-wazuh-manager"
|
||||
- role: "../roles/wazuh/ansible-filebeat-oss"
|
||||
- role: "../roles/opendistro/opendistro-kibana"
|
||||
vars:
|
||||
single_node: true
|
||||
minimum_master_nodes: 1
|
||||
elasticsearch_node_master: true
|
||||
elasticsearch_network_host: <your server host>
|
||||
filebeat_node_name: node-1
|
||||
filebeat_output_elasticsearch_hosts: <your server host>
|
||||
ansible_ssh_user: vagrant
|
||||
ansible_ssh_private_key_file: /path/to/ssh/key.pem
|
||||
ansible_ssh_extra_args: '-o StrictHostKeyChecking=no'
|
||||
instances:
|
||||
node1:
|
||||
name: node-1 # Important: must be equal to elasticsearch_node_name.
|
||||
ip: <your server host>
|
||||
```
|
||||
|
||||
### Inventory file
|
||||
|
||||
```ini
|
||||
[server]
|
||||
<your server host>
|
||||
|
||||
[all:vars]
|
||||
ansible_ssh_user=vagrant
|
||||
ansible_ssh_private_key_file=/path/to/ssh/key.pem
|
||||
ansible_ssh_extra_args='-o StrictHostKeyChecking=no'
|
||||
```
|
||||
|
||||
### Launching the playbook
|
||||
|
||||
```bash
|
||||
ansible-playbook wazuh-odfe-single.yml -i inventory
|
||||
```
|
||||
|
||||
After the playbook execution, the Wazuh UI should be reachable through `https://<your server host>:5601`
|
||||
|
||||
## Contribute
|
||||
|
||||
If you want to contribute to our repository, please fork our Github repository and submit a pull request.
|
||||
@ -88,7 +350,7 @@ https://github.com/dj-wasabi/ansible-ossec-server
|
||||
## License and copyright
|
||||
|
||||
WAZUH
|
||||
Copyright (C) 2016-2018 Wazuh Inc. (License GPLv2)
|
||||
Copyright (C) 2016-2020 Wazuh Inc. (License GPLv2)
|
||||
|
||||
## Web references
|
||||
|
||||
|
||||
184
playbooks/wazuh-odfe-production-ready.yml
Normal file
184
playbooks/wazuh-odfe-production-ready.yml
Normal file
@ -0,0 +1,184 @@
|
||||
---
|
||||
# Certificates generation
|
||||
- hosts: es1
|
||||
roles:
|
||||
- role: ../roles/opendistro/opendistro-elasticsearch
|
||||
elasticsearch_network_host: "{{ private_ip }}"
|
||||
elasticsearch_cluster_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
elasticsearch_discovery_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
perform_installation: false
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
elasticsearch_node_master: true
|
||||
instances:
|
||||
node1:
|
||||
name: node-1 # Important: must be equal to elasticsearch_node_name.
|
||||
ip: "{{ hostvars.es1.private_ip }}" # When unzipping, the node will search for its node name folder to get the cert.
|
||||
node2:
|
||||
name: node-2
|
||||
ip: "{{ hostvars.es2.private_ip }}"
|
||||
node3:
|
||||
name: node-3
|
||||
ip: "{{ hostvars.es3.private_ip }}"
|
||||
node4:
|
||||
name: node-4
|
||||
ip: "{{ hostvars.manager.private_ip }}"
|
||||
node5:
|
||||
name: node-5
|
||||
ip: "{{ hostvars.worker.private_ip }}"
|
||||
node6:
|
||||
name: node-6
|
||||
ip: "{{ hostvars.kibana.private_ip }}"
|
||||
tags:
|
||||
- generate-certs
|
||||
|
||||
#ODFE Cluster
|
||||
- hosts: odfe_cluster
|
||||
strategy: free
|
||||
roles:
|
||||
- role: ../roles/opendistro/opendistro-elasticsearch
|
||||
elasticsearch_network_host: "{{ private_ip }}"
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
elasticsearch_cluster_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
elasticsearch_discovery_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
elasticsearch_node_master: true
|
||||
instances:
|
||||
node1:
|
||||
name: node-1 # Important: must be equal to elasticsearch_node_name.
|
||||
ip: "{{ hostvars.es1.private_ip }}" # When unzipping, the node will search for its node name folder to get the cert.
|
||||
node2:
|
||||
name: node-2
|
||||
ip: "{{ hostvars.es2.private_ip }}"
|
||||
node3:
|
||||
name: node-3
|
||||
ip: "{{ hostvars.es3.private_ip }}"
|
||||
node4:
|
||||
name: node-4
|
||||
ip: "{{ hostvars.manager.private_ip }}"
|
||||
node5:
|
||||
name: node-5
|
||||
ip: "{{ hostvars.worker.private_ip }}"
|
||||
node6:
|
||||
name: node-6
|
||||
ip: "{{ hostvars.kibana.private_ip }}"
|
||||
|
||||
#Wazuh cluster
|
||||
- hosts: manager
|
||||
roles:
|
||||
- role: "../roles/wazuh/ansible-wazuh-manager"
|
||||
- role: "../roles/wazuh/ansible-filebeat-oss"
|
||||
filebeat_node_name: node-4
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
wazuh_manager_config:
|
||||
connection:
|
||||
- type: 'secure'
|
||||
port: '1514'
|
||||
protocol: 'tcp'
|
||||
queue_size: 131072
|
||||
api:
|
||||
https: 'yes'
|
||||
cluster:
|
||||
disable: 'no'
|
||||
node_name: 'master'
|
||||
node_type: 'master'
|
||||
nodes:
|
||||
- '"{{ hostvars.manager.private_ip }}"'
|
||||
hidden: 'no'
|
||||
filebeat_output_elasticsearch_hosts:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
|
||||
- hosts: worker
|
||||
roles:
|
||||
- role: "../roles/wazuh/ansible-wazuh-manager"
|
||||
- role: "../roles/wazuh/ansible-filebeat-oss"
|
||||
filebeat_node_name: node-5
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
wazuh_manager_config:
|
||||
connection:
|
||||
- type: 'secure'
|
||||
port: '1514'
|
||||
protocol: 'tcp'
|
||||
queue_size: 131072
|
||||
api:
|
||||
https: 'yes'
|
||||
cluster:
|
||||
disable: 'no'
|
||||
node_name: 'worker_01'
|
||||
node_type: 'worker'
|
||||
key: 'c98b62a9b6169ac5f67dae55ae4a9088'
|
||||
nodes:
|
||||
- '"{{ hostvars.manager.private_ip }}"'
|
||||
hidden: 'no'
|
||||
filebeat_output_elasticsearch_hosts:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
|
||||
#ODFE+Kibana node
|
||||
- hosts: kibana
|
||||
roles:
|
||||
- role: "../roles/opendistro/opendistro-elasticsearch"
|
||||
- role: "../roles/opendistro/opendistro-kibana"
|
||||
become: yes
|
||||
become_user: root
|
||||
vars:
|
||||
elasticsearch_network_host: "{{ hostvars.kibana.private_ip }}"
|
||||
elasticsearch_node_name: node-6
|
||||
elasticsearch_node_master: false
|
||||
elasticsearch_node_ingest: false
|
||||
elasticsearch_node_data: false
|
||||
elasticsearch_cluster_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
elasticsearch_discovery_nodes:
|
||||
- "{{ hostvars.es1.private_ip }}"
|
||||
- "{{ hostvars.es2.private_ip }}"
|
||||
- "{{ hostvars.es3.private_ip }}"
|
||||
kibana_node_name: node-6
|
||||
wazuh_api_credentials:
|
||||
- id: default
|
||||
url: https://{{ hostvars.manager.private_ip }}
|
||||
port: 55000
|
||||
user: foo
|
||||
password: bar
|
||||
instances:
|
||||
node1:
|
||||
name: node-1 # Important: must be equal to elasticsearch_node_name.
|
||||
ip: "{{ hostvars.es1.private_ip }}" # When unzipping, the node will search for its node name folder to get the cert.
|
||||
node2:
|
||||
name: node-2
|
||||
ip: "{{ hostvars.es2.private_ip }}"
|
||||
node3:
|
||||
name: node-3
|
||||
ip: "{{ hostvars.es3.private_ip }}"
|
||||
node4:
|
||||
name: node-4
|
||||
ip: "{{ hostvars.manager.private_ip }}"
|
||||
node5:
|
||||
name: node-5
|
||||
ip: "{{ hostvars.worker.private_ip }}"
|
||||
node6:
|
||||
name: node-6
|
||||
ip: "{{ hostvars.kibana.private_ip }}"
|
||||
21
playbooks/wazuh-odfe-single.yml
Normal file
21
playbooks/wazuh-odfe-single.yml
Normal file
@ -0,0 +1,21 @@
|
||||
---
|
||||
# Single node
|
||||
- hosts: <your server host>
|
||||
become: yes
|
||||
become_user: root
|
||||
roles:
|
||||
- role: ../roles/opendistro/opendistro-elasticsearch
|
||||
- role: ../roles/wazuh/ansible-wazuh-manager
|
||||
- role: ../roles/wazuh/ansible-filebeat-oss
|
||||
- role: ../roles/opendistro/opendistro-kibana
|
||||
vars:
|
||||
single_node: true
|
||||
minimum_master_nodes: 1
|
||||
elasticsearch_node_master: true
|
||||
elasticsearch_network_host: <your server host>
|
||||
filebeat_node_name: node-1
|
||||
filebeat_output_elasticsearch_hosts: <your server host>
|
||||
instances:
|
||||
node1:
|
||||
name: node-1 # Important: must be equal to elasticsearch_node_name.
|
||||
ip: <your server host>
|
||||
@ -3,10 +3,10 @@
|
||||
es_version: "7.8.0"
|
||||
es_major_version: "7.x"
|
||||
|
||||
opendistro_version: 1.9.0
|
||||
opendistro_version: 1.10.1
|
||||
|
||||
elasticsearch_cluster_name: wazuh-cluster
|
||||
single_node: true
|
||||
single_node: false
|
||||
elasticsearch_node_name: node-1
|
||||
opendistro_cluster_name: wazuh
|
||||
elasticsearch_node_data: true
|
||||
elasticsearch_node_ingest: true
|
||||
@ -56,7 +56,7 @@ opendistro_http_port: 9200
|
||||
certs_gen_tool_version: 1.7
|
||||
|
||||
# Url of Search Guard certificates generator tool
|
||||
certs_gen_tool_url: "https://releases.floragunn.com/search-guard-tlstool/{{ certs_gen_tool_version }}/search-guard-tlstool-{{ certs_gen_tool_version }}.zip"
|
||||
certs_gen_tool_url: "https://search.maven.org/remotecontent?filepath=com/floragunn/search-guard-tlstool/{{ certs_gen_tool_version }}/search-guard-tlstool-{{ certs_gen_tool_version }}.zip"
|
||||
|
||||
elasticrepo:
|
||||
apt: 'https://artifacts.elastic.co/packages/7.x/apt'
|
||||
|
||||
@ -70,6 +70,7 @@
|
||||
tags: debug
|
||||
when:
|
||||
- hostvars[inventory_hostname]['private_ip'] is not defined or not hostvars[inventory_hostname]['private_ip']
|
||||
- single_node == false
|
||||
|
||||
- name: Wait for Elasticsearch API (Private IP)
|
||||
uri:
|
||||
@ -87,6 +88,7 @@
|
||||
tags: debug
|
||||
when:
|
||||
- hostvars[inventory_hostname]['private_ip'] is defined and hostvars[inventory_hostname]['private_ip']
|
||||
- single_node == false
|
||||
|
||||
- import_tasks: "RMRedHat.yml"
|
||||
when: ansible_os_family == "RedHat"
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
cluster.name: {{ elasticsearch_cluster_name }}
|
||||
cluster.name: {{ opendistro_cluster_name }}
|
||||
node.name: {{ elasticsearch_node_name }}
|
||||
path.data: /var/lib/elasticsearch
|
||||
path.logs: /var/log/elasticsearch
|
||||
@ -6,6 +6,9 @@ network.host: {{ elasticsearch_network_host }}
|
||||
|
||||
node.master: {{ elasticsearch_node_master|lower }}
|
||||
|
||||
{% if single_node == true %}
|
||||
discovery.type: single-node
|
||||
{% else %}
|
||||
cluster.initial_master_nodes:
|
||||
{% for item in elasticsearch_cluster_nodes %}
|
||||
- {{ item }}
|
||||
@ -15,6 +18,7 @@ discovery.seed_hosts:
|
||||
{% for item in elasticsearch_discovery_nodes %}
|
||||
- {{ item }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{% if elasticsearch_node_data|lower == 'false' %}
|
||||
node.data: false
|
||||
|
||||
@ -6,7 +6,6 @@ elasticsearch_nodes: |-
|
||||
{% for item in groups['es_cluster'] -%}
|
||||
{{ hostvars[item]['ip'] }}{% if not loop.last %}","{% endif %}
|
||||
{%- endfor %}
|
||||
elasticsearch_network_host: 172.16.0.161
|
||||
elastic_api_protocol: https
|
||||
kibana_conf_path: /etc/kibana
|
||||
kibana_node_name: node-1
|
||||
@ -14,12 +13,12 @@ kibana_server_host: "0.0.0.0"
|
||||
kibana_server_port: "5601"
|
||||
kibana_server_name: "kibana"
|
||||
kibana_max_payload_bytes: 1048576
|
||||
elastic_stack_version: 7.8.0
|
||||
elastic_stack_version: 7.9.1
|
||||
wazuh_version: 3.13.2
|
||||
wazuh_app_url: https://packages.wazuh.com/wazuhapp/wazuhapp
|
||||
|
||||
# The OpenDistro package repository
|
||||
kibana_opendistro_version: -1.9.0-1 # Version includes the - for RedHat family compatibility, replace with = for Debian hosts
|
||||
kibana_opendistro_version: -1.10.1-1 # Version includes the - for RedHat family compatibility, replace with = for Debian hosts
|
||||
|
||||
package_repos:
|
||||
yum:
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
---
|
||||
filebeat_version: 7.8.0
|
||||
filebeat_version: 7.9.1
|
||||
|
||||
wazuh_template_branch: v3.13.2
|
||||
|
||||
|
||||
Loading…
Reference in New Issue
Block a user