Fix cluster key and add custom user

Manuel Gutierrez 2020-10-27 14:26:10 +01:00
parent 13cda56e94
commit 8cb307064b
2 changed files with 22 additions and 17 deletions


@@ -1,4 +1,4 @@
# Wazuh-Ansible
[![Slack](https://img.shields.io/badge/slack-join-blue.svg)](https://wazuh.com/community/join-us-on-slack/)
[![Email](https://img.shields.io/badge/email-join-blue.svg)](https://groups.google.com/forum/#!forum/wazuh)
@@ -26,15 +26,15 @@ These playbooks install and configure Wazuh agent, manager and Elastic Stack.
├── wazuh-ansible
│ ├── roles
│ │ ├── elastic-stack
│ │ │ ├── ansible-elasticsearch
│ │ │ ├── ansible-kibana
│ │ │
│ │ ├── opendistro
│ │ │ ├── opendistro-elasticsearch
│ │ │ ├── opendistro-kibana
│ │ │
│ │ ├── wazuh
│ │ │ ├── ansible-filebeat
│ │ │ ├── ansible-filebeat-oss
│ │ │ ├── ansible-wazuh-manager
@@ -105,7 +105,7 @@ The hereunder example playbook uses the `wazuh-ansible` role to provision a prod
ip: "{{ hostvars.kibana.private_ip }}"
tags:
- generate-certs
#ODFE Cluster
- hosts: odfe_cluster
strategy: free
@@ -143,7 +143,7 @@ The hereunder example playbook uses the `wazuh-ansible` role to provision a prod
node6:
name: node-6
ip: "{{ hostvars.kibana.private_ip }}"
#Wazuh cluster
- hosts: manager
roles:
@@ -165,6 +165,7 @@ The hereunder example playbook uses the `wazuh-ansible` role to provision a prod
disable: 'no'
node_name: 'master'
node_type: 'master'
key: 'c98b62a9b6169ac5f67dae55ae4a9088'
nodes:
- '"{{ hostvars.manager.private_ip }}"'
hidden: 'no'
@@ -172,7 +173,7 @@ The hereunder example playbook uses the `wazuh-ansible` role to provision a prod
- "{{ hostvars.es1.private_ip }}"
- "{{ hostvars.es2.private_ip }}"
- "{{ hostvars.es3.private_ip }}"
- hosts: worker
roles:
- role: "../roles/wazuh/ansible-wazuh-manager"
@@ -201,7 +202,7 @@ The hereunder example playbook uses the `wazuh-ansible` role to provision a prod
- "{{ hostvars.es1.private_ip }}"
- "{{ hostvars.es2.private_ip }}"
- "{{ hostvars.es3.private_ip }}"
#ODFE+Kibana node
- hosts: kibana
roles:
@@ -253,9 +254,9 @@ The hereunder example playbook uses the `wazuh-ansible` role to provision a prod
### Inventory file
- The `ansible_host` variable should contain the `address/FQDN` used to gather facts and provision each node.
- The `private_ip` variable should contain the `address/FQDN` used for the internal cluster communications.
- If the environment is located in a local subnet, the `ansible_host` and `private_ip` variables should match.
- The SSH credentials used by Ansible during provisioning can be specified in this file too. Another option is to include them directly in the playbook.
```ini
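# Hedged sketch of such an inventory; all group names, host names and
# addresses below are illustrative examples, not values from this commit.
[wazuh_managers]
manager ansible_host=172.16.0.10 private_ip=172.16.0.10

[odfe_cluster]
es1 ansible_host=172.16.0.21 private_ip=172.16.0.21
es2 ansible_host=172.16.0.22 private_ip=172.16.0.22
es3 ansible_host=172.16.0.23 private_ip=172.16.0.23

[kibana_nodes]
kibana ansible_host=172.16.0.30 private_ip=172.16.0.30

[all:vars]
ansible_ssh_user=vagrant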
@@ -305,7 +306,7 @@ The hereunder example playbook uses the `wazuh-ansible` role to provision a sing
single_node: true
minimum_master_nodes: 1
elasticsearch_node_master: true
elasticsearch_network_host: <your server host>
filebeat_node_name: node-1
filebeat_output_elasticsearch_hosts: <your server host>
ansible_ssh_user: vagrant
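
These variables belong to the single-host example, where the manager, Filebeat and the indexer/Kibana components share one machine. A rough sketch of how such a play could be assembled from the roles listed in the tree at the top of the README follows; the role selection and host group name are assumptions, not part of this commit, and `<your server host>` is a placeholder to replace:

```yaml
# Hypothetical single-host play; role paths follow the repository tree shown
# earlier and the host group name is illustrative.
- hosts: wazuh_single_node
  become: true
  roles:
    - role: "../roles/opendistro/opendistro-elasticsearch"
    - role: "../roles/wazuh/ansible-wazuh-manager"
    - role: "../roles/wazuh/ansible-filebeat-oss"
    - role: "../roles/opendistro/opendistro-kibana"
  vars:
    single_node: true
    minimum_master_nodes: 1
    elasticsearch_node_master: true
    elasticsearch_network_host: <your server host>
    filebeat_node_name: node-1
    filebeat_output_elasticsearch_hosts: <your server host>
```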
@@ -341,7 +342,7 @@ After the playbook execution, the Wazuh UI should be reachable through `https://
If you want to contribute to our repository, please fork our GitHub repository and submit a pull request.
If you are not familiar with GitHub, you can also share your contributions through [our users mailing list](https://groups.google.com/d/forum/wazuh), to which you can subscribe by sending an email to `wazuh+subscribe@googlegroups.com`.
### Modified by Wazuh


@@ -98,9 +98,13 @@
disable: 'no'
node_name: 'master'
node_type: 'master'
key: 'c98b62a9b6169ac5f67dae55ae4a9088'
nodes:
- '"{{ hostvars.manager.private_ip }}"'
hidden: 'no'
wazuh_api_users:
- username: custom-user
password: .S3cur3Pa55w0rd*-
filebeat_output_elasticsearch_hosts:
- "{{ hostvars.es1.private_ip }}"
- "{{ hostvars.es2.private_ip }}"
- "{{ hostvars.es3.private_ip }}"
@@ -161,8 +165,8 @@
- id: default
url: https://{{ hostvars.manager.private_ip }}
port: 55000
username: custom-user   # changed from: foo
password: .S3cur3Pa55w0rd*-   # changed from: bar
instances:
node1:
name: node-1 # Important: must be equal to elasticsearch_node_name.
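
The updated credentials are the ones the Kibana Wazuh app uses to reach the Wazuh API, so they have to match an account declared in `wazuh_api_users` on the manager. A rough sketch of the surrounding Kibana-side block after this change, assuming the role exposes it through a `wazuh_api_credentials` variable and reusing the role path from the repository tree (both are assumptions, not shown in this hunk):

```yaml
# Sketch (assumed variable name and role path): Kibana-side API credentials
# matching the custom-user account created on the manager.
- hosts: kibana
  roles:
    - role: "../roles/opendistro/opendistro-kibana"
  vars:
    wazuh_api_credentials:
      - id: default
        url: https://{{ hostvars.manager.private_ip }}
        port: 55000
        username: custom-user          # must exist in wazuh_api_users
        password: .S3cur3Pa55w0rd*-    # same password as on the manager
```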