diff --git a/CHANGELOG.md b/CHANGELOG.md
index 520661ef..c9b8803d 100755
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,30 @@
 # Change Log
 All notable changes to this project will be documented in this file.
+
+## [v3.12.3_7.6.2]
+
+### Added
+
+- Update to Wazuh v3.12.3
+- AWS S3 block to template ([@limitup](https://github.com/limitup)) [PR#413](https://github.com/wazuh/wazuh-ansible/pull/413)
+
+### Changed
+
+- Update Kibana optimize task parameters and command ([@jm404](https://github.com/jm404)) [PR#412](https://github.com/wazuh/wazuh-ansible/pull/412)
+- Update Kibana optimize folder and owner ([@jm404](https://github.com/jm404)) [PR#410](https://github.com/wazuh/wazuh-ansible/pull/410)
+
+## [v3.12.2_7.6.2]
+
+### Added
+
+- Update to Wazuh v3.12.2
+
+### Fixed
+- Adjusting Kibana plugin optimization max memory ([@Zenidd](https://github.com/Zenidd)) [PR#404](https://github.com/wazuh/wazuh-ansible/pull/404)
+- Removed python-cryptography library tasks ([@Zenidd](https://github.com/Zenidd)) [PR#401](https://github.com/wazuh/wazuh-ansible/pull/401)
+- Removed duplicated task block ([@manuasir](https://github.com/manuasir)) [PR#400](https://github.com/wazuh/wazuh-ansible/pull/400)
+
 ## [v3.12.0_7.6.1]
 
 ### Added
diff --git a/VERSION b/VERSION
index d6be8992..22dfe1fb 100644
--- a/VERSION
+++ b/VERSION
@@ -1,2 +1,2 @@
 WAZUH-ANSIBLE_VERSION="v4"
-REVISION="31140"
+REVISION="31220"
diff --git a/playbooks/wazuh-manager-oss.yml b/playbooks/wazuh-manager-oss.yml
new file mode 100644
index 00000000..3dc6346d
--- /dev/null
+++ b/playbooks/wazuh-manager-oss.yml
@@ -0,0 +1,9 @@
+---
+- hosts: managers
+  roles:
+#    - role: ../roles/wazuh/ansible-wazuh-manager
+    - role: ../roles/wazuh/ansible-filebeat-oss
+      filebeat_output_elasticsearch_hosts:
+        - "172.16.0.161:9200"
+        - "172.16.0.162:9200"
+        - "172.16.0.163:9200"
\ No newline at end of file
diff --git a/playbooks/wazuh-opendistro-kibana.yml b/playbooks/wazuh-opendistro-kibana.yml
new file mode 100644
index 00000000..fa3600c1
--- /dev/null
+++ b/playbooks/wazuh-opendistro-kibana.yml
@@ -0,0 +1,4 @@
+---
+- hosts: es1
+  roles:
+    - role: ../roles/opendistro/opendistro-kibana
diff --git a/playbooks/wazuh-opendistro.yml b/playbooks/wazuh-opendistro.yml
new file mode 100644
index 00000000..271dfa5b
--- /dev/null
+++ b/playbooks/wazuh-opendistro.yml
@@ -0,0 +1,4 @@
+---
+- hosts: es_cluster
+  roles:
+    - role: ../roles/opendistro/opendistro-elasticsearch
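These playbooks rely on inventory groups (`managers`, `es_cluster`, `es1`) and on an `ip` hostvar that the OpenDistro role templates read through `hostvars`. A minimal inventory sketch — host names and the `kibana` group entry are illustrative assumptions, and the `172.16.0.x` addresses simply mirror the ones hardcoded above:

```
# inventory.yml — example only; adjust hosts and addresses to your environment
all:
  children:
    managers:
      hosts:
        manager-01: { ip: 172.16.0.160 }
    es_cluster:
      hosts:
        es-01: { ip: 172.16.0.161 }
        es-02: { ip: 172.16.0.162 }
        es-03: { ip: 172.16.0.163 }
    es1:
      hosts:
        es-01: { ip: 172.16.0.161 }
    kibana:
      hosts:
        kibana-01: { ip: 172.16.0.164 }
```

With that in place the new playbooks would run in the usual way, e.g. `ansible-playbook -i inventory.yml playbooks/wazuh-opendistro.yml`.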
diff --git a/roles/elastic-stack/ansible-elasticsearch/README.md b/roles/elastic-stack/ansible-elasticsearch/README.md
index c574aa9f..f37d3cec 100644
--- a/roles/elastic-stack/ansible-elasticsearch/README.md
+++ b/roles/elastic-stack/ansible-elasticsearch/README.md
@@ -134,7 +134,7 @@ It is possible to define users directly on the playbook, these must be defined o
 License and copyright
 ---------------------
 
-WAZUH Copyright (C) 2017 Wazuh Inc. (License GPLv3)
+WAZUH Copyright (C) 2020 Wazuh Inc. (License GPLv3)
 
 ### Based on previous work from geerlingguy
diff --git a/roles/elastic-stack/ansible-elasticsearch/defaults/main.yml b/roles/elastic-stack/ansible-elasticsearch/defaults/main.yml
index e04f9527..13b3fa53 100644
--- a/roles/elastic-stack/ansible-elasticsearch/defaults/main.yml
+++ b/roles/elastic-stack/ansible-elasticsearch/defaults/main.yml
@@ -4,8 +4,9 @@ elasticsearch_http_port: 9200
 elasticsearch_network_host: 127.0.0.1
 elasticsearch_reachable_host: 127.0.0.1
 elasticsearch_jvm_xms: null
-elastic_stack_version: 7.6.1
+elastic_stack_version: 7.7.0
 elasticsearch_lower_disk_requirements: false
+elasticsearch_path_repo: []
 
 elasticrepo:
   apt: 'https://artifacts.elastic.co/packages/7.x/apt'
@@ -36,7 +37,7 @@ node_certs_source: /usr/share/elasticsearch
 node_certs_destination: /etc/elasticsearch/certs
 
 # CA generation
-master_certs_path: /es_certs
+master_certs_path: "{{ playbook_dir }}/es_certs"
 generate_CA: true
 ca_key_name: ""
 ca_cert_name: ""
diff --git a/roles/elastic-stack/ansible-elasticsearch/tasks/Debian.yml b/roles/elastic-stack/ansible-elasticsearch/tasks/Debian.yml
index 74c6bcf2..20f4231b 100644
--- a/roles/elastic-stack/ansible-elasticsearch/tasks/Debian.yml
+++ b/roles/elastic-stack/ansible-elasticsearch/tasks/Debian.yml
@@ -18,16 +18,6 @@
     - ansible_distribution == "Ubuntu"
     - ansible_distribution_major_version | int == 14
 
-- name: Update and upgrade apt packages
-  become: true
-  apt:
-    upgrade: yes
-    update_cache: yes
-    cache_valid_time: 86400 #One day
-  when:
-    - ansible_distribution == "Ubuntu"
-    - ansible_distribution_major_version | int == 14
-
 - name: Debian/Ubuntu | Add Elasticsearch GPG key.
   apt_key:
     url: "{{ elasticrepo.gpg }}"
diff --git a/roles/elastic-stack/ansible-elasticsearch/tasks/xpack_security.yml b/roles/elastic-stack/ansible-elasticsearch/tasks/xpack_security.yml
index 47438f98..a203cfd4 100644
--- a/roles/elastic-stack/ansible-elasticsearch/tasks/xpack_security.yml
+++ b/roles/elastic-stack/ansible-elasticsearch/tasks/xpack_security.yml
@@ -102,6 +102,7 @@
     state: directory
     mode: 0700
   delegate_to: "127.0.0.1"
+  become: no
   when:
     - node_certs_generator
@@ -111,6 +112,7 @@
     state: directory
     mode: 0700
   delegate_to: "127.0.0.1"
+  become: no
   when:
     - node_certs_generator
@@ -139,6 +141,7 @@
     src: "{{ master_certs_path }}/certs.zip"
     dest: "{{ master_certs_path }}/"
   delegate_to: "127.0.0.1"
+  become: no
   when:
     - node_certs_generator
   tags:
@@ -149,6 +152,8 @@
   copy:
     src: "{{ item }}"
     dest: "{{ node_certs_destination }}/"
+    owner: root
+    group: elasticsearch
     mode: 0440
   with_items:
     - "{{ master_certs_path }}/{{ elasticsearch_node_name }}/{{ elasticsearch_node_name }}.key"
@@ -164,6 +169,8 @@
   copy:
     src: "{{ item }}"
     dest: "{{ node_certs_destination }}/"
+    owner: root
+    group: elasticsearch
     mode: 0440
   with_items:
     - "{{ master_certs_path }}/{{ elasticsearch_node_name }}/{{ elasticsearch_node_name }}.key"
@@ -178,9 +185,11 @@
 - name: Ensuring folder permissions
   file:
     path: "{{ node_certs_destination }}/"
-    mode: 0774
+    owner: root
+    group: elasticsearch
+    mode: 0770
     state: directory
-    recurse: yes
+    recurse: no
   when:
     - elasticsearch_xpack_security
     - generate_CA
diff --git a/roles/elastic-stack/ansible-elasticsearch/templates/elasticsearch.yml.j2 b/roles/elastic-stack/ansible-elasticsearch/templates/elasticsearch.yml.j2
index 0d6887f5..184bc4b4 100644
--- a/roles/elastic-stack/ansible-elasticsearch/templates/elasticsearch.yml.j2
+++ b/roles/elastic-stack/ansible-elasticsearch/templates/elasticsearch.yml.j2
@@ -6,6 +6,12 @@
 path.data: /var/lib/elasticsearch
 path.logs: /var/log/elasticsearch
 bootstrap.memory_lock: true
 network.host: {{ elasticsearch_network_host }}
+{% if elasticsearch_path_repo | length > 0 %}
+path.repo:
+{% for item in elasticsearch_path_repo %}
+  - {{ item }}
+{% endfor %}
+{% endif %}
 
 {% if single_node %}
 discovery.type: single-node
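As an example of the new `elasticsearch_path_repo` variable — assuming a hypothetical snapshot mount at `/mnt/es_snapshots` — setting:

```
elasticsearch_path_repo:
  - /mnt/es_snapshots
```

renders in `elasticsearch.yml` as:

```
path.repo:
  - /mnt/es_snapshots
```

With the default empty list the whole `path.repo` block is omitted.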
diff --git a/roles/elastic-stack/ansible-kibana/README.md b/roles/elastic-stack/ansible-kibana/README.md
index 593cf319..28978761 100644
--- a/roles/elastic-stack/ansible-kibana/README.md
+++ b/roles/elastic-stack/ansible-kibana/README.md
@@ -37,7 +37,7 @@ Example Playbook
 License and copyright
 ---------------------
 
-WAZUH Copyright (C) 2017 Wazuh Inc. (License GPLv3)
+WAZUH Copyright (C) 2020 Wazuh Inc. (License GPLv3)
 
 ### Based on previous work from geerlingguy
diff --git a/roles/elastic-stack/ansible-kibana/defaults/main.yml b/roles/elastic-stack/ansible-kibana/defaults/main.yml
index 2ac2cde5..fd392334 100644
--- a/roles/elastic-stack/ansible-kibana/defaults/main.yml
+++ b/roles/elastic-stack/ansible-kibana/defaults/main.yml
@@ -5,8 +5,8 @@ elasticsearch_http_port: "9200"
 elasticsearch_network_host: "127.0.0.1"
 kibana_server_host: "0.0.0.0"
 kibana_server_port: "5601"
-elastic_stack_version: 7.6.1
-wazuh_version: 3.12.0
+elastic_stack_version: 7.7.0
+wazuh_version: 3.12.3
 wazuh_app_url: https://packages.wazuh.com/wazuhapp/wazuhapp
 
 elasticrepo:
@@ -34,7 +34,7 @@ node_certs_source: /usr/share/elasticsearch
 node_certs_destination: /etc/kibana/certs
 
 # CA Generation
-master_certs_path: /es_certs
+master_certs_path: "{{ playbook_dir }}/es_certs"
 generate_CA: true
 ca_cert_name: ""
 
@@ -50,4 +50,4 @@ build_from_sources: false
 wazuh_plugin_branch: 3.12-7.6
 
 #Nodejs NODE_OPTIONS
-node_options: --max-old-space-size=4096
+node_options: --no-warnings --max-old-space-size=2048 --max-http-header-size=65536
diff --git a/roles/elastic-stack/ansible-kibana/tasks/main.yml b/roles/elastic-stack/ansible-kibana/tasks/main.yml
index 2e39391f..96df7d64 100644
--- a/roles/elastic-stack/ansible-kibana/tasks/main.yml
+++ b/roles/elastic-stack/ansible-kibana/tasks/main.yml
@@ -28,6 +28,8 @@
   copy:
     src: "{{ item }}"
     dest: "{{ node_certs_destination }}/"
+    owner: root
+    group: kibana
     mode: 0440
   with_items:
     - "{{ master_certs_path }}/{{ kibana_node_name }}/{{ kibana_node_name }}.key"
@@ -42,6 +44,8 @@
   copy:
     src: "{{ item }}"
     dest: "{{ node_certs_destination }}/"
+    owner: root
+    group: kibana
     mode: 0440
   with_items:
     - "{{ master_certs_path }}/{{ kibana_node_name }}/{{ kibana_node_name }}.key"
@@ -52,22 +56,14 @@
     - not generate_CA
   tags: xpack-security
 
-- name: Ensuring certificates folder owner
+- name: Ensuring certificates folder owner and permissions
   file:
     path: "{{ node_certs_destination }}/"
     state: directory
-    recurse: yes
+    recurse: no
     owner: kibana
     group: kibana
-  when:
-    - kibana_xpack_security
-  tags: xpack-security
-
-- name: Ensuring certificates folder owner
-  file:
-    path: "{{ node_certs_destination }}/"
     mode: 0770
-    recurse: yes
   when:
     - kibana_xpack_security
   notify: restart kibana
@@ -136,12 +132,12 @@
     - not build_from_sources
 
 - name: Kibana optimization (can take a while)
-  shell: NODE_OPTIONS="{{ node_options }}" /usr/share/kibana/bin/kibana --optimize
+  shell: /usr/share/kibana/node/bin/node {{ node_options }} /usr/share/kibana/src/cli --optimize
   args:
     executable: /bin/bash
+    creates: /usr/share/kibana/optimize/wazuh/
   become: yes
   become_user: kibana
-  changed_when: false
   tags:
     - skip_ansible_lint
 
@@ -165,14 +161,20 @@
   file:
     path: /usr/share/kibana/optimize/wazuh/config/
     state: directory
+    recurse: yes
+    owner: kibana
+    group: kibana
+    mode: 0751
+  changed_when: False
 
 - name: Configure Wazuh Kibana Plugin
   template:
     src: wazuh.yml.j2
     dest: /usr/share/kibana/optimize/wazuh/config/wazuh.yml
     owner: kibana
-    group: root
-    mode: 0644
+    group: kibana
+    mode: 0751
+  changed_when: False
 
 - name: Reload systemd configuration
   systemd:
diff --git a/roles/opendistro/opendistro-elasticsearch/defaults/main.yml b/roles/opendistro/opendistro-elasticsearch/defaults/main.yml
new file mode 100644
index 00000000..aa683033
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/defaults/main.yml
@@ -0,0 +1,58 @@
+---
+# The OpenDistro version
+opendistro_version: 1.8.0
+elasticsearch_cluster_name: wazuh-cluster
+
+# Minimum master nodes in cluster, 2 for 3 nodes elasticsearch cluster
+minimum_master_nodes: 2
+
+# Elasticsearch version
+es_version: "7.3.2"
+es_major_version: "7.x"
+
+# Configure hostnames for Elasticsearch nodes
+# Example es1.example.com, es2.example.com
+domain_name: wazuh.com
+
+# The OpenDistro package repository
+package_repos:
+  yum:
+    opendistro:
+      baseurl: 'https://d3g5vo6xdbdb9a.cloudfront.net/yum/noarch/'
+      gpg: 'https://d3g5vo6xdbdb9a.cloudfront.net/GPG-KEY-opendistroforelasticsearch'
+    elasticsearch_oss:
+      baseurl: 'https://artifacts.elastic.co/packages/oss-7.x/yum'
+      gpg: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch'
+
+opendistro_sec_plugin_conf_path: /usr/share/elasticsearch/plugins/opendistro_security/securityconfig
+opendistro_sec_plugin_tools_path: /usr/share/elasticsearch/plugins/opendistro_security/tools
+opendistro_conf_path: /etc/elasticsearch/
+es_nodes: |-
+  {% for item in groups['es_cluster'] -%}
+  {{ hostvars[item]['ip'] }}{% if not loop.last %}","{% endif %}
+  {%- endfor %}
+
+# Security password
+opendistro_security_password: admin
+# Set JVM memory limits
+opendistro_jvm_xms: null
+
+opendistro_http_port: 9200
+
+certs_gen_tool_version: 1.7
+# Url of Search Guard certificates generator tool
+certs_gen_tool_url: "https://releases.floragunn.com/search-guard-tlstool/{{ certs_gen_tool_version }}/search-guard-tlstool-{{ certs_gen_tool_version }}.zip"
+
+elasticrepo:
+  apt: 'https://artifacts.elastic.co/packages/7.x/apt'
+  yum: 'https://artifacts.elastic.co/packages/7.x/yum'
+  gpg: 'https://artifacts.elastic.co/GPG-KEY-opendistro'
+  key_id: '46095ACC8548582C1A2699A9D27D666CD88E42B4'
+
+opendistro_admin_password: changeme
+opendistro_kibana_password: changeme
+# Cluster Settings
+single_node: true
+opendistro_cluster_name: wazuh
+
+local_certs_path: /tmp/opendistro-nodecerts
\ No newline at end of file
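The `es_nodes` block above deserves a note: the loop joins each node's `ip` hostvar with a literal `","` separator, so with the three-node `es_cluster` from the example inventory it renders as `172.16.0.161","172.16.0.162","172.16.0.163`. The templates then wrap the whole value in quotes and brackets — `["{{ es_nodes }}"]` — which closes the dangling quotes and yields a valid list:

```
discovery.seed_hosts: ["172.16.0.161","172.16.0.162","172.16.0.163"]
```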
diff --git a/roles/opendistro/opendistro-elasticsearch/handlers/main.yml b/roles/opendistro/opendistro-elasticsearch/handlers/main.yml
new file mode 100644
index 00000000..3cfaa6b0
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/handlers/main.yml
@@ -0,0 +1,5 @@
+---
+- name: restart elasticsearch
+  service:
+    name: elasticsearch
+    state: restarted
diff --git a/roles/opendistro/opendistro-elasticsearch/meta/main.yml b/roles/opendistro/opendistro-elasticsearch/meta/main.yml
new file mode 100644
index 00000000..e09933c7
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/meta/main.yml
@@ -0,0 +1,24 @@
+---
+galaxy_info:
+  author: Wazuh
+  description: Installing and maintaining an OpenDistro Elasticsearch server.
+  company: wazuh.com
+  license: license (GPLv3)
+  min_ansible_version: 2.0
+  platforms:
+    - name: EL
+      versions:
+        - all
+    - name: Ubuntu
+      versions:
+        - all
+    - name: Debian
+      versions:
+        - all
+    - name: Fedora
+      versions:
+        - all
+  galaxy_tags:
+    - web
+    - system
+    - monitoring
diff --git a/roles/opendistro/opendistro-elasticsearch/tasks/RMRedHat.yml b/roles/opendistro/opendistro-elasticsearch/tasks/RMRedHat.yml
new file mode 100644
index 00000000..31f0416a
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/tasks/RMRedHat.yml
@@ -0,0 +1,6 @@
+---
+- name: RedHat/CentOS/Fedora | Remove Elasticsearch repository (and clean up left-over metadata)
+  yum_repository:
+    name: opendistro_repo
+    state: absent
+  changed_when: false
diff --git a/roles/opendistro/opendistro-elasticsearch/tasks/RedHat.yml b/roles/opendistro/opendistro-elasticsearch/tasks/RedHat.yml
new file mode 100644
index 00000000..f018c9f7
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/tasks/RedHat.yml
@@ -0,0 +1,38 @@
+---
+- block:
+
+    - name: RedHat/CentOS/Fedora | Add OpenDistro repo
+      yum_repository:
+        file: opendistro
+        name: opendistro_repo
+        description: Opendistro yum repository
+        baseurl: "{{ package_repos.yum.opendistro.baseurl }}"
+        gpgkey: "{{ package_repos.yum.opendistro.gpg }}"
+        gpgcheck: true
+      changed_when: false
+
+    - name: RedHat/CentOS/Fedora | Add Elasticsearch-oss repo
+      yum_repository:
+        file: opendistro
+        name: elasticsearch_oss_repo
+        description: Elasticsearch-oss yum repository
+        baseurl: "{{ package_repos.yum.elasticsearch_oss.baseurl }}"
+        gpgkey: "{{ package_repos.yum.elasticsearch_oss.gpg }}"
+        gpgcheck: true
+      changed_when: false
+
+    - name: RedHat/CentOS/Fedora | Install OpenJDK 11
+      yum:
+        name: java-11-openjdk-devel
+        state: present
+
+    - name: RedHat/CentOS/Fedora | Install OpenDistro dependencies
+      yum:
+        name: "{{ packages }}"
+      vars:
+        packages:
+          - wget
+          - unzip
+
+  tags:
+    - install
\ No newline at end of file
diff --git a/roles/opendistro/opendistro-elasticsearch/tasks/local_actions.yml b/roles/opendistro/opendistro-elasticsearch/tasks/local_actions.yml
new file mode 100644
index 00000000..6885276d
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/tasks/local_actions.yml
@@ -0,0 +1,72 @@
+---
+- block:
+
+    - name: Local action | Create local temporary directory for certificates generation
+      file:
+        path: "{{ local_certs_path }}"
+        state: directory
+
+    - name: Local action | Check that the generation tool exists
+      stat:
+        path: "{{ local_certs_path }}/search-guard-tlstool-{{ certs_gen_tool_version }}.zip"
+      register: tool_package
+
+    - name: Local action | Download certificates generation tool
+      get_url:
+        url: "{{ certs_gen_tool_url }}"
+        dest: "{{ local_certs_path }}/search-guard-tlstool-{{ certs_gen_tool_version }}.zip"
+      when: not tool_package.stat.exists
+
+    - name: Local action | Extract the certificates generation tool
+      unarchive:
+        src: "{{ local_certs_path }}/search-guard-tlstool-{{ certs_gen_tool_version }}.zip"
+        dest: "{{ local_certs_path }}/"
+
+    - name: Local action | Add the execution bit to the binary
+      file:
+        dest: "{{ local_certs_path }}/tools/sgtlstool.sh"
+        mode: a+x
+
+    - name: Local action | Prepare the certificates generation template file
+      template:
+        src: "templates/tlsconfig.yml.j2"
+        dest: "{{ local_certs_path }}/config/tlsconfig.yml"
+      register: tlsconfig_template
+
+    - name: Local action | Create the certificates directory
+      file:
+        path: "{{ local_certs_path }}/certs/"
+        state: directory
+        mode: '0755'
+
+    - name: Local action | Check if root CA file exists
+      stat:
+        path: "{{ local_certs_path }}/certs/root-ca.key"
+      register: root_ca_file
+
+    - name: Local action | Generate the node & admin certificates in local
+      command: >-
+        {{ local_certs_path }}/tools/sgtlstool.sh
+        -c {{ local_certs_path }}/config/tlsconfig.yml
+        -ca -crt
+        -t {{ local_certs_path }}/certs/
+        -f -o
+      when:
+        - not root_ca_file.stat.exists
+        - tlsconfig_template.changed
+
+    - name: Local action | Generate the node & admin certificates using an existing root CA
+      command: >-
+        {{ local_certs_path }}/tools/sgtlstool.sh
+        -c {{ local_certs_path }}/config/tlsconfig.yml
+        -crt
+        -t {{ local_certs_path }}/certs/
+        -f
+      when:
+        - root_ca_file.stat.exists
+        - tlsconfig_template.changed
+
+  run_once: true
+  delegate_to: localhost
+  tags:
+    - generate-certs
\ No newline at end of file
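After this runs on the controller, `{{ local_certs_path }}/certs/` should hold, per the copy tasks further down, roughly the following (node names follow the example inventory, e.g. `es-01`):

```
/tmp/opendistro-nodecerts/certs/
├── root-ca.pem
├── root-ca.key
├── admin.pem / admin.key                       # client certificate for securityadmin.sh
├── es-01.pem / es-01.key                       # transport-layer certificate per node
├── es-01_http.pem / es-01_http.key             # HTTP-layer certificate per node
└── es-01_elasticsearch_config_snippet.yml      # generated security settings per node
```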
diff --git a/roles/opendistro/opendistro-elasticsearch/tasks/main.yml b/roles/opendistro/opendistro-elasticsearch/tasks/main.yml
new file mode 100644
index 00000000..9df1e01c
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/tasks/main.yml
@@ -0,0 +1,68 @@
+---
+
+- import_tasks: local_actions.yml
+
+- import_tasks: RedHat.yml
+  when: ansible_os_family == 'RedHat'
+
+- name: Install OpenDistro
+  package:
+    name: opendistroforelasticsearch-{{ opendistro_version }}
+    state: present
+  register: install
+  tags: install
+
+- name: Remove elasticsearch configuration file
+  file:
+    path: "{{ opendistro_conf_path }}/elasticsearch.yml"
+    state: absent
+  when: install.changed
+  tags: install
+
+- name: Copy Configuration File
+  blockinfile:
+    block: "{{ lookup('template', 'elasticsearch.yml.j2') }}"
+    dest: "{{ opendistro_conf_path }}/elasticsearch.yml"
+    create: true
+    group: elasticsearch
+    mode: 0640
+    marker: "## {mark} Opendistro general settings ##"
+  when: install.changed
+  tags: install
+
+- import_tasks: security_actions.yml
+
+- name: Configure OpenDistro Elasticsearch JVM memory
+  template:
+    src: "templates/jvm.options.j2"
+    dest: /etc/elasticsearch/jvm.options
+    owner: root
+    group: elasticsearch
+    mode: 0644
+    force: yes
+  notify: restart elasticsearch
+  tags: install
+
+- name: Ensure Elasticsearch started and enabled
+  service:
+    name: elasticsearch
+    enabled: true
+    state: started
+
+- name: Wait for Elasticsearch API
+  uri:
+    url: "https://{{ es_nodes.split(',')[0].split('"')[0] }}:9200/_cluster/health/"
+    user: "admin" # Default OpenDistro user is always "admin"
+    password: "{{ opendistro_admin_password }}"
+    validate_certs: no
+    status_code: 200,401
+    return_content: yes
+    timeout: 4
+  register: _result
+  until: (_result.json is defined) and (_result.json.status == "green")
+  retries: 24
+  delay: 5
+  tags: debug
+
+- import_tasks: "RMRedHat.yml"
+  when: ansible_os_family == "RedHat"
\ No newline at end of file
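The same health check the role performs can be reproduced by hand — handy when the `until` loop above times out. A sketch, assuming the first cluster node from the example inventory and the default `admin` credentials:

```
curl -k -u admin:changeme "https://172.16.0.161:9200/_cluster/health?pretty"
```

`-k` mirrors `validate_certs: no`; a `401` here usually points at the security index initialization below rather than at Elasticsearch itself.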
diff --git a/roles/opendistro/opendistro-elasticsearch/tasks/security_actions.yml b/roles/opendistro/opendistro-elasticsearch/tasks/security_actions.yml
new file mode 100644
index 00000000..ea48874e
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/tasks/security_actions.yml
@@ -0,0 +1,79 @@
+- block:
+    - name: Remove demo certs
+      file:
+        path: "{{ item }}"
+        state: absent
+      with_items:
+        - "{{ opendistro_conf_path }}/kirk.pem"
+        - "{{ opendistro_conf_path }}/kirk-key.pem"
+        - "{{ opendistro_conf_path }}/esnode.pem"
+        - "{{ opendistro_conf_path }}/esnode-key.pem"
+
+    - name: Copy the node & admin certificates to Elasticsearch cluster
+      copy:
+        src: "{{ local_certs_path }}/certs/{{ item }}"
+        dest: /etc/elasticsearch/
+        mode: 0644
+      with_items:
+        - root-ca.pem
+        - root-ca.key
+        - "{{ inventory_hostname }}.key"
+        - "{{ inventory_hostname }}.pem"
+        - "{{ inventory_hostname }}_http.key"
+        - "{{ inventory_hostname }}_http.pem"
+        - "{{ inventory_hostname }}_elasticsearch_config_snippet.yml"
+        - admin.key
+        - admin.pem
+
+    - name: Copy the OpenDistro security configuration file to cluster
+      blockinfile:
+        block: "{{ lookup('file', '{{ local_certs_path }}/certs/{{ inventory_hostname }}_elasticsearch_config_snippet.yml') }}"
+        dest: "{{ opendistro_conf_path }}/elasticsearch.yml"
+        insertafter: EOF
+        marker: "## {mark} Opendistro Security Node & Admin certificates configuration ##"
+
+    - name: Prepare the OpenDistro security configuration file
+      replace:
+        path: "{{ opendistro_conf_path }}/elasticsearch.yml"
+        regexp: 'searchguard'
+        replace: 'opendistro_security'
+      tags: local
+
+    - name: Restart elasticsearch with security configuration
+      systemd:
+        name: elasticsearch
+        state: restarted
+
+    - name: Copy the OpenDistro security internal users template
+      template:
+        src: "templates/internal_users.yml.j2"
+        dest: "{{ opendistro_sec_plugin_conf_path }}/internal_users.yml"
+        mode: 0644
+      run_once: true
+
+    - name: Set the Admin user password
+      shell: >
+        sed -i 's,{{ opendistro_admin_password }},'$(sh {{ opendistro_sec_plugin_tools_path }}/hash.sh -p {{ opendistro_admin_password }} | tail -1)','
+        {{ opendistro_sec_plugin_conf_path }}/internal_users.yml
+      run_once: true
+
+    - name: Set the kibanaserver role/user password
+      shell: >
+        sed -i 's,{{ opendistro_kibana_password }},'$(sh {{ opendistro_sec_plugin_tools_path }}/hash.sh -p {{ opendistro_kibana_password }} | tail -1)','
+        {{ opendistro_sec_plugin_conf_path }}/internal_users.yml
+      run_once: true
+
+    - name: Initialize the OpenDistro security index in elasticsearch
+      command: >
+        {{ opendistro_sec_plugin_tools_path }}/securityadmin.sh
+        -cacert {{ opendistro_conf_path }}/root-ca.pem
+        -cert {{ opendistro_conf_path }}/admin.pem
+        -key {{ opendistro_conf_path }}/admin.key
+        -cd {{ opendistro_sec_plugin_conf_path }}/
+        -nhnv -icl
+        -h {{ hostvars[inventory_hostname]['ip'] }}
+      run_once: true
+
+  tags:
+    - security
+  when: install.changed
\ No newline at end of file
diff --git a/roles/opendistro/opendistro-elasticsearch/templates/elasticsearch.yml.j2 b/roles/opendistro/opendistro-elasticsearch/templates/elasticsearch.yml.j2
new file mode 100644
index 00000000..58a8ece2
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/templates/elasticsearch.yml.j2
@@ -0,0 +1,22 @@
+cluster.name: "{{ opendistro_cluster_name }}"
+
+node.name: "{{ inventory_hostname }}"
+
+path.data: /var/lib/elasticsearch
+
+path.logs: /var/log/elasticsearch
+
+network.host: "{{ hostvars[inventory_hostname]['ip'] }}"
+
+http.port: "{{ opendistro_http_port }}"
+
+discovery.seed_hosts: ["{{ es_nodes }}"]
+
+cluster.initial_master_nodes: ["{{ es_nodes }}"]
+
+discovery.zen.minimum_master_nodes: "{{ minimum_master_nodes }}"
+opendistro_security.allow_default_init_securityindex: true
+opendistro_security.audit.type: internal_elasticsearch
+opendistro_security.enable_snapshot_restore_privilege: true
+opendistro_security.check_snapshot_restore_write_privileges: true
+opendistro_security.restapi.roles_enabled: ["all_access", "security_rest_api_access"]
diff --git a/roles/opendistro/opendistro-elasticsearch/templates/internal_users.yml.j2 b/roles/opendistro/opendistro-elasticsearch/templates/internal_users.yml.j2
new file mode 100644
index 00000000..471a5c28
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/templates/internal_users.yml.j2
@@ -0,0 +1,21 @@
+---
+# This is the internal user database
+# The hash value is a bcrypt hash and can be generated with plugin/tools/hash.sh
+
+_meta:
+  type: "internalusers"
+  config_version: 2
+
+# Define your internal users here
+
+admin:
+  hash: "{{ opendistro_admin_password }}"
+  reserved: true
+  backend_roles:
+    - "admin"
+  description: "admin user"
+
+kibanaserver:
+  hash: "{{ opendistro_kibana_password }}"
+  reserved: true
+  description: "kibanaserver user"
diff --git a/roles/opendistro/opendistro-elasticsearch/templates/jvm.options.j2 b/roles/opendistro/opendistro-elasticsearch/templates/jvm.options.j2
new file mode 100644
index 00000000..de69125c
--- /dev/null
+++ b/roles/opendistro/opendistro-elasticsearch/templates/jvm.options.j2
@@ -0,0 +1,114 @@
+#jinja2: trim_blocks:False
+# {{ ansible_managed }}
+## JVM configuration
+
+################################################################
+## IMPORTANT: JVM heap size
+################################################################
+##
+## You should always set the min and max JVM heap
+## size to the same value.
+## For example, to set the heap to 4 GB, set:
+##
+## -Xms4g
+## -Xmx4g
+##
+## See https://www.elastic.co/guide/en/elasticsearch/reference/current/heap-size.html
+## for more information
+##
+################################################################
+
+# Xms represents the initial size of total heap space
+# Xmx represents the maximum size of total heap space
+{% if opendistro_jvm_xms is not none %}
+{% if opendistro_jvm_xms < 32000 %}
+-Xms{{ opendistro_jvm_xms }}m
+-Xmx{{ opendistro_jvm_xms }}m
+{% else %}
+-Xms32000m
+-Xmx32000m
+{% endif %}
+{% else %}
+-Xms{% if ansible_memtotal_mb < 64000 %}{{ ((ansible_memtotal_mb|int)/2)|int }}m{% else %}32000m{% endif %}
+-Xmx{% if ansible_memtotal_mb < 64000 %}{{ ((ansible_memtotal_mb|int)/2)|int }}m{% else %}32000m{% endif %}
+{% endif %}
+
+################################################################
+## Expert settings
+################################################################
+##
+## All settings below this section are considered
+## expert settings. Don't tamper with them unless
+## you understand what you are doing
+##
+################################################################
+
+## GC configuration
+-XX:+UseConcMarkSweepGC
+-XX:CMSInitiatingOccupancyFraction=75
+-XX:+UseCMSInitiatingOccupancyOnly
+
+## optimizations
+
+# pre-touch memory pages used by the JVM during initialization
+-XX:+AlwaysPreTouch
+
+## basic
+
+# force the server VM
+-server
+
+# explicitly set the stack size
+-Xss1m
+
+# set to headless, just in case
+-Djava.awt.headless=true
+
+# ensure UTF-8 encoding by default (e.g. filenames)
+-Dfile.encoding=UTF-8
+
+# use our provided JNA always versus the system one
+-Djna.nosys=true
+
+# turn off a JDK optimization that throws away stack traces for common
+# exceptions because stack traces are important for debugging
+-XX:-OmitStackTraceInFastThrow
+
+# flags to configure Netty
+-Dio.netty.noUnsafe=true
+-Dio.netty.noKeySetOptimization=true
+-Dio.netty.recycler.maxCapacityPerThread=0
+
+# log4j 2
+-Dlog4j.shutdownHookEnabled=false
+-Dlog4j2.disable.jmx=true
+
+## heap dumps
+
+# generate a heap dump when an allocation from the Java heap fails
+# heap dumps are created in the working directory of the JVM
+-XX:+HeapDumpOnOutOfMemoryError
+
+# specify an alternative path for heap dumps
+# ensure the directory exists and has sufficient space
+-XX:HeapDumpPath=/var/lib/elasticsearch
+
+## GC logging
+
+#-XX:+PrintGCDetails
+#-XX:+PrintGCTimeStamps
+#-XX:+PrintGCDateStamps
+#-XX:+PrintClassHistogram
+#-XX:+PrintTenuringDistribution
+#-XX:+PrintGCApplicationStoppedTime
+
+# log GC status to a file with time stamps
+# ensure the directory exists
+#-Xloggc:${loggc}
+
+# By default, the GC log file will not rotate.
+# By uncommenting the lines below, the GC log file
+# will be rotated every 128MB at most 32 times.
+#-XX:+UseGCLogFileRotation +#-XX:NumberOfGCLogFiles=32 +#-XX:GCLogFileSize=128M diff --git a/roles/opendistro/opendistro-elasticsearch/templates/tlsconfig.yml.j2 b/roles/opendistro/opendistro-elasticsearch/templates/tlsconfig.yml.j2 new file mode 100644 index 00000000..0f7671e2 --- /dev/null +++ b/roles/opendistro/opendistro-elasticsearch/templates/tlsconfig.yml.j2 @@ -0,0 +1,61 @@ +ca: + root: + dn: CN=root.ca.{{ domain_name }},OU=CA,O={{ domain_name }}\, Inc.,DC={{ domain_name }} + keysize: 2048 + validityDays: 730 + pkPassword: none + file: root-ca.pem + +### Default values and global settings +defaults: + validityDays: 730 + pkPassword: none + # Set this to true in order to generate config and certificates for + # the HTTP interface of nodes + httpsEnabled: true + reuseTransportCertificatesForHttp: false + verifyHostnames: false + resolveHostnames: false + +### +### Nodes +### +# +# Specify the nodes of your ES cluster here +# +nodes: +{% for item in groups['es_cluster'] %} + - name: {{ item }} + dn: CN={{ item }}.{{ domain_name }},OU=Ops,O={{ domain_name }}\, Inc.,DC={{ domain_name }} + dns: {{ item }}.{{ domain_name }} + ip: {{ hostvars[item]['ip'] }} +{% endfor %} +{% if groups['kibana'] is defined and groups['kibana']|length > 0 %} +{% for item in groups['kibana'] %} + - name: {{ item }} + dn: CN={{ item }}.{{ domain_name }},OU=Ops,O={{ domain_name }}\, Inc.,DC={{ domain_name }} + dns: {{ item }}.{{ domain_name }} + ip: {{ hostvars[item]['ip'] }} +{% endfor %} +{% endif %} +{% if groups['managers'] is defined and groups['managers']|length > 0 %} +{% for item in groups['managers'] %} + - name: {{ item }} + dn: CN={{ item }}.{{ domain_name }},OU=Ops,O={{ domain_name }}\, Inc.,DC={{ domain_name }} + dns: {{ item }}.{{ domain_name }} + ip: {{ hostvars[item]['ip'] }} +{% endfor %} +{% endif %} +### +### Clients +### +# +# Specify the clients that shall access your ES cluster with certificate authentication here +# +# At least one client must be an admin user (i.e., a super-user). 
Admin users can +# be specified with the attribute admin: true +# +clients: + - name: admin + dn: CN=admin.{{ domain_name }},OU=Ops,O={{ domain_name }}\, Inc.,DC={{ domain_name }} + admin: true diff --git a/roles/opendistro/opendistro-kibana/defaults/main.yml b/roles/opendistro/opendistro-kibana/defaults/main.yml new file mode 100644 index 00000000..428880ee --- /dev/null +++ b/roles/opendistro/opendistro-kibana/defaults/main.yml @@ -0,0 +1,62 @@ +--- + +elasticsearch_http_port: 9200 +elasticsearch_nodes: |- + {% for item in groups['es_cluster'] -%} + {{ hostvars[item]['ip'] }}{% if not loop.last %}","{% endif %} + {%- endfor %} +elasticsearch_network_host: 172.16.0.161 +elastic_api_protocol: https +kibana_conf_path: /etc/kibana +kibana_server_host: "0.0.0.0" +kibana_server_port: "5601" +kibana_server_name: "kibana" +kibana_max_payload_bytes: 1048576 +elastic_stack_version: 7.7.0 +wazuh_version: 3.12.3 +wazuh_app_url: https://packages.wazuh.com/wazuhapp/wazuhapp + +# The OpenDistro package repository +package_repos: + yum: + opendistro: + baseurl: 'https://d3g5vo6xdbdb9a.cloudfront.net/yum/noarch/' + gpg: 'https://d3g5vo6xdbdb9a.cloudfront.net/GPG-KEY-opendistroforelasticsearch' + elasticsearch_oss: + baseurl: 'https://artifacts.elastic.co/packages/oss-7.x/yum' + gpg: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch' + + +# API credentials +wazuh_api_credentials: + - id: "default" + url: "http://localhost" + port: 55000 + user: "foo" + password: "bar" + +# opendistro Security +kibana_opendistro_security: true +kibana_newsfeed_enabled: "false" +kibana_telemetry_optin: "false" +kibana_telemetry_enabled: "false" + +opendistro_security_user: elastic +opendistro_admin_password: changeme +opendistro_kibana_user: kibanaserver +opendistro_kibana_password: changeme +local_certs_path: /tmp/opendistro-nodecerts + +# Nodejs +nodejs: + repo_dict: + debian: "deb" + redhat: "rpm" + repo_url_ext: "nodesource.com/setup_10.x" + +# Build from sources +build_from_sources: false +wazuh_plugin_branch: 3.12-7.6 + +#Nodejs NODE_OPTIONS +node_options: --no-warnings --max-old-space-size=2048 --max-http-header-size=65536 diff --git a/roles/opendistro/opendistro-kibana/handlers/main.yml b/roles/opendistro/opendistro-kibana/handlers/main.yml new file mode 100644 index 00000000..55ea3d3c --- /dev/null +++ b/roles/opendistro/opendistro-kibana/handlers/main.yml @@ -0,0 +1,3 @@ +--- +- name: restart kibana + service: name=kibana state=restarted diff --git a/roles/opendistro/opendistro-kibana/tasks/RMRedHat.yml b/roles/opendistro/opendistro-kibana/tasks/RMRedHat.yml new file mode 100644 index 00000000..959c70e9 --- /dev/null +++ b/roles/opendistro/opendistro-kibana/tasks/RMRedHat.yml @@ -0,0 +1,6 @@ +--- +- name: Remove Elasticsearch repository (and clean up left-over metadata) + yum_repository: + name: opendistro_repo + state: absent + changed_when: false diff --git a/roles/opendistro/opendistro-kibana/tasks/RedHat.yml b/roles/opendistro/opendistro-kibana/tasks/RedHat.yml new file mode 100644 index 00000000..d2dbc4ec --- /dev/null +++ b/roles/opendistro/opendistro-kibana/tasks/RedHat.yml @@ -0,0 +1,23 @@ +--- +- block: + + - name: RedHat/CentOS/Fedora | Add OpenDistro repo + yum_repository: + file: opendistro + name: opendistro_repo + description: Opendistro yum repository + baseurl: "{{ package_repos.yum.opendistro.baseurl }}" + gpgkey: "{{ package_repos.yum.opendistro.gpg }}" + gpgcheck: true + + - name: RedHat/CentOS/Fedora | Add Elasticsearch-oss repo + yum_repository: + file: opendistro + name: 
elasticsearch_oss_repo
+      description: Elasticsearch-oss yum repository
+      baseurl: "{{ package_repos.yum.elasticsearch_oss.baseurl }}"
+      gpgkey: "{{ package_repos.yum.elasticsearch_oss.gpg }}"
+      gpgcheck: true
+
+  tags:
+    - install
\ No newline at end of file
diff --git a/roles/opendistro/opendistro-kibana/tasks/build_wazuh_plugin.yml b/roles/opendistro/opendistro-kibana/tasks/build_wazuh_plugin.yml
new file mode 100644
index 00000000..b7ceb87f
--- /dev/null
+++ b/roles/opendistro/opendistro-kibana/tasks/build_wazuh_plugin.yml
@@ -0,0 +1,76 @@
+---
+- name: Ensure the Git package is present
+  package:
+    name: git
+    state: present
+
+- name: Modify repo url if host is in Debian family
+  set_fact:
+    node_js_repo_type: deb
+  when:
+    - ansible_os_family | lower == "debian"
+
+- name: Download script to install Nodejs repository
+  get_url:
+    url: "https://{{ nodejs['repo_dict'][ansible_os_family|lower] }}.{{ nodejs['repo_url_ext'] }}"
+    dest: "/tmp/setup_nodejs_repo.sh"
+    mode: 0700
+
+- name: Execute downloaded script to install Nodejs repo
+  command: /tmp/setup_nodejs_repo.sh
+  register: node_repo_installation_result
+  changed_when: false
+
+- name: Install Nodejs
+  package:
+    name: nodejs
+    state: present
+
+- name: Install yarn dependency to build the Wazuh Kibana Plugin
+  # Using shell due to errors when evaluating text between @ with command
+  shell: "npm install -g {{ 'yarn' }}{{ '@' }}{{ '1.10.1'}}" # noqa 305
+  register: install_yarn_result
+  changed_when: install_yarn_result.rc == 0
+
+- name: Remove old wazuh-kibana-app git directory
+  file:
+    path: /tmp/app
+    state: absent
+  changed_when: false
+
+- name: Clone wazuh-kibana-app repository # Using command as git module doesn't cover single-branch nor depth
+  command: git clone https://github.com/wazuh/wazuh-kibana-app -b {{ wazuh_plugin_branch }} --single-branch --depth=1 app # noqa 303
+  register: clone_app_repo_result
+  changed_when: false
+  args:
+    chdir: "/tmp"
+
+- name: Executing yarn to build the package
+  command: "{{ item }}"
+  with_items:
+    - "yarn"
+    - "yarn build"
+  register: yarn_execution_result
+  changed_when: false
+  args:
+    chdir: "/tmp/app/"
+
+- name: Obtain name of generated package
+  shell: "find ./ -name 'wazuh-*.zip' -printf '%f\\n'"
+  register: wazuhapp_package_name
+  changed_when: false
+  args:
+    chdir: "/tmp/app/build"
+
+- name: Install Wazuh Plugin (can take a while)
+  shell: NODE_OPTIONS="{{ node_options }}" /usr/share/kibana/bin/kibana-plugin install file:///tmp/app/build/{{ wazuhapp_package_name.stdout }}
+  args:
+    executable: /bin/bash
+    creates: /usr/share/kibana/plugins/wazuh/package.json
+    chdir: /usr/share/kibana
+  become: yes
+  become_user: kibana
+  notify: restart kibana
+  tags:
+    - install
+    - skip_ansible_lint
diff --git a/roles/opendistro/opendistro-kibana/tasks/main.yml b/roles/opendistro/opendistro-kibana/tasks/main.yml
new file mode 100644
index 00000000..013648db
--- /dev/null
+++ b/roles/opendistro/opendistro-kibana/tasks/main.yml
@@ -0,0 +1,133 @@
+---
+
+- name: Stopping early, trying to compile Wazuh Kibana Plugin on Debian 10 is not possible
+  fail:
+    msg: "It's not possible to compile the Wazuh Kibana plugin on Debian 10 due to: https://github.com/wazuh/wazuh-kibana-app/issues/1924"
+  when:
+    - build_from_sources
+    - ansible_distribution == "Debian"
+    - ansible_distribution_major_version == "10"
+
+- import_tasks: RedHat.yml
+  when: ansible_os_family == 'RedHat'
+
+- name: Reload systemd
+  systemd:
+    daemon_reload: true
+  ignore_errors: true
+  when:
+    - not
(ansible_distribution == "Amazon" and ansible_distribution_version == "(Karoo)") + - not (ansible_distribution == "Ubuntu" and ansible_distribution_version is version('15.04', '<')) + - not (ansible_distribution == "Debian" and ansible_distribution_version is version('8', '<')) + - not (ansible_os_family == "RedHat" and ansible_distribution_version is version('7', '<')) + +- name: Install Kibana + package: + name: opendistroforelasticsearch-kibana + state: present + register: install + tags: install + +- name: Remove Kibana configuration file + file: + path: "{{ kibana_conf_path }}/kibana.yml" + state: absent + when: install.changed + tags: install + +- import_tasks: security_actions.yml + +- name: Copy Configuration File + blockinfile: + block: "{{ lookup('template', 'opendistro_kibana.yml.j2') }}" + dest: "{{ kibana_conf_path }}/kibana.yml" + create: true + group: kibana + owner: kibana + mode: 0640 + marker: "## {mark} Kibana general settings ##" + notify: restart kibana + tags: + - install + - configure + +- name: Build and Install Wazuh Kibana Plugin from sources + import_tasks: build_wazuh_plugin.yml + when: + - build_from_sources is defined + - build_from_sources + +- name: Install Wazuh Plugin (can take a while) + shell: >- + NODE_OPTIONS="{{ node_options }}" /usr/share/kibana/bin/kibana-plugin install + {{ wazuh_app_url }}-{{ wazuh_version }}_{{ elastic_stack_version }}.zip + args: + executable: /bin/bash + creates: /usr/share/kibana/plugins/wazuh/package.json + chdir: /usr/share/kibana + become: yes + become_user: kibana + notify: restart kibana + tags: + - install + - skip_ansible_lint + when: + - not build_from_sources + +- name: Kibana optimization (can take a while) + shell: /usr/share/kibana/node/bin/node {{ node_options }} /usr/share/kibana/src/cli --optimize + args: + executable: /bin/bash + become: yes + become_user: kibana + changed_when: false + tags: + - skip_ansible_lint + +- name: Wait for Elasticsearch port + wait_for: host={{ elasticsearch_network_host }} port={{ elasticsearch_http_port }} + +- name: Select correct API protocol + set_fact: + elastic_api_protocol: "{% if kibana_opendistro_security is defined and kibana_opendistro_security %}https{% else %}http{% endif %}" + +- name: Attempting to delete legacy Wazuh index if exists + uri: + url: "{{ elastic_api_protocol }}://{{ elasticsearch_network_host }}:{{ elasticsearch_http_port }}/.wazuh" + method: DELETE + user: "admin" + password: "{{ opendistro_admin_password }}" + validate_certs: no + status_code: 200, 404 + +- name: Create wazuh plugin config directory + file: + path: /usr/share/kibana/optimize/wazuh/config/ + state: directory + recurse: yes + owner: kibana + group: kibana + mode: 0751 + changed_when: False + +- name: Configure Wazuh Kibana Plugin + template: + src: wazuh.yml.j2 + dest: /usr/share/kibana/optimize/wazuh/config/wazuh.yml + owner: kibana + group: kibana + mode: 0751 + changed_when: False + +- name: Reload systemd configuration + systemd: + daemon_reload: true + +- name: Ensure Kibana started and enabled + service: + name: kibana + enabled: true + state: started + +- import_tasks: RMRedHat.yml + when: ansible_os_family == 'RedHat' \ No newline at end of file diff --git a/roles/opendistro/opendistro-kibana/tasks/security_actions.yml b/roles/opendistro/opendistro-kibana/tasks/security_actions.yml new file mode 100644 index 00000000..be63c9ea --- /dev/null +++ b/roles/opendistro/opendistro-kibana/tasks/security_actions.yml @@ -0,0 +1,13 @@ +- block: + + - name: Copy the certificates from local 
to the Kibana instance
+      copy:
+        src: "{{ local_certs_path }}/certs/{{ item }}"
+        dest: /usr/share/kibana
+        mode: 0644
+      with_items:
+        - "{{ inventory_hostname }}_http.key"
+        - "{{ inventory_hostname }}_http.pem"
+
+  tags:
+    - security
+  when: install.changed
\ No newline at end of file
diff --git a/roles/opendistro/opendistro-kibana/templates/opendistro_kibana.yml.j2 b/roles/opendistro/opendistro-kibana/templates/opendistro_kibana.yml.j2
new file mode 100644
index 00000000..40dd9d6c
--- /dev/null
+++ b/roles/opendistro/opendistro-kibana/templates/opendistro_kibana.yml.j2
@@ -0,0 +1,38 @@
+# {{ ansible_managed }}
+# Description:
+# Default Kibana configuration for Open Distro.
+server.port: {{ kibana_server_port }}
+
+#server.basePath: ""
+server.maxPayloadBytes: {{ kibana_max_payload_bytes }}
+server.name: {{ kibana_server_name }}
+server.host: {{ kibana_server_host }}
+
+
+{% if kibana_opendistro_security %}
+elasticsearch.hosts: "https://{{ elasticsearch_network_host }}:{{ elasticsearch_http_port }}"
+{% else %}
+elasticsearch.hosts: "http://{{ elasticsearch_network_host }}:{{ elasticsearch_http_port }}"
+{% endif %}
+
+elasticsearch.username: {{ opendistro_kibana_user }}
+elasticsearch.password: {{ opendistro_kibana_password }}
+elasticsearch.ssl.verificationMode: none
+
+elasticsearch.requestHeadersWhitelist: ["securitytenant","Authorization"]
+opendistro_security.multitenancy.enabled: false # FIXME: should be enabled starting with Wazuh App v3.13
+opendistro_security.multitenancy.tenants.preferred: ["Private", "Global"]
+opendistro_security.readonly_mode.roles: ["kibana_read_only"]
+
+# OpenDistro Security
+{% if kibana_opendistro_security %}
+server.ssl.enabled: true
+server.ssl.certificate: "/usr/share/kibana/{{ inventory_hostname }}_http.pem"
+server.ssl.key: "/usr/share/kibana/{{ inventory_hostname }}_http.key"
+{% endif %}
+
+newsfeed.enabled: {{ kibana_newsfeed_enabled }}
+telemetry.optIn: {{ kibana_telemetry_optin }}
+telemetry.enabled: {{ kibana_telemetry_enabled }}
diff --git a/roles/opendistro/opendistro-kibana/templates/wazuh.yml.j2 b/roles/opendistro/opendistro-kibana/templates/wazuh.yml.j2
new file mode 100644
index 00000000..1cbc9e2d
--- /dev/null
+++ b/roles/opendistro/opendistro-kibana/templates/wazuh.yml.j2
@@ -0,0 +1,134 @@
+---
+#
+# Wazuh app - App configuration file
+# Copyright (C) 2015-2019 Wazuh, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Find more information about this on the LICENSE file.
+#
+# ======================== Wazuh app configuration file ========================
+#
+# Please check the documentation for more information on configuration options:
+# https://documentation.wazuh.com/current/installation-guide/index.html
+#
+# Also, you can check our repository:
+# https://github.com/wazuh/wazuh-kibana-app
+#
+# ------------------------------- Index patterns -------------------------------
+#
+# Default index pattern to use.
+#pattern: wazuh-alerts-3.x-*
+#
+# ----------------------------------- Checks -----------------------------------
+#
+# Defines which checks must be considered by the healthcheck
+# step once the Wazuh app starts. Values must be true or false.
+#checks.pattern : true
+#checks.template: true
+#checks.api     : true
+#checks.setup   : true
+#
+# --------------------------------- Extensions ---------------------------------
+#
+# Defines which extensions should be activated when you add a new API entry.
+# You can change them after Wazuh app starts.
+# Values must be true or false.
+#extensions.pci       : true
+#extensions.gdpr      : true
+#extensions.hipaa     : true
+#extensions.nist      : true
+#extensions.audit     : true
+#extensions.oscap     : false
+#extensions.ciscat    : false
+#extensions.aws       : false
+#extensions.virustotal: false
+#extensions.osquery   : false
+#extensions.docker    : false
+#
+# ---------------------------------- Time out ----------------------------------
+#
+# Defines maximum timeout to be used on the Wazuh app requests.
+# It will be ignored if it is below 1500.
+# It means milliseconds before we consider a request as failed.
+# Default: 20000
+#timeout: 20000
+#
+# ------------------------------ Advanced indices ------------------------------
+#
+# Configure .wazuh indices shards and replicas.
+#wazuh.shards   : 1
+#wazuh.replicas : 0
+#
+# --------------------------- Index pattern selector ---------------------------
+#
+# Defines if the user is allowed to change the selected
+# index pattern directly from the Wazuh app top menu.
+# Default: true
+#ip.selector: true
+#
+# List of index patterns to be ignored
+#ip.ignore: []
+#
+# -------------------------------- X-Pack RBAC ---------------------------------
+#
+# Custom setting to enable/disable built-in X-Pack RBAC security capabilities.
+# Default: enabled
+#xpack.rbac.enabled: true
+#
+# ------------------------------ wazuh-monitoring ------------------------------
+#
+# Custom setting to enable/disable wazuh-monitoring indices.
+# Values: true, false, worker
+# If worker is given as value, the app will show the Agents status
+# visualization but won't insert data on wazuh-monitoring indices.
+# Default: true
+#wazuh.monitoring.enabled: true
+#
+# Custom setting to set the frequency for wazuh-monitoring indices cron task.
+# Default: 900 (s)
+#wazuh.monitoring.frequency: 900
+#
+# Configure wazuh-monitoring-3.x-* indices shards and replicas.
+#wazuh.monitoring.shards: 2
+#wazuh.monitoring.replicas: 0
+#
+# Configure wazuh-monitoring-3.x-* indices custom creation interval.
+# Values: h (hourly), d (daily), w (weekly), m (monthly)
+# Default: d
+#wazuh.monitoring.creation: d
+#
+# Default index pattern to use for Wazuh monitoring
+#wazuh.monitoring.pattern: wazuh-monitoring-3.x-*
+#
+#
+# ------------------------------- App privileges --------------------------------
+#admin: true
+#
+# ------------------------------- App logging level -----------------------------
+# Set the logging level for the Wazuh App log files.
+# Default value: info
+# Allowed values: info, debug
+#logs.level: info
+#
+#-------------------------------- API entries -----------------------------------
+#The following configuration is the default structure to define an API entry.
+# +#hosts: +# - : +# url: http(s):// +# port: +# user: +# password: + +hosts: +{% for api in wazuh_api_credentials %} + - {{ api['id'] }}: + url: {{ api['url'] }} + port: {{ api['port'] }} + user: {{ api['user'] }} + password: {{ api['password'] }} +{% endfor %} diff --git a/roles/wazuh/ansible-filebeat-oss/README.md b/roles/wazuh/ansible-filebeat-oss/README.md new file mode 100644 index 00000000..bed47531 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/README.md @@ -0,0 +1,39 @@ +Ansible Role: Filebeat for Elastic Stack +------------------------------------ + +An Ansible Role that installs [Filebeat-oss](https://www.elastic.co/products/beats/filebeat), this can be used in conjunction with [ansible-wazuh-manager](https://github.com/wazuh/wazuh-ansible/ansible-wazuh-server). + +Requirements +------------ + +This role will work on: + * Red Hat + * CentOS + * Fedora + * Debian + * Ubuntu + +Role Variables +-------------- + +Available variables are listed below, along with default values (see `defaults/main.yml`): + +``` + filebeat_output_elasticsearch_enabled: false + filebeat_output_elasticsearch_hosts: + - "localhost:9200" + +``` + +License and copyright +--------------------- + +WAZUH Copyright (C) 2020 Wazuh Inc. (License GPLv3) + +### Based on previous work from geerlingguy + + - https://github.com/geerlingguy/ansible-role-filebeat + +### Modified by Wazuh + +The playbooks have been modified by Wazuh, including some specific requirements, templates and configuration to improve integration with Wazuh ecosystem. diff --git a/roles/wazuh/ansible-filebeat-oss/defaults/main.yml b/roles/wazuh/ansible-filebeat-oss/defaults/main.yml new file mode 100644 index 00000000..7603fd51 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/defaults/main.yml @@ -0,0 +1,30 @@ +--- +filebeat_version: 7.7.0 + +filebeat_create_config: true + +filebeat_output_elasticsearch_enabled: false +filebeat_output_elasticsearch_hosts: + - "localhost:9200" + +filebeat_module_package_url: https://packages.wazuh.com/3.x/filebeat +filebeat_module_package_name: wazuh-filebeat-0.1.tar.gz +filebeat_module_package_path: /tmp/ +filebeat_module_destination: /usr/share/filebeat/module +filebeat_module_folder: /usr/share/filebeat/module/wazuh +elasticsearch_security_user: admin +elasticsearch_security_password: changeme +# Security plugin +filebeat_security: true +filebeat_security_user: admin +filebeat_security_password: changeme +filebeat_ssl_dir: /etc/pki/filebeat + +# Local path to store the generated certificates (OpenDistro security plugin) +local_certs_path: /tmp/opendistro-nodecerts + +elasticrepo: + apt: 'https://artifacts.elastic.co/packages/oss-7.x/apt' + yum: 'https://artifacts.elastic.co/packages/oss-7.x/yum' + gpg: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch' + key_id: '46095ACC8548582C1A2699A9D27D666CD88E42B4' diff --git a/roles/wazuh/ansible-filebeat-oss/handlers/main.yml b/roles/wazuh/ansible-filebeat-oss/handlers/main.yml new file mode 100644 index 00000000..96e15a22 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/handlers/main.yml @@ -0,0 +1,3 @@ +--- +- name: restart filebeat + service: name=filebeat state=restarted diff --git a/roles/wazuh/ansible-filebeat-oss/meta/main.yml b/roles/wazuh/ansible-filebeat-oss/meta/main.yml new file mode 100644 index 00000000..4fd7e900 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/meta/main.yml @@ -0,0 +1,29 @@ +--- +dependencies: [] + +galaxy_info: + author: Wazuh + description: Installing and maintaining Filebeat-oss. 
diff --git a/roles/wazuh/ansible-filebeat-oss/README.md b/roles/wazuh/ansible-filebeat-oss/README.md
new file mode 100644
index 00000000..bed47531
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/README.md
@@ -0,0 +1,39 @@
+Ansible Role: Filebeat for Elastic Stack
+------------------------------------
+
+An Ansible Role that installs [Filebeat-oss](https://www.elastic.co/products/beats/filebeat); it can be used in conjunction with [ansible-wazuh-manager](https://github.com/wazuh/wazuh-ansible/ansible-wazuh-server).
+
+Requirements
+------------
+
+This role will work on:
+ * Red Hat
+ * CentOS
+ * Fedora
+ * Debian
+ * Ubuntu
+
+Role Variables
+--------------
+
+Available variables are listed below, along with default values (see `defaults/main.yml`):
+
+```
+  filebeat_output_elasticsearch_enabled: false
+  filebeat_output_elasticsearch_hosts:
+    - "localhost:9200"
+
+```
+
+License and copyright
+---------------------
+
+WAZUH Copyright (C) 2020 Wazuh Inc. (License GPLv3)
+
+### Based on previous work from geerlingguy
+
+ - https://github.com/geerlingguy/ansible-role-filebeat
+
+### Modified by Wazuh
+
+The playbooks have been modified by Wazuh, including some specific requirements, templates and configuration to improve integration with Wazuh ecosystem.
diff --git a/roles/wazuh/ansible-filebeat-oss/defaults/main.yml b/roles/wazuh/ansible-filebeat-oss/defaults/main.yml
new file mode 100644
index 00000000..7603fd51
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/defaults/main.yml
@@ -0,0 +1,30 @@
+---
+filebeat_version: 7.7.0
+
+filebeat_create_config: true
+
+filebeat_output_elasticsearch_enabled: false
+filebeat_output_elasticsearch_hosts:
+  - "localhost:9200"
+
+filebeat_module_package_url: https://packages.wazuh.com/3.x/filebeat
+filebeat_module_package_name: wazuh-filebeat-0.1.tar.gz
+filebeat_module_package_path: /tmp/
+filebeat_module_destination: /usr/share/filebeat/module
+filebeat_module_folder: /usr/share/filebeat/module/wazuh
+elasticsearch_security_user: admin
+elasticsearch_security_password: changeme
+# Security plugin
+filebeat_security: true
+filebeat_security_user: admin
+filebeat_security_password: changeme
+filebeat_ssl_dir: /etc/pki/filebeat
+
+# Local path to store the generated certificates (OpenDistro security plugin)
+local_certs_path: /tmp/opendistro-nodecerts
+
+elasticrepo:
+  apt: 'https://artifacts.elastic.co/packages/oss-7.x/apt'
+  yum: 'https://artifacts.elastic.co/packages/oss-7.x/yum'
+  gpg: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch'
+  key_id: '46095ACC8548582C1A2699A9D27D666CD88E42B4'
diff --git a/roles/wazuh/ansible-filebeat-oss/handlers/main.yml b/roles/wazuh/ansible-filebeat-oss/handlers/main.yml
new file mode 100644
index 00000000..96e15a22
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/handlers/main.yml
@@ -0,0 +1,3 @@
+---
+- name: restart filebeat
+  service: name=filebeat state=restarted
diff --git a/roles/wazuh/ansible-filebeat-oss/meta/main.yml b/roles/wazuh/ansible-filebeat-oss/meta/main.yml
new file mode 100644
index 00000000..4fd7e900
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/meta/main.yml
@@ -0,0 +1,29 @@
+---
+dependencies: []
+
+galaxy_info:
+  author: Wazuh
+  description: Installing and maintaining Filebeat-oss.
+  company: wazuh.com
+  license: license (GPLv3)
+  min_ansible_version: 2.0
+  platforms:
+    - name: EL
+      versions:
+        - 6
+        - 7
+    - name: Fedora
+      versions:
+        - all
+    - name: Debian
+      versions:
+        - jessie
+    - name: Ubuntu
+      versions:
+        - precise
+        - trusty
+        - xenial
+  galaxy_tags:
+    - web
+    - system
+    - monitoring
diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/Debian.yml b/roles/wazuh/ansible-filebeat-oss/tasks/Debian.yml
new file mode 100644
index 00000000..33c94cf6
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/tasks/Debian.yml
@@ -0,0 +1,22 @@
+---
+- name: Debian/Ubuntu | Install apt-transport-https and ca-certificates
+  apt:
+    name:
+      - apt-transport-https
+      - ca-certificates
+    state: present
+  register: filebeat_ca_packages_install
+  until: filebeat_ca_packages_install is succeeded
+
+- name: Debian/Ubuntu | Add Elasticsearch apt key.
+  apt_key:
+    url: "{{ elasticrepo.gpg }}"
+    id: "{{ elasticrepo.key_id }}"
+    state: present
+
+- name: Debian/Ubuntu | Add Filebeat-oss repository.
+  apt_repository:
+    repo: "deb {{ elasticrepo.apt }} stable main"
+    state: present
+    update_cache: true
+  changed_when: false
diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/RMDebian.yml b/roles/wazuh/ansible-filebeat-oss/tasks/RMDebian.yml
new file mode 100644
index 00000000..25a33909
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/tasks/RMDebian.yml
@@ -0,0 +1,6 @@
+---
+- name: Debian/Ubuntu | Remove Filebeat repository (and clean up left-over metadata)
+  apt_repository:
+    repo: "deb {{ elasticrepo.apt }} stable main"
+    state: absent
+  changed_when: false
diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/RMRedHat.yml b/roles/wazuh/ansible-filebeat-oss/tasks/RMRedHat.yml
new file mode 100644
index 00000000..8565894e
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/tasks/RMRedHat.yml
@@ -0,0 +1,6 @@
+---
+- name: RedHat/CentOS/Fedora | Remove Filebeat repository (and clean up left-over metadata)
+  yum_repository:
+    name: elastic_oss-repo_7
+    state: absent
+  changed_when: false
diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/RedHat.yml b/roles/wazuh/ansible-filebeat-oss/tasks/RedHat.yml
new file mode 100644
index 00000000..74873aca
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/tasks/RedHat.yml
@@ -0,0 +1,9 @@
+---
+- name: RedHat/CentOS/Fedora/Amazon Linux | Install Filebeat repo
+  yum_repository:
+    name: elastic_oss-repo_7
+    description: Elastic repository for 7.x packages
+    baseurl: "{{ elasticrepo.yum }}"
+    gpgkey: "{{ elasticrepo.gpg }}"
+    gpgcheck: true
+  changed_when: false
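Taken together, a play that ships manager logs to a secured OpenDistro cluster only needs to override a handful of these defaults — a sketch, with placeholder hosts and credentials:

```
- hosts: managers
  roles:
    - role: ../roles/wazuh/ansible-filebeat-oss
      filebeat_output_elasticsearch_hosts:
        - "172.16.0.161:9200"
      filebeat_security: true
      filebeat_security_user: admin
      filebeat_security_password: changeme
```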
diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/config.yml b/roles/wazuh/ansible-filebeat-oss/tasks/config.yml
new file mode 100644
index 00000000..f64c8ceb
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/tasks/config.yml
@@ -0,0 +1,22 @@
+---
+- block:
+    - name: Copy Filebeat configuration.
+      template:
+        src: filebeat.yml.j2
+        dest: "/etc/filebeat/filebeat.yml"
+        owner: root
+        group: root
+        mode: 0400
+      notify: restart filebeat
+
+    - name: Copy Elasticsearch template.
+      template:
+        src: elasticsearch.yml.j2
+        dest: "/etc/filebeat/wazuh-template.json"
+        owner: root
+        group: root
+        mode: 0400
+      notify: restart filebeat
+
+  tags:
+    - configure
\ No newline at end of file
diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/main.yml b/roles/wazuh/ansible-filebeat-oss/tasks/main.yml
new file mode 100644
index 00000000..2c5d3de1
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/tasks/main.yml
@@ -0,0 +1,67 @@
+---
+- include_tasks: RedHat.yml
+  when: ansible_os_family == 'RedHat'
+
+- include_tasks: Debian.yml
+  when: ansible_os_family == 'Debian'
+
+- name: Install Filebeat
+  package:
+    name: filebeat
+    state: present
+  register: install
+  tags:
+    - install
+    - init
+
+- name: Checking if the Filebeat module folder exists
+  stat:
+    path: "{{ filebeat_module_folder }}"
+  register: filebeat_module_folder_stat
+
+- name: Download Filebeat module package
+  get_url:
+    url: "{{ filebeat_module_package_url }}/{{ filebeat_module_package_name }}"
+    dest: "{{ filebeat_module_package_path }}"
+  when: not filebeat_module_folder_stat.stat.exists
+
+- name: Unpack Filebeat module package
+  unarchive:
+    src: "{{ filebeat_module_package_path }}/{{ filebeat_module_package_name }}"
+    dest: "{{ filebeat_module_destination }}"
+    remote_src: yes
+  when: not filebeat_module_folder_stat.stat.exists
+
+- name: Setting permissions for Filebeat module folder
+  file: dest={{ filebeat_module_folder }} mode=u=rwX,g=rwX,o=rwX recurse=yes
+  when: not filebeat_module_folder_stat.stat.exists
+
+- name: Checking if Filebeat module package file exists
+  stat:
+    path: "{{ filebeat_module_package_path }}/{{ filebeat_module_package_name }}"
+  register: filebeat_module_package_stat
+  when: filebeat_module_package_stat is not defined
+
+- name: Delete Filebeat module package file
+  file:
+    state: absent
+    path: "{{ filebeat_module_package_path }}/{{ filebeat_module_package_name }}"
+  when: filebeat_module_package_stat.stat.exists
+
+- import_tasks: config.yml
+  notify: restart filebeat
+
+- include_tasks: security_actions.yml
+  when: filebeat_security
+
+- name: Ensure Filebeat is started and enabled at boot.
+  service:
+    name: filebeat
+    state: started
+    enabled: true
+
+- include_tasks: "RMRedHat.yml"
+  when: ansible_os_family == "RedHat"
+
+- include_tasks: "RMDebian.yml"
+  when: ansible_os_family == "Debian"
diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/security_actions.yml b/roles/wazuh/ansible-filebeat-oss/tasks/security_actions.yml
new file mode 100644
index 00000000..dfea91ee
--- /dev/null
+++ b/roles/wazuh/ansible-filebeat-oss/tasks/security_actions.yml
@@ -0,0 +1,28 @@
+- block:
+
+  - name: Ensure Filebeat SSL key pair directory exists.
+ file: + path: "{{ filebeat_ssl_dir }}" + state: directory + + - name: Copy the certificates from local to the Manager instance + copy: + src: "{{ local_certs_path }}/certs/{{ item }}" + dest: "{{ filebeat_ssl_dir }}" + mode: 0644 + with_items: + - "{{ inventory_hostname }}.key" + - "{{ inventory_hostname }}.pem" + - "root-ca.pem" + + - name: Ensuring folder & certs permissions + file: + path: "{{ filebeat_ssl_dir }}/" + mode: 0774 + state: directory + recurse: yes + + tags: + - security + when: + - filebeat_security diff --git a/roles/wazuh/ansible-filebeat-oss/templates/elasticsearch.yml.j2 b/roles/wazuh/ansible-filebeat-oss/templates/elasticsearch.yml.j2 new file mode 100644 index 00000000..88d50c3f --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/templates/elasticsearch.yml.j2 @@ -0,0 +1,1800 @@ +{ + "order": 0, + "index_patterns": [ + "wazuh-alerts-3.x-*", + "wazuh-archives-3.x-*" + ], + "settings": { + "index.refresh_interval": "5s", + "index.number_of_shards": "3", + "index.number_of_replicas": "0", + "index.auto_expand_replicas": "0-1", + "index.mapping.total_fields.limit": 10000, + "index.query.default_field": [ + "GeoLocation.city_name", + "GeoLocation.continent_code", + "GeoLocation.country_code2", + "GeoLocation.country_code3", + "GeoLocation.country_name", + "GeoLocation.ip", + "GeoLocation.postal_code", + "GeoLocation.real_region_name", + "GeoLocation.region_name", + "GeoLocation.timezone", + "agent.id", + "agent.ip", + "agent.name", + "cluster.name", + "cluster.node", + "command", + "data", + "data.action", + "data.audit", + "data.audit.acct", + "data.audit.arch", + "data.audit.auid", + "data.audit.command", + "data.audit.cwd", + "data.audit.dev", + "data.audit.directory.inode", + "data.audit.directory.mode", + "data.audit.directory.name", + "data.audit.egid", + "data.audit.enforcing", + "data.audit.euid", + "data.audit.exe", + "data.audit.execve.a0", + "data.audit.execve.a1", + "data.audit.execve.a2", + "data.audit.execve.a3", + "data.audit.exit", + "data.audit.file.inode", + "data.audit.file.mode", + "data.audit.file.name", + "data.audit.fsgid", + "data.audit.fsuid", + "data.audit.gid", + "data.audit.id", + "data.audit.key", + "data.audit.list", + "data.audit.old-auid", + "data.audit.old-ses", + "data.audit.old_enforcing", + "data.audit.old_prom", + "data.audit.op", + "data.audit.pid", + "data.audit.ppid", + "data.audit.prom", + "data.audit.res", + "data.audit.session", + "data.audit.sgid", + "data.audit.srcip", + "data.audit.subj", + "data.audit.success", + "data.audit.suid", + "data.audit.syscall", + "data.audit.tty", + "data.audit.uid", + "data.aws.accountId", + "data.aws.account_id", + "data.aws.action", + "data.aws.actor", + "data.aws.aws_account_id", + "data.aws.description", + "data.aws.dstport", + "data.aws.errorCode", + "data.aws.errorMessage", + "data.aws.eventID", + "data.aws.eventName", + "data.aws.eventSource", + "data.aws.eventType", + "data.aws.id", + "data.aws.name", + "data.aws.requestParameters.accessKeyId", + "data.aws.requestParameters.bucketName", + "data.aws.requestParameters.gatewayId", + "data.aws.requestParameters.groupDescription", + "data.aws.requestParameters.groupId", + "data.aws.requestParameters.groupName", + "data.aws.requestParameters.host", + "data.aws.requestParameters.hostedZoneId", + "data.aws.requestParameters.instanceId", + "data.aws.requestParameters.instanceProfileName", + "data.aws.requestParameters.loadBalancerName", + "data.aws.requestParameters.loadBalancerPorts", + "data.aws.requestParameters.masterUserPassword", + 
"data.aws.requestParameters.masterUsername", + "data.aws.requestParameters.name", + "data.aws.requestParameters.natGatewayId", + "data.aws.requestParameters.networkAclId", + "data.aws.requestParameters.path", + "data.aws.requestParameters.policyName", + "data.aws.requestParameters.port", + "data.aws.requestParameters.stackId", + "data.aws.requestParameters.stackName", + "data.aws.requestParameters.subnetId", + "data.aws.requestParameters.subnetIds", + "data.aws.requestParameters.volumeId", + "data.aws.requestParameters.vpcId", + "data.aws.resource.accessKeyDetails.accessKeyId", + "data.aws.resource.accessKeyDetails.principalId", + "data.aws.resource.accessKeyDetails.userName", + "data.aws.resource.instanceDetails.instanceId", + "data.aws.resource.instanceDetails.instanceState", + "data.aws.resource.instanceDetails.networkInterfaces.privateDnsName", + "data.aws.resource.instanceDetails.networkInterfaces.publicDnsName", + "data.aws.resource.instanceDetails.networkInterfaces.subnetId", + "data.aws.resource.instanceDetails.networkInterfaces.vpcId", + "data.aws.resource.instanceDetails.tags.value", + "data.aws.responseElements.AssociateVpcCidrBlockResponse.vpcId", + "data.aws.responseElements.description", + "data.aws.responseElements.instanceId", + "data.aws.responseElements.instances.instanceId", + "data.aws.responseElements.instancesSet.items.instanceId", + "data.aws.responseElements.listeners.port", + "data.aws.responseElements.loadBalancerName", + "data.aws.responseElements.loadBalancers.vpcId", + "data.aws.responseElements.loginProfile.userName", + "data.aws.responseElements.networkAcl.vpcId", + "data.aws.responseElements.ownerId", + "data.aws.responseElements.publicIp", + "data.aws.responseElements.user.userId", + "data.aws.responseElements.user.userName", + "data.aws.responseElements.volumeId", + "data.aws.service.serviceName", + "data.aws.severity", + "data.aws.source", + "data.aws.sourceIPAddress", + "data.aws.srcport", + "data.aws.userIdentity.accessKeyId", + "data.aws.userIdentity.accountId", + "data.aws.userIdentity.userName", + "data.aws.vpcEndpointId", + "data.command", + "data.data", + "data.docker.Actor.Attributes.container", + "data.docker.Actor.Attributes.image", + "data.docker.Actor.Attributes.name", + "data.docker.Actor.ID", + "data.docker.id", + "data.docker.message", + "data.docker.status", + "data.dstip", + "data.dstport", + "data.dstuser", + "data.extra_data", + "data.hardware.serial", + "data.id", + "data.integration", + "data.netinfo.iface.adapter", + "data.netinfo.iface.ipv4.address", + "data.netinfo.iface.ipv6.address", + "data.netinfo.iface.mac", + "data.netinfo.iface.name", + "data.os.architecture", + "data.os.build", + "data.os.codename", + "data.os.hostname", + "data.os.major", + "data.os.minor", + "data.os.name", + "data.os.platform", + "data.os.release", + "data.os.release_version", + "data.os.sysname", + "data.os.version", + "data.oscap.check.description", + "data.oscap.check.id", + "data.oscap.check.identifiers", + "data.oscap.check.oval.id", + "data.oscap.check.rationale", + "data.oscap.check.references", + "data.oscap.check.result", + "data.oscap.check.severity", + "data.oscap.check.title", + "data.oscap.scan.benchmark.id", + "data.oscap.scan.content", + "data.oscap.scan.id", + "data.oscap.scan.profile.id", + "data.oscap.scan.profile.title", + "data.osquery.columns.address", + "data.osquery.columns.command", + "data.osquery.columns.description", + "data.osquery.columns.dst_ip", + "data.osquery.columns.gid", + "data.osquery.columns.hostname", + 
"data.osquery.columns.md5", + "data.osquery.columns.path", + "data.osquery.columns.sha1", + "data.osquery.columns.sha256", + "data.osquery.columns.src_ip", + "data.osquery.columns.user", + "data.osquery.columns.username", + "data.osquery.name", + "data.osquery.pack", + "data.port.process", + "data.port.protocol", + "data.port.state", + "data.process.args", + "data.process.cmd", + "data.process.egroup", + "data.process.euser", + "data.process.fgroup", + "data.process.name", + "data.process.rgroup", + "data.process.ruser", + "data.process.sgroup", + "data.process.state", + "data.process.suser", + "data.program.architecture", + "data.program.description", + "data.program.format", + "data.program.location", + "data.program.multiarch", + "data.program.name", + "data.program.priority", + "data.program.section", + "data.program.source", + "data.program.vendor", + "data.program.version", + "data.protocol", + "data.pwd", + "data.sca", + "data.sca.check.compliance.cis", + "data.sca.check.compliance.cis_csc", + "data.sca.check.compliance.pci_dss", + "data.sca.check.compliance.hipaa", + "data.sca.check.compliance.nist_800_53", + "data.sca.check.description", + "data.sca.check.directory", + "data.sca.check.file", + "data.sca.check.id", + "data.sca.check.previous_result", + "data.sca.check.process", + "data.sca.check.rationale", + "data.sca.check.reason", + "data.sca.check.references", + "data.sca.check.registry", + "data.sca.check.remediation", + "data.sca.check.result", + "data.sca.check.status", + "data.sca.check.title", + "data.sca.description", + "data.sca.file", + "data.sca.invalid", + "data.sca.name", + "data.sca.policy", + "data.sca.policy_id", + "data.sca.scan_id", + "data.sca.total_checks", + "data.script", + "data.src_ip", + "data.src_port", + "data.srcip", + "data.srcport", + "data.srcuser", + "data.status", + "data.system_name", + "data.title", + "data.tty", + "data.uid", + "data.url", + "data.virustotal.description", + "data.virustotal.error", + "data.virustotal.found", + "data.virustotal.permalink", + "data.virustotal.scan_date", + "data.virustotal.sha1", + "data.virustotal.source.alert_id", + "data.virustotal.source.file", + "data.virustotal.source.md5", + "data.virustotal.source.sha1", + "data.vulnerability.advisories", + "data.vulnerability.bugzilla_reference", + "data.vulnerability.cve", + "data.vulnerability.cvss.cvss2.base_score", + "data.vulnerability.cvss.cvss2.exploitability_score", + "data.vulnerability.cvss.cvss2.impact_score", + "data.vulnerability.cvss.cvss2.vector.access_complexity", + "data.vulnerability.cvss.cvss2.vector.attack_vector", + "data.vulnerability.cvss.cvss2.vector.authentication", + "data.vulnerability.cvss.cvss2.vector.availability", + "data.vulnerability.cvss.cvss2.vector.confidentiality_impact", + "data.vulnerability.cvss.cvss2.vector.integrity_impact", + "data.vulnerability.cvss.cvss2.vector.privileges_required", + "data.vulnerability.cvss.cvss2.vector.scope", + "data.vulnerability.cvss.cvss2.vector.user_interaction", + "data.vulnerability.cvss.cvss3.base_score", + "data.vulnerability.cvss.cvss3.exploitability_score", + "data.vulnerability.cvss.cvss3.impact_score", + "data.vulnerability.cvss.cvss3.vector.access_complexity", + "data.vulnerability.cvss.cvss3.vector.attack_vector", + "data.vulnerability.cvss.cvss3.vector.authentication", + "data.vulnerability.cvss.cvss3.vector.availability", + "data.vulnerability.cvss.cvss3.vector.confidentiality_impact", + "data.vulnerability.cvss.cvss3.vector.integrity_impact", + 
"data.vulnerability.cvss.cvss3.vector.privileges_required", + "data.vulnerability.cvss.cvss3.vector.scope", + "data.vulnerability.cvss.cvss3.vector.user_interaction", + "data.vulnerability.cwe_reference", + "data.vulnerability.package.architecture", + "data.vulnerability.package.condition", + "data.vulnerability.package.generated_cpe", + "data.vulnerability.package.name", + "data.vulnerability.package.version", + "data.vulnerability.rationale", + "data.vulnerability.reference", + "data.vulnerability.severity", + "data.vulnerability.state", + "data.vulnerability.title", + "data.win.eventdata.auditPolicyChanges", + "data.win.eventdata.auditPolicyChangesId", + "data.win.eventdata.binary", + "data.win.eventdata.category", + "data.win.eventdata.categoryId", + "data.win.eventdata.data", + "data.win.eventdata.image", + "data.win.eventdata.ipAddress", + "data.win.eventdata.ipPort", + "data.win.eventdata.keyName", + "data.win.eventdata.logonGuid", + "data.win.eventdata.logonProcessName", + "data.win.eventdata.operation", + "data.win.eventdata.parentImage", + "data.win.eventdata.processId", + "data.win.eventdata.processName", + "data.win.eventdata.providerName", + "data.win.eventdata.returnCode", + "data.win.eventdata.service", + "data.win.eventdata.status", + "data.win.eventdata.subcategory", + "data.win.eventdata.subcategoryGuid", + "data.win.eventdata.subcategoryId", + "data.win.eventdata.subjectDomainName", + "data.win.eventdata.subjectLogonId", + "data.win.eventdata.subjectUserName", + "data.win.eventdata.subjectUserSid", + "data.win.eventdata.targetDomainName", + "data.win.eventdata.targetLinkedLogonId", + "data.win.eventdata.targetLogonId", + "data.win.eventdata.targetUserName", + "data.win.eventdata.targetUserSid", + "data.win.eventdata.workstationName", + "data.win.system.channel", + "data.win.system.computer", + "data.win.system.eventID", + "data.win.system.eventRecordID", + "data.win.system.eventSourceName", + "data.win.system.keywords", + "data.win.system.level", + "data.win.system.message", + "data.win.system.opcode", + "data.win.system.processID", + "data.win.system.providerGuid", + "data.win.system.providerName", + "data.win.system.securityUserID", + "data.win.system.severityValue", + "data.win.system.userID", + "decoder.ftscomment", + "decoder.name", + "decoder.parent", + "full_log", + "host", + "id", + "input", + "location", + "manager.name", + "message", + "offset", + "predecoder.hostname", + "predecoder.program_name", + "previous_log", + "previous_output", + "program_name", + "rule.cis", + "rule.cve", + "rule.description", + "rule.gdpr", + "rule.gpg13", + "rule.groups", + "rule.id", + "rule.info", + "rule.pci_dss", + "rule.hipaa", + "rule.nist_800_53", + "syscheck.audit.effective_user.id", + "syscheck.audit.effective_user.name", + "syscheck.audit.group.id", + "syscheck.audit.group.name", + "syscheck.audit.login_user.id", + "syscheck.audit.login_user.name", + "syscheck.audit.process.id", + "syscheck.audit.process.name", + "syscheck.audit.process.ppid", + "syscheck.audit.user.id", + "syscheck.audit.user.name", + "syscheck.diff", + "syscheck.event", + "syscheck.gid_after", + "syscheck.gid_before", + "syscheck.gname_after", + "syscheck.gname_before", + "syscheck.inode_after", + "syscheck.inode_before", + "syscheck.md5_after", + "syscheck.md5_before", + "syscheck.path", + "syscheck.perm_after", + "syscheck.perm_before", + "syscheck.sha1_after", + "syscheck.sha1_before", + "syscheck.sha256_after", + "syscheck.sha256_before", + "syscheck.tags", + "syscheck.uid_after", + 
"syscheck.uid_before", + "syscheck.uname_after", + "syscheck.uname_before", + "title", + "type" + ] + }, + "mappings": { + "dynamic_templates": [ + { + "string_as_keyword": { + "mapping": { + "type": "keyword" + }, + "match_mapping_type": "string" + } + } + ], + "date_detection": false, + "properties": { + "@timestamp": { + "type": "date" + }, + "timestamp": { + "type": "date", + "format": "date_optional_time||epoch_millis" + }, + "@version": { + "type": "text" + }, + "agent": { + "properties": { + "ip": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "manager": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "cluster": { + "properties": { + "name": { + "type": "keyword" + }, + "node": { + "type": "keyword" + } + } + }, + "full_log": { + "type": "text" + }, + "previous_log": { + "type": "text" + }, + "GeoLocation": { + "properties": { + "area_code": { + "type": "long" + }, + "city_name": { + "type": "keyword" + }, + "continent_code": { + "type": "text" + }, + "coordinates": { + "type": "double" + }, + "country_code2": { + "type": "text" + }, + "country_code3": { + "type": "text" + }, + "country_name": { + "type": "keyword" + }, + "dma_code": { + "type": "long" + }, + "ip": { + "type": "keyword" + }, + "latitude": { + "type": "double" + }, + "location": { + "type": "geo_point" + }, + "longitude": { + "type": "double" + }, + "postal_code": { + "type": "keyword" + }, + "real_region_name": { + "type": "keyword" + }, + "region_name": { + "type": "keyword" + }, + "timezone": { + "type": "text" + } + } + }, + "host": { + "type": "keyword" + }, + "syscheck": { + "properties": { + "path": { + "type": "keyword" + }, + "sha1_before": { + "type": "keyword" + }, + "sha1_after": { + "type": "keyword" + }, + "uid_before": { + "type": "keyword" + }, + "uid_after": { + "type": "keyword" + }, + "gid_before": { + "type": "keyword" + }, + "gid_after": { + "type": "keyword" + }, + "perm_before": { + "type": "keyword" + }, + "perm_after": { + "type": "keyword" + }, + "md5_after": { + "type": "keyword" + }, + "md5_before": { + "type": "keyword" + }, + "gname_after": { + "type": "keyword" + }, + "gname_before": { + "type": "keyword" + }, + "inode_after": { + "type": "keyword" + }, + "inode_before": { + "type": "keyword" + }, + "mtime_after": { + "type": "date", + "format": "date_optional_time" + }, + "mtime_before": { + "type": "date", + "format": "date_optional_time" + }, + "uname_after": { + "type": "keyword" + }, + "uname_before": { + "type": "keyword" + }, + "size_before": { + "type": "long" + }, + "size_after": { + "type": "long" + }, + "diff": { + "type": "keyword" + }, + "event": { + "type": "keyword" + }, + "audit": { + "properties": { + "effective_user": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "group": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "login_user": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "process": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "ppid": { + "type": "keyword" + } + } + }, + "user": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + } + } + }, + "sha256_after": { + "type": "keyword" + }, + "sha256_before": { + "type": "keyword" + }, + "tags": { + "type": "keyword" + } + } + }, + "location": { + "type": "keyword" + }, + "message": { 
+ "type": "text" + }, + "offset": { + "type": "keyword" + }, + "rule": { + "properties": { + "description": { + "type": "keyword" + }, + "groups": { + "type": "keyword" + }, + "level": { + "type": "long" + }, + "id": { + "type": "keyword" + }, + "cve": { + "type": "keyword" + }, + "info": { + "type": "keyword" + }, + "frequency": { + "type": "long" + }, + "firedtimes": { + "type": "long" + }, + "cis": { + "type": "keyword" + }, + "pci_dss": { + "type": "keyword" + }, + "gdpr": { + "type": "keyword" + }, + "gpg13": { + "type": "keyword" + }, + "hipaa": { + "type": "keyword" + }, + "nist_800_53": { + "type": "keyword" + }, + "mail": { + "type": "boolean" + } + } + }, + "predecoder": { + "properties": { + "program_name": { + "type": "keyword" + }, + "timestamp": { + "type": "keyword" + }, + "hostname": { + "type": "keyword" + } + } + }, + "decoder": { + "properties": { + "parent": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "ftscomment": { + "type": "keyword" + }, + "fts": { + "type": "long" + }, + "accumulate": { + "type": "long" + } + } + }, + "data": { + "properties": { + "audit": { + "properties": { + "acct": { + "type": "keyword" + }, + "arch": { + "type": "keyword" + }, + "auid": { + "type": "keyword" + }, + "command": { + "type": "keyword" + }, + "cwd": { + "type": "keyword" + }, + "dev": { + "type": "keyword" + }, + "directory": { + "properties": { + "inode": { + "type": "keyword" + }, + "mode": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "egid": { + "type": "keyword" + }, + "enforcing": { + "type": "keyword" + }, + "euid": { + "type": "keyword" + }, + "exe": { + "type": "keyword" + }, + "execve": { + "properties": { + "a0": { + "type": "keyword" + }, + "a1": { + "type": "keyword" + }, + "a2": { + "type": "keyword" + }, + "a3": { + "type": "keyword" + } + } + }, + "exit": { + "type": "keyword" + }, + "file": { + "properties": { + "inode": { + "type": "keyword" + }, + "mode": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "fsgid": { + "type": "keyword" + }, + "fsuid": { + "type": "keyword" + }, + "gid": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "key": { + "type": "keyword" + }, + "list": { + "type": "keyword" + }, + "old-auid": { + "type": "keyword" + }, + "old-ses": { + "type": "keyword" + }, + "old_enforcing": { + "type": "keyword" + }, + "old_prom": { + "type": "keyword" + }, + "op": { + "type": "keyword" + }, + "pid": { + "type": "keyword" + }, + "ppid": { + "type": "keyword" + }, + "prom": { + "type": "keyword" + }, + "res": { + "type": "keyword" + }, + "session": { + "type": "keyword" + }, + "sgid": { + "type": "keyword" + }, + "srcip": { + "type": "keyword" + }, + "subj": { + "type": "keyword" + }, + "success": { + "type": "keyword" + }, + "suid": { + "type": "keyword" + }, + "syscall": { + "type": "keyword" + }, + "tty": { + "type": "keyword" + }, + "type": { + "type": "keyword" + }, + "uid": { + "type": "keyword" + } + } + }, + "protocol": { + "type": "keyword" + }, + "action": { + "type": "keyword" + }, + "srcip": { + "type": "keyword" + }, + "dstip": { + "type": "keyword" + }, + "srcport": { + "type": "keyword" + }, + "dstport": { + "type": "keyword" + }, + "srcuser": { + "type": "keyword" + }, + "dstuser": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "status": { + "type": "keyword" + }, + "data": { + "type": "keyword" + }, + "extra_data": { + "type": "keyword" + }, + "system_name": { + "type": "keyword" + }, + "url": { + "type": "keyword" + }, + "oscap": { 
+ "properties": { + "check": { + "properties": { + "description": { + "type": "text" + }, + "id": { + "type": "keyword" + }, + "identifiers": { + "type": "text" + }, + "oval": { + "properties": { + "id": { + "type": "keyword" + } + } + }, + "rationale": { + "type": "text" + }, + "references": { + "type": "text" + }, + "result": { + "type": "keyword" + }, + "severity": { + "type": "keyword" + }, + "title": { + "type": "keyword" + } + } + }, + "scan": { + "properties": { + "benchmark": { + "properties": { + "id": { + "type": "keyword" + } + } + }, + "content": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "profile": { + "properties": { + "id": { + "type": "keyword" + }, + "title": { + "type": "keyword" + } + } + }, + "return_code": { + "type": "long" + }, + "score": { + "type": "double" + } + } + } + } + }, + "type": { + "type": "keyword" + }, + "netinfo": { + "properties": { + "iface": { + "properties": { + "name": { + "type": "keyword" + }, + "mac": { + "type": "keyword" + }, + "adapter": { + "type": "keyword" + }, + "type": { + "type": "keyword" + }, + "state": { + "type": "keyword" + }, + "mtu": { + "type": "long" + }, + "tx_bytes": { + "type": "long" + }, + "rx_bytes": { + "type": "long" + }, + "tx_errors": { + "type": "long" + }, + "rx_errors": { + "type": "long" + }, + "tx_dropped": { + "type": "long" + }, + "rx_dropped": { + "type": "long" + }, + "tx_packets": { + "type": "long" + }, + "rx_packets": { + "type": "long" + }, + "ipv4": { + "properties": { + "gateway": { + "type": "keyword" + }, + "dhcp": { + "type": "keyword" + }, + "address": { + "type": "keyword" + }, + "netmask": { + "type": "keyword" + }, + "broadcast": { + "type": "keyword" + }, + "metric": { + "type": "long" + } + } + }, + "ipv6": { + "properties": { + "gateway": { + "type": "keyword" + }, + "dhcp": { + "type": "keyword" + }, + "address": { + "type": "keyword" + }, + "netmask": { + "type": "keyword" + }, + "broadcast": { + "type": "keyword" + }, + "metric": { + "type": "long" + } + } + } + } + } + } + }, + "os": { + "properties": { + "hostname": { + "type": "keyword" + }, + "architecture": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "version": { + "type": "keyword" + }, + "codename": { + "type": "keyword" + }, + "major": { + "type": "keyword" + }, + "minor": { + "type": "keyword" + }, + "build": { + "type": "keyword" + }, + "platform": { + "type": "keyword" + }, + "sysname": { + "type": "keyword" + }, + "release": { + "type": "keyword" + }, + "release_version": { + "type": "keyword" + } + } + }, + "port": { + "properties": { + "protocol": { + "type": "keyword" + }, + "local_ip": { + "type": "ip" + }, + "local_port": { + "type": "long" + }, + "remote_ip": { + "type": "ip" + }, + "remote_port": { + "type": "long" + }, + "tx_queue": { + "type": "long" + }, + "rx_queue": { + "type": "long" + }, + "inode": { + "type": "long" + }, + "state": { + "type": "keyword" + }, + "pid": { + "type": "long" + }, + "process": { + "type": "keyword" + } + } + }, + "hardware": { + "properties": { + "serial": { + "type": "keyword" + }, + "cpu_name": { + "type": "keyword" + }, + "cpu_cores": { + "type": "long" + }, + "cpu_mhz": { + "type": "double" + }, + "ram_total": { + "type": "long" + }, + "ram_free": { + "type": "long" + }, + "ram_usage": { + "type": "long" + } + } + }, + "program": { + "properties": { + "format": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "priority": { + "type": "keyword" + }, + "section": { + "type": "keyword" + }, + "size": { + "type": "long" + }, + 
"vendor": { + "type": "keyword" + }, + "install_time": { + "type": "keyword" + }, + "version": { + "type": "keyword" + }, + "architecture": { + "type": "keyword" + }, + "multiarch": { + "type": "keyword" + }, + "source": { + "type": "keyword" + }, + "description": { + "type": "keyword" + }, + "location": { + "type": "keyword" + } + } + }, + "process": { + "properties": { + "pid": { + "type": "long" + }, + "name": { + "type": "keyword" + }, + "state": { + "type": "keyword" + }, + "ppid": { + "type": "long" + }, + "utime": { + "type": "long" + }, + "stime": { + "type": "long" + }, + "cmd": { + "type": "keyword" + }, + "args": { + "type": "keyword" + }, + "euser": { + "type": "keyword" + }, + "ruser": { + "type": "keyword" + }, + "suser": { + "type": "keyword" + }, + "egroup": { + "type": "keyword" + }, + "sgroup": { + "type": "keyword" + }, + "fgroup": { + "type": "keyword" + }, + "rgroup": { + "type": "keyword" + }, + "priority": { + "type": "long" + }, + "nice": { + "type": "long" + }, + "size": { + "type": "long" + }, + "vm_size": { + "type": "long" + }, + "resident": { + "type": "long" + }, + "share": { + "type": "long" + }, + "start_time": { + "type": "long" + }, + "pgrp": { + "type": "long" + }, + "session": { + "type": "long" + }, + "nlwp": { + "type": "long" + }, + "tgid": { + "type": "long" + }, + "tty": { + "type": "long" + }, + "processor": { + "type": "long" + } + } + }, + "sca": { + "properties": { + "type": { + "type": "keyword" + }, + "scan_id": { + "type": "keyword" + }, + "policy": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "file": { + "type": "keyword" + }, + "description": { + "type": "keyword" + }, + "passed": { + "type": "integer" + }, + "failed": { + "type": "integer" + }, + "score": { + "type": "long" + }, + "check": { + "properties": { + "id": { + "type": "keyword" + }, + "title": { + "type": "keyword" + }, + "description": { + "type": "keyword" + }, + "rationale": { + "type": "keyword" + }, + "remediation": { + "type": "keyword" + }, + "compliance": { + "properties": { + "cis": { + "type": "keyword" + }, + "cis_csc": { + "type": "keyword" + }, + "pci_dss": { + "type": "keyword" + }, + "hipaa": { + "type": "keyword" + }, + "nist_800_53": { + "type": "keyword" + } + } + }, + "references": { + "type": "keyword" + }, + "file": { + "type": "keyword" + }, + "directory": { + "type": "keyword" + }, + "registry": { + "type": "keyword" + }, + "process": { + "type": "keyword" + }, + "result": { + "type": "keyword" + }, + "previous_result": { + "type": "keyword" + }, + "reason": { + "type": "keyword" + }, + "status": { + "type": "keyword" + } + } + }, + "invalid": { + "type": "keyword" + }, + "policy_id": { + "type": "keyword" + }, + "total_checks": { + "type": "keyword" + } + } + }, + "command": { + "type": "keyword" + }, + "integration": { + "type": "keyword" + }, + "timestamp": { + "type": "date" + }, + "title": { + "type": "keyword" + }, + "uid": { + "type": "keyword" + }, + "virustotal": { + "properties": { + "description": { + "type": "keyword" + }, + "error": { + "type": "keyword" + }, + "found": { + "type": "keyword" + }, + "malicious": { + "type": "keyword" + }, + "permalink": { + "type": "keyword" + }, + "positives": { + "type": "keyword" + }, + "scan_date": { + "type": "keyword" + }, + "sha1": { + "type": "keyword" + }, + "source": { + "properties": { + "alert_id": { + "type": "keyword" + }, + "file": { + "type": "keyword" + }, + "md5": { + "type": "keyword" + }, + "sha1": { + "type": "keyword" + } + } + }, + "total": { + "type": "keyword" + } + 
} + }, + "vulnerability": { + "properties": { + "advisories": { + "type": "keyword" + }, + "bugzilla_reference": { + "type": "keyword" + }, + "cve": { + "type": "keyword" + }, + "cvss": { + "properties": { + "cvss2": { + "properties": { + "base_score": { + "type": "keyword" + }, + "exploitability_score": { + "type": "keyword" + }, + "impact_score": { + "type": "keyword" + }, + "vector": { + "properties": { + "access_complexity": { + "type": "keyword" + }, + "attack_vector": { + "type": "keyword" + }, + "authentication": { + "type": "keyword" + }, + "availability": { + "type": "keyword" + }, + "confidentiality_impact": { + "type": "keyword" + }, + "integrity_impact": { + "type": "keyword" + }, + "privileges_required": { + "type": "keyword" + }, + "scope": { + "type": "keyword" + }, + "user_interaction": { + "type": "keyword" + } + } + } + } + }, + "cvss3": { + "properties": { + "base_score": { + "type": "keyword" + }, + "exploitability_score": { + "type": "keyword" + }, + "impact_score": { + "type": "keyword" + }, + "vector": { + "properties": { + "access_complexity": { + "type": "keyword" + }, + "attack_vector": { + "type": "keyword" + }, + "authentication": { + "type": "keyword" + }, + "availability": { + "type": "keyword" + }, + "confidentiality_impact": { + "type": "keyword" + }, + "integrity_impact": { + "type": "keyword" + }, + "privileges_required": { + "type": "keyword" + }, + "scope": { + "type": "keyword" + }, + "user_interaction": { + "type": "keyword" + } + } + } + } + } + } + }, + "cwe_reference": { + "type": "keyword" + }, + "package": { + "properties": { + "architecture": { + "type": "keyword" + }, + "condition": { + "type": "keyword" + }, + "generated_cpe": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "version": { + "type": "keyword" + } + } + }, + "published": { + "type": "date" + }, + "updated": { + "type": "date" + }, + "rationale": { + "type": "keyword" + }, + "reference": { + "type": "keyword" + }, + "severity": { + "type": "keyword" + }, + "state": { + "type": "keyword" + }, + "title": { + "type": "keyword" + } + } + }, + "aws": { + "properties": { + "bytes": { + "type": "long" + }, + "dstaddr": { + "type": "ip" + }, + "srcaddr": { + "type": "ip" + }, + "end": { + "type": "date" + }, + "start": { + "type": "date" + }, + "source_ip_address": { + "type": "ip" + }, + "service": { + "properties": { + "count": { + "type": "long" + }, + "action.networkConnectionAction.remoteIpDetails": { + "properties": { + "ipAddressV4": { + "type": "ip" + }, + "geoLocation": { + "type": "geo_point" + } + } + }, + "eventFirstSeen": { + "type": "date" + }, + "eventLastSeen": { + "type": "date" + } + } + }, + "createdAt": { + "type": "date" + }, + "updatedAt": { + "type": "date" + }, + "resource.instanceDetails": { + "properties": { + "launchTime": { + "type": "date" + }, + "networkInterfaces": { + "properties": { + "privateIpAddress": { + "type": "ip" + }, + "publicIp": { + "type": "ip" + } + } + } + } + } + } + } + } + }, + "program_name": { + "type": "keyword" + }, + "command": { + "type": "keyword" + }, + "type": { + "type": "text" + }, + "title": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "input": { + "properties": { + "type": { + "type": "keyword" + } + } + }, + "previous_output": { + "type": "keyword" + } + } + }, + "version": 1 +} diff --git a/roles/wazuh/ansible-filebeat-oss/templates/filebeat.yml.j2 b/roles/wazuh/ansible-filebeat-oss/templates/filebeat.yml.j2 new file mode 100644 index 00000000..67a99347 --- /dev/null +++ 
b/roles/wazuh/ansible-filebeat-oss/templates/filebeat.yml.j2
@@ -0,0 +1,30 @@
+# Wazuh - Filebeat configuration file
+filebeat.modules:
+  - module: wazuh
+    alerts:
+      enabled: true
+    archives:
+      enabled: false
+
+setup.template.json.enabled: true
+setup.template.json.path: '/etc/filebeat/wazuh-template.json'
+setup.template.json.name: 'wazuh'
+setup.template.overwrite: true
+setup.ilm.enabled: false
+
+# Send events directly to Elasticsearch
+output.elasticsearch:
+  hosts: {{ filebeat_output_elasticsearch_hosts | to_json }}
+
+{% if filebeat_security %}
+  username: {{ elasticsearch_security_user }}
+  password: {{ elasticsearch_security_password }}
+  protocol: https
+  ssl.certificate_authorities:
+    - {{ filebeat_ssl_dir }}/root-ca.pem
+  ssl.certificate: "{{ filebeat_ssl_dir }}/{{ inventory_hostname }}.pem"
+  ssl.key: "{{ filebeat_ssl_dir }}/{{ inventory_hostname }}.key"
+{% endif %}
+
+# Optional. Send events to Logstash instead of Elasticsearch
+#output.logstash.hosts: ["YOUR_LOGSTASH_SERVER_IP:5000"]
\ No newline at end of file
diff --git a/roles/wazuh/ansible-filebeat/README.md b/roles/wazuh/ansible-filebeat/README.md
index ad588e64..416f7da0 100644
--- a/roles/wazuh/ansible-filebeat/README.md
+++ b/roles/wazuh/ansible-filebeat/README.md
@@ -28,7 +28,7 @@ Available variables are listed below, along with default values (see `defaults/m
 License and copyright
 ---------------------
 
-WAZUH Copyright (C) 2017 Wazuh Inc. (License GPLv3)
+WAZUH Copyright (C) 2020 Wazuh Inc. (License GPLv3)
 
 ### Based on previous work from geerlingguy
diff --git a/roles/wazuh/ansible-filebeat/defaults/main.yml b/roles/wazuh/ansible-filebeat/defaults/main.yml
index 8f06aaf4..12e62dce 100644
--- a/roles/wazuh/ansible-filebeat/defaults/main.yml
+++ b/roles/wazuh/ansible-filebeat/defaults/main.yml
@@ -1,5 +1,5 @@
 ---
-filebeat_version: 7.6.1
+filebeat_version: 7.7.0
 
 filebeat_create_config: true
 
@@ -22,10 +22,8 @@ filebeat_enable_logging: true
 filebeat_log_level: debug
 filebeat_log_dir: /var/log/mybeat
 filebeat_log_filename: mybeat.log
-
 filebeat_ssl_dir: /etc/pki/filebeat
 filebeat_ssl_certificate_file: ""
-filebeat_ssl_key_file: ""
 filebeat_ssl_insecure: "false"
 
 filebeat_module_package_url: https://packages.wazuh.com/3.x/filebeat
@@ -46,7 +44,7 @@ node_certs_destination: /etc/filebeat/certs
 
 # CA Generation
-master_certs_path: /es_certs
+master_certs_path: "{{ playbook_dir }}/es_certs"
 generate_CA: true
 ca_cert_name: ""
diff --git a/roles/wazuh/ansible-filebeat/tasks/config.yml b/roles/wazuh/ansible-filebeat/tasks/config.yml
index d45b06e8..2b0b7eda 100644
--- a/roles/wazuh/ansible-filebeat/tasks/config.yml
+++ b/roles/wazuh/ansible-filebeat/tasks/config.yml
@@ -17,23 +17,4 @@
     group: root
     mode: 0400
   notify: restart filebeat
-  tags: configure
-
-- name: Ensure Filebeat SSL key pair directory exists.
-  file:
-    path: "{{ filebeat_ssl_dir }}"
-    state: directory
-  when: filebeat_ssl_key_file
-  tags: configure
-
-- name: Copy SSL key and cert for filebeat.
- copy: - src: "{{ item }}" - dest: "{{ filebeat_ssl_dir }}/{{ item | basename }}" - mode: 0400 - with_items: - - "{{ filebeat_ssl_key_file }}" - - "{{ filebeat_ssl_certificate_file }}" - notify: restart filebeat - when: filebeat_ssl_key_file and filebeat_ssl_certificate_file - tags: configure + tags: configure \ No newline at end of file diff --git a/roles/wazuh/ansible-filebeat/tasks/main.yml b/roles/wazuh/ansible-filebeat/tasks/main.yml index 4948c252..3e62339b 100644 --- a/roles/wazuh/ansible-filebeat/tasks/main.yml +++ b/roles/wazuh/ansible-filebeat/tasks/main.yml @@ -30,6 +30,8 @@ copy: src: "{{ item }}" dest: "{{ node_certs_destination }}/" + owner: root + group: root mode: 0440 with_items: - "{{ master_certs_path }}/{{ filebeat_node_name }}/{{ filebeat_node_name }}.key" @@ -44,6 +46,8 @@ copy: src: "{{ item }}" dest: "{{ node_certs_destination }}/" + owner: root + group: root mode: 0440 with_items: - "{{ master_certs_path }}/{{ filebeat_node_name }}/{{ filebeat_node_name }}.key" @@ -57,9 +61,9 @@ - name: Ensuring folder & certs permissions file: path: "{{ node_certs_destination }}/" - mode: 0774 + mode: 0770 state: directory - recurse: yes + recurse: no when: - filebeat_xpack_security tags: xpack-security @@ -69,14 +73,13 @@ path: "{{ filebeat_module_folder }}" register: filebeat_module_folder - - name: Download Filebeat module package get_url: url: "{{ filebeat_module_package_url }}/{{ filebeat_module_package_name }}" dest: "{{ filebeat_module_package_path }}" when: not filebeat_module_folder.stat.exists -- name: Unpakcing Filebeat module package +- name: Unpack Filebeat module package unarchive: src: "{{ filebeat_module_package_path }}/{{ filebeat_module_package_name }}" dest: "{{ filebeat_module_destination }}" diff --git a/roles/wazuh/ansible-filebeat/tests/requirements.yml b/roles/wazuh/ansible-filebeat/tests/requirements.yml deleted file mode 100644 index 63d857e2..00000000 --- a/roles/wazuh/ansible-filebeat/tests/requirements.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -- src: geerlingguy.java -- src: geerlingguy.elasticsearch diff --git a/roles/wazuh/ansible-filebeat/tests/test.yml b/roles/wazuh/ansible-filebeat/tests/test.yml deleted file mode 100644 index 3a4c8f21..00000000 --- a/roles/wazuh/ansible-filebeat/tests/test.yml +++ /dev/null @@ -1,20 +0,0 @@ ---- -- hosts: all - - pre_tasks: - - name: Update apt cache. - apt: - cache_valid_time: 600 - when: ansible_os_family == 'Debian' - - - name: Install test dependencies (RedHat). - package: name=which state=present - when: ansible_os_family == 'RedHat' - - - name: Install test dependencies. 
- package: name=curl state=present - - roles: - - geerlingguy.java - - geerlingguy.elasticsearch - - role_under_test diff --git a/roles/wazuh/ansible-wazuh-agent/defaults/main.yml b/roles/wazuh/ansible-wazuh-agent/defaults/main.yml index 8041962f..bac623e7 100644 --- a/roles/wazuh/ansible-wazuh-agent/defaults/main.yml +++ b/roles/wazuh/ansible-wazuh-agent/defaults/main.yml @@ -1,5 +1,5 @@ --- -wazuh_agent_version: 3.12.0-1 +wazuh_agent_version: 3.12.3-1 # Custom packages installation @@ -12,7 +12,7 @@ wazuh_custom_packages_installation_agent_rpm_url: "" wazuh_agent_sources_installation: enabled: false - branch: "v3.12.0" + branch: "v3.12.3" user_language: "y" user_no_stop: "y" user_install_type: "agent" @@ -39,6 +39,7 @@ wazuh_managers: api_port: 55000 api_proto: 'http' api_user: null +wazuh_api_reachable_from_agent: false wazuh_profile_centos: 'centos, centos7, centos7.6' wazuh_profile_ubuntu: 'ubuntu, ubuntu18, ubuntu18.04' wazuh_auto_restart: 'yes' @@ -63,9 +64,9 @@ wazuh_winagent_config: # Adding quotes to auth_path_x86 since win_shell outputs error otherwise auth_path_x86: C:\'Program Files (x86)'\ossec-agent\agent-auth.exe check_md5: True - md5: 91efaefae4e1977670eab0c768a22a93 -wazuh_winagent_config_url: https://packages.wazuh.com/3.x/windows/wazuh-agent-3.12.0-1.msi -wazuh_winagent_package_name: wazuh-agent-3.12.0-1.msi + md5: 4ae4e930d3ae9d572b07cd9e7207d783 +wazuh_winagent_config_url: https://packages.wazuh.com/3.x/windows/wazuh-agent-3.12.3-1.msi +wazuh_winagent_package_name: wazuh-agent-3.12.3-1.msi wazuh_agent_config: repo: apt: 'deb https://packages.wazuh.com/3.x/apt/ stable main' @@ -86,7 +87,6 @@ wazuh_agent_config: frequency: 43200 scan_on_start: 'yes' auto_ignore: 'no' - alert_new_files: 'yes' win_audit_interval: 60 skip_nfs: 'yes' skip_dev: 'yes' diff --git a/roles/wazuh/ansible-wazuh-agent/tasks/Linux.yml b/roles/wazuh/ansible-wazuh-agent/tasks/Linux.yml index 7fa0cc03..f6b0576e 100644 --- a/roles/wazuh/ansible-wazuh-agent/tasks/Linux.yml +++ b/roles/wazuh/ansible-wazuh-agent/tasks/Linux.yml @@ -131,14 +131,15 @@ user: "{{ wazuh_managers.0.api_user }}" password: "{{ api_pass }}" register: newagent_api - notify: restart wazuh-agent - vars: - agent_name: "{% if single_agent_name is defined %}{{ single_agent_name }}{% else %}{{ inventory_hostname }}{% endif %}" + delegate_to: "{{ 'localhost' if not wazuh_api_reachable_from_agent else inventory_hostname }}" + become: no + changed_when: newagent_api.json.error == 0 when: - not check_keys.stat.exists or check_keys.stat.size == 0 - - wazuh_agent_authd.registration_address is not none - become: false - ignore_errors: true + - wazuh_managers.0.address is not none + tags: + - config + - api - name: Linux | Retrieve new agent data via rest-API uri: @@ -155,8 +156,11 @@ - wazuh_agent_authd.registration_address is not none - newagent_api.json.error == 0 register: newagentdata_api - delegate_to: localhost - become: false + delegate_to: "{{ 'localhost' if not wazuh_api_reachable_from_agent else inventory_hostname }}" + become: no + tags: + - config + - api - name: Linux | Register agent (via rest-API) command: /var/ossec/bin/manage_agents diff --git a/roles/wazuh/ansible-wazuh-agent/tasks/RedHat.yml b/roles/wazuh/ansible-wazuh-agent/tasks/RedHat.yml index 8dbd2452..17d97c96 100644 --- a/roles/wazuh/ansible-wazuh-agent/tasks/RedHat.yml +++ b/roles/wazuh/ansible-wazuh-agent/tasks/RedHat.yml @@ -27,30 +27,14 @@ - not wazuh_agent_sources_installation.enabled - not wazuh_custom_packages_installation_agent_enabled -- name: 
RedHat/CentOS/Fedora | download Oracle Java RPM
-  get_url:
-    url: https://download.oracle.com/otn-pub/java/jdk/8u202-b08/1961070e4c9b4e26a04e7f5a083f551e/jre-8u202-linux-x64.rpm
-    dest: /tmp/jre-8-linux-x64.rpm
-    headers: 'Cookie:oraclelicense=accept-securebackup-cookie'
-  register: oracle_java_task_rpm_download
-  until: oracle_java_task_rpm_download is succeeded
+- name: RedHat/CentOS/Fedora | Install OpenJDK 1.8
+  yum: name=java-1.8.0-openjdk state=present
   when:
     - wazuh_agent_config.cis_cat.disable == 'no'
     - wazuh_agent_config.cis_cat.install_java == 'yes'
   tags:
     - init
 
-- name: RedHat/CentOS/Fedora | Install Oracle Java RPM
-  package: name=/tmp/jre-8-linux-x64.rpm state=present
-  register: wazuh_agent_java_package_install
-  until: wazuh_agent_java_package_install is succeeded
-  when:
-    - wazuh_agent_config.cis_cat.disable == 'no'
-    - wazuh_agent_config.cis_cat.install_java == 'yes'
-    - oracle_java_task_rpm_download is defined
-  tags:
-    - init
-
 - name: Set Distribution CIS filename for RHEL5
   set_fact:
     cis_distribution_filename: cis_rhel5_linux_rcl.txt
diff --git a/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 b/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2
index ee71769e..2ee7f97d 100644
--- a/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2
+++ b/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2
@@ -201,7 +201,6 @@
 {% if wazuh_agent_config.syscheck is defined %}
   <syscheck>
     <disabled>no</disabled>
-    <alert_new_files>yes</alert_new_files>
    <frequency>{{ wazuh_agent_config.syscheck.frequency }}</frequency>
 {% if ansible_system == "Linux" %}
    <scan_on_start>{{ wazuh_agent_config.syscheck.scan_on_start }}</scan_on_start>
diff --git a/roles/wazuh/ansible-wazuh-manager/README.md b/roles/wazuh/ansible-wazuh-manager/README.md
index 199e7810..ac52363d 100644
--- a/roles/wazuh/ansible-wazuh-manager/README.md
+++ b/roles/wazuh/ansible-wazuh-manager/README.md
@@ -218,7 +218,7 @@ Including an example of how to use your role (for instance, with variables passe
 License and copyright
 ---------------------
 
-WAZUH Copyright (C) 2017 Wazuh Inc. (License GPLv3)
+WAZUH Copyright (C) 2020 Wazuh Inc.
(License GPLv3) ### Based on previous work from dj-wasabi diff --git a/roles/wazuh/ansible-wazuh-manager/defaults/main.yml b/roles/wazuh/ansible-wazuh-manager/defaults/main.yml index db4f8841..a046addf 100644 --- a/roles/wazuh/ansible-wazuh-manager/defaults/main.yml +++ b/roles/wazuh/ansible-wazuh-manager/defaults/main.yml @@ -1,21 +1,21 @@ --- -wazuh_manager_version: 3.12.0-1 +wazuh_manager_version: 3.12.3-1 wazuh_manager_fqdn: "wazuh-server" wazuh_manager_package_state: present # Custom packages installation wazuh_custom_packages_installation_manager_enabled: false -wazuh_custom_packages_installation_manager_deb_url: "https://s3-us-west-1.amazonaws.com/packages-dev.wazuh.com/warehouse/branches/3.12/deb/var/wazuh-manager_3.12.0-0.3319fimreworksqlite_amd64.deb" -wazuh_custom_packages_installation_manager_rpm_url: "https://s3-us-west-1.amazonaws.com/packages-dev.wazuh.com/warehouse/branches/3.12/rpm/var/wazuh-manager-3.12.0-0.3319fimreworksqlite.x86_64.rpm" +wazuh_custom_packages_installation_manager_deb_url: "https://s3-us-west-1.amazonaws.com/packages-dev.wazuh.com/" +wazuh_custom_packages_installation_manager_rpm_url: "https://s3-us-west-1.amazonaws.com/packages-dev.wazuh.com/" wazuh_custom_packages_installation_api_enabled: false -wazuh_custom_packages_installation_api_deb_url: "https://s3-us-west-1.amazonaws.com/packages-dev.wazuh.com/warehouse/branches/3.12/deb/var/wazuh-api_3.12.0-0.3319fimreworksqlite_amd64.deb" -wazuh_custom_packages_installation_api_rpm_url: "https://s3-us-west-1.amazonaws.com/packages-dev.wazuh.com/warehouse/branches/3.12/rpm/var/wazuh-api-3.12.0-0.3319fimreworksqlite.x86_64.rpm" +wazuh_custom_packages_installation_api_deb_url: "https://s3-us-west-1.amazonaws.com/packages-dev.wazuh.com/" +wazuh_custom_packages_installation_api_rpm_url: "https://s3-us-west-1.amazonaws.com/packages-dev.wazuh.com/" # Sources installation wazuh_manager_sources_installation: enabled: false - branch: "v3.12.0" + branch: "v3.12.3" user_language: "en" user_no_stop: "y" user_install_type: "server" @@ -40,7 +40,7 @@ wazuh_manager_sources_installation: wazuh_api_sources_installation: enabled: false - branch: "v3.12.0" + branch: "v3.12.3" update: "y" remove: "y" directory: null @@ -152,7 +152,6 @@ wazuh_manager_config: frequency: 43200 scan_on_start: 'yes' auto_ignore: 'no' - alert_new_files: 'yes' ignore: - /etc/mtab - /etc/hosts.deny @@ -236,7 +235,6 @@ wazuh_manager_config: providers: - enabled: 'no' os: - - 'precise' - 'trusty' - 'xenial' - 'bionic' @@ -355,6 +353,29 @@ wazuh_manager_config: - server: null port: null format: null + integrations: + #slack + - name: null + hook_url: '' + alert_level: 10 + alert_format: 'json' + rule_id: null + #pagerduty + - name: null + api_key: '' + alert_level: 12 + monitor_aws: + disabled: 'yes' + interval: '10m' + run_on_start: 'yes' + skip_on_error: 'yes' + s3: + - name: null + bucket_type: null + path: null + only_logs_after: null + access_key: null + secret_key: null labels: enable: false list: @@ -367,7 +388,6 @@ wazuh_manager_config: # syscheck: # frequency: 43200 # scan_on_start: 'yes' - # alert_new_files: 'yes' # ignore: # - /etc/mtab # - /etc/mnttab @@ -398,7 +418,6 @@ wazuh_manager_config: # frequency: 43200 # scan_on_start: 'yes' # auto_ignore: 'no' - # alert_new_files: 'yes' # windows_registry: # - key: 'HKEY_LOCAL_MACHINE\Software\Classes\batfile' # arch: 'both' diff --git a/roles/wazuh/ansible-wazuh-manager/tasks/RedHat.yml b/roles/wazuh/ansible-wazuh-manager/tasks/RedHat.yml index cb0dbf5a..2e0751fd 100644 --- 
a/roles/wazuh/ansible-wazuh-manager/tasks/RedHat.yml +++ b/roles/wazuh/ansible-wazuh-manager/tasks/RedHat.yml @@ -62,25 +62,6 @@ - ( ansible_distribution == 'CentOS' or ansible_distribution == 'RedHat' ) and ansible_distribution_major_version == '6' - wazuh_manager_config.cluster.disable != 'yes' -- name: CentOS/RedHat 6 | Install python-cryptography module - pip: name=cryptography state=present - register: wazuh_manager_cryptography_package_installed - until: wazuh_manager_cryptography_package_installed is succeeded - environment: - PATH: "/opt/rh/python27/root/usr/bin:{{ ansible_env.PATH }}" - LD_LIBRARY_PATH: "/opt/rh/python27/root/usr/lib64:/opt/rh/python27/root/usr/lib" - when: - - ( ansible_distribution == 'CentOS' or ansible_distribution == 'RedHat' ) and ansible_distribution_major_version == '6' - - wazuh_manager_config.cluster.disable != 'yes' - -- name: RedHat/CentOS/Fedora | Install python-cryptography module - package: name=python-cryptography state=present - register: wazuh_manager_cryptography_package_installed - until: wazuh_manager_cryptography_package_installed is succeeded - when: - - not (( ansible_distribution == 'CentOS' or ansible_distribution == 'RedHat') and ansible_distribution_major_version == '6' ) - - wazuh_manager_config.cluster.disable != 'yes' - - name: RedHat/CentOS/Fedora | Install OpenJDK 1.8 yum: name=java-1.8.0-openjdk state=present when: @@ -168,4 +149,3 @@ - ansible_distribution_major_version|int < 6 tags: - init - diff --git a/roles/wazuh/ansible-wazuh-manager/tasks/installation_from_sources.yml b/roles/wazuh/ansible-wazuh-manager/tasks/installation_from_sources.yml index e019d2f9..2fa00fe7 100644 --- a/roles/wazuh/ansible-wazuh-manager/tasks/installation_from_sources.yml +++ b/roles/wazuh/ansible-wazuh-manager/tasks/installation_from_sources.yml @@ -123,7 +123,7 @@ path: /var/ossec/api/app.js register: wazuh_api when: - - wazuh_manager_config.cluster.node_type == "master" + - wazuh_manager_config.cluster.node_type == "master" or wazuh_manager_config.cluster.node_type == "worker" - name: Install Wazuh API from sources block: diff --git a/roles/wazuh/ansible-wazuh-manager/tasks/main.yml b/roles/wazuh/ansible-wazuh-manager/tasks/main.yml index eaabdb77..71914578 100644 --- a/roles/wazuh/ansible-wazuh-manager/tasks/main.yml +++ b/roles/wazuh/ansible-wazuh-manager/tasks/main.yml @@ -106,7 +106,7 @@ - name: Installing the local_rules.xml (default local_rules.xml) template: src=var-ossec-rules-local_rules.xml.j2 dest=/var/ossec/etc/rules/local_rules.xml - owner=root + owner=ossec group=ossec mode=0640 notify: restart wazuh-manager @@ -118,7 +118,7 @@ - name: Adding local rules files copy: src="{{ wazuh_manager_config.ruleset.rules_path }}" dest=/var/ossec/etc/rules/ - owner=root + owner=ossec group=ossec mode=0640 notify: restart wazuh-manager @@ -130,7 +130,7 @@ - name: Installing the local_decoder.xml template: src=var-ossec-rules-local_decoder.xml.j2 dest=/var/ossec/etc/decoders/local_decoder.xml - owner=root + owner=ossec group=ossec mode=0640 notify: restart wazuh-manager @@ -142,7 +142,7 @@ - name: Adding local decoders files copy: src="{{ wazuh_manager_config.ruleset.decoders_path }}" dest=/var/ossec/etc/decoders/ - owner=root + owner=ossec group=ossec mode=0640 notify: restart wazuh-manager diff --git a/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 b/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 index 998900b2..53565007 100644 --- 
a/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2
+++ b/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2
@@ -294,7 +294,6 @@
 
   <!-- File integrity monitoring -->
   <syscheck>
    <disabled>{{ wazuh_manager_config.syscheck.disable }}</disabled>
-    <alert_new_files>{{ wazuh_manager_config.syscheck.alert_new_files }}</alert_new_files>
    <frequency>{{ wazuh_manager_config.syscheck.frequency }}</frequency>
    <scan_on_start>{{ wazuh_manager_config.syscheck.scan_on_start }}</scan_on_start>
@@ -544,6 +543,55 @@
 {% endfor %}
 {% endif %}
 
+{% if wazuh_manager_config.integrations is defined %}
+{% for integration in wazuh_manager_config.integrations %}
+{% if integration.name is not none %}
+  <!-- Integration -->
+  <integration>
+    <name>{{ integration.name }}</name>
+    {% if integration.hook_url is defined %}
+    <hook_url>{{ integration.hook_url }}</hook_url>
+    {% endif %}
+    {% if integration.api_key is defined %}
+    <api_key>{{ integration.api_key }}</api_key>
+    {% endif %}
+    {% if integration.alert_format is defined %}
+    <alert_format>{{ integration.alert_format }}</alert_format>
+    {% endif %}
+    {% if integration.alert_level is defined %}
+    <level>{{ integration.alert_level }}</level>
+    {% endif %}
+    {% if integration.rule_id is defined %}
+    <rule_id>{{ integration.rule_id }}</rule_id>
+    {% endif %}
+  </integration>
+{% endif %}
+{% endfor %}
+{% endif %}
+
+{% if monitor_aws is defined and monitor_aws.disabled == "no" %}
+  <!-- AWS S3 bucket monitoring -->
+  <wodle name="aws-s3">
+    <disabled>{{ monitor_aws.disabled }}</disabled>
+    <interval>{{ monitor_aws.interval }}</interval>
+    <run_on_start>{{ monitor_aws.run_on_start }}</run_on_start>
+    <skip_on_error>{{ monitor_aws.skip_on_error }}</skip_on_error>
+    {% for bucket in monitor_aws.s3 %}
+    <bucket type="{{ bucket.bucket_type }}">
+      <name>{{ bucket.name }}</name>
+      {% if bucket.path is defined %}
+      <path>{{ bucket.path }}</path>
+      {% endif %}
+      {% if bucket.only_logs_after is defined %}
+      <only_logs_after>{{ bucket.only_logs_after }}</only_logs_after>
+      {% endif %}
+      <access_key>{{ bucket.access_key }}</access_key>
+      <secret_key>{{ bucket.secret_key }}</secret_key>
+    </bucket>
+    {% endfor %}
+  </wodle>
+{% endif %}
+
 {% if wazuh_manager_config.labels.enable == true %}
   <labels>
 {% for label in wazuh_manager_config.labels.list %}
diff --git a/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-shared-agent.conf.j2 b/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-shared-agent.conf.j2
index f300f22a..78893385 100644
--- a/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-shared-agent.conf.j2
+++ b/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-shared-agent.conf.j2
@@ -7,7 +7,6 @@
   <syscheck>
 {% if agent_config.syscheck.auto_ignore is defined %}
    <auto_ignore>{{ agent_config.syscheck.auto_ignore }}</auto_ignore>
 {% endif %}
-    <alert_new_files>{{ agent_config.syscheck.alert_new_files }}</alert_new_files>
    <frequency>{{ agent_config.syscheck.frequency }}</frequency>
    <scan_on_start>{{ agent_config.syscheck.scan_on_start }}</scan_on_start>
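
Usage note: a minimal sketch of inventory variables that would exercise the new integration and AWS S3 blocks rendered by var-ossec-etc-ossec-server.conf.j2 above. The hook URL, bucket name, and credentials below are placeholders, not values from this patch; monitor_aws is shown top-level because the template references it unqualified, and since wazuh_manager_config is a single dictionary, overriding it in group_vars replaces the role defaults wholesale unless Ansible hash merging is enabled.

# group_vars/managers.yml (illustrative values only)
wazuh_manager_config:
  # ...keep the remaining manager defaults alongside this key...
  integrations:
    # slack
    - name: slack
      hook_url: 'https://hooks.slack.com/services/T0000/B0000/XXXX'  # placeholder
      alert_level: 10        # rendered as <level> in the template
      alert_format: 'json'
      rule_id: null

monitor_aws:
  disabled: 'no'             # 'no' activates the aws-s3 wodle in the template
  interval: '10m'
  run_on_start: 'yes'
  skip_on_error: 'yes'
  s3:
    - name: example-cloudtrail-bucket   # placeholder bucket name
      bucket_type: cloudtrail           # rendered as <bucket type="...">
      path: null
      only_logs_after: null
      access_key: null                  # placeholder; supply real credentials or leave null
      secret_key: null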
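
The new ansible-filebeat-oss role is driven the same way: filebeat.yml.j2 reads the Elasticsearch output hosts and, when filebeat_security is true, the credentials and certificate paths, while security_actions.yml copies the certificates from the controller. A hedged example play follows; the address and credentials are placeholders, and filebeat_ssl_dir is set explicitly here since its role default is not part of this excerpt.

---
- hosts: managers
  roles:
    - role: ../roles/wazuh/ansible-filebeat-oss
      filebeat_output_elasticsearch_hosts:
        - "10.0.0.10:9200"                        # placeholder address
      filebeat_security: true
      elasticsearch_security_user: admin          # placeholder credentials
      elasticsearch_security_password: changeme
      filebeat_ssl_dir: /etc/filebeat/certs
      # security_actions.yml reads {{ local_certs_path }}/certs/, which must hold
      # {{ inventory_hostname }}.key, {{ inventory_hostname }}.pem and root-ca.pem
      local_certs_path: "{{ playbook_dir }}"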
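
One more knob introduced by this patch: wazuh_api_reachable_from_agent (default false in the agent defaults above) decides where the registration uri tasks in the agent role's tasks/Linux.yml run. Left false, they are delegated to localhost, i.e. the Ansible controller; set it when the agents can reach the manager API directly:

# inventory or group_vars
wazuh_api_reachable_from_agent: true   # run the API registration calls on the agent host itself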