diff --git a/playbooks/wazuh-manager-oss.yml b/playbooks/wazuh-manager-oss.yml new file mode 100644 index 00000000..3dc6346d --- /dev/null +++ b/playbooks/wazuh-manager-oss.yml @@ -0,0 +1,9 @@ +--- +- hosts: managers + roles: +# - role: ../roles/wazuh/ansible-wazuh-manager + - role: ../roles/wazuh/ansible-filebeat-oss + filebeat_output_elasticsearch_hosts: + - "172.16.0.161:9200" + - "172.16.0.162:9200" + - "172.16.0.163:9200" \ No newline at end of file diff --git a/playbooks/wazuh-opendistro-kibana.yml b/playbooks/wazuh-opendistro-kibana.yml new file mode 100644 index 00000000..fa3600c1 --- /dev/null +++ b/playbooks/wazuh-opendistro-kibana.yml @@ -0,0 +1,4 @@ +--- +- hosts: es1 + roles: + - role: ../roles/opendistro/opendistro-kibana diff --git a/playbooks/wazuh-opendistro.yml b/playbooks/wazuh-opendistro.yml index ede8ca93..271dfa5b 100644 --- a/playbooks/wazuh-opendistro.yml +++ b/playbooks/wazuh-opendistro.yml @@ -1,4 +1,4 @@ --- -- hosts: es-cluster +- hosts: es_cluster roles: - role: ../roles/opendistro/opendistro-elasticsearch diff --git a/roles/elastic-stack/ansible-elasticsearch/README.md b/roles/elastic-stack/ansible-elasticsearch/README.md index c574aa9f..f37d3cec 100644 --- a/roles/elastic-stack/ansible-elasticsearch/README.md +++ b/roles/elastic-stack/ansible-elasticsearch/README.md @@ -134,7 +134,7 @@ It is possible to define users directly on the playbook, these must be defined o License and copyright --------------------- -WAZUH Copyright (C) 2017 Wazuh Inc. (License GPLv3) +WAZUH Copyright (C) 2020 Wazuh Inc. (License GPLv3) ### Based on previous work from geerlingguy diff --git a/roles/elastic-stack/ansible-elasticsearch/defaults/main.yml b/roles/elastic-stack/ansible-elasticsearch/defaults/main.yml index 279283c2..b4a8308f 100644 --- a/roles/elastic-stack/ansible-elasticsearch/defaults/main.yml +++ b/roles/elastic-stack/ansible-elasticsearch/defaults/main.yml @@ -4,7 +4,7 @@ elasticsearch_http_port: 9200 elasticsearch_network_host: 127.0.0.1 elasticsearch_reachable_host: 127.0.0.1 elasticsearch_jvm_xms: null -elastic_stack_version: 7.6.2 +elastic_stack_version: 7.7.0 elasticsearch_lower_disk_requirements: false elasticrepo: diff --git a/roles/elastic-stack/ansible-kibana/README.md b/roles/elastic-stack/ansible-kibana/README.md index 593cf319..28978761 100644 --- a/roles/elastic-stack/ansible-kibana/README.md +++ b/roles/elastic-stack/ansible-kibana/README.md @@ -37,7 +37,7 @@ Example Playbook License and copyright --------------------- -WAZUH Copyright (C) 2017 Wazuh Inc. (License GPLv3) +WAZUH Copyright (C) 2020 Wazuh Inc. 
(License GPLv3) ### Based on previous work from geerlingguy diff --git a/roles/elastic-stack/ansible-kibana/defaults/main.yml b/roles/elastic-stack/ansible-kibana/defaults/main.yml index e3164ec0..fd392334 100644 --- a/roles/elastic-stack/ansible-kibana/defaults/main.yml +++ b/roles/elastic-stack/ansible-kibana/defaults/main.yml @@ -5,7 +5,7 @@ elasticsearch_http_port: "9200" elasticsearch_network_host: "127.0.0.1" kibana_server_host: "0.0.0.0" kibana_server_port: "5601" -elastic_stack_version: 7.6.2 +elastic_stack_version: 7.7.0 wazuh_version: 3.12.3 wazuh_app_url: https://packages.wazuh.com/wazuhapp/wazuhapp diff --git a/roles/opendistro/opendistro-elasticsearch/defaults/main.yml b/roles/opendistro/opendistro-elasticsearch/defaults/main.yml index 0c8f8f1f..aa683033 100644 --- a/roles/opendistro/opendistro-elasticsearch/defaults/main.yml +++ b/roles/opendistro/opendistro-elasticsearch/defaults/main.yml @@ -1,6 +1,6 @@ --- # The OpenDistro version -opendistro_version: 1.6.0 +opendistro_version: 1.8.0 elasticsearch_cluster_name: wazuh-cluster # Minimum master nodes in cluster, 2 for 3 nodes elasticsearch cluster @@ -16,8 +16,8 @@ domain_name: wazuh.com # The OpenDistro package repository package_repos: - yum: - opendistro: + yum: + opendistro: baseurl: 'https://d3g5vo6xdbdb9a.cloudfront.net/yum/noarch/' gpg: 'https://d3g5vo6xdbdb9a.cloudfront.net/GPG-KEY-opendistroforelasticsearch' elasticsearch_oss: @@ -28,7 +28,7 @@ opendistro_sec_plugin_conf_path: /usr/share/elasticsearch/plugins/opendistro_sec opendistro_sec_plugin_tools_path: /usr/share/elasticsearch/plugins/opendistro_security/tools opendistro_conf_path: /etc/elasticsearch/ es_nodes: |- - {% for item in groups['es-cluster'] -%} + {% for item in groups['es_cluster'] -%} {{ hostvars[item]['ip'] }}{% if not loop.last %}","{% endif %} {%- endfor %} diff --git a/roles/opendistro/opendistro-elasticsearch/tasks/RMRedHat.yml b/roles/opendistro/opendistro-elasticsearch/tasks/RMRedHat.yml index 46989361..31f0416a 100644 --- a/roles/opendistro/opendistro-elasticsearch/tasks/RMRedHat.yml +++ b/roles/opendistro/opendistro-elasticsearch/tasks/RMRedHat.yml @@ -1,6 +1,6 @@ --- - name: RedHat/CentOS/Fedora | Remove Elasticsearch repository (and clean up left-over metadata) yum_repository: - name: elastic_repo_7 + name: opendistro_repo state: absent changed_when: false diff --git a/roles/opendistro/opendistro-elasticsearch/tasks/local_actions.yml b/roles/opendistro/opendistro-elasticsearch/tasks/local_actions.yml index 60379616..6885276d 100644 --- a/roles/opendistro/opendistro-elasticsearch/tasks/local_actions.yml +++ b/roles/opendistro/opendistro-elasticsearch/tasks/local_actions.yml @@ -2,50 +2,71 @@ - block: - name: Local action | Create local temporary directory for certificates generation - local_action: - module: file + file: path: "{{ local_certs_path }}" state: directory - run_once: true - + + - name: Local action | Check that the generation tool exists + stat: + path: "{{ local_certs_path }}/search-guard-tlstool-{{ certs_gen_tool_version }}.zip" + register: tool_package + - name: Local action | Download certificates generation tool - local_action: - module: get_url + get_url: url: "{{ certs_gen_tool_url }}" dest: "{{ local_certs_path }}/search-guard-tlstool-{{ certs_gen_tool_version }}.zip" - run_once: true + when: not tool_package.stat.exists - name: Local action | Extract the certificates generation tool - local_action: - module: unarchive + unarchive: src: "{{ local_certs_path }}/search-guard-tlstool-1.7.zip" dest: "{{ local_certs_path 
}}/" - name: Local action | Add the execution bit to the binary - local_action: - module: file + file: dest: "{{ local_certs_path }}/tools/sgtlstool.sh" mode: a+x - run_once: true - name: Local action | Prepare the certificates generation template file - local_action: - module: template + template: src: "templates/tlsconfig.yml.j2" dest: "{{ local_certs_path }}/config/tlsconfig.yml" - run_once: true + register: tlsconfig_template + + - name: Create a directory if it does not exist + file: + path: "{{ local_certs_path }}/certs/" + state: directory + mode: '0755' - name: Local action | Check if root CA file exists - local_action: - module: stat - path: "{{ local_certs_path }}/config/root-ca.key" + stat: + path: "{{ local_certs_path }}/certs/root-ca.key" register: root_ca_file - name: Local action | Generate the node & admin certificates in local - local_action: - module: command {{ local_certs_path }}/tools/sgtlstool.sh -c {{ local_certs_path }}/config/tlsconfig.yml -ca -crt -t {{ local_certs_path }}/config/ -f -o - run_once: true - when: root_ca_file.stat.exists == False + command: >- + {{ local_certs_path }}/tools/sgtlstool.sh + -c {{ local_certs_path }}/config/tlsconfig.yml + -ca -crt + -t {{ local_certs_path }}/certs/ + -f -o + when: + - not root_ca_file.stat.exists + - tlsconfig_template.changed + - name: Local action | Generate the node & admin certificates using an existing root CA + command: >- + {{ local_certs_path }}/tools/sgtlstool.sh + -c {{ local_certs_path }}/config/tlsconfig.yml + -crt + -t {{ local_certs_path }}/certs/ + -f + when: + - root_ca_file.stat.exists + - tlsconfig_template.changed + + run_once: true + delegate_to: localhost tags: - generate-certs \ No newline at end of file diff --git a/roles/opendistro/opendistro-elasticsearch/tasks/main.yml b/roles/opendistro/opendistro-elasticsearch/tasks/main.yml index c8941208..9df1e01c 100644 --- a/roles/opendistro/opendistro-elasticsearch/tasks/main.yml +++ b/roles/opendistro/opendistro-elasticsearch/tasks/main.yml @@ -7,7 +7,7 @@ - name: Install OpenDistro package: - name: opendistroforelasticsearch-{{ opendistro_version }} + name: opendistroforelasticsearch-{{ opendistro_version }} state: present register: install tags: install @@ -18,7 +18,7 @@ state: absent when: install.changed tags: install - + - name: Copy Configuration File blockinfile: block: "{{ lookup('template', 'elasticsearch.yml.j2') }}" diff --git a/roles/opendistro/opendistro-elasticsearch/tasks/security_actions.yml b/roles/opendistro/opendistro-elasticsearch/tasks/security_actions.yml index 1fee6fef..ea48874e 100644 --- a/roles/opendistro/opendistro-elasticsearch/tasks/security_actions.yml +++ b/roles/opendistro/opendistro-elasticsearch/tasks/security_actions.yml @@ -1,5 +1,4 @@ - block: - - name: Remove demo certs file: path: "{{ item }}" @@ -12,7 +11,7 @@ - name: Copy the node & admin certificates to Elasticsearch cluster copy: - src: "{{ local_certs_path }}/config/{{ item }}" + src: "{{ local_certs_path }}/certs/{{ item }}" dest: /etc/elasticsearch/ mode: 0644 with_items: @@ -28,7 +27,7 @@ - name: Copy the OpenDistro security configuration file to cluster blockinfile: - block: "{{ lookup('file', '{{ local_certs_path }}/config/{{ inventory_hostname }}_elasticsearch_config_snippet.yml') }}" + block: "{{ lookup('file', '{{ local_certs_path }}/certs/{{ inventory_hostname }}_elasticsearch_config_snippet.yml') }}" dest: "{{ opendistro_conf_path }}/elasticsearch.yml" insertafter: EOF marker: "## {mark} Opendistro Security Node & Admin certificates configuration 
##" @@ -76,5 +75,5 @@ run_once: true tags: - - production_ready + - security when: install.changed \ No newline at end of file diff --git a/roles/opendistro/opendistro-elasticsearch/templates/tlsconfig.yml.j2 b/roles/opendistro/opendistro-elasticsearch/templates/tlsconfig.yml.j2 index 85792a6a..0f7671e2 100644 --- a/roles/opendistro/opendistro-elasticsearch/templates/tlsconfig.yml.j2 +++ b/roles/opendistro/opendistro-elasticsearch/templates/tlsconfig.yml.j2 @@ -17,7 +17,6 @@ defaults: verifyHostnames: false resolveHostnames: false - ### ### Nodes ### @@ -25,13 +24,28 @@ defaults: # Specify the nodes of your ES cluster here # nodes: -{% for item in groups['es-cluster'] %} +{% for item in groups['es_cluster'] %} - name: {{ item }} dn: CN={{ item }}.{{ domain_name }},OU=Ops,O={{ domain_name }}\, Inc.,DC={{ domain_name }} dns: {{ item }}.{{ domain_name }} ip: {{ hostvars[item]['ip'] }} {% endfor %} - +{% if groups['kibana'] is defined and groups['kibana']|length > 0 %} +{% for item in groups['kibana'] %} + - name: {{ item }} + dn: CN={{ item }}.{{ domain_name }},OU=Ops,O={{ domain_name }}\, Inc.,DC={{ domain_name }} + dns: {{ item }}.{{ domain_name }} + ip: {{ hostvars[item]['ip'] }} +{% endfor %} +{% endif %} +{% if groups['managers'] is defined and groups['managers']|length > 0 %} +{% for item in groups['managers'] %} + - name: {{ item }} + dn: CN={{ item }}.{{ domain_name }},OU=Ops,O={{ domain_name }}\, Inc.,DC={{ domain_name }} + dns: {{ item }}.{{ domain_name }} + ip: {{ hostvars[item]['ip'] }} +{% endfor %} +{% endif %} ### ### Clients ### diff --git a/roles/opendistro/opendistro-kibana/defaults/main.yml b/roles/opendistro/opendistro-kibana/defaults/main.yml new file mode 100644 index 00000000..428880ee --- /dev/null +++ b/roles/opendistro/opendistro-kibana/defaults/main.yml @@ -0,0 +1,62 @@ +--- + +elasticsearch_http_port: 9200 +elasticsearch_nodes: |- + {% for item in groups['es_cluster'] -%} + {{ hostvars[item]['ip'] }}{% if not loop.last %}","{% endif %} + {%- endfor %} +elasticsearch_network_host: 172.16.0.161 +elastic_api_protocol: https +kibana_conf_path: /etc/kibana +kibana_server_host: "0.0.0.0" +kibana_server_port: "5601" +kibana_server_name: "kibana" +kibana_max_payload_bytes: 1048576 +elastic_stack_version: 7.7.0 +wazuh_version: 3.12.3 +wazuh_app_url: https://packages.wazuh.com/wazuhapp/wazuhapp + +# The OpenDistro package repository +package_repos: + yum: + opendistro: + baseurl: 'https://d3g5vo6xdbdb9a.cloudfront.net/yum/noarch/' + gpg: 'https://d3g5vo6xdbdb9a.cloudfront.net/GPG-KEY-opendistroforelasticsearch' + elasticsearch_oss: + baseurl: 'https://artifacts.elastic.co/packages/oss-7.x/yum' + gpg: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch' + + +# API credentials +wazuh_api_credentials: + - id: "default" + url: "http://localhost" + port: 55000 + user: "foo" + password: "bar" + +# opendistro Security +kibana_opendistro_security: true +kibana_newsfeed_enabled: "false" +kibana_telemetry_optin: "false" +kibana_telemetry_enabled: "false" + +opendistro_security_user: elastic +opendistro_admin_password: changeme +opendistro_kibana_user: kibanaserver +opendistro_kibana_password: changeme +local_certs_path: /tmp/opendistro-nodecerts + +# Nodejs +nodejs: + repo_dict: + debian: "deb" + redhat: "rpm" + repo_url_ext: "nodesource.com/setup_10.x" + +# Build from sources +build_from_sources: false +wazuh_plugin_branch: 3.12-7.6 + +#Nodejs NODE_OPTIONS +node_options: --no-warnings --max-old-space-size=2048 --max-http-header-size=65536 diff --git 
a/roles/opendistro/opendistro-kibana/handlers/main.yml b/roles/opendistro/opendistro-kibana/handlers/main.yml new file mode 100644 index 00000000..55ea3d3c --- /dev/null +++ b/roles/opendistro/opendistro-kibana/handlers/main.yml @@ -0,0 +1,3 @@ +--- +- name: restart kibana + service: name=kibana state=restarted diff --git a/roles/opendistro/opendistro-kibana/tasks/RMRedHat.yml b/roles/opendistro/opendistro-kibana/tasks/RMRedHat.yml new file mode 100644 index 00000000..959c70e9 --- /dev/null +++ b/roles/opendistro/opendistro-kibana/tasks/RMRedHat.yml @@ -0,0 +1,6 @@ +--- +- name: Remove Elasticsearch repository (and clean up left-over metadata) + yum_repository: + name: opendistro_repo + state: absent + changed_when: false diff --git a/roles/opendistro/opendistro-kibana/tasks/RedHat.yml b/roles/opendistro/opendistro-kibana/tasks/RedHat.yml new file mode 100644 index 00000000..d2dbc4ec --- /dev/null +++ b/roles/opendistro/opendistro-kibana/tasks/RedHat.yml @@ -0,0 +1,23 @@ +--- +- block: + + - name: RedHat/CentOS/Fedora | Add OpenDistro repo + yum_repository: + file: opendistro + name: opendistro_repo + description: Opendistro yum repository + baseurl: "{{ package_repos.yum.opendistro.baseurl }}" + gpgkey: "{{ package_repos.yum.opendistro.gpg }}" + gpgcheck: true + + - name: RedHat/CentOS/Fedora | Add Elasticsearch-oss repo + yum_repository: + file: opendistro + name: elasticsearch_oss_repo + description: Elasticsearch-oss yum repository + baseurl: "{{ package_repos.yum.elasticsearch_oss.baseurl }}" + gpgkey: "{{ package_repos.yum.elasticsearch_oss.gpg }}" + gpgcheck: true + + tags: + - install \ No newline at end of file diff --git a/roles/opendistro/opendistro-kibana/tasks/build_wazuh_plugin.yml b/roles/opendistro/opendistro-kibana/tasks/build_wazuh_plugin.yml new file mode 100644 index 00000000..b7ceb87f --- /dev/null +++ b/roles/opendistro/opendistro-kibana/tasks/build_wazuh_plugin.yml @@ -0,0 +1,76 @@ +--- + - name: Ensure the Git package is present + package: + name: git + state: present + + - name: Modify repo url if host is in Debian family + set_fact: + node_js_repo_type: deb + when: + - ansible_os_family | lower == "debian" + + - name: Download script to install Nodejs repository + get_url: + url: "https://{{ nodejs['repo_dict'][ansible_os_family|lower] }}.{{ nodejs['repo_url_ext'] }}" + dest: "/tmp/setup_nodejs_repo.sh" + mode: 0700 + + - name: Execute downloaded script to install Nodejs repo + command: /tmp/setup_nodejs_repo.sh + register: node_repo_installation_result + changed_when: false + + - name: Install Nodejs + package: + name: nodejs + state: present + + - name: Install yarn dependency to build the Wazuh Kibana Plugin + # Using shell due to errors when evaluating text between @ with command + shell: "npm install -g {{ 'yarn' }}{{ '@' }}{{ '1.10.1'}}" # noqa 305 + register: install_yarn_result + changed_when: install_yarn_result == 0 + + - name: Remove old wazuh-kibana-app git directory + file: + path: /tmp/app + state: absent + changed_when: false + + - name: Clone wazuh-kibana-app repository # Using command as git module doesn't cover single-branch nor depth + command: git clone https://github.com/wazuh/wazuh-kibana-app -b {{ wazuh_plugin_branch }} --single-branch --depth=1 app # noqa 303 + register: clone_app_repo_result + changed_when: false + args: + chdir: "/tmp" + + - name: Executing yarn to build the package + command: "{{ item }}" + with_items: + - "yarn" + - "yarn build" + register: yarn_execution_result + changed_when: false + args: + chdir: "/tmp/app/" + + - 
name: Obtain name of generated package + shell: "find ./ -name 'wazuh-*.zip' -printf '%f\\n'" + register: wazuhapp_package_name + changed_when: false + args: + chdir: "/tmp/app/build" + + - name: Install Wazuh Plugin (can take a while) + shell: NODE_OPTIONS="{{ node_options }}" /usr/share/kibana/bin/kibana-plugin install file:///tmp/app/build/{{ wazuhapp_package_name.stdout }} + args: + executable: /bin/bash + creates: /usr/share/kibana/plugins/wazuh/package.json + chdir: /usr/share/kibana + become: yes + become_user: kibana + notify: restart kibana + tags: + - install + - skip_ansible_lint diff --git a/roles/opendistro/opendistro-kibana/tasks/main.yml b/roles/opendistro/opendistro-kibana/tasks/main.yml new file mode 100644 index 00000000..013648db --- /dev/null +++ b/roles/opendistro/opendistro-kibana/tasks/main.yml @@ -0,0 +1,133 @@ +--- + +- name: Stopping early, trying to compile Wazuh Kibana Plugin on Debian 10 is not possible + fail: + msg: "It's not possible to compile the Wazuh Kibana plugin on Debian 10 due to: https://github.com/wazuh/wazuh-kibana-app/issues/1924" + when: + - build_from_sources + - ansible_distribution == "Debian" + - ansible_distribution_major_version == "10" + +- import_tasks: RedHat.yml + when: ansible_os_family == 'RedHat' + +- name: Reload systemd + systemd: + daemon_reload: true + ignore_errors: true + when: + - not (ansible_distribution == "Amazon" and ansible_distribution_version == "(Karoo)") + - not (ansible_distribution == "Ubuntu" and ansible_distribution_version is version('15.04', '<')) + - not (ansible_distribution == "Debian" and ansible_distribution_version is version('8', '<')) + - not (ansible_os_family == "RedHat" and ansible_distribution_version is version('7', '<')) + +- name: Install Kibana + package: + name: opendistroforelasticsearch-kibana + state: present + register: install + tags: install + +- name: Remove Kibana configuration file + file: + path: "{{ kibana_conf_path }}/kibana.yml" + state: absent + when: install.changed + tags: install + +- import_tasks: security_actions.yml + +- name: Copy Configuration File + blockinfile: + block: "{{ lookup('template', 'opendistro_kibana.yml.j2') }}" + dest: "{{ kibana_conf_path }}/kibana.yml" + create: true + group: kibana + owner: kibana + mode: 0640 + marker: "## {mark} Kibana general settings ##" + notify: restart kibana + tags: + - install + - configure + +- name: Build and Install Wazuh Kibana Plugin from sources + import_tasks: build_wazuh_plugin.yml + when: + - build_from_sources is defined + - build_from_sources + +- name: Install Wazuh Plugin (can take a while) + shell: >- + NODE_OPTIONS="{{ node_options }}" /usr/share/kibana/bin/kibana-plugin install + {{ wazuh_app_url }}-{{ wazuh_version }}_{{ elastic_stack_version }}.zip + args: + executable: /bin/bash + creates: /usr/share/kibana/plugins/wazuh/package.json + chdir: /usr/share/kibana + become: yes + become_user: kibana + notify: restart kibana + tags: + - install + - skip_ansible_lint + when: + - not build_from_sources + +- name: Kibana optimization (can take a while) + shell: /usr/share/kibana/node/bin/node {{ node_options }} /usr/share/kibana/src/cli --optimize + args: + executable: /bin/bash + become: yes + become_user: kibana + changed_when: false + tags: + - skip_ansible_lint + +- name: Wait for Elasticsearch port + wait_for: host={{ elasticsearch_network_host }} port={{ elasticsearch_http_port }} + +- name: Select correct API protocol + set_fact: + elastic_api_protocol: "{% if kibana_opendistro_security is defined and 
kibana_opendistro_security %}https{% else %}http{% endif %}" + +- name: Attempting to delete legacy Wazuh index if exists + uri: + url: "{{ elastic_api_protocol }}://{{ elasticsearch_network_host }}:{{ elasticsearch_http_port }}/.wazuh" + method: DELETE + user: "admin" + password: "{{ opendistro_admin_password }}" + validate_certs: no + status_code: 200, 404 + +- name: Create wazuh plugin config directory + file: + path: /usr/share/kibana/optimize/wazuh/config/ + state: directory + recurse: yes + owner: kibana + group: kibana + mode: 0751 + changed_when: False + +- name: Configure Wazuh Kibana Plugin + template: + src: wazuh.yml.j2 + dest: /usr/share/kibana/optimize/wazuh/config/wazuh.yml + owner: kibana + group: kibana + mode: 0751 + changed_when: False + +- name: Reload systemd configuration + systemd: + daemon_reload: true + +- name: Ensure Kibana started and enabled + service: + name: kibana + enabled: true + state: started + +- import_tasks: RMRedHat.yml + when: ansible_os_family == 'RedHat' \ No newline at end of file diff --git a/roles/opendistro/opendistro-kibana/tasks/security_actions.yml b/roles/opendistro/opendistro-kibana/tasks/security_actions.yml new file mode 100644 index 00000000..be63c9ea --- /dev/null +++ b/roles/opendistro/opendistro-kibana/tasks/security_actions.yml @@ -0,0 +1,13 @@ +- block: + + - name: Copy the certificates from local to the Kibana instance + copy: + src: "{{ local_certs_path }}/certs/{{ item }}" + dest: /usr/share/kibana + mode: 0644 + with_items: + - "{{ inventory_hostname }}_http.key" + - "{{ inventory_hostname }}_http.pem" + tags: + - security + when: install.changed \ No newline at end of file diff --git a/roles/opendistro/opendistro-kibana/templates/opendistro_kibana.yml.j2 b/roles/opendistro/opendistro-kibana/templates/opendistro_kibana.yml.j2 new file mode 100644 index 00000000..40dd9d6c --- /dev/null +++ b/roles/opendistro/opendistro-kibana/templates/opendistro_kibana.yml.j2 @@ -0,0 +1,38 @@ +# {{ ansible_managed }} +# Description: +# Default Kibana configuration for Open Distro. 
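# Note: the settings below are rendered from the role defaults added in
# roles/opendistro/opendistro-kibana/defaults/main.yml (kibana_server_port,
# kibana_server_host, kibana_server_name, kibana_max_payload_bytes,
# elasticsearch_network_host, elasticsearch_http_port, opendistro_kibana_user,
# opendistro_kibana_password and kibana_opendistro_security, among others);
# override those variables in inventory or group_vars rather than editing
# this template directly.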
+server.port: {{ kibana_server_port }} + +#server.basePath: "" +server.maxPayloadBytes: {{ kibana_max_payload_bytes }} +server.name: {{ kibana_server_name }} +server.host: {{ kibana_server_host }} + + +{% if kibana_opendistro_security %} +elasticsearch.hosts: "https://{{ elasticsearch_network_host }}:{{ elasticsearch_http_port }}" +{% else %} +elasticsearch.hosts: "http://{{ elasticsearch_network_host }}:{{ elasticsearch_http_port }}" +{% endif %} + +elasticsearch.username: {{ opendistro_kibana_user }} +elasticsearch.password: {{ opendistro_kibana_password }} +elasticsearch.ssl.verificationMode: none + +elasticsearch.requestHeadersWhitelist: ["securitytenant","Authorization"] +opendistro_security.multitenancy.enabled: false # FIXME: should be enabled starting with Wazuh App v3.13 +opendistro_security.multitenancy.tenants.preferred: ["Private", "Global"] +opendistro_security.readonly_mode.roles: ["kibana_read_only"] + +# OpenDistro Security +{% if kibana_opendistro_security %} +server.ssl.enabled: true +server.ssl.certificate: "/usr/share/kibana/{{ inventory_hostname }}_http.pem" +server.ssl.key: "/usr/share/kibana//{{ inventory_hostname }}_http.key" +{% endif %} + +newsfeed.enabled: {{ kibana_newsfeed_enabled }} +telemetry.optIn: {{ kibana_telemetry_optin }} +telemetry.enabled: {{ kibana_telemetry_enabled }} + + diff --git a/roles/opendistro/opendistro-kibana/templates/wazuh.yml.j2 b/roles/opendistro/opendistro-kibana/templates/wazuh.yml.j2 new file mode 100644 index 00000000..1cbc9e2d --- /dev/null +++ b/roles/opendistro/opendistro-kibana/templates/wazuh.yml.j2 @@ -0,0 +1,134 @@ +--- +# +# Wazuh app - App configuration file +# Copyright (C) 2015-2019 Wazuh, Inc. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Find more information about this on the LICENSE file. +# +# ======================== Wazuh app configuration file ======================== +# +# Please check the documentation for more information on configuration options: +# https://documentation.wazuh.com/current/installation-guide/index.html +# +# Also, you can check our repository: +# https://github.com/wazuh/wazuh-kibana-app +# +# ------------------------------- Index patterns ------------------------------- +# +# Default index pattern to use. +#pattern: wazuh-alerts-3.x-* +# +# ----------------------------------- Checks ----------------------------------- +# +# Defines which checks must to be consider by the healthcheck +# step once the Wazuh app starts. Values must to be true or false. +#checks.pattern : true +#checks.template: true +#checks.api : true +#checks.setup : true +# +# --------------------------------- Extensions --------------------------------- +# +# Defines which extensions should be activated when you add a new API entry. +# You can change them after Wazuh app starts. +# Values must to be true or false. +#extensions.pci : true +#extensions.gdpr : true +#extensions.hipaa : true +#extensions.nist : true +#extensions.audit : true +#extensions.oscap : false +#extensions.ciscat : false +#extensions.aws : false +#extensions.virustotal: false +#extensions.osquery : false +#extensions.docker : false +# +# ---------------------------------- Time out ---------------------------------- +# +# Defines maximum timeout to be used on the Wazuh app requests. +# It will be ignored if it is bellow 1500. 
+# It means milliseconds before we consider a request as failed. +# Default: 20000 +#timeout: 20000 +# +# ------------------------------ Advanced indices ------------------------------ +# +# Configure .wazuh indices shards and replicas. +#wazuh.shards : 1 +#wazuh.replicas : 0 +# +# --------------------------- Index pattern selector --------------------------- +# +# Defines if the user is allowed to change the selected +# index pattern directly from the Wazuh app top menu. +# Default: true +#ip.selector: true +# +# List of index patterns to be ignored +#ip.ignore: [] +# +# -------------------------------- X-Pack RBAC --------------------------------- +# +# Custom setting to enable/disable built-in X-Pack RBAC security capabilities. +# Default: enabled +#xpack.rbac.enabled: true +# +# ------------------------------ wazuh-monitoring ------------------------------ +# +# Custom setting to enable/disable wazuh-monitoring indices. +# Values: true, false, worker +# If worker is given as value, the app will show the Agents status +# visualization but won't insert data on wazuh-monitoring indices. +# Default: true +#wazuh.monitoring.enabled: true +# +# Custom setting to set the frequency for wazuh-monitoring indices cron task. +# Default: 900 (s) +#wazuh.monitoring.frequency: 900 +# +# Configure wazuh-monitoring-3.x-* indices shards and replicas. +#wazuh.monitoring.shards: 2 +#wazuh.monitoring.replicas: 0 +# +# Configure wazuh-monitoring-3.x-* indices custom creation interval. +# Values: h (hourly), d (daily), w (weekly), m (monthly) +# Default: d +#wazuh.monitoring.creation: d +# +# Default index pattern to use for Wazuh monitoring +#wazuh.monitoring.pattern: wazuh-monitoring-3.x-* +# +# +# ------------------------------- App privileges -------------------------------- +#admin: true +# +# ------------------------------- App logging level ----------------------------- +# Set the logging level for the Wazuh App log files. +# Default value: info +# Allowed values: info, debug +#logs.level: info +# +#-------------------------------- API entries ----------------------------------- +#The following configuration is the default structure to define an API entry. +# +#hosts: +# - : +# url: http(s):// +# port: +# user: +# password: + +hosts: +{% for api in wazuh_api_credentials %} + - {{ api['id'] }}: + url: {{ api['url'] }} + port: {{ api['port'] }} + user: {{ api['user'] }} + password: {{ api['password'] }} +{% endfor %} diff --git a/roles/wazuh/ansible-filebeat-oss/README.md b/roles/wazuh/ansible-filebeat-oss/README.md new file mode 100644 index 00000000..bed47531 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/README.md @@ -0,0 +1,39 @@ +Ansible Role: Filebeat for Elastic Stack +------------------------------------ + +An Ansible Role that installs [Filebeat-oss](https://www.elastic.co/products/beats/filebeat), this can be used in conjunction with [ansible-wazuh-manager](https://github.com/wazuh/wazuh-ansible/ansible-wazuh-server). + +Requirements +------------ + +This role will work on: + * Red Hat + * CentOS + * Fedora + * Debian + * Ubuntu + +Role Variables +-------------- + +Available variables are listed below, along with default values (see `defaults/main.yml`): + +``` + filebeat_output_elasticsearch_enabled: false + filebeat_output_elasticsearch_hosts: + - "localhost:9200" + +``` + +License and copyright +--------------------- + +WAZUH Copyright (C) 2020 Wazuh Inc. 
(License GPLv3) + +### Based on previous work from geerlingguy + + - https://github.com/geerlingguy/ansible-role-filebeat + +### Modified by Wazuh + +The playbooks have been modified by Wazuh, including some specific requirements, templates and configuration to improve integration with Wazuh ecosystem. diff --git a/roles/wazuh/ansible-filebeat-oss/defaults/main.yml b/roles/wazuh/ansible-filebeat-oss/defaults/main.yml new file mode 100644 index 00000000..7603fd51 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/defaults/main.yml @@ -0,0 +1,30 @@ +--- +filebeat_version: 7.7.0 + +filebeat_create_config: true + +filebeat_output_elasticsearch_enabled: false +filebeat_output_elasticsearch_hosts: + - "localhost:9200" + +filebeat_module_package_url: https://packages.wazuh.com/3.x/filebeat +filebeat_module_package_name: wazuh-filebeat-0.1.tar.gz +filebeat_module_package_path: /tmp/ +filebeat_module_destination: /usr/share/filebeat/module +filebeat_module_folder: /usr/share/filebeat/module/wazuh +elasticsearch_security_user: admin +elasticsearch_security_password: changeme +# Security plugin +filebeat_security: true +filebeat_security_user: admin +filebeat_security_password: changeme +filebeat_ssl_dir: /etc/pki/filebeat + +# Local path to store the generated certificates (OpenDistro security plugin) +local_certs_path: /tmp/opendistro-nodecerts + +elasticrepo: + apt: 'https://artifacts.elastic.co/packages/oss-7.x/apt' + yum: 'https://artifacts.elastic.co/packages/oss-7.x/yum' + gpg: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch' + key_id: '46095ACC8548582C1A2699A9D27D666CD88E42B4' diff --git a/roles/wazuh/ansible-filebeat-oss/handlers/main.yml b/roles/wazuh/ansible-filebeat-oss/handlers/main.yml new file mode 100644 index 00000000..96e15a22 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/handlers/main.yml @@ -0,0 +1,3 @@ +--- +- name: restart filebeat + service: name=filebeat state=restarted diff --git a/roles/wazuh/ansible-filebeat-oss/meta/main.yml b/roles/wazuh/ansible-filebeat-oss/meta/main.yml new file mode 100644 index 00000000..4fd7e900 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/meta/main.yml @@ -0,0 +1,29 @@ +--- +dependencies: [] + +galaxy_info: + author: Wazuh + description: Installing and maintaining Filebeat-oss. + company: wazuh.com + license: license (GPLv3) + min_ansible_version: 2.0 + platforms: + - name: EL + versions: + - 6 + - 7 + - name: Fedora + versions: + - all + - name: Debian + versions: + - jessie + - name: Ubuntu + versions: + - precise + - trusty + - xenial + galaxy_tags: + - web + - system + - monitoring diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/Debian.yml b/roles/wazuh/ansible-filebeat-oss/tasks/Debian.yml new file mode 100644 index 00000000..33c94cf6 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/tasks/Debian.yml @@ -0,0 +1,22 @@ +--- +- name: Debian/Ubuntu | Install apt-transport-https and ca-certificates + apt: + name: + - apt-transport-https + - ca-certificates + state: present + register: filebeat_ca_packages_install + until: filebeat_ca_packages_install is succeeded + +- name: Debian/Ubuntu | Add Elasticsearch apt key. + apt_key: + url: "{{ elasticrepo.gpg }}" + id: "{{ elasticrepo.key_id }}" + state: present + +- name: Debian/Ubuntu | Add Filebeat-oss repository. 
+ apt_repository: + repo: "deb {{ elasticrepo.apt }} stable main" + state: present + update_cache: true + changed_when: false diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/RMDebian.yml b/roles/wazuh/ansible-filebeat-oss/tasks/RMDebian.yml new file mode 100644 index 00000000..25a33909 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/tasks/RMDebian.yml @@ -0,0 +1,6 @@ +--- +- name: Debian/Ubuntu | Remove Filebeat repository (and clean up left-over metadata) + apt_repository: + repo: "deb {{ elasticrepo.apt }} stable main" + state: absent + changed_when: false diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/RMRedHat.yml b/roles/wazuh/ansible-filebeat-oss/tasks/RMRedHat.yml new file mode 100644 index 00000000..8565894e --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/tasks/RMRedHat.yml @@ -0,0 +1,6 @@ +--- +- name: RedHat/CentOS/Fedora | Remove Filebeat repository (and clean up left-over metadata) + yum_repository: + name: elastic_oss-repo_7 + state: absent + changed_when: false diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/RedHat.yml b/roles/wazuh/ansible-filebeat-oss/tasks/RedHat.yml new file mode 100644 index 00000000..74873aca --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/tasks/RedHat.yml @@ -0,0 +1,9 @@ +--- +- name: RedHat/CentOS/Fedora/Amazon Linux | Install Filebeats repo + yum_repository: + name: elastic_oss-repo_7 + description: Elastic repository for 7.x packages + baseurl: "{{ elasticrepo.yum }}" + gpgkey: "{{ elasticrepo.gpg }}" + gpgcheck: true + changed_when: false diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/config.yml b/roles/wazuh/ansible-filebeat-oss/tasks/config.yml new file mode 100644 index 00000000..f64c8ceb --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/tasks/config.yml @@ -0,0 +1,22 @@ +--- +- block: + - name: Copy Filebeat configuration. + template: + src: filebeat.yml.j2 + dest: "/etc/filebeat/filebeat.yml" + owner: root + group: root + mode: 0400 + notify: restart filebeat + + - name: Copy Elasticsearch template. 
+ template: + src: elasticsearch.yml.j2 + dest: "/etc/filebeat/wazuh-template.json" + owner: root + group: root + mode: 0400 + notify: restart filebeat + + tags: + - configure \ No newline at end of file diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/main.yml b/roles/wazuh/ansible-filebeat-oss/tasks/main.yml new file mode 100644 index 00000000..2c5d3de1 --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/tasks/main.yml @@ -0,0 +1,67 @@ +--- +- include_tasks: RedHat.yml + when: ansible_os_family == 'RedHat' + +- include_tasks: Debian.yml + when: ansible_os_family == 'Debian' + +- name: Install Filebeat + package: + name: filebeat + state: present + register: install + tags: + - install + - init + +- name: Checking if Filebeat Module folder file exists + stat: + path: "{{ filebeat_module_folder }}" + register: filebeat_module_folder + +- name: Download Filebeat module package + get_url: + url: "{{ filebeat_module_package_url }}/{{ filebeat_module_package_name }}" + dest: "{{ filebeat_module_package_path }}" + when: not filebeat_module_folder.stat.exists + +- name: Unpack Filebeat module package + unarchive: + src: "{{ filebeat_module_package_path }}/{{ filebeat_module_package_name }}" + dest: "{{ filebeat_module_destination }}" + remote_src: yes + when: not filebeat_module_folder.stat.exists + +- name: Setting 0755 permission for Filebeat module folder + file: dest={{ filebeat_module_folder }} mode=u=rwX,g=rwX,o=rwX recurse=yes + when: not filebeat_module_folder.stat.exists + +- name: Checking if Filebeat Module package file exists + stat: + path: "{{ filebeat_module_package_path }}/{{ filebeat_module_package_name }}" + register: filebeat_module_package + when: filebeat_module_package is not defined + +- name: Delete Filebeat module package file + file: + state: absent + path: "{{ filebeat_module_package_path }}/{{ filebeat_module_package_name }}" + when: filebeat_module_package.stat.exists + +- import_tasks: config.yml + notify: restart filebeat + +- include_tasks: security_actions.yml + when: filebeat_security + +- name: Ensure Filebeat is started and enabled at boot. + service: + name: filebeat + state: started + enabled: true + +- include_tasks: "RMRedHat.yml" + when: ansible_os_family == "RedHat" + +- include_tasks: "RMDebian.yml" + when: ansible_os_family == "Debian" diff --git a/roles/wazuh/ansible-filebeat-oss/tasks/security_actions.yml b/roles/wazuh/ansible-filebeat-oss/tasks/security_actions.yml new file mode 100644 index 00000000..dfea91ee --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/tasks/security_actions.yml @@ -0,0 +1,28 @@ +- block: + + - name: Ensure Filebeat SSL key pair directory exists. 
+ file: + path: "{{ filebeat_ssl_dir }}" + state: directory + + - name: Copy the certificates from local to the Manager instance + copy: + src: "{{ local_certs_path }}/certs/{{ item }}" + dest: "{{ filebeat_ssl_dir }}" + mode: 0644 + with_items: + - "{{ inventory_hostname }}.key" + - "{{ inventory_hostname }}.pem" + - "root-ca.pem" + + - name: Ensuring folder & certs permissions + file: + path: "{{ filebeat_ssl_dir }}/" + mode: 0774 + state: directory + recurse: yes + + tags: + - security + when: + - filebeat_security diff --git a/roles/wazuh/ansible-filebeat-oss/templates/elasticsearch.yml.j2 b/roles/wazuh/ansible-filebeat-oss/templates/elasticsearch.yml.j2 new file mode 100644 index 00000000..88d50c3f --- /dev/null +++ b/roles/wazuh/ansible-filebeat-oss/templates/elasticsearch.yml.j2 @@ -0,0 +1,1800 @@ +{ + "order": 0, + "index_patterns": [ + "wazuh-alerts-3.x-*", + "wazuh-archives-3.x-*" + ], + "settings": { + "index.refresh_interval": "5s", + "index.number_of_shards": "3", + "index.number_of_replicas": "0", + "index.auto_expand_replicas": "0-1", + "index.mapping.total_fields.limit": 10000, + "index.query.default_field": [ + "GeoLocation.city_name", + "GeoLocation.continent_code", + "GeoLocation.country_code2", + "GeoLocation.country_code3", + "GeoLocation.country_name", + "GeoLocation.ip", + "GeoLocation.postal_code", + "GeoLocation.real_region_name", + "GeoLocation.region_name", + "GeoLocation.timezone", + "agent.id", + "agent.ip", + "agent.name", + "cluster.name", + "cluster.node", + "command", + "data", + "data.action", + "data.audit", + "data.audit.acct", + "data.audit.arch", + "data.audit.auid", + "data.audit.command", + "data.audit.cwd", + "data.audit.dev", + "data.audit.directory.inode", + "data.audit.directory.mode", + "data.audit.directory.name", + "data.audit.egid", + "data.audit.enforcing", + "data.audit.euid", + "data.audit.exe", + "data.audit.execve.a0", + "data.audit.execve.a1", + "data.audit.execve.a2", + "data.audit.execve.a3", + "data.audit.exit", + "data.audit.file.inode", + "data.audit.file.mode", + "data.audit.file.name", + "data.audit.fsgid", + "data.audit.fsuid", + "data.audit.gid", + "data.audit.id", + "data.audit.key", + "data.audit.list", + "data.audit.old-auid", + "data.audit.old-ses", + "data.audit.old_enforcing", + "data.audit.old_prom", + "data.audit.op", + "data.audit.pid", + "data.audit.ppid", + "data.audit.prom", + "data.audit.res", + "data.audit.session", + "data.audit.sgid", + "data.audit.srcip", + "data.audit.subj", + "data.audit.success", + "data.audit.suid", + "data.audit.syscall", + "data.audit.tty", + "data.audit.uid", + "data.aws.accountId", + "data.aws.account_id", + "data.aws.action", + "data.aws.actor", + "data.aws.aws_account_id", + "data.aws.description", + "data.aws.dstport", + "data.aws.errorCode", + "data.aws.errorMessage", + "data.aws.eventID", + "data.aws.eventName", + "data.aws.eventSource", + "data.aws.eventType", + "data.aws.id", + "data.aws.name", + "data.aws.requestParameters.accessKeyId", + "data.aws.requestParameters.bucketName", + "data.aws.requestParameters.gatewayId", + "data.aws.requestParameters.groupDescription", + "data.aws.requestParameters.groupId", + "data.aws.requestParameters.groupName", + "data.aws.requestParameters.host", + "data.aws.requestParameters.hostedZoneId", + "data.aws.requestParameters.instanceId", + "data.aws.requestParameters.instanceProfileName", + "data.aws.requestParameters.loadBalancerName", + "data.aws.requestParameters.loadBalancerPorts", + "data.aws.requestParameters.masterUserPassword", + 
"data.aws.requestParameters.masterUsername", + "data.aws.requestParameters.name", + "data.aws.requestParameters.natGatewayId", + "data.aws.requestParameters.networkAclId", + "data.aws.requestParameters.path", + "data.aws.requestParameters.policyName", + "data.aws.requestParameters.port", + "data.aws.requestParameters.stackId", + "data.aws.requestParameters.stackName", + "data.aws.requestParameters.subnetId", + "data.aws.requestParameters.subnetIds", + "data.aws.requestParameters.volumeId", + "data.aws.requestParameters.vpcId", + "data.aws.resource.accessKeyDetails.accessKeyId", + "data.aws.resource.accessKeyDetails.principalId", + "data.aws.resource.accessKeyDetails.userName", + "data.aws.resource.instanceDetails.instanceId", + "data.aws.resource.instanceDetails.instanceState", + "data.aws.resource.instanceDetails.networkInterfaces.privateDnsName", + "data.aws.resource.instanceDetails.networkInterfaces.publicDnsName", + "data.aws.resource.instanceDetails.networkInterfaces.subnetId", + "data.aws.resource.instanceDetails.networkInterfaces.vpcId", + "data.aws.resource.instanceDetails.tags.value", + "data.aws.responseElements.AssociateVpcCidrBlockResponse.vpcId", + "data.aws.responseElements.description", + "data.aws.responseElements.instanceId", + "data.aws.responseElements.instances.instanceId", + "data.aws.responseElements.instancesSet.items.instanceId", + "data.aws.responseElements.listeners.port", + "data.aws.responseElements.loadBalancerName", + "data.aws.responseElements.loadBalancers.vpcId", + "data.aws.responseElements.loginProfile.userName", + "data.aws.responseElements.networkAcl.vpcId", + "data.aws.responseElements.ownerId", + "data.aws.responseElements.publicIp", + "data.aws.responseElements.user.userId", + "data.aws.responseElements.user.userName", + "data.aws.responseElements.volumeId", + "data.aws.service.serviceName", + "data.aws.severity", + "data.aws.source", + "data.aws.sourceIPAddress", + "data.aws.srcport", + "data.aws.userIdentity.accessKeyId", + "data.aws.userIdentity.accountId", + "data.aws.userIdentity.userName", + "data.aws.vpcEndpointId", + "data.command", + "data.data", + "data.docker.Actor.Attributes.container", + "data.docker.Actor.Attributes.image", + "data.docker.Actor.Attributes.name", + "data.docker.Actor.ID", + "data.docker.id", + "data.docker.message", + "data.docker.status", + "data.dstip", + "data.dstport", + "data.dstuser", + "data.extra_data", + "data.hardware.serial", + "data.id", + "data.integration", + "data.netinfo.iface.adapter", + "data.netinfo.iface.ipv4.address", + "data.netinfo.iface.ipv6.address", + "data.netinfo.iface.mac", + "data.netinfo.iface.name", + "data.os.architecture", + "data.os.build", + "data.os.codename", + "data.os.hostname", + "data.os.major", + "data.os.minor", + "data.os.name", + "data.os.platform", + "data.os.release", + "data.os.release_version", + "data.os.sysname", + "data.os.version", + "data.oscap.check.description", + "data.oscap.check.id", + "data.oscap.check.identifiers", + "data.oscap.check.oval.id", + "data.oscap.check.rationale", + "data.oscap.check.references", + "data.oscap.check.result", + "data.oscap.check.severity", + "data.oscap.check.title", + "data.oscap.scan.benchmark.id", + "data.oscap.scan.content", + "data.oscap.scan.id", + "data.oscap.scan.profile.id", + "data.oscap.scan.profile.title", + "data.osquery.columns.address", + "data.osquery.columns.command", + "data.osquery.columns.description", + "data.osquery.columns.dst_ip", + "data.osquery.columns.gid", + "data.osquery.columns.hostname", + 
"data.osquery.columns.md5", + "data.osquery.columns.path", + "data.osquery.columns.sha1", + "data.osquery.columns.sha256", + "data.osquery.columns.src_ip", + "data.osquery.columns.user", + "data.osquery.columns.username", + "data.osquery.name", + "data.osquery.pack", + "data.port.process", + "data.port.protocol", + "data.port.state", + "data.process.args", + "data.process.cmd", + "data.process.egroup", + "data.process.euser", + "data.process.fgroup", + "data.process.name", + "data.process.rgroup", + "data.process.ruser", + "data.process.sgroup", + "data.process.state", + "data.process.suser", + "data.program.architecture", + "data.program.description", + "data.program.format", + "data.program.location", + "data.program.multiarch", + "data.program.name", + "data.program.priority", + "data.program.section", + "data.program.source", + "data.program.vendor", + "data.program.version", + "data.protocol", + "data.pwd", + "data.sca", + "data.sca.check.compliance.cis", + "data.sca.check.compliance.cis_csc", + "data.sca.check.compliance.pci_dss", + "data.sca.check.compliance.hipaa", + "data.sca.check.compliance.nist_800_53", + "data.sca.check.description", + "data.sca.check.directory", + "data.sca.check.file", + "data.sca.check.id", + "data.sca.check.previous_result", + "data.sca.check.process", + "data.sca.check.rationale", + "data.sca.check.reason", + "data.sca.check.references", + "data.sca.check.registry", + "data.sca.check.remediation", + "data.sca.check.result", + "data.sca.check.status", + "data.sca.check.title", + "data.sca.description", + "data.sca.file", + "data.sca.invalid", + "data.sca.name", + "data.sca.policy", + "data.sca.policy_id", + "data.sca.scan_id", + "data.sca.total_checks", + "data.script", + "data.src_ip", + "data.src_port", + "data.srcip", + "data.srcport", + "data.srcuser", + "data.status", + "data.system_name", + "data.title", + "data.tty", + "data.uid", + "data.url", + "data.virustotal.description", + "data.virustotal.error", + "data.virustotal.found", + "data.virustotal.permalink", + "data.virustotal.scan_date", + "data.virustotal.sha1", + "data.virustotal.source.alert_id", + "data.virustotal.source.file", + "data.virustotal.source.md5", + "data.virustotal.source.sha1", + "data.vulnerability.advisories", + "data.vulnerability.bugzilla_reference", + "data.vulnerability.cve", + "data.vulnerability.cvss.cvss2.base_score", + "data.vulnerability.cvss.cvss2.exploitability_score", + "data.vulnerability.cvss.cvss2.impact_score", + "data.vulnerability.cvss.cvss2.vector.access_complexity", + "data.vulnerability.cvss.cvss2.vector.attack_vector", + "data.vulnerability.cvss.cvss2.vector.authentication", + "data.vulnerability.cvss.cvss2.vector.availability", + "data.vulnerability.cvss.cvss2.vector.confidentiality_impact", + "data.vulnerability.cvss.cvss2.vector.integrity_impact", + "data.vulnerability.cvss.cvss2.vector.privileges_required", + "data.vulnerability.cvss.cvss2.vector.scope", + "data.vulnerability.cvss.cvss2.vector.user_interaction", + "data.vulnerability.cvss.cvss3.base_score", + "data.vulnerability.cvss.cvss3.exploitability_score", + "data.vulnerability.cvss.cvss3.impact_score", + "data.vulnerability.cvss.cvss3.vector.access_complexity", + "data.vulnerability.cvss.cvss3.vector.attack_vector", + "data.vulnerability.cvss.cvss3.vector.authentication", + "data.vulnerability.cvss.cvss3.vector.availability", + "data.vulnerability.cvss.cvss3.vector.confidentiality_impact", + "data.vulnerability.cvss.cvss3.vector.integrity_impact", + 
"data.vulnerability.cvss.cvss3.vector.privileges_required", + "data.vulnerability.cvss.cvss3.vector.scope", + "data.vulnerability.cvss.cvss3.vector.user_interaction", + "data.vulnerability.cwe_reference", + "data.vulnerability.package.architecture", + "data.vulnerability.package.condition", + "data.vulnerability.package.generated_cpe", + "data.vulnerability.package.name", + "data.vulnerability.package.version", + "data.vulnerability.rationale", + "data.vulnerability.reference", + "data.vulnerability.severity", + "data.vulnerability.state", + "data.vulnerability.title", + "data.win.eventdata.auditPolicyChanges", + "data.win.eventdata.auditPolicyChangesId", + "data.win.eventdata.binary", + "data.win.eventdata.category", + "data.win.eventdata.categoryId", + "data.win.eventdata.data", + "data.win.eventdata.image", + "data.win.eventdata.ipAddress", + "data.win.eventdata.ipPort", + "data.win.eventdata.keyName", + "data.win.eventdata.logonGuid", + "data.win.eventdata.logonProcessName", + "data.win.eventdata.operation", + "data.win.eventdata.parentImage", + "data.win.eventdata.processId", + "data.win.eventdata.processName", + "data.win.eventdata.providerName", + "data.win.eventdata.returnCode", + "data.win.eventdata.service", + "data.win.eventdata.status", + "data.win.eventdata.subcategory", + "data.win.eventdata.subcategoryGuid", + "data.win.eventdata.subcategoryId", + "data.win.eventdata.subjectDomainName", + "data.win.eventdata.subjectLogonId", + "data.win.eventdata.subjectUserName", + "data.win.eventdata.subjectUserSid", + "data.win.eventdata.targetDomainName", + "data.win.eventdata.targetLinkedLogonId", + "data.win.eventdata.targetLogonId", + "data.win.eventdata.targetUserName", + "data.win.eventdata.targetUserSid", + "data.win.eventdata.workstationName", + "data.win.system.channel", + "data.win.system.computer", + "data.win.system.eventID", + "data.win.system.eventRecordID", + "data.win.system.eventSourceName", + "data.win.system.keywords", + "data.win.system.level", + "data.win.system.message", + "data.win.system.opcode", + "data.win.system.processID", + "data.win.system.providerGuid", + "data.win.system.providerName", + "data.win.system.securityUserID", + "data.win.system.severityValue", + "data.win.system.userID", + "decoder.ftscomment", + "decoder.name", + "decoder.parent", + "full_log", + "host", + "id", + "input", + "location", + "manager.name", + "message", + "offset", + "predecoder.hostname", + "predecoder.program_name", + "previous_log", + "previous_output", + "program_name", + "rule.cis", + "rule.cve", + "rule.description", + "rule.gdpr", + "rule.gpg13", + "rule.groups", + "rule.id", + "rule.info", + "rule.pci_dss", + "rule.hipaa", + "rule.nist_800_53", + "syscheck.audit.effective_user.id", + "syscheck.audit.effective_user.name", + "syscheck.audit.group.id", + "syscheck.audit.group.name", + "syscheck.audit.login_user.id", + "syscheck.audit.login_user.name", + "syscheck.audit.process.id", + "syscheck.audit.process.name", + "syscheck.audit.process.ppid", + "syscheck.audit.user.id", + "syscheck.audit.user.name", + "syscheck.diff", + "syscheck.event", + "syscheck.gid_after", + "syscheck.gid_before", + "syscheck.gname_after", + "syscheck.gname_before", + "syscheck.inode_after", + "syscheck.inode_before", + "syscheck.md5_after", + "syscheck.md5_before", + "syscheck.path", + "syscheck.perm_after", + "syscheck.perm_before", + "syscheck.sha1_after", + "syscheck.sha1_before", + "syscheck.sha256_after", + "syscheck.sha256_before", + "syscheck.tags", + "syscheck.uid_after", + 
"syscheck.uid_before", + "syscheck.uname_after", + "syscheck.uname_before", + "title", + "type" + ] + }, + "mappings": { + "dynamic_templates": [ + { + "string_as_keyword": { + "mapping": { + "type": "keyword" + }, + "match_mapping_type": "string" + } + } + ], + "date_detection": false, + "properties": { + "@timestamp": { + "type": "date" + }, + "timestamp": { + "type": "date", + "format": "date_optional_time||epoch_millis" + }, + "@version": { + "type": "text" + }, + "agent": { + "properties": { + "ip": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "manager": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "cluster": { + "properties": { + "name": { + "type": "keyword" + }, + "node": { + "type": "keyword" + } + } + }, + "full_log": { + "type": "text" + }, + "previous_log": { + "type": "text" + }, + "GeoLocation": { + "properties": { + "area_code": { + "type": "long" + }, + "city_name": { + "type": "keyword" + }, + "continent_code": { + "type": "text" + }, + "coordinates": { + "type": "double" + }, + "country_code2": { + "type": "text" + }, + "country_code3": { + "type": "text" + }, + "country_name": { + "type": "keyword" + }, + "dma_code": { + "type": "long" + }, + "ip": { + "type": "keyword" + }, + "latitude": { + "type": "double" + }, + "location": { + "type": "geo_point" + }, + "longitude": { + "type": "double" + }, + "postal_code": { + "type": "keyword" + }, + "real_region_name": { + "type": "keyword" + }, + "region_name": { + "type": "keyword" + }, + "timezone": { + "type": "text" + } + } + }, + "host": { + "type": "keyword" + }, + "syscheck": { + "properties": { + "path": { + "type": "keyword" + }, + "sha1_before": { + "type": "keyword" + }, + "sha1_after": { + "type": "keyword" + }, + "uid_before": { + "type": "keyword" + }, + "uid_after": { + "type": "keyword" + }, + "gid_before": { + "type": "keyword" + }, + "gid_after": { + "type": "keyword" + }, + "perm_before": { + "type": "keyword" + }, + "perm_after": { + "type": "keyword" + }, + "md5_after": { + "type": "keyword" + }, + "md5_before": { + "type": "keyword" + }, + "gname_after": { + "type": "keyword" + }, + "gname_before": { + "type": "keyword" + }, + "inode_after": { + "type": "keyword" + }, + "inode_before": { + "type": "keyword" + }, + "mtime_after": { + "type": "date", + "format": "date_optional_time" + }, + "mtime_before": { + "type": "date", + "format": "date_optional_time" + }, + "uname_after": { + "type": "keyword" + }, + "uname_before": { + "type": "keyword" + }, + "size_before": { + "type": "long" + }, + "size_after": { + "type": "long" + }, + "diff": { + "type": "keyword" + }, + "event": { + "type": "keyword" + }, + "audit": { + "properties": { + "effective_user": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "group": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "login_user": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "process": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "ppid": { + "type": "keyword" + } + } + }, + "user": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + } + } + }, + "sha256_after": { + "type": "keyword" + }, + "sha256_before": { + "type": "keyword" + }, + "tags": { + "type": "keyword" + } + } + }, + "location": { + "type": "keyword" + }, + "message": { 
+ "type": "text" + }, + "offset": { + "type": "keyword" + }, + "rule": { + "properties": { + "description": { + "type": "keyword" + }, + "groups": { + "type": "keyword" + }, + "level": { + "type": "long" + }, + "id": { + "type": "keyword" + }, + "cve": { + "type": "keyword" + }, + "info": { + "type": "keyword" + }, + "frequency": { + "type": "long" + }, + "firedtimes": { + "type": "long" + }, + "cis": { + "type": "keyword" + }, + "pci_dss": { + "type": "keyword" + }, + "gdpr": { + "type": "keyword" + }, + "gpg13": { + "type": "keyword" + }, + "hipaa": { + "type": "keyword" + }, + "nist_800_53": { + "type": "keyword" + }, + "mail": { + "type": "boolean" + } + } + }, + "predecoder": { + "properties": { + "program_name": { + "type": "keyword" + }, + "timestamp": { + "type": "keyword" + }, + "hostname": { + "type": "keyword" + } + } + }, + "decoder": { + "properties": { + "parent": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "ftscomment": { + "type": "keyword" + }, + "fts": { + "type": "long" + }, + "accumulate": { + "type": "long" + } + } + }, + "data": { + "properties": { + "audit": { + "properties": { + "acct": { + "type": "keyword" + }, + "arch": { + "type": "keyword" + }, + "auid": { + "type": "keyword" + }, + "command": { + "type": "keyword" + }, + "cwd": { + "type": "keyword" + }, + "dev": { + "type": "keyword" + }, + "directory": { + "properties": { + "inode": { + "type": "keyword" + }, + "mode": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "egid": { + "type": "keyword" + }, + "enforcing": { + "type": "keyword" + }, + "euid": { + "type": "keyword" + }, + "exe": { + "type": "keyword" + }, + "execve": { + "properties": { + "a0": { + "type": "keyword" + }, + "a1": { + "type": "keyword" + }, + "a2": { + "type": "keyword" + }, + "a3": { + "type": "keyword" + } + } + }, + "exit": { + "type": "keyword" + }, + "file": { + "properties": { + "inode": { + "type": "keyword" + }, + "mode": { + "type": "keyword" + }, + "name": { + "type": "keyword" + } + } + }, + "fsgid": { + "type": "keyword" + }, + "fsuid": { + "type": "keyword" + }, + "gid": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "key": { + "type": "keyword" + }, + "list": { + "type": "keyword" + }, + "old-auid": { + "type": "keyword" + }, + "old-ses": { + "type": "keyword" + }, + "old_enforcing": { + "type": "keyword" + }, + "old_prom": { + "type": "keyword" + }, + "op": { + "type": "keyword" + }, + "pid": { + "type": "keyword" + }, + "ppid": { + "type": "keyword" + }, + "prom": { + "type": "keyword" + }, + "res": { + "type": "keyword" + }, + "session": { + "type": "keyword" + }, + "sgid": { + "type": "keyword" + }, + "srcip": { + "type": "keyword" + }, + "subj": { + "type": "keyword" + }, + "success": { + "type": "keyword" + }, + "suid": { + "type": "keyword" + }, + "syscall": { + "type": "keyword" + }, + "tty": { + "type": "keyword" + }, + "type": { + "type": "keyword" + }, + "uid": { + "type": "keyword" + } + } + }, + "protocol": { + "type": "keyword" + }, + "action": { + "type": "keyword" + }, + "srcip": { + "type": "keyword" + }, + "dstip": { + "type": "keyword" + }, + "srcport": { + "type": "keyword" + }, + "dstport": { + "type": "keyword" + }, + "srcuser": { + "type": "keyword" + }, + "dstuser": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "status": { + "type": "keyword" + }, + "data": { + "type": "keyword" + }, + "extra_data": { + "type": "keyword" + }, + "system_name": { + "type": "keyword" + }, + "url": { + "type": "keyword" + }, + "oscap": { 
+ "properties": { + "check": { + "properties": { + "description": { + "type": "text" + }, + "id": { + "type": "keyword" + }, + "identifiers": { + "type": "text" + }, + "oval": { + "properties": { + "id": { + "type": "keyword" + } + } + }, + "rationale": { + "type": "text" + }, + "references": { + "type": "text" + }, + "result": { + "type": "keyword" + }, + "severity": { + "type": "keyword" + }, + "title": { + "type": "keyword" + } + } + }, + "scan": { + "properties": { + "benchmark": { + "properties": { + "id": { + "type": "keyword" + } + } + }, + "content": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "profile": { + "properties": { + "id": { + "type": "keyword" + }, + "title": { + "type": "keyword" + } + } + }, + "return_code": { + "type": "long" + }, + "score": { + "type": "double" + } + } + } + } + }, + "type": { + "type": "keyword" + }, + "netinfo": { + "properties": { + "iface": { + "properties": { + "name": { + "type": "keyword" + }, + "mac": { + "type": "keyword" + }, + "adapter": { + "type": "keyword" + }, + "type": { + "type": "keyword" + }, + "state": { + "type": "keyword" + }, + "mtu": { + "type": "long" + }, + "tx_bytes": { + "type": "long" + }, + "rx_bytes": { + "type": "long" + }, + "tx_errors": { + "type": "long" + }, + "rx_errors": { + "type": "long" + }, + "tx_dropped": { + "type": "long" + }, + "rx_dropped": { + "type": "long" + }, + "tx_packets": { + "type": "long" + }, + "rx_packets": { + "type": "long" + }, + "ipv4": { + "properties": { + "gateway": { + "type": "keyword" + }, + "dhcp": { + "type": "keyword" + }, + "address": { + "type": "keyword" + }, + "netmask": { + "type": "keyword" + }, + "broadcast": { + "type": "keyword" + }, + "metric": { + "type": "long" + } + } + }, + "ipv6": { + "properties": { + "gateway": { + "type": "keyword" + }, + "dhcp": { + "type": "keyword" + }, + "address": { + "type": "keyword" + }, + "netmask": { + "type": "keyword" + }, + "broadcast": { + "type": "keyword" + }, + "metric": { + "type": "long" + } + } + } + } + } + } + }, + "os": { + "properties": { + "hostname": { + "type": "keyword" + }, + "architecture": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "version": { + "type": "keyword" + }, + "codename": { + "type": "keyword" + }, + "major": { + "type": "keyword" + }, + "minor": { + "type": "keyword" + }, + "build": { + "type": "keyword" + }, + "platform": { + "type": "keyword" + }, + "sysname": { + "type": "keyword" + }, + "release": { + "type": "keyword" + }, + "release_version": { + "type": "keyword" + } + } + }, + "port": { + "properties": { + "protocol": { + "type": "keyword" + }, + "local_ip": { + "type": "ip" + }, + "local_port": { + "type": "long" + }, + "remote_ip": { + "type": "ip" + }, + "remote_port": { + "type": "long" + }, + "tx_queue": { + "type": "long" + }, + "rx_queue": { + "type": "long" + }, + "inode": { + "type": "long" + }, + "state": { + "type": "keyword" + }, + "pid": { + "type": "long" + }, + "process": { + "type": "keyword" + } + } + }, + "hardware": { + "properties": { + "serial": { + "type": "keyword" + }, + "cpu_name": { + "type": "keyword" + }, + "cpu_cores": { + "type": "long" + }, + "cpu_mhz": { + "type": "double" + }, + "ram_total": { + "type": "long" + }, + "ram_free": { + "type": "long" + }, + "ram_usage": { + "type": "long" + } + } + }, + "program": { + "properties": { + "format": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "priority": { + "type": "keyword" + }, + "section": { + "type": "keyword" + }, + "size": { + "type": "long" + }, + 
"vendor": { + "type": "keyword" + }, + "install_time": { + "type": "keyword" + }, + "version": { + "type": "keyword" + }, + "architecture": { + "type": "keyword" + }, + "multiarch": { + "type": "keyword" + }, + "source": { + "type": "keyword" + }, + "description": { + "type": "keyword" + }, + "location": { + "type": "keyword" + } + } + }, + "process": { + "properties": { + "pid": { + "type": "long" + }, + "name": { + "type": "keyword" + }, + "state": { + "type": "keyword" + }, + "ppid": { + "type": "long" + }, + "utime": { + "type": "long" + }, + "stime": { + "type": "long" + }, + "cmd": { + "type": "keyword" + }, + "args": { + "type": "keyword" + }, + "euser": { + "type": "keyword" + }, + "ruser": { + "type": "keyword" + }, + "suser": { + "type": "keyword" + }, + "egroup": { + "type": "keyword" + }, + "sgroup": { + "type": "keyword" + }, + "fgroup": { + "type": "keyword" + }, + "rgroup": { + "type": "keyword" + }, + "priority": { + "type": "long" + }, + "nice": { + "type": "long" + }, + "size": { + "type": "long" + }, + "vm_size": { + "type": "long" + }, + "resident": { + "type": "long" + }, + "share": { + "type": "long" + }, + "start_time": { + "type": "long" + }, + "pgrp": { + "type": "long" + }, + "session": { + "type": "long" + }, + "nlwp": { + "type": "long" + }, + "tgid": { + "type": "long" + }, + "tty": { + "type": "long" + }, + "processor": { + "type": "long" + } + } + }, + "sca": { + "properties": { + "type": { + "type": "keyword" + }, + "scan_id": { + "type": "keyword" + }, + "policy": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "file": { + "type": "keyword" + }, + "description": { + "type": "keyword" + }, + "passed": { + "type": "integer" + }, + "failed": { + "type": "integer" + }, + "score": { + "type": "long" + }, + "check": { + "properties": { + "id": { + "type": "keyword" + }, + "title": { + "type": "keyword" + }, + "description": { + "type": "keyword" + }, + "rationale": { + "type": "keyword" + }, + "remediation": { + "type": "keyword" + }, + "compliance": { + "properties": { + "cis": { + "type": "keyword" + }, + "cis_csc": { + "type": "keyword" + }, + "pci_dss": { + "type": "keyword" + }, + "hipaa": { + "type": "keyword" + }, + "nist_800_53": { + "type": "keyword" + } + } + }, + "references": { + "type": "keyword" + }, + "file": { + "type": "keyword" + }, + "directory": { + "type": "keyword" + }, + "registry": { + "type": "keyword" + }, + "process": { + "type": "keyword" + }, + "result": { + "type": "keyword" + }, + "previous_result": { + "type": "keyword" + }, + "reason": { + "type": "keyword" + }, + "status": { + "type": "keyword" + } + } + }, + "invalid": { + "type": "keyword" + }, + "policy_id": { + "type": "keyword" + }, + "total_checks": { + "type": "keyword" + } + } + }, + "command": { + "type": "keyword" + }, + "integration": { + "type": "keyword" + }, + "timestamp": { + "type": "date" + }, + "title": { + "type": "keyword" + }, + "uid": { + "type": "keyword" + }, + "virustotal": { + "properties": { + "description": { + "type": "keyword" + }, + "error": { + "type": "keyword" + }, + "found": { + "type": "keyword" + }, + "malicious": { + "type": "keyword" + }, + "permalink": { + "type": "keyword" + }, + "positives": { + "type": "keyword" + }, + "scan_date": { + "type": "keyword" + }, + "sha1": { + "type": "keyword" + }, + "source": { + "properties": { + "alert_id": { + "type": "keyword" + }, + "file": { + "type": "keyword" + }, + "md5": { + "type": "keyword" + }, + "sha1": { + "type": "keyword" + } + } + }, + "total": { + "type": "keyword" + } + 
} + }, + "vulnerability": { + "properties": { + "advisories": { + "type": "keyword" + }, + "bugzilla_reference": { + "type": "keyword" + }, + "cve": { + "type": "keyword" + }, + "cvss": { + "properties": { + "cvss2": { + "properties": { + "base_score": { + "type": "keyword" + }, + "exploitability_score": { + "type": "keyword" + }, + "impact_score": { + "type": "keyword" + }, + "vector": { + "properties": { + "access_complexity": { + "type": "keyword" + }, + "attack_vector": { + "type": "keyword" + }, + "authentication": { + "type": "keyword" + }, + "availability": { + "type": "keyword" + }, + "confidentiality_impact": { + "type": "keyword" + }, + "integrity_impact": { + "type": "keyword" + }, + "privileges_required": { + "type": "keyword" + }, + "scope": { + "type": "keyword" + }, + "user_interaction": { + "type": "keyword" + } + } + } + } + }, + "cvss3": { + "properties": { + "base_score": { + "type": "keyword" + }, + "exploitability_score": { + "type": "keyword" + }, + "impact_score": { + "type": "keyword" + }, + "vector": { + "properties": { + "access_complexity": { + "type": "keyword" + }, + "attack_vector": { + "type": "keyword" + }, + "authentication": { + "type": "keyword" + }, + "availability": { + "type": "keyword" + }, + "confidentiality_impact": { + "type": "keyword" + }, + "integrity_impact": { + "type": "keyword" + }, + "privileges_required": { + "type": "keyword" + }, + "scope": { + "type": "keyword" + }, + "user_interaction": { + "type": "keyword" + } + } + } + } + } + } + }, + "cwe_reference": { + "type": "keyword" + }, + "package": { + "properties": { + "architecture": { + "type": "keyword" + }, + "condition": { + "type": "keyword" + }, + "generated_cpe": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "version": { + "type": "keyword" + } + } + }, + "published": { + "type": "date" + }, + "updated": { + "type": "date" + }, + "rationale": { + "type": "keyword" + }, + "reference": { + "type": "keyword" + }, + "severity": { + "type": "keyword" + }, + "state": { + "type": "keyword" + }, + "title": { + "type": "keyword" + } + } + }, + "aws": { + "properties": { + "bytes": { + "type": "long" + }, + "dstaddr": { + "type": "ip" + }, + "srcaddr": { + "type": "ip" + }, + "end": { + "type": "date" + }, + "start": { + "type": "date" + }, + "source_ip_address": { + "type": "ip" + }, + "service": { + "properties": { + "count": { + "type": "long" + }, + "action.networkConnectionAction.remoteIpDetails": { + "properties": { + "ipAddressV4": { + "type": "ip" + }, + "geoLocation": { + "type": "geo_point" + } + } + }, + "eventFirstSeen": { + "type": "date" + }, + "eventLastSeen": { + "type": "date" + } + } + }, + "createdAt": { + "type": "date" + }, + "updatedAt": { + "type": "date" + }, + "resource.instanceDetails": { + "properties": { + "launchTime": { + "type": "date" + }, + "networkInterfaces": { + "properties": { + "privateIpAddress": { + "type": "ip" + }, + "publicIp": { + "type": "ip" + } + } + } + } + } + } + } + } + }, + "program_name": { + "type": "keyword" + }, + "command": { + "type": "keyword" + }, + "type": { + "type": "text" + }, + "title": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "input": { + "properties": { + "type": { + "type": "keyword" + } + } + }, + "previous_output": { + "type": "keyword" + } + } + }, + "version": 1 +} diff --git a/roles/wazuh/ansible-filebeat-oss/templates/filebeat.yml.j2 b/roles/wazuh/ansible-filebeat-oss/templates/filebeat.yml.j2 new file mode 100644 index 00000000..67a99347 --- /dev/null +++ 
b/roles/wazuh/ansible-filebeat-oss/templates/filebeat.yml.j2
@@ -0,0 +1,32 @@
+# Wazuh - Filebeat configuration file
+
+# Wazuh - Filebeat configuration file
+filebeat.modules:
+  - module: wazuh
+    alerts:
+      enabled: true
+    archives:
+      enabled: false
+
+setup.template.json.enabled: true
+setup.template.json.path: '/etc/filebeat/wazuh-template.json'
+setup.template.json.name: 'wazuh'
+setup.template.overwrite: true
+setup.ilm.enabled: false
+
+# Send events directly to Elasticsearch
+output.elasticsearch:
+  hosts: {{ filebeat_output_elasticsearch_hosts | to_json }}
+
+{% if filebeat_security %}
+  username: {{ elasticsearch_security_user }}
+  password: {{ elasticsearch_security_password }}
+  protocol: https
+  ssl.certificate_authorities:
+    - {{ filebeat_ssl_dir }}/root-ca.pem
+  ssl.certificate: "{{ filebeat_ssl_dir }}/{{ inventory_hostname }}.pem"
+  ssl.key: "{{ filebeat_ssl_dir }}/{{ inventory_hostname }}.key"
+{% endif %}
+
+# Optional. Send events to Logstash instead of Elasticsearch
+#output.logstash.hosts: ["YOUR_LOGSTASH_SERVER_IP:5000"]
\ No newline at end of file
diff --git a/roles/wazuh/ansible-filebeat/README.md b/roles/wazuh/ansible-filebeat/README.md
index ad588e64..416f7da0 100644
--- a/roles/wazuh/ansible-filebeat/README.md
+++ b/roles/wazuh/ansible-filebeat/README.md
@@ -28,7 +28,7 @@ Available variables are listed below, along with default values (see `defaults/m
 License and copyright
 ---------------------

-WAZUH Copyright (C) 2017 Wazuh Inc. (License GPLv3)
+WAZUH Copyright (C) 2020 Wazuh Inc. (License GPLv3)

 ### Based on previous work from geerlingguy
diff --git a/roles/wazuh/ansible-filebeat/defaults/main.yml b/roles/wazuh/ansible-filebeat/defaults/main.yml
index 78b6b3c7..12e62dce 100644
--- a/roles/wazuh/ansible-filebeat/defaults/main.yml
+++ b/roles/wazuh/ansible-filebeat/defaults/main.yml
@@ -1,5 +1,5 @@
 ---
-filebeat_version: 7.6.2
+filebeat_version: 7.7.0

 filebeat_create_config: true

@@ -22,10 +22,8 @@ filebeat_enable_logging: true
 filebeat_log_level: debug
 filebeat_log_dir: /var/log/mybeat
 filebeat_log_filename: mybeat.log
-
 filebeat_ssl_dir: /etc/pki/filebeat
 filebeat_ssl_certificate_file: ""
-filebeat_ssl_key_file: ""
 filebeat_ssl_insecure: "false"

 filebeat_module_package_url: https://packages.wazuh.com/3.x/filebeat
diff --git a/roles/wazuh/ansible-filebeat/tasks/config.yml b/roles/wazuh/ansible-filebeat/tasks/config.yml
index d45b06e8..2b0b7eda 100644
--- a/roles/wazuh/ansible-filebeat/tasks/config.yml
+++ b/roles/wazuh/ansible-filebeat/tasks/config.yml
@@ -17,23 +17,4 @@
     group: root
     mode: 0400
   notify: restart filebeat
-  tags: configure
-
-- name: Ensure Filebeat SSL key pair directory exists.
-  file:
-    path: "{{ filebeat_ssl_dir }}"
-    state: directory
-  when: filebeat_ssl_key_file
-  tags: configure
-
-- name: Copy SSL key and cert for filebeat.
-  copy:
-    src: "{{ item }}"
-    dest: "{{ filebeat_ssl_dir }}/{{ item | basename }}"
-    mode: 0400
-  with_items:
-    - "{{ filebeat_ssl_key_file }}"
-    - "{{ filebeat_ssl_certificate_file }}"
-  notify: restart filebeat
-  when: filebeat_ssl_key_file and filebeat_ssl_certificate_file
-  tags: configure
+  tags: configure
\ No newline at end of file
diff --git a/roles/wazuh/ansible-filebeat/tasks/main.yml b/roles/wazuh/ansible-filebeat/tasks/main.yml
index 5a15926d..a6ef7fc8 100644
--- a/roles/wazuh/ansible-filebeat/tasks/main.yml
+++ b/roles/wazuh/ansible-filebeat/tasks/main.yml
@@ -80,7 +80,7 @@
     dest: "{{ filebeat_module_package_path }}"
   when: not filebeat_module_folder.stat.exists

-- name: Unpakcing Filebeat module package
+- name: Unpack Filebeat module package
   unarchive:
     src: "{{ filebeat_module_package_path }}/{{ filebeat_module_package_name }}"
     dest: "{{ filebeat_module_destination }}"
diff --git a/roles/wazuh/ansible-filebeat/tests/requirements.yml b/roles/wazuh/ansible-filebeat/tests/requirements.yml
deleted file mode 100644
index 63d857e2..00000000
--- a/roles/wazuh/ansible-filebeat/tests/requirements.yml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-- src: geerlingguy.java
-- src: geerlingguy.elasticsearch
diff --git a/roles/wazuh/ansible-filebeat/tests/test.yml b/roles/wazuh/ansible-filebeat/tests/test.yml
deleted file mode 100644
index 3a4c8f21..00000000
--- a/roles/wazuh/ansible-filebeat/tests/test.yml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- hosts: all
-
-  pre_tasks:
-    - name: Update apt cache.
-      apt:
-        cache_valid_time: 600
-      when: ansible_os_family == 'Debian'
-
-    - name: Install test dependencies (RedHat).
-      package: name=which state=present
-      when: ansible_os_family == 'RedHat'
-
-    - name: Install test dependencies.
-      package: name=curl state=present
-
-  roles:
-    - geerlingguy.java
-    - geerlingguy.elasticsearch
-    - role_under_test
diff --git a/roles/wazuh/ansible-wazuh-manager/README.md b/roles/wazuh/ansible-wazuh-manager/README.md
index 199e7810..ac52363d 100644
--- a/roles/wazuh/ansible-wazuh-manager/README.md
+++ b/roles/wazuh/ansible-wazuh-manager/README.md
@@ -218,7 +218,7 @@ Including an example of how to use your role (for instance, with variables passe
 License and copyright
 ---------------------

-WAZUH Copyright (C) 2017 Wazuh Inc. (License GPLv3)
+WAZUH Copyright (C) 2020 Wazuh Inc. (License GPLv3)

 ### Based on previous work from dj-wasabi
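Note on the filebeat.yml.j2 template added above: when filebeat_security is true, the rendered output.elasticsearch section expects credentials and certificate paths in addition to filebeat_output_elasticsearch_hosts. The sketch below shows one way those variables could be wired up from a playbook; the variable names come from the template itself, while the group name, host address, user, password, and certificate directory are illustrative placeholders rather than values taken from this changeset.

---
# Illustrative sketch only - not part of the patch.
# Enables the optional security block rendered by filebeat.yml.j2.
- hosts: wazuh_managers                           # placeholder inventory group
  roles:
    - role: ../roles/wazuh/ansible-filebeat-oss
      filebeat_output_elasticsearch_hosts:
        - "es-node-1:9200"                        # placeholder Elasticsearch address
      filebeat_security: true                     # renders the {% if filebeat_security %} block
      elasticsearch_security_user: admin          # placeholder user
      elasticsearch_security_password: changeme   # placeholder password
      filebeat_ssl_dir: /etc/pki/filebeat         # expected to hold root-ca.pem and <inventory_hostname>.pem/.key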