refactor: split elastic_beat into core_filebeat, metricbeat and dashmate #658

Merged · 12 commits · Aug 13, 2024
32 changes: 16 additions & 16 deletions ansible/deploy.yml
@@ -78,6 +78,7 @@
become: true
roles:
- elastic_stack
- metricbeat

- name: Setup load tester
hosts: load_test
@@ -90,7 +91,8 @@
hosts: metrics
become: true
roles:
- role: metrics
- metrics
- metricbeat

- name: Set up miners
hosts: miners
@@ -100,8 +102,8 @@
- role: dashd
tags:
- dashd
- role: elastic_beats
core_container_name: dashd
- core_filebeat
- metricbeat

- name: Set up mixers
hosts: mixer_nodes
@@ -111,8 +113,8 @@
- role: dashd
tags:
- dashd
- role: elastic_beats
core_container_name: dashd
- core_filebeat
- metricbeat

- name: Set up core and tenderdash on seed nodes
hosts: seed_nodes
@@ -133,8 +135,8 @@
tags:
- dashd
- role: tenderdash
- role: elastic_beats
core_container_name: dashd
- core_filebeat
- metricbeat

- name: Set up core on masternodes
hosts: masternodes
@@ -158,8 +160,8 @@
tags:
- dashd
- mn_status_report
- role: elastic_beats
core_container_name: dashd
- core_filebeat
- metricbeat


# Start network
@@ -189,8 +191,8 @@
dashd_zmq: true
dashd_listen: true
- insight
- role: elastic_beats
core_container_name: dashd
- core_filebeat
- metricbeat
tags:
- web

@@ -205,8 +207,8 @@
enable_wallet: true
tags:
- dashd
- role: elastic_beats
core_container_name: dashd
- core_filebeat
- metricbeat

# Register masternodes and set sporks

@@ -283,9 +285,7 @@
- role: dash_cli
- role: dashmate
- role: mn_status_report
- role: elastic_beats
core_container_name: core
abci_logs_path: "{{ dashmate_logs_dir }}"
- role: metricbeat

- name: Set up protx diff script
hosts: masternodes
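Because the hunks above are fragmented, here is a consolidated sketch of the resulting pattern for one core-running group. The per-play `core_container_name: dashd` parameters disappear because the new role carries that value as a default (see `ansible/roles/core_filebeat/defaults/main.yml` below); play-level keys outside the shown hunks (e.g. `become`) are assumed:

```yaml
# Sketch of the post-refactor role list for a core-running group
- name: Set up core on masternodes
  hosts: masternodes
  become: true
  roles:
    - role: dashd
      tags:
        - dashd
    - mn_status_report
    - core_filebeat          # core_container_name defaults to "dashd"
    - metricbeat
```

The dashmate-based play, by contrast, keeps only `metricbeat`: its core and platform log shipping moves into the dashmate role's new `logs.yml` further down.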
1 change: 1 addition & 0 deletions ansible/group_vars/all
@@ -157,6 +157,7 @@ kibana_encryptionkey:
# Set to 50% of instance memory
# https://www.elastic.co/guide/en/elasticsearch/guide/current/heap-sizing.html
elastic_heap_size: 8g
metricbeat_enabled: true

elastic_compose_project_name: elastic
elastic_path: '{{ dashd_home }}/{{ elastic_compose_project_name }}'
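The new `metricbeat_enabled` flag is defined globally, but the metricbeat role itself is not part of this excerpt. The following is only an assumed sketch of how such a flag is typically consumed inside a role:

```yaml
# Hypothetical sketch only -- the actual metricbeat role is not shown in this diff.
- name: Install and configure Metricbeat
  ansible.builtin.include_tasks: install.yml
  when: metricbeat_enabled | bool
```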
3 changes: 3 additions & 0 deletions ansible/roles/core_filebeat/defaults/main.yml
@@ -0,0 +1,3 @@
---

core_container_name: "dashd"
32 changes: 32 additions & 0 deletions ansible/roles/core_filebeat/tasks/main.yml
@@ -0,0 +1,32 @@
---

- name: Load common filebeat config
ansible.builtin.include_vars:
file: common.yml

- name: Get core container host info
community.docker.docker_host_info:
containers: true
containers_filters:
name: '{{ core_container_name }}'
register: core_host_info

- name: Set container ids for core if core is running
ansible.builtin.set_fact:
core_container_id: '{{ core_host_info.containers[0].Id }}'
when: core_host_info.containers | length > 0

- name: Load core input config if core is running
ansible.builtin.include_vars:
file: core.yml
when: core_container_id is defined

- name: Set up filebeat log monitoring
ansible.builtin.include_role:
name: geerlingguy.filebeat

- name: Make sure filebeat is restarted
ansible.builtin.service:
name: filebeat
state: restarted
enabled: true
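For context, `community.docker.docker_host_info` with `containers: true` registers a `docker ps`-style list, which is why the first match's `Id` can be taken directly when the container exists. A rough shape of the registered variable (hypothetical values, abridged field set):

```yaml
# Rough shape of core_host_info when the dashd container is running (sketch)
core_host_info:
  containers:
    - Id: "3f9d2c7b1a6e0d4f8c5b2a1e9d7c6b5a4f3e2d1c0b9a8f7e6d5c4b3a2f1e0d9c"
      Names:
        - "/dashd"
      State: "running"
```

When no container matches, `core_container_id` stays undefined, `core.yml` is skipped, and filebeat is still installed with only the output settings from `common.yml`.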
ansible/roles/core_filebeat/vars/common.yml
@@ -4,8 +4,15 @@ filebeat_version: 8.x
filebeat_package: "filebeat={{ elastic_version }}"
filebeat_output_logstash_enabled: false
filebeat_output_elasticsearch_enabled: true
filebeat_output_elasticsearch_hosts:
- "{{ hostvars['logs-1'].private_ip }}:9200"
filebeat_output_elasticsearch_hosts: >-
{{
query('inventory_hostnames', 'logs_nodes') |
map('extract', hostvars, ['private_ip']) |
map('string') |
product([':9200']) |
map('join') |
list
}}
filebeat_output_elasticsearch_auth:
username: "{{ elastic_username }}"
password: "{{ elastic_password }}"
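The rewritten `filebeat_output_elasticsearch_hosts` no longer hard-codes `logs-1`; it builds the list from every host in the `logs_nodes` group. With a hypothetical inventory of two log hosts (`logs-2` and the IP addresses are made up), the expression evaluates as follows:

```yaml
# Assuming hostvars['logs-1'].private_ip == "10.0.0.11"
#      and hostvars['logs-2'].private_ip == "10.0.0.12":
#
#   query('inventory_hostnames', 'logs_nodes')          -> ['logs-1', 'logs-2']
#   | map('extract', hostvars, ['private_ip'])          -> ['10.0.0.11', '10.0.0.12']
#   | map('string') | product([':9200']) | map('join')  -> ['10.0.0.11:9200', '10.0.0.12:9200']
#
filebeat_output_elasticsearch_hosts:
  - "10.0.0.11:9200"
  - "10.0.0.12:9200"
```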
ansible/roles/core_filebeat/vars/core.yml
@@ -2,7 +2,7 @@

filebeat_inputs:
- type: container
enabled: "{{ core_host_info.containers | length > 0 }}"
enabled: true
index: "logs-core-{{ dash_network_name }}-%{[agent.version]}"
paths:
- '/var/lib/docker/containers/{{ core_container_id }}/*.log'
53 changes: 53 additions & 0 deletions ansible/roles/dashmate/tasks/logs.yml
@@ -0,0 +1,53 @@
---

- name: Create logs dir
ansible.builtin.file:
path: '{{ dashmate_logs_dir }}'
state: directory
owner: '{{ dashmate_user }}'
group: '{{ dashmate_group }}'
recurse: true

- name: Configure log rotation
ansible.builtin.include_role:
name: arillso.logrotate
vars:
logrotate_applications:
- name: platform-logs
definitions:
- logs:
- '{{ dashmate_logs_dir }}/*.log'
options:
- rotate 7
- daily
- maxsize 1G
- missingok
- notifempty
- copytruncate
- compress
- delaycompress

- name: Ensure logrotate runs hourly under systemd timer
ansible.builtin.lineinfile:
path: /lib/systemd/system/logrotate.timer
regexp: '^OnCalendar=hourly'
insertafter: '^OnCalendar=daily'
line: OnCalendar=hourly

- name: Load common filebeat config
ansible.builtin.include_vars:
file: "{{ role_path }}/../core_filebeat/vars/common.yml"

- name: Load filebeat inputs
ansible.builtin.include_vars:
file: filebeat_inputs.yml

- name: Set up filebeat log monitoring
ansible.builtin.include_role:
name: geerlingguy.filebeat

- name: Make sure filebeat is restarted
ansible.builtin.service:
name: filebeat
state: restarted
enabled: true
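The `lineinfile` edit adds a second `OnCalendar=` line to `logrotate.timer`; systemd treats multiple `OnCalendar=` settings as cumulative triggers, so rotation effectively runs every hour and the `maxsize 1G` limit is enforced well before the daily rotation. The `arillso.logrotate` include should render roughly the following drop-in (sketch; the exact file name and header depend on the role, and `<dashmate_logs_dir>` stands for the expanded variable):

```
# /etc/logrotate.d/platform-logs (approximate rendering)
<dashmate_logs_dir>/*.log {
    rotate 7
    daily
    maxsize 1G
    missingok
    notifempty
    copytruncate
    compress
    delaycompress
}
```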
35 changes: 2 additions & 33 deletions ansible/roles/dashmate/tasks/main.yml
@@ -42,39 +42,8 @@
dir: '{{ dashmate_home }}'
users: '{{ system_users + [dashmate_user_dict] }}'

- name: Create logs dir
ansible.builtin.file:
path: '{{ dashmate_logs_dir }}'
state: directory
owner: '{{ dashmate_user }}'
group: '{{ dashmate_group }}'
recurse: true

- name: Configure log rotation
ansible.builtin.include_role:
name: arillso.logrotate
vars:
logrotate_applications:
- name: platform-logs
definitions:
- logs:
- '{{ dashmate_logs_dir }}/*.log'
options:
- rotate 7
- daily
- maxsize 1G
- missingok
- notifempty
- copytruncate
- compress
- delaycompress

- name: Ensure logrotate runs hourly under systemd timer
ansible.builtin.lineinfile:
path: /lib/systemd/system/logrotate.timer
regexp: '^OnCalendar=hourly'
insertafter: '^OnCalendar=daily'
line: OnCalendar=hourly
- name: Configure logs
ansible.builtin.import_tasks: ./logs.yml

- name: Create dashmate config dir
ansible.builtin.file:
61 changes: 61 additions & 0 deletions ansible/roles/dashmate/vars/filebeat_inputs.yml
@@ -0,0 +1,61 @@
---

filebeat_inputs:
- type: log
enabled: true
index: "logs-core-{{ dash_network_name }}-%{[agent.version]}"
paths:
- "{{ dashmate_logs_dir }}/core.log"
processors:
- add_fields:
target: event
fields:
dataset: "core-{{ dash_network_name }}"
- dissect:
tokenizer: "%{?timestamp} %{message}"
overwrite_keys: true
target_prefix: ""
- type: log
enabled: "{{ dashmate_platform_enable }}"
json.message_key: message
index: "logs-drive.abci-{{ dash_network_name }}-%{[agent.version]}"
paths:
- "{{ dashmate_logs_dir }}/drive-json.log"
processors:
- timestamp:
field: json.timestamp
layouts:
- UNIX_MS
- add_fields:
target: event
fields:
dataset: "drive.abci-{{ dash_network_name }}"
- rename:
fields:
- from: "json.fields.message"
to: "message"
- from: "json.level"
to: "log.level"
ignore_missing: true
fail_on_error: true
- type: log
enabled: "{{ dashmate_platform_enable }}"
json.message_key: message
index: "logs-drive.tenderdash-{{ dash_network_name }}-%{[agent.version]}"
paths:
- "{{ dashmate_logs_dir }}/tenderdash.log"
processors:
- add_fields:
target: event
fields:
dataset: "drive.tenderdash-{{ dash_network_name }}"
- rename:
fields:
- from: "json.message"
to: "message"
ignore_missing: true
fail_on_error: true
- rename:
fields:
- from: "json.level"
to: "log.level"