---
# tasks file for filebeat

# make sure the user typed YES before doing anything
- name: make sure user typed YES to confirm
  fail:
    msg: "user did not type YES"
  when: confirmation != 'YES'

- name: Init filebeat folders
  shell: mkdir -p /etc/filebeat /Data/logs/filebeat

- name: copy filebeat binary   # task name assumed; the original header is elided in this dump
  copy:
    src: "filebeat-5.6.4"
    dest: "/usr/sbin/filebeat"
    mode: 0755
  notify:
    - restart filebeat-base
    - restart filebeat

- name: Clean nginx-access folder
  shell: rm -rf /usr/share/filebeat/bin/data/nginx-access
  notify:
    - restart filebeat

- name: update filebeat base configuration file
  template:
    src: "base.yml"
    dest: "/etc/filebeat/filebeat-base.yml"
  notify:
    - restart filebeat-base

- name: copy nginx filebeat config files and start filebeat
  template:
    src: "{{ item }}.yml"
    dest: "/etc/filebeat/filebeat-{{ item }}.yml"
  with_items:
    - nginx-access
    - ufo-nginx-access
  notify:
    - restart filebeat
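
# The notify targets above ("restart filebeat" and "restart filebeat-base") refer to
# handlers that are not included in this dump. A minimal sketch of what they could
# look like, assuming both instances are managed as services with these (assumed)
# unit names:
- name: restart filebeat
  service:
    name: filebeat
    state: restarted

- name: restart filebeat-base
  service:
    name: filebeat-base
    state: restarted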

# Ships collector output and per-service kpi-event logs to Kafka as "metrics" events.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/local/ops/collector/data/*/*
    - /Data/logs/*/kpi-event.log
  document_type: metrics
  tail_files: true

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  # the Kafka topic is taken from the event's document_type
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships the activity service debug log as "trace_debug" events; lines that do not
# start with '[' are appended to the previous event (multiline continuation).
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/activity/debug-log.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships the bigdata service debug log as "trace_debug" events.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/bigdata/debug.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships the gateway debug log ("trace_debug", multiline) and the gateway access log
# ("gateway_access") to Kafka.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/gateway/debug.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after
- input_type: log
  paths:
    - /Data/logs/gateway/gateway_access.log
  tags: ["gateway_access"]
  document_type: gateway_access
  max_backoff: 5s

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships the order service debug log as "trace_debug" events.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/order/debug-log.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships the product service debug log as "trace_debug" events.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/product/debug-log.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships the promotion service debug log as "trace_debug" events.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/promotion/debug-log.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships the resources service debug log as "trace_debug" events.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/resources/debug-log.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships the sns service debug log as "trace_debug" events.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/sns/debug.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships uic register, login, and debug logs to Kafka, each under its own document type.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/uic/register-log.log
  tags: ["uic_register"]
  tail_files: true
  document_type: uic_register
- input_type: log
  paths:
    - /Data/logs/uic/uic-login-log.log
  tags: ["uic_login"]
  tail_files: true
  document_type: uic_login
- input_type: log
  paths:
    - /Data/logs/uic/debug-log.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2

# Ships union nginx access (ACTIVE_DINGDANG) and nginx error logs as "metrics"
# events, with source_ip and nginx_type added at the event root; uses gzip compression.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/union/ACTIVE_DINGDANG.log
  document_type: metrics
  fields:
    source_ip: "{{ hostvars[inventory_hostname]['ansible_default_ipv4']['address'] }}"
    nginx_type: "union"
  fields_under_root: true
  tail_files: true
- input_type: log
  paths:
    - /Data/logs/nginx/nginx_error.log
  document_type: metrics
  fields:
    source_ip: "{{ hostvars[inventory_hostname]['ansible_default_ipv4']['address'] }}"
    nginx_type: "union"
  fields_under_root: true
  tail_files: true
  multiline.pattern: '^\d{4}'
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: gzip
  max_message_bytes: 10000000
  worker: 2

# Ships the users service debug log as "trace_debug" events.
filebeat.prospectors:
- input_type: log
  paths:
    - /Data/logs/users/debug-log.log
  tags: ["trace_debug"]
  document_type: trace_debug
  tail_files: true
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after

output.kafka:
  enabled: true
  hosts: ["{{ kafka_hosts }}"]
  topic: '%{[type]}'
  compression: snappy
  max_message_bytes: 10000000
  worker: 2
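
# The role above refuses to run unless "confirmation" is exactly YES, and every
# prospector template references a "kafka_hosts" variable. A minimal playbook sketch
# for rolling it out; the role name "filebeat", the "log-shippers" host group, and
# the Kafka address are assumptions, not taken from this dump:
- hosts: log-shippers
  vars_prompt:
    - name: confirmation
      prompt: "Type YES to confirm the filebeat rollout"
      private: no
  vars:
    kafka_hosts: "10.0.0.1:9092"   # assumed placeholder address
  roles:
    - filebeat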