Compare commits


143 Commits

Author SHA1 Message Date
pengln 250958670c Merge pull request 'Add the filebeat collection processor for the uc01vn project' (#71) from pengln/filebeat:master into master
Reviewed-on: https://app-jms.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/71
2022-06-01 09:19:18 +08:00
pengln b5133e3bc3 Add the filebeat collection processor for the uc01vn project 2022-05-31 15:55:05 +08:00
pengln 26a4760a13 Merge pull request 'Fix the incorrect sublog check that caused index names to contain none' (#70) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/70
2022-02-18 14:26:58 +08:00
pengln 2a086b7935 Fix the incorrect sublog check that caused index names to contain none 2022-02-18 14:26:35 +08:00
pengln 01f4a45153 Merge pull request 'Fix the mismatch between template name and template pattern' (#69) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/69
2022-02-18 14:19:58 +08:00
pengln 431fd883d0 Fix the mismatch between template name and template pattern 2022-02-18 14:19:39 +08:00
pengln a4abffb2ea Merge pull request 'Fix the elastic_delays_metric_enabled property always rendering as False' (#68) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/68
2022-02-15 17:52:19 +08:00
pengln c4a5e690e2 Fix the elastic_delays_metric_enabled property always rendering as False 2022-02-15 17:51:59 +08:00
pengln c565b687ff Merge pull request 'Fix the elastic_delays_metric_enabled property always rendering as False' (#67) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/67
2022-02-15 17:31:22 +08:00
pengln 1d874245bd Fix the elastic_delays_metric_enabled property always rendering as False 2022-02-15 17:30:53 +08:00
pengln a224f93817 Merge pull request 'Default filebaet_ilm_enabled=yes' (#66) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/66
2022-02-15 16:06:22 +08:00
pengln 6693204a3a Default filebaet_ilm_enabled=yes
Disable delayed collection for gw logs
2022-02-15 16:05:42 +08:00
pengln d876fd6a9b Merge pull request 'Hotfix a syntax error in the configuration template file' (#65) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/65
2022-02-15 15:55:20 +08:00
pengln a48bfc9e1f Hotfix a syntax error in the configuration template file 2022-02-15 15:55:04 +08:00
pengln b3bfcfb2af Merge pull request 'Disable ILM if filebaet_ilm_enabled is set to false, for compatibility with Huawei Cloud ES' (#64) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/64
2022-02-15 15:52:21 +08:00
pengln f6c876d064 Disable ILM if filebaet_ilm_enabled is set to false, for compatibility with Huawei Cloud ES 2022-02-15 15:51:43 +08:00
pengln 0455b1d144 Disable ILM if filebaet_ilm_enabled is set to false, for compatibility with Huawei Cloud ES 2022-02-15 15:51:29 +08:00
pengln 85e4876338 Change the parameter default from False to no 2022-02-15 14:39:54 +08:00
pengln 478a76ad2d Merge pull request '# Disable the metrics log output of Filebeat itself' (#63) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/63
2022-02-15 14:09:26 +08:00
pengln 7f03a7f4f5 # Disable the metrics log output of Filebeat itself
# Fix automatic reload of the configuration file
2022-02-15 13:59:33 +08:00
pengln 465bf8f349 Merge pull request '[#1002039] Exclude Countly log files from Nginx log collection' (#62) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/62
2022-02-15 11:22:24 +08:00
pengln e9ec1f3367 [#1002039] Exclude Countly log files from Nginx log collection 2022-02-15 11:21:23 +08:00
pengln 3cc0f5f74d Merge pull request '#[1002041] Enable the elastic_delays check only for quick and gw logs' (#61) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/61
2022-02-15 11:13:56 +08:00
pengln 46e42aa987 #[1002041] Enable the elastic_delays check only for quick and gw logs 2022-02-15 11:12:33 +08:00
pengln ca65eed0d1 Merge pull request '[#1002043] Restore accidentally lost ILM policies by forcing filebeat to register them' (#60) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/60
2022-02-15 10:19:21 +08:00
pengln 30ca92301c [#1002043] Restore accidentally lost ILM policies by forcing filebeat to register them 2022-02-15 10:17:34 +08:00
pengln 061963154a Merge pull request '[#1002040] Merge Nginx log indices, naming them by project and date' (#59) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/59
2022-02-15 09:37:53 +08:00
pengln 92566348bc [#1002040] Merge Nginx log indices, naming them by project and date 2022-02-15 09:33:27 +08:00
pengln b9eb52ca82 Merge pull request 'Update filebeat_monitor to version 1636654cf7' (#58) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/58
2022-01-04 13:39:03 +08:00
pengln 0cca106826 Update filebeat_monitor to version 1636654cf7 2022-01-04 13:38:32 +08:00
pengln 6133934b31 Merge pull request 'Fix a bug where the multiline attribute does not exist' (#57) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/57
2021-12-29 18:12:58 +08:00
pengln 45ff4f4930 Fix a bug where the multiline attribute does not exist 2021-12-29 18:12:37 +08:00
pengln e1071bc98f Merge pull request 'Fix Grok failures when filebeat pushes multiline logs' (#56) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/56
2021-12-29 18:09:59 +08:00
pengln 156f4012e3 Fix Grok failures when filebeat pushes multiline logs 2021-12-29 18:09:04 +08:00
pengln 5c6ad53062 Merge pull request 'Update filebeat monitor to v0.3 dff5318323' (#55) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/55
2021-12-24 10:42:26 +08:00
pengln f0fefd5cad Update filebeat monitor to v0.3 dff5318323 2021-12-24 10:40:49 +08:00
pengln 085cfe0e51 Merge pull request 'Update filebeat monitor to v0.3 7456f46579' (#54) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/54
2021-12-23 16:11:34 +08:00
pengln 2a77de9455 Update filebeat monitor to v0.3 7456f46579 2021-12-23 16:11:00 +08:00
pengln 444b9f2430 Merge pull request 'Update filebeat_monitor to v0.3 8415ecc662' (#53) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/53
2021-12-23 15:04:02 +08:00
pengln 900540df86 Update filebeat_monitor to v0.3 8415ecc662 2021-12-23 15:03:29 +08:00
pengln 055aafb961 Merge pull request 'Update filebeat to v0.3 afe5de7e20' (#52) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/52
2021-12-23 13:31:21 +08:00
pengln 7697daa4c4 Update filebeat to v0.3 afe5de7e20 2021-12-23 13:30:44 +08:00
pengln 2175d33631 Merge pull request 'Fix the conflict between queue.mem and flush.min_events' (#51) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/51
2021-12-23 10:24:44 +08:00
pengln d8f60f18c0 Fix the conflict between queue.mem and flush.min_events 2021-12-23 10:24:13 +08:00
pengln 417012b35a Merge pull request 'Adjust the default resource limits' (#50) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/50
2021-12-23 10:17:58 +08:00
pengln d75cf5f4a6 Adjust the default resource limits 2021-12-23 10:17:18 +08:00
pengln 2ad4d4d270 Merge pull request 'Update filebeat monitor' (#49) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/49
2021-12-23 08:39:45 +08:00
pengln 3851a1d532 Update filebeat monitor 2021-12-23 08:38:43 +08:00
pengln 7f666b82c7 Merge pull request 'Set logging.json to false' (#48) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/48
2021-12-22 17:19:48 +08:00
pengln e87ca97eef Set logging.json to false 2021-12-22 17:18:53 +08:00
pengln b423ae6efa Merge pull request 'Fix the template_pattern default value' (#47) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/47
2021-12-22 16:25:36 +08:00
pengln 9724240311 Fix the template_pattern default value 2021-12-22 16:25:01 +08:00
pengln 5427c0fdef Merge pull request 'Fix a bug where configuration parameters did not take effect' (#46) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/46
2021-12-22 15:52:27 +08:00
pengln f957954d68 Fix a bug where configuration parameters did not take effect 2021-12-22 15:51:16 +08:00
pengln 9613b9eb3a Merge pull request 'Update filebeat_monitor to v0.3 86b04c0c52' (#45) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/45
2021-12-22 15:28:19 +08:00
pengln d8cff95097 Update filebeat_monitor to v0.3 86b04c0c52 2021-12-22 15:27:41 +08:00
pengln f8b8f01d3b Merge pull request 'Set instanceName to the hostname' (#44) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/44
2021-12-22 15:12:43 +08:00
pengln 360c5087af Set instanceName to the hostname 2021-12-22 15:11:49 +08:00
pengln 877e7b2a3f Merge pull request 'Add the elastic_delays_metric_enabled switch' (#43) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/43
2021-12-22 14:53:21 +08:00
pengln 69e671eaab Add the elastic_delays_metric_enabled configuration switch 2021-12-22 14:51:34 +08:00
pengln d5149bba48 Update filebeat monitor to v0.3 49005da52d 2021-12-22 09:43:59 +08:00
pengln 5b18d66375 Add logic to render the monitor script alongside the configuration 2021-12-22 09:13:01 +08:00
pengln cf5d120d36 Merge pull request 'Update filebeat monitor to v0.3 b1212a1e2a' (#42) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/42
2021-12-21 16:47:34 +08:00
pengln 2b00681464 Update filebeat monitor to v0.3 b1212a1e2a 2021-12-21 16:46:59 +08:00
pengln 8de9b81bee Merge pull request 'Fix a bug in the sublog default value' (#41) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/41
2021-12-21 16:33:55 +08:00
pengln 5e884c9e31 Fix a bug in the sublog default value 2021-12-21 16:33:25 +08:00
pengln 8361cc7450 Merge pull request 'Fix the filebeat_monitor startup parameters' (#40) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/40
2021-12-21 16:28:13 +08:00
pengln 2f068f2081 Fix the filebeat_monitor startup parameters 2021-12-21 16:27:28 +08:00
pengln ecde47fd8e Merge pull request 'Update filebeat monitor to v0.3 740a7fbe5f' (#39) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/39
2021-12-21 15:10:35 +08:00
pengln 446b05748f Update filebeat monitor to v0.3 740a7fbe5f 2021-12-21 15:09:13 +08:00
pengln 0311229869 Merge pull request 'Update curl to version 7.80.0' (#38) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/38
2021-12-10 17:25:20 +08:00
pengln e7f5d23739 Update curl to version 7.80.0
Change the install target to curl-7.80.0 to avoid conflicts with the old version
2021-12-10 17:24:18 +08:00
pengln 1a43040553 Merge pull request 'Fix the wrong default index prefix for manager log collection' (#37) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/37
2021-12-09 09:22:02 +08:00
pengln 9364d4a119 Fix the wrong default index prefix for manager log collection 2021-12-09 09:21:20 +08:00
pengln 9ca41764bf Merge pull request 'Fix an exclude-check error that prevented filebeat from starting' (#36) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/36
2021-12-07 17:34:19 +08:00
pengln d7db5cc45c Fix an exclude-check error that prevented filebeat from starting 2021-12-07 17:33:47 +08:00
pengln 3aa63c5768 Merge pull request 'Add a configuration option for excluded files' (#35) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/35
2021-12-07 17:31:38 +08:00
pengln 8a9799148c Add a configuration option for excluded files
Exclude the gateway *_81.log logs by default
2021-12-07 17:31:02 +08:00
pengln a82bbe7f5a Merge pull request 'Deploy the new CURL tool from local files' (#34) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/34
2021-12-07 16:49:03 +08:00
pengln 9a3ea92cb6 Deploy the new CURL tool from local files 2021-12-07 16:48:34 +08:00
pengln 172db31fd1 Merge pull request 'Switch logs to JSON format to make log-based monitoring easier' (#33) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/33
2021-12-07 16:43:29 +08:00
pengln aa22795f04 Switch logs to JSON format to make log-based monitoring easier 2021-12-07 16:42:57 +08:00
pengln b25af69671 Merge pull request 'Add the latest version of the curl tool' (#32) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/32
2021-12-07 11:46:26 +08:00
pengln fae0a461ad Add the latest version of the curl tool 2021-12-07 11:45:39 +08:00
pengln 3d3084dcbe Merge pull request 'Fix a bug with an incorrect pipeline variable reference' (#31) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/31
2021-12-07 10:14:52 +08:00
pengln 65a5016c77 Fix a bug with an incorrect pipeline variable reference 2021-12-07 10:14:04 +08:00
pengln 3d967ead59 Merge pull request 'Update filebeat_monitor to 0.2 [e22401bdad18c513]' (#30) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/30
2021-12-06 18:23:08 +08:00
pengln 16edee71c7 Update filebeat_monitor to 0.2 [e22401bdad18c513] 2021-12-06 18:22:36 +08:00
pengln e987937ee0 Merge pull request 'Update filebeat_monitor to 0.2 [1f1d2fc6127f3df4]' (#29) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/29
2021-12-06 18:15:30 +08:00
pengln dd41d8b593 Update filebeat_monitor to 0.2 [1f1d2fc6127f3df4] 2021-12-06 18:14:55 +08:00
pengln 89be8e1a0f Merge pull request 'Update filebeat_monitor to 0.2 [d2f0ae164986967a]' (#28) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/28
2021-12-06 18:07:58 +08:00
pengln ed74c07360 Update filebeat_monitor to 0.2 [d2f0ae164986967a] 2021-12-06 18:07:22 +08:00
pengln f776ffc3a2 Merge pull request 'Fix the command-not-found issue for Check Filebeat Service Running Status' (#27) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/27
2021-12-06 17:46:35 +08:00
pengln 7afaff0287 Fix the command-not-found issue for Check Filebeat Service Running Status 2021-12-06 17:13:54 +08:00
pengln 492f3c0066 Merge pull request 'Fix a Jinja2 formatting error in the configuration file' (#26) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/26
2021-12-06 16:50:04 +08:00
pengln 0d5b4c6f82 Fix a Jinja2 formatting error in the configuration file 2021-12-06 16:49:24 +08:00
pengln 767d454c72 Merge pull request 'Fix the monitor losing its username and password configuration' (#25) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/25
2021-12-06 16:41:46 +08:00
pengln 63ad661e6c Fix the monitor losing its username and password configuration 2021-12-06 16:41:03 +08:00
pengln bcd35eb339 Merge pull request 'Update filebeat_monitor to 0.2 [9453d07c82]' (#24) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/24
2021-12-06 15:50:54 +08:00
pengln 9b89f40bc0 Update filebeat_monitor to 0.2 [9453d07c82] 2021-12-06 15:49:51 +08:00
pengln 3f7cad82fc Merge pull request '#1001742 Optimize the Filebeat Ansible scripts' (#23) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/23
2021-12-06 11:16:53 +08:00
pengln 5a55f636ad #1001742 Optimize the Filebeat Ansible scripts 2021-12-06 11:15:08 +08:00
pengln 30ac005569 Merge pull request 'master' (#22) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/22
2021-12-06 09:33:01 +08:00
pengln 7b33b59d98 #1001742 Optimize the Filebeat Ansible scripts 2021-12-03 18:24:22 +08:00
pengln bfa03eced0 #1001742 Optimize the Filebeat Ansible scripts 2021-12-01 16:12:08 +08:00
pengln 47aa29e097 Merge pull request '#1001686 Move role variables to defaults' (#21) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/21
2021-11-18 15:30:17 +08:00
pengln d28b8ef4d1 #1001686 Move role variables to defaults 2021-11-18 15:28:21 +08:00
pengln 5ad8b391e3 Merge pull request '#1001686 Due to the nature of quick logs, collection must start from the beginning of the file' (#20) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/20
2021-11-18 14:51:10 +08:00
pengln 4ae39dba43 #1001686 Due to the nature of quick logs, collection must start from the beginning of the file 2021-11-18 14:50:21 +08:00
pengln 81b7afea63 Merge pull request 'master' (#19) from pengln/filebeat:master into master
Reviewed-on: https://app.huanyuantech.com:10301/gitea/ansible-collection/filebeat/pulls/19
2021-11-18 14:29:58 +08:00
pengln 6eb8dd6abb #1001686 Add a Filebeat template for collecting quick logs 2021-11-18 14:26:10 +08:00
pengln 5ea88300dd #1001686 Add a Filebeat template for collecting quick logs 2021-11-18 14:03:38 +08:00
pengln 44757720a6 Merge pull request 'Add a compression level setting for pushing to ES, default 0 (no compression)' (#18) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/18
2021-11-16 11:26:09 +08:00
pengln 6209c19282 Add a compression level setting for pushing to ES, default 0 (no compression) 2021-11-16 11:25:38 +08:00
pengln 4cd81cd33d Merge pull request 'Fix a wrong log path' (#17) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/17
2021-11-16 10:43:46 +08:00
pengln 0d5e84e2e5 Fix a wrong log path 2021-11-16 10:43:21 +08:00
pengln 0ee0082008 Merge pull request 'Fix path names causing index name failures' (#16) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/16
2021-11-16 10:23:35 +08:00
pengln c1574b8da5 Fix path names causing index name failures 2021-11-16 10:23:10 +08:00
pengln 4bc683c26c Merge pull request 'master' (#15) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/15
2021-11-15 19:21:06 +08:00
pengln 8fb95cbf6a Remove the disk queue to resolve the queue conflict 2021-11-15 19:19:50 +08:00
pengln 02b2b99b8e Add queue limits 2021-11-15 19:05:45 +08:00
pengln cf78b2034b Merge pull request '#1001590 Add collection of business application logs' (#14) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/14
2021-11-09 16:06:12 +08:00
pengln 4263ed1523 #1001590 Add collection of business application logs 2021-11-09 16:05:39 +08:00
pengln 254874af51 Merge pull request '#1001590 Add collection of business application logs' (#13) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/13
2021-11-09 15:17:36 +08:00
pengln 99d5df308f #1001590 Add collection of business application logs 2021-11-09 15:16:48 +08:00
pengln 6c0ead6c5a Merge pull request '#1001590 Add collection of business application logs' (#12) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/12
2021-11-09 14:50:13 +08:00
pengln 732407677c #1001590 Add collection of business application logs 2021-11-09 14:49:47 +08:00
pengln 313315f4ef Merge pull request '#1001590 Add collection of business application logs' (#11) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/11
2021-11-09 14:35:19 +08:00
pengln 0a22f5025e #1001590 Add collection of business application logs 2021-11-09 14:34:58 +08:00
pengln 9e723cbef6 Merge pull request '#1001590 Add collection of business application logs' (#10) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/10
2021-11-09 14:32:09 +08:00
pengln 671280c111 #1001590 Add collection of business application logs 2021-11-09 14:31:40 +08:00
pengln 62997175e6 Merge pull request '#1001590 Add collection of business application logs' (#9) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/9
2021-11-09 12:42:39 +08:00
pengln 8b7c9c4f49 #1001590 Add collection of business application logs 2021-11-09 12:42:11 +08:00
pengln 913e2dd9d0 Merge pull request '#1001590 Add collection of business application logs' (#8) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/8
2021-11-09 11:45:21 +08:00
pengln 3a99b77ce6 #1001590 Add collection of business application logs 2021-11-09 11:38:23 +08:00
pengln 541ee16b5e Merge pull request '#1001590 Add collection of business application logs' (#7) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/7
2021-11-09 11:30:15 +08:00
pengln 9ec02ab8b5 #1001590 Add collection of business application logs 2021-11-09 11:29:48 +08:00
pengln 90cc4a5722 Merge pull request '#1001590 Add collection of business application logs' (#6) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/6
2021-11-09 11:25:42 +08:00
pengln fc73dc3ef6 #1001590 Add collection of business application logs 2021-11-09 11:25:10 +08:00
pengln 8c9da16cca Merge pull request '#1001590 Add collection of business application logs' (#5) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/5
2021-11-09 11:15:01 +08:00
pengln 00fb976157 #1001590 Add collection of business application logs 2021-11-09 11:14:31 +08:00
pengln 0decac47c9 Merge pull request '#1001590 Add collection of business application logs' (#4) from pengln/filebeat:master into master
Reviewed-on: https://app.amonstack.com:10301/gitea/ansible-collection/filebeat/pulls/4
2021-11-09 11:12:26 +08:00
pengln a4e23e2a9c #1001590 Add collection of business application logs 2021-11-09 11:11:23 +08:00
38 changed files with 1514 additions and 114 deletions

626
.gitignore vendored Normal file
View File

@ -0,0 +1,626 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
[Ll]ogs/
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# ---> GitBook
# Node rules:
## Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
## Dependency directory
## Commenting this out is preferred by some people, see
## https://docs.npmjs.com/misc/faq#should-i-check-my-node_modules-folder-into-git
node_modules
# Book build output
_book
# eBook build output
*.epub
*.mobi
*.pdf
# ---> TortoiseGit
# Project-level settings
/.tgitconfig
# ---> JENKINS_HOME
# Learn more about Jenkins and JENKINS_HOME directory for which this file is
# intended.
#
# http://jenkins-ci.org/
# https://wiki.jenkins-ci.org/display/JENKINS/Administering+Jenkins
#
# Note: secret.key is purposefully not tracked by git. This should be backed up
# separately because configs may contain secrets which were encrypted using the
# secret.key. To back up secrets use 'tar -czf /tmp/secrets.tgz secret*' and
# save the file separate from your repository. If you want secrets backed up
# with configuration, then see the bottom of this file for an example.
# Ignore all JENKINS_HOME except jobs directory, root xml config, and
# .gitignore file.
# /*
/test/t*
!/jobs
!/.gitignore
!/*.xml
.git/
# Ignore all files in jobs subdirectories except for folders.
# Note: git doesn't track folders, only file content.
jobs/**
!jobs/**/
# Uncomment the following line to save next build numbers with config.
#!jobs/**/nextBuildNumber
# For performance reasons, we want to ignore builds in Jenkins jobs because it
# contains many tiny files on large installations. This can impact git
# performance when running even basic commands like 'git status'.
builds
indexing
# Exclude only config.xml files in repository subdirectories.
!config.xml
# Don't track workspaces (when users build on the master).
jobs/**/*workspace
# Security warning: If secrets are included with your configuration, then an
# adversary will be able to decrypt all encrypted secrets within Jenkins
# config. Including secrets is a bad practice, but the example is included in
# case someone still wants it for convenience. Uncomment the following line to
# include secrets for decryption with repository configuration in Git.
#!/secret*
# As a result, only Jenkins settings and job config.xml files in JENKINS_HOME
# will be tracked by git.
# ---> VisualStudioCode
.vscode/
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# ---> VisualStudio
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
[Bb]uild/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
[Tt]est/
[Tt]est/*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Nuget personal access tokens and Credentials
nuget.config
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
.idea/
*.sln.iml
# ---> SVN
.svn/

View File

@ -8,7 +8,7 @@ namespace: youyan
name: filebeat
# The version of the collection. Must be compatible with semantic versioning
version: 1.7.10
version: 1.8.0
# The path to the Markdown (.md) readme file. This path is relative to the root of the collection
readme: README.md

15
playbooks/deploy.yml Normal file
View File

@ -0,0 +1,15 @@
- name: "Install Filebeat Instance <{{ instance_name }}>"
hosts: "{{ instance_name }}_filebeats"
tasks:
- name: Filebeat Deploy
include_role:
name: filebeat
vars:
log_type: "{{ item }}"
with_items: "{{ deploy_logs }}"
- name: Update Alerts Script With Cron
ansible.builtin.shell: "svn up /data/ops/script/monitor.sh; chmod 755 /data/ops/script/monitor.sh"
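The playbook above runs the filebeat role once per entry in deploy_logs and builds the target host group from instance_name. A minimal usage sketch; the group name, values and command below are illustrative assumptions, not taken from the repository:
# group_vars/ug10cn_filebeats.yml -- hypothetical example
deploy_logs:
  - gateway
  - quick
# instance_name must be known when the play is parsed, so it would typically be
# passed as an extra var, e.g.:
#   ansible-playbook playbooks/deploy.yml -e instance_name=ug10cn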

View File

@ -1,8 +0,0 @@
- name: "Install Filebeat Instance <{{ instance_name }}>"
hosts: filebeats
roles:
- filebeat
- supervisor

View File

@ -0,0 +1,29 @@
---
language: python
python: "2.7"
# Use the new container infrastructure
sudo: false
# Install ansible
addons:
apt:
packages:
- python-pip
install:
# Install ansible
- pip install ansible
# Check ansible version
- ansible --version
# Create ansible.cfg with correct roles_path
- printf '[defaults]\nroles_path=../' >ansible.cfg
script:
# Basic role syntax check
- ansible-playbook tests/test.yml -i tests/inventory --syntax-check
notifications:
webhooks: https://galaxy.ansible.com/api/v1/notifications/

View File

@ -0,0 +1,38 @@
Role Name
=========
A brief description of the role goes here.
Requirements
------------
Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required.
Role Variables
--------------
A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (ie. hostvars, group vars, etc.) should be mentioned here as well.
Dependencies
------------
A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
Example Playbook
----------------
Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too:
- hosts: servers
roles:
- { role: username.rolename, x: 42 }
License
-------
BSD
Author Information
------------------
An optional section for the role authors to include contact information, or a website (HTML is not allowed).

View File

@ -0,0 +1,2 @@
---
# defaults file for curl

Binary file not shown.

View File

@ -0,0 +1,2 @@
---
# handlers file for curl

View File

@ -0,0 +1,52 @@
galaxy_info:
author: your name
description: your role description
company: your company (optional)
# If the issue tracker for your role is not on github, uncomment the
# next line and provide a value
# issue_tracker_url: http://example.com/issue/tracker
# Choose a valid license ID from https://spdx.org - some suggested licenses:
# - BSD-3-Clause (default)
# - MIT
# - GPL-2.0-or-later
# - GPL-3.0-only
# - Apache-2.0
# - CC-BY-4.0
license: license (GPL-2.0-or-later, MIT, etc)
min_ansible_version: 2.1
# If this a Container Enabled role, provide the minimum Ansible Container version.
# min_ansible_container_version:
#
# Provide a list of supported platforms, and for each platform a list of versions.
# If you don't wish to enumerate all versions for a particular platform, use 'all'.
# To view available platforms and versions (or releases), visit:
# https://galaxy.ansible.com/api/v1/platforms/
#
# platforms:
# - name: Fedora
# versions:
# - all
# - 25
# - name: SomePlatform
# versions:
# - all
# - 1.0
# - 7
# - 99.99
galaxy_tags: []
# List tags for your role here, one per line. A tag is a keyword that describes
# and categorizes the role. Users find roles by searching for tags. Be sure to
# remove the '[]' above, if you add tags to this list.
#
# NOTE: A tag is limited to a single word comprised of alphanumeric characters.
# Maximum 20 tags per role.
dependencies: []
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.

View File

@ -0,0 +1,11 @@
---
# tasks file for curl
- name: Install CURL Tool
ansible.builtin.copy:
src: curl
dest: /usr/bin/curl-7.80.0
owner: root
group: root
mode: '0755'

View File

@ -0,0 +1,2 @@
localhost

View File

@ -0,0 +1,5 @@
---
- hosts: localhost
remote_user: root
roles:
- curl

View File

@ -0,0 +1,2 @@
---
# vars file for curl

View File

@ -1,2 +1,62 @@
---
# defaults file for filebeat
# vars file for filebeat
pip_bin_path: "/data/opt/python3/bin/pip"
python_bin_dir: "/data/opt/python3/bin"
filebeat_version: 7.10.1
filebeat_oss_version: false
filebeat_download_url: "https://artifacts.elastic.co/downloads/beats/filebeat"
filebeat_work_path: "/data/opt/filebeat/{{ instance_name }}_{{ log_type }}"
filebeat_tail_files: true
filebeat_max_procs: 2
filebeat_compression_level: 5
filebeat_output_works: 1
filebeat_gid: 1801
filebeat_uid: 1801
filebeat_older: 24h
filebeat_logs:
gateway:
paths:
- "/data/log/nginx/*.log"
index_prefix: "gw-nginx"
template_name: "gw-nginx-ug10cn"
template_pattern: "gw-nginx-*"
#[#1002039] Exclude Countly log files from Nginx log collection
exclude_files: ['_81\.log$', '.*countly.*\.log$']
dissect_tokenizers:
- tokenizer: "/%{}/%{}/%{}/%{}_%{sublog}.log"
field_name: "log.file.path"
filebeat_older_dir: "/data/opt/filebeat/{{ instance_name }}"
filebeat_older_data_dir: data
filebaet_ilm_enabled: yes
manager:
paths:
- "/data/game/htdocs/center/manager/storage/cp/logs/*.log"
index_prefix: "mgr"
template_name: "mgr-cp"
template_pattern: "mgr-*"
multiline: true
sublog: cp
filebeat_older_dir: "/data/opt/filebeat/{{ instance_name }}"
filebeat_older_data_dir: data2
filebaet_ilm_enabled: yes
quick:
paths:
- "/data/game/sync/quick/filebeat/*.txt"
index_prefix: "quick"
template_name: "quick"
template_pattern: "quick-*"
sublog: quick
dissect_tokenizers:
- tokenizer: "/%{}/%{}/%{}/%{}/%{}/%{zone_id}_%{server_id}_%{log_file_time}.txt"
field_name: "log.file.path"
convert_timestamp:
field_name: log_datetime
layouts:
- '2006-01-02 15:04:05'
filebeat_older_dir: "/data/opt/filebeat/quick"
filebeat_older_data_dir: data
filebaet_ilm_enabled: yes
#[1002041] Enable the elastic_delays check only for quick logs
elastic_delays_metric_enabled: yes
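To illustrate how the gateway entry above behaves: exclude_files drops the Countly and *_81.log files, and the dissect tokenizer extracts a sublog value from the remaining paths. A rough walk-through with made-up file names:
# /data/log/nginx/access_81.log    -> dropped by the '_81\.log$' exclude pattern
# /data/log/nginx/countly_api.log  -> dropped by the '.*countly.*\.log$' exclude pattern
# /data/log/nginx/access_api.log   -> kept; the tokenizer "/%{}/%{}/%{}/%{}_%{sublog}.log"
#                                    yields sublog: "api", which the filebeat.yml template
#                                    later interpolates into the index name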

Binary file not shown.

View File

@ -0,0 +1,40 @@
function process(event) {
var msg = event.Get("message");
var i;
// strip carriage returns and newlines (regex literals, not string arguments)
msg = msg.replace(/\r/g, "");
msg = msg.replace(/\n/g, "");
msg = msg.replace(/\r\n/g, "");
var fields = msg.split("\t");
if (Number(fields[1]) == 64) {
// logs with log_id = 64 are synced from the sub-backend and must be dropped here
event.Cancel(); return;
}
for (i = 0; i < 27; i++) {
if (fields[i] == null) {
fields[i] = "";
} else if (fields[i].trim() == "-") {
fields[i] = "";
} else {
fields[i] = fields[i].trim();
}
}
event.Put("log_datetime", fields[0]);
event.Put("log_id", fields[1]);
event.Put("account_id", fields[2]);
event.Put("role_id", fields[3]);
event.Put("role_name", fields[4]);
event.Put("spid", fields[5]);
event.Put("channel", fields[6]);
for (i = 7; i < fields.length; i++) {
if (fields[i] == "-") { fields[i] = "" }
event.Put("col" + (i - 6), fields[i]);
}
return event;
}

View File

@ -0,0 +1,42 @@
function process(event) {
var msg = event.Get("message");
var i;
// strip carriage returns and newlines (regex literals, not string arguments)
msg = msg.replace(/\r/g, "");
msg = msg.replace(/\n/g, "");
msg = msg.replace(/\r\n/g, "");
var fields = msg.split("\t");
if (Number(fields[1]) == 64) {
// logs with log_id = 64 are synced from the sub-backend and must be dropped here
event.Cancel(); return;
}
for (i = 0; i < 25; i++) {
if (fields[i] == null) {
fields[i] = "";
} else if (fields[i].trim() == "-") {
fields[i] = "";
} else {
fields[i] = fields[i].trim();
}
}
event.Put("log_datetime", fields[0]);
event.Put("log_id", fields[1]);
event.Put("role_id", fields[2]);
event.Put("role_name", fields[3]);
event.Put("account_id", fields[4]);
var ws = fields[4].split("_"); // split spid and channel out of account_id
// only the first part of account_id describes the spid, so spid and channel are filled with the same value
event.Put("spid", ws[0]);
event.Put("channel", ws[0]);
for (i = 5; i < fields.length; i++) {
event.Put("col" + (i - 4), fields[i]);
}
return event;
}

View File

@ -0,0 +1,42 @@
function process(event) {
var msg = event.Get("message");
var i;
// strip carriage returns and newlines (regex literals, not string arguments)
msg = msg.replace(/\r/g, "");
msg = msg.replace(/\n/g, "");
msg = msg.replace(/\r\n/g, "");
var fields = msg.split("\t");
if (Number(fields[1]) == 64) {
// logs with log_id = 64 are synced from the sub-backend and must be dropped here
event.Cancel(); return;
}
for (i = 0; i < 25; i++) {
if (fields[i] == null) {
fields[i] = "";
} else if (fields[i].trim() == "-") {
fields[i] = "";
} else {
fields[i] = fields[i].trim();
}
}
event.Put("log_datetime", fields[0]);
event.Put("log_id", fields[1]);
event.Put("role_id", fields[2]);
event.Put("role_name", fields[3]);
event.Put("account_id", fields[4]);
var ws = fields[4].split("_"); // split spid and channel out of account_id
// only the first part of account_id describes the spid, so spid and channel are filled with the same value
event.Put("spid", ws[0]);
event.Put("channel", ws[0]);
for (i = 5; i < fields.length; i++) {
event.Put("col" + (i - 4), fields[i]);
}
return event;
}

View File

@ -1,2 +1,31 @@
---
# handlers file for filebeat
- name: Reload Filebeat Service {{ instance_name }}_{{ log_type }}
ansible.builtin.service:
name: "filebeat_{{ instance_name }}_{{ log_type }}"
state: reloaded
use: "{{ 'sysvinit' if ansible_facts['distribution'] == 'CentOS' and ansible_facts['distribution_major_version'] == '6' else 'auto' }}"
when: current_filebeat_version.stdout != ''
- name: Start Filebeat Service {{ instance_name }}_{{ log_type }}
ansible.builtin.service:
name: "filebeat_{{ instance_name }}_{{ log_type }}"
state: started
enabled: true
sleep: 5
use: "{{ 'sysvinit' if ansible_facts['distribution'] == 'CentOS' and ansible_facts['distribution_major_version'] == '6' else 'auto' }}"
- name: Restart Filebeat Service {{ instance_name }}_{{ log_type }}
ansible.builtin.service:
name: "filebeat_{{ instance_name }}_{{ log_type }}"
state: restarted
sleep: 5
use: "{{ 'sysvinit' if ansible_facts['distribution'] == 'CentOS' and ansible_facts['distribution_major_version'] == '6' else 'auto' }}"
when: current_filebeat_version.stdout != ''
- name: Clean Old Filebeat {{ instance_name }}_{{ log_type }}
include_tasks: clean_old_filebeat.yml
- name: Restart Filebeat Monitor Plugin {{ instance_name }}_{{ log_type }}
ansible.builtin.shell: "service filebeat_{{ instance_name }}_{{ log_type }} restart_monitor"
when: current_filebeat_version.stdout != ''

View File

@ -47,6 +47,11 @@ galaxy_info:
# NOTE: A tag is limited to a single word comprised of alphanumeric characters.
# Maximum 20 tags per role.
dependencies: []
dependencies:
- supervisor
- curl
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.
allow_duplicates: true

View File

@ -0,0 +1,17 @@
---
- name: Check Old Filebeat Directory
ansible.builtin.shell: "ls {{ filebeat_logs[log_type]['filebeat_older_dir'] }} || echo ''"
register: check_older_dir
changed_when: check_older_dir.stdout != ''
- name: Clean Old Filebeat Directory
community.general.archive:
path: "{{ filebeat_logs[log_type]['filebeat_older_dir'] }}"
dest: "/data/old-filebeat-{{ 'quick' if log_type == 'quick' else instance_name }}-backup.tgz"
remove: yes
when: check_older_dir.stdout != ''
- name: Clean Old Filebeat SysV Script
ansible.builtin.shell: "/bin/rm -vf /etc/init.d/filebeat_{{ instance_name }}"
register: clean_sysv_script
changed_when: clean_sysv_script.stdout != ''

View File

@ -1,38 +1,145 @@
---
- name: Create Filebeat Group
- name: Create Filebeat Group "{{ instance_name }}_{{ log_type }}"
ansible.builtin.group:
name: filebeat
state: present
gid: 1802
gid: "{{ filebeat_gid }}"
- name: Create Filebeat User
- name: Create Filebeat User "{{ instance_name }}_{{ log_type }}"
ansible.builtin.user:
name: filebeat
group: filebeat
shell: /sbin/nologin
state: present
uid: 1802
uid: "{{ filebeat_uid }}"
- name: Make Work directory
- name: Make Work directory "{{ instance_name }}_{{ log_type }}"
ansible.builtin.file:
path: "{{ work_path }}"
path: "{{ filebeat_work_path }}"
owner: filebeat
group: filebeat
state: directory
mode: '0755'
- name: Install Filebeat
- name: Check Filebeat Version "{{ instance_name }}_{{ log_type }}"
ansible.builtin.shell: "({{ filebeat_work_path }}/filebeat version | grep -E 'filebeat version {{ version }}') || echo ''"
register: current_filebeat_version
changed_when: current_filebeat_version.stdout == ''
vars:
version: "{{ filebeat_logs[log_type]['filebeat_version'] | default(filebeat_version) }}"
- name: Install Filebeat "{{ instance_name }}_{{ log_type }} {{ 'filebeat-oss-' if filebeat_logs[log_type]['filebeat_oss_version'] | default(filebeat_oss_version) else 'filebeat-' }}{{ filebeat_logs[log_type]['filebeat_version'] | default(filebeat_version) }}"
ansible.builtin.unarchive:
src: "{{ filebeat_download_url }}"
dest: "{{ work_path }}"
src: "{{ filebeat_logs[log_type]['filebeat_download_url'] | default(filebeat_download_url) }}/{{ package_name }}"
dest: "{{ filebeat_work_path }}"
remote_src: yes
validate_certs: false
extra_opts:
- --strip-components=1
- name: Render Filebeat Configure File
ansible.builtin.template:
src: filebeat-gateway-nginx-template.yml
dest: "{{ work_path }}/filebeat.yml"
vars:
package_name: "{{ 'filebeat-oss' if oss else 'filebeat' }}-{{ version }}-{{ ansible_facts['system'] | lower }}-{{ ansible_facts['userspace_architecture'] }}.tar.gz"
oss: "{{ filebeat_logs[log_type]['filebeat_oss_version'] | default(filebeat_oss_version) }}"
version: "{{ filebeat_logs[log_type]['filebeat_version'] | default(filebeat_version) }}"
when: current_filebeat_version.stdout == ''
- name: Install Filebeat Monitor Plugin {{ instance_name }}_{{ log_type }}
ansible.builtin.copy:
src: filebeat_monitor
dest: "{{ filebeat_work_path }}"
owner: root
group: root
mode: '0644'
mode: '0755'
notify: "Restart Filebeat Monitor Plugin {{ instance_name }}_{{ log_type }}"
- name: Render Filebeat Configure File "{{ instance_name }}_{{ log_type }}"
ansible.builtin.template:
src: "filebeat.yml"
dest: "{{ filebeat_work_path }}/filebeat.yml"
owner: root
group: root
mode: '0644'
notify: "Reload Filebeat Service {{ instance_name }}_{{ log_type }}"
vars:
logs: "{{ filebeat_logs }}"
- name: Render Custom Processor "{{ instance_name }}_{{ log_type }}"
ansible.builtin.copy:
src: "{{ script['file'] }}"
dest: "{{ filebeat_work_path }}"
owner: root
group: root
mode: '0644'
notify: "Reload Filebeat Service {{ instance_name }}_{{ log_type }}"
with_items: "{{ filebeat_logs[log_type]['scripts'] | default([]) }}"
loop_control:
loop_var: "script"
- name: Render Filebeat Monitor Plugin Configure File "{{ instance_name }}_{{ log_type }}"
ansible.builtin.template:
src: "filebeat_monitor.yaml"
dest: "{{ filebeat_work_path }}/monitor.yaml"
owner: root
group: root
mode: '0644'
notify: "Restart Filebeat Monitor Plugin {{ instance_name }}_{{ log_type }}"
- name: Render Filebeat Supervisor Configure File "{{ instance_name }}_{{ log_type }}"
ansible.builtin.template:
src: supervisor.yml
dest: "{{ filebeat_work_path }}/supervisor.yml"
owner: root
group: root
mode: '0640'
notify: "Restart Filebeat Service {{ instance_name }}_{{ log_type }}"
- name: Render Filebeat SystemV Script "{{ instance_name }}_{{ log_type }}"
ansible.builtin.template:
src: supervisor.sh
dest: "/etc/init.d/filebeat_{{ instance_name }}_{{ log_type }}"
owner: root
group: root
mode: '0750'
notify: "Restart Filebeat Service {{ instance_name }}_{{ log_type }}"
- name: Stop Old Filebeat "{{ instance_name }}_{{ log_type }}"
include_tasks: stop_old_filebeat.yml
vars:
target: "{{ instance_name }}_{{ log_type }}"
when: filebeat_data_migration | default(false)
- name: Check Old Filebeat Data "{{ instance_name }}_{{ log_type }}"
ansible.builtin.shell: "ls {{ filebeat_logs[log_type]['filebeat_older_dir'] }}/{{ filebeat_logs[log_type]['filebeat_older_data_dir'] }} || echo ''"
register: check_old_filebeat_data
changed_when: check_old_filebeat_data.stdout != ''
when: filebeat_data_migration | default(false)
- name: Merge Filebeat Data "{{ instance_name }}_{{ log_type }}"
ansible.builtin.copy:
src: "{{ filebeat_logs[log_type]['filebeat_older_dir'] }}/{{ filebeat_logs[log_type]['filebeat_older_data_dir'] }}/"
dest: "{{ filebeat_work_path }}/data/"
owner: filebeat
group: filebeat
mode: '0750'
remote_src: yes
directory_mode: yes
notify: "Clean Old Filebeat {{ instance_name }}_{{ log_type }}"
when: filebeat_data_migration | default(false) and check_old_filebeat_data.stdout != ''
- name: Check Filebeat Service Running Status
ansible.builtin.shell: "{{ python_bin_dir }}/supervisorctl -c {{ filebeat_work_path }}/supervisor.yml status filebeat_{{ instance_name }}_{{ log_type }}"
register: filebeat_status
changed_when: filebeat_status.rc != 0
failed_when: false
- name: First Start Filebeat Service "{{ instance_name }}_{{ log_type }}"
ansible.builtin.service:
name: "filebeat_{{ instance_name }}_{{ log_type }}"
state: started
enabled: true
sleep: 30
use: "{{ 'sysvinit' if ansible_facts['distribution'] == 'CentOS' and ansible_facts['distribution_major_version'] == '6' else 'auto' }}"
when: filebeat_status.rc != 0

View File

@ -0,0 +1,22 @@
---
- name: Get Old Running Supervisor Processes {{ target }}
ansible.builtin.shell: "ps -ef | grep -v grep | grep -w /data/opt/filebeat/{{ instance_name }}/supervisor.yml | awk '{print $2}'"
register: supervisor_processes
changed_when: supervisor_processes.stdout != ''
- name: Get Old Running Filebeat Processes {{ target }}
ansible.builtin.shell: "ps -ef | grep -v grep | grep -w /data/opt/filebeat/{{ instance_name }}/filebeat | awk '{print $2}'"
register: filebeat_processes
changed_when: filebeat_processes.stdout != ''
- name: Stop Old Filebeat Processes {{ target }}
ansible.builtin.shell: "kill -9 {{ (supervisor_processes.stdout_lines + filebeat_processes.stdout_lines) | join(' ') }}"
when: supervisor_processes.stdout != '' or filebeat_processes.stdout != ''
- name: Remove Old Processes Residue
ansible.builtin.shell: "/bin/rm -vf /tmp/supervisord_{{ instance_name }}.* /tmp/filebeat_{{ target }}*.log"
register: residue
changed_when: residue.stdout != ''

View File

@ -1,16 +1,25 @@
path.data: {{ work_path }}/data
filebeat.registry.path: {{ work_path }}/data/registry/gw-nginx-{{ instance_name }}
queue.mem:
events: 512
flush.min_events: 128
flush.timeout: 5s
filebeat.inputs:
{% for log in logs %}
{% for log in gwlogs %}
- type: log
paths:
- "/data/log/nginx/access_{{ log }}.log"
- "/data/log/nginx/error_{{ log }}.log"
fields:
source: {{ log | regex_replace('\*') }}
harvester_buffer_size: 1638400
tail_files: {{ tail_files | default(true) | string | lower }}
{% endfor %}
max_procs: {{ max_procs | default(2) }}
filebeat.config.modules:
# Glob pattern for configuration loading
path: ${path.config}/modules.d/*.yml
@ -24,7 +33,7 @@ filebeat.config.modules:
# ======================= Elasticsearch template setting =======================
setup.ilm.enabled: false
setup.template.name: "gw-nginx-ug10cn"
setup.template.pattern: "gw-nginx-ug10cn-*"
setup.template.pattern: "gw-nginx-*"
setup.template.settings:
index.number_of_shards: 3
@ -40,6 +49,9 @@ output.elasticsearch:
pipeline: "gwlogs"
username: {{ es_user }}
password: {{ es_passwd }}
bulk_max_size: 1500
compression_level: {{ compression_level | default(0) }}
worker: 1
# ================================= Processors =================================

View File

@ -0,0 +1,67 @@
path.data: {{ work_path }}/data2
filebeat.registry.path: {{ work_path }}/data2/registry/mgr-cp-{{ instance_name }}
queue.mem:
events: 512
flush.min_events: 128
flush.timeout: 5s
filebeat.inputs:
{% for log in mgrlogs %}
- type: log
paths:
- {{ log }}
multiline.type: pattern
multiline.pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2}'
multiline.negate: true
multiline.match: after
multiline.skip_newline: true
fields:
source: cp
harvester_buffer_size: 1638400
tail_files: {{ tail_files | default(true) | string | lower }}
{% endfor %}
max_procs: {{ max_procs | default(2) }}
filebeat.config.modules:
# Glob pattern for configuration loading
path: ${path.config}/modules.d/*.yml
# Set to true to enable config reloading
reload.enabled: false
# Period on which files under path should be checked for changes
#reload.period: 10s
# ======================= Elasticsearch template setting =======================
setup.ilm.enabled: false
setup.template.name: "mgr-cp"
setup.template.pattern: "mgr-cp-*"
setup.template.settings:
index.number_of_shards: 3
setup.kibana:
host: "{{ kibana_url }}"
space.id: "{{ kibana_space }}"
# ---------------------------- Elasticsearch Output ----------------------------
output.elasticsearch:
hosts: {{ es_hosts }}
index: "mgr-{{ instance_name }}-%{[fields.source]}-%{+yyyy.MM.dd}"
pipeline: "cplogs"
username: {{ es_user }}
password: {{ es_passwd }}
bulk_max_size: 1500
compression_level: {{ compression_level | default(0) }}
worker: 3
# ================================= Processors =================================
processors:
- add_host_metadata:
when.not.contains.tags: forwarded
- add_cloud_metadata: ~
- add_docker_metadata: ~
- add_kubernetes_metadata: ~

View File

@ -0,0 +1,61 @@
path.data: {{ work_path }}/data3
filebeat.registry.path: {{ work_path }}/data3/registry/quick-{{ instance_name }}
queue.mem:
events: 512
flush.min_events: 128
flush.timeout: 5s
filebeat.inputs:
{% for log in quicklog %}
- type: log
paths:
- {{ log }}
fields:
source: quick
harvester_buffer_size: 1638400
{% endfor %}
max_procs: {{ max_procs | default(2) }}
filebeat.config.modules:
# Glob pattern for configuration loading
path: ${path.config}/modules.d/*.yml
# Set to true to enable config reloading
reload.enabled: false
# Period on which files under path should be checked for changes
#reload.period: 10s
# ======================= Elasticsearch template setting =======================
setup.template.name: "quick"
setup.template.pattern: "quick-*"
# ---------------------------- Elasticsearch Output ----------------------------
output.elasticsearch:
hosts: {{ quick_es_hosts }}
index: "quick-{{ instance_name }}-%{[fields.source]}-%{+yyyy.MM.dd}"
pipeline: "quicklog"
username: {{ quick_es_user }}
password: {{ quick_es_passwd }}
bulk_max_size: 1500
compression_level: {{ compression_level | default(0) }}
worker: 1
# ================================= Processors =================================
processors:
- add_host_metadata:
when.not.contains.tags: forwarded
- add_cloud_metadata: ~
- add_docker_metadata: ~
- add_kubernetes_metadata: ~
- dissect:
tokenizer: "/%{}/%{}/%{}/%{}/%{}/%{zone_id}_%{server_id}_%{log_t}.txt"
field: "log.file.path"
target_prefix: ""
- dissect:
tokenizer: "%{date} %{+date->} %{log_id->} %{account_id->} %{role_id->} %{role_name->} %{spid->} %{channel->} %{col1->} %{col2->} %{col3->} %{col4->} %{col5->} %{col6->} %{col7->} %{col8->} %{col9->} %{col10->} %{col11->} %{col12->} %{col13->}"
field: "message"
target_prefix: ""

View File

@ -0,0 +1,120 @@
# Disable Filebeat's own metrics log output
logging.metrics.enabled: false
# Enable automatic reload of the configuration file
filebeat.config.inputs:
enabled: true
path: ${path.config}/filebeat.yml
reload.enabled: true
reload.period: 10s
logging.json: false
queue.mem:
events: {{ filebeat_queue_mem_events | default(8192) }}
flush.min_events: {{ filebeat_queue_mem_flush_min_event | default(512) }}
filebeat.inputs:
- type: log
paths: {{ logs[log_type]['paths'] }}
{% if "exclude_files" in logs[log_type] -%}
exclude_files: {{ logs[log_type]['exclude_files'] }}
{% endif -%}
{% if 'multiline' in logs[log_type] and logs[log_type]['multiline'] -%}
multiline.type: pattern
multiline.pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2}'
multiline.negate: true
multiline.match: after
multiline.skip_newline: true
{% endif -%}
harvester_buffer_size: 2048
ignore_older: {{ filebeat_older }}
tail_files: {{ filebeat_tail_files | string | lower }}
http.enabled: true
http.host: unix://${path.config}/filebeat.sock
max_procs: {{ filebeat_max_procs }}
filebeat.config.modules:
path: ${path.config}/modules.d/*.yml
reload.enabled: false
# ======================= Elasticsearch template setting =======================
#[#1002043] Restore accidentally lost ILM policies by forcing filebeat to register them.
# Disable ILM if filebaet_ilm_enabled is set to false, for compatibility with Huawei Cloud ES
setup.ilm.enabled: false
setup.ilm.policy_name: "{{ logs[log_type]['setup_ilm_policy_name'] | default('gw-nginx-ug10') }}"
setup.ilm.check_exists: false
setup.template.name: "{{ logs[log_type]['template_name'] }}"
setup.template.pattern: "{{ logs[log_type]['template_pattern'] }}"
setup.template.settings:
index.number_of_shards: "{{ filebeat_number_of_shards | default(3) }}"
{% if 'kibana' in logs[log_type] -%}
setup.kibana:
host: "{{ logs[log_type]['kibana']['kibana_url'] }}"
space.id: "{{ logs[log_type]['kibana']['kibana_space'] }}"
{% endif -%}
# ---------------------------- Elasticsearch Output ----------------------------
{% if 'output' not in logs[log_type] or logs[log_type]['output'] == 'elastic' %}
output.elasticsearch:
hosts: {{ logs[log_type]['es_hosts'] }}
#[#1002040] If the sublog field is configured as 'none', omit the sublog segment from the index name
{% if 'sublog' in logs[log_type] and logs[log_type]['sublog'] == 'none' -%}
index: "{{ logs[log_type]['index_prefix'] }}-{{ instance_name }}-%{+yyyy.MM.dd}"
{% elif 'sublog' in logs[log_type] and logs[log_type]['sublog'] -%}
index: "{{ logs[log_type]['index_prefix'] }}-{{ instance_name }}-{{ logs[log_type]['sublog'] }}-%{+yyyy.MM.dd}"
{% else -%}
index: "{{ logs[log_type]['index_prefix'] }}-{{ instance_name }}-%{[sublog]}-%{+yyyy.MM.dd}"
{% endif -%}
{% if 'pipeline' in logs[log_type] and logs[log_type]['pipeline'] -%}
pipeline: "{{ logs[log_type]['pipeline'] }}"
{% endif -%}
{% if 'es_user' in logs[log_type] and 'es_passwd' in logs[log_type] -%}
username: {{ logs[log_type]['es_user'] | default('') }}
password: {{ logs[log_type]['es_passwd'] | default('') }}
{% endif -%}
bulk_max_size: {{ filebeat_bulk_max_size | default(500) }}
flush_interval: 5s
compression_level: {{ filebeat_compression_level }}
worker: {{ filebeat_output_works }}
{% endif -%}
{% if 'output' in logs[log_type] and logs[log_type]['output'] == 'debug' -%}
output.console:
pretty: true
{% endif -%}
# ================================= Processors =================================
processors:
- add_host_metadata:
when.not.contains.tags: forwarded
- add_cloud_metadata: ~
- add_docker_metadata: ~
- add_kubernetes_metadata: ~
{% if 'dissect_tokenizers' in logs[log_type] and logs[log_type]['dissect_tokenizers'] -%}
{% for tokenizer in logs[log_type]['dissect_tokenizers'] -%}
- dissect:
tokenizer: "{{ tokenizer['tokenizer'] }}"
field: {{ tokenizer['field_name'] }}
target_prefix: ""
{% endfor -%}
{% endif -%}
{% if 'convert_timestamp' in logs[log_type] and logs[log_type]['convert_timestamp'] -%}
- timestamp:
field: {{ logs[log_type]['convert_timestamp']['field_name'] }}
layouts: {{ logs[log_type]['convert_timestamp']['layouts'] }}
timezone: "Asia/Shanghai"
{% endif -%}
{% if 'scripts' in logs[log_type] and logs[log_type]['scripts'] -%}
{% for script in logs[log_type]['scripts'] -%}
- script:
lang: javascript
id: {{ script['id'] }}
file: {{ script['file'] }}
{% endfor %}
{%- endif -%}
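For reference, a minimal sketch of the shape of a logs[log_type] entry this template expects; the key names follow the lookups above, while every value is purely illustrative:

logs:
  gw:
    paths: ["/data/logs/gw/*.log"]              # hypothetical glob
    exclude_files: ['countly\.log$']            # optional
    multiline: true                             # turns on the multiline block
    template_name: "gw"
    template_pattern: "gw-*"
    es_hosts: ["https://es01.example:9200"]     # hypothetical host
    index_prefix: "gw"
    sublog: "access"                            # 'none' drops the segment; unset falls back to %{[sublog]}
    pipeline: "gwlog"                           # optional
    es_user: "elastic"                          # optional, together with es_passwd
    es_passwd: "changeme"
    dissect_tokenizers:                         # optional
      - tokenizer: "%{date} %{level} %{msg}"
        field_name: message
    convert_timestamp:                          # optional
      field_name: date
      layouts: ["2006-01-02 15:04:05"]
    output: elastic                             # 'debug' switches to output.console

With sublog: "access" the index renders as gw-<instance_name>-access-%{+yyyy.MM.dd}; with sublog: "none" the sublog segment is dropped entirely; with no sublog key at all the per-event %{[sublog]} field is interpolated instead.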

View File

@@ -0,0 +1,9 @@
instance_name: {{ inventory_hostname }}
project: {{ instance_name }}
sublog: {{ 'quick' if log_type == 'quick' else '' }}
filebeat_config_path: {{ filebeat_logs[log_type]['filebeat_config_path'] | default('filebeat.yml') }}
metric_host: {{ filebeat_monitor_sock | default('./filebeat_monitor.sock') }}
push_gateway_url: {{ filebeat_monitor_push_gateway | default('') }}
push_gateway_lifecycle: {{ filebeat_monitor_push_lifecycle | default(5) }}
log_path: {{ filebeat_monitor_log_path | default('logs/filebeat_monitor.log') }}
elastic_delays_metric_enabled: {{ filebeat_logs[log_type]['elastic_delays_metric_enabled'] | default('False') }}
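The Ansible-side variables consumed by this monitor template might be defined roughly as below; this is only a sketch and every value is a placeholder:

filebeat_logs:
  quick:
    filebeat_config_path: "filebeat.yml"
    elastic_delays_metric_enabled: True        # renders as 'False' when omitted
filebeat_monitor_sock: "./filebeat_monitor.sock"
filebeat_monitor_push_gateway: "http://pushgateway.example:9091"   # hypothetical URL
filebeat_monitor_push_lifecycle: 5
filebeat_monitor_log_path: "logs/filebeat_monitor.log"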

View File

@@ -1,14 +1,26 @@
#!/bin/bash
# chkconfig: 345 83 04
set -o nounset
# filebeat_{{ instance_name }}_{{ log_type }} Start filebeat
#
# chkconfig: 2345 33 81
# description: Starts, stops and saves filebeat_{{ instance_name }}_{{ log_type }}
#
#
### BEGIN INIT INFO
# Provides: youyan
# Required-Start:
# Required-Stop:
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: start and stop filebeat_{{ instance_name }}_{{ log_type }}
# Description: Start, stop filebeat_{{ instance_name }}_{{ log_type }}
### END INIT INFO
. /etc/rc.d/init.d/functions
RETVAL=0
PIDFILE="/tmp/supervisord_{{instance_name}}.pid"
LOCKFILE="/tmp/supervisord.lock"
OPTIONS="-c {{ config_file_path }}"
PIDFILE="/tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.pid"
LOCKFILE="/tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.lock"
OPTIONS="-c {{ filebeat_work_path }}/supervisor.yml"
WAIT_FOR_SUBPROCESSES="yes"
start() {
@@ -67,7 +79,11 @@ case "$1" in
RETVAL=$?
;;
reload)
/usr/bin/supervisorctl $OPTIONS reload
{{ python_bin_dir }}/supervisorctl $OPTIONS restart filebeat_{{ instance_name }}_{{ log_type }}
RETVAL=$?
;;
restart_monitor)
{{ python_bin_dir }}/supervisorctl $OPTIONS restart filebeat_monitor_{{ instance_name }}_{{ log_type }}
RETVAL=$?
;;
restart)
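Assuming the rendered script above ends up symlinked as /etc/init.d/filebeat_{{ instance_name }}_{{ log_type }} (the path is an assumption based on the service naming used here), the new reload and restart_monitor targets could be driven from Ansible roughly like this:

- name: Restart filebeat via the SysV wrapper (illustrative sketch)
  ansible.builtin.service:
    name: "filebeat_{{ instance_name }}_{{ log_type }}"    # assumed init.d name
    state: restarted
    use: sysvinit

- name: Restart only the monitor side-process (maps to the restart_monitor case above)
  ansible.builtin.command: "/etc/init.d/filebeat_{{ instance_name }}_{{ log_type }} restart_monitor"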

View File

@@ -0,0 +1,47 @@
[supervisord]
logfile = /tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.log
logfile_maxbytes = 50MB
logfile_backups=5
loglevel = debug
pidfile = /tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.pid
nodaemon = false
minfds = 1024
minprocs = 200
umask = 022
user = filebeat
identifier = filebeat_{{ instance_name }}_{{ log_type }}
directory = {{ filebeat_work_path }}
nocleanup = true
strip_ansi = false
[unix_http_server]
file = /tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.sock
chmod = 0777
chown = filebeat:filebeat
username = filebeat_supervisord
password = eNlB.UlOrJAnA
[program:filebeat_{{ instance_name }}_{{ log_type }}]
directory={{ filebeat_work_path }}/
command={{ filebeat_work_path }}/filebeat -c {{ filebeat_work_path }}/filebeat.yml
stdout_logfile=/dev/null
stderr_logfile=/dev/null
autostart=true
autorestart=true
[program:filebeat_monitor_{{ instance_name }}_{{ log_type }}]
directory={{ filebeat_work_path }}/
command={{ filebeat_work_path }}/filebeat_monitor -c {{filebeat_work_path}}/monitor.yaml
stdout_logfile=/dev/null
stderr_logfile=/dev/null
autostart=true
autorestart=true
[supervisorctl]
serverurl = unix:///tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.sock
username = filebeat_supervisord
password = eNlB.UlOrJAnA
prompt = filebeat_{{ instance_name }}_{{ log_type }}
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

View File

@@ -1,5 +0,0 @@
---
# vars file for filebeat
filebeat_download_url: "https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.10.1-linux-x86_64.tar.gz"
work_path: "/data/opt/filebeat/{{ instance_name }}"
logs: ["api", "cl*", "doc", "download", "gw*", "cp", "pay", "static"]

View File

@@ -1,2 +1,4 @@
---
# defaults file for supervisor
# vars file for supervisor
pip_bin_path: "/data/opt/python3/bin/pip"
python_bin_dir: "/data/opt/python3/bin"

View File

@@ -1,2 +1,2 @@
---
# handlers file for supervisor
# handlers file for supervisor

View File

@@ -50,3 +50,5 @@ galaxy_info:
dependencies: []
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.
allow_duplicates: true
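allow_duplicates: true lets the same role be applied several times within one play, which matches the per-log_type service naming used throughout these templates. A hedged sketch (role and variable names assumed from this repository, hosts group hypothetical):

- hosts: log_nodes
  roles:
    - { role: filebeat, log_type: quick }
    - { role: filebeat, log_type: gw }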

View File

@@ -5,35 +5,5 @@
name: supervisor
executable: "{{ pip_bin_path }}"
- name: Render Supervisor Configure File
ansible.builtin.template:
src: supervisor.yml
dest: "{{ work_path }}/supervisor.yml"
owner: root
group: root
mode: '0750'
- name: Render Supervisor SystemV Script
ansible.builtin.template:
src: supervisor.sh
dest: "{{ work_path }}/supervisor_initd.sh"
owner: root
group: root
mode: '0750'
vars:
config_file_path: "{{ work_path }}/supervisor.yml"
- name: Symlink SystemV Script
ansible.builtin.file:
src: "{{ work_path }}/supervisor_initd.sh"
dest: "/etc/init.d/filebeat_{{ instance_name }}"
owner: "root"
state: link
- name: Start Filebeat Service
ansible.builtin.service:
name: "filebeat_{{ instance_name }}"
state: restarted
enabled: true
sleep: 30
use: sysvinit

View File

@@ -1,35 +0,0 @@
[supervisord]
logfile = /tmp/supervisord_{{instance_name}}.log
logfile_maxbytes = 50MB
logfile_backups=5
loglevel = debug
pidfile = /tmp/supervisord_{{instance_name}}.pid
nodaemon = false
minfds = 1024
minprocs = 200
umask = 022
user = filebeat
identifier = supervisor_{{instance_name}}
directory = {{ work_path }}
nocleanup = true
strip_ansi = false
[unix_http_server]
file = /tmp/supervisord_{{instance_name}}.sock
chmod = 0777
chown = filebeat:filebeat
username = filebeat
password = eNlB.UlOrJAnA
[program:filebeat_{{instance_name}}]
command={{ work_path }}/filebeat -c {{ work_path }}/filebeat.yml
autorestart=true
[supervisorctl]
serverurl = unix:///tmp/supervisord_{{instance_name}}.sock
username = filebeat
password = eNlB.UlOrJAnA
prompt = filebeat_{{instance_name}}
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

View File

@@ -1,6 +0,0 @@
---
# vars file for supervisor
pip_bin_path: "/data/opt/python3/bin/pip"
python_bin_dir: "/data/opt/python3/bin"
work_path: "/data/opt/filebeat/{{ instance_name }}"