Compare commits

No commits in common. "master" and "v0.1.1" have entirely different histories.

38 changed files with 113 additions and 1513 deletions

.gitignore vendored (626 lines)

@@ -1,626 +0,0 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
[Ll]ogs/
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# ---> GitBook
# Node rules:
## Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
## Dependency directory
## Commenting this out is preferred by some people, see
## https://docs.npmjs.com/misc/faq#should-i-check-my-node_modules-folder-into-git
node_modules
# Book build output
_book
# eBook build output
*.epub
*.mobi
*.pdf
# ---> TortoiseGit
# Project-level settings
/.tgitconfig
# ---> JENKINS_HOME
# Learn more about Jenkins and JENKINS_HOME directory for which this file is
# intended.
#
# http://jenkins-ci.org/
# https://wiki.jenkins-ci.org/display/JENKINS/Administering+Jenkins
#
# Note: secret.key is purposefully not tracked by git. This should be backed up
# separately because configs may contain secrets which were encrypted using the
# secret.key. To back up secrets use 'tar -czf /tmp/secrets.tgz secret*' and
# save the file separate from your repository. If you want secrets backed up
# with configuration, then see the bottom of this file for an example.
# Ignore all JENKINS_HOME except jobs directory, root xml config, and
# .gitignore file.
# /*
/test/t*
!/jobs
!/.gitignore
!/*.xml
.git/
# Ignore all files in jobs subdirectories except for folders.
# Note: git doesn't track folders, only file content.
jobs/**
!jobs/**/
# Uncomment the following line to save next build numbers with config.
#!jobs/**/nextBuildNumber
# For performance reasons, we want to ignore builds in Jenkins jobs because it
# contains many tiny files on large installations. This can impact git
# performance when running even basic commands like 'git status'.
builds
indexing
# Exclude only config.xml files in repository subdirectories.
!config.xml
# Don't track workspaces (when users build on the master).
jobs/**/*workspace
# Security warning: If secrets are included with your configuration, then an
# adversary will be able to decrypt all encrypted secrets within Jenkins
# config. Including secrets is a bad practice, but the example is included in
# case someone still wants it for convenience. Uncomment the following line to
# include secrets for decryption with repository configuration in Git.
#!/secret*
# As a result, only Jenkins settings and job config.xml files in JENKINS_HOME
# will be tracked by git.
# ---> VisualStudioCode
.vscode/
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# ---> VisualStudio
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
[Bb]uild/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
[Tt]est/
[Tt]est/*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Nuget personal access tokens and Credentials
nuget.config
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
.idea/
*.sln.iml
# ---> SVN
.svn/

@@ -8,7 +8,7 @@ namespace: youyan
name: filebeat
# The version of the collection. Must be compatible with semantic versioning
version: 1.8.0
version: 1.7.10
# The path to the Markdown (.md) readme file. This path is relative to the root of the collection
readme: README.md

@@ -1,15 +0,0 @@
- name: "Install Filebeat Instance <{{ instance_name }}>"
  hosts: "{{ instance_name }}_filebeats"
  tasks:
    - name: Filebeat Deploy
      include_role:
        name: filebeat
      vars:
        log_type: "{{ item }}"
      with_items: "{{ deploy_logs }}"
    - name: Update Alerts Script With Cron
      ansible.builtin.shell: "svn up /data/ops/script/monitor.sh; chmod 755 /data/ops/script/monitor.sh"

@@ -0,0 +1,8 @@
- name: "Install Filebeat Instance <{{ instance_name }}>"
  hosts: filebeats
  roles:
    - filebeat
    - supervisor

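The playbook above expects a filebeats group in the inventory. A minimal sketch of such an inventory, with hostnames invented for illustration:

    all:
      children:
        filebeats:
          hosts:
            log-node-01.example.com:
            log-node-02.example.com: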
@@ -1,29 +0,0 @@
---
language: python
python: "2.7"

# Use the new container infrastructure
sudo: false

# Install ansible
addons:
  apt:
    packages:
      - python-pip

install:
  # Install ansible
  - pip install ansible

  # Check ansible version
  - ansible --version

  # Create ansible.cfg with correct roles_path
  - printf '[defaults]\nroles_path=../' >ansible.cfg

script:
  # Basic role syntax check
  - ansible-playbook tests/test.yml -i tests/inventory --syntax-check

notifications:
  webhooks: https://galaxy.ansible.com/api/v1/notifications/

@@ -1,38 +0,0 @@
Role Name
=========

A brief description of the role goes here.

Requirements
------------

Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required.

Role Variables
--------------

A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (ie. hostvars, group vars, etc.) should be mentioned here as well.

Dependencies
------------

A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.

Example Playbook
----------------

Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too:

    - hosts: servers
      roles:
         - { role: username.rolename, x: 42 }

License
-------

BSD

Author Information
------------------

An optional section for the role authors to include contact information, or a website (HTML is not allowed).

@@ -1,2 +0,0 @@
---
# defaults file for curl

Binary file not shown.

@@ -1,2 +0,0 @@
---
# handlers file for curl

@@ -1,52 +0,0 @@
galaxy_info:
  author: your name
  description: your role description
  company: your company (optional)

  # If the issue tracker for your role is not on github, uncomment the
  # next line and provide a value
  # issue_tracker_url: http://example.com/issue/tracker

  # Choose a valid license ID from https://spdx.org - some suggested licenses:
  # - BSD-3-Clause (default)
  # - MIT
  # - GPL-2.0-or-later
  # - GPL-3.0-only
  # - Apache-2.0
  # - CC-BY-4.0
  license: license (GPL-2.0-or-later, MIT, etc)

  min_ansible_version: 2.1

  # If this a Container Enabled role, provide the minimum Ansible Container version.
  # min_ansible_container_version:

  #
  # Provide a list of supported platforms, and for each platform a list of versions.
  # If you don't wish to enumerate all versions for a particular platform, use 'all'.
  # To view available platforms and versions (or releases), visit:
  # https://galaxy.ansible.com/api/v1/platforms/
  #
  # platforms:
  # - name: Fedora
  #   versions:
  #   - all
  #   - 25
  # - name: SomePlatform
  #   versions:
  #   - all
  #   - 1.0
  #   - 7
  #   - 99.99

  galaxy_tags: []
  # List tags for your role here, one per line. A tag is a keyword that describes
  # and categorizes the role. Users find roles by searching for tags. Be sure to
  # remove the '[]' above, if you add tags to this list.
  #
  # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
  # Maximum 20 tags per role.

dependencies: []
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.

@@ -1,11 +0,0 @@
---
# tasks file for curl
- name: Install CURL Tool
  ansible.builtin.copy:
    src: curl
    dest: /usr/bin/curl-7.80.0
    owner: root
    group: root
    mode: '0755'

@@ -1,2 +0,0 @@
localhost

@@ -1,5 +0,0 @@
---
- hosts: localhost
  remote_user: root
  roles:
    - curl

@@ -1,2 +0,0 @@
---
# vars file for curl

@@ -1,62 +1,2 @@
---
# vars file for filebeat
pip_bin_path: "/data/opt/python3/bin/pip"
python_bin_dir: "/data/opt/python3/bin"
filebeat_version: 7.10.1
filebeat_oss_version: false
filebeat_download_url: "https://artifacts.elastic.co/downloads/beats/filebeat"
filebeat_work_path: "/data/opt/filebeat/{{ instance_name }}_{{ log_type }}"
filebeat_tail_files: true
filebeat_max_procs: 2
filebeat_compression_level: 5
filebeat_output_works: 1
filebeat_gid: 1801
filebeat_uid: 1801
filebeat_older: 24h
filebeat_logs:
  gateway:
    paths:
      - "/data/log/nginx/*.log"
    index_prefix: "gw-nginx"
    template_name: "gw-nginx-ug10cn"
    template_pattern: "gw-nginx-*"
    #[#1002039] Exclude Countly log files from the nginx log collection
    exclude_files: ['_81\.log$', '.*countly.*\.log$']
    dissect_tokenizers:
      - tokenizer: "/%{}/%{}/%{}/%{}_%{sublog}.log"
        field_name: "log.file.path"
    filebeat_older_dir: "/data/opt/filebeat/{{ instance_name }}"
    filebeat_older_data_dir: data
    filebaet_ilm_enabled: yes
  manager:
    paths:
      - "/data/game/htdocs/center/manager/storage/cp/logs/*.log"
    index_prefix: "mgr"
    template_name: "mgr-cp"
    template_pattern: "mgr-*"
    multiline: true
    sublog: cp
    filebeat_older_dir: "/data/opt/filebeat/{{ instance_name }}"
    filebeat_older_data_dir: data2
    filebaet_ilm_enabled: yes
  quick:
    paths:
      - "/data/game/sync/quick/filebeat/*.txt"
    index_prefix: "quick"
    template_name: "quick"
    template_pattern: "quick-*"
    sublog: quick
    dissect_tokenizers:
      - tokenizer: "/%{}/%{}/%{}/%{}/%{}/%{zone_id}_%{server_id}_%{log_file_time}.txt"
        field_name: "log.file.path"
    convert_timestamp:
      field_name: log_datetime
      layouts:
        - '2006-01-02 15:04:05'
    filebeat_older_dir: "/data/opt/filebeat/quick"
    filebeat_older_data_dir: data
    filebaet_ilm_enabled: yes
    #[1002041] Enable the elastic_delays check for quick logs only
    elastic_delays_metric_enabled: yes
# defaults file for filebeat

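Each key under filebeat_logs above defines one deployable log type (selected via the log_type variable). A new entry would follow the same shape; here is a minimal sketch, with the payment name and its path invented for illustration:

    filebeat_logs:
      payment:                            # hypothetical log type
        paths:
          - "/data/log/payment/*.log"     # hypothetical path
        index_prefix: "pay"
        template_name: "pay"
        template_pattern: "pay-*"
        filebaet_ilm_enabled: yes         # key spelled exactly as the role spells it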
@@ -1,40 +0,0 @@
function process(event) {
    var msg = event.Get("message");
    var i;
    // Strip CR/LF characters so the tab-split below is clean.
    msg = msg.replace(/\r/g, "");
    msg = msg.replace(/\n/g, "");
    msg = msg.replace(/\r\n/g, "");
    var fields = msg.split("\t");
    if (Number(fields[1]) == 64) {
        // Logs with log_id = 64 are synced over from the sub-admin backend and must be dropped.
        event.Cancel(); return;
    }
    // Trim the first 27 columns; blank out "-" placeholders and missing columns.
    for (i = 0; i < 27; i++) {
        if (fields[i] == null) {
            fields[i] = "";
        } else if (fields[i].trim() == "-") {
            fields[i] = "";
        } else {
            fields[i] = fields[i].trim();
        }
    }
    event.Put("log_datetime", fields[0]);
    event.Put("log_id", fields[1]);
    event.Put("account_id", fields[2]);
    event.Put("role_id", fields[3]);
    event.Put("role_name", fields[4]);
    event.Put("spid", fields[5]);
    event.Put("channel", fields[6]);
    for (i = 7; i < fields.length; i++) {
        if (fields[i] == "-") { fields[i] = "" }
        event.Put("col" + (i - 6), fields[i]);
    }
    return event;
}

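To make the column mapping concrete, here is a sketch of the fields this processor would put on an event for a hypothetical tab-separated input line (all values invented):

    # message: "2021-06-01 12:00:05\t10\tacc_01\t2001\thero\tsp01\tch01\tfoo\t-"
    log_datetime: "2021-06-01 12:00:05"
    log_id: "10"
    account_id: "acc_01"
    role_id: "2001"
    role_name: "hero"
    spid: "sp01"
    channel: "ch01"
    col1: "foo"
    col2: ""    # "-" placeholders are blanked
    # col3 .. col20 are padded with empty strings by the 27-column loop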
@@ -1,42 +0,0 @@
function process(event) {
    var msg = event.Get("message");
    var i;
    // Strip CR/LF characters so the tab-split below is clean.
    msg = msg.replace(/\r/g, "");
    msg = msg.replace(/\n/g, "");
    msg = msg.replace(/\r\n/g, "");
    var fields = msg.split("\t");
    if (Number(fields[1]) == 64) {
        // Logs with log_id = 64 are synced over from the sub-admin backend and must be dropped.
        event.Cancel(); return;
    }
    // Trim the first 25 columns; blank out "-" placeholders and missing columns.
    for (i = 0; i < 25; i++) {
        if (fields[i] == null) {
            fields[i] = "";
        } else if (fields[i].trim() == "-") {
            fields[i] = "";
        } else {
            fields[i] = fields[i].trim();
        }
    }
    event.Put("log_datetime", fields[0]);
    event.Put("log_id", fields[1]);
    event.Put("role_id", fields[2]);
    event.Put("role_name", fields[3]);
    event.Put("account_id", fields[4]);
    var ws = fields[4].split("_"); // split spid and channel out of account_id
    // Only the first segment of account_id describes the spid, so spid and channel are filled with the same value.
    event.Put("spid", ws[0]);
    event.Put("channel", ws[0]);
    for (i = 5; i < fields.length; i++) {
        event.Put("col" + (i - 4), fields[i]);
    }
    return event;
}

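The only differences from the previous processor are the column order and how spid/channel are derived: both are taken from the first underscore-separated segment of account_id. For a hypothetical account_id of "sp01_12345", the result would be:

    spid: "sp01"
    channel: "sp01"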
@@ -1,42 +0,0 @@
function process(event) {
    var msg = event.Get("message");
    var i;
    // Strip CR/LF characters so the tab-split below is clean.
    msg = msg.replace(/\r/g, "");
    msg = msg.replace(/\n/g, "");
    msg = msg.replace(/\r\n/g, "");
    var fields = msg.split("\t");
    if (Number(fields[1]) == 64) {
        // Logs with log_id = 64 are synced over from the sub-admin backend and must be dropped.
        event.Cancel(); return;
    }
    // Trim the first 25 columns; blank out "-" placeholders and missing columns.
    for (i = 0; i < 25; i++) {
        if (fields[i] == null) {
            fields[i] = "";
        } else if (fields[i].trim() == "-") {
            fields[i] = "";
        } else {
            fields[i] = fields[i].trim();
        }
    }
    event.Put("log_datetime", fields[0]);
    event.Put("log_id", fields[1]);
    event.Put("role_id", fields[2]);
    event.Put("role_name", fields[3]);
    event.Put("account_id", fields[4]);
    var ws = fields[4].split("_"); // split spid and channel out of account_id
    // Only the first segment of account_id describes the spid, so spid and channel are filled with the same value.
    event.Put("spid", ws[0]);
    event.Put("channel", ws[0]);
    for (i = 5; i < fields.length; i++) {
        event.Put("col" + (i - 4), fields[i]);
    }
    return event;
}

@@ -1,31 +1,2 @@
---
# handlers file for filebeat
- name: Reload Filebeat Service {{ instance_name }}_{{ log_type }}
  ansible.builtin.service:
    name: "filebeat_{{ instance_name }}_{{ log_type }}"
    state: reloaded
    use: "{{ 'sysvinit' if ansible_facts['distribution'] == 'CentOS' and ansible_facts['distribution_major_version'] == '6' else 'auto' }}"
  when: current_filebeat_version.stdout != ''
- name: Start Filebeat Service {{ instance_name }}_{{ log_type }}
  ansible.builtin.service:
    name: "filebeat_{{ instance_name }}_{{ log_type }}"
    state: started
    enabled: true
    sleep: 5
    use: "{{ 'sysvinit' if ansible_facts['distribution'] == 'CentOS' and ansible_facts['distribution_major_version'] == '6' else 'auto' }}"
- name: Restart Filebeat Service {{ instance_name }}_{{ log_type }}
  ansible.builtin.service:
    name: "filebeat_{{ instance_name }}_{{ log_type }}"
    state: restarted
    sleep: 5
    use: "{{ 'sysvinit' if ansible_facts['distribution'] == 'CentOS' and ansible_facts['distribution_major_version'] == '6' else 'auto' }}"
  when: current_filebeat_version.stdout != ''
- name: Clean Old Filebeat {{ instance_name }}_{{ log_type }}
  include_tasks: clean_old_filebeat.yml
- name: Restart Filebeat Monitor Plugin {{ instance_name }}_{{ log_type }}
  ansible.builtin.shell: "service filebeat_{{ instance_name }}_{{ log_type }} restart_monitor"
  when: current_filebeat_version.stdout != ''

@@ -47,11 +47,6 @@ galaxy_info:
  # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
  # Maximum 20 tags per role.
dependencies:
  - supervisor
  - curl
dependencies: []
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.
allow_duplicates: true

@@ -1,17 +0,0 @@
---
- name: Check Old Filebeat Directory
  ansible.builtin.shell: "ls {{ filebeat_logs[log_type]['filebeat_older_dir'] }} || echo ''"
  register: check_older_dir
  changed_when: check_older_dir.stdout != ''
- name: Clean Old Filebeat Directory
  community.general.archive:
    path: "{{ filebeat_logs[log_type]['filebeat_older_dir'] }}"
    dest: "/data/old-filebeat-{{ 'quick' if log_type == 'quick' else instance_name }}-backup.tgz"
    remove: yes
  when: check_older_dir.stdout != ''
- name: Clean Old Filebeat SysV Script
  ansible.builtin.shell: "/bin/rm -vf /etc/init.d/filebeat_{{ instance_name }}"
  register: clean_sysv_script
  changed_when: clean_sysv_script.stdout != ''

@@ -1,145 +1,38 @@
---
- name: Create Filebeat Group "{{ instance_name }}_{{ log_type }}"
- name: Create Filebeat Group
  ansible.builtin.group:
    name: filebeat
    state: present
    gid: "{{ filebeat_gid }}"
    gid: 1802
- name: Create Filebeat User "{{ instance_name }}_{{ log_type }}"
- name: Create Filebeat User
  ansible.builtin.user:
    name: filebeat
    group: filebeat
    shell: /sbin/nologin
    state: present
    uid: "{{ filebeat_uid }}"
    uid: 1802
- name: Make Work directory "{{ instance_name }}_{{ log_type }}"
- name: Make Work directory
  ansible.builtin.file:
    path: "{{ filebeat_work_path }}"
    path: "{{ work_path }}"
    owner: filebeat
    group: filebeat
    state: directory
    mode: '0755'
- name: Check Filebeat Version "{{ instance_name }}_{{ log_type }}"
  ansible.builtin.shell: "({{ filebeat_work_path }}/filebeat version | grep -E 'filebeat version {{ version }}') || echo ''"
  register: current_filebeat_version
  changed_when: current_filebeat_version.stdout == ''
  vars:
    version: "{{ filebeat_logs[log_type]['filebeat_version'] | default(filebeat_version) }}"
- name: Install Filebeat "{{ instance_name }}_{{ log_type }} {{ 'filebeat-oss-' if filebeat_logs[log_type]['filebeat_oss_version'] | default(filebeat_oss_version) else 'filebeat-' }}{{ filebeat_logs[log_type]['filebeat_version'] | default(filebeat_version) }}"
- name: Install Filebeat
  ansible.builtin.unarchive:
    src: "{{ filebeat_logs[log_type]['filebeat_download_url'] | default(filebeat_download_url) }}/{{ package_name }}"
    dest: "{{ filebeat_work_path }}"
    src: "{{ filebeat_download_url }}"
    dest: "{{ work_path }}"
    remote_src: yes
    validate_certs: false
    extra_opts:
      - --strip-components=1
  vars:
    package_name: "{{ 'filebeat-oss' if oss else 'filebeat' }}-{{ version }}-{{ ansible_facts['system'] | lower }}-{{ ansible_facts['userspace_architecture'] }}.tar.gz"
    oss: "{{ filebeat_logs[log_type]['filebeat_oss_version'] | default(filebeat_oss_version) }}"
    version: "{{ filebeat_logs[log_type]['filebeat_version'] | default(filebeat_version) }}"
  when: current_filebeat_version.stdout == ''
- name: Install Filebeat Monitor Plugin {{ instance_name }}_{{ log_type }}
  ansible.builtin.copy:
    src: filebeat_monitor
    dest: "{{ filebeat_work_path }}"
    owner: root
    group: root
    mode: '0755'
  notify: "Restart Filebeat Monitor Plugin {{ instance_name }}_{{ log_type }}"
- name: Render Filebeat Configure File "{{ instance_name }}_{{ log_type }}"
- name: Render Filebeat Configure File
  ansible.builtin.template:
    src: "filebeat.yml"
    dest: "{{ filebeat_work_path }}/filebeat.yml"
    src: filebeat-gateway-nginx-template.yml
    dest: "{{ work_path }}/filebeat.yml"
    owner: root
    group: root
    mode: '0644'
  notify: "Reload Filebeat Service {{ instance_name }}_{{ log_type }}"
  vars:
    logs: "{{ filebeat_logs }}"
- name: Render Custom Processor "{{ instance_name }}_{{ log_type }}"
  ansible.builtin.copy:
    src: "{{ script['file'] }}"
    dest: "{{ filebeat_work_path }}"
    owner: root
    group: root
    mode: '0644'
  notify: "Reload Filebeat Service {{ instance_name }}_{{ log_type }}"
  with_items: "{{ filebeat_logs[log_type]['scripts'] | default([]) }}"
  loop_control:
    loop_var: "script"
- name: Render Filebeat Monitor Plugin Configure File "{{ instance_name }}_{{ log_type }}"
  ansible.builtin.template:
    src: "filebeat_monitor.yaml"
    dest: "{{ filebeat_work_path }}/monitor.yaml"
    owner: root
    group: root
    mode: '0644'
  notify: "Restart Filebeat Monitor Plugin {{ instance_name }}_{{ log_type }}"
- name: Render Filebeat Supervisor Configure File "{{ instance_name }}_{{ log_type }}"
  ansible.builtin.template:
    src: supervisor.yml
    dest: "{{ filebeat_work_path }}/supervisor.yml"
    owner: root
    group: root
    mode: '0640'
  notify: "Restart Filebeat Service {{ instance_name }}_{{ log_type }}"
- name: Render Filebeat SystemV Script "{{ instance_name }}_{{ log_type }}"
  ansible.builtin.template:
    src: supervisor.sh
    dest: "/etc/init.d/filebeat_{{ instance_name }}_{{ log_type }}"
    owner: root
    group: root
    mode: '0750'
  notify: "Restart Filebeat Service {{ instance_name }}_{{ log_type }}"
- name: Stop Old Filebeat "{{ instance_name }}_{{ log_type }}"
  include_tasks: stop_old_filebeat.yml
  vars:
    target: "{{ instance_name }}_{{ log_type }}"
  when: filebeat_data_migration | default(false)
- name: Check Old Filebeat Data "{{ instance_name }}_{{ log_type }}"
  ansible.builtin.shell: "ls {{ filebeat_logs[log_type]['filebeat_older_dir'] }}/{{ filebeat_logs[log_type]['filebeat_older_data_dir'] }} || echo ''"
  register: check_old_filebeat_data
  changed_when: check_old_filebeat_data.stdout != ''
  when: filebeat_data_migration | default(false)
- name: Merge Filebeat Data "{{ instance_name }}_{{ log_type }}"
  ansible.builtin.copy:
    src: "{{ filebeat_logs[log_type]['filebeat_older_dir'] }}/{{ filebeat_logs[log_type]['filebeat_older_data_dir'] }}/"
    dest: "{{ filebeat_work_path }}/data/"
    owner: filebeat
    group: filebeat
    mode: '0750'
    remote_src: yes
    directory_mode: yes
  notify: "Clean Old Filebeat {{ instance_name }}_{{ log_type }}"
  when: filebeat_data_migration | default(false) and check_old_filebeat_data.stdout != ''
- name: Check Filebeat Service Running Status
  ansible.builtin.shell: "{{ python_bin_dir }}/supervisorctl -c {{ filebeat_work_path }}/supervisor.yml status filebeat_{{ instance_name }}_{{ log_type }}"
  register: filebeat_status
  changed_when: filebeat_status.rc != 0
  failed_when: false
- name: First Start Filebeat Service "{{ instance_name }}_{{ log_type }}"
  ansible.builtin.service:
    name: "filebeat_{{ instance_name }}_{{ log_type }}"
    state: started
    enabled: true
    sleep: 30
    use: "{{ 'sysvinit' if ansible_facts['distribution'] == 'CentOS' and ansible_facts['distribution_major_version'] == '6' else 'auto' }}"
  when: filebeat_status.rc != 0
    mode: '0644'

@@ -1,22 +0,0 @@
---
- name: Get Old Running Supervisor Processes {{ target }}
  ansible.builtin.shell: "ps -ef | grep -v grep | grep -w /data/opt/filebeat/{{ instance_name }}/supervisor.yml | awk '{print $2}'"
  register: supervisor_processes
  changed_when: supervisor_processes.stdout != ''
- name: Get Old Running Filebeat Processes {{ target }}
  ansible.builtin.shell: "ps -ef | grep -v grep | grep -w /data/opt/filebeat/{{ instance_name }}/filebeat | awk '{print $2}'"
  register: filebeat_processes
  changed_when: filebeat_processes.stdout != ''
- name: Stop Old Filebeat Processes {{ target }}
  ansible.builtin.shell: "kill -9 {{ (supervisor_processes.stdout_lines + filebeat_processes.stdout_lines) | join(' ') }}"
  when: supervisor_processes.stdout != '' or filebeat_processes.stdout != ''
- name: Remove Old Processes Residue
  ansible.builtin.shell: "/bin/rm -vf /tmp/supervisord_{{ instance_name }}.* /tmp/filebeat_{{ target }}*.log"
  register: residue
  changed_when: residue.stdout != ''

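The ps/grep/kill pipeline above could also be expressed with pkill; a sketch of that variant, which is not what the repository does:

    - name: Stop Old Filebeat Processes (pkill variant, sketch)
      ansible.builtin.shell: "pkill -9 -f '/data/opt/filebeat/{{ instance_name }}/(supervisor.yml|filebeat)' || true"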
@@ -1,24 +1,15 @@
path.data: {{ work_path }}/data
filebeat.registry.path: {{ work_path }}/data/registry/gw-nginx-{{ instance_name }}
queue.mem:
  events: 512
  flush.min_events: 128
  flush.timeout: 5s
filebeat.inputs:
{% for log in gwlogs %}
{% for log in logs %}
- type: log
  paths:
    - "/data/log/nginx/access_{{ log }}.log"
    - "/data/log/nginx/error_{{ log }}.log"
  fields:
    source: {{ log | regex_replace('\*') }}
  harvester_buffer_size: 1638400
  tail_files: {{ tail_files | default(true) | string | lower }}
{% endfor %}
max_procs: {{ max_procs | default(2) }}
{% endfor %}
filebeat.config.modules:
  # Glob pattern for configuration loading
@@ -33,7 +24,7 @@ filebeat.config.modules:
# ======================= Elasticsearch template setting =======================
setup.ilm.enabled: false
setup.template.name: "gw-nginx-ug10cn"
setup.template.pattern: "gw-nginx-*"
setup.template.pattern: "gw-nginx-ug10cn-*"
setup.template.settings:
  index.number_of_shards: 3
@@ -49,9 +40,6 @@ output.elasticsearch:
  pipeline: "gwlogs"
  username: {{ es_user }}
  password: {{ es_passwd }}
  bulk_max_size: 1500
  compression_level: {{ compression_level | default(0) }}
  worker: 1
# ================================= Processors =================================

@@ -1,67 +0,0 @@
path.data: {{ work_path }}/data2
filebeat.registry.path: {{ work_path }}/data2/registry/mgr-cp-{{ instance_name }}
queue.mem:
  events: 512
  flush.min_events: 128
  flush.timeout: 5s
filebeat.inputs:
{% for log in mgrlogs %}
- type: log
  paths:
    - {{ log }}
  multiline.type: pattern
  multiline.pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2}'
  multiline.negate: true
  multiline.match: after
  multiline.skip_newline: true
  fields:
    source: cp
  harvester_buffer_size: 1638400
  tail_files: {{ tail_files | default(true) | string | lower }}
{% endfor %}
max_procs: {{ max_procs | default(2) }}
filebeat.config.modules:
  # Glob pattern for configuration loading
  path: ${path.config}/modules.d/*.yml
  # Set to true to enable config reloading
  reload.enabled: false
  # Period on which files under path should be checked for changes
  #reload.period: 10s
# ======================= Elasticsearch template setting =======================
setup.ilm.enabled: false
setup.template.name: "mgr-cp"
setup.template.pattern: "mgr-cp-*"
setup.template.settings:
  index.number_of_shards: 3
setup.kibana:
  host: "{{ kibana_url }}"
  space.id: "{{ kibana_space }}"
# ---------------------------- Elasticsearch Output ----------------------------
output.elasticsearch:
  hosts: {{ es_hosts }}
  index: "mgr-{{ instance_name }}-%{[fields.source]}-%{+yyyy.MM.dd}"
  pipeline: "cplogs"
  username: {{ es_user }}
  password: {{ es_passwd }}
  bulk_max_size: 1500
  compression_level: {{ compression_level | default(0) }}
  worker: 3
# ================================= Processors =================================
processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~

@@ -1,61 +0,0 @@
path.data: {{ work_path }}/data3
filebeat.registry.path: {{ work_path }}/data3/registry/quick-{{ instance_name }}
queue.mem:
  events: 512
  flush.min_events: 128
  flush.timeout: 5s
filebeat.inputs:
{% for log in quicklog %}
- type: log
  paths:
    - {{ log }}
  fields:
    source: quick
  harvester_buffer_size: 1638400
{% endfor %}
max_procs: {{ max_procs | default(2) }}
filebeat.config.modules:
  # Glob pattern for configuration loading
  path: ${path.config}/modules.d/*.yml
  # Set to true to enable config reloading
  reload.enabled: false
  # Period on which files under path should be checked for changes
  #reload.period: 10s
# ======================= Elasticsearch template setting =======================
setup.template.name: "quick"
setup.template.pattern: "quick-*"
# ---------------------------- Elasticsearch Output ----------------------------
output.elasticsearch:
  hosts: {{ quick_es_hosts }}
  index: "quick-{{ instance_name }}-%{[fields.source]}-%{+yyyy.MM.dd}"
  pipeline: "quicklog"
  username: {{ quick_es_user }}
  password: {{ quick_es_passwd }}
  bulk_max_size: 1500
  compression_level: {{ compression_level | default(0) }}
  worker: 1
# ================================= Processors =================================
processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~
  - dissect:
      tokenizer: "/%{}/%{}/%{}/%{}/%{}/%{zone_id}_%{server_id}_%{log_t}.txt"
      field: "log.file.path"
      target_prefix: ""
  - dissect:
      tokenizer: "%{date} %{+date->} %{log_id->} %{account_id->} %{role_id->} %{role_name->} %{spid->} %{channel->} %{col1->} %{col2->} %{col3->} %{col4->} %{col5->} %{col6->} %{col7->} %{col8->} %{col9->} %{col10->} %{col11->} %{col12->} %{col13->}"
      field: "message"
      target_prefix: ""

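For clarity, the first dissect tokenizer above skips five path segments and then splits the file name on underscores. Given a hypothetical path, it would yield:

    # log.file.path: /data/game/sync/quick/filebeat/1001_2001_20210601.txt
    zone_id: "1001"
    server_id: "2001"
    log_t: "20210601"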
@@ -1,120 +0,0 @@
# Disable Filebeat's own metrics log output
logging.metrics.enabled: false
# Fix automatic reload of this configuration file
filebeat.config.inputs:
  enabled: true
  path: ${path.config}/filebeat.yml
  reload.enabled: true
  reload.period: 10s
logging.json: false
queue.mem:
  events: {{ filebeat_queue_mem_events | default(8192) }}
  flush.min_events: {{ filebeat_queue_mem_flush_min_event | default(512) }}
filebeat.inputs:
- type: log
  paths: {{ logs[log_type]['paths'] }}
{% if "exclude_files" in logs[log_type] -%}
  exclude_files: {{ logs[log_type]['exclude_files'] }}
{% endif -%}
{% if 'multiline' in logs[log_type] and logs[log_type]['multiline'] -%}
  multiline.type: pattern
  multiline.pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2}'
  multiline.negate: true
  multiline.match: after
  multiline.skip_newline: true
{% endif -%}
  harvester_buffer_size: 2048
  ignore_older: {{ filebeat_older }}
  tail_files: {{ filebeat_tail_files | string | lower }}
http.enabled: true
http.host: unix://${path.config}/filebeat.sock
max_procs: {{ filebeat_max_procs }}
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
# ======================= Elasticsearch template setting =======================
#[#1002043] Repair accidentally lost ILM policies by forcing filebeat to register the ILM policy.
# If filebaet_ilm_enabled is set to false, ILM is disabled; this is for compatibility with Huawei Cloud ES.
setup.ilm.enabled: false
setup.ilm.policy_name: "{{ logs[log_type]['setup_ilm_policy_name'] | default('gw-nginx-ug10') }}"
setup.ilm.check_exists: false
setup.template.name: "{{ logs[log_type]['template_name'] }}"
setup.template.pattern: "{{ logs[log_type]['template_pattern'] }}"
setup.template.settings:
  index.number_of_shards: "{{ filebeat_number_of_shards | default(3) }}"
{% if 'kibana' in logs[log_type] -%}
setup.kibana:
  host: "{{ logs[log_type]['kibana']['kibana_url'] }}"
  space.id: "{{ logs[log_type]['kibana']['kibana_space'] }}"
{% endif -%}
# ---------------------------- Elasticsearch Output ----------------------------
{% if 'output' not in logs[log_type] or logs[log_type]['output'] == 'elastic' %}
output.elasticsearch:
  hosts: {{ logs[log_type]['es_hosts'] }}
  #[#1002040] If the sublog field is set to 'none', omit the sublog segment from the index name
{% if 'sublog' in logs[log_type] and logs[log_type]['sublog'] == 'none' -%}
  index: "{{ logs[log_type]['index_prefix'] }}-{{ instance_name }}-%{+yyyy.MM.dd}"
{% elif 'sublog' in logs[log_type] and logs[log_type]['sublog'] -%}
  index: "{{ logs[log_type]['index_prefix'] }}-{{ instance_name }}-{{ logs[log_type]['sublog'] }}-%{+yyyy.MM.dd}"
{% else -%}
  index: "{{ logs[log_type]['index_prefix'] }}-{{ instance_name }}-%{[sublog]}-%{+yyyy.MM.dd}"
{% endif -%}
{% if 'pipeline' in logs[log_type] and logs[log_type]['pipeline'] -%}
  pipeline: "{{ logs[log_type]['pipeline'] }}"
{% endif -%}
{% if 'es_user' in logs[log_type] and 'es_passwd' in logs[log_type] -%}
  username: {{ logs[log_type]['es_user'] | default('') }}
  password: {{ logs[log_type]['es_passwd'] | default('') }}
{% endif -%}
  bulk_max_size: {{ filebeat_bulk_max_size | default(500) }}
  flush_interval: 5s
  compression_level: {{ filebeat_compression_level }}
  worker: {{ filebeat_output_works }}
{% endif -%}
{% if 'output' in logs[log_type] and logs[log_type]['output'] == 'debug' -%}
output.console:
  pretty: true
{% endif -%}
# ================================= Processors =================================
processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~
{% if 'dissect_tokenizers' in logs[log_type] and logs[log_type]['dissect_tokenizers'] -%}
{% for tokenizer in logs[log_type]['dissect_tokenizers'] -%}
  - dissect:
      tokenizer: "{{ tokenizer['tokenizer'] }}"
      field: {{ tokenizer['field_name'] }}
      target_prefix: ""
{% endfor -%}
{% endif -%}
{% if 'convert_timestamp' in logs[log_type] and logs[log_type]['convert_timestamp'] -%}
  - timestamp:
      field: {{ logs[log_type]['convert_timestamp']['field_name'] }}
      layouts: {{ logs[log_type]['convert_timestamp']['layouts'] }}
      timezone: "Asia/Shanghai"
{% endif -%}
{% if 'scripts' in logs[log_type] and logs[log_type]['scripts'] -%}
{% for script in logs[log_type]['scripts'] -%}
  - script:
      lang: javascript
      id: {{ script['id'] }}
      file: {{ script['file'] }}
{% endfor %}
{%- endif -%}

@@ -1,9 +0,0 @@
instance_name: {{ inventory_hostname }}
project: {{ instance_name }}
sublog: {{ 'quick' if log_type == 'quick' else '' }}
filebeat_config_path: {{ filebeat_logs[log_type]['filebeat_config_path'] | default('filebeat.yml') }}
metric_host: {{ filebeat_monitor_sock | default('./filebeat_monitor.sock') }}
push_gateway_url: {{ filebeat_monitor_push_gateway | default('') }}
push_gateway_lifecycle: {{ filebeat_monitor_push_lifecycle | default(5) }}
log_path: {{ filebeat_monitor_log_path | default('logs/filebeat_monitor.log') }}
elastic_delays_metric_enabled: {{ filebeat_logs[log_type]['elastic_delays_metric_enabled'] | default('False')}}

@@ -1,47 +0,0 @@
[supervisord]
logfile = /tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.log
logfile_maxbytes = 50MB
logfile_backups=5
loglevel = debug
pidfile = /tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.pid
nodaemon = false
minfds = 1024
minprocs = 200
umask = 022
user = filebeat
identifier = filebeat_{{ instance_name }}_{{ log_type }}
directory = {{ filebeat_work_path }}
nocleanup = true
strip_ansi = false
[unix_http_server]
file = /tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.sock
chmod = 0777
chown = filebeat:filebeat
username = filebeat_supervisord
password = eNlB.UlOrJAnA
[program:filebeat_{{ instance_name }}_{{ log_type }}]
directory={{ filebeat_work_path }}/
command={{ filebeat_work_path }}/filebeat -c {{ filebeat_work_path }}/filebeat.yml
stdout_logfile=/dev/null
stderr_logfile=/dev/null
autostart=true
autorestart=true
[program:filebeat_monitor_{{ instance_name }}_{{ log_type }}]
directory={{ filebeat_work_path }}/
command={{ filebeat_work_path }}/filebeat_monitor -c {{filebeat_work_path}}/monitor.yaml
stdout_logfile=/dev/null
stderr_logfile=/dev/null
autostart=true
autorestart=true
[supervisorctl]
serverurl = unix:///tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.sock
username = filebeat_supervisord
password = eNlB.UlOrJAnA
prompt = filebeat_{{ instance_name }}_{{ log_type }}
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

@@ -0,0 +1,5 @@
---
# vars file for filebeat
filebeat_download_url: "https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.10.1-linux-x86_64.tar.gz"
work_path: "/data/opt/filebeat/{{ instance_name }}"
logs: ["api", "cl*", "doc", "download", "gw*", "cp", "pay", "static"]

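For one entry of the logs list above, the gateway template's input loop renders a block like the following (sketch for the "api" entry; globbed entries such as "cl*" match multiple files, and regex_replace('\*') strips the asterisk from the source field):

    - type: log
      paths:
        - "/data/log/nginx/access_api.log"
        - "/data/log/nginx/error_api.log"
      fields:
        source: api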
@@ -1,4 +1,2 @@
---
# vars file for supervisor
pip_bin_path: "/data/opt/python3/bin/pip"
python_bin_dir: "/data/opt/python3/bin"
# defaults file for supervisor

@@ -1,2 +1,2 @@
---
# handlers file for supervisor
# handlers file for supervisor

@@ -50,5 +50,3 @@ galaxy_info:
dependencies: []
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.
allow_duplicates: true

@@ -5,5 +5,35 @@
    name: supervisor
    executable: "{{ pip_bin_path }}"
- name: Render Supervisor Configure File
  ansible.builtin.template:
    src: supervisor.yml
    dest: "{{ work_path }}/supervisor.yml"
    owner: root
    group: root
    mode: '0750'
- name: Render Supervisor SystemV Script
  ansible.builtin.template:
    src: supervisor.sh
    dest: "{{ work_path }}/supervisor_initd.sh"
    owner: root
    group: root
    mode: '0750'
  vars:
    config_file_path: "{{ work_path }}/supervisor.yml"
- name: Symlink SystemV Script
  ansible.builtin.file:
    src: "{{ work_path }}/supervisor_initd.sh"
    dest: "/etc/init.d/filebeat_{{ instance_name }}"
    owner: "root"
    state: link
- name: Start Filebeat Service
  ansible.builtin.service:
    name: "filebeat_{{ instance_name }}"
    state: restarted
    enabled: true
    sleep: 30
    use: sysvinit

@@ -1,26 +1,14 @@
#!/bin/bash
# filebeat_{{ instance_name }}_{{ log_type }} Start filebeat
#
# chkconfig: 2345 33 81
# description: Starts, stops and saves filebeat_{{ instance_name }}_{{ log_type }}
#
#
### BEGIN INIT INFO
# Provides: youyan
# Required-Start:
# Required-Stop:
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: start and stop filebeat_{{ instance_name }}_{{ log_type }}
# Description: Start, stop filebeat_{{ instance_name }}_{{ log_type }}
### END INIT INFO
# chkconfig: 345 83 04
set -o nounset
. /etc/rc.d/init.d/functions
RETVAL=0
PIDFILE="/tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.pid"
LOCKFILE="/tmp/supervisord_filebeat_{{ instance_name }}_{{ log_type }}.lock"
OPTIONS="-c {{ filebeat_work_path }}/supervisor.yml"
PIDFILE="/tmp/supervisord_{{instance_name}}.pid"
LOCKFILE="/tmp/supervisord.lock"
OPTIONS="-c {{ config_file_path }}"
WAIT_FOR_SUBPROCESSES="yes"
start() {
@@ -79,11 +67,7 @@ case "$1" in
        RETVAL=$?
        ;;
    reload)
        {{ python_bin_dir }}/supervisorctl $OPTIONS restart filebeat_{{ instance_name }}_{{ log_type }}
        RETVAL=$?
        ;;
    restart_monitor)
        {{ python_bin_dir }}/supervisorctl $OPTIONS restart filebeat_monitor_{{ instance_name }}_{{ log_type }}
        /usr/bin/supervisorctl $OPTIONS reload
        RETVAL=$?
        ;;
    restart)

@@ -0,0 +1,35 @@
[supervisord]
logfile = /tmp/supervisord_{{instance_name}}.log
logfile_maxbytes = 50MB
logfile_backups=5
loglevel = debug
pidfile = /tmp/supervisord_{{instance_name}}.pid
nodaemon = false
minfds = 1024
minprocs = 200
umask = 022
user = filebeat
identifier = supervisor_{{instance_name}}
directory = {{ work_path }}
nocleanup = true
strip_ansi = false
[unix_http_server]
file = /tmp/supervisord_{{instance_name}}.sock
chmod = 0777
chown = filebeat:filebeat
username = filebeat
password = eNlB.UlOrJAnA
[program:filebeat_{{instance_name}}]
command={{ work_path }}/filebeat -c {{ work_path }}/filebeat.yml
autorestart=true
[supervisorctl]
serverurl = unix:///tmp/supervisord_{{instance_name}}.sock
username = filebeat
password = eNlB.UlOrJAnA
prompt = filebeat_{{instance_name}}
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

@@ -0,0 +1,6 @@
---
# vars file for supervisor
pip_bin_path: "/data/opt/python3/bin/pip"
python_bin_dir: "/data/opt/python3/bin"
work_path: "/data/opt/filebeat/{{ instance_name }}"