-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbeats.py
More file actions
89 lines (69 loc) · 3.15 KB
/
beats.py
File metadata and controls
89 lines (69 loc) · 3.15 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import time
import pyinotify
from multiprocessing import Queue
from multiprocessing import Process
from lib.logs import Logger
from lib.daemon import daemon_init
from lib.yml import BeatsConfParse
from lib.kafka import KafkaProductor
from lib.EventHander import SimpleEventHandler, MultilineEventHandler, TagsEventHandler, TagAndMultilineEventHandler
# Absolute directory containing this script; anchors the conf/ and logs/ paths.
root = os.path.dirname(os.path.abspath(__file__))
# Log file named after the invoked script with its extension stripped.
# NOTE(review): sys.argv[0].split('.')[0] yields '' when invoked as
# './beats.py' (the leading './' splits first) -- confirm intended log name.
logger = Logger('{}/logs/{}.logs'.format(root, sys.argv[0].split('.')[0]), sys.argv[0])
# Filebeat-style YAML configuration consumed by BeatsConfParse in main().
CONF = '{0}/conf/filebeat.yml'.format(root)
def start_a_monitor(**kwargs):
    """Build one (not yet started) pyinotify ThreadedNotifier for a prospector.

    Expected kwargs (merged prospector + output sections of the YAML):
      queue     -- destination object handed to the event handler
      fields    -- extra fields the handler attaches to each event
      paths     -- glob pattern(s) watched recursively for IN_MODIFY
      multiline -- optional {'patterns': ..., 'negate': bool} for line grouping
      tags      -- optional {'forward': ...} head pattern for tagging

    Returns the daemonized ThreadedNotifier; the caller must .start()/.stop() it.
    """
    _wm = pyinotify.WatchManager()
    # Pick the handler class from which optional config keys are present.
    if 'multiline' in kwargs and 'tags' in kwargs:
        negate = kwargs['multiline'].get('negate') or False
        _handler = TagAndMultilineEventHandler(queue=kwargs['queue'],
                                               fields=kwargs['fields'],
                                               multi_negate=negate,
                                               tag_head_pattern=kwargs['tags']['forward'],
                                               multi_head_pattern=kwargs['multiline']['patterns'])
    elif 'multiline' in kwargs:
        negate = kwargs['multiline'].get('negate') or False
        _handler = MultilineEventHandler(queue=kwargs['queue'],
                                         fields=kwargs['fields'],
                                         multi_negate=negate,
                                         multi_head_pattern=kwargs['multiline']['patterns'])
    elif 'tags' in kwargs:
        # BUGFIX: original read kwargs['tags'['forward']], which indexes the
        # string literal 'tags' with 'forward' and raises TypeError; index the
        # tags dict instead, as the combined branch above already does.
        _handler = TagsEventHandler(queue=kwargs['queue'],
                                    fields=kwargs['fields'],
                                    tag_head_pattern=kwargs['tags']['forward'])
    else:
        _handler = SimpleEventHandler(queue=kwargs['queue'], fields=kwargs['fields'])
    _notifier = pyinotify.ThreadedNotifier(_wm, _handler)
    _notifier.daemon = True
    _wm.add_watch(path=kwargs['paths'], mask=pyinotify.IN_MODIFY, rec=True, do_glob=True, quiet=True)
    return _notifier
def processing_productors(datas, queues):
    """Spawn one inotify monitor per configured prospector and keep them alive.

    datas  -- list of dicts parsed from filebeat.yml, each carrying
              'prospectors' and 'output' sections
    queues -- parallel list of queue descriptors, one per entry in datas
              (NOTE(review): main() builds these as {'queue','kafka','topic'}
              dicts, not bare Queues -- confirm the handlers expect that shape)

    Blocks forever; on any exception or interrupt it stops every monitor
    thread and re-raises.
    """
    jobs = [start_a_monitor(queue=queue, **data['prospectors'], **data['output'])
            for data, queue in zip(datas, queues)]
    for job in jobs:
        # BUGFIX: original used format(print(job)), which logs 'None' (print
        # returns None) and writes the job to stdout; log the job itself.
        logger.info('Get monitor thread info {}'.format(job))
        job.start()
    while True:
        try:
            time.sleep(10)
        # Explicit spelling of the former bare `except:` -- still catches
        # KeyboardInterrupt/SystemExit so the monitors get stopped, then
        # re-raises for the parent process to see.
        except BaseException:
            for job in jobs:
                job.stop()
            raise
def processing_consumers(_queues):
    # Run the Kafka producer side, draining every queue descriptor in _queues.
    # NOTE(review): `.start` has no call parentheses -- this only does work if
    # KafkaProductor.start is a property (cf. the parenthesis-free `.run` on
    # BeatsConfParse in main()); confirm, otherwise this line is a no-op
    # attribute access.
    KafkaProductor(_queues).start
@daemon_init
def main():
    """Daemonize, then run the producer (inotify monitors) and consumer
    (Kafka shipper) processes described by the filebeat-style YAML config."""
    yaml = BeatsConfParse(CONF).run
    # One queue descriptor per configured output section.
    queues = [{'queue': Queue(), 'kafka': x['output']['bootstrap_server'], 'topic': x['output']['topic']} for x in yaml]
    productors = Process(name="productors", target=processing_productors, args=(yaml, queues))
    # BUGFIX: original had args=(queues), which is just `queues` -- Process
    # would unpack the list into separate positional arguments and crash with
    # TypeError for any config with more or fewer than one entry; a
    # one-element tuple is required.
    # NOTE(review): process name "comsumers" is misspelled; kept byte-for-byte
    # since it is a runtime string (shows up in process titles/logs).
    consumers = Process(name="comsumers", target=processing_consumers, args=(queues,))
    productors.start()
    consumers.start()
    productors.join()
    consumers.join()


if __name__ == '__main__':
    main()