Skip to content

Commit 445daf3

Browse files
author
Anze
committed
SNMP sensors data retrieval and job scheduling
1 parent 7e02375 commit 445daf3

File tree

4 files changed

+243
-30
lines changed

4 files changed

+243
-30
lines changed

Pipfile

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,13 @@ url = "https://pypi.org/simple"
44
verify_ssl = true
55

66
[dev-packages]
7+
pylint = "*"
78

89
[packages]
910
requests = "*"
1011
python-dotenv = "*"
12+
apscheduler = "*"
13+
ansicolors = "*"
1114

1215
[requires]
1316
python_version = "3.6"

Pipfile.lock

Lines changed: 126 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

collector.py

Lines changed: 39 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,19 @@
11
import requests
2+
import logging
23

34

45
class Collector(object):
56
def __init__(self, backend_url, bot_token):
67
self.backend_url = backend_url
78
self.bot_token = bot_token
89

9-
def fetch_accounts_entities(self, protocol):
10+
def fetch_job_configs(self, protocol):
11+
"""
12+
Returns pairs (account_id, entity_info), where entity_info is everything needed for collecting data
13+
from the entity - credentials and list of sensors (with intervals) for selected protocol.
14+
The data is cleaned up as much as possible, so that it only contains the things necessary for collectors
15+
to do their job.
16+
"""
1017
# find all the accounts we have access to:
1118
r = requests.get('{}/accounts/?b={}'.format(self.backend_url, self.bot_token))
1219
if r.status_code != 200:
@@ -31,32 +38,45 @@ def fetch_accounts_entities(self, protocol):
3138
# make sure that the protocol is enabled on the entity:
3239
if protocol not in entity_info["protocols"]:
3340
continue
34-
# and hide all other protocols: (not strictly necessary, just cleaner)
35-
entity_info["protocols"] = {
36-
protocol: entity_info["protocols"][protocol]
37-
}
38-
41+
# and that credential is set:
42+
if not entity_info["protocols"][protocol]["credential"]:
43+
continue
44+
credential_id = entity_info["protocols"][protocol]["credential"]
45+
# and that there is at least one sensor enabled for this protocol:
3946
if not entity_info["protocols"][protocol]["sensors"]:
4047
continue
4148

42-
credential = None
43-
if entity_info["protocols"][protocol]["credential"]:
44-
credential_id = entity_info["protocols"][protocol]["credential"]
45-
r = requests.get('{}/accounts/{}/credentials/{}?b={}'.format(self.backend_url, account_id, credential_id, self.bot_token))
46-
if r.status_code != 200:
47-
raise Exception("Network error, got status {} while retrieving {}/accounts/{}/credentials/{}".format(r.status_code, self.backend_url, account_id, credential_id))
48-
credential = r.json()
49+
r = requests.get('{}/accounts/{}/credentials/{}?b={}'.format(self.backend_url, account_id, credential_id, self.bot_token))
50+
if r.status_code != 200:
51+
raise Exception("Network error, got status {} while retrieving {}/accounts/{}/credentials/{}".format(r.status_code, self.backend_url, account_id, credential_id))
52+
credential = r.json()
53+
entity_info["credential_details"] = credential["details"]
4954

5055
sensors = []
51-
for sensor in entity_info["protocols"][protocol]["sensors"]:
52-
r = requests.get('{}/accounts/{}/sensors/{}?b={}'.format(self.backend_url, account_id, sensor["sensor"], self.bot_token))
56+
for sensor_info in entity_info["protocols"][protocol]["sensors"]:
57+
sensor_id = sensor_info["sensor"]
58+
r = requests.get('{}/accounts/{}/sensors/{}?b={}'.format(self.backend_url, account_id, sensor_id, self.bot_token))
5359
if r.status_code != 200:
5460
raise Exception("Network error, got status {} while retrieving {}/accounts/{}/sensors/{}".format(r.status_code, self.backend_url, account_id, sensor["sensor"]))
61+
sensor = r.json()
62+
63+
# determine interval, since this part is generic:
64+
if sensor_info["interval"] is not None:
65+
interval = sensor_info["interval"]
66+
elif sensor["default_interval"] is not None:
67+
interval = sensor["default_interval"]
68+
else:
69+
logging.warn("Interval not set, ignoring sensor {} on entity {}!".format(sensor_id, entity_id))
70+
continue
71+
del sensor["default_interval"] # cleanup - nobody should need this anymore
72+
5573
sensors.append({
56-
"sensor": r.json(),
57-
"interval": sensor["interval"],
74+
"sensor_details": sensor["details"],
75+
"interval": interval,
5876
})
59-
entity_info["protocols"][protocol]["sensors"] = sensors
77+
# and hide all other protocols, saving just sensors for selected one: (not strictly necessary, just cleaner)
78+
entity_info["sensors"] = sensors
79+
del entity_info["protocols"]
6080

61-
yield account_id, entity_info, credential
81+
yield account_id, entity_info
6282

grafolean-worker-snmp.py

Lines changed: 75 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,92 @@
1+
from apscheduler.schedulers.blocking import BlockingScheduler
12
import os
3+
import dotenv
4+
import logging
5+
from pytz import utc
6+
from colors import color
27

38

49
from collector import Collector
510

611

12+
logging.basicConfig(format='%(asctime)s | %(levelname)s | %(message)s',
13+
datefmt='%Y-%m-%d %H:%M:%S', level=logging.DEBUG)
14+
logging.addLevelName(logging.DEBUG, color("DBG", 7))
15+
logging.addLevelName(logging.INFO, "INF")
16+
logging.addLevelName(logging.WARNING, color('WRN', fg='red'))
17+
logging.addLevelName(logging.ERROR, color('ERR', bg='red'))
18+
log = logging.getLogger("{}.{}".format(__name__, "base"))
19+
20+
721
class SNMPCollector(Collector):
822
def __init__(self, *args, **kwargs):
923
super().__init__(*args, **kwargs)
10-
self.init()
1124

12-
def init(self):
13-
for account_id, entity_info, credential in self.fetch_accounts_entities('snmp'):
14-
# print(account_id, entity_info, credential)
15-
import json
16-
print(json.dumps(entity_info))
25+
self.jobs_configs = []
26+
for account_id, entity_info in self.fetch_job_configs('snmp'):
27+
# convert entity_info into easy-to-use task definitions:
28+
for sensor_info in entity_info["sensors"]:
29+
self.jobs_configs.append({
30+
"account_id": account_id,
31+
"entity": entity_info["details"],
32+
"sensor": sensor_info["sensor_details"],
33+
"interval": sensor_info["interval"],
34+
"credential": entity_info["credential_details"],
35+
})
36+
37+
# >>> import json
38+
# >>> print(json.dumps(self.jobs_configs))
39+
# [
40+
# {
41+
# "account_id": 1,
42+
# "entity": {
43+
# "ipv4": "127.0.0.1"
44+
# },
45+
# "sensor": {
46+
# "oids": [
47+
# {
48+
# "oid": "1.3.6.1.4.1.2021.13.16.2.1.3",
49+
# "fetch_method": "get"
50+
# }
51+
# ],
52+
# "expression": "$1",
53+
# "output_path": "lm-sensors"
54+
# },
55+
# "interval": 30,
56+
# "credential": {
57+
# "version": "snmpv1",
58+
# "snmpv12_community": "public"
59+
# }
60+
# }
61+
# ]
62+
63+
@staticmethod
64+
def do_snmp(account_id, entity, sensor, interval, credential):
65+
log.info("Running job for account [{account_id}], IP [{ipv4}], OIDS: {oids}".format(
66+
account_id=account_id,
67+
ipv4=entity["ipv4"],
68+
oids=["SNMP{} {}".format(o["fetch_method"].upper(), o["oid"]) for o in sensor["oids"]],
69+
))
1770

1871
def execute(self):
19-
pass
72+
# initialize a scheduler:
73+
job_defaults = {
74+
'coalesce': True, # if multiple jobs "misfire", re-run only one instance of a missed job
75+
'max_instances': 1,
76+
}
77+
self.scheduler = BlockingScheduler(job_defaults=job_defaults, timezone=utc)
78+
79+
# apply config to scheduler:
80+
for job_config in self.jobs_configs:
81+
self.scheduler.add_job(SNMPCollector.do_snmp, 'interval', seconds=job_config["interval"], kwargs=job_config)
82+
83+
try:
84+
self.scheduler.start()
85+
except KeyboardInterrupt:
86+
log.info("Got exit signal, exiting.")
2087

2188
if __name__ == "__main__":
22-
from dotenv import load_dotenv
23-
load_dotenv()
89+
dotenv.load_dotenv()
2490

2591
backend_url = os.environ.get('BACKEND_URL')
2692
bot_token = os.environ.get('BOT_TOKEN')

0 commit comments

Comments
 (0)