feat: prepare modules integration
Signed-off-by: djerfy <[email protected]>
djerfy committed Jan 10, 2024
1 parent f353a0e commit ffb7ec6
Showing 7 changed files with 140 additions and 10 deletions.
12 changes: 8 additions & 4 deletions src/config.yaml
@@ -45,24 +45,28 @@ monitoring:
     labels:
       include: []
       exclude: []
-  # optional
+  # openebs
   openebs:
     enabled: False
     engine: cstor
     labels:
       include: []
       exclude: []
+  # velero
   velero:
-    enabled: True
+    enabled: False
     labels:
       include: []
       exclude: []
+  # trivy
   trivy:
-    enabled: True
+    enabled: False
     labels:
       include: []
       exclude: []
+  # certificates
   certs:
-    enabled: True
+    enabled: False
     labels:
       include: []
       exclude: []
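For context, each of these flags gates whether the corresponding module is scheduled at startup. A minimal sketch (not part of the commit) of reading the new flags with PyYAML; the file path and the loop are assumptions, the key paths match the diff above:

import yaml

# Sketch only: load the configuration and inspect the new module switches.
with open("src/config.yaml") as f:
    config = yaml.safe_load(f)

# zabbix-kubernetes-discovery.py only registers a module's discovery/item jobs
# when its "enabled" flag is True (see the scheduler diff further down).
for module in ("openebs", "velero", "trivy", "certs"):
    print(module, config['monitoring'][module]['enabled'])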
11 changes: 11 additions & 0 deletions src/modules/common/functions.py
@@ -20,3 +20,14 @@ def matchLabels(match_labels=None, object_labels=None):
                return True

    return False

def rawObjects(data=None):
    """
    description: extract the objects ("items" list) from a raw API response dict
    return: list
    """
    if not data:
        return []

    for key, value in data.items():
        if key == "items":
            return value

    return []
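A quick illustration of what rawObjects() is for (a sketch, not part of the commit): the raw CustomObjectsApi calls return a plain dict whose "items" key holds the object list, and the helper simply unwraps it. The payload below mimics that shape; its values are invented.

from modules.common.functions import rawObjects

# Sketch only: fake response in the shape returned by
# CustomObjectsApi.list_cluster_custom_object().
payload = {
    "apiVersion": "cstor.openebs.io/v1",
    "kind": "CStorPoolClusterList",
    "items": [{"metadata": {"name": "cstor-pool", "namespace": "openebs"}}],
}

print(rawObjects(payload))  # -> [{'metadata': {'name': 'cstor-pool', 'namespace': 'openebs'}}]
print(rawObjects({}))       # -> [] (empty input, or a dict without an "items" key)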
1 change: 1 addition & 0 deletions src/modules/kubernetes/openebs/__init__.py
@@ -0,0 +1 @@
from modules.kubernetes.openebs.cstorpoolclusters import openebsGetCstorpoolclusters
88 changes: 88 additions & 0 deletions src/modules/kubernetes/openebs/cstorpoolclusters.py
@@ -0,0 +1,88 @@
from kubernetes import client
from pyzabbix import ZabbixMetric
from modules.common.functions import *
import json, urllib3

urllib3.disable_warnings()

def openebsGetCstorpoolclusters(config=None):
    """
    description: get cstorpoolclusters data
    return: list
    """
    kubernetes = client.CustomObjectsApi()

    cstorpoolclusters = []

    for cstorpoolcluster in rawObjects(kubernetes.list_cluster_custom_object(group="cstor.openebs.io", version="v1", plural="cstorpoolclusters")):
        output = {
            "name": cstorpoolcluster['metadata']['name'],
            "namespace": cstorpoolcluster['metadata']['namespace'],
            "instances": {
                "desired": cstorpoolcluster['status']['desiredInstances'],
                "healthy": cstorpoolcluster['status']['healthyInstances'],
                "provisioned": cstorpoolcluster['status']['provisionedInstances']
            },
            "version": {
                "desired": cstorpoolcluster['status']['versionDetails']['desired'],
                "current": cstorpoolcluster['status']['versionDetails']['status']['current']
            }
        }

        if matchLabels(config['labels']['exclude'], cstorpoolcluster['metadata'].get('labels', {})):
            continue

        if config['labels']['include'] != []:
            if not matchLabels(config['labels']['include'], cstorpoolcluster['metadata'].get('labels', {})):
                continue

        if any(c['name'] == output['name'] and c['namespace'] == output['namespace'] for c in cstorpoolclusters):
            continue

        cstorpoolclusters.append(output)

    return cstorpoolclusters

def ZabbixDiscoveryCstorpoolclusters(clustername, cstorpoolclusters=[]):
    """
    description: create a discovery for cstorpoolclusters, per namespace
    return: class ZabbixMetric
    """
    discovery = {"data": []}

    for cstorpoolcluster in cstorpoolclusters:
        output = {
            "{#KUBERNETES_OPENEBS_CSTORPOOLCLUSTER_NAMESPACE}": cstorpoolcluster['namespace'],
            "{#KUBERNETES_OPENEBS_CSTORPOOLCLUSTER_NAME}": cstorpoolcluster['name']}
        discovery['data'].append(output)

    sender = [ZabbixMetric(clustername, "kubernetes.openebs.cstorpoolclusters.discovery", json.dumps(discovery))]

    return sender

def ZabbixItemCstorpoolclusters(clustername, cstorpoolclusters=[]):
    """
    description: create an item for cstorpoolclusters, per namespace
    return: class ZabbixMetric
    """
    sender = []

    for cstorpoolcluster in cstorpoolclusters:
        sender.append(ZabbixMetric(clustername, f"kubernetes.openebs.cstorpoolclusters.desiredInstances[{cstorpoolcluster['namespace']},{cstorpoolcluster['name']}]", cstorpoolcluster['instances']['desired']))
        sender.append(ZabbixMetric(clustername, f"kubernetes.openebs.cstorpoolclusters.healthyInstances[{cstorpoolcluster['namespace']},{cstorpoolcluster['name']}]", cstorpoolcluster['instances']['healthy']))
        sender.append(ZabbixMetric(clustername, f"kubernetes.openebs.cstorpoolclusters.provisionedInstances[{cstorpoolcluster['namespace']},{cstorpoolcluster['name']}]", cstorpoolcluster['instances']['provisioned']))
        sender.append(ZabbixMetric(clustername, f"kubernetes.openebs.cstorpoolclusters.desiredVersion[{cstorpoolcluster['namespace']},{cstorpoolcluster['name']}]", cstorpoolcluster['version']['desired']))
        sender.append(ZabbixMetric(clustername, f"kubernetes.openebs.cstorpoolclusters.currentVersion[{cstorpoolcluster['namespace']},{cstorpoolcluster['name']}]", cstorpoolcluster['version']['current']))

    return sender

def baseOpenebsCstorpoolclusters(mode=None, config=None):
    """
    description: monitoring openebs cstorpoolclusters
    return: class ZabbixMetric
    """
    if mode == "discovery":
        return ZabbixDiscoveryCstorpoolclusters(config['kubernetes']['name'], openebsGetCstorpoolclusters(config['monitoring']['openebs']))
    if mode == "item":
        return ZabbixItemCstorpoolclusters(config['kubernetes']['name'], openebsGetCstorpoolclusters(config['monitoring']['openebs']))
    return []
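To make the data flow concrete, here is a usage sketch (not part of the commit) that feeds ZabbixDiscoveryCstorpoolclusters() a hand-built entry in the same shape openebsGetCstorpoolclusters() produces; the cluster, namespace, and version values are invented.

import json
from modules.kubernetes.openebs.cstorpoolclusters import ZabbixDiscoveryCstorpoolclusters

# Sketch only: one fake CStorPoolCluster entry.
fake = [{
    "name": "cstor-pool",
    "namespace": "openebs",
    "instances": {"desired": 3, "healthy": 3, "provisioned": 3},
    "version": {"desired": "3.4.0", "current": "3.4.0"},
}]

metrics = ZabbixDiscoveryCstorpoolclusters("my-cluster", fake)
# One ZabbixMetric on key kubernetes.openebs.cstorpoolclusters.discovery whose
# value is the low-level discovery JSON:
# {"data": [{"{#KUBERNETES_OPENEBS_CSTORPOOLCLUSTER_NAMESPACE}": "openebs",
#            "{#KUBERNETES_OPENEBS_CSTORPOOLCLUSTER_NAME}": "cstor-pool"}]}
print(json.loads(metrics[0].value))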
1 change: 1 addition & 0 deletions src/modules/kubernetes/trivy/__init__.py
@@ -0,0 +1 @@
from modules.kubernetes.trivy.vulnerabilityreports import trivyGetVulnerabilityreports
19 changes: 19 additions & 0 deletions src/modules/kubernetes/trivy/vulnerabilityreports.py
@@ -0,0 +1,19 @@
from kubernetes import client
from pyzabbix import ZabbixMetric
from modules.common.functions import *
import json, urllib3

urllib3.disable_warnings()

def trivyGetVulnerabilityreports(config=None):
    """
    description: get vulnerabilityreports data
    return: list
    """
    kubernetes = client.CustomObjectsApi()

    reports = []

    for vuln in rawObjects(kubernetes.list_cluster_custom_object(group="aquasecurity.github.io", version="v1alpha1", plural="vulnerabilityreports")):
        print(vuln['metadata']['name'])
        print(vuln['report']['summary'])
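The Trivy module is only stubbed out in this commit: it prints each report rather than returning the list promised by the docstring, and it is not yet wired into the scheduler. One plausible next step, mirroring the OpenEBS module, would be to collect the per-report summary counts. The sketch below is hypothetical, not the author's implementation; the summary field names (criticalCount, highCount, mediumCount, lowCount) are assumed from the Trivy Operator VulnerabilityReport CRD.

# Hypothetical sketch only: summarize one vulnerabilityreport dict as returned
# by the raw custom-objects API; field names are assumptions, not from this commit.
def summarizeVulnerabilityreport(vuln):
    summary = vuln.get('report', {}).get('summary', {})
    return {
        "name": vuln['metadata']['name'],
        "namespace": vuln['metadata'].get('namespace'),
        "critical": summary.get('criticalCount', 0),
        "high": summary.get('highCount', 0),
        "medium": summary.get('mediumCount', 0),
        "low": summary.get('lowCount', 0),
    }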
18 changes: 12 additions & 6 deletions src/zabbix-kubernetes-discovery.py
@@ -6,6 +6,7 @@
 from kubernetes import config as kube_config
 from pyzabbix import ZabbixSender
 from modules.kubernetes.base import *
+from modules.kubernetes.openebs import *

 parser = argparse.ArgumentParser()
 parser.add_argument("--config-file", dest="config_file", action="store", required=False, help="Configuration file (default: config.yaml)", default="config.yaml")
@@ -56,36 +57,41 @@ def mainThread(func):
 if __name__ == "__main__":
     logging.info("Application zabbix-kubernetes-discovery started")

-    # cronjobs
+    # cronjobs (base)
     if config['monitoring']['cronjobs']['enabled']:
         schedule.every(config['zabbix']['schedule']['discovery']).seconds.do(mainThread, lambda: mainSend(baseCronjobs(mode="discovery", config=config)))
         schedule.every(config['zabbix']['schedule']['items']).seconds.do(mainThread, lambda: mainSend(baseCronjobs(mode="item", config=config)))

-    # daemonsets
+    # daemonsets (base)
     if config['monitoring']['daemonsets']['enabled']:
         schedule.every(config['zabbix']['schedule']['discovery']).seconds.do(mainThread, lambda: mainSend(baseDaemonsets(mode="discovery", config=config)))
         schedule.every(config['zabbix']['schedule']['items']).seconds.do(mainThread, lambda: mainSend(baseDaemonsets(mode="item", config=config)))

-    # deployments
+    # deployments (base)
     if config['monitoring']['deployments']['enabled']:
         schedule.every(config['zabbix']['schedule']['discovery']).seconds.do(mainThread, lambda: mainSend(baseDeployments(mode="discovery", config=config)))
         schedule.every(config['zabbix']['schedule']['items']).seconds.do(mainThread, lambda: mainSend(baseDeployments(mode="item", config=config)))

-    # nodes
+    # nodes (base)
     if config['monitoring']['nodes']['enabled']:
         schedule.every(config['zabbix']['schedule']['discovery']).seconds.do(mainThread, lambda: mainSend(baseNodes(mode="discovery", config=config)))
         schedule.every(config['zabbix']['schedule']['items']).seconds.do(mainThread, lambda: mainSend(baseNodes(mode="item", config=config)))

-    # statefulsets
+    # statefulsets (base)
     if config['monitoring']['statefulsets']['enabled']:
         schedule.every(config['zabbix']['schedule']['discovery']).seconds.do(mainThread, lambda: mainSend(baseStatefulsets(mode="discovery", config=config)))
         schedule.every(config['zabbix']['schedule']['items']).seconds.do(mainThread, lambda: mainSend(baseStatefulsets(mode="item", config=config)))

-    # volumes
+    # volumes (base)
     if config['monitoring']['volumes']['enabled']:
         schedule.every(config['zabbix']['schedule']['discovery']).seconds.do(mainThread, lambda: mainSend(baseVolumes(mode="discovery", config=config)))
         schedule.every(config['zabbix']['schedule']['items']).seconds.do(mainThread, lambda: mainSend(baseVolumes(mode="item", config=config)))

+    # cstorpoolclusters (openebs)
+    if config['monitoring']['openebs']['enabled']:
+        schedule.every(config['zabbix']['schedule']['discovery']).seconds.do(mainThread, lambda: mainSend(baseOpenebsCstorpoolclusters(mode="discovery", config=config)))
+        schedule.every(config['zabbix']['schedule']['items']).seconds.do(mainThread, lambda: mainSend(baseOpenebsCstorpoolclusters(mode="item", config=config)))
+
     # tasks
     while True:
         schedule.run_pending()
