#!/usr/bin/env python
# coding=utf-8

__author__ = "TrackMe Limited"
__copyright__ = "Copyright 2022-2026, TrackMe Limited, U.K."
__credits__ = "TrackMe Limited, U.K."
__license__ = "TrackMe Limited, all rights reserved"
__version__ = "0.1.0"
__maintainer__ = "TrackMe Limited, U.K."
__email__ = "support@trackme-solutions.com"
__status__ = "PRODUCTION"

# Standard library imports
import ast
import json
import logging
from logging.handlers import RotatingFileHandler
import os
import sys
import time

# Third-party imports
import urllib3

# Disable InsecureRequestWarning
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# splunk home
splunkhome = os.environ["SPLUNK_HOME"]

# set logging
filehandler = RotatingFileHandler(
    "%s/var/log/splunk/trackme_extract_splk_dhm.log" % splunkhome,
    mode="a",
    maxBytes=10000000,
    backupCount=1,
)
formatter = logging.Formatter(
    "%(asctime)s %(levelname)s %(filename)s %(funcName)s %(lineno)d %(message)s"
)
logging.Formatter.converter = time.gmtime
filehandler.setFormatter(formatter)
log = logging.getLogger()  # root logger - Good to get it only once.
for hdlr in log.handlers[:]:  # remove the existing file handlers
    if isinstance(hdlr, logging.FileHandler):
        log.removeHandler(hdlr)
log.addHandler(filehandler)  # set the new handler
# set the log level to INFO by default; the effective level is re-applied
# at runtime in stream() from the TrackMe configuration
log.setLevel(logging.INFO)

# append current directory
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# import libs
import import_declare_test

# import Splunk libs
from splunklib.searchcommands import (
    dispatch,
    StreamingCommand,
    Configuration,
    Option,
    validators,
)

# Import trackme libs
from trackme_libs import trackme_reqinfo, trackme_idx_for_tenant

# Import trackme libs for feeds
from trackme_libs_splk_feeds import trackme_splk_dhm_gen_metrics


@Configuration(distributed=False)
class TrackMeMergeSplkDhm(StreamingCommand):

    mode = Option(
        doc="""
        **Syntax:** **mode=****
        **Description:** Specify the metric details output mode, valid options are minimal|compact|full|all.""",
        require=False,
        default="minimal",
        validate=validators.Match("mode", r"^(minimal|compact|full|all)$"),
    )

    field_current = Option(
        doc="""
        **Syntax:** **field_current=****
        **Description:** The name of the field containing the current object dictionary.""",
        require=True,
    )

    tenant_id = Option(
        doc="""
        **Syntax:** **tenant_id=****
        **Description:** The tenant identifier, only used with gen_metrics=True.""",
        require=False,
        default=None,
    )

    gen_metrics = Option(
        doc="""
        **Syntax:** **gen_metrics=****
        **Description:** Generate and index metrics details.""",
        require=False,
        default=False,
        validate=validators.Match("gen_metrics", r"^(True|False)$"),
    )
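
    # Illustrative SPL usage (a sketch, not authoritative): the search command
    # name is defined in commands.conf and is assumed here to be
    # trackmeextractsplkdhm, matching the termination log message in stream();
    # the field name current_state_dict is a hypothetical example.
    #
    # | trackmeextractsplkdhm field_current="current_state_dict" mode="compact" tenant_id="mytenant" gen_metrics="True"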

    def stream(self, records):
        # Start performance counter
        start = time.time()

        # Get request info and set logging level
        reqinfo = trackme_reqinfo(
            self._metadata.searchinfo.session_key, self._metadata.searchinfo.splunkd_uri
        )
        log.setLevel(reqinfo["logging_level"])

        # Resolve the tenant indexes only when metrics generation is requested
        if self.gen_metrics == "True" and self.tenant_id:
            tenant_indexes = trackme_idx_for_tenant(
                self._metadata.searchinfo.session_key,
                self._metadata.searchinfo.splunkd_uri,
                self.tenant_id,
            )
        else:
            tenant_indexes = None

        # records_metrics
        records_metrics = []

        # Iterate through records
        for subrecord in records:

            # retrieve host_idx_blocklists, host_st_blocklists
            host_idx_blocklists = subrecord.get("host_idx_blocklists", [])
            host_st_blocklists = subrecord.get("host_st_blocklists", [])

            # if a string was provided, turn the comma separated value into a list
            if isinstance(host_idx_blocklists, str):
                host_idx_blocklists = host_idx_blocklists.split(",")
            if isinstance(host_st_blocklists, str):
                host_st_blocklists = host_st_blocklists.split(",")

            # Try to parse current_dict
            try:
                current_dict = ast.literal_eval(subrecord[self.field_current])
            except Exception:
                current_dict = None
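
            # For reference, current_dict maps an entity identifier to its
            # per index/sourcetype state; an illustrative entry, with keys
            # inferred from the accesses below and hypothetical values:
            #
            # {
            #     "netops:linux_secure": {
            #         "idx": "netops",
            #         "st": "linux_secure",
            #         "first_time": "1700000000",
            #         "last_time": "1700003600",
            #         "last_ingest_lag": 42,
            #         "last_event_lag": 12,
            #         "time_measure": "1700003660",
            #         "last_ingest": "1700003650",
            #         "last_eventcount": 1234,
            #         "max_lag_allowed": 3600,
            #         "max_delay_allowed": 3600,
            #         "state": "green",
            #     }
            # }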

            # ensure we have a value for object_category (splk-dhm)
            subrecord["object_category"] = "splk-dhm"

            # build the new raw from the record, removing data that is no
            # longer needed (note: rawdict aliases subrecord rather than
            # copying it)
            rawdict = subrecord

            # remove fields not needed any longer
            try:
                del rawdict[self.field_current]
            except Exception:
                pass

            # If current_dict is not None, process it
            if current_dict:

                # handle blocklists, if any: drop entities whose index or
                # sourcetype is blocklisted
                current_dict = {
                    key: val
                    for key, val in current_dict.items()
                    if val["idx"] not in host_idx_blocklists
                    and val["st"] not in host_st_blocklists
                }

                # Process records in full mode
                if self.mode == "full":
                    # Create new_dict with the required fields for full mode
                    new_dict = {
                        p_id: {
                            "summary_idx": p_info["idx"],
                            "summary_st": p_info["st"],
                            "summary_first_time": time.strftime(
                                "%d %b %Y %H:%M:%S",
                                time.localtime(int(float(p_info["first_time"]))),
                            ),
                            "summary_last_time": time.strftime(
                                "%d %b %Y %H:%M:%S",
                                time.localtime(int(float(p_info["last_time"]))),
                            ),
                            "summary_last_ingest_lag": p_info["last_ingest_lag"],
                            "summary_last_event_lag": p_info["last_event_lag"],
                            "summary_time_measure": time.strftime(
                                "%d %b %Y %H:%M:%S",
                                time.localtime(int(float(p_info["time_measure"]))),
                            ),
                            "summary_last_ingest": time.strftime(
                                "%d %b %Y %H:%M:%S",
                                time.localtime(int(float(p_info["last_ingest"]))),
                            ),
                            "summary_last_eventcount": p_info["last_eventcount"],
                            "summary_max_lag_allowed": p_info["max_lag_allowed"],
                            "summary_max_delay_allowed": p_info["max_delay_allowed"],
                            "state": p_info["state"],
                        }
                        for p_id, p_info in current_dict.items()
                    }

                # Process records in compact mode
                elif self.mode == "compact":
                    # Create new_dict with the required fields for compact mode
                    new_dict = {
                        p_id: {
                            "summary": f"idx:{p_info['idx']} | st:{p_info['st']} | last event:{time.strftime('%d %b %Y %H:%M:%S', time.localtime(int(float(p_info['last_time']))))} | last lag/delay:{p_info['last_ingest_lag']}/{p_info['last_event_lag']} | max lag/delay:{p_info['max_lag_allowed']}/{p_info['max_delay_allowed']} | state:{p_info['state']}"
                        }
                        for p_id, p_info in current_dict.items()
                    }
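
                    # A rendered "summary" value looks like this (illustrative
                    # values):
                    # idx:netops | st:linux_secure | last event:05 Jan 2024 10:42:00 | last lag/delay:42/12 | max lag/delay:3600/3600 | state:green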

                # Process records in minimal mode
                elif self.mode == "minimal":
                    # counters
                    count_green = 0
                    count_red = 0

                    # count entities per state for the minimal summary
                    for p_id, p_info in current_dict.items():
                        if p_info["state"] == "green":
                            count_green += 1
                        elif p_info["state"] == "red":
                            count_red += 1

                    # name the result new_dict so the shared yield below can
                    # serialize it
                    new_dict = {
                        "green": count_green,
                        "red": count_red,
                    }
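
                    # The minimal summary serializes downstream as, e.g.
                    # (illustrative counts): {"green": 42, "red": 3}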

                # process all
                elif self.mode == "all":
                    # counters
                    count_green = 0
                    count_red = 0

                    # Create new_dict_full with the required fields for full mode
                    new_dict_full = {
                        p_id: {
                            "summary_idx": p_info["idx"],
                            "summary_st": p_info["st"],
                            "summary_first_time": time.strftime(
                                "%d %b %Y %H:%M:%S",
                                time.localtime(int(float(p_info["first_time"]))),
                            ),
                            "summary_last_time": time.strftime(
                                "%d %b %Y %H:%M:%S",
                                time.localtime(int(float(p_info["last_time"]))),
                            ),
                            "summary_last_ingest_lag": p_info["last_ingest_lag"],
                            "summary_last_event_lag": p_info["last_event_lag"],
                            "summary_time_measure": time.strftime(
                                "%d %b %Y %H:%M:%S",
                                time.localtime(int(float(p_info["time_measure"]))),
                            ),
                            "summary_last_ingest": time.strftime(
                                "%d %b %Y %H:%M:%S",
                                time.localtime(int(float(p_info["last_ingest"]))),
                            ),
                            "summary_last_eventcount": p_info["last_eventcount"],
                            "summary_max_lag_allowed": p_info["max_lag_allowed"],
                            "summary_max_delay_allowed": p_info["max_delay_allowed"],
                            "state": p_info["state"],
                        }
                        for p_id, p_info in current_dict.items()
                    }

                    # Create new_dict_minimal
                    for p_id, p_info in current_dict.items():
                        if p_info["state"] == "green":
                            count_green += 1
                        elif p_info["state"] == "red":
                            count_red += 1

                    new_dict_minimal = {
                        "green": count_green,
                        "red": count_red,
                    }

                    # Create new_dict_compact with the required fields for compact mode
                    new_dict_compact = {
                        p_id: {
                            "summary": f"idx:{p_info['idx']} | st:{p_info['st']} | last event:{time.strftime('%d %b %Y %H:%M:%S', time.localtime(int(float(p_info['last_time']))))} | last lag/delay:{p_info['last_ingest_lag']}/{p_info['last_event_lag']} | max lag/delay:{p_info['max_lag_allowed']}/{p_info['max_delay_allowed']} | state:{p_info['state']}"
                        }
                        for p_id, p_info in current_dict.items()
                    }

                # Yield the processed record
                if self.mode != "all":
                    yield {
                        "_time": time.time(),
                        "splk_dhm_st_summary": json.dumps(new_dict, indent=1),
                        "_raw": rawdict,
                    }
                else:
                    yield {
                        "_time": time.time(),
                        "splk_dhm_st_summary": current_dict,
                        "splk_dhm_st_summary_full": json.dumps(new_dict_full, indent=1),
                        "splk_dhm_st_summary_compact": json.dumps(
                            new_dict_compact, indent=1
                        ),
                        "splk_dhm_st_summary_minimal": json.dumps(
                            new_dict_minimal, indent=0
                        ),
                        "_raw": rawdict,
                    }

                # Collect metrics for this record (gen_metrics is a string
                # matching ^(True|False)$, so test its value explicitly
                # rather than its truthiness)
                if self.gen_metrics == "True":
                    records_metrics.append(
                        {
                            "object": subrecord.get("object"),
                            "object_id": subrecord.get("key"),
                            "object_category": "splk-dhm",
                            "alias": subrecord.get("alias"),
                            "metrics_dict": current_dict,
                        }
                    )
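
                    # An illustrative entry appended above (hypothetical values):
                    # {
                    #     "object": "my_host",
                    #     "object_id": "d41d8cd9...",
                    #     "object_category": "splk-dhm",
                    #     "alias": "my_host",
                    #     "metrics_dict": {...},
                    # }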

            # handle empty current_dict
            else:
                if self.mode != "all":
                    yield {
                        "_time": time.time(),
                        "splk_dhm_st_summary": {},
                        "_raw": rawdict,
                    }
                else:
                    yield {
                        "_time": time.time(),
                        "splk_dhm_st_summary": {},
                        "splk_dhm_st_summary_full": {},
                        "splk_dhm_st_summary_compact": {},
                        "splk_dhm_st_summary_minimal": {},
                        "_raw": rawdict,
                    }

        # call the gen metrics function (tenant_indexes is None when no
        # tenant_id was provided, in which case metrics cannot be indexed)
        if self.gen_metrics == "True" and tenant_indexes:
            if records_metrics:
                metrics_gen_start = time.time()
                try:
                    gen_metrics_response = trackme_splk_dhm_gen_metrics(
                        self.tenant_id,
                        tenant_indexes.get("trackme_metric_idx"),
                        records_metrics,
                    )
                    logging.info(
                        f'context="gen_metrics", tenant_id="{self.tenant_id}", function trackme_splk_dhm_gen_metrics success {gen_metrics_response}, run_time={round(time.time()-metrics_gen_start, 3)}, no_entities={len(records_metrics)}'
                    )
                except Exception as e:
                    logging.error(
                        f'context="gen_metrics", tenant_id="{self.tenant_id}", function trackme_splk_dhm_gen_metrics failed, tenant_indexes="{tenant_indexes}", records_metrics="{records_metrics}", exception {str(e)}'
                    )
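
        # An example success log line, assuming the hypothetical tenant used
        # earlier (illustrative values):
        # context="gen_metrics", tenant_id="mytenant", function trackme_splk_dhm_gen_metrics success {...}, run_time=0.125, no_entities=50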

        # Log the run time
        logging.info(
            f"trackmeextractsplkdhm has terminated, run_time={round(time.time() - start, 3)}"
        )


dispatch(TrackMeMergeSplkDhm, sys.argv, sys.stdin, sys.stdout, __name__)