You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
Splunk_Deploiement/apps/trackme/bin/trackmeextractsplkmhm.py

327 lines
12 KiB

#!/usr/bin/env python
# coding=utf-8
__author__ = "TrackMe Limited"
__copyright__ = "Copyright 2022-2026, TrackMe Limited, U.K."
__credits__ = "TrackMe Limited, U.K."
__license__ = "TrackMe Limited, all rights reserved"
__version__ = "0.1.0"
__maintainer__ = "TrackMe Limited, U.K."
__email__ = "support@trackme-solutions.com"
__status__ = "PRODUCTION"
# Standard library imports
import ast
import json
import logging
from logging.handlers import RotatingFileHandler
import os
import sys
import time
from collections import OrderedDict
# Third-party imports
import urllib3
# Disable InsecureRequestWarning
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# splunk home
splunkhome = os.environ["SPLUNK_HOME"]
# set logging
filehandler = RotatingFileHandler(
"%s/var/log/splunk/trackme_extract_splk_mhm.log" % splunkhome,
mode="a",
maxBytes=10000000,
backupCount=1,
)
formatter = logging.Formatter(
"%(asctime)s %(levelname)s %(filename)s %(funcName)s %(lineno)d %(message)s"
)
logging.Formatter.converter = time.gmtime
filehandler.setFormatter(formatter)
log = logging.getLogger() # root logger - Good to get it only once.
for hdlr in log.handlers[:]: # remove the existing file handlers
if isinstance(hdlr, logging.FileHandler):
log.removeHandler(hdlr)
log.addHandler(filehandler) # set the new handler
# set the log level to INFO, DEBUG as the default is ERROR
log.setLevel(logging.INFO)
# append current directory
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
# import libs
import import_declare_test
# import Splunk libs
from splunklib.searchcommands import (
dispatch,
StreamingCommand,
Configuration,
Option,
validators,
)
# Import trackme libs
from trackme_libs import trackme_reqinfo, trackme_idx_for_tenant
# Import trackme libs for feeds
from trackme_libs_splk_feeds import trackme_splk_mhm_gen_metrics
@Configuration(distributed=False)
class TrackMeMergeSplkDhm(StreamingCommand):
mode = Option(
doc="""
**Syntax:** **mode=****
**Description:** Specify the metric details output mode, valid options are minimal|compact|full|all .""",
require=False,
default="minimal",
validate=validators.Match("mode", r"^(minimal|compact|full|all)$"),
)
field_current = Option(
doc="""
**Syntax:** **field_current=****
**Description:** field name containing the current object dictionnary.""",
require=True,
)
tenant_id = Option(
doc="""
**Syntax:** **tenant_id=****
**Description:** The tenant identifier, only used with gen_metrics=True.""",
require=False,
default=None,
)
gen_metrics = Option(
doc="""
**Syntax:** **gen_metrics=****
**Description:** Generate and index metrics details.""",
require=False,
default=False,
validate=validators.Match("gen_metrics", r"^(True|False)$"),
)
def stream(self, records):
# Start performance counter
start = time.time()
# Get request info and set logging level
reqinfo = trackme_reqinfo(
self._metadata.searchinfo.session_key, self._metadata.searchinfo.splunkd_uri
)
log.setLevel(reqinfo["logging_level"])
if self.gen_metrics == "True" and self.tenant_id:
tenant_indexes = trackme_idx_for_tenant(
self._metadata.searchinfo.session_key,
self._metadata.searchinfo.splunkd_uri,
self.tenant_id,
)
else:
tenant_indexes = None
# records_metrics
records_metrics = []
# Iterate through records
for subrecord in records:
# Attempt to get the current_dict
try:
current_dict = ast.literal_eval(subrecord[self.field_current])
except Exception as e:
log.warning(f"Failed to parse current_dict, exception: {e}")
current_dict = None
# ensure we have a value for object_category (splk-mhm)
subrecord["object_category"] = "splk-mhm"
# handle the raw records
rawdict = subrecord
# remove fields not needed any longer
try:
del rawdict[self.field_current]
except Exception as e:
pass
# If we have current_dict
if current_dict:
# Full mode
if self.mode == "full":
# Create new_dict with the required fields for full mode
new_dict = {
p_id: {
"summary_idx": p_info["idx"],
"summary_metric_category": p_info["metric_category"],
"summary_last_time": time.strftime(
"%d %b %Y %H:%M:%S",
time.localtime(int(float(p_info["last_time"]))),
),
"summary_last_metric_lag": p_info["last_metric_lag"],
"summary_time_measure": time.strftime(
"%d %b %Y %H:%M:%S",
time.localtime(int(float(p_info["time_measure"]))),
),
"summary_max_lag_allowed": p_info["lag_allowed"],
"state": p_info["state"],
}
for p_id, p_info in current_dict.items()
}
# Compact mode
elif self.mode == "compact":
# Create new_dict with the required fields for compact mode
new_dict = {
p_id: {
"summary": f"idx:{p_info['idx']} | last:{time.strftime('%d %b %Y %H:%M:%S', time.localtime(int(float(p_info['last_time']))))} | max:{p_info['lag_allowed']} | state:{p_info['state']}"
}
for p_id, p_info in current_dict.items()
}
# Minimal mode
elif self.mode == "minimal":
# counters
count_green = 0
count_red = 0
# Create new_dict with the required fields for minimal mode
for p_id, p_info in current_dict.items():
if p_info["state"] == "green":
count_green += 1
elif p_info["state"] == "red":
count_red += 1
new_dict_minimal = {
"green": count_green,
"red": count_red,
}
# process both
elif self.mode == "all":
# counters
count_green = 0
count_red = 0
# Create new_dict with the required fields for full mode
new_dict_full = {
p_id: {
"summary_idx": p_info["idx"],
"summary_metric_category": p_info["metric_category"],
"summary_last_time": time.strftime(
"%d %b %Y %H:%M:%S",
time.localtime(int(float(p_info["last_time"]))),
),
"summary_last_metric_lag": p_info["last_metric_lag"],
"summary_time_measure": time.strftime(
"%d %b %Y %H:%M:%S",
time.localtime(int(float(p_info["time_measure"]))),
),
"summary_max_lag_allowed": p_info["lag_allowed"],
"state": p_info["state"],
}
for p_id, p_info in current_dict.items()
}
# Create new_dict_minimal
for p_id, p_info in current_dict.items():
if p_info["state"] == "green":
count_green += 1
elif p_info["state"] == "red":
count_red += 1
new_dict_minimal = {
"green": count_green,
"red": count_red,
}
# Create new_dict with the required fields for compact mode
new_dict_compact = {
p_id: {
"summary": f"idx:{p_info['idx']} | last:{time.strftime('%d %b %Y %H:%M:%S', time.localtime(int(float(p_info['last_time']))))} | max:{p_info['lag_allowed']} | state:{p_info['state']}"
}
for p_id, p_info in current_dict.items()
}
if self.mode != "all":
yield {
"_time": time.time(),
"metric_details": json.dumps(new_dict, indent=1),
"_raw": rawdict,
}
else:
yield {
"_time": time.time(),
"metric_details": current_dict,
"metric_details_minimal": json.dumps(
new_dict_minimal, indent=1
),
"metric_details_full": json.dumps(new_dict_full, indent=1),
"metric_details_compact": json.dumps(
new_dict_compact, indent=1
),
"_raw": rawdict,
}
# Generate metrics
if self.gen_metrics:
records_metrics.append(
{
"object": subrecord.get("object"),
"object_id": subrecord.get("key"),
"object_category": "splk-dhm",
"alias": subrecord.get("alias"),
"metrics_dict": current_dict,
}
)
# handle empty current_dict
else:
if self.mode != "all":
yield {
"_time": time.time(),
"metric_details": {},
"_raw": rawdict,
}
else:
yield {
"_time": time.time(),
"metric_details": {},
"metric_details_minimal": {},
"metric_details_full": {},
"metric_details_compact": {},
"_raw": rawdict,
}
# call the gen metrics function
if self.gen_metrics == "True":
metrics_gen_start = time.time()
if records_metrics:
try:
gen_metrics = trackme_splk_mhm_gen_metrics(
self.tenant_id,
tenant_indexes.get("trackme_metric_idx"),
records_metrics,
)
logging.info(
f'context="gen_metrics", tenant_id="{self.tenant_id}", function trackme_splk_mhm_gen_metrics success {gen_metrics}, run_time={round(time.time()-metrics_gen_start, 3)}, no_entities={len(records_metrics)}'
)
except Exception as e:
logging.error(
f'context="gen_metrics", tenant_id="{self.tenant_id}", function trackme_splk_mhm_gen_metrics failed, tenant_indexes="{tenant_indexes}", records_metrics="{records_metrics}", exception {str(e)}'
)
# Log the run time
logging.info(
f"trackmeextractsplkmhm has terminated, run_time={round(time.time() - start, 3)}"
)
dispatch(TrackMeMergeSplkDhm, sys.argv, sys.stdin, sys.stdout, __name__)