#!/usr/bin/env python # coding=utf-8 __author__ = "TrackMe Limited" __copyright__ = "Copyright 2022-2026, TrackMe Limited, U.K." __credits__ = "TrackMe Limited, U.K." __license__ = "TrackMe Limited, all rights reserved" __version__ = "0.1.0" __maintainer__ = "TrackMe Limited, U.K." __email__ = "support@trackme-solutions.com" __status__ = "PRODUCTION" import os import sys import json from collections import OrderedDict import time import logging from logging.handlers import RotatingFileHandler from urllib.parse import urlencode import urllib.parse import urllib3 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) # splunk home splunkhome = os.environ["SPLUNK_HOME"] # append lib sys.path.append(os.path.join(splunkhome, "etc", "apps", "trackme", "lib")) # import TrackMe libs from trackme_libs import JSONFormatter # import trackme libs utils from trackme_libs_utils import ( escape_backslash, replace_encoded_doublebackslashes, remove_leading_spaces, ) # logging: # To avoid overriding logging destination of callers, the libs will not set on purpose any logging definition # and rely on callers themselves def trackme_fqm_gen_metrics( timestamp, tenant_id, object_value, object_id, metric_index, metrics_event ): try: if not isinstance(metrics_event, dict): metrics_event = json.loads(metrics_event) # Create a dedicated logger for FLX metrics fqm_logger = logging.getLogger("trackme.fqm.metrics") fqm_logger.setLevel(logging.INFO) # Only add the handler if it doesn't exist yet if not fqm_logger.handlers: # Set up the file handler filehandler = RotatingFileHandler( f"{splunkhome}/var/log/splunk/trackme_fqm_metrics.log", mode="a", maxBytes=100000000, backupCount=1, ) formatter = JSONFormatter(timestamp=timestamp) filehandler.setFormatter(formatter) fqm_logger.addHandler(filehandler) # Prevent propagation to root logger fqm_logger.propagate = False else: # Find the RotatingFileHandler among existing handlers filehandler = None for handler in fqm_logger.handlers: if isinstance(handler, RotatingFileHandler): filehandler = handler break # If no RotatingFileHandler found, create one if filehandler is None: filehandler = RotatingFileHandler( f"{splunkhome}/var/log/splunk/trackme_fqm_metrics.log", mode="a", maxBytes=100000000, backupCount=1, ) fqm_logger.addHandler(filehandler) # Update formatter with current timestamp formatter = JSONFormatter(timestamp=timestamp) filehandler.setFormatter(formatter) fqm_logger.info( "Metrics - group=fqm_metrics", extra={ "target_index": metric_index, "tenant_id": tenant_id, "object": object_value, "object_id": object_id, "object_category": "splk-fqm", "metrics_event": json.dumps(metrics_event), }, ) except Exception as e: raise Exception(str(e)) def trackme_fqm_gen_metrics_from_list( tenant_id, metric_index, metrics_list ): try: if not isinstance(metrics_list, list): metrics_list = json.loads(metrics_list) # Create a dedicated logger for FQM metrics fqm_logger = logging.getLogger("trackme.fqm.metrics") fqm_logger.setLevel(logging.INFO) # Only add the handler if it doesn't exist yet if not fqm_logger.handlers: # Set up the file handler filehandler = RotatingFileHandler( f"{splunkhome}/var/log/splunk/trackme_fqm_metrics.log", mode="a", maxBytes=100000000, backupCount=1, ) fqm_logger.addHandler(filehandler) # Prevent propagation to root logger fqm_logger.propagate = False else: # Find the RotatingFileHandler among existing handlers filehandler = None for handler in fqm_logger.handlers: if isinstance(handler, RotatingFileHandler): filehandler = handler break # If no RotatingFileHandler found, create one if filehandler is None: filehandler = RotatingFileHandler( f"{splunkhome}/var/log/splunk/trackme_fqm_metrics.log", mode="a", maxBytes=100000000, backupCount=1, ) fqm_logger.addHandler(filehandler) for metrics_item in metrics_list: timestamp = float(metrics_item.get("time")) metrics_item.pop("time") # Remove time field # Update formatter with new timestamp formatter = JSONFormatter(timestamp=timestamp) filehandler.setFormatter(formatter) # Build metrics_event dynamically from fields starting with "fields_quality." metrics_event = {} metrics_data = metrics_item.get("metrics", {}) for key, value in metrics_data.items(): if key.startswith("fields_quality."): metrics_event[key] = value if value is not None else 0 fqm_logger.info( "Metrics - group=fqm_metrics", extra={ "target_index": metric_index, "tenant_id": tenant_id, "object": metrics_item.get("object"), "object_id": metrics_item.get("object_id"), "object_category": "splk-fqm", "metrics_event": json.dumps(metrics_event), }, ) except Exception as e: raise Exception(str(e)) # return main searches logics for that entity def splk_fqm_return_searches(tenant_id, fqm_type, entity_info): # log debug logging.debug( f'Starting function=splk_fqm_return_searches with entity_info="{json.dumps(entity_info, indent=2)}"' ) # define required searches dynamically based on the upstream entity information splk_fqm_mctalog_search = None splk_fqm_metrics_report = None splk_fqm_mpreview = None splk_fqm_metrics_populate_search = None splk_fqm_chart_values_search = None splk_fqm_chart_description_search = None splk_fqm_chart_status_search = None splk_fqm_table_summary_search = None splk_fqm_table_summary_formated_search = None splk_fqm_metrics_success_overtime = None splk_fqm_search_sample_events = None # metadata search constraint (set to * by default to avoid prevent results in case of no metadata fields) metadata_search_constraint = "*" # Extract metadata fields from fields_quality_summary to build search constraint try: if "fields_quality_summary" in entity_info and entity_info["fields_quality_summary"]: # Parse the JSON string if it's a string, otherwise use as-is if it's already a dict if isinstance(entity_info["fields_quality_summary"], str): fields_quality_data = json.loads(entity_info["fields_quality_summary"]) else: fields_quality_data = entity_info["fields_quality_summary"] # Extract metadata fields and build constraint metadata_constraints = [] for key, value in fields_quality_data.items(): if key.startswith("metadata."): # Format as "metadata.fieldname"="value" constraint = f'"{key}"="{value}"' metadata_constraints.append(constraint) # Join all constraints with spaces if metadata_constraints: metadata_search_constraint = " ".join(metadata_constraints) except (json.JSONDecodeError, KeyError, TypeError) as e: logging.warning(f"Failed to extract metadata constraints from fields_quality_summary: {str(e)}") pass try: ######## # mstats ######## # mcatalog splk_fqm_mctalog_search = remove_leading_spaces( f"""\ | mcatalog values(metric_name) as metrics, values(_dims) as dims where `trackme_metrics_idx({tenant_id})` tenant_id="{tenant_id}" object_category="splk-fqm" object_id="{entity_info["_key"]}" metric_name=* by index """ ) # metrics report splk_fqm_metrics_report = remove_leading_spaces( f"""\ | mstats latest(_value) as latest_value, avg(_value) as avg_value, max(_value) as max_value, perc95(_value) as perc95_value, stdev(_value) as stdev_value where `trackme_metrics_idx({tenant_id})` metric_name=* tenant_id="{tenant_id}" object_category="splk-fqm" object_id="{entity_info["_key"]}" by index, object, metric_name | foreach *_value [ eval <> = if(match(metric_name, "\\.status"), round('<>', 0), round('<>', 3)) ] """ ) # mpreview splk_fqm_mpreview = remove_leading_spaces( f"""\ | mpreview `trackme_metrics_idx({tenant_id})` filter="tenant_id={tenant_id} object_category="splk-fqm" object_id={entity_info["_key"]}" """ ) # metrics popuating search splk_fqm_metrics_populate_search = remove_leading_spaces( f"""\ | mcatalog values(metric_name) as metrics where `trackme_metrics_idx({tenant_id})` tenant_id="{tenant_id}" object_category="splk-fqm" object_id="{entity_info["_key"]}" metric_name=* | mvexpand metrics | rename metrics as metric_name | rex field=metric_name "^trackme\\.splk\\.fqm\\.(?