You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
Splunk_Deploiement/apps/trackme/bin/trackmepersistentfields.py

1515 lines
99 KiB

#!/usr/bin/env python
# coding=utf-8

# Module metadata: authorship, licensing and release status for this
# TrackMe custom search command module.
__author__ = "TrackMe Limited"
__copyright__ = "Copyright 2022-2026, TrackMe Limited, U.K."
__credits__ = "TrackMe Limited, U.K."
__license__ = "TrackMe Limited, all rights reserved"
__version__ = "0.1.0"
__maintainer__ = "TrackMe Limited, U.K."
__email__ = "support@trackme-solutions.com"
__status__ = "PRODUCTION"
# Standard library
import os
import sys
import time
import json
import hashlib

# External libraries
import urllib3

# Disable urllib3 warnings — splunkd is commonly reached over HTTPS with a
# self-signed certificate, so insecure-request warnings are pure noise here
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Configure logging
import logging
from logging.handlers import RotatingFileHandler

# SPLUNK_HOME is always set in a splunkd-spawned process; a missing variable
# here would be a deployment error, so a KeyError is acceptable
splunkhome = os.environ["SPLUNK_HOME"]

# set logging: dedicated rotating log file under $SPLUNK_HOME/var/log/splunk,
# capped at ~10 MB with a single backup file
filehandler = RotatingFileHandler(
    os.path.join(splunkhome, "var", "log", "splunk", "trackme_persistentfields.log"),
    mode="a",
    maxBytes=10_000_000,
    backupCount=1,
)
formatter = logging.Formatter(
    "%(asctime)s %(levelname)s %(filename)s %(funcName)s %(lineno)d %(message)s"
)
# emit timestamps in UTC rather than local time
logging.Formatter.converter = time.gmtime
filehandler.setFormatter(formatter)
log = logging.getLogger()  # root logger - Good to get it only once.
for hdlr in log.handlers[:]:  # remove the existing file handlers
    if isinstance(hdlr, logging.FileHandler):
        log.removeHandler(hdlr)
log.addHandler(filehandler)  # set the new handler
# set the log level to INFO, DEBUG as the default is ERROR
log.setLevel(logging.INFO)

# append current directory so the app-local libraries below can be imported
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# import libs (Splunk add-on import bootstrap)
import import_declare_test

# import Splunk libs
from splunklib.searchcommands import (
    dispatch,
    StreamingCommand,
    Configuration,
    Option,
    validators,
)

# Import trackme libs
from trackme_libs import trackme_reqinfo
from trackme_libs_utils import decode_unicode, get_uuid

# import TrackMe get data libs
from trackme_libs_get_data import (
    search_kv_collection_restmode,
    search_kv_collection_searchmode,
    search_kv_collection_sdkmode,
)

# Import trackMe kvstore batch libs
from trackme_libs_kvstore_batch import batch_update_worker

# import trackme libs persistent fields definition: per-component lists of
# fields that must be preserved across KVstore updates
from collections_data import (
    persistent_fields_dsm,
    persistent_fields_dhm,
    persistent_fields_mhm,
    persistent_fields_flx,
    persistent_fields_fqm,
    persistent_fields_wlk,
)
@Configuration(distributed=False)
class TrackMePersistentHandler(StreamingCommand):
# Search command options exposed to SPL callers.
collection = Option(
    doc="""
    **Syntax:** **collection=****
    **Description:** Specify the collection.""",
    require=True,
    default="None",
    validate=validators.Match("collection", r"^.*$"),
)

key = Option(
    doc="""
    **Syntax:** **key=****
    **Description:** Specify the key.""",
    require=True,
    default="None",
    validate=validators.Match("key", r"^.*$"),
)

update_collection = Option(
    doc="""
    **Syntax:** **update_collection=****
    **Description:** Enables or disables updating and inserting into the collection, this replaces the need from calling outputlookup.""",
    require=False,
    default=False,
    # bugfix: the validator's name must match this option ("update_collection");
    # it was previously copy-pasted as "key", producing misleading validation errors
    validate=validators.Match("update_collection", r"^(True|False)$"),
)
def get_component(self, collection_name):
    """
    Map a KVstore collection name to its TrackMe component identifier.

    Args:
        collection_name (str): The name of the collection.

    Returns:
        str: The component short name ("dsm", "dhm", "mhm", "flx",
        "fqm" or "wlk") derived from the collection name prefix, or
        None when no known prefix matches.
    """
    # Known collection name prefixes and their component identifiers
    prefix_map = {
        "trackme_dsm_": "dsm",
        "trackme_dhm_": "dhm",
        "trackme_mhm_": "mhm",
        "trackme_flx_": "flx",
        "trackme_fqm_": "fqm",
        "trackme_wlk_": "wlk",
    }
    for prefix, component in prefix_map.items():
        if collection_name.startswith(prefix):
            return component
    # no expected prefix matched
    return None
def stream(self, records):
# performance counter
start_time = time.time()
# Get request info and set logging level
reqinfo = trackme_reqinfo(
self._metadata.searchinfo.session_key, self._metadata.searchinfo.splunkd_uri
)
log.setLevel(reqinfo["logging_level"])
# Max multi thread workers
max_multi_thread_workers = int(
reqinfo["trackme_conf"]["trackme_general"]["max_multi_thread_workers"]
)
# set instance_id
self.instance_id = get_uuid()
# connect to the KVstore
target_collection = f"kv_{self.collection}"
collection = self.service.kvstore[target_collection]
# set the component
persistent_fields = []
component = self.get_component(self.collection)
if component == "dsm":
persistent_fields = []
for field in persistent_fields_dsm:
persistent_fields.append(field)
elif component == "dhm":
for field in persistent_fields_dhm:
persistent_fields.append(field)
elif component == "mhm":
for field in persistent_fields_mhm:
persistent_fields.append(field)
elif component == "flx":
for field in persistent_fields_flx:
persistent_fields.append(field)
elif component == "fqm":
for field in persistent_fields_fqm:
persistent_fields.append(field)
elif component == "wlk":
for field in persistent_fields_wlk:
persistent_fields.append(field)
# set task
#
task_start = time.time()
task_instance_id = get_uuid()
task_name = "get_collection_records"
# get all records
collection_records, collection_keys, collection_dict, last_page = (
search_kv_collection_sdkmode(
logging, self.service, target_collection, page=1, page_count=0, orderby="keyid"
)
)
# end task
#
task_end = time.time()
task_run_time = round((task_end - task_start), 3)
logging.info(
f'instance_id={self.instance_id}, task="{task_name}", task_instance_id={task_instance_id}, task_run_time="{task_run_time}", task_end=1, task has terminated.'
)
#
# Define Meta
#
final_records = []
# Loop in the results
for record in records:
record_is_new = record.get(self.key) not in collection_keys
# ctime: if record is new, add a ctime field, otherwise ensure we have a ctime field set to the current time
if record_is_new:
record["ctime"] = time.time()
else:
ctime = record.get("ctime", None)
if not ctime:
record["ctime"] = time.time()
# get time, if any
time_event = None
try:
time_event = record["_time"]
except Exception as e:
time_event = time.time()
logging.debug(f'instance_id="{self.instance_id}", inspecting record={json.dumps(record, indent=2)}')
# always set an object_256 and add _key in the record
record_object_value = decode_unicode(record["object"])
record_alias = decode_unicode(record["alias"])
logging.debug(
f'instance_id="{self.instance_id}", object="{record["object"]}", decoded_object="{record_object_value}", alias="{record["alias"]}", decoded_alias="{record_alias}"'
)
# add the _key in the record if there is none
if not record.get("_key"):
object_256 = hashlib.sha256(
record_object_value.encode("utf-8")
).hexdigest()
record["_key"] = object_256
logging.debug(
f'instance_id="{self.instance_id}", adding _key="{object_256}" to record for object="{record_object_value}", alias="{record_alias}"'
)
# handle unicode for object and alias
record["object"] = record_object_value
record["alias"] = record_alias
# get tracker_runtime, if any
tracker_runtime = record.get("tracker_runtime", None)
if tracker_runtime:
try:
tracker_runtime = float(tracker_runtime)
except Exception as e:
tracker_runtime = time.time()
else:
tracker_runtime = time.time()
# rejected record: only applies to component dsm/dhm, if the value of data_last_time_seen in record is is lower than the current value in the KVstore,
# then the record should be rejected as it is outdated and might indicate a plateform level temporary issue
# fields are epochtime and should be loaded as float, rejected_record is a boolean
# the current value in the Kvstore can be retrieved from: float(collection_dict[record.get(self.key)].get("data_last_time_seen"))
# this should be made through a try/except block to avoid any potential issue, if an exception is raised log the exception and set rejected_record to False
rejected_record = False
if component in ["dsm", "dhm"]:
try:
rejected_record_key = record.get(self.key)
logging.debug(f'instance_id="{self.instance_id}", record key="{rejected_record_key}"')
rejected_record_dict = collection_dict.get(rejected_record_key)
logging.debug(f'instance_id="{self.instance_id}", rejected_record_dict="{rejected_record_dict}"')
if rejected_record_dict is not None:
kvstore_data_last_time_seen = rejected_record_dict.get(
"data_last_time_seen", None
)
# get kvstore_data_last_time_seen
kvstore_data_last_time_seen = rejected_record_dict.get(
"data_last_time_seen", None
)
logging.info(
f'instance_id="{self.instance_id}", kvstore_data_last_time_seen="{kvstore_data_last_time_seen}"'
)
if kvstore_data_last_time_seen:
kvstore_data_last_time_seen = float(
kvstore_data_last_time_seen
)
# get current_data_last_time_seen
current_data_last_time_seen = record.get(
"data_last_time_seen", None
)
if current_data_last_time_seen:
current_data_last_time_seen = float(
current_data_last_time_seen
)
# process if we have values
if kvstore_data_last_time_seen and current_data_last_time_seen:
if (
current_data_last_time_seen
< kvstore_data_last_time_seen
):
rejected_record = True
logging.warning(
f'instance_id="{self.instance_id}", collection="{target_collection}", component="{component}", record key="{record.get(self.key)}", rejected record detected, epoch value in kVstore {kvstore_data_last_time_seen} is bigger than record submitted value {current_data_last_time_seen}, record="{json.dumps(record, indent=2)}"'
)
else:
rejected_record = False
logging.debug(
f'instance_id="{self.instance_id}", collection="{target_collection}", component="{component}", record key="{record.get(self.key)}", rejected record not detected, epoch value in kVstore {kvstore_data_last_time_seen} and record submitted value {current_data_last_time_seen} are both None, record="{json.dumps(record, indent=2)}"'
)
else:
rejected_record = False
logging.debug(
f'instance_id="{self.instance_id}", collection="{target_collection}", component="{component}", record key="{record.get(self.key)}", object="{record.get("object")}", rejected record not detected, epoch value in kVstore {kvstore_data_last_time_seen} and record submitted value {current_data_last_time_seen} are both None, record="{json.dumps(record, indent=2)}"'
)
else:
rejected_record = False
logging.warning(
f'instance_id="{self.instance_id}", collection="{target_collection}", component="{component}", record key="{record.get(self.key)}", object="{record.get("object")}", this object could not be found in the dictionnary, most likely because this sourcetype is corrupted and index non printable characters as its name!'
)
except Exception as e:
logging.error(
f'instance_id="{self.instance_id}", collection="{target_collection}", component="{component}", failed to extract and convert data_last_time_seen, record key="{record.get(self.key)}", object="{record.get("object")}", exception message="{str(e)}"'
)
rejected_record = False
# detect conflict update
conflict_update = False
if not record_is_new:
# attempt to retrieve and convert mtime value, if fails for any reason, set conflict_update to False
try:
mtime = float(collection_dict[record.get(self.key)].get("mtime"))
if mtime > float(tracker_runtime):
conflict_update = True
logging.info(
f'instance_id="{self.instance_id}", record key="{record.get(self.key)}", conflict update detected, preserving persistent fields="{persistent_fields}", record="{json.dumps(record, indent=2)}"'
)
else:
conflict_update = False
except Exception as e:
logging.error(
f'instance_id="{self.instance_id}", failed to extract and convert mtime="{mtime}", tracker_runtime="{tracker_runtime}", exception message="{str(e)}"'
)
conflict_update = False
# create a summary record
summary_record = {}
# Add _time first
summary_record["_time"] = float(time_event)
# if not rejected
if not rejected_record:
# Handle merging of tracker-keyed JSON fields for FLX concurrent trackers support
# This must be done before the main loop to properly merge tracker-specific data
if component == "flx" and not record_is_new:
existing_record = collection_dict.get(record.get(self.key), {})
# Merge metrics (JSON object keyed by tracker_name)
if "metrics" in record and "metrics" in existing_record:
try:
new_metrics_str = record.get("metrics")
existing_metrics_str = existing_record.get("metrics")
# Parse both as JSON objects
new_metrics = {}
existing_metrics = {}
if new_metrics_str:
if isinstance(new_metrics_str, str):
try:
new_metrics = json.loads(new_metrics_str)
except (json.JSONDecodeError, TypeError):
# If parsing fails, try to treat as regular metrics dict
new_metrics = {}
elif isinstance(new_metrics_str, dict):
new_metrics = new_metrics_str
if existing_metrics_str:
if isinstance(existing_metrics_str, str):
try:
existing_metrics = json.loads(existing_metrics_str)
except (json.JSONDecodeError, TypeError):
# If parsing fails, might be old format, skip merge
existing_metrics = {}
elif isinstance(existing_metrics_str, dict):
existing_metrics = existing_metrics_str
# Merge: existing trackers preserved, new tracker updates/overwrites its entry
merged_metrics = existing_metrics.copy()
merged_metrics.update(new_metrics)
# Remove internal "status" field from metrics
# This is an internal field, not a user metric
# Handle tracker-keyed format: {"tracker1": {"metric1": 123, "status": 1}, ...}
# Handle old format: {"metric1": 123, "status": 1}
if merged_metrics:
# Check if it's tracker-keyed format (values are dicts with metrics)
is_tracker_keyed = False
for tracker_name, tracker_metrics in merged_metrics.items():
if isinstance(tracker_metrics, dict):
# Check if this looks like a metrics dict (has numeric/string values)
# If all values are simple types, it's likely tracker-keyed metrics format
if all(isinstance(v, (int, float, str, bool)) or v is None for v in tracker_metrics.values()):
# This is tracker-keyed format, remove "status" from each tracker's metrics
is_tracker_keyed = True
if "status" in tracker_metrics:
cleaned_metrics = tracker_metrics.copy()
del cleaned_metrics["status"]
merged_metrics[tracker_name] = cleaned_metrics
# If old format (not tracker-keyed), remove "status" from top level
if not is_tracker_keyed and "status" in merged_metrics:
cleaned_metrics = merged_metrics.copy()
del cleaned_metrics["status"]
merged_metrics = cleaned_metrics
# Store merged metrics as JSON string
record["metrics"] = json.dumps(merged_metrics)
logging.debug(
f'instance_id="{self.instance_id}", merged metrics for object="{record.get("object")}", '
f'existing_trackers={list(existing_metrics.keys())}, '
f'new_trackers={list(new_metrics.keys())}, '
f'merged_trackers={list(merged_metrics.keys())}'
)
except Exception as e:
logging.error(
f'instance_id="{self.instance_id}", failed to merge metrics for object="{record.get("object")}", '
f'exception="{str(e)}"'
)
elif "metrics" in record:
# New record or existing record doesn't have metrics, clean "status" from incoming metrics
try:
new_metrics_str = record.get("metrics")
if new_metrics_str:
new_metrics = {}
if isinstance(new_metrics_str, str):
try:
new_metrics = json.loads(new_metrics_str)
except (json.JSONDecodeError, TypeError):
pass
elif isinstance(new_metrics_str, dict):
new_metrics = new_metrics_str
# Remove internal "status" field from metrics
if new_metrics:
# Check if it's tracker-keyed format
is_tracker_keyed = False
for tracker_name, tracker_metrics in new_metrics.items():
if isinstance(tracker_metrics, dict):
if all(isinstance(v, (int, float, str, bool)) or v is None for v in tracker_metrics.values()):
is_tracker_keyed = True
if "status" in tracker_metrics:
cleaned_metrics = tracker_metrics.copy()
del cleaned_metrics["status"]
new_metrics[tracker_name] = cleaned_metrics
# If old format, remove "status" from top level
if not is_tracker_keyed and "status" in new_metrics:
cleaned_metrics = new_metrics.copy()
del cleaned_metrics["status"]
new_metrics = cleaned_metrics
# Store cleaned metrics back
if isinstance(new_metrics_str, str):
record["metrics"] = json.dumps(new_metrics)
else:
record["metrics"] = new_metrics
except Exception as e:
logging.error(
f'instance_id="{self.instance_id}", failed to clean status from metrics for object="{record.get("object")}", '
f'exception="{str(e)}"'
)
# Merge status_description (JSON object keyed by tracker_name)
if "status_description" in record:
# Field exists in incoming record, try to merge
try:
new_status_desc_str = record.get("status_description")
existing_status_desc_str = existing_record.get("status_description") if "status_description" in existing_record else None
# If incoming value is None or empty, preserve existing and skip merge
if not new_status_desc_str:
if existing_status_desc_str:
record["status_description"] = existing_status_desc_str
else:
# No existing, remove empty field
record.pop("status_description", None)
else:
# We have incoming data, proceed with merge
new_status_desc = None
existing_status_desc = {}
if isinstance(new_status_desc_str, str):
# Skip empty strings (they're not valid JSON)
if new_status_desc_str.strip():
try:
new_status_desc = json.loads(new_status_desc_str)
except (json.JSONDecodeError, TypeError):
# If parsing fails, might be old format string, skip merge
new_status_desc = None
else:
# Empty string, preserve existing if available
if existing_status_desc_str:
record["status_description"] = existing_status_desc_str
else:
# No existing, remove empty field
record.pop("status_description", None)
new_status_desc = None
elif isinstance(new_status_desc_str, dict):
new_status_desc = new_status_desc_str
# Parse existing status_description if available
if existing_status_desc_str:
if isinstance(existing_status_desc_str, str):
# Skip empty strings
if existing_status_desc_str.strip():
try:
existing_status_desc = json.loads(existing_status_desc_str)
except (json.JSONDecodeError, TypeError):
# If parsing fails, might be old format string, skip merge
existing_status_desc = {}
else:
existing_status_desc = {}
elif isinstance(existing_status_desc_str, dict):
existing_status_desc = existing_status_desc_str
# Merge: existing trackers preserved, new tracker updates/overwrites its entry
if new_status_desc is not None:
if new_status_desc:
merged_status_desc = existing_status_desc.copy() if existing_status_desc else {}
merged_status_desc.update(new_status_desc)
# Store merged status_description as JSON string (only if we have content)
if merged_status_desc:
# Check if merged dict has any non-empty values
has_content = any(v for v in merged_status_desc.values() if v)
if has_content:
record["status_description"] = json.dumps(merged_status_desc)
elif existing_status_desc:
# Merged is empty, preserve existing if available
record["status_description"] = existing_record.get("status_description")
elif existing_status_desc:
# If new is empty but existing has content, preserve existing
record["status_description"] = existing_record.get("status_description")
elif existing_status_desc:
# New is empty but existing has content, preserve existing
record["status_description"] = existing_record.get("status_description")
else:
# Both are empty, remove field to avoid storing {}
record.pop("status_description", None)
elif existing_status_desc:
# No new data but existing has content, preserve existing
record["status_description"] = existing_record.get("status_description")
except Exception as e:
logging.error(
f'instance_id="{self.instance_id}", failed to merge status_description for object="{record.get("object")}", '
f'exception="{str(e)}"'
)
elif "status_description" in existing_record:
# Field not in incoming record, preserve existing
record["status_description"] = existing_record.get("status_description")
# Merge status_description_short (JSON object keyed by tracker_name)
if "status_description_short" in record:
try:
new_status_desc_short_str = record.get("status_description_short")
existing_status_desc_short_str = existing_record.get("status_description_short") if "status_description_short" in existing_record else None
# If incoming value is None or empty, preserve existing and skip merge
if not new_status_desc_short_str:
if existing_status_desc_short_str:
record["status_description_short"] = existing_status_desc_short_str
else:
# No existing, remove empty field
record.pop("status_description_short", None)
else:
# We have incoming data, proceed with merge
new_status_desc_short = None
existing_status_desc_short = {}
if isinstance(new_status_desc_short_str, str):
# Skip empty strings (they're not valid JSON)
if new_status_desc_short_str.strip():
try:
new_status_desc_short = json.loads(new_status_desc_short_str)
except (json.JSONDecodeError, TypeError):
# If parsing fails, might be old format string, skip merge
new_status_desc_short = None
else:
# Empty string, preserve existing if available
if existing_status_desc_short_str:
record["status_description_short"] = existing_status_desc_short_str
else:
# No existing, remove empty field
record.pop("status_description_short", None)
new_status_desc_short = None
elif isinstance(new_status_desc_short_str, dict):
new_status_desc_short = new_status_desc_short_str
# Parse existing status_description_short if available
if existing_status_desc_short_str:
if isinstance(existing_status_desc_short_str, str):
# Skip empty strings
if existing_status_desc_short_str.strip():
try:
existing_status_desc_short = json.loads(existing_status_desc_short_str)
except (json.JSONDecodeError, TypeError):
# If parsing fails, might be old format string, skip merge
existing_status_desc_short = {}
else:
existing_status_desc_short = {}
elif isinstance(existing_status_desc_short_str, dict):
existing_status_desc_short = existing_status_desc_short_str
# Merge: existing trackers preserved, new tracker updates/overwrites its entry
if new_status_desc_short is not None:
if new_status_desc_short:
merged_status_desc_short = existing_status_desc_short.copy() if existing_status_desc_short else {}
merged_status_desc_short.update(new_status_desc_short)
# Store merged status_description_short as JSON string (only if we have content)
if merged_status_desc_short:
# Check if merged dict has any non-empty values
has_content = any(v for v in merged_status_desc_short.values() if v)
if has_content:
record["status_description_short"] = json.dumps(merged_status_desc_short)
elif existing_status_desc_short:
# Merged is empty, preserve existing if available
record["status_description_short"] = existing_record.get("status_description_short")
elif existing_status_desc_short:
# If new is empty but existing has content, preserve existing
record["status_description_short"] = existing_record.get("status_description_short")
elif existing_status_desc_short:
# New is empty but existing has content, preserve existing
record["status_description_short"] = existing_record.get("status_description_short")
else:
# Both are empty, remove field to avoid storing {}
record.pop("status_description_short", None)
elif existing_status_desc_short:
# No new data but existing has content, preserve existing
record["status_description_short"] = existing_record.get("status_description_short")
except Exception as e:
logging.error(
f'instance_id="{self.instance_id}", failed to merge status_description_short for object="{record.get("object")}", '
f'exception="{str(e)}"'
)
elif "status_description_short" in existing_record:
# Field not in incoming record, preserve existing
record["status_description_short"] = existing_record.get("status_description_short")
# Merge object_description (JSON object keyed by tracker_name)
if "object_description" in record:
# Field exists in incoming record, try to merge
try:
new_object_desc_str = record.get("object_description")
existing_object_desc_str = existing_record.get("object_description") if "object_description" in existing_record else None
# If incoming value is None or empty, preserve existing and skip merge
if not new_object_desc_str:
if existing_object_desc_str:
record["object_description"] = existing_object_desc_str
else:
# No existing, remove empty field
record.pop("object_description", None)
else:
# We have incoming data, proceed with merge
new_object_desc = None
existing_object_desc = {}
if isinstance(new_object_desc_str, str):
# Skip empty strings (they're not valid JSON)
if new_object_desc_str.strip():
try:
new_object_desc = json.loads(new_object_desc_str)
except (json.JSONDecodeError, TypeError):
# If parsing fails, might be old format string, skip merge
new_object_desc = None
else:
# Empty string, preserve existing if available
if existing_object_desc_str:
record["object_description"] = existing_object_desc_str
else:
# No existing, remove empty field
record.pop("object_description", None)
new_object_desc = None
elif isinstance(new_object_desc_str, dict):
new_object_desc = new_object_desc_str
# Parse existing object_description if available
if existing_object_desc_str:
if isinstance(existing_object_desc_str, str):
# Skip empty strings
if existing_object_desc_str.strip():
try:
existing_object_desc = json.loads(existing_object_desc_str)
except (json.JSONDecodeError, TypeError):
# If parsing fails, might be old format string, skip merge
existing_object_desc = {}
else:
existing_object_desc = {}
elif isinstance(existing_object_desc_str, dict):
existing_object_desc = existing_object_desc_str
# Merge: existing trackers preserved, new tracker updates/overwrites its entry
if new_object_desc is not None:
if new_object_desc:
merged_object_desc = existing_object_desc.copy() if existing_object_desc else {}
merged_object_desc.update(new_object_desc)
# Store merged object_description as JSON string (only if we have content)
if merged_object_desc:
# Check if merged dict has any non-empty values
has_content = any(v for v in merged_object_desc.values() if v)
if has_content:
record["object_description"] = json.dumps(merged_object_desc)
elif existing_object_desc:
# Merged is empty, preserve existing if available
record["object_description"] = existing_record.get("object_description")
elif existing_object_desc:
# If new is empty but existing has content, preserve existing
record["object_description"] = existing_record.get("object_description")
elif existing_object_desc:
# New is empty but existing has content, preserve existing
record["object_description"] = existing_record.get("object_description")
else:
# Both are empty, remove field to avoid storing {}
record.pop("object_description", None)
elif existing_object_desc:
# No new data but existing has content, preserve existing
record["object_description"] = existing_record.get("object_description")
except Exception as e:
logging.error(
f'instance_id="{self.instance_id}", failed to merge object_description for object="{record.get("object")}", '
f'exception="{str(e)}"'
)
elif "object_description" in existing_record:
# Field not in incoming record, preserve existing
record["object_description"] = existing_record.get("object_description")
# Merge status (JSON object keyed by tracker_name)
if "status" in record:
# Field exists in incoming record, try to merge
try:
new_status_str = record.get("status")
existing_status_str = existing_record.get("status") if "status" in existing_record else None
# Parse incoming status
new_status = None
existing_status = {}
if isinstance(new_status_str, str):
# Try to parse as JSON (tracker-keyed format)
if new_status_str.strip():
try:
new_status = json.loads(new_status_str)
if not isinstance(new_status, dict):
# If not a dict, treat as old format (simple integer)
try:
status_value = int(new_status_str)
# Need tracker name to create tracker-keyed format
if "tracker_name" in record:
tracker_name_value = record.get("tracker_name")
if isinstance(tracker_name_value, str):
try:
tracker_names = json.loads(tracker_name_value)
if isinstance(tracker_names, list) and tracker_names:
new_status = {tracker_names[-1]: status_value}
except (json.JSONDecodeError, TypeError):
pass
except (ValueError, TypeError):
pass
except (json.JSONDecodeError, TypeError):
# If parsing fails, might be old format integer string
try:
status_value = int(new_status_str)
# Need tracker name to create tracker-keyed format
if "tracker_name" in record:
tracker_name_value = record.get("tracker_name")
if isinstance(tracker_name_value, str):
try:
tracker_names = json.loads(tracker_name_value)
if isinstance(tracker_names, list) and tracker_names:
new_status = {tracker_names[-1]: status_value}
except (json.JSONDecodeError, TypeError):
pass
except (ValueError, TypeError):
pass
elif isinstance(new_status_str, dict):
new_status = new_status_str
elif isinstance(new_status_str, int):
# Old format integer, need tracker name to create tracker-keyed format
if "tracker_name" in record:
tracker_name_value = record.get("tracker_name")
if isinstance(tracker_name_value, str):
try:
tracker_names = json.loads(tracker_name_value)
if isinstance(tracker_names, list) and tracker_names:
new_status = {tracker_names[-1]: new_status_str}
except (json.JSONDecodeError, TypeError):
pass
# Parse existing status if available
if existing_status_str:
if isinstance(existing_status_str, str):
if existing_status_str.strip():
try:
existing_status = json.loads(existing_status_str)
if not isinstance(existing_status, dict):
# If not a dict, treat as old format (simple integer)
try:
status_value = int(existing_status_str)
# Convert to tracker-keyed format using existing tracker_name
if "tracker_name" in existing_record:
tracker_name_value = existing_record.get("tracker_name")
if isinstance(tracker_name_value, str):
try:
tracker_names = json.loads(tracker_name_value)
if isinstance(tracker_names, list) and tracker_names:
existing_status = {tracker_names[-1]: status_value}
except (json.JSONDecodeError, TypeError):
existing_status = {}
except (ValueError, TypeError):
existing_status = {}
except (json.JSONDecodeError, TypeError):
# If parsing fails, might be old format integer string
try:
status_value = int(existing_status_str)
# Convert to tracker-keyed format using existing tracker_name
if "tracker_name" in existing_record:
tracker_name_value = existing_record.get("tracker_name")
if isinstance(tracker_name_value, str):
try:
tracker_names = json.loads(tracker_name_value)
if isinstance(tracker_names, list) and tracker_names:
existing_status = {tracker_names[-1]: status_value}
except (json.JSONDecodeError, TypeError):
existing_status = {}
except (ValueError, TypeError):
existing_status = {}
else:
existing_status = {}
elif isinstance(existing_status_str, dict):
existing_status = existing_status_str
elif isinstance(existing_status_str, int):
# Old format integer, convert to tracker-keyed format
if "tracker_name" in existing_record:
tracker_name_value = existing_record.get("tracker_name")
if isinstance(tracker_name_value, str):
try:
tracker_names = json.loads(tracker_name_value)
if isinstance(tracker_names, list) and tracker_names:
existing_status = {tracker_names[-1]: existing_status_str}
except (json.JSONDecodeError, TypeError):
existing_status = {}
# Merge: existing trackers preserved, new tracker updates/overwrites its entry
# Ensure new_status is a dict before merging (if it's not None)
if new_status is not None:
if isinstance(new_status, dict) and new_status:
merged_status = existing_status.copy() if existing_status else {}
merged_status.update(new_status)
# Store merged status as JSON string
if merged_status:
record["status"] = json.dumps(merged_status)
elif existing_status:
# New is empty but existing has content, preserve existing
record["status"] = existing_record.get("status")
elif isinstance(new_status, dict) and not new_status:
# New is empty dict but existing has content, preserve existing
if existing_status:
record["status"] = existing_record.get("status")
elif existing_status:
# New is not a dict (e.g., integer from old format without tracker_name)
# Preserve existing status
record["status"] = existing_record.get("status")
elif existing_status:
# No new data but existing has content, preserve existing
record["status"] = existing_record.get("status")
# Guard for the whole status-merge above: never let a malformed status value
# break the streaming pipeline — log the failure and continue with the
# record as-is.
except Exception as e:
    logging.error(
        f'instance_id="{self.instance_id}", failed to merge status for object="{record.get("object")}", '
        f'exception="{str(e)}"'
    )
# Incoming record carries no "status" at all: carry the stored value forward.
elif "status" in existing_record:
    # Field not in incoming record, preserve existing
    record["status"] = existing_record.get("status")
# Preserve existing metrics if not in incoming record
# (metrics is carried over verbatim — no per-tracker merge happens here)
if "metrics" not in record and "metrics" in existing_record:
    record["metrics"] = existing_record.get("metrics")
# --- tracker_name merge ---
# tracker_name is persisted as a JSON array; incoming and existing values
# (plain string, JSON-array string, or list) are each normalised to a list,
# unioned, sorted, and re-serialised.
# Merge tracker_name (JSON array of tracker names)
if "tracker_name" in record:
    try:
        new_tracker_name_str = record.get("tracker_name")
        existing_tracker_name_str = existing_record.get("tracker_name") if "tracker_name" in existing_record else None
        # Parse both as JSON arrays
        new_tracker_names = []
        existing_tracker_names = []
        if new_tracker_name_str:
            if isinstance(new_tracker_name_str, str):
                try:
                    new_tracker_names = json.loads(new_tracker_name_str)
                    if not isinstance(new_tracker_names, list):
                        # If not a list, treat as single tracker name (backward compatibility)
                        new_tracker_names = [new_tracker_name_str] if new_tracker_name_str else []
                except (json.JSONDecodeError, TypeError):
                    # If parsing fails, treat as single tracker name (backward compatibility)
                    new_tracker_names = [new_tracker_name_str] if new_tracker_name_str else []
            elif isinstance(new_tracker_name_str, list):
                new_tracker_names = new_tracker_name_str
        if existing_tracker_name_str:
            if isinstance(existing_tracker_name_str, str):
                try:
                    existing_tracker_names = json.loads(existing_tracker_name_str)
                    if not isinstance(existing_tracker_names, list):
                        # If not a list, treat as single tracker name (backward compatibility)
                        existing_tracker_names = [existing_tracker_name_str] if existing_tracker_name_str else []
                except (json.JSONDecodeError, TypeError):
                    # If parsing fails, treat as single tracker name (backward compatibility)
                    existing_tracker_names = [existing_tracker_name_str] if existing_tracker_name_str else []
            elif isinstance(existing_tracker_name_str, list):
                existing_tracker_names = existing_tracker_name_str
        # Merge: combine both arrays and remove duplicates
        # (set() dedup assumes tracker names are hashable strings)
        merged_tracker_names = list(set(existing_tracker_names + new_tracker_names))
        merged_tracker_names.sort() # Sort for consistency
        # Store merged tracker_name as JSON array
        if merged_tracker_names:
            record["tracker_name"] = json.dumps(merged_tracker_names)
        elif existing_tracker_names:
            # New is empty, preserve existing
            # NOTE(review): defensive — the merge is empty only when both
            # inputs are empty, so this branch looks unreachable.
            record["tracker_name"] = existing_record.get("tracker_name")
        else:
            # Both are empty, remove field
            record.pop("tracker_name", None)
    except Exception as e:
        # Best-effort: log and keep the record flowing through the pipeline
        logging.error(
            f'instance_id="{self.instance_id}", failed to merge tracker_name for object="{record.get("object")}", '
            f'exception="{str(e)}"'
        )
elif "tracker_name" in existing_record:
    # Field not in incoming record, preserve existing
    record["tracker_name"] = existing_record.get("tracker_name")
# --- tracker_runtime merge ---
# The macro emits tracker_runtime as a scalar epoch (now()). It is re-keyed
# per tracker into "tracker_runtimes" (JSON dict) while "tracker_runtime"
# itself keeps a scalar (the max across trackers) for staleness checks.
# Convert and merge tracker_runtime (store as tracker-keyed JSON)
# tracker_runtime comes from macro as simple value (now()), need to convert to tracker-keyed JSON
if "tracker_runtime" in record:
    try:
        new_tracker_runtime_value = record.get("tracker_runtime")
        existing_tracker_runtime_str = existing_record.get("tracker_runtime") if "tracker_runtime" in existing_record else None
        # Get tracker_name from incoming record to use as key
        incoming_tracker_name = None
        if "tracker_name" in record:
            try:
                tracker_name_value = record.get("tracker_name")
                if isinstance(tracker_name_value, str):
                    try:
                        tracker_names = json.loads(tracker_name_value)
                        if isinstance(tracker_names, list) and tracker_names:
                            # Use the last tracker name (most recent) as the key
                            incoming_tracker_name = tracker_names[-1]
                    except (json.JSONDecodeError, TypeError):
                        # If parsing fails, treat as single tracker name
                        incoming_tracker_name = tracker_name_value
                elif isinstance(tracker_name_value, list) and tracker_name_value:
                    incoming_tracker_name = tracker_name_value[-1]
            # NOTE(review): bare except also swallows SystemExit /
            # KeyboardInterrupt — consider narrowing to Exception.
            except:
                pass
        # If we have a tracker name and runtime value, convert to tracker-keyed JSON
        if incoming_tracker_name and new_tracker_runtime_value:
            try:
                new_runtime = float(new_tracker_runtime_value)
                # Parse existing tracker_runtime (might be tracker-keyed JSON or simple value)
                existing_runtimes = {}
                if existing_tracker_runtime_str:
                    if isinstance(existing_tracker_runtime_str, str):
                        try:
                            existing_runtimes = json.loads(existing_tracker_runtime_str)
                            if not isinstance(existing_runtimes, dict):
                                # If not a dict, treat as simple value (backward compatibility)
                                # Convert to tracker-keyed format using existing tracker_name if available
                                existing_tracker_name = None
                                if "tracker_name" in existing_record:
                                    try:
                                        existing_tn = existing_record.get("tracker_name")
                                        if isinstance(existing_tn, str):
                                            try:
                                                existing_tn_list = json.loads(existing_tn)
                                                if isinstance(existing_tn_list, list) and existing_tn_list:
                                                    existing_tracker_name = existing_tn_list[-1]
                                            except:
                                                # Not a JSON array: use raw string as name
                                                existing_tracker_name = existing_tn
                                    except:
                                        pass
                                if existing_tracker_name:
                                    existing_runtimes = {existing_tracker_name: float(existing_tracker_runtime_str)}
                                else:
                                    existing_runtimes = {}
                        except (json.JSONDecodeError, TypeError):
                            # If parsing fails, might be simple numeric value (backward compatibility)
                            existing_tracker_name = None
                            if "tracker_name" in existing_record:
                                try:
                                    existing_tn = existing_record.get("tracker_name")
                                    if isinstance(existing_tn, str):
                                        try:
                                            existing_tn_list = json.loads(existing_tn)
                                            if isinstance(existing_tn_list, list) and existing_tn_list:
                                                existing_tracker_name = existing_tn_list[-1]
                                        except:
                                            existing_tracker_name = existing_tn
                                except:
                                    pass
                            if existing_tracker_name:
                                existing_runtimes = {existing_tracker_name: float(existing_tracker_runtime_str)}
                            else:
                                existing_runtimes = {}
                    elif isinstance(existing_tracker_runtime_str, dict):
                        existing_runtimes = existing_tracker_runtime_str
                    else:
                        # Numeric value (backward compatibility)
                        existing_tracker_name = None
                        if "tracker_name" in existing_record:
                            try:
                                existing_tn = existing_record.get("tracker_name")
                                if isinstance(existing_tn, str):
                                    try:
                                        existing_tn_list = json.loads(existing_tn)
                                        if isinstance(existing_tn_list, list) and existing_tn_list:
                                            existing_tracker_name = existing_tn_list[-1]
                                    except:
                                        existing_tracker_name = existing_tn
                            except:
                                pass
                        if existing_tracker_name:
                            existing_runtimes = {existing_tracker_name: float(existing_tracker_runtime_str)}
                        else:
                            existing_runtimes = {}
                # Merge: existing trackers preserved, new tracker updates/overwrites its entry
                merged_runtimes = existing_runtimes.copy()
                merged_runtimes[incoming_tracker_name] = new_runtime
                # Store merged tracker_runtime as JSON string for detailed tracking (per-tracker runtimes)
                record["tracker_runtimes"] = json.dumps(merged_runtimes)
                # Store the maximum (latest) runtime as a simple value in tracker_runtime for backward compatibility
                # This ensures staleness checks (now()-tracker_runtime) continue to work
                # The entity is considered "active" if ANY tracker has run recently
                # NOTE(review): merged_runtimes always contains at least the
                # incoming tracker entry, so the else branch is defensive.
                if merged_runtimes:
                    max_runtime = max(merged_runtimes.values())
                    record["tracker_runtime"] = max_runtime
                else:
                    record["tracker_runtime"] = new_runtime
            except (ValueError, TypeError) as e:
                logging.error(
                    f'instance_id="{self.instance_id}", failed to convert tracker_runtime for object="{record.get("object")}", '
                    f'exception="{str(e)}"'
                )
        elif new_tracker_runtime_value:
            # We have runtime but no tracker_name, store as simple value (backward compatibility)
            try:
                record["tracker_runtime"] = float(new_tracker_runtime_value)
            except (ValueError, TypeError):
                pass
    except Exception as e:
        # Best-effort: log and keep the record flowing through the pipeline
        logging.error(
            f'instance_id="{self.instance_id}", failed to merge tracker_runtime for object="{record.get("object")}", '
            f'exception="{str(e)}"'
        )
elif "tracker_runtime" in existing_record:
    # Field not in incoming record, preserve existing
    record["tracker_runtime"] = existing_record.get("tracker_runtime")
# --- disruption_min_time_sec merge ---
# Kept as a tracker-keyed JSON dict in the record; downstream consumers
# aggregate the maximum across trackers (per the inline comments below).
# Merge disruption_min_time_sec (take maximum value for concurrent trackers)
if "disruption_min_time_sec" in record:
    try:
        new_disruption_min_time_str = record.get("disruption_min_time_sec")
        existing_disruption_min_time_str = existing_record.get("disruption_min_time_sec") if "disruption_min_time_sec" in existing_record else None
        # Get incoming tracker name to use as key
        incoming_tracker_name = None
        if "tracker_name" in record:
            try:
                tracker_name_value = record.get("tracker_name")
                if isinstance(tracker_name_value, str):
                    try:
                        tracker_names = json.loads(tracker_name_value)
                        if isinstance(tracker_names, list) and tracker_names:
                            incoming_tracker_name = tracker_names[-1]
                    except (json.JSONDecodeError, TypeError):
                        incoming_tracker_name = tracker_name_value
                elif isinstance(tracker_name_value, list) and tracker_name_value:
                    incoming_tracker_name = tracker_name_value[-1]
            # NOTE(review): bare except — consider narrowing to Exception.
            except:
                pass
        # If we have a tracker name and disruption_min_time_sec value, convert to tracker-keyed JSON
        if incoming_tracker_name and new_disruption_min_time_str:
            try:
                new_disruption_min_time = None
                if isinstance(new_disruption_min_time_str, str):
                    try:
                        # Try parsing as tracker-keyed JSON
                        parsed = json.loads(new_disruption_min_time_str)
                        if isinstance(parsed, dict):
                            # Already tracker-keyed format
                            new_disruption_times = parsed
                        else:
                            # Simple numeric value, convert to tracker-keyed
                            new_disruption_min_time = int(float(new_disruption_min_time_str))
                            new_disruption_times = {incoming_tracker_name: new_disruption_min_time}
                    except (json.JSONDecodeError, TypeError, ValueError):
                        # Not JSON, treat as simple numeric value
                        new_disruption_min_time = int(float(new_disruption_min_time_str))
                        new_disruption_times = {incoming_tracker_name: new_disruption_min_time}
                elif isinstance(new_disruption_min_time_str, dict):
                    new_disruption_times = new_disruption_min_time_str
                else:
                    # Numeric value
                    new_disruption_min_time = int(float(new_disruption_min_time_str))
                    new_disruption_times = {incoming_tracker_name: new_disruption_min_time}
                # Parse existing disruption_min_time_sec (might be tracker-keyed JSON or simple value)
                existing_disruption_times = {}
                if existing_disruption_min_time_str:
                    if isinstance(existing_disruption_min_time_str, str):
                        try:
                            existing_disruption_times = json.loads(existing_disruption_min_time_str)
                            if not isinstance(existing_disruption_times, dict):
                                # If not a dict, treat as simple value (backward compatibility)
                                existing_tracker_name = None
                                if "tracker_name" in existing_record:
                                    try:
                                        existing_tn = existing_record.get("tracker_name")
                                        if isinstance(existing_tn, str):
                                            try:
                                                existing_tn_list = json.loads(existing_tn)
                                                if isinstance(existing_tn_list, list) and existing_tn_list:
                                                    existing_tracker_name = existing_tn_list[-1]
                                            except:
                                                existing_tracker_name = existing_tn
                                    except:
                                        pass
                                if existing_tracker_name:
                                    existing_disruption_times = {existing_tracker_name: int(float(existing_disruption_min_time_str))}
                                else:
                                    existing_disruption_times = {}
                        except (json.JSONDecodeError, TypeError, ValueError):
                            # If parsing fails, might be simple numeric value (backward compatibility)
                            existing_tracker_name = None
                            if "tracker_name" in existing_record:
                                try:
                                    existing_tn = existing_record.get("tracker_name")
                                    if isinstance(existing_tn, str):
                                        try:
                                            existing_tn_list = json.loads(existing_tn)
                                            if isinstance(existing_tn_list, list) and existing_tn_list:
                                                existing_tracker_name = existing_tn_list[-1]
                                        except:
                                            existing_tracker_name = existing_tn
                                except:
                                    pass
                            if existing_tracker_name:
                                existing_disruption_times = {existing_tracker_name: int(float(existing_disruption_min_time_str))}
                            else:
                                existing_disruption_times = {}
                    elif isinstance(existing_disruption_min_time_str, dict):
                        existing_disruption_times = existing_disruption_min_time_str
                    else:
                        # Numeric value (backward compatibility)
                        existing_tracker_name = None
                        if "tracker_name" in existing_record:
                            try:
                                existing_tn = existing_record.get("tracker_name")
                                if isinstance(existing_tn, str):
                                    try:
                                        existing_tn_list = json.loads(existing_tn)
                                        if isinstance(existing_tn_list, list) and existing_tn_list:
                                            existing_tracker_name = existing_tn_list[-1]
                                    except:
                                        existing_tracker_name = existing_tn
                            except:
                                pass
                        if existing_tracker_name:
                            existing_disruption_times = {existing_tracker_name: int(float(existing_disruption_min_time_str))}
                        else:
                            existing_disruption_times = {}
                # Merge: existing trackers preserved, new tracker updates/overwrites its entry
                merged_disruption_times = existing_disruption_times.copy()
                merged_disruption_times.update(new_disruption_times)
                # Store merged disruption_min_time_sec as JSON string for detailed tracking (per-tracker values)
                record["disruption_min_time_sec"] = json.dumps(merged_disruption_times)
                # Store the maximum value as a simple numeric value for backward compatibility
                # This ensures disruption_queue_lookup receives the highest value across all trackers
                if merged_disruption_times:
                    # NOTE(review): max_disruption_time is computed but never
                    # written back to the record — per the comments below the
                    # aggregation is performed downstream by
                    # trackmedecisionmaker; confirm before removing.
                    max_disruption_time = max(int(float(v)) for v in merged_disruption_times.values())
                    # Store as simple value for disruption queue (but keep tracker-keyed JSON in record)
                    # The disruption queue will use the aggregated maximum value from trackmedecisionmaker
                else:
                    # Fallback to new value if merged is empty
                    if new_disruption_min_time is not None:
                        record["disruption_min_time_sec"] = new_disruption_min_time
            except (ValueError, TypeError) as e:
                logging.error(
                    f'instance_id="{self.instance_id}", failed to convert disruption_min_time_sec for object="{record.get("object")}", '
                    f'exception="{str(e)}"'
                )
        elif new_disruption_min_time_str:
            # We have disruption_min_time_sec but no tracker_name, store as simple value (backward compatibility)
            try:
                if isinstance(new_disruption_min_time_str, str):
                    try:
                        parsed = json.loads(new_disruption_min_time_str)
                        if isinstance(parsed, dict):
                            # Tracker-keyed format but no tracker_name to use as key, take maximum
                            max_value = max(int(float(v)) for v in parsed.values())
                            record["disruption_min_time_sec"] = max_value
                        else:
                            record["disruption_min_time_sec"] = int(float(new_disruption_min_time_str))
                    except (json.JSONDecodeError, TypeError, ValueError):
                        record["disruption_min_time_sec"] = int(float(new_disruption_min_time_str))
                else:
                    record["disruption_min_time_sec"] = int(float(new_disruption_min_time_str))
            except (ValueError, TypeError):
                pass
    except Exception as e:
        # Best-effort: log and keep the record flowing through the pipeline
        logging.error(
            f'instance_id="{self.instance_id}", failed to merge disruption_min_time_sec for object="{record.get("object")}", '
            f'exception="{str(e)}"'
        )
elif "disruption_min_time_sec" in existing_record:
    # Field not in incoming record, preserve existing
    record["disruption_min_time_sec"] = existing_record.get("disruption_min_time_sec")
# --- max_sec_inactive merge ---
# Minimum-wins across concurrent trackers (most restrictive threshold),
# with 0 meaning "feature disabled" and excluded from the minimum.
# Merge max_sec_inactive (take minimum value for concurrent trackers - most restrictive)
if "max_sec_inactive" in record:
    try:
        new_max_sec_inactive_str = record.get("max_sec_inactive")
        existing_max_sec_inactive_str = existing_record.get("max_sec_inactive") if "max_sec_inactive" in existing_record else None
        # Get incoming tracker name to use as key
        incoming_tracker_name = None
        if "tracker_name" in record:
            try:
                tracker_name_value = record.get("tracker_name")
                if isinstance(tracker_name_value, str):
                    try:
                        tracker_names = json.loads(tracker_name_value)
                        if isinstance(tracker_names, list) and tracker_names:
                            incoming_tracker_name = tracker_names[-1]
                    except (json.JSONDecodeError, TypeError):
                        incoming_tracker_name = tracker_name_value
                elif isinstance(tracker_name_value, list) and tracker_name_value:
                    incoming_tracker_name = tracker_name_value[-1]
            # NOTE(review): bare except — consider narrowing to Exception.
            except:
                pass
        # If we have a tracker name and max_sec_inactive value, convert to tracker-keyed JSON
        if incoming_tracker_name and new_max_sec_inactive_str:
            try:
                new_max_sec_inactive = None
                if isinstance(new_max_sec_inactive_str, str):
                    try:
                        # Try parsing as tracker-keyed JSON
                        parsed = json.loads(new_max_sec_inactive_str)
                        if isinstance(parsed, dict):
                            # Already tracker-keyed format
                            new_max_sec_inactive_times = parsed
                        else:
                            # Simple numeric value, convert to tracker-keyed
                            new_max_sec_inactive = int(float(new_max_sec_inactive_str))
                            new_max_sec_inactive_times = {incoming_tracker_name: new_max_sec_inactive}
                    except (json.JSONDecodeError, TypeError, ValueError):
                        # Not JSON, treat as simple numeric value
                        new_max_sec_inactive = int(float(new_max_sec_inactive_str))
                        new_max_sec_inactive_times = {incoming_tracker_name: new_max_sec_inactive}
                elif isinstance(new_max_sec_inactive_str, dict):
                    new_max_sec_inactive_times = new_max_sec_inactive_str
                else:
                    # Numeric value
                    new_max_sec_inactive = int(float(new_max_sec_inactive_str))
                    new_max_sec_inactive_times = {incoming_tracker_name: new_max_sec_inactive}
                # Parse existing max_sec_inactive (might be tracker-keyed JSON or simple value)
                existing_max_sec_inactive_times = {}
                if existing_max_sec_inactive_str:
                    if isinstance(existing_max_sec_inactive_str, str):
                        try:
                            existing_max_sec_inactive_times = json.loads(existing_max_sec_inactive_str)
                            if not isinstance(existing_max_sec_inactive_times, dict):
                                # If not a dict, treat as simple value (backward compatibility)
                                existing_tracker_name = None
                                if "tracker_name" in existing_record:
                                    try:
                                        existing_tn = existing_record.get("tracker_name")
                                        if isinstance(existing_tn, str):
                                            try:
                                                existing_tn_list = json.loads(existing_tn)
                                                if isinstance(existing_tn_list, list) and existing_tn_list:
                                                    existing_tracker_name = existing_tn_list[-1]
                                            except:
                                                existing_tracker_name = existing_tn
                                    except:
                                        pass
                                if existing_tracker_name:
                                    existing_max_sec_inactive_times = {existing_tracker_name: int(float(existing_max_sec_inactive_str))}
                                else:
                                    existing_max_sec_inactive_times = {}
                        except (json.JSONDecodeError, TypeError, ValueError):
                            # If parsing fails, might be simple numeric value (backward compatibility)
                            existing_tracker_name = None
                            if "tracker_name" in existing_record:
                                try:
                                    existing_tn = existing_record.get("tracker_name")
                                    if isinstance(existing_tn, str):
                                        try:
                                            existing_tn_list = json.loads(existing_tn)
                                            if isinstance(existing_tn_list, list) and existing_tn_list:
                                                existing_tracker_name = existing_tn_list[-1]
                                        except:
                                            existing_tracker_name = existing_tn
                                except:
                                    pass
                            if existing_tracker_name:
                                existing_max_sec_inactive_times = {existing_tracker_name: int(float(existing_max_sec_inactive_str))}
                            else:
                                existing_max_sec_inactive_times = {}
                    elif isinstance(existing_max_sec_inactive_str, dict):
                        existing_max_sec_inactive_times = existing_max_sec_inactive_str
                    else:
                        # Numeric value (backward compatibility)
                        existing_tracker_name = None
                        if "tracker_name" in existing_record:
                            try:
                                existing_tn = existing_record.get("tracker_name")
                                if isinstance(existing_tn, str):
                                    try:
                                        existing_tn_list = json.loads(existing_tn)
                                        if isinstance(existing_tn_list, list) and existing_tn_list:
                                            existing_tracker_name = existing_tn_list[-1]
                                    except:
                                        existing_tracker_name = existing_tn
                            except:
                                pass
                        if existing_tracker_name:
                            existing_max_sec_inactive_times = {existing_tracker_name: int(float(existing_max_sec_inactive_str))}
                        else:
                            existing_max_sec_inactive_times = {}
                # Merge: existing trackers preserved, new tracker updates/overwrites its entry
                merged_max_sec_inactive_times = existing_max_sec_inactive_times.copy()
                merged_max_sec_inactive_times.update(new_max_sec_inactive_times)
                # Store the minimum value as a simple numeric value for backward compatibility
                # This ensures inactive entity detection uses the most restrictive (lowest) value
                # The entity is considered inactive if ANY tracker's threshold is exceeded
                # The inactive inspector reads max_sec_inactive directly from KVstore as a numeric value
                if merged_max_sec_inactive_times:
                    # Filter out 0 values (which disable the feature) before taking minimum
                    non_zero_values = [int(float(v)) for v in merged_max_sec_inactive_times.values() if int(float(v)) > 0]
                    if non_zero_values:
                        min_max_sec_inactive = min(non_zero_values)
                        # Store minimum value as simple numeric for inactive inspector
                        record["max_sec_inactive"] = min_max_sec_inactive
                    else:
                        # All values are 0, store 0 (disabled)
                        record["max_sec_inactive"] = 0
                else:
                    # Fallback to new value if merged is empty
                    if new_max_sec_inactive is not None:
                        record["max_sec_inactive"] = new_max_sec_inactive
            except (ValueError, TypeError) as e:
                logging.error(
                    f'instance_id="{self.instance_id}", failed to convert max_sec_inactive for object="{record.get("object")}", '
                    f'exception="{str(e)}"'
                )
        elif new_max_sec_inactive_str:
            # We have max_sec_inactive but no tracker_name, store as simple value (backward compatibility)
            try:
                if isinstance(new_max_sec_inactive_str, str):
                    try:
                        parsed = json.loads(new_max_sec_inactive_str)
                        if isinstance(parsed, dict):
                            # Tracker-keyed format but no tracker_name to use as key, take minimum
                            non_zero_values = [int(float(v)) for v in parsed.values() if int(float(v)) > 0]
                            if non_zero_values:
                                min_value = min(non_zero_values)
                                record["max_sec_inactive"] = min_value
                            else:
                                record["max_sec_inactive"] = 0
                        else:
                            record["max_sec_inactive"] = int(float(new_max_sec_inactive_str))
                    except (json.JSONDecodeError, TypeError, ValueError):
                        record["max_sec_inactive"] = int(float(new_max_sec_inactive_str))
                else:
                    record["max_sec_inactive"] = int(float(new_max_sec_inactive_str))
            except (ValueError, TypeError):
                pass
    except Exception as e:
        # Best-effort: log and keep the record flowing through the pipeline
        logging.error(
            f'instance_id="{self.instance_id}", failed to merge max_sec_inactive for object="{record.get("object")}", '
            f'exception="{str(e)}"'
        )
elif "max_sec_inactive" in existing_record:
    # Field not in incoming record, preserve existing
    record["max_sec_inactive"] = existing_record.get("max_sec_inactive")
# --- final per-record assembly ---
# Copy every field (except _time) into summary_record, honouring the
# persistent-fields contract for records that already existed in KVstore.
# NOTE(review): summary_record is populated here but record (not
# summary_record) is appended to final_records — presumably summary_record
# is consumed elsewhere; confirm against the surrounding method.
# loop through the dict
for k in record:
    logging.debug(f'instance_id="{self.instance_id}", field="{k}", value="{record[k]}"')
    # Exclude the event time, add existing fields
    if k != "_time":
        #
        # handle persistent field
        #
        if not record_is_new:
            # if field is in persistent list of fields
            if k in persistent_fields:
                # preserve persistent fields if conflict update is detected
                if conflict_update:
                    summary_record[k] = collection_dict[
                        record.get(self.key)
                    ].get(k)
                else:
                    summary_record[k] = record[k]
            # normal field
            else:
                summary_record[k] = record[k]
        else:
            # record is new, no need to consider it
            summary_record[k] = record[k]
# insert and update the collection if requested
final_records.append(record)
# log
logging.debug(f'instance_id="{self.instance_id}", final_records={json.dumps(final_records, indent=2)}')
# --- KVstore batch update task ---
# Runs the batch update synchronously via batch_update_worker, logs the
# task timing, then yields the records back to the Splunk pipeline.
# set task
#
task_start = time.time()
task_instance_id = get_uuid()
task_name = "kvstore_batch_update"
# Execute batch update synchronously
batch_update_worker(
    target_collection, collection, final_records, self.instance_id, task_instance_id, task_name=task_name, max_multi_thread_workers=max_multi_thread_workers
)
# end task
#
task_end = time.time()
task_run_time = round((task_end - task_start), 3)
logging.info(
    f'instance_id={self.instance_id}, task="{task_name}", task_instance_id={task_instance_id}, task_run_time="{task_run_time}", task_end=1, task has terminated. no_records="{len(final_records)}", collection="{self.collection}"'
)
# Yield records back to Splunk pipeline
for record in final_records:
    yield record
# perf counter for the entire call
logging.info(
    f'instance_id="{self.instance_id}", trackmepersistentfields has terminated, collection="{self.collection}", key="{self.key}", update_collection="{self.update_collection}", run_time="{round((time.time() - start_time), 3)}"'
)
dispatch(TrackMePersistentHandler, sys.argv, sys.stdin, sys.stdout, __name__)