# Copyright (C) 2005-2024 Splunk Inc. All Rights Reserved. import itsi_py3 from itsi_py3 import _ from splunk.util import normalizeBoolean import ITOA.itoa_common as utils from ITOA.itoa_exceptions import ItoaDatamodelContextError from ITOA.itoa_object import ItoaObject from ITOA.saved_search_utility import SavedSearch from ITOA.setup_logging import logger from itsi.itsi_utils import ITOAInterfaceUtils from itsi.itsi_utils import GLOBAL_SECURITY_GROUP_CONFIG from itsi.searches.itsi_searches import ItsiKpiSearches BASE_SEARCH_KPI_ATTRIBUTES = [ 'base_search', 'search_alert_earliest', 'is_entity_breakdown', 'entity_id_fields', 'entity_breakdown_id_fields', 'alert_period', 'alert_lag', 'is_service_entity_filter', 'metric_qualifier', 'sec_grp', 'is_metric' ] DEFAULT_VALUE_KPI_ATTRIBUTES_DICT = { 'search_alert_earliest': '5', 'metric_qualifier': '', 'alert_period': '5', 'alert_lag': '30', '_owner': 'nobody' } DEFAULT_GAP_SEVERITY_ATTRIBUTES_VALUES = { 'gap_severity': 'unknown', 'gap_severity_color': '#CCCCCC', 'gap_severity_value': '-1', 'gap_severity_color_light': '#EEEEEE' } ANOMALY_DETECTION_ATTRBUTES = [ 'anomaly_detection_is_enabled', 'cohesive_anomaly_detection_is_enabled', 'anomaly_detection_alerting_enabled', 'trending_ad', 'cohesive_ad' ] SEARCH_AND_CALCULATE_ATTRIBUTES = [ 'base_search_id', 'is_service_entity_filter', 'is_entity_breakdown', 'entity_breakdown_id_fields', 'alert_period', 'base_search', 'search_alert_earliest', 'title', 'alert_lag', 'unit', 'entity_id_fields', 'threshold_field', 'search_type', 'entity_statop', 'base_search_metric', 'aggregate_statop', 'metric_qualifier', 'fill_gaps', 'gap_custom_alert_value', ] + list(DEFAULT_GAP_SEVERITY_ATTRIBUTES_VALUES.keys()) GENERATED_SEARCH_ATTRIBUTES = [ 'search', 'search_aggregate', 'kpi_base_search', 'search_entities', 'search_time_series', 'search_time_series_aggregate', 'search_time_series_entities', 'search_time_compare', 'search_alert', 'search_alert_entities' ] BACKFILL_ATTRIBUTES = [ 'backfill_enabled', 'backfill_earliest_time' ] THRESHOLDS_ATTRIBUTES = [ 'kpi_threshold_template_id', 'tz_offset', 'time_variate_thresholds', 'adaptive_thresholds_is_enabled', 'adaptive_thresholding_training_window', 'aggregate_thresholds', 'entity_thresholds', 'time_variate_thresholds_specification', 'aggregate_thresholds_alert_enabled', 'aggregate_thresholds_custom_alert_enabled', 'aggregate_thresholds_custom_alert_rules' ] BASE_SEARCH_METRIC_KPI_ATTRIBUTES = [ 'threshold_field', 'unit', 'entity_statop', 'aggregate_statop', 'fill_gaps', 'gap_custom_alert_value', ] + list(DEFAULT_GAP_SEVERITY_ATTRIBUTES_VALUES.keys()) RECOMMENDATION_RESTORE_EXCLUDE_FIELDS = [ 'is_recommended_time_policies', 'was_recommendation_modified', 'did_load_recommendation', 'recommendation_training_window', 'recommendation_start_date', 'threshold_direction', ] # Fields that are returned in the pseudo-KPI object that aren't part of it natively EXTERNAL_FIELDS = set([ "services_depending_on_me", ]) def validate_data_gaps_filling_options(object, object_type='kpi'): """ Validate data gaps filling attribute (fill_gaps) for a KPI or a metric in a KPI Base Search. @type object: dict @param object: KPI object or metric object in KPI Base Search @type object_type: basestring @param object_type: type of object for which validation is being performed, 'kpi' or 'kpi_base_search' @return: True, if validation is successful. else, False. """ supported_options = ['null_value', 'custom_value', 'last_available_value'] msg = '' obj_name = 'kpi' if object_type != 'kpi': obj_name = 'metric' if not object.get('fill_gaps') or object['fill_gaps'] == 'null_value': # Case: if fill_gaps field is missing, consider it a 'null_value' scenario and validate gap severities. object['fill_gaps'] = 'null_value' else: # Case: validate 'custom_value' or 'last_available_value' options for fill_gaps attribute if object['fill_gaps'] not in supported_options: msg = _('Invalid option provided to fill data gaps for object "%s". option_provided="%s", ' 'supported_options="%s", %s_key="%s"') % \ (object_type, object['fill_gaps'], supported_options, obj_name, object.get('_key')) return False, msg if object['fill_gaps'] == 'custom_value': if 'gap_custom_alert_value' not in object: msg = _('Custom value to fill data gaps not provided. Provide a static value when "custom_value" ' 'option is selected to fill data gaps. %s_key="%s", missing_field="gap_custom_alert_value"') \ % (obj_name, object.get('_key')) return False, msg if not utils.is_string_numeric(object['gap_custom_alert_value']): msg = _('Custom value provided to fill in data gap is not valid. Only numeric values are accepted.' ' %s_key="%s", invalid_field="gap_custom_alert_value"') % (obj_name, object.get('_key')) return False, msg # only allow positive number as custom values for data gaps if float(object['gap_custom_alert_value']) < 0: msg = _('Only positive numeric values are accepted to fill data gaps for object "%s". ' 'Negative value provided. %s_key="%s"') % (object_type, obj_name, object.get('_key')) return False, msg any_gap_severity_field_missing = False for field in list(DEFAULT_GAP_SEVERITY_ATTRIBUTES_VALUES.keys()): if field not in object: any_gap_severity_field_missing = True break if any_gap_severity_field_missing: # if any of the gap severity fields is missing, set all the gap severity fields to default values for field in list(DEFAULT_GAP_SEVERITY_ATTRIBUTES_VALUES.keys()): object[field] = DEFAULT_GAP_SEVERITY_ATTRIBUTES_VALUES[field] # if 'gap_custom_alert_value' field is missing, then add it with default value (0) if 'gap_custom_alert_value' not in object: object['gap_custom_alert_value'] = '0' return True, msg def convert_filter_to_kpi_filter(filter_data): """ Convert a KPI filter subcomponent to its "correct" form (since KPI's aren't real objects of the KVstore) This is a recursive call used for parsing. Reference: https://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTkvstore $not is not supported, as pwu can't find any documentation on how it's supposed to work in the first place. :param filter_data: KVstore filter subcomponent :type filter_data: any :return: KVstore filter subcomponent :rtype: any """ if type(filter_data) is dict: rv = {} for key, value in filter_data.items(): if key == "$ne": """ Dirty hack for fetching objects with multi-value fields by fetching all objects and relying on apply_filter_to_results() to filter correctly. Otherwise, the query {"foo": {"$ne": 0}} can't fetch a service like: { ... "kpis": [ {"foo": 0}, {"foo": 1}, ], ... } pwu: I'm willing to bet $100 that no customer will ever query this value organically. """ rv[key] = "H1A2C3K4Y5V5A6L" elif key.startswith("$"): rv[key] = convert_filter_to_kpi_filter(value) else: rv["kpis.%s" % key] = convert_filter_to_kpi_filter(value) return rv elif type(filter_data) is list: return [convert_filter_to_kpi_filter(subdata) for subdata in filter_data] else: return filter_data def convert_kpi_to_modified_kpi(service_obj, kpi_obj): """ Convert a KPI into a modified for use in the KPI API. This modifies the base KPI. This function defines what a modified KPI is. :param service_obj: Service object containing the target KPI :type service_obj: dict :param kpi_obj: Target KPI :type kpi_obj: dict :return: Target KPI, modified :rtype: dict """ kpi_obj["object_type"] = "modified_kpi" kpi_obj["services_depending_on_me"] = [] kpi_obj["permissions"] = service_obj["permissions"] kpi_obj["sec_grp"] = service_obj["sec_grp"] key = kpi_obj["_key"] for obj in service_obj.get("services_depending_on_me", []): if key in obj["kpis_depending_on"]: kpi_obj["services_depending_on_me"].append(obj["serviceid"]) return kpi_obj class ItsiKpi(ItoaObject): """ Implements ITSI KPI """ collection_name = 'itsi_services' def __init__(self, session_key, current_user_name): super(ItsiKpi, self).__init__(session_key, current_user_name, 'kpi', collection_name=self.collection_name) @staticmethod def get_kpi_saved_search_name(kpi_id): if not isinstance(kpi_id, itsi_py3.string_type): message = _('Invalid type="%s" for kpi_id. Expecting string type.') % type(kpi_id).__name_ logger.error(message) raise TypeError(message) return 'Indicator - ' + kpi_id + ' - ITSI Search' def _populate_with_base_search_attr(self, kpi, sec_grp): """ Populate given KPI object with base search attributes if applicable. @type kpi: dict @param kpi: kpi object to populate @type sec_grp: basestring @param sec_grp: security group of service @rtype: None @returns: Nothing. Given KPI object is modified in-place. """ if not isinstance(kpi, dict): message = _('Invalid type="%s" for KPI. Expecting a dictionary.') % type(kpi).__name__ logger.error(message) raise TypeError(message) if kpi.get('search_type') != 'shared_base': # guard against inadvertent call logger.warning('Search type="%s" not applicable. Will pass.', kpi.get('search_type')) return backend = self.storage_interface.get_backend(self.session_key) shared_base_search = backend.get(self.session_key, 'nobody', 'kpi_base_search', kpi.get('base_search_id')) if not isinstance(shared_base_search, dict): msg = _('Base search with ID: "%s" does not exist. No attributes to populate.') % kpi.get('base_search_id') logger.warning(msg) return if shared_base_search.get('sec_grp') not in [sec_grp, GLOBAL_SECURITY_GROUP_CONFIG.get('key')]: self.raise_error(logger, _('Shared base search configured on KPI "%s" does not match security ' 'group of KPI/Service. Check the team on the service.') % kpi.get('title')) for attr in BASE_SEARCH_KPI_ATTRIBUTES: kpi[attr] = shared_base_search.get(attr, '') metrics = shared_base_search.get('metrics', []) for metric in metrics: if isinstance(metric, dict) and metric.get('_key') != kpi.get('base_search_metric'): continue # configured kpi isnt concerned with this metric for attr in BASE_SEARCH_METRIC_KPI_ATTRIBUTES: kpi[attr] = metric.get(attr, '') break # there can be only one selected metric, we got ours. return def _gen_and_update_searches(self, kpi, service_entity_rules, sec_grp): """ Update KPI search strings for given KPI @type kpi: dict @param kpi: kpi object @type service_entity_rules: list @param service_entity_rules: entity rules corresponding to the service @type: basestring @param sec_grp: security group of the service @rtype: none @return: nothing. updates search strings in the KPI passed in. """ # Now generate search strings for KPI & update the KPI. searches = ItsiKpiSearches(self.session_key, kpi, service_entity_rules, sec_grp=sec_grp).gen_kpi_searches(gen_alert_search=True) kpi['kpi_base_search'] = searches['kpi_base_search'] kpi['search'] = searches['alert_search'] kpi['search_aggregate'] = searches['single_value_search'] kpi['search_entities'] = searches['single_value_search'] kpi['search_time_series'] = searches['time_series_search'] kpi['search_time_series_aggregate'] = searches['time_series_search'] kpi['search_time_series_entities'] = searches['entity_time_series_search'] kpi['search_time_compare'] = searches['compare_search'] kpi['search_alert'] = searches['alert_search'] # Assume search fields are always present if kpi.get('search_type', 'adhoc') == 'datamodel': # Assume default to be true to avoid accidental overwrite here # User specifies base_search for adhoc searches but is generated # for datamodel searches, so set it explicitly after generation kpi['base_search'] = kpi['kpi_base_search'] # KPI thresholds searches need to be updated too. # we do not need to save search strings in threshold objects if (isinstance(kpi.get('aggregate_thresholds'), dict) and isinstance(kpi['aggregate_thresholds'].get('search'), itsi_py3.string_type)): kpi['aggregate_thresholds']['search'] = '' if (isinstance(kpi.get('entity_thresholds'), dict) and isinstance(kpi['entity_thresholds'].get('search'), itsi_py3.string_type)): kpi['entity_thresholds']['search'] = '' # KPI time variate threshold searches need to be updated too policies = iter(kpi.get('time_variate_thresholds_specification', {}).get('policies', {}).values()) for policy in policies: if isinstance(policy, dict): aggregate_thresholds = policy['aggregate_thresholds'] if 'search' in aggregate_thresholds: aggregate_thresholds['search'] = '' entity_thresholds = policy['entity_thresholds'] if 'search' in entity_thresholds: entity_thresholds['search'] = '' return def populate(self, kpi, service_entity_rules, service_id, service_title, service_is_enabled, sec_grp): """ populate a KPI object. @type kpi: dict @param kpi: kpi object @type service_entity_rules: list @param service_entity_rules: entity rules for service @type service_id: basestring @param service_id: identifier of the service @type service_title: basestring @param service_title: title of the service @type service_is_enabled: boolean @param service_is_enabled: Indicates if service is enabled or disabled. @type: basestring @param sec_grp: security group of the service @rtype None @returns: nothing. in-place population. """ if not isinstance(kpi, dict): raise TypeError(_('Invalid type for KPI. Expecting a dictionary.')) if not isinstance(service_entity_rules, list): # if service_entity_rules is set to none explictly, convert to list if service_entity_rules is None: service_entity_rules = [] else: raise TypeError(_('Invalid type for service_entity_rules. Expecting valid list')) if not isinstance(service_id, itsi_py3.string_type): raise TypeError(_('Invalid type for service_id. Expecting valid string.')) if not isinstance(service_title, itsi_py3.string_type): raise TypeError(_('Invalid type for service_title. Expecting valid string.')) if not isinstance(service_is_enabled, int): raise TypeError(_('Invalid type for service_is_enabled. Expecting int.')) if 'search_occurrences' in kpi: kpi['search_occurrences'] = int(kpi['search_occurrences']) # Convert to valid number kpi['service_id'] = service_id kpi['service_title'] = service_title kpi['enabled'] = service_is_enabled kpi['backfill_earliest_time'] = kpi.get('backfill_earliest_time', '-7d').strip().replace(' ', '') if kpi.get('search_type') == 'shared_base': self._populate_with_base_search_attr(kpi, sec_grp) self._gen_and_update_searches(kpi, service_entity_rules, sec_grp) def escape_backslash_and_double_quote_in_kpi_title(self, kpi_title): """ Escape backslash and quote in the kpi title. Args: kpi_title (string): Kpi title Returns: string : escaped Kpi title """ escaped_kpi = '' for char in kpi_title: if char == '\\' or char == '"': escaped_kpi += '\\' escaped_kpi += char return escaped_kpi def generate_saved_search_settings(self, kpi, service_entity_rules, sec_grp, acl_update=True, sync_schedule_disabled=True): """ Generate a dictionary representing settings (kv pairs) for a savedsearches.conf stanza @type kpi: dict @param kpi: corresponding kpi object @type service_entity_rules: list @param service_entity_rules: entity rules for the given service @type: basestring @param sec_grp: security group of the service @rtype: dict @param: requested saved search settings """ saved_search_settings = {} saved_search_id = self.get_kpi_saved_search_name(kpi['_key']) saved_search_settings['name'] = saved_search_id kpi_copy = kpi.copy() # Created copy of kpi because we only escape the kpi title # in the generated search not in kpi object kpi_copy['title'] = self.escape_backslash_and_double_quote_in_kpi_title(kpi_copy.get('title')) # NOTE: We set generate_entity_filter to true so that we use the entity_filter lookup macro searches = ItsiKpiSearches( self.session_key, kpi_copy, service_entity_rules, generate_entity_filter=True, sec_grp=sec_grp ).gen_kpi_searches( gen_alert_search=True ) saved_search_settings['search'] = searches['alert_search'] saved_search_settings['description'] = 'Auto created scheduled search during kpi creation' saved_search_settings['disabled'] = '0' if kpi.get('enabled') == 1 else '1' # Handle the timing of a KPI search, some data may not be coming in real time so we allow for a # configurable lag in the KPI search up to 30 minutes, our values are in seconds for lag, minutes for # earliest, so we convert all searches to seconds based time modifiers alert_lag = int(kpi.get('alert_lag', 30)) alert_earliest = int(kpi.get('search_alert_earliest', 5)) * 60 if alert_lag == 0: # Real Time case we need to set latest time to now saved_search_settings['dispatch.earliest_time'] = '-' + str(alert_earliest) + 's' saved_search_settings['dispatch.latest_time'] = 'now' elif alert_lag <= 1800: # Normal Case, adjust search timing to account for the lag saved_search_settings['dispatch.earliest_time'] = '-' + str(alert_earliest + alert_lag) + 's' saved_search_settings['dispatch.latest_time'] = '-' + str(alert_lag) + 's' else: raise ValueError(_("Invalid alert_lag passed to saved search management, must be below 30 minutes")) saved_search_settings['enableSched'] = '1' # Regenerate a random cron every time in order to take into account a change in the alert period # Technically this means on save there is a potential for a kpi to execute slightly off rhythm at # the point of save if the start point of the cron changes for a 5 or 15 period kpi saved_search_settings['cron_schedule'] = SavedSearch.generate_cron_schedule(kpi.get('alert_period', 5), sync_schedule_disabled) saved_search_settings['alert.suppress'] = '0' saved_search_settings['alert.track'] = '0' saved_search_settings['alert.digest_mode'] = '1' saved_search_settings['actions'] = 'indicator' saved_search_settings['action.indicator._itsi_kpi_id'] = kpi.get('_key', '') saved_search_settings['action.indicator._itsi_service_id'] = kpi.get('service_id', '') saved_search_settings['kpi_title'] = kpi.get('title', '') saved_search_settings['acl_update'] = acl_update return saved_search_settings def check_perc_value(self, statop): """ Checks for valid stats operator. If the stats operator is percNN, make sure NN is within the valid percentage range @type statop: string @param statop: stats operator @type return: None @param return: raises exceptions on invalid stats operators """ if not utils.is_stats_operation(statop): self.raise_error_bad_validation( logger, _('An invalid aggregation operator is specified for a KPI. ' 'Refer to the ITSI documentation for a list of valid operators and syntax.') ) # if the statop is 'percNN', validate the percentage range # the format of string 'perc' has already been validated by is_stats_operation() if 'perc' in statop: if not utils.is_valid_perc(statop[4:]): self.raise_error_bad_validation( logger, _('Invalid percentile value enter, the value must be a whole number between ' '1 and 99.') ) def _set_entity_breakdown_field(self, kpi): """ Set entity_breakdown_id_fields to entity_id_fields, if entity_breakdown_id_fields is missing or empty. @param kpi: kpi object @return: None """ # PBL-5603: changes made in this story, allow user to split KPI by a different entity field from # entity filtering field. As a part of this change, new field 'entity_breakdown_id_fields' # was added to kpi object. To guard against migration issues and cases where # 'entity_breakdown_id_fields' would be missing in kpi object, added following check. We fall back # to 'entity_id_fields', in cases when 'entity_breakdown_id_fields' is missing. if kpi.get('is_entity_breakdown', False): entity_breakdown_id_fields = kpi.get('entity_breakdown_id_fields', None) if entity_breakdown_id_fields is None or len(entity_breakdown_id_fields) == 0: kpi['entity_breakdown_id_fields'] = kpi.get('entity_id_fields', '') logger.debug('entity_breakdown_id_fields missing from kpi object = {}. ' 'Setting it to entity_id_fields.'.format(kpi.get('_key'))) def validate_kpi_basic_structure(self, kpi, for_base_service_template=False): """ Validate only the KPI level validation, skips any validation that depends on parent object. @type kpi: iterable list @param kpi: a valid list of KPIs in json format @type return: None @param return: None, raise exceptions on invalid KPIs """ if not utils.is_valid_str(kpi.get('title')): self.raise_error_bad_validation(logger, _('KPIs must have a valid title.')) ITOAInterfaceUtils.validate_aggregate_thresholds(kpi) ITOAInterfaceUtils.validate_entity_thresholds(kpi) if not for_base_service_template: field_validation_list = ['backfill_enabled', 'time_variate_thresholds', 'adaptive_thresholds_is_enabled', 'anomaly_detection_is_enabled', 'cohesive_anomaly_detection_is_enabled', 'anomaly_detection_alerting_enabled'] else: # do not need to validate backfill fields for Base Service Template KPIs field_validation_list = ['time_variate_thresholds', 'adaptive_thresholds_is_enabled', 'anomaly_detection_is_enabled', 'cohesive_anomaly_detection_is_enabled', 'anomaly_detection_alerting_enabled'] for field_name in field_validation_list: if field_name not in kpi: kpi[field_name] = False else: field_value = kpi[field_name] kpi[field_name] = normalizeBoolean(field_value, enableStrictMode=True) alert_on = kpi.get('alert_on') if alert_on and alert_on not in ['aggregate', 'entity', 'both']: kpi['alert_on'] = 'aggregate' alert_period = kpi.get('alert_period') if alert_period and (utils.is_string_numeric(alert_period) or utils.is_valid_num(alert_period)): kpi['alert_period'] = int(alert_period) self._set_entity_breakdown_field(kpi) is_valid, msg = validate_data_gaps_filling_options(kpi) if not is_valid: self.raise_error_bad_validation(logger, msg) # Validate if minimal fields for search are populated if 'search_type' in kpi: search_type = kpi['search_type'] if search_type != 'datamodel' and search_type != 'metric': if not utils.is_valid_str(kpi.get('base_search')): if search_type == 'shared_base': backend = self.storage_interface.get_backend(self.session_key) shared_base_search = backend.get(self.session_key, 'nobody', 'kpi_base_search', kpi.get('base_search_id')) if shared_base_search: if normalizeBoolean(shared_base_search.get('is_metric')): kpi['metric'] = shared_base_search.get('metric', {}) else: self.raise_error_bad_validation( logger, _('Shared base KPIs does not seem to have populated a base search. ' 'Specify a base search for the KPI.') ) else: self.raise_error_bad_validation( logger, _('Ad hoc search KPIs does not seem to have populated a base search. ' 'Specify a base search for the KPI.') ) if not utils.is_valid_str(kpi.get('threshold_field')): if search_type == 'shared_base': backend = self.storage_interface.get_backend(self.session_key) shared_base_search = backend.get(self.session_key, 'nobody', 'kpi_base_search', kpi.get('base_search_id')) if shared_base_search and not normalizeBoolean(shared_base_search.get('is_metric')): self.raise_error_bad_validation( logger, _('A valid threshold field is not specified for a KPI with shared base search. ' 'Threshold fields must be specified for shared base search KPIs.') ) else: self.raise_error_bad_validation( logger, _('A valid threshold field is not specified for a KPI with ad hoc search. ' 'Threshold fields must be specified for ad hoc search based KPIs.') ) elif search_type == 'metric': # metric based KPI metric_search_spec = kpi.get('metric') if not utils.is_valid_dict(metric_search_spec): self.raise_error_bad_validation( logger, _('Metric search KPIs do not seem to have specified a metric search. ' 'Specify a metric based search for the KPI.') ) if (not (utils.is_valid_str(metric_search_spec.get('metric_index')) and utils.is_valid_str(metric_search_spec.get('metric_name')))): self.raise_error_bad_validation( logger, _('Metric search based KPI does not seem to have specified a valid metric search. ' 'Specify metric based search KPIs with all mandatory fields: ' 'metric_index, metric_name.') ) else: # Datamodel search based KPI datamodel_search_spec = kpi.get('datamodel') if not utils.is_valid_dict(datamodel_search_spec): self.raise_error_bad_validation( logger, _('Data model search-based KPI does not seem to have specified a valid data model search. ' 'Specify a data model search-based KPI.') ) if (not (utils.is_valid_str(datamodel_search_spec.get('datamodel')) and utils.is_valid_str(datamodel_search_spec.get('object')) and utils.is_valid_str(datamodel_search_spec.get('field')) and utils.is_valid_str(datamodel_search_spec.get('owner_field')))): self.raise_error_bad_validation( logger, _('Data model search-based KPI does not seem to have specified a valid data model search. ' 'Specify a data model search-based KPI with all mandatory fields: ' 'data model, object, field and owner_field.') ) datamodel_filters = kpi.get('datamodel_filter', []) if not utils.is_valid_list(datamodel_filters): self.raise_error_bad_validation( logger, _('Data model filters must be an array of filters. ' 'Found a KPI with an invalid specification for data model filters.') ) for datamodel_filter in datamodel_filters: if not utils.is_valid_dict(datamodel_filter): self.raise_error_bad_validation( logger, _('Each data model filter must be a valid JSON filter specification. ' 'Found a KPI with an invalid specification for a data model filter.') ) if not (utils.is_valid_str(datamodel_filter.get('_field')) and utils.is_valid_str(datamodel_filter.get('_value'))): self.raise_error_bad_validation( logger, _('Each data model filter must specify a field and value. ' 'Found a KPI with no field or value specified for a data model filter.') ) filter_operator = datamodel_filter.get('_operator') if not (utils.is_valid_str(filter_operator) and (filter_operator in ['=', '>', '<'])): self.raise_error_bad_validation( logger, _('Each data model filter operator must be =, < or >. ' 'Found a KPI with invalid operator specified for a data model filter.') ) # aggregate_statop is a mandatory field, check the syntax aggregate_statop = kpi.get('aggregate_statop') if isinstance(aggregate_statop, itsi_py3.string_type): self.check_perc_value(aggregate_statop) else: # We can infer it from the old statop field if that is present old_statop = kpi.get('statop') if isinstance(old_statop, itsi_py3.string_type): self.check_perc_value(old_statop) kpi['aggregate_statop'] = old_statop else: self.raise_error_bad_validation( logger, _('A valid aggregation operator is not specified for a KPI. Aggregate operator must be specified.') ) # entity_statop is an optional field, need to check syntax if it exists entity_statop = kpi.get('entity_statop') if isinstance(entity_statop, itsi_py3.string_type): self.check_perc_value(entity_statop) alert_lag = kpi.get('alert_lag') try: if alert_lag is not None: alert_lag = int(alert_lag) else: alert_lag = 30 except Exception: self.raise_error_bad_validation( logger, _('Invalid alert_lag, must be a positive integer less than 1800 (in s = 30 minutes).') ) if alert_lag >= 1800: # 30 minutes enforced due to restrictions of the health scoring system for services self.raise_error_bad_validation( logger, _('Invalid alert_lag, must be a positive integer less than 1800 (in s = 30 minutes). ' 'Specified: {0}').format(alert_lag) ) def convert_invalid_datamodel_kpi_to_adhoc(self, kpi, cached_datamodel_dict): """ Fix up datamodel KPIs with invalid datamodels - this is to avoid errors during saving in several scenarios IMPORTANT: this should really be used only in upgrade scenarios. In standard configuration, this is not needed. @type kpi: dict @param kpi: a single KPI @type cached_datamodel_dict: dict @param kpi: a prefetched list of datamodels @type return: boolean @param return: True if a conversion was performed """ if kpi.get('search_type', '') == 'datamodel': try: datamodel_spec = kpi.get('datamodel', {}) ItsiKpiSearches.get_datamodel_context(self.session_key, 'nobody', datamodel_spec.get('field'), datamodel_spec.get('datamodel'), datamodel_object_name=datamodel_spec.get('object'), cached_datamodel_dict=cached_datamodel_dict) except ItoaDatamodelContextError: ''' Mark the searches as invalid adhoc searches to provide a cue in KPI config. Altering the search is needed since an invalid datamodel search will fail saved search creation. ''' kpi['search_type'] = 'adhoc' kpi['base_search'] = 'Invalid datamodel search "' + kpi.get('base_search', '') + '"' logger.error('Found KPI (Id: %s) with stale datamodel specification. Auto converting ' 'this KPI to adhoc search type to prevent migration/upgrade failures.', kpi.get('_key')) return True return False