You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

268 lines
10 KiB

# Copyright (C) 2005-2025 Splunk Inc. All Rights Reserved.
from json.decoder import JSONDecodeError
from ITOA.saved_search_utility import APP_FILTER_PREFIX, SavedSearch
from ITOA.setup_logging import setup_logging
from itsi.content_packs.constants import ContentPackFields
from itsi.content_packs.readers import ContentPackMetadataReader
from itsi.content_packs.readers import get_content_pack_conf
from itsi.content_packs.readers import get_content_packs_conf
from itsi.content_packs.entitlements_processor import ContentPackEntitlementsProcessor
from itsi.objects.itsi_content_pack_saved_search_status import ItsiContentPackSavedSearchStatus
from SA_ITOA_app_common.splunklib import client, results
from SA_ITOA_app_common.splunklib.binding import HTTPError
import time
# Module-level logger for all content-pack retrieval operations; output goes
# to itsi_content_packs_retrieve.log (referenced in user-facing error hints below).
LOGGER = setup_logging(
logfile_name='itsi_content_packs_retrieve.log',
logger_name='itsi.content_packs.retrieve'
)
def _record_read_failure(failures, error_message, entry, exc, code, message, action):
    """Log *exc* for *entry*, populate *error_message*, and append it to *failures*."""
    LOGGER.error('Error while reading content pack entry="%s"', entry)
    LOGGER.exception(exc)
    error_message['error_code'] = code
    error_message['error_message'] = message
    error_message['action_detail'] = action
    failures.append(error_message)
def retrieve_all(getargs=None, session_key=None):
    """
    Return all the content packs found for the specified arguments.

    :param getargs: extra GET args merged into the conf query sent to splunkd
    :type getargs: dict
    :param session_key: the session key
    :type session_key: str
    :return: a dictionary of content pack data which consists of content packs
        that were present without any error ('success') and content packs that
        failed to load ('failure').
    :rtype: dict

    About the exception handling in this method
    (JSONDecodeError, TypeError, FileNotFoundError, Exception):
    1. A corrupted manifest.json raises JSONDecodeError, because the reader's
       JSON read() has no exception-handling strategy of its own.
    2. A corrupted rules.json surfaces as TypeError: the read() used for
       rule.json swallows the parse error and returns None, and downstream
       use of that None raises TypeError back here.
    3. If either JSON file is missing, FileNotFoundError is raised; the same
       message is produced for both files, so the log must be checked to
       identify which one is absent.
    4. The final Exception handler is a default catch-all; it should rarely
       fire and its cause cannot be anticipated, hence kept generic.
    """
    conf_getargs = {
        'count': 0,
        'output_mode': 'json'
    }
    if getargs is not None and isinstance(getargs, dict):
        conf_getargs.update(getargs)
    conf = get_content_packs_conf(
        getargs=conf_getargs,
        session_key=session_key
    )
    content_packs = []
    failed_content_packs = []
    reader = ContentPackMetadataReader(
        logger=LOGGER,
        session_key=session_key
    )
    for entry in conf:
        # Build the error descriptor BEFORE the try, using .get() so a
        # malformed entry cannot raise here. (Previously this was assigned
        # inside the try via direct subscripts, so a missing 'content'/title
        # key left error_message unbound and the except handlers raised
        # NameError instead of recording the failure.)
        error_message = {
            'title': entry.get('content', {}).get(ContentPackFields.TITLE, ''),
            'error_code': '',
            'error_message': '',
            'action_detail': ''
        }
        try:
            entitlements_processor = ContentPackEntitlementsProcessor(
                content_pack_id=entry['name'],
                logger=LOGGER,
                session_key=session_key
            )
            content_pack = reader.read(entry)
            cp_saved_searches_status = ItsiContentPackSavedSearchStatus(session_key, 'nobody').get('nobody', entry['name'])
            if cp_saved_searches_status:
                # Counts were previously persisted; reuse them.
                saved_searches = cp_saved_searches_status.get('saved_searches')
            else:
                # First retrieval: compute and persist the counts.
                saved_searches = retrieve_saved_search_count_details(session_key, entry['name'])
            content_pack[ContentPackFields.SAVED_SEARCHES] = saved_searches
            if saved_searches['total']:
                # A pack with installed saved searches is treated as entitled.
                content_pack[ContentPackFields.ENTITLEMENT_STATUS] = True
            else:
                entitlements_processor.apply_content_pack_level_entitlement(content_pack)
        except JSONDecodeError as exc:
            _record_read_failure(
                failed_content_packs, error_message, entry, exc, 500,
                'JSON Decode Error! manifest.json file corrupted',
                'Check the manifest.json file and itsi_content_packs_retrieve.log for more details'
            )
        except TypeError as exc:
            _record_read_failure(
                failed_content_packs, error_message, entry, exc, 500,
                'JSON Decode Error! rule.json file corrupted',
                'Check the rule.json file and itsi_content_packs_retrieve.log for more details'
            )
        except FileNotFoundError as exc:
            _record_read_failure(
                failed_content_packs, error_message, entry, exc, 404,
                'FileNotFound Error! manifest.json or rule.json absent',
                'Check the itsi_content_packs_retrieve.log for more details'
            )
        except Exception as exc:
            _record_read_failure(
                failed_content_packs, error_message, entry, exc, 500,
                'An unexpected error occurred.',
                'Check the itsi_content_packs_retrieve.log for more details'
            )
        else:
            content_packs.append(content_pack)
    return {
        'success': content_packs,
        'failure': failed_content_packs
    }
def retrieve_saved_search_count_details(session_key, content_pack_id):
    """
    Return the count of enabled, disabled, and total saved searches for the
    given content pack, and persist those counts to the content pack
    saved-search status collection.

    :param session_key: the session key
    :type session_key: str
    :param content_pack_id: the content pack id
    :type content_pack_id: str
    :return: content pack saved search data counts
        (total saved searches, enabled saved searches, disabled saved searches)
    :rtype: dict
    :raises Exception: if the saved searches cannot be retrieved; the original
        error is chained as the cause.
    """
    try:
        cp_saved_searches = SavedSearch.get_all_searches(
            session_key=session_key,
            namespace='-',
            search=APP_FILTER_PREFIX + content_pack_id
        )
    except Exception as reason:
        # Chain the original exception so the root cause is not lost.
        raise Exception('Failed to retrieve saved searches of the content pack {0}. ERROR {1}'.format(content_pack_id, reason)) from reason
    saved_searches_obj = {
        "total": 0,
        "enabled": 0,
        "disabled": 0
    }
    for saved_search in cp_saved_searches:
        # A saved search is enabled when its 'disabled' attribute is '0'.
        # `or ''` guards against a missing/None attribute, which previously
        # raised AttributeError on .strip().
        if (saved_search.get('disabled') or '').strip() == '0':
            saved_searches_obj['enabled'] += 1
        else:
            saved_searches_obj['disabled'] += 1
    saved_searches_obj['total'] = saved_searches_obj['enabled'] + saved_searches_obj['disabled']
    # Update the existing collection with saved-search count details for the
    # content pack so subsequent retrievals can read them directly.
    ItsiContentPackSavedSearchStatus(session_key, 'nobody').update_content_pack_saved_search_status(
        content_pack_id,
        saved_searches_obj['total'],
        saved_searches_obj['enabled'],
        saved_searches_obj['disabled']
    )
    return saved_searches_obj
def retrieve_saved_search_consistency_status(session_key, content_pack_id):
    """
    Return the saved search consistency status of the content pack objects
    data for the given content pack. The saved searches are considered
    consistent when all of them are enabled, or all of them are disabled.

    :param session_key: the session key
    :type session_key: str
    :param content_pack_id: the content pack id
    :type content_pack_id: str
    :return: consistency status of the content pack with keys
        'has_saved_searches' and 'has_consistent_status'
    :rtype: dict
    :raises Exception: if the saved searches cannot be retrieved; the original
        error is chained as the cause.
    """
    has_saved_searches = False
    has_consistent_status = False
    try:
        cp_saved_searches = SavedSearch.get_all_searches(
            session_key=session_key,
            namespace='-',
            search=APP_FILTER_PREFIX + content_pack_id
        )
        has_saved_searches = len(cp_saved_searches) > 0
    except Exception as reason:
        # Chain the original exception so the root cause is not lost.
        raise Exception('Failed to retrieve saved searches of the content pack {0}. ERROR {1}'.format(content_pack_id, reason)) from reason
    if has_saved_searches:
        # True for each search whose 'disabled' attribute is '1'. `or ''`
        # guards against a missing/None attribute, which previously raised
        # AttributeError on .strip().
        disabled_flags = [
            (search.get('disabled') or '').strip() == '1'
            for search in cp_saved_searches
        ]
        # Consistent when every search is disabled, or none are.
        has_consistent_status = all(disabled_flags) or not any(disabled_flags)
    return {
        "has_saved_searches": has_saved_searches,
        "has_consistent_status": has_consistent_status
    }
def retrieve_one(content_pack_id, version=None, session_key=None):
    """
    Return the content pack data for the given content pack id.

    :param content_pack_id: the content pack id
    :type content_pack_id: str
    :param version: the content pack version
    :type version: str
    :param session_key: the session key
    :type session_key: str
    :return: the content pack data
    :rtype: dict
    """
    # Fetch the single conf entry for this pack/version, then read its
    # metadata, including the overview field not returned by retrieve_all.
    entry = get_content_pack_conf(content_pack_id, version, session_key)
    metadata_reader = ContentPackMetadataReader(
        logger=LOGGER,
        session_key=session_key
    )
    extra = [ContentPackFields.OVERVIEW]
    return metadata_reader.read(entry, extra_fields=extra)