# Copyright (C) 2005-2024 Splunk Inc. All Rights Reserved.
import time
import itsi_py3
from ITOA.itoa_object import CRUDMethodTypes, ItoaObject
from ITOA.setup_logging import logger
from itsi.itsi_utils import GLOBAL_SECURITY_GROUP_CONFIG
from uuid import uuid1
# Sandbox transaction types (what kind of async action a log records)
TYPE_VALIDATE = 'VALIDATE'
TYPE_PUBLISH = 'PUBLISH'
# Lifecycle states for an asynchronous sandbox transaction
STATUS_NOT_STARTED = 'NOT STARTED'
STATUS_IN_PROGRESS = 'IN PROGRESS'
STATUS_SUCCESS = 'SUCCESS'
STATUS_FAILED = 'FAILED'
# Actions a sandbox transaction can perform against live content
ACTION_TYPE_SYNCHRONIZE = 'SYNCHRONIZE'
ACTION_TYPE_DELETE = 'DELETE'
# Change-type label used to tag sandbox operations
SANDBOX_CHANGE_TYPE = 'sandbox_operations'
class ItsiSandboxSyncLog(ItoaObject):
    """
    Implements ITSI Sandbox Sync Log object
    This additionally handles ITSI sandbox transactions for asynchronous actions.
    """
    logger = logger  # module-level logger shared by all instances
    log_prefix = '[ITSI Sandbox Sync Log] '  # prefix used when emitting log lines
    collection_name = 'itsi_sandbox_sync_log'  # KV store collection backing this object
    object_type = 'sandbox_sync_log'  # ITOA object type identifier
def __init__(self, session_key, current_user_name):
logger.info('Instantiating Sandbox Sync Log Object')
session_key = session_key
super(ItsiSandboxSyncLog, self).__init__(session_key, current_user_name, self.object_type,
collection_name=self.collection_name, is_securable_object=True)
# CRUD Operations
def create_record(self, owner, transaction_id, mod_object_type, mod_object_id, record):
"""
Create a sandbox sync log object in a record format.
:param owner: Owner of the object
:type: string
:param transaction_id: ID of transaction for tracebacks. This is also the key of the object.
:type: string
:param mod_object_type: Type of object this record refers to
:type: str
:param mod_object_id: ID of object this record refers to
:type: str
:param record: Object with fields to be saved as a log
:type: dict
:return: Object creation response
:type: dict
"""
now = int(time.time())
if transaction_id is None:
transaction_id = uuid1().hex
record_object = {
'_key': 'log_%s' % transaction_id,
'timestamp': now,
'title': 'Transaction %s' % transaction_id,
'owner': owner,
'user': self.current_user_name,
'transaction_id': transaction_id,
'mod_object_type': mod_object_type,
'mod_object_id': mod_object_id,
'details': {},
'warnings': [],
'errors': [],
'sec_grp': GLOBAL_SECURITY_GROUP_CONFIG['key'],
}
for k, v in record.items():
record_object[k] = v
logger.info('Creating record %s: %s' % (transaction_id, record_object))
self.write_to_log(record_object.get('warnings', []), 'warning')
self.write_to_log(record_object.get('errors', []), 'error')
return super().create(owner, record_object, transaction_id=transaction_id)
def update_record(self, owner, transaction_id, record):
"""
Update a sandbox sync log object in a record format.
:param owner: Owner of the object
:type: string
:param transaction_id: ID of transaction for tracebacks. This is also the key of the object.
:type: string
:param record: Object with fields to be saved as a log
:type: dict
:return: Object creation response
:type: dict
"""
now = int(time.time())
record_object = {
'_key': 'log_%s' % transaction_id,
'timestamp': now,
'title': 'Transaction %s' % transaction_id,
'owner': owner,
'user': self.current_user_name,
'transaction_id': transaction_id,
# Don't include other fields to avoid accidentally erasing data
}
for k, v in record.items():
record_object[k] = v
logger.info('Updating record %s: %s' % (transaction_id, record_object))
self.write_to_log(record_object.get('warnings', []), 'warning')
self.write_to_log(record_object.get('errors', []), 'error')
return super().update(owner, record_object['_key'], record_object, is_partial_data=True,
transaction_id=transaction_id)
def create(self, *args, **kwargs):
"""
Overrides `ItoaObject.create()`
This should not be called, but this is required for proper test functionality.
Use `create_record()` instead.
"""
self.logger.warning('Sandbox sync logs should be created via `create_record()`.')
return super(ItsiSandboxSyncLog, self).create(*args, **kwargs)
def update(self, *args, **kwargs):
"""
Overrides `ItoaObject.update()`
This should not be called, but this is required for proper test functionality.
Use `update_record()` instead.
"""
self.logger.warning('Sandbox sync logs should be updated via `update_record()`.')
return super(ItsiSandboxSyncLog, self).update(*args, **kwargs)
def save_batch(self, *args, **kwargs):
"""
Overrides `ItoaObject.save_batch()`
This should not be called, but this is required for proper test functionality.
Use `create_record()` instead.
"""
self.logger.warning('Sandbox sync logs should be created via `create_record()`.')
return super(ItsiSandboxSyncLog, self).save_batch(*args, **kwargs)
# Helper functions
@staticmethod
def write_to_log(list_to_log, log_level):
"""
Write elements of a list to an actual log
:param list_to_log: List of elements to log
:type: List
:param log_level: Level to log at
:type: String
"""
if log_level == 'error':
func = logger.error
elif log_level == 'warning':
func = logger.warning
else:
func = logger.info
for log in list_to_log:
func(log)
def delete_records(self, owner, offset=0, filter_data=None,
req_source='unknown',
transaction_id=None):
"""
Deletes objects of the oldest matching criteria based on the offset and filtering,
if no filtering specified, deletes objects of this object type upto the offset amount
@type owner: string
@param owner: user who is performing this operation
@type offset: number of records to be skipped from delete
@param offset: int defining the number of records
@type filter_data: dictionary
@param filter_data: json filter constructed to filter data. Follows mongodb syntax
@type req_source: string
@param req_source: identified source initiating the operation
@return: none, throws exceptions on errors
"""
delete_objects = self.get_bulk(owner, filter_data=filter_data, fields=self.delete_object_fields, sort_key='timestamp', sort_dir='asc')
number_of_records = len(delete_objects) - offset
if number_of_records > 0:
pruned_deleted_objects = []
for index in range(number_of_records):
pruned_deleted_objects.append(delete_objects[index])
self.delete_batch(owner, pruned_deleted_objects, req_source, transaction_id)
else:
self.logger.info('No records deleted as the limit not met')