You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
Splunk_Deploiement/apps/pusher_app/bin/git_pusher.py

319 lines
14 KiB

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import json
import logging
import tempfile
import shutil
import subprocess
from datetime import datetime
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib.parse import parse_qs, urlparse
# Logging configuration: messages go both to a file under the Splunk log
# directory and to the console (StreamHandler, stderr by default).
log_dir = '/opt/splunk/var/log/splunk'
os.makedirs(log_dir, exist_ok=True)  # make sure the log directory exists before attaching the FileHandler
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
handlers=[
logging.FileHandler(os.path.join(log_dir, 'git_pusher.log')),
logging.StreamHandler()
]
)
# Module-wide logger shared by the request handler and the server entry point
logger = logging.getLogger('git_pusher')
class GitPusherRequestHandler(BaseHTTPRequestHandler):
    """HTTP handler that clones a Git repository, copies Splunk application
    directories into it, then commits and pushes the result.

    All responses are JSON.  CORS is fully open ('*') because the caller is
    the Splunk web UI served from a different origin.
    """

    def do_OPTIONS(self):
        """Answer CORS preflight requests."""
        self.send_response(200)
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
        self.send_header('Access-Control-Allow-Headers', 'Content-Type')
        self.end_headers()

    def do_POST(self):
        """Handle a push request.

        Query-string parameters: git_url, git_branch (default 'main'),
        git_token, commit_message, apps (JSON list; 'dashboards' is accepted
        as an alias), user.  The HTTP status is always 200 because the
        headers are sent before any processing; success or failure is
        reported through the 'status' field of the JSON body.
        """
        # Send status and CORS headers FIRST (errors are reported in the body)
        self.send_response(200)
        self.send_header('Content-type', 'application/json')
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
        self.send_header('Access-Control-Allow-Headers', 'Content-Type')
        self.end_headers()
        try:
            logger.info(f"POST request to {self.path}")
            # Parse the URL and its query string
            parsed_url = urlparse(self.path)
            query_params = parse_qs(parsed_url.query)
            logger.info(f"Query params keys: {list(query_params.keys())}")
            # Extract the parameters
            git_url = query_params.get('git_url', [''])[0]
            git_branch = query_params.get('git_branch', ['main'])[0]
            git_token = query_params.get('git_token', [''])[0]
            commit_message = query_params.get('commit_message', [''])[0]
            # Accept either 'apps' or 'dashboards' as the app-list parameter
            apps_json = query_params.get('apps', query_params.get('dashboards', ['[]']))[0]
            user = query_params.get('user', ['unknown'])[0]
            logger.info(f"Parameters received: git_url={git_url}, branch={git_branch}, user={user}")
            logger.info(f"Raw apps_json: '{apps_json}'")
            # Parse the app list (parse_qs already URL-decodes, but be safe)
            try:
                if isinstance(apps_json, str):
                    apps = json.loads(apps_json)
                else:
                    apps = apps_json
            except (json.JSONDecodeError, TypeError) as e:
                logger.error(f"JSON parse error: {e} - trying to parse: {apps_json}")
                apps = []
            logger.info(f"Parsed apps: {len(apps)} items - {apps}")
            # Validate the mandatory parameters
            if not git_url or not git_token or not commit_message or not apps:
                logger.warning(f"Validation failed: git_url={bool(git_url)}, git_token={bool(git_token)}, commit_message={bool(commit_message)}, apps={len(apps)}")
                response = {
                    'status': 'error',
                    'message': 'Missing required parameters'
                }
                self.wfile.write(json.dumps(response).encode())
                return
            # Work inside a throw-away clone directory
            temp_dir = tempfile.mkdtemp(prefix='splunk_git_')
            logger.info(f"Created temp directory: {temp_dir}")
            try:
                # Embed the token in the clone URL
                git_url_with_token = self.prepare_git_url(git_url, git_token)
                logger.info("Cloning repository...")
                self.clone_repository(temp_dir, git_url_with_token, git_branch)
                # Resolve every requested application to its full on-disk directory
                logger.info("Fetching applications from Splunk...")
                dashboard_contents = self.fetch_apps_directories(apps)
                # Make sure the 'apps' folder exists inside the clone
                apps_dir = os.path.join(temp_dir, 'apps')
                os.makedirs(apps_dir, exist_ok=True)
                # Copy each application directory into the clone
                logger.info("Copying applications to repository...")
                for app_data in dashboard_contents:
                    app_name = app_data['name']
                    app_path = app_data['path']
                    dest_path = os.path.join(apps_dir, app_name)
                    if os.path.exists(app_path):
                        logger.info(f"Copying app {app_name} from {app_path}")
                        shutil.copytree(app_path, dest_path)
                        logger.info(f"Copied app: {app_name}")
                    else:
                        logger.warning(f"App path not found: {app_path}")
                # Configure the commit identity (failures here are non-fatal)
                logger.info("Configuring git...")
                subprocess.run(['git', 'config', 'user.email', 'splunk@splunk.local'],
                               cwd=temp_dir, capture_output=True)
                subprocess.run(['git', 'config', 'user.name', 'Splunk Git Pusher'],
                               cwd=temp_dir, capture_output=True)
                # Stage, commit and push
                logger.info("Adding files...")
                subprocess.run(['git', 'add', '-A'], cwd=temp_dir, capture_output=True)
                full_message = f"{commit_message}\n\nPushed by: {user}\nTimestamp: {datetime.now().isoformat()}"
                logger.info("Committing...")
                result = subprocess.run(['git', 'commit', '-m', full_message],
                                        cwd=temp_dir, capture_output=True, text=True)
                if result.returncode != 0:
                    # 'nothing to commit' also lands here; only the push decides failure
                    logger.warning(f"Commit may have failed or had no changes: {result.stderr}")
                logger.info("Pushing...")
                result = subprocess.run(['git', 'push', 'origin', git_branch],
                                        cwd=temp_dir, capture_output=True, text=True, timeout=60)
                if result.returncode != 0:
                    raise Exception(f"Push failed: {result.stderr}")
                logger.info("Push successful!")
                # NOTE: the counts below are application directories; the
                # 'dashboards' wording in the payload is legacy and kept as-is
                response = {
                    'status': 'success',
                    'message': f'Successfully pushed {len(dashboard_contents)} dashboards from {len(apps)} application(s) to Git',
                    'dashboards_pushed': len(dashboard_contents)
                }
                self.wfile.write(json.dumps(response).encode())
            finally:
                # Always remove the clone, even on failure
                logger.info(f"Cleaning up {temp_dir}")
                shutil.rmtree(temp_dir, ignore_errors=True)
        except Exception as e:
            logger.error(f"Error: {str(e)}", exc_info=True)
            response = {
                'status': 'error',
                'message': f'Error: {str(e)}'
            }
            self.wfile.write(json.dumps(response).encode())

    def log_message(self, format, *args):
        """Route BaseHTTPRequestHandler's per-request log lines to our logger at DEBUG."""
        logger.debug(format % args)

    @staticmethod
    def prepare_git_url(git_url, token):
        """Return *git_url* with *token* embedded for HTTPS basic auth.

        Non-HTTPS URLs (e.g. SSH remotes) are returned unchanged.
        """
        if git_url.startswith('https://'):
            # Slice off the scheme prefix instead of str.replace(), which
            # would also mangle any later 'https://' occurrence in the URL.
            return f"https://{token}@{git_url[len('https://'):]}"
        return git_url

    @staticmethod
    def clone_repository(dest_dir, git_url, branch):
        """Shallow-clone *branch* of *git_url* into *dest_dir*.

        Raises Exception on clone failure, timeout, or missing git binary.
        """
        try:
            cmd = ['git', 'clone', '--depth', '1', '--branch', branch, git_url, dest_dir]
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=60)
            if result.returncode != 0:
                raise Exception(f"Clone failed: {result.stderr}")
            logger.info("Repository cloned successfully")
        except subprocess.TimeoutExpired:
            raise Exception("Git clone operation timed out")
        except FileNotFoundError:
            raise Exception("Git is not installed on this system")

    @staticmethod
    def fetch_apps_directories(apps):
        """Resolve each requested app to its directory under /opt/splunk/etc/apps.

        Returns a list of {'name', 'path', 'size'} dicts; apps whose
        directory does not exist are skipped with a warning.
        """
        logger.info(f"Fetching directories for {len(apps)} applications")
        splunk_home = '/opt/splunk'
        apps_base_path = os.path.join(splunk_home, 'etc', 'apps')
        app_directories = []
        for app in apps:
            app_id = app.get('id') or app.get('app_id')
            app_path = os.path.join(apps_base_path, app_id)
            logger.info(f"Checking app directory: {app_path}")
            if os.path.isdir(app_path):
                app_directories.append({
                    'name': app_id,
                    'path': app_path,
                    # total size of every file in the app tree
                    'size': sum(os.path.getsize(os.path.join(dirpath, filename))
                                for dirpath, dirnames, filenames in os.walk(app_path)
                                for filename in filenames)
                })
                logger.info(f"Found app: {app_id} at {app_path}")
            else:
                logger.warning(f"App directory not found: {app_path}")
        logger.info(f"Successfully found {len(app_directories)} application directories")
        return app_directories

    @staticmethod
    def fetch_dashboards_contents(apps):
        """Legacy REST-based dashboard fetch (currently unused).

        This body sat unreachable after the `return` of
        fetch_apps_directories in the original file; it is preserved here
        as a named helper.  It fetches every dashboard XML of each app
        through the local Splunk management API (port 8089).
        """
        logger.info(f"Fetching dashboards from {len(apps)} applications")
        import urllib.request
        import urllib.error
        import ssl
        import base64
        # Accept self-signed certificates on the local management port
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        dashboard_contents = []
        # Credentials come from the environment, with (insecure) defaults
        splunk_username = os.environ.get('SPLUNK_USERNAME', 'admin')
        splunk_password = os.environ.get('SPLUNK_PASSWORD', 'changeme')
        # Build the Basic auth header value
        credentials = base64.b64encode(f"{splunk_username}:{splunk_password}".encode()).decode()
        for app in apps:
            app_id = app.get('id') or app.get('app_id')
            logger.info(f"Fetching all dashboards from app: {app_id}")
            try:
                # List every dashboard (view) belonging to this app
                api_url = f"https://127.0.0.1:8089/servicesNS/-/{app_id}/data/ui/views?output_mode=json&count=0"
                logger.debug(f"API URL: {api_url}")
                req = urllib.request.Request(api_url)
                req.add_header('Authorization', f'Basic {credentials}')
                with urllib.request.urlopen(req, timeout=15, context=ssl_context) as response:
                    api_data = json.loads(response.read().decode('utf-8'))
                if 'entry' in api_data and len(api_data['entry']) > 0:
                    for entry in api_data['entry']:
                        try:
                            dashboard_id = entry.get('name')
                            content = entry.get('content', {})
                            # 'eai:data' holds the full dashboard XML
                            dashboard_xml = content.get('eai:data', '')
                            if dashboard_xml:
                                dashboard_contents.append({
                                    'id': f"{app_id}_{dashboard_id}",
                                    'app': app_id,
                                    'content': dashboard_xml,
                                    'name': dashboard_id
                                })
                                logger.debug(f"Fetched: {dashboard_id} from {app_id}")
                        except Exception as e:
                            logger.error(f"Error processing dashboard entry: {str(e)}")
                    logger.info(f"Found {len([d for d in dashboard_contents if d['app'] == app_id])} dashboards in {app_id}")
                else:
                    logger.warning(f"No dashboards found in app {app_id}")
            except urllib.error.HTTPError as e:
                logger.error(f"HTTP {e.code} when fetching app {app_id}: {e.reason}")
            except urllib.error.URLError as e:
                logger.error(f"Cannot reach Splunk API for app {app_id}: {e.reason}")
            except Exception as e:
                logger.error(f"Error fetching dashboards from {app_id}: {str(e)}")
        logger.info(f"Successfully fetched {len(dashboard_contents)} dashboards total")
        return dashboard_contents
def start_server(port=9999, host='0.0.0.0'):
    """Start the blocking HTTP server.

    Args:
        port: TCP port to listen on (default 9999).
        host: interface to bind; '0.0.0.0' (all interfaces) by default.
              New optional parameter — existing callers are unaffected.

    Never returns under normal operation (serve_forever blocks).
    """
    server = HTTPServer((host, port), GitPusherRequestHandler)
    logger.info(f"Git Pusher server listening on {host}:{port} (HTTP)")
    server.serve_forever()
if __name__ == '__main__':
    # Run the server in the foreground (serve_forever blocks; despite the
    # original comment, nothing backgrounds it — use systemd/nohup for that).
    port = 9999
    logger.info(f"Starting Git Pusher on port {port}")
    try:
        start_server(port)
    except KeyboardInterrupt:
        # Exit cleanly on Ctrl-C instead of dumping a traceback
        logger.info("Git Pusher stopped by user")