#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Git Pusher - Main Server

HTTP server that pushes Splunk applications to Git and deploys them to
the Search Head Cluster through the SH Deployer agent.

Licensing is enforced via a .lic license file.
"""

import json
import logging
import os
import shutil
import ssl
import subprocess
import sys
import tempfile
import urllib.error
import urllib.request
from datetime import datetime
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib.parse import parse_qs, urlparse

# Importer le validateur de licence
|
|
# En production, ce fichier sera dans le même dossier
|
|
try:
|
|
from license_validator import (
|
|
validate_license,
|
|
save_license_file,
|
|
check_limits,
|
|
increment_usage,
|
|
get_splunk_hostname,
|
|
get_usage_stats,
|
|
parse_license_content
|
|
)
|
|
except ImportError:
|
|
# Fallback pour le développement
|
|
print("Warning: license_validator not found, running without license checks")
|
|
def validate_license(): return {"valid": True, "type": "dev", "days_remaining": 999}
|
|
def save_license_file(c): return {"success": True}
|
|
def check_limits(): return {"allowed": True}
|
|
def increment_usage(): return {}
|
|
def get_splunk_hostname(): return "dev-host"
|
|
def get_usage_stats(): return {}
|
|
def parse_license_content(c): return {}
|
|
|
|
# ============================================
|
|
# CONFIGURATION
|
|
# ============================================
|
|
|
|
# Chemins Splunk
|
|
SPLUNK_HOME = os.environ.get('SPLUNK_HOME', '/opt/splunk')
|
|
APP_HOME = os.path.join(SPLUNK_HOME, 'etc', 'apps', 'pusher_app_prem')
|
|
CONFIG_FILE = os.path.join(APP_HOME, 'local', 'config.json')
|
|
|
|
# Configuration par défaut
|
|
DEFAULT_CONFIG = {
|
|
"api": {
|
|
"url": "",
|
|
"port": 9999,
|
|
"useProxy": True
|
|
},
|
|
"deployer": {
|
|
"enabled": False,
|
|
"host": "",
|
|
"port": 9998,
|
|
"token": "",
|
|
"useSSL": True
|
|
},
|
|
"license": {
|
|
"checkInterval": 24
|
|
},
|
|
"advanced": {
|
|
"logLevel": "INFO",
|
|
"timeout": 30,
|
|
"gitTimeout": 120
|
|
}
|
|
}
|
|
|
|
def load_config():
|
|
"""Charger la configuration depuis le fichier"""
|
|
try:
|
|
if os.path.exists(CONFIG_FILE):
|
|
with open(CONFIG_FILE, 'r') as f:
|
|
config = json.load(f)
|
|
# Fusionner avec la config par défaut pour les clés manquantes
|
|
return {**DEFAULT_CONFIG, **config}
|
|
except Exception as e:
|
|
logger.error(f"Erreur chargement config: {e}")
|
|
return DEFAULT_CONFIG.copy()
|
|
|
|
def save_config(config):
|
|
"""Sauvegarder la configuration dans le fichier"""
|
|
try:
|
|
local_dir = os.path.join(APP_HOME, 'local')
|
|
os.makedirs(local_dir, exist_ok=True)
|
|
|
|
with open(CONFIG_FILE, 'w') as f:
|
|
json.dump(config, f, indent=2)
|
|
|
|
os.chmod(CONFIG_FILE, 0o600)
|
|
logger.info(f"Configuration sauvegardée: {CONFIG_FILE}")
|
|
return True
|
|
except Exception as e:
|
|
logger.error(f"Erreur sauvegarde config: {e}")
|
|
return False
|
|
|
|
# Charger la configuration au démarrage
|
|
APP_CONFIG = load_config()
|
|
|
|
# Configuration du SH Deployer (depuis la config ou valeurs par défaut)
|
|
SH_DEPLOYER_CONFIG = {
|
|
"enabled": APP_CONFIG.get("deployer", {}).get("enabled", False),
|
|
"host": APP_CONFIG.get("deployer", {}).get("host", ""),
|
|
"port": APP_CONFIG.get("deployer", {}).get("port", 9998),
|
|
"use_ssl": APP_CONFIG.get("deployer", {}).get("useSSL", True),
|
|
"token": APP_CONFIG.get("deployer", {}).get("token", ""),
|
|
"timeout": APP_CONFIG.get("advanced", {}).get("timeout", 30)
|
|
}
|
|
|
|
# Configuration du logging
|
|
log_dir = '/opt/splunk/var/log/splunk'
|
|
os.makedirs(log_dir, exist_ok=True)
|
|
|
|
log_level = getattr(logging, APP_CONFIG.get("advanced", {}).get("logLevel", "INFO"), logging.INFO)
|
|
|
|
logging.basicConfig(
|
|
level=log_level,
|
|
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
|
handlers=[
|
|
logging.FileHandler(os.path.join(log_dir, 'git_pusher.log')),
|
|
logging.StreamHandler()
|
|
]
|
|
)
|
|
logger = logging.getLogger('git_pusher')
|
|
|
|
|
|
class GitPusherRequestHandler(BaseHTTPRequestHandler):
|
|
"""Handler pour les requêtes HTTP"""
|
|
|
|
def send_cors_headers(self):
|
|
"""Envoyer les headers CORS complets"""
|
|
# Permettre toutes les origines
|
|
origin = self.headers.get('Origin', '*')
|
|
self.send_header('Access-Control-Allow-Origin', origin)
|
|
self.send_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS, PUT, DELETE')
|
|
self.send_header('Access-Control-Allow-Headers', 'Content-Type, Authorization, X-Requested-With, Accept, Origin, X-Splunk-Form-Key, X-Auth-Token')
|
|
self.send_header('Access-Control-Allow-Credentials', 'true')
|
|
self.send_header('Access-Control-Max-Age', '86400') # Cache preflight 24h
|
|
|
|
def do_OPTIONS(self):
|
|
"""Traiter les requêtes OPTIONS (CORS preflight)"""
|
|
logger.info(f"OPTIONS request from {self.headers.get('Origin', 'unknown')}")
|
|
self.send_response(200)
|
|
self.send_cors_headers()
|
|
self.end_headers()
|
|
# Important: ne rien écrire dans le body pour OPTIONS
|
|
return
|
|
|
|
def do_GET(self):
|
|
"""Traiter les requêtes GET"""
|
|
self.send_response(200)
|
|
self.send_header('Content-type', 'application/json')
|
|
self.send_cors_headers()
|
|
self.end_headers()
|
|
|
|
try:
|
|
parsed_url = urlparse(self.path)
|
|
path = parsed_url.path
|
|
|
|
# ============================================
|
|
# ENDPOINTS LICENCE
|
|
# ============================================
|
|
|
|
if path == '/license' or path == '/license/status':
|
|
# Récupérer le statut de la licence
|
|
validation = validate_license()
|
|
usage = get_usage_stats()
|
|
hostname = get_splunk_hostname()
|
|
|
|
response = {
|
|
"status": "valid" if validation.get("valid") else "invalid",
|
|
"hostname": hostname,
|
|
"license": validation if validation.get("valid") else None,
|
|
"error": validation.get("error") if not validation.get("valid") else None,
|
|
"error_code": validation.get("error_code") if not validation.get("valid") else None,
|
|
"usage": usage
|
|
}
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
elif path == '/license/hostname':
|
|
# Juste le hostname
|
|
response = {"hostname": get_splunk_hostname()}
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
elif path == '/license/file':
|
|
# Charger la licence depuis le fichier sur le serveur
|
|
license_file = os.path.join(APP_HOME, 'local', 'license.lic')
|
|
|
|
if os.path.exists(license_file):
|
|
try:
|
|
with open(license_file, 'r') as f:
|
|
license_content = f.read()
|
|
response = {
|
|
"success": True,
|
|
"content": license_content
|
|
}
|
|
except Exception as e:
|
|
response = {
|
|
"success": False,
|
|
"error": f"Erreur lecture fichier: {str(e)}"
|
|
}
|
|
else:
|
|
response = {
|
|
"success": False,
|
|
"error": "Aucun fichier de licence sur le serveur"
|
|
}
|
|
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
# ============================================
|
|
# ENDPOINT CONFIGURATION
|
|
# ============================================
|
|
|
|
elif path == '/config':
|
|
# Retourner la configuration actuelle
|
|
config = load_config()
|
|
# Masquer le token pour la sécurité
|
|
if 'deployer' in config and 'token' in config['deployer']:
|
|
config['deployer']['token'] = '***' if config['deployer']['token'] else ''
|
|
self.wfile.write(json.dumps(config).encode())
|
|
|
|
elif path == '/health':
|
|
# Health check
|
|
response = {
|
|
"status": "ok",
|
|
"service": "git_pusher",
|
|
"timestamp": datetime.now().isoformat(),
|
|
"sh_deployer": {
|
|
"enabled": SH_DEPLOYER_CONFIG.get("enabled", True),
|
|
"host": SH_DEPLOYER_CONFIG.get("host"),
|
|
"port": SH_DEPLOYER_CONFIG.get("port")
|
|
}
|
|
}
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
# ============================================
|
|
# ENDPOINTS SH DEPLOYER
|
|
# ============================================
|
|
|
|
elif path == '/deployer/health':
|
|
# Vérifier la santé du SH Deployer
|
|
result = call_deployer_agent("/health")
|
|
if result.get("success"):
|
|
response = {
|
|
"status": "ok",
|
|
"deployer": result.get("data"),
|
|
"config": {
|
|
"host": SH_DEPLOYER_CONFIG.get("host"),
|
|
"port": SH_DEPLOYER_CONFIG.get("port")
|
|
}
|
|
}
|
|
else:
|
|
response = {
|
|
"status": "error",
|
|
"error": result.get("error"),
|
|
"config": {
|
|
"host": SH_DEPLOYER_CONFIG.get("host"),
|
|
"port": SH_DEPLOYER_CONFIG.get("port")
|
|
}
|
|
}
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
elif path == '/deployer/status':
|
|
# Statut du SH Deployer
|
|
result = get_deployer_status()
|
|
self.wfile.write(json.dumps(result).encode())
|
|
|
|
elif path == '/deployer/config':
|
|
# Configuration actuelle du SH Deployer
|
|
response = {
|
|
"enabled": SH_DEPLOYER_CONFIG.get("enabled", True),
|
|
"host": SH_DEPLOYER_CONFIG.get("host"),
|
|
"port": SH_DEPLOYER_CONFIG.get("port"),
|
|
"use_ssl": SH_DEPLOYER_CONFIG.get("use_ssl", True)
|
|
}
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
else:
|
|
response = {"error": "Unknown endpoint", "path": path}
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
except Exception as e:
|
|
logger.error(f"GET error: {e}")
|
|
self.wfile.write(json.dumps({"error": str(e)}).encode())
|
|
|
|
def do_POST(self):
|
|
"""Traiter les requêtes POST"""
|
|
self.send_response(200)
|
|
self.send_header('Content-type', 'application/json')
|
|
self.send_cors_headers()
|
|
self.end_headers()
|
|
|
|
try:
|
|
parsed_url = urlparse(self.path)
|
|
path = parsed_url.path
|
|
query_params = parse_qs(parsed_url.query)
|
|
|
|
logger.info(f"POST request to {path}")
|
|
|
|
# ============================================
|
|
# ENDPOINT CONFIGURATION
|
|
# ============================================
|
|
|
|
if path == '/config':
|
|
# Sauvegarder la configuration
|
|
content_length = int(self.headers.get('Content-Length', 0))
|
|
body = self.rfile.read(content_length).decode('utf-8')
|
|
|
|
try:
|
|
new_config = json.loads(body)
|
|
|
|
# Charger la config existante pour préserver le token si masqué
|
|
existing_config = load_config()
|
|
|
|
# Si le token est masqué (***), garder l'ancien
|
|
if new_config.get('deployer', {}).get('token') == '***':
|
|
new_config['deployer']['token'] = existing_config.get('deployer', {}).get('token', '')
|
|
|
|
# Sauvegarder
|
|
if save_config(new_config):
|
|
# Recharger la config globale
|
|
global APP_CONFIG, SH_DEPLOYER_CONFIG
|
|
APP_CONFIG = load_config()
|
|
SH_DEPLOYER_CONFIG = {
|
|
"enabled": APP_CONFIG.get("deployer", {}).get("enabled", False),
|
|
"host": APP_CONFIG.get("deployer", {}).get("host", ""),
|
|
"port": APP_CONFIG.get("deployer", {}).get("port", 9998),
|
|
"use_ssl": APP_CONFIG.get("deployer", {}).get("useSSL", True),
|
|
"token": APP_CONFIG.get("deployer", {}).get("token", ""),
|
|
"timeout": APP_CONFIG.get("advanced", {}).get("timeout", 30)
|
|
}
|
|
|
|
response = {"success": True, "message": "Configuration sauvegardée"}
|
|
else:
|
|
response = {"success": False, "error": "Erreur lors de la sauvegarde"}
|
|
|
|
except json.JSONDecodeError as e:
|
|
response = {"success": False, "error": f"JSON invalide: {str(e)}"}
|
|
except Exception as e:
|
|
logger.error(f"Erreur sauvegarde config: {e}")
|
|
response = {"success": False, "error": str(e)}
|
|
|
|
self.wfile.write(json.dumps(response).encode())
|
|
return
|
|
|
|
# ============================================
|
|
# ENDPOINTS LICENCE
|
|
# ============================================
|
|
|
|
elif path == '/license/upload' or path == '/license/save':
|
|
# Sauvegarder la licence sur le serveur (fichier)
|
|
# La validation RSA est faite côté client
|
|
content_length = int(self.headers.get('Content-Length', 0))
|
|
body = self.rfile.read(content_length).decode('utf-8')
|
|
|
|
try:
|
|
data = json.loads(body)
|
|
license_content = data.get('license_content', '')
|
|
except:
|
|
license_content = body
|
|
|
|
if not license_content:
|
|
response = {"success": False, "error": "Contenu de licence vide"}
|
|
else:
|
|
# Sauvegarder le fichier de licence
|
|
try:
|
|
local_dir = os.path.join(APP_HOME, 'local')
|
|
os.makedirs(local_dir, exist_ok=True)
|
|
|
|
license_path = os.path.join(local_dir, 'license.lic')
|
|
|
|
with open(license_path, 'w') as f:
|
|
f.write(license_content)
|
|
|
|
os.chmod(license_path, 0o600)
|
|
|
|
logger.info(f"Licence sauvegardée: {license_path}")
|
|
response = {
|
|
"success": True,
|
|
"message": "Licence sauvegardée sur le serveur",
|
|
"path": license_path
|
|
}
|
|
except Exception as e:
|
|
logger.error(f"Erreur sauvegarde licence: {e}")
|
|
response = {
|
|
"success": False,
|
|
"error": f"Erreur sauvegarde: {str(e)}"
|
|
}
|
|
|
|
self.wfile.write(json.dumps(response).encode())
|
|
return
|
|
|
|
elif path == '/license/delete':
|
|
# Supprimer la licence
|
|
license_path = os.path.join(APP_HOME, 'local', 'license.lic')
|
|
if os.path.exists(license_path):
|
|
os.remove(license_path)
|
|
logger.info(f"Licence supprimée: {license_path}")
|
|
response = {"success": True, "message": "Licence supprimée"}
|
|
else:
|
|
response = {"success": False, "error": "Aucune licence à supprimer"}
|
|
|
|
self.wfile.write(json.dumps(response).encode())
|
|
return
|
|
|
|
# ============================================
|
|
# ENDPOINT PUSH GIT
|
|
# ============================================
|
|
|
|
elif path == '/push' or path.startswith('/services/'):
|
|
# NOTE: La vérification de licence est maintenant faite côté client (JavaScript)
|
|
# avec validation RSA. Le serveur fait confiance au client.
|
|
# Si vous voulez réactiver la vérification serveur, décommentez le bloc ci-dessous:
|
|
#
|
|
# license_check = check_limits()
|
|
# if not license_check.get("allowed"):
|
|
# response = {
|
|
# "status": "error",
|
|
# "error_code": "LICENSE_ERROR",
|
|
# "message": license_check.get("error", "Licence invalide ou limite atteinte")
|
|
# }
|
|
# self.wfile.write(json.dumps(response).encode())
|
|
# return
|
|
|
|
# Traiter le push Git
|
|
self.handle_git_push(query_params)
|
|
return
|
|
|
|
else:
|
|
# Traiter comme un push Git (compatibilité)
|
|
self.handle_git_push(query_params)
|
|
|
|
except Exception as e:
|
|
logger.error(f"POST error: {str(e)}", exc_info=True)
|
|
response = {
|
|
"status": "error",
|
|
"message": f"Error: {str(e)}"
|
|
}
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
def handle_git_push(self, query_params):
|
|
"""Gérer le push Git et optionnellement le déploiement vers SH Cluster"""
|
|
try:
|
|
# Extraire les paramètres
|
|
git_url = query_params.get('git_url', [''])[0]
|
|
git_branch = query_params.get('git_branch', ['main'])[0]
|
|
git_token = query_params.get('git_token', [''])[0]
|
|
commit_message = query_params.get('commit_message', [''])[0]
|
|
apps_json = query_params.get('apps', query_params.get('dashboards', ['[]']))[0]
|
|
user = query_params.get('user', ['unknown'])[0]
|
|
|
|
# Paramètres pour le déploiement SH Cluster
|
|
deploy_to_shcluster = query_params.get('deploy_to_shcluster', ['false'])[0].lower() == 'true'
|
|
deployer_host = query_params.get('deployer_host', [SH_DEPLOYER_CONFIG.get('host', '')])[0]
|
|
deployer_token = query_params.get('deployer_token', [SH_DEPLOYER_CONFIG.get('token', '')])[0]
|
|
sh_auth_user = query_params.get('sh_auth_user', [''])[0]
|
|
sh_auth_pass = query_params.get('sh_auth_pass', [''])[0]
|
|
|
|
# Paramètres de licence (envoyés par le client)
|
|
license_type = query_params.get('license_type', [''])[0]
|
|
license_id = query_params.get('license_id', [''])[0]
|
|
|
|
logger.info(f"Parameters: git_url={git_url}, branch={git_branch}, user={user}, deploy_to_shcluster={deploy_to_shcluster}")
|
|
|
|
# Parser les apps
|
|
try:
|
|
apps = json.loads(apps_json) if isinstance(apps_json, str) else apps_json
|
|
except (json.JSONDecodeError, TypeError) as e:
|
|
logger.error(f"JSON parse error: {e}")
|
|
apps = []
|
|
|
|
logger.info(f"Parsed apps: {len(apps)} items")
|
|
|
|
# NOTE: La vérification des limites est maintenant faite côté client
|
|
# Le serveur fait confiance aux informations envoyées par le client
|
|
|
|
# Valider les paramètres
|
|
if not git_url or not git_token or not commit_message or not apps:
|
|
response = {
|
|
"status": "error",
|
|
"message": "Missing required parameters"
|
|
}
|
|
self.wfile.write(json.dumps(response).encode())
|
|
return
|
|
|
|
# Créer un répertoire temporaire
|
|
temp_dir = tempfile.mkdtemp(prefix='splunk_git_')
|
|
logger.info(f"Created temp directory: {temp_dir}")
|
|
|
|
try:
|
|
# Préparer l'URL Git avec le token
|
|
git_url_with_token = self.prepare_git_url(git_url, git_token)
|
|
|
|
logger.info("Cloning repository...")
|
|
self.clone_repository(temp_dir, git_url_with_token, git_branch)
|
|
|
|
# Récupérer les applications
|
|
logger.info("Fetching applications from Splunk...")
|
|
app_directories = self.fetch_apps_directories(apps)
|
|
|
|
# Créer le dossier apps
|
|
apps_dir = os.path.join(temp_dir, 'apps')
|
|
os.makedirs(apps_dir, exist_ok=True)
|
|
|
|
# Copier les applications
|
|
logger.info("Copying applications to repository...")
|
|
apps_copied = 0
|
|
for app_data in app_directories:
|
|
app_name = app_data['name']
|
|
app_path = app_data['path']
|
|
dest_path = os.path.join(apps_dir, app_name)
|
|
|
|
if os.path.exists(app_path):
|
|
# Supprimer l'ancienne version si elle existe
|
|
if os.path.exists(dest_path):
|
|
logger.info(f"Removing old version of {app_name}")
|
|
shutil.rmtree(dest_path)
|
|
|
|
# Copier la nouvelle version
|
|
shutil.copytree(app_path, dest_path)
|
|
apps_copied += 1
|
|
|
|
# Compter les fichiers copiés
|
|
file_count = sum(len(files) for _, _, files in os.walk(dest_path))
|
|
logger.info(f"Copied app: {app_name} ({file_count} files)")
|
|
else:
|
|
logger.warning(f"App path not found: {app_path}")
|
|
|
|
logger.info(f"Total apps copied: {apps_copied}")
|
|
|
|
# Configurer git
|
|
subprocess.run(['git', 'config', 'user.email', 'splunk@splunk.local'],
|
|
cwd=temp_dir, capture_output=True)
|
|
subprocess.run(['git', 'config', 'user.name', 'Splunk Git Pusher'],
|
|
cwd=temp_dir, capture_output=True)
|
|
|
|
# Ajouter TOUS les fichiers (y compris les suppressions)
|
|
subprocess.run(['git', 'add', '--all'], cwd=temp_dir, capture_output=True)
|
|
|
|
# Afficher le statut Git pour debug
|
|
status_check = subprocess.run(['git', 'status', '--short'],
|
|
cwd=temp_dir, capture_output=True, text=True)
|
|
if status_check.stdout.strip():
|
|
logger.info(f"Git changes detected:\n{status_check.stdout[:500]}")
|
|
else:
|
|
logger.info("No Git changes detected")
|
|
|
|
# Message de commit avec infos de licence (envoyées par le client)
|
|
full_message = f"{commit_message}\n\n"
|
|
full_message += f"Pushed by: {user}\n"
|
|
full_message += f"License: {license_id or 'N/A'} ({license_type or 'N/A'})\n"
|
|
full_message += f"Timestamp: {datetime.now().isoformat()}"
|
|
|
|
# Vérifier s'il y a des changements à committer
|
|
status_result = subprocess.run(['git', 'status', '--porcelain'],
|
|
cwd=temp_dir, capture_output=True, text=True)
|
|
|
|
if status_result.stdout.strip():
|
|
# Il y a des changements, faire le commit
|
|
result = subprocess.run(['git', 'commit', '-m', full_message],
|
|
cwd=temp_dir, capture_output=True, text=True)
|
|
|
|
if result.returncode != 0:
|
|
logger.warning(f"Commit warning: {result.stderr}")
|
|
else:
|
|
logger.info("Commit created successfully")
|
|
else:
|
|
logger.info("No changes detected, skipping commit")
|
|
|
|
logger.info("Pushing to Git...")
|
|
result = subprocess.run(['git', 'push', 'origin', git_branch],
|
|
cwd=temp_dir, capture_output=True, text=True, timeout=60)
|
|
|
|
if result.returncode != 0:
|
|
# Vérifier si c'est juste "Everything up-to-date"
|
|
if "Everything up-to-date" in result.stderr or "Everything up-to-date" in result.stdout:
|
|
logger.info("Repository is already up-to-date")
|
|
else:
|
|
raise Exception(f"Push failed: {result.stderr}")
|
|
|
|
# NOTE: L'incrémentation des stats est maintenant faite côté client (JavaScript)
|
|
|
|
logger.info("Git push successful!")
|
|
|
|
# ============================================
|
|
# DÉPLOIEMENT VERS SH CLUSTER (optionnel)
|
|
# ============================================
|
|
|
|
deployer_result = None
|
|
|
|
if deploy_to_shcluster:
|
|
logger.info("Triggering deployment to SH Cluster...")
|
|
|
|
# Configurer le deployer
|
|
deployer_config = SH_DEPLOYER_CONFIG.copy()
|
|
if deployer_host:
|
|
deployer_config["host"] = deployer_host
|
|
if deployer_token:
|
|
deployer_config["token"] = deployer_token
|
|
|
|
# Appeler le SH Deployer pour pull + deploy
|
|
deployer_result = trigger_deployer_pull_and_deploy(
|
|
git_url=git_url,
|
|
git_token=git_token,
|
|
auth_user=sh_auth_user if sh_auth_user else None,
|
|
auth_pass=sh_auth_pass if sh_auth_pass else None,
|
|
config=deployer_config
|
|
)
|
|
|
|
if deployer_result.get("success"):
|
|
logger.info("SH Cluster deployment triggered successfully")
|
|
else:
|
|
logger.error(f"SH Cluster deployment failed: {deployer_result.get('error')}")
|
|
|
|
# Préparer la réponse
|
|
response = {
|
|
"status": "success",
|
|
"message": f"Successfully pushed {len(app_directories)} application(s) to Git",
|
|
"apps_pushed": len(app_directories),
|
|
"license_type": license_type or "N/A"
|
|
}
|
|
|
|
# Ajouter les infos de déploiement si activé
|
|
if deploy_to_shcluster:
|
|
response["shcluster_deployment"] = {
|
|
"triggered": True,
|
|
"success": deployer_result.get("success", False) if deployer_result else False,
|
|
"message": deployer_result.get("data", {}).get("message") if deployer_result and deployer_result.get("success") else deployer_result.get("error") if deployer_result else "Not triggered"
|
|
}
|
|
|
|
if deployer_result and deployer_result.get("success"):
|
|
response["message"] += " and triggered SH Cluster deployment"
|
|
else:
|
|
response["message"] += " (SH Cluster deployment failed)"
|
|
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
finally:
|
|
logger.info(f"Cleaning up {temp_dir}")
|
|
shutil.rmtree(temp_dir, ignore_errors=True)
|
|
|
|
except Exception as e:
|
|
logger.error(f"Git push error: {str(e)}", exc_info=True)
|
|
response = {
|
|
"status": "error",
|
|
"message": f"Error: {str(e)}"
|
|
}
|
|
self.wfile.write(json.dumps(response).encode())
|
|
|
|
def log_message(self, format, *args):
|
|
logger.debug(format % args)
|
|
|
|
@staticmethod
|
|
def prepare_git_url(git_url, token):
|
|
"""Préparer l'URL Git avec le token"""
|
|
if '@' in git_url:
|
|
protocol = git_url.split('://')[0]
|
|
rest = git_url.split('://', 1)[1]
|
|
host_and_path = rest.split('@', 1)[1] if '@' in rest else rest
|
|
return f"{protocol}://{token}@{host_and_path}"
|
|
|
|
if git_url.startswith('https://') or git_url.startswith('http://'):
|
|
protocol = git_url.split('://')[0]
|
|
host_and_path = git_url.split('://', 1)[1]
|
|
return f"{protocol}://{token}@{host_and_path}"
|
|
|
|
return git_url
|
|
|
|
@staticmethod
|
|
def clone_repository(dest_dir, git_url, branch):
|
|
"""Cloner le repository"""
|
|
try:
|
|
cmd = ['git', 'clone', '--depth', '1', '--branch', branch, git_url, dest_dir]
|
|
result = subprocess.run(cmd, capture_output=True, text=True, timeout=60)
|
|
|
|
if result.returncode != 0:
|
|
raise Exception(f"Clone failed: {result.stderr}")
|
|
|
|
logger.info("Repository cloned successfully")
|
|
except subprocess.TimeoutExpired:
|
|
raise Exception("Git clone operation timed out")
|
|
except FileNotFoundError:
|
|
raise Exception("Git is not installed on this system")
|
|
|
|
@staticmethod
|
|
def fetch_apps_directories(apps):
|
|
"""Récupérer les dossiers des applications"""
|
|
logger.info(f"Fetching directories for {len(apps)} applications")
|
|
|
|
splunk_home = '/opt/splunk'
|
|
apps_base_path = os.path.join(splunk_home, 'etc', 'apps')
|
|
|
|
app_directories = []
|
|
|
|
for app in apps:
|
|
app_id = app.get('id') or app.get('app_id')
|
|
app_path = os.path.join(apps_base_path, app_id)
|
|
|
|
if os.path.isdir(app_path):
|
|
app_directories.append({
|
|
'name': app_id,
|
|
'path': app_path,
|
|
'size': sum(os.path.getsize(os.path.join(dirpath, filename))
|
|
for dirpath, dirnames, filenames in os.walk(app_path)
|
|
for filename in filenames)
|
|
})
|
|
logger.info(f"Found app: {app_id}")
|
|
else:
|
|
logger.warning(f"App directory not found: {app_path}")
|
|
|
|
return app_directories
|
|
|
|
|
|
# ============================================
|
|
# FONCTIONS SH DEPLOYER
|
|
# ============================================
|
|
|
|
def call_deployer_agent(endpoint, method="GET", data=None, config=None):
|
|
"""
|
|
Appeler l'agent SH Deployer
|
|
|
|
Args:
|
|
endpoint: Endpoint à appeler (ex: /health, /pull, /deploy)
|
|
method: GET ou POST
|
|
data: Données à envoyer (dict)
|
|
config: Configuration (override SH_DEPLOYER_CONFIG)
|
|
|
|
Returns:
|
|
dict avec success, data ou error
|
|
"""
|
|
if config is None:
|
|
config = SH_DEPLOYER_CONFIG
|
|
|
|
if not config.get("enabled", True):
|
|
return {"success": False, "error": "SH Deployer is disabled"}
|
|
|
|
host = config.get("host", "10.10.40.14")
|
|
port = config.get("port", 9998)
|
|
use_ssl = config.get("use_ssl", True)
|
|
token = config.get("token", "")
|
|
timeout = config.get("timeout", 30)
|
|
|
|
protocol = "https" if use_ssl else "http"
|
|
url = f"{protocol}://{host}:{port}{endpoint}"
|
|
|
|
logger.info(f"Calling SH Deployer: {method} {url}")
|
|
|
|
try:
|
|
# Créer le contexte SSL (ignorer les certificats auto-signés)
|
|
ssl_context = ssl.create_default_context()
|
|
ssl_context.check_hostname = False
|
|
ssl_context.verify_mode = ssl.CERT_NONE
|
|
|
|
# Préparer les données
|
|
if data:
|
|
json_data = json.dumps(data).encode('utf-8')
|
|
else:
|
|
json_data = None
|
|
|
|
# Créer la requête
|
|
req = urllib.request.Request(url, data=json_data, method=method)
|
|
req.add_header('Content-Type', 'application/json')
|
|
req.add_header('X-Auth-Token', token)
|
|
|
|
# Exécuter la requête
|
|
with urllib.request.urlopen(req, timeout=timeout, context=ssl_context) as response:
|
|
response_data = json.loads(response.read().decode('utf-8'))
|
|
logger.info(f"SH Deployer response: {response_data}")
|
|
return {"success": True, "data": response_data}
|
|
|
|
except urllib.error.HTTPError as e:
|
|
error_body = e.read().decode('utf-8') if e.fp else str(e)
|
|
logger.error(f"SH Deployer HTTP error {e.code}: {error_body}")
|
|
return {"success": False, "error": f"HTTP {e.code}: {error_body}"}
|
|
|
|
except urllib.error.URLError as e:
|
|
logger.error(f"SH Deployer connection error: {e.reason}")
|
|
return {"success": False, "error": f"Connection error: {e.reason}"}
|
|
|
|
except Exception as e:
|
|
logger.error(f"SH Deployer error: {str(e)}")
|
|
return {"success": False, "error": str(e)}
|
|
|
|
|
|
def check_deployer_health(config=None):
|
|
"""Vérifier si l'agent SH Deployer est accessible"""
|
|
result = call_deployer_agent("/health", config=config)
|
|
return result.get("success", False)
|
|
|
|
|
|
def trigger_deployer_pull(git_url, git_token, config=None):
|
|
"""
|
|
Déclencher un pull sur le SH Deployer
|
|
|
|
Args:
|
|
git_url: URL du repository Git
|
|
git_token: Token Git pour l'authentification
|
|
config: Configuration du deployer
|
|
"""
|
|
data = {
|
|
"repo_url": git_url,
|
|
"git_token": git_token,
|
|
"apps_subdir": "apps"
|
|
}
|
|
|
|
return call_deployer_agent("/pull", method="POST", data=data, config=config)
|
|
|
|
|
|
def trigger_deployer_deploy(target_uri=None, auth_user=None, auth_pass=None, config=None):
|
|
"""
|
|
Déclencher le déploiement du bundle sur le SH Cluster
|
|
|
|
Args:
|
|
target_uri: URI du captain du SH Cluster (optionnel)
|
|
auth_user: Utilisateur Splunk
|
|
auth_pass: Mot de passe Splunk
|
|
config: Configuration du deployer
|
|
"""
|
|
data = {}
|
|
if target_uri:
|
|
data["target_uri"] = target_uri
|
|
if auth_user:
|
|
data["auth_user"] = auth_user
|
|
if auth_pass:
|
|
data["auth_pass"] = auth_pass
|
|
|
|
return call_deployer_agent("/deploy", method="POST", data=data, config=config)
|
|
|
|
|
|
def trigger_deployer_pull_and_deploy(git_url, git_token, target_uri=None, auth_user=None, auth_pass=None, config=None):
|
|
"""
|
|
Déclencher pull + deploy en une seule opération
|
|
"""
|
|
data = {
|
|
"repo_url": git_url,
|
|
"git_token": git_token,
|
|
"apps_subdir": "apps"
|
|
}
|
|
if target_uri:
|
|
data["target_uri"] = target_uri
|
|
if auth_user:
|
|
data["auth_user"] = auth_user
|
|
if auth_pass:
|
|
data["auth_pass"] = auth_pass
|
|
|
|
return call_deployer_agent("/pull-and-deploy", method="POST", data=data, config=config)
|
|
|
|
|
|
def get_deployer_status(config=None):
|
|
"""Récupérer le statut du SH Deployer"""
|
|
return call_deployer_agent("/status", config=config)
|
|
|
|
|
|
def start_server(port=9999, use_ssl=True):
|
|
"""Démarrer le serveur HTTP/HTTPS"""
|
|
import ssl
|
|
|
|
server = HTTPServer(('0.0.0.0', port), GitPusherRequestHandler)
|
|
|
|
ssl_enabled = False
|
|
|
|
if use_ssl:
|
|
# Chemins possibles pour les certificats (ordre de priorité)
|
|
cert_paths = [
|
|
# Certificats dédiés pour Git Pusher (recommandé)
|
|
('/opt/splunk/etc/apps/pusher_app_prem/local/certs/server.crt',
|
|
'/opt/splunk/etc/apps/pusher_app_prem/local/certs/server.key'),
|
|
# Certificats splunkweb
|
|
('/opt/splunk/etc/auth/splunkweb/cert.pem',
|
|
'/opt/splunk/etc/auth/splunkweb/privkey.pem'),
|
|
# Autre emplacement splunkweb
|
|
('/opt/splunk/etc/auth/splunkweb/splunkweb.pem',
|
|
'/opt/splunk/etc/auth/splunkweb/splunkweb.key'),
|
|
]
|
|
|
|
for cert_file, key_file in cert_paths:
|
|
logger.info(f"Trying SSL cert: {cert_file}")
|
|
if os.path.exists(cert_file) and os.path.exists(key_file):
|
|
try:
|
|
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
|
ssl_context.check_hostname = False
|
|
ssl_context.verify_mode = ssl.CERT_NONE
|
|
|
|
# Charger le certificat et la clé
|
|
ssl_context.load_cert_chain(certfile=cert_file, keyfile=key_file)
|
|
|
|
server.socket = ssl_context.wrap_socket(server.socket, server_side=True)
|
|
ssl_enabled = True
|
|
logger.info(f"SSL enabled using: {cert_file}")
|
|
break
|
|
except Exception as e:
|
|
logger.warning(f"Could not load SSL cert {cert_file}: {e}")
|
|
continue
|
|
else:
|
|
logger.debug(f"Cert not found: {cert_file} or {key_file}")
|
|
|
|
if not ssl_enabled:
|
|
logger.error("=" * 60)
|
|
logger.error("SSL CERTIFICATES NOT FOUND OR INVALID!")
|
|
logger.error("HTTPS requests from browser will fail!")
|
|
logger.error("")
|
|
logger.error("To fix, run these commands:")
|
|
logger.error(" mkdir -p /opt/splunk/etc/apps/pusher_app_prem/local/certs")
|
|
logger.error(" openssl req -x509 -newkey rsa:4096 \\")
|
|
logger.error(" -keyout /opt/splunk/etc/apps/pusher_app_prem/local/certs/server.key \\")
|
|
logger.error(" -out /opt/splunk/etc/apps/pusher_app_prem/local/certs/server.crt \\")
|
|
logger.error(" -days 365 -nodes -subj \"/CN=git-pusher\"")
|
|
logger.error("=" * 60)
|
|
|
|
protocol = "HTTPS" if ssl_enabled else "HTTP"
|
|
logger.info(f"Git Pusher server listening on 0.0.0.0:{port} ({protocol})")
|
|
|
|
# Afficher le statut de la licence au démarrage
|
|
license_status = validate_license()
|
|
if license_status.get("valid"):
|
|
logger.info(f"License: {license_status.get('type_name')} - {license_status.get('days_remaining')} days remaining")
|
|
else:
|
|
logger.warning(f"License: {license_status.get('error', 'Invalid')}")
|
|
|
|
server.serve_forever()
|
|
|
|
|
|
if __name__ == '__main__':
|
|
import argparse
|
|
parser = argparse.ArgumentParser(description='Git Pusher Server')
|
|
parser.add_argument('--no-ssl', action='store_true', help='Disable SSL/HTTPS')
|
|
parser.add_argument('--port', type=int, default=9999, help='Port number (default: 9999)')
|
|
args = parser.parse_args()
|
|
|
|
port = args.port
|
|
use_ssl = not args.no_ssl
|
|
|
|
logger.info(f"Starting Git Pusher on port {port} (SSL: {use_ssl})")
|
|
start_server(port, use_ssl) |