chore: sauvegarde complète avant factorisation executor
Some checks failed
security-audit / Bandit (scan statique) (push) Successful in 12s
security-audit / pip-audit (CVE dépendances) (push) Successful in 10s
security-audit / Scan secrets (grep) (push) Successful in 8s
tests / Lint (ruff + black) (push) Successful in 13s
tests / Tests unitaires (sans GPU) (push) Failing after 14s
tests / Tests sécurité (critique) (push) Has been skipped
Some checks failed
security-audit / Bandit (scan statique) (push) Successful in 12s
security-audit / pip-audit (CVE dépendances) (push) Successful in 10s
security-audit / Scan secrets (grep) (push) Successful in 8s
tests / Lint (ruff + black) (push) Successful in 13s
tests / Tests unitaires (sans GPU) (push) Failing after 14s
tests / Tests sécurité (critique) (push) Has been skipped
Point de sauvegarde incluant les fichiers non committés des sessions précédentes (systemd, docs, agents, GPU manager). Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -2218,6 +2218,157 @@ def download_agent_package(machine_id):
|
||||
)
|
||||
|
||||
|
||||
# =============================================================================
# Process mapping — Process Mining
# =============================================================================

# Root directory holding per-machine live-session folders. Each session
# subfolder is expected to contain a `live_events.jsonl` file, which the
# process-mining endpoints below discover and load.
LIVE_SESSIONS_PATH = BASE_PATH / "data" / "training" / "live_sessions"
@app.route('/process-mining')
def process_mining_page():
    """Render the process-mapping (process mining) page."""
    return render_template('process_mining.html')
@app.route('/api/process-mining/machines')
def process_mining_machines():
    """List available machines (directories under live_sessions/).

    A machine is any non-hidden directory (not starting with 'sess_',
    'embeddings' or '.') containing at least one session subfolder with a
    live_events.jsonl file. Returns {'machines': [...]} or a 500 payload.
    """
    try:
        machines = []
        if LIVE_SESSIONS_PATH.exists():
            skipped_prefixes = ('sess_', 'embeddings', '.')
            for machine_dir in sorted(LIVE_SESSIONS_PATH.iterdir()):
                if not machine_dir.is_dir() or machine_dir.name.startswith(skipped_prefixes):
                    continue
                # A session counts only if it carries a live_events.jsonl file.
                session_total = len([
                    sub for sub in machine_dir.iterdir()
                    if sub.is_dir() and (sub / "live_events.jsonl").exists()
                ])
                if session_total:
                    machines.append({
                        'machine_id': machine_dir.name,
                        'sessions_count': session_total,
                    })
        return jsonify({'machines': machines})
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/process-mining/discover', methods=['POST'])
def process_mining_discover():
    """Run process-mining discovery over recorded live sessions.

    Accepts an optional JSON body ``{"machine_id": "<name>"}``; when omitted
    or empty, sessions for all machines under LIVE_SESSIONS_PATH are loaded.
    On success returns BPMN/DFG diagram URLs (under /static/analytics/) and
    computed KPIs; otherwise a JSON error payload with 404, 500 or 503.
    """
    try:
        # Conditional import: the analytics bridge (and pm4py) are optional
        # dependencies — degrade to a 503 instead of crashing at import time.
        try:
            from core.analytics.process_mining_bridge import (
                sessions_to_event_log,
                discover_bpmn,
                compute_kpis,
                load_jsonl_session,
                PM4PY_AVAILABLE,
            )
        except ImportError:
            return jsonify({
                'error': "Module d'analyse non disponible",
                'detail': "Le module core.analytics.process_mining_bridge est introuvable.",
            }), 503

        if not PM4PY_AVAILABLE:
            return jsonify({
                'error': "Module d'analyse non disponible",
                'detail': "pm4py n'est pas installé. Installez-le : pip install pm4py",
            }), 503

        data = request.get_json(silent=True) or {}
        machine_id = data.get('machine_id', '')

        # Determine which session directories to load.
        if machine_id:
            # Security: machine_id is client-supplied and joined into a path.
            # Reject anything that is not a plain directory name to prevent
            # path traversal outside LIVE_SESSIONS_PATH (e.g. "../../etc").
            if machine_id in ('.', '..') or machine_id != Path(machine_id).name:
                return jsonify({'error': f"Machine '{machine_id}' introuvable"}), 404
            base_dir = LIVE_SESSIONS_PATH / machine_id
            if not base_dir.exists():
                return jsonify({'error': f"Machine '{machine_id}' introuvable"}), 404
        else:
            base_dir = LIVE_SESSIONS_PATH

        # Collect the live_events.jsonl files recursively.
        jsonl_files = list(base_dir.rglob("live_events.jsonl"))
        if not jsonl_files:
            return jsonify({
                'error': "Aucune session trouvée",
                'detail': "Aucun fichier live_events.jsonl dans le dossier sélectionné.",
            }), 404

        # Load every session (best effort: unreadable files are skipped).
        all_events, sessions_loaded = _load_live_sessions(jsonl_files, load_jsonl_session)

        if not all_events:
            return jsonify({
                'error': "Aucun événement exploitable",
                'detail': "Les fichiers JSONL sont vides ou ne contiennent pas d'événements significatifs.",
            }), 404

        # Convert the raw events into an event log dataframe.
        event_log_df = sessions_to_event_log(all_events)

        if event_log_df.empty:
            return jsonify({
                'error': "Aucune activité détectée",
                'detail': "Les sessions ne contiennent pas d'événements significatifs (clics, saisies, etc.).",
            }), 404

        # Output directory for generated diagram images, served via /static/.
        analytics_output_dir = Path(__file__).parent / "static" / "analytics"
        analytics_output_dir.mkdir(parents=True, exist_ok=True)

        # Unique run name keyed on machine id + timestamp to avoid collisions.
        run_name = f"{machine_id or 'all'}_{int(time.time())}"

        # BPMN discovery.
        bpmn_result = discover_bpmn(
            event_log_df,
            output_dir=str(analytics_output_dir),
            name=run_name,
        )

        # KPIs.
        kpis = compute_kpis(event_log_df)

        return jsonify({
            'success': True,
            'sessions_loaded': sessions_loaded,
            'machine_id': machine_id or 'toutes',
            'bpmn_image_url': _static_analytics_url(bpmn_result.get('bpmn_image_path')),
            'dfg_image_url': _static_analytics_url(bpmn_result.get('dfg_image_path')),
            'kpis': kpis,
        })

    except Exception as e:
        api_logger.error(f"Erreur process mining: {e}", exc_info=True)
        return jsonify({'error': str(e)}), 500


def _load_live_sessions(jsonl_files, load_jsonl_session):
    """Load events from each JSONL file; return (all_events, sessions_loaded).

    Files that fail to parse are logged with a warning and skipped. Events
    lacking a 'session_id' are tagged with their parent directory name.
    """
    all_events = []
    sessions_loaded = 0
    for jsonl_path in jsonl_files:
        try:
            events = load_jsonl_session(str(jsonl_path))
            # Inject session_id when absent (derived from the parent folder).
            session_id = jsonl_path.parent.name
            for evt in events:
                if 'session_id' not in evt:
                    evt['session_id'] = session_id
            all_events.extend(events)
            sessions_loaded += 1
        except Exception as e:
            api_logger.warning(f"Erreur chargement {jsonl_path}: {e}")
    return all_events, sessions_loaded


def _static_analytics_url(image_path):
    """Map an analytics image path to its /static/ URL; None when unset."""
    if not image_path:
        return None
    return f"/static/analytics/{Path(image_path).name}"
# =============================================================================
# Audit & Traçabilité — Proxy vers le serveur streaming (port 5005)
# =============================================================================
Reference in New Issue
Block a user