Files
Dom 82d7b38cff
Some checks failed
security-audit / Bandit (scan statique) (push) Successful in 12s
security-audit / pip-audit (CVE dépendances) (push) Successful in 11s
security-audit / Scan secrets (grep) (push) Successful in 8s
tests / Lint (ruff + black) (push) Successful in 13s
tests / Tests unitaires (sans GPU) (push) Failing after 14s
tests / Tests sécurité (critique) (push) Has been skipped
feat(dashboard): page Base de connaissances — métriques FAISS, sessions, patterns
Nouvelle page /knowledge-base avec :
- Mémoire visuelle : 331 vecteurs FAISS / 13666 embeddings (alerte consolidation)
- Sessions observées : 56 sessions, 6.66 Go, 3 machines
- Réflexes natifs : 16 patterns UI en 6 catégories
- Workflows appris : 29

Onglet 📚 Connaissances ajouté dans toute la navigation.
Tout en français, dark theme, zéro jargon.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 17:41:23 +02:00

2669 lines
94 KiB
Python

#!/usr/bin/env python3
"""
Application Flask pour le Dashboard Web RPA Vision V3
Fonctionnalités:
- Monitoring temps réel avec WebSocket
- Gestion des workflows
- Visualisation des sessions et screenshots
- Graphiques de performance
Sécurité (Fix P0-A) :
- HTTP Basic Auth sur tous les endpoints (middleware before_request).
- Credentials via DASHBOARD_USER / DASHBOARD_PASSWORD.
- Exceptions : /health, /healthz, /api/health (monitoring externe).
- Désactivation auth en dev local : DASHBOARD_AUTH_DISABLED=true
"""
import base64
import hmac
import io
import os
import sys
import json
import subprocess
import threading
import time
import zipfile
from pathlib import Path
from datetime import datetime
from flask import Flask, render_template, jsonify, request, send_file, Response
from flask_socketio import SocketIO, emit
from prometheus_client import generate_latest, CONTENT_TYPE_LATEST
# Ajouter le répertoire parent au path
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.persistence import StorageManager
from core.models import RawSession
from core.monitoring import get_logger, api_logger
from core.monitoring.chain_manager import ChainManager
from core.monitoring.trigger_manager import TriggerManager
from core.monitoring.log_exporter import LogExporter
# AutomationScheduler retiré — l'onglet Exécution legacy est supprimé
# Modules pour backup et versioning
from core.system.backup_exporter import get_backup_exporter
from core.system.version_manager import get_version_manager
app = Flask(__name__)
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY', 'dev-key-change-in-production')
socketio = SocketIO(app, cors_allowed_origins="*", async_mode='threading')
# =============================================================================
# Fix P0-A : HTTP Basic Auth sur le dashboard (port 5001)
# =============================================================================
# Avant ce fix, 71 endpoints étaient exposés sans authentification.
# On ajoute un middleware Flask (before_request) qui exige un header
# Authorization: Basic <b64>. Les credentials sont pris dans l'environnement.
#
# Chemins publics (pas de challenge) : healthcheck uniquement — ils servent au
# monitoring externe (Prometheus, systemd, k8s, NPM reverse proxy).
#
# Pour désactiver l'auth (dev local, tests) : DASHBOARD_AUTH_DISABLED=true.
# Les tests unitaires définissent cette variable via un flag Flask config.
# HTTP Basic Auth credentials, read once at import time from the environment.
_DASHBOARD_USER = os.getenv("DASHBOARD_USER", "lea").strip()
_DASHBOARD_PASSWORD = os.getenv("DASHBOARD_PASSWORD", "").strip()
_DASHBOARD_AUTH_DISABLED = os.getenv("DASHBOARD_AUTH_DISABLED", "").lower() in (
    "1", "true", "yes",
)

# No password configured and auth not explicitly disabled: fall back to a fixed
# "safe-ish" default (long, non-trivial) and emit a loud warning so the operator
# sets DASHBOARD_PASSWORD before any production deployment. A per-boot random
# password is deliberately avoided (same pitfall as an auto-generated API token:
# every restart would lock out all clients).
if not _DASHBOARD_PASSWORD and not _DASHBOARD_AUTH_DISABLED:
    _DASHBOARD_PASSWORD = "changeme-dashboard-Medecin2026!"
    api_logger.warning(
        "[SÉCURITÉ] DASHBOARD_PASSWORD non défini en env — utilisation d'un "
        "mot de passe par défaut temporaire. DÉFINIR DASHBOARD_PASSWORD "
        "AVANT TOUT DÉPLOIEMENT (identifiant : DASHBOARD_USER)."
    )

# Paths that never require auth (external healthchecks).
_PUBLIC_DASHBOARD_PATHS = {
    "/health",
    "/healthz",
    "/api/health",
}
def _dashboard_auth_ok(header_value: str) -> bool:
    """Validate an ``Authorization: Basic <b64>`` header value.

    Returns True only when the decoded ``user:password`` pair matches the
    configured credentials. The comparison is constant-time
    (``hmac.compare_digest``) to avoid timing attacks.

    Args:
        header_value: Raw value of the Authorization header ("" if absent).

    Returns:
        bool: True on matching credentials; False on a malformed header,
        invalid base64, missing colon, or wrong user/password.
    """
    if not header_value or not header_value.lower().startswith("basic "):
        return False
    try:
        # len("Basic ") == 6 — everything after is the base64 payload.
        decoded = base64.b64decode(header_value[6:].strip()).decode("utf-8")
    except (ValueError, UnicodeDecodeError):
        # binascii.Error (bad base64) is a ValueError subclass.
        return False
    if ":" not in decoded:
        return False
    user, _, password = decoded.partition(":")
    # Compare as bytes: hmac.compare_digest raises TypeError on non-ASCII str
    # inputs, which a crafted header could trigger (turning a clean 401 into
    # a 500). Encoding both sides to UTF-8 keeps the check constant-time and
    # accepts any credential characters.
    user_ok = hmac.compare_digest(user.encode("utf-8"), _DASHBOARD_USER.encode("utf-8"))
    pwd_ok = hmac.compare_digest(password.encode("utf-8"), _DASHBOARD_PASSWORD.encode("utf-8"))
    return user_ok and pwd_ok
@app.before_request
def _dashboard_basic_auth_middleware():
    """HTTP Basic Auth gate applied to every HTTP request of the dashboard.

    Bypassed entirely when DASHBOARD_AUTH_DISABLED=true (local dev) or when
    the app runs under pytest (``TESTING``) without explicitly opting in via
    ``DASHBOARD_AUTH_ENABLED`` — existing test fixtures keep working.
    Paths in _PUBLIC_DASHBOARD_PATHS (healthchecks) are always public.
    Any other request must carry a valid ``Authorization: Basic <b64>``
    header or receives a 401 with a ``WWW-Authenticate`` challenge.

    WebSocket note: Flask-SocketIO's HTTP upgrade request does pass through
    this middleware (threading mode); post-handshake socket frames bypass
    Flask, which is acceptable for an MVP since the client must already have
    authenticated the initial HTTP request.
    """
    # Global bypasses: dev flag first, then pytest without explicit opt-in.
    if _DASHBOARD_AUTH_DISABLED:
        return None
    if app.config.get("TESTING") and not app.config.get("DASHBOARD_AUTH_ENABLED"):
        return None
    # Healthcheck paths never get a challenge.
    if (request.path or "/") in _PUBLIC_DASHBOARD_PATHS:
        return None
    # Everything else requires valid Basic credentials.
    if _dashboard_auth_ok(request.headers.get("Authorization", "")):
        return None
    return Response(
        '{"error": "authentication required"}',
        status=401,
        mimetype="application/json",
        headers={"WWW-Authenticate": 'Basic realm="RPA Vision V3 Dashboard"'},
    )
@app.get('/healthz')
def healthz():
    """Minimal healthcheck (systemd/k8s). Public — no auth."""
    payload = {
        'status': 'ok',
        'service': 'rpa-vision-v3-dashboard',
        'timestamp': datetime.now().isoformat(),
    }
    return jsonify(payload)
@app.get('/api/health')
def api_health():
    """Public JSON healthcheck — no auth (external monitoring)."""
    payload = {
        'status': 'ok',
        'service': 'rpa-vision-v3-dashboard',
        'timestamp': datetime.now().isoformat(),
    }
    return jsonify(payload)
@app.get('/health')
def health():
    """Public healthcheck — no auth (NPM reverse proxy)."""
    payload = {
        'status': 'ok',
        'service': 'rpa-vision-v3-dashboard',
        'timestamp': datetime.now().isoformat(),
    }
    return jsonify(payload)
# Filesystem layout, relative to the repository root (this file's grandparent).
BASE_PATH = Path(__file__).parent.parent
DATA_PATH = BASE_PATH / "data" / "training"
SESSIONS_PATH = DATA_PATH / "sessions"
WORKFLOWS_PATH = DATA_PATH / "workflows"
LOGS_PATH = BASE_PATH / "logs"
# Persistence layer for sessions/workflows under DATA_PATH.
storage = StorageManager(base_path=str(DATA_PATH))
# Monitoring managers: workflow chains, triggers, and log export (ZIP).
chain_manager = ChainManager(storage_path=DATA_PATH / "chains")
trigger_manager = TriggerManager(storage_path=DATA_PATH / "triggers")
log_exporter = LogExporter(logs_path=LOGS_PATH)
# Automation scheduler, execution_state and performance_metrics were removed
# (legacy "Exécution" tab dropped — replay now goes through Agent V1 + VWB).
# =============================================================================
# Processing Status Tracker
# =============================================================================
# JSON file mapping session_id -> processing status (see helpers below).
PROCESSING_STATUS_FILE = DATA_PATH / "processing_status.json"
def load_processing_status():
    """Load the per-session processing statuses from disk.

    Returns:
        dict: Mapping ``session_id -> status dict`` (schema documented in
        ``set_session_processing_status``). Returns an empty dict when the
        file is missing, unreadable or holds invalid JSON — the tracker is
        best-effort and must never break an API endpoint.
    """
    try:
        if PROCESSING_STATUS_FILE.exists():
            # Explicit UTF-8: labels may contain accents and the platform
            # default encoding is not guaranteed to handle them.
            with open(PROCESSING_STATUS_FILE, 'r', encoding='utf-8') as f:
                return json.load(f)
    except (OSError, ValueError) as e:
        # json.JSONDecodeError is a ValueError subclass. Log rather than
        # silently swallowing so a corrupted status file is visible.
        api_logger.warning(f"Processing status illisible, reset: {e}")
    return {}
def save_processing_status(status_dict):
    """Persist the processing-status mapping to PROCESSING_STATUS_FILE.

    Best-effort: any I/O or serialization failure is logged and swallowed so
    a status write can never take down an endpoint.

    Args:
        status_dict: Mapping ``session_id -> status dict``.
    """
    try:
        PROCESSING_STATUS_FILE.parent.mkdir(parents=True, exist_ok=True)
        # UTF-8 + ensure_ascii=False keeps accented labels human-readable
        # on disk instead of \uXXXX escapes.
        with open(PROCESSING_STATUS_FILE, 'w', encoding='utf-8') as f:
            json.dump(status_dict, f, indent=2, ensure_ascii=False)
    except Exception as e:
        api_logger.error(f"Erreur sauvegarde processing status: {e}")
def get_session_processing_status(session_id):
    """Return the processing status of one session.

    Falls back to a pristine "pending" status when the session has never
    been processed.
    """
    default = {"state": "pending", "processed_at": None}
    return load_processing_status().get(session_id, default)
def set_session_processing_status(session_id, state, stats=None, error=None):
    """Record and persist the processing status of one session.

    Args:
        session_id: Session identifier used as the mapping key.
        state: One of "pending", "processing", "completed", "failed".
        stats: Optional stats payload, stored as-is.
        error: Optional exception or message, stringified for JSON storage.
    """
    entry = {
        "state": state,
        "updated_at": datetime.now().isoformat(),
        # processed_at is only stamped on successful completion.
        "processed_at": datetime.now().isoformat() if state == "completed" else None,
        "stats": stats,
        "error": str(error) if error else None,
    }
    statuses = load_processing_status()
    statuses[session_id] = entry
    save_processing_status(statuses)
# =============================================================================
# Routes principales
# =============================================================================
@app.route('/')
def index():
    """Dashboard home page (renders templates/index.html)."""
    return render_template('index.html')
# =============================================================================
# API Système
# =============================================================================
@app.route('/api/system/status')
def system_status():
    """System status: session/workflow counts plus an ML-dependency probe."""
    try:
        sessions_count = len(list(SESSIONS_PATH.glob('*'))) if SESSIONS_PATH.exists() else 0
        workflows_count = len(list(WORKFLOWS_PATH.glob('*.json'))) if WORKFLOWS_PATH.exists() else 0
        # Probe the heavy ML dependencies (import errors mean "not available").
        try:
            import torch  # noqa: F401
            import faiss  # noqa: F401
            dependencies_ok = True
        except ImportError:
            dependencies_ok = False
        return jsonify({
            'status': 'online',
            'sessions_count': sessions_count,
            'workflows_count': workflows_count,
            'dependencies_ok': dependencies_ok,
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/system/performance')
def system_performance():
    """Performance metrics: FAISS index stats and embedding cache stats."""
    try:
        faiss_stats = {}
        embedding_stats = {}
        # FAISS stats — loaded from disk only when both files are present.
        try:
            from core.embedding.faiss_manager import FAISSManager
            index_file = DATA_PATH / "faiss_index" / "main.index"
            meta_file = DATA_PATH / "faiss_index" / "main.metadata"
            if index_file.exists() and meta_file.exists():
                faiss_stats = FAISSManager.load(index_file, meta_file).get_stats()
            else:
                faiss_stats = {"total_vectors": 0, "status": "index_not_found"}
        except Exception as e:
            faiss_stats = {"error": str(e)}
        # Embedding cache stats — best-effort, stays {} on failure.
        try:
            from core.embedding.embedding_cache import EmbeddingCache
            embedding_stats = EmbeddingCache().get_stats()
        except Exception:
            pass
        return jsonify({
            'faiss': faiss_stats,
            'embedding_cache': embedding_stats,
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/system/faiss/test', methods=['POST'])
def test_faiss_index():
    """Smoke-test the FAISS index with a random similarity search.

    Loads the persisted index, searches the k nearest neighbours of a random
    unit-norm vector and reports load/search timings, the top matches and
    tuning recommendations. Returns success=False (HTTP 200) when the index
    is missing or empty; 500 on unexpected errors.
    """
    try:
        # NOTE: `time` is the module-level import; the redundant local
        # `import time` that shadowed it was removed.
        import numpy as np
        from core.embedding.faiss_manager import FAISSManager
        faiss_index_path = DATA_PATH / "faiss_index" / "main.index"
        faiss_metadata_path = DATA_PATH / "faiss_index" / "main.metadata"
        if not faiss_index_path.exists() or not faiss_metadata_path.exists():
            return jsonify({
                'success': False,
                'error': 'Index FAISS non trouvé',
                'recommendation': 'Traitez des sessions pour créer l\'index'
            })
        # Measure index load time (ms).
        start_load = time.time()
        fm = FAISSManager.load(faiss_index_path, faiss_metadata_path)
        load_time = (time.time() - start_load) * 1000
        stats = fm.get_stats()
        total_vectors = stats.get('total_vectors', 0)
        if total_vectors == 0:
            return jsonify({
                'success': False,
                'error': 'Index vide (0 vecteurs)',
                'recommendation': 'Traitez des sessions pour ajouter des vecteurs'
            })
        # Random unit-norm query vector matching the index dimensionality.
        dimensions = stats.get('dimensions', 512)
        test_vector = np.random.rand(dimensions).astype(np.float32)
        test_vector = test_vector / np.linalg.norm(test_vector)
        # Measure k-NN search time (ms).
        start_search = time.time()
        results = fm.search_similar(test_vector, k=min(5, total_vectors))
        search_time = (time.time() - start_search) * 1000
        # Flatten SearchResult objects (embedding_id, similarity, distance, metadata).
        result_details = []
        for i, sr in enumerate(results):
            meta = sr.metadata or {}
            result_details.append({
                'rank': i + 1,
                'score': round(sr.similarity, 4),
                'target_id': meta.get('target_id', sr.embedding_id[:20] if sr.embedding_id else 'N/A'),
                'session_id': meta.get('session_id', '')[:20] if meta.get('session_id') else 'N/A'
            })
        # Heuristic tuning hints based on index size/type and timings.
        recommendations = []
        if total_vectors > 10000 and stats.get('index_type') == 'Flat':
            recommendations.append('Passez à un index IVF pour de meilleures performances')
        if not stats.get('use_gpu') and total_vectors > 50000:
            recommendations.append('Activez le GPU pour accélérer les recherches')
        if search_time > 100:
            recommendations.append('Temps de recherche élevé - optimisez l\'index')
        return jsonify({
            'success': True,
            'load_time_ms': round(load_time, 2),
            'search_time_ms': round(search_time, 2),
            'total_vectors': total_vectors,
            'results_count': len(results),
            'results': result_details,
            'index_healthy': search_time < 100 and len(results) > 0,
            'recommendations': recommendations,
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e),
            'recommendation': 'Vérifiez les logs pour plus de détails'
        }), 500
# =============================================================================
# API Sessions
# =============================================================================
@app.route('/api/agent/sessions')
def list_sessions():
    """List all agent sessions found under SESSIONS_PATH.

    Query params:
        hide_empty: "true" (default) excludes sessions with no screenshots.

    Every session JSON file (top level or one directory deep) is reported as
    one session with its size, screenshot count and processing status.
    """
    try:
        sessions = []
        hide_empty = request.args.get('hide_empty', 'true').lower() == 'true'
        if not SESSIONS_PATH.exists():
            return jsonify({'sessions': [], 'total': 0, 'hidden_empty': 0})
        for session_dir in SESSIONS_PATH.iterdir():
            if not session_dir.is_dir():
                continue
            # Session JSONs can live at the top level or one subdirectory deep.
            json_files = list(session_dir.glob('*.json')) + list(session_dir.glob('*/*.json'))
            if not json_files:
                continue
            # Each JSON file counts as a separate session.
            for json_path in json_files:
                try:
                    # Prefer the typed model; fall back to raw JSON for legacy files.
                    try:
                        session = RawSession.load_from_file(json_path)
                        session_id = session.session_id
                        started_at = session.started_at
                        ended_at = session.ended_at
                        events_count = len(session.events)
                        user = session.user
                        context = session.context
                    except Exception:
                        with open(json_path, 'r', encoding='utf-8') as f:
                            data = json.load(f)
                        session_id = data.get('session_id', json_path.stem)
                        started_at = datetime.fromisoformat(data.get('started_at', '2025-01-01T00:00:00'))
                        ended_at = datetime.fromisoformat(data.get('ended_at')) if data.get('ended_at') else None
                        events_count = len(data.get('events', []))
                        user = data.get('user', 'unknown')
                        context = data.get('context', {})
                    # Session size = JSON file + all its screenshots.
                    size_bytes = json_path.stat().st_size
                    # Screenshots sit next to the JSON: "screenshots" (standard
                    # layout) or "shots" (agent_v0 layout).
                    json_parent = json_path.parent
                    screenshot_files = []
                    for shots_dirname in ("screenshots", "shots"):
                        shots_dir = json_parent / shots_dirname
                        if shots_dir.exists():
                            screenshot_files.extend(list(shots_dir.glob('*.png')))
                    for img_file in screenshot_files:
                        size_bytes += img_file.stat().st_size
                    size_mb = round(size_bytes / (1024 * 1024), 2)
                    processing_info = get_session_processing_status(session_id)
                    sessions.append({
                        'session_id': session_id,
                        'started_at': started_at.isoformat(),
                        'ended_at': ended_at.isoformat() if ended_at else None,
                        'events_count': events_count,
                        'screenshots_count': len(screenshot_files),
                        'user': user,
                        'context': context,
                        'size_mb': size_mb,
                        'path': str(json_path.parent),
                        'json_path': str(json_path),
                        'processing_state': processing_info.get('state', 'pending'),
                        'processed_at': processing_info.get('processed_at'),
                        'processing_stats': processing_info.get('stats')
                    })
                except Exception as e:
                    # Structured logging instead of print() so failures show up
                    # in the API logs like everywhere else in this module.
                    api_logger.warning(f"Erreur lecture session {json_path.name}: {e}")
                    continue
        sessions.sort(key=lambda x: x['started_at'], reverse=True)
        # Optionally hide screenshot-less sessions.
        if hide_empty:
            all_sessions = sessions
            sessions = [s for s in sessions if s['screenshots_count'] > 0]
            hidden_empty = len(all_sessions) - len(sessions)
        else:
            hidden_empty = len([s for s in sessions if s['screenshots_count'] == 0])
        return jsonify({
            'sessions': sessions,
            'total': len(sessions),
            'hidden_empty': hidden_empty
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/agent/sessions/cleanup-empty', methods=['POST'])
def cleanup_empty_sessions():
    """Delete session directories that contain JSON data but no screenshots.

    Returns the names of the deleted directories plus per-directory errors.
    """
    import shutil  # hoisted: was re-imported inside the deletion loop
    try:
        deleted = []
        errors = []
        if not SESSIONS_PATH.exists():
            # Same response shape as the success path below (previously this
            # returned {'deleted': 0}, inconsistent with 'deleted' as a list).
            return jsonify({'success': True, 'deleted_count': 0,
                            'deleted': [], 'errors': []})
        for session_dir in SESSIONS_PATH.iterdir():
            if not session_dir.is_dir():
                continue
            # A session is "empty" when no PNG exists anywhere below it.
            screenshots = list(session_dir.glob('**/*.png'))
            if len(screenshots) == 0:
                # Only delete directories that really are sessions (>= 1 JSON).
                json_files = list(session_dir.glob('**/*.json'))
                if json_files:
                    try:
                        shutil.rmtree(session_dir)
                        deleted.append(session_dir.name)
                    except Exception as e:
                        errors.append({'path': str(session_dir), 'error': str(e)})
        return jsonify({
            'success': True,
            'deleted_count': len(deleted),
            'deleted': deleted,
            'errors': errors
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/agent/sessions/<session_id>')
def get_session(session_id):
    """Full details of one session: events plus screenshot URLs.

    The same session_id may exist in several directories (e.g. one copy with
    screenshots, one without); all matches are collected and the copy that
    has screenshots is preferred.
    """
    try:
        # Collect every match; prefer the one that has screenshots.
        candidates = []
        for dir_path in SESSIONS_PATH.iterdir():
            if not dir_path.is_dir():
                continue
            # Session JSONs can be at the top level or one directory deep.
            json_files = list(dir_path.glob('*.json')) + list(dir_path.glob('*/*.json'))
            for json_path in json_files:
                try:
                    session = RawSession.load_from_file(json_path)
                    if session.session_id == session_id:
                        session_dir = json_path.parent
                        has_shots = (session_dir / "shots").exists() or (session_dir / "screenshots").exists()
                        candidates.append((json_path, session_dir, has_shots))
                except Exception:
                    # Unreadable/foreign JSON files are simply skipped.
                    continue
        if not candidates:
            return jsonify({'error': 'Session non trouvée'}), 404
        # Sort so candidates with screenshots (has_shots=True) come first.
        candidates.sort(key=lambda x: x[2], reverse=True)
        session_file, session_dir, _ = candidates[0]
        session = RawSession.load_from_file(session_file)
        # Build screenshot descriptors with their serving URLs.
        screenshots = []
        # Both layouts are checked: "screenshots" (standard) and "shots" (agent_v0).
        screenshots_dir = session_dir / "screenshots"
        shots_dir = session_dir / "shots"
        for img_dir in [screenshots_dir, shots_dir]:
            if img_dir.exists():
                for img_file in sorted(img_dir.glob('*.png')):
                    screenshots.append({
                        'filename': img_file.name,
                        'url': f'/api/agent/sessions/{session_id}/screenshot/{img_file.name}',
                        'size': img_file.stat().st_size
                    })
        return jsonify({
            'session': {
                'session_id': session.session_id,
                'started_at': session.started_at.isoformat(),
                'ended_at': session.ended_at.isoformat() if session.ended_at else None,
                'user': session.user,
                'context': session.context,
                'environment': session.environment,
                'events': [
                    {
                        'type': e.type,
                        # Raw events carry the timestamp in `t`; defaults to 0
                        # when absent. NOTE(review): unit not shown here — confirm.
                        'timestamp': e.t if hasattr(e, 't') else 0,
                        'window': e.window,
                        'screenshot_id': e.screenshot_id
                    }
                    for e in session.events
                ]
            },
            'screenshots': screenshots,
            'screenshots_count': len(screenshots)
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/agent/sessions/<session_id>/screenshot/<filename>')
def get_screenshot(session_id, filename):
    """Serve one screenshot PNG.

    By default, if a PII-blurred variant `<stem>_blurred.png` exists next to
    the requested file, it is served instead (GDPR-compliant display). Pass
    `?raw=1` to get the raw version (meant for training/grounding endpoints,
    to be kept behind auth).

    Note: the lookup matches on filename alone across every session
    directory (session_id is not used to narrow the search); the first hit
    wins.
    """
    try:
        want_raw = request.args.get('raw', '0') in ('1', 'true', 'yes')
        # Look for the file across all session directories, both layouts
        # ("screenshots" and "shots"), one subdirectory deep as well.
        screenshot_path = None
        for dir_path in SESSIONS_PATH.iterdir():
            if not dir_path.is_dir():
                continue
            possible_paths = [
                dir_path / "screenshots" / filename,
                dir_path / "shots" / filename,
            ]
            for subdir in dir_path.iterdir():
                if subdir.is_dir():
                    possible_paths.extend([
                        subdir / "screenshots" / filename,
                        subdir / "shots" / filename,
                    ])
            for path in possible_paths:
                if path.exists():
                    screenshot_path = path
                    break
            if screenshot_path:
                break
        if not screenshot_path:
            return jsonify({'error': 'Screenshot non trouvé'}), 404
        # Prefer the blurred variant unless the caller explicitly asked for
        # raw (and the requested file is not itself already blurred).
        if not want_raw and "_blurred" not in screenshot_path.stem:
            blurred_candidate = screenshot_path.with_name(
                f"{screenshot_path.stem}_blurred{screenshot_path.suffix}"
            )
            if blurred_candidate.is_file():
                screenshot_path = blurred_candidate
        return send_file(screenshot_path, mimetype='image/png')
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/agent/sessions/<session_id>/process', methods=['POST'])
def process_session(session_id):
    """Start asynchronous processing of one session.

    Marks the session "processing", runs the pipeline in a daemon thread and
    emits `processing_complete` / `processing_error` over WebSocket when done.
    """
    try:
        # session_id comes straight from the URL: refuse anything that could
        # escape SESSIONS_PATH before touching the filesystem.
        if not session_id or session_id in ('.', '..') or '/' in session_id or '\\' in session_id:
            return jsonify({'error': 'session_id invalide'}), 400
        session_dir = SESSIONS_PATH / session_id
        if not session_dir.exists():
            return jsonify({'error': 'Session non trouvée'}), 404
        set_session_processing_status(session_id, "processing")
        from server.processing_pipeline import process_session_async

        def process_in_background():
            # Runs in a daemon thread; outcome is recorded in the status file
            # and broadcast over SocketIO.
            try:
                stats = process_session_async(session_id, str(DATA_PATH))
                set_session_processing_status(session_id, "completed", stats=stats)
                socketio.emit('processing_complete', {
                    'session_id': session_id,
                    'stats': stats
                })
            except Exception as e:
                set_session_processing_status(session_id, "failed", error=e)
                socketio.emit('processing_error', {
                    'session_id': session_id,
                    'error': str(e)
                })

        thread = threading.Thread(target=process_in_background, daemon=True)
        thread.start()
        return jsonify({'status': 'started', 'session_id': session_id})
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/agent/sessions/<session_id>/rename', methods=['PATCH', 'POST'])
def rename_session_workflow(session_id):
    """Rename the workflow label (``context['training_label']``) of a session.

    Body: JSON ``{"name": "<new label>"}``. The name is sanitized (filesystem
    reserved characters replaced, underscores collapsed, 60-char max) and the
    session JSON file is rewritten in place.
    """
    try:
        # silent=True + isinstance: a missing body or non-string name must be
        # a clean 400, not an AttributeError turned into a 500.
        data = request.get_json(silent=True)
        if not data or not isinstance(data.get('name'), str):
            return jsonify({'error': 'Le champ "name" est requis'}), 400
        new_name = data['name'].strip()
        if not new_name:
            return jsonify({'error': 'Le nom ne peut pas être vide'}), 400
        # Sanitize: strip filesystem-reserved characters, collapse underscores.
        import re
        new_name = re.sub(r'[<>:"/\\|?*]', '_', new_name)
        new_name = re.sub(r'_+', '_', new_name).strip('_')
        if len(new_name) > 60:
            new_name = new_name[:57] + "..."
        # Locate the session's JSON file (top level or one directory deep).
        session_file = None
        for dir_path in SESSIONS_PATH.iterdir():
            if not dir_path.is_dir():
                continue
            json_files = list(dir_path.glob('*.json')) + list(dir_path.glob('*/*.json'))
            for json_path in json_files:
                try:
                    session = RawSession.load_from_file(json_path)
                    if session.session_id == session_id:
                        session_file = json_path
                        break
                except Exception:
                    continue
            if session_file:
                break
        if not session_file:
            return jsonify({'error': 'Session non trouvée'}), 404
        # Update the label and rewrite the file in place.
        session = RawSession.load_from_file(session_file)
        old_name = session.context.get('training_label', '')
        session.context['training_label'] = new_name
        with open(session_file, 'w', encoding='utf-8') as f:
            json.dump(session.to_json(), f, ensure_ascii=False, indent=2)
        return jsonify({
            'success': True,
            'session_id': session_id,
            'old_name': old_name,
            'new_name': new_name
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
# =============================================================================
# API Workflows
# =============================================================================
@app.route('/api/workflows')
def list_workflows():
    """List all workflows stored as JSON files under WORKFLOWS_PATH.

    Query params:
        hide_unnamed: "true" (default) excludes "Unnamed Workflow" entries.
    """
    try:
        workflows = []
        hide_unnamed = request.args.get('hide_unnamed', 'true').lower() == 'true'
        if not WORKFLOWS_PATH.exists():
            WORKFLOWS_PATH.mkdir(parents=True, exist_ok=True)
            return jsonify({'workflows': [], 'total': 0, 'hidden_unnamed': 0})
        for wf_file in WORKFLOWS_PATH.glob('*.json'):
            try:
                with open(wf_file, 'r', encoding='utf-8') as f:
                    wf_data = json.load(f)
                workflows.append({
                    'workflow_id': wf_data.get('workflow_id', wf_file.stem),
                    'name': wf_data.get('name', wf_file.stem),
                    'description': wf_data.get('description', ''),
                    'nodes_count': len(wf_data.get('nodes', [])),
                    'edges_count': len(wf_data.get('edges', [])),
                    'learning_state': wf_data.get('learning_state', 'OBSERVATION'),
                    'created_at': wf_data.get('created_at', ''),
                    'updated_at': wf_data.get('updated_at', ''),
                    'execution_count': wf_data.get('execution_count', 0),
                    'file_path': str(wf_file)
                })
            except Exception as e:
                # Structured logging instead of print() for consistency with
                # the rest of the API handlers.
                api_logger.warning(f"Erreur lecture workflow {wf_file}: {e}")
        # Optionally hide placeholder workflows.
        if hide_unnamed:
            all_workflows = workflows
            workflows = [w for w in workflows if w['name'] != 'Unnamed Workflow']
            hidden_unnamed = len(all_workflows) - len(workflows)
        else:
            hidden_unnamed = len([w for w in workflows if w['name'] == 'Unnamed Workflow'])
        return jsonify({
            'workflows': workflows,
            'total': len(workflows),
            'hidden_unnamed': hidden_unnamed
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/workflows/cleanup-unnamed', methods=['POST'])
def cleanup_unnamed_workflows():
    """Delete workflow files whose name is the 'Unnamed Workflow' placeholder."""
    try:
        deleted = []
        errors = []
        if not WORKFLOWS_PATH.exists():
            # Same response shape as the success path below (previously this
            # returned {'deleted': 0}, inconsistent with 'deleted' as a list).
            return jsonify({'success': True, 'deleted_count': 0,
                            'deleted': [], 'errors': []})
        for wf_file in WORKFLOWS_PATH.glob('*.json'):
            try:
                with open(wf_file, 'r', encoding='utf-8') as f:
                    wf_data = json.load(f)
                if wf_data.get('name') == 'Unnamed Workflow':
                    wf_file.unlink()
                    deleted.append(wf_file.name)
            except Exception as e:
                errors.append({'file': str(wf_file), 'error': str(e)})
        return jsonify({
            'success': True,
            'deleted_count': len(deleted),
            'deleted': deleted,
            'errors': errors
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/workflows/<workflow_id>')
def get_workflow(workflow_id):
    """Return the raw JSON of one workflow; 404 when the file does not exist."""
    try:
        wf_file = WORKFLOWS_PATH / f"{workflow_id}.json"
        if not wf_file.exists():
            return jsonify({'error': 'Workflow non trouvé'}), 404
        # Explicit UTF-8 — workflow names/descriptions may contain accents.
        with open(wf_file, 'r', encoding='utf-8') as f:
            wf_data = json.load(f)
        return jsonify({'workflow': wf_data})
    except Exception as e:
        return jsonify({'error': str(e)}), 500
# Routes execute_workflow / stop_workflow retirées
# (onglet Exécution legacy supprimé — le replay passe par Agent V1 + VWB)
# Routes API Tests retirées (RCE potentielle via subprocess pytest)
# =============================================================================
# API Chains
# =============================================================================
@app.route('/api/chains')
def api_chains():
    """List every workflow chain known to the chain manager."""
    try:
        chains = chain_manager.list_chains()

        def serialize(chain):
            # Flatten a chain object into a JSON-safe dict.
            return {
                'chain_id': chain.chain_id,
                'name': chain.name,
                'workflows': chain.workflows,
                'status': chain.status,
                'created_at': chain.created_at.isoformat(),
                'last_execution': chain.last_execution.isoformat() if chain.last_execution else None,
                'success_rate': chain.success_rate
            }

        return jsonify({
            'chains': [serialize(c) for c in chains],
            'total': len(chains)
        })
    except Exception as e:
        api_logger.error(f"Error listing chains: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/chains', methods=['POST'])
def create_chain():
    """Create a new workflow chain from a JSON body {name, workflows}."""
    try:
        # silent=True + fallback dict: a missing or invalid body yields a
        # clean 400 instead of an AttributeError on None (500).
        data = request.get_json(silent=True) or {}
        name = data.get('name')
        workflows = data.get('workflows', [])
        if not name or not workflows:
            return jsonify({'error': 'name and workflows required'}), 400
        chain = chain_manager.create_chain(name, workflows)
        api_logger.info(f"Created chain {chain.chain_id}")
        return jsonify({
            'chain': {
                'chain_id': chain.chain_id,
                'name': chain.name,
                'workflows': chain.workflows,
                'status': chain.status
            }
        }), 201
    except ValueError as e:
        # ChainManager validation errors are client errors.
        return jsonify({'error': str(e)}), 400
    except Exception as e:
        api_logger.error(f"Error creating chain: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/chains/<chain_id>/execute', methods=['POST'])
def execute_chain_route(chain_id):
    """Execute a chain synchronously, streaming progress over WebSocket.

    Emits `chain_progress` per workflow and `chain_complete` at the end,
    then returns the full execution result. 404 when the chain is unknown.
    """
    try:
        def on_progress(workflow_id, current, total):
            # Relay per-workflow progress to connected dashboard clients.
            socketio.emit('chain_progress', {
                'chain_id': chain_id,
                'workflow_id': workflow_id,
                'current': current,
                'total': total
            })

        result = chain_manager.execute_chain(chain_id, on_progress=on_progress)
        socketio.emit('chain_complete', {
            'chain_id': chain_id,
            'success': result.success,
            'duration': result.duration
        })
        payload = {
            'chain_id': result.chain_id,
            'success': result.success,
            'duration': result.duration,
            'workflows_executed': result.workflows_executed,
            'failed_at': result.failed_at,
            'error_message': result.error_message
        }
        return jsonify({'result': payload})
    except ValueError as e:
        # ChainManager signals an unknown chain with ValueError.
        return jsonify({'error': str(e)}), 404
    except Exception as e:
        api_logger.error(f"Error executing chain {chain_id}: {e}")
        return jsonify({'error': str(e)}), 500
# =============================================================================
# API Triggers
# =============================================================================
@app.route('/api/triggers')
def api_triggers():
    """List every trigger known to the trigger manager."""
    try:
        triggers = trigger_manager.list_triggers()
        serialized = [
            {
                'trigger_id': t.trigger_id,
                'trigger_type': t.trigger_type,
                'workflow_id': t.workflow_id,
                'config': t.config,
                'enabled': t.enabled,
                'created_at': t.created_at.isoformat(),
                'last_fired': t.last_fired.isoformat() if t.last_fired else None,
                'fire_count': t.fire_count
            }
            for t in triggers
        ]
        return jsonify({'triggers': serialized, 'total': len(triggers)})
    except Exception as e:
        api_logger.error(f"Error listing triggers: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/triggers', methods=['POST'])
def create_trigger():
    """Create a trigger from a JSON body {trigger_type, workflow_id, config?}."""
    try:
        # silent=True + fallback dict: a missing or invalid body yields a
        # clean 400 instead of an AttributeError on None (500).
        data = request.get_json(silent=True) or {}
        trigger_type = data.get('trigger_type')
        workflow_id = data.get('workflow_id')
        config = data.get('config', {})
        if not trigger_type or not workflow_id:
            return jsonify({'error': 'trigger_type and workflow_id required'}), 400
        trigger = trigger_manager.create_trigger(trigger_type, workflow_id, config)
        api_logger.info(f"Created trigger {trigger.trigger_id}")
        return jsonify({
            'trigger': {
                'trigger_id': trigger.trigger_id,
                'trigger_type': trigger.trigger_type,
                'workflow_id': trigger.workflow_id,
                'config': trigger.config,
                'enabled': trigger.enabled
            }
        }), 201
    except ValueError as e:
        # TriggerManager validation errors are client errors.
        return jsonify({'error': str(e)}), 400
    except Exception as e:
        api_logger.error(f"Error creating trigger: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/triggers/<trigger_id>/toggle', methods=['POST'])
def toggle_trigger(trigger_id):
    """Flip a trigger between enabled and disabled states."""
    try:
        trigger = trigger_manager.get_trigger(trigger_id)
        if not trigger:
            return jsonify({'error': 'Trigger not found'}), 404
        # Dispatch on current state: disable when enabled, enable otherwise.
        if trigger.enabled:
            action, verb = trigger_manager.disable_trigger, "Disabled"
        else:
            action, verb = trigger_manager.enable_trigger, "Enabled"
        action(trigger_id)
        api_logger.info(f"{verb} trigger {trigger_id}")
        # Re-read so the response reflects the manager's persisted state.
        trigger = trigger_manager.get_trigger(trigger_id)
        return jsonify({
            'trigger': {
                'trigger_id': trigger.trigger_id,
                'enabled': trigger.enabled
            }
        })
    except Exception as e:
        api_logger.error(f"Error toggling trigger {trigger_id}: {e}")
        return jsonify({'error': str(e)}), 500
# =============================================================================
# API Logs
# =============================================================================
@app.route('/api/logs')
def get_logs():
    """Return the most recent log lines across all ``*.log`` files.

    For each file only the last 100 lines are kept (bounded memory via
    ``deque(maxlen=...)`` instead of reading the whole file), and the
    response is capped at the overall last 200 entries.
    """
    from collections import deque
    try:
        logs = []
        if not LOGS_PATH.exists():
            return jsonify({'logs': [], 'total': 0})
        for log_file in LOGS_PATH.glob('*.log'):
            try:
                # errors='replace' keeps one bad byte from killing the read.
                with open(log_file, 'r', encoding='utf-8', errors='replace') as f:
                    tail = deque(f, maxlen=100)
                for line in tail:
                    logs.append({
                        'file': log_file.name,
                        'message': line.strip()
                    })
            except Exception as e:
                # Log through the API logger (the original used print,
                # inconsistent with the rest of the handlers).
                api_logger.error(f"Erreur lecture log {log_file}: {e}")
        return jsonify({
            'logs': logs[-200:],
            'total': len(logs)
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/logs/download')
def download_logs():
    """Download the logs as a ZIP archive.

    Optional ``start_time``/``end_time`` query parameters (ISO-8601)
    restrict the exported range. A malformed date now yields 400
    (client error) instead of falling through to the generic 500.
    """
    try:
        start_time = request.args.get('start_time')
        end_time = request.args.get('end_time')
        try:
            start_dt = datetime.fromisoformat(start_time) if start_time else None
            end_dt = datetime.fromisoformat(end_time) if end_time else None
        except ValueError as e:
            # Bad client input is a 400, not a server error.
            return jsonify({'error': f'Invalid date format: {e}'}), 400
        # Build the ZIP archive for the requested window.
        zip_file = log_exporter.export_to_zip(start_dt, end_dt)
        api_logger.info(f"Logs downloaded (start={start_time}, end={end_time})")
        return send_file(
            zip_file,
            mimetype='application/zip',
            as_attachment=True,
            download_name=f'rpa_logs_{datetime.now().strftime("%Y%m%d_%H%M%S")}.zip'
        )
    except Exception as e:
        api_logger.error(f"Error downloading logs: {e}")
        return jsonify({'error': str(e)}), 500
# Routes API Automation retirées (onglet Exécution legacy supprimé)
# =============================================================================
# API Backup & Export - Sauvegardes client
# =============================================================================
@app.route('/api/backup/stats')
def backup_stats():
    """Expose the volume of data currently eligible for a client backup."""
    try:
        backup_exporter = get_backup_exporter()
        payload = {
            'stats': backup_exporter.get_backup_stats(),
            'timestamp': datetime.now().isoformat()
        }
        return jsonify(payload)
    except Exception as e:
        api_logger.error(f"Error getting backup stats: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/backup/full', methods=['POST'])
def backup_full():
    """Build a complete backup archive and stream it as an attachment."""
    try:
        payload = request.get_json() or {}
        include_models = payload.get('include_models', False)
        archive = get_backup_exporter().export_full_backup(include_models=include_models)
        api_logger.info(f"Full backup created (include_models={include_models})")
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        return send_file(
            archive,
            mimetype='application/zip',
            as_attachment=True,
            download_name=f'rpa_vision_backup_{stamp}.zip'
        )
    except Exception as e:
        api_logger.error(f"Error creating full backup: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/backup/workflows')
def backup_workflows():
    """Build and download a backup containing only the workflows."""
    try:
        archive = get_backup_exporter().export_workflows()
        api_logger.info("Workflows backup created")
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        return send_file(
            archive,
            mimetype='application/zip',
            as_attachment=True,
            download_name=f'rpa_workflows_{stamp}.zip'
        )
    except Exception as e:
        api_logger.error(f"Error creating workflows backup: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/backup/correction-packs')
def backup_correction_packs():
    """Build and download a backup of the correction packs."""
    try:
        archive = get_backup_exporter().export_correction_packs()
        api_logger.info("Correction packs backup created")
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        return send_file(
            archive,
            mimetype='application/zip',
            as_attachment=True,
            download_name=f'rpa_correction_packs_{stamp}.zip'
        )
    except Exception as e:
        api_logger.error(f"Error creating correction packs backup: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/backup/trained-models')
def backup_trained_models():
    """Build and download the trained models (opt-in).

    These models are anonymised and only contain learned patterns —
    no raw data and no screenshots.
    """
    try:
        archive = get_backup_exporter().export_trained_models()
        api_logger.info("Trained models backup created (opt-in)")
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        return send_file(
            archive,
            mimetype='application/zip',
            as_attachment=True,
            download_name=f'rpa_trained_models_{stamp}.zip'
        )
    except Exception as e:
        api_logger.error(f"Error creating trained models backup: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/backup/config')
def backup_config():
    """Build and download the (sanitised) configuration archive."""
    try:
        archive = get_backup_exporter().export_config()
        api_logger.info("Config backup created (sanitized)")
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        return send_file(
            archive,
            mimetype='application/zip',
            as_attachment=True,
            download_name=f'rpa_config_{stamp}.zip'
        )
    except Exception as e:
        api_logger.error(f"Error creating config backup: {e}")
        return jsonify({'error': str(e)}), 500
# =============================================================================
# API Version & Updates - Gestion des versions
# =============================================================================
@app.route('/api/version')
def get_version():
    """Return the current system version."""
    try:
        current = get_version_manager().get_current_version()
        return jsonify({
            'version': current.to_dict(),
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        api_logger.error(f"Error getting version: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/version/system-info')
def get_system_info():
    """Return the full system information payload."""
    try:
        info = get_version_manager().get_system_info()
        return jsonify({
            'system_info': info,
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        api_logger.error(f"Error getting system info: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/version/check-update')
def check_update():
    """Report whether an update package is available."""
    try:
        update = get_version_manager().check_for_updates()
        now = datetime.now().isoformat()
        if not update:
            # Nothing pending: the system is up to date.
            return jsonify({
                'update_available': False,
                'message': 'Système à jour',
                'timestamp': now
            })
        return jsonify({
            'update_available': True,
            'update': update.to_dict(),
            'timestamp': now
        })
    except Exception as e:
        api_logger.error(f"Error checking update: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/version/backups')
def list_version_backups():
    """List the version backups available for a rollback."""
    try:
        available = get_version_manager().list_backups()
        return jsonify({
            'backups': available,
            'total': len(available),
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        api_logger.error(f"Error listing version backups: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/version/create-backup', methods=['POST'])
def create_version_backup():
    """Create a restore point (backup) of the current version."""
    try:
        payload = request.get_json() or {}
        label = payload.get('label')
        backup_path = get_version_manager().create_backup(label=label)
        api_logger.info(f"Version backup created: {backup_path}")
        return jsonify({
            'success': True,
            'backup_path': str(backup_path),
            'label': label,
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        api_logger.error(f"Error creating version backup: {e}")
        return jsonify({'error': str(e)}), 500
@app.route('/api/version/upload-update', methods=['POST'])
def upload_update_package():
    """
    Upload an update package.

    The package must be a ZIP file containing:
    - update_manifest.json
    - the update files themselves

    Security fix: only the basename of the client-supplied filename is
    used, preventing path traversal (e.g. ``../../etc/x.zip``) when the
    file is saved under ``updates_dir``.
    """
    try:
        if 'file' not in request.files:
            return jsonify({'error': 'Aucun fichier fourni'}), 400
        file = request.files['file']
        if file.filename == '':
            return jsonify({'error': 'Nom de fichier vide'}), 400
        if not file.filename.endswith('.zip'):
            return jsonify({'error': 'Le fichier doit être un ZIP'}), 400
        # Strip any directory components from the client-controlled name.
        safe_name = Path(file.filename).name
        # Save into the updates directory.
        vm = get_version_manager()
        update_path = vm.updates_dir / safe_name
        file.save(update_path)
        # Validate the manifest (zipfile is imported at module level).
        with zipfile.ZipFile(update_path, 'r') as zf:
            if 'update_manifest.json' not in zf.namelist():
                update_path.unlink()
                return jsonify({'error': 'Package invalide: manifest manquant'}), 400
            # Extract the manifest next to the package.
            zf.extract('update_manifest.json', vm.updates_dir)
        with open(vm.updates_dir / 'update_manifest.json', 'r') as f:
            manifest = json.load(f)
        api_logger.info(f"Update package uploaded: {safe_name} (version {manifest.get('version')})")
        return jsonify({
            'success': True,
            'filename': safe_name,
            'manifest': manifest,
            'message': 'Package uploadé. Utilisez /api/version/check-update pour vérifier.',
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        api_logger.error(f"Error uploading update package: {e}")
        return jsonify({'error': str(e)}), 500
# =============================================================================
# API Metrics (Prometheus)
# =============================================================================
@app.route('/metrics')
def prometheus_metrics():
    """Expose Prometheus metrics in the text exposition format."""
    try:
        payload = generate_latest()
        return Response(payload, mimetype=CONTENT_TYPE_LATEST)
    except Exception as e:
        api_logger.error(f"Error generating metrics: {e}")
        return Response(f"# Error: {e}\n", mimetype='text/plain'), 500
# =============================================================================
# WebSocket Events
# =============================================================================
@socketio.on('connect')
def handle_connect():
    """A WebSocket client connected: acknowledge with a status payload."""
    print(f"Client connecté: {request.sid}")
    ack = {'status': 'ok'}
    emit('connected', ack)
@socketio.on('disconnect')
def handle_disconnect():
    """A WebSocket client went away; just trace the session id."""
    print(f"Client déconnecté: {request.sid}")
# SocketIO subscribe_execution, get_performance, broadcast_metrics retirés
# (onglet Exécution legacy supprimé)
# =============================================================================
# API Service Manager - Panneau de contrôle pour démos
# =============================================================================
# Configuration des services RPA Vision V3
# Registry of the RPA Vision V3 services driven from this dashboard.
# Each entry carries: display name / description / icon for the UI,
# the TCP port probed for liveness, the shell command template used to
# start it ({base} is substituted with the project root), and the local
# URL shown to the user.
SERVICES_CONFIG = {
    "api_server": {
        "name": "API Server",
        "description": "API principale RPA Vision V3 (upload, sessions)",
        "port": 8000,
        "start_cmd": "cd {base} && {base}/.venv/bin/python server/api_upload.py",
        "url": "http://localhost:8000",
        "icon": "🚀"
    },
    "monitoring": {
        "name": "Monitoring",
        "description": "Métriques et surveillance système",
        "port": 5003,
        "start_cmd": "cd {base} && {base}/.venv/bin/python monitoring_server.py",
        "url": "http://localhost:5003",
        "icon": "📈"
    },
    "vwb_backend": {
        "name": "VWB Backend",
        "description": "API Visual Workflow Builder",
        "port": 5002,
        "start_cmd": "cd {base}/visual_workflow_builder/backend && {base}/.venv/bin/python app.py",
        "url": "http://localhost:5002",
        "icon": "⚙️"
    },
    "vwb_frontend": {
        "name": "VWB Frontend",
        "description": "Interface React du Workflow Builder (V4)",
        "port": 3002,
        "start_cmd": "cd {base}/visual_workflow_builder/frontend_v4 && npm run dev -- --port 3002 --host 0.0.0.0",
        "url": "http://localhost:3002",
        "icon": "🎨"
    },
    "agent_chat": {
        "name": "Agent Chat (LLM)",
        "description": "Interface conversationnelle avec Ollama",
        "port": 5004,
        "start_cmd": "cd {base} && ./.venv/bin/python -m agent_chat.app",
        "url": "http://localhost:5004",
        "icon": "🤖"
    },
    "streaming": {
        "name": "Streaming Server",
        "description": "Serveur de capture et streaming temps réel",
        "port": 5005,
        "start_cmd": "cd {base} && ./.venv/bin/python -m agent_v0.server_v1.api_stream",
        "url": "http://localhost:5005",
        "icon": "📡"
    },
    "session_cleaner": {
        "name": "Session Cleaner",
        "description": "Nettoyage de sessions avant replay (dépend du Streaming Server)",
        "port": 5006,
        "start_cmd": "cd {base} && {base}/.venv/bin/python3 tools/session_cleaner.py",
        "url": "http://localhost:5006",
        "icon": "🧹"
    },
    "web_dashboard": {
        "name": "Dashboard (ce service)",
        "description": "Panneau de contrôle RPA Vision V3",
        "port": 5001,
        "start_cmd": None,  # Already running — this is the current process.
        "url": "http://localhost:5001",
        "icon": "📊"
    }
}
# Tracks service_id -> PID for processes started through this dashboard.
_running_processes = {}
def _check_port(port: int) -> bool:
    """Return True if something is listening on 127.0.0.1:*port*.

    Any connection failure is treated as "not running" instead of being
    propagated: the original only caught socket.timeout and
    ConnectionRefusedError, so other OSError subclasses (host
    unreachable, reset, ...) would bubble up and turn a simple status
    probe into a 500 in the callers.
    """
    import socket
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.settimeout(1)
        try:
            s.connect(('127.0.0.1', port))
            return True
        except OSError:
            # socket.timeout and ConnectionRefusedError are both OSError
            # subclasses; anything else network-related also means "down".
            return False
def _get_service_status(service_id: str) -> dict:
    """Build the detailed status payload for one service."""
    config = SERVICES_CONFIG.get(service_id)
    if config is None:
        return {"error": "Service inconnu"}
    port = config["port"]
    # Liveness is determined by probing the TCP port.
    return {
        "service_id": service_id,
        "name": config["name"],
        "description": config["description"],
        "icon": config["icon"],
        "port": port,
        "url": config["url"],
        "status": "running" if _check_port(port) else "stopped",
        "pid": _running_processes.get(service_id),
        "can_start": config["start_cmd"] is not None,
        "can_stop": service_id != "web_dashboard"
    }
@app.route('/api/services')
def api_services():
    """List every known service together with its live status."""
    return jsonify({
        "services": [_get_service_status(sid) for sid in SERVICES_CONFIG],
        "timestamp": datetime.now().isoformat()
    })
@app.route('/api/services/<service_id>/status')
def api_service_status(service_id):
    """Status of a single service (404 when the id is unknown)."""
    status = _get_service_status(service_id)
    code = 404 if "error" in status else 200
    return jsonify(status), code
@app.route('/api/services/<service_id>/start', methods=['POST'])
def api_service_start(service_id):
    """Start a service in the background and report its status.

    Fix: child stdout/stderr now go to DEVNULL. The previous
    ``stdout=PIPE``/``stderr=PIPE`` were never read, so any chatty
    service would eventually block once the ~64 KiB pipe buffer filled.
    """
    config = SERVICES_CONFIG.get(service_id)
    if not config:
        return jsonify({"error": "Service inconnu"}), 404
    if not config["start_cmd"]:
        return jsonify({"error": "Ce service ne peut pas être démarré via l'API"}), 400
    # Already listening? Nothing to do.
    if _check_port(config["port"]):
        return jsonify({"status": "already_running", "message": f"{config['name']} est déjà en cours"})
    try:
        # Build the shell command from the template.
        cmd = config["start_cmd"].format(base=str(BASE_PATH))
        # Launch detached in its own session; discard output (see docstring).
        process = subprocess.Popen(
            cmd,
            shell=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            start_new_session=True
        )
        _running_processes[service_id] = process.pid
        # Give the service a moment to bind its port.
        time.sleep(2)
        is_running = _check_port(config["port"])
        api_logger.info(f"Service {service_id} started (PID: {process.pid}, running: {is_running})")
        return jsonify({
            "status": "started" if is_running else "starting",
            "pid": process.pid,
            "message": f"{config['name']} démarré" if is_running else "Démarrage en cours...",
            "url": config["url"]
        })
    except Exception as e:
        api_logger.error(f"Error starting service {service_id}: {e}")
        return jsonify({"error": str(e)}), 500
@app.route('/api/services/<service_id>/stop', methods=['POST'])
def api_service_stop(service_id):
    """Stop a service by TERM-ing whatever process listens on its port."""
    config = SERVICES_CONFIG.get(service_id)
    if not config:
        return jsonify({"error": "Service inconnu"}), 404
    if service_id == "web_dashboard":
        return jsonify({"error": "Impossible d'arrêter le dashboard depuis lui-même"}), 400
    try:
        port = config["port"]
        # Find and TERM the process bound to this port. The command's
        # result is intentionally ignored (the original stored it in an
        # unused variable); success is verified by re-probing the port.
        subprocess.run(
            f"lsof -ti :{port} | xargs -r kill -TERM",
            shell=True,
            capture_output=True,
            text=True
        )
        # Drop PID tracking for this service (no-op when absent).
        _running_processes.pop(service_id, None)
        # Give the port time to be released.
        time.sleep(1)
        is_stopped = not _check_port(port)
        api_logger.info(f"Service {service_id} stopped (success: {is_stopped})")
        return jsonify({
            "status": "stopped" if is_stopped else "stopping",
            "message": f"{config['name']} arrêté" if is_stopped else "Arrêt en cours..."
        })
    except Exception as e:
        api_logger.error(f"Error stopping service {service_id}: {e}")
        return jsonify({"error": str(e)}), 500
@app.route('/api/services/<service_id>/restart', methods=['POST'])
def api_service_restart(service_id):
    """Restart a service: stop it, wait, then start it again.

    Fix: the original guard
    ``if stop_response[1] if isinstance(stop_response, tuple) else 200 >= 400:``
    parsed as ``x if cond else (200 >= 400)`` because of operator
    precedence, so the HTTP status was never actually compared to 400.
    """
    stop_response = api_service_stop(service_id)
    # Error responses come back as (body, status_code) tuples.
    status_code = stop_response[1] if isinstance(stop_response, tuple) else 200
    if status_code >= 400:
        # Abort the restart when the stop phase failed.
        return stop_response
    # Give the port time to be released before starting again.
    time.sleep(2)
    return api_service_start(service_id)
@app.route('/api/services/start-all', methods=['POST'])
def api_services_start_all():
    """Start every startable service that is not already listening.

    Fix: child stdout/stderr go to DEVNULL — the previous unread PIPEs
    could block a chatty service once the pipe buffer filled.
    """
    results = {}
    for service_id, config in SERVICES_CONFIG.items():
        if not config["start_cmd"]:
            continue
        try:
            if _check_port(config["port"]):
                results[service_id] = {"status": "already_running"}
                continue
            cmd = config["start_cmd"].format(base=str(BASE_PATH))
            process = subprocess.Popen(
                cmd,
                shell=True,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                start_new_session=True
            )
            _running_processes[service_id] = process.pid
            results[service_id] = {"status": "starting", "pid": process.pid}
        except Exception as e:
            results[service_id] = {"status": "error", "error": str(e)}
    # Let the services come up, then re-probe their ports.
    time.sleep(3)
    for service_id in results:
        if results[service_id].get("status") == "starting":
            port = SERVICES_CONFIG[service_id]["port"]
            results[service_id]["status"] = "running" if _check_port(port) else "failed"
    return jsonify({"results": results, "timestamp": datetime.now().isoformat()})
@app.route('/api/services/stop-all', methods=['POST'])
def api_services_stop_all():
    """Stop every startable service except the dashboard itself."""
    results = {}
    for service_id, config in SERVICES_CONFIG.items():
        # Never kill our own process; skip services without a start command.
        if service_id == "web_dashboard" or not config["start_cmd"]:
            continue
        try:
            port = config["port"]
            subprocess.run(f"lsof -ti :{port} | xargs -r kill -TERM", shell=True)
            results[service_id] = {"status": "stopped"}
        except Exception as e:
            results[service_id] = {"status": "error", "error": str(e)}
    _running_processes.clear()
    return jsonify({"results": results, "timestamp": datetime.now().isoformat()})
# =============================================================================
# API Configuration - Interface de configuration centralisee
# =============================================================================
# Central JSON file holding the editable system configuration.
CONFIG_FILE_PATH = BASE_PATH / "data" / "config" / "system_config.json"
def load_system_config():
    """Load the system configuration from disk, falling back to defaults.

    Any read/parse failure is logged and the default configuration is
    returned instead of raising.
    """
    try:
        if CONFIG_FILE_PATH.exists():
            return json.loads(CONFIG_FILE_PATH.read_text(encoding='utf-8'))
    except Exception as e:
        api_logger.error(f"Erreur chargement config: {e}")
    # Default configuration (file missing or unreadable).
    return {
        "version": "1.0.0",
        "services": {},
        "llm": {"provider": "ollama", "base_url": "http://localhost:11434", "model": "qwen2.5:7b"},
        "vlm": {"provider": "ollama", "base_url": "http://localhost:11434", "model": "gemma4:e4b"},
        "detection": {"owl_model": "google/owlv2-base-patch16-ensemble", "confidence_threshold": 0.3},
        "database": {"type": "sqlite", "path": "data/training/workflows.db"},
        "security": {"enable_encryption": True, "require_authentication": False}
    }
def save_system_config(config):
    """Persist the system configuration to disk; return True on success."""
    try:
        # Make sure the parent directory exists before writing.
        CONFIG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
        serialized = json.dumps(config, indent=2, ensure_ascii=False)
        CONFIG_FILE_PATH.write_text(serialized, encoding='utf-8')
        return True
    except Exception as e:
        api_logger.error(f"Erreur sauvegarde config: {e}")
        return False
@app.route('/api/config')
def get_config():
    """Return the full system configuration."""
    try:
        return jsonify({
            "success": True,
            "config": load_system_config(),
            "timestamp": datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
@app.route('/api/config', methods=['POST'])
def update_config():
    """Replace the whole system configuration (version field preserved)."""
    try:
        new_config = request.get_json()
        if not new_config:
            return jsonify({"success": False, "error": "Configuration vide"}), 400
        # Keep the existing version number.
        new_config['version'] = load_system_config().get('version', '1.0.0')
        if not save_system_config(new_config):
            return jsonify({"success": False, "error": "Erreur lors de la sauvegarde"}), 500
        api_logger.info("Configuration systeme mise a jour")
        return jsonify({
            "success": True,
            "message": "Configuration sauvegardee",
            "timestamp": datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
@app.route('/api/config/<section>')
def get_config_section(section):
    """Return one named section of the configuration."""
    try:
        config = load_system_config()
        if section in config:
            return jsonify({
                "success": True,
                "section": section,
                "data": config[section],
                "timestamp": datetime.now().isoformat()
            })
        return jsonify({"success": False, "error": f"Section '{section}' introuvable"}), 404
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
@app.route('/api/config/<section>', methods=['PUT'])
def update_config_section(section):
    """Overwrite one named section of the configuration."""
    try:
        section_data = request.get_json()
        if not section_data:
            return jsonify({"success": False, "error": "Donnees vides"}), 400
        config = load_system_config()
        config[section] = section_data
        if not save_system_config(config):
            return jsonify({"success": False, "error": "Erreur lors de la sauvegarde"}), 500
        api_logger.info(f"Section config '{section}' mise a jour")
        return jsonify({
            "success": True,
            "message": f"Section '{section}' sauvegardee",
            "timestamp": datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
@app.route('/api/config/test-connection', methods=['POST'])
def test_connection():
    """Test connectivity to a configured service (LLM, database, HTTP port).

    Fix: the TCP probe used a bare ``except:`` which also swallowed
    KeyboardInterrupt/SystemExit; it is narrowed to the exceptions that
    can actually occur (connection failures and a malformed port value).
    """
    try:
        data = request.get_json()
        service_type = data.get('type')
        if service_type == 'ollama':
            # Ollama liveness probe: list the available models.
            import urllib.request
            url = data.get('base_url', 'http://localhost:11434') + '/api/tags'
            try:
                with urllib.request.urlopen(url, timeout=5) as response:
                    models = json.loads(response.read().decode())
                return jsonify({
                    "success": True,
                    "message": "Connexion Ollama OK",
                    "models": [m.get('name') for m in models.get('models', [])]
                })
            except Exception as e:
                return jsonify({"success": False, "error": f"Connexion Ollama echouee: {e}"})
        elif service_type == 'database':
            # Database check: the SQLite file must exist.
            db_path = data.get('path') or 'data/training/workflows.db'
            full_path = BASE_PATH / db_path
            if full_path.exists():
                return jsonify({"success": True, "message": f"Base de donnees accessible: {db_path}"})
            return jsonify({"success": False, "error": f"Fichier introuvable: {full_path}"})
        elif service_type == 'service':
            # Generic TCP probe on host:port.
            port = data.get('port')
            host = data.get('host', 'localhost')
            import socket
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.settimeout(2)
                try:
                    s.connect((host, int(port)))
                    return jsonify({"success": True, "message": f"Service {host}:{port} accessible"})
                except (OSError, TypeError, ValueError):
                    # OSError covers refused/timeout/unreachable;
                    # TypeError/ValueError cover a missing or non-numeric port.
                    return jsonify({"success": False, "error": f"Service {host}:{port} inaccessible"})
        return jsonify({"success": False, "error": "Type de test inconnu"})
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
@app.route('/api/config/ollama-models')
def get_ollama_models():
    """Fetch the list of models from the configured Ollama host."""
    try:
        import urllib.request
        base_url = load_system_config().get('llm', {}).get('base_url', 'http://localhost:11434')
        with urllib.request.urlopen(base_url + '/api/tags', timeout=5) as response:
            payload = json.loads(response.read().decode())
        names = [m.get('name') for m in payload.get('models', [])]
        return jsonify({
            "success": True,
            "models": names,
            "timestamp": datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({"success": False, "error": str(e), "models": []}), 500
@app.route('/api/config/export')
def export_config():
    """Download the configuration as a JSON attachment."""
    try:
        body = json.dumps(load_system_config(), indent=2, ensure_ascii=False)
        return Response(
            body,
            mimetype='application/json',
            headers={'Content-Disposition': 'attachment;filename=rpa_config.json'}
        )
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
@app.route('/api/config/import', methods=['POST'])
def import_config():
    """Import a configuration from an uploaded JSON file or a JSON body."""
    try:
        uploaded = request.files.get('file')
        if uploaded is not None:
            config = json.load(uploaded)
        else:
            config = request.get_json()
        if not config:
            return jsonify({"success": False, "error": "Configuration vide"}), 400
        if not save_system_config(config):
            return jsonify({"success": False, "error": "Erreur lors de l'import"}), 500
        return jsonify({
            "success": True,
            "message": "Configuration importee avec succes",
            "timestamp": datetime.now().isoformat()
        })
    except json.JSONDecodeError:
        # The uploaded file was not valid JSON.
        return jsonify({"success": False, "error": "Format JSON invalide"}), 400
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
# Pages auxiliaires retirées : /gestures, /streaming (page), /extractions, /chat
# (fonctionnalités mortes ou dupliquées avec des services dédiés)
# =============================================================================
# API Streaming - Proxy vers le serveur de streaming (port 5005)
# =============================================================================
# Base URL of the streaming server's trace-stream API (local port 5005).
STREAMING_BASE_URL = 'http://localhost:5005/api/v1/traces/stream'
@app.route('/api/streaming/<path:endpoint>')
def proxy_streaming(endpoint):
    """Proxy GET requests to the streaming server (avoids browser CORS)."""
    import urllib.request
    import urllib.error
    try:
        req = urllib.request.Request(
            f'{STREAMING_BASE_URL}/{endpoint}',
            headers={'Accept': 'application/json'},
        )
        with urllib.request.urlopen(req, timeout=5) as response:
            payload = json.loads(response.read().decode())
        return jsonify(payload)
    except urllib.error.URLError as e:
        # Streaming server down or unreachable -> bad gateway.
        return jsonify({'error': f'Serveur streaming inaccessible: {e}'}), 502
    except Exception as e:
        return jsonify({'error': str(e)}), 500
# =============================================================================
# Fleet Management — Proxy vers le serveur streaming (port 5005)
# =============================================================================
# Base URL of the streaming server's fleet/agents API (local port 5005).
FLEET_BASE_URL = 'http://localhost:5005/api/v1/agents'
@app.route('/api/fleet/<path:endpoint>', methods=['GET', 'POST'])
def proxy_fleet(endpoint):
    """Proxy to the streaming server's fleet endpoints (port 5005).

    Available endpoints:
      - GET  /api/fleet/fleet     -> list agents (active + uninstalled)
      - POST /api/fleet/enroll    -> register a new workstation
      - POST /api/fleet/uninstall -> revoke a workstation
    """
    import urllib.request
    import urllib.error
    try:
        target = f'{FLEET_BASE_URL}/{endpoint}'
        fwd_headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
        }
        # Bearer token (same mechanism as the streaming proxy).
        bearer = os.environ.get('RPA_API_TOKEN', '')
        if bearer:
            fwd_headers['Authorization'] = f'Bearer {bearer}'
        body_bytes = None
        if request.method == 'POST':
            body_bytes = json.dumps(request.get_json(silent=True) or {}).encode('utf-8')
        upstream = urllib.request.Request(
            target, data=body_bytes, headers=fwd_headers, method=request.method,
        )
        with urllib.request.urlopen(upstream, timeout=10) as response:
            raw = response.read().decode('utf-8')
            try:
                return jsonify(json.loads(raw)), response.status
            except json.JSONDecodeError:
                # Pass non-JSON bodies through untouched.
                return raw, response.status, {'Content-Type': 'application/json'}
    except urllib.error.HTTPError as e:
        # Forward the upstream error body and status when possible.
        try:
            detail = json.loads(e.read().decode('utf-8'))
        except Exception:
            detail = {'error': str(e)}
        return jsonify(detail), e.code
    except urllib.error.URLError as e:
        return jsonify({
            'error': f'Serveur streaming (5005) inaccessible : {e}',
            'hint': 'Vérifiez que le service streaming est démarré.',
        }), 502
    except Exception as e:
        return jsonify({'error': str(e)}), 500
# =============================================================================
# Fleet — Téléchargement du ZIP installeur pré-configuré
# =============================================================================
# Path of the pre-built Léa installer ZIP template (see deploy/build_package.sh).
_LEA_ZIP_TEMPLATE = BASE_PATH / "deploy" / "Lea_v1.0.0.zip"
# Public URL of this server (environment override, hard-coded fallback).
_RPA_PUBLIC_URL = os.getenv(
    "RPA_PUBLIC_URL",
    os.getenv("RPA_SERVER_URL", "https://lea.labs.laurinebazin.design"),
)
def _normalize_server_url(url: str) -> str:
    """Return *url* without trailing slashes, guaranteed to end in /api/v1."""
    trimmed = url.rstrip("/")
    if trimmed.endswith("/api/v1"):
        return trimmed
    return trimmed + "/api/v1"
def _extract_host(url: str) -> str:
    """Return only the hostname of *url* (no scheme, port or path)."""
    from urllib.parse import urlparse
    parts = urlparse(url)
    if parts.hostname:
        return parts.hostname
    # Fallback for odd inputs where urlparse yields no hostname.
    return parts.netloc.split(":")[0]
# Computed once at startup for the simple case (hostname of the public URL).
_RPA_PUBLIC_HOST = _extract_host(_RPA_PUBLIC_URL)
def _fetch_fleet_agent(machine_id: str):
    """Look up an agent on the streaming server (port 5005).

    Returns:
        The agent dict when *machine_id* appears in the fleet's active
        list, None otherwise (including any network/parse failure).
    """
    import urllib.request
    import urllib.error
    headers = {'Accept': 'application/json'}
    bearer = os.environ.get('RPA_API_TOKEN', '')
    if bearer:
        headers['Authorization'] = f'Bearer {bearer}'
    req = urllib.request.Request(f'{FLEET_BASE_URL}/fleet', headers=headers, method='GET')
    try:
        with urllib.request.urlopen(req, timeout=10) as response:
            fleet = json.loads(response.read().decode('utf-8'))
    except Exception:
        # Any failure to reach or parse the fleet listing -> not found.
        return None
    # Only the 'active' list is scanned (revoked agents are excluded).
    return next(
        (a for a in (fleet.get('active') or []) if a.get('machine_id') == machine_id),
        None,
    )
def _build_custom_config(machine_id: str, user_name: str, token: str) -> str:
    """Build the personalised ``config.txt`` contents for one agent.

    The server URL is always normalised to end with ``/api/v1``.
    The returned text (including its French comments) is the literal
    file shipped to the workstation and must stay byte-stable.
    """
    now = datetime.now().strftime("%Y-%m-%d %H:%M")
    server_url = _normalize_server_url(_RPA_PUBLIC_URL)
    return f"""\
# ============================================================
# Configuration Lea
# ============================================================
#
# Generee automatiquement depuis le dashboard Fleet.
# Agent : {machine_id}
# Collaborateur : {user_name or 'non renseigne'}
# Date : {now}
#
# ============================================================
# Adresse du serveur Lea (URL complete avec /api/v1)
RPA_SERVER_URL={server_url}
# Cle d'authentification (fournie par l'administrateur)
RPA_API_TOKEN={token}
# Identifiant unique de ce poste
RPA_MACHINE_ID={machine_id}
# Nom du collaborateur associe a ce poste
RPA_USER_LABEL={user_name or ''}
# --- Parametres avances (ne pas modifier sauf indication) ---
# Host Ollama (defaut localhost, ne pas modifier sauf configuration speciale)
# RPA_OLLAMA_HOST=localhost
# Flouter les zones de texte dans les captures cote CLIENT.
# Desactive par defaut — le blur est gere cote serveur.
RPA_BLUR_SENSITIVE=false
# Duree de conservation des logs en jours (minimum 180 pour conformite)
RPA_LOG_RETENTION_DAYS=180
"""
@app.route('/api/fleet/download/<machine_id>')
def download_agent_package(machine_id):
    """Build and serve a Léa ZIP pre-configured for this machine_id.

    - Checks that the machine_id is registered and active in the fleet.
    - Reads the template ZIP (deploy/Lea_v1.0.0.zip).
    - Swaps config.txt for a personalized version.
    - Returns the modified ZIP as a download (built entirely in memory).
    """
    # Security: HTTP Basic auth is already enforced by before_request.
    # Guard 1: the template ZIP must exist on disk.
    if not _LEA_ZIP_TEMPLATE.exists():
        return jsonify({
            'error': 'ZIP template introuvable',
            'detail': f'{_LEA_ZIP_TEMPLATE} absent — exécuter deploy/build_package.sh',
        }), 500

    # Guard 2: the machine_id must be registered (and active) in the fleet.
    fleet_agent = _fetch_fleet_agent(machine_id)
    if fleet_agent is None:
        return jsonify({
            'error': f'Agent "{machine_id}" introuvable ou inactif dans la fleet',
            'hint': 'Enregistrez d\'abord l\'agent via Fleet → Enregistrer un agent.',
        }), 404

    # Phase 1: a single global API token is shared by every agent.
    config_text = _build_custom_config(
        machine_id,
        fleet_agent.get('user_name') or '',
        os.environ.get('RPA_API_TOKEN', ''),
    )

    # Rebuild the ZIP in memory, replacing only Lea/config.txt.
    buffer = io.BytesIO()
    try:
        with zipfile.ZipFile(_LEA_ZIP_TEMPLATE, 'r') as template, \
             zipfile.ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED) as bundle:
            for entry in template.infolist():
                if entry.filename == 'Lea/config.txt':
                    # Inject the personalized configuration.
                    bundle.writestr(entry, config_text.encode('utf-8'))
                else:
                    # Every other file is copied verbatim.
                    bundle.writestr(entry, template.read(entry.filename))
    except zipfile.BadZipFile:
        return jsonify({
            'error': 'ZIP template corrompu',
            'detail': f'{_LEA_ZIP_TEMPLATE} n\'est pas un ZIP valide.',
        }), 500
    buffer.seek(0)

    # Sanitize the machine id for use in a filename.
    safe_name = machine_id.replace('/', '_').replace('\\', '_')
    return send_file(
        buffer,
        as_attachment=True,
        download_name=f"Lea_{safe_name}.zip",
        mimetype='application/zip',
    )
# =============================================================================
# Cartographie des processus — Process Mining
# =============================================================================
# Root folder for shadow-recorded live sessions: one subfolder per machine,
# each containing sess_* session directories (plus an 'embeddings' cache).
LIVE_SESSIONS_PATH = BASE_PATH / "data" / "training" / "live_sessions"
@app.route('/process-mining')
def process_mining_page():
    """Render the 'Cartographie des processus' (process mining) page."""
    return render_template('process_mining.html')
@app.route('/api/process-mining/machines')
def process_mining_machines():
    """List machines available for analysis (folders under live_sessions/).

    A machine is reported only when it contains at least one session
    folder holding a live_events.jsonl file.
    """
    try:
        found = []
        entries = sorted(LIVE_SESSIONS_PATH.iterdir()) if LIVE_SESSIONS_PATH.exists() else []
        for entry in entries:
            # Skip plain files, root-level sessions, the embeddings cache
            # and hidden directories.
            if not entry.is_dir() or entry.name.startswith(('sess_', 'embeddings', '.')):
                continue
            # Count sessions that actually contain a live_events.jsonl.
            n_sessions = len([
                child for child in entry.iterdir()
                if child.is_dir() and (child / "live_events.jsonl").exists()
            ])
            if n_sessions > 0:
                found.append({
                    'machine_id': entry.name,
                    'sessions_count': n_sessions,
                })
        return jsonify({'machines': found})
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@app.route('/api/process-mining/discover', methods=['POST'])
def process_mining_discover():
    """Run the process-mining analysis and return the results.
    POST body (JSON, optional): {"machine_id": "<name>"} restricts the run
    to one machine's sessions; empty or absent means all machines.
    Returns on success: BPMN/DFG image URLs (served from /static/analytics/)
    and computed KPIs. Errors: 503 when the bridge or pm4py is missing,
    404 when no usable sessions/events exist, 500 on unexpected failure.
    """
    try:
        # Conditional import of the analysis bridge (optional dependency).
        try:
            from core.analytics.process_mining_bridge import (
                sessions_to_event_log,
                discover_bpmn,
                compute_kpis,
                load_jsonl_session,
                PM4PY_AVAILABLE,
            )
        except ImportError:
            return jsonify({
                'error': "Module d'analyse non disponible",
                'detail': "Le module core.analytics.process_mining_bridge est introuvable.",
            }), 503
        if not PM4PY_AVAILABLE:
            return jsonify({
                'error': "Module d'analyse non disponible",
                'detail': "pm4py n'est pas installé. Installez-le : pip install pm4py",
            }), 503
        data = request.get_json(silent=True) or {}
        machine_id = data.get('machine_id', '')
        # Select the session folders to load (one machine, or all of them).
        if machine_id:
            base_dir = LIVE_SESSIONS_PATH / machine_id
            if not base_dir.exists():
                return jsonify({'error': f"Machine '{machine_id}' introuvable"}), 404
        else:
            base_dir = LIVE_SESSIONS_PATH
        # Collect every live_events.jsonl file under the chosen folder.
        jsonl_files = list(base_dir.rglob("live_events.jsonl"))
        if not jsonl_files:
            return jsonify({
                'error': "Aucune session trouvée",
                'detail': "Aucun fichier live_events.jsonl dans le dossier sélectionné.",
            }), 404
        # Load every session; failures are logged and skipped (best-effort).
        all_events = []
        sessions_loaded = 0
        for jsonl_path in jsonl_files:
            try:
                events = load_jsonl_session(str(jsonl_path))
                # Inject session_id when missing (derived from the parent folder name).
                session_id = jsonl_path.parent.name
                for evt in events:
                    if 'session_id' not in evt:
                        evt['session_id'] = session_id
                all_events.extend(events)
                sessions_loaded += 1
            except Exception as e:
                api_logger.warning(f"Erreur chargement {jsonl_path}: {e}")
        if not all_events:
            return jsonify({
                'error': "Aucun événement exploitable",
                'detail': "Les fichiers JSONL sont vides ou ne contiennent pas d'événements significatifs.",
            }), 404
        # Convert the raw events into an event log (DataFrame).
        event_log_df = sessions_to_event_log(all_events)
        if event_log_df.empty:
            return jsonify({
                'error': "Aucune activité détectée",
                'detail': "Les sessions ne contiennent pas d'événements significatifs (clics, saisies, etc.).",
            }), 404
        # Output folder for the generated images (served under /static/).
        analytics_output_dir = Path(__file__).parent / "static" / "analytics"
        analytics_output_dir.mkdir(parents=True, exist_ok=True)
        # Unique run name based on machine_id and current timestamp.
        run_name = f"{machine_id or 'all'}_{int(time.time())}"
        # BPMN model discovery (also produces a DFG image).
        bpmn_result = discover_bpmn(
            event_log_df,
            output_dir=str(analytics_output_dir),
            name=run_name,
        )
        # Process KPIs computed over the same event log.
        kpis = compute_kpis(event_log_df)
        # Build the response: convert image paths to /static/ relative URLs
        # so the front-end can load them directly.
        bpmn_image_url = None
        if bpmn_result.get('bpmn_image_path'):
            bpmn_image_url = f"/static/analytics/{Path(bpmn_result['bpmn_image_path']).name}"
        dfg_image_url = None
        if bpmn_result.get('dfg_image_path'):
            dfg_image_url = f"/static/analytics/{Path(bpmn_result['dfg_image_path']).name}"
        return jsonify({
            'success': True,
            'sessions_loaded': sessions_loaded,
            'machine_id': machine_id or 'toutes',
            'bpmn_image_url': bpmn_image_url,
            'dfg_image_url': dfg_image_url,
            'kpis': kpis,
        })
    except Exception as e:
        api_logger.error(f"Erreur process mining: {e}", exc_info=True)
        return jsonify({'error': str(e)}), 500
# =============================================================================
# Audit & Traçabilité — Proxy vers le serveur streaming (port 5005)
# =============================================================================
# Internal base URL of the streaming server's audit API; this dashboard acts
# as an authenticated reverse proxy in front of it.
AUDIT_BASE_URL = 'http://localhost:5005/api/v1/audit'
@app.route('/audit')
def audit_page():
    """Render the 'Audit & Traçabilité' page (AI Act / GDPR compliance)."""
    return render_template('audit.html')
@app.route('/api/audit/<path:endpoint>')
def proxy_audit(endpoint):
    """Proxy the audit endpoints of the streaming server (port 5005).

    Forwards the client's query string and attaches the Bearer token.
    Available endpoints:
      - /api/audit/history → GET, filters date/user/result/action_type/...
      - /api/audit/summary → GET, param date=YYYY-MM-DD
      - /api/audit/export  → GET, returns CSV

    NOTE(review): `endpoint` is interpolated into the upstream URL as-is;
    Flask's <path:> converter allows slashes, so any path on the internal
    audit API is reachable through this proxy — confirm this is intended.
    """
    # Fix: dropped `import urllib.parse` — it was never used in this function.
    import urllib.request
    import urllib.error
    try:
        # Build the upstream URL, propagating the client's query string.
        query_string = request.query_string.decode('utf-8')
        url = f'{AUDIT_BASE_URL}/{endpoint}'
        if query_string:
            url += f'?{query_string}'
        headers = {'Accept': 'application/json'}
        # Bearer token (same mechanism as the chat/streaming proxy).
        token = os.environ.get('RPA_API_TOKEN', '')
        if token:
            headers['Authorization'] = f'Bearer {token}'
        req = urllib.request.Request(url, headers=headers)
        with urllib.request.urlopen(req, timeout=10) as response:
            content_type = response.headers.get('Content-Type', '')
            # CSV export: return the raw body with its original headers.
            if 'text/csv' in content_type or endpoint == 'export':
                body = response.read()
                resp_headers = {
                    'Content-Type': content_type or 'text/csv; charset=utf-8',
                }
                # Propagate Content-Disposition when the upstream set one.
                cd = response.headers.get('Content-Disposition')
                if cd:
                    resp_headers['Content-Disposition'] = cd
                return body, response.status, resp_headers
            # Standard JSON response.
            data = json.loads(response.read().decode('utf-8'))
            return jsonify(data)
    except urllib.error.HTTPError as e:
        # Relay upstream HTTP errors (with their JSON detail when parseable).
        try:
            detail = json.loads(e.read().decode('utf-8'))
        except Exception:
            detail = {'error': str(e)}
        return jsonify(detail), e.code
    except urllib.error.URLError as e:
        # Streaming service unreachable → 502 Bad Gateway.
        return jsonify({
            'error': f'Serveur streaming (5005) inaccessible : {e}',
            'hint': 'Vérifiez que le service streaming est démarré.',
        }), 502
    except Exception as e:
        return jsonify({'error': str(e)}), 500
# =============================================================================
# Base de connaissances — État mémoire et apprentissages
# =============================================================================
@app.route('/knowledge-base')
def knowledge_base_page():
    """Render the 'Base de connaissances' (knowledge base) page."""
    return render_template('knowledge_base.html')
@app.route('/api/knowledge-base/stats')
def knowledge_base_stats():
    """Return every knowledge-base metric as a single JSON document.

    Aggregates four independent sections (FAISS index, shadow sessions,
    UI patterns, learned workflows); each helper degrades to zeroed
    values on its own rather than failing the whole endpoint.

    Fix: removed `import glob as glob_module`, which was never used.
    """
    return jsonify({
        'faiss': _kb_faiss_stats(),
        'sessions': _kb_sessions_stats(),
        'patterns': _kb_patterns_stats(),
        'workflows': _kb_workflows_stats(),
    })
def _kb_faiss_stats() -> dict:
    """FAISS index metrics: vector count, on-disk size, availability flag.

    Falls back to the sidecar metadata file when the faiss package is not
    installed; any other read failure leaves the zeroed defaults in place.
    """
    index_file = DATA_PATH / "faiss_index" / "main.index"
    npy_dir = LIVE_SESSIONS_PATH / "embeddings"
    stats = {
        'vectors_indexed': 0,
        'embeddings_computed': 0,
        'index_size_mb': "0 Mo",
        'available': False,
    }
    if index_file.exists():
        # File size on disk, reported in megabytes.
        stats['index_size_mb'] = f"{index_file.stat().st_size / (1024 * 1024):.1f} Mo"
        try:
            import faiss
            stats['vectors_indexed'] = faiss.read_index(str(index_file)).ntotal
            stats['available'] = True
        except ImportError:
            # faiss is not installed — read the metadata sidecar if present.
            meta_file = DATA_PATH / "faiss_index" / "main.metadata"
            if meta_file.exists():
                try:
                    stats['vectors_indexed'] = json.loads(meta_file.read_text()).get('ntotal', 0)
                    stats['available'] = True
                except Exception:
                    pass
        except Exception:
            # Unreadable/corrupt index: keep size, leave available=False.
            pass
    # Count the computed embeddings (.npy files).
    if npy_dir.exists():
        stats['embeddings_computed'] = len(list(npy_dir.glob("*.npy")))
    return stats
def _kb_sessions_stats() -> dict:
    """Shadow-session metrics: per-machine counts, overall total and volume."""
    per_machine = []
    n_sessions = 0
    n_bytes = 0
    if LIVE_SESSIONS_PATH.exists():
        for entry in sorted(LIVE_SESSIONS_PATH.iterdir()):
            # Skip plain files and the embeddings cache folder.
            if not entry.is_dir() or entry.name == 'embeddings':
                continue
            if entry.name.startswith('sess_'):
                # Orphan session sitting at the root (no machine folder).
                n_sessions += 1
                n_bytes += _dir_size(entry)
                continue
            # Machine folder: count its sess_* children.
            children = [c for c in entry.iterdir() if c.is_dir() and c.name.startswith('sess_')]
            n_sessions += len(children)
            n_bytes += _dir_size(entry)
            if children:
                # Most recent session mtime = last activity for this machine.
                newest = max(children, key=lambda c: c.stat().st_mtime)
                stamp = datetime.fromtimestamp(newest.stat().st_mtime).strftime('%Y-%m-%d %H:%M')
                per_machine.append({
                    'machine_id': entry.name,
                    'session_count': len(children),
                    'last_activity': stamp,
                })
    # Human-readable total volume (Go / Mo / Ko).
    if n_bytes >= 1024 ** 3:
        volume = f"{n_bytes / (1024**3):.2f} Go"
    elif n_bytes >= 1024 ** 2:
        volume = f"{n_bytes / (1024**2):.1f} Mo"
    else:
        volume = f"{n_bytes / 1024:.0f} Ko"
    return {
        'total_sessions': n_sessions,
        'total_volume': volume,
        'machines': per_machine,
    }
def _kb_patterns_stats() -> dict:
"""Statistiques des patterns UI natifs."""
try:
from core.knowledge.ui_patterns import UIPatternLibrary
lib = UIPatternLibrary()
stats = lib.stats
return {
'total': stats.get('total', 0),
'by_category': stats.get('by_category', {}),
}
except Exception:
return {'total': 0, 'by_category': {}}
def _kb_workflows_stats() -> dict:
    """Count of learned workflows: every *.json under data/workflows, recursively."""
    root = DATA_PATH / "workflows"
    count = len(list(root.rglob("*.json"))) if root.exists() else 0
    return {'total': count}
def _dir_size(path: Path) -> int:
"""Calcule la taille totale d'un dossier (non récursif profond pour la perf)."""
total = 0
try:
for f in path.rglob('*'):
if f.is_file():
total += f.stat().st_size
except (PermissionError, OSError):
pass
return total
# =============================================================================
# Main
# =============================================================================
if __name__ == '__main__':
    # --- Production security validation (non-blocking for the dashboard) ---
    # Fix: both imports now live inside the try — previously a missing
    # core.security module crashed startup before the warning path could run.
    try:
        from core.security import get_security_config, validate_production_security
        validate_production_security(get_security_config())
        print("✅ Security validation passed")
    except Exception as e:
        print(f"⚠️ Security validation warning: {e}")
        print("Dashboard will start anyway (monitoring service)")
        # Do not stop the dashboard for minor security issues.

    # --- System cleanup hooks (best-effort) ---
    try:
        from core.system import initialize_system_cleanup, shutdown_system
        initialize_system_cleanup()
    except ImportError:
        print("⚠️ System cleanup not available")

        # Fix: define a no-op fallback so the shutdown paths below never
        # hit a NameError (previously swallowed by a bare `except:`).
        def shutdown_system():
            pass

    print("=" * 50)
    print("RPA Vision V3 - Dashboard Web")
    print("=" * 50)
    print(f"Base path: {BASE_PATH}")
    print(f"Data path: {DATA_PATH}")
    print(f"Sessions path: {SESSIONS_PATH}")
    print(f"Workflows path: {WORKFLOWS_PATH}")
    print("")
    print("Démarrage sur http://0.0.0.0:5001")
    print("WebSocket activé")
    print("=" * 50)

    try:
        socketio.run(
            app,
            host='0.0.0.0',
            port=5001,
            debug=False,
            allow_unsafe_werkzeug=True
        )
    except KeyboardInterrupt:
        print("\nReceived keyboard interrupt, shutting down...")
        # Fix: bare `except:` narrowed to `except Exception:` so that
        # SystemExit/KeyboardInterrupt are no longer swallowed here.
        try:
            shutdown_system()
        except Exception:
            pass
    except Exception as e:
        print(f"Dashboard error: {e}")
        try:
            shutdown_system()
        except Exception:
            pass
        raise