#!/usr/bin/env python3
"""
Flask application for the RPA Vision V3 web dashboard.

Features:
- Real-time monitoring over WebSocket
- Workflow management
- Session and screenshot visualization
- Performance charts
"""

import os
import sys
import json
import subprocess
import threading
import time
from pathlib import Path
from datetime import datetime

from flask import Flask, render_template, jsonify, request, send_file, Response
from flask_socketio import SocketIO, emit
from prometheus_client import generate_latest, CONTENT_TYPE_LATEST

# Add the parent directory to the import path
sys.path.insert(0, str(Path(__file__).parent.parent))

from core.persistence import StorageManager
from core.models import RawSession
from core.monitoring import get_logger, api_logger
from core.monitoring.chain_manager import ChainManager
from core.monitoring.trigger_manager import TriggerManager
from core.monitoring.log_exporter import LogExporter
from core.monitoring.automation_scheduler import AutomationScheduler

app = Flask(__name__)
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY', 'dev-key-change-in-production')
socketio = SocketIO(app, cors_allowed_origins="*", async_mode='threading')


@app.get('/healthz')
def healthz():
    """Minimal healthcheck (systemd/k8s)."""
    return jsonify({
        'status': 'ok',
        'service': 'rpa-vision-v3-dashboard',
        'timestamp': datetime.now().isoformat(),
    })

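
# Illustrative client sketch (hedged; defined but never called). It assumes
# the dashboard listens on localhost:5001, the default used in __main__
# below, and it uses only the standard library.
def _example_healthz_probe(base_url: str = "http://localhost:5001") -> bool:
    """Return True if the dashboard answers its healthcheck."""
    import urllib.request
    try:
        with urllib.request.urlopen(f"{base_url}/healthz", timeout=2) as resp:
            return json.loads(resp.read()).get('status') == 'ok'
    except OSError:
        return False
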

# Paths
BASE_PATH = Path(__file__).parent.parent
DATA_PATH = BASE_PATH / "data" / "training"
SESSIONS_PATH = DATA_PATH / "sessions"
WORKFLOWS_PATH = DATA_PATH / "workflows"
LOGS_PATH = BASE_PATH / "logs"

# StorageManager
storage = StorageManager(base_path=str(DATA_PATH))

# Monitoring managers
chain_manager = ChainManager(storage_path=DATA_PATH / "chains")
trigger_manager = TriggerManager(storage_path=DATA_PATH / "triggers")
log_exporter = LogExporter(logs_path=LOGS_PATH)

# Automation scheduler
automation_scheduler = AutomationScheduler(
    trigger_manager=trigger_manager,
    chain_manager=chain_manager,
    check_interval=1.0  # Check once per second
)

# Start the scheduler automatically
automation_scheduler.start()
api_logger.info("Automation scheduler started")

# Global state for real-time monitoring
execution_state = {
    "running": False,
    "workflow_id": None,
    "current_node": None,
    "steps_executed": 0,
    "last_confidence": 0.0,
    "mode": "idle",
    "history": []
}

# Performance metrics
performance_metrics = {
    "faiss_searches": [],
    "embedding_times": [],
    "capture_times": [],
    "total_embeddings": 0,
    "cache_hits": 0,
    "cache_misses": 0
}

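
# Illustrative sketch (hedged; defined but never called, and nothing else in
# this module enforces the cap): how a caller could record a timing sample
# into performance_metrics while keeping the broadcast payload bounded.
def _example_record_search_time(duration_ms: float) -> None:
    """Append a FAISS search timing, keeping only the last 200 samples."""
    performance_metrics["faiss_searches"].append(duration_ms)
    # Drop everything but the most recent 200 samples.
    del performance_metrics["faiss_searches"][:-200]
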

# =============================================================================
# Main routes
# =============================================================================

@app.route('/')
def index():
    """Dashboard home page."""
    return render_template('index.html')


# =============================================================================
# System API
# =============================================================================

@app.route('/api/system/status')
def system_status():
    """System status."""
    try:
        sessions_count = len(list(SESSIONS_PATH.glob('*'))) if SESSIONS_PATH.exists() else 0
        workflows_count = len(list(WORKFLOWS_PATH.glob('*.json'))) if WORKFLOWS_PATH.exists() else 0

        tests_path = BASE_PATH / "tests"
        unit_tests = len(list(tests_path.glob('unit/test_*.py'))) if tests_path.exists() else 0
        integration_tests = len(list(tests_path.glob('integration/test_*.py'))) if tests_path.exists() else 0

        dependencies_ok = True
        try:
            import torch
            import faiss
        except ImportError:
            dependencies_ok = False

        return jsonify({
            'status': 'online',
            'sessions_count': sessions_count,
            'workflows_count': workflows_count,
            'tests': {
                'total': unit_tests + integration_tests,
                'unit': unit_tests,
                'integration': integration_tests
            },
            'dependencies_ok': dependencies_ok,
            'execution': execution_state,
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/api/system/performance')
def system_performance():
    """Performance metrics."""
    try:
        # Try to fetch live stats from the real components
        faiss_stats = {}
        embedding_stats = {}

        try:
            from core.embedding.faiss_manager import FAISSManager
            fm = FAISSManager(dimensions=512)
            faiss_stats = fm.get_stats()
        except Exception:
            pass

        try:
            from core.embedding.embedding_cache import EmbeddingCache
            cache = EmbeddingCache()
            embedding_stats = cache.get_stats()
        except Exception:
            pass

        return jsonify({
            'faiss': faiss_stats,
            'embedding_cache': embedding_stats,
            'metrics': performance_metrics,
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


# =============================================================================
# Sessions API
# =============================================================================

@app.route('/api/agent/sessions')
def list_sessions():
    """List all agent sessions."""
    try:
        sessions = []

        if not SESSIONS_PATH.exists():
            return jsonify({'sessions': [], 'total': 0})

        for session_dir in SESSIONS_PATH.iterdir():
            if not session_dir.is_dir():
                continue

            # Look for JSON files in the possible layouts:
            # BEFORE cleanup: sessions/sess_xxx/sess_xxx/*.json (one directory per session)
            # AFTER cleanup:  sessions/2026-01-07/session_sess_xxx.json (several sessions per date directory)
            json_files = list(session_dir.glob('*/*.json'))  # pre-cleanup layout
            if not json_files:
                json_files = list(session_dir.glob('*.json'))  # post-cleanup layout

            # Process EVERY JSON file found
            for json_path in json_files:
                try:
                    session = RawSession.load_from_file(json_path)
                    session_id = session.session_id

                    # Compute the size (JSON plus any screenshots)
                    size_bytes = json_path.stat().st_size

                    # Count the screenshots
                    # Pre-cleanup layout: sessions/sess_xxx/sess_xxx/shots/*.png
                    screenshots_dir = session_dir / session_id / "shots"
                    screenshot_files = list(screenshots_dir.glob('*.png')) if screenshots_dir.exists() else []

                    # Add the screenshot sizes to the total
                    size_bytes += sum(f.stat().st_size for f in screenshot_files)
                    size_mb = round(size_bytes / (1024 * 1024), 2)

                    sessions.append({
                        'session_id': session.session_id,
                        'started_at': session.started_at.isoformat(),
                        'ended_at': session.ended_at.isoformat() if session.ended_at else None,
                        'events_count': len(session.events),
                        'screenshots_count': len(screenshot_files),
                        'user': session.user,
                        'context': session.context,
                        'size_mb': size_mb,
                        'path': str(json_path.parent)
                    })
                except Exception as e:
                    print(f"Error reading session {json_path.name}: {e}")
                    continue

        sessions.sort(key=lambda x: x['started_at'], reverse=True)

        return jsonify({
            'sessions': sessions,
            'total': len(sessions)
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/api/agent/sessions/<session_id>')
def get_session(session_id):
    """Details of a single session."""
    try:
        session_dir = SESSIONS_PATH / session_id
        if not session_dir.exists():
            return jsonify({'error': 'Session not found'}), 404

        json_files = list(session_dir.glob('*/*.json'))
        if not json_files:
            return jsonify({'error': 'JSON file not found'}), 404

        session = RawSession.load_from_file(json_files[0])

        # List the screenshots with their URLs
        # Layout: sessions/{session_id}/{session_id}/shots/*.png
        screenshots = []
        screenshots_dir = session_dir / session_id / "shots"
        if screenshots_dir.exists():
            for img_file in sorted(screenshots_dir.glob('*.png')):
                screenshots.append({
                    'filename': img_file.name,
                    'url': f'/api/agent/sessions/{session_id}/screenshot/{img_file.name}',
                    'size': img_file.stat().st_size
                })

        return jsonify({
            'session': {
                'session_id': session.session_id,
                'started_at': session.started_at.isoformat(),
                'ended_at': session.ended_at.isoformat() if session.ended_at else None,
                'user': session.user,
                'context': session.context,
                'environment': session.environment,
                'events': [
                    {
                        'type': e.type,
                        'timestamp': e.t if hasattr(e, 't') else 0,
                        'window': e.window,
                        'screenshot_id': e.screenshot_id
                    }
                    for e in session.events
                ]
            },
            'screenshots': screenshots,
            'screenshots_count': len(screenshots)
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/api/agent/sessions/<session_id>/screenshot/<filename>')
def get_screenshot(session_id, filename):
    """Serve a screenshot."""
    try:
        session_dir = SESSIONS_PATH / session_id

        # Try the canonical path first: {session_id}/shots/
        screenshot_path = session_dir / session_id / "shots" / filename

        if not screenshot_path.exists():
            # Try the legacy path for backward compatibility
            screenshot_path = session_dir / "screenshots" / filename

        if not screenshot_path.exists():
            # Fall back to scanning the subdirectories
            for subdir in session_dir.iterdir():
                if subdir.is_dir():
                    # Try shots/
                    alt_path = subdir / "shots" / filename
                    if alt_path.exists():
                        screenshot_path = alt_path
                        break
                    # Try screenshots/
                    alt_path = subdir / "screenshots" / filename
                    if alt_path.exists():
                        screenshot_path = alt_path
                        break

        if not screenshot_path.exists():
            return jsonify({'error': 'Screenshot not found, or deleted after processing'}), 404

        return send_file(screenshot_path, mimetype='image/png')
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/api/agent/sessions/<session_id>/process', methods=['POST'])
def process_session(session_id):
    """Start processing a session."""
    try:
        session_dir = SESSIONS_PATH / session_id
        if not session_dir.exists():
            return jsonify({'error': 'Session not found'}), 404

        from server.processing_pipeline import process_session_async

        def process_in_background():
            try:
                stats = process_session_async(session_id, str(DATA_PATH))
                socketio.emit('processing_complete', {
                    'session_id': session_id,
                    'stats': stats
                })
            except Exception as e:
                socketio.emit('processing_error', {
                    'session_id': session_id,
                    'error': str(e)
                })

        thread = threading.Thread(target=process_in_background, daemon=True)
        thread.start()

        return jsonify({'status': 'started', 'session_id': session_id})
    except Exception as e:
        return jsonify({'error': str(e)}), 500

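
# Illustrative client sketch (hedged; defined but never called, and
# "sess_example" is a made-up session id). Processing runs in a background
# thread, so the POST returns immediately; completion arrives via the
# 'processing_complete' / 'processing_error' WebSocket events above.
def _example_process_session(session_id: str = "sess_example") -> dict:
    """Kick off session processing and return the acknowledgement."""
    import urllib.request
    req = urllib.request.Request(
        f"http://localhost:5001/api/agent/sessions/{session_id}/process",
        method="POST",
    )
    with urllib.request.urlopen(req, timeout=5) as resp:
        return json.loads(resp.read())
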

# =============================================================================
# Screen States API
# =============================================================================

@app.route('/api/screen_states')
def list_screen_states():
    """List all processed screen states."""
    try:
        screen_states = []
        screen_states_path = DATA_PATH / "screen_states"

        if not screen_states_path.exists():
            return jsonify({'screen_states': [], 'total': 0})

        # Walk every JSON file under screen_states/
        for date_dir in screen_states_path.iterdir():
            if not date_dir.is_dir():
                continue

            for state_file in date_dir.glob('*.json'):
                try:
                    with open(state_file, 'r') as f:
                        state_data = json.load(f)

                    screen_states.append({
                        'screen_state_id': state_data.get('screen_state_id', state_file.stem),
                        'session_id': state_data.get('session_id', 'unknown'),
                        'timestamp': state_data.get('timestamp', ''),
                        'window': state_data.get('window', {}),
                        'tags': state_data.get('context', {}).get('tags', []),
                        'workflow_candidate': state_data.get('context', {}).get('current_workflow_candidate'),
                        'user_id': state_data.get('context', {}).get('user_id', 'unknown'),
                        'business_variables': state_data.get('context', {}).get('business_variables', {}),
                        'file_path': str(state_file),
                        'date': date_dir.name
                    })
                except Exception as e:
                    print(f"Error reading screen state {state_file}: {e}")
                    continue

        # Sort by timestamp (most recent first)
        screen_states.sort(key=lambda x: x['timestamp'], reverse=True)

        # Group by session
        sessions_grouped = {}
        for state in screen_states:
            session_id = state['session_id']
            if session_id not in sessions_grouped:
                sessions_grouped[session_id] = {
                    'session_id': session_id,
                    'screen_states': [],
                    'count': 0,
                    'first_timestamp': state['timestamp'],
                    'last_timestamp': state['timestamp'],
                    'tags': state['tags'],
                    'user_id': state['user_id']
                }
            sessions_grouped[session_id]['screen_states'].append(state)
            sessions_grouped[session_id]['count'] += 1
            # The list is sorted most recent first, so the first state seen set
            # last_timestamp (the latest); older states push first_timestamp back.
            if state['timestamp'] < sessions_grouped[session_id]['first_timestamp']:
                sessions_grouped[session_id]['first_timestamp'] = state['timestamp']

        return jsonify({
            'screen_states': screen_states,
            'total': len(screen_states),
            'sessions_grouped': list(sessions_grouped.values()),
            'sessions_count': len(sessions_grouped)
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/api/screen_states/<session_id>')
def get_session_screen_states(session_id):
    """Fetch every screen state belonging to a session."""
    try:
        screen_states = []
        screen_states_path = DATA_PATH / "screen_states"

        if not screen_states_path.exists():
            return jsonify({'error': 'Screen states directory not found'}), 404

        # Walk every JSON file under screen_states/
        for date_dir in screen_states_path.iterdir():
            if not date_dir.is_dir():
                continue

            for state_file in date_dir.glob('*.json'):
                try:
                    with open(state_file, 'r') as f:
                        state_data = json.load(f)

                    if state_data.get('session_id') == session_id:
                        screen_states.append(state_data)
                except Exception:
                    continue

        # Sort by timestamp
        screen_states.sort(key=lambda x: x.get('timestamp', ''))

        return jsonify({
            'session_id': session_id,
            'screen_states': screen_states,
            'total': len(screen_states)
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


# =============================================================================
# Workflows API
# =============================================================================

@app.route('/api/workflows')
def list_workflows():
    """List all workflows."""
    try:
        workflows = []

        if not WORKFLOWS_PATH.exists():
            WORKFLOWS_PATH.mkdir(parents=True, exist_ok=True)
            return jsonify({'workflows': [], 'total': 0})

        for wf_file in WORKFLOWS_PATH.glob('*.json'):
            try:
                with open(wf_file, 'r') as f:
                    wf_data = json.load(f)

                workflows.append({
                    'workflow_id': wf_data.get('workflow_id', wf_file.stem),
                    'name': wf_data.get('name', wf_file.stem),
                    'description': wf_data.get('description', ''),
                    'nodes_count': len(wf_data.get('nodes', [])),
                    'edges_count': len(wf_data.get('edges', [])),
                    'learning_state': wf_data.get('learning_state', 'OBSERVATION'),
                    'created_at': wf_data.get('created_at', ''),
                    'updated_at': wf_data.get('updated_at', ''),
                    'execution_count': wf_data.get('execution_count', 0)
                })
            except Exception as e:
                print(f"Error reading workflow {wf_file}: {e}")

        return jsonify({
            'workflows': workflows,
            'total': len(workflows)
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/api/workflows/<workflow_id>')
def get_workflow(workflow_id):
    """Details of a single workflow."""
    try:
        wf_file = WORKFLOWS_PATH / f"{workflow_id}.json"
        if not wf_file.exists():
            return jsonify({'error': 'Workflow not found'}), 404

        with open(wf_file, 'r') as f:
            wf_data = json.load(f)

        return jsonify({'workflow': wf_data})
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/api/workflows/<workflow_id>/execute', methods=['POST'])
def execute_workflow(workflow_id):
    """Start executing a workflow."""
    global execution_state

    try:
        data = request.get_json() or {}
        mode = data.get('mode', 'supervised')

        wf_file = WORKFLOWS_PATH / f"{workflow_id}.json"
        if not wf_file.exists():
            return jsonify({'error': 'Workflow not found'}), 404

        # Update the state
        execution_state = {
            "running": True,
            "workflow_id": workflow_id,
            "current_node": None,
            "steps_executed": 0,
            "last_confidence": 0.0,
            "mode": mode,
            "started_at": datetime.now().isoformat(),
            "history": []
        }

        # Notify over WebSocket
        socketio.emit('execution_started', execution_state)

        # Run the execution in the background
        def run_workflow():
            global execution_state
            try:
                from core.pipeline.workflow_pipeline import WorkflowPipeline
                from core.execution.execution_loop import ExecutionLoop, ExecutionMode

                pipeline = WorkflowPipeline()
                loop = ExecutionLoop(pipeline)

                # Callback for progress updates
                def on_step(step_result):
                    execution_state["current_node"] = step_result.node_id
                    execution_state["steps_executed"] += 1
                    execution_state["last_confidence"] = step_result.match_confidence
                    execution_state["history"].append({
                        "node_id": step_result.node_id,
                        "success": step_result.success,
                        "confidence": step_result.match_confidence,
                        "timestamp": datetime.now().isoformat()
                    })
                    socketio.emit('execution_step', execution_state)

                loop.on_step_complete(on_step)

                mode_map = {
                    'observation': ExecutionMode.OBSERVATION,
                    'coaching': ExecutionMode.COACHING,
                    'supervised': ExecutionMode.SUPERVISED,
                    'automatic': ExecutionMode.AUTOMATIC
                }

                loop.start(workflow_id, mode=mode_map.get(mode, ExecutionMode.SUPERVISED))

            except Exception as e:
                execution_state["running"] = False
                execution_state["error"] = str(e)
                socketio.emit('execution_error', execution_state)

        thread = threading.Thread(target=run_workflow, daemon=True)
        thread.start()

        return jsonify({'status': 'started', 'execution': execution_state})
    except Exception as e:
        return jsonify({'error': str(e)}), 500

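
# Illustrative client sketch (hedged; defined but never called, and the
# workflow id is hypothetical). The mode string must be one of the keys of
# mode_map above; anything else falls back to SUPERVISED.
def _example_start_workflow(workflow_id: str, mode: str = "supervised") -> dict:
    """POST to the execute endpoint and return the parsed acknowledgement."""
    import urllib.request
    req = urllib.request.Request(
        f"http://localhost:5001/api/workflows/{workflow_id}/execute",
        data=json.dumps({"mode": mode}).encode(),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with urllib.request.urlopen(req, timeout=5) as resp:
        return json.loads(resp.read())
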

@app.route('/api/workflows/<workflow_id>/stop', methods=['POST'])
def stop_workflow(workflow_id):
    """Stop a running workflow."""
    global execution_state

    execution_state["running"] = False
    socketio.emit('execution_stopped', execution_state)

    return jsonify({'status': 'stopped'})


# =============================================================================
# Tests API
# =============================================================================

@app.route('/api/tests')
def list_tests():
    """List all available tests."""
    try:
        tests = []
        tests_path = BASE_PATH / "tests"

        if not tests_path.exists():
            return jsonify({'tests': [], 'total': 0})

        for test_type in ['unit', 'integration', 'performance']:
            type_path = tests_path / test_type
            if type_path.exists():
                for test_file in type_path.glob('test_*.py'):
                    tests.append({
                        'name': test_file.stem,
                        'path': str(test_file.relative_to(BASE_PATH)),
                        'type': test_type
                    })

        return jsonify({
            'tests': tests,
            'total': len(tests)
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/api/tests/run', methods=['POST'])
def run_test():
    """Run a single test file."""
    try:
        data = request.get_json()
        test_path = data.get('test_path')

        if not test_path:
            return jsonify({'error': 'test_path required'}), 400

        result = subprocess.run(
            ['pytest', test_path, '-v', '--tb=short'],
            cwd=BASE_PATH,
            capture_output=True,
            text=True,
            timeout=120
        )

        return jsonify({
            'success': result.returncode == 0,
            'stdout': result.stdout,
            'stderr': result.stderr,
            'returncode': result.returncode
        })
    except subprocess.TimeoutExpired:
        return jsonify({'error': 'Timeout (120s)'}), 408
    except Exception as e:
        return jsonify({'error': str(e)}), 500

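
# Example request body for the route above (illustrative; the test file name
# is made up). Relative paths resolve against BASE_PATH because pytest runs
# with cwd=BASE_PATH:
#
#   POST /api/tests/run
#   {"test_path": "tests/unit/test_example.py"}
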

@app.route('/api/tests/run-all', methods=['POST'])
def run_all_tests():
    """Run every test of a given type."""
    try:
        data = request.get_json()
        test_type = data.get('type', 'unit')

        tests_path = BASE_PATH / "tests" / test_type

        if not tests_path.exists():
            return jsonify({'error': f'Directory {test_type} not found'}), 404

        result = subprocess.run(
            ['pytest', str(tests_path), '-v', '--tb=short'],
            cwd=BASE_PATH,
            capture_output=True,
            text=True,
            timeout=300
        )

        return jsonify({
            'success': result.returncode == 0,
            'stdout': result.stdout,
            'stderr': result.stderr,
            'returncode': result.returncode
        })
    except subprocess.TimeoutExpired:
        return jsonify({'error': 'Timeout (300s)'}), 408
    except Exception as e:
        return jsonify({'error': str(e)}), 500


# =============================================================================
# Chains API
# =============================================================================

@app.route('/api/chains')
def api_chains():
    """List all workflow chains."""
    try:
        chains = chain_manager.list_chains()
        return jsonify({
            'chains': [
                {
                    'chain_id': chain.chain_id,
                    'name': chain.name,
                    'workflows': chain.workflows,
                    'status': chain.status,
                    'created_at': chain.created_at.isoformat(),
                    'last_execution': chain.last_execution.isoformat() if chain.last_execution else None,
                    'success_rate': chain.success_rate
                }
                for chain in chains
            ],
            'total': len(chains)
        })
    except Exception as e:
        api_logger.error(f"Error listing chains: {e}")
        return jsonify({'error': str(e)}), 500


@app.route('/api/chains', methods=['POST'])
def create_chain():
    """Create a new chain."""
    try:
        data = request.get_json()
        name = data.get('name')
        workflows = data.get('workflows', [])

        if not name or not workflows:
            return jsonify({'error': 'name and workflows required'}), 400

        chain = chain_manager.create_chain(name, workflows)
        api_logger.info(f"Created chain {chain.chain_id}")

        return jsonify({
            'chain': {
                'chain_id': chain.chain_id,
                'name': chain.name,
                'workflows': chain.workflows,
                'status': chain.status
            }
        }), 201
    except ValueError as e:
        return jsonify({'error': str(e)}), 400
    except Exception as e:
        api_logger.error(f"Error creating chain: {e}")
        return jsonify({'error': str(e)}), 500

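
# Example payload for POST /api/chains (illustrative; the workflow ids are
# made up and must refer to workflows that ChainManager can resolve):
#
#   {"name": "invoice-pipeline", "workflows": ["wf_login", "wf_export"]}
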

@app.route('/api/chains/<chain_id>/execute', methods=['POST'])
def execute_chain_route(chain_id):
    """Start executing a chain."""
    try:
        def on_progress(workflow_id, current, total):
            socketio.emit('chain_progress', {
                'chain_id': chain_id,
                'workflow_id': workflow_id,
                'current': current,
                'total': total
            })

        result = chain_manager.execute_chain(chain_id, on_progress=on_progress)

        socketio.emit('chain_complete', {
            'chain_id': chain_id,
            'success': result.success,
            'duration': result.duration
        })

        return jsonify({
            'result': {
                'chain_id': result.chain_id,
                'success': result.success,
                'duration': result.duration,
                'workflows_executed': result.workflows_executed,
                'failed_at': result.failed_at,
                'error_message': result.error_message
            }
        })
    except ValueError as e:
        return jsonify({'error': str(e)}), 404
    except Exception as e:
        api_logger.error(f"Error executing chain {chain_id}: {e}")
        return jsonify({'error': str(e)}), 500


# =============================================================================
# Triggers API
# =============================================================================

@app.route('/api/triggers')
def api_triggers():
    """List all triggers."""
    try:
        triggers = trigger_manager.list_triggers()
        return jsonify({
            'triggers': [
                {
                    'trigger_id': trigger.trigger_id,
                    'trigger_type': trigger.trigger_type,
                    'workflow_id': trigger.workflow_id,
                    'config': trigger.config,
                    'enabled': trigger.enabled,
                    'created_at': trigger.created_at.isoformat(),
                    'last_fired': trigger.last_fired.isoformat() if trigger.last_fired else None,
                    'fire_count': trigger.fire_count
                }
                for trigger in triggers
            ],
            'total': len(triggers)
        })
    except Exception as e:
        api_logger.error(f"Error listing triggers: {e}")
        return jsonify({'error': str(e)}), 500


@app.route('/api/triggers', methods=['POST'])
def create_trigger():
    """Create a new trigger."""
    try:
        data = request.get_json()
        trigger_type = data.get('trigger_type')
        workflow_id = data.get('workflow_id')
        config = data.get('config', {})

        if not trigger_type or not workflow_id:
            return jsonify({'error': 'trigger_type and workflow_id required'}), 400

        trigger = trigger_manager.create_trigger(trigger_type, workflow_id, config)
        api_logger.info(f"Created trigger {trigger.trigger_id}")

        return jsonify({
            'trigger': {
                'trigger_id': trigger.trigger_id,
                'trigger_type': trigger.trigger_type,
                'workflow_id': trigger.workflow_id,
                'config': trigger.config,
                'enabled': trigger.enabled
            }
        }), 201
    except ValueError as e:
        return jsonify({'error': str(e)}), 400
    except Exception as e:
        api_logger.error(f"Error creating trigger: {e}")
        return jsonify({'error': str(e)}), 500

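
# Example payload for POST /api/triggers (illustrative; the valid trigger
# types and config keys are defined by TriggerManager, which is not shown in
# this file, so both values below are assumptions):
#
#   {"trigger_type": "schedule", "workflow_id": "wf_export",
#    "config": {"interval_seconds": 3600}}
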

@app.route('/api/triggers/<trigger_id>/toggle', methods=['POST'])
def toggle_trigger(trigger_id):
    """Enable or disable a trigger."""
    try:
        trigger = trigger_manager.get_trigger(trigger_id)
        if not trigger:
            return jsonify({'error': 'Trigger not found'}), 404

        if trigger.enabled:
            trigger_manager.disable_trigger(trigger_id)
            api_logger.info(f"Disabled trigger {trigger_id}")
        else:
            trigger_manager.enable_trigger(trigger_id)
            api_logger.info(f"Enabled trigger {trigger_id}")

        trigger = trigger_manager.get_trigger(trigger_id)
        return jsonify({
            'trigger': {
                'trigger_id': trigger.trigger_id,
                'enabled': trigger.enabled
            }
        })
    except Exception as e:
        api_logger.error(f"Error toggling trigger {trigger_id}: {e}")
        return jsonify({'error': str(e)}), 500


# =============================================================================
# Logs API
# =============================================================================

@app.route('/api/logs')
def get_logs():
    """Fetch recent log lines."""
    try:
        logs = []

        if not LOGS_PATH.exists():
            return jsonify({'logs': [], 'total': 0})

        for log_file in LOGS_PATH.glob('*.log'):
            try:
                with open(log_file, 'r') as f:
                    lines = f.readlines()
                    for line in lines[-100:]:
                        logs.append({
                            'file': log_file.name,
                            'message': line.strip()
                        })
            except Exception as e:
                print(f"Error reading log {log_file}: {e}")

        return jsonify({
            'logs': logs[-200:],
            'total': len(logs)
        })
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/api/logs/download')
def download_logs():
    """Download the logs as a ZIP archive."""
    try:
        # Optional date-range parameters
        start_time = request.args.get('start_time')
        end_time = request.args.get('end_time')

        start_dt = datetime.fromisoformat(start_time) if start_time else None
        end_dt = datetime.fromisoformat(end_time) if end_time else None

        # Build the ZIP
        zip_file = log_exporter.export_to_zip(start_dt, end_dt)

        api_logger.info(f"Logs downloaded (start={start_time}, end={end_time})")

        return send_file(
            zip_file,
            mimetype='application/zip',
            as_attachment=True,
            download_name=f'rpa_logs_{datetime.now().strftime("%Y%m%d_%H%M%S")}.zip'
        )
    except Exception as e:
        api_logger.error(f"Error downloading logs: {e}")
        return jsonify({'error': str(e)}), 500

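
# Example (illustrative): download a single day's logs. Both parameters are
# optional ISO-8601 timestamps, parsed with datetime.fromisoformat above.
#
#   GET /api/logs/download?start_time=2026-01-06T00:00:00&end_time=2026-01-07T00:00:00
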

# =============================================================================
# Automation API
# =============================================================================

@app.route('/api/automation/status')
def automation_status():
    """Status of the automation scheduler."""
    try:
        status = automation_scheduler.get_status()
        return jsonify(status)
    except Exception as e:
        api_logger.error(f"Error getting automation status: {e}")
        return jsonify({'error': str(e)}), 500


@app.route('/api/automation/start', methods=['POST'])
def automation_start():
    """Start the scheduler."""
    try:
        automation_scheduler.start()
        return jsonify({'status': 'started'})
    except Exception as e:
        api_logger.error(f"Error starting automation: {e}")
        return jsonify({'error': str(e)}), 500


@app.route('/api/automation/stop', methods=['POST'])
def automation_stop():
    """Stop the scheduler."""
    try:
        automation_scheduler.stop()
        return jsonify({'status': 'stopped'})
    except Exception as e:
        api_logger.error(f"Error stopping automation: {e}")
        return jsonify({'error': str(e)}), 500


# =============================================================================
# Metrics API (Prometheus)
# =============================================================================

@app.route('/metrics')
def prometheus_metrics():
    """Prometheus metrics endpoint."""
    try:
        return Response(generate_latest(), mimetype=CONTENT_TYPE_LATEST)
    except Exception as e:
        api_logger.error(f"Error generating metrics: {e}")
        return Response(f"# Error: {e}\n", mimetype='text/plain'), 500

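
# Illustrative sketch (hedged; nothing in this file registers collectors
# itself, and the metric name below is hypothetical): any module in this
# process can register metrics with prometheus_client's default registry,
# and generate_latest() above will include them automatically.
#
#   from prometheus_client import Counter
#   WORKFLOW_RUNS = Counter('rpa_workflow_runs_total',
#                           'Workflow executions started via the dashboard')
#   WORKFLOW_RUNS.inc()
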

# =============================================================================
# WebSocket Events
# =============================================================================

@socketio.on('connect')
def handle_connect():
    """Client connected."""
    print(f"Client connected: {request.sid}")
    emit('connected', {'status': 'ok', 'execution': execution_state})


@socketio.on('disconnect')
def handle_disconnect():
    """Client disconnected."""
    print(f"Client disconnected: {request.sid}")


@socketio.on('subscribe_execution')
def handle_subscribe():
    """Subscribe to execution updates."""
    emit('execution_state', execution_state)


@socketio.on('get_performance')
def handle_get_performance():
    """Request the performance metrics."""
    emit('performance_update', performance_metrics)

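
# Illustrative client sketch (hedged; defined but never called). It assumes
# the python-socketio client package, which speaks the same protocol as
# flask_socketio, is installed on the caller's side.
def _example_watch_metrics(url: str = "http://localhost:5001") -> None:
    """Print each periodic metrics broadcast as it arrives."""
    import socketio as sio_client  # python-socketio, not flask_socketio

    client = sio_client.Client()

    @client.on('metrics_update')
    def on_metrics(data):
        # Payload shape matches broadcast_metrics() below.
        print(data['timestamp'], data['execution']['mode'])

    client.connect(url)
    client.wait()
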

# =============================================================================
# Background tasks
# =============================================================================

def broadcast_metrics():
    """Broadcast the metrics periodically."""
    while True:
        time.sleep(5)
        try:
            socketio.emit('metrics_update', {
                'execution': execution_state,
                'performance': performance_metrics,
                'timestamp': datetime.now().isoformat()
            })
        except Exception:
            pass


# Start the broadcast thread
metrics_thread = threading.Thread(target=broadcast_metrics, daemon=True)
metrics_thread.start()


# =============================================================================
# Main
# =============================================================================

if __name__ == '__main__':
    # Validate production security (non-blocking for the dashboard)
    from core.security import get_security_config
    try:
        from core.security import validate_production_security
        config = get_security_config()
        validate_production_security(config)
        print("✅ Security validation passed")
    except Exception as e:
        print(f"⚠️ Security validation warning: {e}")
        print("Dashboard will start anyway (monitoring service)")
        # Do not stop the dashboard over minor security issues

    # Initialize the cleanup system; keep shutdown_system defined even if the
    # import fails so the shutdown paths below cannot hit a NameError.
    shutdown_system = None
    try:
        from core.system import initialize_system_cleanup, shutdown_system
        initialize_system_cleanup()
    except ImportError:
        print("⚠️ System cleanup not available")

    print("=" * 50)
    print("RPA Vision V3 - Web Dashboard")
    print("=" * 50)
    print(f"Base path: {BASE_PATH}")
    print(f"Data path: {DATA_PATH}")
    print(f"Sessions path: {SESSIONS_PATH}")
    print(f"Workflows path: {WORKFLOWS_PATH}")
    print("")
    print("Starting on http://0.0.0.0:5001")
    print("WebSocket enabled")
    print("=" * 50)

    try:
        socketio.run(
            app,
            host='0.0.0.0',
            port=5001,
            debug=False,
            allow_unsafe_werkzeug=True
        )
    except KeyboardInterrupt:
        print("\nReceived keyboard interrupt, shutting down...")
        if shutdown_system:
            try:
                shutdown_system()
            except Exception:
                pass
    except Exception as e:
        print(f"Dashboard error: {e}")
        if shutdown_system:
            try:
                shutdown_system()
            except Exception:
                pass
        raise