v1.0 - Version stable: multi-PC, détection UI-DETR-1, 3 modes exécution
- Frontend v4 accessible sur réseau local (192.168.1.40) - Ports ouverts: 3002 (frontend), 5001 (backend), 5004 (dashboard) - Ollama GPU fonctionnel - Self-healing interactif - Dashboard confiance Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
5
core/analytics/api/__init__.py
Normal file
5
core/analytics/api/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Analytics API module."""
|
||||
|
||||
from .analytics_api import AnalyticsAPI
|
||||
|
||||
__all__ = ['AnalyticsAPI']
|
||||
387
core/analytics/api/analytics_api.py
Normal file
387
core/analytics/api/analytics_api.py
Normal file
@@ -0,0 +1,387 @@
|
||||
"""REST API for analytics."""
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Optional, Any
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
try:
|
||||
from flask import Blueprint, request, jsonify, send_file
|
||||
FLASK_AVAILABLE = True
|
||||
except ImportError:
|
||||
FLASK_AVAILABLE = False
|
||||
Blueprint = None
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AnalyticsAPI:
    """REST API for analytics.

    Exposes the analytics components (query engine, performance analyzer,
    anomaly detector, insight generator, success-rate calculator, report
    generator and dashboard manager) as a Flask blueprint mounted under
    ``/api/analytics``. Flask is optional: when it is not installed the
    instance is still constructed but ``blueprint`` is ``None`` and no
    routes are registered.
    """

    def __init__(
        self,
        query_engine,
        performance_analyzer,
        anomaly_detector,
        insight_generator,
        success_rate_calculator,
        report_generator,
        dashboard_manager
    ):
        """
        Initialize analytics API.

        Args:
            query_engine: Query engine instance
            performance_analyzer: Performance analyzer instance
            anomaly_detector: Anomaly detector instance
            insight_generator: Insight generator instance
            success_rate_calculator: Success rate calculator instance
            report_generator: Report generator instance
            dashboard_manager: Dashboard manager instance
        """
        if not FLASK_AVAILABLE:
            logger.warning("Flask not available - API endpoints will not be registered")
            self.blueprint = None
            return

        self.query_engine = query_engine
        self.performance_analyzer = performance_analyzer
        self.anomaly_detector = anomaly_detector
        self.insight_generator = insight_generator
        self.success_rate_calculator = success_rate_calculator
        self.report_generator = report_generator
        self.dashboard_manager = dashboard_manager

        self.blueprint = Blueprint('analytics', __name__, url_prefix='/api/analytics')
        self._register_routes()

        logger.info("AnalyticsAPI initialized")

    @staticmethod
    def _time_window(default_hours):
        """Return ``(start_time, end_time)`` derived from the ``hours`` query arg.

        Args:
            default_hours: Window size (in hours) used when ``hours`` is absent.

        Returns:
            Tuple of naive local datetimes ``(start_time, end_time)``.
            NOTE(review): times are naive local (``datetime.now()``) — confirm
            the metric store uses the same convention.

        Raises:
            ValueError: If the ``hours`` query argument is not an integer.
        """
        hours = int(request.args.get('hours', default_hours))
        end_time = datetime.now()
        return end_time - timedelta(hours=hours), end_time

    def _register_routes(self) -> None:
        """Register API routes on the blueprint."""
        if not FLASK_AVAILABLE or not self.blueprint:
            return

        @self.blueprint.route('/metrics', methods=['GET'])
        def get_metrics():
            """Get metrics with filters."""
            try:
                metric_type = request.args.get('type', 'execution')
                workflow_id = request.args.get('workflow_id')
                start_time, end_time = self._time_window(24)

                filters = {'workflow_id': workflow_id} if workflow_id else {}

                metrics = self.query_engine.query(
                    metric_type=metric_type,
                    start_time=start_time,
                    end_time=end_time,
                    filters=filters
                )

                return jsonify({
                    'success': True,
                    'count': len(metrics),
                    'metrics': metrics
                })
            except ValueError as e:
                # Malformed query parameters are a client error, not a server fault.
                return jsonify({'success': False, 'error': str(e)}), 400
            except Exception as e:
                logger.exception(f"Error getting metrics: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/performance', methods=['GET'])
        def get_performance():
            """Get performance analysis."""
            try:
                workflow_id = request.args.get('workflow_id')
                if not workflow_id:
                    return jsonify({'success': False, 'error': 'workflow_id required'}), 400

                start_time, end_time = self._time_window(24)

                stats = self.performance_analyzer.analyze_performance(
                    workflow_id=workflow_id,
                    start_time=start_time,
                    end_time=end_time
                )

                return jsonify({
                    'success': True,
                    'performance': stats.to_dict()
                })
            except ValueError as e:
                return jsonify({'success': False, 'error': str(e)}), 400
            except Exception as e:
                logger.exception(f"Error getting performance: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/performance/bottlenecks', methods=['GET'])
        def get_bottlenecks():
            """Get performance bottlenecks."""
            try:
                workflow_id = request.args.get('workflow_id')
                if not workflow_id:
                    return jsonify({'success': False, 'error': 'workflow_id required'}), 400

                start_time, end_time = self._time_window(24)

                bottlenecks = self.performance_analyzer.identify_bottlenecks(
                    workflow_id=workflow_id,
                    start_time=start_time,
                    end_time=end_time
                )

                return jsonify({
                    'success': True,
                    'bottlenecks': [b.to_dict() for b in bottlenecks]
                })
            except ValueError as e:
                return jsonify({'success': False, 'error': str(e)}), 400
            except Exception as e:
                logger.exception(f"Error getting bottlenecks: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/anomalies', methods=['GET'])
        def get_anomalies():
            """Get detected anomalies."""
            try:
                workflow_id = request.args.get('workflow_id')
                start_time, end_time = self._time_window(24)

                anomalies = self.anomaly_detector.detect_anomalies(
                    workflow_id=workflow_id,
                    start_time=start_time,
                    end_time=end_time
                )

                return jsonify({
                    'success': True,
                    'count': len(anomalies),
                    'anomalies': [a.to_dict() for a in anomalies]
                })
            except ValueError as e:
                return jsonify({'success': False, 'error': str(e)}), 400
            except Exception as e:
                logger.exception(f"Error getting anomalies: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/insights', methods=['GET'])
        def get_insights():
            """Get generated insights."""
            try:
                start_time, end_time = self._time_window(168)  # 1 week default

                insights = self.insight_generator.generate_insights(
                    start_time=start_time,
                    end_time=end_time
                )

                return jsonify({
                    'success': True,
                    'count': len(insights),
                    'insights': [i.to_dict() for i in insights]
                })
            except ValueError as e:
                return jsonify({'success': False, 'error': str(e)}), 400
            except Exception as e:
                logger.exception(f"Error getting insights: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/success-rate', methods=['GET'])
        def get_success_rate():
            """Get success rate statistics."""
            try:
                workflow_id = request.args.get('workflow_id')
                if not workflow_id:
                    return jsonify({'success': False, 'error': 'workflow_id required'}), 400

                hours = int(request.args.get('hours', 24))

                stats = self.success_rate_calculator.calculate_success_rate(
                    workflow_id=workflow_id,
                    time_window_hours=hours
                )

                return jsonify({
                    'success': True,
                    'stats': stats.to_dict()
                })
            except ValueError as e:
                return jsonify({'success': False, 'error': str(e)}), 400
            except Exception as e:
                logger.exception(f"Error getting success rate: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/reliability-ranking', methods=['GET'])
        def get_reliability_ranking():
            """Get workflow reliability rankings."""
            try:
                hours = int(request.args.get('hours', 168))  # 1 week default

                rankings = self.success_rate_calculator.rank_workflows_by_reliability(
                    time_window_hours=hours
                )

                return jsonify({
                    'success': True,
                    'rankings': [r.to_dict() for r in rankings]
                })
            except ValueError as e:
                return jsonify({'success': False, 'error': str(e)}), 400
            except Exception as e:
                logger.exception(f"Error getting reliability ranking: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/reports', methods=['POST'])
        def generate_report():
            """Generate a report."""
            try:
                # silent=True: treat a missing/invalid JSON body as a 400
                # below instead of letting Flask abort with a bare 415.
                data = request.get_json(silent=True) or {}
                if 'start_time' not in data or 'end_time' not in data:
                    return jsonify({
                        'success': False,
                        'error': 'start_time and end_time required'
                    }), 400

                from ..reporting.report_generator import ReportConfig
                config = ReportConfig(
                    title=data.get('title', 'Analytics Report'),
                    metric_types=data.get('metric_types', ['execution']),
                    start_time=datetime.fromisoformat(data['start_time']),
                    end_time=datetime.fromisoformat(data['end_time']),
                    workflow_ids=data.get('workflow_ids'),
                    include_charts=data.get('include_charts', True),
                    include_insights=data.get('include_insights', True),
                    format=data.get('format', 'json')
                )

                report_data = self.report_generator.generate_report(config)

                # Dispatch to the matching exporter; unknown formats fall back to JSON.
                exporters = {
                    'json': self.report_generator.export_json,
                    'csv': self.report_generator.export_csv,
                    'html': self.report_generator.export_html,
                    'pdf': self.report_generator.export_pdf,
                }
                export = exporters.get(config.format, self.report_generator.export_json)
                filepath = export(report_data)

                return jsonify({
                    'success': True,
                    'filepath': filepath
                })
            except ValueError as e:
                # datetime.fromisoformat raises ValueError on malformed timestamps.
                return jsonify({'success': False, 'error': str(e)}), 400
            except Exception as e:
                logger.exception(f"Error generating report: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/reports/<path:filename>', methods=['GET'])
        def download_report(filename):
            """Download a generated report."""
            try:
                # SECURITY: `filename` is user-controlled and `<path:...>` allows
                # slashes. Resolve and verify containment so traversal sequences
                # ('../..', absolute paths) cannot escape the reports directory.
                base_dir = self.report_generator.output_dir.resolve()
                filepath = (base_dir / filename).resolve()
                try:
                    filepath.relative_to(base_dir)
                except ValueError:
                    return jsonify({'success': False, 'error': 'Report not found'}), 404

                if not filepath.is_file():
                    return jsonify({'success': False, 'error': 'Report not found'}), 404

                return send_file(str(filepath), as_attachment=True)
            except Exception as e:
                logger.exception(f"Error downloading report: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/dashboards', methods=['GET'])
        def list_dashboards():
            """List dashboards."""
            try:
                owner = request.args.get('owner')
                dashboards = self.dashboard_manager.list_dashboards(owner=owner)

                return jsonify({
                    'success': True,
                    'dashboards': [d.to_dict() for d in dashboards]
                })
            except Exception as e:
                logger.exception(f"Error listing dashboards: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/dashboards', methods=['POST'])
        def create_dashboard():
            """Create a dashboard."""
            try:
                data = request.get_json(silent=True) or {}
                missing = [k for k in ('name', 'owner') if k not in data]
                if missing:
                    return jsonify({
                        'success': False,
                        'error': f"Missing required fields: {', '.join(missing)}"
                    }), 400

                dashboard = self.dashboard_manager.create_dashboard(
                    name=data['name'],
                    description=data.get('description', ''),
                    owner=data['owner'],
                    template_id=data.get('template_id')
                )

                return jsonify({
                    'success': True,
                    'dashboard': dashboard.to_dict()
                })
            except Exception as e:
                logger.exception(f"Error creating dashboard: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/dashboards/<dashboard_id>', methods=['GET'])
        def get_dashboard(dashboard_id):
            """Get dashboard by ID."""
            try:
                dashboard = self.dashboard_manager.get_dashboard(dashboard_id)
                if not dashboard:
                    return jsonify({'success': False, 'error': 'Dashboard not found'}), 404

                return jsonify({
                    'success': True,
                    'dashboard': dashboard.to_dict()
                })
            except Exception as e:
                logger.exception(f"Error getting dashboard: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/dashboards/<dashboard_id>', methods=['PUT'])
        def update_dashboard(dashboard_id):
            """Update dashboard."""
            try:
                data = request.get_json(silent=True) or {}
                dashboard = self.dashboard_manager.update_dashboard(dashboard_id, data)
                if not dashboard:
                    return jsonify({'success': False, 'error': 'Dashboard not found'}), 404

                return jsonify({
                    'success': True,
                    'dashboard': dashboard.to_dict()
                })
            except Exception as e:
                logger.exception(f"Error updating dashboard: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/dashboards/<dashboard_id>', methods=['DELETE'])
        def delete_dashboard(dashboard_id):
            """Delete dashboard."""
            try:
                success = self.dashboard_manager.delete_dashboard(dashboard_id)
                if not success:
                    return jsonify({'success': False, 'error': 'Dashboard not found'}), 404

                return jsonify({'success': True})
            except Exception as e:
                logger.exception(f"Error deleting dashboard: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

        @self.blueprint.route('/dashboard-templates', methods=['GET'])
        def get_dashboard_templates():
            """Get dashboard templates."""
            try:
                templates = self.dashboard_manager.get_templates()

                return jsonify({
                    'success': True,
                    'templates': [t.to_dict() for t in templates]
                })
            except Exception as e:
                logger.exception(f"Error getting templates: {e}")
                return jsonify({'success': False, 'error': str(e)}), 500

    def get_blueprint(self) -> Optional["Blueprint"]:
        """Return the Flask blueprint, or ``None`` when Flask is unavailable."""
        return self.blueprint
|
||||
Reference in New Issue
Block a user