v1.0 - Version stable: multi-PC, détection UI-DETR-1, 3 modes d'exécution

- Frontend v4 accessible sur réseau local (192.168.1.40)
- Ports ouverts: 3002 (frontend), 5001 (backend), 5004 (dashboard)
- Ollama GPU fonctionnel
- Self-healing interactif
- Dashboard confiance

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Dom
2026-01-29 11:23:51 +01:00
parent 21bfa3b337
commit a27b74cf22
1595 changed files with 412691 additions and 400 deletions

View File

@@ -0,0 +1,13 @@
"""Analytics reporting module."""
from .report_generator import ReportConfig, ReportGenerator, ScheduledReport

# Public API of the analytics reporting package.
__all__ = [
    'ReportGenerator',
    'ReportConfig',
    'ScheduledReport',
]

View File

@@ -0,0 +1,443 @@
"""Report generation for analytics data."""
import csv
import html
import json
import logging
from dataclasses import dataclass
from datetime import datetime
from io import StringIO
from pathlib import Path
from typing import Any, Dict, List, Optional

logger = logging.getLogger(__name__)
@dataclass
class ReportConfig:
    """Configuration describing what a report covers and how it is rendered.

    Attributes:
        title: Human-readable report title.
        metric_types: Names of the metric types to include.
        start_time: Start of the reporting window.
        end_time: End of the reporting window.
        workflow_ids: Optional workflow filter; None means all workflows.
        include_charts: Whether chart sections are rendered.
        include_insights: Whether generated insights are embedded.
        format: Output format: 'json', 'csv', 'html' or 'pdf'.
    """
    title: str
    metric_types: List[str]
    start_time: datetime
    end_time: datetime
    workflow_ids: Optional[List[str]] = None
    include_charts: bool = True
    include_insights: bool = True
    format: str = 'json'  # json, csv, html, pdf

    def to_dict(self) -> Dict:
        """Return a JSON-serializable dictionary of this configuration."""
        serialized = dict(
            title=self.title,
            metric_types=self.metric_types,
            start_time=self.start_time.isoformat(),
            end_time=self.end_time.isoformat(),
            workflow_ids=self.workflow_ids,
            include_charts=self.include_charts,
            include_insights=self.include_insights,
            format=self.format,
        )
        return serialized
@dataclass
class ScheduledReport:
    """A report definition that runs automatically on a cron schedule."""
    report_id: str
    config: ReportConfig
    schedule_cron: str  # cron expression controlling when the report runs
    delivery_method: str  # one of: email, webhook, file
    delivery_config: Dict[str, Any]
    enabled: bool = True
    last_run: Optional[datetime] = None
    next_run: Optional[datetime] = None

    def to_dict(self) -> Dict:
        """Return a JSON-serializable dictionary of this schedule."""
        def _iso(ts: Optional[datetime]) -> Optional[str]:
            # Timestamps may be unset before the first run.
            return ts.isoformat() if ts else None

        return {
            'report_id': self.report_id,
            'config': self.config.to_dict(),
            'schedule_cron': self.schedule_cron,
            'delivery_method': self.delivery_method,
            'delivery_config': self.delivery_config,
            'enabled': self.enabled,
            'last_run': _iso(self.last_run),
            'next_run': _iso(self.next_run),
        }
class ReportGenerator:
"""Generate analytics reports in various formats."""
def __init__(
    self,
    query_engine,  # QueryEngine
    performance_analyzer,  # PerformanceAnalyzer
    insight_generator,  # InsightGenerator
    output_dir: str = "data/analytics/reports"
):
    """
    Initialize the report generator.

    Args:
        query_engine: Query engine used to fetch raw metrics.
        performance_analyzer: Analyzer producing per-workflow statistics.
        insight_generator: Generator producing narrative insights.
        output_dir: Directory where exported reports are written;
            created (with parents) if it does not already exist.
    """
    self.query_engine = query_engine
    self.performance_analyzer = performance_analyzer
    self.insight_generator = insight_generator

    # Make sure the export destination exists before any report is written.
    self.output_dir = Path(output_dir)
    self.output_dir.mkdir(parents=True, exist_ok=True)

    # Registry of scheduled reports, keyed by report_id.
    self.scheduled_reports: Dict[str, ScheduledReport] = {}

    logger.info("ReportGenerator initialized")
def generate_report(
    self,
    config: ReportConfig
) -> Dict[str, Any]:
    """
    Build a report dictionary according to *config*.

    Args:
        config: Report configuration (time window, metric types, options).

    Returns:
        Dict with 'title', 'generated_at', 'time_range', 'metrics',
        'performance' and 'insights' sections.
    """
    logger.info(f"Generating report: {config.title}")

    report: Dict[str, Any] = {
        'title': config.title,
        'generated_at': datetime.now().isoformat(),
        'time_range': {
            'start': config.start_time.isoformat(),
            'end': config.end_time.isoformat()
        },
        'metrics': {},
        'performance': {},
        'insights': []
    }

    # Pull raw metrics for each requested metric type.
    for metric_type in config.metric_types:
        filters: Dict[str, Any] = {}
        if config.workflow_ids:
            filters['workflow_id'] = config.workflow_ids[0]  # Simplified
        report['metrics'][metric_type] = self.query_engine.query(
            metric_type=metric_type,
            start_time=config.start_time,
            end_time=config.end_time,
            filters=filters
        )

    # Per-workflow performance statistics over the same window.
    for workflow_id in (config.workflow_ids or []):
        stats = self.performance_analyzer.analyze_performance(
            workflow_id=workflow_id,
            start_time=config.start_time,
            end_time=config.end_time
        )
        report['performance'][workflow_id] = stats.to_dict()

    # Optional narrative insights.
    if config.include_insights:
        generated = self.insight_generator.generate_insights(
            start_time=config.start_time,
            end_time=config.end_time
        )
        report['insights'] = [item.to_dict() for item in generated]

    return report
def export_json(
    self,
    report_data: Dict[str, Any],
    filename: Optional[str] = None
) -> str:
    """
    Write *report_data* to a JSON file in the output directory.

    Args:
        report_data: Report data produced by generate_report().
        filename: Output filename; a timestamped name is used when None.

    Returns:
        Path to the exported file as a string.
    """
    if filename is None:
        filename = datetime.now().strftime('report_%Y%m%d_%H%M%S.json')
    filepath = self.output_dir / filename

    with open(filepath, 'w', encoding='utf-8') as handle:
        json.dump(report_data, handle, indent=2)

    logger.info(f"Exported JSON report: {filepath}")
    return str(filepath)
def export_csv(
    self,
    report_data: Dict[str, Any],
    filename: Optional[str] = None
) -> str:
    """
    Write the metrics section of *report_data* to a CSV file.

    Each metric record becomes one row, tagged with its 'metric_type'.
    The file is always created — even when there are no metric rows —
    so the returned path is guaranteed to exist on disk.

    Args:
        report_data: Report data produced by generate_report().
        filename: Output filename; a timestamped name is used when None.

    Returns:
        Path to the exported file as a string.
    """
    if filename is None:
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        filename = f"report_{timestamp}.csv"
    filepath = self.output_dir / filename

    # Flatten the per-type metric lists into one list of row dicts.
    rows = []
    for metric_type, metrics in report_data.get('metrics', {}).items():
        for metric in metrics:
            rows.append({'metric_type': metric_type, **metric})

    # BUG FIX: previously the file was only opened when rows existed, so an
    # empty report returned a path to a file that was never written.
    with open(filepath, 'w', newline='', encoding='utf-8') as f:
        if rows:
            # Union of keys across all rows, sorted for a stable column order.
            fieldnames = sorted({key for row in rows for key in row})
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(rows)

    logger.info(f"Exported CSV report: {filepath}")
    return str(filepath)
def export_html(
    self,
    report_data: Dict[str, Any],
    filename: Optional[str] = None
) -> str:
    """
    Render *report_data* as HTML and write it to the output directory.

    Args:
        report_data: Report data produced by generate_report().
        filename: Output filename; a timestamped name is used when None.

    Returns:
        Path to the exported file as a string.
    """
    if filename is None:
        filename = datetime.now().strftime('report_%Y%m%d_%H%M%S.html')
    filepath = self.output_dir / filename

    document = self._generate_html(report_data)
    with open(filepath, 'w', encoding='utf-8') as handle:
        handle.write(document)

    logger.info(f"Exported HTML report: {filepath}")
    return str(filepath)
def _generate_html(self, report_data: Dict[str, Any]) -> str:
"""Generate HTML report."""
html = f"""<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>{report_data['title']}</title>
<style>
body {{ font-family: Arial, sans-serif; margin: 20px; }}
h1 {{ color: #333; }}
h2 {{ color: #666; margin-top: 30px; }}
table {{ border-collapse: collapse; width: 100%; margin: 20px 0; }}
th, td {{ border: 1px solid #ddd; padding: 8px; text-align: left; }}
th {{ background-color: #4CAF50; color: white; }}
.insight {{ background-color: #f9f9f9; padding: 15px; margin: 10px 0; border-left: 4px solid #4CAF50; }}
.metric-section {{ margin: 20px 0; }}
</style>
</head>
<body>
<h1>{report_data['title']}</h1>
<p><strong>Generated:</strong> {report_data['generated_at']}</p>
<p><strong>Time Range:</strong> {report_data['time_range']['start']} to {report_data['time_range']['end']}</p>
"""
# Add performance section
if report_data.get('performance'):
html += "<h2>Performance Analysis</h2>\n"
for workflow_id, perf in report_data['performance'].items():
html += f"<div class='metric-section'>\n"
html += f"<h3>Workflow: {workflow_id}</h3>\n"
html += f"<p>Average Duration: {perf.get('avg_duration', 0):.2f}s</p>\n"
html += f"<p>Success Rate: {perf.get('success_rate', 0):.1f}%</p>\n"
html += "</div>\n"
# Add insights section
if report_data.get('insights'):
html += "<h2>Insights</h2>\n"
for insight in report_data['insights']:
html += f"<div class='insight'>\n"
html += f"<strong>{insight.get('title', 'Insight')}</strong>\n"
html += f"<p>{insight.get('description', '')}</p>\n"
html += "</div>\n"
html += "</body>\n</html>"
return html
def export_pdf(
    self,
    report_data: Dict[str, Any],
    filename: Optional[str] = None
) -> str:
    """
    Export the report as a PDF.

    Note: requires the optional reportlab library; when it is not
    installed the report is exported as HTML instead.

    Args:
        report_data: Report data produced by generate_report().
        filename: Output filename; a timestamped name is used when None.

    Returns:
        Path to the exported file (PDF, or HTML on fallback).
    """
    # Keep the try body minimal: only the optional imports can raise
    # ImportError. Previously the whole method body was inside the try,
    # which is harder to read and risks masking unrelated ImportErrors.
    try:
        from reportlab.lib.pagesizes import letter
        from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table
        from reportlab.lib.styles import getSampleStyleSheet
        from reportlab.lib.units import inch
    except ImportError:
        logger.warning("reportlab not available, falling back to HTML")
        html_name = filename.replace('.pdf', '.html') if filename else None
        return self.export_html(report_data, html_name)

    if filename is None:
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        filename = f"report_{timestamp}.pdf"
    filepath = self.output_dir / filename

    # Build the PDF "story" (a flowing list of layout elements).
    doc = SimpleDocTemplate(str(filepath), pagesize=letter)
    styles = getSampleStyleSheet()
    story = []

    # Title and generation metadata.
    story.append(Paragraph(report_data['title'], styles['Title']))
    story.append(Spacer(1, 0.2 * inch))
    story.append(Paragraph(f"Generated: {report_data['generated_at']}", styles['Normal']))
    story.append(Spacer(1, 0.3 * inch))

    # Per-workflow performance summary.
    if report_data.get('performance'):
        story.append(Paragraph("Performance Analysis", styles['Heading2']))
        story.append(Spacer(1, 0.1 * inch))
        for workflow_id, perf in report_data['performance'].items():
            text = f"<b>Workflow:</b> {workflow_id}<br/>"
            text += f"Average Duration: {perf.get('avg_duration', 0):.2f}s<br/>"
            text += f"Success Rate: {perf.get('success_rate', 0):.1f}%"
            story.append(Paragraph(text, styles['Normal']))
            story.append(Spacer(1, 0.2 * inch))

    doc.build(story)
    logger.info(f"Exported PDF report: {filepath}")
    return str(filepath)
def schedule_report(
    self,
    report: ScheduledReport
) -> None:
    """
    Register a report for automatic generation.

    Args:
        report: Scheduled report configuration; replaces any existing
            entry with the same report_id.
    """
    self.scheduled_reports[report.report_id] = report
    logger.info(f"Scheduled report: {report.report_id}")
def get_scheduled_reports(self) -> List[ScheduledReport]:
    """Return every registered scheduled report, in registration order."""
    return [*self.scheduled_reports.values()]
def run_scheduled_report(self, report_id: str) -> Optional[str]:
"""
Run a scheduled report.
Args:
report_id: Report identifier
Returns:
Path to generated report or None
"""
report = self.scheduled_reports.get(report_id)
if not report or not report.enabled:
return None
# Generate report
report_data = self.generate_report(report.config)
# Export based on format
if report.config.format == 'json':
filepath = self.export_json(report_data)
elif report.config.format == 'csv':
filepath = self.export_csv(report_data)
elif report.config.format == 'html':
filepath = self.export_html(report_data)
elif report.config.format == 'pdf':
filepath = self.export_pdf(report_data)
else:
filepath = self.export_json(report_data)
# Update last run
report.last_run = datetime.now()
# Deliver report
self._deliver_report(report, filepath)
return filepath
def _deliver_report(
    self,
    report: ScheduledReport,
    filepath: str
) -> None:
    """Dispatch a generated report via the schedule's delivery method."""
    method = report.delivery_method
    if method == 'file':
        # File delivery is implicit: the exporter already wrote to disk.
        logger.info(f"Report saved to: {filepath}")
    elif method == 'email':
        # TODO: Implement email delivery
        logger.info(f"Email delivery not yet implemented: {filepath}")
    elif method == 'webhook':
        # TODO: Implement webhook delivery
        logger.info(f"Webhook delivery not yet implemented: {filepath}")