feat(dashboard): page Base de connaissances — métriques FAISS, sessions, patterns
Some checks failed
security-audit / Bandit (scan statique) (push) Successful in 12s
security-audit / pip-audit (CVE dépendances) (push) Successful in 11s
security-audit / Scan secrets (grep) (push) Successful in 8s
tests / Lint (ruff + black) (push) Successful in 13s
tests / Tests unitaires (sans GPU) (push) Failing after 14s
tests / Tests sécurité (critique) (push) Has been skipped
Some checks failed
security-audit / Bandit (scan statique) (push) Successful in 12s
security-audit / pip-audit (CVE dépendances) (push) Successful in 11s
security-audit / Scan secrets (grep) (push) Successful in 8s
tests / Lint (ruff + black) (push) Successful in 13s
tests / Tests unitaires (sans GPU) (push) Failing after 14s
tests / Tests sécurité (critique) (push) Has been skipped
Nouvelle page /knowledge-base avec : - Mémoire visuelle : 331 vecteurs FAISS / 13666 embeddings (alerte consolidation) - Sessions observées : 56 sessions, 6.66 Go, 3 machines - Réflexes natifs : 16 patterns UI en 6 catégories - Workflows appris : 29 Onglet 📚 Connaissances ajouté dans toute la navigation. Tout en français, dark theme, zéro jargon. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -2445,6 +2445,170 @@ def proxy_audit(endpoint):
|
||||
return jsonify({'error': str(e)}), 500
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Base de connaissances — État mémoire et apprentissages
|
||||
# =============================================================================
|
||||
|
||||
@app.route('/knowledge-base')
def knowledge_base_page():
    """Serve the knowledge-base dashboard page."""
    template = 'knowledge_base.html'
    return render_template(template)
|
||||
|
||||
|
||||
@app.route('/api/knowledge-base/stats')
def knowledge_base_stats():
    """Return every knowledge-base metric as a single JSON payload.

    Aggregates four independent metric groups, each computed by its own
    best-effort helper below:
      faiss     -- FAISS vector-index statistics
      sessions  -- observed shadow-session statistics
      patterns  -- native UI-pattern statistics
      workflows -- learned-workflow count
    """
    # NOTE: removed the unused `import glob as glob_module` that was here.
    result = {
        'faiss': _kb_faiss_stats(),
        'sessions': _kb_sessions_stats(),
        'patterns': _kb_patterns_stats(),
        'workflows': _kb_workflows_stats(),
    }
    return jsonify(result)
|
||||
|
||||
|
||||
def _kb_faiss_stats() -> dict:
    """FAISS index statistics, computed best-effort from files on disk.

    Returns a dict with:
      vectors_indexed     -- vector count from the index (0 when unknown)
      embeddings_computed -- number of .npy embedding files on disk
      index_size_mb       -- human-readable index file size ("Mo" = French MB)
      available           -- True only when the vector count could be read
    """
    faiss_index_path = DATA_PATH / "faiss_index" / "main.index"
    embeddings_dir = LIVE_SESSIONS_PATH / "embeddings"

    vectors_indexed = 0
    index_size_mb = "0 Mo"
    available = False

    if faiss_index_path.exists():
        # File size on disk.
        size_bytes = faiss_index_path.stat().st_size
        index_size_mb = f"{size_bytes / (1024 * 1024):.1f} Mo"

        # Vector count: prefer reading the index with faiss itself.
        try:
            import faiss
            index = faiss.read_index(str(faiss_index_path))
            vectors_indexed = index.ntotal
            available = True
        except ImportError:
            # faiss not installed — fall back to the sidecar metadata file.
            metadata_path = DATA_PATH / "faiss_index" / "main.metadata"
            if metadata_path.exists():
                try:
                    meta = json.loads(metadata_path.read_text())
                    vectors_indexed = meta.get('ntotal', 0)
                    available = True
                except Exception:
                    # Corrupt/unreadable metadata: keep the defaults.
                    pass
        except Exception:
            # Unreadable/corrupt index: best-effort, keep the defaults.
            pass
    # No `else: available = False` needed — it was initialised False above.

    # Count computed embeddings (.npy files).
    embeddings_computed = 0
    if embeddings_dir.exists():
        embeddings_computed = len(list(embeddings_dir.glob("*.npy")))

    return {
        'vectors_indexed': vectors_indexed,
        'embeddings_computed': embeddings_computed,
        'index_size_mb': index_size_mb,
        'available': available,
    }
|
||||
|
||||
|
||||
def _kb_sessions_stats() -> dict:
    """Shadow-session statistics: total count, total volume, per-machine info."""
    machines = []
    total_sessions = 0
    total_bytes = 0

    if LIVE_SESSIONS_PATH.exists():
        for entry in sorted(LIVE_SESSIONS_PATH.iterdir()):
            # Only directories matter; the embeddings folder is not a machine.
            if not entry.is_dir() or entry.name == 'embeddings':
                continue

            if entry.name.startswith('sess_'):
                # Orphan session sitting at the root of the tree.
                total_sessions += 1
                total_bytes += _dir_size(entry)
                continue

            # Machine directory: its sessions are the sess_* subdirectories.
            sessions = [
                child for child in entry.iterdir()
                if child.is_dir() and child.name.startswith('sess_')
            ]
            total_sessions += len(sessions)
            total_bytes += _dir_size(entry)

            if not sessions:
                continue

            # Most recently touched session gives the last-activity timestamp.
            newest = max(sessions, key=lambda child: child.stat().st_mtime)
            stamp = datetime.fromtimestamp(newest.stat().st_mtime)
            machines.append({
                'machine_id': entry.name,
                'session_count': len(sessions),
                'last_activity': stamp.strftime('%Y-%m-%d %H:%M'),
            })

    # Human-readable total volume (French units: Go/Mo/Ko).
    gib = 1024 ** 3
    mib = 1024 ** 2
    if total_bytes >= gib:
        total_volume = f"{total_bytes / (1024**3):.2f} Go"
    elif total_bytes >= mib:
        total_volume = f"{total_bytes / (1024**2):.1f} Mo"
    else:
        total_volume = f"{total_bytes / 1024:.0f} Ko"

    return {
        'total_sessions': total_sessions,
        'total_volume': total_volume,
        'machines': machines,
    }
|
||||
|
||||
|
||||
def _kb_patterns_stats() -> dict:
|
||||
"""Statistiques des patterns UI natifs."""
|
||||
try:
|
||||
from core.knowledge.ui_patterns import UIPatternLibrary
|
||||
lib = UIPatternLibrary()
|
||||
stats = lib.stats
|
||||
return {
|
||||
'total': stats.get('total', 0),
|
||||
'by_category': stats.get('by_category', {}),
|
||||
}
|
||||
except Exception:
|
||||
return {'total': 0, 'by_category': {}}
|
||||
|
||||
|
||||
def _kb_workflows_stats() -> dict:
    """Learned-workflow statistics: recursive count of .json workflow files."""
    workflows_path = DATA_PATH / "workflows"
    if not workflows_path.exists():
        return {'total': 0}
    # Count every .json file anywhere under the workflows directory.
    count = sum(1 for _ in workflows_path.rglob("*.json"))
    return {'total': count}
|
||||
|
||||
|
||||
def _dir_size(path: Path) -> int:
|
||||
"""Calcule la taille totale d'un dossier (non récursif profond pour la perf)."""
|
||||
total = 0
|
||||
try:
|
||||
for f in path.rglob('*'):
|
||||
if f.is_file():
|
||||
total += f.stat().st_size
|
||||
except (PermissionError, OSError):
|
||||
pass
|
||||
return total
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Main
|
||||
# =============================================================================
|
||||
|
||||
Reference in New Issue
Block a user