Initial commit

This commit is contained in:
Dom
2026-03-05 00:20:25 +01:00
commit dcd4de9945
1954 changed files with 669380 additions and 0 deletions

View File

@@ -0,0 +1,85 @@
#!/usr/bin/env python3
"""
Diagnostic complet du système
"""
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent / "geniusia2"))
from core.config import get_config
from core.logger import Logger
from core.embeddings_manager import EmbeddingsManager
from core.learning_manager import LearningManager
def main():
    """Print a full diagnostic: learned tasks, FAISS index size, config, verdict."""
    bar = "=" * 60
    print("\n" + bar)
    print(" 🔍 DIAGNOSTIC COMPLET")
    print(bar + "\n")

    cfg = get_config()
    log = Logger()

    # 1. Learned tasks
    print("📊 Tâches apprises:")
    emb_mgr = EmbeddingsManager(logger=log)
    learn_mgr = LearningManager(
        emb_mgr,
        log,
        cfg,
        profiles_path="geniusia2/data/user_profiles"
    )
    learned = learn_mgr.get_all_tasks()
    print(f" Total: {len(learned)} tâches\n")
    if learned:
        print(" Dernières tâches:")
        for entry in learned[-5:]:
            print(f" - {entry['task_name']} (confiance: {entry['confidence_score']:.2f})")

    # 2. FAISS index
    print("\n🧠 Index FAISS:")
    n_embeddings = emb_mgr.get_index_size()
    print(f" Embeddings: {n_embeddings}")

    # 3. Relevant configuration values (with the same defaults the app uses)
    assist_cfg = cfg.get('assist', {})
    print("\n⚙️ Configuration:")
    print(f" enforce_whitelist: {cfg['security']['enforce_whitelist']}")
    print(f" similarity_threshold: {assist_cfg.get('similarity_threshold', 0.75)}")
    print(f" suggestion_timeout: {assist_cfg.get('suggestion_timeout', 10.0)}")

    # 4. Verdict
    print("\n" + bar)
    print(" 💡 DIAGNOSTIC")
    print(bar + "\n")
    if not learned:
        print("❌ Aucune tâche apprise")
        print(" Solution: Fais 3x la même action")
    elif n_embeddings == 0:
        print("❌ Index FAISS vide")
        print(" Solution: Reconstruis l'index")
        print(" $ geniusia2/venv/bin/python rebuild_faiss_simple.py")
    else:
        for line in (
            "✅ Tâches et index OK",
            "",
            "Le Mode Assisté devrait fonctionner !",
            "",
            "Pour tester:",
            " 1. Lance l'app: cd geniusia2 && ./run.sh",
            " 2. Clique sur 'Start'",
            " 3. Fais 3x la MÊME action (ex: 3 clics au même endroit)",
            " 4. Attends 'Tâche apprise !'",
            " 5. Refais 1x la MÊME action",
            " 6. L'overlay devrait apparaître",
            "",
            "⚠️ IMPORTANT:",
            " - Il faut faire la MÊME action 3 fois",
            " - Pas un workflow complexe",
            " - Juste: clic, clic, clic au même endroit",
        ):
            print(line)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,128 @@
#!/usr/bin/env python3
"""
Script de diagnostic pour vérifier l'état des données.
"""
import sys
import pickle
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent / "geniusia2"))
def check_faiss_index():
    """Report whether the FAISS index files exist and how big they are."""
    rule = "=" * 60
    print(rule)
    print("📊 VÉRIFICATION INDEX FAISS")
    print(rule)

    base = Path("geniusia2/data/faiss_index")
    idx_path = base / "embeddings.index"
    meta_path = base / "metadata.pkl"

    idx_status = '✅ Existe' if idx_path.exists() else '❌ Manquant'
    meta_status = '✅ Existe' if meta_path.exists() else '❌ Manquant'
    print(f"\n📁 Répertoire: {base}")
    print(f" Index FAISS: {idx_status}")
    print(f" Métadonnées: {meta_status}")

    if idx_path.exists():
        print(f" Taille index: {idx_path.stat().st_size:,} bytes")

    if meta_path.exists():
        # metadata.pkl holds one entry per indexed embedding
        with open(meta_path, 'rb') as fh:
            entries = pickle.load(fh)
        print(f" Nombre d'embeddings: {len(entries)}")
def check_tasks():
    """Inspect saved task profiles and report which artifacts each one has."""
    import json  # function-local on purpose: keeps the module import list unchanged

    print("\n" + "="*60)
    print("📋 VÉRIFICATION TÂCHES")
    print("="*60)
    profiles_dir = Path("geniusia2/data/user_profiles")
    # Fix: iterdir() raises FileNotFoundError when the profiles directory
    # does not exist yet (fresh install) — report instead of crashing.
    if not profiles_dir.is_dir():
        print(f"\n❌ Répertoire non trouvé: {profiles_dir}")
        return
    task_dirs = [d for d in profiles_dir.iterdir() if d.is_dir() and d.name.startswith("task_")]
    print(f"\n📁 Nombre de tâches: {len(task_dirs)}")
    # Show a small sample of tasks (first five directories)
    print("\n📝 Échantillon de tâches:")
    for i, task_dir in enumerate(task_dirs[:5]):
        print(f"\n{i+1}. {task_dir.name}")
        # Check which per-task files exist
        metadata_file = task_dir / "metadata.json"
        signatures_file = task_dir / "signatures.pkl"
        screenshots_dir = task_dir / "screenshots"
        # Fix: status markers were empty strings (lost characters) — restore
        # ✅/❌ to match the style used by check_faiss_index().
        print(f" metadata.json: {'✅' if metadata_file.exists() else '❌'}")
        print(f" signatures.pkl: {'✅' if signatures_file.exists() else '❌'}")
        print(f" screenshots/: {'✅' if screenshots_dir.exists() else '❌'}")
        # Read metadata
        if metadata_file.exists():
            with open(metadata_file, 'r') as f:
                metadata = json.load(f)
            print(f" Nom: {metadata.get('task_name', 'N/A')}")
            print(f" Observations: {metadata.get('observation_count', 0)}")
        # Read signatures
        if signatures_file.exists():
            with open(signatures_file, 'rb') as f:
                signatures = pickle.load(f)
            print(f" Signatures: {len(signatures)} actions")
            # Flag whether embeddings/screenshots were persisted with the actions
            has_embeddings = any('embedding' in sig for sig in signatures)
            has_screenshots = any('screenshot' in sig for sig in signatures)
            print(f" Embeddings: {'✅' if has_embeddings else '❌'}")
            print(f" Screenshots: {'✅' if has_screenshots else '❌'}")
def check_logs():
    """Summarize the encrypted log files: count, total size, five newest names."""
    header = "=" * 60
    print("\n" + header)
    print("📝 VÉRIFICATION LOGS")
    print(header)

    log_dir = Path("geniusia2/data/logs")
    encrypted = list(log_dir.glob("*.enc"))
    print(f"\n📁 Nombre de fichiers de logs: {len(encrypted)}")

    if encrypted:
        combined = sum(item.stat().st_size for item in encrypted)
        print(f" Taille totale: {combined:,} bytes ({combined/1024/1024:.2f} MB)")
        print("\n📅 Logs par date:")
        # Sorted by filename (presumably date-based — see the header above);
        # show only the last five.
        for item in sorted(encrypted)[-5:]:
            print(f" {item.name}: {item.stat().st_size:,} bytes")
def main():
    """Run every data check in order, then print a short summary."""
    print("\n🔍 DIAGNOSTIC DES DONNÉES RPA VISION V2\n")

    for check in (check_faiss_index, check_tasks, check_logs):
        check()

    closing = "=" * 60
    print("\n" + closing)
    print("✅ DIAGNOSTIC TERMINÉ")
    print(closing)
    print("\n💡 RÉSUMÉ:")
    print(" - Les tâches sont créées et sauvegardées")
    print(" - Les signatures.pkl contiennent les actions")
    print(" - ⚠️ Vérifier si les embeddings et screenshots sont dans les signatures")
    print(" - ⚠️ Vérifier si l'index FAISS est créé")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,86 @@
#!/usr/bin/env python3
"""
Diagnostic de la liste blanche
"""
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent / "geniusia2"))
from core.config import get_config
from core.logger import Logger
from core.whitelist_manager import WhitelistManager
def main():
    """Diagnose the window whitelist: config flags, entries, and sample checks."""
    bar = "=" * 60
    print("\n" + bar)
    print(" 🔍 DIAGNOSTIC LISTE BLANCHE")
    print(bar + "\n")

    # Load config and logger
    cfg = get_config()
    log = Logger()
    security = cfg['security']
    print("📋 Configuration:")
    print(f" enforce_whitelist: {security.get('enforce_whitelist')}")
    print(f" ask_before_new_window: {security.get('ask_before_new_window')}")
    print()

    # Inspect the current whitelist
    manager = WhitelistManager(logger=log)
    print("🛡️ Liste Blanche:")
    allowed_windows = manager.get_whitelist()
    if allowed_windows:
        print(f" {len(allowed_windows)} fenêtre(s) autorisée(s):")
        for name in allowed_windows:
            print(f" - {name}")
    else:
        print(" ❌ Liste vide (aucune fenêtre autorisée)")
    print()

    # Probe a few representative window titles against the whitelist
    print("🧪 Test de fenêtres:")
    for name in ("Kiro", "Firefox", "Chrome", "Terminal", "Code", "Unknown Window"):
        verdict = "✅ Autorisée" if manager.is_window_allowed(name) else "❌ Bloquée"
        print(f" {name:20} : {verdict}")
    print()

    print(bar)
    print(" 💡 SOLUTION")
    print(bar + "\n")
    if not allowed_windows:
        for line in (
            "La liste blanche est vide !",
            "",
            "Options :",
            "",
            "1⃣ Mode Permissif (Recommandé pour les tests)",
            " - Dans l'app, le bouton 'Mode: Tout Autoriser' devrait être activé",
            " - Vérifie que enforce_whitelist = False dans la config",
            "",
            "2⃣ Ajouter des fenêtres à la liste blanche",
            " - Dans l'app, clique sur 'Gérer la Liste Blanche'",
            " - Ajoute 'Kiro', 'Firefox', etc.",
            "",
        ):
            print(line)
    else:
        for line in (
            "La liste blanche contient des fenêtres.",
            "Si tu veux observer d'autres fenêtres :",
            " - Ajoute-les via le bouton 'Gérer la Liste Blanche'",
            " - Ou active le 'Mode: Tout Autoriser'",
            "",
        ):
            print(line)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,278 @@
#!/usr/bin/env python3
"""
Exemple de workflow complet : Capture → Apprentissage → Rejeu
Ce script démontre le cycle complet du système RPA Vision V2 :
1. Capture d'événements utilisateur
2. Détection de patterns répétitifs
3. Apprentissage de tâches
4. Rejeu intelligent avec reconnaissance visuelle
"""
import asyncio
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent / "geniusia2"))
from core.event_capture import EventCapture
from core.learning_manager import LearningManager
from core.embeddings_manager import EmbeddingsManager
from core.task_replay import TaskReplayEngine
from core.utils.vision_utils import VisionUtils
from core.utils.input_utils import InputUtils
from core.logger import Logger
from core.config import get_config
class RPAWorkflowDemo:
    """End-to-end demo of the RPA Vision V2 workflow: capture → learn → replay."""

    def __init__(self):
        """Build and wire every component of the pipeline.

        NOTE(review): assumes the core constructors accept (logger, config)
        positionally in this order — confirm against the core modules.
        """
        self.config = get_config()
        self.logger = Logger()
        # Base components: embeddings store, screen analysis, input synthesis
        self.embeddings_manager = EmbeddingsManager(self.logger, self.config)
        self.vision_utils = VisionUtils(self.logger, self.config)
        self.input_utils = InputUtils(self.logger, self.config)
        # Learning manager: persists task profiles under profiles_path
        self.learning_manager = LearningManager(
            self.embeddings_manager,
            self.logger,
            self.config,
            profiles_path="geniusia2/data/user_profiles"
        )
        # Event capture: feeds observed user events into the learning manager
        self.event_capture = EventCapture(
            self.learning_manager,
            self.embeddings_manager,
            self.vision_utils,
            self.logger,
            self.config
        )
        # Replay engine: re-executes learned tasks using vision + input utils
        self.replay_engine = TaskReplayEngine(
            self.learning_manager,
            self.embeddings_manager,
            self.vision_utils,
            self.input_utils,
            self.logger,
            self.config
        )

    async def demo_capture_phase(self, duration: int = 30):
        """
        Phase 1: capture user events for a fixed amount of time.

        Args:
            duration: Capture duration in seconds.
        """
        print("\n" + "="*60)
        print("📹 PHASE 1 : CAPTURE D'ÉVÉNEMENTS")
        print("="*60)
        print(f"\nCapture pendant {duration} secondes...")
        print("Effectuez des actions répétitives pour que le système apprenne.\n")
        # Start capturing user events
        self.event_capture.start()
        # Countdown, one tick per second (end="\r" keeps it on a single line)
        for i in range(duration, 0, -1):
            print(f"⏱️ {i} secondes restantes...", end="\r")
            await asyncio.sleep(1)
        # Stop capturing
        self.event_capture.stop()
        print("\n✅ Capture terminée!")
        # Report capture statistics
        stats = self.event_capture.get_stats()
        print(f"\n📊 Statistiques de capture:")
        print(f" - Événements capturés: {stats['total_events']}")
        print(f" - Patterns détectés: {stats['patterns_detected']}")
        print(f" - Tâches créées: {stats['tasks_created']}")

    async def demo_learning_phase(self):
        """Phase 2: list the tasks learned so far.

        Returns:
            The list of learned-task dicts, or ``None`` when nothing was learned.
        """
        print("\n" + "="*60)
        print("🧠 PHASE 2 : APPRENTISSAGE")
        print("="*60)
        # Fetch the learned tasks
        tasks = self.learning_manager.get_all_tasks()
        if not tasks:
            print("\n⚠️ Aucune tâche apprise.")
            print(" Effectuez plus d'actions répétitives pendant la capture.")
            return None
        print(f"\n{len(tasks)} tâche(s) apprise(s):\n")
        for i, task in enumerate(tasks, 1):
            print(f"{i}. {task['task_name']}")
            print(f" Mode: {task['mode']}")
            print(f" Observations: {task['observation_count']}")
            print(f" Concordance: {task['concordance_rate']:.2%}")
            print(f" Confiance: {task['confidence_score']:.2%}")
            print()
        return tasks

    async def demo_replay_phase(self, task_id: str):
        """
        Phase 3: replay a learned task with per-step monitoring.

        Args:
            task_id: ID of the task to replay.

        Returns:
            The replay result dict (``success`` flag plus per-step entries).
        """
        print("\n" + "="*60)
        print("🎮 PHASE 3 : REJEU DE TÂCHE")
        print("="*60)
        print(f"\nTâche: {task_id}\n")
        # Per-step callback used to print progress while replaying
        def on_step(step_result):
            # NOTE(review): the "success"/"failed"/"pending" icons are empty
            # strings here — likely lost characters; confirm intended glyphs.
            status_icons = {
                "success": "",
                "failed": "",
                "not_found": "🔍",
                "pending": ""
            }
            icon = status_icons.get(step_result["status"], "")
            print(f"{icon} Étape {step_result['step']}: {step_result['description']}")
            if step_result["status"] == "success" and "location" in step_result:
                loc = step_result["location"]
                print(f" Position: ({loc['x']}, {loc['y']}) - Confiance: {loc.get('confidence', 0):.2%}")
        # Give the user a moment to bring the target UI to the foreground
        print("⏳ Démarrage du rejeu dans 3 secondes...")
        print(" (Préparez l'interface si nécessaire)\n")
        await asyncio.sleep(3)
        # Replay with the monitoring callback
        results = await self.replay_engine.replay_task_with_monitoring(
            task_id,
            on_step_completed=on_step
        )
        # Summarize the outcome
        print(f"\n📊 Résultats du rejeu:")
        print(f" Succès: {'' if results['success'] else ''}")
        success_count = sum(1 for s in results['steps'] if s['status'] == 'success')
        total_count = len(results['steps'])
        print(f" Actions réussies: {success_count}/{total_count}")
        return results

    async def run_complete_workflow(self, capture_duration: int = 30):
        """
        Run all three phases in sequence (capture, learn, optional replay).

        Args:
            capture_duration: Length of the capture phase in seconds.
        """
        print("\n" + "="*60)
        print("🚀 WORKFLOW COMPLET RPA VISION V2")
        print("="*60)
        print("\nCe workflow démontre :")
        print("1. 📹 Capture d'événements utilisateur")
        print("2. 🧠 Apprentissage automatique de tâches")
        print("3. 🎮 Rejeu intelligent avec reconnaissance visuelle")
        try:
            # Phase 1: capture
            await self.demo_capture_phase(capture_duration)
            # Phase 2: learning — stop early when nothing was learned
            tasks = await self.demo_learning_phase()
            if not tasks:
                return
            # Phase 3: optional interactive replay
            print("\n" + "="*60)
            print("Voulez-vous rejouer une tâche ?")
            for i, task in enumerate(tasks, 1):
                print(f"{i}. {task['task_name']}")
            choice = input("\nNuméro de la tâche (ou 'n' pour passer): ").strip()
            if choice.lower() != 'n' and choice.isdigit():
                task_index = int(choice) - 1
                if 0 <= task_index < len(tasks):
                    task_id = tasks[task_index]['task_id']
                    await self.demo_replay_phase(task_id)
            print("\n" + "="*60)
            print("✅ WORKFLOW TERMINÉ")
            print("="*60)
        except KeyboardInterrupt:
            print("\n\n⚠️ Workflow interrompu par l'utilisateur")
        except Exception as e:
            # Broad catch is acceptable at this top-level demo boundary:
            # report and dump the traceback instead of crashing the menu.
            print(f"\n❌ Erreur: {e}")
            import traceback
            traceback.print_exc()
async def main():
    """Interactive entry point: small menu over the demo workflow."""
    print("\n🎯 Démonstration RPA Vision V2")
    print("="*60)

    # Build the full pipeline once
    demo = RPAWorkflowDemo()

    print("\nOptions:")
    print("1. Workflow complet (capture + apprentissage + rejeu)")
    print("2. Seulement lister les tâches existantes")
    print("3. Rejouer une tâche existante")
    print("4. Quitter")
    selection = input("\nVotre choix: ").strip()

    if selection == "1":
        raw = input("Durée de capture (secondes, défaut=30): ").strip()
        seconds = int(raw) if raw.isdigit() else 30
        await demo.run_complete_workflow(seconds)
    elif selection == "2":
        await demo.demo_learning_phase()
    elif selection == "3":
        available = demo.replay_engine.list_available_tasks()
        if not available:
            print("\n❌ Aucune tâche disponible")
            return
        print("\nTâches disponibles:")
        for idx, task in enumerate(available, 1):
            print(f"{idx}. {task['task_name']} ({task['task_id']})")
        picked = input("\nNuméro de la tâche: ").strip()
        if picked.isdigit():
            pos = int(picked) - 1
            if 0 <= pos < len(available):
                await demo.demo_replay_phase(available[pos]['task_id'])
    elif selection == "4":
        print("Au revoir!")
    else:
        print("❌ Choix invalide")


if __name__ == "__main__":
    asyncio.run(main())

144
archive/old_scripts/read_pkl.py Executable file
View File

@@ -0,0 +1,144 @@
#!/usr/bin/env python3
"""
Script pour lire et afficher le contenu des fichiers .pkl (signatures).
"""
import sys
import pickle
import json
from pathlib import Path
import numpy as np
def read_pkl_file(pkl_path: Path):
    """Pretty-print the contents of a pickle file (task signatures).

    Handles lists of action dicts, plain dicts, and any other payload;
    read/unpickle errors are caught and reported instead of raised.
    """
    rule = '=' * 60
    print(f"\n{rule}")
    print(f"📄 Fichier: {pkl_path.name}")
    print(f"{rule}")

    def _show_field(name, val):
        # One line per field, summarized according to its type.
        if name == 'embedding' and isinstance(val, np.ndarray):
            print(f" {name}: numpy array shape {val.shape}, dtype {val.dtype}")
            print(f" Premiers éléments: {val[:5]}")
        elif name == 'screenshot' and val is not None:
            if isinstance(val, np.ndarray):
                print(f" {name}: numpy array shape {val.shape}")
            else:
                print(f" {name}: {type(val).__name__}")
        elif name == 'bbox' and isinstance(val, (list, tuple)):
            print(f" {name}: {val}")
        elif isinstance(val, (str, int, float, bool)):
            print(f" {name}: {val}")
        elif val is None:
            print(f" {name}: None")
        else:
            print(f" {name}: {type(val).__name__}")

    try:
        with open(pkl_path, 'rb') as fh:
            payload = pickle.load(fh)

        if isinstance(payload, list):
            print(f"\n📊 Type: Liste de {len(payload)} éléments\n")
            for idx, entry in enumerate(payload, 1):
                print(f"--- Action {idx} ---")
                if isinstance(entry, dict):
                    for name, val in entry.items():
                        _show_field(name, val)
                else:
                    print(f" {entry}")
                print()
        elif isinstance(payload, dict):
            print(f"\n📊 Type: Dictionnaire avec {len(payload)} clés\n")
            for name, val in payload.items():
                if isinstance(val, np.ndarray):
                    print(f" {name}: numpy array shape {val.shape}")
                elif isinstance(val, (list, dict)):
                    print(f" {name}: {type(val).__name__} ({len(val)} éléments)")
                else:
                    print(f" {name}: {val}")
        else:
            print(f"\n📊 Type: {type(payload).__name__}")
            print(f"\n{payload}")
    except Exception as e:
        print(f"\n❌ Erreur lors de la lecture: {e}")
        import traceback
        traceback.print_exc()
def main():
    """CLI entry point: list task profiles, or dump a pickle file.

    Usage:
        read_pkl.py <fichier.pkl>   dump an arbitrary pickle file
        read_pkl.py task_<id>       show a task's metadata then its signatures
        read_pkl.py --list          list every task profile
    """
    if len(sys.argv) < 2:
        print("Usage: python3 read_pkl.py <fichier.pkl>")
        print("\nOu pour lire une tâche spécifique:")
        print("python3 read_pkl.py task_fc1d3e52")
        print("\nOu pour lister toutes les tâches:")
        print("python3 read_pkl.py --list")
        return
    arg = sys.argv[1]
    if arg == "--list":
        # List every task profile directory
        profiles_dir = Path("geniusia2/data/user_profiles")
        # Fix: iterdir() raises FileNotFoundError when the directory is
        # missing (fresh checkout) — report and exit cleanly instead.
        if not profiles_dir.is_dir():
            print(f"❌ Répertoire non trouvé: {profiles_dir}")
            return
        task_dirs = sorted([d for d in profiles_dir.iterdir() if d.is_dir() and d.name.startswith("task_")])
        print(f"\n📋 Tâches disponibles ({len(task_dirs)}):\n")
        for i, task_dir in enumerate(task_dirs, 1):
            metadata_file = task_dir / "metadata.json"
            signatures_file = task_dir / "signatures.pkl"
            if metadata_file.exists():
                with open(metadata_file, 'r') as f:
                    metadata = json.load(f)
                task_name = metadata.get('task_name', 'N/A')
                obs_count = metadata.get('observation_count', 0)
            else:
                task_name = "N/A"
                obs_count = 0
            # Fix: marker was an empty string either way (lost characters) —
            # restore the ✅/❌ indicator.
            has_sig = "✅" if signatures_file.exists() else "❌"
            print(f"{i:3d}. {task_dir.name}")
            print(f" Nom: {task_name}")
            print(f" Observations: {obs_count}")
            print(f" Signatures: {has_sig}")
            print()
        return
    # Resolve the path to dump
    if arg.startswith("task_"):
        # Task ID: resolve to its signatures.pkl, and show metadata first
        pkl_path = Path(f"geniusia2/data/user_profiles/{arg}/signatures.pkl")
        metadata_path = Path(f"geniusia2/data/user_profiles/{arg}/metadata.json")
        if metadata_path.exists():
            print(f"\n{'='*60}")
            print(f"📋 Métadonnées de {arg}")
            print(f"{'='*60}\n")
            with open(metadata_path, 'r') as f:
                metadata = json.load(f)
            print(json.dumps(metadata, indent=2, ensure_ascii=False))
    else:
        # Plain file path given on the command line
        pkl_path = Path(arg)
        if not pkl_path.exists():
            print(f"❌ Fichier non trouvé: {pkl_path}")
            return
    read_pkl_file(pkl_path)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,149 @@
#!/usr/bin/env python3
"""
Script pour reconstruire l'index FAISS à partir des tâches existantes.
"""
import sys
import pickle
import json
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent / "geniusia2"))
from core.embeddings_manager import EmbeddingsManager
from core.logger import Logger
from core.config import get_config
def rebuild_index():
    """Rebuild the FAISS index from the signatures saved with each task.

    Walks every ``task_*`` directory under ``geniusia2/data/user_profiles``,
    re-adds each action embedding (with its metadata) to the index, saves
    it, then runs one similarity search as a smoke test.
    """
    print("="*60)
    print("🔨 RECONSTRUCTION DE L'INDEX FAISS")
    print("="*60)
    # Initialize the core components
    config = get_config()
    logger = Logger()
    embeddings_manager = EmbeddingsManager(logger=logger)
    # NOTE(review): iterdir() raises FileNotFoundError if the profiles
    # directory does not exist — assumed present on a working install.
    profiles_dir = Path("geniusia2/data/user_profiles")
    task_dirs = [d for d in profiles_dir.iterdir() if d.is_dir() and d.name.startswith("task_")]
    print(f"\n📁 Nombre de tâches trouvées: {len(task_dirs)}")
    total_embeddings = 0
    total_actions = 0
    # Walk every task directory
    for i, task_dir in enumerate(task_dirs, 1):
        signatures_file = task_dir / "signatures.pkl"
        metadata_file = task_dir / "metadata.json"
        if not signatures_file.exists():
            # Nothing to re-index for this task
            continue
        # Load the task name from metadata when available
        task_name = "Unknown"
        if metadata_file.exists():
            with open(metadata_file, 'r') as f:
                metadata = json.load(f)
            task_name = metadata.get('task_name', 'Unknown')
        # Load the recorded action signatures
        with open(signatures_file, 'rb') as f:
            signatures = pickle.load(f)
        total_actions += len(signatures)
        # Re-add each embedding (skipping actions recorded without one)
        for j, signature in enumerate(signatures):
            embedding = signature.get('embedding')
            if embedding is not None:
                # Metadata stored alongside the vector in the index
                meta = {
                    "task_id": task_dir.name,
                    "task_name": task_name,
                    "action_index": j,
                    "action_type": signature.get('action_type', 'unknown'),
                    "description": signature.get('description', ''),
                    "window": signature.get('window', ''),
                    "timestamp": signature.get('timestamp', '')
                }
                # Add to the index
                embeddings_manager.add_to_index(embedding, meta)
                total_embeddings += 1
        # Progress report every 10 tasks
        if i % 10 == 0:
            print(f" Traité {i}/{len(task_dirs)} tâches...")
    print(f"\n✅ Traitement terminé:")
    print(f" - Tâches traitées: {len(task_dirs)}")
    print(f" - Actions totales: {total_actions}")
    print(f" - Embeddings ajoutés: {total_embeddings}")
    # Persist the rebuilt index to disk
    print(f"\n💾 Sauvegarde de l'index FAISS...")
    embeddings_manager.save_index()
    # Verify that the expected index files were written
    faiss_dir = Path("geniusia2/data/faiss_index")
    index_file = faiss_dir / "embeddings.index"
    metadata_file = faiss_dir / "metadata.pkl"
    if index_file.exists() and metadata_file.exists():
        index_size = index_file.stat().st_size
        meta_size = metadata_file.stat().st_size
        print(f"\n✅ Index FAISS créé avec succès!")
        print(f" - embeddings.index: {index_size:,} bytes")
        print(f" - metadata.pkl: {meta_size:,} bytes")
        # Smoke test: search the index with one known embedding
        print(f"\n🔍 Test de recherche...")
        if total_embeddings > 0:
            # Use the first embedding found in the first task that has a
            # signatures file, then stop (both breaks exit after one test).
            for task_dir in task_dirs:
                signatures_file = task_dir / "signatures.pkl"
                if signatures_file.exists():
                    with open(signatures_file, 'rb') as f:
                        signatures = pickle.load(f)
                    for sig in signatures:
                        if sig.get('embedding') is not None:
                            test_embedding = sig['embedding']
                            results = embeddings_manager.search_similar(test_embedding, k=3)
                            print(f" Résultats de recherche: {len(results)} trouvés")
                            for r in results[:3]:
                                print(f" - Similarité: {r['similarity']:.3f} | {r['metadata'].get('description', 'N/A')}")
                            break
                    break
    else:
        print(f"\n❌ Erreur: Index non créé")
def main():
    """Entry point: rebuild the index and report either success or the error."""
    print("\n🔧 RECONSTRUCTION DE L'INDEX FAISS\n")
    try:
        rebuild_index()
    except Exception as e:
        # Top-level boundary: report and dump the traceback instead of crashing
        print(f"\n❌ Erreur: {e}")
        import traceback
        traceback.print_exc()
    else:
        closing = "=" * 60
        print("\n" + closing)
        print("✅ RECONSTRUCTION TERMINÉE")
        print(closing)
        print("\n💡 L'index FAISS est maintenant disponible pour:")
        for usage in (" - Recherche de similarité",
                      " - Suggestions d'actions",
                      " - Rejeu intelligent"):
            print(usage)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Wrapper script: rebuild the FAISS index using the project's venv Python.
echo "🔨 Reconstruction de l'index FAISS..."
echo ""
# The rebuild script needs the venv interpreter (and its installed
# dependencies) — bail out with setup instructions if it is missing.
if [ ! -d "geniusia2/venv" ]; then
    echo "❌ Environnement virtuel non trouvé"
    echo " Exécutez d'abord: cd geniusia2 && ./setup.sh"
    exit 1
fi
# Run the rebuild with the venv interpreter, not the system Python
geniusia2/venv/bin/python rebuild_faiss_simple.py
echo ""
echo "✅ Terminé !"
echo ""
echo "Vérification:"
# Show the resulting index files so the user can confirm they were written
ls -lh geniusia2/data/faiss_index/