Compare commits
5 Commits
7f2bc6fe97
...
2fa864b5c7
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2fa864b5c7 | ||
|
|
10739c33fa | ||
|
|
39bea1b042 | ||
|
|
26b4e6d8ce | ||
|
|
4fb84b1090 |
124
scripts/backup_vwb_and_audit.sh
Executable file
124
scripts/backup_vwb_and_audit.sh
Executable file
@@ -0,0 +1,124 @@
|
|||||||
|
#!/bin/bash
################################################################################
# backup_vwb_and_audit.sh
#
# Critical daily backup before the Anouste POC.
# See the April 16, 2026 incident review: without a backup, losing
# workflows.db means directly losing client work. This script must run
# BEFORE any deployment at a client site.
#
# What it backs up:
#   - visual_workflow_builder/backend/instance/workflows.db
#       -> ~/backups/vwb/workflows_YYYY-MM-DD.db
#   - data/audit/*.jsonl
#       -> ~/backups/audit/audit_YYYY-MM-DD/
#
# Retention: 30 days (older backups are deleted automatically).
# Log: ~/backups/backup.log (append, timestamped).
#
# Installation (manual, not automatic):
#   crontab -e
#   0 2 * * * /home/dom/ai/rpa_vision_v3/scripts/backup_vwb_and_audit.sh
#   -> runs every day at 2 AM.
#
# Restore procedure: see ~/backups/README.md
#
# Author: Dom + Claude — April 16, 2026
################################################################################

set -u           # strict: unset variable = error
set -o pipefail  # a pipeline fails if any stage fails (find | wc below)

# ------------------------------------------------------------------------------
# Paths
# ------------------------------------------------------------------------------
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

SRC_WORKFLOWS_DB="$PROJECT_ROOT/visual_workflow_builder/backend/instance/workflows.db"
SRC_AUDIT_DIR="$PROJECT_ROOT/data/audit"

BACKUP_ROOT="${BACKUP_ROOT:-$HOME/backups}"
BACKUP_VWB_DIR="$BACKUP_ROOT/vwb"
BACKUP_AUDIT_DIR="$BACKUP_ROOT/audit"
BACKUP_LOG="$BACKUP_ROOT/backup.log"

RETENTION_DAYS="${RETENTION_DAYS:-30}"
DATE_TAG="$(date +%Y-%m-%d)"

# ------------------------------------------------------------------------------
# Helpers
# ------------------------------------------------------------------------------
# Log a message to the log file (timestamped) and to stdout.
# BUGFIX: the timestamp used to be computed ONCE at script start ($NOW), so
# every line of a long run carried the same stale time. It is now computed
# per call.
log() {
  local msg="$1"
  printf '[%s] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$msg" >> "$BACKUP_LOG"
  printf '%s\n' "$msg"
}

# ------------------------------------------------------------------------------
# Preparation
# ------------------------------------------------------------------------------
mkdir -p "$BACKUP_VWB_DIR" "$BACKUP_AUDIT_DIR"
touch "$BACKUP_LOG"

log "=== Début backup VWB + audit ==="

# ------------------------------------------------------------------------------
# 1. workflows.db
# ------------------------------------------------------------------------------
if [ -f "$SRC_WORKFLOWS_DB" ]; then
  DEST_DB="$BACKUP_VWB_DIR/workflows_${DATE_TAG}.db"
  # Prefer sqlite3 .backup when available (safe even if the backend holds
  # the DB open). Fallback: plain cp.
  if command -v sqlite3 > /dev/null 2>&1; then
    if sqlite3 "$SRC_WORKFLOWS_DB" ".backup '$DEST_DB'" 2>/dev/null; then
      size=$(stat -c %s "$DEST_DB" 2>/dev/null || echo "?")
      log " [OK] workflows.db → $DEST_DB (${size} octets) via sqlite3 .backup"
    else
      cp "$SRC_WORKFLOWS_DB" "$DEST_DB"
      log " [OK fallback] workflows.db → $DEST_DB via cp"
    fi
  else
    cp "$SRC_WORKFLOWS_DB" "$DEST_DB"
    log " [OK] workflows.db → $DEST_DB via cp (sqlite3 absent)"
  fi
else
  log " [WARN] workflows.db introuvable : $SRC_WORKFLOWS_DB"
fi

# ------------------------------------------------------------------------------
# 2. data/audit/*.jsonl
# ------------------------------------------------------------------------------
if [ -d "$SRC_AUDIT_DIR" ]; then
  DEST_AUDIT="$BACKUP_AUDIT_DIR/audit_${DATE_TAG}"
  mkdir -p "$DEST_AUDIT"
  copied=0
  # Glob (not ls/find) so paths with spaces are safe; the -f guard skips
  # the literal pattern when no .jsonl file exists.
  for f in "$SRC_AUDIT_DIR"/*.jsonl; do
    [ -f "$f" ] || continue
    cp "$f" "$DEST_AUDIT/"
    copied=$((copied + 1))
  done
  log " [OK] $copied fichiers audit → $DEST_AUDIT"
else
  log " [WARN] dossier audit introuvable : $SRC_AUDIT_DIR"
fi

# ------------------------------------------------------------------------------
# 3. Retention: delete backups older than RETENTION_DAYS days
# ------------------------------------------------------------------------------
# Remove old .db files from the vwb directory.
if [ -d "$BACKUP_VWB_DIR" ]; then
  deleted_db=$(find "$BACKUP_VWB_DIR" -maxdepth 1 -name "workflows_*.db" \
    -type f -mtime +"$RETENTION_DAYS" -print -delete 2>/dev/null | wc -l)
  [ "$deleted_db" -gt 0 ] && log " [CLEAN] $deleted_db backup(s) vwb > ${RETENTION_DAYS}j supprimé(s)"
fi
# Remove old dated audit directories.
if [ -d "$BACKUP_AUDIT_DIR" ]; then
  deleted_audit=$(find "$BACKUP_AUDIT_DIR" -maxdepth 1 -type d \
    -name "audit_*" -mtime +"$RETENTION_DAYS" -print -exec rm -rf {} \; 2>/dev/null | wc -l)
  [ "$deleted_audit" -gt 0 ] && log " [CLEAN] $deleted_audit backup(s) audit > ${RETENTION_DAYS}j supprimé(s)"
fi

log "=== Fin backup ==="
exit 0
|
||||||
11
visual_workflow_builder/.gitignore
vendored
11
visual_workflow_builder/.gitignore
vendored
@@ -39,3 +39,14 @@ backend/logs/
|
|||||||
|
|
||||||
# OS
|
# OS
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
|
|
||||||
|
# Artefacts de démarrage (run.sh / run_v4.sh)
|
||||||
|
*.pid
|
||||||
|
*.lock
|
||||||
|
.backend.pid
|
||||||
|
.frontend.pid
|
||||||
|
.frontend_v4.pid
|
||||||
|
|
||||||
|
# Éditeurs (fichiers de sauvegarde)
|
||||||
|
*.orig
|
||||||
|
*.bak
|
||||||
|
|||||||
@@ -2,18 +2,19 @@
|
|||||||
|
|
||||||
Interface graphique pour créer des workflows RPA par glisser-déposer, sans écrire de code.
|
Interface graphique pour créer des workflows RPA par glisser-déposer, sans écrire de code.
|
||||||
|
|
||||||
|
> **État actuel (avril 2026)** : la version active est `frontend_v4/` (Vite + React, port 3002), lancée par `./run_v4.sh` ou `./launch.sh` (wrapper).
|
||||||
|
> Le dossier `frontend/` est conservé pour référence legacy (Create React App, port 3000), lancé par `./run.sh`.
|
||||||
|
> Les sections `launch.sh setup/stop/restart/logs` ci-dessous sont historiques : seules `./launch.sh` (= `./run_v4.sh`) et `./launch.sh legacy` (= `./run.sh`) sont effectivement implémentées.
|
||||||
|
|
||||||
## 🚀 Démarrage Ultra-Rapide
|
## 🚀 Démarrage Ultra-Rapide
|
||||||
|
|
||||||
### Méthode Simple (Recommandée)
|
### Méthode Simple (Recommandée)
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Configuration initiale (une seule fois)
|
# Démarrer l'application complète (frontend_v4, port 3002)
|
||||||
./launch.sh setup
|
./launch.sh
|
||||||
|
|
||||||
# Démarrer l'application complète
|
# Ouvrir http://localhost:3002 dans votre navigateur
|
||||||
./launch.sh start
|
|
||||||
|
|
||||||
# Ouvrir http://localhost:3000 dans votre navigateur
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**Sur Windows :**
|
**Sur Windows :**
|
||||||
|
|||||||
@@ -40,6 +40,71 @@ if _ROOT not in sys.path:
|
|||||||
STREAMING_SERVER_URL = "http://localhost:5005"
|
STREAMING_SERVER_URL = "http://localhost:5005"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers — nom par défaut à l'import
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _derive_default_name(core_dict: Dict[str, Any]) -> str:
|
||||||
|
"""
|
||||||
|
Génère un nom par défaut explicite pour un workflow appris importé,
|
||||||
|
quand son champ `name` est vide ou vaut « Unnamed Workflow ».
|
||||||
|
|
||||||
|
Stratégie, par ordre de priorité :
|
||||||
|
1. Premier `template.window.title_pattern` exploitable dans les nodes
|
||||||
|
(après filtrage de "Unknown"/"unknown_window") ; on extrait le nom
|
||||||
|
de l'app derrière un séparateur « – » / « - » typique de Windows
|
||||||
|
(« Sans titre – Bloc-notes » → « Bloc-notes »).
|
||||||
|
2. Premier `template.window.process_name` non-null.
|
||||||
|
3. Fallback : 8 premiers caractères de `workflow_id`.
|
||||||
|
|
||||||
|
La date de l'import (YYYY-MM-DD HH:MM) est toujours ajoutée en suffixe.
|
||||||
|
L'utilisateur peut renommer ensuite dans le VWB.
|
||||||
|
"""
|
||||||
|
from datetime import datetime as _dt
|
||||||
|
|
||||||
|
def _extract_app(title: str) -> Optional[str]:
|
||||||
|
if not title:
|
||||||
|
return None
|
||||||
|
t = title.strip()
|
||||||
|
if not t or t.lower() in {"unknown", "unknown_window"}:
|
||||||
|
return None
|
||||||
|
# Séparateurs Windows classiques : « – » (em dash), « — », « - »
|
||||||
|
for sep in (" – ", " — ", " - "):
|
||||||
|
if sep in t:
|
||||||
|
# Le nom de l'app est généralement la partie droite
|
||||||
|
right = t.rsplit(sep, 1)[-1].strip()
|
||||||
|
if right:
|
||||||
|
return right
|
||||||
|
# Pas de séparateur → renvoyer le titre brut (ex : "Rechercher")
|
||||||
|
return t
|
||||||
|
|
||||||
|
app_name: Optional[str] = None
|
||||||
|
for node in (core_dict.get("nodes") or []):
|
||||||
|
window = ((node.get("template") or {}).get("window") or {})
|
||||||
|
app_name = _extract_app(window.get("title_pattern") or "")
|
||||||
|
if app_name:
|
||||||
|
break
|
||||||
|
proc = window.get("process_name")
|
||||||
|
if proc:
|
||||||
|
app_name = str(proc).strip()
|
||||||
|
break
|
||||||
|
|
||||||
|
timestamp = _dt.now().strftime("%Y-%m-%d %H:%M")
|
||||||
|
|
||||||
|
if app_name:
|
||||||
|
return f"Léa {app_name} — {timestamp}"
|
||||||
|
|
||||||
|
wf_id = core_dict.get("workflow_id") or ""
|
||||||
|
# Nettoyer les préfixes techniques courants (workflow_, sess_) pour garder
|
||||||
|
# un identifiant lisible de 8 caractères.
|
||||||
|
for prefix in ("workflow_sess_", "workflow_", "sess_", "session_"):
|
||||||
|
if wf_id.startswith(prefix):
|
||||||
|
wf_id = wf_id[len(prefix):]
|
||||||
|
break
|
||||||
|
suffix = wf_id[:8] if wf_id else "?"
|
||||||
|
return f"Léa {suffix} — {timestamp}"
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# GET /api/v3/learned-workflows
|
# GET /api/v3/learned-workflows
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
@@ -209,7 +274,14 @@ def import_learned_workflow(workflow_id: str):
|
|||||||
|
|
||||||
wf_meta, steps_list, warnings = convert_learned_to_vwb_steps(core_dict)
|
wf_meta, steps_list, warnings = convert_learned_to_vwb_steps(core_dict)
|
||||||
|
|
||||||
# Surcharger le nom si fourni
|
# B2 — nom par défaut explicite pour les workflows arrivant en
|
||||||
|
# "Unnamed Workflow" depuis Léa. N'affecte pas les workflows déjà
|
||||||
|
# nommés manuellement. L'humain peut renommer ensuite dans le VWB.
|
||||||
|
current_name = (wf_meta.get("name") or "").strip()
|
||||||
|
if current_name.lower() in {"", "unnamed workflow", "workflow importé"}:
|
||||||
|
wf_meta["name"] = _derive_default_name(core_dict)
|
||||||
|
|
||||||
|
# Surcharger le nom si fourni explicitement dans la requête
|
||||||
if data.get("name"):
|
if data.get("name"):
|
||||||
wf_meta["name"] = data["name"]
|
wf_meta["name"] = data["name"]
|
||||||
|
|
||||||
|
|||||||
@@ -23,28 +23,40 @@ load_dotenv()
|
|||||||
app = Flask(__name__)
|
app = Flask(__name__)
|
||||||
|
|
||||||
# ============================================================
|
# ============================================================
|
||||||
# Logging — fichier rotatif + console
|
# Logging — fichier rotatif + console (idempotent)
|
||||||
# ============================================================
|
# ============================================================
|
||||||
|
# ATTENTION : ce module peut être importé 2 fois (une fois comme __main__
|
||||||
|
# via `python app.py`, puis comme module `app` via `from app import socketio`
|
||||||
|
# dans api/websocket_handlers.py). Sans garde idempotente, le RotatingFileHandler
|
||||||
|
# est ajouté 2× au root logger → chaque ligne loguée apparaît en double.
|
||||||
_log_dir = os.path.join(os.path.dirname(__file__), 'logs')
|
_log_dir = os.path.join(os.path.dirname(__file__), 'logs')
|
||||||
os.makedirs(_log_dir, exist_ok=True)
|
os.makedirs(_log_dir, exist_ok=True)
|
||||||
|
_LOG_FILE_PATH = os.path.abspath(os.path.join(_log_dir, 'vwb.log'))
|
||||||
|
|
||||||
_file_handler = RotatingFileHandler(
|
_root_logger = logging.getLogger()
|
||||||
os.path.join(_log_dir, 'vwb.log'),
|
_already_configured = any(
|
||||||
maxBytes=5 * 1024 * 1024, # 5 MB
|
isinstance(h, RotatingFileHandler)
|
||||||
backupCount=3
|
and os.path.abspath(getattr(h, 'baseFilename', '')) == _LOG_FILE_PATH
|
||||||
|
for h in _root_logger.handlers
|
||||||
)
|
)
|
||||||
_file_handler.setLevel(logging.INFO)
|
|
||||||
_file_handler.setFormatter(logging.Formatter(
|
|
||||||
'%(asctime)s [%(levelname)s] %(name)s: %(message)s'
|
|
||||||
))
|
|
||||||
|
|
||||||
logging.getLogger().addHandler(_file_handler)
|
if not _already_configured:
|
||||||
logging.getLogger().setLevel(logging.INFO)
|
_file_handler = RotatingFileHandler(
|
||||||
|
_LOG_FILE_PATH,
|
||||||
|
maxBytes=5 * 1024 * 1024, # 5 MB
|
||||||
|
backupCount=3
|
||||||
|
)
|
||||||
|
_file_handler.setLevel(logging.INFO)
|
||||||
|
_file_handler.setFormatter(logging.Formatter(
|
||||||
|
'%(asctime)s [%(levelname)s] %(name)s: %(message)s'
|
||||||
|
))
|
||||||
|
_root_logger.addHandler(_file_handler)
|
||||||
|
_root_logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
# Configuration
|
# Configuration
|
||||||
import secrets as _secrets
|
import secrets as _secrets
|
||||||
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY', _secrets.token_hex(32))
|
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY', _secrets.token_hex(32))
|
||||||
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('DATABASE_URL', 'sqlite:///vwb_v3.db')
|
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('DATABASE_URL', 'sqlite:///workflows.db')
|
||||||
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
|
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
|
||||||
app.config['MAX_CONTENT_LENGTH'] = 10 * 1024 * 1024 # 10MB max upload
|
app.config['MAX_CONTENT_LENGTH'] = 10 * 1024 * 1024 # 10MB max upload
|
||||||
app.config['CACHE_TYPE'] = 'redis' if os.getenv('REDIS_URL') else 'simple'
|
app.config['CACHE_TYPE'] = 'redis' if os.getenv('REDIS_URL') else 'simple'
|
||||||
|
|||||||
Binary file not shown.
@@ -1,5 +1,10 @@
|
|||||||
import { useState, useEffect } from 'react';
|
import { useState, useEffect } from 'react';
|
||||||
import type { Capture } from '../types';
|
import type { Capture } from '../types';
|
||||||
|
import {
|
||||||
|
loadLibrary,
|
||||||
|
saveLibrary,
|
||||||
|
compressThumbnail,
|
||||||
|
} from '../services/captureLibraryStorage';
|
||||||
|
|
||||||
interface LibraryItem {
|
interface LibraryItem {
|
||||||
id: string;
|
id: string;
|
||||||
@@ -22,58 +27,43 @@ export default function CaptureLibrary({ currentCapture, onSelectCapture, onCapt
|
|||||||
const [viewMode, setViewMode] = useState<'all' | 'session' | 'favorites'>('session');
|
const [viewMode, setViewMode] = useState<'all' | 'session' | 'favorites'>('session');
|
||||||
const [selectedItems, setSelectedItems] = useState<Set<string>>(new Set());
|
const [selectedItems, setSelectedItems] = useState<Set<string>>(new Set());
|
||||||
|
|
||||||
// Charger la bibliothèque depuis sessionStorage (avec migration de l'ancienne clé)
|
// Charger la bibliothèque depuis localStorage (persiste entre onglets/sessions).
|
||||||
|
// Le helper loadLibrary() gère la migration des anciennes clés et de sessionStorage.
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
// Essayer la nouvelle clé d'abord
|
const loaded = loadLibrary(currentSessionId) as LibraryItem[];
|
||||||
let stored = sessionStorage.getItem('captureLibrary_v2');
|
setLibrary(
|
||||||
|
loaded.map((item) => ({
|
||||||
// Migration depuis l'ancienne clé si nécessaire
|
|
||||||
if (!stored) {
|
|
||||||
const oldStored = sessionStorage.getItem('captureLibrary');
|
|
||||||
if (oldStored) {
|
|
||||||
try {
|
|
||||||
const oldData = JSON.parse(oldStored);
|
|
||||||
// Migrer les anciennes données vers le nouveau format
|
|
||||||
const migrated = oldData.map((item: any) => ({
|
|
||||||
...item,
|
|
||||||
sessionId: currentSessionId,
|
|
||||||
favorite: false
|
|
||||||
}));
|
|
||||||
sessionStorage.setItem('captureLibrary_v2', JSON.stringify(migrated));
|
|
||||||
stored = JSON.stringify(migrated);
|
|
||||||
console.log(`✅ Migration de ${oldData.length} captures vers le nouveau format`);
|
|
||||||
} catch (e) {
|
|
||||||
console.error('Erreur migration captures:', e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (stored) {
|
|
||||||
const parsed = JSON.parse(stored);
|
|
||||||
setLibrary(parsed.map((item: any) => ({
|
|
||||||
...item,
|
...item,
|
||||||
timestamp: new Date(item.timestamp)
|
timestamp:
|
||||||
})));
|
typeof item.timestamp === 'string'
|
||||||
}
|
? new Date(item.timestamp)
|
||||||
|
: item.timestamp,
|
||||||
|
}))
|
||||||
|
);
|
||||||
}, [currentSessionId]);
|
}, [currentSessionId]);
|
||||||
|
|
||||||
// Sauvegarder la bibliothèque
|
// Sauvegarder la bibliothèque (localStorage + gestion de quota)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
sessionStorage.setItem('captureLibrary_v2', JSON.stringify(library));
|
saveLibrary(library);
|
||||||
}, [library]);
|
}, [library]);
|
||||||
|
|
||||||
// Ajouter capture à la bibliothèque
|
// Ajouter capture à la bibliothèque (thumbnail compressé JPEG 320x240)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (currentCapture) {
|
if (!currentCapture) return;
|
||||||
|
let cancelled = false;
|
||||||
|
(async () => {
|
||||||
|
const compressed = await compressThumbnail(currentCapture.screenshot_base64);
|
||||||
|
if (cancelled) return;
|
||||||
const newItem: LibraryItem = {
|
const newItem: LibraryItem = {
|
||||||
id: `cap_${Date.now()}`,
|
id: `cap_${Date.now()}`,
|
||||||
capture: currentCapture,
|
capture: { ...currentCapture, screenshot_base64: compressed },
|
||||||
timestamp: new Date(),
|
timestamp: new Date(),
|
||||||
sessionId: currentSessionId,
|
sessionId: currentSessionId,
|
||||||
favorite: false
|
favorite: false,
|
||||||
};
|
};
|
||||||
setLibrary(prev => [newItem, ...prev.slice(0, 49)]); // Max 50 captures
|
setLibrary(prev => [newItem, ...prev.slice(0, 49)]); // Max 50 captures
|
||||||
}
|
})();
|
||||||
|
return () => { cancelled = true; };
|
||||||
}, [currentCapture, currentSessionId]);
|
}, [currentCapture, currentSessionId]);
|
||||||
|
|
||||||
// Filtrer selon le mode de vue
|
// Filtrer selon le mode de vue
|
||||||
|
|||||||
@@ -1,6 +1,11 @@
|
|||||||
import { useState, useRef, useEffect } from 'react';
|
import { useState, useRef, useEffect } from 'react';
|
||||||
import type { Capture, ExecutionMode } from '../types';
|
import type { Capture, ExecutionMode } from '../types';
|
||||||
import type { UIElement } from '../services/uiDetection';
|
import type { UIElement } from '../services/uiDetection';
|
||||||
|
import {
|
||||||
|
loadLibrary,
|
||||||
|
saveLibrary,
|
||||||
|
compressThumbnail,
|
||||||
|
} from '../services/captureLibraryStorage';
|
||||||
|
|
||||||
interface DetectionZone {
|
interface DetectionZone {
|
||||||
x: number;
|
x: number;
|
||||||
@@ -23,6 +28,8 @@ interface LibraryItem {
|
|||||||
id: string;
|
id: string;
|
||||||
capture: Capture;
|
capture: Capture;
|
||||||
timestamp: Date;
|
timestamp: Date;
|
||||||
|
sessionId?: string;
|
||||||
|
favorite?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export default function CapturePanel({
|
export default function CapturePanel({
|
||||||
@@ -48,30 +55,43 @@ export default function CapturePanel({
|
|||||||
|
|
||||||
const isDebugMode = executionMode === 'debug';
|
const isDebugMode = executionMode === 'debug';
|
||||||
|
|
||||||
// Charger la bibliothèque depuis sessionStorage
|
// Charger la bibliothèque depuis localStorage (clé unifiée 'captureLibrary_v2').
|
||||||
|
// Le helper loadLibrary() migre aussi les données de l'ancienne clé 'captureLibrary'.
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const stored = sessionStorage.getItem('captureLibrary');
|
const loaded = loadLibrary() as LibraryItem[];
|
||||||
if (stored) {
|
setLibrary(
|
||||||
setLibrary(JSON.parse(stored));
|
loaded.map((item) => ({
|
||||||
}
|
...item,
|
||||||
|
timestamp:
|
||||||
|
typeof item.timestamp === 'string'
|
||||||
|
? new Date(item.timestamp)
|
||||||
|
: item.timestamp,
|
||||||
|
}))
|
||||||
|
);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// Sauvegarder la bibliothèque
|
// Sauvegarder la bibliothèque (localStorage + gestion de quota)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
sessionStorage.setItem('captureLibrary', JSON.stringify(library));
|
saveLibrary(library);
|
||||||
}, [library]);
|
}, [library]);
|
||||||
|
|
||||||
// Ajouter capture à la bibliothèque
|
// Ajouter capture à la bibliothèque (thumbnail compressé JPEG 320x240)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (capture) {
|
if (!capture) return;
|
||||||
setCurrentCapture(capture);
|
setCurrentCapture(capture);
|
||||||
|
let cancelled = false;
|
||||||
|
(async () => {
|
||||||
|
const compressed = await compressThumbnail(capture.screenshot_base64);
|
||||||
|
if (cancelled) return;
|
||||||
const newItem: LibraryItem = {
|
const newItem: LibraryItem = {
|
||||||
id: `cap_${Date.now()}`,
|
id: `cap_${Date.now()}`,
|
||||||
capture,
|
capture: { ...capture, screenshot_base64: compressed },
|
||||||
timestamp: new Date()
|
timestamp: new Date(),
|
||||||
|
favorite: false,
|
||||||
};
|
};
|
||||||
setLibrary(prev => [newItem, ...prev.slice(0, 19)]);
|
setLibrary(prev => [newItem, ...prev.slice(0, 19)]);
|
||||||
}
|
})();
|
||||||
|
return () => { cancelled = true; };
|
||||||
}, [capture]);
|
}, [capture]);
|
||||||
|
|
||||||
// Détecter les éléments UI quand une capture arrive
|
// Détecter les éléments UI quand une capture arrive
|
||||||
|
|||||||
@@ -0,0 +1,175 @@
|
|||||||
|
/**
|
||||||
|
* Stockage unifié de la bibliothèque de captures.
|
||||||
|
*
|
||||||
|
* CONTEXTE (bug B1, 16 avril 2026) :
|
||||||
|
* Avant ce module, deux composants manipulaient la bibliothèque avec des
|
||||||
|
* politiques divergentes :
|
||||||
|
* - CaptureLibrary.tsx : sessionStorage + clé 'captureLibrary_v2'
|
||||||
|
* - CapturePanel.tsx : sessionStorage + clé 'captureLibrary'
|
||||||
|
* Résultat :
|
||||||
|
* 1. Bibliothèque purgée à la fermeture de l'onglet (sessionStorage).
|
||||||
|
* 2. Deux listes désynchronisées (clés différentes).
|
||||||
|
*
|
||||||
|
* Ce module centralise :
|
||||||
|
* - localStorage (persiste entre sessions)
|
||||||
|
* - clé unique 'captureLibrary_v2'
|
||||||
|
* - compression JPEG 80% / max 320×240 des thumbnails avant stockage
|
||||||
|
* pour rester sous le quota navigateur (typiquement 5–10 MB).
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Capture } from '../types';
|
||||||
|
|
||||||
|
export interface LibraryItem {
|
||||||
|
id: string;
|
||||||
|
capture: Capture;
|
||||||
|
timestamp: Date | string; // JSON.parse ne restaure pas les Date
|
||||||
|
sessionId?: string;
|
||||||
|
favorite?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
const STORAGE_KEY = 'captureLibrary_v2';
|
||||||
|
const LEGACY_KEY = 'captureLibrary';
|
||||||
|
const THUMB_MAX_WIDTH = 320;
|
||||||
|
const THUMB_MAX_HEIGHT = 240;
|
||||||
|
const THUMB_QUALITY = 0.8;
|
||||||
|
|
||||||
|
/**
 * Load the capture library.
 *
 * Lookup and migration order (first hit wins):
 *   1. localStorage 'captureLibrary_v2' — current canonical location.
 *   2. sessionStorage 'captureLibrary_v2' — migrated into localStorage,
 *      then removed from sessionStorage.
 *   3. 'captureLibrary' (legacy CapturePanel key, local or session storage)
 *      — items are backfilled with sessionId/favorite, persisted under the
 *        new key, and both legacy copies are removed.
 *
 * Timestamps are revived from their JSON string form into Date objects.
 * Any JSON parse failure resets to an empty library instead of throwing.
 */
export function loadLibrary(defaultSessionId?: string): LibraryItem[] {
  // 1) Primary key in localStorage
  let raw = localStorage.getItem(STORAGE_KEY);

  // 2) Migration sessionStorage -> localStorage (same key). setItem can
  // throw (quota / private browsing), hence the try/catch around the copy.
  if (!raw) {
    const fromSession = sessionStorage.getItem(STORAGE_KEY);
    if (fromSession) {
      raw = fromSession;
      try {
        localStorage.setItem(STORAGE_KEY, raw);
        sessionStorage.removeItem(STORAGE_KEY);
        console.log('[CaptureLibrary] Migration sessionStorage → localStorage');
      } catch (e) {
        console.warn('[CaptureLibrary] Échec migration sessionStorage → localStorage', e);
      }
    }
  }

  // 3) Migration from the old 'captureLibrary' key (legacy CapturePanel).
  // Legacy items may lack sessionId/favorite, so they are backfilled here.
  if (!raw) {
    const legacy =
      localStorage.getItem(LEGACY_KEY) || sessionStorage.getItem(LEGACY_KEY);
    if (legacy) {
      try {
        const parsed = JSON.parse(legacy) as LibraryItem[];
        const migrated = parsed.map((item) => ({
          ...item,
          sessionId: item.sessionId ?? defaultSessionId,
          favorite: item.favorite ?? false,
        }));
        raw = JSON.stringify(migrated);
        localStorage.setItem(STORAGE_KEY, raw);
        localStorage.removeItem(LEGACY_KEY);
        sessionStorage.removeItem(LEGACY_KEY);
        console.log(
          `[CaptureLibrary] Migration ancienne clé → ${migrated.length} captures`
        );
      } catch (e) {
        console.warn('[CaptureLibrary] Erreur migration ancienne clé', e);
      }
    }
  }

  if (!raw) return [];

  try {
    const parsed = JSON.parse(raw) as LibraryItem[];
    // JSON.parse does not restore Date objects — revive string timestamps.
    return parsed.map((item) => ({
      ...item,
      timestamp:
        typeof item.timestamp === 'string'
          ? new Date(item.timestamp)
          : item.timestamp,
    }));
  } catch (e) {
    // Corrupted payload: reset rather than crash the UI.
    console.error('[CaptureLibrary] JSON invalide, reset', e);
    return [];
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sauvegarde la bibliothèque dans localStorage. Gère les erreurs de quota
|
||||||
|
* en élaguant les items les plus anciens jusqu'à ce que ça passe.
|
||||||
|
*/
|
||||||
|
export function saveLibrary(library: LibraryItem[]): void {
|
||||||
|
let toStore = library;
|
||||||
|
// Jusqu'à 5 tentatives : si QuotaExceededError, on tronque de moitié.
|
||||||
|
for (let attempt = 0; attempt < 5; attempt++) {
|
||||||
|
try {
|
||||||
|
localStorage.setItem(STORAGE_KEY, JSON.stringify(toStore));
|
||||||
|
return;
|
||||||
|
} catch (e: any) {
|
||||||
|
const isQuota =
|
||||||
|
e?.name === 'QuotaExceededError' ||
|
||||||
|
e?.code === 22 ||
|
||||||
|
e?.code === 1014; // Firefox
|
||||||
|
if (!isQuota) {
|
||||||
|
console.error('[CaptureLibrary] Erreur save', e);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Garder la moitié la plus récente
|
||||||
|
const half = Math.max(1, Math.floor(toStore.length / 2));
|
||||||
|
console.warn(
|
||||||
|
`[CaptureLibrary] Quota dépassé, élagage ${toStore.length} → ${half}`
|
||||||
|
);
|
||||||
|
toStore = toStore.slice(0, half);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
console.error('[CaptureLibrary] Impossible de sauvegarder même après élagage');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Compress a base64 image (PNG or JPEG) into a low-quality JPEG thumbnail
 * for the library, bounded by THUMB_MAX_WIDTH x THUMB_MAX_HEIGHT (aspect
 * ratio preserved, never upscaled). Returns the JPEG base64 WITHOUT the
 * 'data:' URL prefix.
 *
 * Fallback: resolves with the original base64 on any failure (image load
 * error, missing 2D context, canvas exception). Never rejects.
 * NOTE(review): the data URL always declares image/png even for JPEG
 * input — browsers sniff the real format, but confirm on target browsers.
 */
export async function compressThumbnail(base64Png: string): Promise<string> {
  return new Promise((resolve) => {
    try {
      const img = new Image();
      img.onload = () => {
        try {
          // Scale factor capped at 1 so small images are never enlarged.
          const ratio = Math.min(
            THUMB_MAX_WIDTH / img.width,
            THUMB_MAX_HEIGHT / img.height,
            1
          );
          const w = Math.max(1, Math.round(img.width * ratio));
          const h = Math.max(1, Math.round(img.height * ratio));
          const canvas = document.createElement('canvas');
          canvas.width = w;
          canvas.height = h;
          const ctx = canvas.getContext('2d');
          if (!ctx) {
            // No 2D context (unlikely): keep the original image.
            resolve(base64Png);
            return;
          }
          ctx.drawImage(img, 0, 0, w, h);
          const dataUrl = canvas.toDataURL('image/jpeg', THUMB_QUALITY);
          // Strip the 'data:image/jpeg;base64,' prefix.
          const prefixEnd = dataUrl.indexOf(',');
          resolve(prefixEnd >= 0 ? dataUrl.slice(prefixEnd + 1) : base64Png);
        } catch (e) {
          console.warn('[CaptureLibrary] Compression échouée', e);
          resolve(base64Png);
        }
      };
      img.onerror = () => resolve(base64Png);
      img.src = `data:image/png;base64,${base64Png}`;
    } catch (e) {
      console.warn('[CaptureLibrary] Compression échouée (sync)', e);
      resolve(base64Png);
    }
  });
}
|
||||||
37
visual_workflow_builder/launch.sh
Executable file
37
visual_workflow_builder/launch.sh
Executable file
@@ -0,0 +1,37 @@
|
|||||||
|
#!/bin/bash
################################################################################
# launch.sh — compatibility wrapper for VWB
#
# The historical README refers to ./launch.sh, but the active script is
# run_v4.sh (frontend_v4, Vite + React — current version).
# The original run.sh targets frontend/ (legacy v3), kept for archaeology.
#
# Usage:
#   ./launch.sh          # equivalent to ./run_v4.sh (active version)
#   ./launch.sh start    # same
#   ./launch.sh legacy   # runs the old run.sh (frontend v3)
#
# Active frontend : frontend_v4/ (Vite + React, port 3002)
# Legacy frontend : frontend/ (Create React App, port 3000)
################################################################################

set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# ${1:-start} maps BOTH an absent and an empty first argument to "start",
# so no separate "" case pattern is needed (the old one was unreachable).
# NOTE(review): --dev/--prod are accepted but not forwarded to run_v4.sh —
# confirm whether run_v4.sh should receive them.
case "${1:-start}" in
  legacy)
    echo "→ Lancement legacy (frontend/) via run.sh"
    exec "$SCRIPT_DIR/run.sh"
    ;;
  start|--dev|--prod)
    echo "→ Lancement VWB v4 (frontend_v4/) via run_v4.sh"
    exec "$SCRIPT_DIR/run_v4.sh"
    ;;
  *)
    # BUGFIX: diagnostics for an unknown command now go to stderr.
    echo "launch.sh : commande inconnue '$1'" >&2
    echo "Usage : $0 [start|legacy]" >&2
    echo " start (défaut) → run_v4.sh (frontend_v4, port 3002)" >&2
    echo " legacy → run.sh (frontend v3, port 3000)" >&2
    exit 1
    ;;
esac
|
||||||
@@ -1,8 +1,12 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
# Visual Workflow Builder - Script de Lancement Complet
|
# Visual Workflow Builder - Script de Lancement Complet (LEGACY v3)
|
||||||
#
|
#
|
||||||
|
# ATTENTION : ce script cible frontend/ (Create React App, port 3000) — version
|
||||||
|
# historique. La version active est frontend_v4/ (Vite + React, port 3002),
|
||||||
|
# lancée via ./run_v4.sh ou ./launch.sh (qui y délègue par défaut).
|
||||||
|
#
|
||||||
# Ce script :
|
# Ce script :
|
||||||
# - Crée un environnement virtuel Python si nécessaire
|
# - Crée un environnement virtuel Python si nécessaire
|
||||||
# - Installe les dépendances backend
|
# - Installe les dépendances backend
|
||||||
|
|||||||
@@ -1,11 +1,14 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
# Visual Workflow Builder v4 - Script de Lancement
|
# Visual Workflow Builder v4 - Script de Lancement (VERSION ACTIVE)
|
||||||
#
|
#
|
||||||
# Ce script lance :
|
# Ce script lance :
|
||||||
# - Le backend Flask (port 5002)
|
# - Le backend Flask (port 5002)
|
||||||
# - Le frontend React v4 avec Vite (port 3002)
|
# - Le frontend React v4 avec Vite (port 3002) ← frontend actif
|
||||||
|
#
|
||||||
|
# frontend_v4/ = version active (Vite + React, port 3002)
|
||||||
|
# frontend/ = legacy v3 (Create React App, port 3000) — lancé par run.sh
|
||||||
################################################################################
|
################################################################################
|
||||||
|
|
||||||
set -e
|
set -e
|
||||||
|
|||||||
Reference in New Issue
Block a user