feat: méthode TIM experte CPAM + moteur de règles étendu
CPAM — Méthode TIM (mémoire en défense) : - Réécriture CPAM_ARGUMENTATION avec raisonnement 5 passes TIM (contexte admin → motif réel → confrontation bio → hiérarchie → validation défensive) - _BIO_THRESHOLDS (19 entrées) + _build_bio_confrontation() pour confrontation biologie/diagnostic avec seuils chiffrés et verdicts - _format_response() dual format : nouveau TIM (moyens numérotés, tableau bio, codes non défendables, conclusion dispositive) + rétrocompat legacy - CPAM_ADVERSARIAL mis à jour pour vérifier honnêteté intellectuelle - Tests adaptés + 12 nouveaux tests (bio confrontation, format TIM) Moteur de règles : - Nouvelles règles YAML : demographic, diagnostic_conflicts, procedure_diagnosis, temporal, parcours - Bio extraction FAISS (synonymes vectoriels) - Veto engine enrichi (citations, Trackare skip, règles démographiques) - Decision engine : _apply_bio_rules_gen() + matchers analytiques Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -4,10 +4,14 @@ from __future__ import annotations
|
||||
|
||||
import re
|
||||
import unicodedata
|
||||
import logging
|
||||
|
||||
import numpy as np
|
||||
from ..config import BiologieCle, DossierMedical, load_lab_value_sanity
|
||||
from .bio_normals import BIO_NORMALS, _is_abnormal
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _norm_key(s: str) -> str:
|
||||
"""Normalise une clé (minuscules, sans accents) pour index YAML."""
|
||||
@@ -68,6 +72,100 @@ def _sanitize_bio_value(test_name: str, raw_value: str, sanity_cfg: dict) -> tup
|
||||
return token, val, quality, reason
|
||||
|
||||
|
||||
def _extract_biologie_faiss(text: str, dossier: DossierMedical) -> None:
    """Vector-search (FAISS) fallback for biology-result extraction.

    Complements the regex pass by matching lab-test synonyms the patterns do
    not anticipate, or complex phrasing variations. Appends any accepted hit
    to ``dossier.biologie_cle``; returns nothing.

    Args:
        text: Raw document text to scan.
        dossier: Medical record whose ``biologie_cle`` list is extended in place.
    """
    # Local imports keep the FAISS/embedding stack optional at module load.
    from .rag_index import get_index
    from .rag_search import _get_embed_model

    index_bundle = get_index(kind="bio")
    if not index_bundle:
        return
    faiss_index, metadata = index_bundle

    try:
        embedder = _get_embed_model()
    except Exception as e:
        logger.warning("FAISS Bio: modèle d'embedding indisponible (%s)", e)
        return

    # Split the text into focused segments: whole lines when short, otherwise
    # 12-word windows stepped by 10 (2-word overlap) so a value is not cut
    # off from its test name at a window edge.
    usable = [ln.strip() for ln in text.split("\n") if len(ln.strip()) > 5]
    if not usable:
        return

    segments: list[str] = []
    for ln in usable:
        tokens = ln.split()
        if len(tokens) > 15:
            segments.extend(
                " ".join(tokens[start:start + 12])
                for start in range(0, len(tokens), 10)
            )
        else:
            segments.append(ln)

    if not segments:
        return

    # Encode all segments in one batch (unit-normalised vectors).
    try:
        vectors = np.array(
            embedder.encode(segments, normalize_embeddings=True, show_progress_bar=False),
            dtype=np.float32,
        )
    except Exception as e:
        logger.warning("FAISS Bio: erreur encodage segments (%s)", e)
        return

    # Top-1 nearest-neighbour lookup against the bio synonym index.
    min_score = 0.82  # similarity cutoff below which hits are discarded
    scores, indices = faiss_index.search(vectors, 1)

    sanity_cfg = load_lab_value_sanity()
    already_seen: set = set()

    for pos, (score_row, idx_row) in enumerate(zip(scores, indices)):
        similarity = float(score_row[0])
        # idx < 0 is FAISS's "no result" sentinel.
        if similarity < min_score or idx_row[0] < 0:
            continue

        meta = metadata[idx_row[0]]
        concept_name = meta.get("code")
        synonym_matched = meta.get("extrait")
        segment = segments[pos]

        # Capture the first numeric value in the segment (optionally preceded
        # by '=', ':' or 'à'/'a', optionally trailed by a unit-like token).
        val_match = re.search(r"(?:[=àa:]\s*)?(\d+(?:[.,]\d+)?)\s*(?:[a-zA-Z/%/µ/mm3/G/L/U/I]+)?", segment)
        if not val_match:
            continue

        raw_value = val_match.group(1)
        entry_key = (concept_name, raw_value)
        # De-duplicate within this FAISS pass.
        if entry_key in already_seen:
            continue
        already_seen.add(entry_key)

        sanitized = _sanitize_bio_value(concept_name, raw_value, sanity_cfg)
        if not sanitized:
            continue
        token, val_num, quality, reason = sanitized
        anomalie = _is_abnormal(concept_name, token)

        # Skip entries the regex extraction pass already recorded.
        if any(b.test == concept_name and b.valeur == raw_value for b in dossier.biologie_cle):
            continue

        dossier.biologie_cle.append(
            BiologieCle(
                test=concept_name,
                valeur=raw_value,
                valeur_num=val_num,
                anomalie=anomalie,
                quality=quality,
                discard_reason=reason,
            )
        )
        logger.debug("FAISS Bio match: %s (%s) = %s dans '%s'", concept_name, synonym_matched, raw_value, segment)
|
||||
|
||||
|
||||
def _extract_biologie(text: str, dossier: DossierMedical) -> None:
|
||||
"""Extrait des résultats biologiques clés.
|
||||
|
||||
@@ -90,12 +188,20 @@ def _extract_biologie(text: str, dossier: DossierMedical) -> None:
|
||||
# Ionogramme / électrolytes
|
||||
(r"(?:[Ss]odium|[Nn]atr[ée]mie|(?<![A-Za-z])Na\+?(?![A-Za-z]))\s*[=:àa]?\s*([0-9]{2,3}(?:[.,][0-9]+)?)\s*(?:mmol/L|mEq/L)?", "Sodium"),
|
||||
(r"(?:[Pp]otassium|[Kk]ali[ée]mie|(?<![A-Za-z])K\+?(?![A-Za-z]))\s*[=:àa]?\s*([0-9](?:[.,][0-9]+)?)\s*(?:mmol/L|mEq/L)?", "Potassium"),
|
||||
(r"(?:[Cc]hlore|[Cc]hlor[ée]mie|(?<![A-Za-z])Cl-?(?![A-Za-z]))\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:mmol/L)?", "Chlore"),
|
||||
(r"(?:[Cc]alcium|[Cc]alci[ée]mie|(?<![A-Za-z])Ca\+?(?![A-Za-z]))\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:mmol/L|mg/dL)?", "Calcium"),
|
||||
|
||||
(r"[Tt]roponine\s+(?:us\s+)?(n[ée]gative|positive|normale)", "Troponine"),
|
||||
(r"(?:[Hh][ée]moglobine|\bHb\b)\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:g/dL|g/L)?", "Hémoglobine"),
|
||||
(r"\bVGM\b\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:fL)?", "VGM"),
|
||||
(r"\bFerritine\b\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:µg/L|ng/mL)?", "Ferritine"),
|
||||
(r"[Pp]laquettes?\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:/mm3|G/L)?", "Plaquettes"),
|
||||
(r"[Ll]eucocytes?\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:/mm3|G/L)?", "Leucocytes"),
|
||||
(r"[Cc]r[ée]atinine?\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:µmol/L|mg/dL)?", "Créatinine"),
|
||||
(r"\bUr[ée]e\b\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:mmol/L|g/L)?", "Urée"),
|
||||
(r"(?:[Gg]lyc[ée]mie|[Gg]lucose)\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:mmol/L|g/L)?", "Glycémie"),
|
||||
(r"\bHbA1c\b\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:%)?", "HbA1c"),
|
||||
(r"\bTSH\b\s*[=:àa]?\s*(\d+(?:[.,]\d+)?)\s*(?:mUI/L)?", "TSH"),
|
||||
]
|
||||
|
||||
|
||||
@@ -182,3 +288,6 @@ def _extract_biologie(text: str, dossier: DossierMedical) -> None:
|
||||
discard_reason=reason,
|
||||
)
|
||||
)
|
||||
|
||||
# --- Complément par recherche vectorielle (Synonymes) ---
|
||||
_extract_biologie_faiss(text, dossier)
|
||||
|
||||
Reference in New Issue
Block a user