v1.0 - Version stable: multi-PC, détection UI-DETR-1, 3 modes exécution

- Frontend v4 accessible sur réseau local (192.168.1.40)
- Ports ouverts: 3002 (frontend), 5001 (backend), 5004 (dashboard)
- Ollama GPU fonctionnel
- Self-healing interactif
- Dashboard confiance

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Dom
2026-01-29 11:23:51 +01:00
parent 21bfa3b337
commit a27b74cf22
1595 changed files with 412691 additions and 400 deletions

View File

@@ -0,0 +1 @@
Single-database configuration for Flask.

View File

@@ -0,0 +1,50 @@
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# Logging configuration — standard Python logging.config fileConfig format,
# consumed by env.py at migration time.
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate
[handlers]
keys = console
[formatters]
keys = generic

# Root logger: warnings and above go to the console handler.
[logger_root]
level = WARN
handlers = console
qualname =

# SQLAlchemy engine logger; raise level to INFO to echo emitted SQL.
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

# Single console handler writing to stderr.
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -0,0 +1,113 @@
import logging
from logging.config import fileConfig
from flask import current_app
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
# Guard against a missing ini path: when Alembic is driven programmatically
# (e.g. from tests or scripts) config_file_name can be None, and passing
# None to fileConfig() raises; skip logging setup in that case.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
def get_engine():
    """Return the SQLAlchemy engine registered with Flask-Migrate.

    Supports both Flask-SQLAlchemy API generations: releases before 3.0
    (and Alchemical) expose ``get_engine()``, newer releases expose the
    engine as a plain ``engine`` attribute.
    """
    migrate_db = current_app.extensions['migrate'].db
    try:
        # Flask-SQLAlchemy < 3 / Alchemical style accessor.
        return migrate_db.get_engine()
    except (TypeError, AttributeError):
        # Flask-SQLAlchemy >= 3 attribute.
        return migrate_db.engine
def get_engine_url():
    """Return the database URL as a string with '%' doubled.

    The escaping is required because the value is fed back into the
    configparser-based Alembic config, where '%' is an interpolation
    character.
    """
    engine_url = get_engine().url
    try:
        rendered = engine_url.render_as_string(hide_password=False)
    except AttributeError:
        # Older SQLAlchemy URL objects have no render_as_string().
        rendered = str(engine_url)
    return rendered.replace('%', '%%')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# Point Alembic at the application's database URL (already '%'-escaped
# for configparser interpolation by get_engine_url()).
config.set_main_option('sqlalchemy.url', get_engine_url())
# The Flask-SQLAlchemy instance registered with Flask-Migrate; note this
# module-level access requires an active Flask application context.
target_db = current_app.extensions['migrate'].db
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def get_metadata():
    """Return the MetaData object that autogenerate should compare against.

    Flask-SQLAlchemy 3.x keeps per-bind metadata in a ``metadatas``
    mapping, with the default (unnamed) bind under the ``None`` key.
    """
    if not hasattr(target_db, 'metadatas'):
        # Older Flask-SQLAlchemy: one shared MetaData attribute.
        return target_db.metadata
    return target_db.metadatas[None]
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    The context is configured with just a URL rather than an Engine, so
    no DBAPI needs to be importable. Calls to context.execute() emit the
    generated SQL to the script output instead of a live database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=get_metadata(),
        literal_binds=True,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine and associates a live connection with the
    migration context before running the migration scripts.
    """

    def process_revision_directives(context, revision, directives):
        # Prevent an auto-migration from being generated when there are
        # no schema changes.
        # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
        if not getattr(config.cmd_opts, 'autogenerate', False):
            return
        migration_script = directives[0]
        if migration_script.upgrade_ops.is_empty():
            directives[:] = []
            logger.info('No changes in schema detected.')

    conf_args = current_app.extensions['migrate'].configure_args
    # Only install our callback when the application has not configured
    # one of its own (explicit None counts as "not configured").
    if conf_args.get("process_revision_directives") is None:
        conf_args["process_revision_directives"] = process_revision_directives

    engine = get_engine()
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=get_metadata(),
            **conf_args
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic selects offline mode when invoked with --sql.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,104 @@
"""Initial schema with workflow metadata
Revision ID: 001_initial
Revises:
Create Date: 2026-01-23
Tables:
- workflows: Workflow definitions with metadata (tags, trigger examples)
- steps: Workflow steps with action types and parameters
- visual_anchors: Visual anchors for UI element detection
- executions: Workflow execution history
- execution_steps: Individual step execution results
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '001_initial'  # this migration's ID
down_revision = None      # base migration: nothing earlier to revert to
branch_labels = None
depends_on = None
def upgrade():
    """Create the five core tables of the workflow-automation schema.

    Creation order respects foreign keys: steps references workflows and
    visual_anchors; executions references workflows; execution_steps
    references executions. Note: the default= values here are client-side
    SQLAlchemy defaults, not server_default — presumably intentional, but
    raw SQL inserts will not see them (verify against callers).
    """
    # Workflows table: workflow definitions with JSON-encoded metadata
    # (tags, trigger examples) stored as Text columns.
    op.create_table('workflows',
        sa.Column('id', sa.String(64), primary_key=True),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('tags_json', sa.Text(), nullable=True),
        sa.Column('trigger_examples_json', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('is_active', sa.Boolean(), default=True),
    )
    # Visual Anchors table: captured UI regions (bbox within a screen
    # resolution) used for on-screen element detection.
    op.create_table('visual_anchors',
        sa.Column('id', sa.String(64), primary_key=True),
        sa.Column('image_path', sa.String(512), nullable=True),
        sa.Column('thumbnail_path', sa.String(512), nullable=True),
        sa.Column('bbox_x', sa.Float(), nullable=True),
        sa.Column('bbox_y', sa.Float(), nullable=True),
        sa.Column('bbox_width', sa.Float(), nullable=True),
        sa.Column('bbox_height', sa.Float(), nullable=True),
        sa.Column('screen_width', sa.Integer(), nullable=True),
        sa.Column('screen_height', sa.Integer(), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('confidence_threshold', sa.Float(), default=0.8),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('capture_method', sa.String(64), default='screen_capture'),
    )
    # Steps table: ordered actions belonging to a workflow, optionally
    # tied to a visual anchor.
    op.create_table('steps',
        sa.Column('id', sa.String(64), primary_key=True),
        sa.Column('workflow_id', sa.String(64), sa.ForeignKey('workflows.id'), nullable=False),
        sa.Column('action_type', sa.String(64), nullable=False),
        # NOTE(review): 'order' is a reserved SQL keyword — safe through
        # SQLAlchemy's identifier quoting, but must be quoted in any raw SQL.
        sa.Column('order', sa.Integer(), nullable=False, default=0),
        sa.Column('position_x', sa.Float(), default=0),
        sa.Column('position_y', sa.Float(), default=0),
        sa.Column('parameters_json', sa.Text(), default='{}'),
        sa.Column('anchor_id', sa.String(64), sa.ForeignKey('visual_anchors.id'), nullable=True),
        sa.Column('label', sa.String(255), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
    )
    # Executions table: one row per workflow run with progress counters.
    op.create_table('executions',
        sa.Column('id', sa.String(64), primary_key=True),
        sa.Column('workflow_id', sa.String(64), sa.ForeignKey('workflows.id'), nullable=False),
        sa.Column('status', sa.String(32), default='pending'),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('ended_at', sa.DateTime(), nullable=True),
        sa.Column('current_step_index', sa.Integer(), default=0),
        sa.Column('total_steps', sa.Integer(), default=0),
        sa.Column('completed_steps', sa.Integer(), default=0),
        sa.Column('failed_steps', sa.Integer(), default=0),
        sa.Column('error_message', sa.Text(), nullable=True),
    )
    # Execution Steps table: per-step results of a run.
    op.create_table('execution_steps',
        sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column('execution_id', sa.String(64), sa.ForeignKey('executions.id'), nullable=False),
        # NOTE(review): step_id has no ForeignKey to steps.id — looks
        # deliberate (keeps history if a step is deleted), but confirm.
        sa.Column('step_id', sa.String(64), nullable=False),
        sa.Column('status', sa.String(32), default='pending'),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('ended_at', sa.DateTime(), nullable=True),
        sa.Column('duration_ms', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('evidence_path', sa.String(512), nullable=True),
        sa.Column('output_json', sa.Text(), default='{}'),
    )
def downgrade():
    """Drop all tables in reverse dependency order (children first)."""
    for table_name in ('execution_steps', 'executions', 'steps',
                       'visual_anchors', 'workflows'):
        op.drop_table(table_name)