feat: Implement comprehensive database schema with new models, CRUD operations, and documentation for host metrics, Docker management, and terminal sessions, while removing old test files.
Some checks failed
Tests / Backend Tests (Python) (3.10) (push) Has been cancelled
Tests / Backend Tests (Python) (3.11) (push) Has been cancelled
Tests / Backend Tests (Python) (3.12) (push) Has been cancelled
Tests / Frontend Tests (JS) (push) Has been cancelled
Tests / Integration Tests (push) Has been cancelled
Tests / All Tests Passed (push) Has been cancelled

This commit is contained in:
Bruno Charest 2026-03-05 10:16:13 -05:00
parent 121aab9d42
commit 984d06a223
72 changed files with 3781 additions and 501 deletions

View File

@ -4,68 +4,119 @@
# Copy this file to .env and fill in the values.
# DO NOT commit the .env file with real credentials!
# --- General ---
# --- GENERAL ---
TZ="America/Montreal"
DEBUG_MODE=NO
# Utiliser le mode de debug (YES/NO)
DEBUG_MODE=YES
# ----------------
# --- API Authentication ---
# --- API AUTHENTICATION ---
# REQUIRED: Set a strong, unique API key
# Generate a random API key using: python -c "import secrets; print(secrets.token_hex(32))"
API_KEY=CHANGE_ME_TO_A_STRONG_API_KEY
# ---------------------------
# --- JWT Authentication ---
# --- JWT AUTHENTICATION ---
# REQUIRED: Set a strong secret key (min 32 chars)
# Generate a random JWT secret key using: python -c "import secrets; print(secrets.token_hex(32))"
JWT_SECRET_KEY=CHANGE_ME_TO_A_STRONG_SECRET_KEY_MIN_32_CHARS
JWT_EXPIRE_MINUTES=60
# ---------------------------
# --- Database ---
DATABASE_URL=sqlite+aiosqlite:///./data/homelab.db
# --- DATABASE ---
# Database engine mysql ou sqlite
DB_ENGINE=mysql
# Database SQLite
# DATABASE_URL=sqlite+aiosqlite:///./data/homelab.db
# DB_PATH=./data/homelab.db
# Database MySQL
MYSQL_HOST=localhost
MYSQL_PORT=3306
MYSQL_DATABASE=homelab
MYSQL_USER=homelab
MYSQL_PASSWORD=CHANGE_ME
MYSQL_ROOT_PASSWORD=CHANGE_ME
# Auto migration (true/false)
DB_AUTO_MIGRATE=true
# Database MySQL
DATABASE_URL=mysql+aiomysql://user:password@localhost:3306/homelab
DB_PATH=./data/homelab.db
# DB_ENGINE=mysql
# MYSQL_HOST=mysql
# MYSQL_USER=homelab
# MYSQL_PASSWORD=CHANGE_ME
# DB_AUTO_MIGRATE=true
# ---------------------------
# --- Logging ---
# --- LOGS ---
LOGS_DIR=./logs/Server_log
DIR_LOGS_TASKS=./logs/tasks_logs
# ---------------------------
# --- Ansible ---
# --- ANSIBLE ---
ANSIBLE_INVENTORY=./ansible/inventory
ANSIBLE_PLAYBOOKS=./ansible/playbooks
ANSIBLE_GROUP_VARS=./ansible/inventory/group_vars
# ANSIBLE_CONFIG=/path/to/ansible.cfg
# ---------------------------
# --- SSH ---
# Utilisateur SSH pour les connexions
SSH_USER=automation
# Utilisateur sur le remote SSH pour les connexions
SSH_REMOTE_USER=automation
# Dossier des clés SSH
SSH_KEY_DIR=~/.ssh
# Chemin de la clé SSH
SSH_KEY_PATH=~/.ssh/id_automation_ansible
# ---------------------------
# --- CORS ---
# Comma-separated list of allowed origins (no wildcard in production!)
CORS_ORIGINS=http://localhost:3000,http://localhost:8008
# ---------------------------
# --- Notifications (ntfy) ---
# --- NOTIFICATIONS (ntfy) ---
# URL de base du serveur ntfy (self-hosted ou ntfy.sh)
NTFY_BASE_URL=https://ntfy.sh
# Topic par défaut pour les notifications générales
NTFY_DEFAULT_TOPIC=homelab-events
# Activer/désactiver les notifications (true/false)
NTFY_ENABLED=true
# Envoyer toutes les notifications (ALL) // erreurs (ERR) // avertissements (WARN)
# ex: NTFY_MSG_TYPE=ERR,WARN
# ex: NTFY_MSG_TYPE=ERR
# ex: NTFY_MSG_TYPE=ALL
NTFY_MSG_TYPE=ERR
# Timeout pour les requêtes HTTP vers ntfy (en secondes)
NTFY_TIMEOUT=5
# Authentification ntfy si nécessaire (username et password, ou token)
# NTFY_USERNAME=
# NTFY_PASSWORD=CHANGE_ME
# NTFY_TOKEN=CHANGE_ME
# ---------------------------
# --- Terminal SSH Web ---
# --- TERMINAL SSH WEB ---
# Session TTL in minutes (default: 30)
TERMINAL_SESSION_TTL_MINUTES=30
# Network interface ttyd binds to (e.g. eth0)
TERMINAL_TTYD_INTERFACE=eth0
# Maximum active sessions per user (default: 3)
TERMINAL_MAX_SESSIONS_PER_USER=3
# Idle timeout - sessions without heartbeat for this long are closed (default: 120s)
TERMINAL_SESSION_IDLE_TIMEOUT_SECONDS=120
# Heartbeat interval - how often client should send heartbeat (default: 15s)
TERMINAL_HEARTBEAT_INTERVAL_SECONDS=15
# GC interval - how often to run garbage collection (default: 30s)
TERMINAL_GC_INTERVAL_SECONDS=30
# Port range for ttyd instances (values below override the built-in 7680-7700 default)
TERMINAL_PORT_RANGE_START=7682
TERMINAL_PORT_RANGE_END=7699
# SSH user for terminal connections (default: automation)
TERMINAL_SSH_USER=automation
# Retention period for terminal command history (default: 30 days)
TERMINAL_COMMAND_RETENTION_DAYS=30
# ---------------------------

View File

@ -5,7 +5,8 @@ from logging.config import fileConfig
import os
from pathlib import Path
from sqlalchemy import pool
from sqlalchemy import pool, text
import sqlalchemy as sa
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
@ -50,6 +51,7 @@ def run_migrations_offline() -> None:
literal_binds=True,
dialect_opts={"paramstyle": "named"},
compare_type=True,
version_table_col_args={'type_': sa.String(255)},
)
with context.begin_transaction():
@ -57,10 +59,12 @@ def run_migrations_offline() -> None:
def do_run_migrations(connection: Connection) -> None:
print(f"[Alembic] Configuring context for connection {connection}")
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
version_table_col_args={'type_': sa.String(255)},
)
with context.begin_transaction():

26
alembic/script.py.mako Normal file
View File

@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@ -20,11 +20,11 @@ depends_on = None
def upgrade() -> None:
op.create_table(
"hosts",
sa.Column("id", sa.String(), primary_key=True),
sa.Column("name", sa.String(), nullable=False),
sa.Column("ip_address", sa.String(), nullable=False, unique=True),
sa.Column("status", sa.String(), nullable=False, server_default=sa.text("'unknown'")),
sa.Column("ansible_group", sa.String(), nullable=True),
sa.Column("id", sa.String(255), primary_key=True),
sa.Column("name", sa.String(255), nullable=False),
sa.Column("ip_address", sa.String(255), nullable=False, unique=True),
sa.Column("status", sa.String(255), nullable=False, server_default=sa.text("'unknown'")),
sa.Column("ansible_group", sa.String(255), nullable=True),
sa.Column("last_seen", sa.DateTime(timezone=True), nullable=True),
sa.Column("reachable", sa.Boolean(), nullable=False, server_default=sa.text("0")),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
@ -35,9 +35,9 @@ def upgrade() -> None:
op.create_table(
"bootstrap_status",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column("host_id", sa.String(), sa.ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False),
sa.Column("status", sa.String(), nullable=False),
sa.Column("automation_user", sa.String(), nullable=True),
sa.Column("host_id", sa.String(255), sa.ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False),
sa.Column("status", sa.String(255), nullable=False),
sa.Column("automation_user", sa.String(255), nullable=True),
sa.Column("last_attempt", sa.DateTime(timezone=True), nullable=True),
sa.Column("error_message", sa.Text(), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
@ -45,11 +45,11 @@ def upgrade() -> None:
op.create_table(
"tasks",
sa.Column("id", sa.String(), primary_key=True),
sa.Column("action", sa.String(), nullable=False),
sa.Column("target", sa.String(), nullable=False),
sa.Column("status", sa.String(), nullable=False, server_default=sa.text("'pending'")),
sa.Column("playbook", sa.String(), nullable=True),
sa.Column("id", sa.String(255), primary_key=True),
sa.Column("action", sa.String(255), nullable=False),
sa.Column("target", sa.String(255), nullable=False),
sa.Column("status", sa.String(255), nullable=False, server_default=sa.text("'pending'")),
sa.Column("playbook", sa.String(255), nullable=True),
sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("error_message", sa.Text(), nullable=True),
@ -59,16 +59,16 @@ def upgrade() -> None:
op.create_table(
"schedules",
sa.Column("id", sa.String(), primary_key=True),
sa.Column("name", sa.String(), nullable=False),
sa.Column("playbook", sa.String(), nullable=False),
sa.Column("target", sa.String(), nullable=False),
sa.Column("schedule_type", sa.String(), nullable=False),
sa.Column("id", sa.String(255), primary_key=True),
sa.Column("name", sa.String(255), nullable=False),
sa.Column("playbook", sa.String(255), nullable=False),
sa.Column("target", sa.String(255), nullable=False),
sa.Column("schedule_type", sa.String(255), nullable=False),
sa.Column("schedule_time", sa.DateTime(timezone=True), nullable=True),
sa.Column("recurrence_type", sa.String(), nullable=True),
sa.Column("recurrence_time", sa.String(), nullable=True),
sa.Column("recurrence_type", sa.String(255), nullable=True),
sa.Column("recurrence_time", sa.String(255), nullable=True),
sa.Column("recurrence_days", sa.Text(), nullable=True),
sa.Column("cron_expression", sa.String(), nullable=True),
sa.Column("cron_expression", sa.String(255), nullable=True),
sa.Column("enabled", sa.Boolean(), nullable=False, server_default=sa.text("1")),
sa.Column("tags", sa.Text(), nullable=True),
sa.Column("next_run", sa.DateTime(timezone=True), nullable=True),
@ -81,9 +81,9 @@ def upgrade() -> None:
op.create_table(
"schedule_runs",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column("schedule_id", sa.String(), sa.ForeignKey("schedules.id", ondelete="CASCADE"), nullable=False),
sa.Column("task_id", sa.String(), sa.ForeignKey("tasks.id", ondelete="SET NULL"), nullable=True),
sa.Column("status", sa.String(), nullable=False),
sa.Column("schedule_id", sa.String(255), sa.ForeignKey("schedules.id", ondelete="CASCADE"), nullable=False),
sa.Column("task_id", sa.String(255), sa.ForeignKey("tasks.id", ondelete="SET NULL"), nullable=True),
sa.Column("status", sa.String(255), nullable=False),
sa.Column("started_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("duration", sa.Float(), nullable=True),
@ -95,13 +95,13 @@ def upgrade() -> None:
op.create_table(
"logs",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column("level", sa.String(), nullable=False),
sa.Column("source", sa.String(), nullable=True),
sa.Column("level", sa.String(255), nullable=False),
sa.Column("source", sa.String(255), nullable=True),
sa.Column("message", sa.Text(), nullable=False),
sa.Column("details", sa.JSON(), nullable=True),
sa.Column("host_id", sa.String(), sa.ForeignKey("hosts.id", ondelete="SET NULL"), nullable=True),
sa.Column("task_id", sa.String(), sa.ForeignKey("tasks.id", ondelete="SET NULL"), nullable=True),
sa.Column("schedule_id", sa.String(), sa.ForeignKey("schedules.id", ondelete="SET NULL"), nullable=True),
sa.Column("host_id", sa.String(255), sa.ForeignKey("hosts.id", ondelete="SET NULL"), nullable=True),
sa.Column("task_id", sa.String(255), sa.ForeignKey("tasks.id", ondelete="SET NULL"), nullable=True),
sa.Column("schedule_id", sa.String(255), sa.ForeignKey("schedules.id", ondelete="SET NULL"), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
)

View File

@ -20,12 +20,12 @@ depends_on = None
def upgrade() -> None:
# Ajouter les colonnes manquantes à la table schedules
op.add_column("schedules", sa.Column("description", sa.Text(), nullable=True))
op.add_column("schedules", sa.Column("target_type", sa.String(), nullable=True, server_default="group"))
op.add_column("schedules", sa.Column("target_type", sa.String(255), nullable=True, server_default="group"))
op.add_column("schedules", sa.Column("extra_vars", sa.JSON(), nullable=True))
op.add_column("schedules", sa.Column("timezone", sa.String(), nullable=True, server_default="America/Montreal"))
op.add_column("schedules", sa.Column("timezone", sa.String(255), nullable=True, server_default="America/Montreal"))
op.add_column("schedules", sa.Column("start_at", sa.DateTime(timezone=True), nullable=True))
op.add_column("schedules", sa.Column("end_at", sa.DateTime(timezone=True), nullable=True))
op.add_column("schedules", sa.Column("last_status", sa.String(), nullable=True, server_default="never"))
op.add_column("schedules", sa.Column("last_status", sa.String(255), nullable=True, server_default="never"))
op.add_column("schedules", sa.Column("retry_on_failure", sa.Integer(), nullable=True, server_default="0"))
op.add_column("schedules", sa.Column("timeout", sa.Integer(), nullable=True, server_default="3600"))
op.add_column("schedules", sa.Column("run_count", sa.Integer(), nullable=True, server_default="0"))

View File

@ -22,7 +22,7 @@ def upgrade() -> None:
"""Add notification_type column to schedules table."""
op.add_column(
'schedules',
sa.Column('notification_type', sa.String(), nullable=True, server_default='all')
sa.Column('notification_type', sa.String(255), nullable=True, server_default='all')
)

View File

@ -21,7 +21,7 @@ def upgrade() -> None:
op.create_table(
'host_metrics',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('host_id', sa.String(), nullable=False),
sa.Column('host_id', sa.String(255), nullable=False),
sa.Column('metric_type', sa.String(50), nullable=False),
# CPU metrics

View File

@ -25,13 +25,13 @@ def upgrade():
op.create_table(
'logs_new',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('level', sa.String(), nullable=False),
sa.Column('source', sa.String(), nullable=True),
sa.Column('level', sa.String(255), nullable=False),
sa.Column('source', sa.String(255), nullable=True),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('details', sa.JSON(), nullable=True),
sa.Column('host_id', sa.String(), nullable=True),
sa.Column('task_id', sa.String(), nullable=True),
sa.Column('schedule_id', sa.String(), nullable=True),
sa.Column('host_id', sa.String(255), nullable=True),
sa.Column('task_id', sa.String(255), nullable=True),
sa.Column('schedule_id', sa.String(255), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.PrimaryKeyConstraint('id')
)

View File

@ -29,7 +29,7 @@ def upgrade() -> None:
op.create_table(
'docker_containers',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('host_id', sa.String(), nullable=False),
sa.Column('host_id', sa.String(255), nullable=False),
sa.Column('container_id', sa.String(64), nullable=False),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('image', sa.String(255), nullable=True),
@ -51,7 +51,7 @@ def upgrade() -> None:
op.create_table(
'docker_images',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('host_id', sa.String(), nullable=False),
sa.Column('host_id', sa.String(255), nullable=False),
sa.Column('image_id', sa.String(64), nullable=False),
sa.Column('repo_tags', sa.JSON(), nullable=True),
sa.Column('size', sa.BigInteger(), nullable=True),
@ -66,7 +66,7 @@ def upgrade() -> None:
op.create_table(
'docker_volumes',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('host_id', sa.String(), nullable=False),
sa.Column('host_id', sa.String(255), nullable=False),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('driver', sa.String(50), nullable=True),
sa.Column('mountpoint', sa.Text(), nullable=True),
@ -81,7 +81,7 @@ def upgrade() -> None:
op.create_table(
'docker_alerts',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('host_id', sa.String(), nullable=False),
sa.Column('host_id', sa.String(255), nullable=False),
sa.Column('container_name', sa.String(255), nullable=False),
sa.Column('severity', sa.String(20), nullable=False, server_default='warning'),
sa.Column('state', sa.String(20), nullable=False, server_default='open'),

View File

@ -26,8 +26,10 @@ def upgrade() -> None:
op.execute("""
DELETE FROM docker_containers
WHERE id NOT IN (
SELECT MAX(id) FROM docker_containers
SELECT id FROM (
SELECT MAX(id) AS id FROM docker_containers
GROUP BY host_id, container_id
) AS tmp
)
""")
@ -35,8 +37,10 @@ def upgrade() -> None:
op.execute("""
DELETE FROM docker_images
WHERE id NOT IN (
SELECT MAX(id) FROM docker_images
SELECT id FROM (
SELECT MAX(id) AS id FROM docker_images
GROUP BY host_id, image_id
) AS tmp
)
""")
@ -44,8 +48,10 @@ def upgrade() -> None:
op.execute("""
DELETE FROM docker_volumes
WHERE id NOT IN (
SELECT MAX(id) FROM docker_volumes
SELECT id FROM (
SELECT MAX(id) AS id FROM docker_volumes
GROUP BY host_id, name
) AS tmp
)
""")

View File

@ -22,11 +22,11 @@ def upgrade() -> None:
op.create_table(
'terminal_sessions',
sa.Column('id', sa.String(64), primary_key=True),
sa.Column('host_id', sa.String(), nullable=False, index=True),
sa.Column('host_name', sa.String(), nullable=False),
sa.Column('host_ip', sa.String(), nullable=False),
sa.Column('user_id', sa.String(), nullable=True),
sa.Column('username', sa.String(), nullable=True),
sa.Column('host_id', sa.String(255), nullable=False, index=True),
sa.Column('host_name', sa.String(255), nullable=False),
sa.Column('host_ip', sa.String(255), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('username', sa.String(255), nullable=True),
sa.Column('token_hash', sa.String(128), nullable=False),
sa.Column('ttyd_port', sa.Integer(), nullable=False),
sa.Column('ttyd_pid', sa.Integer(), nullable=True),

View File

@ -43,8 +43,8 @@ def upgrade() -> None:
'terminal_command_logs',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('host_id', sa.String(), nullable=False),
sa.Column('user_id', sa.String(), nullable=True),
sa.Column('host_id', sa.String(255), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('terminal_session_id', sa.String(64), nullable=True),
sa.Column('command', sa.Text(), nullable=False),
sa.Column('command_hash', sa.String(64), nullable=False),

View File

@ -41,7 +41,7 @@ def upgrade() -> None:
'container_customizations',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('host_id', sa.String(), nullable=False),
sa.Column('host_id', sa.String(255), nullable=False),
sa.Column('container_id', sa.String(length=64), nullable=False),
sa.Column('icon_key', sa.String(length=100), nullable=True),
sa.Column('icon_color', sa.String(length=20), nullable=True),

View File

@ -1,97 +1,17 @@
INFO [alembic.runtime.migration] Context impl MySQLImpl.
INFO [alembic.runtime.migration] Context impl MySQLImpl.
INFO [alembic.runtime.migration] Will assume non-transactional DDL.
INFO [alembic.runtime.migration] Will assume non-transactional DDL.
[DB] DATABASE_URL=mysql+aiomysql://homelab:CHANGE_ME@127.0.0.1:3306/homelab, DEFAULT_DB_PATH=/mnt/c/dev/git/python/homelab-automation-api-v2/data/homelab.db, parent_exists=True, parent=/mnt/c/dev/git/python/homelab-automation-api-v2/data
[DB] Found alembic.ini at: /mnt/c/dev/git/python/homelab-automation-api-v2/alembic.ini
[DB] Running Alembic upgrade to head...
[DB] Alembic upgrade completed successfully
Exception ignored in: <function Connection.__del__ at 0x7cb5c11d0c20>
Traceback (most recent call last):
File "<frozen runpy>", line 198, in _run_module_as_main
File "<frozen runpy>", line 88, in _run_code
File "C:\Users\bruno\scoop\apps\python\current\Scripts\alembic.exe\__main__.py", line 5, in <module>
sys.exit(main())
~~~~^^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\alembic\config.py", line 636, in main
CommandLine(prog=prog).main(argv=argv)
~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\alembic\config.py", line 626, in main
self.run_cmd(cfg, options)
~~~~~~~~~~~~^^^^^^^^^^^^^^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\alembic\config.py", line 603, in run_cmd
fn(
~~^
config,
^^^^^^^
*[getattr(options, k, None) for k in positional],
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**{k: getattr(options, k, None) for k in kwarg},
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
)
^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\alembic\command.py", line 236, in revision
script_directory.run_env()
~~~~~~~~~~~~~~~~~~~~~~~~^^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\alembic\script\base.py", line 586, in run_env
util.load_python_file(self.dir, "env.py")
~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\alembic\util\pyfiles.py", line 95, in load_python_file
module = load_module_py(module_id, path)
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\alembic\util\pyfiles.py", line 113, in load_module_py
spec.loader.exec_module(module) # type: ignore
~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^
File "<frozen importlib._bootstrap_external>", line 759, in exec_module
File "<frozen importlib._bootstrap>", line 491, in _call_with_frames_removed
File "C:\dev\git\python\homelab-automation-api-v2\alembic\env.py", line 20, in <module>
from app.models.database import Base, metadata_obj, DATABASE_URL # noqa: E402
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\git\python\homelab-automation-api-v2\app\__init__.py", line 14, in <module>
from app.factory import create_app
File "C:\dev\git\python\homelab-automation-api-v2\app\factory.py", line 17, in <module>
from app.models.database import init_db, async_session_maker
File "C:\dev\git\python\homelab-automation-api-v2\app\models\__init__.py", line 2, in <module>
from .host import Host
File "C:\dev\git\python\homelab-automation-api-v2\app\models\host.py", line 13, in <module>
class Host(Base):
...<42 lines>...
return f"<Host id={self.id} name={self.name} ip={self.ip_address}>"
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\sqlalchemy\orm\decl_api.py", line 196, in __init__
_as_declarative(reg, cls, dict_)
~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\sqlalchemy\orm\decl_base.py", line 244, in _as_declarative
return _MapperConfig.setup_mapping(registry, cls, dict_, None, {})
~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\sqlalchemy\orm\decl_base.py", line 325, in setup_mapping
return _ClassScanMapperConfig(
registry, cls_, dict_, table, mapper_kw
)
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\sqlalchemy\orm\decl_base.py", line 572, in __init__
self._extract_mappable_attributes()
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\sqlalchemy\orm\decl_base.py", line 1560, in _extract_mappable_attributes
value.declarative_scan(
~~~~~~~~~~~~~~~~~~~~~~^
self,
^^^^^
...<7 lines>...
is_dataclass,
^^^^^^^^^^^^^
)
^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\sqlalchemy\orm\properties.py", line 709, in declarative_scan
self._init_column_for_annotation(
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^
cls,
^^^^
...<2 lines>...
originating_module,
^^^^^^^^^^^^^^^^^^^
)
^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\sqlalchemy\orm\properties.py", line 751, in _init_column_for_annotation
argument = de_stringify_union_elements(
cls, argument, originating_module
)
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\sqlalchemy\util\typing.py", line 341, in de_stringify_union_elements
return make_union_type(
*[
...<8 lines>...
]
)
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\sqlalchemy\util\typing.py", line 478, in make_union_type
return cast(Any, Union).__getitem__(types) # type: ignore
~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^
TypeError: descriptor '__getitem__' requires a 'typing.Union' object but received a 'tuple'
[DB] DATABASE_URL=sqlite+aiosqlite:///C:\dev\git\python\homelab-automation-api-v2\data\homelab.db, DEFAULT_DB_PATH=C:\dev\git\python\homelab-automation-api-v2\data\homelab.db, parent_exists=True, parent=C:\dev\git\python\homelab-automation-api-v2\data
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 1131, in __del__
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 339, in close
File "/usr/lib/python3.12/asyncio/selector_events.py", line 1211, in close
File "/usr/lib/python3.12/asyncio/selector_events.py", line 875, in close
File "/usr/lib/python3.12/asyncio/base_events.py", line 795, in call_soon
File "/usr/lib/python3.12/asyncio/base_events.py", line 541, in _check_closed
RuntimeError: Event loop is closed

View File

@ -75,6 +75,9 @@ class Settings(BaseSettings):
jwt_algorithm: str = "HS256"
# === Database ===
db_engine: str = Field(
default_factory=lambda: os.environ.get("DB_ENGINE", "sqlite")
)
database_url: Optional[str] = Field(default=None)
@property
@ -85,13 +88,21 @@ class Settings(BaseSettings):
return f"sqlite+aiosqlite:///{self.db_path}"
# === CORS ===
cors_origins: list = Field(
cors_origins: str | list = Field(
default_factory=lambda: [
o.strip() for o in os.environ.get(
"CORS_ORIGINS", "http://localhost:3000,http://localhost:8008"
).split(",") if o.strip()
]
)
@field_validator("cors_origins", mode="before")
@classmethod
def assemble_cors_origins(cls, v: object) -> list[str]:
if isinstance(v, str):
return [i.strip() for i in v.split(",") if i.strip()]
return v
cors_allow_credentials: bool = True
cors_allow_methods: list = Field(default=["*"])
cors_allow_headers: list = Field(default=["*"])

View File

@ -88,22 +88,42 @@ class DockerContainerRepository:
compose_project: Optional[str] = None
) -> DockerContainer:
"""Create or update a container using SQLite upsert."""
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
# Determine if we are using MySQL or SQLite
dialect = self.session.bind.dialect.name
stmt = sqlite_insert(DockerContainer).values(
host_id=host_id,
container_id=container_id,
name=name,
image=image,
state=state,
status=status,
health=health,
created_at=created_at,
ports=ports,
labels=labels,
compose_project=compose_project,
last_update_at=datetime.utcnow()
values = {
"host_id": host_id,
"container_id": container_id,
"name": name,
"image": image,
"state": state,
"status": status,
"health": health,
"created_at": created_at,
"ports": ports,
"labels": labels,
"compose_project": compose_project,
"last_update_at": datetime.utcnow()
}
if dialect == "mysql":
from sqlalchemy.dialects.mysql import insert as mysql_insert
stmt = mysql_insert(DockerContainer).values(**values)
stmt = stmt.on_duplicate_key_update(
name=stmt.inserted.name,
image=stmt.inserted.image,
state=stmt.inserted.state,
status=stmt.inserted.status,
health=stmt.inserted.health,
created_at=stmt.inserted.created_at,
ports=stmt.inserted.ports,
labels=stmt.inserted.labels,
compose_project=stmt.inserted.compose_project,
last_update_at=stmt.inserted.last_update_at
)
else:
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
stmt = sqlite_insert(DockerContainer).values(**values)
stmt = stmt.on_conflict_do_update(
index_elements=['host_id', 'container_id'],
set_={
@ -119,6 +139,7 @@ class DockerContainerRepository:
'last_update_at': stmt.excluded.last_update_at
}
)
await self.session.execute(stmt)
# Return the upserted container

View File

@ -63,16 +63,30 @@ class DockerImageRepository:
created: Optional[datetime] = None
) -> DockerImage:
"""Create or update an image using SQLite upsert."""
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
# Determine if we are using MySQL or SQLite
dialect = self.session.bind.dialect.name
stmt = sqlite_insert(DockerImage).values(
host_id=host_id,
image_id=image_id,
repo_tags=repo_tags,
size=size,
created=created,
last_update_at=datetime.utcnow()
values = {
"host_id": host_id,
"image_id": image_id,
"repo_tags": repo_tags,
"size": size,
"created": created,
"last_update_at": datetime.utcnow()
}
if dialect == "mysql":
from sqlalchemy.dialects.mysql import insert as mysql_insert
stmt = mysql_insert(DockerImage).values(**values)
stmt = stmt.on_duplicate_key_update(
repo_tags=stmt.inserted.repo_tags,
size=stmt.inserted.size,
created=stmt.inserted.created,
last_update_at=stmt.inserted.last_update_at
)
else:
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
stmt = sqlite_insert(DockerImage).values(**values)
stmt = stmt.on_conflict_do_update(
index_elements=['host_id', 'image_id'],
set_={
@ -82,6 +96,7 @@ class DockerImageRepository:
'last_update_at': stmt.excluded.last_update_at
}
)
await self.session.execute(stmt)
# Return the upserted image

View File

@ -56,16 +56,30 @@ class DockerVolumeRepository:
scope: Optional[str] = None
) -> DockerVolume:
"""Create or update a volume using SQLite upsert."""
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
# Determine if we are using MySQL or SQLite
dialect = self.session.bind.dialect.name
stmt = sqlite_insert(DockerVolume).values(
host_id=host_id,
name=name,
driver=driver,
mountpoint=mountpoint,
scope=scope,
last_update_at=datetime.utcnow()
values = {
"host_id": host_id,
"name": name,
"driver": driver,
"mountpoint": mountpoint,
"scope": scope,
"last_update_at": datetime.utcnow()
}
if dialect == "mysql":
from sqlalchemy.dialects.mysql import insert as mysql_insert
stmt = mysql_insert(DockerVolume).values(**values)
stmt = stmt.on_duplicate_key_update(
driver=stmt.inserted.driver,
mountpoint=stmt.inserted.mountpoint,
scope=stmt.inserted.scope,
last_update_at=stmt.inserted.last_update_at
)
else:
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
stmt = sqlite_insert(DockerVolume).values(**values)
stmt = stmt.on_conflict_do_update(
index_elements=['host_id', 'name'],
set_={
@ -75,6 +89,7 @@ class DockerVolumeRepository:
'last_update_at': stmt.excluded.last_update_at
}
)
await self.session.execute(stmt)
# Return the upserted volume

View File

@ -89,7 +89,7 @@ class UserRepository:
role=role,
is_active=is_active,
is_superuser=is_superuser,
password_changed_at=datetime.now(timezone.utc),
password_changed_at=datetime.now(timezone.utc).replace(tzinfo=None),
)
self.session.add(user)
await self.session.flush()
@ -106,13 +106,13 @@ class UserRepository:
async def update_password(self, user: User, hashed_password: str) -> User:
"""Update user password and timestamp."""
user.hashed_password = hashed_password
user.password_changed_at = datetime.now(timezone.utc)
user.password_changed_at = datetime.now(timezone.utc).replace(tzinfo=None)
await self.session.flush()
return user
async def update_last_login(self, user: User) -> User:
"""Update last login timestamp."""
user.last_login = datetime.now(timezone.utc)
user.last_login = datetime.now(timezone.utc).replace(tzinfo=None)
await self.session.flush()
return user

View File

@ -179,7 +179,8 @@ def create_app() -> FastAPI:
# Initialiser la base de données
await init_db()
print("📦 Base de données SQLite initialisée")
engine_name = "MySQL" if settings.db_engine.lower() == "mysql" else "SQLite"
print(f"📦 Base de données {engine_name} initialisée")
# Charger les services
from app.services import (

View File

@ -3741,10 +3741,10 @@
id="setup-password"
name="password"
required
minlength="6"
minlength="8"
autocomplete="new-password"
class="w-full px-4 py-3 bg-gray-800 border border-gray-700 rounded-lg text-white placeholder-gray-500 focus:border-purple-500 focus:ring-1 focus:ring-purple-500 transition-colors pr-12"
placeholder="Minimum 6 caractères"
placeholder="Min. 8 caractères, Maj, Min, Chiffre, Spécial"
>
<button
type="button"
@ -3765,7 +3765,7 @@
id="setup-password-confirm"
name="password_confirm"
required
minlength="6"
minlength="8"
autocomplete="new-password"
class="w-full px-4 py-3 bg-gray-800 border border-gray-700 rounded-lg text-white placeholder-gray-500 focus:border-purple-500 focus:ring-1 focus:ring-purple-500 transition-colors"
placeholder="Confirmez votre mot de passe"
@ -6027,13 +6027,63 @@
submitBtn.innerHTML = '<i class="fas fa-spinner fa-spin"></i><span>Création...</span>';
try {
const success = await dashboard.setupAdmin(username, password, email, displayName);
if (!success) {
errorText.textContent = 'Erreur lors de la création du compte';
// Direct API call to see the exact response
const apiBase = window.location.origin;
const payload = {
username,
password,
email: email || null,
display_name: displayName || null
};
console.log('[Setup] Sending setup request:', JSON.stringify(payload));
const response = await fetch(`${apiBase}/api/auth/setup`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload)
});
const responseBody = await response.text();
console.log('[Setup] Response status:', response.status);
console.log('[Setup] Response body:', responseBody);
if (!response.ok) {
let errorMessage = 'Échec de configuration';
try {
const errorData = JSON.parse(responseBody);
if (errorData.detail) {
if (Array.isArray(errorData.detail)) {
errorMessage = errorData.detail.map(err => err.msg).join(', ');
} else {
errorMessage = errorData.detail;
}
}
} catch (e) {
errorMessage = `Erreur serveur (${response.status}): ${responseBody.substring(0, 200)}`;
}
errorText.textContent = errorMessage;
errorEl.classList.remove('hidden');
return;
}
// Setup succeeded, now login
console.log('[Setup] Setup successful, attempting login...');
const loginResult = await dashboard.login(username, password);
console.log('[Setup] Login result:', loginResult);
if (!loginResult) {
// Login failed but setup succeeded — show success and redirect to login
errorEl.classList.remove('hidden');
errorEl.querySelector('i').className = 'fas fa-check-circle mr-2';
errorEl.classList.remove('text-red-400', 'bg-red-900/20', 'border-red-800');
errorEl.classList.add('text-green-400', 'bg-green-900/20', 'border-green-800');
errorText.textContent = 'Compte créé avec succès ! Redirection vers la connexion...';
setTimeout(() => { window.location.reload(); }, 2000);
}
} catch (error) {
errorText.textContent = error.message || 'Erreur de configuration';
console.error('[Setup] Error:', error);
const msg = error.message || String(error) || 'Erreur inconnue lors de la création du compte';
errorText.textContent = msg;
errorEl.classList.remove('hidden');
} finally {
submitBtn.disabled = false;

View File

@ -736,7 +736,6 @@ class DashboardManager {
}
async setupAdmin(username, password, email = null, displayName = null) {
try {
const response = await fetch(`${this.apiBase}/api/auth/setup`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
@ -749,17 +748,24 @@ class DashboardManager {
});
if (!response.ok) {
const error = await response.json();
throw new Error(error.detail || 'Échec de configuration');
let errorMessage = 'Échec de configuration';
try {
const errorData = await response.json();
if (errorData.detail) {
if (Array.isArray(errorData.detail)) {
errorMessage = errorData.detail.map(err => err.msg).join(', ');
} else {
errorMessage = errorData.detail;
}
}
} catch (e) {
console.error('Could not parse error response:', e);
}
throw new Error(errorMessage);
}
// Auto-login after setup
return await this.login(username, password);
} catch (error) {
console.error('Setup failed:', error);
this.showNotification(error.message, 'error');
return false;
}
}
logout() {

View File

@ -14,9 +14,9 @@ class BootstrapStatus(Base):
__tablename__ = "bootstrap_status"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
host_id: Mapped[str] = mapped_column(String, ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
status: Mapped[str] = mapped_column(String, nullable=False)
automation_user: Mapped[str] = mapped_column(String, nullable=True)
host_id: Mapped[str] = mapped_column(String(50), ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
status: Mapped[str] = mapped_column(String(50), nullable=False)
automation_user: Mapped[str] = mapped_column(String(50), nullable=True)
last_attempt: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
error_message: Mapped[str] = mapped_column(Text, nullable=True)
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())

View File

@ -16,7 +16,7 @@ class ContainerCustomization(Base):
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
user_id: Mapped[int] = mapped_column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=True)
host_id: Mapped[str] = mapped_column(String, ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
host_id: Mapped[str] = mapped_column(String(50), ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
container_id: Mapped[str] = mapped_column(String(64), nullable=False)
icon_key: Mapped[str] = mapped_column(String(100), nullable=True)

View File

@ -11,7 +11,7 @@ from urllib.parse import urlparse
from alembic import command
from alembic.config import Config
from sqlalchemy import event, MetaData
from sqlalchemy import event, MetaData, text
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import declarative_base
@ -115,8 +115,9 @@ async def get_db() -> AsyncGenerator[AsyncSession, None]:
async def init_db() -> None:
"""Create all tables (mostly for dev/tests; migrations should be handled by Alembic)."""
from . import (
from . import ( # noqa: F401 — register ALL models with Base.metadata
host,
host_metrics,
task,
schedule,
schedule_run,
@ -130,10 +131,19 @@ async def init_db() -> None:
playbook_lint,
favorite_group,
favorite_container,
) # noqa: F401
user,
terminal_session,
terminal_command_log,
bootstrap_status,
container_customization,
)
def _to_sync_database_url(db_url: str) -> str:
if db_url.startswith("sqlite+aiosqlite:"):
return db_url.replace("sqlite+aiosqlite:", "sqlite:")
elif db_url.startswith("mysql+aiomysql:"):
return db_url.replace("mysql+aiomysql:", "mysql+pymysql:")
return db_url
def _run_alembic_upgrade() -> None:
# Try multiple locations for alembic.ini (dev vs Docker)
@ -155,8 +165,26 @@ async def init_db() -> None:
return
try:
# For MySQL, pre-create alembic_version with VARCHAR(255) to prevent
# "Data too long for column 'version_num'" error on long revision IDs.
# SQLite doesn't enforce VARCHAR length, so this is only needed for MySQL.
sync_url = _to_sync_database_url(DATABASE_URL)
if "mysql" in sync_url:
import sqlalchemy as sa
sync_engine = sa.create_engine(sync_url)
with sync_engine.connect() as conn:
conn.execute(sa.text(
"CREATE TABLE IF NOT EXISTS alembic_version "
"(version_num VARCHAR(255) NOT NULL, "
"PRIMARY KEY (version_num))"
))
conn.commit()
sync_engine.dispose()
print("[DB] MySQL: alembic_version table ensured with VARCHAR(255)")
cfg = Config(str(alembic_ini))
cfg.set_main_option("sqlalchemy.url", _to_sync_database_url(DATABASE_URL))
cfg.set_main_option("sqlalchemy.url", sync_url)
print(f"[DB] Running Alembic upgrade to head...")
command.upgrade(cfg, "head")
print(f"[DB] Alembic upgrade completed successfully")
@ -164,10 +192,9 @@ async def init_db() -> None:
print(f"[DB] Alembic upgrade failed: {e}")
raise
try:
print(f"[DB] Initializing database with URL: {DATABASE_URL}")
await asyncio.to_thread(_run_alembic_upgrade)
except Exception as e:
print(f"[DB] Exception during Alembic migration: {e}")
print("[DB] Ensuring all tables exist (metadata create_all)...")
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
print("[DB] Database initialization complete.")

View File

@ -16,7 +16,7 @@ class DockerAlert(Base):
__tablename__ = "docker_alerts"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
host_id: Mapped[str] = mapped_column(String, ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
host_id: Mapped[str] = mapped_column(String(50), ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
container_name: Mapped[str] = mapped_column(String(255), nullable=False)
severity: Mapped[str] = mapped_column(String(20), nullable=False, default="warning") # warning/error/critical
state: Mapped[str] = mapped_column(String(20), nullable=False, default="open") # open/closed/acknowledged

View File

@ -16,7 +16,7 @@ class DockerContainer(Base):
__tablename__ = "docker_containers"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
host_id: Mapped[str] = mapped_column(String, ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
host_id: Mapped[str] = mapped_column(String(50), ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
container_id: Mapped[str] = mapped_column(String(64), nullable=False)
name: Mapped[str] = mapped_column(String(255), nullable=False)
image: Mapped[str] = mapped_column(String(255), nullable=True)

View File

@ -16,7 +16,7 @@ class DockerImage(Base):
__tablename__ = "docker_images"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
host_id: Mapped[str] = mapped_column(String, ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
host_id: Mapped[str] = mapped_column(String(50), ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
image_id: Mapped[str] = mapped_column(String(64), nullable=False)
repo_tags: Mapped[list] = mapped_column(JSON, nullable=True) # ["nginx:latest", "nginx:1.25"]
size: Mapped[int] = mapped_column(BigInteger, nullable=True)

View File

@ -16,7 +16,7 @@ class DockerVolume(Base):
__tablename__ = "docker_volumes"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
host_id: Mapped[str] = mapped_column(String, ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
host_id: Mapped[str] = mapped_column(String(50), ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
name: Mapped[str] = mapped_column(String(255), nullable=False)
driver: Mapped[str] = mapped_column(String(50), nullable=True)
mountpoint: Mapped[str] = mapped_column(Text, nullable=True)

View File

@ -13,11 +13,11 @@ from .database import Base
class Host(Base):
__tablename__ = "hosts"
id: Mapped[str] = mapped_column(String, primary_key=True)
name: Mapped[str] = mapped_column(String, nullable=False)
ip_address: Mapped[str] = mapped_column(String, nullable=False, unique=True)
status: Mapped[str] = mapped_column(String, nullable=False, server_default=text("'unknown'"))
ansible_group: Mapped[str] = mapped_column(String, nullable=True)
id: Mapped[str] = mapped_column(String(50), primary_key=True)
name: Mapped[str] = mapped_column(String(255), nullable=False)
ip_address: Mapped[str] = mapped_column(String(50), nullable=False, unique=True)
status: Mapped[str] = mapped_column(String(50), nullable=False, server_default=text("'unknown'"))
ansible_group: Mapped[str] = mapped_column(String(50), nullable=True)
last_seen: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
reachable: Mapped[bool] = mapped_column(Boolean, nullable=False, server_default=text("0"))
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())

View File

@ -20,7 +20,7 @@ class HostMetrics(Base):
)
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
host_id: Mapped[str] = mapped_column(String, ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
host_id: Mapped[str] = mapped_column(String(50), ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
metric_type: Mapped[str] = mapped_column(String(50), nullable=False) # 'system_info', 'disk_usage', 'memory', etc.
# Métriques CPU

View File

@ -19,13 +19,13 @@ class Log(Base):
)
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
level: Mapped[str] = mapped_column(String, nullable=False)
source: Mapped[str] = mapped_column(String, nullable=True)
level: Mapped[str] = mapped_column(String(50), nullable=False)
source: Mapped[str] = mapped_column(String(255), nullable=True)
message: Mapped[str] = mapped_column(Text, nullable=False)
details: Mapped[dict] = mapped_column(JSON, nullable=True)
host_id: Mapped[str] = mapped_column(String, nullable=True)
task_id: Mapped[str] = mapped_column(String, nullable=True)
schedule_id: Mapped[str] = mapped_column(String, nullable=True)
host_id: Mapped[str] = mapped_column(String(50), nullable=True)
task_id: Mapped[str] = mapped_column(String(50), nullable=True)
schedule_id: Mapped[str] = mapped_column(String(50), nullable=True)
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())

View File

@ -13,31 +13,31 @@ from .database import Base
class Schedule(Base):
__tablename__ = "schedules"
id: Mapped[str] = mapped_column(String, primary_key=True)
name: Mapped[str] = mapped_column(String, nullable=False)
id: Mapped[str] = mapped_column(String(50), primary_key=True)
name: Mapped[str] = mapped_column(String(255), nullable=False)
description: Mapped[str] = mapped_column(Text, nullable=True)
playbook: Mapped[str] = mapped_column(String, nullable=False)
target_type: Mapped[str] = mapped_column(String, default="group", nullable=True)
target: Mapped[str] = mapped_column(String, nullable=False)
playbook: Mapped[str] = mapped_column(String(255), nullable=False)
target_type: Mapped[str] = mapped_column(String(50), default="group", nullable=True)
target: Mapped[str] = mapped_column(String(255), nullable=False)
extra_vars: Mapped[Dict[str, Any]] = mapped_column(JSON, nullable=True)
schedule_type: Mapped[str] = mapped_column(String, nullable=False)
schedule_type: Mapped[str] = mapped_column(String(50), nullable=False)
schedule_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
recurrence_type: Mapped[str] = mapped_column(String, nullable=True)
recurrence_time: Mapped[str] = mapped_column(String, nullable=True)
recurrence_type: Mapped[str] = mapped_column(String(50), nullable=True)
recurrence_time: Mapped[str] = mapped_column(String(50), nullable=True)
recurrence_days: Mapped[str] = mapped_column(Text, nullable=True)
cron_expression: Mapped[str] = mapped_column(String, nullable=True)
timezone: Mapped[str] = mapped_column(String, default="America/Montreal", nullable=True)
cron_expression: Mapped[str] = mapped_column(String(100), nullable=True)
timezone: Mapped[str] = mapped_column(String(100), default="America/Montreal", nullable=True)
start_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
end_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
tags: Mapped[str] = mapped_column(Text, nullable=True)
next_run: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
last_run: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
last_status: Mapped[str] = mapped_column(String, default="never", nullable=True)
last_status: Mapped[str] = mapped_column(String(50), default="never", nullable=True)
retry_on_failure: Mapped[int] = mapped_column(Integer, default=0, nullable=True)
timeout: Mapped[int] = mapped_column(Integer, default=3600, nullable=True)
# Type de notification: "none" (aucune), "all" (toujours), "errors" (erreurs seulement)
notification_type: Mapped[str] = mapped_column(String, default="all", nullable=True)
notification_type: Mapped[str] = mapped_column(String(50), default="all", nullable=True)
run_count: Mapped[int] = mapped_column(Integer, default=0, nullable=True)
success_count: Mapped[int] = mapped_column(Integer, default=0, nullable=True)
failure_count: Mapped[int] = mapped_column(Integer, default=0, nullable=True)

View File

@ -14,9 +14,9 @@ class ScheduleRun(Base):
__tablename__ = "schedule_runs"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
schedule_id: Mapped[str] = mapped_column(String, ForeignKey("schedules.id", ondelete="CASCADE"), nullable=False)
task_id: Mapped[str] = mapped_column(String, ForeignKey("tasks.id", ondelete="SET NULL"), nullable=True)
status: Mapped[str] = mapped_column(String, nullable=False)
schedule_id: Mapped[str] = mapped_column(String(50), ForeignKey("schedules.id", ondelete="CASCADE"), nullable=False)
task_id: Mapped[str] = mapped_column(String(50), ForeignKey("tasks.id", ondelete="SET NULL"), nullable=True)
status: Mapped[str] = mapped_column(String(50), nullable=False)
started_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
completed_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
duration: Mapped[float] = mapped_column(Float, nullable=True)

View File

@ -13,11 +13,11 @@ from .database import Base
class Task(Base):
__tablename__ = "tasks"
id: Mapped[str] = mapped_column(String, primary_key=True)
action: Mapped[str] = mapped_column(String, nullable=False)
target: Mapped[str] = mapped_column(String, nullable=False)
status: Mapped[str] = mapped_column(String, nullable=False, server_default=text("'pending'"))
playbook: Mapped[str] = mapped_column(String, nullable=True)
id: Mapped[str] = mapped_column(String(50), primary_key=True)
action: Mapped[str] = mapped_column(String(100), nullable=False)
target: Mapped[str] = mapped_column(String(255), nullable=False)
status: Mapped[str] = mapped_column(String(50), nullable=False, server_default=text("'pending'"))
playbook: Mapped[str] = mapped_column(String(255), nullable=True)
started_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
completed_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
error_message: Mapped[str] = mapped_column(Text, nullable=True)

View File

@ -36,10 +36,10 @@ class TerminalCommandLog(Base):
# Foreign keys
host_id: Mapped[str] = mapped_column(
String, ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False, index=True
String(50), ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False, index=True
)
user_id: Mapped[str] = mapped_column(
String, ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
user_id: Mapped[Optional[int]] = mapped_column(
Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
)
# Session reference (not FK as sessions may be cleaned up)

View File

@ -53,12 +53,12 @@ class TerminalSession(Base):
Index('ix_terminal_sessions_last_seen', 'last_seen_at'),
)
id: Mapped[str] = mapped_column(String(64), primary_key=True)
host_id: Mapped[str] = mapped_column(String, nullable=False, index=True)
host_name: Mapped[str] = mapped_column(String, nullable=False)
host_ip: Mapped[str] = mapped_column(String, nullable=False)
user_id: Mapped[str] = mapped_column(String, nullable=True)
username: Mapped[str] = mapped_column(String, nullable=True)
id: Mapped[str] = mapped_column(String(50), primary_key=True)
host_id: Mapped[str] = mapped_column(String(50), nullable=False, index=True)
host_name: Mapped[str] = mapped_column(String(255), nullable=False)
host_ip: Mapped[str] = mapped_column(String(50), nullable=False)
user_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
username: Mapped[str] = mapped_column(String(50), nullable=True)
# Token hash for session authentication (never store plain token)
token_hash: Mapped[str] = mapped_column(String(128), nullable=False)

View File

@ -27,3 +27,6 @@ asyncssh>=2.14.0
slowapi>=0.1.9
cachetools>=5.3.0
jinja2>=3.1.0
# MySQL support
aiomysql>=0.2.0
cryptography>=42.0.0

View File

@ -74,6 +74,7 @@ async def setup_admin(
detail="Le setup a déjà été effectué. Utilisez /login pour vous connecter."
)
try:
# Hasher le mot de passe
hashed_password = auth_service.hash_password(user_data.password)
@ -86,6 +87,13 @@ async def setup_admin(
role="admin"
)
await db_session.commit()
except Exception as e:
import traceback
traceback.print_exc()
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Erreur lors de la création du compte: {str(e)}"
)
return {
"message": "Compte administrateur créé avec succès",

View File

@ -0,0 +1,18 @@
"""Extract the Mermaid ERD block from docs/database_schema.md into docs/erd.mmd.

Reads the generated schema documentation, pulls out the first fenced
```mermaid code block, and writes its contents to a standalone .mmd file
(usable by mermaid-cli and IDE previewers).

Exits with a non-zero status when no mermaid fence is found, so CI or
automation invoking this script can detect the failure.
"""
import re
import os
import sys

# docs/ lives two levels above this script (scripts/<sub>/ -> project root -> docs/)
docs_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'docs'))
schema_md_path = os.path.join(docs_dir, 'database_schema.md')
mmd_path = os.path.join(docs_dir, 'erd.mmd')

with open(schema_md_path, 'r', encoding='utf-8') as f:
    content = f.read()

# Grab the first fenced ```mermaid block; DOTALL lets '.' span newlines,
# and the non-greedy '.*?' stops at the first closing fence.
match = re.search(r'```mermaid\n(.*?)```', content, re.DOTALL)
if match:
    mermaid_code = match.group(1).strip()
    with open(mmd_path, 'w', encoding='utf-8') as f:
        f.write(mermaid_code)
    print(f"Mermaid code extracted to {mmd_path}")
else:
    print("Could not find mermaid block in database_schema.md")
    sys.exit(1)  # fail loudly instead of silently exiting 0

View File

@ -0,0 +1,114 @@
import os
import sys
# Ensure the parent directory is in the python path to load app.models
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
if parent_dir not in sys.path:
    sys.path.insert(0, parent_dir)
from app.models import Base
from sqlalchemy import MetaData
# Single source of truth: every model imported via app.models registers its
# table on Base.metadata, which the generators below introspect.
metadata: MetaData = Base.metadata
def get_mermaid_type(column_type):
    """Return a Mermaid-ERD-safe type token, e.g. 'VARCHAR(50)' -> 'VARCHAR'.

    Drops any parenthesized length/precision suffix, upper-cases the name,
    and replaces spaces with underscores (Mermaid attribute types must be a
    single token).
    """
    type_name = str(column_type).upper()
    paren_idx = type_name.find('(')
    if paren_idx != -1:
        type_name = type_name[:paren_idx]
    return type_name.replace(' ', '_')
def generate_mermaid_erd(metadata: MetaData):
    """Render every table in *metadata* as a Mermaid `erDiagram` string.

    For each table: one attribute line per column (type, name, key
    constraints, and a ``"nullable"`` comment when applicable), followed by
    one ``||--o{`` relationship line per foreign key (one parent, many
    children).
    """
    lines = ["erDiagram"]
    # Render tables and columns
    for table_name, table in sorted(metadata.tables.items()):
        lines.append(f"    {table_name} {{")
        for column in table.columns:
            col_type = get_mermaid_type(column.type)
            keys = []
            if column.primary_key:
                keys.append("PK")
            if column.foreign_keys:
                keys.append("FK")
            parts = [col_type, column.name]
            if keys:
                # Mermaid requires multiple key constraints to be
                # comma-separated ("PK, FK"); a space-joined "PK FK" is
                # rejected by the parser.
                parts.append(", ".join(keys))
            if column.nullable:
                # Attribute comment must be the last, quoted token.
                parts.append('"nullable"')
            # Joining only the populated parts avoids trailing/double spaces
            # when a column has no constraints.
            lines.append("        " + " ".join(parts))
        lines.append("    }")
    # Render relationships
    for table_name, table in sorted(metadata.tables.items()):
        for column in table.columns:
            for fk in column.foreign_keys:
                target_table = fk.column.table.name
                # Usually a 1-to-many from target to source in a relational schema
                lines.append(f"    {target_table} ||--o{{ {table_name} : \"{column.name}\"")
    return "\n".join(lines)
def generate_markdown_docs(metadata: MetaData):
    """Build the full French markdown documentation for the database schema.

    Output contains: a header, the embedded Mermaid ERD (section 1), and a
    per-table data dictionary (section 2) listing each column's type, keys,
    nullability and default value.
    """
    lines = ["# Documentation du Modèle de Données (Base de Données)\n"]
    lines.append("Cette documentation a été générée automatiquement à partir des modèles SQLAlchemy.\n")
    lines.append("## 1. Diagramme Entité-Association (ERD)\n")
    lines.append("Le schéma ci-dessous montre l'architecture des tables et leurs relations :\n")
    lines.append("```mermaid")
    lines.append(generate_mermaid_erd(metadata))
    lines.append("```\n")
    lines.append("## 2. Dictionnaire de Données (Tables)\n")
    for table_name, table in sorted(metadata.tables.items()):
        lines.append(f"### Table : `{table_name}`")
        if table.comment:
            lines.append(f"> {table.comment}\n")
        else:
            lines.append("\n")
        lines.append("| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |")
        lines.append("|---------|------|--------------|---------------|----------|--------|")
        for column in table.columns:
            col_type = str(column.type)
            pk = "✅ Oui" if column.primary_key else "-"
            fk_list = list(column.foreign_keys)
            if fk_list:
                # Document only the first FK target; multi-FK columns are rare.
                fk_target = fk_list[0].column.table.name + "." + fk_list[0].column.name
                fk = f"🔗 `{fk_target}`"
            else:
                fk = "-"
            nullable = "Oui" if column.nullable else "**Non**"
            # Resolve the default value: client-side defaults (Column.default)
            # take precedence; server-side defaults (e.g. func.now()) are
            # checked independently, since server_default is set while
            # Column.default stays None for columns like created_at.
            default_val = "-"
            if column.default is not None and column.default.is_scalar:
                default_val = f"`{column.default.arg}`"
            elif column.default is not None and column.default.is_callable:
                default_val = "*(auto generate)*"
            elif column.server_default is not None:
                default_val = f"`{column.server_default.arg}`"
            lines.append(f"| `{column.name}` | `{col_type}` | {pk} | {fk} | {nullable} | {default_val} |")
        lines.append("\n")
    return "\n".join(lines)
if __name__ == "__main__":
    # Generate the markdown documentation from the live SQLAlchemy metadata.
    docs_content = generate_markdown_docs(metadata)
    # Write it to <project_root>/docs/database_schema.md, creating docs/ if needed.
    docs_dir = os.path.join(parent_dir, 'docs')
    os.makedirs(docs_dir, exist_ok=True)
    output_path = os.path.join(docs_dir, 'database_schema.md')
    with open(output_path, "w", encoding="utf-8") as f:
        f.write(docs_content)
    print(f"La documentation a été générée avec succès : {output_path}")

Binary file not shown.

View File

@ -0,0 +1,716 @@
# Documentation du Modèle de Données (Base de Données)
Cette documentation a été générée automatiquement à partir des modèles SQLAlchemy.
## 1. Diagramme Entité-Association (ERD)
Le schéma ci-dessous montre l'architecture des tables et leurs relations :
```mermaid
erDiagram
alerts {
INTEGER id PK
INTEGER user_id FK "nullable"
VARCHAR category
VARCHAR level "nullable"
VARCHAR title "nullable"
TEXT message
VARCHAR source "nullable"
JSON details "nullable"
DATETIME read_at "nullable"
DATETIME created_at
}
app_settings {
VARCHAR key PK
TEXT value "nullable"
DATETIME created_at
DATETIME updated_at
}
bootstrap_status {
INTEGER id PK
VARCHAR host_id FK
VARCHAR status
VARCHAR automation_user "nullable"
DATETIME last_attempt "nullable"
TEXT error_message "nullable"
DATETIME created_at
}
container_customizations {
INTEGER id PK
INTEGER user_id FK "nullable"
VARCHAR host_id FK
VARCHAR container_id
VARCHAR icon_key "nullable"
VARCHAR icon_color "nullable"
VARCHAR bg_color "nullable"
DATETIME created_at
DATETIME updated_at
}
docker_alerts {
INTEGER id PK
VARCHAR host_id FK
VARCHAR container_name
VARCHAR severity
VARCHAR state
TEXT message "nullable"
DATETIME opened_at
DATETIME closed_at "nullable"
DATETIME acknowledged_at "nullable"
VARCHAR acknowledged_by "nullable"
DATETIME last_notified_at "nullable"
}
docker_containers {
INTEGER id PK
VARCHAR host_id FK
VARCHAR container_id
VARCHAR name
VARCHAR image "nullable"
VARCHAR state
VARCHAR status "nullable"
VARCHAR health "nullable"
DATETIME created_at "nullable"
JSON ports "nullable"
JSON labels "nullable"
VARCHAR compose_project "nullable"
DATETIME last_update_at
}
docker_images {
INTEGER id PK
VARCHAR host_id FK
VARCHAR image_id
JSON repo_tags "nullable"
BIGINT size "nullable"
DATETIME created "nullable"
DATETIME last_update_at
}
docker_volumes {
INTEGER id PK
VARCHAR host_id FK
VARCHAR name
VARCHAR driver "nullable"
TEXT mountpoint "nullable"
VARCHAR scope "nullable"
DATETIME last_update_at
}
favorite_containers {
INTEGER id PK
INTEGER user_id FK "nullable"
INTEGER docker_container_id FK
INTEGER group_id FK "nullable"
DATETIME created_at
}
favorite_groups {
INTEGER id PK
INTEGER user_id FK "nullable"
VARCHAR name
INTEGER sort_order
VARCHAR color "nullable"
VARCHAR icon_key "nullable"
DATETIME created_at
DATETIME updated_at
}
host_metrics {
INTEGER id PK
VARCHAR host_id FK
VARCHAR metric_type
INTEGER cpu_count "nullable"
VARCHAR cpu_model "nullable"
INTEGER cpu_cores "nullable"
INTEGER cpu_threads "nullable"
INTEGER cpu_threads_per_core "nullable"
INTEGER cpu_sockets "nullable"
FLOAT cpu_mhz "nullable"
FLOAT cpu_max_mhz "nullable"
FLOAT cpu_min_mhz "nullable"
FLOAT cpu_load_1m "nullable"
FLOAT cpu_load_5m "nullable"
FLOAT cpu_load_15m "nullable"
FLOAT cpu_usage_percent "nullable"
FLOAT cpu_temperature "nullable"
INTEGER memory_total_mb "nullable"
INTEGER memory_used_mb "nullable"
INTEGER memory_free_mb "nullable"
FLOAT memory_usage_percent "nullable"
INTEGER swap_total_mb "nullable"
INTEGER swap_used_mb "nullable"
FLOAT swap_usage_percent "nullable"
JSON disk_info "nullable"
JSON disk_devices
FLOAT disk_root_total_gb "nullable"
FLOAT disk_root_used_gb "nullable"
FLOAT disk_root_usage_percent "nullable"
JSON lvm_info "nullable"
JSON zfs_info "nullable"
JSON storage_details "nullable"
VARCHAR os_name "nullable"
VARCHAR os_version "nullable"
VARCHAR kernel_version "nullable"
VARCHAR hostname "nullable"
INTEGER uptime_seconds "nullable"
VARCHAR uptime_human "nullable"
JSON network_info "nullable"
JSON raw_data "nullable"
VARCHAR collection_source "nullable"
INTEGER collection_duration_ms "nullable"
TEXT error_message "nullable"
DATETIME collected_at
DATETIME created_at
}
hosts {
VARCHAR id PK
VARCHAR name
VARCHAR ip_address
VARCHAR status
VARCHAR ansible_group "nullable"
DATETIME last_seen "nullable"
BOOLEAN reachable
DATETIME created_at
DATETIME updated_at
DATETIME deleted_at "nullable"
BOOLEAN docker_enabled
VARCHAR docker_version "nullable"
VARCHAR docker_status "nullable"
DATETIME docker_last_collect_at "nullable"
}
logs {
INTEGER id PK
VARCHAR level
VARCHAR source "nullable"
TEXT message
JSON details "nullable"
VARCHAR host_id "nullable"
VARCHAR task_id "nullable"
VARCHAR schedule_id "nullable"
DATETIME created_at
}
playbook_lint_results {
INTEGER id PK
VARCHAR filename
INTEGER quality_score
INTEGER total_issues
INTEGER errors_count
INTEGER warnings_count
INTEGER execution_time_ms
TEXT issues_json "nullable"
TEXT raw_output "nullable"
DATETIME created_at
DATETIME updated_at
}
schedule_runs {
INTEGER id PK
VARCHAR schedule_id FK
VARCHAR task_id FK "nullable"
VARCHAR status
DATETIME started_at
DATETIME completed_at "nullable"
FLOAT duration "nullable"
INTEGER hosts_impacted "nullable"
TEXT error_message "nullable"
TEXT output "nullable"
DATETIME created_at
}
schedules {
VARCHAR id PK
VARCHAR name
TEXT description "nullable"
VARCHAR playbook
VARCHAR target_type "nullable"
VARCHAR target
JSON extra_vars "nullable"
VARCHAR schedule_type
DATETIME schedule_time "nullable"
VARCHAR recurrence_type "nullable"
VARCHAR recurrence_time "nullable"
TEXT recurrence_days "nullable"
VARCHAR cron_expression "nullable"
VARCHAR timezone "nullable"
DATETIME start_at "nullable"
DATETIME end_at "nullable"
BOOLEAN enabled
TEXT tags "nullable"
DATETIME next_run "nullable"
DATETIME last_run "nullable"
VARCHAR last_status "nullable"
INTEGER retry_on_failure "nullable"
INTEGER timeout "nullable"
VARCHAR notification_type "nullable"
INTEGER run_count "nullable"
INTEGER success_count "nullable"
INTEGER failure_count "nullable"
DATETIME created_at
DATETIME updated_at
DATETIME deleted_at "nullable"
}
tasks {
VARCHAR id PK
VARCHAR action
VARCHAR target
VARCHAR status
VARCHAR playbook "nullable"
DATETIME started_at "nullable"
DATETIME completed_at "nullable"
TEXT error_message "nullable"
JSON result_data "nullable"
DATETIME created_at
}
terminal_command_logs {
INTEGER id PK
DATETIME created_at
VARCHAR host_id FK
INTEGER user_id FK "nullable"
VARCHAR terminal_session_id "nullable"
TEXT command
VARCHAR command_hash
VARCHAR source
BOOLEAN is_pinned
BOOLEAN is_blocked
VARCHAR blocked_reason "nullable"
VARCHAR username "nullable"
VARCHAR host_name "nullable"
}
terminal_sessions {
VARCHAR id PK
VARCHAR host_id
VARCHAR host_name
VARCHAR host_ip
INTEGER user_id "nullable"
VARCHAR username "nullable"
VARCHAR token_hash
INTEGER ttyd_port
INTEGER ttyd_pid "nullable"
VARCHAR mode
VARCHAR status
VARCHAR reason_closed "nullable"
DATETIME created_at
DATETIME last_seen_at
DATETIME expires_at
DATETIME closed_at "nullable"
}
users {
INTEGER id PK
VARCHAR username
VARCHAR email "nullable"
VARCHAR hashed_password
VARCHAR role
BOOLEAN is_active
BOOLEAN is_superuser
VARCHAR display_name "nullable"
DATETIME created_at
DATETIME updated_at
DATETIME last_login "nullable"
DATETIME password_changed_at "nullable"
DATETIME deleted_at "nullable"
}
users ||--o{ alerts : "user_id"
hosts ||--o{ bootstrap_status : "host_id"
users ||--o{ container_customizations : "user_id"
hosts ||--o{ container_customizations : "host_id"
hosts ||--o{ docker_alerts : "host_id"
hosts ||--o{ docker_containers : "host_id"
hosts ||--o{ docker_images : "host_id"
hosts ||--o{ docker_volumes : "host_id"
users ||--o{ favorite_containers : "user_id"
docker_containers ||--o{ favorite_containers : "docker_container_id"
favorite_groups ||--o{ favorite_containers : "group_id"
users ||--o{ favorite_groups : "user_id"
hosts ||--o{ host_metrics : "host_id"
schedules ||--o{ schedule_runs : "schedule_id"
tasks ||--o{ schedule_runs : "task_id"
hosts ||--o{ terminal_command_logs : "host_id"
users ||--o{ terminal_command_logs : "user_id"
```
## 2. Dictionnaire de Données (Tables)
### Table : `alerts`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `user_id` | `INTEGER` | - | 🔗 `users.id` | Oui | - |
| `category` | `VARCHAR(50)` | - | - | **Non** | - |
| `level` | `VARCHAR(20)` | - | - | Oui | - |
| `title` | `VARCHAR(255)` | - | - | Oui | - |
| `message` | `TEXT` | - | - | **Non** | - |
| `source` | `VARCHAR(50)` | - | - | Oui | - |
| `details` | `JSON` | - | - | Oui | - |
| `read_at` | `DATETIME` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `app_settings`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `key` | `VARCHAR(100)` | ✅ Oui | - | **Non** | - |
| `value` | `TEXT` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
| `updated_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `bootstrap_status`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `host_id` | `VARCHAR(50)` | - | 🔗 `hosts.id` | **Non** | - |
| `status` | `VARCHAR(50)` | - | - | **Non** | - |
| `automation_user` | `VARCHAR(50)` | - | - | Oui | - |
| `last_attempt` | `DATETIME` | - | - | Oui | - |
| `error_message` | `TEXT` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `container_customizations`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `user_id` | `INTEGER` | - | 🔗 `users.id` | Oui | - |
| `host_id` | `VARCHAR(50)` | - | 🔗 `hosts.id` | **Non** | - |
| `container_id` | `VARCHAR(64)` | - | - | **Non** | - |
| `icon_key` | `VARCHAR(100)` | - | - | Oui | - |
| `icon_color` | `VARCHAR(20)` | - | - | Oui | - |
| `bg_color` | `VARCHAR(20)` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
| `updated_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `docker_alerts`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `host_id` | `VARCHAR(50)` | - | 🔗 `hosts.id` | **Non** | - |
| `container_name` | `VARCHAR(255)` | - | - | **Non** | - |
| `severity` | `VARCHAR(20)` | - | - | **Non** | `warning` |
| `state` | `VARCHAR(20)` | - | - | **Non** | `open` |
| `message` | `TEXT` | - | - | Oui | - |
| `opened_at` | `DATETIME` | - | - | **Non** | `now()` |
| `closed_at` | `DATETIME` | - | - | Oui | - |
| `acknowledged_at` | `DATETIME` | - | - | Oui | - |
| `acknowledged_by` | `VARCHAR(100)` | - | - | Oui | - |
| `last_notified_at` | `DATETIME` | - | - | Oui | - |
### Table : `docker_containers`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `host_id` | `VARCHAR(50)` | - | 🔗 `hosts.id` | **Non** | - |
| `container_id` | `VARCHAR(64)` | - | - | **Non** | - |
| `name` | `VARCHAR(255)` | - | - | **Non** | - |
| `image` | `VARCHAR(255)` | - | - | Oui | - |
| `state` | `VARCHAR(20)` | - | - | **Non** | `unknown` |
| `status` | `VARCHAR(255)` | - | - | Oui | - |
| `health` | `VARCHAR(20)` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | Oui | - |
| `ports` | `JSON` | - | - | Oui | - |
| `labels` | `JSON` | - | - | Oui | - |
| `compose_project` | `VARCHAR(255)` | - | - | Oui | - |
| `last_update_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `docker_images`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `host_id` | `VARCHAR(50)` | - | 🔗 `hosts.id` | **Non** | - |
| `image_id` | `VARCHAR(64)` | - | - | **Non** | - |
| `repo_tags` | `JSON` | - | - | Oui | - |
| `size` | `BIGINT` | - | - | Oui | - |
| `created` | `DATETIME` | - | - | Oui | - |
| `last_update_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `docker_volumes`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `host_id` | `VARCHAR(50)` | - | 🔗 `hosts.id` | **Non** | - |
| `name` | `VARCHAR(255)` | - | - | **Non** | - |
| `driver` | `VARCHAR(50)` | - | - | Oui | - |
| `mountpoint` | `TEXT` | - | - | Oui | - |
| `scope` | `VARCHAR(20)` | - | - | Oui | - |
| `last_update_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `favorite_containers`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `user_id` | `INTEGER` | - | 🔗 `users.id` | Oui | - |
| `docker_container_id` | `INTEGER` | - | 🔗 `docker_containers.id` | **Non** | - |
| `group_id` | `INTEGER` | - | 🔗 `favorite_groups.id` | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `favorite_groups`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `user_id` | `INTEGER` | - | 🔗 `users.id` | Oui | - |
| `name` | `VARCHAR(100)` | - | - | **Non** | - |
| `sort_order` | `INTEGER` | - | - | **Non** | `0` |
| `color` | `VARCHAR(20)` | - | - | Oui | - |
| `icon_key` | `VARCHAR(100)` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
| `updated_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `host_metrics`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `host_id` | `VARCHAR(50)` | - | 🔗 `hosts.id` | **Non** | - |
| `metric_type` | `VARCHAR(50)` | - | - | **Non** | - |
| `cpu_count` | `INTEGER` | - | - | Oui | - |
| `cpu_model` | `VARCHAR(200)` | - | - | Oui | - |
| `cpu_cores` | `INTEGER` | - | - | Oui | - |
| `cpu_threads` | `INTEGER` | - | - | Oui | - |
| `cpu_threads_per_core` | `INTEGER` | - | - | Oui | - |
| `cpu_sockets` | `INTEGER` | - | - | Oui | - |
| `cpu_mhz` | `FLOAT` | - | - | Oui | - |
| `cpu_max_mhz` | `FLOAT` | - | - | Oui | - |
| `cpu_min_mhz` | `FLOAT` | - | - | Oui | - |
| `cpu_load_1m` | `FLOAT` | - | - | Oui | - |
| `cpu_load_5m` | `FLOAT` | - | - | Oui | - |
| `cpu_load_15m` | `FLOAT` | - | - | Oui | - |
| `cpu_usage_percent` | `FLOAT` | - | - | Oui | - |
| `cpu_temperature` | `FLOAT` | - | - | Oui | - |
| `memory_total_mb` | `INTEGER` | - | - | Oui | - |
| `memory_used_mb` | `INTEGER` | - | - | Oui | - |
| `memory_free_mb` | `INTEGER` | - | - | Oui | - |
| `memory_usage_percent` | `FLOAT` | - | - | Oui | - |
| `swap_total_mb` | `INTEGER` | - | - | Oui | - |
| `swap_used_mb` | `INTEGER` | - | - | Oui | - |
| `swap_usage_percent` | `FLOAT` | - | - | Oui | - |
| `disk_info` | `JSON` | - | - | Oui | - |
| `disk_devices` | `JSON` | - | - | **Non** | - |
| `disk_root_total_gb` | `FLOAT` | - | - | Oui | - |
| `disk_root_used_gb` | `FLOAT` | - | - | Oui | - |
| `disk_root_usage_percent` | `FLOAT` | - | - | Oui | - |
| `lvm_info` | `JSON` | - | - | Oui | - |
| `zfs_info` | `JSON` | - | - | Oui | - |
| `storage_details` | `JSON` | - | - | Oui | - |
| `os_name` | `VARCHAR(100)` | - | - | Oui | - |
| `os_version` | `VARCHAR(100)` | - | - | Oui | - |
| `kernel_version` | `VARCHAR(100)` | - | - | Oui | - |
| `hostname` | `VARCHAR(200)` | - | - | Oui | - |
| `uptime_seconds` | `INTEGER` | - | - | Oui | - |
| `uptime_human` | `VARCHAR(100)` | - | - | Oui | - |
| `network_info` | `JSON` | - | - | Oui | - |
| `raw_data` | `JSON` | - | - | Oui | - |
| `collection_source` | `VARCHAR(100)` | - | - | Oui | - |
| `collection_duration_ms` | `INTEGER` | - | - | Oui | - |
| `error_message` | `TEXT` | - | - | Oui | - |
| `collected_at` | `DATETIME` | - | - | **Non** | `now()` |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `hosts`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `VARCHAR(50)` | ✅ Oui | - | **Non** | - |
| `name` | `VARCHAR(255)` | - | - | **Non** | - |
| `ip_address` | `VARCHAR(50)` | - | - | **Non** | - |
| `status` | `VARCHAR(50)` | - | - | **Non** | `'unknown'` |
| `ansible_group` | `VARCHAR(50)` | - | - | Oui | - |
| `last_seen` | `DATETIME` | - | - | Oui | - |
| `reachable` | `BOOLEAN` | - | - | **Non** | `0` |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
| `updated_at` | `DATETIME` | - | - | **Non** | `now()` |
| `deleted_at` | `DATETIME` | - | - | Oui | - |
| `docker_enabled` | `BOOLEAN` | - | - | **Non** | `0` |
| `docker_version` | `VARCHAR(50)` | - | - | Oui | - |
| `docker_status` | `VARCHAR(20)` | - | - | Oui | - |
| `docker_last_collect_at` | `DATETIME` | - | - | Oui | - |
### Table : `logs`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `level` | `VARCHAR(50)` | - | - | **Non** | - |
| `source` | `VARCHAR(255)` | - | - | Oui | - |
| `message` | `TEXT` | - | - | **Non** | - |
| `details` | `JSON` | - | - | Oui | - |
| `host_id` | `VARCHAR(50)` | - | - | Oui | - |
| `task_id` | `VARCHAR(50)` | - | - | Oui | - |
| `schedule_id` | `VARCHAR(50)` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `playbook_lint_results`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `filename` | `VARCHAR(255)` | - | - | **Non** | - |
| `quality_score` | `INTEGER` | - | - | **Non** | `100` |
| `total_issues` | `INTEGER` | - | - | **Non** | `0` |
| `errors_count` | `INTEGER` | - | - | **Non** | `0` |
| `warnings_count` | `INTEGER` | - | - | **Non** | `0` |
| `execution_time_ms` | `INTEGER` | - | - | **Non** | `0` |
| `issues_json` | `TEXT` | - | - | Oui | - |
| `raw_output` | `TEXT` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | *(auto-généré)* |
| `updated_at` | `DATETIME` | - | - | **Non** | *(auto-généré)* |
### Table : `schedule_runs`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `schedule_id` | `VARCHAR(50)` | - | 🔗 `schedules.id` | **Non** | - |
| `task_id` | `VARCHAR(50)` | - | 🔗 `tasks.id` | Oui | - |
| `status` | `VARCHAR(50)` | - | - | **Non** | - |
| `started_at` | `DATETIME` | - | - | **Non** | - |
| `completed_at` | `DATETIME` | - | - | Oui | - |
| `duration` | `FLOAT` | - | - | Oui | - |
| `hosts_impacted` | `INTEGER` | - | - | Oui | `0` |
| `error_message` | `TEXT` | - | - | Oui | - |
| `output` | `TEXT` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `schedules`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `VARCHAR(50)` | ✅ Oui | - | **Non** | - |
| `name` | `VARCHAR(255)` | - | - | **Non** | - |
| `description` | `TEXT` | - | - | Oui | - |
| `playbook` | `VARCHAR(255)` | - | - | **Non** | - |
| `target_type` | `VARCHAR(50)` | - | - | Oui | `group` |
| `target` | `VARCHAR(255)` | - | - | **Non** | - |
| `extra_vars` | `JSON` | - | - | Oui | - |
| `schedule_type` | `VARCHAR(50)` | - | - | **Non** | - |
| `schedule_time` | `DATETIME` | - | - | Oui | - |
| `recurrence_type` | `VARCHAR(50)` | - | - | Oui | - |
| `recurrence_time` | `VARCHAR(50)` | - | - | Oui | - |
| `recurrence_days` | `TEXT` | - | - | Oui | - |
| `cron_expression` | `VARCHAR(100)` | - | - | Oui | - |
| `timezone` | `VARCHAR(100)` | - | - | Oui | `America/Montreal` |
| `start_at` | `DATETIME` | - | - | Oui | - |
| `end_at` | `DATETIME` | - | - | Oui | - |
| `enabled` | `BOOLEAN` | - | - | **Non** | `1` |
| `tags` | `TEXT` | - | - | Oui | - |
| `next_run` | `DATETIME` | - | - | Oui | - |
| `last_run` | `DATETIME` | - | - | Oui | - |
| `last_status` | `VARCHAR(50)` | - | - | Oui | `never` |
| `retry_on_failure` | `INTEGER` | - | - | Oui | `0` |
| `timeout` | `INTEGER` | - | - | Oui | `3600` |
| `notification_type` | `VARCHAR(50)` | - | - | Oui | `all` |
| `run_count` | `INTEGER` | - | - | Oui | `0` |
| `success_count` | `INTEGER` | - | - | Oui | `0` |
| `failure_count` | `INTEGER` | - | - | Oui | `0` |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
| `updated_at` | `DATETIME` | - | - | **Non** | `now()` |
| `deleted_at` | `DATETIME` | - | - | Oui | - |
### Table : `tasks`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `VARCHAR(50)` | ✅ Oui | - | **Non** | - |
| `action` | `VARCHAR(100)` | - | - | **Non** | - |
| `target` | `VARCHAR(255)` | - | - | **Non** | - |
| `status` | `VARCHAR(50)` | - | - | **Non** | `'pending'` |
| `playbook` | `VARCHAR(255)` | - | - | Oui | - |
| `started_at` | `DATETIME` | - | - | Oui | - |
| `completed_at` | `DATETIME` | - | - | Oui | - |
| `error_message` | `TEXT` | - | - | Oui | - |
| `result_data` | `JSON` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
### Table : `terminal_command_logs`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
| `host_id` | `VARCHAR(50)` | - | 🔗 `hosts.id` | **Non** | - |
| `user_id` | `INTEGER` | - | 🔗 `users.id` | Oui | - |
| `terminal_session_id` | `VARCHAR(64)` | - | - | Oui | - |
| `command` | `TEXT` | - | - | **Non** | - |
| `command_hash` | `VARCHAR(64)` | - | - | **Non** | - |
| `source` | `VARCHAR(20)` | - | - | **Non** | `'terminal'` |
| `is_pinned` | `BOOLEAN` | - | - | **Non** | `0` |
| `is_blocked` | `BOOLEAN` | - | - | **Non** | `0` |
| `blocked_reason` | `VARCHAR(255)` | - | - | Oui | - |
| `username` | `VARCHAR(100)` | - | - | Oui | - |
| `host_name` | `VARCHAR(100)` | - | - | Oui | - |
### Table : `terminal_sessions`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `VARCHAR(50)` | ✅ Oui | - | **Non** | - |
| `host_id` | `VARCHAR(50)` | - | - | **Non** | - |
| `host_name` | `VARCHAR(255)` | - | - | **Non** | - |
| `host_ip` | `VARCHAR(50)` | - | - | **Non** | - |
| `user_id` | `INTEGER` | - | - | Oui | - |
| `username` | `VARCHAR(50)` | - | - | Oui | - |
| `token_hash` | `VARCHAR(128)` | - | - | **Non** | - |
| `ttyd_port` | `INTEGER` | - | - | **Non** | - |
| `ttyd_pid` | `INTEGER` | - | - | Oui | - |
| `mode` | `VARCHAR(20)` | - | - | **Non** | `'embedded'` |
| `status` | `VARCHAR(20)` | - | - | **Non** | `'active'` |
| `reason_closed` | `VARCHAR(30)` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
| `last_seen_at` | `DATETIME` | - | - | **Non** | `now()` |
| `expires_at` | `DATETIME` | - | - | **Non** | - |
| `closed_at` | `DATETIME` | - | - | Oui | - |
### Table : `users`
| Colonne | Type | Clé Primaire | Clé Étrangère | Nullable | Défaut |
|---------|------|--------------|---------------|----------|--------|
| `id` | `INTEGER` | ✅ Oui | - | **Non** | - |
| `username` | `VARCHAR(50)` | - | - | **Non** | - |
| `email` | `VARCHAR(255)` | - | - | Oui | - |
| `hashed_password` | `VARCHAR(255)` | - | - | **Non** | - |
| `role` | `VARCHAR(20)` | - | - | **Non** | `'admin'` |
| `is_active` | `BOOLEAN` | - | - | **Non** | `1` |
| `is_superuser` | `BOOLEAN` | - | - | **Non** | `0` |
| `display_name` | `VARCHAR(100)` | - | - | Oui | - |
| `created_at` | `DATETIME` | - | - | **Non** | `now()` |
| `updated_at` | `DATETIME` | - | - | **Non** | `now()` |
| `last_login` | `DATETIME` | - | - | Oui | - |
| `password_changed_at` | `DATETIME` | - | - | Oui | - |
| `deleted_at` | `DATETIME` | - | - | Oui | - |

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 676 KiB

View File

@ -0,0 +1,655 @@
import React, { useState, useMemo } from "react";
const SCHEMA = {
alerts: {
group: "Notifications",
description: "Alertes système générées pour les utilisateurs",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "user_id", type: "INTEGER", pk: false, fk: "users.id", nullable: true, default: null },
{ name: "category", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "level", type: "VARCHAR(20)", pk: false, fk: null, nullable: true, default: null },
{ name: "title", type: "VARCHAR(255)", pk: false, fk: null, nullable: true, default: null },
{ name: "message", type: "TEXT", pk: false, fk: null, nullable: false, default: null },
{ name: "source", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "details", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "read_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
app_settings: {
group: "Système",
description: "Paramètres globaux de l'application",
columns: [
{ name: "key", type: "VARCHAR(100)", pk: true, fk: null, nullable: false, default: null },
{ name: "value", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "updated_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
bootstrap_status: {
group: "Hôtes",
description: "État du bootstrapping Ansible par hôte",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: "hosts.id", nullable: false, default: null },
{ name: "status", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "automation_user", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "last_attempt", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "error_message", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
container_customizations: {
group: "Docker",
description: "Personnalisations visuelles de conteneurs par utilisateur",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "user_id", type: "INTEGER", pk: false, fk: "users.id", nullable: true, default: null },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: "hosts.id", nullable: false, default: null },
{ name: "container_id", type: "VARCHAR(64)", pk: false, fk: null, nullable: false, default: null },
{ name: "icon_key", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "icon_color", type: "VARCHAR(20)", pk: false, fk: null, nullable: true, default: null },
{ name: "bg_color", type: "VARCHAR(20)", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "updated_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
docker_alerts: {
group: "Docker",
description: "Alertes liées aux conteneurs Docker",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: "hosts.id", nullable: false, default: null },
{ name: "container_name", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "severity", type: "VARCHAR(20)", pk: false, fk: null, nullable: false, default: "warning" },
{ name: "state", type: "VARCHAR(20)", pk: false, fk: null, nullable: false, default: "open" },
{ name: "message", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "opened_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "closed_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "acknowledged_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "acknowledged_by", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "last_notified_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
]
},
docker_containers: {
group: "Docker",
description: "Inventaire des conteneurs Docker par hôte",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: "hosts.id", nullable: false, default: null },
{ name: "container_id", type: "VARCHAR(64)", pk: false, fk: null, nullable: false, default: null },
{ name: "name", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "image", type: "VARCHAR(255)", pk: false, fk: null, nullable: true, default: null },
{ name: "state", type: "VARCHAR(20)", pk: false, fk: null, nullable: false, default: "unknown" },
{ name: "status", type: "VARCHAR(255)", pk: false, fk: null, nullable: true, default: null },
{ name: "health", type: "VARCHAR(20)", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "ports", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "labels", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "compose_project", type: "VARCHAR(255)", pk: false, fk: null, nullable: true, default: null },
{ name: "last_update_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
docker_images: {
group: "Docker",
description: "Images Docker disponibles sur les hôtes",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: "hosts.id", nullable: false, default: null },
{ name: "image_id", type: "VARCHAR(64)", pk: false, fk: null, nullable: false, default: null },
{ name: "repo_tags", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "size", type: "BIGINT", pk: false, fk: null, nullable: true, default: null },
{ name: "created", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "last_update_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
docker_volumes: {
group: "Docker",
description: "Volumes Docker par hôte",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: "hosts.id", nullable: false, default: null },
{ name: "name", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "driver", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "mountpoint", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "scope", type: "VARCHAR(20)", pk: false, fk: null, nullable: true, default: null },
{ name: "last_update_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
favorite_containers: {
group: "Utilisateurs",
description: "Conteneurs favoris par utilisateur et groupe",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "user_id", type: "INTEGER", pk: false, fk: "users.id", nullable: true, default: null },
{ name: "docker_container_id", type: "INTEGER", pk: false, fk: "docker_containers.id", nullable: false, default: null },
{ name: "group_id", type: "INTEGER", pk: false, fk: "favorite_groups.id", nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
favorite_groups: {
group: "Utilisateurs",
description: "Groupes de favoris définis par l'utilisateur",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "user_id", type: "INTEGER", pk: false, fk: "users.id", nullable: true, default: null },
{ name: "name", type: "VARCHAR(100)", pk: false, fk: null, nullable: false, default: null },
{ name: "sort_order", type: "INTEGER", pk: false, fk: null, nullable: false, default: "0" },
{ name: "color", type: "VARCHAR(20)", pk: false, fk: null, nullable: true, default: null },
{ name: "icon_key", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "updated_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
host_metrics: {
group: "Hôtes",
description: "Métriques système collectées par hôte (CPU, RAM, disque, réseau…)",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: "hosts.id", nullable: false, default: null },
{ name: "metric_type", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "cpu_count", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_model", type: "VARCHAR(200)", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_cores", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_threads", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_threads_per_core", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_sockets", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_mhz", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_max_mhz", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_min_mhz", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_load_1m", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_load_5m", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_load_15m", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_usage_percent", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "cpu_temperature", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "memory_total_mb", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "memory_used_mb", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "memory_free_mb", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "memory_usage_percent", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "swap_total_mb", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "swap_used_mb", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "swap_usage_percent", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "disk_info", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "disk_devices", type: "JSON", pk: false, fk: null, nullable: false, default: null },
{ name: "disk_root_total_gb", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "disk_root_used_gb", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "disk_root_usage_percent", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "lvm_info", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "zfs_info", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "storage_details", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "os_name", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "os_version", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "kernel_version", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "hostname", type: "VARCHAR(200)", pk: false, fk: null, nullable: true, default: null },
{ name: "uptime_seconds", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "uptime_human", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "network_info", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "raw_data", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "collection_source", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "collection_duration_ms", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "error_message", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "collected_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
hosts: {
group: "Hôtes",
description: "Inventaire des serveurs/hôtes gérés",
columns: [
{ name: "id", type: "VARCHAR(50)", pk: true, fk: null, nullable: false, default: null },
{ name: "name", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "ip_address", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "status", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: "'unknown'" },
{ name: "ansible_group", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "last_seen", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "reachable", type: "BOOLEAN", pk: false, fk: null, nullable: false, default: "0" },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "updated_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "deleted_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "docker_enabled", type: "BOOLEAN", pk: false, fk: null, nullable: false, default: "0" },
{ name: "docker_version", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "docker_status", type: "VARCHAR(20)", pk: false, fk: null, nullable: true, default: null },
{ name: "docker_last_collect_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
]
},
logs: {
group: "Système",
description: "Journal applicatif général",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "level", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "source", type: "VARCHAR(255)", pk: false, fk: null, nullable: true, default: null },
{ name: "message", type: "TEXT", pk: false, fk: null, nullable: false, default: null },
{ name: "details", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "task_id", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "schedule_id", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
playbook_lint_results: {
group: "Ansible",
description: "Résultats d'analyse de qualité des playbooks Ansible",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "filename", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "quality_score", type: "INTEGER", pk: false, fk: null, nullable: false, default: "100" },
{ name: "total_issues", type: "INTEGER", pk: false, fk: null, nullable: false, default: "0" },
{ name: "errors_count", type: "INTEGER", pk: false, fk: null, nullable: false, default: "0" },
{ name: "warnings_count", type: "INTEGER", pk: false, fk: null, nullable: false, default: "0" },
{ name: "execution_time_ms", type: "INTEGER", pk: false, fk: null, nullable: false, default: "0" },
{ name: "issues_json", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "raw_output", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "auto" },
{ name: "updated_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "auto" },
]
},
schedule_runs: {
group: "Ansible",
description: "Historique des exécutions de planifications",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "schedule_id", type: "VARCHAR(50)", pk: false, fk: "schedules.id", nullable: false, default: null },
{ name: "task_id", type: "VARCHAR(50)", pk: false, fk: "tasks.id", nullable: true, default: null },
{ name: "status", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "started_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: null },
{ name: "completed_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "duration", type: "FLOAT", pk: false, fk: null, nullable: true, default: null },
{ name: "hosts_impacted", type: "INTEGER", pk: false, fk: null, nullable: true, default: "0" },
{ name: "error_message", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "output", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
schedules: {
group: "Ansible",
description: "Planifications d'exécution de playbooks",
columns: [
{ name: "id", type: "VARCHAR(50)", pk: true, fk: null, nullable: false, default: null },
{ name: "name", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "description", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "playbook", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "target_type", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: "group" },
{ name: "target", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "extra_vars", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "schedule_type", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "schedule_time", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "recurrence_type", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "recurrence_time", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "recurrence_days", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "cron_expression", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "timezone", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: "America/Montreal" },
{ name: "start_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "end_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "enabled", type: "BOOLEAN", pk: false, fk: null, nullable: false, default: "True" },
{ name: "tags", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "next_run", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "last_run", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "last_status", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: "never" },
{ name: "retry_on_failure", type: "INTEGER", pk: false, fk: null, nullable: true, default: "0" },
{ name: "timeout", type: "INTEGER", pk: false, fk: null, nullable: true, default: "3600" },
{ name: "notification_type", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: "all" },
{ name: "run_count", type: "INTEGER", pk: false, fk: null, nullable: true, default: "0" },
{ name: "success_count", type: "INTEGER", pk: false, fk: null, nullable: true, default: "0" },
{ name: "failure_count", type: "INTEGER", pk: false, fk: null, nullable: true, default: "0" },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "updated_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "deleted_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
]
},
tasks: {
group: "Ansible",
description: "Tâches Ansible déclenchées manuellement ou via planification",
columns: [
{ name: "id", type: "VARCHAR(50)", pk: true, fk: null, nullable: false, default: null },
{ name: "action", type: "VARCHAR(100)", pk: false, fk: null, nullable: false, default: null },
{ name: "target", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "status", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: "'pending'" },
{ name: "playbook", type: "VARCHAR(255)", pk: false, fk: null, nullable: true, default: null },
{ name: "started_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "completed_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "error_message", type: "TEXT", pk: false, fk: null, nullable: true, default: null },
{ name: "result_data", type: "JSON", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
]
},
terminal_command_logs: {
group: "Terminal",
description: "Journal des commandes exécutées dans le terminal",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: "hosts.id", nullable: false, default: null },
{ name: "user_id", type: "INTEGER", pk: false, fk: "users.id", nullable: true, default: null },
{ name: "terminal_session_id", type: "VARCHAR(64)", pk: false, fk: null, nullable: true, default: null },
{ name: "command", type: "TEXT", pk: false, fk: null, nullable: false, default: null },
{ name: "command_hash", type: "VARCHAR(64)", pk: false, fk: null, nullable: false, default: null },
{ name: "source", type: "VARCHAR(20)", pk: false, fk: null, nullable: false, default: "'terminal'" },
{ name: "is_pinned", type: "BOOLEAN", pk: false, fk: null, nullable: false, default: "0" },
{ name: "is_blocked", type: "BOOLEAN", pk: false, fk: null, nullable: false, default: "0" },
{ name: "blocked_reason", type: "VARCHAR(255)", pk: false, fk: null, nullable: true, default: null },
{ name: "username", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "host_name", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
]
},
terminal_sessions: {
group: "Terminal",
description: "Sessions de terminal actives ou fermées",
columns: [
{ name: "id", type: "VARCHAR(50)", pk: true, fk: null, nullable: false, default: null },
{ name: "host_id", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "host_name", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "host_ip", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "user_id", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "username", type: "VARCHAR(50)", pk: false, fk: null, nullable: true, default: null },
{ name: "token_hash", type: "VARCHAR(128)", pk: false, fk: null, nullable: false, default: null },
{ name: "ttyd_port", type: "INTEGER", pk: false, fk: null, nullable: false, default: null },
{ name: "ttyd_pid", type: "INTEGER", pk: false, fk: null, nullable: true, default: null },
{ name: "mode", type: "VARCHAR(20)", pk: false, fk: null, nullable: false, default: "'embedded'" },
{ name: "status", type: "VARCHAR(20)", pk: false, fk: null, nullable: false, default: "'active'" },
{ name: "reason_closed", type: "VARCHAR(30)", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "last_seen_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "expires_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: null },
{ name: "closed_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
]
},
users: {
group: "Utilisateurs",
description: "Comptes utilisateurs de l'application",
columns: [
{ name: "id", type: "INTEGER", pk: true, fk: null, nullable: false, default: null },
{ name: "username", type: "VARCHAR(50)", pk: false, fk: null, nullable: false, default: null },
{ name: "email", type: "VARCHAR(255)", pk: false, fk: null, nullable: true, default: null },
{ name: "hashed_password", type: "VARCHAR(255)", pk: false, fk: null, nullable: false, default: null },
{ name: "role", type: "VARCHAR(20)", pk: false, fk: null, nullable: false, default: "'admin'" },
{ name: "is_active", type: "BOOLEAN", pk: false, fk: null, nullable: false, default: "1" },
{ name: "is_superuser", type: "BOOLEAN", pk: false, fk: null, nullable: false, default: "0" },
{ name: "display_name", type: "VARCHAR(100)", pk: false, fk: null, nullable: true, default: null },
{ name: "created_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "updated_at", type: "DATETIME", pk: false, fk: null, nullable: false, default: "now()" },
{ name: "last_login", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "password_changed_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
{ name: "deleted_at", type: "DATETIME", pk: false, fk: null, nullable: true, default: null },
]
},
};
// Colour palette per functional group of tables: bg (panel background),
// accent (highlights/borders), badge + badgeText (group chip colours).
// Keys must match the `group` values used in SCHEMA.
const GROUP_COLORS = {
"Hôtes": { bg: "#0f172a", accent: "#38bdf8", badge: "#0369a1", badgeText: "#e0f2fe" },
"Docker": { bg: "#0a1628", accent: "#22d3ee", badge: "#0e7490", badgeText: "#cffafe" },
"Ansible": { bg: "#1a0a2e", accent: "#a78bfa", badge: "#6d28d9", badgeText: "#ede9fe" },
"Utilisateurs": { bg: "#0f1f0f", accent: "#4ade80", badge: "#15803d", badgeText: "#dcfce7" },
"Terminal": { bg: "#1a1400", accent: "#fbbf24", badge: "#b45309", badgeText: "#fef3c7" },
"Notifications": { bg: "#1a0a0a", accent: "#f87171", badge: "#b91c1c", badgeText: "#fee2e2" },
"Système": { bg: "#111827", accent: "#94a3b8", badge: "#475569", badgeText: "#f1f5f9" },
};
// Map an SQL column type label to the hex colour used to render it:
// string-ish types cyan, numeric types green, DATETIME amber, BOOLEAN pink,
// JSON violet; anything unrecognised falls back to neutral grey.
const TYPE_COLOR = (t) => {
  // VARCHAR carries a length suffix (e.g. "VARCHAR(50)"), so prefix-match it.
  if (t.startsWith("VARCHAR")) return "#7dd3fc";
  switch (t) {
    case "TEXT":
      return "#7dd3fc";
    case "INTEGER":
    case "BIGINT":
    case "FLOAT":
      return "#86efac";
    case "DATETIME":
      return "#fcd34d";
    case "BOOLEAN":
      return "#f9a8d4";
    case "JSON":
      return "#c4b5fd";
    default:
      return "#e2e8f0";
  }
};
const GROUPS = [...new Set(Object.values(SCHEMA).map(t => t.group))];
/**
 * Interactive database-schema explorer.
 *
 * Two-pane layout driven entirely by the static SCHEMA / GROUP_COLORS
 * constants defined above:
 *  - sidebar: every table, searchable by name/description and filterable
 *    by functional group;
 *  - main pane: the selected table's columns (name, type, flags, default)
 *    with its own live column filter, or a group-overview landing page
 *    when no table is selected.
 */
export default function App() {
// UI state: table search text, active group filter, currently selected
// table name (null = landing page), and the per-table column filter text.
const [search, setSearch] = useState("");
const [activeGroup, setActiveGroup] = useState("Tous");
const [selectedTable, setSelectedTable] = useState(null);
const [colSearch, setColSearch] = useState("");
// All table names, sorted; SCHEMA is static so this is computed once.
const tableNames = useMemo(() => Object.keys(SCHEMA).sort(), []);
// Tables matching both the active group and the search text
// (matched against name or description, case-insensitively).
const filtered = useMemo(() => {
return tableNames.filter(name => {
const matchGroup = activeGroup === "Tous" || SCHEMA[name].group === activeGroup;
const matchSearch = name.toLowerCase().includes(search.toLowerCase()) ||
SCHEMA[name].description.toLowerCase().includes(search.toLowerCase());
return matchGroup && matchSearch;
});
}, [tableNames, search, activeGroup]);
// Derived lookups for the currently selected table (null-safe).
const tableData = selectedTable ? SCHEMA[selectedTable] : null;
const groupColor = tableData ? GROUP_COLORS[tableData.group] : null;
// Columns of the selected table matching the column filter (name or type).
const filteredCols = useMemo(() => {
if (!tableData) return [];
return tableData.columns.filter(c =>
c.name.toLowerCase().includes(colSearch.toLowerCase()) ||
c.type.toLowerCase().includes(colSearch.toLowerCase())
);
}, [tableData, colSearch]);
// Filtered table names regrouped by functional group, preserving GROUPS
// order; groups with no matching table are omitted entirely.
const grouped = useMemo(() => {
const g = {};
GROUPS.forEach(grp => {
const tables = filtered.filter(n => SCHEMA[n].group === grp);
if (tables.length) g[grp] = tables;
});
return g;
}, [filtered]);
return (
<div style={{ display: "flex", height: "100vh", background: "#0d1117", fontFamily: "'JetBrains Mono', 'Fira Code', monospace", color: "#c9d1d9", overflow: "hidden" }}>
<style>{`
@import url('https://fonts.googleapis.com/css2?family=JetBrains+Mono:wght@300;400;600;700&display=swap');
::-webkit-scrollbar { width: 6px; height: 6px; }
::-webkit-scrollbar-track { background: #161b22; }
::-webkit-scrollbar-thumb { background: #30363d; border-radius: 3px; }
::-webkit-scrollbar-thumb:hover { background: #484f58; }
.table-item { transition: background 0.15s, border-color 0.15s; }
.table-item:hover { background: #21262d !important; }
.col-row:hover { background: #1c2128 !important; }
.group-btn { transition: all 0.15s; }
.group-btn:hover { filter: brightness(1.2); }
`}</style>
{/* SIDEBAR */}
<div style={{ width: 280, background: "#161b22", borderRight: "1px solid #21262d", display: "flex", flexDirection: "column", flexShrink: 0 }}>
{/* Header */}
<div style={{ padding: "20px 16px 16px", borderBottom: "1px solid #21262d" }}>
<div style={{ fontSize: 11, color: "#58a6ff", fontWeight: 700, letterSpacing: 3, marginBottom: 6, textTransform: "uppercase" }}>Schema Explorer</div>
<div style={{ fontSize: 12, color: "#484f58" }}>{tableNames.length} tables · SQLAlchemy</div>
</div>
{/* Search */}
<div style={{ padding: "12px 12px 8px" }}>
<div style={{ position: "relative" }}>
{/* NOTE(review): this icon span is empty — a glyph (likely a magnifier emoji) may have been lost; confirm intended character */}
<span style={{ position: "absolute", left: 10, top: "50%", transform: "translateY(-50%)", color: "#484f58", fontSize: 13 }}></span>
<input
value={search}
onChange={e => setSearch(e.target.value)}
placeholder="Rechercher une table…"
style={{ width: "100%", background: "#0d1117", border: "1px solid #30363d", borderRadius: 6, padding: "7px 10px 7px 30px", color: "#c9d1d9", fontSize: 12, outline: "none", boxSizing: "border-box" }}
/>
</div>
</div>
{/* Groups filter */}
<div style={{ padding: "0 12px 10px", display: "flex", flexWrap: "wrap", gap: 5 }}>
{["Tous", ...GROUPS].map(g => {
const isActive = activeGroup === g;
const gc = GROUP_COLORS[g];
return (
<button key={g} className="group-btn" onClick={() => setActiveGroup(g)} style={{
fontSize: 10, padding: "3px 8px", borderRadius: 4, cursor: "pointer", border: "none", fontFamily: "inherit", fontWeight: 600,
background: isActive ? (gc ? gc.badge : "#21262d") : "#0d1117",
color: isActive ? (gc ? gc.badgeText : "#c9d1d9") : "#58616a",
letterSpacing: 0.5
}}>
{g}
</button>
);
})}
</div>
{/* Table list */}
<div style={{ flex: 1, overflowY: "auto", padding: "0 8px 16px" }}>
{Object.keys(grouped).map(grp => {
const gc = GROUP_COLORS[grp];
return (
<div key={grp} style={{ marginBottom: 8 }}>
<div style={{ fontSize: 10, color: gc.accent, fontWeight: 700, letterSpacing: 2, textTransform: "uppercase", padding: "8px 8px 4px" }}>
{grp}
</div>
{grouped[grp].map(name => {
const isSelected = selectedTable === name;
return (
<div key={name} className="table-item" onClick={() => { setSelectedTable(name); setColSearch(""); }} style={{
padding: "8px 10px", borderRadius: 6, cursor: "pointer", marginBottom: 2,
background: isSelected ? "#1f2d3d" : "transparent",
border: isSelected ? `1px solid ${gc.accent}44` : "1px solid transparent",
}}>
<div style={{ display: "flex", alignItems: "center", gap: 7 }}>
<span style={{ color: isSelected ? gc.accent : "#484f58", fontSize: 12 }}></span>
<span style={{ fontSize: 12, color: isSelected ? "#e6edf3" : "#8b949e", fontWeight: isSelected ? 600 : 400 }}>{name}</span>
<span style={{ marginLeft: "auto", fontSize: 10, color: "#484f58" }}>{SCHEMA[name].columns.length}</span>
</div>
</div>
);
})}
</div>
);
})}
{filtered.length === 0 && (
<div style={{ padding: 20, color: "#484f58", fontSize: 12, textAlign: "center" }}>Aucune table trouvée</div>
)}
</div>
</div>
{/* MAIN CONTENT */}
<div style={{ flex: 1, display: "flex", flexDirection: "column", overflow: "hidden" }}>
{selectedTable && tableData ? (
<>
{/* Table header */}
<div style={{ padding: "20px 28px 16px", borderBottom: "1px solid #21262d", background: "#161b22" }}>
<div style={{ display: "flex", alignItems: "center", gap: 12, marginBottom: 6 }}>
<span style={{ fontSize: 22, color: groupColor.accent }}></span>
<h1 style={{ margin: 0, fontSize: 22, fontWeight: 700, color: "#e6edf3", letterSpacing: 1 }}>{selectedTable}</h1>
<span style={{ fontSize: 11, padding: "2px 10px", borderRadius: 4, fontWeight: 700, background: groupColor.badge, color: groupColor.badgeText }}>
{tableData.group}
</span>
</div>
<p style={{ margin: 0, fontSize: 13, color: "#8b949e" }}>{tableData.description}</p>
{/* Stats row */}
<div style={{ display: "flex", gap: 20, marginTop: 12 }}>
{[
{ label: "Colonnes", val: tableData.columns.length },
{ label: "Clés primaires", val: tableData.columns.filter(c => c.pk).length },
{ label: "Clés étrangères", val: tableData.columns.filter(c => c.fk).length },
{ label: "Nullable", val: tableData.columns.filter(c => c.nullable).length },
].map(s => (
<div key={s.label} style={{ background: "#0d1117", border: "1px solid #21262d", borderRadius: 8, padding: "6px 14px", textAlign: "center" }}>
<div style={{ fontSize: 18, fontWeight: 700, color: groupColor.accent }}>{s.val}</div>
<div style={{ fontSize: 10, color: "#484f58", letterSpacing: 1 }}>{s.label}</div>
</div>
))}
</div>
</div>
{/* Column search */}
<div style={{ padding: "12px 28px 0", background: "#0d1117" }}>
<div style={{ position: "relative", maxWidth: 340 }}>
<span style={{ position: "absolute", left: 10, top: "50%", transform: "translateY(-50%)", color: "#484f58", fontSize: 13 }}></span>
<input
value={colSearch}
onChange={e => setColSearch(e.target.value)}
placeholder="Filtrer les colonnes…"
style={{ width: "100%", background: "#161b22", border: "1px solid #30363d", borderRadius: 6, padding: "7px 10px 7px 30px", color: "#c9d1d9", fontSize: 12, outline: "none", boxSizing: "border-box" }}
/>
</div>
</div>
{/* Columns table */}
<div style={{ flex: 1, overflowY: "auto", padding: "12px 28px 28px", background: "#0d1117" }}>
<table style={{ width: "100%", borderCollapse: "collapse", fontSize: 12 }}>
<thead>
<tr style={{ borderBottom: "2px solid #21262d" }}>
{["Colonne", "Type", "Flags", "Défaut"].map(h => (
<th key={h} style={{ textAlign: "left", padding: "8px 12px", color: "#484f58", fontWeight: 700, fontSize: 10, letterSpacing: 2, textTransform: "uppercase" }}>{h}</th>
))}
</tr>
</thead>
<tbody>
{filteredCols.map((col, i) => (
<tr key={col.name} className="col-row" style={{ borderBottom: "1px solid #161b22", background: i % 2 === 0 ? "transparent" : "#0a0f16" }}>
<td style={{ padding: "10px 12px", fontWeight: col.pk ? 700 : 400 }}>
<div style={{ display: "flex", alignItems: "center", gap: 7 }}>
{col.pk && <span title="Clé primaire" style={{ fontSize: 11, color: "#f59e0b" }}>🔑</span>}
{col.fk && !col.pk && <span title={`Clé étrangère → ${col.fk}`} style={{ fontSize: 11 }}>🔗</span>}
{!col.pk && !col.fk && <span style={{ fontSize: 11, color: "#30363d" }}>·</span>}
<span style={{ color: col.pk ? "#fbbf24" : col.fk ? "#60a5fa" : "#e6edf3" }}>{col.name}</span>
</div>
{col.fk && (
<div style={{ fontSize: 10, color: "#484f58", marginLeft: 22, marginTop: 2 }}> {col.fk}</div>
)}
</td>
<td style={{ padding: "10px 12px" }}>
<span style={{ color: TYPE_COLOR(col.type), fontWeight: 600 }}>{col.type}</span>
</td>
<td style={{ padding: "10px 12px" }}>
<div style={{ display: "flex", gap: 5, flexWrap: "wrap" }}>
{!col.nullable && (
<span style={{ fontSize: 10, padding: "1px 7px", borderRadius: 3, background: "#1c2e1c", color: "#4ade80", fontWeight: 600 }}>NOT NULL</span>
)}
{col.nullable && (
<span style={{ fontSize: 10, padding: "1px 7px", borderRadius: 3, background: "#1c1c1c", color: "#484f58" }}>NULL</span>
)}
</div>
</td>
<td style={{ padding: "10px 12px" }}>
{col.default ? (
<code style={{ fontSize: 11, color: "#c4b5fd", background: "#1e1a2e", padding: "2px 7px", borderRadius: 3 }}>{col.default}</code>
) : (
<span style={{ color: "#30363d" }}></span>
)}
</td>
</tr>
))}
</tbody>
</table>
{filteredCols.length === 0 && (
<div style={{ padding: 40, textAlign: "center", color: "#484f58", fontSize: 12 }}>Aucune colonne trouvée</div>
)}
</div>
</>
) : (
/* Welcome / overview */
<div style={{ flex: 1, display: "flex", flexDirection: "column", alignItems: "center", justifyContent: "center", padding: 40, gap: 32 }}>
<div style={{ textAlign: "center" }}>
<div style={{ fontSize: 48, marginBottom: 12 }}></div>
<h2 style={{ margin: 0, fontSize: 24, color: "#e6edf3", fontWeight: 700 }}>Modèle de Données</h2>
<p style={{ color: "#484f58", fontSize: 13, marginTop: 8 }}>Sélectionnez une table dans le panneau de gauche</p>
</div>
{/* One clickable card per group; selecting a card activates the group
filter and opens that group's first table. */}
<div style={{ display: "grid", gridTemplateColumns: "repeat(auto-fit, minmax(160px, 1fr))", gap: 12, maxWidth: 700, width: "100%" }}>
{GROUPS.map(grp => {
const gc = GROUP_COLORS[grp];
const count = tableNames.filter(n => SCHEMA[n].group === grp).length;
const firstTable = tableNames.find(n => SCHEMA[n].group === grp);
return (
<div key={grp} onClick={() => { setActiveGroup(grp); setSelectedTable(firstTable); setColSearch(""); }} style={{
background: "#161b22", border: `1px solid ${gc.accent}33`, borderRadius: 10, padding: "16px 18px", cursor: "pointer",
transition: "all 0.15s"
}}
onMouseEnter={e => e.currentTarget.style.borderColor = gc.accent}
onMouseLeave={e => e.currentTarget.style.borderColor = gc.accent + "33"}
>
<div style={{ fontSize: 20, marginBottom: 8 }}></div>
<div style={{ fontSize: 13, fontWeight: 700, color: gc.accent, marginBottom: 4 }}>{grp}</div>
<div style={{ fontSize: 11, color: "#484f58" }}>{count} table{count > 1 ? "s" : ""}</div>
</div>
);
})}
</div>
</div>
)}
</div>
</div>
);
}

311
docs/database/erd.mmd Normal file
View File

@ -0,0 +1,311 @@
%% Entity-relationship diagram of the application database (Mermaid erDiagram).
%% One block per table: each line is "TYPE column [PK|FK] ["nullable"]".
%% NOTE(review): maintained by hand to mirror the SQLAlchemy models — verify
%% it still matches the models whenever the schema changes.
erDiagram
alerts {
INTEGER id PK
INTEGER user_id FK "nullable"
VARCHAR category
VARCHAR level "nullable"
VARCHAR title "nullable"
TEXT message
VARCHAR source "nullable"
JSON details "nullable"
DATETIME read_at "nullable"
DATETIME created_at
}
app_settings {
VARCHAR key PK
TEXT value "nullable"
DATETIME created_at
DATETIME updated_at
}
bootstrap_status {
INTEGER id PK
VARCHAR host_id FK
VARCHAR status
VARCHAR automation_user "nullable"
DATETIME last_attempt "nullable"
TEXT error_message "nullable"
DATETIME created_at
}
container_customizations {
INTEGER id PK
INTEGER user_id FK "nullable"
VARCHAR host_id FK
VARCHAR container_id
VARCHAR icon_key "nullable"
VARCHAR icon_color "nullable"
VARCHAR bg_color "nullable"
DATETIME created_at
DATETIME updated_at
}
docker_alerts {
INTEGER id PK
VARCHAR host_id FK
VARCHAR container_name
VARCHAR severity
VARCHAR state
TEXT message "nullable"
DATETIME opened_at
DATETIME closed_at "nullable"
DATETIME acknowledged_at "nullable"
VARCHAR acknowledged_by "nullable"
DATETIME last_notified_at "nullable"
}
docker_containers {
INTEGER id PK
VARCHAR host_id FK
VARCHAR container_id
VARCHAR name
VARCHAR image "nullable"
VARCHAR state
VARCHAR status "nullable"
VARCHAR health "nullable"
DATETIME created_at "nullable"
JSON ports "nullable"
JSON labels "nullable"
VARCHAR compose_project "nullable"
DATETIME last_update_at
}
docker_images {
INTEGER id PK
VARCHAR host_id FK
VARCHAR image_id
JSON repo_tags "nullable"
BIGINT size "nullable"
DATETIME created "nullable"
DATETIME last_update_at
}
docker_volumes {
INTEGER id PK
VARCHAR host_id FK
VARCHAR name
VARCHAR driver "nullable"
TEXT mountpoint "nullable"
VARCHAR scope "nullable"
DATETIME last_update_at
}
favorite_containers {
INTEGER id PK
INTEGER user_id FK "nullable"
INTEGER docker_container_id FK
INTEGER group_id FK "nullable"
DATETIME created_at
}
favorite_groups {
INTEGER id PK
INTEGER user_id FK "nullable"
VARCHAR name
INTEGER sort_order
VARCHAR color "nullable"
VARCHAR icon_key "nullable"
DATETIME created_at
DATETIME updated_at
}
host_metrics {
INTEGER id PK
VARCHAR host_id FK
VARCHAR metric_type
INTEGER cpu_count "nullable"
VARCHAR cpu_model "nullable"
INTEGER cpu_cores "nullable"
INTEGER cpu_threads "nullable"
INTEGER cpu_threads_per_core "nullable"
INTEGER cpu_sockets "nullable"
FLOAT cpu_mhz "nullable"
FLOAT cpu_max_mhz "nullable"
FLOAT cpu_min_mhz "nullable"
FLOAT cpu_load_1m "nullable"
FLOAT cpu_load_5m "nullable"
FLOAT cpu_load_15m "nullable"
FLOAT cpu_usage_percent "nullable"
FLOAT cpu_temperature "nullable"
INTEGER memory_total_mb "nullable"
INTEGER memory_used_mb "nullable"
INTEGER memory_free_mb "nullable"
FLOAT memory_usage_percent "nullable"
INTEGER swap_total_mb "nullable"
INTEGER swap_used_mb "nullable"
FLOAT swap_usage_percent "nullable"
JSON disk_info "nullable"
JSON disk_devices
FLOAT disk_root_total_gb "nullable"
FLOAT disk_root_used_gb "nullable"
FLOAT disk_root_usage_percent "nullable"
JSON lvm_info "nullable"
JSON zfs_info "nullable"
JSON storage_details "nullable"
VARCHAR os_name "nullable"
VARCHAR os_version "nullable"
VARCHAR kernel_version "nullable"
VARCHAR hostname "nullable"
INTEGER uptime_seconds "nullable"
VARCHAR uptime_human "nullable"
JSON network_info "nullable"
JSON raw_data "nullable"
VARCHAR collection_source "nullable"
INTEGER collection_duration_ms "nullable"
TEXT error_message "nullable"
DATETIME collected_at
DATETIME created_at
}
hosts {
VARCHAR id PK
VARCHAR name
VARCHAR ip_address
VARCHAR status
VARCHAR ansible_group "nullable"
DATETIME last_seen "nullable"
BOOLEAN reachable
DATETIME created_at
DATETIME updated_at
DATETIME deleted_at "nullable"
BOOLEAN docker_enabled
VARCHAR docker_version "nullable"
VARCHAR docker_status "nullable"
DATETIME docker_last_collect_at "nullable"
}
logs {
INTEGER id PK
VARCHAR level
VARCHAR source "nullable"
TEXT message
JSON details "nullable"
VARCHAR host_id "nullable"
VARCHAR task_id "nullable"
VARCHAR schedule_id "nullable"
DATETIME created_at
}
playbook_lint_results {
INTEGER id PK
VARCHAR filename
INTEGER quality_score
INTEGER total_issues
INTEGER errors_count
INTEGER warnings_count
INTEGER execution_time_ms
TEXT issues_json "nullable"
TEXT raw_output "nullable"
DATETIME created_at
DATETIME updated_at
}
schedule_runs {
INTEGER id PK
VARCHAR schedule_id FK
VARCHAR task_id FK "nullable"
VARCHAR status
DATETIME started_at
DATETIME completed_at "nullable"
FLOAT duration "nullable"
INTEGER hosts_impacted "nullable"
TEXT error_message "nullable"
TEXT output "nullable"
DATETIME created_at
}
schedules {
VARCHAR id PK
VARCHAR name
TEXT description "nullable"
VARCHAR playbook
VARCHAR target_type "nullable"
VARCHAR target
JSON extra_vars "nullable"
VARCHAR schedule_type
DATETIME schedule_time "nullable"
VARCHAR recurrence_type "nullable"
VARCHAR recurrence_time "nullable"
TEXT recurrence_days "nullable"
VARCHAR cron_expression "nullable"
VARCHAR timezone "nullable"
DATETIME start_at "nullable"
DATETIME end_at "nullable"
BOOLEAN enabled
TEXT tags "nullable"
DATETIME next_run "nullable"
DATETIME last_run "nullable"
VARCHAR last_status "nullable"
INTEGER retry_on_failure "nullable"
INTEGER timeout "nullable"
VARCHAR notification_type "nullable"
INTEGER run_count "nullable"
INTEGER success_count "nullable"
INTEGER failure_count "nullable"
DATETIME created_at
DATETIME updated_at
DATETIME deleted_at "nullable"
}
tasks {
VARCHAR id PK
VARCHAR action
VARCHAR target
VARCHAR status
VARCHAR playbook "nullable"
DATETIME started_at "nullable"
DATETIME completed_at "nullable"
TEXT error_message "nullable"
JSON result_data "nullable"
DATETIME created_at
}
terminal_command_logs {
INTEGER id PK
DATETIME created_at
VARCHAR host_id FK
INTEGER user_id FK "nullable"
VARCHAR terminal_session_id "nullable"
TEXT command
VARCHAR command_hash
VARCHAR source
BOOLEAN is_pinned
BOOLEAN is_blocked
VARCHAR blocked_reason "nullable"
VARCHAR username "nullable"
VARCHAR host_name "nullable"
}
terminal_sessions {
VARCHAR id PK
VARCHAR host_id
VARCHAR host_name
VARCHAR host_ip
INTEGER user_id "nullable"
VARCHAR username "nullable"
VARCHAR token_hash
INTEGER ttyd_port
INTEGER ttyd_pid "nullable"
VARCHAR mode
VARCHAR status
VARCHAR reason_closed "nullable"
DATETIME created_at
DATETIME last_seen_at
DATETIME expires_at
DATETIME closed_at "nullable"
}
users {
INTEGER id PK
VARCHAR username
VARCHAR email "nullable"
VARCHAR hashed_password
VARCHAR role
BOOLEAN is_active
BOOLEAN is_superuser
VARCHAR display_name "nullable"
DATETIME created_at
DATETIME updated_at
DATETIME last_login "nullable"
DATETIME password_changed_at "nullable"
DATETIME deleted_at "nullable"
}
%% Relationships: parent ||--o{ child : "foreign-key column on the child".
users ||--o{ alerts : "user_id"
hosts ||--o{ bootstrap_status : "host_id"
users ||--o{ container_customizations : "user_id"
hosts ||--o{ container_customizations : "host_id"
hosts ||--o{ docker_alerts : "host_id"
hosts ||--o{ docker_containers : "host_id"
hosts ||--o{ docker_images : "host_id"
hosts ||--o{ docker_volumes : "host_id"
users ||--o{ favorite_containers : "user_id"
docker_containers ||--o{ favorite_containers : "docker_container_id"
favorite_groups ||--o{ favorite_containers : "group_id"
users ||--o{ favorite_groups : "user_id"
hosts ||--o{ host_metrics : "host_id"
schedules ||--o{ schedule_runs : "schedule_id"
tasks ||--o{ schedule_runs : "task_id"
hosts ||--o{ terminal_command_logs : "host_id"
users ||--o{ terminal_command_logs : "user_id"

BIN
docs/database/erd.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 MiB

51
docs/database/help.md Normal file
View File

@ -0,0 +1,51 @@
# 📊 Explorateur de Schéma Interactif
Ce répertoire contient un outil de documentation interactif permettant de visualiser et de naviguer dans le schéma de la base de données de l'application.
## 🔍 À propos de l'outil
Le fichier `database_schema_explorer.jsx` est une application React autonome qui :
- Liste toutes les tables par catégorie (Docker, Ansible, Système, etc.).
- Affiche les détails complets des colonnes (types, pk, fk, nullable, valeurs par défaut).
- Propose une interface moderne avec recherche en temps réel et filtrage par groupes.
## 🚀 Comment l'exécuter
### Méthode 1 : Zéro-Installation (La plus rapide pour consulter)
Si vous ne souhaitez pas installer de nouvelles dépendances Node.js, ouvrez simplement le fichier suivant dans votre navigateur :
📄 `docs/database/standalone.html`
*Note : Cette méthode utilise des bibliothèques via CDN (React + Babel Standalone) pour transpiler le JSX à la volée directement dans le navigateur. C'est idéal pour une consultation ponctuelle.*
### Méthode 2 : Méthode Professionnelle (Vite)
C'est la méthode recommandée pour un développement fluide avec Hot Module Replacement (HMR).
1. **Installer les dépendances** :
```powershell
npm install react react-dom
npm install -D vite @vitejs/plugin-react
```
2. **Lancer l'explorateur** :
```powershell
npx vite docs/database
```
👉 L'application sera disponible sur `http://localhost:5173`.
## 🛠️ Configuration d'entrée
Un fichier `index.html` est présent dans ce répertoire pour servir de point d'entrée. Il charge dynamiquement le composant React et le monte dans la page.
## 💡 Astuce de Productivité
Vous pouvez ajouter un raccourci dans votre `package.json` pour un accès rapide :
```json
"scripts": {
"docs:db": "vite docs/database"
}
```
Cela vous permettra de lancer l'outil simplement avec : `npm run docs:db`.

22
docs/database/index.html Normal file
View File

@ -0,0 +1,22 @@
<!DOCTYPE html>
<!-- Vite entry point for the database schema explorer (docs/database).
     The bare specifiers ('react', 'react-dom/client') and the .jsx import
     below are resolved and transpiled by Vite; this page is not meant to be
     opened directly in a browser (use standalone.html for that). -->
<html lang="fr">
<head>
    <meta charset="UTF-8">
    <link rel="icon" type="image/svg+xml" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>📊</text></svg>" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Base de données - Explorateur de Schéma</title>
</head>
<body style="margin: 0; background: #0d1117;">
    <div id="root"></div>
    <script type="module">
        import React from 'react';
        import ReactDOM from 'react-dom/client';
        import App from './database_schema_explorer.jsx';

        <!-- (comment translated) Render the main component into #root,
             wrapped in StrictMode to surface unsafe patterns in dev. -->
        // Render the main component
        ReactDOM.createRoot(document.getElementById('root')).render(
            React.createElement(React.StrictMode, null, React.createElement(App))
        );
    </script>
</body>
</html>

View File

@ -0,0 +1,39 @@
<!DOCTYPE html>
<!-- Zero-install ("CDN mode") variant of the schema explorer: React comes
     from esm.sh via the import map, and the inline script is transpiled in
     the browser by @babel/standalone.
     NOTE(review): @babel/standalone transpiles THIS inline text/babel script,
     but it does not fetch/transpile the relative './database_schema_explorer.jsx'
     import — confirm this page actually works when opened from disk without a
     dev server, as help.md claims. -->
<html lang="fr">
<head>
    <meta charset="UTF-8">
    <link rel="icon" type="image/svg+xml" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>📊</text></svg>" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Base de données - Explorateur de Schéma (CDN Mode)</title>
    <script type="importmap">
    {
        "imports": {
            "react": "https://esm.sh/react@18",
            "react-dom": "https://esm.sh/react-dom@18",
            "react-dom/client": "https://esm.sh/react-dom@18/client"
        }
    }
    </script>
    <!-- Babel Standalone (in-browser JSX transpilation) -->
    <script src="https://unpkg.com/@babel/standalone/babel.min.js"></script>
</head>
<body style="margin: 0; background: #0d1117;">
    <div id="root"></div>
    <!-- Main script, transpiled by Babel Standalone -->
    <script type="text/babel" data-type="module">
        import React from 'react';
        import ReactDOM from 'react-dom/client';
        import App from './database_schema_explorer.jsx';

        const root = ReactDOM.createRoot(document.getElementById('root'));
        root.render(
            <React.StrictMode>
                <App />
            </React.StrictMode>
        );
    </script>
</body>
</html>

180
log_alembic.txt Normal file
View File

@ -0,0 +1,180 @@
Traceback (most recent call last):
[DB] DATABASE_URL=mysql+aiomysql://homelab:password@127.0.0.1:3306/homelab, DEFAULT_DB_PATH=/mnt/c/dev/git/python/homelab-automation-api-v2/data/homelab.db, parent_exists=True, parent=/mnt/c/dev/git/python/homelab-automation-api-v2/data
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 143, in __init__
self._dbapi_connection = engine.raw_connection()
^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 3309, in raw_connection
return self.pool.connect()
^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 447, in connect
return _ConnectionFairy._checkout(self)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 1264, in _checkout
fairy = _ConnectionRecord.checkout(pool)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 711, in checkout
rec = pool._do_get()
^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/impl.py", line 306, in _do_get
return self._create_connection()
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 388, in _create_connection
return _ConnectionRecord(self)
^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 673, in __init__
self.__connect()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 899, in __connect
with util.safe_reraise():
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/util/langhelpers.py", line 224, in __exit__
raise exc_value.with_traceback(exc_tb)
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 895, in __connect
self.dbapi_connection = connection = pool._invoke_creator(self)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/create.py", line 661, in connect
return dialect.connect(*cargs, **cparams)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py", line 630, in connect
return self.loaded_dbapi.connect(*cargs, **cparams) # type: ignore[no-any-return] # NOQA: E501
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/aiomysql.py", line 170, in connect
await_only(creator_fn(*arg, **kw)),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 132, in await_only
return current.parent.switch(awaitable) # type: ignore[no-any-return,attr-defined] # noqa: E501
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 196, in greenlet_spawn
value = await result
^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 74, in _connect
await conn._connect()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 540, in _connect
await self._request_authentication()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 865, in _request_authentication
await self.caching_sha2_password_auth(auth_packet)
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 989, in caching_sha2_password_auth
pkt = await self._read_packet()
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 652, in _read_packet
packet.raise_for_error()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/pymysql/protocol.py", line 219, in raise_for_error
err.raise_mysql_exception(self._data)
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/pymysql/err.py", line 150, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.OperationalError: (1045, "Access denied for user 'homelab'@'172.17.0.1' (using password: YES)")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/bin/alembic", line 8, in <module>
sys.exit(main())
^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/alembic/config.py", line 1033, in main
CommandLine(prog=prog).main(argv=argv)
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/alembic/config.py", line 1023, in main
self.run_cmd(cfg, options)
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/alembic/config.py", line 957, in run_cmd
fn(
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/alembic/command.py", line 483, in upgrade
script.run_env()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/alembic/script/base.py", line 545, in run_env
util.load_python_file(self.dir, "env.py")
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/alembic/util/pyfiles.py", line 116, in load_python_file
module = load_module_py(module_id, path)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/alembic/util/pyfiles.py", line 136, in load_module_py
spec.loader.exec_module(module) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen importlib._bootstrap_external>", line 995, in exec_module
File "<frozen importlib._bootstrap>", line 488, in _call_with_frames_removed
File "/mnt/c/dev/git/python/homelab-automation-api-v2/alembic/env.py", line 95, in <module>
run_migrations_online()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/alembic/env.py", line 89, in run_migrations_online
asyncio.run(async_main())
File "/usr/lib/python3.12/asyncio/runners.py", line 194, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/usr/lib/python3.12/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib/python3.12/asyncio/base_events.py", line 687, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/alembic/env.py", line 85, in async_main
async with connectable.connect() as connection:
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/base.py", line 121, in __aenter__
return await self.start(is_ctxmanager=True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/ext/asyncio/engine.py", line 275, in start
await greenlet_spawn(self.sync_engine.connect)
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 201, in greenlet_spawn
result = context.throw(*sys.exc_info())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 3285, in connect
return self._connection_cls(self)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 145, in __init__
Connection._handle_dbapi_exception_noconnection(
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 2448, in _handle_dbapi_exception_noconnection
raise sqlalchemy_exception.with_traceback(exc_info[2]) from e
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 143, in __init__
self._dbapi_connection = engine.raw_connection()
^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 3309, in raw_connection
return self.pool.connect()
^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 447, in connect
return _ConnectionFairy._checkout(self)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 1264, in _checkout
fairy = _ConnectionRecord.checkout(pool)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 711, in checkout
rec = pool._do_get()
^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/impl.py", line 306, in _do_get
return self._create_connection()
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 388, in _create_connection
return _ConnectionRecord(self)
^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 673, in __init__
self.__connect()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 899, in __connect
with util.safe_reraise():
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/util/langhelpers.py", line 224, in __exit__
raise exc_value.with_traceback(exc_tb)
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/pool/base.py", line 895, in __connect
self.dbapi_connection = connection = pool._invoke_creator(self)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/create.py", line 661, in connect
return dialect.connect(*cargs, **cparams)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py", line 630, in connect
return self.loaded_dbapi.connect(*cargs, **cparams) # type: ignore[no-any-return] # NOQA: E501
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/aiomysql.py", line 170, in connect
await_only(creator_fn(*arg, **kw)),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 132, in await_only
return current.parent.switch(awaitable) # type: ignore[no-any-return,attr-defined] # noqa: E501
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 196, in greenlet_spawn
value = await result
^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 74, in _connect
await conn._connect()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 540, in _connect
await self._request_authentication()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 865, in _request_authentication
await self.caching_sha2_password_auth(auth_packet)
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 989, in caching_sha2_password_auth
pkt = await self._read_packet()
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/aiomysql/connection.py", line 652, in _read_packet
packet.raise_for_error()
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/pymysql/protocol.py", line 219, in raise_for_error
err.raise_mysql_exception(self._data)
File "/mnt/c/dev/git/python/homelab-automation-api-v2/.venv/lib/python3.12/site-packages/pymysql/err.py", line 150, in raise_mysql_exception
raise errorclass(errno, errval)
sqlalchemy.exc.OperationalError: (pymysql.err.OperationalError) (1045, "Access denied for user 'homelab'@'172.17.0.1' (using password: YES)")
(Background on this error at: https://sqlalche.me/e/20/e3q8)

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,25 @@
# ❌ Vérification de santé
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `a45d9a22d0544a629ff47f2d3ecfa2e5` |
| **Nom** | Vérification de santé |
| **Cible** | `raspi.4gb.home` |
| **Statut** | failed |
| **Type** | Manuel |
| **Progression** | 10% |
| **Début** | 2026-03-04T14:56:41.205885+00:00 |
| **Fin** | 2026-03-04T14:56:41.206242+00:00 |
| **Durée** | 0.0s |
## Sortie
```
(Aucune sortie)
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-04T14:56:41.231207+00:00*

View File

@ -0,0 +1,25 @@
# ❌ Vérification de santé
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `136eb1ad329947949c04de7579520468` |
| **Nom** | Vérification de santé |
| **Cible** | `hp.nas.home` |
| **Statut** | failed |
| **Type** | Manuel |
| **Progression** | 10% |
| **Début** | 2026-03-04T15:07:03.080230+00:00 |
| **Fin** | 2026-03-04T15:07:03.080745+00:00 |
| **Durée** | 0.0s |
## Sortie
```
(Aucune sortie)
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-04T15:07:03.106300+00:00*

View File

@ -0,0 +1,43 @@
# ❌ Ad-hoc: hostname
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `adhoc_1cbcac313a9d` |
| **Nom** | Ad-hoc: hostname |
| **Cible** | `ali2v.xeon.home` |
| **Statut** | failed |
| **Type** | Ad-hoc |
| **Progression** | 100% |
| **Début** | 2026-03-04T15:07:40.665129+00:00 |
| **Fin** | 2026-03-04T15:07:41.011436+00:00 |
| **Durée** | 0.35s |
## Sortie
```
(Aucune sortie)
```
## Erreurs
```
Traceback (most recent call last):
File "<frozen runpy>", line 198, in _run_module_as_main
File "<frozen runpy>", line 88, in _run_code
File "C:\Users\bruno\scoop\apps\python\current\Scripts\ansible.exe\__main__.py", line 2, in <module>
from ansible.cli.adhoc import main
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\ansible\cli\__init__.py", line 54, in <module>
check_blocking_io()
~~~~~~~~~~~~~~~~~^^
File "C:\Users\bruno\scoop\apps\python\current\Lib\site-packages\ansible\cli\__init__.py", line 46, in check_blocking_io
if not os.get_blocking(fd):
~~~~~~~~~~~~~~~^^^^
OSError: [WinError 1] Incorrect function
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-04T15:07:41.011766+00:00*

View File

@ -0,0 +1,25 @@
# ❌ Playbook: Mon Playbook
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `pb_28c762ccfe08` |
| **Nom** | Playbook: Mon Playbook |
| **Cible** | `hp.nas.home` |
| **Statut** | failed |
| **Type** | Manuel |
| **Progression** | 0% |
| **Début** | 2026-03-04T15:09:35.140029+00:00 |
| **Fin** | 2026-03-04T15:09:35.179480+00:00 |
| **Durée** | N/A |
## Sortie
```
(Aucune sortie)
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-04T15:09:35.179813+00:00*

View File

@ -0,0 +1,25 @@
# ❌ Playbook: Mon Playbook
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `pb_4d810188aa4c` |
| **Nom** | Playbook: Mon Playbook |
| **Cible** | `hp.nas.home` |
| **Statut** | failed |
| **Type** | Manuel |
| **Progression** | 0% |
| **Début** | 2026-03-04T15:11:49.626414+00:00 |
| **Fin** | 2026-03-04T15:11:49.656127+00:00 |
| **Durée** | N/A |
## Sortie
```
(Aucune sortie)
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-04T15:11:49.656477+00:00*

View File

@ -0,0 +1,58 @@
# ✅ Vérification de santé
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `244195d2592247f88ce069800a58cce6` |
| **Nom** | Vérification de santé |
| **Cible** | `ali2v.xeon.home` |
| **Statut** | completed |
| **Type** | Manuel |
| **Progression** | 100% |
| **Début** | 2026-03-05T14:29:43.754204+00:00 |
| **Fin** | 2026-03-05T14:29:49.019508+00:00 |
| **Durée** | 5.3s |
## Sortie
```
Using /mnt/c/dev/git/python/homelab-automation-api-v2/ansible/ansible.cfg as config file
PLAY [Health check on target host] *********************************************
TASK [Check if host is reachable (ping)] ***************************************
ok: [ali2v.xeon.home] => {"changed": false, "ping": "pong"}
TASK [Gather minimal facts] ****************************************************
ok: [ali2v.xeon.home]
TASK [Get system uptime] *******************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": ["uptime"], "delta": "0:00:00.002997", "end": "2026-03-05 09:29:48.100160", "msg": "", "rc": 0, "start": "2026-03-05 09:29:48.097163", "stderr": "", "stderr_lines": [], "stdout": " 09:29:48 up 50 days, 2:55, 1 user, load average: 0.43, 0.49, 0.54", "stdout_lines": [" 09:29:48 up 50 days, 2:55, 1 user, load average: 0.43, 0.49, 0.54"]}
TASK [Get disk usage] **********************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "df -h / | tail -1 | awk '{print $5}'", "delta": "0:00:00.004415", "end": "2026-03-05 09:29:48.494245", "msg": "", "rc": 0, "start": "2026-03-05 09:29:48.489830", "stderr": "", "stderr_lines": [], "stdout": "24%", "stdout_lines": ["24%"]}
TASK [Get memory usage (Linux)] ************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "if command -v free >/dev/null 2>&1; then\n free -m | grep Mem | awk '{printf \"%.1f%%\", $3/$2 * 100}'\nelse\n # Fallback for systems without free command\n cat /proc/meminfo | awk '/MemTotal/{total=$2} /MemAvailable/{avail=$2} END{printf \"%.1f%%\", (total-avail)/total*100}'\nfi\n", "delta": "0:00:00.004554", "end": "2026-03-05 09:29:48.881478", "msg": "", "rc": 0, "start": "2026-03-05 09:29:48.876924", "stderr": "", "stderr_lines": [], "stdout": "45.0%", "stdout_lines": ["45.0%"]}
TASK [Get CPU temperature (ARM/SBC)] *******************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "if [ -f /sys/class/thermal/thermal_zone0/temp ]; then\n temp=$(cat /sys/class/thermal/thermal_zone0/temp)\n # Use awk instead of bc for better compatibility\n echo \"${temp}\" | awk '{printf \"%.1f°C\", $1/1000}'\nelse\n echo \"N/A\"\nfi\n", "delta": "0:00:00.004869", "end": "2026-03-05 09:29:49.284290", "msg": "", "rc": 0, "start": "2026-03-05 09:29:49.279421", "stderr": "", "stderr_lines": [], "stdout": "50.0°C", "stdout_lines": ["50.0°C"]}
TASK [Get CPU load] ************************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "if [ -f /proc/loadavg ]; then\n cat /proc/loadavg | awk '{print $1}'\nelse\n uptime | awk -F'load average:' '{print $2}' | awk -F',' '{print $1}' | tr -d ' '\nfi\n", "delta": "0:00:00.004193", "end": "2026-03-05 09:29:49.661910", "msg": "", "rc": 0, "start": "2026-03-05 09:29:49.657717", "stderr": "", "stderr_lines": [], "stdout": "0.43", "stdout_lines": ["0.43"]}
TASK [Display health status] ***************************************************
ok: [ali2v.xeon.home] => {
"msg": "═══════════════════════════════════════\nHost: ali2v.xeon.home\nStatus: OK\n═══════════════════════════════════════\nUptime: 09:29:48 up 50 days, 2:55, 1 user, load average: 0.43, 0.49, 0.54\nDisk Usage: 24%\nMemory Usage: 45.0%\nCPU Load: 0.43\nCPU Temp: 50.0°C\n═══════════════════════════════════════\n"
}
PLAY RECAP *********************************************************************
ali2v.xeon.home : ok=8 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-05T14:29:49.225759+00:00*

View File

@ -0,0 +1,58 @@
# ✅ Vérification de santé
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `118e1c04d8ed4eb7b820823ed3c5ed9d` |
| **Nom** | Vérification de santé |
| **Cible** | `ali2v.xeon.home` |
| **Statut** | completed |
| **Type** | Manuel |
| **Progression** | 100% |
| **Début** | 2026-03-05T14:32:12.140415+00:00 |
| **Fin** | 2026-03-05T14:32:17.189985+00:00 |
| **Durée** | 5.0s |
## Sortie
```
Using /mnt/c/dev/git/python/homelab-automation-api-v2/ansible/ansible.cfg as config file
PLAY [Health check on target host] *********************************************
TASK [Check if host is reachable (ping)] ***************************************
ok: [ali2v.xeon.home] => {"changed": false, "ping": "pong"}
TASK [Gather minimal facts] ****************************************************
ok: [ali2v.xeon.home]
TASK [Get system uptime] *******************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": ["uptime"], "delta": "0:00:00.002977", "end": "2026-03-05 09:32:15.904080", "msg": "", "rc": 0, "start": "2026-03-05 09:32:15.901103", "stderr": "", "stderr_lines": [], "stdout": " 09:32:15 up 50 days, 2:57, 2 users, load average: 0.81, 0.74, 0.63", "stdout_lines": [" 09:32:15 up 50 days, 2:57, 2 users, load average: 0.81, 0.74, 0.63"]}
TASK [Get disk usage] **********************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "df -h / | tail -1 | awk '{print $5}'", "delta": "0:00:00.004335", "end": "2026-03-05 09:32:16.289858", "msg": "", "rc": 0, "start": "2026-03-05 09:32:16.285523", "stderr": "", "stderr_lines": [], "stdout": "24%", "stdout_lines": ["24%"]}
TASK [Get memory usage (Linux)] ************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "if command -v free >/dev/null 2>&1; then\n free -m | grep Mem | awk '{printf \"%.1f%%\", $3/$2 * 100}'\nelse\n # Fallback for systems without free command\n cat /proc/meminfo | awk '/MemTotal/{total=$2} /MemAvailable/{avail=$2} END{printf \"%.1f%%\", (total-avail)/total*100}'\nfi\n", "delta": "0:00:00.004562", "end": "2026-03-05 09:32:16.676937", "msg": "", "rc": 0, "start": "2026-03-05 09:32:16.672375", "stderr": "", "stderr_lines": [], "stdout": "45.0%", "stdout_lines": ["45.0%"]}
TASK [Get CPU temperature (ARM/SBC)] *******************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "if [ -f /sys/class/thermal/thermal_zone0/temp ]; then\n temp=$(cat /sys/class/thermal/thermal_zone0/temp)\n # Use awk instead of bc for better compatibility\n echo \"${temp}\" | awk '{printf \"%.1f°C\", $1/1000}'\nelse\n echo \"N/A\"\nfi\n", "delta": "0:00:00.005127", "end": "2026-03-05 09:32:17.062078", "msg": "", "rc": 0, "start": "2026-03-05 09:32:17.056951", "stderr": "", "stderr_lines": [], "stdout": "49.0°C", "stdout_lines": ["49.0°C"]}
TASK [Get CPU load] ************************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "if [ -f /proc/loadavg ]; then\n cat /proc/loadavg | awk '{print $1}'\nelse\n uptime | awk -F'load average:' '{print $2}' | awk -F',' '{print $1}' | tr -d ' '\nfi\n", "delta": "0:00:00.004164", "end": "2026-03-05 09:32:17.455194", "msg": "", "rc": 0, "start": "2026-03-05 09:32:17.451030", "stderr": "", "stderr_lines": [], "stdout": "0.81", "stdout_lines": ["0.81"]}
TASK [Display health status] ***************************************************
ok: [ali2v.xeon.home] => {
"msg": "═══════════════════════════════════════\nHost: ali2v.xeon.home\nStatus: OK\n═══════════════════════════════════════\nUptime: 09:32:15 up 50 days, 2:57, 2 users, load average: 0.81, 0.74, 0.63\nDisk Usage: 24%\nMemory Usage: 45.0%\nCPU Load: 0.81\nCPU Temp: 49.0°C\n═══════════════════════════════════════\n"
}
PLAY RECAP *********************************************************************
ali2v.xeon.home : ok=8 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-05T14:32:17.229972+00:00*

View File

@ -0,0 +1,58 @@
# ✅ Vérification de santé
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `df227df90fee44aa8c790e4f91c207a3` |
| **Nom** | Vérification de santé |
| **Cible** | `ali2v.xeon.home` |
| **Statut** | completed |
| **Type** | Manuel |
| **Progression** | 100% |
| **Début** | 2026-03-05T14:40:22.963907+00:00 |
| **Fin** | 2026-03-05T14:40:28.119825+00:00 |
| **Durée** | 5.2s |
## Sortie
```
Using /mnt/c/dev/git/python/homelab-automation-api-v2/ansible/ansible.cfg as config file
PLAY [Health check on target host] *********************************************
TASK [Check if host is reachable (ping)] ***************************************
ok: [ali2v.xeon.home] => {"changed": false, "ping": "pong"}
TASK [Gather minimal facts] ****************************************************
ok: [ali2v.xeon.home]
TASK [Get system uptime] *******************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": ["uptime"], "delta": "0:00:00.003062", "end": "2026-03-05 09:40:26.683582", "msg": "", "rc": 0, "start": "2026-03-05 09:40:26.680520", "stderr": "", "stderr_lines": [], "stdout": " 09:40:26 up 50 days, 3:06, 1 user, load average: 0.77, 0.79, 0.72", "stdout_lines": [" 09:40:26 up 50 days, 3:06, 1 user, load average: 0.77, 0.79, 0.72"]}
TASK [Get disk usage] **********************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "df -h / | tail -1 | awk '{print $5}'", "delta": "0:00:00.004664", "end": "2026-03-05 09:40:27.052641", "msg": "", "rc": 0, "start": "2026-03-05 09:40:27.047977", "stderr": "", "stderr_lines": [], "stdout": "24%", "stdout_lines": ["24%"]}
TASK [Get memory usage (Linux)] ************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "if command -v free >/dev/null 2>&1; then\n free -m | grep Mem | awk '{printf \"%.1f%%\", $3/$2 * 100}'\nelse\n # Fallback for systems without free command\n cat /proc/meminfo | awk '/MemTotal/{total=$2} /MemAvailable/{avail=$2} END{printf \"%.1f%%\", (total-avail)/total*100}'\nfi\n", "delta": "0:00:00.004236", "end": "2026-03-05 09:40:27.457799", "msg": "", "rc": 0, "start": "2026-03-05 09:40:27.453563", "stderr": "", "stderr_lines": [], "stdout": "45.0%", "stdout_lines": ["45.0%"]}
TASK [Get CPU temperature (ARM/SBC)] *******************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "if [ -f /sys/class/thermal/thermal_zone0/temp ]; then\n temp=$(cat /sys/class/thermal/thermal_zone0/temp)\n # Use awk instead of bc for better compatibility\n echo \"${temp}\" | awk '{printf \"%.1f°C\", $1/1000}'\nelse\n echo \"N/A\"\nfi\n", "delta": "0:00:00.004978", "end": "2026-03-05 09:40:27.828238", "msg": "", "rc": 0, "start": "2026-03-05 09:40:27.823260", "stderr": "", "stderr_lines": [], "stdout": "50.0°C", "stdout_lines": ["50.0°C"]}
TASK [Get CPU load] ************************************************************
ok: [ali2v.xeon.home] => {"changed": false, "cmd": "if [ -f /proc/loadavg ]; then\n cat /proc/loadavg | awk '{print $1}'\nelse\n uptime | awk -F'load average:' '{print $2}' | awk -F',' '{print $1}' | tr -d ' '\nfi\n", "delta": "0:00:00.004234", "end": "2026-03-05 09:40:28.194738", "msg": "", "rc": 0, "start": "2026-03-05 09:40:28.190504", "stderr": "", "stderr_lines": [], "stdout": "0.77", "stdout_lines": ["0.77"]}
TASK [Display health status] ***************************************************
ok: [ali2v.xeon.home] => {
"msg": "═══════════════════════════════════════\nHost: ali2v.xeon.home\nStatus: OK\n═══════════════════════════════════════\nUptime: 09:40:26 up 50 days, 3:06, 1 user, load average: 0.77, 0.79, 0.72\nDisk Usage: 24%\nMemory Usage: 45.0%\nCPU Load: 0.77\nCPU Temp: 50.0°C\n═══════════════════════════════════════\n"
}
PLAY RECAP *********************************************************************
ali2v.xeon.home : ok=8 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-05T14:40:28.151831+00:00*

View File

@ -0,0 +1,85 @@
# ✅ [Planifié] Test de cédule
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `5103b7c3b76847baa48a1897283d61ca` |
| **Nom** | [Planifié] Test de cédule |
| **Cible** | `role_docker` |
| **Statut** | completed |
| **Type** | Planifié |
| **Progression** | 100% |
| **Début** | 2026-03-05T15:05:01.207361+00:00 |
| **Fin** | 2026-03-05T15:05:13.803137+00:00 |
| **Durée** | 12.6s |
## Sortie
```
Using /mnt/c/dev/git/python/homelab-automation-api-v2/ansible/ansible.cfg as config file
PLAY [mon playbook] ************************************************************
TASK [Gathering Facts] *********************************************************
ok: [media-1.lab.home]
ok: [dev.lab.home]
ok: [media.labb.home]
ok: [openclaw.lab.home]
ok: [raspi.8gb.home]
ok: [raspi.4gb.home]
ok: [jump.point.home]
ok: [automate.prod.home]
ok: [dev.prod.home]
ok: [orangepi.pc.home]
TASK [Exemple de tâche] ********************************************************
ok: [orangepi.pc.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [raspi.4gb.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [raspi.8gb.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [dev.lab.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [media-1.lab.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [media.labb.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [openclaw.lab.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [automate.prod.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [dev.prod.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [jump.point.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
PLAY RECAP *********************************************************************
automate.prod.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
dev.lab.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
dev.prod.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
jump.point.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
media-1.lab.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
media.labb.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
openclaw.lab.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
orangepi.pc.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
raspi.4gb.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
raspi.8gb.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-05T15:05:13.863909+00:00*

View File

@ -0,0 +1,85 @@
# ✅ [Planifié] Test de cédule
## Informations
| Propriété | Valeur |
|-----------|--------|
| **ID** | `2e510c0e10514babaee016f2fb13e6fc` |
| **Nom** | [Planifié] Test de cédule |
| **Cible** | `role_docker` |
| **Statut** | completed |
| **Type** | Planifié |
| **Progression** | 100% |
| **Début** | 2026-03-05T15:10:01.212117+00:00 |
| **Fin** | 2026-03-05T15:10:08.779761+00:00 |
| **Durée** | 7.6s |
## Sortie
```
Using /mnt/c/dev/git/python/homelab-automation-api-v2/ansible/ansible.cfg as config file
PLAY [mon playbook] ************************************************************
TASK [Gathering Facts] *********************************************************
ok: [media-1.lab.home]
ok: [dev.lab.home]
ok: [media.labb.home]
ok: [openclaw.lab.home]
ok: [raspi.8gb.home]
ok: [raspi.4gb.home]
ok: [automate.prod.home]
ok: [dev.prod.home]
ok: [jump.point.home]
ok: [orangepi.pc.home]
TASK [Exemple de tâche] ********************************************************
ok: [orangepi.pc.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [raspi.4gb.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [raspi.8gb.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [dev.lab.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [media-1.lab.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [media.labb.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [openclaw.lab.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [automate.prod.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [dev.prod.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
ok: [jump.point.home] => {
"msg": "Playbook mon-playbook exécuté avec succès!"
}
PLAY RECAP *********************************************************************
automate.prod.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
dev.lab.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
dev.prod.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
jump.point.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
media-1.lab.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
media.labb.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
openclaw.lab.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
orangepi.pc.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
raspi.4gb.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
raspi.8gb.home : ok=2 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
---
*Généré automatiquement par Homelab Automation Dashboard*
*Date: 2026-03-05T15:10:08.845398+00:00*

571
package-lock.json generated
View File

@ -16,15 +16,19 @@
"@codemirror/search": "^6.5.11",
"@codemirror/state": "^6.5.2",
"@codemirror/theme-one-dark": "^6.1.2",
"@codemirror/view": "^6.38.6"
"@codemirror/view": "^6.38.6",
"react": "^19.2.4",
"react-dom": "^19.2.4"
},
"devDependencies": {
"@testing-library/dom": "^10.0.0",
"@testing-library/user-event": "^14.5.0",
"@vitejs/plugin-react": "^5.1.4",
"@vitest/coverage-v8": "^2.0.0",
"@vitest/ui": "^2.0.0",
"esbuild": "^0.24.0",
"jsdom": "^24.0.0",
"vite": "^5.4.21",
"vitest": "^2.0.0"
}
},
@ -57,13 +61,13 @@
}
},
"node_modules/@babel/code-frame": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
"integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
"version": "7.29.0",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz",
"integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-validator-identifier": "^7.27.1",
"@babel/helper-validator-identifier": "^7.28.5",
"js-tokens": "^4.0.0",
"picocolors": "^1.1.1"
},
@ -71,6 +75,163 @@
"node": ">=6.9.0"
}
},
"node_modules/@babel/compat-data": {
"version": "7.29.0",
"resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz",
"integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/core": {
"version": "7.29.0",
"resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz",
"integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.29.0",
"@babel/generator": "^7.29.0",
"@babel/helper-compilation-targets": "^7.28.6",
"@babel/helper-module-transforms": "^7.28.6",
"@babel/helpers": "^7.28.6",
"@babel/parser": "^7.29.0",
"@babel/template": "^7.28.6",
"@babel/traverse": "^7.29.0",
"@babel/types": "^7.29.0",
"@jridgewell/remapping": "^2.3.5",
"convert-source-map": "^2.0.0",
"debug": "^4.1.0",
"gensync": "^1.0.0-beta.2",
"json5": "^2.2.3",
"semver": "^6.3.1"
},
"engines": {
"node": ">=6.9.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/babel"
}
},
"node_modules/@babel/core/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/generator": {
"version": "7.29.1",
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz",
"integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/parser": "^7.29.0",
"@babel/types": "^7.29.0",
"@jridgewell/gen-mapping": "^0.3.12",
"@jridgewell/trace-mapping": "^0.3.28",
"jsesc": "^3.0.2"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-compilation-targets": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz",
"integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/compat-data": "^7.28.6",
"@babel/helper-validator-option": "^7.27.1",
"browserslist": "^4.24.0",
"lru-cache": "^5.1.1",
"semver": "^6.3.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
"integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^3.0.2"
}
},
"node_modules/@babel/helper-compilation-targets/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/helper-globals": {
"version": "7.28.0",
"resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
"integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-module-imports": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz",
"integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/traverse": "^7.28.6",
"@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-module-transforms": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz",
"integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-module-imports": "^7.28.6",
"@babel/helper-validator-identifier": "^7.28.5",
"@babel/traverse": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
},
"peerDependencies": {
"@babel/core": "^7.0.0"
}
},
"node_modules/@babel/helper-plugin-utils": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz",
"integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-string-parser": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
@ -91,14 +252,38 @@
"node": ">=6.9.0"
}
},
"node_modules/@babel/parser": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz",
"integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==",
"node_modules/@babel/helper-validator-option": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz",
"integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helpers": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz",
"integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.28.5"
"@babel/template": "^7.28.6",
"@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/parser": {
"version": "7.29.0",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz",
"integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.29.0"
},
"bin": {
"parser": "bin/babel-parser.js"
@ -107,6 +292,38 @@
"node": ">=6.0.0"
}
},
"node_modules/@babel/plugin-transform-react-jsx-self": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz",
"integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1"
},
"engines": {
"node": ">=6.9.0"
},
"peerDependencies": {
"@babel/core": "^7.0.0-0"
}
},
"node_modules/@babel/plugin-transform-react-jsx-source": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz",
"integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1"
},
"engines": {
"node": ">=6.9.0"
},
"peerDependencies": {
"@babel/core": "^7.0.0-0"
}
},
"node_modules/@babel/runtime": {
"version": "7.28.4",
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz",
@ -117,10 +334,44 @@
"node": ">=6.9.0"
}
},
"node_modules/@babel/template": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz",
"integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.28.6",
"@babel/parser": "^7.28.6",
"@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/traverse": {
"version": "7.29.0",
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz",
"integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.29.0",
"@babel/generator": "^7.29.0",
"@babel/helper-globals": "^7.28.0",
"@babel/parser": "^7.29.0",
"@babel/template": "^7.28.6",
"@babel/types": "^7.29.0",
"debug": "^4.3.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/types": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz",
"integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==",
"version": "7.29.0",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz",
"integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==",
"dev": true,
"license": "MIT",
"dependencies": {
@ -825,6 +1076,17 @@
"@jridgewell/trace-mapping": "^0.3.24"
}
},
"node_modules/@jridgewell/remapping": {
"version": "2.3.5",
"resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz",
"integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.5",
"@jridgewell/trace-mapping": "^0.3.24"
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
@ -912,6 +1174,13 @@
"dev": true,
"license": "MIT"
},
"node_modules/@rolldown/pluginutils": {
"version": "1.0.0-rc.3",
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.3.tgz",
"integrity": "sha512-eybk3TjzzzV97Dlj5c+XrBFW57eTNhzod66y9HrBlzJ6NsCrWCp/2kaPS3K9wJmurBC0Tdw4yPjXKZqlznim3Q==",
"dev": true,
"license": "MIT"
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.53.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.53.3.tgz",
@ -1261,6 +1530,51 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/babel__core": {
"version": "7.20.5",
"resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz",
"integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/parser": "^7.20.7",
"@babel/types": "^7.20.7",
"@types/babel__generator": "*",
"@types/babel__template": "*",
"@types/babel__traverse": "*"
}
},
"node_modules/@types/babel__generator": {
"version": "7.27.0",
"resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz",
"integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.0.0"
}
},
"node_modules/@types/babel__template": {
"version": "7.4.4",
"resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz",
"integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/parser": "^7.1.0",
"@babel/types": "^7.0.0"
}
},
"node_modules/@types/babel__traverse": {
"version": "7.28.0",
"resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz",
"integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.28.2"
}
},
"node_modules/@types/estree": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
@ -1268,6 +1582,27 @@
"dev": true,
"license": "MIT"
},
"node_modules/@vitejs/plugin-react": {
"version": "5.1.4",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.4.tgz",
"integrity": "sha512-VIcFLdRi/VYRU8OL/puL7QXMYafHmqOnwTZY50U1JPlCNj30PxCMx65c494b1K9be9hX83KVt0+gTEwTWLqToA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/core": "^7.29.0",
"@babel/plugin-transform-react-jsx-self": "^7.27.1",
"@babel/plugin-transform-react-jsx-source": "^7.27.1",
"@rolldown/pluginutils": "1.0.0-rc.3",
"@types/babel__core": "^7.20.5",
"react-refresh": "^0.18.0"
},
"engines": {
"node": "^20.19.0 || >=22.12.0"
},
"peerDependencies": {
"vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0"
}
},
"node_modules/@vitest/coverage-v8": {
"version": "2.1.9",
"resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-2.1.9.tgz",
@ -1503,6 +1838,19 @@
"dev": true,
"license": "MIT"
},
"node_modules/baseline-browser-mapping": {
"version": "2.10.0",
"resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz",
"integrity": "sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"baseline-browser-mapping": "dist/cli.cjs"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/brace-expansion": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
@ -1513,6 +1861,40 @@
"balanced-match": "^1.0.0"
}
},
"node_modules/browserslist": {
"version": "4.28.1",
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz",
"integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/browserslist"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"baseline-browser-mapping": "^2.9.0",
"caniuse-lite": "^1.0.30001759",
"electron-to-chromium": "^1.5.263",
"node-releases": "^2.0.27",
"update-browserslist-db": "^1.2.0"
},
"bin": {
"browserslist": "cli.js"
},
"engines": {
"node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
}
},
"node_modules/cac": {
"version": "6.7.14",
"resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz",
@ -1537,6 +1919,27 @@
"node": ">= 0.4"
}
},
"node_modules/caniuse-lite": {
"version": "1.0.30001776",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001776.tgz",
"integrity": "sha512-sg01JDPzZ9jGshqKSckOQthXnYwOEP50jeVFhaSFbZcOy05TiuuaffDOfcwtCisJ9kNQuLBFibYywv2Bgm9osw==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/caniuse-lite"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "CC-BY-4.0"
},
"node_modules/chai": {
"version": "5.3.3",
"resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz",
@ -1597,6 +2000,13 @@
"node": ">= 0.8"
}
},
"node_modules/convert-source-map": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
"dev": true,
"license": "MIT"
},
"node_modules/crelt": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz",
@ -1737,6 +2147,13 @@
"dev": true,
"license": "MIT"
},
"node_modules/electron-to-chromium": {
"version": "1.5.307",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.307.tgz",
"integrity": "sha512-5z3uFKBWjiNR44nFcYdkcXjKMbg5KXNdciu7mhTPo9tB7NbqSNP2sSnGR+fqknZSCwKkBN+oxiiajWs4dT6ORg==",
"dev": true,
"license": "ISC"
},
"node_modules/emoji-regex": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
@ -1854,6 +2271,16 @@
"@esbuild/win32-x64": "0.24.2"
}
},
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
"integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/estree-walker": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz",
@ -1965,6 +2392,16 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/gensync": {
"version": "1.0.0-beta.2",
"resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
"integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
@ -2293,6 +2730,32 @@
}
}
},
"node_modules/jsesc": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
"integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
"dev": true,
"license": "MIT",
"bin": {
"jsesc": "bin/jsesc"
},
"engines": {
"node": ">=6"
}
},
"node_modules/json5": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
"dev": true,
"license": "MIT",
"bin": {
"json5": "lib/cli.js"
},
"engines": {
"node": ">=6"
}
},
"node_modules/loupe": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz",
@ -2450,6 +2913,13 @@
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/node-releases": {
"version": "2.0.36",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.36.tgz",
"integrity": "sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==",
"dev": true,
"license": "MIT"
},
"node_modules/nwsapi": {
"version": "2.2.23",
"resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.23.tgz",
@ -2615,6 +3085,27 @@
"dev": true,
"license": "MIT"
},
"node_modules/react": {
"version": "19.2.4",
"resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz",
"integrity": "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/react-dom": {
"version": "19.2.4",
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.4.tgz",
"integrity": "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==",
"license": "MIT",
"dependencies": {
"scheduler": "^0.27.0"
},
"peerDependencies": {
"react": "^19.2.4"
}
},
"node_modules/react-is": {
"version": "17.0.2",
"resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz",
@ -2622,6 +3113,16 @@
"dev": true,
"license": "MIT"
},
"node_modules/react-refresh": {
"version": "0.18.0",
"resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.18.0.tgz",
"integrity": "sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
@ -2698,6 +3199,12 @@
"node": ">=v12.22.7"
}
},
"node_modules/scheduler": {
"version": "0.27.0",
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz",
"integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==",
"license": "MIT"
},
"node_modules/semver": {
"version": "7.7.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
@ -3041,6 +3548,37 @@
"node": ">= 4.0.0"
}
},
"node_modules/update-browserslist-db": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz",
"integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/browserslist"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"escalade": "^3.2.0",
"picocolors": "^1.1.1"
},
"bin": {
"update-browserslist-db": "cli.js"
},
"peerDependencies": {
"browserslist": ">= 4.21.0"
}
},
"node_modules/url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
@ -3869,6 +4407,13 @@
"integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
"dev": true,
"license": "MIT"
},
"node_modules/yallist": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
"integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
"dev": true,
"license": "ISC"
}
}
}

View File

@ -13,10 +13,12 @@
"devDependencies": {
"@testing-library/dom": "^10.0.0",
"@testing-library/user-event": "^14.5.0",
"@vitejs/plugin-react": "^5.1.4",
"@vitest/coverage-v8": "^2.0.0",
"@vitest/ui": "^2.0.0",
"esbuild": "^0.24.0",
"jsdom": "^24.0.0",
"vite": "^5.4.21",
"vitest": "^2.0.0"
},
"dependencies": {
@ -27,7 +29,9 @@
"@codemirror/lint": "^6.9.0",
"@codemirror/search": "^6.5.11",
"@codemirror/state": "^6.5.2",
"@codemirror/theme-one-dark": "^6.1.2",
"@codemirror/view": "^6.38.6",
"@codemirror/theme-one-dark": "^6.1.2"
"react": "^19.2.4",
"react-dom": "^19.2.4"
}
}

19
patch_migrations.py Normal file
View File

@ -0,0 +1,19 @@
"""Patch Alembic migration files for MySQL compatibility.

MySQL requires VARCHAR columns to declare an explicit length, but
autogenerated Alembic migrations can emit bare ``sa.String()`` columns
(valid for SQLite, rejected by MySQL). This script rewrites every
migration under ``alembic/versions`` so each length-less ``sa.String()``
becomes ``sa.String(255)``; columns that already declare a length
(e.g. ``sa.String(64)``) are left untouched.
"""
import glob
import os
import re

# Matches only a bare, argument-less call, so sa.String(64) is preserved.
_BARE_STRING_RE = re.compile(r"sa\.String\(\)")


def patch_content(content: str) -> str:
    """Return *content* with every bare ``sa.String()`` given an explicit 255 length."""
    return _BARE_STRING_RE.sub("sa.String(255)", content)


def main() -> None:
    """Rewrite migration files in place, reporting each file that changed."""
    versions_dir = os.path.join("alembic", "versions")
    for py_file in glob.glob(os.path.join(versions_dir, "*.py")):
        with open(py_file, "r", encoding="utf-8") as f:
            content = f.read()
        new_content = patch_content(content)
        # Only rewrite files that actually changed, to keep mtimes stable.
        if new_content != content:
            print(f"Patched {py_file}")
            with open(py_file, "w", encoding="utf-8") as f:
                f.write(new_content)
    print("Patching complete.")


if __name__ == "__main__":
    main()

View File

@ -10,9 +10,82 @@ set +a
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
# --- Gestion MySQL via Docker (si DB_ENGINE=mysql) ---
if [ "$DB_ENGINE" == "mysql" ]; then
MYSQL_CONTAINER="homelab-mysql"
echo "🔍 Mode MySQL détecté. Vérification du conteneur $MYSQL_CONTAINER..."
# Paramètres par défaut si non définis dans .env
M_USER="${MYSQL_USER:-homelab}"
M_PASS="${MYSQL_PASSWORD:-password}"
M_DB="${MYSQL_DATABASE:-homelab}"
M_PORT="${MYSQL_PORT:-3306}"
M_ROOT_PASS="${MYSQL_ROOT_PASSWORD:-rootpassword}"
if ! docker ps -a --format '{{.Names}}' | grep -q "^${MYSQL_CONTAINER}$"; then
echo "🚀 Création du conteneur MySQL $MYSQL_CONTAINER..."
docker run --name "$MYSQL_CONTAINER" \
-e MYSQL_ROOT_PASSWORD="$M_ROOT_PASS" \
-e MYSQL_DATABASE="$M_DB" \
-e MYSQL_USER="$M_USER" \
-e MYSQL_PASSWORD="$M_PASS" \
-p "$M_PORT:3306" \
-d mysql:8.0 --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
echo "⏳ Attente du démarrage de MySQL..."
until docker exec "$MYSQL_CONTAINER" mysqladmin ping -h localhost --silent; do
sleep 2
done
echo "✅ MySQL est prêt et base de données '$M_DB' créée."
else
if ! docker ps --format '{{.Names}}' | grep -q "^${MYSQL_CONTAINER}$"; then
echo "▶️ Démarrage du conteneur MySQL existant..."
docker start "$MYSQL_CONTAINER"
echo "⏳ Attente du démarrage de MySQL..."
until docker exec "$MYSQL_CONTAINER" mysqladmin ping -h localhost --silent; do
sleep 2
done
else
echo "✅ Conteneur MySQL déjà en cours d'exécution."
fi
# Vérifier si la base de données existe, sinon la créer
echo "Check/Create database $M_DB..."
docker exec "$MYSQL_CONTAINER" mysql -uroot -p"$M_ROOT_PASS" -e "CREATE DATABASE IF NOT EXISTS \`$M_DB\`;" 2>/dev/null || \
docker exec "$MYSQL_CONTAINER" mysql -uroot -p"$M_ROOT_PASS" -e "CREATE DATABASE IF NOT EXISTS $M_DB;"
fi
# --- Gestion CloudBeaver (Interface Web MySQL) ---
CLOUDBEAVER_CONTAINER="homelab-cloudbeaver"
CLOUDBEAVER_PORT="${CLOUDBEAVER_PORT:-8080}"
echo "🔍 Vérification du conteneur $CLOUDBEAVER_CONTAINER..."
if ! docker ps -a --format '{{.Names}}' | grep -q "^${CLOUDBEAVER_CONTAINER}$"; then
echo "🚀 Création du conteneur CloudBeaver $CLOUDBEAVER_CONTAINER..."
docker run --name "$CLOUDBEAVER_CONTAINER" \
-p "$CLOUDBEAVER_PORT:8978" \
-v cloudbeaver_data:/opt/cloudbeaver/workspace \
-d dbeaver/cloudbeaver:latest
echo "✅ CloudBeaver est prêt sur http://localhost:$CLOUDBEAVER_PORT"
else
if ! docker ps --format '{{.Names}}' | grep -q "^${CLOUDBEAVER_CONTAINER}$"; then
echo "▶️ Démarrage du conteneur CloudBeaver existant..."
docker start "$CLOUDBEAVER_CONTAINER"
else
echo "✅ Conteneur CloudBeaver déjà en cours d'exécution."
fi
echo "🌐 CloudBeaver accessible sur http://localhost:$CLOUDBEAVER_PORT"
fi
# Forcer la DATABASE_URL correcte pour l'application si elle utilise des placeholders ou est absente
# On reconstruit l'URL avec les variables réelles pour éviter les erreurs de connexion
export DATABASE_URL="mysql+aiomysql://$M_USER:$M_PASS@localhost:$M_PORT/$M_DB"
echo "🔗 DATABASE_URL configurée pour MySQL."
fi
# Workaround pour WSL: SQLite ne fonctionne pas bien sur /mnt/c/...
# Utiliser un chemin Linux natif pour la base de données (force override)
if [[ "$(pwd)" == /mnt/* ]]; then
# Utiliser un chemin Linux natif pour la base de données (force override) uniquement en mode SQLite
if [[ "$(pwd)" == /mnt/* ]] && [ "$DB_ENGINE" != "mysql" ]; then
mkdir -p ~/homelab-data/logs
export DATABASE_URL="sqlite+aiosqlite:////home/$USER/homelab-data/homelab.db"
export LOGS_DIR="$HOME/homelab-data/logs"
@ -47,7 +120,8 @@ if [ -x ".venv/bin/python" ]; then
fi
fi
$ALEMBIC_BIN upgrade head
# Les migrations sont gérées automatiquement par l'application au démarrage (init_db)
# $ALEMBIC_BIN upgrade head
# Commande de démarrage du backend FastAPI
$PYTHON_BIN -m uvicorn main:app --host 0.0.0.0 --port 8000 --reload
$PYTHON_BIN -m uvicorn main:app --host 0.0.0.0 --port 8008 --reload

5
run_init_db.py Normal file
View File

@ -0,0 +1,5 @@
"""Standalone entry point that initializes the application database."""
import asyncio

from app.models.database import init_db


def _main() -> None:
    """Drive the async init_db() coroutine to completion."""
    asyncio.run(init_db())


if __name__ == "__main__":
    _main()

View File

@ -1,27 +0,0 @@
"""Probe a few API routes to check whether they are registered."""
import requests

# Use the API key from .env if possible, or try a dummy one to see the error
# type: the call will likely fail auth, but the status code still shows
# whether the route exists.
headers = {
    "X-API-Key": "test-key"
}


def _probe(target: str, prefix: str = "") -> None:
    """GET *target* and print its status/body, or the error on failure."""
    try:
        print(f"{prefix}Calling {target}...")
        response = requests.get(target, headers=headers)
        print(f"Status Code: {response.status_code}")
        print(f"Response: {response.text}")
    except Exception as e:
        print(f"Error: {e}")


# Route under test.
_probe("http://localhost:8000/api/help/catalog")

# Two more routes that definitely exist.
for extra in ("http://localhost:8000/api/help/content",
              "http://localhost:8000/api/auth/me"):
    _probe(extra, prefix="\n")

View File

@ -1,93 +0,0 @@
#!/usr/bin/env python3
"""
Test script validating playbook filtering by host/group compatibility.
"""
import sys
from pathlib import Path

# Add the app directory to the path so that app_optimized resolves.
sys.path.insert(0, str(Path(__file__).parent / "app"))

from app_optimized import AnsibleService
def test_playbook_filtering() -> None:
    """Test playbook filtering against target hosts/groups.

    Exercises three aspects of AnsibleService:
      1. listing every playbook with its 'hosts' field,
      2. computing the playbooks compatible with a given target,
      3. validating the host/group <-> playbook compatibility predicate.

    Results are printed to stdout; mismatches are marked FAIL.
    """
    # Initialize the service against the repo-local ansible directory.
    ansible_dir = Path(__file__).parent / "ansible"
    service = AnsibleService(ansible_dir)
    print("=" * 80)
    print("TEST: Filtrage des playbooks par compatibilité host/group")
    print("=" * 80)
    print()
    # 1. List every playbook together with its 'hosts' field.
    print("1. Liste de tous les playbooks avec leur champ 'hosts':")
    print("-" * 80)
    all_playbooks = service.get_playbooks()
    for pb in all_playbooks:
        print(f" - {pb['filename']:30} | hosts: {pb.get('hosts', 'all'):20} | {pb.get('description', 'N/A')}")
    print()
    # 2. Check compatibility for a few representative targets
    #    (groups, member hosts, non-member hosts).
    test_cases = [
        ("role_proxmox", "Groupe role_proxmox"),
        ("ali2v.xeon.home", "Hôte ali2v.xeon.home (membre de role_proxmox)"),
        ("raspi.4gb.home", "Hôte raspi.4gb.home (membre de role_sbc, pas de role_proxmox)"),
        ("all", "Groupe all"),
        ("env_homelab", "Groupe env_homelab"),
    ]
    for target, description in test_cases:
        print(f"2. Playbooks compatibles avec: {description}")
        print("-" * 80)
        compatible = service.get_compatible_playbooks(target)
        if compatible:
            for pb in compatible:
                print(f"✓ {pb['filename']:30} | hosts: {pb.get('hosts', 'all'):20}")
        else:
            print(" (Aucun playbook compatible)")
        print()
    # 3. Validate the compatibility predicate against known expectations.
    print("3. Tests de validation de compatibilité:")
    print("-" * 80)
    validation_tests = [
        ("backup-proxmox-config.yml", "role_proxmox", True, "Playbook Proxmox sur groupe role_proxmox"),
        ("backup-proxmox-config.yml", "ali2v.xeon.home", True, "Playbook Proxmox sur hôte du groupe role_proxmox"),
        ("backup-proxmox-config.yml", "raspi.4gb.home", False, "Playbook Proxmox sur hôte hors groupe role_proxmox"),
        ("backup-proxmox-config.yml", "env_homelab", False, "Playbook Proxmox sur groupe env_homelab"),
        ("health-check.yml", "all", True, "Playbook 'all' sur groupe all"),
        ("health-check.yml", "role_proxmox", True, "Playbook 'all' sur n'importe quel groupe"),
        ("health-check.yml", "raspi.4gb.home", True, "Playbook 'all' sur n'importe quel hôte"),
        ("bootstrap-host.yml", "raspi.4gb.home", True, "Playbook 'all' sur hôte quelconque"),
    ]
    for playbook_file, target, expected, description in validation_tests:
        # Look up the playbook's 'hosts' field from the full listing.
        pb_info = next((pb for pb in all_playbooks if pb['filename'] == playbook_file), None)
        if pb_info:
            playbook_hosts = pb_info.get('hosts', 'all')
            result = service.is_target_compatible_with_playbook(target, playbook_hosts)
            status = "✓ PASS" if result == expected else "✗ FAIL"
            print(f" {status} | {description}")
            print(f" Playbook hosts='{playbook_hosts}', target='{target}', résultat={result}, attendu={expected}")
        else:
            print(f" ✗ SKIP | Playbook {playbook_file} non trouvé")
        print()
    print("=" * 80)
    print("Tests terminés!")
    print("=" * 80)
def _run_suite() -> None:
    """Execute the test suite; report and exit with code 1 on any error."""
    try:
        test_playbook_filtering()
    except Exception as e:
        print(f"ERREUR: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    _run_suite()

View File

@ -1,15 +0,0 @@
"""Probe whether typing.Union can be subscripted the way SQLAlchemy does."""
import sys
from typing import Union, Any, cast

print(f"Python version: {sys.version}")

try:
    from typing import Union  # re-import, mirroring SQLAlchemy's code path
    member_types = (int, str)
    # SQLAlchemy builds Union types dynamically via __getitem__ on the
    # special form, going through a cast(Any, ...) to satisfy type checkers.
    res = cast(Any, Union).__getitem__(member_types)
    print(f"Success: {res}")
except Exception as e:
    print(f"Error: {e}")
    import traceback
    traceback.print_exc()

View File

@ -1,9 +0,0 @@
"""Check both normal and cast-based subscripting of typing.Union."""
from typing import Union

try:
    print(f"Union[int, str]: {Union[int, str]}")
    # SQLAlchemy subscripts Union through a cast(Any, ...) indirection;
    # reproduce that here to see whether it raises.
    from typing import Any, cast
    dynamic_union = cast(Any, Union)[int, str]
    print(f"Casting and getting item: {dynamic_union}")
except Exception as e:
    print(f"Normal indexing error: {e}")

19
wait_for_db.py Normal file
View File

@ -0,0 +1,19 @@
"""Block until the MySQL service accepts connections, or exit non-zero.

Connection settings are read from the MYSQL_* environment variables (the
same ones env.example declares); each falls back to the docker-compose
defaults, so running without an environment behaves as before.
"""
import os
import sys
import time

import pymysql

MAX_RETRIES = 30        # 30 attempts x 2 s sleep = up to ~60 s of waiting
RETRY_DELAY_SECONDS = 2

print("Waiting for MySQL to boot...")
retries = MAX_RETRIES
while retries > 0:
    try:
        # Credentials come from the environment so this script stays in sync
        # with the .env used to provision the MySQL container.
        conn = pymysql.connect(
            host=os.environ.get("MYSQL_HOST", "127.0.0.1"),
            port=int(os.environ.get("MYSQL_PORT", "3306")),
            user=os.environ.get("MYSQL_USER", "homelab"),
            password=os.environ.get("MYSQL_PASSWORD", "CHANGE_ME"),
            database=os.environ.get("MYSQL_DATABASE", "homelab"),
        )
        conn.close()
        print("MySQL is ready!")
        break
    except Exception as e:
        print(f"Not ready yet, retrying in 2 seconds... ({e})")
        time.sleep(RETRY_DELAY_SECONDS)
        retries -= 1

if retries == 0:
    # Every attempt failed: signal the caller with a proper non-zero status.
    print("Failed to connect to MySQL after 60 seconds.")
    sys.exit(1)