Add SQLite database integration with SQLAlchemy async, migration tooling, and comprehensive CRUD operations for hosts, tasks, schedules, and logs
This commit is contained in: parent 28276f326b, commit ad3a8a5639
@@ -39,11 +39,16 @@ COPY app/ ./
COPY ansible/ /ansible/

# Create the directory for SSH keys (mounted as a volume)
-RUN mkdir -p /app/ssh_keys
+RUN mkdir -p /app/ssh_keys /app/data

# Ansible configuration pointing at the right directory
ENV ANSIBLE_CONFIG=/ansible/ansible.cfg

# Default values for the SQLite database inside the container
ENV HOMELAB_DATA_DIR=/app/data
ENV DB_PATH=/app/data/homelab.db
ENV DATABASE_URL=sqlite+aiosqlite:////app/data/homelab.db

# Expose the port
EXPOSE 8000
48 README.md
@@ -365,16 +365,56 @@ ANSIBLE_DIR=/etc/ansible

### Database

-By default, the application uses an in-memory database. For production use, configure PostgreSQL or SQLite by modifying the `InMemoryDB` class.
+The application uses **SQLite** with **SQLAlchemy 2.x async** for persistent storage of data (hosts, tasks, schedules, logs). The database is created automatically at startup in `data/homelab.db`.

#### Migrating from the JSON files

If you are migrating from an earlier version that used JSON files:

```bash
# 1. Install the new dependencies
pip install -r app/requirements.txt

# 2. Run the migration script
python migrate_json_to_sqlite.py
```

The script:
- Imports data from `ansible/.host_status.json`, `tasks_logs/.bootstrap_status.json`, etc.
- Creates `.bak` backups of the original JSON files
- Generates a migration report

#### Database structure

```
data/homelab.db
├── hosts             # Host inventory
├── bootstrap_status  # SSH bootstrap status per host
├── tasks             # History of executed tasks
├── schedules         # Recurring schedules
├── schedule_runs     # Schedule run history
└── logs              # System logs
```
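To sanity-check the migrated data, the stock `sqlite3` CLI can read this file directly (a quick sketch; table and column names follow the structure above):

```bash
# List the tables created by the initial schema
sqlite3 data/homelab.db ".tables"

# Peek at the most recent tasks
sqlite3 data/homelab.db \
  "SELECT id, action, target, status FROM tasks ORDER BY created_at DESC LIMIT 5;"
```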
#### Alembic migrations

For schema changes:

```bash
# Apply the migrations
alembic upgrade head

# Create a new migration
alembic revision --autogenerate -m "description"
```
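Since `alembic/env.py` (added below) overrides `sqlalchemy.url` with the `DATABASE_URL` environment variable when it is set, a migration run can be pointed at the same database as the app; for example, against the container path used in the Dockerfile:

```bash
# One-off run against the container's database path (illustrative)
DATABASE_URL=sqlite+aiosqlite:////app/data/homelab.db alembic upgrade head
```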
## 🚀 Deployment

### Production

1. **Database configuration**
-  ```python
-  # Replace InMemoryDB with a real database
-  ```
+  - Default: SQLite in `data/homelab.db`
+  - Environment variable: `DATABASE_URL=sqlite+aiosqlite:///./data/homelab.db`

2. **Security**
   - Use a strong API key
35 alembic.ini (new file)
@@ -0,0 +1,35 @@
[alembic]
script_location = alembic
sqlalchemy.url = sqlite+aiosqlite:///./data/homelab.db

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console

[logger_sqlalchemy]
level = WARN
handlers = console
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers = console
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
91 alembic/env.py (new file)
@@ -0,0 +1,91 @@
from __future__ import annotations

import asyncio
from logging.config import fileConfig
import os
from pathlib import Path

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Add project root to sys.path for module imports
import sys
ROOT_DIR = Path(__file__).resolve().parents[1]
if str(ROOT_DIR) not in sys.path:
    sys.path.insert(0, str(ROOT_DIR))

from app.models.database import Base, metadata_obj, DATABASE_URL  # noqa: E402
# Import the model modules so their tables are registered on metadata_obj
# (required for `alembic revision --autogenerate` to see the schema).
import app.models  # noqa: E402,F401

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Override sqlalchemy.url from environment if provided
url_from_env = os.environ.get("DATABASE_URL")
if url_from_env:
    config.set_main_option("sqlalchemy.url", url_from_env)
else:
    config.set_main_option("sqlalchemy.url", str(DATABASE_URL))

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = metadata_obj


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""

    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
        future=True,
    )

    async def async_main() -> None:
        async with connectable.connect() as connection:
            await connection.run_sync(do_run_migrations)
        await connectable.dispose()

    asyncio.run(async_main())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
122 alembic/versions/0001_initial_schema.py (new file)
@@ -0,0 +1,122 @@
"""Initial database schema for Homelab Automation

Revision ID: 0001_initial
Revises:
Create Date: 2025-12-04
"""

from __future__ import annotations

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "0001_initial"
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table(
        "hosts",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("ip_address", sa.String(), nullable=False, unique=True),
        sa.Column("status", sa.String(), nullable=False, server_default=sa.text("'unknown'")),
        sa.Column("ansible_group", sa.String(), nullable=True),
        sa.Column("last_seen", sa.DateTime(timezone=True), nullable=True),
        sa.Column("reachable", sa.Boolean(), nullable=False, server_default=sa.text("0")),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
    )

    op.create_table(
        "bootstrap_status",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("host_id", sa.String(), sa.ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False),
        sa.Column("status", sa.String(), nullable=False),
        sa.Column("automation_user", sa.String(), nullable=True),
        sa.Column("last_attempt", sa.DateTime(timezone=True), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )

    op.create_table(
        "tasks",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("action", sa.String(), nullable=False),
        sa.Column("target", sa.String(), nullable=False),
        sa.Column("status", sa.String(), nullable=False, server_default=sa.text("'pending'")),
        sa.Column("playbook", sa.String(), nullable=True),
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("result_data", sa.JSON(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )

    op.create_table(
        "schedules",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("playbook", sa.String(), nullable=False),
        sa.Column("target", sa.String(), nullable=False),
        sa.Column("schedule_type", sa.String(), nullable=False),
        sa.Column("schedule_time", sa.DateTime(timezone=True), nullable=True),
        sa.Column("recurrence_type", sa.String(), nullable=True),
        sa.Column("recurrence_time", sa.String(), nullable=True),
        sa.Column("recurrence_days", sa.Text(), nullable=True),
        sa.Column("cron_expression", sa.String(), nullable=True),
        sa.Column("enabled", sa.Boolean(), nullable=False, server_default=sa.text("1")),
        sa.Column("tags", sa.Text(), nullable=True),
        sa.Column("next_run", sa.DateTime(timezone=True), nullable=True),
        sa.Column("last_run", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
    )

    op.create_table(
        "schedule_runs",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("schedule_id", sa.String(), sa.ForeignKey("schedules.id", ondelete="CASCADE"), nullable=False),
        sa.Column("task_id", sa.String(), sa.ForeignKey("tasks.id", ondelete="SET NULL"), nullable=True),
        sa.Column("status", sa.String(), nullable=False),
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("duration", sa.Float(), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("output", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )

    op.create_table(
        "logs",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("level", sa.String(), nullable=False),
        sa.Column("source", sa.String(), nullable=True),
        sa.Column("message", sa.Text(), nullable=False),
        sa.Column("details", sa.JSON(), nullable=True),
        sa.Column("host_id", sa.String(), sa.ForeignKey("hosts.id", ondelete="SET NULL"), nullable=True),
        sa.Column("task_id", sa.String(), sa.ForeignKey("tasks.id", ondelete="SET NULL"), nullable=True),
        sa.Column("schedule_id", sa.String(), sa.ForeignKey("schedules.id", ondelete="SET NULL"), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )

    op.create_index("idx_logs_created_at", "logs", ["created_at"])
    op.create_index("idx_logs_level", "logs", ["level"])
    op.create_index("idx_logs_source", "logs", ["source"])


def downgrade() -> None:
    op.drop_index("idx_logs_source", table_name="logs")
    op.drop_index("idx_logs_level", table_name="logs")
    op.drop_index("idx_logs_created_at", table_name="logs")
    op.drop_table("logs")
    op.drop_table("schedule_runs")
    op.drop_table("schedules")
    op.drop_table("tasks")
    op.drop_table("bootstrap_status")
    op.drop_table("hosts")
1108 app/app_optimized.py (file diff suppressed because it is too large)
13 app/crud/__init__.py (new file)
@@ -0,0 +1,13 @@
from .host import HostRepository
from .bootstrap_status import BootstrapStatusRepository
from .task import TaskRepository
from .schedule import ScheduleRepository
from .log import LogRepository

__all__ = [
    "HostRepository",
    "BootstrapStatusRepository",
    "TaskRepository",
    "ScheduleRepository",
    "LogRepository",
]
41 app/crud/bootstrap_status.py (new file)
@@ -0,0 +1,41 @@
from __future__ import annotations

from typing import Optional

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from models.bootstrap_status import BootstrapStatus


class BootstrapStatusRepository:
    def __init__(self, session: AsyncSession):
        self.session = session

    async def list_for_host(self, host_id: str) -> list[BootstrapStatus]:
        stmt = select(BootstrapStatus).where(BootstrapStatus.host_id == host_id).order_by(BootstrapStatus.created_at.desc())
        result = await self.session.execute(stmt)
        return result.scalars().all()

    async def latest_for_host(self, host_id: str) -> Optional[BootstrapStatus]:
        stmt = (
            select(BootstrapStatus)
            .where(BootstrapStatus.host_id == host_id)
            .order_by(BootstrapStatus.created_at.desc())
            .limit(1)
        )
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def create(self, *, host_id: str, status: str, automation_user: Optional[str] = None,
                     last_attempt=None, error_message: Optional[str] = None) -> BootstrapStatus:
        record = BootstrapStatus(
            host_id=host_id,
            status=status,
            automation_user=automation_user,
            last_attempt=last_attempt,
            error_message=error_message,
        )
        self.session.add(record)
        await self.session.flush()
        return record
67 app/crud/host.py (new file)
@@ -0,0 +1,67 @@
from __future__ import annotations

from datetime import datetime, timezone
from typing import Optional

from sqlalchemy import select, update
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload

from models.host import Host


class HostRepository:
    def __init__(self, session: AsyncSession):
        self.session = session

    async def list(self, limit: int = 100, offset: int = 0, include_deleted: bool = False) -> list[Host]:
        stmt = select(Host).order_by(Host.created_at.desc()).offset(offset).limit(limit)
        if not include_deleted:
            stmt = stmt.where(Host.deleted_at.is_(None))
        result = await self.session.execute(stmt)
        return result.scalars().all()

    async def get(self, host_id: str, include_deleted: bool = False) -> Optional[Host]:
        stmt = select(Host).where(Host.id == host_id).options(selectinload(Host.bootstrap_statuses))
        if not include_deleted:
            stmt = stmt.where(Host.deleted_at.is_(None))
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def get_by_ip(self, ip_address: str, include_deleted: bool = False) -> Optional[Host]:
        stmt = select(Host).where(Host.ip_address == ip_address)
        if not include_deleted:
            stmt = stmt.where(Host.deleted_at.is_(None))
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def create(self, *, id: str, name: str, ip_address: str, ansible_group: Optional[str] = None,
                     status: str = "unknown", reachable: bool = False, last_seen: Optional[datetime] = None) -> Host:
        host = Host(
            id=id,
            name=name,
            ip_address=ip_address,
            ansible_group=ansible_group,
            status=status,
            reachable=reachable,
            last_seen=last_seen,
        )
        self.session.add(host)
        await self.session.flush()
        return host

    async def update(self, host: Host, **fields) -> Host:
        for key, value in fields.items():
            if value is not None:
                setattr(host, key, value)
        await self.session.flush()
        return host

    async def soft_delete(self, host_id: str) -> bool:
        stmt = (
            update(Host)
            .where(Host.id == host_id, Host.deleted_at.is_(None))
            .values(deleted_at=datetime.now(timezone.utc))
        )
        result = await self.session.execute(stmt)
        return result.rowcount > 0
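As a usage sketch for these repositories (host name and address are made up; the `session.begin()` block supplies the commit that the repositories, which only `flush()`, deliberately leave to the caller; the import prefix assumes `app/` is on `sys.path`, as in the container image):

```python
import asyncio
import uuid

from models.database import async_session_maker, init_db
from crud.host import HostRepository


async def demo() -> None:
    await init_db()  # dev convenience; Alembic normally owns the schema
    async with async_session_maker() as session:
        async with session.begin():  # commits on success, rolls back on error
            repo = HostRepository(session)
            await repo.create(
                id=uuid.uuid4().hex,
                name="node-01",           # hypothetical host
                ip_address="192.0.2.10",  # TEST-NET documentation address
            )
        host = await repo.get_by_ip("192.0.2.10")
        print(host)  # <Host id=... name=node-01 ip=192.0.2.10>


if __name__ == "__main__":
    asyncio.run(demo())
```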
28 app/crud/log.py (new file)
@@ -0,0 +1,28 @@
from __future__ import annotations

from typing import Optional

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from models.log import Log


class LogRepository:
    def __init__(self, session: AsyncSession):
        self.session = session

    async def list(self, limit: int = 100, offset: int = 0, level: Optional[str] = None, source: Optional[str] = None) -> list[Log]:
        stmt = select(Log).order_by(Log.created_at.desc()).offset(offset).limit(limit)
        if level:
            stmt = stmt.where(Log.level == level)
        if source:
            stmt = stmt.where(Log.source == source)
        result = await self.session.execute(stmt)
        return result.scalars().all()

    async def create(self, **fields) -> Log:
        log = Log(**fields)
        self.session.add(log)
        await self.session.flush()
        return log
60 app/crud/schedule.py (new file)
@@ -0,0 +1,60 @@
from __future__ import annotations

from datetime import datetime, timezone
from typing import Optional

from sqlalchemy import select, update
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload

from models.schedule import Schedule
from models.schedule_run import ScheduleRun


class ScheduleRepository:
    def __init__(self, session: AsyncSession):
        self.session = session

    async def list(self, limit: int = 100, offset: int = 0, include_deleted: bool = False) -> list[Schedule]:
        stmt = select(Schedule).order_by(Schedule.created_at.desc()).offset(offset).limit(limit)
        if not include_deleted:
            stmt = stmt.where(Schedule.deleted_at.is_(None))
        result = await self.session.execute(stmt)
        return result.scalars().all()

    async def get(self, schedule_id: str, include_deleted: bool = False) -> Optional[Schedule]:
        stmt = select(Schedule).where(Schedule.id == schedule_id).options(
            selectinload(Schedule.runs)
        )
        if not include_deleted:
            stmt = stmt.where(Schedule.deleted_at.is_(None))
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def create(self, **fields) -> Schedule:
        schedule = Schedule(**fields)
        self.session.add(schedule)
        await self.session.flush()
        return schedule

    async def update(self, schedule: Schedule, **fields) -> Schedule:
        for key, value in fields.items():
            if value is not None:
                setattr(schedule, key, value)
        await self.session.flush()
        return schedule

    async def soft_delete(self, schedule_id: str) -> bool:
        stmt = (
            update(Schedule)
            .where(Schedule.id == schedule_id, Schedule.deleted_at.is_(None))
            .values(deleted_at=datetime.now(timezone.utc))
        )
        result = await self.session.execute(stmt)
        return result.rowcount > 0

    async def add_run(self, **fields) -> ScheduleRun:
        run = ScheduleRun(**fields)
        self.session.add(run)
        await self.session.flush()
        return run
35 app/crud/schedule_run.py (new file)
@@ -0,0 +1,35 @@
from __future__ import annotations

from typing import Optional

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from models.schedule_run import ScheduleRun


class ScheduleRunRepository:
    def __init__(self, session: AsyncSession):
        self.session = session

    async def get(self, run_id: int) -> Optional[ScheduleRun]:
        stmt = select(ScheduleRun).where(ScheduleRun.id == run_id)
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def list_for_schedule(self, schedule_id: str, limit: int = 100, offset: int = 0) -> list[ScheduleRun]:
        stmt = (
            select(ScheduleRun)
            .where(ScheduleRun.schedule_id == schedule_id)
            .order_by(ScheduleRun.started_at.desc())
            .offset(offset)
            .limit(limit)
        )
        result = await self.session.execute(stmt)
        return result.scalars().all()

    async def create(self, **fields) -> ScheduleRun:
        run = ScheduleRun(**fields)
        self.session.add(run)
        await self.session.flush()
        return run
44 app/crud/task.py (new file)
@@ -0,0 +1,44 @@
from __future__ import annotations

from typing import Optional

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload

from models.task import Task


class TaskRepository:
    def __init__(self, session: AsyncSession):
        self.session = session

    async def list(self, limit: int = 100, offset: int = 0) -> list[Task]:
        stmt = select(Task).order_by(Task.created_at.desc()).offset(offset).limit(limit)
        result = await self.session.execute(stmt)
        return result.scalars().all()

    async def get(self, task_id: str) -> Optional[Task]:
        stmt = select(Task).where(Task.id == task_id).options(selectinload(Task.schedule_runs))
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def create(self, *, id: str, action: str, target: str, playbook: Optional[str] = None,
                     status: str = "pending") -> Task:
        task = Task(
            id=id,
            action=action,
            target=target,
            playbook=playbook,
            status=status,
        )
        self.session.add(task)
        await self.session.flush()
        return task

    async def update(self, task: Task, **fields) -> Task:
        for key, value in fields.items():
            if value is not None:
                setattr(task, key, value)
        await self.session.flush()
        return task
17 app/models/__init__.py (new file)
@@ -0,0 +1,17 @@
from .database import Base
from .host import Host
from .bootstrap_status import BootstrapStatus
from .task import Task
from .schedule import Schedule
from .schedule_run import ScheduleRun
from .log import Log

__all__ = [
    "Base",
    "Host",
    "BootstrapStatus",
    "Task",
    "Schedule",
    "ScheduleRun",
    "Log",
]
27 app/models/bootstrap_status.py (new file)
@@ -0,0 +1,27 @@
from __future__ import annotations

from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, ForeignKey, Integer, String, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from .database import Base


class BootstrapStatus(Base):
    __tablename__ = "bootstrap_status"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    host_id: Mapped[str] = mapped_column(String, ForeignKey("hosts.id", ondelete="CASCADE"), nullable=False)
    status: Mapped[str] = mapped_column(String, nullable=False)
    automation_user: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    last_attempt: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())

    host: Mapped["Host"] = relationship("Host", back_populates="bootstrap_statuses")

    def __repr__(self) -> str:  # pragma: no cover - debug helper
        return f"<BootstrapStatus id={self.id} host_id={self.host_id} status={self.status}>"
106 app/models/database.py (new file)
@@ -0,0 +1,106 @@
"""Database configuration and session management for Homelab Automation.

Uses SQLAlchemy 2.x async engine with SQLite + aiosqlite driver.
"""
from __future__ import annotations

import os
from pathlib import Path
from typing import AsyncGenerator
from urllib.parse import urlparse

from sqlalchemy import event, MetaData
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import declarative_base

# Naming convention to keep Alembic happy with constraints
NAMING_CONVENTION = {
    "ix": "ix_%(column_0_label)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s",
}

metadata_obj = MetaData(naming_convention=NAMING_CONVENTION)
Base = declarative_base(metadata=metadata_obj)

# Resolve base path (project root)
ROOT_DIR = Path(__file__).resolve().parents[2]
DEFAULT_DB_PATH = Path(os.environ.get("DB_PATH") or (ROOT_DIR / "data" / "homelab.db"))
DATABASE_URL = os.environ.get("DATABASE_URL", f"sqlite+aiosqlite:///{DEFAULT_DB_PATH}")


# Ensure the SQLite directory exists even if DATABASE_URL overrides DB_PATH
def _ensure_sqlite_dir(db_url: str) -> None:
    if not db_url.startswith("sqlite"):
        return
    parsed = urlparse(db_url.replace("sqlite+aiosqlite", "sqlite"))
    if parsed.scheme != "sqlite":
        return
    db_path = Path(parsed.path)
    if db_path.parent:
        db_path.parent.mkdir(parents=True, exist_ok=True)


DEFAULT_DB_PATH.parent.mkdir(parents=True, exist_ok=True)
_ensure_sqlite_dir(DATABASE_URL)


def _debug_db_paths() -> None:
    try:
        print(
            "[DB] DATABASE_URL=%s, DEFAULT_DB_PATH=%s, parent_exists=%s, parent=%s"
            % (
                DATABASE_URL,
                DEFAULT_DB_PATH,
                DEFAULT_DB_PATH.parent.exists(),
                DEFAULT_DB_PATH.parent,
            )
        )
    except Exception:
        # Debug logging should never break startup
        pass


_debug_db_paths()

engine: AsyncEngine = create_async_engine(
    DATABASE_URL,
    echo=False,
    pool_pre_ping=True,
    future=True,
)

# Ensure SQLite pragmas (WAL + FK) when using SQLite
if DATABASE_URL.startswith("sqlite"):
    @event.listens_for(engine.sync_engine, "connect")
    def _set_sqlite_pragmas(dbapi_connection, connection_record):  # type: ignore[override]
        cursor = dbapi_connection.cursor()
        cursor.execute("PRAGMA foreign_keys=ON")
        cursor.execute("PRAGMA journal_mode=WAL")
        cursor.close()

async_session_maker = async_sessionmaker(
    bind=engine,
    autoflush=False,
    expire_on_commit=False,
    class_=AsyncSession,
)


async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency that yields an AsyncSession with automatic rollback on error."""
    async with async_session_maker() as session:  # type: AsyncSession
        try:
            yield session
        except Exception:
            await session.rollback()
            raise
        finally:
            await session.close()


async def init_db() -> None:
    """Create all tables (mostly for dev/tests; migrations should be handled by Alembic)."""
    from . import host, task, schedule, schedule_run, log  # noqa: F401

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
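A sketch of how `get_db` plugs into FastAPI as a dependency (the route below is illustrative only; the real routes live in `app_optimized.py`, whose diff is suppressed above, and the import prefix assumes `app/` is on `sys.path`):

```python
from fastapi import Depends, FastAPI
from sqlalchemy.ext.asyncio import AsyncSession

from crud.host import HostRepository
from models.database import get_db

app = FastAPI()


@app.get("/hosts")  # hypothetical endpoint
async def list_hosts(db: AsyncSession = Depends(get_db)) -> list[dict]:
    repo = HostRepository(db)
    hosts = await repo.list(limit=50)
    # Reads need no commit; a write handler would call `await db.commit()` here.
    return [{"id": h.id, "name": h.name, "ip_address": h.ip_address} for h in hosts]
```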
33 app/models/host.py (new file)
@@ -0,0 +1,33 @@
from __future__ import annotations

from datetime import datetime
from typing import List, Optional

from sqlalchemy import Boolean, DateTime, String, text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from .database import Base


class Host(Base):
    __tablename__ = "hosts"

    id: Mapped[str] = mapped_column(String, primary_key=True)
    name: Mapped[str] = mapped_column(String, nullable=False)
    ip_address: Mapped[str] = mapped_column(String, nullable=False, unique=True)
    status: Mapped[str] = mapped_column(String, nullable=False, server_default=text("'unknown'"))
    ansible_group: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    last_seen: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    reachable: Mapped[bool] = mapped_column(Boolean, nullable=False, server_default=text("0"))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    bootstrap_statuses: Mapped[List["BootstrapStatus"]] = relationship(
        "BootstrapStatus", back_populates="host", cascade="all, delete-orphan"
    )
    logs: Mapped[List["Log"]] = relationship("Log", back_populates="host")

    def __repr__(self) -> str:  # pragma: no cover - debug helper
        return f"<Host id={self.id} name={self.name} ip={self.ip_address}>"
36 app/models/log.py (new file)
@@ -0,0 +1,36 @@
from __future__ import annotations

from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, ForeignKey, Integer, JSON, String, Text, Index
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from .database import Base


class Log(Base):
    __tablename__ = "logs"
    __table_args__ = (
        Index("idx_logs_created_at", "created_at"),
        Index("idx_logs_level", "level"),
        Index("idx_logs_source", "source"),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    level: Mapped[str] = mapped_column(String, nullable=False)
    source: Mapped[Optional[str]] = mapped_column(String)
    message: Mapped[str] = mapped_column(Text, nullable=False)
    details: Mapped[Optional[dict]] = mapped_column(JSON)
    host_id: Mapped[Optional[str]] = mapped_column(String, ForeignKey("hosts.id", ondelete="SET NULL"))
    task_id: Mapped[Optional[str]] = mapped_column(String, ForeignKey("tasks.id", ondelete="SET NULL"))
    schedule_id: Mapped[Optional[str]] = mapped_column(String, ForeignKey("schedules.id", ondelete="SET NULL"))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())

    host: Mapped[Optional["Host"]] = relationship("Host", back_populates="logs")
    task: Mapped[Optional["Task"]] = relationship("Task", back_populates="logs")
    schedule: Mapped[Optional["Schedule"]] = relationship("Schedule", back_populates="logs")

    def __repr__(self) -> str:  # pragma: no cover - debug helper
        return f"<Log id={self.id} level={self.level} source={self.source}>"
40 app/models/schedule.py (new file)
@@ -0,0 +1,40 @@
from __future__ import annotations

from datetime import datetime
from typing import List, Optional

from sqlalchemy import Boolean, DateTime, String, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from .database import Base


class Schedule(Base):
    __tablename__ = "schedules"

    id: Mapped[str] = mapped_column(String, primary_key=True)
    name: Mapped[str] = mapped_column(String, nullable=False)
    playbook: Mapped[str] = mapped_column(String, nullable=False)
    target: Mapped[str] = mapped_column(String, nullable=False)
    schedule_type: Mapped[str] = mapped_column(String, nullable=False)
    schedule_time: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    recurrence_type: Mapped[Optional[str]] = mapped_column(String)
    recurrence_time: Mapped[Optional[str]] = mapped_column(String)
    recurrence_days: Mapped[Optional[str]] = mapped_column(Text)
    cron_expression: Mapped[Optional[str]] = mapped_column(String)
    enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    tags: Mapped[Optional[str]] = mapped_column(Text)
    next_run: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    last_run: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    runs: Mapped[List["ScheduleRun"]] = relationship(
        "ScheduleRun", back_populates="schedule", cascade="all, delete-orphan"
    )
    logs: Mapped[List["Log"]] = relationship("Log", back_populates="schedule")

    def __repr__(self) -> str:  # pragma: no cover - debug helper
        return f"<Schedule id={self.id} name={self.name} target={self.target}>"
31 app/models/schedule_run.py (new file)
@@ -0,0 +1,31 @@
from __future__ import annotations

from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, ForeignKey, Integer, String, Text, Float
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from .database import Base


class ScheduleRun(Base):
    __tablename__ = "schedule_runs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    schedule_id: Mapped[str] = mapped_column(String, ForeignKey("schedules.id", ondelete="CASCADE"), nullable=False)
    task_id: Mapped[Optional[str]] = mapped_column(String, ForeignKey("tasks.id", ondelete="SET NULL"))
    status: Mapped[str] = mapped_column(String, nullable=False)
    started_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    completed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    duration: Mapped[Optional[float]] = mapped_column(Float)
    error_message: Mapped[Optional[str]] = mapped_column(Text)
    output: Mapped[Optional[str]] = mapped_column(Text)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())

    schedule: Mapped["Schedule"] = relationship("Schedule", back_populates="runs")
    task: Mapped[Optional["Task"]] = relationship("Task", back_populates="schedule_runs")

    def __repr__(self) -> str:  # pragma: no cover - debug helper
        return f"<ScheduleRun id={self.id} schedule_id={self.schedule_id} status={self.status}>"
31 app/models/task.py (new file)
@@ -0,0 +1,31 @@
from __future__ import annotations

from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, String, Text, JSON, text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from .database import Base


class Task(Base):
    __tablename__ = "tasks"

    id: Mapped[str] = mapped_column(String, primary_key=True)
    action: Mapped[str] = mapped_column(String, nullable=False)
    target: Mapped[str] = mapped_column(String, nullable=False)
    status: Mapped[str] = mapped_column(String, nullable=False, server_default=text("'pending'"))
    playbook: Mapped[Optional[str]] = mapped_column(String)
    started_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    completed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    error_message: Mapped[Optional[str]] = mapped_column(Text)
    result_data: Mapped[Optional[dict]] = mapped_column(JSON)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())

    schedule_runs: Mapped[list["ScheduleRun"]] = relationship("ScheduleRun", back_populates="task")
    logs: Mapped[list["Log"]] = relationship("Log", back_populates="task")

    def __repr__(self) -> str:  # pragma: no cover - debug helper
        return f"<Task id={self.id} action={self.action} target={self.target} status={self.status}>"
@@ -10,4 +10,9 @@ requests>=2.32.0
httpx>=0.28.0
apscheduler>=3.10.0
croniter>=2.0.0
-pytz>=2024.1
+pytz>=2024.1
+sqlalchemy>=2.0.0
+alembic>=1.12.0
+aiosqlite>=0.19.0
+pytest>=7.0.0
+pytest-asyncio>=0.21.0
21 app/schemas/__init__.py (new file)
@@ -0,0 +1,21 @@
from .host import HostCreate, HostUpdate, HostOut
from .bootstrap_status import BootstrapStatusOut
from .task import TaskCreate, TaskUpdate, TaskOut
from .schedule import ScheduleCreate, ScheduleUpdate, ScheduleOut, ScheduleRunOut
from .log import LogCreate, LogOut

__all__ = [
    "HostCreate",
    "HostUpdate",
    "HostOut",
    "BootstrapStatusOut",
    "TaskCreate",
    "TaskUpdate",
    "TaskOut",
    "ScheduleCreate",
    "ScheduleUpdate",
    "ScheduleOut",
    "ScheduleRunOut",
    "LogCreate",
    "LogOut",
]
18 app/schemas/bootstrap_status.py (new file)
@@ -0,0 +1,18 @@
from __future__ import annotations

from datetime import datetime
from typing import Optional

from pydantic import BaseModel, ConfigDict


class BootstrapStatusOut(BaseModel):
    id: int
    host_id: str
    status: str
    automation_user: Optional[str] = None
    last_attempt: Optional[datetime] = None
    error_message: Optional[str] = None
    created_at: datetime

    model_config = ConfigDict(from_attributes=True)
36 app/schemas/host.py (new file)
@@ -0,0 +1,36 @@
from __future__ import annotations

from datetime import datetime
from typing import Optional

from pydantic import BaseModel, Field, ConfigDict


class HostBase(BaseModel):
    name: str = Field(..., min_length=1)
    ip_address: str = Field(..., min_length=3)
    ansible_group: Optional[str] = None
    status: Optional[str] = Field(default="unknown")
    reachable: Optional[bool] = False
    last_seen: Optional[datetime] = None


class HostCreate(HostBase):
    pass


class HostUpdate(BaseModel):
    ansible_group: Optional[str] = None
    status: Optional[str] = None
    reachable: Optional[bool] = None
    last_seen: Optional[datetime] = None
    deleted_at: Optional[datetime] = None


class HostOut(HostBase):
    id: str
    created_at: datetime
    updated_at: datetime
    deleted_at: Optional[datetime] = None

    model_config = ConfigDict(from_attributes=True)
23 app/schemas/log.py (new file)
@@ -0,0 +1,23 @@
from __future__ import annotations

from datetime import datetime
from typing import Optional, Dict, Any

from pydantic import BaseModel, Field, ConfigDict


class LogCreate(BaseModel):
    level: str = Field(..., description="Log level: info/warning/error/debug")
    source: Optional[str] = None
    message: str
    details: Optional[Dict[str, Any]] = None
    host_id: Optional[str] = None
    task_id: Optional[str] = None
    schedule_id: Optional[str] = None


class LogOut(LogCreate):
    id: int
    created_at: datetime

    model_config = ConfigDict(from_attributes=True)
67 app/schemas/schedule.py (new file)
@@ -0,0 +1,67 @@
from __future__ import annotations

from datetime import datetime
from typing import List, Optional

from pydantic import BaseModel, ConfigDict


class ScheduleBase(BaseModel):
    name: str
    playbook: str
    target: str
    schedule_type: str
    schedule_time: Optional[datetime] = None
    recurrence_type: Optional[str] = None
    recurrence_time: Optional[str] = None
    recurrence_days: Optional[List[int]] = None
    cron_expression: Optional[str] = None
    enabled: bool = True
    tags: Optional[List[str]] = None
    next_run: Optional[datetime] = None
    last_run: Optional[datetime] = None


class ScheduleCreate(ScheduleBase):
    pass


class ScheduleUpdate(BaseModel):
    name: Optional[str] = None
    playbook: Optional[str] = None
    target: Optional[str] = None
    schedule_type: Optional[str] = None
    schedule_time: Optional[datetime] = None
    recurrence_type: Optional[str] = None
    recurrence_time: Optional[str] = None
    recurrence_days: Optional[List[int]] = None
    cron_expression: Optional[str] = None
    enabled: Optional[bool] = None
    tags: Optional[List[str]] = None
    next_run: Optional[datetime] = None
    last_run: Optional[datetime] = None
    deleted_at: Optional[datetime] = None


class ScheduleOut(ScheduleBase):
    id: str
    created_at: datetime
    updated_at: datetime
    deleted_at: Optional[datetime] = None

    model_config = ConfigDict(from_attributes=True)


class ScheduleRunOut(BaseModel):
    id: int
    schedule_id: str
    task_id: Optional[str] = None
    status: str
    started_at: datetime
    completed_at: Optional[datetime] = None
    duration: Optional[float] = None
    error_message: Optional[str] = None
    output: Optional[str] = None
    created_at: datetime

    model_config = ConfigDict(from_attributes=True)
36 app/schemas/task.py (new file)
@@ -0,0 +1,36 @@
from __future__ import annotations

from datetime import datetime
from typing import Optional, Dict, Any

from pydantic import BaseModel, Field, ConfigDict


class TaskBase(BaseModel):
    action: str
    target: str
    playbook: Optional[str] = None
    status: str = Field(default="pending")
    result_data: Optional[Dict[str, Any]] = None
    error_message: Optional[str] = None


class TaskCreate(TaskBase):
    pass


class TaskUpdate(BaseModel):
    status: Optional[str] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    error_message: Optional[str] = None
    result_data: Optional[Dict[str, Any]] = None


class TaskOut(TaskBase):
    id: str
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    created_at: datetime

    model_config = ConfigDict(from_attributes=True)
BIN data/homelab.db (new file): binary file not shown
BIN data/homelab.db-shm (new file): binary file not shown
BIN data/homelab.db-wal (new file): binary file not shown
@@ -19,6 +19,11 @@ services:
      - ANSIBLE_HOST_KEY_CHECKING=False
      # SSH timeout
      - ANSIBLE_TIMEOUT=30
      # Homelab data folder (mounted at /app/data)
      - HOMELAB_DATA_DIR=${HOMELAB_DATA_DIR:-./data}
      # SQLite DB path and URL (must match app/models/database.py)
      - DB_PATH=/app/data/homelab.db
      - DATABASE_URL=sqlite+aiosqlite:////app/data/homelab.db
      # Task logs directory (YYYY/MM/DD layout)
      - DIR_LOGS_TASKS=/app/tasks_logs
      # Ansible inventory
@@ -28,6 +33,8 @@ services:
      # Ansible group_vars
      - ANSIBLE_GROUP_VARS=./ansible/inventory/group_vars
    volumes:
      # Mount the data folder
      - ${HOMELAB_DATA_DIR:-./data}:/app/data
      # Mount the Ansible inventory (editable without rebuilding)
      - ${ANSIBLE_INVENTORY:-./ansible/inventory}:/ansible/inventory
      # Mount the playbooks (editable without rebuilding)
259 migrate_json_to_sqlite.py (new file)
@ -0,0 +1,259 @@
|
||||
"""Migration des fichiers JSON vers SQLite (SQLAlchemy async).
|
||||
|
||||
- Importe host_status, bootstrap_status, schedule_runs, et logs JSON journaliers si présents.
|
||||
- Sauvegarde les fichiers JSON source en .bak après succès.
|
||||
- Génère un rapport de migration en sortie standard.
|
||||
|
||||
Usage :
|
||||
python migrate_json_to_sqlite.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import yaml
|
||||
|
||||
from app.models.database import async_session_maker, init_db
|
||||
from app.crud.host import HostRepository
|
||||
from app.crud.bootstrap_status import BootstrapStatusRepository
|
||||
from app.crud.schedule import ScheduleRepository
|
||||
from app.crud.schedule_run import ScheduleRunRepository
|
||||
from app.crud.task import TaskRepository
|
||||
from app.crud.log import LogRepository
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent
|
||||
ANSIBLE_DIR = BASE_DIR / "ansible"
|
||||
TASKS_LOGS_DIR = BASE_DIR / "tasks_logs"
|
||||
HOST_STATUS_FILE = ANSIBLE_DIR / ".host_status.json"
|
||||
BOOTSTRAP_STATUS_FILE = TASKS_LOGS_DIR / ".bootstrap_status.json"
|
||||
SCHEDULE_RUNS_FILE = TASKS_LOGS_DIR / ".schedule_runs.json"
|
||||
|
||||
|
||||
def load_json(path: Path, default: Any) -> Any:
|
||||
if not path.exists():
|
||||
return default
|
||||
try:
|
||||
with path.open("r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
except Exception:
|
||||
return default
|
||||
|
||||
|
||||
def backup_file(path: Path) -> None:
|
||||
if path.exists():
|
||||
bak = path.with_suffix(path.suffix + ".bak")
|
||||
path.rename(bak)
|
||||
|
||||
|
||||
def parse_datetime(value: Optional[str]) -> Optional[datetime]:
|
||||
if not value:
|
||||
return None
|
||||
try:
|
||||
return datetime.fromisoformat(value)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
async def migrate_hosts(session, report: Dict[str, Any]) -> Dict[str, str]:
|
||||
repo = HostRepository(session)
|
||||
inventory_hosts: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
# Lire inventaire Ansible pour récupérer les groupes
|
||||
inventory_file = ANSIBLE_DIR / "inventory" / "hosts.yml"
|
||||
if inventory_file.exists():
|
||||
data = yaml.safe_load(inventory_file.read_text(encoding="utf-8")) or {}
|
||||
all_children = data.get("all", {}).get("children", {})
|
||||
for group_name, group_data in all_children.items():
|
||||
hosts = group_data.get("hosts", {}) or {}
|
||||
for host_name in hosts.keys():
|
||||
entry = inventory_hosts.setdefault(host_name, {"groups": set()})
|
||||
entry["groups"].add(group_name)
|
||||
else:
|
||||
report.setdefault("warnings", []).append("Inventaire Ansible introuvable, ip=hostname et groupe vide")
|
||||
|
||||
host_status_data = load_json(HOST_STATUS_FILE, {"hosts": {}}).get("hosts", {})
|
||||
|
||||
created = 0
|
||||
host_map: Dict[str, str] = {} # hostname -> host_id
|
||||
for host_name, meta in inventory_hosts.items():
|
||||
status_entry = host_status_data.get(host_name, {})
|
||||
status = status_entry.get("status", "unknown")
|
||||
last_seen = parse_datetime(status_entry.get("last_seen"))
|
||||
ansible_group = next(iter(g for g in meta.get("groups", []) if g.startswith("env_")), None)
|
||||
reachable = status != "offline"
|
||||
host_id = uuid.uuid4().hex
|
||||
|
||||
host = await repo.create(
|
||||
id=host_id,
|
||||
name=host_name,
|
||||
ip_address=host_name,
|
||||
ansible_group=ansible_group,
|
||||
status=status,
|
||||
reachable=reachable,
|
||||
last_seen=last_seen,
|
||||
)
|
||||
created += 1
|
||||
host_map[host_name] = host.id
|
||||
|
||||
report["hosts"] = created
|
||||
backup_file(HOST_STATUS_FILE)
|
||||
return host_map
|
||||
|
||||
|
||||
async def migrate_bootstrap_status(session, report: Dict[str, Any], host_map: Dict[str, str]) -> None:
|
||||
repo = BootstrapStatusRepository(session)
|
||||
data = load_json(BOOTSTRAP_STATUS_FILE, {"hosts": {}}).get("hosts", {})
|
||||
created = 0
|
||||
for host_name, meta in data.items():
|
||||
host_id = host_map.get(host_name)
|
||||
if not host_id:
|
||||
report.setdefault("warnings", []).append(f"Bootstrap ignoré: host inconnu {host_name}")
|
||||
continue
|
||||
bootstrap_ok = meta.get("bootstrap_ok", False)
|
||||
status = "success" if bootstrap_ok else "failed"
|
||||
last_attempt = parse_datetime(meta.get("bootstrap_date"))
|
||||
error_message = None if bootstrap_ok else meta.get("details")
|
||||
await repo.create(
|
||||
host_id=host_id,
|
||||
status=status,
|
||||
automation_user="automation",
|
||||
last_attempt=last_attempt,
|
||||
error_message=error_message,
|
||||
)
|
||||
created += 1
|
||||
report["bootstrap_status"] = created
|
||||
backup_file(BOOTSTRAP_STATUS_FILE)
|
||||
|
||||
|
||||
async def migrate_schedule_runs(session, report: Dict[str, Any]) -> None:
|
||||
repo = ScheduleRunRepository(session)
|
||||
task_repo = TaskRepository(session)
|
||||
schedule_repo = ScheduleRepository(session)
|
||||
|
||||
data = load_json(SCHEDULE_RUNS_FILE, {"runs": []}).get("runs", [])
|
||||
if not data:
|
||||
report["schedule_runs"] = 0
|
||||
return
|
||||
|
||||
created = 0
|
||||
for run in data:
|
||||
schedule_id = run.get("schedule_id") or uuid.uuid4().hex
|
||||
task_id = run.get("task_id")
|
||||
status = run.get("status") or "unknown"
|
||||
started_at = parse_datetime(run.get("started_at")) or datetime.utcnow()
|
||||
completed_at = parse_datetime(run.get("completed_at"))
|
||||
duration = run.get("duration")
|
||||
error_message = run.get("error_message")
|
||||
output = run.get("output")
|
||||
|
||||
# Assure une entrée schedule/task minimaliste si absente
|
||||
existing_schedule = await schedule_repo.get(schedule_id, include_deleted=True)
|
||||
if existing_schedule is None:
|
||||
await schedule_repo.create(
|
||||
id=schedule_id,
|
||||
name=run.get("name", "Imported schedule"),
|
||||
playbook=run.get("playbook", "unknown"),
|
||||
target=run.get("target", "all"),
|
||||
schedule_type=run.get("schedule_type", "once"),
|
||||
enabled=True,
|
||||
)
|
||||
if task_id:
|
||||
existing_task = await task_repo.get(task_id)
|
||||
if existing_task is None:
|
||||
await task_repo.create(
|
||||
id=task_id,
|
||||
action=run.get("action", "unknown"),
|
||||
target=run.get("target", "all"),
|
||||
playbook=run.get("playbook"),
|
||||
status=status,
|
||||
)
|
||||
|
||||
await repo.create(
|
||||
schedule_id=schedule_id,
|
||||
task_id=task_id,
|
||||
status=status,
|
||||
started_at=started_at,
|
||||
completed_at=completed_at,
|
||||
duration=duration,
|
||||
error_message=error_message,
|
||||
output=output,
|
||||
)
|
||||
created += 1
|
||||
|
||||
report["schedule_runs"] = created
|
||||
backup_file(SCHEDULE_RUNS_FILE)
|
||||
|
||||
|
||||
async def migrate_logs(session, report: Dict[str, Any], host_map: Dict[str, str]) -> None:
|
||||
repo = LogRepository(session)
|
||||
created = 0
|
||||
|
||||
json_files: List[Path] = []
|
||||
if TASKS_LOGS_DIR.exists():
|
||||
json_files = [p for p in TASKS_LOGS_DIR.rglob("*.json") if not p.name.startswith(".")]
|
||||
|
||||
for path in json_files:
|
||||
data = load_json(path, {})
|
||||
if isinstance(data, dict):
|
||||
# Tentative de mapping simple
|
||||
level = data.get("level") or "info"
|
||||
message = data.get("message") or json.dumps(data, ensure_ascii=False)
|
||||
source = data.get("source")
|
||||
details = data.get("details")
|
||||
host_val = data.get("host_id")
|
||||
task_id = data.get("task_id")
|
||||
schedule_id = data.get("schedule_id")
|
||||
else:
|
||||
level = "info"
|
||||
message = str(data)
|
||||
source = None
|
||||
details = None
|
||||
host_val = None
|
||||
task_id = None
|
||||
schedule_id = None
|
||||
|
||||
host_id = host_map.get(host_val) if isinstance(host_val, str) else host_val
|
||||
await repo.create(
|
||||
level=level,
|
||||
source=source,
|
||||
message=message,
|
||||
details=details,
|
||||
host_id=host_id,
|
||||
task_id=task_id,
|
||||
schedule_id=schedule_id,
|
||||
)
|
||||
created += 1
|
||||
backup_file(path)
|
||||
|
||||
report["logs"] = created
|
||||
|
||||
|
||||
async def main() -> None:
    report: Dict[str, Any] = {"warnings": []}
    await init_db()

    async with async_session_maker() as session:
        async with session.begin():
            host_map = await migrate_hosts(session, report)
            await migrate_bootstrap_status(session, report, host_map)
            await migrate_schedule_runs(session, report)
            await migrate_logs(session, report, host_map)

    print("Migration complete")
    for key, value in report.items():
        if key == "warnings":
            if value:
                print("Warnings:")
                for w in value:
                    print(f"  - {w}")
            continue
        print(f"  - {key}: {value}")


if __name__ == "__main__":
    asyncio.run(main())
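
The migration tail above leans on a `parse_datetime` helper defined earlier in migrate_json_to_sqlite.py (outside this excerpt). As a rough, illustrative sketch only, assuming the timestamp shapes visible in the JSON data below (e.g. `2025-12-05 02:35:00.012993+00:00`), a compatible helper could be as small as this:

```python
from datetime import datetime
from typing import Optional


def parse_datetime(value: Optional[str]) -> Optional[datetime]:
    """Parse timestamps such as '2025-12-05 02:35:00.012993+00:00'.

    Illustrative sketch only; the real helper is defined earlier in
    migrate_json_to_sqlite.py.
    """
    if not value:
        return None
    try:
        return datetime.fromisoformat(value)
    except ValueError:
        return None
```

`datetime.fromisoformat` accepts both the space separator and the `+HH:MM` offset used in these files, which keeps the sketch dependency-free.
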
3
pytest.ini
Normal file
@ -0,0 +1,3 @@
[pytest]
asyncio_mode = auto
asyncio_default_fixture_loop_scope = module
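
Side note on this config: with `asyncio_mode = auto`, pytest-asyncio treats every bare `async def test_*` function as an asyncio test, so explicit markers become optional, and `asyncio_default_fixture_loop_scope = module` sets the default event loop scope for async fixtures (like `setup_db` below) to the module. A minimal hypothetical test showing auto mode, not part of the commit:

```python
import asyncio


# Under asyncio_mode = auto, no @pytest.mark.asyncio decorator is needed:
# pytest-asyncio detects the coroutine function and runs it on the event loop.
async def test_sleep_runs_on_event_loop():
    await asyncio.sleep(0)
```
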
@ -1,5 +1,173 @@
{
  "runs": [
    {
      "id": "run_e16db5ac6f5c",
      "schedule_id": "sched_110c001afe0c",
      "task_id": "2",
      "started_at": "2025-12-05 02:35:00.012993+00:00",
      "finished_at": "2025-12-05 02:35:32.549542+00:00",
      "status": "success",
      "duration_seconds": 32.45821054699991,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_6c434a169263",
      "schedule_id": "sched_110c001afe0c",
      "task_id": "1",
      "started_at": "2025-12-05 02:30:00.004595+00:00",
      "finished_at": "2025-12-05 02:30:30.003032+00:00",
      "status": "success",
      "duration_seconds": 29.95905439800117,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_debf96da90dd",
      "schedule_id": "sched_110c001afe0c",
      "task_id": "2",
      "started_at": "2025-12-05 02:25:00.016354+00:00",
      "finished_at": "2025-12-05 02:25:27.580495+00:00",
      "status": "success",
      "duration_seconds": 27.521959419998893,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_bda871b98a7c",
      "schedule_id": "sched_31a7ffb99bfd",
      "task_id": "1",
      "started_at": "2025-12-05 02:20:00.004169+00:00",
      "finished_at": "2025-12-05 02:20:28.118352+00:00",
      "status": "success",
      "duration_seconds": 28.0753927859987,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_9acaf3ee6040",
      "schedule_id": "sched_d5370726086b",
      "task_id": "4",
      "started_at": "2025-12-05 02:05:01.066895+00:00",
      "finished_at": null,
      "status": "running",
      "duration_seconds": null,
      "hosts_impacted": 0,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_25dee59d8f54",
      "schedule_id": "sched_178b8e511908",
      "task_id": "3",
      "started_at": "2025-12-05 02:05:00.942939+00:00",
      "finished_at": null,
      "status": "running",
      "duration_seconds": null,
      "hosts_impacted": 0,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_06b2fe4c75f9",
      "schedule_id": "sched_d5370726086b",
      "task_id": "2",
      "started_at": "2025-12-05 02:00:00.048675+00:00",
      "finished_at": "2025-12-05 02:00:31.174698+00:00",
      "status": "success",
      "duration_seconds": 31.10493237799892,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_5a3ada10451e",
      "schedule_id": "sched_178b8e511908",
      "task_id": "1",
      "started_at": "2025-12-05 02:00:00.004396+00:00",
      "finished_at": "2025-12-05 02:00:30.956215+00:00",
      "status": "success",
      "duration_seconds": 30.92840002899902,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_484f67657ee4",
      "schedule_id": "sched_d5370726086b",
      "task_id": "3",
      "started_at": "2025-12-05 01:55:00.084088+00:00",
      "finished_at": "2025-12-05 01:55:32.096250+00:00",
      "status": "success",
      "duration_seconds": 31.975180113000533,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_7c9cbee2fe69",
      "schedule_id": "sched_178b8e511908",
      "task_id": "2",
      "started_at": "2025-12-05 01:55:00.018967+00:00",
      "finished_at": "2025-12-05 01:55:32.306141+00:00",
      "status": "success",
      "duration_seconds": 32.26106233700193,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_a45e3d80323d",
      "schedule_id": "sched_d5370726086b",
      "task_id": "1",
      "started_at": "2025-12-05 01:50:00.003670+00:00",
      "finished_at": "2025-12-05 01:50:27.635237+00:00",
      "status": "success",
      "duration_seconds": 27.58177596600217,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_6ebb5bb47219",
      "schedule_id": "sched_d5370726086b",
      "task_id": "2",
      "started_at": "2025-12-05 01:45:00.003641+00:00",
      "finished_at": "2025-12-05 01:45:26.015984+00:00",
      "status": "success",
      "duration_seconds": 25.9568110279979,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_f07c8820abcf",
      "schedule_id": "sched_d5370726086b",
      "task_id": "1",
      "started_at": "2025-12-05 01:40:00.003609+00:00",
      "finished_at": "2025-12-05 01:40:27.800302+00:00",
      "status": "success",
      "duration_seconds": 27.77215807200264,
      "hosts_impacted": 15,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_c831165b16d9",
      "schedule_id": "sched_d5370726086b",
      "task_id": null,
      "started_at": "2025-12-05 01:35:00.003976+00:00",
      "finished_at": null,
      "status": "running",
      "duration_seconds": null,
      "hosts_impacted": 0,
      "error_message": null,
      "retry_attempt": 0
    },
    {
      "id": "run_9eaff32da049",
      "schedule_id": "sched_31a7ffb99bfd",
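
Worth noting: these run records use the keys `finished_at` and `duration_seconds`, while the DB columns written by `migrate_schedule_runs` are `completed_at` and `duration`, hence the field-name fallback in the migration code above. A standalone sketch of that normalization, assuming only the keys shown here:

```python
# Sketch: map one legacy run record onto the DB column names.
# Keys follow the JSON above; this mirrors the fallback logic in
# migrate_schedule_runs and is illustrative, not part of the commit.
def normalize_run(run: dict) -> dict:
    return {
        "schedule_id": run.get("schedule_id"),
        "task_id": run.get("task_id"),
        "status": run.get("status") or "unknown",
        "started_at": run.get("started_at"),
        "completed_at": run.get("completed_at") or run.get("finished_at"),
        "duration": run.get("duration", run.get("duration_seconds")),
        "error_message": run.get("error_message"),
    }
```
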
@ -1,9 +1,9 @@
{
  "schedules": [
    {
      "id": "sched_31a7ffb99bfd",
      "id": "sched_110c001afe0c",
      "name": "Health-check-5min",
      "description": "Health-check-5min",
      "description": null,
      "playbook": "health-check.yml",
      "target_type": "group",
      "target": "all",
@ -19,20 +19,20 @@
      "timezone": "America/Montreal",
      "start_at": null,
      "end_at": null,
      "next_run_at": "2025-12-04 15:35:00-05:00",
      "last_run_at": "2025-12-04 20:30:00.003138+00:00",
      "next_run_at": "2025-12-04T21:40:00-05:00",
      "last_run_at": "2025-12-05 02:35:00.012919+00:00",
      "last_status": "success",
      "enabled": false,
      "enabled": true,
      "retry_on_failure": 0,
      "timeout": 3600,
      "tags": [
        "Test"
      ],
      "run_count": 22,
      "success_count": 19,
      "failure_count": 3,
      "created_at": "2025-12-04 18:45:18.318152+00:00",
      "updated_at": "2025-12-04 20:30:29.181594+00:00"
      "run_count": 3,
      "success_count": 3,
      "failure_count": 0,
      "created_at": "2025-12-05 02:24:06.110100+00:00",
      "updated_at": "2025-12-05 02:35:32.549928+00:00"
    }
  ]
}
0
tests/__init__.py
Normal file
206
tests/test_db.py
Normal file
@ -0,0 +1,206 @@
"""Basic tests validating the async SQLAlchemy DB layer."""
from __future__ import annotations

import sys
from pathlib import Path

# Add the project root to sys.path so the `app` package resolves
sys.path.insert(0, str(Path(__file__).resolve().parents[1]))

import pytest
import pytest_asyncio

from app.models.database import async_session_maker, init_db, engine
from app.crud.host import HostRepository
from app.crud.bootstrap_status import BootstrapStatusRepository
from app.crud.task import TaskRepository
from app.crud.schedule import ScheduleRepository
from app.crud.log import LogRepository

# Configure pytest-asyncio: every test in this module is async
pytestmark = pytest.mark.asyncio

@pytest_asyncio.fixture(scope="module")
async def setup_db():
    await init_db()
    yield
    # Cleanup: drop all tables created for the test run
    from app.models.database import Base
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)


@pytest.mark.asyncio
async def test_create_host(setup_db):
    async with async_session_maker() as session:
        repo = HostRepository(session)
        host = await repo.create(
            id="test-host-001",
            name="test.host.local",
            ip_address="192.168.1.100",
            ansible_group="env_test",
            status="online",
            reachable=True,
        )
        await session.commit()

        assert host.id == "test-host-001"
        assert host.name == "test.host.local"
        assert host.ip_address == "192.168.1.100"


@pytest.mark.asyncio
async def test_list_hosts(setup_db):
    async with async_session_maker() as session:
        repo = HostRepository(session)
        hosts = await repo.list(limit=10, offset=0)
        assert isinstance(hosts, list)


@pytest.mark.asyncio
async def test_create_task(setup_db):
    async with async_session_maker() as session:
        repo = TaskRepository(session)
        task = await repo.create(
            id="task-001",
            action="health-check",
            target="all",
            playbook="health-check.yml",
            status="pending",
        )
        await session.commit()

        assert task.id == "task-001"
        assert task.action == "health-check"


@pytest.mark.asyncio
async def test_create_log(setup_db):
    async with async_session_maker() as session:
        repo = LogRepository(session)
        log = await repo.create(
            level="INFO",
            message="Test log entry",
            source="test",
        )
        await session.commit()

        assert log.id is not None
        assert log.level == "INFO"


@pytest.mark.asyncio
async def test_soft_delete_host(setup_db):
    async with async_session_maker() as session:
        repo = HostRepository(session)
        # Create a host to delete
        host = await repo.create(
            id="host-to-delete",
            name="delete.me.local",
            ip_address="192.168.1.200",
            status="unknown",
        )
        await session.commit()

        # Soft delete
        deleted = await repo.soft_delete("host-to-delete")
        await session.commit()
        assert deleted is True

        # Should not appear in a normal list
        hosts = await repo.list()
        host_ids = [h.id for h in hosts]
        assert "host-to-delete" not in host_ids

        # But should appear with include_deleted=True
        host_with_deleted = await repo.get("host-to-delete", include_deleted=True)
        assert host_with_deleted is not None
        assert host_with_deleted.deleted_at is not None


@pytest.mark.asyncio
async def test_create_schedule(setup_db):
    async with async_session_maker() as session:
        repo = ScheduleRepository(session)
        schedule = await repo.create(
            id="schedule-001",
            name="Daily Backup",
            playbook="backup.yml",
            target="all",
            schedule_type="recurring",
            recurrence_type="daily",
            recurrence_time="02:00",
            enabled=True,
        )
        await session.commit()

        assert schedule.id == "schedule-001"
        assert schedule.name == "Daily Backup"
        assert schedule.enabled is True


@pytest.mark.asyncio
async def test_schedule_soft_delete(setup_db):
    async with async_session_maker() as session:
        repo = ScheduleRepository(session)
        # Create
        await repo.create(
            id="schedule-to-delete",
            name="To Delete",
            playbook="test.yml",
            target="all",
            schedule_type="once",
            enabled=True,
        )
        await session.commit()

        # Soft delete
        deleted = await repo.soft_delete("schedule-to-delete")
        await session.commit()
        assert deleted is True

        # Should not appear in a normal list
        schedules = await repo.list()
        schedule_ids = [s.id for s in schedules]
        assert "schedule-to-delete" not in schedule_ids


@pytest.mark.asyncio
async def test_create_schedule_run(setup_db):
    from app.crud.schedule_run import ScheduleRunRepository
    from datetime import datetime, timezone

    async with async_session_maker() as session:
        # Create the parent schedule first
        sched_repo = ScheduleRepository(session)
        await sched_repo.create(
            id="schedule-for-run",
            name="Run Test",
            playbook="test.yml",
            target="all",
            schedule_type="once",
            enabled=True,
        )
        await session.commit()

        # Create the run
        run_repo = ScheduleRunRepository(session)
        run = await run_repo.create(
            schedule_id="schedule-for-run",
            status="running",
            started_at=datetime.now(timezone.utc),
        )
        await session.commit()

        assert run.id is not None
        assert run.schedule_id == "schedule-for-run"
        assert run.status == "running"


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
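
One caveat with this suite: `setup_db` calls `init_db()` and drops every table on teardown, so the tests run against whatever database the app is configured for (by default `data/homelab.db`). A hedged `tests/conftest.py` sketch that redirects the run to a scratch file, under the assumption that `app.models.database` reads `DATABASE_URL` when first imported, as the Dockerfile's environment variables suggest:

```python
# tests/conftest.py (hypothetical, not in this commit).
# Must take effect before app.models.database is imported, since the
# engine URL is assumed to be read once at import time.
import os

os.environ.setdefault("DATABASE_URL", "sqlite+aiosqlite:///./data/test_homelab.db")
```
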