- Implement tests for database generator to ensure proper session handling. - Create tests for EXIF extraction and conversion functions. - Add tests for image-related endpoints, ensuring proper data retrieval and isolation between clients. - Develop tests for OCR functionality, including language detection and text extraction. - Introduce tests for the image processing pipeline, covering success and failure scenarios. - Validate rate limiting functionality and ensure independent counters for different clients. - Implement scraper tests to verify HTML content fetching and error handling. - Add unit tests for various services, including storage and filename generation. - Establish worker entry point for ARQ to handle background image processing tasks.
103 lines · 2.9 KiB · Python
"""
|
|
Router — AI : résumé d'URL, rédaction de tâches
|
|
Sécurisé : authentification par API Key + scope ai:use + rate limiting.
|
|
"""
|
|
from fastapi import APIRouter, Depends, HTTPException, Request
|
|
|
|
from app.dependencies.auth import get_current_client, require_scope
|
|
from app.models.client import APIClient
|
|
from app.schemas import (
|
|
SummarizeRequest, SummarizeResponse,
|
|
DraftTaskRequest, DraftTaskResponse,
|
|
)
|
|
from app.services.scraper import fetch_page_content
|
|
from app.services.ai_vision import summarize_url, draft_task
|
|
from app.config import settings
|
|
from app.middleware import limiter
|
|
|
|
|
|
# All AI endpoints are mounted under the /ai prefix and grouped under the
# "Intelligence Artificielle" tag in the OpenAPI docs.
router = APIRouter(prefix="/ai", tags=["Intelligence Artificielle"])
|
|
|
|
|
|
@router.post(
    "/summarize",
    response_model=SummarizeResponse,
    summary="Résumé AI d'une URL",
    description=(
        "Scrappe le contenu d'une URL et génère un résumé structuré + tags via AI. "
        "Utile pour enrichir les bookmarks Shaarli."
    ),
    dependencies=[Depends(require_scope("ai:use"))],
)
@limiter.limit("1000/hour")
async def summarize_link(
    request: Request,
    body: SummarizeRequest,
    client: APIClient = Depends(get_current_client),
):
    """Scrape the target URL and return an AI-generated summary with tags.

    Requires an authenticated client holding the ``ai:use`` scope and is
    rate limited to 1000 requests per hour.

    Raises:
        HTTPException: 503 when AI features are disabled in settings;
            422 when the page cannot be fetched or contains no text.
    """
    # Feature flag guard: the endpoint is unavailable when AI is disabled.
    if not settings.AI_ENABLED:
        raise HTTPException(status_code=503, detail="AI désactivée")

    # Fetch the page; the scraper signals failure through an "error" key
    # instead of raising.
    scraped = await fetch_page_content(body.url)
    if scraped.get("error"):
        raise HTTPException(
            status_code=422,
            detail=f"Impossible de récupérer la page : {scraped['error']}",
        )

    # Stitch together every textual fragment the scraper found, skipping
    # any that are missing or empty.
    fragments = (scraped.get(key) for key in ("title", "description", "text"))
    content = " ".join(piece for piece in fragments if piece)

    if not content.strip():
        raise HTTPException(status_code=422, detail="Aucun contenu texte trouvé sur cette page")

    # Delegate the actual summarization to the AI service.
    ai_result = await summarize_url(
        url=body.url,
        content=content,
        language=body.language,
    )

    return SummarizeResponse(
        url=body.url,
        title=scraped.get("title"),
        summary=ai_result["summary"],
        tags=ai_result["tags"],
        model=ai_result["model"],
    )
|
|
|
|
|
|
@router.post(
    "/draft-task",
    response_model=DraftTaskResponse,
    summary="Rédaction AI d'une tâche",
    description=(
        "Génère une tâche structurée (titre, description, étapes, priorité) "
        "à partir d'une description libre."
    ),
    dependencies=[Depends(require_scope("ai:use"))],
)
@limiter.limit("1000/hour")
async def generate_task(
    request: Request,
    body: DraftTaskRequest,
    client: APIClient = Depends(get_current_client),
):
    """Turn a free-form description into a structured task draft via AI.

    Requires an authenticated client holding the ``ai:use`` scope and is
    rate limited to 1000 requests per hour.

    Raises:
        HTTPException: 503 when AI features are disabled in settings;
            500 when the AI service fails to produce a task title.
    """
    # Bail out early when the AI feature flag is off.
    if not settings.AI_ENABLED:
        raise HTTPException(status_code=503, detail="AI désactivée")

    draft = await draft_task(
        description=body.description,
        context=body.context,
        language=body.language,
    )

    # A missing or empty title means the model failed to generate a task.
    if not draft.get("title"):
        raise HTTPException(status_code=500, detail="Échec de la génération de la tâche")

    return DraftTaskResponse(**draft)
|