chore: update Angular cache and TypeScript build info
parent 9c03f5f5fe
commit 3f04191623

.angular/cache/20.2.2/app/.tsbuildinfo (vendored, 2 lines changed)
File diff suppressed because one or more lines are too long
@@ -1,61 +1,61 @@
 {
-  "hash": "70aeb477",
+  "hash": "534e7bec",
   "configHash": "d859ec53",
-  "lockfileHash": "38d89503",
-  "browserHash": "a9625742",
+  "lockfileHash": "891162b0",
+  "browserHash": "b971f174",
   "optimized": {
     "@angular/common": {
       "src": "../../../../../../node_modules/@angular/common/fesm2022/common.mjs",
       "file": "@angular_common.js",
-      "fileHash": "0c3a1cb9",
+      "fileHash": "76f579d7",
       "needsInterop": false
     },
     "@angular/common/http": {
       "src": "../../../../../../node_modules/@angular/common/fesm2022/http.mjs",
       "file": "@angular_common_http.js",
-      "fileHash": "56f4d3d3",
+      "fileHash": "3f81fe6e",
       "needsInterop": false
     },
     "@angular/core": {
       "src": "../../../../../../node_modules/@angular/core/fesm2022/core.mjs",
       "file": "@angular_core.js",
-      "fileHash": "d890be7e",
+      "fileHash": "817c1079",
      "needsInterop": false
     },
     "@angular/forms": {
       "src": "../../../../../../node_modules/@angular/forms/fesm2022/forms.mjs",
       "file": "@angular_forms.js",
-      "fileHash": "905d0ee2",
+      "fileHash": "5c59f890",
       "needsInterop": false
     },
     "@angular/platform-browser": {
       "src": "../../../../../../node_modules/@angular/platform-browser/fesm2022/platform-browser.mjs",
       "file": "@angular_platform-browser.js",
-      "fileHash": "44676ec1",
+      "fileHash": "4f20f29c",
       "needsInterop": false
     },
     "@angular/router": {
       "src": "../../../../../../node_modules/@angular/router/fesm2022/router.mjs",
       "file": "@angular_router.js",
-      "fileHash": "c691e369",
+      "fileHash": "ae70e479",
       "needsInterop": false
     },
     "@google/genai": {
       "src": "../../../../../../node_modules/@google/genai/dist/web/index.mjs",
       "file": "@google_genai.js",
-      "fileHash": "cd430349",
+      "fileHash": "4d8ae55a",
       "needsInterop": false
     },
     "rxjs": {
       "src": "../../../../../../node_modules/rxjs/dist/esm5/index.js",
       "file": "rxjs.js",
-      "fileHash": "467e2c35",
+      "fileHash": "490b7fef",
       "needsInterop": false
     },
     "rxjs/operators": {
       "src": "../../../../../../node_modules/rxjs/dist/esm5/operators/index.js",
       "file": "rxjs_operators.js",
-      "fileHash": "9aa95ff5",
+      "fileHash": "938cbe53",
       "needsInterop": false
     }
   },
@@ -1,8 +1,8 @@
 {
-  "hash": "be6806eb",
+  "hash": "f1fac02c",
   "configHash": "3d00a7fd",
-  "lockfileHash": "38d89503",
-  "browserHash": "f3292f98",
+  "lockfileHash": "891162b0",
+  "browserHash": "10427b09",
   "optimized": {},
   "chunks": {}
 }
@@ -3,7 +3,7 @@ node_modules
 .tmp
 tmp
 npm-debug.log
-Dockerfile*
+/Dockerfile*
 .dockerignore
 .git
 .gitignore
README.md (27 lines changed)
@@ -41,13 +41,38 @@ NewTube est un agrégateur de vidéos multi-plateformes, conçu pour une expéri
 
 ## Prerequisites :checklist:
 
-### Installation Steps 🔧
+### Docker Setup (Recommended) 🐳
+
+1. Copy `docker-compose/.env.example` to `docker-compose/.env`:
+```bash
+cp docker-compose/.env.example docker-compose/.env
+```
+2. Edit the `.env` file and add your API keys
+3. Start the application with Docker Compose:
+```bash
+docker-compose -f docker-compose/docker-compose.yml up -d
+```
+4. The application will be available at http://localhost:8080
+
+### Manual Setup (Development) 🔧
 
 1. Install Node.js (LTS). :node:
 2. Run `npm install` for the dependencies. 📦
 3. Copy and configure `assets/config.local.example.js` as `assets/config.local.js`. 🔑
 4. Start with `npm run dev`. :play_button:
 
+### Environment Variables 🌐
+
+The following environment variables can be configured:
+
+- `GEMINI_API_KEY` - API key for Gemini (AI)
+- `YOUTUBE_API_KEY` - YouTube API key
+- `YOUTUBE_API_KEYS` - List of YouTube API keys (comma-separated)
+- `VIMEO_ACCESS_TOKEN` - Vimeo access token
+- `TWITCH_CLIENT_ID` - Twitch client ID
+- `TWITCH_CLIENT_SECRET` - Twitch client secret
+- `YT_CACHE_TTL_MS` - Cache lifetime in milliseconds (default: 3600000 - 1 hour)
+
 ### Running in Development 💻
 
 - `npm run dev` to start the Angular server. :play_button:
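Once the Docker route above is up, the stack can be smoke-tested from a shell. A minimal sketch, assuming the 8080→4000 port mapping and the `/api/health` endpoint introduced elsewhere in this commit:

```bash
# Smoke test after `docker-compose ... up -d` (assumes the defaults from this commit)
curl -fsS http://localhost:8080/api/health                       # expect {"status":"ok"}
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:8080/  # expect 200 once the SPA is served
```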
db/newtube.db (BIN)
Binary file not shown.
docker-compose/.env (new file, 17 lines)
@@ -0,0 +1,17 @@
NGINX_HOSTNAME=AsusVivobook
NGINX_SERVER_IP=172.26.11.25
DIR_NEWTUBE=/DOCKER_CONFIG/newtube-angular
GEMINI_API_KEY=AIzaSyALfJiYQb6QYUXyUM2AKd7sV1nWV0JALJw
YOUTUBE_API_KEY=AIzaSyBbeV0sUA7hyNRZsloYPG88yqnSAjefBMo
YOUTUBE_API_KEYS=["AIzaSyBXL5y8bRMO5WJwIhPK-fhrEPu2aROuKag", "AIzaSyC1rV2Er1n1FHZQoGgQhOxFc3nSlQWhgks", "AIzaSyCumEzXNPJuQjpPpxhP2PYdExqRBDVJqRY"]
VIMEO_ACCESS_TOKEN=40e09bf643b2ecddf68ed0f5d1574c72
TWITCH_CLIENT_ID=ikxrqgjivwn4rurhjy2mcfkcq62y3m
TWITCH_CLIENT_SECRET=kezgpj5vqrk96tu2x3ghmhwipg980p
YT_CACHE_TTL_MS=1800000
JWT_SECRET=Ceci-est-mon-programme-newtube-je-sais-que-c-est-moche
ACCESS_TTL_MIN=15
REFRESH_TTL_DAYS=2
REMEMBER_TTL_DAYS=30
PORT=4000
NODE_ENV=production
TZ=America/Montreal
docker-compose/.env.example (new file, 19 lines)
@@ -0,0 +1,19 @@
# Example file for environment variables
# Copy this file to .env and fill in the values

# NGINX server configuration
NGINX_HOSTNAME=localhost

# API keys for the various services
GEMINI_API_KEY=votre_cle_gemini_ici
YOUTUBE_API_KEY=votre_cle_youtube_ici
YOUTUBE_API_KEYS=votre_cle_youtube_ici,deuxieme_cle_youtube_ici
VIMEO_ACCESS_TOKEN=votre_token_vimeo_ici
TWITCH_CLIENT_ID=votre_client_id_twitch_ici
TWITCH_CLIENT_SECRET=votre_client_secret_twitch_ici

# Cache configuration (in milliseconds)
YT_CACHE_TTL_MS=3600000 # 1 hour by default

# Menu configuration (optional)
# BASE_MENU_CONFIG_ASSETS=assets/configuration.yml
docker-compose/docker-compose.yml (new file, 27 lines)
@@ -0,0 +1,27 @@
services:
  newtube:
    image: docker-registry.dev.home:5000/newtube-angular:latest
    container_name: newtube
    hostname: ${NGINX_HOSTNAME}
    ports:
      - "8080:4000"
    environment:
      - PORT=${PORT}
      - NODE_ENV=${NODE_ENV}
      - JWT_SECRET=${JWT_SECRET}
      - ACCESS_TTL_MIN=${ACCESS_TTL_MIN}
      - REFRESH_TTL_DAYS=${REFRESH_TTL_DAYS}
      - REMEMBER_TTL_DAYS=${REMEMBER_TTL_DAYS}
      - YT_CACHE_TTL_MS=${YT_CACHE_TTL_MS}
      - GEMINI_API_KEY=${GEMINI_API_KEY}
      - YOUTUBE_API_KEY=${YOUTUBE_API_KEY}
      - YOUTUBE_API_KEYS=${YOUTUBE_API_KEYS}
      - VIMEO_ACCESS_TOKEN=${VIMEO_ACCESS_TOKEN}
      - TWITCH_CLIENT_ID=${TWITCH_CLIENT_ID}
      - TWITCH_CLIENT_SECRET=${TWITCH_CLIENT_SECRET}
      - TZ=${TZ}
    volumes:
      - ${DIR_NEWTUBE}/assets:/app/assets
      - ${DIR_NEWTUBE}/db:/app/db
      - ${DIR_NEWTUBE}/tmp/downloads:/app/tmp/downloads
    restart: unless-stopped
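Since every value in this compose file comes from `docker-compose/.env`, it can help to render the resolved configuration before starting anything. A sketch assuming the Docker Compose v2 CLI:

```bash
# Print the compose file with ${...} substitution applied; fails loudly on missing variables
cd docker-compose
docker compose --env-file .env -f docker-compose.yml config
```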
docker-compose/init.sh (new file, 49 lines)
@@ -0,0 +1,49 @@
#!/bin/bash

HOSTNAME=$(hostname)
GEMINI_API_KEY="AIzaSyALfJiYQb6QYUXyUM2AKd7sV1nWV0JALJw"
YOUTUBE_API_KEY="AIzaSyBbeV0sUA7hyNRZsloYPG88yqnSAjefBMo"
YOUTUBE_API_KEYS='["AIzaSyBXL5y8bRMO5WJwIhPK-fhrEPu2aROuKag", "AIzaSyC1rV2Er1n1FHZQoGgQhOxFc3nSlQWhgks", "AIzaSyCumEzXNPJuQjpPpxhP2PYdExqRBDVJqRY"]'
VIMEO_ACCESS_TOKEN="40e09bf643b2ecddf68ed0f5d1574c72"
TWITCH_CLIENT_ID="ikxrqgjivwn4rurhjy2mcfkcq62y3m"
TWITCH_CLIENT_SECRET="kezgpj5vqrk96tu2x3ghmhwipg980p"
YT_CACHE_TTL_MS="1800000"
JWT_SECRET="Ceci-est-mon-programme-newtube-je-sais-que-c-est-moche"
ACCESS_TTL_MIN="15"
REFRESH_TTL_DAYS="2"
REMEMBER_TTL_DAYS="30"
PORT="4000"
NODE_ENV="production"
TZ="America/Montreal"

DEFAULT_INTERFACE=$(ip route show default | awk '{print $5}')

SERVER_IP=$(hostname -I | tr ' ' '\n' | grep -E "^$(ip -o -4 addr show $DEFAULT_INTERFACE | awk '{print $4}' | cut -d/ -f1)")

DIR_NEWTUBE=/DOCKER_CONFIG/newtube-angular

echo "NGINX_HOSTNAME=$HOSTNAME" > .env
echo "NGINX_SERVER_IP=$SERVER_IP" >> .env
echo "DIR_NEWTUBE=$DIR_NEWTUBE" >> .env

echo "GEMINI_API_KEY=$GEMINI_API_KEY" >> .env
echo "YOUTUBE_API_KEY=$YOUTUBE_API_KEY" >> .env
echo "YOUTUBE_API_KEYS=$YOUTUBE_API_KEYS" >> .env
echo "VIMEO_ACCESS_TOKEN=$VIMEO_ACCESS_TOKEN" >> .env
echo "TWITCH_CLIENT_ID=$TWITCH_CLIENT_ID" >> .env
echo "TWITCH_CLIENT_SECRET=$TWITCH_CLIENT_SECRET" >> .env
echo "YT_CACHE_TTL_MS=$YT_CACHE_TTL_MS" >> .env
echo "JWT_SECRET=$JWT_SECRET" >> .env
echo "ACCESS_TTL_MIN=$ACCESS_TTL_MIN" >> .env
echo "REFRESH_TTL_DAYS=$REFRESH_TTL_DAYS" >> .env
echo "REMEMBER_TTL_DAYS=$REMEMBER_TTL_DAYS" >> .env
echo "PORT=$PORT" >> .env
echo "NODE_ENV=$NODE_ENV" >> .env
echo "TZ=$TZ" >> .env

sudo mkdir -p $DIR_NEWTUBE
sudo mkdir -p $DIR_NEWTUBE/db
sudo mkdir -p $DIR_NEWTUBE/tmp/downloads
sudo mkdir -p $DIR_NEWTUBE/assets
sudo mkdir -p $DIR_NEWTUBE/public
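init.sh regenerates the `.env` shown above next to the compose file and pre-creates the bind-mount tree under /DOCKER_CONFIG/newtube-angular. A hedged usage sketch (the chmod is an assumption; the diff does not mark the script executable):

```bash
cd docker-compose
chmod +x init.sh   # assumption: make the script executable first
./init.sh          # writes .env here and sudo-creates the /DOCKER_CONFIG/newtube-angular directories
cat .env           # sanity-check the generated values
```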
docker-compose/maj.sh (new file, 17 lines)
@@ -0,0 +1,17 @@
#!/usr/bin/env bash
set -euo pipefail

# Always execute relative to this script directory
cd "$(dirname "$0")"

REGISTRY="docker-registry.dev.home:5000"
IMAGE="newtube-angular:latest"

# Pull latest image from private registry
docker image pull "${REGISTRY}/${IMAGE}"

# Restart stack with the current compose file
docker compose down
docker compose up -d

echo "Stack updated and running."
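maj.sh changes into its own directory before running, so it can be invoked from anywhere, including a scheduler. A purely illustrative cron entry, assuming the repository's docker-compose directory lives at the path shown (nothing in this commit sets that up):

```bash
# Illustrative only: pull the latest image and restart the stack every night at 04:00
0 4 * * * /DOCKER_CONFIG/newtube-angular/docker-compose/maj.sh >> /var/log/newtube-maj.log 2>&1
```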
docker/Dockerfile (new file, 50 lines)
@@ -0,0 +1,50 @@
# Use a Node.js image
FROM node:20-alpine

# Create the application directory
WORKDIR /app

# First, copy only the files needed for the install
COPY package*.json ./

# Install the dependencies
RUN npm install --only=production

# Then copy the rest of the code
COPY . .

# Build the Angular application
RUN npm run build -- --configuration=production

# Expose the port
EXPOSE 4000

# Startup command
CMD ["node", "server/index.mjs"]

# Add a HEALTHCHECK for NGINX
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
  CMD curl -f http://localhost/ || exit 1

# FROM nginx:alpine

# # Install dos2unix
# RUN apk add --no-cache dos2unix

# # Copy and normalize entrypoint script
# COPY scripts/env-dump.sh /docker-entrypoint.d/env-dump.sh
# RUN dos2unix /docker-entrypoint.d/env-dump.sh \
#     && chmod +x /docker-entrypoint.d/env-dump.sh

# # Copy and normalize nginx config
# COPY config/nginx.conf /etc/nginx/nginx.conf
# RUN dos2unix /etc/nginx/nginx.conf

# # Copy built Angular app
# COPY dist /usr/share/nginx/html

# # Optional: ensure correct file ownership and permissions
# RUN mkdir -p /usr/share/nginx/html/assets \
#     && chown -R nginx:nginx /usr/share/nginx/html \
#     && chmod -R 755 /usr/share/nginx/html
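This single-stage image can be exercised locally before wiring it into the registry flow; note that node:20-alpine does not ship curl, so the HEALTHCHECK above will likely report unhealthy as written, and build-img.ps1 below actually builds a different file (docker/Dockerfile.origi). A sketch from the repository root:

```bash
# Build and run the single-stage image defined above (local experiment only)
docker build -t newtube-angular:dev -f docker/Dockerfile .
docker run --rm -p 8080:4000 newtube-angular:dev
```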
@@ -28,7 +28,7 @@ RUN mkdir -p /app/db /app/tmp/downloads
 
 # youtube-dl-exec (yt-dlp) requires Python at runtime
 RUN apt-get update \
-    && apt-get install -y --no-install-recommends python3 python-is-python3 ca-certificates \
+    && apt-get install -y --no-install-recommends python3 python-is-python3 ca-certificates curl \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy runtime server and built frontend
@@ -36,6 +36,9 @@ COPY --from=builder /app/server ./server
 COPY --from=builder /app/dist ./dist
 # Copy only the DB schema; the actual DB file will be created on first run
 COPY --from=builder /app/db/schema.sql ./db/schema.sql
+## Also copy schema to an immutable location that won't be shadowed by a bind mount
+RUN mkdir -p /app/db-schema
+COPY --from=builder /app/db/schema.sql /app/db-schema/schema.sql
 
 # Copy production dependencies from builder
 COPY --from=builder /app/node_modules ./node_modules
@@ -52,5 +55,9 @@ RUN ls -la /app/assets/ && \
 # Expose API/web port
 EXPOSE 4000
 
+# Healthcheck against API
+HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
+  CMD curl -fsS http://localhost:4000/api/health || exit 1
+
 # Start the API server (serves the Angular build from ./dist)
 CMD ["node", "./server/index.mjs"]
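With curl added to the runtime stage and the HEALTHCHECK pointing at /api/health, the container's health becomes visible to Docker itself. A quick check, assuming the `newtube` container name from docker-compose.yml:

```bash
docker inspect --format '{{.State.Health.Status}}' newtube   # starting / healthy / unhealthy
docker logs --tail 20 newtube                                # shows the startup logs added in server/index.mjs
```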
docker/build-img.ps1 (new file, 42 lines)
@@ -0,0 +1,42 @@
<#
.SYNOPSIS
Builds the Docker image with all required dependencies via PowerShell and WSL (Debian).
.DESCRIPTION
This script prepares the dependencies and builds the Docker image under WSL (Debian).
.PARAMETER full
If present, performs a full build of the Docker image (equivalent to --no-cache).
.EXAMPLE
.\build-img.ps1        # Quick build (with cache)
.\build-img.ps1 -full  # Full build without cache
.OUTPUTS
Reports success or errors while the Docker image is being built.
#>

param(
    [switch]$full
)

$ErrorActionPreference = 'Stop'

try {
    # Determine paths
    $scriptDir = $PSScriptRoot
    $projectRoot = (Get-Item (Join-Path $scriptDir '..')).FullName

    # Convert Windows path to WSL path (manual, no wslpath)
    $wslProjectRoot = $projectRoot -replace '^([A-Za-z]):\\','/mnt/$1/' -replace '\\','/'
    $wslProjectRoot = $wslProjectRoot.ToLower()

    # Compose build command
    $noCache = if ($full) { '--no-cache' } else { '' }
    $innerCmd = "cd '$wslProjectRoot' && docker build $noCache -t newtube-angular:latest -f docker/Dockerfile.origi ."

    # Run build inside WSL Debian to use Linux Docker daemon
    wsl -d Debian bash -lc $innerCmd

    Write-Host "Docker image newtube-angular:latest built successfully via WSL Debian." -ForegroundColor Green
}
catch {
    Write-Error "Error while building the Docker image: $_"
    exit 1
}
docker/config/nginx.conf (new file, 18 lines)
@@ -0,0 +1,18 @@
events {}

http {
    include       mime.types;
    default_type  application/octet-stream;

    server {
        listen 80;
        server_name localhost;

        root /usr/share/nginx/html;
        index index.html;

        location / {
            try_files $uri $uri/ /index.html;
        }
    }
}
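This nginx.conf only matters for the commented-out nginx variant of docker/Dockerfile, but it can be syntax-checked on its own. A sketch using the stock nginx:alpine image:

```bash
# Validate the configuration without building an image
docker run --rm -v "$PWD/docker/config/nginx.conf:/etc/nginx/nginx.conf:ro" nginx:alpine nginx -t
```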
docker/deploy-img.ps1 (new file, 46 lines)
@@ -0,0 +1,46 @@
<#
.SYNOPSIS
Runs the deploy-img.sh script inside WSL Debian.
.DESCRIPTION
This PowerShell script launches the Bash script deploy-img.sh from the current directory under the WSL Debian distribution.
The script deploys the local Docker image to the remote registry and handles versioning.
.NOTES
No parameters are currently required.
.EXAMPLE
.\deploy-img.ps1
.OUTPUTS
Reports success or errors while the Docker image is being deployed.
#>

param()

$ErrorActionPreference = 'Stop'

try {
    Write-Verbose "[Step 1] Determining the script directory."
    $scriptDir = $PSScriptRoot

    Write-Verbose "[Step 2] Converting the Windows path to a WSL path (manual, no wslpath)."
    # Manually replace the Windows path prefix with the WSL prefix
    $wslScriptDir = $scriptDir -replace '^([A-Za-z]):\\', '/mnt/$1/' -replace '\\','/'
    $wslScriptDir = $wslScriptDir.ToLower()

    Write-Verbose "[Step 3] Checking the bash script permissions."
    wsl -d Debian -- chmod +x "$wslScriptDir/deploy-img.sh"

    Write-Verbose "[Step 4] Building the Bash command to execute."
    $innerCmd = "cd '$wslScriptDir' && ./deploy-img.sh"

    Write-Verbose "[Step 5] Running deploy-img.sh inside WSL Debian."
    wsl -d Debian bash -lc $innerCmd

    if ($LASTEXITCODE -ne 0) {
        throw "The deployment script failed with exit code: $LASTEXITCODE"
    }

    Write-Host "Deployment of the Docker image newtube-angular:latest completed successfully." -ForegroundColor Green
}
catch {
    Write-Error "Error while running the Docker image deployment: $_"
    exit 1
}
docker/deploy-img.sh (new file, 240 lines)
@@ -0,0 +1,240 @@
#!/usr/bin/env bash
# Script: deploy-img.sh
# Description: Pushes the local Docker image to an HTTP (insecure) registry, handles semver versioning and retention
# Date: 2025-04-18
# Updated: 2025-09-19 - Strict daemon check (insecure-registries), Windows/Linux compatibility

set -euo pipefail

#####################################
#           USER CONFIG             #
#####################################
IMAGE_NAME="newtube-angular"
REGISTRY_HOST="docker-registry.dev.home"
REGISTRY_PORT="5000"
MAX_VERSIONS=5        # number of (semver) versions to keep
LOCAL_TAG="latest"    # local source tag
PUSH_LATEST="yes"     # yes/no: also push :latest

#####################################
#     DERIVED VALUES & CONSTANTS    #
#####################################
REGISTRY="${REGISTRY_HOST}:${REGISTRY_PORT}"
REMOTE_REPO="${REGISTRY}/${IMAGE_NAME}"
LOCAL_IMAGE="${IMAGE_NAME}:${LOCAL_TAG}"
TEMP_DIR="/tmp/docker-deploy-$(date +%s)"
CURL="curl -fsSL"
JQ_BIN="${JQ_BIN:-jq}"   # can be overridden via env

# Headers used to obtain the digest (manifest v2)
ACCEPT_MANIFEST='application/vnd.docker.distribution.manifest.v2+json'

#####################################
#             UTILITIES             #
#####################################
info()    { echo -e "\033[1;34m[INFO]\033[0m $*"; }
warn()    { echo -e "\033[1;33m[WARN]\033[0m $*"; }
error()   { echo -e "\033[1;31m[ERROR]\033[0m $*" >&2; }
success() { echo -e "\033[1;32m[SUCCESS]\033[0m $*"; }

cleanup() {
  set +e
  [[ -d "$TEMP_DIR" ]] && rm -rf "$TEMP_DIR"
}
trap cleanup EXIT

is_windows_shell() {
  # Git Bash / MSYS / CYGWIN or OS=Windows_NT
  if [[ "${OS:-}" == "Windows_NT" ]] || uname -s | grep -qiE 'mingw|msys|cygwin'; then
    return 0
  fi
  return 1
}

require_cmd() {
  command -v "$1" >/dev/null 2>&1 || { error "Required command missing: $1"; exit 1; }
}

#####################################
#            PRE-CHECKS             #
#####################################
require_cmd docker
require_cmd curl
require_cmd sed
require_cmd grep
require_cmd awk
require_cmd sort

# Check the local image
info "Checking local image '${LOCAL_IMAGE}'…"
if ! docker image inspect "${LOCAL_IMAGE}" >/dev/null 2>&1; then
  error "Local image ${LOCAL_IMAGE} does not exist. Build it first (docker build …)."
  exit 1
fi

# Check daemon config: insecure-registries
info "Checking the Docker daemon config (insecure-registries)…"
if ! docker info >/dev/null 2>&1; then
  error "Cannot reach the Docker daemon. Is it running?"
  exit 1
fi

if ! docker info 2>/dev/null \
  | tr -d '\r' \
  | awk '/Insecure Registries:/,/^$/ {print}' \
  | grep -q -E "(^|[[:space:]])${REGISTRY_HOST}:${REGISTRY_PORT}([[:space:]]|$)"; then
  error "The Docker daemon does NOT have '${REGISTRY}' in insecure-registries."
  if is_windows_shell; then
    info "On Windows (Docker Desktop): Settings → Docker Engine, add to the JSON:"
    printf '%s\n' '  "insecure-registries": ["'"${REGISTRY}"'"]'
    info "Then click 'Apply & Restart' and re-run this script."
  else
    info "On Linux: edit /etc/docker/daemon.json and add, for example:"
    printf '%s\n' '{ "insecure-registries": ["'"${REGISTRY}"'"] }'
    info "Then: sudo systemctl restart docker"
  fi
  exit 1
fi

# Direct HTTP ping of the registry (API v2)
info "Direct HTTP check of the registry (http://${REGISTRY}/v2/)…"
if ! ${CURL} "http://${REGISTRY}/v2/" >/dev/null 2>&1; then
  error "Cannot reach http://${REGISTRY}/v2/. Is the registry listening over HTTP on :${REGISTRY_PORT}?"
  exit 1
fi

# Optional auth via env (DOCKER_USERNAME / DOCKER_PASSWORD)
if [[ -n "${DOCKER_USERNAME:-}" && -n "${DOCKER_PASSWORD:-}" ]]; then
  info "Authenticating to the registry (docker login)…"
  # NB: docker login honours HTTP when the daemon is configured with insecure-registries
  echo "${DOCKER_PASSWORD}" | docker login "${REGISTRY}" --username "${DOCKER_USERNAME}" --password-stdin >/dev/null
else
  info "No Docker credentials provided (DOCKER_USERNAME/DOCKER_PASSWORD). Trying without authentication."
fi

mkdir -p "${TEMP_DIR}"

#####################################
#           TAG RETRIEVAL           #
#####################################
get_all_tags() {
  # Returns the list of tags (one per line), or nothing if empty/error
  # Uses jq when available, otherwise a simple parser.
  local tags_json
  if ! tags_json="$(${CURL} "http://${REGISTRY}/v2/${IMAGE_NAME}/tags/list" 2>/dev/null)"; then
    return 0
  fi

  if command -v "${JQ_BIN}" >/dev/null 2>&1; then
    echo "${tags_json}" | ${JQ_BIN} -r '.tags[]?' 2>/dev/null || true
  else
    # very simple fallback (not robust to every case, but enough here)
    echo "${tags_json}" \
      | tr -d '\n' \
      | sed -n 's/.*"tags":[[]\([^]]*\)[]].*/\1/p' \
      | tr -d '"' \
      | tr ',' '\n' \
      | sed 's/^[[:space:]]*//; s/[[:space:]]*$//'
  fi
}

filter_semver() {
  # Keep only X.Y.Z (numeric)
  grep -E '^[0-9]+\.[0-9]+\.[0-9]+$' || true
}

increment_patch() {
  # Reads a semver tag X.Y.Z and bumps Z
  local tag="$1"
  local major minor patch
  IFS='.' read -r major minor patch <<< "${tag}"
  echo "${major}.${minor}.$((patch+1))"
}

info "Fetching existing tags…"
ALL_TAGS="$(get_all_tags || true)"
SEMVER_TAGS="$(printf '%s\n' "${ALL_TAGS}" | filter_semver | sort -V || true)"

if [[ -z "${SEMVER_TAGS}" ]]; then
  NEW_TAG="1.0.0"
else
  LATEST_TAG="$(printf '%s\n' "${SEMVER_TAGS}" | tail -n1)"
  NEW_TAG="$(increment_patch "${LATEST_TAG}")"
fi
info "Tag selected for this release: ${NEW_TAG}"

#####################################
#     TAG + PUSH (HTTP via daemon)  #
#####################################
info "Local tag → ${REMOTE_REPO}:${NEW_TAG}"
docker tag "${LOCAL_IMAGE}" "${REMOTE_REPO}:${NEW_TAG}"

if [[ "${PUSH_LATEST}" == "yes" ]]; then
  info "Local tag → ${REMOTE_REPO}:latest"
  docker tag "${LOCAL_IMAGE}" "${REMOTE_REPO}:latest"
fi

info "Pushing ${REMOTE_REPO}:${NEW_TAG} (HTTP via insecure-registries)…"
docker push --disable-content-trust "${REMOTE_REPO}:${NEW_TAG}"

if [[ "${PUSH_LATEST}" == "yes" ]]; then
  info "Pushing ${REMOTE_REPO}:latest…"
  docker push --disable-content-trust "${REMOTE_REPO}:latest"
fi

#####################################
#   RETENTION: DELETE OLD VERSIONS  #
#####################################
delete_by_tag() {
  # Deletes a manifest by tag, resolving its digest via HEAD
  local tag="$1"
  local digest
  # Request the digest via HEAD + Accept manifest v2
  digest="$(curl -fsSI -H "Accept: ${ACCEPT_MANIFEST}" "http://${REGISTRY}/v2/${IMAGE_NAME}/manifests/${tag}" \
    | tr -d '\r' \
    | awk -F': ' 'tolower($1)=="docker-content-digest"{print $2}' \
    | tail -n1)"
  if [[ -z "${digest}" ]]; then
    warn "Digest not found for ${tag} (manifest v2 missing?). Skipping."
    return 0
  fi

  info "Deleting manifest ${tag} (digest: ${digest})…"
  # The delete API returns nothing on success (204)
  if ! curl -fsS -X DELETE "http://${REGISTRY}/v2/${IMAGE_NAME}/manifests/${digest}" >/dev/null; then
    warn "Failed to delete manifest for ${tag} (digest: ${digest})."
  fi
}

# Refresh the tag list after the push
ALL_TAGS="$(get_all_tags || true)"
SEMVER_TAGS="$(printf '%s\n' "${ALL_TAGS}" | filter_semver | sort -V || true)"

if [[ -n "${SEMVER_TAGS}" ]]; then
  COUNT="$(printf '%s\n' "${SEMVER_TAGS}" | wc -l | awk '{print $1}')"
  if (( COUNT > MAX_VERSIONS )); then
    TO_DELETE_COUNT=$(( COUNT - MAX_VERSIONS ))
    # Delete the oldest ones
    OLDEST="$(printf '%s\n' "${SEMVER_TAGS}" | head -n "${TO_DELETE_COUNT}")"
    while IFS= read -r old_tag; do
      [[ -z "${old_tag}" ]] && continue
      # Never delete the tag we just pushed (safety)
      if [[ "${old_tag}" == "${NEW_TAG}" ]]; then
        continue
      fi
      delete_by_tag "${old_tag}"
    done <<< "${OLDEST}"
  fi
fi

#####################################
#      OPTIONAL LOCAL CLEANUP       #
#####################################
info "Cleaning up temporary local tags…"
set +e
docker rmi "${REMOTE_REPO}:${NEW_TAG}" >/dev/null 2>&1
[[ "${PUSH_LATEST}" == "yes" ]] && docker rmi "${REMOTE_REPO}:latest" >/dev/null 2>&1
set -e

success "Deployment complete. Published version: ${NEW_TAG}"
exit 0
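The retention logic above talks to the plain registry HTTP API, so the same calls can be issued by hand when debugging. A sketch assuming the registry answers over HTTP and, for deletes, was started with deletion enabled (e.g. REGISTRY_STORAGE_DELETE_ENABLED=true):

```bash
REGISTRY="docker-registry.dev.home:5000"
IMAGE="newtube-angular"

# List repositories and tags
curl -fsSL "http://${REGISTRY}/v2/_catalog"
curl -fsSL "http://${REGISTRY}/v2/${IMAGE}/tags/list"

# Resolve a tag to its digest (the same HEAD request delete_by_tag uses)
curl -fsSI -H 'Accept: application/vnd.docker.distribution.manifest.v2+json' \
  "http://${REGISTRY}/v2/${IMAGE}/manifests/latest" | grep -i docker-content-digest
```

Deleting a manifest only unlinks it; disk space is reclaimed only after the registry's own garbage collector is run inside the registry container.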
docker/newest.txt (new empty file, 0 lines)
docker/scripts/env-dump.sh (new file, 30 lines)
@@ -0,0 +1,30 @@
#!/bin/sh

# Create output dir if missing
mkdir -p /usr/share/nginx/html/assets

# Create a JavaScript config file with environment variables
# (unquoted heredoc delimiter so the ${...} references are expanded at container start)
cat > /usr/share/nginx/html/assets/config.js << EOL
// Configuration generated automatically from environment variables
(function(window) {
  // API Keys and configuration from environment variables
  window.GEMINI_API_KEY = '${GEMINI_API_KEY}';
  window.YOUTUBE_API_KEY = '${YOUTUBE_API_KEY}';
  window.YOUTUBE_API_KEYS = '${YOUTUBE_API_KEYS}';
  window.VIMEO_ACCESS_TOKEN = '${VIMEO_ACCESS_TOKEN}';
  window.TWITCH_CLIENT_ID = '${TWITCH_CLIENT_ID}';
  window.TWITCH_CLIENT_SECRET = '${TWITCH_CLIENT_SECRET}';
  window.YT_CACHE_TTL_MS = '${YT_CACHE_TTL_MS}';

  // Base menu configuration
  window.BASE_MENU_CONFIG_ASSETS = '${BASE_MENU_CONFIG_ASSETS:-}';
})(window);
EOL

# Set proper permissions
chmod 644 /usr/share/nginx/html/assets/config.js

# Keep the original env.json for backward compatibility
if [ ! -f /usr/share/nginx/html/assets/env.json ]; then
  printf '{\n  "BASE_MENU_CONFIG_ASSETS": ""\n}\n' > /usr/share/nginx/html/assets/env.json
fi
@@ -56,6 +56,8 @@
   <link rel="stylesheet" href="/index.css">
   <!-- Local, non-versioned config (define YOUTUBE_API_KEY, GEMINI_API_KEY, etc.) -->
   <script src="assets/config.local.js"></script>
+  <!-- Dynamically generated config from environment variables -->
+  <script src="assets/config.js"></script>
 </head>
 <body class="bg-slate-900 text-slate-200 antialiased">
   <app-root></app-root>
@@ -9,7 +9,12 @@ const overrideDbFile = process.env.NEWTUBE_DB_FILE && String(process.env.NEWTUBE
   : null;
 const dbDir = overrideDbFile ? path.dirname(overrideDbFile) : path.join(root, 'db');
 const dbFile = overrideDbFile || path.join(dbDir, 'newtube.db');
-const schemaFile = path.join(root, 'db', 'schema.sql');
+// Try multiple schema locations to survive when /app/db is a mounted volume
+const schemaCandidates = [
+  path.join(root, 'db', 'schema.sql'),        // normal repo path (may be hidden by a volume)
+  path.join(root, 'db-schema', 'schema.sql'), // immutable path bundled in image
+];
+const schemaFile = schemaCandidates.find(p => fs.existsSync(p));
 
 if (!fs.existsSync(dbDir)) {
   fs.mkdirSync(dbDir, { recursive: true });
@@ -19,12 +24,19 @@ if (!fs.existsSync(dbDir)) {
 const db = new Database(dbFile);
 db.pragma('foreign_keys = ON');
 
-// Run schema if present
-if (fs.existsSync(schemaFile)) {
-  const ddl = fs.readFileSync(schemaFile, 'utf8');
-  if (ddl && ddl.trim().length) {
-    db.exec(ddl);
-  }
-}
+// Run schema if present (first boot)
+if (schemaFile && fs.existsSync(schemaFile)) {
+  try {
+    const ddl = fs.readFileSync(schemaFile, 'utf8');
+    if (ddl && ddl.trim().length) {
+      db.exec(ddl);
+      console.log(`[db] Applied schema from ${schemaFile}`);
+    }
+  } catch (e) {
+    console.warn(`[db] Failed to apply schema from ${schemaFile}:`, e?.message || e);
+  }
+} else {
+  console.warn('[db] No schema.sql found in expected locations:', schemaCandidates.join(', '));
+}
 
 // Lightweight schema upgrades for existing databases (SQLite is permissive)
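The fallback list exists because docker-compose.yml bind-mounts ${DIR_NEWTUBE}/db over /app/db, which hides the schema.sql baked into the image. A quick way to see which candidate survives in a running container (container name taken from the compose file):

```bash
docker exec newtube ls -la /app/db /app/db-schema
docker logs newtube 2>&1 | grep '\[db\]'   # "[db] Applied schema from …" or the warning about missing schema.sql
```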
@@ -967,7 +967,7 @@ r.post('/auth/refresh', async (req, res) => {
   // rotate token
   const nextToken = cryptoRandomId();
   const nextHash = await hashToken(nextToken);
-  const days = session.is_remember ? REMEMBER_TTL_DAYS : REFRESH_TTL_DAYS;
+  const days = session.isRemember ? REMEMBER_TTL_DAYS : REFRESH_TTL_DAYS;
   const expiresAt = new Date(Date.now() + days * 86400_000).toISOString();
   updateSessionToken(session.id, nextHash, expiresAt);
   setRefreshCookies(res, { sessionId: session.id, token: nextToken, days });
@@ -1132,9 +1132,7 @@ r.post('/user/likes', authMiddleware, async (req, res) => {
         const meta = (typeof raw === 'string') ? JSON.parse(raw || '{}') : (raw || {});
         if (needTitle) title = meta?.title || title || '';
         if (needThumb) thumbnail = meta?.thumbnail || (Array.isArray(meta?.thumbnails) && meta.thumbnails.length ? meta.thumbnails[0].url : thumbnail || '');
-      } catch (e) {
-        console.warn('[POST /user/likes] details fetch failed, continuing without enrichment:', e?.message || e);
-      }
+      } catch {}
     }
   } catch {}
@@ -1267,6 +1265,8 @@ r.get('/img/odysee', async (req, res) => {
 
 // Mount API router (prod) and alias for dev proxy
 app.use('/api', r);
+// Health endpoint for container checks
+app.get('/api/health', (_req, res) => res.json({ status: 'ok' }));
 // Alias to support Angular dev proxy paths in both dev and production builds
 app.use('/proxy/api', r);
 // Mount dedicated Rumble router (browse, search, video)
@@ -1274,7 +1274,7 @@ app.use('/api/rumble', rumbleRouter);
 
 // -------------------- Client config from environment --------------------
 function jsVal(v) { return JSON.stringify(v == null ? '' : v); }
-app.get(['/assets/config.local.js', '/config.js'], (_req, res) => {
+app.get(['/assets/config.local.js', '/assets/config.js', '/config.js'], (_req, res) => {
   // WARNING: Values served here are exposed to the browser. Do not put secrets here unless you accept this.
   const lines = [];
   const env = process.env || {};
@@ -1319,23 +1319,34 @@ app.all('/api/twitch-api/*', (req, res) => forwardJson(req, res, 'https://api.tw
 app.all('/api/twitch-auth/*', (req, res) => forwardJson(req, res, 'https://id.twitch.tv'));
 
 // -------------------- Static Frontend (Angular build) --------------------
 try {
-  const staticDir = path.join(process.cwd(), 'dist');
-  if (fs.existsSync(staticDir)) {
-    app.use(express.static(staticDir, { maxAge: '1h', index: 'index.html' }));
-    // SPA fallback: route non-API GETs to index.html
-    app.get('*', (req, res, next) => {
-      const url = req.originalUrl || req.url || '';
-      if (url.startsWith('/api/') || url.startsWith('/health')) return next();
-      const indexPath = path.join(staticDir, 'index.html');
-      if (fs.existsSync(indexPath)) return res.sendFile(indexPath);
-      return next();
-    });
+  const distRoot = path.join(process.cwd(), 'dist');
+  const distBrowser = path.join(distRoot, 'browser');
+  const staticDir = fs.existsSync(distBrowser) ? distBrowser : distRoot;
+  // Mount static files unconditionally; if path missing, it will just not serve anything
+  app.use(express.static(staticDir, { maxAge: '1h', index: 'index.html' }));
+  // SPA fallback: any non-API GET should serve index.html
+  app.get('*', (req, res, next) => {
+    try {
+      const url = req.originalUrl || req.url || '';
+      if (url.startsWith('/api/')) return next();
+      const indexPath = path.join(staticDir, 'index.html');
+      if (fs.existsSync(indexPath)) return res.sendFile(indexPath);
+      return next();
+    } catch {
+      return next();
+    }
+  });
 } catch {}
 
 app.listen(PORT, () => {
+  const cwd = process.cwd();
+  const hasDistRoot = fs.existsSync(distRoot);
+  const hasDistBrowser = fs.existsSync(distBrowser);
+  const hasIndex = fs.existsSync(path.join(staticDir, 'index.html'));
   console.log(`[newtube-api] listening on http://localhost:${PORT}`);
+  console.log(`[newtube-api] cwd=${cwd}`);
+  console.log(`[newtube-api] distRoot=${distRoot} exists=${hasDistRoot}`);
+  console.log(`[newtube-api] distBrowser=${distBrowser} exists=${hasDistBrowser}`);
+  console.log(`[newtube-api] staticDir=${staticDir} indexExists=${hasIndex}`);
 });
 
 // --- Playlists ---
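With the dist/browser detection and the broader SPA fallback, any non-API path should now come back as the Angular index.html. A minimal check against the published port (the /some/client/route path is illustrative, not a route defined in this commit):

```bash
# API routes still reach the Express router…
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:8080/api/health
# …while unknown non-API paths fall back to the SPA shell
curl -s -o /dev/null -w '%{content_type} %{http_code}\n' http://localhost:8080/some/client/route
```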
@@ -223,11 +223,56 @@ async function scrapeRumbleVideo(videoIdOrUrl) {
 /* ------------------ List scraper (search / browse) ------------------ */
 function parseDurationToSeconds(text) {
   if (!text) return 0;
-  // supports mm:ss or hh:mm:ss
-  const m = text.trim().match(/^(\d{1,2}):(\d{2})(?::(\d{2}))?$/);
-  if (!m) return 0;
-  const h = parseInt(m[3] || '0', 10), mn = parseInt(m[1] || '0', 10), s = parseInt(m[2] || '0', 10);
-  return h * 3600 + mn * 60 + s;
+
+  // Clean the text by removing whitespace and stray characters
+  const cleanText = text.trim().replace(/\s+/g, '');
+
+  // hh:mm:ss format
+  let m = cleanText.match(/^(\d+):(\d{2}):(\d{2})$/);
+  if (m) {
+    const h = parseInt(m[1], 10) || 0;
+    const mn = parseInt(m[2], 10) || 0;
+    const s = parseInt(m[3], 10) || 0;
+    return h * 3600 + mn * 60 + s;
+  }
+
+  // mm:ss format
+  m = cleanText.match(/^(\d+):(\d{2})$/);
+  if (m) {
+    const mn = parseInt(m[1], 10) || 0;
+    const s = parseInt(m[2], 10) || 0;
+    return mn * 60 + s;
+  }
+
+  // Format with units (e.g. 1h 30m 45s)
+  m = cleanText.match(/(\d+h)?(\d+m)?(\d+s)?/);
+  if (m) {
+    const hours = m[1] ? parseInt(m[1], 10) : 0;
+    const minutes = m[2] ? parseInt(m[2], 10) : 0;
+    const seconds = m[3] ? parseInt(m[3], 10) : 0;
+    return (hours * 3600) + (minutes * 60) + seconds;
+  }
+
+  // If we get here, extract all the numbers and guess the format
+  const numbers = cleanText.match(/\d+/g);
+  if (numbers && numbers.length > 0) {
+    // A single number is assumed to be seconds
+    if (numbers.length === 1) {
+      return parseInt(numbers[0], 10) || 0;
+    }
+    // Two numbers are assumed to be mm:ss
+    if (numbers.length === 2) {
+      return (parseInt(numbers[0], 10) * 60) + (parseInt(numbers[1], 10) || 0);
+    }
+    // Three or more numbers are assumed to be hh:mm:ss
+    if (numbers.length >= 3) {
+      return (parseInt(numbers[0], 10) * 3600) +
+             (parseInt(numbers[1], 10) * 60) +
+             (parseInt(numbers[2], 10) || 0);
+    }
+  }
+
+  return 0; // Default when no format is recognized
 }
 
 async function scrapeRumbleList({ q, page = 1, limit = 24, sort = 'viral' }) {
@@ -263,12 +308,47 @@ async function scrapeRumbleList({ q, page = 1, limit = 24, sort = 'viral' }) {
       || '';
     if (thumb && thumb.startsWith('//')) thumb = 'https:' + thumb;
 
-    const durationText =
-      card.find('.video-item--duration, .video-duration, .duration, .video-item__duration').first().text().trim();
+    // Try several selectors for the duration, including data- attributes
+    let durationText = '';
+    // More Rumble-specific selectors
+    const durationElement = card.find(
+      '.video-item--duration, .video-duration, .duration, .video-item__duration, ' +
+      '[data-duration], .videoDuration, .video-time, .time, ' +
+      '.video-card__duration, .media__duration, .thumb-time, ' +
+      '.video-listing-entry__duration, .video-item__duration'
+    ).first();
+
+    if (durationElement.length) {
+      // Try the data- attributes first
+      durationText = durationElement.attr('data-duration') ||
+                     durationElement.attr('data-time') ||
+                     // Also try attributes that may carry the duration
+                     durationElement.attr('aria-label') ||
+                     durationElement.attr('title') ||
+                     // Fall back to the text content
+                     durationElement.text().trim();
+    }
+
+    // If no duration was found, look for it in the card's markup
+    if (!durationText) {
+      // Look for something that resembles a duration (mm:ss or hh:mm:ss)
+      const timeMatch = card.html().match(/>\s*([0-9]+:[0-9]{2}(?::[0-9]{2})?)\s*</);
+      if (timeMatch && timeMatch[1]) {
+        durationText = timeMatch[1];
+      }
+    }
+
+    // Clean the duration text before parsing it
+    const cleanDurationText = durationText.replace(/[^0-9:]/g, '').trim();
+
-    const viewsText =
-      card.find('.video-item--views, .rumbles-views, .views, .video-item__views').first().text().trim();
-
-    const duration = parseDurationToSeconds(durationText);
+    // Extract the view count
+    const viewsText =
+      card.find('.video-item--views, .rumbles-views, .views, .video-item__views, [data-views]').first()
+        .attr('data-views') ||
+      card.find('.video-item--views, .rumbles-views, .views, .video-item__views, .video-views').first().text().trim();
+
+    // Parse the duration
+    const duration = parseDurationToSeconds(cleanDurationText);
     const views = parseInt((viewsText || '').replace(/[^\d]/g, ''), 10) || 0;
 
     // Important: ALWAYS return a consistent canonical URL
@@ -76,7 +76,11 @@
           <a class="px-4 py-2 rounded bg-emerald-600 hover:bg-emerald-500 text-white" href="https://joinpeertube.org/" target="_blank" rel="noopener noreferrer">Ouvrir joinpeertube.org ↗️</a>
         </div>
         <div class="p-5 text-slate-200">
-          <p>Une instance PeerTube est un serveur hébergeant des vidéos. Vous pouvez en ajouter plusieurs dans <span class="font-medium">Compte → Préférences → PeerTube</span> puis choisir celle active. Pour en trouver, consultez le <a class="text-sky-400 hover:text-sky-300 underline" href="https://joinpeertube.org/" target="_blank" rel="noopener noreferrer">répertoire des instances</a>.</p>
+          <p>Une instance PeerTube est un serveur hébergeant des vidéos. Vous pouvez en ajouter plusieurs dans <span class="font-medium">Compte → Préférences → PeerTube</span> puis choisir celle active. Pour en trouver, consultez :</p>
+          <ul class="mt-2 space-y-1 list-disc list-inside text-slate-300">
+            <li>Le <a class="text-sky-400 hover:text-sky-300 underline" href="https://joinpeertube.org/" target="_blank" rel="noopener noreferrer">site officiel de PeerTube</a></li>
+            <li>Le <a class="text-sky-400 hover:text-sky-300 underline" href="https://instances.joinpeertube.org/instances" target="_blank" rel="noopener noreferrer">répertoire des instances publiques</a></li>
+          </ul>
         </div>
       </section>