fix(import): decouple scan from HTTP connection, prevent duplicate imports
- Add scan_manager: background asyncio task + Redis event store so scans survive UI navigation; SSE stream reads from Redis and is reconnectable
- Replace SSE-tied scan endpoint with POST /nc-scan/start + GET /nc-scan/stream
- Fix frontend: AbortController + useEffect cleanup cancels the stream on unmount without stopping the server-side scan
- Add unique constraint on audio_versions.nc_file_path (migration 0009) to prevent duplicate imports from concurrent scans; handle IntegrityError gracefully in nc_scan with rollback + skip
- Fix API health check: use plain python instead of uv (not in dev image)
- Optimize Taskfile: fix duplicate dev:restart, add dev:fresh/dev:rebuild/dev:status, migrate uses run --rm, check includes typecheck

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
206
Taskfile.yml
206
Taskfile.yml
@@ -3,28 +3,76 @@ version: "3"
|
||||
vars:
|
||||
COMPOSE: docker compose
|
||||
DEV_FLAGS: -f docker-compose.yml -f docker-compose.dev.yml
|
||||
INFRA_SERVICES: db redis
|
||||
APP_SERVICES: api audio-worker nc-watcher
|
||||
DEV_SERVICES: db redis api web audio-worker nc-watcher
|
||||
|
||||
# ── Production ────────────────────────────────────────────────────────────────
|
||||
# ── Development ───────────────────────────────────────────────────────────────
|
||||
|
||||
tasks:
|
||||
help:
|
||||
desc: Show available tasks
|
||||
dev:up:
|
||||
desc: "Start full dev environment and follow logs (recommended)"
|
||||
cmds:
|
||||
- echo "Available tasks:"
|
||||
- echo " dev:up - Start complete development server (recommended)"
|
||||
- echo " dev:build - Build development containers"
|
||||
- echo " dev:clean - Safe cleanup (preserves network)"
|
||||
- echo " dev:nuke - Full cleanup (removes everything)"
|
||||
- echo " dev:restart - Restart development services"
|
||||
- echo " dev:down - Stop development environment"
|
||||
- echo " dev:logs - Follow logs from all services"
|
||||
- echo " api:logs - Follow API service logs"
|
||||
- echo " web:logs - Follow Web service logs"
|
||||
- echo " db:migrate - Run database migrations"
|
||||
- echo " db:seed - Seed database with test data"
|
||||
- echo " test:e2e - Run end-to-end tests"
|
||||
- echo " test:unit - Run unit tests"
|
||||
- echo "Starting services..."
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} up -d --wait {{.DEV_SERVICES}}"
|
||||
- echo "All services healthy. Following logs... (Ctrl+C to stop)"
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} logs -f api web audio-worker nc-watcher"
|
||||
|
||||
dev:down:
|
||||
desc: "Stop all dev services"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} down"
|
||||
|
||||
dev:restart:
|
||||
desc: "Restart one service or all (e.g. task dev:restart SERVICE=api)"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} restart {{if .SERVICE}}{{.SERVICE}}{{else}}{{.DEV_SERVICES}}{{end}}"
|
||||
|
||||
dev:rebuild:
|
||||
desc: "Rebuild and restart one service (e.g. task dev:rebuild SERVICE=api)"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} up -d --build --wait {{.SERVICE}}"
|
||||
|
||||
dev:build:
|
||||
desc: "Rebuild all dev images (run after dependency changes)"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} build api web audio-worker nc-watcher"
|
||||
|
||||
dev:fresh:
|
||||
desc: "Wipe volumes, rebuild all images, and start clean"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} down -v"
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} build api web audio-worker nc-watcher"
|
||||
- task: dev:up
|
||||
|
||||
dev:clean:
|
||||
desc: "Stop services and remove volumes (preserves images)"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} down -v"
|
||||
|
||||
dev:nuke:
|
||||
desc: "Full cleanup — removes containers, volumes, images, and build cache"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} down -v --rmi local"
|
||||
- docker system prune -f
|
||||
|
||||
dev:status:
|
||||
desc: "Show status of all dev services"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} ps"
|
||||
|
||||
dev:logs:
|
||||
desc: "Follow dev logs (all services, or pass SERVICE=api)"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} logs -f {{.SERVICE}}"
|
||||
|
||||
dev:web:
|
||||
desc: "Run Vite dev server locally with HMR (run alongside dev:up)"
|
||||
dir: web
|
||||
cmds:
|
||||
- npm run dev
|
||||
|
||||
# ── Production ────────────────────────────────────────────────────────────────
|
||||
|
||||
up:
|
||||
desc: Start all services (production)
|
||||
@@ -32,18 +80,12 @@ tasks:
|
||||
- "{{.COMPOSE}} up -d"
|
||||
|
||||
down:
|
||||
desc: Stop all services
|
||||
desc: Stop all services (production)
|
||||
cmds:
|
||||
- "{{.COMPOSE}} down"
|
||||
|
||||
build:
|
||||
desc: Build all images
|
||||
cmds:
|
||||
- task: check
|
||||
- "{{.COMPOSE}} build"
|
||||
|
||||
logs:
|
||||
desc: Follow logs for all services (pass SERVICE= to filter)
|
||||
desc: Follow logs (pass SERVICE= to filter)
|
||||
cmds:
|
||||
- "{{.COMPOSE}} logs -f {{.SERVICE}}"
|
||||
|
||||
@@ -52,82 +94,23 @@ tasks:
|
||||
cmds:
|
||||
- "{{.COMPOSE}} restart {{.SERVICE}}"
|
||||
|
||||
# ── Dev / Debug ───────────────────────────────────────────────────────────────
|
||||
|
||||
dev:
|
||||
desc: Start backend in dev mode (hot reload, source mounts)
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} up {{.DEV_SERVICES}}"
|
||||
|
||||
dev:detach:
|
||||
desc: Start backend in dev mode, detached
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} up -d {{.DEV_SERVICES}}"
|
||||
|
||||
dev:web:
|
||||
desc: Start Vite dev server (proxies /api to localhost:8000)
|
||||
dir: web
|
||||
cmds:
|
||||
- npm run dev
|
||||
|
||||
dev:up:
|
||||
desc: Start complete development server (recommended)
|
||||
cmds:
|
||||
- echo "Starting development environment..."
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} up -d {{.DEV_SERVICES}}"
|
||||
- echo "Following logs... (Ctrl+C to stop)"
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} logs -f api web audio-worker nc-watcher"
|
||||
|
||||
dev:build:
|
||||
desc: Build development containers (only when dependencies change)
|
||||
cmds:
|
||||
- echo "Building development containers..."
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} build --pull api web"
|
||||
- echo "Containers built successfully"
|
||||
|
||||
dev:logs:
|
||||
desc: Follow logs in dev mode
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} logs -f {{.SERVICE}}"
|
||||
|
||||
dev:restart:
|
||||
desc: Restart a service in dev mode (e.g. task dev:restart SERVICE=audio-worker)
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} restart {{.SERVICE}}"
|
||||
|
||||
dev:clean:
|
||||
desc: Safe cleanup (preserves network/proxy, removes containers/volumes)
|
||||
cmds:
|
||||
- echo "Stopping development services..."
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} down"
|
||||
- echo "Removing development volumes..."
|
||||
- docker volume rm -f $(docker volume ls -q | grep rehearsalhub) || true
|
||||
- echo "Development environment cleaned (network preserved)"
|
||||
|
||||
dev:nuke:
|
||||
desc: Full cleanup (removes everything including network - use when network is corrupted)
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} down -v"
|
||||
- docker system prune -f --volumes
|
||||
|
||||
dev:restart:
|
||||
desc: Restart development services (preserves build cache)
|
||||
cmds:
|
||||
- echo "Restarting development services..."
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} restart {{.DEV_SERVICES}}"
|
||||
- echo "Services restarted"
|
||||
|
||||
# ── Database ──────────────────────────────────────────────────────────────────
|
||||
|
||||
migrate:
|
||||
desc: Run Alembic migrations
|
||||
desc: Run Alembic migrations (works whether or not the API container is running)
|
||||
cmds:
|
||||
- "{{.COMPOSE}} exec api alembic upgrade head"
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} run --rm --no-deps api alembic upgrade head"
|
||||
|
||||
migrate:auto:
|
||||
desc: Autogenerate a migration (e.g. task migrate:auto M="add users table")
|
||||
cmds:
|
||||
- "{{.COMPOSE}} exec api alembic revision --autogenerate -m '{{.M}}'"
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} run --rm --no-deps api alembic revision --autogenerate -m '{{.M}}'"
|
||||
|
||||
db:reset:
|
||||
desc: "Drop and recreate schema (dev only — destroys all data)"
|
||||
cmds:
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} run --rm --no-deps api alembic downgrade base"
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} run --rm --no-deps api alembic upgrade head"
|
||||
|
||||
# ── Setup ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
@@ -142,18 +125,16 @@ tasks:
|
||||
|
||||
# ── Testing ───────────────────────────────────────────────────────────────────
|
||||
|
||||
# Run this after every feature branch — fast, no external services required.
|
||||
test:feature:
|
||||
desc: "Post-feature pipeline: typecheck + frontend tests + backend unit tests (no services needed)"
|
||||
desc: "Fast post-feature check: typecheck + frontend + backend unit tests (no services needed)"
|
||||
cmds:
|
||||
- task: typecheck:web
|
||||
- task: lint
|
||||
- task: test:web
|
||||
- task: test:api:unit
|
||||
- task: test:worker
|
||||
- task: test:watcher
|
||||
|
||||
# Full CI pipeline — runs everything including integration tests.
|
||||
# Requires: services up (task dev:detach), DB migrated.
|
||||
ci:
|
||||
desc: "Full CI pipeline: lint + typecheck + all tests (requires services running)"
|
||||
cmds:
|
||||
@@ -169,11 +150,10 @@ tasks:
|
||||
deps: [test:api, test:worker, test:watcher]
|
||||
|
||||
test:web:
|
||||
desc: Run frontend unit tests (via podman — no local Node required)
|
||||
desc: Run frontend unit tests
|
||||
dir: web
|
||||
cmds:
|
||||
- podman run --rm -v "$(pwd)":/app:Z -w /app node:20-alpine
|
||||
sh -c "npm install --legacy-peer-deps --silent && npm run test"
|
||||
- npm run test
|
||||
|
||||
test:api:
|
||||
desc: Run all API tests with coverage (unit + integration)
|
||||
@@ -209,7 +189,9 @@ tasks:
|
||||
|
||||
check:
|
||||
desc: Run all linters and type checkers
|
||||
deps: [lint]
|
||||
cmds:
|
||||
- task: lint
|
||||
- task: typecheck:web
|
||||
|
||||
lint:
|
||||
desc: Lint all services
|
||||
@@ -226,7 +208,7 @@ tasks:
|
||||
- npm run typecheck
|
||||
|
||||
format:
|
||||
desc: Auto-format Python source
|
||||
desc: Auto-format all Python source
|
||||
cmds:
|
||||
- cd api && uv run ruff format src/ tests/
|
||||
- cd worker && uv run ruff format src/ tests/
|
||||
@@ -238,29 +220,29 @@ tasks:
|
||||
desc: Shell into the API container
|
||||
interactive: true
|
||||
cmds:
|
||||
- "{{.COMPOSE}} exec api bash"
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} exec api bash"
|
||||
|
||||
shell:db:
|
||||
desc: psql shell
|
||||
desc: Open a psql shell
|
||||
interactive: true
|
||||
cmds:
|
||||
- "{{.COMPOSE}} exec db psql -U $POSTGRES_USER -d $POSTGRES_DB"
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} exec db psql -U ${POSTGRES_USER:-rh_user} -d ${POSTGRES_DB:-rehearsalhub}"
|
||||
|
||||
shell:redis:
|
||||
desc: redis-cli shell
|
||||
desc: Open a redis-cli shell
|
||||
interactive: true
|
||||
cmds:
|
||||
- "{{.COMPOSE}} exec redis redis-cli"
|
||||
- "{{.COMPOSE}} {{.DEV_FLAGS}} exec redis redis-cli"
|
||||
|
||||
# ── Container Build & Release ──────────────────────────────────────────────
|
||||
# ── Container Build & Release ─────────────────────────────────────────────────
|
||||
|
||||
build:containers:
|
||||
desc: Build all container images with current git tag
|
||||
build:
|
||||
desc: Build all production images
|
||||
cmds:
|
||||
- bash scripts/build-containers.sh
|
||||
|
||||
push:containers:
|
||||
desc: Push all container images to Gitea registry
|
||||
push:
|
||||
desc: Push all container images to the registry
|
||||
cmds:
|
||||
- bash scripts/upload-containers-simple.sh
|
||||
|
||||
|
||||
36
api/alembic/versions/0009_audio_version_nc_path_unique.py
Normal file
36
api/alembic/versions/0009_audio_version_nc_path_unique.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""Add unique constraint on audio_versions.nc_file_path.
|
||||
|
||||
Prevents duplicate imports when concurrent scans race on the same file.
|
||||
|
||||
Revision ID: 0009_av_nc_path_uq
|
||||
Revises: 0008_drop_nc_columns
|
||||
Create Date: 2026-04-12
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
|
||||
revision = "0009_av_nc_path_uq"
|
||||
down_revision = "0008_drop_nc_columns"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Deduplicate audio_versions by nc_file_path, then enforce uniqueness.

    Two steps, in order:
      1. Delete duplicate rows so the constraint can be created. For each
         nc_file_path, the row with the earliest uploaded_at is kept
         (DISTINCT ON + ORDER BY ... uploaded_at ASC picks the first row
         per path); all later duplicates are removed.
      2. Add the unique constraint that prevents future duplicate imports
         from concurrent scans.

    NOTE(review): DISTINCT ON is PostgreSQL-specific — this migration will
    not run on other backends; presumably the project is Postgres-only.
    NOTE(review): the DELETE assumes no FK references (or cascading FKs)
    point at the removed duplicate rows — confirm dependent tables before
    running against production data.
    """
    # Remove any existing duplicates first (keep the oldest version per path)
    op.execute("""
        DELETE FROM audio_versions
        WHERE id NOT IN (
            SELECT DISTINCT ON (nc_file_path) id
            FROM audio_versions
            ORDER BY nc_file_path, uploaded_at ASC
        )
    """)
    op.create_unique_constraint(
        "uq_audio_version_nc_file_path",
        "audio_versions",
        ["nc_file_path"],
    )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the nc_file_path uniqueness constraint added in upgrade().

    Rows deleted by the upgrade's dedup step are not restored — downgrade
    only removes the constraint itself.
    """
    op.drop_constraint(
        "uq_audio_version_nc_file_path",
        "audio_versions",
        type_="unique",
    )
|
||||
@@ -277,7 +277,7 @@ class AudioVersion(Base):
|
||||
)
|
||||
version_number: Mapped[int] = mapped_column(Integer, nullable=False)
|
||||
label: Mapped[str | None] = mapped_column(String(255))
|
||||
nc_file_path: Mapped[str] = mapped_column(Text, nullable=False)
|
||||
nc_file_path: Mapped[str] = mapped_column(Text, nullable=False, unique=True)
|
||||
nc_file_etag: Mapped[str | None] = mapped_column(String(255))
|
||||
cdn_hls_base: Mapped[str | None] = mapped_column(Text)
|
||||
waveform_url: Mapped[str | None] = mapped_column(Text)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
@@ -8,8 +9,7 @@ from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from rehearsalhub.config import get_settings
|
||||
from rehearsalhub.db.engine import get_session, get_session_factory
|
||||
from rehearsalhub.queue.redis_queue import flush_pending_pushes
|
||||
from rehearsalhub.db.engine import get_session
|
||||
from rehearsalhub.db.models import Member
|
||||
from rehearsalhub.dependencies import get_current_member
|
||||
from rehearsalhub.repositories.band import BandRepository
|
||||
@@ -21,6 +21,7 @@ from rehearsalhub.schemas.comment import SongCommentCreate, SongCommentRead
|
||||
from rehearsalhub.schemas.song import SongCreate, SongRead, SongUpdate
|
||||
from rehearsalhub.services.band import BandService
|
||||
from rehearsalhub.services.nc_scan import scan_band_folder
|
||||
from rehearsalhub.services.scan_manager import get_events, is_scanning, start_scan
|
||||
from rehearsalhub.services.song import SongService
|
||||
from rehearsalhub.storage.factory import StorageFactory
|
||||
|
||||
@@ -175,45 +176,60 @@ async def _get_band_and_assert_member(
|
||||
return band
|
||||
|
||||
|
||||
@router.get("/bands/{band_id}/nc-scan/stream")
|
||||
async def scan_nextcloud_stream(
|
||||
@router.post("/bands/{band_id}/nc-scan/start", status_code=202)
|
||||
async def scan_nextcloud_start(
|
||||
band_id: uuid.UUID,
|
||||
session: AsyncSession = Depends(get_session),
|
||||
current_member: Member = Depends(_member_from_request),
|
||||
):
|
||||
"""
|
||||
SSE endpoint: streams scan progress as newline-delimited JSON events.
|
||||
Each event is a JSON object on its own line.
|
||||
Accepts ?token= for EventSource clients that can't set headers.
|
||||
Start a background scan. Returns 202 immediately; progress is streamed via
|
||||
/nc-scan/stream. Returns 409 if a scan is already running for this band.
|
||||
"""
|
||||
band = await _get_band_and_assert_member(band_id, current_member, session)
|
||||
bs = await BandStorageRepository(session).get_active_for_band(band_id)
|
||||
band_folder = (bs.root_path if bs and bs.root_path else None) or f"bands/{band.slug}/"
|
||||
member_id = current_member.id
|
||||
settings = get_settings()
|
||||
|
||||
try:
|
||||
await start_scan(band_id, band_folder, current_member.id, get_settings())
|
||||
except LookupError as exc:
|
||||
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(exc))
|
||||
except RuntimeError:
|
||||
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="Scan already in progress")
|
||||
|
||||
return {"status": "started"}
|
||||
|
||||
|
||||
@router.get("/bands/{band_id}/nc-scan/stream")
|
||||
async def scan_nextcloud_stream(
|
||||
band_id: uuid.UUID,
|
||||
cursor: int = Query(default=0, ge=0),
|
||||
session: AsyncSession = Depends(get_session),
|
||||
current_member: Member = Depends(_member_from_request),
|
||||
):
|
||||
"""
|
||||
Stream scan events as newline-delimited JSON. Reads from Redis so this
|
||||
endpoint is independent of the scan's lifecycle — safe to reconnect after
|
||||
navigating away. Pass ?cursor=N to resume from event index N.
|
||||
"""
|
||||
await _get_band_and_assert_member(band_id, current_member, session)
|
||||
|
||||
async def event_generator():
|
||||
async with get_session_factory()() as db:
|
||||
try:
|
||||
storage = await StorageFactory.create(db, band_id, settings)
|
||||
async for event in scan_band_folder(db, storage, band_id, band_folder, member_id):
|
||||
idx = cursor
|
||||
while True:
|
||||
events = await get_events(band_id, start=idx)
|
||||
for event in events:
|
||||
yield json.dumps(event) + "\n"
|
||||
if event.get("type") in ("song", "session"):
|
||||
await db.commit()
|
||||
await flush_pending_pushes(db)
|
||||
except LookupError as exc:
|
||||
yield json.dumps({"type": "error", "message": str(exc)}) + "\n"
|
||||
except Exception:
|
||||
log.exception("SSE scan error for band %s", band_id)
|
||||
yield json.dumps({"type": "error", "message": "Scan failed due to an internal error."}) + "\n"
|
||||
finally:
|
||||
await db.commit()
|
||||
await flush_pending_pushes(db)
|
||||
idx += 1
|
||||
if event.get("type") in ("done", "error"):
|
||||
return
|
||||
if not events:
|
||||
scanning = await is_scanning(band_id)
|
||||
if not scanning:
|
||||
return
|
||||
await asyncio.sleep(0.3)
|
||||
|
||||
return StreamingResponse(
|
||||
event_generator(),
|
||||
media_type="application/x-ndjson",
|
||||
)
|
||||
return StreamingResponse(event_generator(), media_type="application/x-ndjson")
|
||||
|
||||
|
||||
@router.post("/bands/{band_id}/nc-scan", response_model=NcScanResult)
|
||||
|
||||
@@ -12,6 +12,7 @@ import logging
|
||||
from collections.abc import AsyncGenerator
|
||||
from pathlib import Path
|
||||
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from rehearsalhub.repositories.audio_version import AudioVersionRepository
|
||||
@@ -179,6 +180,13 @@ async def scan_band_folder(
|
||||
)
|
||||
yield {"type": "song", "song": read.model_dump(mode="json"), "is_new": is_new}
|
||||
|
||||
except IntegrityError:
|
||||
# Unique constraint on nc_file_path — another concurrent scan already
|
||||
# imported this file. Roll back the savepoint and treat as skipped.
|
||||
await db_session.rollback()
|
||||
log.debug("scan: concurrent import collision on '%s', skipping", nc_file_path)
|
||||
skipped += 1
|
||||
yield {"type": "skipped", "path": nc_file_path, "reason": "already imported"}
|
||||
except Exception as exc:
|
||||
log.error("Failed to import '%s': %s", nc_file_path, exc, exc_info=True)
|
||||
skipped += 1
|
||||
|
||||
126
api/src/rehearsalhub/services/scan_manager.py
Normal file
126
api/src/rehearsalhub/services/scan_manager.py
Normal file
@@ -0,0 +1,126 @@
|
||||
"""Background scan manager.
|
||||
|
||||
Runs nc_scan.scan_band_folder as an asyncio task independent of any HTTP
|
||||
connection. Events are pushed to a Redis list so the SSE endpoint can read
|
||||
them whether or not the original requester is still connected.
|
||||
|
||||
Redis keys (all expire after EVENTS_TTL_SECONDS):
|
||||
scan:{band_id}:status — "running" | "done" | "failed" (string)
|
||||
scan:{band_id}:events — list of JSON-encoded event dicts (rpush / lrange)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import contextlib
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
import redis.asyncio as aioredis
|
||||
|
||||
from rehearsalhub.config import get_settings
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
EVENTS_TTL_SECONDS = 3600 # events visible for 1 hour after scan completes
|
||||
_STATUS_KEY = "scan:{band_id}:status"
|
||||
_EVENTS_KEY = "scan:{band_id}:events"
|
||||
|
||||
# In-process task registry — prevents duplicate scans within the same worker pod.
|
||||
_running: dict[str, asyncio.Task] = {}
|
||||
|
||||
|
||||
def _status_key(band_id: uuid.UUID) -> str:
|
||||
return f"scan:{band_id}:status"
|
||||
|
||||
|
||||
def _events_key(band_id: uuid.UUID) -> str:
|
||||
return f"scan:{band_id}:events"
|
||||
|
||||
|
||||
async def _get_redis() -> aioredis.Redis:
    # Builds a fresh client on every call; decode_responses=True so values
    # come back as str (the status comparisons below rely on this).
    # NOTE(review): this appears to create a new client/connection per call
    # rather than sharing a pool — confirm that's acceptable for the call
    # frequency here (one client per scan-manager operation).
    return aioredis.from_url(get_settings().redis_url, decode_responses=True)
|
||||
|
||||
|
||||
async def is_scanning(band_id: uuid.UUID) -> bool:
    """Return True if a scan for *band_id* is currently marked "running".

    Reads the Redis status key; a missing or expired key counts as
    "not scanning".
    """
    r = await _get_redis()
    try:
        status = await r.get(_status_key(band_id))
    finally:
        # Close even when GET raises, so a Redis hiccup can't leak the
        # connection (the original skipped aclose() on error).
        await r.aclose()
    return status == "running"
|
||||
|
||||
|
||||
async def get_events(band_id: uuid.UUID, start: int = 0) -> list[dict]:
    """Return scan events from index *start* onwards (0-based).

    Events are JSON strings in a Redis list (written by _run_scan's push()).
    Entries that fail to decode are logged and skipped so one corrupt entry
    can't abort the whole read.
    """
    r = await _get_redis()
    try:
        raw = await r.lrange(_events_key(band_id), start, -1)
    finally:
        # Close even when LRANGE raises, so errors can't leak the connection.
        await r.aclose()
    events: list[dict] = []
    for item in raw:
        try:
            events.append(json.loads(item))
        except json.JSONDecodeError:
            # Every entry is written via json.dumps, so this should be rare;
            # narrow catch (not bare Exception) so real bugs still surface.
            log.debug("scan events: dropping undecodable entry for band %s", band_id)
    return events
|
||||
|
||||
|
||||
async def start_scan(
    band_id: uuid.UUID,
    band_folder: str,
    member_id: uuid.UUID,
    settings,
) -> None:
    """Launch a background scan task for *band_id*.

    Raises RuntimeError if a scan is already running (either in this process
    or, per the Redis status key, in another worker). On success, clears any
    previous events and marks the status "running" before the task starts.
    """
    key = str(band_id)
    task = _running.get(key)
    if task and not task.done():
        raise RuntimeError("Scan already in progress")

    r = await _get_redis()
    try:
        # Cross-process guard: the in-process registry can't see scans owned
        # by other workers, but the Redis status can. Best-effort — a small
        # check-then-act window remains; the DB unique constraint on
        # nc_file_path is the hard backstop against duplicate imports.
        if await r.get(_status_key(band_id)) == "running":
            raise RuntimeError("Scan already in progress")
        # Reset state for a fresh run.
        await r.delete(_events_key(band_id))
        await r.set(_status_key(band_id), "running", ex=EVENTS_TTL_SECONDS)
    finally:
        # Close even on error so the connection is never leaked.
        await r.aclose()

    _running[key] = asyncio.create_task(_run_scan(band_id, band_folder, member_id, settings))
|
||||
|
||||
|
||||
async def _run_scan(
    band_id: uuid.UUID,
    band_folder: str,
    member_id: uuid.UUID,
    settings,
) -> None:
    """Body of the background scan task launched by start_scan().

    Streams scan_band_folder events into the Redis event list, committing the
    DB session after each song/session event, and records the final status
    ("done" or "failed") under the status key.
    """
    # Imported locally to avoid import cycles at module load time.
    from rehearsalhub.db.engine import get_session_factory
    from rehearsalhub.queue.redis_queue import flush_pending_pushes
    from rehearsalhub.services.nc_scan import scan_band_folder
    from rehearsalhub.storage.factory import StorageFactory

    r = await _get_redis()
    events_key = _events_key(band_id)
    status_key = _status_key(band_id)

    async def push(event: dict) -> None:
        # Append the event and refresh the TTL so an active scan's events
        # never expire mid-stream.
        await r.rpush(events_key, json.dumps(event))
        await r.expire(events_key, EVENTS_TTL_SECONDS)

    try:
        async with get_session_factory()() as db:
            storage = await StorageFactory.create(db, band_id, settings)
            async for event in scan_band_folder(db, storage, band_id, band_folder, member_id):
                await push(event)
                if event.get("type") in ("song", "session"):
                    # Commit incrementally so stream readers see progress and
                    # a mid-scan failure doesn't lose everything imported so far.
                    await db.commit()
                    await flush_pending_pushes(db)
            await db.commit()
            await flush_pending_pushes(db)

        await r.set(status_key, "done", ex=EVENTS_TTL_SECONDS)
        log.info("Background scan completed for band %s", band_id)

    except Exception:
        log.exception("Background scan failed for band %s", band_id)
        # Best-effort error reporting: if Redis itself is the problem, push()
        # or SET would raise here, mask the original failure, and leave the
        # status stuck on "running" until the TTL expires — never let that
        # escape the task.
        with contextlib.suppress(Exception):
            await push({"type": "error", "message": "Scan failed due to an internal error."})
            await r.set(status_key, "failed", ex=EVENTS_TTL_SECONDS)
    finally:
        # Deregister FIRST: aclose() may itself raise, and the in-process
        # registry must be cleared regardless (the original popped after
        # aclose, so a close failure left a stale entry behind).
        _running.pop(str(band_id), None)
        with contextlib.suppress(Exception):
            await r.aclose()
|
||||
@@ -60,7 +60,7 @@ services:
|
||||
redis:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "uv run python -c \"import httpx; exit(0 if httpx.get('http://localhost:8000/api/health').status_code == 200 else 1)\" || exit 1"]
|
||||
test: ["CMD-SHELL", "python -c \"import httpx; exit(0 if httpx.get('http://localhost:8000/api/health').status_code == 200 else 1)\" || exit 1"]
|
||||
interval: 20s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import { useState, useEffect, useRef } from "react";
|
||||
import { useSearchParams } from "react-router-dom";
|
||||
import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
|
||||
import { api } from "../api/client";
|
||||
@@ -278,6 +278,11 @@ function StorageSection({ bandId, band, amAdmin }: { bandId: string; band: Band;
|
||||
const [scanning, setScanning] = useState(false);
|
||||
const [scanProgress, setScanProgress] = useState<string | null>(null);
|
||||
const [scanMsg, setScanMsg] = useState<string | null>(null);
|
||||
const scanAbortRef = useRef<AbortController | null>(null);
|
||||
|
||||
// Cancel the stream reader on unmount so the component doesn't update after
|
||||
// the user navigates away. The background scan on the server keeps running.
|
||||
useEffect(() => () => { scanAbortRef.current?.abort(); }, []);
|
||||
|
||||
const { data: storageConfigs, isLoading: storageLoading } = useQuery({
|
||||
queryKey: ["storage", bandId],
|
||||
@@ -310,8 +315,33 @@ function StorageSection({ bandId, band, amAdmin }: { bandId: string; band: Band;
|
||||
async function startScan() {
|
||||
if (scanning) return;
|
||||
setScanning(true); setScanMsg(null); setScanProgress("Starting scan…");
|
||||
|
||||
// Start the background scan on the server
|
||||
try {
|
||||
const resp = await fetch(`/api/v1/bands/${bandId}/nc-scan/stream`, { credentials: "include" });
|
||||
const startResp = await fetch(`/api/v1/bands/${bandId}/nc-scan/start`, {
|
||||
method: "POST",
|
||||
credentials: "include",
|
||||
});
|
||||
if (startResp.status === 409) {
|
||||
setScanProgress("Scan already running…");
|
||||
// Fall through to stream — a scan is already in progress
|
||||
} else if (!startResp.ok) {
|
||||
throw new Error(`HTTP ${startResp.status}`);
|
||||
}
|
||||
} catch (err) {
|
||||
setScanMsg(err instanceof Error ? err.message : "Failed to start scan");
|
||||
setScanning(false); setScanProgress(null);
|
||||
return;
|
||||
}
|
||||
|
||||
// Connect to the event stream. Aborting this does NOT stop the server scan.
|
||||
const controller = new AbortController();
|
||||
scanAbortRef.current = controller;
|
||||
try {
|
||||
const resp = await fetch(`/api/v1/bands/${bandId}/nc-scan/stream`, {
|
||||
credentials: "include",
|
||||
signal: controller.signal,
|
||||
});
|
||||
if (!resp.ok || !resp.body) throw new Error(`HTTP ${resp.status}`);
|
||||
const reader = resp.body.getReader();
|
||||
const decoder = new TextDecoder();
|
||||
@@ -338,8 +368,12 @@ function StorageSection({ bandId, band, amAdmin }: { bandId: string; band: Band;
|
||||
} else if (ev.type === "error") setScanMsg(`Scan error: ${ev.message}`);
|
||||
}
|
||||
}
|
||||
} catch (err) { setScanMsg(err instanceof Error ? err.message : "Scan failed"); }
|
||||
finally { setScanning(false); setScanProgress(null); }
|
||||
} catch (err) {
|
||||
if (err instanceof DOMException && err.name === "AbortError") return; // clean unmount, scan still running
|
||||
setScanMsg(err instanceof Error ? err.message : "Scan failed");
|
||||
} finally {
|
||||
setScanning(false); setScanProgress(null);
|
||||
}
|
||||
}
|
||||
|
||||
const canConnect = ncUrl.trim() && ncUsername.trim() && ncPassword;
|
||||
|
||||
Reference in New Issue
Block a user