Initial commit: RehearsalHub POC
Full-stack self-hosted band rehearsal platform: Backend (FastAPI + SQLAlchemy 2.0 async): - Auth with JWT (register, login, /me, settings) - Band management with Nextcloud folder integration - Song management with audio version tracking - Nextcloud scan to auto-import audio files - Band membership with link-based invite system - Song comments - Audio analysis worker (BPM, key, loudness, waveform) - Nextcloud activity watcher for auto-import - WebSocket support for real-time annotation updates - Alembic migrations (0001–0003) - Repository pattern, Ruff + mypy configured Frontend (React 18 + Vite + TypeScript strict): - Login/register page with post-login redirect - Home page with band list and creation form - Band page with member panel, invite link, song list, NC scan - Song page with waveform player, annotations, comment thread - Settings page for per-user Nextcloud credentials - Invite acceptance page (/invite/:token) - ESLint v9 flat config + TypeScript strict mode Infrastructure: - Docker Compose: PostgreSQL, Redis, API, worker, watcher, nginx - nginx reverse proxy for static files + /api/ proxy - make check runs all linters before docker compose build Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
4
api/src/rehearsalhub/queue/__init__.py
Normal file
4
api/src/rehearsalhub/queue/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
from rehearsalhub.queue.protocol import JobQueue
|
||||
from rehearsalhub.queue.redis_queue import RedisJobQueue
|
||||
|
||||
__all__ = ["JobQueue", "RedisJobQueue"]
|
||||
28
api/src/rehearsalhub/queue/protocol.py
Normal file
28
api/src/rehearsalhub/queue/protocol.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""Job queue abstraction. Swap Redis for any other backend by implementing this Protocol."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from typing import Any, Protocol
|
||||
|
||||
|
||||
class JobQueue(Protocol):
    """Structural interface for the job queue.

    Any backend (Redis, in-memory, SQS, ...) satisfies this Protocol by
    implementing the five async methods below; callers depend only on this
    interface, never on a concrete queue class.
    """

    async def enqueue(self, job_type: str, payload: dict[str, Any]) -> uuid.UUID:
        """Persist job to DB + push UUID onto queue. Returns the job UUID."""
        ...

    async def dequeue(self, timeout: int = 5) -> tuple[uuid.UUID, str, dict[str, Any]] | None:
        """Block up to `timeout` seconds for a job. Returns (id, type, payload) or None."""
        ...

    async def mark_running(self, job_id: uuid.UUID) -> None:
        """Mark a job as running. Called by the worker when it picks up the job.

        NOTE(review): the Redis implementation also increments the attempt
        counter here (not in ``mark_failed``) — one attempt per pickup.
        """
        ...

    async def mark_done(self, job_id: uuid.UUID) -> None:
        """Mark a job as successfully completed."""
        ...

    async def mark_failed(self, job_id: uuid.UUID, error: str) -> None:
        """Mark a job as failed, recording a (truncated) error message."""
        ...
|
||||
81
api/src/rehearsalhub/queue/redis_queue.py
Normal file
81
api/src/rehearsalhub/queue/redis_queue.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""Redis-backed job queue.
|
||||
|
||||
Strategy: Postgres is the source of truth (durable audit log + retry counts).
|
||||
Redis holds a list of job UUIDs for fast signaling. Workers pop a UUID, load
|
||||
the full payload from Postgres, process, then update status in Postgres.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any
|
||||
|
||||
import redis.asyncio as aioredis
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from rehearsalhub.config import get_settings
|
||||
from rehearsalhub.db.models import Job
|
||||
|
||||
|
||||
class RedisJobQueue:
    """Redis-signaled job queue with Postgres as the durable source of truth.

    A job row is persisted via the caller-owned ``AsyncSession`` (this class
    flushes but never commits), and the job's UUID is pushed onto a Redis list
    purely as a fast wake-up signal. Workers pop a UUID, load the full row
    from Postgres, process it, and update its status back in Postgres.
    """

    def __init__(self, session: AsyncSession, redis_client: aioredis.Redis | None = None) -> None:
        # Session is owned by the caller; transaction boundaries are theirs.
        self._session = session
        # Injected client (tests) or None; created lazily in _get_redis().
        self._redis: aioredis.Redis | None = redis_client

    async def _get_redis(self) -> aioredis.Redis:
        """Return the Redis client, creating one from settings on first use."""
        if self._redis is None:
            # decode_responses=True so blpop yields str UUIDs, not bytes.
            self._redis = aioredis.from_url(get_settings().redis_url, decode_responses=True)
        return self._redis

    async def enqueue(self, job_type: str, payload: dict[str, Any]) -> uuid.UUID:
        """Persist a queued job row and push its UUID onto the Redis list.

        Returns the new job's UUID. NOTE(review): the UUID is pushed before
        the caller commits the session; a worker that pops it first will find
        no row and drop the signal (see dequeue). Consider pushing after
        commit, or re-queueing unknown IDs.
        """
        job = Job(type=job_type, payload=payload, status="queued")
        self._session.add(job)
        # Flush + refresh so the DB-generated primary key is populated.
        await self._session.flush()
        await self._session.refresh(job)

        r = await self._get_redis()
        await r.rpush(get_settings().job_queue_key, str(job.id))
        return job.id

    async def dequeue(self, timeout: int = 5) -> tuple[uuid.UUID, str, dict[str, Any]] | None:
        """Block up to `timeout` seconds for a job. Returns (id, type, payload) or None.

        Returns None both on timeout and when the popped UUID has no matching
        row (the Redis entry is consumed either way).
        """
        r = await self._get_redis()
        result = await r.blpop(get_settings().job_queue_key, timeout=timeout)
        if result is None:
            return None  # timed out waiting for a signal
        _, raw_id = result  # blpop returns (key, value)
        job_id = uuid.UUID(raw_id)
        job = await self._session.get(Job, job_id)
        if job is None:
            # Row not visible (e.g. producer transaction not committed yet);
            # the signal is dropped — see NOTE in enqueue.
            return None
        return job.id, job.type, job.payload

    async def mark_running(self, job_id: uuid.UUID) -> None:
        """Mark a job as running and bump its attempt counter (one per pickup)."""
        job = await self._session.get(Job, job_id)
        if job:
            job.status = "running"
            job.started_at = datetime.now(timezone.utc)
            job.attempt = (job.attempt or 0) + 1
            await self._session.flush()

    async def mark_done(self, job_id: uuid.UUID) -> None:
        """Mark a job as successfully completed."""
        job = await self._session.get(Job, job_id)
        if job:
            job.status = "done"
            job.finished_at = datetime.now(timezone.utc)
            await self._session.flush()

    async def mark_failed(self, job_id: uuid.UUID, error: str) -> None:
        """Mark a job as failed with an error message (truncated to fit the column)."""
        job = await self._session.get(Job, job_id)
        if job:
            job.status = "failed"
            job.error = error[:2000]  # keep within the DB column limit
            job.finished_at = datetime.now(timezone.utc)
            await self._session.flush()

    async def close(self) -> None:
        """Close the Redis connection if one was created.

        Resets the cached client to None so a later call re-creates it
        lazily (fix: previously the closed client was kept and handed back
        by _get_redis, yielding a dead connection after close()).
        """
        if self._redis:
            await self._redis.aclose()
            self._redis = None
|
||||
Reference in New Issue
Block a user