This commit is contained in:
Mistral Vibe
2026-04-08 20:07:20 +02:00
parent 9c4c3cda34
commit 1629272adb
3 changed files with 334 additions and 66 deletions

View File

@@ -9,6 +9,14 @@ enum LogLevel {
ERROR = 3
}
// Initialization state enum
// Tracks the lifecycle of AudioService.initialize(): NotStarted before the
// first call, InProgress while WaveSurfer is being created/loaded, then
// Completed on a successful 'ready' event or Failed on any error.
// String values (rather than numeric) keep log output and comparisons readable.
enum InitializationState {
  NotStarted = 'not_started',
  InProgress = 'in_progress',
  Completed = 'completed',
  Failed = 'failed'
}
// Type extension for WaveSurfer backend access
interface WaveSurferWithBackend extends WaveSurfer {
backend?: {
@@ -39,6 +47,12 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
private lastLogTime: number = 0;
private readonly LOG_THROTTLE_MS: number = 100;
// Initialization tracking
private initializationState: InitializationState = InitializationState.NotStarted;
private initializationError: Error | null = null;
private initializationPromise: Promise<void> | null = null;
private initializationResolve: (() => void) | null = null;
private constructor() {
// Set appropriate log level based on environment
this.setLogLevel(this.detectLogLevel());
@@ -132,6 +146,13 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
this.log(LogLevel.DEBUG, 'Audio context state changed:', this.audioContext?.state);
};
}
// Resume suspended audio context
if (this.audioContext && this.audioContext.state === 'suspended') {
await this.audioContext.resume();
this.log(LogLevel.INFO, 'Audio context resumed successfully');
}
return this.audioContext;
} catch (error) {
this.log(LogLevel.ERROR, 'Failed to initialize audio context:', error);
@@ -170,17 +191,32 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
this.instance = undefined as any;
}
public async initialize(container: HTMLElement, url: string) {
public async initialize(container: HTMLElement, url: string): Promise<void> {
this.log(LogLevel.DEBUG, 'AudioService.initialize called', { url, containerExists: !!container });
// Reset initialization state
this.initializationState = InitializationState.InProgress;
this.initializationError = null;
this.initializationPromise = new Promise<void>((resolve) => {
this.initializationResolve = resolve;
});
// Validate inputs
if (!container) {
this.log(LogLevel.ERROR, 'AudioService: container element is null');
this.initializationState = InitializationState.Failed;
this.initializationError = new Error('Container element is required');
this.initializationResolve?.();
this.initializationResolve = null;
throw new Error('Container element is required');
}
if (!url || url === 'null' || url === 'undefined') {
this.log(LogLevel.ERROR, 'AudioService: invalid URL', { url });
this.initializationState = InitializationState.Failed;
this.initializationError = new Error('Valid audio URL is required');
this.initializationResolve?.();
this.initializationResolve = null;
throw new Error('Valid audio URL is required');
}
@@ -256,7 +292,7 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
// Get audio context from wavesurfer
// Note: In WaveSurfer v7+, backend might not be available immediately
// We'll try to access it now, but also set up a handler to get it when ready
this.setupAudioContext(ws);
await this.setupAudioContext(ws);
// Set up event handlers before loading
this.setupEventHandlers();
@@ -264,17 +300,31 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
// Load the audio with error handling
this.log(LogLevel.DEBUG, 'Loading audio URL:', url);
try {
const loadPromise = new Promise<void>((resolve, reject) => {
ws.on('ready', () => {
const loadPromise = new Promise<void>(async (resolve, reject) => {
ws.on('ready', async () => {
this.log(LogLevel.DEBUG, 'WaveSurfer ready event fired');
// Now that WaveSurfer is ready, set up audio context and finalize initialization
this.setupAudioContext(ws);
// Update player store with duration
const playerStore = usePlayerStore.getState();
playerStore.setDuration(ws.getDuration());
resolve();
try {
await this.setupAudioContext(ws);
// Update player store with duration
const playerStore = usePlayerStore.getState();
playerStore.setDuration(ws.getDuration());
// Signal initialization completion
this.initializationState = InitializationState.Completed;
this.initializationResolve?.();
this.initializationResolve = null;
resolve();
} catch (error) {
this.log(LogLevel.ERROR, 'Initialization failed in ready handler:', error);
this.initializationState = InitializationState.Failed;
this.initializationError = error instanceof Error ? error : new Error(String(error));
this.initializationResolve?.();
this.initializationResolve = null;
reject(error);
}
});
ws.on('error', (error) => {
@@ -289,13 +339,16 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
await loadPromise;
this.log(LogLevel.INFO, 'Audio loaded successfully');
return this.initializationPromise;
} catch (error) {
this.log(LogLevel.ERROR, 'Failed to load audio:', error);
this.initializationState = InitializationState.Failed;
this.initializationError = error instanceof Error ? error : new Error(String(error));
this.initializationResolve?.();
this.initializationResolve = null;
this.cleanup();
throw error;
}
return ws;
}
private setupEventHandlers() {
@@ -341,6 +394,12 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
return;
}
// Check if waveform is actually ready for playback
if (this.getDuration() <= 0) {
this.log(LogLevel.ERROR, 'Waveform not ready for playback - duration is 0');
return;
}
// Debounce rapid play calls
const now = Date.now();
if (now - this.lastPlayTime < this.PLAY_DEBOUNCE_MS) {
@@ -514,36 +573,15 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
}
private setupAudioContext(ws: WaveSurferWithBackend) {
// Simplified audio context setup - we now manage audio context centrally
private async setupAudioContext(ws: WaveSurferWithBackend) {
// Simplified and more robust audio context setup
try {
// If we already have an audio context, ensure WaveSurfer uses it
if (this.audioContext) {
// Try multiple ways to share the audio context with WaveSurfer
try {
// Method 1: Try to set via backend if available
if (ws.backend) {
ws.backend.audioContext = this.audioContext;
this.log(LogLevel.DEBUG, 'Shared audio context with WaveSurfer backend');
}
// Method 2: Try to access and replace the audio context
if (ws.backend?.getAudioContext) {
// @ts-expect-error - Replace the method
ws.backend.getAudioContext = () => this.audioContext;
this.log(LogLevel.DEBUG, 'Overrode backend.getAudioContext with shared context');
}
// Method 3: Try top-level getAudioContext
if (typeof ws.getAudioContext === 'function') {
// @ts-expect-error - Replace the method
ws.getAudioContext = () => this.audioContext;
this.log(LogLevel.DEBUG, 'Overrode ws.getAudioContext with shared context');
}
} catch (error) {
this.log(LogLevel.WARN, 'Could not share audio context with WaveSurfer, but continuing:', error);
}
this.log(LogLevel.DEBUG, 'Using existing audio context');
// Centralized method to share audio context with WaveSurfer
this.shareAudioContextWithWaveSurfer(ws);
return;
}
@@ -562,23 +600,69 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
sampleRate: this.audioContext.sampleRate
});
// Note: We don't automatically resume suspended audio contexts here
// because that requires a user gesture. The resume will be handled
// in handleAudioContextResume() when the user clicks play.
// Resume suspended audio context automatically
if (this.audioContext.state === 'suspended') {
this.log(LogLevel.DEBUG, 'Audio context is suspended, will resume on user gesture');
try {
await this.audioContext.resume();
this.log(LogLevel.INFO, 'Audio context resumed successfully');
} catch (error) {
this.log(LogLevel.WARN, 'Failed to resume audio context:', error);
}
}
// Set up state change monitoring
this.audioContext.onstatechange = () => {
this.log(LogLevel.DEBUG, 'Audio context state changed:', this.audioContext?.state);
};
return;
}
// Don't create new audio context if WaveSurfer doesn't provide methods
// This maintains backward compatibility and allows graceful degradation
this.log(LogLevel.DEBUG, 'No audio context available from WaveSurfer, continuing without it');
} catch (error) {
this.log(LogLevel.ERROR, 'Error setting up audio context:', error);
// Don't throw - we can continue with our existing audio context
}
}
private shareAudioContextWithWaveSurfer(ws: WaveSurferWithBackend) {
if (!this.audioContext) {
this.log(LogLevel.WARN, 'No audio context available to share');
return;
}
try {
// Method 1: Try to set via backend if available
if (ws.backend) {
ws.backend.audioContext = this.audioContext;
this.log(LogLevel.DEBUG, 'Shared audio context with WaveSurfer backend');
return; // Success, exit early
}
// Method 2: Try to access and replace the audio context via backend methods
if (ws.backend?.getAudioContext) {
// @ts-expect-error - Replace the method
ws.backend.getAudioContext = () => this.audioContext;
this.log(LogLevel.DEBUG, 'Overrode backend.getAudioContext with shared context');
return; // Success, exit early
}
// Method 3: Try top-level getAudioContext
if (typeof ws.getAudioContext === 'function') {
// @ts-expect-error - Replace the method
ws.getAudioContext = () => this.audioContext;
this.log(LogLevel.DEBUG, 'Overrode ws.getAudioContext with shared context');
return; // Success, exit early
}
this.log(LogLevel.WARN, 'Could not share audio context with WaveSurfer - no compatible method found');
} catch (error) {
this.log(LogLevel.WARN, 'Could not share audio context with WaveSurfer:', error);
}
}
@@ -617,6 +701,75 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
return !!this.wavesurfer && this.getDuration() > 0;
}
// Method to check if audio service is properly initialized:
// a WaveSurfer instance exists, it has loaded audio (duration > 0),
// and an AudioContext has been acquired.
public isInitialized(): boolean {
  // Guard first so getDuration() is only consulted when an instance exists,
  // matching the original short-circuit order.
  if (!this.wavesurfer) {
    return false;
  }
  const hasLoadedAudio = this.getDuration() > 0;
  return hasLoadedAudio && Boolean(this.audioContext);
}
// Method to check if ready for playback (unified readiness check):
// initialization finished, waveform loaded, and the audio context exists
// in a usable state ('suspended' counts — it can be resumed on play).
public isReadyForPlayback(): boolean {
  if (this.initializationState !== InitializationState.Completed) {
    return false;
  }
  if (!this.isWaveformReady()) {
    return false;
  }
  const ctx = this.audioContext;
  if (!ctx) {
    return false;
  }
  // 'closed' contexts cannot be used or resumed.
  return ctx.state === 'running' || ctx.state === 'suspended';
}
// Initialization state management
/** Current lifecycle phase of initialize() (see InitializationState). */
public getInitializationState(): InitializationState {
  return this.initializationState;
}
/**
 * The error that moved initialization into the Failed state, or null when
 * no failure has occurred (or initialize() reset the state).
 */
public getInitializationError(): Error | null {
  return this.initializationError;
}
/** True once initialize() has fully completed (WaveSurfer 'ready' handled). */
public isInitializationComplete(): boolean {
  return this.initializationState === InitializationState.Completed;
}
/**
 * Resolve once the in-flight initialize() call settles its completion
 * promise. Resolves immediately if initialization was never started.
 * Note: the promise resolves on both success and failure — check
 * getInitializationState()/getInitializationError() afterwards.
 */
public async waitForInitialization(): Promise<void> {
  const pending = this.initializationPromise;
  if (pending) {
    await pending;
  }
}
// Method to ensure audio context is available (for backward compatibility)
/**
 * Return the shared AudioContext, creating it on first use and resuming it
 * if the browser suspended it (common on mobile until a user gesture).
 *
 * @returns the ready-to-use shared AudioContext
 * @throws if resuming fails, the Web Audio API is unavailable, or
 *         construction fails (wrapped with a descriptive message)
 */
private async ensureAudioContext(): Promise<AudioContext> {
  // If we already have a valid audio context, return it
  if (this.audioContext) {
    // Resume if suspended (common in mobile browsers)
    if (this.audioContext.state === 'suspended') {
      try {
        await this.audioContext.resume();
        this.log(LogLevel.INFO, 'Audio context resumed successfully');
      } catch (error) {
        this.log(LogLevel.ERROR, 'Failed to resume audio context:', error);
        throw error;
      }
    }
    return this.audioContext;
  }
  // Create new audio context
  try {
    // Fall back to the prefixed Safari constructor; check availability
    // explicitly so an unsupported environment fails with a clear message
    // instead of an opaque "undefined is not a constructor" TypeError.
    const AudioContextCtor =
      window.AudioContext ||
      (window as { webkitAudioContext?: new () => AudioContext }).webkitAudioContext;
    if (!AudioContextCtor) {
      throw new Error('Web Audio API is not supported in this environment');
    }
    this.audioContext = new AudioContextCtor();
    this.log(LogLevel.INFO, 'New audio context created', {
      state: this.audioContext.state,
      sampleRate: this.audioContext.sampleRate
    });
    // Set up state change monitoring so suspend/resume/close show in logs
    this.audioContext.onstatechange = () => {
      this.log(LogLevel.DEBUG, 'Audio context state changed:', this.audioContext?.state);
    };
    return this.audioContext;
  } catch (error) {
    this.log(LogLevel.ERROR, 'Failed to create audio context:', error);
    throw new Error(`Audio context creation failed: ${error instanceof Error ? error.message : String(error)}`);
  }
}
// Method to get WaveSurfer version for debugging
public getWaveSurferVersion(): string | null {
if (this.wavesurfer) {
@@ -638,4 +791,4 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
}
export const audioService = AudioService.getInstance();
export { AudioService, LogLevel }; // Export class and enum for testing
export { AudioService, LogLevel, InitializationState }; // Export class and enums for testing