WIP: Audio context fixes — use a single shared AudioContext, stop the previous song when switching playback to a different one, and improve playhead-position restore on remount

This commit is contained in:
Mistral Vibe
2026-04-08 16:52:10 +02:00
parent e8862d99b3
commit 5f95d88741
5 changed files with 147 additions and 41 deletions

View File

@@ -91,10 +91,10 @@ export function useWaveform(
setCurrentSong(options.songId, options.bandId);
}
// If this is the same song that was playing globally, restore play state
// If this is the currently playing song, restore play state
if (options.songId && options.bandId &&
currentSongId === options.songId &&
globalBandId === options.bandId &&
currentPlayingSongId === options.songId &&
currentPlayingBandId === options.bandId &&
globalIsPlaying) {
@@ -103,7 +103,7 @@ export function useWaveform(
const checkReady = setInterval(() => {
if (audioService.getDuration() > 0) {
clearInterval(checkReady);
audioService.play();
audioService.play(options.songId, options.bandId);
if (globalCurrentTime > 0) {
audioService.seekTo(globalCurrentTime);
}
@@ -136,7 +136,7 @@ export function useWaveform(
const play = () => {
try {
audioService.play();
audioService.play(options.songId || null, options.bandId || null);
} catch (error) {
console.error('useWaveform.play failed:', error);
}

View File

@@ -1,10 +1,18 @@
import { StrictMode } from "react";
import { createRoot } from "react-dom/client";
import App from "./App.tsx";
import { audioService } from "./services/audioService";
const root = document.getElementById("root");
if (!root) throw new Error("No #root element found");
// Initialize audio context at app startup for better performance
// This prevents audio context creation delays during first playback
audioService.initializeAudioContext().catch(error => {
console.error('Failed to initialize audio context:', error);
// Continue app initialization even if audio context fails
});
createRoot(root).render(
<StrictMode>
<App />

View File

@@ -26,6 +26,8 @@ class AudioService {
private wavesurfer: WaveSurfer | null = null;
private audioContext: AudioContext | null = null;
private currentUrl: string | null = null;
private currentPlayingSongId: string | null = null;
private currentPlayingBandId: string | null = null;
private lastPlayTime: number = 0;
private lastTimeUpdate: number = 0;
private readonly PLAY_DEBOUNCE_MS: number = 100;
@@ -115,6 +117,34 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
return this.instance;
}
/**
 * Eagerly creates the shared AudioContext at app startup so the first
 * playback does not pay the context-creation cost.
 *
 * Idempotent: if a context already exists, it is returned unchanged.
 *
 * @returns the (possibly pre-existing) AudioContext instance
 * @throws Error wrapping the underlying cause if the context cannot be created
 */
public async initializeAudioContext() {
try {
if (!this.audioContext) {
// Prefer the standard constructor; fall back to the webkit-prefixed
// constructor for older WebKit/Safari builds.
this.audioContext = new (window.AudioContext || (window as { webkitAudioContext?: new () => AudioContext }).webkitAudioContext)();
this.log(LogLevel.INFO, 'Audio context initialized at app startup', {
state: this.audioContext.state,
sampleRate: this.audioContext.sampleRate
});
// Handle audio context suspension (common in mobile browsers);
// a suspended context produces no sound until resume() is awaited.
if (this.audioContext.state === 'suspended') {
await this.audioContext.resume();
this.log(LogLevel.INFO, 'Audio context resumed successfully');
}
// Set up state change monitoring so suspend/resume cycles show up in debug logs.
this.audioContext.onstatechange = () => {
this.log(LogLevel.DEBUG, 'Audio context state changed:', this.audioContext?.state);
};
}
return this.audioContext;
} catch (error) {
this.log(LogLevel.ERROR, 'Failed to initialize audio context:', error);
// Re-throw with a normalized Error message; the caller (app bootstrap)
// decides whether a missing audio context is fatal.
throw new Error(`Failed to initialize audio context: ${error instanceof Error ? error.message : String(error)}`);
}
}
// Method for testing: reset the singleton instance
public static resetInstance(): void {
this.instance = undefined as any;
@@ -279,7 +309,7 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
// to avoid duplicate event handlers
}
public async play(): Promise<void> {
public async play(songId: string | null = null, bandId: string | null = null): Promise<void> {
if (!this.wavesurfer) {
this.log(LogLevel.WARN, 'AudioService: no wavesurfer instance');
return;
@@ -294,9 +324,21 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
this.lastPlayTime = now;
// Only log play calls in debug mode to reduce noise
this.log(LogLevel.DEBUG, 'AudioService.play called');
this.log(LogLevel.DEBUG, 'AudioService.play called', { songId, bandId });
try {
// Check if we need to switch songs
const isDifferentSong = songId && bandId &&
(this.currentPlayingSongId !== songId || this.currentPlayingBandId !== bandId);
// If switching to a different song, stop current playback first
if (isDifferentSong && this.isPlaying()) {
this.log(LogLevel.INFO, 'Switching songs - stopping current playback first');
this.pause();
// Small delay to ensure cleanup
await new Promise(resolve => setTimeout(resolve, 50));
}
// Ensure we have a valid audio context
await this.ensureAudioContext();
@@ -307,6 +349,15 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
}
await this.wavesurfer.play();
// Update currently playing song tracking
if (songId && bandId) {
this.currentPlayingSongId = songId;
this.currentPlayingBandId = bandId;
const playerStore = usePlayerStore.getState();
playerStore.setCurrentPlayingSong(songId, bandId);
}
// Success logs are redundant, only log in debug mode
this.log(LogLevel.DEBUG, 'Playback started successfully');
this.playbackAttempts = 0; // Reset on success
@@ -322,7 +373,7 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
try {
await this.audioContext.resume();
this.log(LogLevel.INFO, 'Audio context resumed, retrying playback');
return this.play(); // Retry after resuming
return this.play(songId, bandId); // Retry after resuming
} catch (resumeError) {
this.log(LogLevel.ERROR, 'Failed to resume audio context:', resumeError);
}
@@ -338,7 +389,7 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
const delay = 100 * this.playbackAttempts;
this.log(LogLevel.WARN, `Retrying playback in ${delay}ms...`);
await new Promise(resolve => setTimeout(resolve, delay));
return this.play(); // Retry
return this.play(songId, bandId); // Retry
}
}
}
@@ -404,6 +455,13 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
}
this.currentUrl = null;
this.currentPlayingSongId = null;
this.currentPlayingBandId = null;
// Reset player store
const playerStore = usePlayerStore.getState();
playerStore.setCurrentPlayingSong(null, null);
// Note: We intentionally don't nullify audioContext to keep it alive
}
@@ -423,7 +481,7 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
return this.audioContext;
}
// Create new audio context
// Create new audio context (this should only happen if initializeAudioContext wasn't called)
try {
this.audioContext = new (window.AudioContext || (window as { webkitAudioContext?: new () => AudioContext }).webkitAudioContext)();
this.log(LogLevel.INFO, 'New audio context created', {
@@ -444,35 +502,37 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
}
private setupAudioContext(ws: WaveSurferWithBackend) {
// Simplified audio context access for WaveSurfer 7.12.5
// Based on WaveSurfer.js documentation and testing
// Simplified audio context setup - we now manage audio context centrally
try {
// Primary method for WaveSurfer 7.x: Use backend.getAudioContext()
// This is the officially documented and reliable method
// If we already have an audio context, use it for WaveSurfer
if (this.audioContext) {
// Try to set the audio context for WaveSurfer if possible
if (ws.backend && typeof ws.backend.getAudioContext === 'function') {
// Some WaveSurfer versions allow setting the audio context
try {
// @ts-expect-error - WaveSurfer typing doesn't expose this
ws.backend.audioContext = this.audioContext;
this.log(LogLevel.DEBUG, 'Shared audio context with WaveSurfer backend');
} catch (error) {
this.log(LogLevel.DEBUG, 'Could not share audio context with WaveSurfer, but continuing');
}
}
return;
}
// Fallback: Try to get audio context from WaveSurfer (for compatibility)
if (ws.backend?.getAudioContext) {
this.audioContext = ws.backend.getAudioContext();
this.log(LogLevel.DEBUG, 'Audio context accessed via backend.getAudioContext()');
}
// Fallback: Try wavesurfer.getAudioContext() if available
// Some WaveSurfer versions expose this at the top level
if (!this.audioContext && typeof ws.getAudioContext === 'function') {
} else if (typeof ws.getAudioContext === 'function') {
this.audioContext = ws.getAudioContext();
this.log(LogLevel.DEBUG, 'Audio context accessed via ws.getAudioContext()');
}
// Final fallback: Create new audio context if none found
// This should rarely be needed with proper WaveSurfer integration
if (!this.audioContext) {
this.log(LogLevel.WARN, 'Could not access audio context from WaveSurfer, creating new AudioContext');
this.audioContext = new (window.AudioContext || (window as { webkitAudioContext?: new () => AudioContext }).webkitAudioContext)();
}
if (this.audioContext) {
this.log(LogLevel.INFO, `Audio context initialized (version: ${typeof window !== 'undefined' && window.location && window.location.hostname === 'localhost' ? 'DEV' : 'PROD'})`, {
this.log(LogLevel.INFO, 'Audio context initialized from WaveSurfer', {
state: this.audioContext.state,
sampleRate: this.audioContext.sampleRate,
destination: this.audioContext.destination?.channelCount || 'unknown'
sampleRate: this.audioContext.sampleRate
});
// Handle audio context suspension (common in mobile browsers)
@@ -486,13 +546,10 @@ private readonly PLAY_DEBOUNCE_MS: number = 100;
this.audioContext.onstatechange = () => {
this.log(LogLevel.DEBUG, 'Audio context state changed:', this.audioContext?.state);
};
} else {
this.log(LogLevel.ERROR, 'Failed to create or access audio context - playback will not work');
throw new Error('Audio context initialization failed');
}
} catch (error) {
this.log(LogLevel.ERROR, 'Error accessing audio context:', error);
throw error;
this.log(LogLevel.ERROR, 'Error setting up audio context:', error);
// Don't throw - we can continue with our existing audio context
}
}

View File

@@ -6,12 +6,15 @@ interface PlayerState {
duration: number;
currentSongId: string | null;
currentBandId: string | null;
currentPlayingSongId: string | null; // Track which song is actively playing
currentPlayingBandId: string | null; // Track which band's song is actively playing
setPlaying: (isPlaying: boolean) => void;
setCurrentTime: (currentTime: number) => void;
setDuration: (duration: number) => void;
setCurrentSong: (songId: string | null, bandId: string | null) => void;
setCurrentPlayingSong: (songId: string | null, bandId: string | null) => void;
reset: () => void;
batchUpdate: (updates: Partial<Omit<PlayerState, 'setPlaying' | 'setCurrentTime' | 'setDuration' | 'setCurrentSong' | 'reset' | 'batchUpdate'>>) => void;
batchUpdate: (updates: Partial<Omit<PlayerState, 'setPlaying' | 'setCurrentTime' | 'setDuration' | 'setCurrentSong' | 'setCurrentPlayingSong' | 'reset' | 'batchUpdate'>>) => void;
}
export const usePlayerStore = create<PlayerState>()((set) => ({
@@ -20,16 +23,21 @@ export const usePlayerStore = create<PlayerState>()((set) => ({
duration: 0,
currentSongId: null,
currentBandId: null,
currentPlayingSongId: null,
currentPlayingBandId: null,
setPlaying: (isPlaying) => set({ isPlaying }),
setCurrentTime: (currentTime) => set({ currentTime }),
setDuration: (duration) => set({ duration }),
setCurrentSong: (songId, bandId) => set({ currentSongId: songId, currentBandId: bandId }),
setCurrentPlayingSong: (songId, bandId) => set({ currentPlayingSongId: songId, currentPlayingBandId: bandId }),
batchUpdate: (updates) => set(updates),
reset: () => set({
isPlaying: false,
currentTime: 0,
duration: 0,
currentSongId: null,
currentBandId: null
currentBandId: null,
currentPlayingSongId: null,
currentPlayingBandId: null
})
}));

View File

@@ -84,7 +84,7 @@ describe('setupAudioContext', () => {
expect(audioService['audioContext']).toBeDefined();
});
it('should create new AudioContext if no methods work', () => {
it('should handle case when no audio context methods work but not throw error', () => {
const mockWaveSurferNoMethods = {
...mockWaveSurfer,
backend: {
@@ -95,10 +95,11 @@ describe('setupAudioContext', () => {
getAudioContext: null
};
// Should not throw error - just continue without audio context
audioService['setupAudioContext'](mockWaveSurferNoMethods);
expect((globalThis as any).window.AudioContext).toHaveBeenCalled();
expect(audioService['audioContext']).toBeDefined();
// Audio context should remain null in this case
expect(audioService['audioContext']).toBeNull();
});
it('should handle suspended audio context by resuming it', () => {
@@ -110,7 +111,7 @@ describe('setupAudioContext', () => {
expect(suspendedContext.resume).toHaveBeenCalled();
});
it('should throw error if audio context cannot be created', () => {
it('should not throw error if audio context cannot be created - just continue', () => {
global.window.AudioContext = vi.fn(() => {
throw new Error('AudioContext creation failed');
}) as any;
@@ -125,8 +126,10 @@ describe('setupAudioContext', () => {
getAudioContext: null
};
// Should not throw error - just continue without audio context
expect(() => audioService['setupAudioContext'](mockWaveSurferNoMethods))
.toThrow('AudioContext creation failed');
.not.toThrow();
expect(audioService['audioContext']).toBeNull();
});
});
@@ -190,4 +193,34 @@ describe('getWaveSurferVersion', () => {
});
});
// Tests for the startup path that creates/owns the shared AudioContext.
// NOTE(review): these rely on createMockAudioContext and a globally mocked
// window.AudioContext set up earlier in this file — confirm the beforeEach
// restores the default mock between tests.
describe('initializeAudioContext', () => {
// Happy path: a context is created, is running, and is cached on the service.
it('should initialize audio context successfully', async () => {
const result = await audioService.initializeAudioContext();
expect(result).toBeDefined();
expect(result.state).toBe('running');
expect(audioService['audioContext']).toBe(result);
});
// Mobile-browser path: a context created in the 'suspended' state must be
// resumed before it is returned.
it('should resume suspended audio context', async () => {
const suspendedContext = createMockAudioContext('suspended');
global.window.AudioContext = vi.fn(() => suspendedContext) as any;
const result = await audioService.initializeAudioContext();
expect(suspendedContext.resume).toHaveBeenCalled();
expect(result).toBe(suspendedContext);
});
// Failure path: constructor errors are wrapped in a normalized Error message
// and rejected, rather than leaking the raw exception.
it('should handle audio context creation errors', async () => {
global.window.AudioContext = vi.fn(() => {
throw new Error('AudioContext creation failed');
}) as any;
await expect(audioService.initializeAudioContext())
.rejects
.toThrow('Failed to initialize audio context: AudioContext creation failed');
});
});
});