waveform: relax analyser audio-detection thresholds and add Web Audio debug tooling

Signed-off-by: ale <ale@manalejandro.com>
Este commit está contenido en:
ale
2025-06-04 22:31:33 +02:00
padre d58313e276
commit ba66d4c02a
Se han modificado 4 ficheros con 372 adiciones y 28 borrados

Ver fichero

@@ -13,6 +13,7 @@ import useBackgroundImages from "./hooks/useBackgroundImages";
import Header from "./components/Header";
import TrackInfo from "./components/TrackInfo";
import AudioControls from "./components/AudioControls";
import AudioDebugTest from "./components/AudioDebugTest";
/**
* Main App component
@@ -23,7 +24,6 @@ const App = () => {
// Custom hook for streaming data
const {
json,
currentListeners,
maxListeners,
title,
@@ -41,7 +41,7 @@ const App = () => {
play,
pause,
toggleMute
} = useAudioPlayer("/stream.mp3");
} = useAudioPlayer("/test-multi-tone.mp3");
// Initialization flag to prevent multiple initializations
const initialized = useRef(false);
@@ -100,12 +100,15 @@ const App = () => {
/>
<audio
src="/stream.mp3"
src="/test-multi-tone.mp3"
ref={audioElmRef}
preload="none"
preload="metadata"
muted={muted}
controls={false}
controls={false}
loop
/>
<AudioDebugTest />
</>
)
}

Ver fichero

@@ -37,12 +37,25 @@ const animateBars = (analyser, canvas, ctx, dataArray, bufferLength) => {
lastDataSnapshot = currentSnapshot;
}
// Stricter criteria for real audio data detection
const hasRealAudioData = maxValue > 20 &&
averageLevel > 3 &&
activeValues > 20 &&
variance > 10 &&
consecutiveStaticFrames < 10;
// More lenient detection for real audio data (lowered thresholds)
const hasRealAudioData = maxValue > 1 &&
averageLevel > 0.1 &&
activeValues > 5 &&
variance > 0.5 &&
consecutiveStaticFrames < 30;
// Debug logging every 60 frames (roughly once per second)
if (Math.random() < 0.016) { // ~1/60 chance
console.log('🎨 WaveForm Animation Debug:', {
maxValue,
averageLevel: averageLevel.toFixed(2),
activeValues,
variance: variance.toFixed(2),
consecutiveStaticFrames,
hasRealAudioData,
sampleData: Array.from(dataArray.slice(0, 10))
});
}
if (!hasRealAudioData) {
// TV browser fallback animation
@@ -68,7 +81,7 @@ const animateBars = (analyser, canvas, ctx, dataArray, bufferLength) => {
ctx.fillRect(i * barWidth + 2, barY, barWidth - 4, barHeight);
ctx.shadowBlur = 0;
}
return true;
return true; // Using fallback animation
}
// Real audio data rendering for PC browsers
@@ -98,10 +111,12 @@ const animateBars = (analyser, canvas, ctx, dataArray, bufferLength) => {
x += barWidth;
}
return false; // Using real audio data
} catch (error) {
console.error('❌ WaveForm animation error:', error);
return false;
}
return true;
};
const WaveForm = ({ analyzerData }) => {

Ver fichero

@@ -0,0 +1,192 @@
import React, { useRef, useEffect, useState } from 'react';
const AudioDebugTest = () => {
const audioRef = useRef(null);
const [debugInfo, setDebugInfo] = useState(null);
const [isPlaying, setIsPlaying] = useState(false);
const intervalRef = useRef(null);
const testWebAudio = async () => {
if (!audioRef.current) {
console.log('❌ No audio element');
return;
}
try {
// Create AudioContext and analyzer
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
console.log('🎛️ AudioContext created:', audioCtx.state);
// Resume context if suspended (required by most browsers)
if (audioCtx.state === 'suspended') {
await audioCtx.resume();
console.log('🎛️ AudioContext resumed');
}
// Ensure audio is ready
if (audioRef.current.readyState < 2) {
console.log('⏳ Waiting for audio to be ready...');
await new Promise((resolve) => {
const handleCanPlay = () => {
audioRef.current.removeEventListener('canplay', handleCanPlay);
resolve();
};
audioRef.current.addEventListener('canplay', handleCanPlay);
});
}
const source = audioCtx.createMediaElementSource(audioRef.current);
const analyzer = audioCtx.createAnalyser();
analyzer.fftSize = 2048;
const bufferLength = analyzer.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
source.connect(analyzer);
analyzer.connect(audioCtx.destination);
console.log('🔗 Web Audio connected');
// Start audio playback
await audioRef.current.play();
console.log('🎵 Audio started playing');
// Start monitoring with a delay to ensure audio is flowing
setTimeout(() => {
intervalRef.current = setInterval(() => {
analyzer.getByteFrequencyData(dataArray);
const maxValue = Math.max(...dataArray);
const avgValue = dataArray.reduce((a, b) => a + b, 0) / dataArray.length;
const activeValues = dataArray.filter(v => v > 5).length;
const variance = dataArray.reduce((sum, value) => sum + Math.pow(value - avgValue, 2), 0) / dataArray.length;
setDebugInfo({
timestamp: new Date().toLocaleTimeString(),
audioContext: audioCtx.state,
audioElement: {
paused: audioRef.current.paused,
currentTime: audioRef.current.currentTime?.toFixed(2),
duration: audioRef.current.duration?.toFixed(2),
readyState: audioRef.current.readyState,
networkState: audioRef.current.networkState,
volume: audioRef.current.volume
},
audioData: {
maxValue,
avgValue: avgValue.toFixed(2),
activeValues,
variance: variance.toFixed(2),
sampleData: Array.from(dataArray.slice(0, 20))
}
});
}, 1000);
}, 1000); // Wait 1 second before starting monitoring
} catch (error) {
console.error('❌ Web Audio test failed:', error);
setDebugInfo({ error: error.message });
}
};
const playAudio = async () => {
try {
await audioRef.current.play();
setIsPlaying(true);
testWebAudio();
} catch (error) {
console.error('❌ Play failed:', error);
}
};
const pauseAudio = () => {
audioRef.current.pause();
setIsPlaying(false);
if (intervalRef.current) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
};
useEffect(() => {
return () => {
if (intervalRef.current) {
clearInterval(intervalRef.current);
}
};
}, []);
return (
<div style={{
position: 'fixed',
top: 20,
right: 20,
background: 'rgba(0,0,0,0.8)',
color: 'white',
padding: '20px',
borderRadius: '10px',
maxWidth: '400px',
zIndex: 1000,
fontSize: '12px',
fontFamily: 'monospace'
}}>
<h3>Web Audio API Debug Test</h3>
<audio
ref={audioRef}
src="/test-multi-tone.mp3"
loop
style={{ width: '100%', marginBottom: '10px' }}
/>
<div style={{ marginBottom: '10px' }}>
<button onClick={playAudio} disabled={isPlaying}>Play & Test</button>
<button onClick={pauseAudio} disabled={!isPlaying} style={{ marginLeft: '10px' }}>Pause</button>
</div>
{debugInfo && (
<div>
<h4>Debug Info ({debugInfo.timestamp})</h4>
{debugInfo.error ? (
<div style={{ color: 'red' }}>Error: {debugInfo.error}</div>
) : (
<>
<div><strong>AudioContext:</strong> {debugInfo.audioContext}</div>
<div><strong>Audio Element:</strong></div>
<ul style={{ margin: 0, paddingLeft: '20px' }}>
<li>Paused: {debugInfo.audioElement.paused ? 'Yes' : 'No'}</li>
<li>Time: {debugInfo.audioElement.currentTime}s / {debugInfo.audioElement.duration}s</li>
<li>Ready State: {debugInfo.audioElement.readyState}</li>
<li>Network State: {debugInfo.audioElement.networkState}</li>
<li>Volume: {debugInfo.audioElement.volume}</li>
</ul>
<div><strong>Audio Data:</strong></div>
<ul style={{ margin: 0, paddingLeft: '20px' }}>
<li>Max Value: {debugInfo.audioData.maxValue}</li>
<li>Avg Value: {debugInfo.audioData.avgValue}</li>
<li>Active Values: {debugInfo.audioData.activeValues}</li>
<li>Variance: {debugInfo.audioData.variance}</li>
</ul>
<div><strong>Sample Data (first 20):</strong></div>
<div style={{
fontSize: '10px',
wordBreak: 'break-all',
backgroundColor: 'rgba(255,255,255,0.1)',
padding: '5px',
marginTop: '5px'
}}>
[{debugInfo.audioData.sampleData.join(', ')}]
</div>
</>
)}
</div>
)}
</div>
);
};
export default AudioDebugTest;

Ver fichero

@@ -13,15 +13,54 @@ const useAudioPlayer = (audioUrl) => {
const audioElmRef = useRef(null);
const loadedAnalyzer = useRef(false);
const debugIntervalRef = useRef(null);
// Debug function to continuously monitor audio data
const startAudioDebugMonitoring = useCallback((analyzer, dataArray) => {
if (debugIntervalRef.current) {
clearInterval(debugIntervalRef.current);
}
debugIntervalRef.current = setInterval(() => {
if (!analyzer || !dataArray || !audioElmRef.current) return;
analyzer.getByteFrequencyData(dataArray);
const maxValue = Math.max(...dataArray);
const averageLevel = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
const activeValues = dataArray.filter(value => value > 5).length;
const variance = dataArray.reduce((sum, value) => sum + Math.pow(value - averageLevel, 2), 0) / dataArray.length;
console.log('🎵 Audio Debug:', {
timestamp: new Date().toLocaleTimeString(),
audioElement: {
paused: audioElmRef.current.paused,
currentTime: audioElmRef.current.currentTime?.toFixed(2),
duration: audioElmRef.current.duration?.toFixed(2),
volume: audioElmRef.current.volume,
readyState: audioElmRef.current.readyState,
networkState: audioElmRef.current.networkState,
src: audioElmRef.current.src?.split('/').pop()
},
audioData: {
maxValue,
averageLevel: averageLevel.toFixed(2),
activeValues,
variance: variance.toFixed(2),
sampleData: Array.from(dataArray.slice(0, 20))
}
});
}, 3000); // Every 3 seconds
}, []);
// Initialize audio analyzer for visualization
const initAudioAnalyzer = useCallback(() => {
if (!audioElmRef.current) return;
if (!audioElmRef.current) {
console.log('❌ Audio element not available');
return;
}
// Detect if we're on a TV platform and skip Web Audio API
const userAgent = navigator.userAgent.toLowerCase();
const platform = navigator.platform;
const vendor = navigator.vendor;
// Manual override for testing
const forceTV = window.location.search.includes('tv=true') ||
@@ -47,22 +86,31 @@ const useAudioPlayer = (audioUrl) => {
// TV-specific properties
(window.opera && window.opera.tv);
console.log('TV Detection:', {
console.log('🔍 Platform Detection:', {
isTVPlatform,
forceTV,
screenSize: `${window.screen.width}x${window.screen.height}`,
userAgentSnippet: userAgent.substring(0, 50) + '...'
userAgentSnippet: userAgent.substring(0, 50) + '...',
audioElementReady: !!audioElmRef.current,
audioSrc: audioElmRef.current?.src
});
if (isTVPlatform) {
// Skip Web Audio API for TV platforms and use fallback
console.log('📺 TV platform detected - using fallback animation');
setAnalyzerData(null);
loadedAnalyzer.current = true;
return;
}
try {
console.log('🎛️ Initializing Web Audio API...');
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
console.log('🎛️ AudioContext created:', {
state: audioCtx.state,
sampleRate: audioCtx.sampleRate
});
const source = audioCtx.createMediaElementSource(audioElmRef.current);
const analyzer = audioCtx.createAnalyser();
@@ -70,17 +118,44 @@ const useAudioPlayer = (audioUrl) => {
const bufferLength = analyzer.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
console.log('🎛️ Analyzer created:', {
fftSize: analyzer.fftSize,
bufferLength,
smoothingTimeConstant: analyzer.smoothingTimeConstant
});
source.connect(analyzer);
analyzer.connect(audioCtx.destination);
source.onended = () => source.disconnect();
source.onended = () => {
console.log('🎵 Audio source ended');
source.disconnect();
};
// Test connection after a short delay
setTimeout(() => {
// Resume AudioContext if suspended (required by many browsers)
if (audioCtx.state === 'suspended') {
console.log('🎛️ Resuming suspended AudioContext...');
audioCtx.resume().then(() => {
console.log('🎛️ AudioContext resumed');
});
}
// Test connection after audio starts playing
const testConnection = () => {
const testData = new Uint8Array(bufferLength);
analyzer.getByteFrequencyData(testData);
const maxValue = Math.max(...testData);
const avgValue = testData.reduce((a, b) => a + b, 0) / testData.length;
const isConnected = maxValue > 5 || avgValue > 1;
const isConnected = maxValue > 0 || avgValue > 0;
console.log('🔗 Web Audio Connection Test:', {
maxValue,
avgValue: avgValue.toFixed(2),
isConnected,
audioContextState: audioCtx.state,
audioElementPlaying: !audioElmRef.current?.paused,
audioElementCurrentTime: audioElmRef.current?.currentTime?.toFixed(2),
sampleData: Array.from(testData.slice(0, 10))
});
setAnalyzerData({
analyzer,
@@ -90,41 +165,90 @@ const useAudioPlayer = (audioUrl) => {
mediaSource: source,
isConnected
});
}, 1000);
// Start continuous monitoring
startAudioDebugMonitoring(analyzer, dataArray);
};
// Test immediately and after delay
testConnection();
setTimeout(testConnection, 3000);
loadedAnalyzer.current = true;
} catch (error) {
console.error('Web Audio API initialization failed:', error);
console.error('Web Audio API initialization failed:', error);
// Set analyzer data as null to trigger fallback
setAnalyzerData(null);
loadedAnalyzer.current = true;
}
}, []);
}, [startAudioDebugMonitoring]);
// Play audio function
const play = useCallback(async () => {
console.log('▶️ Play function called');
if (!loadedAnalyzer.current) {
console.log('🎛️ Initializing analyzer before play...');
initAudioAnalyzer();
}
setPaused(false);
setMuted(false);
try {
await audioElmRef.current?.play();
if (audioElmRef.current) {
console.log('🎵 Starting audio playback...', {
src: audioElmRef.current.src,
readyState: audioElmRef.current.readyState
});
// Ensure audio is loaded
if (audioElmRef.current.readyState < 2) {
console.log('⏳ Waiting for audio to load...');
await new Promise((resolve) => {
const handleCanPlay = () => {
audioElmRef.current.removeEventListener('canplay', handleCanPlay);
resolve();
};
audioElmRef.current.addEventListener('canplay', handleCanPlay);
});
}
await audioElmRef.current.play();
console.log('✅ Audio playback started');
// Re-initialize analyzer after audio starts playing
if (!loadedAnalyzer.current) {
setTimeout(() => {
console.log('🔄 Re-initializing analyzer after play...');
initAudioAnalyzer();
}, 500);
}
}
} catch (error) {
console.error('Failed to play audio:', error);
console.error('Failed to play audio:', error);
}
}, [initAudioAnalyzer]);
// Pause audio function
const pause = useCallback(() => {
console.log('⏸️ Pause function called');
audioElmRef.current?.pause();
setPaused(true);
// Clear debug monitoring
if (debugIntervalRef.current) {
clearInterval(debugIntervalRef.current);
debugIntervalRef.current = null;
}
}, []);
// Toggle mute function
const toggleMute = useCallback(() => {
setMuted(prevMuted => !prevMuted);
setMuted(prevMuted => {
console.log('🔇 Mute toggled:', !prevMuted);
return !prevMuted;
});
}, []);
// Update volume when currentVolume changes
@@ -139,6 +263,7 @@ const useAudioPlayer = (audioUrl) => {
}
audioElmRef.current.volume = currentVolume;
console.log('🔊 Volume updated:', currentVolume);
}, [currentVolume]);
// Setup volume change listener
@@ -158,6 +283,15 @@ const useAudioPlayer = (audioUrl) => {
};
}, []);
// Cleanup on unmount
useEffect(() => {
return () => {
if (debugIntervalRef.current) {
clearInterval(debugIntervalRef.current);
}
};
}, []);
return {
audioElmRef,
analyzerData,