waveform: improve TV-browser detection and Web Audio analyser fallback

Signed-off-by: ale <ale@manalejandro.com>
Este commit está contenido en:
ale
2025-06-04 22:05:42 +02:00
padre 824bb94eeb
commit 3cba898769
Se han modificado 2 ficheros con 138 adiciones y 17 borrados

Ver fichero

@@ -7,6 +7,10 @@ const BLUE_SHADES = [
"rgba(255,255,255,0.3)",
];
// Detection state for TV browsers
let consecutiveStaticFrames = 0;
let lastDataSnapshot = null;
// Real Web Audio API animation
const animateBars = (analyser, canvas, ctx, dataArray, bufferLength) => {
try {
@@ -16,13 +20,29 @@ const animateBars = (analyser, canvas, ctx, dataArray, bufferLength) => {
const barWidth = Math.max(3, Math.floor(canvas.width / barCount)); // Thinner bars
const step = Math.floor(bufferLength / barCount);
// More sophisticated detection for TV browsers
// Enhanced detection for TV browsers that return fake/static data
const maxValue = Math.max(...dataArray);
const averageLevel = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
const activeValues = dataArray.filter(value => value > 5).length;
const variance = dataArray.reduce((sum, value) => sum + Math.pow(value - averageLevel, 2), 0) / dataArray.length;
// TV browsers often return very low values or all zeros
const hasRealAudioData = maxValue > 10 && averageLevel > 1 && activeValues > 10;
// Check if data is suspiciously static (TV browsers often return same pattern)
const currentSnapshot = dataArray.slice(0, 20).join(','); // Sample first 20 values
const isStaticData = lastDataSnapshot && currentSnapshot === lastDataSnapshot;
if (isStaticData) {
consecutiveStaticFrames++;
} else {
consecutiveStaticFrames = 0;
lastDataSnapshot = currentSnapshot;
}
// Stricter criteria for real audio data detection
const hasRealAudioData = maxValue > 20 &&
averageLevel > 3 &&
activeValues > 20 &&
variance > 10 &&
consecutiveStaticFrames < 10;
if (!hasRealAudioData) {
// TV browser fallback animation
@@ -87,6 +107,7 @@ const animateBars = (analyser, canvas, ctx, dataArray, bufferLength) => {
const WaveForm = ({ analyzerData }) => {
const canvasRef = useRef(null);
const animationRef = useRef(null);
const debugRef = useRef(null);
useEffect(() => {
const canvas = canvasRef.current;
@@ -95,12 +116,17 @@ const WaveForm = ({ analyzerData }) => {
const ctx = canvas.getContext("2d");
if (!ctx) return;
// Check if we have valid analyzer data
// Check if we have valid analyzer data and connection
const hasValidAnalyzer = analyzerData &&
analyzerData.analyzer &&
analyzerData.dataArray &&
analyzerData.bufferLength;
// Check if running on Samsung TV (for debug info)
const isTizenTV = navigator.userAgent.includes('Tizen') ||
navigator.userAgent.includes('Samsung') ||
window.tizen;
const render = () => {
// Set canvas dimensions
canvas.width = window.innerWidth;
@@ -116,13 +142,59 @@ const WaveForm = ({ analyzerData }) => {
ctx.save();
if (hasValidAnalyzer) {
animateBars(
const success = animateBars(
analyzerData.analyzer,
canvas,
ctx,
analyzerData.dataArray,
analyzerData.bufferLength
);
// Debug info for TV browsers
if (isTizenTV && debugRef.current) {
const testData = new Uint8Array(analyzerData.bufferLength);
analyzerData.analyzer.getByteFrequencyData(testData);
const maxVal = Math.max(...testData);
const avgVal = testData.reduce((a, b) => a + b, 0) / testData.length;
debugRef.current.innerHTML = `
<div style="position: fixed; top: 10px; right: 10px; background: rgba(0,0,0,0.7); color: white; padding: 10px; font-family: monospace; font-size: 12px; z-index: 1000;">
<div>Tizen TV Detected: ${isTizenTV}</div>
<div>Analyzer Connected: ${analyzerData.isConnected || 'Unknown'}</div>
<div>Max Value: ${maxVal}</div>
<div>Avg Value: ${avgVal.toFixed(2)}</div>
<div>Using Fallback: ${!success}</div>
<div>Static Frames: ${consecutiveStaticFrames}</div>
</div>
`;
}
} else {
// Always show fallback when no analyzer
const time = Date.now();
const barCount = 128;
const barWidth = Math.max(3, Math.floor(canvas.width / barCount));
for (let i = 0; i < barCount; i++) {
const wave1 = Math.sin(time * 0.003 + i * 0.3) * 0.4;
const wave2 = Math.sin(time * 0.002 + i * 0.2) * 0.3;
const normalizedHeight = Math.max(0.1, Math.min(0.8, 0.3 + wave1 + wave2));
const barHeight = Math.max(20, normalizedHeight * canvas.height * 0.4);
const blueShade = Math.min(3, Math.floor(normalizedHeight * 4));
const barY = canvas.height - barHeight;
const gradient = ctx.createLinearGradient(0, canvas.height, 0, barY);
gradient.addColorStop(0, BLUE_SHADES[3]);
gradient.addColorStop(0.5, BLUE_SHADES[blueShade] || BLUE_SHADES[0]);
gradient.addColorStop(1, BLUE_SHADES[0]);
ctx.fillStyle = gradient;
ctx.fillRect(i * barWidth + 2, barY, barWidth - 4, barHeight);
ctx.shadowColor = "rgba(255,255,255,0.8)";
ctx.shadowBlur = 5;
ctx.fillRect(i * barWidth + 2, barY, barWidth - 4, barHeight);
ctx.shadowBlur = 0;
}
}
ctx.restore();
@@ -132,10 +204,20 @@ const WaveForm = ({ analyzerData }) => {
render();
// Create debug element for TV browsers
if (isTizenTV && !debugRef.current) {
debugRef.current = document.createElement('div');
document.body.appendChild(debugRef.current);
}
return () => {
if (animationRef.current) {
cancelAnimationFrame(animationRef.current);
}
if (debugRef.current) {
document.body.removeChild(debugRef.current);
debugRef.current = null;
}
};
}, [analyzerData]);

Ver fichero

@@ -18,20 +18,59 @@ const useAudioPlayer = (audioUrl) => {
// Initialise the Web Audio API analyser for the current <audio> element.
// TV platforms (Tizen/webOS) ship browsers whose Web Audio implementation
// tends to return fake/static frequency data, so we skip the analyser
// entirely there and let the visualiser use its synthetic fallback.
const initAudioAnalyzer = useCallback(() => {
  if (!audioElmRef.current) return;
  // Detect if we're on a TV platform and skip Web Audio API
  const isTVPlatform = navigator.userAgent.includes('Tizen') ||
    navigator.userAgent.includes('Samsung') ||
    navigator.userAgent.includes('LG') ||
    navigator.userAgent.includes('webOS') ||
    window.tizen ||
    window.webOS;
  if (isTVPlatform) {
    // Skip Web Audio API for TV platforms and use fallback animation
    setAnalyzerData(null);
    loadedAnalyzer.current = true;
    return;
  }
  try {
    const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    // NOTE(review): createMediaElementSource may only be called once per
    // element; loadedAnalyzer.current presumably guards re-entry — confirm
    // at the call site.
    const source = audioCtx.createMediaElementSource(audioElmRef.current);
    const analyzer = audioCtx.createAnalyser();
    analyzer.fftSize = 2048;
    const bufferLength = analyzer.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    source.connect(analyzer);
    analyzer.connect(audioCtx.destination);
    source.onended = () => source.disconnect();
    // Test connection after a short delay: sample the analyser once and
    // record whether it is delivering non-trivial frequency data (some
    // browsers build the graph but never produce real values).
    setTimeout(() => {
      const testData = new Uint8Array(bufferLength);
      analyzer.getByteFrequencyData(testData);
      const maxValue = Math.max(...testData);
      const avgValue = testData.reduce((a, b) => a + b, 0) / testData.length;
      const isConnected = maxValue > 5 || avgValue > 1;
      setAnalyzerData({
        analyzer,
        bufferLength,
        dataArray,
        audioContext: audioCtx,
        mediaSource: source,
        isConnected
      });
    }, 1000);
    loadedAnalyzer.current = true;
  } catch (error) {
    console.error('Web Audio API initialization failed:', error);
    // Set analyzer data as null to trigger fallback
    setAnalyzerData(null);
    loadedAnalyzer.current = true;
  }
}, []);
// Play audio function