diff --git a/fixtures/long-audio.mp3 b/fixtures/long-audio.mp3
new file mode 100644
index 000000000..574a0e37e
Binary files /dev/null and b/fixtures/long-audio.mp3 differ
diff --git a/ts/components/GlobalAudioContext.tsx b/ts/components/GlobalAudioContext.tsx
index 7a3cceb36..610b250ec 100644
--- a/ts/components/GlobalAudioContext.tsx
+++ b/ts/components/GlobalAudioContext.tsx
@@ -9,6 +9,7 @@ import { WaveformCache } from '../types/Audio';
 
 const MAX_WAVEFORM_COUNT = 1000;
 const MAX_PARALLEL_COMPUTE = 8;
+const MAX_AUDIO_DURATION = 15 * 60; // 15 minutes
 
 export type ComputePeaksResult = {
   duration: number;
@@ -35,6 +36,36 @@ const computeQueue = new PQueue({
   concurrency: MAX_PARALLEL_COMPUTE,
 });
 
+async function getAudioDuration(
+  url: string,
+  buffer: ArrayBuffer
+): Promise<number> {
+  const blob = new Blob([buffer]);
+  const blobURL = URL.createObjectURL(blob);
+
+  const audio = new Audio();
+  audio.muted = true;
+  audio.src = blobURL;
+
+  await new Promise<void>((resolve, reject) => {
+    audio.addEventListener('loadedmetadata', () => {
+      resolve();
+    });
+
+    audio.addEventListener('error', event => {
+      const error = new Error(
+        `Failed to load audio from: ${url} due to error: ${event.type}`
+      );
+      reject(error);
+    });
+  });
+
+  if (Number.isNaN(audio.duration)) {
+    throw new Error(`Invalid audio duration for: ${url}`);
+  }
+  return audio.duration;
+}
+
 /**
  * Load audio from `url`, decode PCM data, and compute RMS peaks for displaying
  * the waveform.
@@ -61,10 +92,21 @@ async function doComputePeaks(
   const response = await fetch(url);
   const raw = await response.arrayBuffer();
 
+  const duration = await getAudioDuration(url, raw);
+
+  const peaks = new Array(barCount).fill(0);
+  if (duration > MAX_AUDIO_DURATION) {
+    window.log.info(
+      `GlobalAudioContext: audio ${url} duration ${duration}s is too long`
+    );
+    const emptyResult = { peaks, duration };
+    waveformCache.set(url, emptyResult);
+    return emptyResult;
+  }
+
   const data = await audioContext.decodeAudioData(raw);
 
   // Compute RMS peaks
-  const peaks = new Array(barCount).fill(0);
   const norms = new Array(barCount).fill(0);
 
   const samplesPerPeak = data.length / peaks.length;
@@ -94,7 +136,7 @@ async function doComputePeaks(
     peaks[i] /= max;
   }
 
-  const result = { peaks, duration: data.duration };
+  const result = { peaks, duration };
   waveformCache.set(url, result);
   return result;
 }
@@ -118,10 +160,11 @@ export async function computePeaks(
   const promise = computeQueue.add(() => doComputePeaks(url, barCount));
   inProgressMap.set(computeKey, promise);
 
-  const result = await promise;
-  inProgressMap.delete(computeKey);
-
-  return result;
+  try {
+    return await promise;
+  } finally {
+    inProgressMap.delete(computeKey);
+  }
 }
 
 const globalContents: Contents = {
diff --git a/ts/components/conversation/Message.stories.tsx b/ts/components/conversation/Message.stories.tsx
index 71f0698fd..8c0ae60fa 100644
--- a/ts/components/conversation/Message.stories.tsx
+++ b/ts/components/conversation/Message.stories.tsx
@@ -804,6 +804,21 @@ story.add('Audio', () => {
   return renderBothDirections(props);
 });
 
+story.add('Long Audio', () => {
+  const props = createProps({
+    attachments: [
+      {
+        contentType: AUDIO_MP3,
+        fileName: 'long-audio.mp3',
+        url: '/fixtures/long-audio.mp3',
+      },
+    ],
+    status: 'sent',
+  });
+
+  return renderBothDirections(props);
+});
+
 story.add('Audio with Caption', () => {
   const props = createProps({
     attachments: [