diff --git a/src/components/Attachment/Audio.tsx b/src/components/Attachment/Audio.tsx index a6e4fc71d8..686cf70a45 100644 --- a/src/components/Attachment/Audio.tsx +++ b/src/components/Attachment/Audio.tsx @@ -2,48 +2,84 @@ import React from 'react'; import type { Attachment } from 'stream-chat'; import { DownloadButton, FileSizeIndicator, PlayButton, ProgressBar } from './components'; -import { useAudioController } from './hooks/useAudioController'; +import { type AudioPlayerState, useAudioPlayer } from '../AudioPlayback'; +import { useStateStore } from '../../store'; +import { useMessageContext } from '../../context'; +import type { AudioPlayer } from '../AudioPlayback/AudioPlayer'; -export type AudioProps = { - // fixme: rename og to attachment - og: Attachment; +type AudioAttachmentUIProps = { + audioPlayer: AudioPlayer; }; -const UnMemoizedAudio = (props: AudioProps) => { - const { - og: { asset_url, file_size, mime_type, title }, - } = props; - const { audioRef, isPlaying, progress, seek, togglePlay } = useAudioController({ - mimeType: mime_type, - }); - - if (!asset_url) return null; - +// todo: finish creating a BaseAudioPlayer derived from VoiceRecordingPlayerUI and AudioAttachmentUI +const AudioAttachmentUI = ({ audioPlayer }: AudioAttachmentUIProps) => { const dataTestId = 'audio-widget'; const rootClassName = 'str-chat__message-attachment-audio-widget'; + const { isPlaying, progress } = + useStateStore(audioPlayer?.state, audioPlayerStateSelector) ?? {}; + return (
-
- +
-
{title}
- +
+ {audioPlayer.title} +
+
- - + +
); }; +export type AudioProps = { + // fixme: rename og to attachment + og: Attachment; +}; + +const audioPlayerStateSelector = (state: AudioPlayerState) => ({ + isPlaying: state.isPlaying, + progress: state.progressPercent, +}); + +const UnMemoizedAudio = (props: AudioProps) => { + const { + og: { asset_url, file_size, mime_type, title }, + } = props; + + /** + * Introducing message context. This could be breaking change, therefore the fallback to {} is provided. + * If this component is used outside the message context, then there will be no audio player namespacing + * => scrolling away from the message in virtualized ML would create a new AudioPlayer instance. + * + * Edge case: the requester (message) has multiple attachments with the same assetURL - does not happen + * with the default SDK components, but can be done with custom API calls.In this case all the Audio + * widgets will share the state. + */ + const { message, threadList } = useMessageContext() ?? {}; + + const audioPlayer = useAudioPlayer({ + fileSize: file_size, + mimeType: mime_type, + requester: + message?.id && + `${threadList ? (message.parent_id ?? message.id) : ''}${message.id}`, + src: asset_url, + title, + waveformData: props.og.waveform_data, + }); + + return audioPlayer ? 
: null; +}; + /** * Audio attachment with play/pause button and progress bar */ diff --git a/src/components/Attachment/Card.tsx b/src/components/Attachment/Card.tsx index f5d2419d1b..2d0125bb17 100644 --- a/src/components/Attachment/Card.tsx +++ b/src/components/Attachment/Card.tsx @@ -6,13 +6,15 @@ import type { AudioProps } from './Audio'; import { ImageComponent } from '../Gallery'; import { SafeAnchor } from '../SafeAnchor'; import { PlayButton, ProgressBar } from './components'; -import { useAudioController } from './hooks/useAudioController'; import { useChannelStateContext } from '../../context/ChannelStateContext'; import { useTranslationContext } from '../../context/TranslationContext'; import type { Attachment } from 'stream-chat'; import type { RenderAttachmentProps } from './utils'; import type { Dimensions } from '../../types/types'; +import { type AudioPlayerState, useAudioPlayer } from '../AudioPlayback'; +import { useStateStore } from '../../store'; +import { useMessageContext } from '../../context'; const getHostFromURL = (url?: string | null) => { if (url !== undefined && url !== null) { @@ -126,31 +128,55 @@ const CardContent = (props: CardContentProps) => { ); }; +const audioPlayerStateSelector = (state: AudioPlayerState) => ({ + isPlaying: state.isPlaying, + progress: state.progressPercent, +}); + +const AudioWidget = ({ mimeType, src }: { src: string; mimeType?: string }) => { + /** + * Introducing message context. This could be breaking change, therefore the fallback to {} is provided. + * If this component is used outside the message context, then there will be no audio player namespacing + * => scrolling away from the message in virtualized ML would create a new AudioPlayer instance. + * + * Edge case: the requester (message) has multiple attachments with the same assetURL - does not happen + * with the default SDK components, but can be done with custom API calls.In this case all the Audio + * widgets will share the state. 
+ */ + const { message, threadList } = useMessageContext() ?? {}; + + const audioPlayer = useAudioPlayer({ + mimeType, + requester: + message?.id && + `${threadList ? (message.parent_id ?? message.id) : ''}${message.id}`, + src, + }); + + const { isPlaying, progress } = + useStateStore(audioPlayer?.state, audioPlayerStateSelector) ?? {}; + + if (!audioPlayer) return; + + return ( +
+
+ +
+ +
+ ); +}; + export const CardAudio = ({ og: { asset_url, author_name, mime_type, og_scrape_url, text, title, title_link }, }: AudioProps) => { - const { audioRef, isPlaying, progress, seek, togglePlay } = useAudioController({ - mimeType: mime_type, - }); - const url = title_link || og_scrape_url; const dataTestId = 'card-audio-widget'; const rootClassName = 'str-chat__message-attachment-card-audio-widget'; return (
- {asset_url && ( - <> - -
-
- -
- -
- - )} + {asset_url && }
{url && } {title && ( diff --git a/src/components/Attachment/VoiceRecording.tsx b/src/components/Attachment/VoiceRecording.tsx index 7800207268..2ce46ee314 100644 --- a/src/components/Attachment/VoiceRecording.tsx +++ b/src/components/Attachment/VoiceRecording.tsx @@ -7,95 +7,125 @@ import { PlayButton, WaveProgressBar, } from './components'; -import { useAudioController } from './hooks/useAudioController'; import { displayDuration } from './utils'; import { FileIcon } from '../ReactFileUtilities'; -import { useTranslationContext } from '../../context'; +import { useMessageContext, useTranslationContext } from '../../context'; +import { type AudioPlayerState, useAudioPlayer } from '../AudioPlayback/'; +import { useStateStore } from '../../store'; +import type { AudioPlayer } from '../AudioPlayback/AudioPlayer'; const rootClassName = 'str-chat__message-attachment__voice-recording-widget'; -export type VoiceRecordingPlayerProps = Pick & { - /** An array of fractional numeric values of playback speed to override the defaults (1.0, 1.5, 2.0) */ - playbackRates?: number[]; -}; - -export const VoiceRecordingPlayer = ({ - attachment, - playbackRates, -}: VoiceRecordingPlayerProps) => { - const { t } = useTranslationContext('VoiceRecordingPlayer'); - const { - asset_url, - duration = 0, - mime_type, - title = t('Voice message'), - waveform_data, - } = attachment; +const audioPlayerStateSelector = (state: AudioPlayerState) => ({ + canPlayRecord: state.canPlayRecord, + isPlaying: state.isPlaying, + playbackRate: state.currentPlaybackRate, + progress: state.progressPercent, + secondsElapsed: state.secondsElapsed, +}); - const { - audioRef, - increasePlaybackRate, - isPlaying, - playbackRate, - progress, - secondsElapsed, - seek, - togglePlay, - } = useAudioController({ - durationSeconds: duration ?? 
0, - mimeType: mime_type, - playbackRates, - }); +type VoiceRecordingPlayerUIProps = { + audioPlayer: AudioPlayer; +}; - if (!asset_url) return null; +// todo: finish creating a BaseAudioPlayer derived from VoiceRecordingPlayerUI and AudioAttachmentUI +const VoiceRecordingPlayerUI = ({ audioPlayer }: VoiceRecordingPlayerUIProps) => { + const { canPlayRecord, isPlaying, playbackRate, progress, secondsElapsed } = + useStateStore(audioPlayer?.state, audioPlayerStateSelector) ?? {}; - const displayedDuration = secondsElapsed || duration; + const displayedDuration = secondsElapsed || audioPlayer.durationSeconds; return (
- - +
- {title} + {audioPlayer.title}
- {attachment.duration ? ( + {audioPlayer.durationSeconds ? ( displayDuration(displayedDuration) ) : ( )}
{isPlaying ? ( - - {playbackRate.toFixed(1)}x + + {playbackRate?.toFixed(1)}x ) : ( - + )}
); }; +export type VoiceRecordingPlayerProps = Pick & { + /** An array of fractional numeric values of playback speed to override the defaults (1.0, 1.5, 2.0) */ + playbackRates?: number[]; +}; + +export const VoiceRecordingPlayer = ({ + attachment, + playbackRates, +}: VoiceRecordingPlayerProps) => { + const { t } = useTranslationContext(); + const { + asset_url, + duration = 0, + file_size, + mime_type, + title = t('Voice message'), + waveform_data, + } = attachment; + + /** + * Introducing message context. This could be breaking change, therefore the fallback to {} is provided. + * If this component is used outside the message context, then there will be no audio player namespacing + * => scrolling away from the message in virtualized ML would create a new AudioPlayer instance. + * + * Edge case: the requester (message) has multiple attachments with the same assetURL - does not happen + * with the default SDK components, but can be done with custom API calls.In this case all the Audio + * widgets will share the state. + */ + const { message, threadList } = useMessageContext() ?? {}; + + const audioPlayer = useAudioPlayer({ + durationSeconds: duration ?? 0, + fileSize: file_size, + mimeType: mime_type, + playbackRates, + requester: + message?.id && + `${threadList ? (message.parent_id ?? message.id) : ''}${message.id}`, + src: asset_url, + title, + waveformData: waveform_data, + }); + + return audioPlayer ? 
: null; +}; + export type QuotedVoiceRecordingProps = Pick; export const QuotedVoiceRecording = ({ attachment }: QuotedVoiceRecordingProps) => { diff --git a/src/components/Attachment/__tests__/Audio.test.js b/src/components/Attachment/__tests__/Audio.test.js index b88b201091..20becd1474 100644 --- a/src/components/Attachment/__tests__/Audio.test.js +++ b/src/components/Attachment/__tests__/Audio.test.js @@ -3,13 +3,32 @@ import { act, cleanup, fireEvent, render, screen, waitFor } from '@testing-libra import '@testing-library/jest-dom'; import { Audio } from '../Audio'; - -import { ChannelActionProvider } from '../../../context'; -import { generateAudioAttachment } from '../../../mock-builders'; +import { generateAudioAttachment, generateMessage } from '../../../mock-builders'; import { prettifyFileSize } from '../../MessageInput/hooks/utils'; - -const AUDIO = generateAudioAttachment(); -const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); +import { WithAudioPlayback } from '../../AudioPlayback'; +import { MessageProvider } from '../../../context'; + +jest.mock('../../../context/ChatContext', () => ({ + useChatContext: () => ({ client: mockClient }), +})); +jest.mock('../../../context/TranslationContext', () => ({ + useTranslationContext: () => ({ t: (s) => tSpy(s) }), +})); + +const addErrorSpy = jest.fn(); +const mockClient = { + notifications: { addError: addErrorSpy }, +}; +const tSpy = (s) => s; + +// capture created Audio() elements so we can assert src & dispatch events +const createdAudios = []; //HTMLAudioElement[] +const RealAudio = window.Audio; +jest.spyOn(window, 'Audio').mockImplementation(function AudioMock(...args) { + const el = new RealAudio(...args); + createdAudios.push(el); + return el; +}); const originalConsoleError = console.error; jest.spyOn(console, 'error').mockImplementationOnce((...errorOrTextorArg) => { @@ -20,52 +39,76 @@ jest.spyOn(console, 'error').mockImplementationOnce((...errorOrTextorArg) => { 
originalConsoleError(...errorOrTextorArg); }); -const addNotificationSpy = jest.fn(); -const defaultChannelActionContext = { addNotification: addNotificationSpy }; +const audioAttachment = generateAudioAttachment({ mime_type: undefined }); +const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); + const renderComponent = ( props = { - channelActionContext: defaultChannelActionContext, - og: AUDIO, + og: audioAttachment, }, ) => render( - + , + , ); -const playButtonTestId = 'play-audio'; -const pauseButtonTestId = 'pause-audio'; -const playButton = () => screen.queryByTestId(playButtonTestId); -const pauseButton = () => screen.queryByTestId(pauseButtonTestId); +const playButton = () => screen.queryByTestId('play-audio'); +const pauseButton = () => screen.queryByTestId('pause-audio'); + +const clickToPlay = async () => { + await act(async () => { + await fireEvent.click(playButton()); + }); +}; + +const clickToPause = async () => { + await act(async () => { + await fireEvent.click(pauseButton()); + }); +}; + +const expectAddErrorMessage = (message) => { + expect(addErrorSpy).toHaveBeenCalled(); + const hit = addErrorSpy.mock.calls.find((c) => c?.[0]?.message === message); + expect(hit).toBeTruthy(); +}; describe('Audio', () => { - beforeAll(() => { + beforeEach(() => { // jsdom doesn't define these, so mock them instead // see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement#Methods jest.spyOn(HTMLMediaElement.prototype, 'play').mockImplementation(() => {}); jest.spyOn(HTMLMediaElement.prototype, 'pause').mockImplementation(() => {}); + jest.spyOn(HTMLMediaElement.prototype, 'load').mockImplementation(() => {}); }); + afterEach(() => { cleanup(); - jest.resetAllMocks(); + jest.clearAllMocks(); + createdAudios.length = 0; }); - it('should render title and file size', () => { + it('renders title and file size', () => { const { container, getByText } = renderComponent({ - og: AUDIO, + og: audioAttachment, }); - 
expect(getByText(AUDIO.title)).toBeInTheDocument(); - expect(getByText(prettifyFileSize(AUDIO.file_size))).toBeInTheDocument(); + expect(getByText(audioAttachment.title)).toBeInTheDocument(); + expect(getByText(prettifyFileSize(audioAttachment.file_size))).toBeInTheDocument(); expect(container.querySelector('img')).not.toBeInTheDocument(); }); - it('should show the correct progress after clicking to the middle of a progress bar (seeking)', async () => { - const { getByTestId } = renderComponent({ og: AUDIO }); + it('creates a playback Audio() with the right src only after clicked to play', async () => { + renderComponent({ og: audioAttachment }); + await clickToPlay(); + expect(createdAudios.length).toBe(1); + expect(createdAudios[0].src).toBe(audioAttachment.asset_url); + }); + it('shows the correct progress after clicking to the middle of a progress bar (seeking)', async () => { + const { getByTestId } = renderComponent({ og: audioAttachment }); + await clickToPlay(); jest .spyOn(HTMLDivElement.prototype, 'getBoundingClientRect') .mockImplementationOnce(() => ({ width: 120, x: 0 })); @@ -86,170 +129,161 @@ describe('Audio', () => { }); }); - it('should render an audio element with the right source', () => { - const { getByTestId } = renderComponent({ og: AUDIO }); - - const source = getByTestId('audio-source'); + it('shows the correct button if the song is paused/playing', async () => { + renderComponent({ og: { ...audioAttachment } }); - expect(source).toBeInTheDocument(); - expect(source.src).toBe(AUDIO.asset_url); - expect(source.parentElement).toBeInstanceOf(HTMLAudioElement); - }); - - it('should show the correct button if the song is paused/playing', async () => { - const { container } = renderComponent({ - og: { ...AUDIO, mime_type: undefined }, - }); - const audioPausedMock = jest.spyOn(container.querySelector('audio'), 'paused', 'get'); - expect(await playButton()).toBeInTheDocument(); - expect(await pauseButton()).not.toBeInTheDocument(); + 
expect(playButton()).toBeInTheDocument(); - audioPausedMock.mockReturnValueOnce(true); - await act(async () => { - await fireEvent.click(playButton()); - }); + await clickToPlay(); + const audioPausedMock = jest.spyOn(createdAudios[0], 'paused', 'get'); - expect(await playButton()).not.toBeInTheDocument(); - expect(await pauseButton()).toBeInTheDocument(); + expect(pauseButton()).toBeInTheDocument(); audioPausedMock.mockReturnValueOnce(false); - await act(async () => { - await fireEvent.click(pauseButton()); - }); - expect(await playButton()).toBeInTheDocument(); - expect(await pauseButton()).not.toBeInTheDocument(); - expect(addNotificationSpy).not.toHaveBeenCalled(); + await clickToPause(); + expect(playButton()).toBeInTheDocument(); + + expect(addErrorSpy).not.toHaveBeenCalled(); audioPausedMock.mockRestore(); }); - it('should pause the audio if the playback has not started in 2000ms', async () => { - jest.useFakeTimers('modern'); - const { container } = renderComponent({ - og: { ...AUDIO, mime_type: undefined }, + it('pauses the audio if the playback has not started in 2000ms', async () => { + jest.useFakeTimers({ now: Date.now() }); + renderComponent({ + og: audioAttachment, }); + expect(playButton()).toBeInTheDocument(); + expect(pauseButton()).not.toBeInTheDocument(); + jest + .spyOn(HTMLAudioElement.prototype, 'play') + .mockImplementationOnce(() => sleep(3000)); + await clickToPlay(); - const audio = container.querySelector('audio'); - const audioPlayMock = jest.spyOn(audio, 'play').mockImplementation(() => delay(3000)); - const audioPauseMock = jest.spyOn(audio, 'pause'); - - expect(await playButton()).toBeInTheDocument(); - expect(await pauseButton()).not.toBeInTheDocument(); - - await act(async () => { - await fireEvent.click(playButton()); + await waitFor(() => { + expect(playButton()).toBeInTheDocument(); + expect(pauseButton()).not.toBeInTheDocument(); }); - expect(await playButton()).toBeInTheDocument(); - expect(await 
pauseButton()).not.toBeInTheDocument(); jest.advanceTimersByTime(2000); - - await waitFor(async () => { - expect(audioPauseMock).toHaveBeenCalledWith(); - expect(await playButton()).toBeInTheDocument(); - expect(await pauseButton()).not.toBeInTheDocument(); - expect(addNotificationSpy).not.toHaveBeenCalled(); + await waitFor(() => { + expect(playButton()).toBeInTheDocument(); + expect(pauseButton()).not.toBeInTheDocument(); + expect(addErrorSpy).not.toHaveBeenCalled(); }); jest.useRealTimers(); - audioPlayMock.mockRestore(); - audioPauseMock.mockRestore(); }); - it('should register error if pausing the audio after 2000ms of inactivity failed', async () => { + it('registers error if pausing the audio after 2000ms of inactivity failed', async () => { jest.useFakeTimers('modern'); - const { container } = renderComponent({ - og: { ...AUDIO, mime_type: undefined }, - }); - const audio = container.querySelector('audio'); - const audioPlayMock = jest.spyOn(audio, 'play').mockImplementation(() => delay(3000)); - const audioPauseMock = jest.spyOn(audio, 'pause').mockImplementationOnce(() => { + renderComponent({ og: audioAttachment }); + + jest + .spyOn(HTMLAudioElement.prototype, 'play') + .mockImplementationOnce(() => sleep(3000)); + jest.spyOn(HTMLAudioElement.prototype, 'pause').mockImplementationOnce(() => { throw new Error(''); }); - await act(() => { - fireEvent.click(playButton()); - }); + await clickToPlay(); + jest.advanceTimersByTime(2000); await waitFor(() => { - expect(audioPauseMock).toHaveBeenCalledWith(); - expect(addNotificationSpy).toHaveBeenCalledWith( - 'Failed to play the recording', - 'error', - ); + expectAddErrorMessage('Failed to play the recording'); }); jest.useRealTimers(); - audioPlayMock.mockRestore(); - audioPauseMock.mockRestore(); }); - it('should register error if playing the audio failed', async () => { + it('registers error if playing the audio failed', async () => { const errorText = 'Test error'; - const { container } = 
renderComponent({ - og: AUDIO, + renderComponent({ + og: audioAttachment, }); - const audio = container.querySelector('audio'); - const audioPlayMock = jest - .spyOn(audio, 'play') + jest + .spyOn(HTMLAudioElement.prototype, 'play') .mockRejectedValueOnce(new Error(errorText)); - const audioCanPlayTypeMock = jest - .spyOn(audio, 'canPlayType') + const canPlaySpy = jest + .spyOn(HTMLAudioElement.prototype, 'canPlayType') .mockReturnValue('maybe'); - expect(await playButton()).toBeInTheDocument(); - expect(await pauseButton()).not.toBeInTheDocument(); + expect(playButton()).toBeInTheDocument(); + expect(pauseButton()).not.toBeInTheDocument(); - await act(async () => { - await fireEvent.click(playButton()); - }); - expect(await playButton()).toBeInTheDocument(); - expect(await pauseButton()).not.toBeInTheDocument(); - expect(addNotificationSpy).toHaveBeenCalledWith(errorText, 'error'); - audioPlayMock.mockRestore(); - audioCanPlayTypeMock.mockRestore(); + await clickToPlay(); + expect(playButton()).toBeInTheDocument(); + expect(pauseButton()).not.toBeInTheDocument(); + expectAddErrorMessage(errorText); + canPlaySpy.mockRestore(); }); it('should register error if the audio MIME type is not playable', async () => { - const { container } = renderComponent({ - og: AUDIO, - }); - const audio = container.querySelector('audio'); - const audioPlayMock = jest.spyOn(audio, 'play'); - const audioCanPlayTypeMock = jest.spyOn(audio, 'canPlayType').mockReturnValue(''); - - expect(await playButton()).toBeInTheDocument(); - expect(await pauseButton()).not.toBeInTheDocument(); - - await act(async () => { - await fireEvent.click(playButton()); - }); - expect(audioPlayMock).not.toHaveBeenCalled(); - expect(addNotificationSpy).toHaveBeenCalledWith( - 'Recording format is not supported and cannot be reproduced', - 'error', - ); - expect(await playButton()).toBeInTheDocument(); - expect(await pauseButton()).not.toBeInTheDocument(); - - audioPlayMock.mockRestore(); - 
audioCanPlayTypeMock.mockRestore(); + renderComponent({ og: { ...audioAttachment, mime_type: 'audio/mp4' } }); + const spy = jest.spyOn(HTMLAudioElement.prototype, 'canPlayType').mockReturnValue(''); + + await clickToPlay(); + expect(playButton()).toBeInTheDocument(); + expect(pauseButton()).not.toBeInTheDocument(); + expectAddErrorMessage('Recording format is not supported and cannot be reproduced'); + spy.mockRestore(); }); - it('should show the correct progress', async () => { - const { container } = renderComponent({ og: AUDIO }); + it('shows the correct progress on timeupdate', async () => { + renderComponent({ og: audioAttachment }); + await clickToPlay(); + const audio = createdAudios[0]; + jest.spyOn(audio, 'duration', 'get').mockReturnValue(100); + jest.spyOn(audio, 'currentTime', 'get').mockReturnValue(50); - jest - .spyOn(HTMLAudioElement.prototype, 'duration', 'get') - .mockImplementationOnce(() => 100); - jest - .spyOn(HTMLAudioElement.prototype, 'currentTime', 'get') - .mockImplementationOnce(() => 50); - const audioElement = container.querySelector('audio'); - fireEvent.timeUpdate(audioElement); + audio.dispatchEvent(new Event('timeupdate')); await waitFor(() => { expect(screen.getByTestId('audio-progress')).toHaveAttribute('data-progress', '50'); }); }); + + it('differentiates between in thread and in channel audio player', async () => { + const message = generateMessage(); + render( + + + + + + , + ); + const playButtons = screen.queryAllByTestId('play-audio'); + expect(playButtons.length).toBe(2); + await Promise.all( + playButtons.map(async (button) => { + await fireEvent.click(button); + }), + ); + expect(createdAudios).toHaveLength(2); + }); + + it('keeps a single copy of audio player for the same requester', async () => { + const message = generateMessage(); + render( + + + + + + , + ); + const playButtons = screen.queryAllByTestId('play-audio'); + expect(playButtons.length).toBe(2); + await Promise.all( + playButtons.map(async (button) => { + 
await fireEvent.click(button); + }), + ); + expect(createdAudios).toHaveLength(1); + }); }); diff --git a/src/components/Attachment/__tests__/Card.test.js b/src/components/Attachment/__tests__/Card.test.js index 5582dd599f..28333a607e 100644 --- a/src/components/Attachment/__tests__/Card.test.js +++ b/src/components/Attachment/__tests__/Card.test.js @@ -1,10 +1,14 @@ import React from 'react'; -import { cleanup, render, waitFor } from '@testing-library/react'; +import { cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'; import '@testing-library/jest-dom'; import { Card } from '../Card'; -import { ChannelActionProvider, TranslationContext } from '../../../context'; +import { + ChannelActionProvider, + MessageProvider, + TranslationContext, +} from '../../../context'; import { ChannelStateProvider } from '../../../context/ChannelStateContext'; import { ChatProvider } from '../../../context/ChatContext'; import { ComponentProvider } from '../../../context/ComponentContext'; @@ -13,18 +17,22 @@ import { generateChannel, generateGiphyAttachment, generateMember, + generateMessage, generateUser, getOrCreateChannelApi, getTestClientWithUser, mockTranslationContext, useMockedApis, } from '../../../mock-builders'; +import { WithAudioPlayback } from '../../AudioPlayback'; let chatClient; let channel; const user = generateUser({ id: 'userId', name: 'username' }); +jest.spyOn(window.HTMLMediaElement.prototype, 'play').mockImplementation(); jest.spyOn(window.HTMLMediaElement.prototype, 'pause').mockImplementation(); +jest.spyOn(window.HTMLMediaElement.prototype, 'load').mockImplementation(); const addNotificationSpy = jest.fn(); const channelActionContext = { addNotification: addNotificationSpy }; @@ -41,7 +49,9 @@ const renderCard = ({ cardProps, chatContext, theRenderer = render }) => - + + + @@ -286,6 +296,103 @@ describe('Card', () => { }, chatContext: { chatClient }, }); - expect(getByText('theverge.com')).toBeInTheDocument(); + await waitFor(() => 
{ + expect(getByText('theverge.com')).toBeInTheDocument(); + }); + }); + + it('differentiates between in thread and in channel audio player', async () => { + const createdAudios = []; //HTMLAudioElement[] + const RealAudio = window.Audio; + const spy = jest.spyOn(window, 'Audio').mockImplementation(function AudioMock( + ...args + ) { + const el = new RealAudio(...args); + createdAudios.push(el); + return el; + }); + + const audioAttachment = { + ...dummyAttachment, + image_url: undefined, + thumb_url: undefined, + title: 'test', + type: 'audio', + }; + + const message = generateMessage(); + + render( + + + + + + + + + + + + , + ); + const playButtons = screen.queryAllByTestId('play-audio'); + expect(playButtons.length).toBe(2); + await Promise.all( + playButtons.map(async (button) => { + await fireEvent.click(button); + }), + ); + await waitFor(() => { + expect(createdAudios).toHaveLength(2); + }); + spy.mockRestore(); + }); + + it('keeps a single copy of audio player for the same requester', async () => { + const createdAudios = []; //HTMLAudioElement[] + const RealAudio = window.Audio; + const spy = jest.spyOn(window, 'Audio').mockImplementation(function AudioMock( + ...args + ) { + const el = new RealAudio(...args); + createdAudios.push(el); + return el; + }); + + const audioAttachment = { + ...dummyAttachment, + image_url: undefined, + thumb_url: undefined, + title: 'test', + type: 'audio', + }; + + const message = generateMessage(); + render( + + + + + + + + + + + + , + ); + const playButtons = screen.queryAllByTestId('play-audio'); + expect(playButtons.length).toBe(2); + await Promise.all( + playButtons.map(async (button) => { + await fireEvent.click(button); + }), + ); + await waitFor(() => { + expect(createdAudios).toHaveLength(1); + }); + spy.mockRestore(); }); }); diff --git a/src/components/Attachment/__tests__/VoiceRecording.test.js b/src/components/Attachment/__tests__/VoiceRecording.test.js index fb145257f5..180f50a96c 100644 --- 
a/src/components/Attachment/__tests__/VoiceRecording.test.js +++ b/src/components/Attachment/__tests__/VoiceRecording.test.js @@ -2,10 +2,14 @@ import React from 'react'; import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'; import '@testing-library/jest-dom'; -import { generateVoiceRecordingAttachment } from '../../../mock-builders'; +import { + generateMessage, + generateVoiceRecordingAttachment, +} from '../../../mock-builders'; import { VoiceRecording, VoiceRecordingPlayer } from '../VoiceRecording'; -import { ChannelActionProvider } from '../../../context'; +import { ChatProvider, MessageProvider } from '../../../context'; import { ResizeObserverMock } from '../../../mock-builders/browser'; +import { WithAudioPlayback } from '../../AudioPlayback'; const AUDIO_RECORDING_PLAYER_TEST_ID = 'voice-recording-widget'; const QUOTED_AUDIO_RECORDING_TEST_ID = 'quoted-voice-recording-widget'; @@ -29,12 +33,13 @@ const clickPlay = async () => { jest.spyOn(window.HTMLMediaElement.prototype, 'play').mockImplementation(() => {}); jest.spyOn(window.HTMLMediaElement.prototype, 'pause').mockImplementation(() => {}); -const addNotificationSpy = jest.fn(); const renderComponent = (props, VoiceRecordingComponent = VoiceRecording) => render( - - - , + + + + + , ); describe('VoiceRecording', () => { @@ -48,6 +53,58 @@ describe('VoiceRecording', () => { expect(queryByTestId(QUOTED_AUDIO_RECORDING_TEST_ID)).toBeInTheDocument(); expect(queryByTestId(AUDIO_RECORDING_PLAYER_TEST_ID)).not.toBeInTheDocument(); }); + it('differentiates between in thread and in channel audio player', () => { + const createdAudios = []; //HTMLAudioElement[] + const RealAudio = window.Audio; + const spy = jest.spyOn(window, 'Audio').mockImplementation(function AudioMock( + ...args + ) { + const el = new RealAudio(...args); + createdAudios.push(el); + return el; + }); + const message = generateMessage(); + render( + + + + + + + + , + ); + expect(createdAudios).toHaveLength(2); + 
spy.mockRestore(); + }); + + it('keeps a single copy of audio player for the same requester', () => { + const createdAudios = []; //HTMLAudioElement[] + const RealAudio = window.Audio; + const spy = jest.spyOn(window, 'Audio').mockImplementation(function AudioMock( + ...args + ) { + const el = new RealAudio(...args); + createdAudios.push(el); + return el; + }); + const message = generateMessage(); + render( + + + + + + + + + + + , + ); + expect(createdAudios).toHaveLength(1); + spy.mockRestore(); + }); }); describe('VoiceRecordingPlayer', () => { @@ -56,7 +113,7 @@ describe('VoiceRecordingPlayer', () => { jest.spyOn(window.HTMLMediaElement.prototype, 'play').mockImplementation(() => {}); jest.spyOn(window.HTMLMediaElement.prototype, 'canPlayType').mockReturnValue('maybe'); }); - afterAll(jest.restoreAllMocks); + afterAll(jest.clearAllMocks); it('should not render the component if asset_url is missing', () => { const { container } = renderComponent({ @@ -133,22 +190,36 @@ describe('VoiceRecordingPlayer', () => { }); it('should show the correct progress', async () => { - const { container } = renderComponent({ attachment }); + const createdAudios = []; // HTMLAudioElement[] + + const RealAudio = window.Audio; + const constructorSpy = jest + .spyOn(window, 'Audio') + .mockImplementation(function AudioMock(...args) { + const el = new RealAudio(...args); + createdAudios.push(el); + return el; + }); + renderComponent({ attachment }); + await clickPlay(); jest .spyOn(HTMLAudioElement.prototype, 'duration', 'get') .mockImplementationOnce(() => 100); jest .spyOn(HTMLAudioElement.prototype, 'currentTime', 'get') .mockImplementationOnce(() => 50); - const audioElement = container.querySelector('audio'); - fireEvent.timeUpdate(audioElement); + expect(createdAudios.length).toBe(2); + const actualPlayingAudio = createdAudios[1]; + fireEvent.timeUpdate(actualPlayingAudio); await waitFor(() => { expect(screen.getByTestId('wave-progress-bar-progress-indicator')).toHaveStyle({ left: 
'50%', }); }); + + constructorSpy.mockRestore(); }); }); diff --git a/src/components/Attachment/__tests__/__snapshots__/Card.test.js.snap b/src/components/Attachment/__tests__/__snapshots__/Card.test.js.snap index eedea8c26b..c77a49c329 100644 --- a/src/components/Attachment/__tests__/__snapshots__/Card.test.js.snap +++ b/src/components/Attachment/__tests__/__snapshots__/Card.test.js.snap @@ -25,13 +25,6 @@ exports[`Card (1) should render card without caption if attachment type is audio class="str-chat__message-attachment-card-audio-widget" data-testid="card-audio-widget" > -
@@ -242,13 +235,6 @@ exports[`Card (7) should render audio with caption using og_scrape_url and with class="str-chat__message-attachment-card-audio-widget" data-testid="card-audio-widget" > -
@@ -454,13 +440,6 @@ exports[`Card (10) should render audio without title if attachment type is audio class="str-chat__message-attachment-card-audio-widget" data-testid="card-audio-widget" > -
@@ -651,13 +630,6 @@ exports[`Card (13) should render audio without title and with caption using og_s class="str-chat__message-attachment-card-audio-widget" data-testid="card-audio-widget" > -
@@ -835,13 +807,6 @@ exports[`Card (16) should render audio widget with title & text in Card content class="str-chat__message-attachment-card-audio-widget" data-testid="card-audio-widget" > -
@@ -1368,13 +1333,6 @@ exports[`Card (25) should render audio widget with image loaded from thumb_url a class="str-chat__message-attachment-card-audio-widget" data-testid="card-audio-widget" > -
diff --git a/src/components/Attachment/hooks/useAudioController.ts b/src/components/Attachment/hooks/useAudioController.ts index 846370a428..ed881b4568 100644 --- a/src/components/Attachment/hooks/useAudioController.ts +++ b/src/components/Attachment/hooks/useAudioController.ts @@ -23,6 +23,7 @@ type AudioControllerParams = { playbackRates?: number[]; }; +/** @deprecated use useAudioPlayer instead */ export const useAudioController = ({ durationSeconds, mimeType, diff --git a/src/components/Attachment/index.ts b/src/components/Attachment/index.ts index 377cbe2468..70509c8a3a 100644 --- a/src/components/Attachment/index.ts +++ b/src/components/Attachment/index.ts @@ -9,5 +9,6 @@ export * from './FileAttachment'; export * from './Geolocation'; export * from './UnsupportedAttachment'; export * from './utils'; +export * from './VoiceRecording'; export { useAudioController } from './hooks/useAudioController'; export * from '../Location/hooks/useLiveLocationSharingManager'; diff --git a/src/components/AudioPlayback/AudioPlayer.ts b/src/components/AudioPlayback/AudioPlayer.ts new file mode 100644 index 0000000000..94ea3a119c --- /dev/null +++ b/src/components/AudioPlayback/AudioPlayer.ts @@ -0,0 +1,579 @@ +import { StateStore } from 'stream-chat'; +import throttle from 'lodash.throttle'; +import type { AudioPlayerPlugin } from './plugins'; +import type { AudioPlayerPool } from './AudioPlayerPool'; + +export type AudioPlayerErrorCode = + | 'failed-to-start' + | 'not-playable' + | 'seek-not-supported' + | (string & {}); + +export type RegisterAudioPlayerErrorParams = { + error?: Error; + errCode?: AudioPlayerErrorCode; +}; + +export type AudioPlayerDescriptor = { + id: string; + src: string; + /** Audio duration in seconds. 
*/ + durationSeconds?: number; + fileSize?: number | string; + mimeType?: string; + title?: string; + waveformData?: number[]; +}; + +export type AudioPlayerPlayAudioParams = { + currentPlaybackRate?: number; + playbackRates?: number[]; +}; + +export type AudioPlayerState = { + /** Signals whether the browser can play the record. */ + canPlayRecord: boolean; + /** Current playback speed. Initiated with the first item of the playbackRates array. */ + currentPlaybackRate: number; + /** The audio element ref */ + elementRef: HTMLAudioElement | null; + /** Signals whether the playback is in progress. */ + isPlaying: boolean; + /** Keeps the latest playback error reference. */ + playbackError: Error | null; + /** An array of fractional numeric values of playback speed to override the defaults (1.0, 1.5, 2.0) */ + playbackRates: number[]; + /** Playback progress expressed in percent. */ + progressPercent: number; + /** Playback progress expressed in seconds. */ + secondsElapsed: number; +}; + +export type AudioPlayerOptions = AudioPlayerDescriptor & { + /** An array of fractional numeric values of playback speed to override the defaults (1.0, 1.5, 2.0) */ + playbackRates?: number[]; + plugins?: AudioPlayerPlugin[]; + pool: AudioPlayerPool; +}; + +const DEFAULT_PLAYBACK_RATES = [1.0, 1.5, 2.0]; + +const isSeekable = (audioElement: HTMLAudioElement) => + !(audioElement.duration === Infinity || isNaN(audioElement.duration)); + +export const defaultRegisterAudioPlayerError = ({ + error, +}: RegisterAudioPlayerErrorParams = {}) => { + if (!error) return; + console.error('[AUDIO PLAYER]', error); +}; + +export const elementIsPlaying = (audioElement: HTMLAudioElement | null) => + audioElement && !(audioElement.paused || audioElement.ended); + +export type SeekFn = (params: { + clientX: number; + currentTarget: HTMLDivElement; +}) => Promise; + +export class AudioPlayer { + state: StateStore; + /** The audio MIME type that is checked before the audio is played. 
If the type is not supported the controller registers error in playbackError. */ + private _data: AudioPlayerDescriptor; + private _plugins = new Map(); + private playTimeout: ReturnType | undefined = undefined; + private unsubscribeEventListeners: (() => void) | null = null; + private _pool: AudioPlayerPool; + private _disposed = false; + private _pendingLoadedMeta?: { element: HTMLAudioElement; onLoaded: () => void }; + private _elementIsReadyPromise?: Promise; + private _restoringPosition = false; + private _removalTimeout: ReturnType | undefined = undefined; + + constructor({ + durationSeconds, + fileSize, + id, + mimeType, + playbackRates: customPlaybackRates, + plugins, + pool, + src, + title, + waveformData, + }: AudioPlayerOptions) { + this._data = { + durationSeconds, + fileSize, + id, + mimeType, + src, + title, + waveformData, + }; + this._pool = pool; + this.setPlugins(() => plugins ?? []); + + const playbackRates = customPlaybackRates?.length + ? customPlaybackRates + : DEFAULT_PLAYBACK_RATES; + + // do not create element here; only evaluate canPlayRecord cheaply + const canPlayRecord = mimeType ? 
!!new Audio().canPlayType(mimeType) : true; + + this.state = new StateStore({ + canPlayRecord, + currentPlaybackRate: playbackRates[0], + elementRef: null, + isPlaying: false, + playbackError: null, + playbackRates, + progressPercent: 0, + secondsElapsed: 0, + }); + + this.plugins.forEach((p) => p.onInit?.({ player: this })); + } + + private get plugins(): AudioPlayerPlugin[] { + return Array.from(this._plugins.values()); + } + + get canPlayRecord() { + return this.state.getLatestValue().canPlayRecord; + } + + get elementRef() { + return this.state.getLatestValue().elementRef; + } + + get isPlaying(): boolean { + return this.state.getLatestValue().isPlaying; + } + + get currentPlaybackRate() { + return this.state.getLatestValue().currentPlaybackRate; + } + + get playbackRates() { + return this.state.getLatestValue().playbackRates; + } + + get durationSeconds() { + return this._data.durationSeconds; + } + + get fileSize() { + return this._data.fileSize; + } + + get id() { + return this._data.id; + } + + get src() { + return this._data.src; + } + + get mimeType() { + return this._data.mimeType; + } + + get title() { + return this._data.title; + } + + get waveformData() { + return this._data.waveformData; + } + + get secondsElapsed() { + return this.state.getLatestValue().secondsElapsed; + } + + get progressPercent() { + return this.state.getLatestValue().progressPercent; + } + + get disposed() { + return this._disposed; + } + + private ensureElementRef(): HTMLAudioElement { + if (this._disposed) { + throw new Error('AudioPlayer is disposed'); + } + if (!this.elementRef) { + const el = this._pool.acquireElement({ + ownerId: this.id, + src: this.src, + }); + this.setRef(el); + } + return this.elementRef as HTMLAudioElement; + } + private setPlaybackStartSafetyTimeout = () => { + clearTimeout(this.playTimeout); + this.playTimeout = setTimeout(() => { + if (!this.elementRef) return; + try { + this.elementRef.pause(); + this.state.partialNext({ isPlaying: false }); + } 
catch (e) { + this.registerError({ errCode: 'failed-to-start' }); + } + }, 2000); + }; + + private clearPlaybackStartSafetyTimeout = () => { + if (!this.elementRef) return; + clearTimeout(this.playTimeout); + this.playTimeout = undefined; + }; + + private clearPendingLoadedMeta = () => { + const pending = this._pendingLoadedMeta; + if (pending?.element && pending.onLoaded) { + pending.element.removeEventListener('loadedmetadata', pending.onLoaded); + } + this._pendingLoadedMeta = undefined; + }; + + private restoreSavedPosition = (elementRef: HTMLAudioElement) => { + const saved = this.secondsElapsed; + if (!saved || saved <= 0) return; + const apply = () => { + const duration = elementRef.duration; + const clamped = + typeof duration === 'number' && !isNaN(duration) && isFinite(duration) + ? Math.min(saved, duration) + : saved; + try { + if (elementRef.currentTime === clamped) return; + elementRef.currentTime = clamped; + // Preempt UI with restored position to avoid flicker + this.setSecondsElapsed(clamped); + } catch { + // ignore + } + }; + // No information is available about the media resource. 
+ if (elementRef.readyState < 1) { + this.clearPendingLoadedMeta(); + this._restoringPosition = true; + const onLoaded = () => { + // Ensure this callback still belongs to the same pending registration and same element + if (this._pendingLoadedMeta?.onLoaded !== onLoaded) return; + this._pendingLoadedMeta = undefined; + if (this.elementRef !== elementRef) { + this._restoringPosition = false; + return; + } + apply(); + this._restoringPosition = false; + }; + elementRef.addEventListener('loadedmetadata', onLoaded, { once: true }); + this._pendingLoadedMeta = { element: elementRef, onLoaded }; + } else { + this._restoringPosition = true; + apply(); + this._restoringPosition = false; + } + }; + + setDescriptor(descriptor: AudioPlayerDescriptor) { + this._data = { ...this._data, ...descriptor }; + if (descriptor.src !== this.src && this.elementRef) { + this.elementRef.src = descriptor.src; + } + } + + private releaseElement({ resetState }: { resetState: boolean }) { + this.clearPendingLoadedMeta(); + this._restoringPosition = false; + if (resetState) { + this.stop(); + } else { + // Ensure isPlaying reflects reality, but keep progress/seconds + this.state.partialNext({ isPlaying: false }); + if (this.elementRef) { + try { + this.elementRef.pause(); + } catch { + // ignore + } + } + } + if (this.elementRef) { + this._pool.releaseElement(this.id); + this.setRef(null); + } + } + + private elementIsReady = (): Promise => { + if (this._elementIsReadyPromise) return this._elementIsReadyPromise; + + this._elementIsReadyPromise = new Promise((resolve) => { + if (!this.elementRef) return resolve(false); + const element = this.elementRef; + const handleLoaded = () => { + element.removeEventListener('loadedmetadata', handleLoaded); + resolve(element.readyState > 0); + }; + element.addEventListener('loadedmetadata', handleLoaded); + }); + + return this._elementIsReadyPromise; + }; + + private setRef = (elementRef: HTMLAudioElement | null) => { + if 
(elementIsPlaying(this.elementRef)) { + // preserve state during swap + this.releaseElement({ resetState: false }); + } + this.clearPendingLoadedMeta(); + this._restoringPosition = false; + this._elementIsReadyPromise = undefined; + this.state.partialNext({ elementRef }); + // When a new element is attached, make sure listeners are wired to it + if (elementRef) { + this.registerSubscriptions(); + } + }; + + setSecondsElapsed = (secondsElapsed: number) => { + this.state.partialNext({ + progressPercent: + this.elementRef && secondsElapsed + ? (secondsElapsed / this.elementRef.duration) * 100 + : 0, + secondsElapsed, + }); + }; + + setPlugins(setter: (currentPlugins: AudioPlayerPlugin[]) => AudioPlayerPlugin[]) { + this._plugins = setter(this.plugins).reduce((acc, plugin) => { + if (plugin.id) { + acc.set(plugin.id, plugin); + } + return acc; + }, new Map()); + } + + canPlayMimeType = (mimeType: string) => { + if (!mimeType) return false; + if (this.elementRef) return !!this.elementRef.canPlayType(mimeType); + return !!new Audio().canPlayType(mimeType); + }; + + play = async (params?: AudioPlayerPlayAudioParams) => { + if (this._disposed) return; + const elementRef = this.ensureElementRef(); + if (elementIsPlaying(this.elementRef)) { + if (this.isPlaying) return; + this.state.partialNext({ isPlaying: true }); + return; + } + + const { currentPlaybackRate, playbackRates } = { + currentPlaybackRate: this.currentPlaybackRate, + playbackRates: this.playbackRates, + ...params, + }; + + if (!this.canPlayRecord) { + this.registerError({ errCode: 'not-playable' }); + return; + } + + // Restore last known position for this player before attempting to play + this.restoreSavedPosition(elementRef); + + elementRef.playbackRate = currentPlaybackRate ?? 
this.currentPlaybackRate; + + this.setPlaybackStartSafetyTimeout(); + + try { + await elementRef.play(); + this.state.partialNext({ + currentPlaybackRate, + isPlaying: true, + playbackRates, + }); + this._pool.setActiveAudioPlayer(this); + } catch (e) { + this.registerError({ error: e as Error }); + this.state.partialNext({ isPlaying: false }); + } finally { + this.clearPlaybackStartSafetyTimeout(); + } + }; + + pause = () => { + if (!elementIsPlaying(this.elementRef)) return; + this.clearPlaybackStartSafetyTimeout(); + + // existence of the element already checked by elementIsPlaying + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + this.elementRef!.pause(); + this.state.partialNext({ isPlaying: false }); + }; + + stop = () => { + this.pause(); + this.setSecondsElapsed(0); + if (this.elementRef) this.elementRef.currentTime = 0; + }; + + togglePlay = async () => (this.isPlaying ? this.pause() : await this.play()); + + increasePlaybackRate = () => { + if (!this.elementRef) return; + let currentPlaybackRateIndex = this.state + .getLatestValue() + .playbackRates.findIndex((rate) => rate === this.currentPlaybackRate); + if (currentPlaybackRateIndex === -1) { + currentPlaybackRateIndex = 0; + } + const nextIndex = + currentPlaybackRateIndex === this.playbackRates.length - 1 + ? 
0 + : currentPlaybackRateIndex + 1; + const currentPlaybackRate = this.playbackRates[nextIndex]; + this.state.partialNext({ currentPlaybackRate }); + this.elementRef.playbackRate = currentPlaybackRate; + }; + + seek = throttle(async ({ clientX, currentTarget }) => { + let element = this.elementRef; + if (!this.elementRef) { + element = this.ensureElementRef(); + const isReady = await this.elementIsReady(); + if (!isReady) return; + } + if (!currentTarget || !element) return; + if (!isSeekable(element)) { + this.registerError({ errCode: 'seek-not-supported' }); + return; + } + + const { width, x } = currentTarget.getBoundingClientRect(); + + const ratio = (clientX - x) / width; + if (ratio > 1 || ratio < 0) return; + const currentTime = ratio * element.duration; + this.setSecondsElapsed(currentTime); + element.currentTime = currentTime; + }, 16); + + registerError = (params: RegisterAudioPlayerErrorParams) => { + defaultRegisterAudioPlayerError(params); + this.plugins.forEach(({ onError }) => onError?.({ player: this, ...params })); + }; + + /** + * Removes the audio element reference, event listeners and audio player from the player pool. + * Helpful when only a single AudioPlayer instance is to be removed from the AudioPlayerPool. 
+ */ + requestRemoval = () => { + this._disposed = true; + this.cancelScheduledRemoval(); + this.clearPendingLoadedMeta(); + this._restoringPosition = false; + this.releaseElement({ resetState: true }); + this.unsubscribeEventListeners?.(); + this.unsubscribeEventListeners = null; + this.plugins.forEach(({ onRemove }) => onRemove?.({ player: this })); + this._pool.deregister(this.id); + }; + + cancelScheduledRemoval = () => { + clearTimeout(this._removalTimeout); + this._removalTimeout = undefined; + }; + + scheduleRemoval = (ms: number = 0) => { + this.cancelScheduledRemoval(); + this._removalTimeout = setTimeout(() => { + if (this.disposed) return; + this.requestRemoval(); + }, ms); + }; + + /** + * Releases only the underlying element back to the pool without disposing the player instance. + * Used by the pool to hand off the shared element in single-playback mode. + */ + releaseElementForHandoff = () => { + if (!this.elementRef) return; + this.releaseElement({ resetState: false }); + this.unsubscribeEventListeners?.(); + this.unsubscribeEventListeners = null; + }; + + registerSubscriptions = () => { + this.unsubscribeEventListeners?.(); + + const audioElement = this.elementRef; + if (!audioElement) return; + + const handleEnded = () => { + this.state.partialNext({ + isPlaying: false, + secondsElapsed: audioElement?.duration ?? this.durationSeconds ?? 
0, + }); + }; + + const handleError = (e: HTMLMediaElementEventMap['error']) => { + // if fired probably is one of these (e.srcElement.error.code) + // 1 = MEDIA_ERR_ABORTED (fetch aborted by user/JS) + // 2 = MEDIA_ERR_NETWORK (network failed while fetching) + // 3 = MEDIA_ERR_DECODE (data fetched but couldn’t decode) + // 4 = MEDIA_ERR_SRC_NOT_SUPPORTED (no resource supported / bad type) + // reported during the mount so only logging to the console + const audio = e.currentTarget as HTMLAudioElement | null; + const state: Partial = { isPlaying: false }; + + if (!audio?.error?.code) { + this.state.partialNext(state); + return; + } + + if (audio.error.code === 4) { + state.canPlayRecord = false; + this.state.partialNext(state); + } + + const errorMsg = [ + undefined, + 'MEDIA_ERR_ABORTED: fetch aborted by user', + 'MEDIA_ERR_NETWORK: network failed while fetching', + 'MEDIA_ERR_DECODE: audio fetched but couldn’t decode', + 'MEDIA_ERR_SRC_NOT_SUPPORTED: source not supported', + ][audio?.error?.code]; + if (!errorMsg) return; + + defaultRegisterAudioPlayerError({ error: new Error(errorMsg + ` (${audio.src})`) }); + }; + + const handleTimeupdate = () => { + const t = audioElement?.currentTime ?? 
0; + // Ignore spurious zero during restore/handoff to avoid UI flicker + if (this._restoringPosition && t === 0) return; + // Also avoid regressing UI to zero if we already have non-zero progress and we're not playing + if (!this.isPlaying && t === 0 && this.secondsElapsed > 0) return; + this.setSecondsElapsed(t); + }; + + audioElement.addEventListener('ended', handleEnded); + audioElement.addEventListener('error', handleError); + audioElement.addEventListener('timeupdate', handleTimeupdate); + + this.unsubscribeEventListeners = () => { + audioElement.pause(); + audioElement.removeEventListener('ended', handleEnded); + audioElement.removeEventListener('error', handleError); + audioElement.removeEventListener('timeupdate', handleTimeupdate); + }; + }; +} diff --git a/src/components/AudioPlayback/AudioPlayerPool.ts b/src/components/AudioPlayback/AudioPlayerPool.ts new file mode 100644 index 0000000000..f5936daa48 --- /dev/null +++ b/src/components/AudioPlayback/AudioPlayerPool.ts @@ -0,0 +1,169 @@ +import { AudioPlayer, type AudioPlayerOptions } from './AudioPlayer'; +import { StateStore } from 'stream-chat'; + +export type AudioPlayerPoolState = { + activeAudioPlayer: AudioPlayer | null; +}; + +export class AudioPlayerPool { + state: StateStore = new StateStore({ + activeAudioPlayer: null, + }); + private pool = new Map(); + private audios = new Map(); + private sharedAudio: HTMLAudioElement | null = null; + private sharedOwnerId: string | null = null; + private readonly allowConcurrentPlayback: boolean; + + constructor(config?: { allowConcurrentPlayback?: boolean }) { + this.allowConcurrentPlayback = !!config?.allowConcurrentPlayback; + } + + get players() { + return Array.from(this.pool.values()); + } + + get activeAudioPlayer() { + return this.state.getLatestValue().activeAudioPlayer; + } + + getOrAdd = (params: Omit) => { + const { playbackRates, plugins, ...descriptor } = params; + let player = this.pool.get(params.id); + if (player) { + if (!player.disposed) 
{ + player.setDescriptor(descriptor); + return player; + } + this.deregister(params.id); + } + player = new AudioPlayer({ + playbackRates, + plugins, + ...descriptor, + pool: this, + }); + this.pool.set(params.id, player); + return player; + }; + + /** + * In case of allowConcurrentPlayback enabled, a new Audio is created and assigned to the given audioPlayer owner. + * In case of disabled concurrency, the shared audio ownership is transferred to the new owner loading the owner's + * source. + * + * @param ownerId + * @param src + */ + acquireElement = ({ ownerId, src }: { ownerId: string; src: string }) => { + if (!this.allowConcurrentPlayback) { + // Single shared element mode + if (!this.sharedAudio) { + this.sharedAudio = new Audio(); + } + // Handoff from previous owner if different + if (this.sharedOwnerId && this.sharedOwnerId !== ownerId) { + const previous = this.pool.get(this.sharedOwnerId); + // Ask previous to pause and drop ref, but keep player in pool + previous?.pause(); + previous?.releaseElementForHandoff(); + } + this.sharedOwnerId = ownerId; + if (this.sharedAudio.src !== src) { + // setting src starts loading; avoid explicit load() to prevent currentTime reset flicker + this.sharedAudio.src = src; + } + return this.sharedAudio; + } + + // Concurrent-per-owner mode + let audio = this.audios.get(ownerId); + if (!audio) { + audio = new Audio(); + this.audios.set(ownerId, audio); + } + if (audio.src !== src) { + // setting src starts loading; avoid explicit load() here as well + audio.src = src; + } + return audio; + }; + + /** + * Removes the given audio players ownership of the shared audio element (in case of concurrent playback is disabled) + * and pauses the reproduction of the audio. + * In case of concurrent playback mode (allowConcurrentPlayback enabled), the audio is paused, + * its source cleared and removed from the audios pool readied for garbage collection. 
+ * + * @param ownerId + */ + releaseElement = (ownerId: string) => { + if (!this.allowConcurrentPlayback) { + if (this.sharedOwnerId !== ownerId) return; + const el = this.sharedAudio; + if (el) { + try { + el.pause(); + } catch { + // ignore + } + el.removeAttribute('src'); + el.load(); + } + // Keep shared element instance for reuse + this.sharedOwnerId = null; + return; + } + + const el = this.audios.get(ownerId); + if (!el) return; + try { + el.pause(); + } catch { + // ignore + } + el.removeAttribute('src'); + el.load(); + this.audios.delete(ownerId); + }; + + /** Sets active audio player when allowConcurrentPlayback is disabled */ + setActiveAudioPlayer = (activeAudioPlayer: AudioPlayer | null) => { + if (this.allowConcurrentPlayback) return; + this.state.partialNext({ activeAudioPlayer }); + }; + + /** Removes the AudioPlayer instance from the pool of players */ + deregister(id: string) { + if (this.pool.has(id)) { + this.pool.delete(id); + } + if (this.activeAudioPlayer?.id === id) { + this.setActiveAudioPlayer(null); + } + } + + /** Performs all the necessary cleanup actions and removes the player from the pool */ + remove = (id: string) => { + const player = this.pool.get(id); + if (!player) return; + player.requestRemoval(); + }; + + /** Removes and cleans up all the players from the pool */ + clear = () => { + this.players.forEach((player) => { + this.remove(player.id); + }); + }; + + registerSubscriptions = () => { + // Only register subscriptions for players that have an attached element. + // Avoid creating elements or cross-wiring listeners on the shared element in single-playback mode. 
+ this.players.forEach((p) => { + if (p.elementRef) { + p.registerSubscriptions(); + } + }); + }; +} diff --git a/src/components/AudioPlayback/WithAudioPlayback.tsx b/src/components/AudioPlayback/WithAudioPlayback.tsx new file mode 100644 index 0000000000..bd6904de8d --- /dev/null +++ b/src/components/AudioPlayback/WithAudioPlayback.tsx @@ -0,0 +1,113 @@ +import React, { useContext, useState } from 'react'; +import { useEffect } from 'react'; +import type { AudioPlayerOptions } from './AudioPlayer'; +import type { AudioPlayerPoolState } from './AudioPlayerPool'; +import { AudioPlayerPool } from './AudioPlayerPool'; +import { audioPlayerNotificationsPluginFactory } from './plugins/AudioPlayerNotificationsPlugin'; +import { useChatContext, useTranslationContext } from '../../context'; +import { useStateStore } from '../../store'; + +export type WithAudioPlaybackProps = { + children?: React.ReactNode; + allowConcurrentPlayback?: boolean; +}; + +const AudioPlayerContext = React.createContext<{ audioPlayers: AudioPlayerPool | null }>({ + audioPlayers: null, +}); + +export const WithAudioPlayback = ({ + allowConcurrentPlayback, + children, +}: WithAudioPlaybackProps) => { + const [audioPlayers] = useState(() => new AudioPlayerPool({ allowConcurrentPlayback })); + + useEffect( + () => () => { + audioPlayers.clear(); + }, + [audioPlayers], + ); + + return ( + + {children} + + ); +}; + +export type UseAudioPlayerProps = { + /** + * Identifier of the entity that requested the audio playback, e.g. message ID. + * Asset to specific audio player is a many-to-many relationship + * - one URL can be associated with multiple UI elements, + * - one UI element can display multiple audio sources. + * Therefore, the AudioPlayer ID is a combination of request:src. + * + * The requester string can take into consideration whether there are multiple instances of + * the same URL requested by the same requester (message has multiple attachments with the same asset URL). 
+ * In reality the fact that one message has multiple attachments with the same asset URL + * could be considered a bad practice or a bug. + */ + requester?: string; +} & Partial>; + +const makeAudioPlayerId = ({ requester, src }: { src: string; requester?: string }) => + `${requester ?? 'requester-unknown'}:${src}`; + +export const useAudioPlayer = ({ + durationSeconds, + fileSize, + mimeType, + playbackRates, + plugins, + requester = '', + src, + title, + waveformData, +}: UseAudioPlayerProps) => { + const { client } = useChatContext(); + const { t } = useTranslationContext(); + const { audioPlayers } = useContext(AudioPlayerContext); + + const audioPlayer = + src && audioPlayers + ? audioPlayers.getOrAdd({ + durationSeconds, + fileSize, + id: makeAudioPlayerId({ requester, src }), + mimeType, + playbackRates, + plugins, + src, + title, + waveformData, + }) + : undefined; + + useEffect(() => { + if (!audioPlayer) return; + /** + * Avoid having to pass client and translation function to AudioPlayer instances + * and instead provide plugin that takes care of translated notifications. + */ + const notificationsPlugin = audioPlayerNotificationsPluginFactory({ client, t }); + audioPlayer.setPlugins((currentPlugins) => [ + ...currentPlugins.filter((plugin) => plugin.id !== notificationsPlugin.id), + notificationsPlugin, + ]); + }, [audioPlayer, client, t]); + + return audioPlayer; +}; + +const activeAudioPlayerSelector = ({ activeAudioPlayer }: AudioPlayerPoolState) => ({ + activeAudioPlayer, +}); + +export const useActiveAudioPlayer = () => { + const { audioPlayers } = useContext(AudioPlayerContext); + const { activeAudioPlayer } = + useStateStore(audioPlayers?.state, activeAudioPlayerSelector) ?? 
{};
+  return activeAudioPlayer;
+};
diff --git a/src/components/AudioPlayback/__tests__/AudioPlayer.test.js b/src/components/AudioPlayback/__tests__/AudioPlayer.test.js
new file mode 100644
index 0000000000..0c180027c8
--- /dev/null
+++ b/src/components/AudioPlayback/__tests__/AudioPlayer.test.js
@@ -0,0 +1,387 @@
+import { AudioPlayer, elementIsPlaying } from '../AudioPlayer';
+
+// ---- Keep throttle synchronous so seek assertions are deterministic ----
+jest.mock('lodash.throttle', () => (fn) => fn);
+
+// ---- Stable console noise filter (optional) ----
+const originalConsoleError = console.error;
+beforeAll(() => {
+  jest.spyOn(console, 'error').mockImplementation((...args) => {
+    const msg = String(args[0]?.message ?? args[0] ?? '');
+    if (/Not implemented/i.test(msg)) return;
+    originalConsoleError(...args);
+  });
+});
+
+// ---- Helpers ----
+const SRC = 'https://example.com/a.mp3';
+const MIME = 'audio/mpeg';
+
+const createdAudios = [];
+const makeErrorPlugin = () => {
+  const onError = jest.fn();
+  return {
+    onError,
+    plugin: { id: 'TestErrorPlugin', onError },
+  };
+};
+
+const makePlayer = (overrides = {}) => {
+  const pool = {
+    acquireElement: ({ src }) => new Audio(src),
+    deregister: () => {},
+    releaseElement: () => {},
+    setActiveAudioPlayer: jest.fn(),
+  };
+  return new AudioPlayer({
+    durationSeconds: 100,
+    id: 'id-1',
+    mimeType: MIME,
+    pool,
+    src: SRC,
+    ...overrides,
+  });
+};
+
+// ---- Tests ----
+describe('AudioPlayer', () => {
+  beforeEach(() => {
+    const RealAudio = window.Audio;
+    jest.spyOn(window, 'Audio').mockImplementation(function AudioMock(...args) {
+      const el = new RealAudio(...args);
+      createdAudios.push(el);
+      return el;
+    });
+
+    // Stub core media methods
+    jest.spyOn(HTMLMediaElement.prototype, 'load').mockImplementation(() => ({}));
+    jest
+      .spyOn(HTMLMediaElement.prototype, 'play')
+      .mockImplementation(() => Promise.resolve());
+    jest.spyOn(HTMLMediaElement.prototype, 'pause').mockImplementation(() => ({}));
+    // 
Default media flags + jest.spyOn(HTMLMediaElement.prototype, 'paused', 'get').mockReturnValue(true); + jest.spyOn(HTMLMediaElement.prototype, 'ended', 'get').mockReturnValue(false); + jest.spyOn(HTMLMediaElement.prototype, 'duration', 'get').mockReturnValue(100); + }); + + afterEach(() => { + jest.restoreAllMocks(); + createdAudios.length = 0; + }); + + it('constructor sets initial state (canPlayRecord & playbackRates)', () => { + jest.spyOn(HTMLMediaElement.prototype, 'canPlayType').mockReturnValue('maybe'); + + const player = makePlayer({ playbackRates: [1, 1.25, 1.5] }); + + // State comes from the real StateStore + expect(player.isPlaying).toBe(false); + expect(player.canPlayRecord).toBe(true); + expect(player.currentPlaybackRate).toBe(1); + expect(player.playbackRates).toEqual([1, 1.25, 1.5]); + expect(player.src).toBe(SRC); + expect(player.mimeType).toBe(MIME); + expect(player.durationSeconds).toBe(100); + }); + + it('constructor marks not playable when mimeType unsupported', () => { + jest.spyOn(HTMLMediaElement.prototype, 'canPlayType').mockReturnValue(''); + expect(makePlayer({ mimeType: 'audio/unknown' }).canPlayRecord).toBe(false); + }); + + it('canPlayMimeType delegates to elementRef.canPlayType', () => { + const player = makePlayer(); + // attach an element so canPlayMimeType uses elementRef + player.ensureElementRef(); + const spy = jest.spyOn(player.elementRef, 'canPlayType').mockReturnValue('probably'); + expect(player.canPlayMimeType('audio/ogg')).toBe(true); + expect(spy).toHaveBeenCalledWith('audio/ogg'); + }); + + it('play() success updates isPlaying and playbackRate', async () => { + jest.spyOn(HTMLMediaElement.prototype, 'canPlayType').mockReturnValue('maybe'); + const player = makePlayer({ playbackRates: [1, 1.5, 2] }); + + await player.play({ currentPlaybackRate: 1.5 }); + + expect(player.isPlaying).toBe(true); + expect(player.currentPlaybackRate).toBe(1.5); + expect(player.elementRef.playbackRate).toBe(1.5); + // eslint-disable-next-line 
no-underscore-dangle + expect(player._pool.setActiveAudioPlayer).toHaveBeenCalledWith(player); + }); + + it('play() early-return path when element is already playing', async () => { + const player = makePlayer(); + + // Make element look like it's already playing + jest.spyOn(HTMLMediaElement.prototype, 'paused', 'get').mockReturnValue(false); + + // attach and spy on the concrete element + player.ensureElementRef(); + const playSpy = jest.spyOn(player.elementRef, 'play'); + + await player.play(); + expect(player.isPlaying).toBe(true); + expect(playSpy).not.toHaveBeenCalled(); + }); + + it('play() when not playable triggers registerError {errCode:not-playable}', async () => { + jest.spyOn(HTMLMediaElement.prototype, 'canPlayType').mockReturnValue(''); + const { onError, plugin } = makeErrorPlugin(); + const player = makePlayer({ mimeType: 'audio/zzz', plugins: [plugin] }); + await player.play(); + expect(onError).toHaveBeenCalledWith( + expect.objectContaining({ errCode: 'not-playable', player }), + ); + expect(player.isPlaying).toBe(false); + }); + + it('play() when element.play rejects triggers registerError(error) and isPlaying=false', async () => { + const { onError, plugin } = makeErrorPlugin(); + const player = makePlayer({ plugins: [plugin] }); + player.ensureElementRef(); + jest.spyOn(player.elementRef, 'play').mockRejectedValueOnce(new Error('x')); + await player.play(); + expect(onError).toHaveBeenCalledWith( + expect.objectContaining({ errCode: 'not-playable', player }), + ); + expect(player.isPlaying).toBe(false); + }); + + it('safety timeout pauses if play did not resolve within 2000ms', async () => { + jest.spyOn(HTMLMediaElement.prototype, 'canPlayType').mockReturnValue('maybe'); + jest.useFakeTimers({ now: Date.now() }); + const { onError, plugin } = makeErrorPlugin(); + const player = makePlayer({ plugins: [plugin] }); + + let resolve; + // attach and stub play to a pending promise + player.ensureElementRef(); + jest.spyOn(player.elementRef, 
'play').mockImplementation( + () => + new Promise((res) => { + resolve = res; + }), + ); + const pauseSpy = jest.spyOn(player.elementRef, 'pause').mockImplementation(); + + const playPromise = player.play(); + jest.advanceTimersByTime(2000); + resolve(); + expect(pauseSpy).toHaveBeenCalledTimes(1); + expect(player.isPlaying).toBe(false); + expect(onError).not.toHaveBeenCalled(); + + jest.useRealTimers(); + await Promise.resolve(playPromise); + }); + + it('safety timeout registers failed-to-start if pause throws', () => { + jest.spyOn(HTMLMediaElement.prototype, 'canPlayType').mockReturnValue('maybe'); + jest.useFakeTimers({ now: Date.now() }); + const { onError, plugin } = makeErrorPlugin(); + const player = makePlayer({ plugins: [plugin] }); + + let resolve; + player.ensureElementRef(); + jest.spyOn(player.elementRef, 'play').mockImplementation( + () => + new Promise((res) => { + resolve = res; + }), + ); + jest.spyOn(player.elementRef, 'pause').mockImplementation(() => { + throw new Error('nope'); + }); + + player.play(); + jest.advanceTimersByTime(2000); + resolve(); + expect(onError).toHaveBeenCalledWith( + expect.objectContaining({ errCode: 'failed-to-start', player }), + ); + + jest.useRealTimers(); + }); + + it('pause() when element is playing updates state and calls audioElement.pause()', () => { + const player = makePlayer(); + jest.spyOn(HTMLMediaElement.prototype, 'paused', 'get').mockReturnValue(false); + + player.ensureElementRef(); + const pauseSpy = jest.spyOn(player.elementRef, 'pause'); + player.pause(); + expect(pauseSpy).toHaveBeenCalled(); + expect(player.isPlaying).toBe(false); + }); + + it('pause() when element is not playing does nothing', () => { + const player = makePlayer(); + player.ensureElementRef(); + const pauseSpy = jest.spyOn(player.elementRef, 'pause'); + player.pause(); + expect(pauseSpy).not.toHaveBeenCalled(); + }); + + it('stop() pauses, resets secondsElapsed and currentTime', () => { + const player = makePlayer(); + 
player.ensureElementRef(); + const pauseSpy = jest.spyOn(player, 'pause'); + player.state.partialNext({ secondsElapsed: 50 }); + expect(player.secondsElapsed).toBe(50); + + player.stop(); + expect(pauseSpy).toHaveBeenCalled(); + expect(player.secondsElapsed).toBe(0); + expect(player.elementRef.currentTime).toBe(0); + }); + + it('togglePlay delegates to play() / pause()', async () => { + const p = makePlayer(); + + const playSpy = jest.spyOn(p, 'play'); + const pauseSpy = jest.spyOn(p, 'pause'); + + await p.togglePlay(); + expect(playSpy).toHaveBeenCalled(); + + jest.spyOn(HTMLMediaElement.prototype, 'paused', 'get').mockReturnValue(false); + p.state.partialNext({ isPlaying: true }); + await p.togglePlay(); + expect(pauseSpy).toHaveBeenCalled(); + p.state.partialNext({ isPlaying: false }); + }); + + it('increasePlaybackRate cycles through playbackRates', () => { + const p = makePlayer({ playbackRates: [1, 1.25, 1.5] }); + p.play(); + expect(p.currentPlaybackRate).toBe(1); + expect(p.elementRef.playbackRate).toBe(1); + + p.increasePlaybackRate(); + expect(p.currentPlaybackRate).toBe(1.25); + expect(p.elementRef.playbackRate).toBe(1.25); + + p.increasePlaybackRate(); + expect(p.currentPlaybackRate).toBe(1.5); + expect(p.elementRef.playbackRate).toBe(1.5); + + p.increasePlaybackRate(); + expect(p.currentPlaybackRate).toBe(1); + expect(p.elementRef.playbackRate).toBe(1); + }); + + it('seek updates currentTime and progress when seekable', () => { + const p = makePlayer(); + p.play(); + jest.spyOn(p.elementRef, 'duration', 'get').mockReturnValue(120); + + const target = document.createElement('div'); + jest.spyOn(target, 'getBoundingClientRect').mockReturnValue({ width: 100, x: 0 }); + + p.seek({ clientX: 50, currentTarget: target }); + + expect(p.elementRef.currentTime).toBeCloseTo(60, 5); + expect(p.state.getLatestValue().progressPercent).toBeCloseTo(50, 5); + expect(p.state.getLatestValue().secondsElapsed).toBeCloseTo(60, 5); + }); + + it('seek does nothing if ratio is 
out of 0..1', () => { + const p = makePlayer(); + p.play(); + jest.spyOn(p.elementRef, 'duration', 'get').mockReturnValue(120); + const target = document.createElement('div'); + jest.spyOn(target, 'getBoundingClientRect').mockReturnValue({ width: 100, x: 0 }); + + p.seek({ clientX: 150, currentTarget: target }); // clientX > width + expect(p.state.getLatestValue().secondsElapsed).toBe(0); + }); + + it('seek emits errCode seek-not-supported when not seekable', () => { + const { onError, plugin } = makeErrorPlugin(); + const player = makePlayer({ plugins: [plugin] }); + player.ensureElementRef(); + + // not seekable + jest.spyOn(player.elementRef, 'duration', 'get').mockReturnValue(NaN); + + const target = document.createElement('div'); + jest.spyOn(target, 'getBoundingClientRect').mockReturnValue({ width: 100, x: 0 }); + + player.seek({ clientX: 50, currentTarget: target }); + + expect(onError).toHaveBeenCalledWith( + expect.objectContaining({ errCode: 'seek-not-supported', player }), + ); + }); + + it('setSecondsElapsed updates seconds and progressPercent in state', () => { + const p = makePlayer(); + p.play(); + jest.spyOn(p.elementRef, 'duration', 'get').mockReturnValue(200); + + p.setSecondsElapsed(40); + const st = p.state.getLatestValue(); + expect(st.secondsElapsed).toBe(40); + expect(st.progressPercent).toBeCloseTo(20, 5); // 40/200*100 + }); + + it('elementIsPlaying utility', () => { + const el = document.createElement('audio'); + + const pausedSpy = jest + .spyOn(HTMLMediaElement.prototype, 'paused', 'get') + .mockReturnValue(true); + const endedSpy = jest + .spyOn(HTMLMediaElement.prototype, 'ended', 'get') + .mockReturnValue(false); + + expect(elementIsPlaying(el)).toBe(false); + + pausedSpy.mockReturnValue(false); + expect(elementIsPlaying(el)).toBe(true); + + endedSpy.mockReturnValue(true); + expect(elementIsPlaying(el)).toBe(false); + }); + + it('requestRemoval clears element (load not called) and nulls elementRef, notifies plugins', () => { + const 
onRemove = jest.fn(); + const player = makePlayer({ plugins: [{ id: 'TestOnRemove', onRemove }] }); + + // attach concrete element to spy on load() + player.ensureElementRef(); + const el = createdAudios[1]; + const loadSpy = jest.spyOn(el, 'load'); + + expect(player.elementRef).toBe(el); + + player.requestRemoval(); + + expect(loadSpy).not.toHaveBeenCalled(); + expect(player.elementRef).toBeNull(); + expect(onRemove).toHaveBeenCalledWith(expect.objectContaining({ player })); + }); + + it('play() after requestRemoval is a no-op (player disposed)', async () => { + jest.spyOn(HTMLMediaElement.prototype, 'canPlayType').mockReturnValue('maybe'); + const player = makePlayer(); + + // ensure element exists before removal + player.ensureElementRef(); + const firstEl = createdAudios[1]; + expect(player.elementRef).toBe(firstEl); + + player.requestRemoval(); + expect(player.elementRef).toBeNull(); + + await player.play(); + + // disposed: play() should not recreate element or change state + expect(player.elementRef).toBeNull(); + expect(player.isPlaying).toBe(false); + expect(createdAudios.length).toBe(2); + }); +}); diff --git a/src/components/AudioPlayback/__tests__/AudioPlayerPool.test.js b/src/components/AudioPlayback/__tests__/AudioPlayerPool.test.js new file mode 100644 index 0000000000..bfad61d582 --- /dev/null +++ b/src/components/AudioPlayback/__tests__/AudioPlayerPool.test.js @@ -0,0 +1,209 @@ +import { AudioPlayerPool } from '../AudioPlayerPool'; + +// make throttle a no-op where indirectly used +jest.mock('lodash.throttle', () => (fn) => fn); + +describe('AudioPlayerPool', () => { + const createdAudios = []; + + beforeEach(() => { + const RealAudio = window.Audio; + jest.spyOn(window, 'Audio').mockImplementation(function AudioMock(...args) { + const el = new RealAudio(...args); + createdAudios.push(el); + return el; + }); + + jest.spyOn(HTMLMediaElement.prototype, 'load').mockImplementation(() => ({})); + jest + .spyOn(HTMLMediaElement.prototype, 'play') + 
.mockImplementation(() => Promise.resolve()); + jest.spyOn(HTMLMediaElement.prototype, 'pause').mockImplementation(() => ({})); + + jest.spyOn(HTMLMediaElement.prototype, 'paused', 'get').mockReturnValue(true); + jest.spyOn(HTMLMediaElement.prototype, 'ended', 'get').mockReturnValue(false); + jest.spyOn(HTMLMediaElement.prototype, 'duration', 'get').mockReturnValue(100); + }); + + afterEach(() => { + jest.restoreAllMocks(); + createdAudios.length = 0; + }); + const defaultDescriptor = { durationSeconds: 100, mimeType: 'audio/mpeg' }; + const makePlayer = (pool, descriptor) => + pool.getOrAdd({ + ...defaultDescriptor, + ...descriptor, + }); + + it('getOrAdd returns same instance for same id and does not auto-register listeners, updates descriptor fields', () => { + const pool = new AudioPlayerPool(); + const p1 = makePlayer(pool, { + durationSeconds: 3, + fileSize: 35, + id: 'a', + mimeType: 'audio/abc', + src: 'https://example.com/a.mp3', + title: 'Title A', + waveformData: [1], + }); + const regSpy = jest.spyOn(p1, 'registerSubscriptions'); + const p1Again = makePlayer(pool, { + durationSeconds: 10, + id: 'a', + mimeType: 'audio/mpeg', + src: 'https://example.com/b.mp3', + waveformData: [2], + }); + expect(p1Again).toBe(p1); + expect(regSpy).not.toHaveBeenCalled(); + // eslint-disable-next-line no-underscore-dangle + expect(p1._data).toStrictEqual({ + durationSeconds: 10, + fileSize: 35, + id: 'a', + mimeType: 'audio/mpeg', + src: 'https://example.com/b.mp3', + title: 'Title A', + waveformData: [2], + }); + }); + + it('concurrent mode: per-owner elements are created lazily; src set without explicit load()', () => { + const pool = new AudioPlayerPool({ allowConcurrentPlayback: true }); + const p1 = makePlayer(pool, { id: 'o1', src: 'https://example.com/a.mp3' }); + const p2 = makePlayer(pool, { id: 'o2', src: 'https://example.com/b.mp3' }); + + const el1 = pool.acquireElement({ ownerId: p1.id, src: p1.src }); + const el2 = pool.acquireElement({ ownerId: p2.id, src: 
p2.src }); + expect(el1).toBeInstanceOf(HTMLAudioElement); + expect(el2).toBeInstanceOf(HTMLAudioElement); + expect(el1).not.toBe(el2); + + const loadSpy1 = jest.spyOn(el1, 'load'); + const loadSpy2 = jest.spyOn(el2, 'load'); + + // change sources; pool should set src but not call load() + const el1again = pool.acquireElement({ + ownerId: p1.id, + src: 'https://example.com/a2.mp3', + }); + const el2again = pool.acquireElement({ + ownerId: p2.id, + src: 'https://example.com/b2.mp3', + }); + expect(el1again).toBe(el1); + expect(el2again).toBe(el2); + expect(loadSpy1).not.toHaveBeenCalled(); + expect(loadSpy2).not.toHaveBeenCalled(); + }); + + it('concurrent mode: releaseElement pauses, clears src, calls load, and allows recreation', () => { + const pool = new AudioPlayerPool({ allowConcurrentPlayback: true }); + const p1 = makePlayer(pool, { id: 'o1', src: 'https://example.com/a.mp3' }); + const el1 = pool.acquireElement({ ownerId: p1.id, src: p1.src }); + const pauseSpy = jest.spyOn(el1, 'pause'); + const loadSpy = jest.spyOn(el1, 'load'); + + pool.releaseElement(p1.id); + expect(pauseSpy).toHaveBeenCalled(); + expect(loadSpy).toHaveBeenCalled(); + expect(el1.getAttribute('src')).toBe(null); + + // re-acquire -> new element instance is created + const el1new = pool.acquireElement({ ownerId: p1.id, src: p1.src }); + expect(el1new).toBeInstanceOf(HTMLAudioElement); + expect(el1new).not.toBe(el1); + }); + + it('single-playback mode: handoff pauses previous owner and does not call load() on src switch', () => { + const pool = new AudioPlayerPool({ allowConcurrentPlayback: false }); + const p1 = makePlayer(pool, { id: 'o1', src: 'https://example.com/a.mp3' }); + const p2 = makePlayer(pool, { id: 'o2', src: 'https://example.com/b.mp3' }); + + const el1 = pool.acquireElement({ ownerId: p1.id, src: p1.src }); + const loadSpy = jest.spyOn(el1, 'load'); + const pauseSpyPrev = jest.spyOn(p1, 'pause'); + const releaseForHandoffSpy = jest.spyOn(p1, 'releaseElementForHandoff'); + 
+ const el2 = pool.acquireElement({ ownerId: p2.id, src: p2.src }); + expect(el2).toBe(el1); // shared element + expect(pauseSpyPrev).toHaveBeenCalled(); + expect(releaseForHandoffSpy).toHaveBeenCalled(); + expect(loadSpy).not.toHaveBeenCalled(); + }); + + it('single-playback mode: release keeps shared instance but clears src and calls load()', () => { + const pool = new AudioPlayerPool({ allowConcurrentPlayback: false }); + const p1 = makePlayer(pool, { id: 'o1', src: 'https://example.com/a.mp3' }); + const el = pool.acquireElement({ ownerId: p1.id, src: p1.src }); + const pauseSpy = jest.spyOn(el, 'pause'); + const loadSpy = jest.spyOn(el, 'load'); + + pool.releaseElement(p1.id); + expect(pauseSpy).toHaveBeenCalled(); + expect(loadSpy).toHaveBeenCalled(); + expect(el.getAttribute('src')).toBe(null); + + // same shared instance is reused on next acquire + const elAfter = pool.acquireElement({ ownerId: p1.id, src: p1.src }); + expect(elAfter).toBe(el); + }); + + it('registerSubscriptions only calls players that already have an elementRef', () => { + const pool = new AudioPlayerPool({ allowConcurrentPlayback: true }); + const p1 = makePlayer(pool, { id: 'o1', src: 'https://example.com/a.mp3' }); + const p2 = makePlayer(pool, { id: 'o2', src: 'https://example.com/b.mp3' }); + + const spy1 = jest.spyOn(p1, 'registerSubscriptions'); + const spy2 = jest.spyOn(p2, 'registerSubscriptions'); + + // give only p2 an elementRef + const el = document.createElement('audio'); + p2.state.partialNext({ elementRef: el }); + + pool.registerSubscriptions(); + expect(spy1).not.toHaveBeenCalled(); + expect(spy2).toHaveBeenCalled(); + }); + + it('single-playback mode: removes a player', () => { + const pool = new AudioPlayerPool({ allowConcurrentPlayback: false }); + const player = makePlayer(pool, { id: 'o1', src: 'https://example.com/a.mp3' }); + pool.acquireElement({ ownerId: player.id, src: player.src }); + expect(pool.players).toHaveLength(1); + 
expect(Object.keys(pool.audios)).toHaveLength(0); + pool.remove(player.id); + expect(pool.players).toHaveLength(0); + }); + + it('concurrent-playback mode: removes a player', () => { + const pool = new AudioPlayerPool({ allowConcurrentPlayback: true }); + const player = makePlayer(pool, { id: 'o1', src: 'https://example.com/a.mp3' }); + const element = pool.acquireElement({ ownerId: player.id, src: player.src }); + expect(pool.players).toHaveLength(1); + expect(pool.audios.get(player.id)).toBe(element); + pool.remove(player.id); + expect(pool.players).toHaveLength(0); + expect(Object.keys(pool.audios)).toHaveLength(0); + }); + + it('sets active player only in single-playback mode', () => { + const poolConcurrent = new AudioPlayerPool({ allowConcurrentPlayback: true }); + const player1 = makePlayer(poolConcurrent, { + id: 'o1', + src: 'https://example.com/a.mp3', + }); + const poolSingle = new AudioPlayerPool({ allowConcurrentPlayback: false }); + const player2 = makePlayer(poolSingle, { + id: 'o1', + src: 'https://example.com/b.mp3', + }); + poolConcurrent.setActiveAudioPlayer(player1); + expect(poolConcurrent.players).toHaveLength(1); + expect(poolConcurrent.activeAudioPlayer).toBeNull(); + poolSingle.setActiveAudioPlayer(player2); + expect(poolSingle.players).toHaveLength(1); + expect(poolSingle.activeAudioPlayer).toBe(player2); + }); +}); diff --git a/src/components/AudioPlayback/__tests__/WithAudioPlayback.test.js b/src/components/AudioPlayback/__tests__/WithAudioPlayback.test.js new file mode 100644 index 0000000000..d65bee1fbb --- /dev/null +++ b/src/components/AudioPlayback/__tests__/WithAudioPlayback.test.js @@ -0,0 +1,486 @@ +// WithAudioPlayback.test.js +import React, { useEffect } from 'react'; +import '@testing-library/jest-dom'; +import { act, cleanup, render } from '@testing-library/react'; + +import { useAudioPlayer, WithAudioPlayback } from '../WithAudioPlayback'; +import * as audioModule from '../AudioPlayer'; // to spy on 
defaultRegisterAudioPlayerError + +// mock context used by WithAudioPlayback +jest.mock('../../../context', () => { + const mockAddError = jest.fn(); + const mockClient = { notifications: { addError: mockAddError } }; + const t = (s) => s; + return { + __esModule: true, + mockAddError, + useChatContext: () => ({ client: mockClient }), + useTranslationContext: () => ({ t }), + // export spy so tests can assert on it + }; +}); + +// make throttle a no-op (so seek/time-related stuff runs synchronously) +jest.mock('lodash.throttle', () => (fn) => fn); + +// ------------------ imports FROM mocks ------------------ + +import { mockAddError as addErrorSpy } from '../../../context'; + +const defaultRegisterSpy = jest.spyOn(audioModule, 'defaultRegisterAudioPlayerError'); + +// silence console.error in tests +jest.spyOn(console, 'error').mockImplementation(() => {}); + +// ------------------ window.Audio + media stubs ------------------ + +const createdAudios = []; + +beforeEach(() => { + // Return a real