Consecutive playback and per-conversation playback rate

This commit is contained in:
Alvaro 2022-09-15 14:10:46 -06:00 committed by GitHub
parent eb10aafd7c
commit 6cfe2a09df
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 783 additions and 319 deletions

Binary file not shown.

Binary file not shown.

View File

@ -37,3 +37,10 @@ global.window = {
// For ducks/network.getEmptyState()
global.navigator = {};
global.WebSocket = {};

// For GlobalAudioContext.tsx
/* eslint max-classes-per-file: ["error", 2] */
// Minimal stand-ins so modules touching Web Audio can be loaded under node;
// only the members the code under test actually calls are stubbed.
global.AudioContext = class {};
global.Audio = class {
  addEventListener() {}
};

View File

@ -1,4 +1,4 @@
// Copyright 2021 Signal Messenger, LLC
// Copyright 2021-2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import * as React from 'react';
@ -18,7 +18,6 @@ export type ComputePeaksResult = {
};
export type Contents = {
audio: HTMLAudioElement;
computePeaks(url: string, barCount: number): Promise<ComputePeaksResult>;
};
@ -168,7 +167,6 @@ export async function computePeaks(
}
const globalContents: Contents = {
audio: new Audio(),
computePeaks,
};
@ -178,6 +176,7 @@ export type GlobalAudioProps = {
conversationId: string | undefined;
isPaused: boolean;
children?: React.ReactNode | React.ReactChildren;
unloadMessageAudio: () => void;
};
/**
@ -186,22 +185,15 @@ export type GlobalAudioProps = {
*/
export const GlobalAudioProvider: React.FC<GlobalAudioProps> = ({
conversationId,
isPaused,
children,
unloadMessageAudio,
}) => {
// When moving between conversations - stop audio
React.useEffect(() => {
return () => {
globalContents.audio.pause();
unloadMessageAudio();
};
}, [conversationId]);
// Pause when requested by parent
React.useEffect(() => {
if (isPaused) {
globalContents.audio.pause();
}
}, [isPaused]);
}, [conversationId, unloadMessageAudio]);
return (
<GlobalAudioContext.Provider value={globalContents}>

View File

@ -116,24 +116,99 @@ const renderEmojiPicker: Props['renderEmojiPicker'] = ({
const renderReactionPicker: Props['renderReactionPicker'] = () => <div />;
/**
 * Storybook stand-in for the audioPlayer duck: drives a single <audio/>
 * element with local component state.
 *
 * It doesn't handle consecutive playback
 * since that logic mostly lives in the audioPlayer duck
 */
const MessageAudioContainer: React.FC<AudioAttachmentProps> = ({
  played,
  ...props
}) => {
  const [isActive, setIsActive] = React.useState<boolean>(false);
  const [currentTime, setCurrentTime] = React.useState<number>(0);
  const [playbackRate, setPlaybackRate] = React.useState<number>(1);
  const [playing, setPlaying] = React.useState<boolean>(false);
  const [_played, setPlayed] = React.useState<boolean>(played);

  // One <audio/> for the story's lifetime. NOTE(review): the 'loadeddata'
  // handler closes over the mount-time `currentTime` (deps intentionally
  // empty per the eslint-disable), so it restores the position captured at
  // mount, not the latest one — acceptable for a story.
  const audio = React.useMemo(() => {
    const a = new Audio();
    a.addEventListener('timeupdate', () => {
      setCurrentTime(a.currentTime);
    });
    a.addEventListener('ended', () => {
      setIsActive(false);
    });
    a.addEventListener('loadeddata', () => {
      a.currentTime = currentTime;
    });
    return a;
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  // Mirrors the duck's loadAndPlayMessageAudio thunk.
  const loadAndPlayMessageAudio = (
    _id: string,
    url: string,
    _context: string,
    position: number
  ) => {
    if (!active) {
      audio.src = url;
      setIsActive(true);
    }
    if (!playing) {
      audio.play();
      setPlaying(true);
    }

    audio.currentTime = audio.duration * position;
    if (!Number.isNaN(audio.currentTime)) {
      setCurrentTime(audio.currentTime);
    }
  };

  const setPlaybackRateAction = (_conversationId: string, rate: number) => {
    audio.playbackRate = rate;
    setPlaybackRate(rate);
  };

  const setIsPlayingAction = (value: boolean) => {
    if (value) {
      audio.play();
    } else {
      audio.pause();
    }
    setPlaying(value);
  };

  const setCurrentTimeAction = (value: number) => {
    audio.currentTime = value;
    // Fix: reflect the *requested* position in state. Previously this passed
    // the stale `currentTime` captured by this render, so seeking never
    // updated the waveform position.
    setCurrentTime(value);
  };

  // Derived snapshot matching ActiveAudioPlayerStateType.
  const active = isActive
    ? { playing, playbackRate, currentTime, duration: audio.duration }
    : undefined;

  const setPlayedAction = () => {
    setPlayed(true);
  };

  return (
    <MessageAudio
      {...props}
      id="storybook"
      renderingContext="storybook"
      computePeaks={computePeaks}
      active={active}
      played={_played}
      loadAndPlayMessageAudio={loadAndPlayMessageAudio}
      onFirstPlayed={setPlayedAction}
      setIsPlaying={setIsPlayingAction}
      setPlaybackRate={setPlaybackRateAction}
      setCurrentTime={setCurrentTimeAction}
    />
  );
};
@ -1263,6 +1338,7 @@ export const _Audio = (): JSX.Element => {
contentType: AUDIO_MP3,
fileName: 'incompetech-com-Agnus-Dei-X.mp3',
url: '/fixtures/incompetech-com-Agnus-Dei-X.mp3',
path: 'somepath',
}),
],
...(isPlayed
@ -1305,6 +1381,7 @@ LongAudio.args = {
contentType: AUDIO_MP3,
fileName: 'long-audio.mp3',
url: '/fixtures/long-audio.mp3',
path: 'somepath',
}),
],
status: 'sent',
@ -1317,6 +1394,7 @@ AudioWithCaption.args = {
contentType: AUDIO_MP3,
fileName: 'incompetech-com-Agnus-Dei-X.mp3',
url: '/fixtures/incompetech-com-Agnus-Dei-X.mp3',
path: 'somepath',
}),
],
status: 'sent',

View File

@ -170,6 +170,7 @@ export type AudioAttachmentProps = {
expirationLength?: number;
expirationTimestamp?: number;
id: string;
conversationId: string;
played: boolean;
showMessageDetail: (id: string) => void;
status?: MessageStatusType;
@ -898,6 +899,7 @@ export class Message extends React.PureComponent<Props, State> {
expirationTimestamp,
i18n,
id,
conversationId,
isSticker,
kickOffAttachmentDownload,
markAttachmentAsCorrupted,
@ -1044,6 +1046,7 @@ export class Message extends React.PureComponent<Props, State> {
expirationLength,
expirationTimestamp,
id,
conversationId,
played,
showMessageDetail,
status,

View File

@ -1,28 +1,22 @@
// Copyright 2021-2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, {
useRef,
useEffect,
useState,
useReducer,
useCallback,
} from 'react';
import React, { useRef, useEffect, useState } from 'react';
import classNames from 'classnames';
import { noop } from 'lodash';
import { assertDev } from '../../util/assert';
import type { LocalizerType } from '../../types/Util';
import type { AttachmentType } from '../../types/Attachment';
import { isDownloaded } from '../../types/Attachment';
import { missingCaseError } from '../../util/missingCaseError';
import type { DirectionType, MessageStatusType } from './Message';
import type { ComputePeaksResult } from '../GlobalAudioContext';
import { MessageMetadata } from './MessageMetadata';
import * as log from '../../logging/log';
import type { ActiveAudioPlayerStateType } from '../../state/ducks/audioPlayer';
export type Props = {
export type OwnProps = Readonly<{
active: ActiveAudioPlayerStateType | undefined;
renderingContext: string;
i18n: LocalizerType;
attachment: AttachmentType;
@ -35,25 +29,33 @@ export type Props = {
expirationLength?: number;
expirationTimestamp?: number;
id: string;
conversationId: string;
played: boolean;
showMessageDetail: (id: string) => void;
status?: MessageStatusType;
textPending?: boolean;
timestamp: number;
// See: GlobalAudioContext.tsx
audio: HTMLAudioElement;
buttonRef: React.RefObject<HTMLButtonElement>;
kickOffAttachmentDownload(): void;
onCorrupted(): void;
onFirstPlayed(): void;
computePeaks(url: string, barCount: number): Promise<ComputePeaksResult>;
activeAudioID: string | undefined;
activeAudioContext: string | undefined;
setActiveAudioID: (id: string | undefined, context: string) => void;
};
}>;
export type DispatchProps = Readonly<{
loadAndPlayMessageAudio: (
id: string,
url: string,
context: string,
position: number,
isConsecutive: boolean
) => void;
setCurrentTime: (currentTime: number) => void;
setPlaybackRate: (conversationId: string, rate: number) => void;
setIsPlaying: (value: boolean) => void;
}>;
export type Props = OwnProps & DispatchProps;
type ButtonProps = {
i18n: LocalizerType;
@ -142,45 +144,6 @@ const Button: React.FC<ButtonProps> = props => {
);
};
type StateType = Readonly<{
isPlaying: boolean;
currentTime: number;
lastAriaTime: number;
playbackRate: number;
}>;
type ActionType = Readonly<
| {
type: 'SET_IS_PLAYING';
value: boolean;
}
| {
type: 'SET_CURRENT_TIME';
value: number;
}
| {
type: 'SET_PLAYBACK_RATE';
value: number;
}
>;
function reducer(state: StateType, action: ActionType): StateType {
if (action.type === 'SET_IS_PLAYING') {
return {
...state,
isPlaying: action.value,
lastAriaTime: state.currentTime,
};
}
if (action.type === 'SET_CURRENT_TIME') {
return { ...state, currentTime: action.value };
}
if (action.type === 'SET_PLAYBACK_RATE') {
return { ...state, playbackRate: action.value };
}
throw missingCaseError(action);
}
/**
* Display message audio attachment along with its waveform, duration, and
* toggle Play/Pause button.
@ -196,10 +159,12 @@ function reducer(state: StateType, action: ActionType): StateType {
*/
export const MessageAudio: React.FC<Props> = (props: Props) => {
const {
active,
i18n,
renderingContext,
attachment,
collapseMetadata,
conversationId,
withContentAbove,
withContentBelow,
@ -217,52 +182,25 @@ export const MessageAudio: React.FC<Props> = (props: Props) => {
kickOffAttachmentDownload,
onCorrupted,
onFirstPlayed,
audio,
computePeaks,
activeAudioID,
activeAudioContext,
setActiveAudioID,
setPlaybackRate,
loadAndPlayMessageAudio,
setCurrentTime,
setIsPlaying,
} = props;
assertDev(audio != null, 'GlobalAudioContext always provides audio');
const isActive =
activeAudioID === id && activeAudioContext === renderingContext;
const waveformRef = useRef<HTMLDivElement | null>(null);
const [{ isPlaying, currentTime, lastAriaTime, playbackRate }, dispatch] =
useReducer(reducer, {
isPlaying: isActive && !(audio.paused || audio.ended),
currentTime: isActive ? audio.currentTime : 0,
lastAriaTime: isActive ? audio.currentTime : 0,
playbackRate: isActive ? audio.playbackRate : 1,
});
const setIsPlaying = useCallback(
(value: boolean) => {
dispatch({ type: 'SET_IS_PLAYING', value });
},
[dispatch]
);
const setCurrentTime = useCallback(
(value: number) => {
dispatch({ type: 'SET_CURRENT_TIME', value });
},
[dispatch]
);
const setPlaybackRate = useCallback(
(value: number) => {
dispatch({ type: 'SET_PLAYBACK_RATE', value });
},
[dispatch]
);
const isPlaying = active?.playing ?? false;
// if it's playing, use the duration passed as props as it might
// change during loading/playback (?)
// NOTE: Avoid division by zero
const [duration, setDuration] = useState(1e-23);
const activeDuration =
active?.duration && !Number.isNaN(active.duration)
? active.duration
: undefined;
const [duration, setDuration] = useState(activeDuration ?? 1e-23);
const [hasPeaks, setHasPeaks] = useState(false);
const [peaks, setPeaks] = useState<ReadonlyArray<number>>(
@ -334,122 +272,23 @@ export const MessageAudio: React.FC<Props> = (props: Props) => {
state,
]);
// This effect attaches/detaches event listeners to the global <audio/>
// instance that we reuse from the GlobalAudioContext.
//
// Audio playback changes `audio.currentTime` so we have to propagate this
// to the waveform UI.
//
// When audio ends - we have to change state and reset the position of the
// waveform.
useEffect(() => {
// Owner of Audio instance changed
if (!isActive) {
log.info('MessageAudio: pausing old owner', id);
setIsPlaying(false);
setCurrentTime(0);
return noop;
}
const onTimeUpdate = () => {
setCurrentTime(audio.currentTime);
if (audio.currentTime > duration) {
setDuration(audio.currentTime);
}
};
const onEnded = () => {
log.info('MessageAudio: ended, changing UI', id);
setIsPlaying(false);
setCurrentTime(0);
};
const onLoadedMetadata = () => {
assertDev(
!Number.isNaN(audio.duration),
'Audio should have definite duration on `loadedmetadata` event'
);
log.info('MessageAudio: `loadedmetadata` event', id);
// Sync-up audio's time in case if <audio/> loaded its source after
// user clicked on waveform
audio.currentTime = currentTime;
};
const onDurationChange = () => {
log.info('MessageAudio: `durationchange` event', id);
if (!Number.isNaN(audio.duration)) {
setDuration(Math.max(audio.duration, 1e-23));
}
};
audio.addEventListener('timeupdate', onTimeUpdate);
audio.addEventListener('ended', onEnded);
audio.addEventListener('loadedmetadata', onLoadedMetadata);
audio.addEventListener('durationchange', onDurationChange);
return () => {
audio.removeEventListener('timeupdate', onTimeUpdate);
audio.removeEventListener('ended', onEnded);
audio.removeEventListener('loadedmetadata', onLoadedMetadata);
audio.removeEventListener('durationchange', onDurationChange);
};
}, [
id,
audio,
isActive,
currentTime,
duration,
setCurrentTime,
setIsPlaying,
]);
// This effect detects `isPlaying` changes and starts/pauses playback when
// needed (+keeps waveform position and audio position in sync).
useEffect(() => {
if (!isActive) {
return;
}
audio.playbackRate = playbackRate;
if (isPlaying) {
if (!audio.paused) {
return;
}
log.info('MessageAudio: resuming playback for', id);
audio.currentTime = currentTime;
audio.play().catch(error => {
log.info('MessageAudio: resume error', id, error.stack || error);
});
} else {
log.info('MessageAudio: pausing playback for', id);
audio.pause();
}
}, [id, audio, isActive, isPlaying, currentTime, playbackRate]);
const toggleIsPlaying = () => {
setIsPlaying(!isPlaying);
if (!isActive && !isPlaying) {
log.info('MessageAudio: changing owner', id);
setActiveAudioID(id, renderingContext);
// Pause old audio
if (!audio.paused) {
audio.pause();
}
if (!isPlaying) {
if (!attachment.url) {
throw new Error(
'Expected attachment url in the MessageAudio with ' +
`state: ${state}`
);
}
audio.src = attachment.url;
if (active) {
setIsPlaying(true);
} else {
loadAndPlayMessageAudio(id, attachment.url, renderingContext, 0, false);
}
} else {
// stop
setIsPlaying(false);
}
};
@ -467,11 +306,6 @@ export const MessageAudio: React.FC<Props> = (props: Props) => {
if (state !== State.Normal) {
return;
}
if (!isPlaying) {
toggleIsPlaying();
}
if (!waveformRef.current) {
return;
}
@ -483,10 +317,16 @@ export const MessageAudio: React.FC<Props> = (props: Props) => {
progress = 0;
}
if (isPlaying && !Number.isNaN(audio.duration)) {
audio.currentTime = audio.duration * progress;
if (attachment.url) {
loadAndPlayMessageAudio(
id,
attachment.url,
renderingContext,
progress,
false
);
} else {
setCurrentTime(duration * progress);
log.warn('Waveform clicked on attachment with no url');
}
};
@ -511,13 +351,15 @@ export const MessageAudio: React.FC<Props> = (props: Props) => {
event.stopPropagation();
// There is no audio to rewind
if (!isActive) {
if (!active) {
return;
}
audio.currentTime = Math.min(
Number.isNaN(audio.duration) ? Infinity : audio.duration,
Math.max(0, audio.currentTime + increment)
setCurrentTime(
Math.min(
Number.isNaN(duration) ? Infinity : duration,
Math.max(0, active.currentTime + increment)
)
);
if (!isPlaying) {
@ -525,7 +367,9 @@ export const MessageAudio: React.FC<Props> = (props: Props) => {
}
};
const peakPosition = peaks.length * (currentTime / duration);
const currentTimeOrZero = active?.currentTime ?? 0;
const peakPosition = peaks.length * (currentTimeOrZero / duration);
const waveform = (
<div
@ -537,10 +381,10 @@ export const MessageAudio: React.FC<Props> = (props: Props) => {
role="slider"
aria-label={i18n('MessageAudio--slider')}
aria-orientation="horizontal"
aria-valuenow={lastAriaTime}
aria-valuenow={currentTimeOrZero}
aria-valuemin={0}
aria-valuemax={duration}
aria-valuetext={timeToText(lastAriaTime)}
aria-valuetext={timeToText(currentTimeOrZero)}
>
{peaks.map((peak, i) => {
let height = Math.max(BAR_MIN_HEIGHT, BAR_MAX_HEIGHT * peak);
@ -606,7 +450,7 @@ export const MessageAudio: React.FC<Props> = (props: Props) => {
);
}
const countDown = Math.max(0, duration - currentTime);
const countDown = Math.max(0, duration - (active?.currentTime ?? 0));
const nextPlaybackRate = (currentRate: number): number => {
// cycle through the rates
@ -642,17 +486,20 @@ export const MessageAudio: React.FC<Props> = (props: Props) => {
`${CSS_BASE}__dot--${played ? 'played' : 'unplayed'}`
)}
/>
{isPlaying && (
{active && active.playing && (
<button
type="button"
className={classNames(`${CSS_BASE}__playback-rate-button`)}
onClick={ev => {
ev.stopPropagation();
setPlaybackRate(nextPlaybackRate(playbackRate));
setPlaybackRate(
conversationId,
nextPlaybackRate(active.playbackRate)
);
}}
tabIndex={0}
>
{playbackRateLabels[playbackRate]}
{playbackRateLabels[active.playbackRate]}
</button>
)}
</div>

1
ts/model-types.d.ts vendored
View File

@ -298,6 +298,7 @@ export type ConversationAttributesType = {
sealedSender?: unknown;
sentMessageCount?: number;
sharedGroupNames?: Array<string>;
voiceNotePlaybackRate?: number;
id: string;
type: ConversationAttributesTypeType;

View File

@ -1871,6 +1871,7 @@ export class ConversationModel extends window.Backbone
this.get('acknowledgedGroupNameCollisions') || {},
sharedGroupNames: [],
}),
voiceNotePlaybackRate: this.get('voiceNotePlaybackRate'),
};
}

View File

@ -0,0 +1,85 @@
// Copyright 2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { noop } from 'lodash';
/**
 * Wrapper around a global HTMLAudioElement that can update the
 * source and callbacks without requiring removeEventListener
 */
class GlobalMessageAudio {
  #audio: HTMLAudioElement = new Audio();

  // The current owner's handlers; replaced wholesale on every load() so the
  // permanently-attached listeners below always forward to the latest owner.
  #handlers = {
    onLoadedMetadata: noop,
    onTimeUpdate: noop,
    onDurationChange: noop,
    onEnded: noop,
  };

  constructor() {
    // Listeners are attached exactly once and indirect through #handlers,
    // so swapping the handlers takes effect without any listener churn.
    this.#audio.addEventListener('loadedmetadata', () =>
      this.#handlers.onLoadedMetadata()
    );
    this.#audio.addEventListener('timeupdate', () =>
      this.#handlers.onTimeUpdate()
    );
    this.#audio.addEventListener('durationchange', () =>
      this.#handlers.onDurationChange()
    );
    this.#audio.addEventListener('ended', () => this.#handlers.onEnded());
  }

  /** Point the shared element at a new source and take ownership of events. */
  load({
    src,
    onLoadedMetadata,
    onTimeUpdate,
    onDurationChange,
    onEnded,
  }: {
    src: string;
    onLoadedMetadata: () => void;
    onTimeUpdate: () => void;
    onDurationChange: () => void;
    onEnded: () => void;
  }) {
    // Silence the previous owner and rewind before anything is swapped.
    this.#audio.pause();
    this.#audio.currentTime = 0;

    // update callbacks
    this.#handlers = {
      onLoadedMetadata,
      onTimeUpdate,
      onDurationChange,
      onEnded,
    };

    this.#audio.src = src;
  }

  play(): Promise<void> {
    return this.#audio.play();
  }

  pause(): void {
    this.#audio.pause();
  }

  get playbackRate() {
    return this.#audio.playbackRate;
  }

  set playbackRate(rate: number) {
    this.#audio.playbackRate = rate;
  }

  get duration() {
    return this.#audio.duration;
  }

  get currentTime() {
    return this.#audio.currentTime;
  }

  set currentTime(value: number) {
    this.#audio.currentTime = value;
  }
}

export const globalMessageAudio = new GlobalMessageAudio();

View File

@ -1,10 +1,14 @@
// Copyright 2021 Signal Messenger, LLC
// Copyright 2021-2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { ipcRenderer } from 'electron';
import type { Middleware } from 'redux';
import { COLORS_CHANGED, COLOR_SELECTED } from '../state/ducks/conversations';
import {
COLORS_CHANGED,
COLOR_SELECTED,
SET_VOICE_NOTE_PLAYBACK_RATE,
} from '../state/ducks/conversations';
export const dispatchItemsMiddleware: Middleware =
({ getState }) =>
@ -18,7 +22,8 @@ export const dispatchItemsMiddleware: Middleware =
action.type === 'items/REMOVE_EXTERNAL' ||
action.type === 'items/RESET' ||
action.type === COLOR_SELECTED ||
action.type === COLORS_CHANGED
action.type === COLORS_CHANGED ||
action.type === SET_VOICE_NOTE_PLAYBACK_RATE
) {
ipcRenderer.send('preferences-changed', getState().items);
}

View File

@ -1,58 +1,290 @@
// Copyright 2021 Signal Messenger, LLC
// Copyright 2021-2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { ThunkAction } from 'redux-thunk';
import { useBoundActions } from '../../hooks/useBoundActions';
import { Sound } from '../../util/Sound';
import * as Errors from '../../types/errors';
import type { StateType as RootStateType } from '../reducer';
import { selectNextConsecutiveVoiceNoteMessageId } from '../selectors/audioPlayer';
import {
getConversationByIdSelector,
getSelectedConversationId,
} from '../selectors/conversations';
import type {
MessageDeletedActionType,
MessageChangedActionType,
SelectedConversationChangedActionType,
ConversationChangedActionType,
} from './conversations';
import { SELECTED_CONVERSATION_CHANGED } from './conversations';
import {
SELECTED_CONVERSATION_CHANGED,
setVoiceNotePlaybackRate,
} from './conversations';
import * as log from '../../logging/log';
import { strictAssert } from '../../util/assert';
import { globalMessageAudio } from '../../services/globalMessageAudio';
// State
export type ActiveAudioPlayerStateType = {
readonly playing: boolean;
readonly currentTime: number;
readonly playbackRate: number;
readonly duration: number;
};
export type AudioPlayerStateType = {
readonly activeAudioID: string | undefined;
readonly activeAudioContext: string | undefined;
readonly active:
| (ActiveAudioPlayerStateType & { id: string; context: string })
| undefined;
};
// Actions
type SetActiveAudioIDAction = {
type: 'audioPlayer/SET_ACTIVE_AUDIO_ID';
payload: {
id: string | undefined;
context: string | undefined;
};
/**
* Sets the current "active" message audio for a particular rendering "context"
*/
export type SetMessageAudioAction = {
type: 'audioPlayer/SET_MESSAGE_AUDIO';
payload:
| {
id: string;
context: string;
playbackRate: number;
duration: number;
}
| undefined;
};
type AudioPlayerActionType = SetActiveAudioIDAction;
type SetPlaybackRate = {
type: 'audioPlayer/SET_PLAYBACK_RATE';
payload: number;
};
type SetIsPlayingAction = {
type: 'audioPlayer/SET_IS_PLAYING';
payload: boolean;
};
type CurrentTimeUpdated = {
type: 'audioPlayer/CURRENT_TIME_UPDATED';
payload: number;
};
type MessageAudioEnded = {
type: 'audioPlayer/MESSAGE_AUDIO_ENDED';
};
type DurationChanged = {
type: 'audioPlayer/DURATION_CHANGED';
payload: number;
};
type AudioPlayerActionType =
| SetMessageAudioAction
| SetIsPlayingAction
| SetPlaybackRate
| MessageAudioEnded
| CurrentTimeUpdated
| DurationChanged;
// Action Creators
// Exported action creators for the audioPlayer duck. The stale
// `setActiveAudioID` entry (removed in this revision of the duck) is dropped:
// it would reference an undefined name.
export const actions = {
  loadAndPlayMessageAudio,
  unloadMessageAudio,
  setPlaybackRate,
  setCurrentTime,
  setIsPlaying,
};

export const useActions = (): typeof actions => useBoundActions(actions);
function setActiveAudioID(
id: string | undefined,
context: string
): SetActiveAudioIDAction {
function setCurrentTime(value: number): CurrentTimeUpdated {
globalMessageAudio.currentTime = value;
return {
type: 'audioPlayer/SET_ACTIVE_AUDIO_ID',
payload: { id, context },
type: 'audioPlayer/CURRENT_TIME_UPDATED',
payload: value,
};
}
// Reducer
/**
 * Start or stop playback on the shared <audio/> element and mirror the
 * requested state into the store.
 */
function setIsPlaying(value: boolean): SetIsPlayingAction {
  if (value) {
    globalMessageAudio.play();
  } else {
    globalMessageAudio.pause();
  }
  return { type: 'audioPlayer/SET_IS_PLAYING', payload: value };
}
/**
 * Change the playback rate of the active voice note and persist it as the
 * conversation's preference.
 *
 * @param conversationId Conversation whose preference should be updated
 * @param rate New playback rate
 */
function setPlaybackRate(
  conversationId: string,
  rate: number
): ThunkAction<
  void,
  RootStateType,
  unknown,
  SetPlaybackRate | ConversationChangedActionType
> {
  return dispatch => {
    // Apply to the live element first, then reflect in audioPlayer state.
    globalMessageAudio.playbackRate = rate;
    dispatch({ type: 'audioPlayer/SET_PLAYBACK_RATE', payload: rate });

    // update the preference for the conversation
    dispatch(setVoiceNotePlaybackRate({ conversationId, rate }));
  };
}
/** Stop playback and clear the active message audio from the store. */
function unloadMessageAudio(): SetMessageAudioAction {
  // Pause before clearing state so the element does not keep playing.
  globalMessageAudio.pause();
  return { type: 'audioPlayer/SET_MESSAGE_AUDIO', payload: undefined };
}
// Audible cues around consecutive playback: "up" when advancing to the next
// voice note, "down" when a consecutive group finishes.
const stateChangeConfirmUpSound = new Sound({
  src: 'sounds/state-change_confirm-up.ogg',
});
const stateChangeConfirmDownSound = new Sound({
  src: 'sounds/state-change_confirm-down.ogg',
});

/**
 * Load the voice note at `url` into the shared audio element, make message
 * `id` the active audio for rendering `context`, and start playback —
 * chaining into the next consecutive voice note when this one ends.
 *
 * @param id Message id that owns the voice note attachment
 * @param url Attachment source URL to load
 * @param context Rendering context that owns playback
 * @param position Starting position as a fraction of the duration (0..1)
 * @param isConsecutive Is this part of a consecutive group (not first though)
 */
function loadAndPlayMessageAudio(
  id: string,
  url: string,
  context: string,
  position: number,
  isConsecutive: boolean
): ThunkAction<
  void,
  RootStateType,
  unknown,
  | SetMessageAudioAction
  | MessageAudioEnded
  | CurrentTimeUpdated
  | SetIsPlayingAction
  | DurationChanged
> {
  return (dispatch, getState) => {
    // set source to new message and start playing
    globalMessageAudio.load({
      src: url,

      // Propagate element time into the store so the waveform can track it.
      onTimeUpdate: () => {
        dispatch({
          type: 'audioPlayer/CURRENT_TIME_UPDATED',
          payload: globalMessageAudio.currentTime,
        });
      },

      onLoadedMetadata: () => {
        strictAssert(
          !Number.isNaN(globalMessageAudio.duration),
          'Audio should have definite duration on `loadedmetadata` event'
        );

        log.info('MessageAudio: `loadedmetadata` event', id);

        // Sync-up audio's time in case if <audio/> loaded its source after
        // user clicked on waveform
        if (getState().audioPlayer.active) {
          globalMessageAudio.currentTime =
            position * globalMessageAudio.duration;
        }
      },

      onDurationChange: () => {
        log.info('MessageAudio: `durationchange` event', id);

        // Duration may still be NaN while loading; the floor avoids a
        // zero duration downstream (division by zero in the waveform math).
        if (!Number.isNaN(globalMessageAudio.duration)) {
          dispatch({
            type: 'audioPlayer/DURATION_CHANGED',
            payload: Math.max(globalMessageAudio.duration, 1e-23),
          });
        }
      },

      onEnded: () => {
        // Look up the follow-up note BEFORE clearing active state — the
        // selector reads the currently-active message id from the store.
        const nextVoiceNoteMessage = selectNextConsecutiveVoiceNoteMessageId(
          getState()
        );

        dispatch({
          type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
        });

        // play the next message
        // for now we can just read the current conversation
        // this won't work when we allow a message to continue to play as the user
        // navigates away from the conversation
        // TODO: DESKTOP-4158
        if (nextVoiceNoteMessage) {
          stateChangeConfirmUpSound.play();
          dispatch(
            loadAndPlayMessageAudio(
              nextVoiceNoteMessage.id,
              nextVoiceNoteMessage.url,
              context,
              0,
              true
            )
          );
        } else if (isConsecutive) {
          stateChangeConfirmDownSound.play();
        }
      },
    });

    // set the playback rate to the stored value for the selected conversation
    const conversationId = getSelectedConversationId(getState());
    if (conversationId) {
      const conversation = getConversationByIdSelector(getState())(
        conversationId
      );
      globalMessageAudio.playbackRate =
        conversation?.voiceNotePlaybackRate ?? 1;
    }

    // On playback failure (e.g. revoked blob URL), tear everything down
    // rather than leaving a stuck "active" entry in the store.
    globalMessageAudio.play().catch(error => {
      log.error('MessageAudio: resume error', id, Errors.toLogFormat(error));
      dispatch(unloadMessageAudio());
    });

    dispatch({
      type: 'audioPlayer/SET_MESSAGE_AUDIO',
      payload: {
        id,
        context,
        playbackRate: globalMessageAudio.playbackRate,
        duration: globalMessageAudio.duration,
      },
    });

    dispatch(setIsPlaying(true));
  };
}
/**
 * Initial audioPlayer state: nothing loaded or playing. The stale
 * `activeAudioID`/`activeAudioContext` keys from the previous revision are
 * dropped — they no longer exist on AudioPlayerStateType.
 */
export function getEmptyState(): AudioPlayerStateType {
  return {
    active: undefined,
  };
}
@ -65,13 +297,18 @@ export function reducer(
| SelectedConversationChangedActionType
>
): AudioPlayerStateType {
if (action.type === 'audioPlayer/SET_ACTIVE_AUDIO_ID') {
if (action.type === 'audioPlayer/SET_MESSAGE_AUDIO') {
const { payload } = action;
return {
...state,
activeAudioID: payload.id,
activeAudioContext: payload.context,
active: payload
? {
...payload,
playing: true,
currentTime: 0,
}
: undefined,
};
}
@ -79,20 +316,75 @@ export function reducer(
if (action.type === SELECTED_CONVERSATION_CHANGED) {
return {
...state,
activeAudioID: undefined,
active: undefined,
};
}
if (action.type === 'audioPlayer/CURRENT_TIME_UPDATED') {
return {
...state,
active: state.active
? {
...state.active,
currentTime: action.payload,
}
: undefined,
};
}
if (action.type === 'audioPlayer/DURATION_CHANGED') {
return {
...state,
active: state.active
? {
...state.active,
duration: action.payload,
}
: undefined,
};
}
if (action.type === 'audioPlayer/MESSAGE_AUDIO_ENDED') {
return {
...state,
active: undefined,
};
}
if (action.type === 'audioPlayer/SET_IS_PLAYING') {
return {
...state,
active: state.active
? {
...state.active,
playing: action.payload,
}
: undefined,
};
}
if (action.type === 'audioPlayer/SET_PLAYBACK_RATE') {
return {
...state,
active: state.active
? {
...state.active,
playbackRate: action.payload,
}
: undefined,
};
}
// Reset activeAudioID on when played message is deleted on expiration.
if (action.type === 'MESSAGE_DELETED') {
const { id } = action.payload;
if (state.activeAudioID !== id) {
if (state.active?.id !== id) {
return state;
}
return {
...state,
activeAudioID: undefined,
active: undefined,
};
}
@ -100,7 +392,7 @@ export function reducer(
if (action.type === 'MESSAGE_CHANGED') {
const { id, data } = action.payload;
if (state.activeAudioID !== id) {
if (state.active?.id !== id) {
return state;
}
@ -110,7 +402,7 @@ export function reducer(
return {
...state,
activeAudioID: undefined,
active: undefined,
};
}

View File

@ -209,6 +209,7 @@ export type ConversationType = {
publicParams?: string;
acknowledgedGroupNameCollisions?: GroupNameCollisionsWithIdsByTitle;
profileKey?: string;
voiceNotePlaybackRate?: number;
badges: Array<
| {
@ -402,6 +403,9 @@ const UPDATE_USERNAME_SAVE_STATE = 'conversations/UPDATE_USERNAME_SAVE_STATE';
export const SELECTED_CONVERSATION_CHANGED =
'conversations/SELECTED_CONVERSATION_CHANGED';
export const SET_VOICE_NOTE_PLAYBACK_RATE =
'conversations/SET_VOICE_NOTE_PLAYBACK_RATE';
export type CancelVerificationDataByConversationActionType = {
type: typeof CANCEL_CONVERSATION_PENDING_VERIFICATION;
payload: {
@ -855,6 +859,7 @@ export const actions = {
setRecentMediaItems,
setSelectedConversationHeaderTitle,
setSelectedConversationPanelDepth,
setVoiceNotePlaybackRate,
showArchivedConversations,
showChooseGroupMembers,
showInbox,
@ -1270,6 +1275,42 @@ function resetAllChatColors(): ThunkAction<
};
}
/**
 * Persist the voice-note playback-rate preference for a conversation and
 * broadcast the change via a CONVERSATION_CHANGED action.
 *
 * @param conversationId Conversation whose preference is being updated
 * @param rate New playback rate; 1 is the default and clears the stored
 *   attribute instead of persisting it
 */
export function setVoiceNotePlaybackRate({
  conversationId,
  rate,
}: {
  conversationId: string;
  rate: number;
}): ThunkAction<void, RootStateType, unknown, ConversationChangedActionType> {
  return async dispatch => {
    const conversationModel = window.ConversationController.get(conversationId);
    if (conversationModel) {
      if (rate === 1) {
        // Rate 1 is the default — drop the attribute rather than storing it.
        delete conversationModel.attributes.voiceNotePlaybackRate;
      } else {
        // NOTE(review): mutates Backbone `attributes` directly instead of
        // model.set() — presumably to avoid change-event side effects; confirm.
        conversationModel.attributes.voiceNotePlaybackRate = rate;
      }
      await window.Signal.Data.updateConversation(conversationModel.attributes);
    }
    // Re-apply `rate` onto the formatted copy: the persisted attribute may
    // have been deleted above, but redux still needs the effective value.
    const conversation = conversationModel?.format();
    if (conversation) {
      dispatch({
        type: 'CONVERSATION_CHANGED',
        payload: {
          id: conversationId,
          data: {
            ...conversation,
            voiceNotePlaybackRate: rate,
          },
        },
      });
    }
  };
}
function colorSelected({
conversationId,
conversationColor,

View File

@ -1,8 +1,68 @@
// Copyright 2021 Signal Messenger, LLC
// Copyright 2021-2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { createSelector } from 'reselect';
import { collectFirst } from '../../util/iterables';
import type { StateType } from '../reducer';
import { getConversations } from './conversations';
import { getPropsForAttachment } from './message';
/**
 * True when no message audio is active. The interleaved pre-change line
 * reading `activeAudioID` (a field that no longer exists) is removed.
 */
export const isPaused = (state: StateType): boolean => {
  return state.audioPlayer.active === undefined;
};
/** Id of the currently-active voice note message, if any. */
export const selectActiveVoiceNoteMessageId = (
  state: StateType
): string | undefined => {
  const { active } = state.audioPlayer;
  return active ? active.id : undefined;
};
/**
 * Find the voice note immediately after the active one in its conversation,
 * returning its id and attachment url — or undefined when there is no
 * playable follow-up.
 *
 * Robustness fixes over the previous version:
 * - the active message may already be gone from `messagesLookup` (deleted or
 *   expired), which used to crash on `.conversationId`;
 * - `indexOf` returning -1 made `nextIdx` 0 and wrongly replayed the FIRST
 *   message of the conversation;
 * - the next message may also be missing from the lookup.
 */
export const selectNextConsecutiveVoiceNoteMessageId = createSelector(
  getConversations,
  selectActiveVoiceNoteMessageId,
  (
    conversations,
    activeVoiceNoteMessageId
  ): { id: string; url: string } | undefined => {
    if (!activeVoiceNoteMessageId) {
      return undefined;
    }

    const currentMessage =
      conversations.messagesLookup[activeVoiceNoteMessageId];
    if (!currentMessage) {
      return undefined;
    }

    const conversationMessages =
      conversations.messagesByConversation[currentMessage.conversationId];
    if (!conversationMessages) {
      return undefined;
    }

    const idx = conversationMessages.messageIds.indexOf(
      activeVoiceNoteMessageId
    );
    if (idx === -1) {
      return undefined;
    }

    const nextIdx = idx + 1;
    if (!(nextIdx in conversationMessages.messageIds)) {
      return undefined;
    }

    const nextMessageId = conversationMessages.messageIds[nextIdx];
    const nextMessage = conversations.messagesLookup[nextMessageId];
    if (!nextMessage || !nextMessage.attachments) {
      return undefined;
    }

    // First attachment that renders as a voice message with a usable url.
    const voiceNoteUrl = collectFirst(
      nextMessage.attachments.map(getPropsForAttachment),
      a => (a && a.isVoiceMessage && a.url ? a.url : undefined)
    );
    if (!voiceNoteUrl) {
      return undefined;
    }

    return {
      id: nextMessageId,
      url: voiceNoteUrl,
    };
  }
);

View File

@ -2,8 +2,10 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { connect } from 'react-redux';
import { pick } from 'underscore';
import { MessageAudio } from '../../components/conversation/MessageAudio';
import type { OwnProps as MessageAudioOwnProps } from '../../components/conversation/MessageAudio';
import type { ComputePeaksResult } from '../../components/GlobalAudioContext';
import { mapDispatchToProps } from '../actions';
@ -14,10 +16,9 @@ import type {
DirectionType,
MessageStatusType,
} from '../../components/conversation/Message';
import type { ActiveAudioPlayerStateType } from '../ducks/audioPlayer';
export type Props = {
audio: HTMLAudioElement;
renderingContext: string;
i18n: LocalizerType;
attachment: AttachmentType;
@ -29,6 +30,7 @@ export type Props = {
expirationLength?: number;
expirationTimestamp?: number;
id: string;
conversationId: string;
played: boolean;
showMessageDetail: (id: string) => void;
status?: MessageStatusType;
@ -43,10 +45,21 @@ export type Props = {
onFirstPlayed(): void;
};
// Maps global audio-player state into per-message props: `active` is defined
// only when the playing audio belongs to THIS message in THIS rendering
// context.
// NOTE(review): this span is a flattened diff — the first line is the removed
// old signature and the following four lines are the added replacement;
// likewise `...state.audioPlayer` (removed) and `active: messageActive`
// (added) both appear in the return. As rendered, the duplicate
// `mapStateToProps` declarations do not compile.
const mapStateToProps = (state: StateType, props: Props) => {
const mapStateToProps = (
state: StateType,
props: Props
): MessageAudioOwnProps => {
const { active } = state.audioPlayer;
// Narrow global playback state down to this message/context only.
const messageActive: ActiveAudioPlayerStateType | undefined =
active &&
active.id === props.id &&
active.context === props.renderingContext
? pick(active, 'playing', 'playbackRate', 'currentTime', 'duration')
: undefined;
return {
...props,
...state.audioPlayer,
active: messageActive,
};
};

View File

@ -1,4 +1,4 @@
// Copyright 2021 Signal Messenger, LLC
// Copyright 2021-2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { ReactElement } from 'react';
@ -7,7 +7,7 @@ import { GlobalAudioContext } from '../../components/GlobalAudioContext';
import type { Props as MessageAudioProps } from './MessageAudio';
import { SmartMessageAudio } from './MessageAudio';
type AudioAttachmentProps = Omit<MessageAudioProps, 'audio' | 'computePeaks'>;
type AudioAttachmentProps = Omit<MessageAudioProps, 'computePeaks'>;
export function renderAudioAttachment(
props: AudioAttachmentProps

View File

@ -1,9 +1,9 @@
// Copyright 2021 Signal Messenger, LLC
// Copyright 2021-2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { actions } from '../../../state/ducks/audioPlayer';
import type { SetMessageAudioAction } from '../../../state/ducks/audioPlayer';
import type { SelectedConversationChangedActionType } from '../../../state/ducks/conversations';
import {
SELECTED_CONVERSATION_CHANGED,
@ -18,6 +18,20 @@ const { messageDeleted, messageChanged } = conversationsActions;
const MESSAGE_ID = 'message-id';
// can't use the actual action since it's a ThunkAction
// Builds a SET_MESSAGE_AUDIO action directly; per the comment above in the
// file, the real action creator is a ThunkAction and cannot be used here.
const setMessageAudio = (
  id: string,
  context: string
): SetMessageAudioAction => {
  const payload = {
    id,
    context,
    playbackRate: 1,
    duration: 100,
  };
  return { type: 'audioPlayer/SET_MESSAGE_AUDIO', payload };
};
describe('both/state/ducks/audioPlayer', () => {
const getEmptyRootState = (): StateType => {
return rootReducer(undefined, noopAction());
@ -25,14 +39,10 @@ describe('both/state/ducks/audioPlayer', () => {
// Test fixture: root state after activating audio for MESSAGE_ID, with
// sanity assertions that the activation took effect.
// NOTE(review): this span is a flattened diff — the single-line `updated`
// assignment is the removed version and the multi-line one is its
// replacement; the `activeAudioID`/`activeAudioContext` asserts are the
// removed versions of the `active?.id`/`active?.context` asserts below them.
// As rendered, the duplicate `const updated` declarations do not compile.
const getInitializedState = (): StateType => {
const state = getEmptyRootState();
const updated = rootReducer(state, setMessageAudio(MESSAGE_ID, 'context'));
const updated = rootReducer(
state,
actions.setActiveAudioID(MESSAGE_ID, 'context')
);
assert.strictEqual(updated.audioPlayer.activeAudioID, MESSAGE_ID);
assert.strictEqual(updated.audioPlayer.activeAudioContext, 'context');
assert.strictEqual(updated.audioPlayer.active?.id, MESSAGE_ID);
assert.strictEqual(updated.audioPlayer.active?.context, 'context');
return updated;
};
@ -40,14 +50,11 @@ describe('both/state/ducks/audioPlayer', () => {
describe('setActiveAudioID', () => {
it("updates `activeAudioID` in the audioPlayer's state", () => {
const state = getEmptyRootState();
assert.strictEqual(state.audioPlayer.activeAudioID, undefined);
assert.strictEqual(state.audioPlayer.active, undefined);
const updated = rootReducer(
state,
actions.setActiveAudioID('test', 'context')
);
assert.strictEqual(updated.audioPlayer.activeAudioID, 'test');
assert.strictEqual(updated.audioPlayer.activeAudioContext, 'context');
const updated = rootReducer(state, setMessageAudio('test', 'context'));
assert.strictEqual(updated.audioPlayer.active?.id, 'test');
assert.strictEqual(updated.audioPlayer.active?.context, 'context');
});
});
@ -59,8 +66,7 @@ describe('both/state/ducks/audioPlayer', () => {
payload: { id: 'any' },
});
assert.strictEqual(updated.audioPlayer.activeAudioID, undefined);
assert.strictEqual(updated.audioPlayer.activeAudioContext, 'context');
assert.strictEqual(updated.audioPlayer.active, undefined);
});
it('resets activeAudioID when message was deleted', () => {
@ -71,8 +77,7 @@ describe('both/state/ducks/audioPlayer', () => {
messageDeleted(MESSAGE_ID, 'conversation-id')
);
assert.strictEqual(updated.audioPlayer.activeAudioID, undefined);
assert.strictEqual(updated.audioPlayer.activeAudioContext, 'context');
assert.strictEqual(updated.audioPlayer.active, undefined);
});
it('resets activeAudioID when message was erased', () => {
@ -92,7 +97,6 @@ describe('both/state/ducks/audioPlayer', () => {
})
);
assert.strictEqual(updated.audioPlayer.activeAudioID, undefined);
assert.strictEqual(updated.audioPlayer.activeAudioContext, 'context');
assert.strictEqual(updated.audioPlayer.active, undefined);
});
});

View File

@ -1,31 +1,42 @@
// Copyright 2021 Signal Messenger, LLC
// Copyright 2021-2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { actions } from '../../../state/ducks/audioPlayer';
import type { SetMessageAudioAction } from '../../../state/ducks/audioPlayer';
import { noopAction } from '../../../state/ducks/noop';
import { isPaused } from '../../../state/selectors/audioPlayer';
import type { StateType } from '../../../state/reducer';
import { reducer as rootReducer } from '../../../state/reducer';
// can't use the actual action since it's a ThunkAction
// Stand-in for the real setActiveAudioID action creator, which is a
// ThunkAction and so cannot be dispatched synchronously in these tests.
const setActiveAudioID = (
  id: string,
  context: string
): SetMessageAudioAction => ({
  type: 'audioPlayer/SET_MESSAGE_AUDIO',
  payload: { id, context, playbackRate: 1, duration: 100 },
});
describe('state/selectors/audioPlayer', () => {
const getEmptyRootState = (): StateType => {
return rootReducer(undefined, noopAction());
};
describe('isPaused', () => {
it('returns true if state.audioPlayer.activeAudioID is undefined', () => {
it('returns true if state.audioPlayer.active is undefined', () => {
const state = getEmptyRootState();
assert.isTrue(isPaused(state));
});
it('returns false if state.audioPlayer.activeAudioID is not undefined', () => {
it('returns false if state.audioPlayer.active is not undefined', () => {
const state = getEmptyRootState();
const updated = rootReducer(
state,
actions.setActiveAudioID('id', 'context')
);
const updated = rootReducer(state, setActiveAudioID('id', 'context'));
assert.isFalse(isPaused(updated));
});

View File

@ -112,6 +112,16 @@ export function collect<T, S>(
return new CollectIterable(iterable, fn);
}
// Returns the first value produced by collect(iterable, fn), or undefined
// when the collected sequence is empty.
export function collectFirst<T, S>(
  iterable: Iterable<T>,
  fn: (value: T) => S | undefined
): S | undefined {
  const iterator = collect(iterable, fn)[Symbol.iterator]();
  const first = iterator.next();
  return first.done ? undefined : first.value;
}
class CollectIterable<T, S> implements Iterable<S> {
constructor(
private readonly iterable: Iterable<T>,

View File

@ -69,6 +69,20 @@
"updated": "2018-09-15T00:16:19.197Z",
"line": " Module['load'] = function load(f) {"
},
{
"rule": "jQuery-load(",
"path": "ts/services/globalMessageAudio.ts",
"reasonCategory": "falseMatch",
"updated": "2022-09-09T15:04:29.812Z",
"line": " load({"
},
{
"rule": "jQuery-load(",
"path": "ts/state/ducks/audioPlayer.ts",
"line": " globalMessageAudio.load({",
"reasonCategory": "falseMatch",
"updated": "2022-09-09T15:04:29.812Z"
},
{
"rule": "jQuery-load(",
"path": "components/mp3lameencoder/lib/Mp3LameEncoder.js",