This repository has been archived by the owner on Sep 11, 2024. It is now read-only.

Allow voice messages to be scrubbed in the timeline #8079

Merged: 3 commits, Mar 29, 2022

18 changes: 15 additions & 3 deletions res/css/views/audio_messages/_PlaybackContainer.scss
@@ -1,5 +1,5 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.
Copyright 2021 - 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -29,6 +29,7 @@ limitations under the License.

contain: content;

// Waveforms are present in live recording only
.mx_Waveform {
.mx_Waveform_bar {
background-color: $quaternary-content;
@@ -46,11 +47,22 @@ limitations under the License.

.mx_Clock {
width: $font-42px; // we're not using a monospace font, so fake it
min-width: $font-42px; // force sensible layouts in awkward flexboxes (file panel, for example)
padding-right: 6px; // with the fixed width this ends up as a visual 8px most of the time, as intended.
padding-left: 8px; // isolate from recording circle / play control
}

&.mx_VoiceMessagePrimaryContainer_noWaveform {
max-width: 162px; // with all the padding this results in 185px wide
// For timeline-rendered playback, mirror the values for where the clock is in
// the waveform version.
.mx_SeekBar {
margin-left: 8px;
margin-right: 6px;

& + .mx_Clock {
text-align: right;

// Take the padding off the clock because it's accounted for in the seek bar
padding: 0;
}
}
}
36 changes: 2 additions & 34 deletions src/components/views/audio_messages/AudioPlayer.tsx
@@ -1,5 +1,5 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.
Copyright 2021 - 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import React, { createRef, ReactNode, RefObject } from "react";
import React, { ReactNode } from "react";

import PlayPauseButton from "./PlayPauseButton";
import { replaceableComponent } from "../../../utils/replaceableComponent";
@@ -24,41 +24,9 @@ import { _t } from "../../../languageHandler";
import SeekBar from "./SeekBar";
import PlaybackClock from "./PlaybackClock";
import AudioPlayerBase from "./AudioPlayerBase";
import { getKeyBindingsManager } from "../../../KeyBindingsManager";
import { KeyBindingAction } from "../../../accessibility/KeyboardShortcuts";

@replaceableComponent("views.audio_messages.AudioPlayer")
export default class AudioPlayer extends AudioPlayerBase {
private playPauseRef: RefObject<PlayPauseButton> = createRef();
private seekRef: RefObject<SeekBar> = createRef();

private onKeyDown = (ev: React.KeyboardEvent) => {
let handled = true;
const action = getKeyBindingsManager().getAccessibilityAction(ev);

switch (action) {
case KeyBindingAction.Space:
this.playPauseRef.current?.toggleState();
break;
case KeyBindingAction.ArrowLeft:
this.seekRef.current?.left();
break;
case KeyBindingAction.ArrowRight:
this.seekRef.current?.right();
break;
default:
handled = false;
break;
}

// stopPropagation() prevents the FocusComposer catch-all from triggering,
// but we need to do it on key down instead of press (even though the user
// interaction is typically on press).
if (handled) {
ev.stopPropagation();
}
};

protected renderFileSize(): string {
const bytes = this.props.playback.sizeBytes;
if (!bytes) return null;
44 changes: 39 additions & 5 deletions src/components/views/audio_messages/AudioPlayerBase.tsx
@@ -1,5 +1,5 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.
Copyright 2021 - 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -14,15 +14,19 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import React, { ReactNode } from "react";
import React, { createRef, ReactNode, RefObject } from "react";
import { logger } from "matrix-js-sdk/src/logger";

import { Playback, PlaybackState } from "../../../audio/Playback";
import { UPDATE_EVENT } from "../../../stores/AsyncStore";
import { replaceableComponent } from "../../../utils/replaceableComponent";
import { _t } from "../../../languageHandler";
import { getKeyBindingsManager } from "../../../KeyBindingsManager";
import { KeyBindingAction } from "../../../accessibility/KeyboardShortcuts";
import SeekBar from "./SeekBar";
import PlayPauseButton from "./PlayPauseButton";

interface IProps {
export interface IProps {
// Playback instance to render. Cannot change during component lifecycle: create
// an all-new component instead.
playback: Playback;
@@ -36,8 +40,11 @@ interface IState {
}

@replaceableComponent("views.audio_messages.AudioPlayerBase")
export default abstract class AudioPlayerBase extends React.PureComponent<IProps, IState> {
constructor(props: IProps) {
export default abstract class AudioPlayerBase<T extends IProps = IProps> extends React.PureComponent<T, IState> {
protected seekRef: RefObject<SeekBar> = createRef();
protected playPauseRef: RefObject<PlayPauseButton> = createRef();

constructor(props: T) {
super(props);

// Playback instances can be reused in the composer
@@ -56,6 +63,33 @@ export default abstract class AudioPlayerBase extends React.PureComponent<IProps
});
}

protected onKeyDown = (ev: React.KeyboardEvent) => {
let handled = true;
const action = getKeyBindingsManager().getAccessibilityAction(ev);

switch (action) {
case KeyBindingAction.Space:
this.playPauseRef.current?.toggleState();
break;
case KeyBindingAction.ArrowLeft:
this.seekRef.current?.left();
break;
case KeyBindingAction.ArrowRight:
this.seekRef.current?.right();
break;
default:
handled = false;
break;
}

// stopPropagation() prevents the FocusComposer catch-all from triggering,
// but we need to do it on key down instead of press (even though the user
// interaction is typically on press).
if (handled) {
ev.stopPropagation();
}
};

private onPlaybackUpdate = (ev: PlaybackState) => {
this.setState({ playbackPhase: ev });
};
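With the refs and the keyboard handler now living on AudioPlayerBase, a subclass only has to attach onKeyDown to its outer element and forward seekRef / playPauseRef to its children for Space / ArrowLeft / ArrowRight to work. A minimal sketch of such a subclass (the class name and CSS class are illustrative, not part of this change, and the same imports as AudioPlayerBase.tsx are assumed):

```tsx
// Illustrative subclass only — the real subclasses are AudioPlayer (above)
// and RecordingPlayback (below).
class MinimalPlayer extends AudioPlayerBase {
    protected renderComponent(): ReactNode {
        return (
            // The container gets the shared key handler from the base class...
            <div className="mx_MinimalPlayer" onKeyDown={this.onKeyDown}>
                {/* ...the protected refs let Space toggle playback... */}
                <PlayPauseButton
                    playback={this.props.playback}
                    playbackPhase={this.state.playbackPhase}
                    ref={this.playPauseRef}
                />
                {/* ...while the arrow keys nudge the seek position. */}
                <SeekBar
                    playback={this.props.playback}
                    playbackPhase={this.state.playbackPhase}
                    ref={this.seekRef}
                />
            </div>
        );
    }
}
```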
55 changes: 38 additions & 17 deletions src/components/views/audio_messages/RecordingPlayback.tsx
@@ -1,5 +1,5 @@
/*
Copyright 2021 The Matrix.org Foundation C.I.C.
Copyright 2021 - 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -19,29 +19,50 @@ import React, { ReactNode } from "react";
import PlayPauseButton from "./PlayPauseButton";
import PlaybackClock from "./PlaybackClock";
import { replaceableComponent } from "../../../utils/replaceableComponent";
import AudioPlayerBase, { IProps as IAudioPlayerBaseProps } from "./AudioPlayerBase";
import SeekBar from "./SeekBar";
import PlaybackWaveform from "./PlaybackWaveform";
import AudioPlayerBase from "./AudioPlayerBase";
import RoomContext, { TimelineRenderingType } from "../../../contexts/RoomContext";

interface IProps extends IAudioPlayerBaseProps {
/**
* When true, use a waveform instead of a seek bar
*/
withWaveform?: boolean;
}

@replaceableComponent("views.audio_messages.RecordingPlayback")
export default class RecordingPlayback extends AudioPlayerBase {
static contextType = RoomContext;
public context!: React.ContextType<typeof RoomContext>;

private get isWaveformable(): boolean {
return this.context.timelineRenderingType !== TimelineRenderingType.Notification
&& this.context.timelineRenderingType !== TimelineRenderingType.File
&& this.context.timelineRenderingType !== TimelineRenderingType.Pinned;
export default class RecordingPlayback extends AudioPlayerBase<IProps> {
// This component is rendered in two ways: the composer and timeline. They have different
// rendering properties (specifically the difference of a waveform or not).

private renderWaveformLook(): ReactNode {
return <>
<PlaybackClock playback={this.props.playback} />
<PlaybackWaveform playback={this.props.playback} />
</>;
}

protected renderComponent(): ReactNode {
const shapeClass = !this.isWaveformable ? 'mx_VoiceMessagePrimaryContainer_noWaveform' : '';
private renderSeekableLook(): ReactNode {
return <>
<SeekBar
playback={this.props.playback}
tabIndex={-1} // prevent tabbing into the bar
playbackPhase={this.state.playbackPhase}
ref={this.seekRef}
/>
<PlaybackClock playback={this.props.playback} />
</>;
}

protected renderComponent(): ReactNode {
return (
<div className={'mx_MediaBody mx_VoiceMessagePrimaryContainer ' + shapeClass}>
<PlayPauseButton playback={this.props.playback} playbackPhase={this.state.playbackPhase} />
<PlaybackClock playback={this.props.playback} />
{ this.isWaveformable && <PlaybackWaveform playback={this.props.playback} /> }
<div className="mx_MediaBody mx_VoiceMessagePrimaryContainer" onKeyDown={this.onKeyDown}>
<PlayPauseButton
playback={this.props.playback}
playbackPhase={this.state.playbackPhase}
ref={this.playPauseRef}
/>
{ this.props.withWaveform ? this.renderWaveformLook() : this.renderSeekableLook() }
</div>
);
}
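The upshot is that the look is now chosen by the caller through the new withWaveform prop rather than inferred from the room context: the composer keeps the waveform, while timeline-style callers omit the prop and get the scrubbable seek bar. A rough usage sketch (assuming an existing Playback instance named playback; the variable names are illustrative):

```tsx
// Composer look: waveform + clock (see VoiceRecordComposerTile below).
const composerView = <RecordingPlayback playback={playback} withWaveform={true} />;

// Timeline look: scrubbable seek bar + clock (the default when the prop is omitted).
const timelineView = <RecordingPlayback playback={playback} />;
```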
2 changes: 1 addition & 1 deletion src/components/views/rooms/VoiceRecordComposerTile.tsx
@@ -233,7 +233,7 @@ export default class VoiceRecordComposerTile extends React.PureComponent<IProps,
if (!this.state.recorder) return null; // no recorder means we're not recording: no waveform

if (this.state.recordingPhase !== RecordingState.Started) {
return <RecordingPlayback playback={this.state.recorder.getPlayback()} />;
return <RecordingPlayback playback={this.state.recorder.getPlayback()} withWaveform={true} />;
}

// only other UI is the recording-in-progress UI
30 changes: 8 additions & 22 deletions test/components/views/audio_messages/RecordingPlayback-test.tsx
@@ -27,6 +27,7 @@ import RoomContext, { TimelineRenderingType } from '../../../../src/contexts/Roo
import { createAudioContext } from '../../../../src/audio/compat';
import { findByTestId, flushPromises } from '../../../test-utils';
import PlaybackWaveform from '../../../../src/components/views/audio_messages/PlaybackWaveform';
import SeekBar from "../../../../src/components/views/audio_messages/SeekBar";

jest.mock('../../../../src/audio/compat', () => ({
createAudioContext: jest.fn(),
@@ -56,7 +57,7 @@ describe('<RecordingPlayback />', () => {
const mockChannelData = new Float32Array();

const defaultRoom = { roomId: '!room:server.org', timelineRenderingType: TimelineRenderingType.File };
const getComponent = (props: { playback: Playback }, room = defaultRoom) =>
const getComponent = (props: React.ComponentProps<typeof RecordingPlayback>, room = defaultRoom) =>
mount(<RecordingPlayback {...props} />, {
wrappingComponent: RoomContext.Provider,
wrappingComponentProps: { value: room },
@@ -128,34 +129,19 @@ describe('<RecordingPlayback />', () => {
expect(playback.toggle).toHaveBeenCalled();
});

it.each([
[TimelineRenderingType.Notification],
[TimelineRenderingType.File],
[TimelineRenderingType.Pinned],
])('does not render waveform when timeline rendering type for room is %s', (timelineRenderingType) => {
it('should render a seek bar by default', () => {
const playback = new Playback(new ArrayBuffer(8));
const room = {
...defaultRoom,
timelineRenderingType,
};
const component = getComponent({ playback }, room);
const component = getComponent({ playback });

expect(component.find(PlaybackWaveform).length).toBeFalsy();
expect(component.find(SeekBar).length).toBeTruthy();
});

it.each([
[TimelineRenderingType.Room],
[TimelineRenderingType.Thread],
[TimelineRenderingType.ThreadsList],
[TimelineRenderingType.Search],
])('renders waveform when timeline rendering type for room is %s', (timelineRenderingType) => {
it('should render a waveform when requested', () => {
const playback = new Playback(new ArrayBuffer(8));
const room = {
...defaultRoom,
timelineRenderingType,
};
const component = getComponent({ playback }, room);
const component = getComponent({ playback, withWaveform: true });

expect(component.find(PlaybackWaveform).length).toBeTruthy();
expect(component.find(SeekBar).length).toBeFalsy();
});
});