Merge pull request #5586 from vector-im/feature/eric/audio-files-player
Audio files in the timeline now appear with the audio player
ericdecanini authored Apr 7, 2022
2 parents 72bd398 + d9f2033 commit 45104f8
Showing 19 changed files with 575 additions and 189 deletions.
1 change: 1 addition & 0 deletions changelog.d/5586.feature
@@ -0,0 +1 @@
+Adds the ability for audio attachments to be played in the timeline

RoomDetailActivity.kt
@@ -38,7 +38,7 @@ import im.vector.app.databinding.ActivityRoomDetailBinding
import im.vector.app.features.analytics.plan.MobileScreen
import im.vector.app.features.home.room.breadcrumbs.BreadcrumbsFragment
import im.vector.app.features.home.room.detail.arguments.TimelineArgs
-import im.vector.app.features.home.room.detail.timeline.helper.VoiceMessagePlaybackTracker
+import im.vector.app.features.home.room.detail.timeline.helper.AudioMessagePlaybackTracker
import im.vector.app.features.matrixto.MatrixToBottomSheet
import im.vector.app.features.navigation.Navigator
import im.vector.app.features.room.RequireActiveMembershipAction
@@ -75,7 +75,7 @@ class RoomDetailActivity :
}

private var lastKnownPlayingOrRecordingState: Boolean? = null
-private val playbackActivityListener = VoiceMessagePlaybackTracker.ActivityListener { isPlayingOrRecording ->
+private val playbackActivityListener = AudioMessagePlaybackTracker.ActivityListener { isPlayingOrRecording ->
if (lastKnownPlayingOrRecordingState == isPlayingOrRecording) return@ActivityListener
when (isPlayingOrRecording) {
true -> keepScreenOn()
@@ -86,7 +86,7 @@

override fun getCoordinatorLayout() = views.coordinatorLayout

-@Inject lateinit var playbackTracker: VoiceMessagePlaybackTracker
+@Inject lateinit var playbackTracker: AudioMessagePlaybackTracker
private lateinit var sharedActionViewModel: RoomDetailSharedActionViewModel
private val requireActiveMembershipViewModel: RequireActiveMembershipViewModel by viewModel()

@@ -152,7 +152,7 @@ class RoomDetailActivity :
override fun onDestroy() {
supportFragmentManager.unregisterFragmentLifecycleCallbacks(fragmentLifecycleCallbacks)
views.drawerLayout.removeDrawerListener(drawerListener)
-playbackTracker.unTrackActivity(playbackActivityListener)
+playbackTracker.untrackActivity(playbackActivityListener)
super.onDestroy()
}

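A note on the pattern above: the tracker aggregates every active playback and recording into a single isPlayingOrRecording flag, and RoomDetailActivity toggles keepScreenOn()/stopKeepingScreenOn() only when that flag actually changes. Below is a minimal plain-Kotlin sketch of that contract; PlaybackActivityNotifier and its method bodies are illustrative stand-ins (AudioMessagePlaybackTracker itself is not among the hunks shown), and only the listener-side debounce mirrors the code above.

// Illustrative stand-in for the tracker's activity-listener bookkeeping.
fun interface ActivityListener {
    fun onUpdate(isPlayingOrRecording: Boolean)
}

class PlaybackActivityNotifier {
    private val activityListeners = mutableListOf<ActivityListener>()

    fun trackActivity(listener: ActivityListener) { activityListeners += listener }
    fun untrackActivity(listener: ActivityListener) { activityListeners -= listener }

    // Called whenever any playback or recording starts or stops.
    fun notifyActivity(isPlayingOrRecording: Boolean) {
        activityListeners.forEach { it.onUpdate(isPlayingOrRecording) }
    }
}

fun main() {
    val notifier = PlaybackActivityNotifier()
    // Mirrors RoomDetailActivity: react only when the aggregate state flips.
    var lastKnownPlayingOrRecordingState: Boolean? = null
    val playbackActivityListener = ActivityListener { isPlayingOrRecording ->
        if (lastKnownPlayingOrRecordingState == isPlayingOrRecording) return@ActivityListener
        lastKnownPlayingOrRecordingState = isPlayingOrRecording
        println(if (isPlayingOrRecording) "keepScreenOn()" else "stopKeepingScreenOn()")
    }
    notifier.trackActivity(playbackActivityListener)
    notifier.notifyActivity(true)   // prints keepScreenOn()
    notifier.notifyActivity(true)   // ignored: state unchanged
    notifier.notifyActivity(false)  // prints stopKeepingScreenOn()
    notifier.untrackActivity(playbackActivityListener) // as in onDestroy()
}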

TimelineFragment.kt
@@ -156,10 +156,11 @@ import im.vector.app.features.home.room.detail.timeline.action.EventSharedAction
import im.vector.app.features.home.room.detail.timeline.action.MessageActionsBottomSheet
import im.vector.app.features.home.room.detail.timeline.action.MessageSharedActionViewModel
import im.vector.app.features.home.room.detail.timeline.edithistory.ViewEditHistoryBottomSheet
+import im.vector.app.features.home.room.detail.timeline.helper.AudioMessagePlaybackTracker
import im.vector.app.features.home.room.detail.timeline.helper.MatrixItemColorProvider
-import im.vector.app.features.home.room.detail.timeline.helper.VoiceMessagePlaybackTracker
import im.vector.app.features.home.room.detail.timeline.image.buildImageContentRendererData
import im.vector.app.features.home.room.detail.timeline.item.AbsMessageItem
+import im.vector.app.features.home.room.detail.timeline.item.MessageAudioItem
import im.vector.app.features.home.room.detail.timeline.item.MessageFileItem
import im.vector.app.features.home.room.detail.timeline.item.MessageImageVideoItem
import im.vector.app.features.home.room.detail.timeline.item.MessageInformationData
@@ -264,7 +265,7 @@ class TimelineFragment @Inject constructor(
private val roomDetailPendingActionStore: RoomDetailPendingActionStore,
private val pillsPostProcessorFactory: PillsPostProcessor.Factory,
private val callManager: WebRtcCallManager,
-private val voiceMessagePlaybackTracker: VoiceMessagePlaybackTracker,
+private val audioMessagePlaybackTracker: AudioMessagePlaybackTracker,
private val clock: Clock,
private val matrixConfiguration: MatrixConfiguration
) :
@@ -737,7 +738,7 @@ class TimelineFragment @Inject constructor(
}

private fun setupVoiceMessageView() {
-voiceMessagePlaybackTracker.track(VoiceMessagePlaybackTracker.RECORDING_ID, views.voiceMessageRecorderView)
+audioMessagePlaybackTracker.track(AudioMessagePlaybackTracker.RECORDING_ID, views.voiceMessageRecorderView)
views.voiceMessageRecorderView.callback = object : VoiceMessageRecorderView.Callback {

override fun onVoiceRecordingStarted() {
@@ -794,13 +795,13 @@ class TimelineFragment @Inject constructor(

override fun onVoiceWaveformTouchedUp(percentage: Float, duration: Int) {
messageComposerViewModel.handle(
-MessageComposerAction.VoiceWaveformTouchedUp(VoiceMessagePlaybackTracker.RECORDING_ID, duration, percentage)
+MessageComposerAction.VoiceWaveformTouchedUp(AudioMessagePlaybackTracker.RECORDING_ID, duration, percentage)
)
}

override fun onVoiceWaveformMoved(percentage: Float, duration: Int) {
messageComposerViewModel.handle(
-MessageComposerAction.VoiceWaveformTouchedUp(VoiceMessagePlaybackTracker.RECORDING_ID, duration, percentage)
+MessageComposerAction.VoiceWaveformTouchedUp(AudioMessagePlaybackTracker.RECORDING_ID, duration, percentage)
)
}

@@ -908,6 +909,7 @@ class TimelineFragment @Inject constructor(
}

override fun onDestroyView() {
+audioMessagePlaybackTracker.makeAllPlaybacksIdle()
lazyLoadedViews.unBind()
timelineEventController.callback = null
timelineEventController.removeModelBuildListener(modelBuildListener)
@@ -1202,13 +1204,10 @@ class TimelineFragment @Inject constructor(
}

val messageContent: MessageContent? = event.getLastMessageContent()
-val nonFormattedBody = if (messageContent is MessageAudioContent && messageContent.voiceMessageIndicator != null) {
-val formattedDuration = DateUtils.formatElapsedTime(((messageContent.audioInfo?.duration ?: 0) / 1000).toLong())
-getString(R.string.voice_message_reply_content, formattedDuration)
-} else if (messageContent is MessagePollContent) {
-messageContent.getBestPollCreationInfo()?.question?.getBestQuestion()
-} else {
-messageContent?.body ?: ""
+val nonFormattedBody = when (messageContent) {
+is MessageAudioContent -> getAudioContentBodyText(messageContent)
+is MessagePollContent -> messageContent.getBestPollCreationInfo()?.question?.getBestQuestion()
+else -> messageContent?.body.orEmpty()
}
var formattedBody: CharSequence? = null
if (messageContent is MessageTextContent && messageContent.format == MessageFormat.FORMAT_MATRIX_HTML) {
@@ -1247,6 +1246,15 @@ class TimelineFragment @Inject constructor(
focusComposerAndShowKeyboard()
}

+private fun getAudioContentBodyText(messageContent: MessageAudioContent): String {
+val formattedDuration = DateUtils.formatElapsedTime(((messageContent.audioInfo?.duration ?: 0) / 1000).toLong())
+return if (messageContent.voiceMessageIndicator != null) {
+getString(R.string.voice_message_reply_content, formattedDuration)
+} else {
+getString(R.string.audio_message_reply_content, messageContent.body, formattedDuration)
+}
+}

override fun onResume() {
super.onResume()
notificationDrawerManager.setCurrentRoom(timelineArgs.roomId)
@@ -1271,7 +1279,7 @@ class TimelineFragment @Inject constructor(
override fun onPause() {
super.onPause()
notificationDrawerManager.setCurrentRoom(null)
-voiceMessagePlaybackTracker.unTrack(VoiceMessagePlaybackTracker.RECORDING_ID)
+audioMessagePlaybackTracker.pauseAllPlaybacks()

if (withState(messageComposerViewModel) { it.isVoiceRecording } && requireActivity().isChangingConfigurations) {
// we're rotating, maintain any active recordings
@@ -1399,6 +1407,7 @@ class TimelineFragment @Inject constructor(
}
return when (model) {
is MessageFileItem,
+is MessageAudioItem,
is MessageVoiceItem,
is MessageImageVideoItem,
is MessageTextItem -> {
@@ -2088,6 +2097,10 @@ class TimelineFragment @Inject constructor(
messageComposerViewModel.handle(MessageComposerAction.VoiceWaveformMovedTo(eventId, duration, percentage))
}

+override fun onAudioSeekBarMovedTo(eventId: String, duration: Int, percentage: Float) {
+messageComposerViewModel.handle(MessageComposerAction.AudioSeekBarMovedTo(eventId, duration, percentage))
+}

private fun onShareActionClicked(action: EventSharedAction.Share) {
when (action.messageContent) {
is MessageTextContent -> shareText(requireContext(), action.messageContent.body)
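
About the new getAudioContentBodyText() helper above: voice messages keep the existing duration-only reply text, while plain audio attachments now get a reply body that combines the file name with the formatted duration; in both cases the duration is audioInfo?.duration (milliseconds) divided by 1000 and passed through DateUtils.formatElapsedTime. The sketch below illustrates that formatting in plain Kotlin; formatElapsedTime() and audioReplyBody() are stand-ins, and the string templates are placeholders for the voice_message_reply_content / audio_message_reply_content resources, whose exact wording is not shown in this diff.

// Plain-Kotlin stand-in for android.text.format.DateUtils.formatElapsedTime().
fun formatElapsedTime(seconds: Long): String =
    if (seconds >= 3600) "%d:%02d:%02d".format(seconds / 3600, (seconds % 3600) / 60, seconds % 60)
    else "%02d:%02d".format(seconds / 60, seconds % 60)

// Placeholder templates; the real strings come from the app's string resources.
fun audioReplyBody(body: String, durationMs: Long?, isVoiceMessage: Boolean): String {
    val formattedDuration = formatElapsedTime((durationMs ?: 0) / 1000)
    return if (isVoiceMessage) "Voice message ($formattedDuration)"
    else "$body ($formattedDuration)"
}

fun main() {
    println(audioReplyBody("song.mp3", durationMs = 93_000, isVoiceMessage = false)) // song.mp3 (01:33)
    println(audioReplyBody(body = "", durationMs = 7_000, isVoiceMessage = true))    // Voice message (00:07)
}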

AudioMessageHelper.kt (formerly VoiceMessageHelper)
@@ -21,7 +21,7 @@ import android.media.AudioAttributes
import android.media.MediaPlayer
import androidx.core.content.FileProvider
import im.vector.app.BuildConfig
-import im.vector.app.features.home.room.detail.timeline.helper.VoiceMessagePlaybackTracker
+import im.vector.app.features.home.room.detail.timeline.helper.AudioMessagePlaybackTracker
import im.vector.app.features.voice.VoiceFailure
import im.vector.app.features.voice.VoiceRecorder
import im.vector.app.features.voice.VoiceRecorderProvider
@@ -40,12 +40,13 @@ import javax.inject.Inject
/**
* Helper class to record audio for voice messages.
*/
-class VoiceMessageHelper @Inject constructor(
+class AudioMessageHelper @Inject constructor(
private val context: Context,
-private val playbackTracker: VoiceMessagePlaybackTracker,
+private val playbackTracker: AudioMessagePlaybackTracker,
voiceRecorderProvider: VoiceRecorderProvider
) {
private var mediaPlayer: MediaPlayer? = null
+private var currentPlayingId: String? = null
private var voiceRecorder: VoiceRecorder = voiceRecorderProvider.provideVoiceRecorder()

private val amplitudeList = mutableListOf<Int>()
@@ -58,7 +59,7 @@ class VoiceMessageHelper @Inject constructor(
amplitudeList.clear()
attachmentData.waveform?.let {
amplitudeList.addAll(it)
-playbackTracker.updateCurrentRecording(VoiceMessagePlaybackTracker.RECORDING_ID, amplitudeList)
+playbackTracker.updateCurrentRecording(AudioMessagePlaybackTracker.RECORDING_ID, amplitudeList)
}
}

@@ -127,7 +128,7 @@

fun startOrPauseRecordingPlayback() {
voiceRecorder.getCurrentRecord()?.let {
-startOrPausePlayback(VoiceMessagePlaybackTracker.RECORDING_ID, it)
+startOrPausePlayback(AudioMessagePlaybackTracker.RECORDING_ID, it)
}
}

@@ -136,7 +137,8 @@
mediaPlayer?.stop()
stopPlaybackTicker()
stopRecordingAmplitudes()
-if (playbackState is VoiceMessagePlaybackTracker.Listener.State.Playing) {
+currentPlayingId = null
+if (playbackState is AudioMessagePlaybackTracker.Listener.State.Playing) {
playbackTracker.pausePlayback(id)
} else {
startPlayback(id, file)
@@ -163,6 +165,7 @@
seekTo(currentPlaybackTime)
}
}
+currentPlayingId = id
} catch (failure: Throwable) {
Timber.e(failure, "Unable to start playback")
throw VoiceFailure.UnableToPlay(failure)
@@ -171,17 +174,24 @@
}

fun stopPlayback() {
-playbackTracker.pausePlayback(VoiceMessagePlaybackTracker.RECORDING_ID)
+playbackTracker.pausePlayback(AudioMessagePlaybackTracker.RECORDING_ID)
mediaPlayer?.stop()
stopPlaybackTicker()
+currentPlayingId = null
}

fun movePlaybackTo(id: String, percentage: Float, totalDuration: Int) {
val toMillisecond = (totalDuration * percentage).toInt()
-playbackTracker.updateCurrentPlaybackTime(id, toMillisecond, percentage)
+playbackTracker.pauseAllPlaybacks()

-stopPlayback()
-playbackTracker.pausePlayback(id)
+if (currentPlayingId == id) {
+mediaPlayer?.seekTo(toMillisecond)
+playbackTracker.updatePlayingAtPlaybackTime(id, toMillisecond, percentage)
+} else {
+mediaPlayer?.pause()
+playbackTracker.updatePausedAtPlaybackTime(id, toMillisecond, percentage)
+stopPlaybackTicker()
+}
}

private fun startRecordingAmplitudes() {
@@ -200,7 +210,7 @@
try {
val maxAmplitude = voiceRecorder.getMaxAmplitude()
amplitudeList.add(maxAmplitude)
-playbackTracker.updateCurrentRecording(VoiceMessagePlaybackTracker.RECORDING_ID, amplitudeList)
+playbackTracker.updateCurrentRecording(AudioMessagePlaybackTracker.RECORDING_ID, amplitudeList)
} catch (e: IllegalStateException) {
Timber.e(e, "Cannot get max amplitude. Amplitude recording timer will be stopped.")
stopRecordingAmplitudes()
@@ -233,7 +243,7 @@
val currentPosition = mediaPlayer?.currentPosition ?: 0
val totalDuration = mediaPlayer?.duration ?: 0
val percentage = currentPosition.toFloat() / totalDuration
-playbackTracker.updateCurrentPlaybackTime(id, currentPosition, percentage)
+playbackTracker.updatePlayingAtPlaybackTime(id, currentPosition, percentage)
} else {
playbackTracker.stopPlayback(id)
stopPlaybackTicker()
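
Taken together, the call sites in this diff (RoomDetailActivity, TimelineFragment and AudioMessageHelper) imply roughly the following surface for the renamed AudioMessagePlaybackTracker. The class itself is not among the files shown above, so everything below — the AudioMessagePlaybackTrackerSketch name, the listener state shapes and the RECORDING_ID value — is inferred from usage and should be read as a sketch, not as the actual API.

// Sketch only: signatures inferred from call sites in this diff, not copied
// from the real class in features.home.room.detail.timeline.helper.
interface AudioMessagePlaybackTrackerSketch {

    fun interface ActivityListener {
        // True while any audio is playing or a voice message is being recorded.
        fun onUpdate(isPlayingOrRecording: Boolean)
    }

    interface Listener {
        fun onUpdate(state: State)

        sealed interface State {
            object Idle : State
            data class Playing(val playbackTime: Int, val percentage: Float) : State
            data class Paused(val playbackTime: Int, val percentage: Float) : State
            data class Recording(val amplitudeList: List<Int>) : State
        }
    }

    // Activity-level tracking (used by RoomDetailActivity for keepScreenOn()).
    fun trackActivity(listener: ActivityListener) // registration assumed; only untrackActivity appears in the diff
    fun untrackActivity(listener: ActivityListener)

    // Per-event tracking (used by TimelineFragment and the timeline items).
    fun track(id: String, listener: Listener)
    fun unTrack(id: String) // appears only in the pre-change onPause()

    // Playback state transitions driven by AudioMessageHelper.
    fun pausePlayback(id: String)
    fun stopPlayback(id: String)
    fun pauseAllPlaybacks()
    fun makeAllPlaybacksIdle()

    fun updateCurrentRecording(id: String, amplitudeList: List<Int>)
    fun updatePlayingAtPlaybackTime(id: String, time: Int, percentage: Float)
    fun updatePausedAtPlaybackTime(id: String, time: Int, percentage: Float)

    companion object {
        const val RECORDING_ID = "RECORDING_ID" // placeholder value; only the constant's name is visible in the diff
    }
}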

MessageComposerAction.kt
@@ -42,4 +42,5 @@ sealed class MessageComposerAction : VectorViewModelAction {
data class EndAllVoiceActions(val deleteRecord: Boolean = true) : MessageComposerAction()
data class VoiceWaveformTouchedUp(val eventId: String, val duration: Int, val percentage: Float) : MessageComposerAction()
data class VoiceWaveformMovedTo(val eventId: String, val duration: Int, val percentage: Float) : MessageComposerAction()
+data class AudioSeekBarMovedTo(val eventId: String, val duration: Int, val percentage: Float) : MessageComposerAction()
}
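
The new AudioSeekBarMovedTo action mirrors the existing waveform actions: it carries the seek position as a percentage together with the clip's total duration, and the receiving side (AudioMessageHelper.movePlaybackTo above) converts that pair back to an absolute position with (totalDuration * percentage).toInt(). How the composer view model routes the action to the helper is not part of the hunks shown, so the small standalone example below only illustrates the conversion itself; AudioSeekBarMovedToExample and toSeekPositionMs are local names for this sketch.

// Standalone illustration of the percentage -> milliseconds conversion; the
// data class here is a local mirror of the action, not the production type.
data class AudioSeekBarMovedToExample(val eventId: String, val duration: Int, val percentage: Float)

fun toSeekPositionMs(action: AudioSeekBarMovedToExample): Int =
    (action.duration * action.percentage).toInt()

fun main() {
    // Dragging the seek bar to 50% of a 90-second audio message seeks to 45,000 ms.
    val action = AudioSeekBarMovedToExample(eventId = "some-event-id", duration = 90_000, percentage = 0.5f)
    println(toSeekPositionMs(action)) // 45000
}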
(Diff truncated: the remaining changed files are not shown here.)
