Add a way to intercept the audio samples before processing (#22)
* Add a way to intercept the audio samples before processing

* fix BUILD.gn

(cherry picked from commit b33e7bd)
kanat committed Mar 25, 2024
1 parent b891ee6 commit c61ca0d
Showing 4 changed files with 41 additions and 2 deletions.
1 change: 1 addition & 0 deletions sdk/android/BUILD.gn
@@ -429,6 +429,7 @@ if (is_android) {
"src/java/org/webrtc/audio/WebRtcAudioRecord.java",
"src/java/org/webrtc/audio/WebRtcAudioTrack.java",
"src/java/org/webrtc/audio/WebRtcAudioUtils.java",
"src/java/org/webrtc/audio/AudioRecordDataCallback.java",
]

deps = [
13 changes: 12 additions & 1 deletion sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
@@ -51,6 +51,7 @@ public static class Builder {
private AudioAttributes audioAttributes;
private boolean useLowLatency;
private boolean enableVolumeLogger;
private AudioRecordDataCallback audioRecordDataCallback;

private Builder(Context context) {
this.context = context;
@@ -221,6 +222,16 @@ public Builder setEnableVolumeLogger(boolean enableVolumeLogger) {
return this;
}

/**
* Gives access to the raw ByteBuffer from the recording device before it is fed into
* WebRTC. The buffer can be modified in place, e.g. to apply audio filters. Keep the
* operation fast, since it runs on the audio recording thread.
*/
public Builder setAudioRecordDataCallback(AudioRecordDataCallback audioRecordDataCallback) {
this.audioRecordDataCallback = audioRecordDataCallback;
return this;
}

/**
* Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
* and is responsible for calling release().
@@ -255,7 +266,7 @@ public JavaAudioDeviceModule createAudioDeviceModule() {
}
final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager,
audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
samplesReadyCallback, audioRecordDataCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
final WebRtcAudioTrack audioOutput =
new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback,
audioTrackStateCallback, useLowLatency, enableVolumeLogger);
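With the new builder option above, a client can register the callback when constructing the audio device module. A minimal sketch, assuming the existing JavaAudioDeviceModule.builder(Context) entry point; the class name and the lambda body are illustrative only:

import android.content.Context;

import org.webrtc.audio.JavaAudioDeviceModule;

// Sketch: build a JavaAudioDeviceModule with the new callback registered.
final class AudioModuleFactory {
  static JavaAudioDeviceModule createWithCallback(Context appContext) {
    return JavaAudioDeviceModule.builder(appContext)
        .setAudioRecordDataCallback((audioFormat, channelCount, sampleRate, audioBuffer) -> {
          // Runs on the audio record thread just before the samples reach WebRTC;
          // keep this fast. The buffer may be modified in place (e.g. audio filters).
        })
        .createAudioDeviceModule();
  }
}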
16 changes: 16 additions & 0 deletions sdk/android/src/java/org/webrtc/audio/AudioRecordDataCallback.java
@@ -0,0 +1,16 @@
package org.webrtc.audio;

import androidx.annotation.NonNull;

import java.nio.ByteBuffer;

public interface AudioRecordDataCallback {
/**
* Invoked after a chunk of audio is recorded. Can be used to manipulate the
* ByteBuffer before it is fed into WebRTC. Currently the audio in the ByteBuffer
* is always PCM 16-bit and each buffer holds roughly 10 ms of audio.
*
* @param audioFormat the audio encoding, as defined in android.media.AudioFormat
*/
void onAudioDataRecorded(int audioFormat, int channelCount, int sampleRate, @NonNull ByteBuffer audioBuffer);
}
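An implementation can modify the recorded samples in place. A minimal sketch, assuming native-order interleaved PCM 16-bit data as described in the javadoc above; the fixed 0.5 gain factor is only an example:

import androidx.annotation.NonNull;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

import org.webrtc.audio.AudioRecordDataCallback;

// Sketch: halve the volume of every recorded PCM 16-bit sample in place.
final class GainCallback implements AudioRecordDataCallback {
  @Override
  public void onAudioDataRecorded(
      int audioFormat, int channelCount, int sampleRate, @NonNull ByteBuffer audioBuffer) {
    // Assumption: interleaved 16-bit PCM in the platform's native byte order.
    audioBuffer.order(ByteOrder.nativeOrder());
    for (int i = 0; i + 1 < audioBuffer.limit(); i += 2) {
      audioBuffer.putShort(i, (short) (audioBuffer.getShort(i) / 2));
    }
  }
}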
13 changes: 12 additions & 1 deletion sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
@@ -104,6 +104,7 @@ class WebRtcAudioRecord {

private final @Nullable AudioRecordErrorCallback errorCallback;
private final @Nullable AudioRecordStateCallback stateCallback;
private final @Nullable AudioRecordDataCallback audioRecordDataCallback;
private final @Nullable SamplesReadyCallback audioSamplesReadyCallback;
private final boolean isAcousticEchoCancelerSupported;
private final boolean isNoiseSuppressorSupported;
@@ -153,6 +154,13 @@ public void run() {
captureTimeNs = audioTimestamp.nanoTime;
}
}

// Allow the client to intercept the ByteBuffer (to modify it)
if (audioRecordDataCallback != null) {
audioRecordDataCallback.onAudioDataRecorded(audioRecord.getAudioFormat(),
audioRecord.getChannelCount(), audioRecord.getSampleRate(), byteBuffer);
}

nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs);
}
if (audioSamplesReadyCallback != null) {
@@ -196,7 +204,8 @@ public void stopThread() {
WebRtcAudioRecord(Context context, AudioManager audioManager) {
this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE,
DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */,
null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
null /* audioSamplesReadyCallback */, null /* audioRecordDataCallback */,
WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
WebRtcAudioEffects.isNoiseSuppressorSupported());
}

@@ -205,6 +214,7 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler,
@Nullable AudioRecordErrorCallback errorCallback,
@Nullable AudioRecordStateCallback stateCallback,
@Nullable SamplesReadyCallback audioSamplesReadyCallback,
@Nullable AudioRecordDataCallback audioRecordDataCallback,
boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) {
if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) {
throw new IllegalArgumentException("HW AEC not supported");
@@ -220,6 +230,7 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler,
this.errorCallback = errorCallback;
this.stateCallback = stateCallback;
this.audioSamplesReadyCallback = audioSamplesReadyCallback;
this.audioRecordDataCallback = audioRecordDataCallback;
this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported;
this.isNoiseSuppressorSupported = isNoiseSuppressorSupported;
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
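Because the callback fires on WebRtcAudioRecord's recording thread right before nativeDataIsRecorded(), it also suits read-only uses such as input metering, as long as the work stays cheap. A sketch under the same PCM 16-bit assumption; the peakLevel field and whoever reads it are hypothetical:

import androidx.annotation.NonNull;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.concurrent.atomic.AtomicInteger;

import org.webrtc.audio.AudioRecordDataCallback;

// Sketch: track the peak input level without allocating on the audio thread.
final class PeakLevelCallback implements AudioRecordDataCallback {
  // Hypothetical: read from a UI or stats thread to drive a level meter.
  final AtomicInteger peakLevel = new AtomicInteger();

  @Override
  public void onAudioDataRecorded(
      int audioFormat, int channelCount, int sampleRate, @NonNull ByteBuffer audioBuffer) {
    audioBuffer.order(ByteOrder.nativeOrder());
    int peak = 0;
    for (int i = 0; i + 1 < audioBuffer.limit(); i += 2) {
      peak = Math.max(peak, Math.abs((int) audioBuffer.getShort(i)));
    }
    peakLevel.set(peak);
  }
}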
