[*] 混音优化 (audio mixing optimization)
@@ -13,11 +13,10 @@
 
 ## 计划任务
 
-* 混音
-* 音频视频时间对齐
 * 分辨率调整
 * 查询消费者生产者信息
 
 ## 完成任务
 
+* 混音
 * 屏幕共享
@@ -401,7 +401,7 @@ public final class MediaManager {
 //        }
 //        });
         final JavaAudioDeviceModule javaAudioDeviceModule = JavaAudioDeviceModule.builder(this.context)
-//            .setSampleRate()
+//            .setSampleRate(48000)
 //            .setAudioSource(MediaRecorder.AudioSource.MIC)
 //            .setAudioFormat(AudioFormat.ENCODING_PCM_16BIT)
 //            .setAudioAttributes(audioAttributes)
@@ -573,6 +573,22 @@ public final class MediaManager {
         }
     }
 
+    public void muteAllRemote() {
+        this.javaAudioDeviceModule.setSpeakerMute(true);
+    }
+
+    public void unmuteAllRemote() {
+        this.javaAudioDeviceModule.setSpeakerMute(false);
+    }
+
+    public void muteAllLocal() {
+        this.javaAudioDeviceModule.setMicrophoneMute(true);
+    }
+
+    public void unmuteAllLocal() {
+        this.javaAudioDeviceModule.setMicrophoneMute(false);
+    }
+
     /**
      * 更新配置
      *
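The four helpers above delegate to the audio device module: `setSpeakerMute(true)` silences playback of every remote track, while `setMicrophoneMute(true)` stops the locally captured audio from being delivered. A minimal usage sketch; the call sites, class name and scenarios are hypothetical (only the four `MediaManager` methods come from this commit), and the `MediaManager` import is omitted:

```java
// Hypothetical call sites for the helpers added to MediaManager above.
public final class MuteAllExample {

    // "Listen only": keep hearing remote participants, stop sending the microphone.
    public static void listenOnly(final MediaManager mediaManager) {
        mediaManager.unmuteAllRemote();
        mediaManager.muteAllLocal();
    }

    // Fully muted: no remote playback and no local capture is delivered.
    public static void muteEverything(final MediaManager mediaManager) {
        mediaManager.muteAllRemote();
        mediaManager.muteAllLocal();
    }
}
```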
@@ -1,12 +1,17 @@
 package com.acgist.taoyao.media.audio;
 
+import android.annotation.SuppressLint;
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.MediaRecorder;
 import android.util.Log;
 
 import com.acgist.taoyao.media.client.RecordClient;
 
-import org.webrtc.AudioSource;
 import org.webrtc.audio.JavaAudioDeviceModule;
 
+import java.nio.ByteBuffer;
+import java.util.Arrays;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
@@ -18,21 +23,61 @@ import java.util.concurrent.TimeUnit;
  * WebRtcAudioTrack#AudioTrackThread :远程音频
  * WebRtcAudioRecord#AudioRecordThread:本地音频
  *
- * 注意:只能远程终端拉取才能采集音频数据,如果需要离线采集自己使用AudioRecord实现。
+ * AudioFormat.ENCODING_PCM_16BIT = 2KB
+ *
+ * PCM时间计算:1_000_000 microseconds / 48000 hz / 2 bytes
  *
  * @author acgist
  */
 public class MixerProcesser extends Thread implements JavaAudioDeviceModule.SamplesReadyCallback {
 
+    /**
+     * 音频数据来源
+     * 其实不用切换可以两个同时录制,但是有点浪费资源。
+     *
+     * @author acgist
+     */
+    public enum Source {
+        // 本地
+        NATIVE,
+        // WebRTC
+        WEBRTC;
+    }
+
     private boolean close;
+    private Source source;
+    private final int sampleRate;
+    private final int audioFormat;
+    private final int audioSource;
+    private final int channelCount;
+    private final int channelConfig;
+    private final AudioRecord audioRecord;
     private final RecordClient recordClient;
     private final BlockingQueue<JavaAudioDeviceModule.AudioSamples> local;
     private final BlockingQueue<JavaAudioDeviceModule.AudioSamples> remote;
 
-    public MixerProcesser(RecordClient recordClient) {
+    @SuppressLint("MissingPermission")
+    public MixerProcesser(int sampleRate, int channelCount, RecordClient recordClient) {
         this.setDaemon(true);
         this.setName("AudioMixer");
         this.close = false;
+        this.source = Source.WEBRTC;
+        this.sampleRate = sampleRate;
+        this.audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+        this.audioSource = MediaRecorder.AudioSource.MIC;
+        this.channelCount = channelCount;
+        this.channelConfig = AudioFormat.CHANNEL_IN_MONO;
+        this.audioRecord = new AudioRecord.Builder()
+            .setAudioFormat(
+                new AudioFormat.Builder()
+                    .setEncoding(this.audioFormat)
+                    .setSampleRate(this.sampleRate)
+                    .setChannelMask(this.channelConfig)
+                    .build()
+            )
+            .setAudioSource(this.audioSource)
+            .setBufferSizeInBytes(AudioRecord.getMinBufferSize(this.sampleRate, this.channelConfig, this.audioFormat))
+            .build();
         this.recordClient = recordClient;
         this.local = new LinkedBlockingQueue<>(1024);
         this.remote = new LinkedBlockingQueue<>(1024);
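The constructor sizes the `AudioRecord` with `AudioRecord.getMinBufferSize(...)`, and `run()` below allocates a direct buffer according to the 采集数据大小 comment (sample rate / (1000 ms / callback interval) * channel count * bytes per sample). A worked example of those figures, assuming 48 kHz mono 16-bit PCM; the real sample rate and channel count are passed in from `RecordClient` and are not fixed by this commit:

```java
// Worked example of the buffer sizing and pts arithmetic used by MixerProcesser,
// assuming sampleRate = 48000 and channelCount = 1 (illustrative values only).
public final class PcmBufferSizeExample {

    public static void main(String[] args) {
        final int sampleRate     = 48_000; // assumed example rate (Hz)
        final int channelCount   = 1;      // assumed mono capture
        final int bytesPerSample = 2;      // AudioFormat.ENCODING_PCM_16BIT

        // 采集数据大小:采样频率 / (一秒 / 回调频率) * 通道数量 * 采样数据大小
        // One 10 ms callback worth of PCM data:
        final int callbackBytes = sampleRate / (1000 / 10) * channelCount * bytesPerSample;
        System.out.println("bytes per 10 ms callback = " + callbackBytes); // 960

        // PCM时间计算:1_000_000 microseconds / sampleRate / bytesPerSample
        // Microseconds per PCM byte, using the same integer arithmetic as run():
        final long microsPerByte = 1_000_000 / sampleRate / bytesPerSample; // 20 / 2 = 10
        System.out.println("pts increment per callback = " + callbackBytes * microsPerByte + " µs"); // 9600
        // The nominal duration of 960 bytes at 48 kHz mono 16-bit is 10_000 µs;
        // the smaller value comes from the integer division above.
    }
}
```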
@@ -57,53 +102,107 @@ public class MixerProcesser extends Thread implements JavaAudioDeviceModule.Samp
     @Override
     public void run() {
         long pts = System.nanoTime();
-        // final byte[] target = new byte[length];
-        // PCM时间计算:1000000 microseconds / 48000 hz / 2 bytes
-        JavaAudioDeviceModule.AudioSamples local;
-        JavaAudioDeviceModule.AudioSamples remote;
-        int localValue;
-        int remoteValue;
-        byte[] localData;
-        byte[] remoteData;
-        byte[] data = null;
-        // TODO:固定长度采样率等等
+        byte[] mixData = null;
+        byte[] localData = null;
+        byte[] remoteData = null;
+        byte[] recordData = null;
+        int mixDataLength = 0;
+        JavaAudioDeviceModule.AudioSamples local = null;
+        JavaAudioDeviceModule.AudioSamples remote = null;
+        int recordSize = 0;
+        // 采集数据大小:采样频率 / (一秒 / 回调频率) * 通道数量 * 采样数据大小
+        final ByteBuffer byteBuffer = ByteBuffer.allocateDirect(this.sampleRate / (1000 / 10) * this.channelCount * 2);
         while(!this.close) {
             try {
+                if(this.source == Source.NATIVE) {
+                    recordSize = this.audioRecord.read(byteBuffer, byteBuffer.capacity());
+                    if(recordSize != byteBuffer.capacity()) {
+                        continue;
+                    }
+                    recordData = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), byteBuffer.capacity() + byteBuffer.arrayOffset());
+                    pts += recordData.length * (1_000_000 / this.sampleRate / 2);
+                    this.recordClient.onPcm(pts, recordData);
+                } else if(this.source == Source.WEBRTC) {
                     local = this.local.poll(100, TimeUnit.MILLISECONDS);
                     remote = this.remote.poll();
                     if(local != null && remote != null) {
+                        // Log.d(MixerProcesser.class.getSimpleName(), String.format("""
+                        //     混音长度:%d - %d
+                        //     混音采样:%d - %d
+                        //     混音格式:%d - %d
+                        //     混音数量:%d - %d""",
+                        //     local.getData().length, remote.getData().length,
+                        //     local.getSampleRate(), remote.getSampleRate(),
+                        //     local.getAudioFormat(), remote.getAudioFormat(),
+                        //     local.getChannelCount(), remote.getChannelCount()
+                        // ));
                         localData = local.getData();
                         remoteData = remote.getData();
-                    Log.d(MixerProcesser.class.getSimpleName(), String.format("""
-                        混音长度:%d - %d
-                        混音采样:%d - %d
-                        混音格式:%d - %d
-                        """, localData.length, remoteData.length, local.getSampleRate(), remote.getSampleRate(), local.getAudioFormat(), remote.getAudioFormat()));
-                    data = new byte[localData.length];
-                    for (int index = 0; index < localData.length; index++) {
-                        localValue = localData[index];
-                        remoteValue = remoteData[index];
-                        data[index] = (byte) ((localValue +remoteValue) / 2);
+                        if(mixDataLength != localData.length) {
+                        // if(mixDataLength != localData.length && mixDataLength != remoteData.length) {
+                            mixDataLength = localData.length;
+                            mixData = new byte[mixDataLength];
                         }
-                    pts += data.length * (1_000_000 / local.getSampleRate() / 2);
+                        // 如果多路远程声音变小:(remote * 远程路数 + local) / (远程路数 + 1)
+                        for (int index = 0; index < mixDataLength; index++) {
+                            // -0x8000 ~ 0x7FFF;
+                            mixData[index] = (byte) (((localData[index] + remoteData[index]) & 0x7FFF) / 2);
+                            // mixData[index] = (byte) (((localData[index] + remoteData[index]) & 0xFFFF) / 2);
+                            // mixData[index] = (byte) (((localData[index] + remoteData[index] * remoteCount) & 0xFFFF) / (1 + remoteCount));
+                        }
+                        pts += mixData.length * (1_000_000 / local.getSampleRate() / 2);
+                        this.recordClient.onPcm(pts, mixData);
                     } else if(local != null && remote == null) {
-                    data = local.getData();
-                    pts += data.length * (1_000_000 / local.getSampleRate() / 2);
+                        localData = local.getData();
+                        pts += localData.length * (1_000_000 / local.getSampleRate() / 2);
+                        this.recordClient.onPcm(pts, localData);
                     } else if(local == null && remote != null) {
-                    data = remote.getData();
-                    pts += data.length * (1_000_000 / remote.getSampleRate() / 2);
+                        remoteData = remote.getData();
+                        pts += remoteData.length * (1_000_000 / remote.getSampleRate() / 2);
+                        this.recordClient.onPcm(pts, remoteData);
                     } else {
                         continue;
                     }
-                this.recordClient.onPcm(pts, data);
+                } else {
+                }
             } catch (Exception e) {
                 Log.e(MixerProcesser.class.getSimpleName(), "音频处理异常", e);
             }
         }
+        if(this.audioRecord != null) {
+            this.audioRecord.stop();
+            this.audioRecord.release();
+        }
+    }
+
+    @Override
+    public void startNative() {
+        synchronized (this) {
+            if(this.source == Source.NATIVE) {
+                return;
+            }
+            this.audioRecord.startRecording();
+            this.source = Source.NATIVE;
+            Log.i(MixerProcesser.class.getSimpleName(), "混音切换来源:" + this.source);
+        }
+    }
+
+    @Override
+    public void startWebRTC() {
+        synchronized (this) {
+            if(this.source == Source.WEBRTC) {
+                return;
+            }
+            this.audioRecord.stop();
+            this.source = Source.WEBRTC;
+            Log.i(MixerProcesser.class.getSimpleName(), "混音切换来源:" + this.source);
+        }
     }
 
     public void close() {
+        synchronized (this) {
             this.close = true;
         }
+    }
 
 }
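The loop above averages the two streams byte by byte, and its comments note both the signed 16-bit range (`-0x8000 ~ 0x7FFF`) and a weighting formula for several remote streams (`(remote * 远程路数 + local) / (远程路数 + 1)`). For reference, a sketch of the same averaging applied per 16-bit sample instead of per byte; this is not the commit's implementation, and it assumes equally sized little-endian `ENCODING_PCM_16BIT` buffers with `remoteCount` as an illustrative parameter:

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Sketch only: mixes two equally sized 16-bit little-endian PCM buffers sample by
// sample, using the weighting mentioned above: (remote * remoteCount + local) /
// (remoteCount + 1). With remoteCount = 1 this is a plain average. Results are
// clamped to the signed 16-bit range -0x8000..0x7FFF as a safety net.
public final class Pcm16Mixer {

    public static byte[] mix(final byte[] localData, final byte[] remoteData, final int remoteCount) {
        final ByteBuffer local  = ByteBuffer.wrap(localData).order(ByteOrder.LITTLE_ENDIAN);
        final ByteBuffer remote = ByteBuffer.wrap(remoteData).order(ByteOrder.LITTLE_ENDIAN);
        final byte[] mixData    = new byte[localData.length];
        final ByteBuffer mix    = ByteBuffer.wrap(mixData).order(ByteOrder.LITTLE_ENDIAN);
        while (local.remaining() >= 2 && remote.remaining() >= 2) {
            final int mixed = (remote.getShort() * remoteCount + local.getShort()) / (remoteCount + 1);
            // -0x8000 ~ 0x7FFF
            mix.putShort((short) Math.max(-0x8000, Math.min(0x7FFF, mixed)));
        }
        return mixData;
    }

    public static void main(String[] args) {
        final byte[] local  = {0x00, 0x40, 0x00, (byte) 0xC0}; // samples  0x4000, -0x4000
        final byte[] remote = {0x00, 0x20, 0x00, (byte) 0xE0}; // samples  0x2000, -0x2000
        final byte[] mixed  = mix(local, remote, 1);
        // Expected output "00 30 00 d0": the averages 0x3000 and -0x3000 (0xD000 as a short).
        System.out.printf("%02x %02x %02x %02x%n", mixed[0] & 0xFF, mixed[1] & 0xFF, mixed[2] & 0xFF, mixed[3] & 0xFF);
    }
}
```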
@@ -52,6 +52,7 @@ public class LocalClient extends RoomClient {
             return;
         }
         ListUtils.getOnlyOne(this.mediaStream.audioTracks, audioTrack -> {
+            audioTrack.setVolume(Config.DEFAULT_VOLUME);
             audioTrack.setEnabled(true);
             return audioTrack;
         });
@@ -374,10 +374,10 @@ public class RecordClient extends Client implements VideoSink {
     public void record(VideoSource videoSource, JavaAudioDeviceModule javaAudioDeviceModule, PeerConnectionFactory peerConnectionFactory) {
         // 音频
         if(javaAudioDeviceModule != null) {
-            this.mixerProcesser = new MixerProcesser(this);
-            this.mixerProcesser.start();
-            javaAudioDeviceModule.setMixerProcesser(this.mixerProcesser);
             this.javaAudioDeviceModule = javaAudioDeviceModule;
+            this.mixerProcesser = new MixerProcesser(this.sampleRate, this.channelCount, this);
+            this.mixerProcesser.start();
+            this.javaAudioDeviceModule.setMixerProcesser(this.mixerProcesser);
         }
         // 视频
         if(videoSource != null && peerConnectionFactory != null) {
@@ -7,6 +7,7 @@ import com.acgist.taoyao.boot.utils.ListUtils;
 import com.acgist.taoyao.media.config.Config;
 import com.acgist.taoyao.media.signal.ITaoyao;
 
+import org.webrtc.AudioTrack;
 import org.webrtc.MediaStreamTrack;
 import org.webrtc.VideoTrack;
 
@@ -39,8 +40,12 @@ public class RemoteClient extends RoomClient {
     public void playAudio() {
         super.playAudio();
         ListUtils.getOnlyOne(
-            this.tracks.values().stream().filter(v -> MediaStreamTrack.AUDIO_TRACK_KIND.equals(v.kind())).collect(Collectors.toList()),
+            this.tracks.values().stream()
+                .filter(v -> MediaStreamTrack.AUDIO_TRACK_KIND.equals(v.kind()))
+                .map(v -> (AudioTrack) v)
+                .collect(Collectors.toList()),
             audioTrack -> {
+                audioTrack.setVolume(Config.DEFAULT_VOLUME);
                 audioTrack.setEnabled(true);
                 return audioTrack;
             }
@@ -230,6 +230,7 @@ public class SessionClient extends Client {
             return;
         }
         ListUtils.getOnlyOne(this.remoteMediaStream.audioTracks, audioTrack -> {
+            audioTrack.setVolume(Config.DEFAULT_VOLUME);
             audioTrack.setEnabled(true);
             return audioTrack;
         });
@@ -239,6 +240,7 @@ public class SessionClient extends Client {
     public void pauseAudio() {
         super.pauseAudio();
         ListUtils.getOnlyOne(this.remoteMediaStream.audioTracks, audioTrack -> {
+            audioTrack.setVolume(0);
             audioTrack.setEnabled(false);
             return audioTrack;
         });
@@ -248,6 +250,7 @@ public class SessionClient extends Client {
     public void resumeAudio() {
         super.resumeAudio();
         ListUtils.getOnlyOne(this.remoteMediaStream.audioTracks, audioTrack -> {
+            audioTrack.setVolume(Config.DEFAULT_VOLUME);
             audioTrack.setEnabled(true);
             return audioTrack;
         });
@@ -296,6 +299,7 @@ public class SessionClient extends Client {
     public void pause(String type) {
         if(MediaStreamTrack.AUDIO_TRACK_KIND.equals(type)) {
             ListUtils.getOnlyOne(this.mediaStream.audioTracks, audioTrack -> {
+                audioTrack.setVolume(0);
                 audioTrack.setEnabled(false);
                 return audioTrack;
             });
@@ -318,6 +322,7 @@ public class SessionClient extends Client {
     public void resume(String type) {
         if(MediaStreamTrack.AUDIO_TRACK_KIND.equals(type)) {
             ListUtils.getOnlyOne(this.mediaStream.audioTracks, audioTrack -> {
+                audioTrack.setVolume(Config.DEFAULT_VOLUME);
                 audioTrack.setEnabled(true);
                 return audioTrack;
             });
@@ -14,6 +14,7 @@ import android.content.Context;
 import android.media.AudioAttributes;
 import android.media.AudioDeviceInfo;
 import android.media.AudioManager;
+import android.media.AudioRecord;
 import android.os.Build;
 
 import androidx.annotation.RequiresApi;
@@ -316,6 +317,14 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
 
   /** Called when new audio samples are ready. This should only be set for debug purposes */
   public static interface SamplesReadyCallback {
+    /**
+     * 本地录制
+     */
+    void startNative();
+    /**
+     * 远程录制
+     */
+    void startWebRTC();
     /**
      * 远程音频
      *
@@ -117,7 +117,18 @@ class WebRtcAudioRecord {
    * @Taoyao
    */
   public void setMixerProcesser(SamplesReadyCallback samplesReadyCallback) {
+    // 不用处理这个逻辑:设置为空表示关闭录制
+    // if(this.audioSamplesReadyCallback != null && samplesReadyCallback == null) {
+    //   this.audioSamplesReadyCallback.startNative();
+    // }
     this.audioSamplesReadyCallback = samplesReadyCallback;
+    if(this.audioSamplesReadyCallback != null) {
+      if(this.audioThread == null) {
+        this.audioSamplesReadyCallback.startNative();
+      } else {
+        this.audioSamplesReadyCallback.startWebRTC();
+      }
+    }
   }
 
   /**
@@ -158,7 +169,8 @@ class WebRtcAudioRecord {
         if (audioSamplesReadyCallback != null) {
           // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily
           // at index 0.
-          SamplesReadyCallback nullable = audioSamplesReadyCallback;
+          // 注意不能定义其他地方否则不能回收
+          final SamplesReadyCallback nullable = audioSamplesReadyCallback;
           if(nullable != null) {
             final byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), byteBuffer.capacity() + byteBuffer.arrayOffset());
             nullable.onWebRtcAudioRecordSamplesReady(new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(), audioRecord.getChannelCount(), audioRecord.getSampleRate(), data));
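Both audio threads now read `audioSamplesReadyCallback` into a `final` local before the null check: `setMixerProcesser(null)` can clear the field from another thread at any moment, and the local copy keeps the check and the invocation consistent while also (per the `注意不能定义其他地方否则不能回收` comment) staying method-local so the reference can be collected once the callback is detached. A minimal illustration of the idiom; the field name, its `volatile` modifier and the `Runnable` type are placeholders, not the project's declarations:

```java
// Copy-then-check idiom: the second null check operates on a stable local copy,
// so another thread clearing the field between the check and the call cannot
// cause a NullPointerException.
public final class CallbackCopyIdiom {

    private volatile Runnable callback; // assumed to be written from another thread

    public void setCallback(final Runnable callback) {
        this.callback = callback; // may be set to null to stop delivery
    }

    public void deliver() {
        final Runnable nullable = this.callback; // copy once
        if (nullable != null) {
            nullable.run();                      // safe even if this.callback was just nulled
        }
    }
}
```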
@@ -182,6 +194,9 @@ class WebRtcAudioRecord {
       } catch (IllegalStateException e) {
         Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
       }
+      if(audioSamplesReadyCallback != null) {
+        audioSamplesReadyCallback.startNative();
+      }
     }
 
   // Stops the inner thread loop and also calls AudioRecord.stop().
@@ -376,6 +391,9 @@ class WebRtcAudioRecord {
     Logging.d(TAG, "startRecording");
     assertTrue(audioRecord != null);
     assertTrue(audioThread == null);
+    if(audioSamplesReadyCallback != null) {
+      audioSamplesReadyCallback.startWebRTC();
+    }
     try {
       audioRecord.startRecording();
     } catch (IllegalStateException e) {
@@ -146,7 +146,8 @@ class WebRtcAudioTrack {
       }
       int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
       if (audioSamplesReadyCallback != null) {
-        SamplesReadyCallback nullable = audioSamplesReadyCallback;
+        // 注意不能定义其他地方否则不能回收
+        final SamplesReadyCallback nullable = audioSamplesReadyCallback;
         if(nullable != null) {
           final byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), byteBuffer.capacity() + byteBuffer.arrayOffset());
           nullable.onWebRtcAudioTrackSamplesReady(new JavaAudioDeviceModule.AudioSamples(audioTrack.getAudioFormat(), audioTrack.getChannelCount(), audioTrack.getSampleRate(), data));
@@ -5,7 +5,7 @@ const defaultAudioConfig = {
   // 设备
   // deviceId : '',
   // 音量:0~1
-  volume: 0.5,
+  volume: 1.0,
   // 延迟大小(单位毫秒):500毫秒以内较好
   latency: 0.4,
   // 采样位数:8|16|32
@@ -19,11 +19,8 @@ public class AudioMixerTest {
         final byte[] bytesB = Files.readAllBytes(fileB.toPath());
         final int length = Math.min(bytesA.length, bytesB.length);
         final byte[] target = new byte[length];
-        int a, b;
         for (int i = 0; i < length; i++) {
-            a = bytesA[i];
-            b = bytesB[i];
-            target[i] = (byte) ((a + b) / 2);
+            target[i] = (byte) (((bytesA[i] + bytesB[i]) & 0xFFFF) / 2);
         }
         Files.write(Paths.get("D:\\tmp\\mixer\\3.pcm"), target);
     }
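The test writes the byte-averaged mix to `3.pcm`. To relate the output back to the `PCM时间计算` note in `MixerProcesser`, a small sketch that derives a raw PCM file's duration from its byte length; 48 kHz, mono and 16-bit are assumed values, since the commit does not state the format of the files under `D:\tmp\mixer`:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Sketch: derive the playback duration of a headerless PCM file from its size.
// The sample rate, channel count and sample width below are assumptions.
public final class PcmDurationExample {

    public static long durationMicros(final long pcmBytes, final int sampleRate,
                                      final int channelCount, final int bytesPerSample) {
        final long frames = pcmBytes / (channelCount * bytesPerSample); // one frame = one sample per channel
        return frames * 1_000_000L / sampleRate;                        // duration in microseconds
    }

    public static void main(String[] args) throws IOException {
        final Path mixed = Path.of("D:\\tmp\\mixer\\3.pcm"); // path taken from the test above
        final long bytes = Files.size(mixed);
        System.out.println(durationMicros(bytes, 48_000, 1, 2) + " µs");
    }
}
```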