[+] Release media on signaling disconnect

acgist
2023-04-30 11:33:06 +08:00
parent 3cbbc8f936
commit 165655c4e6
89 changed files with 343 additions and 181 deletions

View File

@@ -6,7 +6,6 @@ import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.ColorStateList;
import android.content.res.Resources;
import android.graphics.Color;
import android.media.projection.MediaProjectionManager;
import android.os.Bundle;
import android.os.Handler;
@@ -27,7 +26,6 @@ import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.acgist.taoyao.client.databinding.ActivityMainBinding;
import com.acgist.taoyao.client.signal.Taoyao;
import com.acgist.taoyao.media.MediaManager;
import com.acgist.taoyao.media.VideoSourceType;
import com.acgist.taoyao.media.config.Config;
@@ -53,6 +51,8 @@ public class MainActivity extends AppCompatActivity implements Serializable {
protected void onCreate(Bundle bundle) {
Log.i(MainActivity.class.getSimpleName(), "onCreate");
super.onCreate(bundle);
// Force landscape orientation
// this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
this.requestPermission();
this.launchMediaService();
this.setTurnScreenOn(true);
@@ -216,7 +216,7 @@ public class MainActivity extends AppCompatActivity implements Serializable {
@Override
public void handleMessage(@NonNull Message message) {
super.handleMessage(message);
Log.d(MainHandler.class.getSimpleName(), "Handler message: " + message.what + " - " + message.obj);
Log.d(MainHandler.class.getSimpleName(), "Handler message: " + message.what);
switch (message.what) {
case Config.WHAT_SCREEN_CAPTURE -> MainActivity.this.screenCapture(message);
case Config.WHAT_RECORD -> MainActivity.this.record(message);
@@ -270,6 +270,9 @@ public class MainActivity extends AppCompatActivity implements Serializable {
final GridLayout video = this.binding.video;
final SurfaceView surfaceView = (SurfaceView) message.obj;
final int index = video.indexOfChild(surfaceView);
if(index < 0) {
return;
}
video.removeViewAt(index);
}
}
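The new indexOfChild() guard matters because ViewGroup.removeViewAt() can throw when handed the -1 that indexOfChild() returns once the SurfaceView is already detached; with the guard, a duplicate WHAT_REMOVE_VIDEO message becomes a no-op. A minimal sketch of that pattern using a hypothetical helper (only GridLayout and SurfaceView come from the surrounding code):

import android.view.SurfaceView;
import android.widget.GridLayout;

final class VideoViewRemover {

    // Remove the SurfaceView only if it is still attached to the grid,
    // so repeated remove requests are harmless.
    static void removeVideoView(GridLayout video, SurfaceView surfaceView) {
        final int index = video.indexOfChild(surfaceView);
        if (index < 0) {
            // Already removed (for example by an earlier close): nothing to do.
            return;
        }
        video.removeViewAt(index);
    }
}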

View File

@@ -184,13 +184,13 @@ public final class Taoyao implements ITaoyao {
*/
private final MediaManager mediaManager;
/**
* Room list
* Video room list
*/
private final Map<String, Room> rooms;
/**
* Session client list
* Video session list
*/
private final Map<String, SessionClient> sessionClients;
private final Map<String, SessionClient> sessions;
/**
* Global static variable
*/
@@ -240,7 +240,7 @@ public final class Taoyao implements ITaoyao {
this.heartbeatHandler.postDelayed(this::heartbeat, 30L * 1000);
this.mediaManager = MediaManager.getInstance();
this.rooms = new ConcurrentHashMap<>();
this.sessionClients = new ConcurrentHashMap<>();
this.sessions = new ConcurrentHashMap<>();
Taoyao.taoyao = this;
}
@@ -441,6 +441,24 @@ public final class Taoyao implements ITaoyao {
this.input = null;
this.output = null;
this.socket = null;
this.closeRoomMedia();
this.closeSessionMedia();
}
private void closeRoomMedia() {
Log.i(Taoyao.class.getSimpleName(), "Releasing all video rooms");
this.rooms.forEach((k, v) -> {
v.close();
});
this.rooms.clear();
}
private void closeSessionMedia() {
Log.i(Taoyao.class.getSimpleName(), "Releasing all video sessions");
this.sessions.forEach((k, v) -> {
v.close();
});
this.sessions.clear();
}
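The new closeRoomMedia() and closeSessionMedia() hooks give the signaling-disconnect path a single place to release every active room and session, which is what the commit title refers to. A minimal sketch of the same close-then-clear pattern over a ConcurrentHashMap, written against a hypothetical registry type (Room and SessionClient in the real code expose their own close()):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class MediaRegistry<T extends AutoCloseable> {

    private final Map<String, T> entries = new ConcurrentHashMap<>();

    void put(String id, T entry) {
        this.entries.put(id, entry);
    }

    // Called from the signaling-disconnect path: close every entry,
    // then drop the references so a reconnect starts from a clean state.
    void closeAll() {
        this.entries.forEach((id, entry) -> {
            try {
                entry.close();
            } catch (Exception e) {
                // One failing entry must not keep the others alive.
            }
        });
        this.entries.clear();
    }
}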
/**
@@ -457,7 +475,7 @@ public final class Taoyao implements ITaoyao {
this.messageThread.quitSafely();
this.executeThread.quitSafely();
this.rooms.values().forEach(Room::close);
this.sessionClients.values().forEach(SessionClient::close);
this.sessions.values().forEach(SessionClient::close);
}
/**
@@ -624,7 +642,7 @@ public final class Taoyao implements ITaoyao {
*/
private void clientConfig(Message message, Map<String, Object> body) {
final MediaProperties mediaProperties = JSONUtils.toJava(JSONUtils.toJSON(body.get("media")), MediaProperties.class);
this.mediaManager.updateMediaConfig(mediaProperties, mediaProperties.getAudio(), mediaProperties.getVideo());
this.mediaManager.updateMediaConfig(mediaProperties);
final WebrtcProperties webrtcProperties = JSONUtils.toJava(JSONUtils.toJSON(body.get("webrtc")), WebrtcProperties.class);
this.mediaManager.updateWebrtcConfig(webrtcProperties);
}
@@ -853,7 +871,7 @@ public final class Taoyao implements ITaoyao {
this.mediaManager.getMediaProperties(),
this.mediaManager.getWebrtcProperties()
);
this.sessionClients.put(sessionId, sessionClient);
this.sessions.put(sessionId, sessionClient);
}
);
}
@@ -877,14 +895,14 @@ public final class Taoyao implements ITaoyao {
this.mediaManager.getMediaProperties(),
this.mediaManager.getWebrtcProperties()
);
this.sessionClients.put(sessionId, sessionClient);
this.sessions.put(sessionId, sessionClient);
sessionClient.init();
sessionClient.offer();
}
private void sessionClose(Message message, Map<String, Object> body) {
final String sessionId = MapUtils.get(body, "sessionId");
final SessionClient sessionClient = this.sessionClients.remove(sessionId);
final SessionClient sessionClient = this.sessions.remove(sessionId);
if(sessionClient == null) {
return;
}
@@ -893,7 +911,7 @@ public final class Taoyao implements ITaoyao {
private void sessionExchange(Message message, Map<String, Object> body) {
final String sessionId = MapUtils.get(body, "sessionId");
final SessionClient sessionClient = this.sessionClients.get(sessionId);
final SessionClient sessionClient = this.sessions.get(sessionId);
if(sessionClient == null) {
Log.w(Taoyao.class.getSimpleName(), "Session exchange for unknown session: " + sessionId);
return;
@@ -903,7 +921,7 @@ public final class Taoyao implements ITaoyao {
private void sessionPause(Message message, Map<String, Object> body) {
final String sessionId = MapUtils.get(body, "sessionId");
final SessionClient sessionClient = this.sessionClients.get(sessionId);
final SessionClient sessionClient = this.sessions.get(sessionId);
if(sessionClient == null) {
return;
}
@@ -913,7 +931,7 @@ public final class Taoyao implements ITaoyao {
private void sessionResume(Message message, Map<String, Object> body) {
final String sessionId = MapUtils.get(body, "sessionId");
final SessionClient sessionClient = this.sessionClients.get(sessionId);
final SessionClient sessionClient = this.sessions.get(sessionId);
if(sessionClient == null) {
return;
}

View File

@@ -4,7 +4,6 @@ import android.content.Context;
import android.content.Intent;
import android.media.projection.MediaProjection;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import com.acgist.taoyao.media.client.PhotographClient;
@@ -109,14 +108,6 @@ public final class MediaManager {
* Media configuration
*/
private MediaProperties mediaProperties;
/**
* Current shared video configuration
*/
private MediaVideoProperties mediaVideoProperties;
/**
* Current shared audio configuration
*/
private MediaAudioProperties mediaAudioProperties;
/**
* WebRTC configuration
*/
@@ -291,7 +282,6 @@ public final class MediaManager {
PeerConnectionFactory.InitializationOptions.builder(this.context)
// .setFieldTrials("WebRTC-IntelVP8/Enabled/")
// .setFieldTrials("WebRTC-H264HighProfile/Enabled/")
// TODO: test whether the JavaVM needs to be loaded globally on the C++ side
// .setNativeLibraryName("jingle_peerconnection_so")
// .setEnableInternalTracer(true)
.createInitializationOptions()
@@ -433,9 +423,7 @@ public final class MediaManager {
}
private void initSharePromise() {
final Message message = new Message();
message.what = Config.WHAT_SCREEN_CAPTURE;
this.mainHandler.sendMessage(message);
this.mainHandler.obtainMessage(Config.WHAT_SCREEN_CAPTURE).sendToTarget();
}
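initSharePromise() now uses obtainMessage(...).sendToTarget(), which takes a Message from the global pool and already knows its target Handler, instead of allocating a new Message() and calling sendMessage() by hand. A small sketch of the two forms, with an illustrative constant standing in for Config.WHAT_SCREEN_CAPTURE:

import android.os.Handler;
import android.os.Message;

final class ScreenCaptureRequest {

    static final int WHAT_SCREEN_CAPTURE = 100; // illustrative constant

    // Form used after this commit: pooled message, target already set.
    static void requestPooled(Handler mainHandler) {
        mainHandler.obtainMessage(WHAT_SCREEN_CAPTURE).sendToTarget();
    }

    // Long-hand form removed by this commit; new Message() bypasses the message pool.
    static void requestManual(Handler mainHandler) {
        final Message message = new Message();
        message.what = WHAT_SCREEN_CAPTURE;
        mainHandler.sendMessage(message);
    }
}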
/**
@@ -460,7 +448,8 @@ public final class MediaManager {
this.mainVideoSource = this.peerConnectionFactory.createVideoSource(this.videoCapturer.isScreencast());
// Secondary (share) video stream
this.shareVideoSource = this.peerConnectionFactory.createVideoSource(this.videoCapturer.isScreencast());
this.shareVideoSource.adaptOutputFormat(this.mediaVideoProperties.getWidth(), this.mediaVideoProperties.getHeight(), this.mediaVideoProperties.getFrameRate());
final MediaVideoProperties mediaVideoProperties = this.mediaProperties.getVideo();
this.shareVideoSource.adaptOutputFormat(mediaVideoProperties.getWidth(), mediaVideoProperties.getHeight(), mediaVideoProperties.getFrameRate());
// Video capture
this.videoCapturer.initialize(this.surfaceTextureHelper, this.context, new VideoCapturerObserver());
// Secondary-stream video processing
@@ -474,25 +463,39 @@ public final class MediaManager {
* @param mediaAudioProperties audio configuration
* @param mediaVideoProperties video configuration
*/
public void updateMediaConfig(MediaProperties mediaProperties, MediaAudioProperties mediaAudioProperties, MediaVideoProperties mediaVideoProperties) {
public void updateMediaConfig(MediaProperties mediaProperties) {
this.mediaProperties = mediaProperties;
this.updateAudioConfig(this.mediaProperties.getAudio());
this.updateVideoConfig(this.mediaProperties.getVideo());
this.updateAudioConfig();
this.updateVideoConfig();
synchronized (this) {
this.notifyAll();
}
}
public void updateAudioConfig(MediaAudioProperties mediaAudioProperties) {
this.mediaAudioProperties = mediaAudioProperties;
this.mediaProperties.setAudio(mediaAudioProperties);
this.updateAudioConfig();
}
private void updateAudioConfig() {
MediaAudioProperties mediaAudioProperties = this.mediaProperties.getAudio();
// TODO: adjust audio settings
}
public void updateVideoConfig(MediaVideoProperties mediaVideoProperties) {
this.mediaVideoProperties = mediaVideoProperties;
if(this.shareVideoSource == null) {
return;
this.mediaProperties.setVideo(mediaVideoProperties);
this.updateVideoConfig();
}
private void updateVideoConfig() {
if(this.videoCapturer != null) {
final MediaVideoProperties mediaVideoProperties = this.mediaProperties.getVideos().get(this.videoQuantity);
this.videoCapturer.changeCaptureFormat(mediaVideoProperties.getWidth(), mediaVideoProperties.getHeight(), mediaVideoProperties.getFrameRate());
}
if(this.shareVideoSource != null) {
final MediaVideoProperties mediaVideoProperties = this.mediaProperties.getVideo();
this.shareVideoSource.adaptOutputFormat(mediaVideoProperties.getWidth(), mediaVideoProperties.getHeight(), mediaVideoProperties.getFrameRate());
}
this.shareVideoSource.adaptOutputFormat(this.mediaVideoProperties.getWidth(), this.mediaVideoProperties.getHeight(), this.mediaVideoProperties.getFrameRate());
}
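updateMediaConfig() now takes only the MediaProperties aggregate and derives the audio and video sub-configurations from it, instead of caching separate mediaAudioProperties/mediaVideoProperties fields that could drift out of sync with the aggregate. A simplified sketch of that single-source-of-truth shape (the nested config classes are stand-ins, not the real MediaProperties API):

final class MediaConfigHolder {

    static final class AudioConfig { int sampleRate; int bitrate; }

    static final class VideoConfig { int width; int height; int frameRate; }

    static final class MediaConfig { AudioConfig audio; VideoConfig video; }

    private volatile MediaConfig config;

    // Single entry point: the aggregate is the only stored configuration.
    void updateMediaConfig(MediaConfig config) {
        this.config = config;
        this.applyAudio(config.audio);
        this.applyVideo(config.video);
    }

    // Partial updates mutate the aggregate first, then re-apply it.
    void updateVideoConfig(VideoConfig video) {
        if (this.config == null) {
            return;
        }
        this.config.video = video;
        this.applyVideo(video);
    }

    private void applyAudio(AudioConfig audio) { /* push to the audio pipeline */ }

    private void applyVideo(VideoConfig video) { /* push to the capturer / video source */ }
}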
public void updateWebrtcConfig(WebrtcProperties webrtcProperties) {
@@ -658,24 +661,24 @@ public final class MediaManager {
public SurfaceViewRenderer buildSurfaceViewRenderer(final int flag, final VideoTrack videoTrack) {
// Preview widget
final SurfaceViewRenderer surfaceViewRenderer = new SurfaceViewRenderer(this.context);
// Attach the playback sink
videoTrack.addSink(surfaceViewRenderer);
// Hand off to the UI
this.mainHandler.obtainMessage(flag, surfaceViewRenderer).sendToTarget();
this.mainHandler.post(() -> {
// Mirror the video
surfaceViewRenderer.setMirror(false);
// surfaceViewRenderer.setMirror(false);
// Rotate the frame
// surfaceViewRenderer.setRotation(90);
// Video scaling
surfaceViewRenderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
// Hardware scaling
surfaceViewRenderer.setEnableHardwareScaler(true);
// surfaceViewRenderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
// Adjust the frame rate
// surfaceViewRenderer.setFpsReduction();
// Hardware scaling
// surfaceViewRenderer.setEnableHardwareScaler(true);
// Initialize the renderer with the shared EGL context
surfaceViewRenderer.init(this.eglContext, null);
// Attach the playback sink
videoTrack.addSink(surfaceViewRenderer);
});
// Hand off to the UI
final Message message = new Message();
message.obj = surfaceViewRenderer;
message.what = flag;
this.mainHandler.sendMessage(message);
return surfaceViewRenderer;
}
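buildSurfaceViewRenderer() now does all renderer setup on the main thread via Handler.post() and hands the view to the Activity with obtainMessage(flag, renderer).sendToTarget(). A sketch of that ordering, assuming an existing main-thread Handler, a shared EglBase.Context and a live VideoTrack (WHAT_NEW_VIDEO is an illustrative constant, not taken from the original code):

import android.content.Context;
import android.os.Handler;
import org.webrtc.EglBase;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoTrack;

final class RendererFactory {

    static final int WHAT_NEW_VIDEO = 200; // illustrative constant

    static SurfaceViewRenderer build(
        Context context, Handler mainHandler,
        EglBase.Context eglContext, VideoTrack videoTrack
    ) {
        final SurfaceViewRenderer renderer = new SurfaceViewRenderer(context);
        // Configure and initialize on the UI thread: SurfaceViewRenderer is a View.
        mainHandler.post(() -> {
            renderer.init(eglContext, null);
            videoTrack.addSink(renderer);
        });
        // Hand the view over to the Activity so it can be added to the layout.
        mainHandler.obtainMessage(WHAT_NEW_VIDEO, renderer).sendToTarget();
        return renderer;
    }
}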

View File

@@ -1,7 +1,6 @@
package com.acgist.taoyao.media.client;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import com.acgist.taoyao.media.config.Config;
@@ -77,13 +76,11 @@ public abstract class Client extends CloseableClient {
super.close();
Log.i(this.getClass().getSimpleName(), "Close client: " + this.clientId);
if(this.surfaceViewRenderer != null) {
// Remove
final Message message = new Message();
message.obj = surfaceViewRenderer;
message.what = Config.WHAT_REMOVE_VIDEO;
this.mainHandler.sendMessage(message);
// Destroy
// Release resources
this.surfaceViewRenderer.release();
// Remove the view: release first, then remove, to avoid errors
this.mainHandler.obtainMessage(Config.WHAT_REMOVE_VIDEO, this.surfaceViewRenderer).sendToTarget();
// Clear the reference
this.surfaceViewRenderer = null;
}
}
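Per the new comment, the renderer is released first and only then handed back to the UI thread for removal from the layout, to avoid errors during teardown. A minimal sketch of that ordering, with an illustrative message constant standing in for Config.WHAT_REMOVE_VIDEO:

import android.os.Handler;
import org.webrtc.SurfaceViewRenderer;

final class RendererCloser {

    static final int WHAT_REMOVE_VIDEO = 300; // illustrative constant

    // Release the renderer first, then ask the UI thread to remove the view.
    static void close(Handler mainHandler, SurfaceViewRenderer renderer) {
        if (renderer == null) {
            return;
        }
        renderer.release();
        mainHandler.obtainMessage(WHAT_REMOVE_VIDEO, renderer).sendToTarget();
    }
}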

View File

@@ -74,13 +74,18 @@ public class LocalClient extends RoomClient {
@Override
public void close() {
Log.i(RemoteClient.class.getSimpleName(), "Close local client: " + this.clientId);
super.close();
if(this.mediaStream == null) {
return;
}
synchronized (this.mediaStream) {
this.mediaStream.dispose();
synchronized (this) {
if(this.close) {
return;
}
super.close();
Log.i(RemoteClient.class.getSimpleName(), "Close local client: " + this.clientId);
if(this.mediaStream == null) {
return;
}
synchronized (this.mediaStream) {
this.mediaStream.dispose();
}
}
}
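LocalClient.close() here, and RemoteClient.close() in the next file, now wrap the whole body in synchronized(this) and return early if the close flag is already set, so the disconnect path added by this commit can safely call close() on a client that the session logic already closed. A minimal sketch of that idempotent-close shape (the close flag is assumed to be the one maintained in the CloseableClient base class):

abstract class IdempotentClient {

    // Mirrors the close flag assumed to be set by super.close() in CloseableClient.
    protected volatile boolean close = false;

    // Safe to call from both the session teardown and the signaling-disconnect path.
    public void close() {
        synchronized (this) {
            if (this.close) {
                return;
            }
            this.close = true;
            this.releaseMedia();
        }
    }

    // Subclasses dispose their MediaStream, tracks, renderers, and so on here.
    protected abstract void releaseMedia();
}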

View File

@@ -67,11 +67,16 @@ public class RemoteClient extends RoomClient {
@Override
public void close() {
Log.i(RemoteClient.class.getSimpleName(), "Close remote client: " + this.clientId);
super.close();
synchronized (this.tracks) {
// Note: released via nativeMediaConsumerClose
this.tracks.clear();
synchronized (this) {
if(this.close) {
return;
}
super.close();
Log.i(RemoteClient.class.getSimpleName(), "Close remote client: " + this.clientId);
synchronized (this.tracks) {
// Note: released via nativeMediaConsumerClose
this.tracks.clear();
}
}
}

View File

@@ -27,7 +27,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Room
* Video room
*
* @author acgist
*/

View File

@@ -365,21 +365,18 @@ public class SessionClient extends Client {
public void onSignalingChange(PeerConnection.SignalingState signalingState) {
Log.d(SessionClient.class.getSimpleName(), "PC signaling state changed: " + signalingState);
SessionClient.this.logState();
// TODO: handle failures
}
@Override
public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
Log.d(SessionClient.class.getSimpleName(), "PC ICE gathering state changed: " + iceGatheringState);
SessionClient.this.logState();
// TODO: handle failures
}
@Override
public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
Log.d(SessionClient.class.getSimpleName(), "PC ICE connection state changed: " + iceConnectionState);
SessionClient.this.logState();
// TODO: handle failures
}
@Override

View File

@@ -14,14 +14,17 @@ import android.media.MediaCodec;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaFormat;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import android.view.Surface;
import androidx.annotation.Nullable;
import org.webrtc.ThreadUtils.ThreadChecker;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.webrtc.ThreadUtils.ThreadChecker;
/**
* Android hardware video decoder.

View File

@@ -11,10 +11,13 @@
package org.webrtc;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@SuppressWarnings("deprecation")
public class Camera1Enumerator implements CameraEnumerator {

View File

@@ -13,11 +13,13 @@ package org.webrtc;
import android.content.Context;
import android.os.Handler;
import android.os.SystemClock;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@SuppressWarnings("deprecation")
class Camera1Session implements CameraSession {

View File

@@ -13,6 +13,7 @@ package org.webrtc;
import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import androidx.annotation.Nullable;
@TargetApi(21)

View File

@@ -20,14 +20,17 @@ import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Build;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import android.util.AndroidException;
import android.util.Range;
import androidx.annotation.Nullable;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerator {

View File

@@ -21,13 +21,16 @@ import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.os.Handler;
import androidx.annotation.Nullable;
import android.util.Range;
import android.view.Surface;
import androidx.annotation.Nullable;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@TargetApi(21)
class Camera2Session implements CameraSession {

View File

@@ -13,7 +13,9 @@ package org.webrtc;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.List;

View File

@@ -13,6 +13,7 @@ package org.webrtc;
import static java.lang.Math.abs;
import android.graphics.ImageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;

View File

@@ -12,8 +12,8 @@ package org.webrtc;
import android.content.Context;
import android.graphics.Matrix;
import android.view.WindowManager;
import android.view.Surface;
import android.view.WindowManager;
interface CameraSession {
enum FailureType { ERROR, DISCONNECTED }

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.LinkedHashSet;

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.LinkedHashSet;

View File

@@ -11,9 +11,12 @@
package org.webrtc;
import android.graphics.SurfaceTexture;
import androidx.annotation.Nullable;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import javax.microedition.khronos.egl.EGL10;
/**

View File

@@ -13,9 +13,11 @@ package org.webrtc;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import androidx.annotation.Nullable;
import android.view.Surface;
import android.view.SurfaceHolder;
import androidx.annotation.Nullable;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;

View File

@@ -19,9 +19,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.os.Build;
import androidx.annotation.Nullable;
import android.view.Surface;
import org.webrtc.EglBase;
import androidx.annotation.Nullable;
/**
* Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,

View File

@@ -18,8 +18,10 @@ import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import androidx.annotation.Nullable;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import java.text.DecimalFormat;
import java.util.ArrayList;

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;

View File

@@ -12,6 +12,7 @@ package org.webrtc;
import android.content.Context;
import android.os.SystemClock;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;

View File

@@ -12,11 +12,10 @@ package org.webrtc;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import androidx.annotation.Nullable;
import java.nio.FloatBuffer;
import org.webrtc.GlShader;
import org.webrtc.GlUtil;
import org.webrtc.RendererCommon;
/**
* Helper class to implement an instance of RendererCommon.GlDrawer that can accept multiple input

View File

@@ -12,6 +12,7 @@ package org.webrtc;
import android.opengl.GLES20;
import android.opengl.GLException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

View File

@@ -10,8 +10,8 @@
package org.webrtc;
import java.util.Map;
import java.util.HashMap;
import java.util.Map;
/** Container for static helper functions related to dealing with H264 codecs. */
class H264Utils {

View File

@@ -11,8 +11,8 @@
package org.webrtc;
import android.media.MediaCodecInfo;
import androidx.annotation.Nullable;
import java.util.Arrays;
/** Factory for Android hardware VideoDecoders. */
public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {

View File

@@ -16,15 +16,18 @@ import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Bundle;
import androidx.annotation.Nullable;
import android.view.Surface;
import androidx.annotation.Nullable;
import org.webrtc.ThreadUtils.ThreadChecker;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.webrtc.ThreadUtils.ThreadChecker;
/**
* Android hardware video encoder.

View File

@@ -17,7 +17,9 @@ import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

View File

@@ -11,8 +11,8 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.Arrays;
import org.webrtc.PeerConnection;
/**
* Representation of a single ICE Candidate, mirroring

View File

@@ -10,8 +10,6 @@
package org.webrtc;
import org.webrtc.CalledByNative;
import org.webrtc.Loggable;
import org.webrtc.Logging.Severity;
class JNILogging {

View File

@@ -11,9 +11,11 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import org.webrtc.VideoFrame.I420Buffer;
import java.nio.ByteBuffer;
/** Implementation of VideoFrame.I420Buffer backed by Java direct byte buffers. */
public class JavaI420Buffer implements VideoFrame.I420Buffer {
private final int width;

View File

@@ -11,12 +11,12 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.EnumSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.webrtc.Loggable;
/**
* Java wrapper for WebRTC logging. Logging defaults to java.util.logging.Logger, but a custom

View File

@@ -14,7 +14,9 @@ import android.annotation.TargetApi;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.HashMap;
import java.util.Map;

View File

@@ -17,7 +17,9 @@ import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;

View File

@@ -15,6 +15,7 @@ import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Bundle;
import android.view.Surface;
import java.nio.ByteBuffer;
/**

View File

@@ -17,6 +17,7 @@ import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Bundle;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
public class NV12Buffer implements VideoFrame.Buffer {

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
public class NV21Buffer implements VideoFrame.Buffer {

View File

@@ -11,8 +11,6 @@
package org.webrtc;
import androidx.annotation.Nullable;
import org.webrtc.VideoFrame;
import org.webrtc.VideoProcessor;
/**
* This class is meant to be a simple layer that only handles the JNI wrapping of a C++

View File

@@ -10,8 +10,6 @@
package org.webrtc;
import org.webrtc.VideoFrame;
/**
* Used from native api and implements a simple VideoCapturer.CapturerObserver that feeds frames to
* a webrtc::jni::AndroidVideoTrackSource.

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.List;
/** Interface for detecting network changes */

View File

@@ -12,10 +12,11 @@ package org.webrtc;
import android.content.Context;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.NetworkChangeDetector;
/**
* Borrowed from Chromium's

View File

@@ -28,8 +28,10 @@ import android.net.wifi.WifiManager;
import android.net.wifi.p2p.WifiP2pGroup;
import android.net.wifi.p2p.WifiP2pManager;
import android.os.Build;
import androidx.annotation.Nullable;
import android.telephony.TelephonyManager;
import androidx.annotation.Nullable;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;

View File

@@ -11,16 +11,13 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.webrtc.CandidatePairChangeEvent;
import org.webrtc.DataChannel;
import org.webrtc.MediaStreamTrack;
import org.webrtc.RtpTransceiver;
/**
* Java-land version of the PeerConnection APIs; wraps the C++ API

View File

@@ -12,13 +12,15 @@ package org.webrtc;
import android.content.Context;
import android.os.Process;
import androidx.annotation.Nullable;
import java.util.List;
import org.webrtc.Logging.Severity;
import org.webrtc.PeerConnection;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import java.util.List;
/**
* Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to
* the PeerConnection API for clients.

View File

@@ -11,8 +11,8 @@
package org.webrtc;
import android.media.MediaCodecInfo;
import androidx.annotation.Nullable;
import java.util.Arrays;
/** Factory for Android platform software VideoDecoders. */
public class PlatformSoftwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.concurrent.atomic.AtomicInteger;
/**

View File

@@ -10,8 +10,6 @@
package org.webrtc;
import org.webrtc.PeerConnection;
/**
* Easily storable/serializable version of a native C++ RTCCertificatePEM.
*/

View File

@@ -11,11 +11,9 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.lang.Double;
import java.lang.String;
import java.util.List;
import java.util.Map;
import org.webrtc.MediaStreamTrack;
/**
* The parameters for an {@code RtpSender}, as defined in

View File

@@ -11,7 +11,6 @@
package org.webrtc;
import androidx.annotation.Nullable;
import org.webrtc.MediaStreamTrack;
/** Java wrapper for a C++ RtpReceiverInterface. */
public class RtpReceiver {

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.List;
/** Java wrapper for a C++ RtpSenderInterface. */

View File

@@ -13,8 +13,6 @@ package org.webrtc;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.webrtc.MediaStreamTrack;
import org.webrtc.RtpParameters;
/**
* Java wrapper for a C++ RtpTransceiverInterface.

View File

@@ -18,9 +18,10 @@ import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import androidx.annotation.Nullable;
import android.view.Surface;
import androidx.annotation.Nullable;
/**
* An implementation of VideoCapturer to capture the screen content as a video stream.
* Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import android.view.SurfaceHolder;
import java.util.concurrent.CountDownLatch;
/**

View File

@@ -17,12 +17,15 @@ import android.opengl.GLES20;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import androidx.annotation.Nullable;
import java.util.concurrent.Callable;
import org.webrtc.EglBase.Context;
import org.webrtc.TextureBufferImpl.RefCountMonitor;
import org.webrtc.VideoFrame.TextureBuffer;
import java.util.concurrent.Callable;
/**
* Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
* VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only

View File

@@ -12,6 +12,7 @@ package org.webrtc;
import android.graphics.Matrix;
import android.os.Handler;
import androidx.annotation.Nullable;
/**

View File

@@ -13,7 +13,9 @@ package org.webrtc;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

View File

@@ -11,6 +11,7 @@
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.Locale;
import java.util.Map;

View File

@@ -10,8 +10,6 @@
package org.webrtc;
import org.webrtc.VideoDecoder;
/**
* This class contains the Java glue code for JNI generation of VideoDecoder.
*/

View File

@@ -11,7 +11,6 @@
package org.webrtc;
import androidx.annotation.Nullable;
import org.webrtc.EncodedImage;
/**
* Interface for a video encoder that can be used with WebRTC. All calls will be made on the

View File

@@ -11,8 +11,8 @@
package org.webrtc;
// Explicit imports necessary for JNI generation.
import androidx.annotation.Nullable;
import org.webrtc.VideoEncoder;
/**
* This class contains the Java glue code for JNI generation of VideoEncoder.

View File

@@ -12,6 +12,7 @@ package org.webrtc;
import android.os.Handler;
import android.os.HandlerThread;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

View File

@@ -13,6 +13,7 @@ package org.webrtc;
import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
/**

View File

@@ -13,7 +13,9 @@ package org.webrtc;
import android.graphics.Matrix;
import android.graphics.Point;
import android.opengl.GLES20;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
/**

View File

@@ -13,11 +13,14 @@ package org.webrtc;
import android.graphics.Matrix;
import android.opengl.GLES20;
import android.opengl.GLException;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import org.webrtc.VideoFrame.I420Buffer;
import org.webrtc.VideoFrame.TextureBuffer;
import java.nio.ByteBuffer;
/**
* Class for converting OES textures to a YUV ByteBuffer. It can be constructed on any thread, but
* should only be operated from a single thread with an active EGL context.

View File

@@ -15,11 +15,14 @@ import android.media.AudioAttributes;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.os.Build;
import androidx.annotation.RequiresApi;
import java.util.concurrent.ScheduledExecutorService;
import org.webrtc.JniCommon;
import org.webrtc.Logging;
import java.util.concurrent.ScheduledExecutorService;
/**
* AudioDeviceModule implemented using android.media.AudioRecord as input and
* android.media.AudioTrack as output.

View File

@@ -12,6 +12,7 @@ package org.webrtc.audio;
import android.media.AudioTrack;
import android.os.Build;
import org.webrtc.Logging;
// Lowers the buffer size if no underruns are detected for 100 ms. Once an

View File

@@ -11,10 +11,13 @@
package org.webrtc.audio;
import android.media.AudioManager;
import androidx.annotation.Nullable;
import org.webrtc.Logging;
import java.util.Timer;
import java.util.TimerTask;
import org.webrtc.Logging;
// TODO(magjed): Do we really need to spawn a new thread just to log volume? Can we re-use the
// AudioTrackThread instead?

View File

@@ -15,10 +15,13 @@ import android.media.audiofx.AudioEffect;
import android.media.audiofx.AudioEffect.Descriptor;
import android.media.audiofx.NoiseSuppressor;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.UUID;
import org.webrtc.Logging;
import java.util.UUID;
// This class wraps control of three different platform effects. Supported
// effects are: AcousticEchoCanceler (AEC) and NoiseSuppressor (NS).
// Calling enable() will active all effects that are

View File

@@ -17,8 +17,9 @@ import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.os.Build;
import org.webrtc.Logging;
import org.webrtc.CalledByNative;
import org.webrtc.Logging;
/**
* This class contains static functions to query sample rate and input/output audio buffer sizes.

View File

@@ -20,9 +20,18 @@ import android.media.AudioRecordingConfiguration;
import android.media.MediaRecorder.AudioSource;
import android.os.Build;
import android.os.Process;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import java.lang.System;
import org.webrtc.CalledByNative;
import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Iterator;
@@ -35,13 +44,6 @@ import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.webrtc.CalledByNative;
import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
class WebRtcAudioRecord {
private static final String TAG = "WebRtcAudioRecordExternal";

View File

@@ -18,15 +18,17 @@ import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Build;
import android.os.Process;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import org.webrtc.CalledByNative;
import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
import org.webrtc.audio.LowLatencyAudioBufferManager;
import java.nio.ByteBuffer;
class WebRtcAudioTrack {
private static final String TAG = "WebRtcAudioTrackExternal";

View File

@@ -23,10 +23,11 @@ import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.MediaRecorder.AudioSource;
import android.os.Build;
import java.lang.Thread;
import java.util.Arrays;
import org.webrtc.Logging;
import java.util.Arrays;
final class WebRtcAudioUtils {
private static final String TAG = "WebRtcAudioUtilsExternal";

View File

@@ -15,10 +15,13 @@ import android.media.audiofx.AudioEffect;
import android.media.audiofx.AudioEffect.Descriptor;
import android.media.audiofx.NoiseSuppressor;
import android.os.Build;
import androidx.annotation.Nullable;
import org.webrtc.Logging;
import java.util.List;
import java.util.UUID;
import org.webrtc.Logging;
// This class wraps control of three different platform effects. Supported
// effects are: AcousticEchoCanceler (AEC) and NoiseSuppressor (NS).

View File

@@ -17,12 +17,15 @@ import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.Timer;
import java.util.TimerTask;
import org.webrtc.ContextUtils;
import org.webrtc.Logging;
import java.util.Timer;
import java.util.TimerTask;
// WebRtcAudioManager handles tasks that uses android.media.AudioManager.
// At construction, storeAudioParameters() is called and it retrieves
// fundamental audio parameters like native sample rate and number of channels.

View File

@@ -15,13 +15,15 @@ import android.media.AudioRecord;
import android.media.MediaRecorder.AudioSource;
import android.os.Build;
import android.os.Process;
import androidx.annotation.Nullable;
import java.lang.System;
import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
public class WebRtcAudioRecord {
private static final boolean DEBUG = false;

View File

@@ -18,13 +18,15 @@ import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Build;
import android.os.Process;
import androidx.annotation.Nullable;
import java.lang.Thread;
import java.nio.ByteBuffer;
import org.webrtc.ContextUtils;
import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import java.nio.ByteBuffer;
public class WebRtcAudioTrack {
private static final boolean DEBUG = false;

View File

@@ -20,12 +20,13 @@ import android.content.pm.PackageManager;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.os.Build;
import java.lang.Thread;
import java.util.Arrays;
import java.util.List;
import org.webrtc.ContextUtils;
import org.webrtc.Logging;
import java.util.Arrays;
import java.util.List;
public final class WebRtcAudioUtils {
private static final String TAG = "WebRtcAudioUtils";