[+] Share native WebRTC

This commit is contained in:
acgist
2025-11-13 15:01:02 +08:00
parent 95398a0f64
commit f9de241e09
9 changed files with 270 additions and 6 deletions

.gitignore vendored
View File

@@ -9,6 +9,8 @@ build
 target
 node_modules
+.idea
 .vscode
 package-lock.json

View File

@@ -15,6 +15,8 @@
 * [libmediasoupclient docs](https://mediasoup.org/documentation/v3/libmediasoupclient)
 * [libmediasoupclient API](https://mediasoup.org/documentation/v3/libmediasoupclient/api)
+> Branch: `3.4.2`
 ## Project configuration
 You can build the `WebRTC` dependency yourself or download a prebuilt one; after importing the project, copy the `libmediasoupclient` source code together with the `WebRTC` header files and libraries into the `deps` directory.

View File

@@ -48,6 +48,6 @@
<string name="videoFile"></string> <string name="videoFile"></string>
<!-- 水印 --> <!-- 水印 -->
<string name="watermark">"'TAOYAO' yyyy-MM-dd HH:mm:ss"</string> <string name="watermark">"'TAOYAO' yyyy-MM-dd HH:mm:ss"</string>
<!-- 视频来源FILE|BACK|FRONT|SCREEN --> <!-- 视频来源FILE|BACK|FRONT|SCREEN|SHARE -->
<string name="videoSourceType">BACK</string> <string name="videoSourceType">BACK</string>
</resources> </resources>
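The `videoSourceType` resource above selects which capturer `MediaManager` initializes at startup, typically read with `context.getString(R.string.videoSourceType)`. A minimal sketch of resolving such a value onto the `VideoSourceType` enum; the constant names and the fallback to `BACK` are assumptions based on the resource comment above, not code taken from this commit:

```java
/**
 * Sketch only: assumes the enum constant names match the values listed in the
 * resource comment (FILE|BACK|FRONT|SCREEN|SHARE); the import of
 * VideoSourceType is omitted because its package is not shown in this commit.
 */
public final class VideoSourceTypeConfig {

    /**
     * @param value content of the videoSourceType string resource, e.g. "BACK"
     *
     * @return resolved source type, falling back to the rear camera
     */
    public static VideoSourceType parse(String value) {
        try {
            return VideoSourceType.valueOf(value.trim());
        } catch (IllegalArgumentException e) {
            // Unknown or misspelled value: fall back to the rear camera.
            return VideoSourceType.BACK;
        }
    }

}
```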

View File

@@ -1,8 +1,5 @@
-#Mon Mar 20 09:51:37 CST 2023
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://mirrors.cloud.tencent.com/gradle/gradle-7.5-bin.zip
+distributionUrl=https\://mirrors.cloud.tencent.com/gradle/gradle-8.5-bin.zip
-#distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-bin.zip
-#distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists

View File

@@ -0,0 +1,16 @@
# Directory structure
```
├─deps
│  ├─libmediasoupclient
│  │      libmediasoupclient source code
│  └─webrtc
│      ├─lib
│      │  └─arm64-v8a
│      │          libwebrtc.a
│      │          WebRTC static library
│      └─src
│          ├─api
│          │      WebRTC header files
```

View File

@@ -16,6 +16,7 @@ import com.acgist.taoyao.media.config.MediaProperties;
 import com.acgist.taoyao.media.config.MediaVideoProperties;
 import com.acgist.taoyao.media.config.WebrtcProperties;
 import com.acgist.taoyao.media.signal.ITaoyao;
+import com.acgist.taoyao.media.video.ShareVideoCapturer;
 import com.acgist.taoyao.media.video.VideoProcesser;
 import com.acgist.taoyao.media.video.WatermarkProcesser;
@@ -491,6 +492,8 @@ public final class MediaManager {
             this.initCameraCapturer();
         } else if (this.videoSourceType == VideoSourceType.SCREEN) {
             this.initScreenCapturerPromise();
+        } else if (this.videoSourceType == VideoSourceType.SHARE) {
+            this.initShareCapturer();
         } else {
             // Other sources
         }
@@ -544,6 +547,14 @@ public final class MediaManager {
         }
     }
+    /**
+     * Initialize the local share capturer.
+     */
+    private void initShareCapturer() {
+        this.videoCapturer = new ShareVideoCapturer();
+        this.initVideoSource();
+    }
     /**
      * Initialize the screen capturer.
      *
@@ -1050,6 +1061,18 @@ public final class MediaManager {
     }
+    public void addShare(MediaStream mediaStream) {
+        if (this.videoSourceType == VideoSourceType.SHARE) {
+            ((ShareVideoCapturer) this.videoCapturer).addSource(mediaStream);
+        }
+    }
+    public void removeShare(MediaStream mediaStream) {
+        if (this.videoSourceType == VideoSourceType.SHARE) {
+            ((ShareVideoCapturer) this.videoCapturer).removeSource(mediaStream);
+        }
+    }
     /**
      * Initialize MediasoupClient.
      */

View File

@@ -22,7 +22,13 @@ public enum VideoSourceType {
     /**
      * Screen sharing: ScreenCapturerAndroid
      */
-    SCREEN;
+    SCREEN,
+    /**
+     * Local share: ShareVideoCapturer
+     *
+     * Note: this mode is for testing only; many features are not supported.
+     */
+    SHARE;
     /**
      * @return whether this source is a camera

View File

@@ -492,6 +492,7 @@ public class SessionClient extends Client {
             SessionClient.this.remoteMediaStream = mediaStream;
             SessionClient.this.playAudio();
             SessionClient.this.playVideo();
+            SessionClient.this.mediaManager.addShare(mediaStream);
         }
         @Override
@@ -499,6 +500,7 @@ public class SessionClient extends Client {
             Log.i(SessionClient.class.getSimpleName(), "Removing remote media: " + SessionClient.this.clientId);
+            // Unregister from the share capturer before the stream is disposed.
+            SessionClient.this.mediaManager.removeShare(mediaStream);
             mediaStream.dispose();
             SessionClient.this.remoteMediaStream = null;
         }
         @Override

View File

@@ -0,0 +1,216 @@
package com.acgist.taoyao.media.video;

import android.content.Context;
import android.util.Log;

import org.webrtc.CapturerObserver;
import org.webrtc.JavaI420Buffer;
import org.webrtc.MediaStream;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;
import org.webrtc.VideoTrack;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Video capturer that composites the latest frame of every registered remote
 * video track into a single I420 frame (a fixed 2x2 grid) and feeds it to the
 * local video source.
 *
 * Note: this is only a functional proof of concept; resource handling is still incomplete.
 */
public class ShareVideoCapturer implements VideoCapturer {

    /**
     * Scratch buffer used when copying plane rows (large enough for one row).
     */
    private final byte[] bytes = new byte[1024 * 1024];
    /**
     * Capture loop flag.
     */
    private volatile boolean running = false;
    /**
     * Observer provided by the local video source.
     */
    private CapturerObserver capturerObserver;
    /**
     * Latest pending frame of every registered track.
     */
    private final Map<VideoTrack, VideoFrame> frames = new HashMap<>();
    /**
     * Sinks registered on remote tracks, kept so they can be removed again.
     */
    private final Map<VideoTrack, VideoSink> sinks = new HashMap<>();

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext, CapturerObserver capturerObserver) {
        this.capturerObserver = capturerObserver;
    }

    @Override
    public void startCapture(int width, int height, int framerate) {
        this.running = true;
        final Thread thread = new Thread(() -> {
            // Fixed 2x2 grid: every tile gets a quarter of the output frame.
            final int col     = 2;
            final int row     = 2;
            final int stride  = width;
            final int width_  = width  / col;
            final int height_ = height / row;
            Log.d(ShareVideoCapturer.class.getSimpleName(), "Source width : " + width);
            Log.d(ShareVideoCapturer.class.getSimpleName(), "Source height: " + height);
            Log.d(ShareVideoCapturer.class.getSimpleName(), "Tile width   : " + width_);
            Log.d(ShareVideoCapturer.class.getSimpleName(), "Tile height  : " + height_);
            // Output I420 buffer reused for every composited frame. The chroma planes
            // deliberately use the luma stride, so they are over-allocated but valid.
            final JavaI420Buffer buffer = JavaI420Buffer.wrap(
                width, height,
                ByteBuffer.allocateDirect(width * height),     stride,
                ByteBuffer.allocateDirect(width * height / 2), stride,
                ByteBuffer.allocateDirect(width * height / 2), stride,
                null
            );
            // Fill with 128 so unused tiles stay mid gray.
            this.clearBuffer(buffer);
            final List<VideoFrame.I420Buffer> buffers = new ArrayList<>();
            while (ShareVideoCapturer.this.running) {
                synchronized (ShareVideoCapturer.this.frames) {
                    do {
                        try {
                            // Roughly 25 fps
                            ShareVideoCapturer.this.frames.wait(1000 / 25);
                        } catch (Exception e) {
                            Log.e(ShareVideoCapturer.class.getSimpleName(), "Wait failure", e);
                        }
                    } while (ShareVideoCapturer.this.frames.isEmpty() && ShareVideoCapturer.this.running);
                    if (!ShareVideoCapturer.this.running) {
                        break;
                    }
                    // Scale every pending frame down to tile size.
                    ShareVideoCapturer.this.frames.forEach((k, v) -> {
                        final VideoFrame.Buffer     c = v.getBuffer();
                        final VideoFrame.Buffer     o = c.cropAndScale(0, 0, c.getWidth(), c.getHeight(), width_, height_);
                        final VideoFrame.I420Buffer x = o.toI420();
                        // If every stream were forwarded separately:
                        // buffer.getDataY().put(x.getDataY());
                        // buffer.getDataU().put(x.getDataU());
                        // buffer.getDataV().put(x.getDataV());
                        buffers.add(x);
                        o.release();
                        v.release();
                    });
                    // Place the tiles on the grid from left to right, top to bottom.
                    for (int i = 0; i < buffers.size(); i++) {
                        final int row_ = i / col;
                        final int col_ = i % col;
                        final int dstX = col_ * width_;
                        final int dstY = row_ * height_;
                        ShareVideoCapturer.this.copyBuffer(buffers.get(i), buffer, dstX, dstY);
                        buffers.get(i).release();
                    }
                    buffers.clear();
                    ShareVideoCapturer.this.frames.clear();
                    final VideoFrame frame = new VideoFrame(
                        buffer,
                        0,
                        System.nanoTime()
                    );
                    ShareVideoCapturer.this.capturerObserver.onFrameCaptured(frame);
                }
            }
            buffer.release();
            // Release any frames that arrived after the loop ended.
            synchronized (ShareVideoCapturer.this.frames) {
                ShareVideoCapturer.this.frames.forEach((k, v) -> v.release());
                ShareVideoCapturer.this.frames.clear();
            }
        });
        thread.setName("SHARE-VIDEO-CAPTURER");
        thread.setDaemon(true);
        thread.start();
    }

    /**
     * Fills all three planes with 128 (mid gray).
     */
    private void clearBuffer(VideoFrame.I420Buffer buffer) {
        this.clearBuffer(buffer.getDataY());
        this.clearBuffer(buffer.getDataU());
        this.clearBuffer(buffer.getDataV());
    }

    private void clearBuffer(ByteBuffer buffer) {
        while (buffer.hasRemaining()) {
            buffer.put((byte) 128);
        }
    }

    /**
     * Copies a scaled tile into the output buffer at the given position.
     */
    private void copyBuffer(
        VideoFrame.I420Buffer src,
        VideoFrame.I420Buffer dst,
        int dstX,
        int dstY
    ) {
        final int width  = src.getWidth();
        final int height = src.getHeight();
        // Copy the Y plane
        this.copyPlane(
            src.getDataY(), src.getStrideY(),
            dst.getDataY(), dst.getStrideY(),
            width, height, dstX, dstY
        );
        // Copy the U plane (chroma planes are subsampled by two)
        final int uvWidth  = (width  + 1) / 2;
        final int uvHeight = (height + 1) / 2;
        this.copyPlane(
            src.getDataU(), src.getStrideU(),
            dst.getDataU(), dst.getStrideU(),
            uvWidth, uvHeight, dstX / 2, dstY / 2
        );
        // Copy the V plane
        this.copyPlane(
            src.getDataV(), src.getStrideV(),
            dst.getDataV(), dst.getStrideV(),
            uvWidth, uvHeight, dstX / 2, dstY / 2
        );
    }

    /**
     * Copies one plane row by row, honouring both strides and the target offset.
     */
    private void copyPlane(
        ByteBuffer src, int srcStride,
        ByteBuffer dst, int dstStride,
        int width, int height, int dstX, int dstY
    ) {
        for (int y = 0; y < height; y++) {
            final int srcPos = y * srcStride;
            final int dstPos = (dstY + y) * dstStride + dstX;
            src.position(srcPos);
            src.get(this.bytes, 0, width);
            dst.position(dstPos);
            dst.put(this.bytes, 0, width);
        }
    }

    @Override
    public void stopCapture() throws InterruptedException {
        this.running = false;
        synchronized (this.frames) {
            // Wake the capture thread so it can terminate.
            this.frames.notifyAll();
        }
    }

    @Override
    public void changeCaptureFormat(int width, int height, int framerate) {
    }

    @Override
    public void dispose() {
        this.running = false;
    }

    @Override
    public boolean isScreencast() {
        return false;
    }

    /**
     * Registers all video tracks of a remote media stream as tile sources.
     */
    public void addSource(MediaStream mediaStream) {
        mediaStream.videoTracks.forEach(track -> {
            final VideoSink sink = frame -> {
                synchronized (ShareVideoCapturer.this.frames) {
                    if (ShareVideoCapturer.this.running) {
                        // Keep only the latest frame per track.
                        frame.retain();
                        final VideoFrame old = ShareVideoCapturer.this.frames.put(track, frame);
                        if (old != null) {
                            old.release();
                        }
                    }
                }
            };
            synchronized (this.frames) {
                this.sinks.put(track, sink);
            }
            track.addSink(sink);
        });
    }

    /**
     * Unregisters all video tracks of a remote media stream and drops their pending frames.
     */
    public void removeSource(MediaStream mediaStream) {
        mediaStream.videoTracks.forEach(track -> {
            synchronized (ShareVideoCapturer.this.frames) {
                final VideoSink sink = ShareVideoCapturer.this.sinks.remove(track);
                if (sink != null) {
                    track.removeSink(sink);
                }
                final VideoFrame old = ShareVideoCapturer.this.frames.remove(track);
                if (old != null) {
                    old.release();
                }
            }
        });
    }

}
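For reference, a minimal sketch of how a capturer like `ShareVideoCapturer` is wired to a WebRTC Android video source; the helper class name, the track id and the 1280x720@25 capture format are illustrative assumptions, not values taken from this commit:

```java
import android.content.Context;

import com.acgist.taoyao.media.video.ShareVideoCapturer;

import org.webrtc.EglBase;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;

/**
 * Wiring sketch only: class name, track id and capture format are assumptions.
 */
public final class ShareCaptureSketch {

    public static VideoTrack startShareCapture(
        Context context,
        PeerConnectionFactory factory,
        EglBase eglBase,
        ShareVideoCapturer capturer
    ) {
        // The helper provides the capture thread; the source exposes the observer
        // that ShareVideoCapturer feeds composited frames into.
        final SurfaceTextureHelper helper = SurfaceTextureHelper.create("SHARE-THREAD", eglBase.getEglBaseContext());
        final VideoSource videoSource     = factory.createVideoSource(capturer.isScreencast());
        capturer.initialize(helper, context, videoSource.getCapturerObserver());
        capturer.startCapture(1280, 720, 25);
        return factory.createVideoTrack("taoyao-video", videoSource);
    }

    public static void onRemoteStream(ShareVideoCapturer capturer, MediaStream mediaStream, boolean added) {
        // Remote streams are mixed into (or dropped from) the shared grid,
        // mirroring what MediaManager.addShare/removeShare do above.
        if (added) {
            capturer.addSource(mediaStream);
        } else {
            capturer.removeSource(mediaStream);
        }
    }

}
```

In this commit the equivalent wiring happens inside `MediaManager.initShareCapturer()` and `initVideoSource()`, with `SessionClient` forwarding remote streams through `addShare`/`removeShare`.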
}