[*] Adjust the recording, stream-pulling and photograph flow

acgist
2023-04-16 16:05:14 +08:00
parent a08462a3f2
commit b8a007792c
3 changed files with 151 additions and 166 deletions

MediaManager.java

@@ -171,10 +171,6 @@ public final class MediaManager {
* Recording client
*/
private RecordClient recordClient;
/**
* Photograph client
*/
private PhotographClient photographClient;
/**
* Video source
*/
@@ -287,7 +283,6 @@ public final class MediaManager {
/**
* Close one client
* When the last client is closed, all resources are released.
*
* @return the number of remaining clients
*/
@@ -296,11 +291,12 @@ public final class MediaManager {
this.clientCount--;
if (this.clientCount <= 0) {
Log.i(MediaManager.class.getSimpleName(), "释放PeerConnectionFactory");
// Note: the release order matters
this.stopVideoCapture();
this.closeAudio();
this.closeMainVideo();
this.closeShareVideo();
this.closeMedia();
this.stopVideoCapture();
this.nativeStop();
this.stopPeerConnectionFactory();
}
@@ -602,17 +598,16 @@ public final class MediaManager {
public String photograph() {
synchronized (this) {
String filepath;
final MediaVideoProperties mediaVideoProperties = this.mediaProperties.getVideos().get(this.videoQuantity);
final PhotographClient photographClient = new PhotographClient(this.imageQuantity, this.imagePath);
if(this.clientCount <= 0) {
filepath = photographClient.photograph(mediaVideoProperties.getWidth(), mediaVideoProperties.getHeight(), VideoSourceType.BACK, this.context);
final MediaVideoProperties mediaVideoProperties = this.mediaProperties.getVideos().get(this.videoQuantity);
photographClient.photograph(mediaVideoProperties.getWidth(), mediaVideoProperties.getHeight(), mediaVideoProperties.getFrameRate(), VideoSourceType.BACK, this.context);
filepath = photographClient.waitForPhotograph();
} else {
this.photographClient = photographClient;
this.mainVideoTrack.addSink(this.photographClient);
filepath = this.photographClient.waitForPhotograph();
this.mainVideoTrack.removeSink(this.photographClient);
this.mainVideoTrack.addSink(photographClient);
filepath = photographClient.waitForPhotograph();
this.mainVideoTrack.removeSink(photographClient);
}
this.photographClient = null;
return filepath;
}
}
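Because photograph() synchronizes on the MediaManager and blocks in waitForPhotograph() for up to five seconds, callers should invoke it off the UI thread. A minimal caller sketch, not part of this commit (mediaManager stands for however the application obtains its MediaManager instance):
// Hypothetical usage: blocks until the JPEG has been written or the wait times out.
new Thread(() -> {
    final String filepath = mediaManager.photograph();
    Log.i("Photograph", "Photo saved: " + filepath);
}, "PhotographCaller").start();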
@@ -765,6 +760,7 @@ public final class MediaManager {
@Override
public void onFrameCaptured(VideoFrame videoFrame) {
// Note: the VideoFrame must be released. When the frame is handed to another thread, call retain and release; otherwise do not call them.
this.mainObserver.onFrameCaptured(videoFrame);
this.shareObserver.onFrameCaptured(videoFrame);
}
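The comment above states the WebRTC ownership rule: a frame that is handed to another thread must be retained before the hand-off and released exactly once after it has been consumed. A minimal sketch of a sink that follows this rule, assuming an android.os.Handler backed by a HandlerThread (illustrative only, not part of this commit):
final class AsyncSink implements VideoSink {
    private final Handler handler; // backed by a HandlerThread owned by the caller
    AsyncSink(Handler handler) {
        this.handler = handler;
    }
    @Override
    public void onFrame(VideoFrame frame) {
        // Keep the buffer alive past this callback because it is consumed on another thread.
        frame.retain();
        this.handler.post(() -> {
            try {
                // ... consume frame.getBuffer() here ...
            } finally {
                // Balance the retain(), otherwise the capturer's buffer pool is exhausted.
                frame.release();
            }
        });
    }
}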

PhotographClient.java

@@ -2,7 +2,6 @@ package com.acgist.taoyao.media.client;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
@@ -14,16 +13,18 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.media.Image;
import android.media.ImageReader;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import com.acgist.taoyao.boot.utils.DateUtils;
import com.acgist.taoyao.media.MediaManager;
import com.acgist.taoyao.media.VideoSourceType;
import org.webrtc.VideoFrame;
@@ -46,73 +47,26 @@ import java.util.List;
*/
public class PhotographClient implements VideoSink {
public static final int CAPTURER_SIZE = 1;
private final int quantity;
private final String filename;
private final String filepath;
private volatile boolean wait;
private volatile boolean done;
private volatile boolean finish;
private Surface surface;
private ImageReader imageReader;
private CameraDevice cameraDevice;
private HandlerThread handlerThread;
private CameraCaptureSession cameraCaptureSession;
public PhotographClient(int quantity, String path) {
this.quantity = quantity;
this.filename = DateUtils.format(LocalDateTime.now(), DateUtils.DateTimeStyle.YYYYMMDDHH24MMSS) + ".jpg";
this.filepath = Paths.get(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath(), path, this.filename).toString();
this.wait = true;
this.done = false;
this.finish = false;
Log.i(PhotographClient.class.getSimpleName(), "拍摄照片文件:" + this.filepath);
}
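For reference, Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOCUMENTS) typically resolves to the shared Documents folder, so the resulting file path looks roughly like the following (illustrative values; <path> is the constructor argument):
// e.g. /storage/emulated/0/Documents/<path>/20230416160514.jpg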
@Override
public void onFrame(VideoFrame videoFrame) {
videoFrame.retain();
this.photograph(videoFrame);
}
public String photograph(VideoFrame videoFrame) {
if(this.wait) {
this.wait = false;
final Thread thread = new Thread(() -> this.photographBackground(videoFrame));
thread.setName("PhotographThread");
thread.setDaemon(true);
thread.start();
} else {
videoFrame.release();
}
return this.filepath;
}
private void photographBackground(VideoFrame videoFrame) {
final File file = new File(this.filepath);
try (
final OutputStream output = new FileOutputStream(file);
final ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
) {
final VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
final int width = i420.getWidth();
final int height = i420.getHeight();
// YuvHelper conversion overflows the color range
final YuvImage image = this.i420ToYuvImage(i420, width, height);
i420.release();
videoFrame.release();
final Rect rect = new Rect(0, 0, width, height);
image.compressToJpeg(rect, this.quantity, byteArray);
final byte[] array = byteArray.toByteArray();
final Bitmap bitmap = BitmapFactory.decodeByteArray(array, 0, array.length);
// final Matrix matrix = new Matrix();
// matrix.setRotate(90);
// final Bitmap matrixBitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, false);
bitmap.compress(Bitmap.CompressFormat.JPEG, this.quantity, output);
} catch (Exception e) {
Log.e(PhotographClient.class.getSimpleName(), "拍照异常", e);
}
this.notifyWait();
}
private void notifyWait() {
synchronized (this) {
this.finish = true;
@@ -122,18 +76,62 @@ public class PhotographClient implements VideoSink {
public String waitForPhotograph() {
synchronized (this) {
if(this.finish) {
return this.filepath;
}
try {
if(this.finish) {
return this.filepath;
}
this.wait(5000);
} catch (InterruptedException e) {
Log.e(PhotographClient.class.getSimpleName(), "拍照等待异常", e);
} finally {
// The HandlerThread is only created once a frame arrives, so guard against a timeout with no frame.
if(this.handlerThread != null) {
this.handlerThread.quitSafely();
}
}
}
return this.filepath;
}
@Override
public void onFrame(VideoFrame videoFrame) {
if(this.done) {
// Already finished: ignore
} else {
this.done = true;
this.handlerThread = new HandlerThread("PhotographThread");
this.handlerThread.start();
final Handler handler = new Handler(this.handlerThread.getLooper());
videoFrame.retain();
handler.post(() -> this.photograph(videoFrame));
}
}
private void photograph(VideoFrame videoFrame) {
final VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
videoFrame.release();
final File file = new File(this.filepath);
final int width = i420.getWidth();
final int height = i420.getHeight();
// YuvHelper conversion overflows the color range
final YuvImage image = this.i420ToYuvImage(i420, width, height);
i420.release();
final Rect rect = new Rect(0, 0, width, height);
try (
final OutputStream output = new FileOutputStream(file);
final ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
) {
image.compressToJpeg(rect, this.quantity, byteArray);
final byte[] array = byteArray.toByteArray();
final Bitmap bitmap = BitmapFactory.decodeByteArray(array, 0, array.length);
// final Matrix matrix = new Matrix();
// matrix.setRotate(90);
// final Bitmap matrixBitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, false);
bitmap.compress(Bitmap.CompressFormat.JPEG, this.quantity, output);
} catch (Exception e) {
Log.e(PhotographClient.class.getSimpleName(), "拍照异常", e);
} finally {
this.notifyWait();
}
}
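The body of i420ToYuvImage() lies outside this diff; the comment above only explains why it exists (YuvHelper's conversion overflows the color range). A plausible shape for such a helper, sketched here as an assumption rather than the committed code: android.graphics.YuvImage only accepts NV21 or YUY2, so the three I420 planes are packed into a single byte array as a full-resolution Y plane followed by interleaved V/U samples (even width and height assumed; only types already imported in this file are used):
private YuvImage i420ToNv21Sketch(VideoFrame.I420Buffer i420, int width, int height) {
    final byte[] nv21 = new byte[width * height * 3 / 2];
    final ByteBuffer y = i420.getDataY();
    final ByteBuffer u = i420.getDataU();
    final ByteBuffer v = i420.getDataV();
    int index = 0;
    // Y plane: copy row by row because the stride may be wider than the visible width.
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            nv21[index++] = y.get(row * i420.getStrideY() + col);
        }
    }
    // Chroma: NV21 expects V and U interleaved at quarter resolution.
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            nv21[index++] = v.get(row * i420.getStrideV() + col);
            nv21[index++] = u.get(row * i420.getStrideU() + col);
        }
    }
    return new YuvImage(nv21, ImageFormat.NV21, width, height, null);
}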
private YuvImage i420ToYuvImage(VideoFrame.I420Buffer i420, int width, int height) {
int index = 0;
final int yy = i420.getStrideY();
@@ -160,57 +158,37 @@ public class PhotographClient implements VideoSink {
}
@SuppressLint("MissingPermission")
public String photograph(int width, int height, VideoSourceType type, Context context) {
final CameraManager cameraManager = context.getSystemService(CameraManager.class);
this.imageReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, PhotographClient.CAPTURER_SIZE);
this.surface = this.imageReader.getSurface();
this.imageReader.setOnImageAvailableListener(this.imageAvailableListener, null);
try {
String cameraId = null;
final String[] cameraIdList = cameraManager.getCameraIdList();
for (String id : cameraIdList) {
final CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(id);
if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK && type == VideoSourceType.BACK) {
cameraId = id;
break;
} else if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT && type == VideoSourceType.FRONT) {
cameraId = id;
break;
} else {
public void photograph(int width, int height, int fps, VideoSourceType type, Context context) {
this.handlerThread = new HandlerThread("PhotographThread");
this.handlerThread.start();
final Handler handler = new Handler(this.handlerThread.getLooper());
handler.post(() -> {
final CameraManager cameraManager = context.getSystemService(CameraManager.class);
PhotographClient.this.imageReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, fps);
PhotographClient.this.surface = PhotographClient.this.imageReader.getSurface();
try {
String cameraId = null;
final String[] cameraIdList = cameraManager.getCameraIdList();
for (String id : cameraIdList) {
final CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(id);
if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK && type == VideoSourceType.BACK) {
cameraId = id;
break;
} else if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT && type == VideoSourceType.FRONT) {
cameraId = id;
break;
} else {
}
}
cameraManager.openCamera(cameraId, this.cameraDeviceStateCallback, null);
} catch (CameraAccessException e) {
Log.e(PhotographClient.class.getSimpleName(), "拍照异常", e);
}
if(cameraId == null) {
PhotographClient.this.closeCamera();
return null;
}
cameraManager.openCamera(cameraId, this.cameraDeviceStateCallback, null);
} catch (CameraAccessException e) {
Log.e(PhotographClient.class.getSimpleName(), "拍照异常", e);
PhotographClient.this.closeCamera();
}
return this.filepath;
});
}
private ImageReader.OnImageAvailableListener imageAvailableListener = (ImageReader imageReader) -> {
final Image image = imageReader.acquireLatestImage();
final Image.Plane[] planes = image.getPlanes();
final ByteBuffer byteBuffer = planes[0].getBuffer();
final byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
final File file = new File(PhotographClient.this.filepath);
try (
final OutputStream output = new FileOutputStream(file);
) {
output.write(bytes,0,bytes.length);
} catch (IOException e) {
Log.e(PhotographClient.class.getSimpleName(), "拍照异常", e);
} finally {
image.close();
PhotographClient.this.closeCamera();
}
};
private CameraDevice.StateCallback cameraDeviceStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
PhotographClient.this.cameraDevice = cameraDevice;
@@ -223,36 +201,75 @@ public class PhotographClient implements VideoSink {
));
} catch (CameraAccessException e) {
Log.e(PhotographClient.class.getSimpleName(), "拍照异常", e);
PhotographClient.this.closeCamera();
}
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
PhotographClient.this.closeCamera();
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
PhotographClient.this.closeCamera();
}
};
private CameraCaptureSession.StateCallback cameraCaptureSessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
try {
PhotographClient.this.cameraCaptureSession = cameraCaptureSession;
final CaptureRequest.Builder builder = PhotographClient.this.cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
builder.set(CaptureRequest.JPEG_QUALITY, (byte) PhotographClient.this.quantity);
// builder.set(CaptureRequest.JPEG_ORIENTATION, 90);
builder.addTarget(PhotographClient.this.surface);
cameraCaptureSession.setRepeatingRequest(builder.build(), null, null);
cameraCaptureSession.setRepeatingRequest(builder.build(), PhotographClient.this.cameraCaptureSessionCaptureCallback, null);
} catch (CameraAccessException e) {
Log.e(PhotographClient.class.getSimpleName(), "拍照异常", e);
PhotographClient.this.closeCamera();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
PhotographClient.this.closeCamera();
}
};
private CameraCaptureSession.CaptureCallback cameraCaptureSessionCaptureCallback = new CameraCaptureSession.CaptureCallback() {
private volatile int index = 0;
@Override
public void onCaptureCompleted(CameraCaptureSession cameraCaptureSession, CaptureRequest captureRequest, TotalCaptureResult totalCaptureResult) {
final Image image = PhotographClient.this.imageReader.acquireNextImage();
if(image == null) {
return;
}
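// Drop the first five frames (index 0..4) and any frame that arrives after the photo has already been written.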
if(this.index++ <= 4 || PhotographClient.this.done) {
image.close();
return;
}
PhotographClient.this.done = true;
final Image.Plane[] planes = image.getPlanes();
final ByteBuffer byteBuffer = planes[0].getBuffer();
final byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
final File file = new File(PhotographClient.this.filepath);
try (final OutputStream output = new FileOutputStream(file)) {
// final Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
// bitmap.compress(Bitmap.CompressFormat.JPEG, PhotographClient.this.quantity, output);
output.write(bytes, 0, bytes.length);
cameraCaptureSession.stopRepeating();
} catch (IOException | CameraAccessException e) {
Log.e(PhotographClient.class.getSimpleName(), "拍照异常", e);
} finally {
image.close();
PhotographClient.this.closeCamera();
PhotographClient.this.notifyWait();
}
}
};
private void closeCamera() {
@@ -264,11 +281,6 @@ public class PhotographClient implements VideoSink {
this.cameraDevice.close();
this.cameraDevice = null;
}
// Release the ImageReader last
if(this.surface != null) {
this.surface.release();
this.surface = null;
}
if(this.imageReader != null) {
this.imageReader.close();
this.imageReader = null;

RecordClient.java

@@ -90,6 +90,10 @@ public class RecordClient extends Client implements VideoSink, JavaAudioDeviceMo
* Height: 1080
*/
private final int height;
/**
* YUV data size
*/
private final int yuvSize;
/**
* 音频编码
*/
@@ -133,6 +137,7 @@ public class RecordClient extends Client implements VideoSink, JavaAudioDeviceMo
this.iFrameInterval = iFrameInterval;
this.width = width;
this.height = height;
this.yuvSize = width * height * 3 / 2;
this.filename = DateUtils.format(LocalDateTime.now(), DateUtils.DateTimeStyle.YYYYMMDDHH24MMSS) + ".mp4";
this.filepath = Paths.get(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath(), path, this.filename).toString();
}
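The new yuvSize field is the standard 4:2:0 frame size: a full-resolution Y plane plus quarter-resolution U and V planes. For the 1080p case documented above (assuming a 1920 pixel width):
// Y:     1920 * 1080             = 2 073 600 bytes
// U + V: 2 * (1920 * 1080 / 4)   = 1 036 800 bytes
// Total: 1920 * 1080 * 3 / 2     = 3 110 400 bytes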
@@ -219,7 +224,7 @@ public class RecordClient extends Client implements VideoSink, JavaAudioDeviceMo
outputBuffer.position(bufferInfo.offset);
outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
bufferInfo.presentationTimeUs -= pts;
// this.mediaMuxer.writeSampleData(trackIndex, outputBuffer, info);
this.mediaMuxer.writeSampleData(trackIndex, outputBuffer, bufferInfo);
this.audioCodec.releaseOutputBuffer(outputIndex, false);
Log.d(RecordClient.class.getSimpleName(), "录制音频帧(时间戳):" + bufferInfo.flags + " - " + (bufferInfo.presentationTimeUs / 1_000_000F));
// if (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME == MediaCodec.BUFFER_FLAG_KEY_FRAME) {
@@ -280,28 +285,10 @@ public class RecordClient extends Client implements VideoSink, JavaAudioDeviceMo
long pts = 0L;
int trackIndex = -1;
int outputIndex;
VideoFrame videoFrame = null;
this.videoCodec.start();
this.videoActive = true;
final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
while (!this.close) {
try {
videoFrame = this.vq.poll(WAIT_TIME_MS, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
Log.e(RecordClient.class.getSimpleName(), "录制线程等待异常", e);
}
if(videoFrame == null) {
continue;
}
final int videoFrameSize = videoFrame.getRotatedWidth() * videoFrame.getRotatedHeight() * 3 / 2;
final int index = this.videoCodec.dequeueInputBuffer(WAIT_TIME_US);
final VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
final ByteBuffer inputByteBuffer = this.videoCodec.getInputBuffer(index);
YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), i420.getDataV(), i420.getStrideV(), inputByteBuffer, i420.getWidth(), i420.getHeight());
i420.release();
videoFrame.release();
this.videoCodec.queueInputBuffer(index, 0, videoFrameSize, videoFrame.getTimestampNs(), 0);
videoFrame = null;
outputIndex = this.videoCodec.dequeueOutputBuffer(bufferInfo, WAIT_TIME_US);
if (outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
@@ -333,7 +320,7 @@ public class RecordClient extends Client implements VideoSink, JavaAudioDeviceMo
bufferInfo.presentationTimeUs -= pts;
this.mediaMuxer.writeSampleData(trackIndex, outputBuffer, bufferInfo);
this.videoCodec.releaseOutputBuffer(outputIndex, false);
Log.d(RecordClient.class.getSimpleName(), "录制视频帧(时间戳):" + bufferInfo.flags + " - " + (bufferInfo.presentationTimeUs / 1_000_000F));
Log.d(RecordClient.class.getSimpleName(), "录制视频帧(时间戳):" + (bufferInfo.presentationTimeUs / 1_000_000F));
// if (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME == MediaCodec.BUFFER_FLAG_KEY_FRAME) {
// } else if (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG == MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
// } else if (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
@@ -434,32 +421,22 @@ public class RecordClient extends Client implements VideoSink, JavaAudioDeviceMo
@Override
public void onFrame(VideoFrame videoFrame) {
videoFrame.retain();
if (this.close || !this.videoActive) {
videoFrame.release();
return;
}
Log.i(RecordClient.class.getSimpleName(), "视频信息:" + videoFrame.getRotatedWidth() + " - " + videoFrame.getRotatedHeight());
try {
vq.put(videoFrame);
} catch (InterruptedException e) {
throw new RuntimeException(e);
final int index = this.videoCodec.dequeueInputBuffer(WAIT_TIME_US);
if(index < 0) {
return;
}
// final int videoFrameSize = videoFrame.getRotatedWidth() * videoFrame.getRotatedHeight() * 3 / 2;
// final int index = this.videoCodec.dequeueInputBuffer(WAIT_TIME_US);
// if(index < 0) {
// videoFrame.retain();
// return;
// }
// final VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
// final ByteBuffer inputByteBuffer = this.videoCodec.getInputBuffer(index);
// YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), i420.getDataV(), i420.getStrideV(), inputByteBuffer, i420.getWidth(), i420.getHeight());
// i420.release();
// videoFrame.release();
// this.videoCodec.queueInputBuffer(index, 0, videoFrameSize, videoFrame.getTimestampNs(), 0);
// videoFrame = null;
// Memory churn (not yet addressed)
final ByteBuffer inputBuffer = this.videoCodec.getInputBuffer(index);
final VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), i420.getDataV(), i420.getStrideV(), inputBuffer, i420.getWidth(), i420.getHeight());
i420.release();
this.videoCodec.queueInputBuffer(index, 0, this.yuvSize, videoFrame.getTimestampNs(), 0);
// final int videoFrameSize = videoFrame.getRotatedWidth() * videoFrame.getRotatedHeight() * 3 / 2;
// this.videoCodec.queueInputBuffer(index, 0, this.videoFrameSize, videoFrame.getTimestampNs(), 0);
}
private BlockingQueue<VideoFrame> vq = new LinkedBlockingQueue<>();
}