This commit is contained in:
acgist
2024-05-30 19:11:20 +08:00
parent beb6df6385
commit b23e59930e
16 changed files with 179 additions and 269 deletions

View File

@@ -60,10 +60,6 @@ target_link_libraries(
${PROJECT_NAME} PUBLIC
# libuv
libuv.so
# EGL
libEGL.so
# OpenGL ES
libGLESv3.so
# 音频
libohaudio.so
# 相机
@@ -73,15 +69,16 @@ target_link_libraries(
# LOG
libhilog_ndk.z.so
# 图片
libimage_ndk.z.so
libimage_receiver_ndk.z.so
# 本地
libnative_image.so
# NativeBuffer
libnative_buffer.so
# NativeWindow
libnative_window.so
# 编码解码
libnative_media_aenc.so
libnative_media_venc.so
libnative_media_core.so
libnative_media_venc.so
libnative_media_codecbase.so
# 屏幕录制
libnative_avscreen_capture.so

View File

@@ -85,16 +85,14 @@ static uint32_t minIndex = 667;
static uint32_t maxIndex = 999;
// 终端索引
static uint32_t clientIndex = 99999;
// ETS环境
static napi_env env = nullptr;
// 是否加载
static bool initTaoyao = false;
// push方法引用
static napi_ref pushRef = nullptr;
static napi_ref pushRef = nullptr;
// request方法引用
static napi_ref requestRef = nullptr;
// request线程安全方法
static napi_threadsafe_function requestFunction = nullptr;
// 图片收集引用
static napi_ref imageReceiverRef = nullptr;
// 媒体功能
static acgist::MediaManager* mediaManager = nullptr;
// 房间管理
@@ -277,7 +275,7 @@ static int asyncExecute(std::function<void()> function) {
* 加载系统
*/
static napi_value init(napi_env env, napi_callback_info info) {
TAOYAO_JSON_BODY(3);
TAOYAO_JSON_BODY(4);
{
std::lock_guard<std::recursive_mutex> taoyaoLock(taoyaoMutex);
if(initTaoyao) {
@@ -289,11 +287,17 @@ static napi_value init(napi_env env, napi_callback_info info) {
}
napi_create_reference(env, args[1], 1, &acgist::pushRef);
napi_create_reference(env, args[2], 1, &acgist::requestRef);
napi_create_reference(env, args[3], 1 ,&acgist::imageReceiverRef);
napi_value imageReceiver;
napi_get_reference_value(env, acgist::imageReceiverRef, &imageReceiver);
acgist::imageReceiverNative = OH_Image_Receiver_InitImageReceiverNative(env, imageReceiver);
OH_LOG_INFO(LOG_APP, "配置图片接收:%{public}lld %{public}lld", imageReceiver, acgist::imageReceiverNative);
printSupportCodec();
acgist::clientId = json["clientId"];
acgist::name = json["name"];
acgist::surfaceId = json["surfaceId"];
acgist::clientIndex = json["clientIndex"];
OH_LOG_INFO(LOG_APP, "加载libtaoyao");
OH_LOG_INFO(LOG_APP, "加载libtaoyao%{public}s %{public}s", acgist::clientId.data(), acgist::surfaceId.data());
std::string version = mediasoupclient::Version();
OH_LOG_INFO(LOG_APP, "加载MediasoupClient%{public}s", version.data());
mediasoupclient::Initialize();
@@ -332,8 +336,22 @@ static napi_value shutdown(napi_env env, napi_callback_info info) {
OH_LOG_INFO(LOG_APP, "释放mediasoupclient");
mediasoupclient::Cleanup();
OH_LOG_INFO(LOG_APP, "释放全局变量");
napi_delete_reference(env, acgist::pushRef);
napi_delete_reference(env, acgist::requestRef);
if(acgist::pushRef != nullptr) {
napi_delete_reference(env, acgist::pushRef);
acgist::pushRef = nullptr;
}
if(acgist::requestRef != nullptr) {
napi_delete_reference(env, acgist::requestRef);
acgist::requestRef = nullptr;
}
if(acgist::imageReceiverRef != nullptr) {
napi_delete_reference(env, acgist::imageReceiverRef);
acgist::imageReceiverRef = nullptr;
}
if(acgist::imageReceiverNative != nullptr) {
delete acgist::imageReceiverNative;
acgist::imageReceiverNative = nullptr;
}
// 置空即可
env = nullptr;
// 返回结果

View File

@@ -21,23 +21,18 @@
#include <map>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <EGL/eglplatform.h>
#include <GLES3/gl32.h>
#include "./Signal.hpp"
#include "./WebRTC.hpp"
#include <native_image/native_image.h>
#include <native_buffer/native_buffer.h>
#include <native_window/external_window.h>
#include <ohcamera/camera.h>
#include "ohcamera/camera_input.h"
#include <ohcamera/video_output.h>
#include <ohcamera/capture_session.h>
#include <native_image/native_image.h>
#include <native_buffer/native_buffer.h>
#include <native_window/external_window.h>
#include "api/media_stream_track.h"
#include "api/media_stream_interface.h"
@@ -131,26 +126,11 @@ public:
class CameraCapturer : public VideoCapturer {
public:
// ================ OpenGL ES ================
// OpenGL ES SurfaceId
uint64_t surfaceId = 0;
// OpenGL ES纹理指针
GLuint textureId = 0;
// OpenGL ES纹理数量
GLsizei textureSize = 1;
// EGL显示设备
EGLDisplay eglDisplay = EGL_NO_DISPLAY;
// EGL上下文
EGLContext eglContext = EGL_NO_CONTEXT;
// EGL Surface
EGLSurface eglSurface = EGL_NO_SURFACE;
// ================ Camera ================
// 相机设备数量
uint32_t cameraSize = 0;
// 相机索引
uint32_t cameraIndex = 0;
// NativeImage
OH_NativeImage* nativeImage = nullptr;
// 相机输入
Camera_Input* cameraInput = nullptr;
// 相机设备列表
@@ -171,10 +151,6 @@ public:
virtual ~CameraCapturer() override;
public:
// 加载OpenGL ES
void initOpenGLES();
// 释放OpenGL ES
void releaseOpenGLES();
virtual bool start() override;
virtual bool stop() override;

View File

@@ -23,9 +23,9 @@
// 本地视频采集
#define TAOYAO_VIDEO_LOCAL true
// 视频来源屏幕
#define TAOYAO_VIDEO_SOURCE_SCREEN false
#define TAOYAO_VIDEO_SOURCE_SCREEN true
// 视频来源相机
#define TAOYAO_VIDEO_SOURCE_CAMERA true
#define TAOYAO_VIDEO_SOURCE_CAMERA false
namespace acgist {

View File

@@ -7,6 +7,10 @@
#ifndef TAOYAO_SIGNAL_HPP
#define TAOYAO_SIGNAL_HPP
#include <napi/native_api.h>
#include <multimedia/image_framework/image_receiver_mdk.h>
#ifndef TAOYAO_AUDIO_RET_LOG
#define TAOYAO_AUDIO_RET_LOG(format, ret, ...) \
if(ret == OH_AudioStream_Result::AUDIOSTREAM_SUCCESS) { \
@@ -62,6 +66,12 @@ extern int32_t bitsPerSample;
extern std::string clientId;
// 终端名称
extern std::string name;
// 预览句柄
extern std::string surfaceId;
// 环境
extern napi_env env;
// 图片接收
extern ImageReceiverNative* imageReceiverNative;
/**
* 发送消息

View File

@@ -23,9 +23,10 @@ acgist::AudioCapturer::AudioCapturer() {
// 配置音频采集参数
OH_AudioStreamBuilder_SetSamplingRate(this->builder, acgist::samplingRate);
OH_AudioStreamBuilder_SetChannelCount(this->builder, acgist::channelCount);
// OH_AudioStreamBuilder_SetLatencyMode(this->builder, OH_AudioStream_LatencyMode::AUDIOSTREAM_LATENCY_MODE_FAST);
OH_AudioStreamBuilder_SetLatencyMode(this->builder, OH_AudioStream_LatencyMode::AUDIOSTREAM_LATENCY_MODE_NORMAL);
OH_AudioStreamBuilder_SetSampleFormat(this->builder, OH_AudioStream_SampleFormat::AUDIOSTREAM_SAMPLE_S16LE);
// OH_AudioStreamBuilder_SetRendererInfo(this->builder, OH_AudioStream_Usage::AUDIOSTREAM_USAGE_VOICE_COMMUNICATION);
// OH_AudioStreamBuilder_SetCapturerInfo(this->builder, OH_AudioStream_SourceType::AUDIOSTREAM_SOURCE_TYPE_MIC);
OH_LOG_DEBUG(LOG_APP, "配置音频采集参数:%{public}d %{public}d", acgist::samplingRate, acgist::channelCount);
// 设置音频采集回调
OH_AudioCapturer_Callbacks callbacks;
@@ -96,7 +97,7 @@ static int32_t OnReadData(OH_AudioCapturer* capturer, void* userData, void* buff
// 字节数量 * 8 / 位深 / 通道数量
size_t number_of_frames = length / 2;
// size_t number_of_frames = length * 8 / 16 / 2;
audioCapturer->source->OnData((uint16_t*) buffer, acgist::bitsPerSample, acgist::samplingRate, acgist::channelCount, number_of_frames);
// audioCapturer->source->OnData((uint16_t*) buffer, acgist::bitsPerSample, acgist::samplingRate, acgist::channelCount, number_of_frames);
return 0;
}

View File

@@ -23,8 +23,6 @@
#include "hilog/log.h"
#include <EGL/eglplatform.h>
#include "rtc_base/time_utils.h"
#include "api/video/nv12_buffer.h"
@@ -33,9 +31,11 @@
#include <ohcamera/camera_manager.h>
#include <ohcamera/capture_session.h>
#include <multimedia/image_framework/image_mdk.h>
static std::recursive_mutex videoMutex;
// 相机回调
// 相机状态回调
static void onCameraStatus(Camera_Manager* cameraManager, Camera_StatusInfo* status);
// 相机输入回调
@@ -55,14 +55,17 @@ static void onVideoFrameStart(Camera_VideoOutput* videoOutput);
static void onSessionError(Camera_CaptureSession* session, Camera_ErrorCode errorCode);
static void onSessionFocusStateChange(Camera_CaptureSession* session, Camera_FocusState focusState);
// 数据回调
static void onFrame(void* context);
// 检查EGL扩展
static bool CheckEglExtension(const char* extensions, const char* extension);
// 视频数据回调
static void onFrame();
acgist::CameraCapturer::CameraCapturer() {
initOpenGLES();
// 装备视频
char* surfaceId = new char[sizeof(char)];
int32_t code = OH_Image_Receiver_GetReceivingSurfaceId(acgist::imageReceiverNative, surfaceId, sizeof(char));
OH_LOG_DEBUG(LOG_APP, "配置图片接收SurfaceId%{public}d %{public}d", code, surfaceId[0]);
OH_Image_Receiver_On_Callback callback = &onFrame;
OH_Image_Receiver_On(acgist::imageReceiverNative, callback);
// 装备相机
Camera_ErrorCode ret = OH_Camera_GetCameraManager(&this->cameraManager);
TAOYAO_VIDEO_RET_LOG("配置相机管理:%{public}d", ret);
CameraManager_Callbacks cameraManager_Callbacks = { onCameraStatus };
@@ -83,20 +86,24 @@ acgist::CameraCapturer::CameraCapturer() {
CameraInput_Callbacks cameraInput_Callbacks = { onInputOnError };
ret = OH_CameraInput_RegisterCallback(this->cameraInput, &cameraInput_Callbacks);
TAOYAO_VIDEO_RET_LOG("注册相机输入回调:%{public}d %{public}d", ret, this->cameraIndex);
// auto& previewProfile = this->cameraOutputCapability->previewProfiles[0];
// previewProfile->format = CAMERA_FORMAT_YUV_420_SP;
ret = OH_CameraManager_CreatePreviewOutput(this->cameraManager, this->cameraOutputCapability->previewProfiles[0], std::to_string(this->surfaceId).data(), &this->cameraPreviewOutput);
TAOYAO_VIDEO_RET_LOG("创建相机预览输出:%{public}d %{public}lld %{public}s", ret, this->surfaceId, std::to_string(this->surfaceId).data());
auto& previewProfile = this->cameraOutputCapability->previewProfiles[0];
// previewProfile->format = CAMERA_FORMAT_JPEG;
// previewProfile->format = CAMERA_FORMAT_YUV_420_SP;
ret = OH_CameraManager_CreatePreviewOutput(this->cameraManager, previewProfile, acgist::surfaceId.data(), &this->cameraPreviewOutput);
TAOYAO_VIDEO_RET_LOG("创建相机预览输出:%{public}d %{public}d %{public}s", ret, previewProfile->format, acgist::surfaceId.data());
PreviewOutput_Callbacks previewOutput_Callbacks = { onPreviewFrameStart, onPreviewFrameEnd, onPreviewError };
ret = OH_PreviewOutput_RegisterCallback(this->cameraPreviewOutput, &previewOutput_Callbacks);
TAOYAO_VIDEO_RET_LOG("注册相机预览输出回调:%{public}d", ret);
// auto& videoProfile = this->cameraOutputCapability->videoProfiles[0];
// videoProfile->format = CAMERA_FORMAT_YUV_420_SP;
// OH_LOG_DEBUG(LOG_APP, "相机视频配置:%{public}d %{public}d %{public}d %{public}d %{public}d", videoProfile->format, videoProfile->size.width, videoProfile->size.height, videoProfile->range.min, videoProfile->range.max);
// ret = OH_CameraManager_CreateVideoOutput(this->cameraManager, this->cameraOutputCapability->videoProfiles[0], std::to_string(this->surfaceId).data(), &this->cameraVideoOutput);
// TAOYAO_VIDEO_RET_LOG("创建相机视频输出:%{public}d %{public}lld %{public}s", ret, this->surfaceId, std::to_string(this->surfaceId).data());
// VideoOutput_Callbacks videoOutput_Callbacks = { onVideoFrameStart, onVideoFrameEnd, onVideoError };
// ret = OH_VideoOutput_RegisterCallback(this->cameraVideoOutput, &videoOutput_Callbacks);
auto& videoProfile = this->cameraOutputCapability->videoProfiles[0];
// videoProfile->format = CAMERA_FORMAT_JPEG;
// videoProfile->format = CAMERA_FORMAT_YUV_420_SP;
videoProfile->range.min = 20;
videoProfile->range.max = 30;
OH_LOG_DEBUG(LOG_APP, "相机视频配置:%{public}d %{public}d %{public}d %{public}d %{public}d", videoProfile->format, videoProfile->size.width, videoProfile->size.height, videoProfile->range.min, videoProfile->range.max);
ret = OH_CameraManager_CreateVideoOutput(this->cameraManager, videoProfile, surfaceId, &this->cameraVideoOutput);
TAOYAO_VIDEO_RET_LOG("创建相机视频输出:%{public}d %{public}d", ret, surfaceId[0]);
VideoOutput_Callbacks videoOutput_Callbacks = { onVideoFrameStart, onVideoFrameEnd, onVideoError };
ret = OH_VideoOutput_RegisterCallback(this->cameraVideoOutput, &videoOutput_Callbacks);
TAOYAO_VIDEO_RET_LOG("注册相机视频输出回调:%{public}d", ret);
ret = OH_CameraManager_CreateCaptureSession(this->cameraManager, &this->cameraCaptureSession);
TAOYAO_VIDEO_RET_LOG("创建相机视频会话:%{public}d", ret);
@@ -106,13 +113,13 @@ acgist::CameraCapturer::CameraCapturer() {
// 设置相机:闪光、变焦、质量、高宽、补光、防抖
// 设置缩放比例
// OH_CaptureSession_SetZoomRatio(this->cameraCaptureSession, 0.5F);
delete[] surfaceId;
}
acgist::CameraCapturer::~CameraCapturer() {
releaseOpenGLES();
CaptureSession_Callbacks captureSession_Callbacks = { onSessionFocusStateChange, onSessionError };
Camera_ErrorCode ret = OH_CaptureSession_UnregisterCallback(this->cameraCaptureSession, &captureSession_Callbacks);
TAOYAO_VIDEO_RET_LOG("取消相机视频会话:%{public}d", ret);
TAOYAO_VIDEO_RET_LOG("取消相机视频会话回调%{public}d", ret);
ret = OH_CaptureSession_Release(this->cameraCaptureSession);
this->cameraCaptureSession = nullptr;
TAOYAO_VIDEO_RET_LOG("释放相机视频会话:%{public}d", ret);
@@ -156,23 +163,22 @@ bool acgist::CameraCapturer::start() {
this->running = true;
Camera_ErrorCode ret = OH_CameraInput_Open(this->cameraInput);
TAOYAO_VIDEO_RET_LOG("打开相机输入:%{public}d", ret);
OH_NativeImage_AttachContext(this->nativeImage, this->textureId);
ret = OH_CaptureSession_BeginConfig(this->cameraCaptureSession);
TAOYAO_VIDEO_RET_LOG("开始配置视频会话:%{public}d", ret);
ret = OH_CaptureSession_AddInput(this->cameraCaptureSession, this->cameraInput);
TAOYAO_VIDEO_RET_LOG("绑定视频输入会话:%{public}d", ret);
ret = OH_CaptureSession_AddPreviewOutput(cameraCaptureSession, this->cameraPreviewOutput);
TAOYAO_VIDEO_RET_LOG("绑定相机预览输出会话:%{public}d", ret);
// ret = OH_CaptureSession_AddVideoOutput(this->cameraCaptureSession, this->cameraVideoOutput);
// TAOYAO_VIDEO_RET_LOG("绑定相机视频输出会话:%{public}d", ret);
ret = OH_CaptureSession_AddVideoOutput(this->cameraCaptureSession, this->cameraVideoOutput);
TAOYAO_VIDEO_RET_LOG("绑定相机视频输出会话:%{public}d", ret);
ret = OH_CaptureSession_CommitConfig(this->cameraCaptureSession);
TAOYAO_VIDEO_RET_LOG("开始相机视频会话:%{public}d", ret);
TAOYAO_VIDEO_RET_LOG("结束配置视频会话:%{public}d", ret);
ret = OH_CaptureSession_Start(this->cameraCaptureSession);
TAOYAO_VIDEO_RET_LOG("开始相机视频输出%{public}d", ret);
TAOYAO_VIDEO_RET_LOG("开始相机视频会话%{public}d", ret);
// ret = OH_PreviewOutput_Start(this->cameraPreviewOutput);
// TAOYAO_VIDEO_RET_LOG("开始相机预览输出:%{public}d", ret);
ret = OH_VideoOutput_Start(this->cameraVideoOutput);
TAOYAO_VIDEO_RET_LOG("结束配置视频会话%{public}d", ret);
TAOYAO_VIDEO_RET_LOG("开始相机视频输出%{public}d", ret);
return ret = Camera_ErrorCode::CAMERA_OK;
}
@@ -200,108 +206,9 @@ bool acgist::CameraCapturer::stop() {
TAOYAO_VIDEO_RET_LOG("结束相机视频输出:%{public}d", ret);
ret = OH_CameraInput_Close(this->cameraInput);
TAOYAO_VIDEO_RET_LOG("关闭相机输入:%{public}d", ret);
OH_NativeImage_DetachContext(this->nativeImage);
return ret = Camera_ErrorCode::CAMERA_OK;
}
void acgist::CameraCapturer::initOpenGLES() {
// EGL
static const char* EGL_EXT_PLATFORM_WAYLAND = "EGL_EXT_platform_wayland";
static const char* EGL_KHR_PLATFORM_WAYLAND = "EGL_KHR_platform_wayland";
static const char* EGL_GET_PLATFORM_DISPLAY_EXT = "eglGetPlatformDisplayEXT";
// 当前
this->eglDisplay = eglGetCurrentDisplay();
// 扩展
if(this->eglDisplay == EGL_NO_DISPLAY) {
const char* extensions = eglQueryString(EGL_NO_DISPLAY, EGL_EXTENSIONS);
if (extensions && (CheckEglExtension(extensions, EGL_EXT_PLATFORM_WAYLAND) || CheckEglExtension(extensions, EGL_KHR_PLATFORM_WAYLAND))) {
PFNEGLGETPLATFORMDISPLAYEXTPROC eglGetPlatformDisplayExt = (PFNEGLGETPLATFORMDISPLAYEXTPROC) eglGetProcAddress(EGL_GET_PLATFORM_DISPLAY_EXT);
this->eglDisplay = eglGetPlatformDisplayExt(EGL_PLATFORM_OHOS_KHR, EGL_DEFAULT_DISPLAY, nullptr);
OH_LOG_INFO(LOG_APP, "扩展EGLDisplay");
}
}
// 新建
if(this->eglDisplay == EGL_NO_DISPLAY) {
this->eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
OH_LOG_INFO(LOG_APP, "新建EGLDisplay");
}
// 配置
EGLint count;
EGLConfig config;
EGLint config_attribs[] = {
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT,
// EGL_SURFACE_TYPE, EGL_PIXMAP_BIT,
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_RED_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_BLUE_SIZE, 8,
EGL_ALPHA_SIZE, 8,
EGL_NONE
};
EGLint context_attribs[] = {
EGL_CONTEXT_CLIENT_VERSION, 3,
EGL_NONE
};
EGLBoolean ret = eglChooseConfig(this->eglDisplay, config_attribs, &config, 1, &count);
TAOYAO_OPENGL_RET_LOG("EGL选择配置%{public}d", ret);
// 当前
this->eglContext = eglGetCurrentContext();
// 新建
if(this->eglContext == EGL_NO_CONTEXT) {
EGLint major;
EGLint minor;
ret = eglInitialize(this->eglDisplay, &major, &minor);
TAOYAO_OPENGL_RET_LOG("加载EGL%{public}d", ret);
ret = eglBindAPI(EGL_OPENGL_ES_API);
TAOYAO_OPENGL_RET_LOG("绑定EGL%{public}d", ret);
this->eglContext = eglCreateContext(this->eglDisplay, config, EGL_NO_CONTEXT, context_attribs);
}
const EGLint surfaceAttr[] = {
// EGL_WIDTH, acgist::width,
// EGL_HEIGHT, acgist::height,
EGL_LARGEST_PBUFFER, EGL_TRUE,
EGL_NONE
};
this->eglSurface = eglCreatePbufferSurface(this->eglDisplay, config, surfaceAttr);
// OH_NativeWindow_CreateNativeWindow(this->eglSurface);
eglMakeCurrent(this->eglDisplay, this->eglSurface, this->eglSurface, this->eglContext);
// IMAGE WINDOW
glGenTextures(this->textureSize, &this->textureId);
this->nativeImage = OH_NativeImage_Create(this->textureId, GL_TEXTURE_2D);
OH_LOG_INFO(LOG_APP, "创建NativeImage%{public}d", this->textureId);
OH_OnFrameAvailableListener listener = { this, onFrame };
OH_NativeImage_SetOnFrameAvailableListener(this->nativeImage, listener);
OH_NativeImage_GetSurfaceId(this->nativeImage, &this->surfaceId);
OH_LOG_INFO(LOG_APP, "视频采集SurfaceId%{public}lld", this->surfaceId);
}
void acgist::CameraCapturer::releaseOpenGLES() {
if(this->textureId != 0) {
glDeleteTextures(this->textureSize, &this->textureId);
this->textureId = 0;
}
if(this->eglSurface != EGL_NO_SURFACE) {
EGLBoolean ret = eglDestroySurface(this->eglDisplay, this->eglSurface);
this->eglSurface = EGL_NO_SURFACE;
TAOYAO_OPENGL_RET_LOG("销毁EGLSurface%d", ret);
}
if(this->eglContext != EGL_NO_CONTEXT) {
EGLBoolean ret = eglDestroyContext(this->eglDisplay, this->eglContext);
this->eglContext = EGL_NO_CONTEXT;
TAOYAO_OPENGL_RET_LOG("销毁EGLContext%d", ret);
}
if(this->nativeImage != nullptr) {
OH_NativeImage_Destroy(&this->nativeImage);
this->nativeImage = nullptr;
OH_LOG_INFO(LOG_APP, "销毁nativeImage");
}
}
// Camera-manager status callback: logs the camera status change code.
// NOTE(review): assumes `status` is non-null — confirm the OHOS camera
// framework guarantees this before dereferencing.
static void onCameraStatus(Camera_Manager* cameraManager, Camera_StatusInfo* status) {
OH_LOG_INFO(LOG_APP, "相机状态变化:%{public}d", status->status);
}
@@ -310,20 +217,8 @@ static void onInputOnError(const Camera_Input* cameraInput, Camera_ErrorCode err
OH_LOG_ERROR(LOG_APP, "相机输入错误:%{public}d", errorCode);
}
static void onVideoError(Camera_VideoOutput* videoOutput, Camera_ErrorCode errorCode) {
OH_LOG_ERROR(LOG_APP, "视频捕获数据帧失败:%d", errorCode);
}
static void onVideoFrameEnd(Camera_VideoOutput* videoOutput, int32_t frameCount) {
OH_LOG_DEBUG(LOG_APP, "结束视频捕获数据帧");
}
static void onVideoFrameStart(Camera_VideoOutput* videoOutput) {
OH_LOG_DEBUG(LOG_APP, "开始视频捕获数据帧");
}
static void onPreviewError(Camera_PreviewOutput* previewOutput, Camera_ErrorCode errorCode) {
OH_LOG_ERROR(LOG_APP, "预览捕获数据帧失败:%d", errorCode);
OH_LOG_ERROR(LOG_APP, "预览捕获数据帧失败:%{public}d", errorCode);
}
static void onPreviewFrameEnd(Camera_PreviewOutput* previewOutput, int32_t frameCount) {
@@ -334,32 +229,39 @@ static void onPreviewFrameStart(Camera_PreviewOutput* previewOutput) {
OH_LOG_DEBUG(LOG_APP, "开始预览捕获数据帧");
}
// Video-output error callback: logs the error code reported by the
// camera video output; no recovery is attempted here.
static void onVideoError(Camera_VideoOutput* videoOutput, Camera_ErrorCode errorCode) {
OH_LOG_ERROR(LOG_APP, "视频捕获数据帧失败:%{public}d", errorCode);
}
// Video-output frame-end callback: debug trace only; the frame count
// argument is currently unused.
static void onVideoFrameEnd(Camera_VideoOutput* videoOutput, int32_t frameCount) {
OH_LOG_DEBUG(LOG_APP, "结束视频捕获数据帧");
}
// Video-output frame-start callback: debug trace only.
static void onVideoFrameStart(Camera_VideoOutput* videoOutput) {
OH_LOG_DEBUG(LOG_APP, "开始视频捕获数据帧");
}
static void onSessionError(Camera_CaptureSession* session, Camera_ErrorCode errorCode) {
OH_LOG_ERROR(LOG_APP, "打开相机会话失败:%{public}o", errorCode);
OH_LOG_ERROR(LOG_APP, "打开相机会话失败:%{public}d", errorCode);
}
static void onSessionFocusStateChange(Camera_CaptureSession* session, Camera_FocusState focusState) {
OH_LOG_DEBUG(LOG_APP, "相机会话焦点改变:%{public}o", focusState);
OH_LOG_DEBUG(LOG_APP, "相机会话焦点改变:%{public}d", focusState);
}
static void onFrame(void* context) {
OH_LOG_DEBUG(LOG_APP, "视频帧数据采集回调:%{public}d", 1);
acgist::CameraCapturer* cameraCapturer = (acgist::CameraCapturer*) context;
// OH_NativeImage_UpdateSurfaceImage(cameraCapturer->nativeImage);
// 更新内容到OpenGL纹理。
uint32_t ret = OH_NativeImage_UpdateSurfaceImage(cameraCapturer->nativeImage);
if (ret != 0) {
}
// 获取最近调用OH_NativeImage_UpdateSurfaceImage的纹理图像的时间戳和变化矩阵。
int64_t timeStamp = OH_NativeImage_GetTimestamp(cameraCapturer->nativeImage);
float matrix[16];
ret = OH_NativeImage_GetTransformMatrix(cameraCapturer->nativeImage, matrix);
if (ret != 0) {
}
// 对update绑定到对应textureId的纹理做对应的opengl后处理后将纹理上屏
EGLBoolean eglRet = eglSwapBuffers(cameraCapturer->eglDisplay, cameraCapturer->eglSurface);
if (eglRet == EGL_FALSE) {
}
static void onFrame() {
napi_value nextImage;
int32_t ret = OH_Image_Receiver_ReadNextImage(acgist::imageReceiverNative, &nextImage);
OH_LOG_DEBUG(LOG_APP, "视频帧数据采集回调:%{public}d", ret);
OhosImageSize imageSize;
ImageNative * nextImage_native = OH_Image_InitImageNative(acgist::env, nextImage);
OH_Image_Size(nextImage_native, &imageSize);
OH_LOG_Print(LOG_APP, LOG_INFO, 0xFF00, "[receiver]", "OH_Image_Size width: %{public}d, height:%{public}d", imageSize.width, imageSize.height);
OhosImageComponent imgComponent;
OH_Image_GetComponent(nextImage_native, 4, &imgComponent); // 4=jpeg
uint8_t *img_buffer = imgComponent.byteBuffer;
OH_Image_Release(nextImage_native);
//HWTEST_F(NativeImageTest, OHNativeImageSetOnFrameAvailableListener001, Function | MediumTest | Level1)
//{
// if (image == nullptr) {
@@ -473,23 +375,3 @@ if (eglRet == EGL_FALSE) {
// .set_rotation(webrtc::kVideoRotation_90)
// .build();
}
// Returns true when `extension` appears as a complete, space-delimited
// token inside `extensions` (the format of eglQueryString(EGL_EXTENSIONS)).
// A prefix match (e.g. "EGL_KHR_image" inside "EGL_KHR_image_base") does
// NOT count: the token length must equal the extension name length.
// Precondition: both pointers are non-null (the caller guards for this).
static bool CheckEglExtension(const char* extensions, const char* extension) {
    const size_t targetLength = strlen(extension);
    const char* cursor = extensions;
    const char* const limit = extensions + strlen(extensions);
    while (cursor < limit) {
        // Skip the single separator character between tokens.
        if (*cursor == ' ') {
            ++cursor;
            continue;
        }
        // Length of the current token (run of non-space characters).
        const size_t tokenLength = strcspn(cursor, " ");
        if (tokenLength == targetLength && strncmp(extension, cursor, tokenLength) == 0) {
            return true;
        }
        cursor += tokenLength;
    }
    return false;
}

View File

@@ -2,8 +2,8 @@
acgist::LocalClient::LocalClient(acgist::MediaManager* mediaManager) : acgist::RoomClient(mediaManager) {
this->mediaManager->newLocalClient();
this->audioTrack = this->mediaManager->getAudioTrack();
this->audioTrack->set_enabled(true);
// this->audioTrack = this->mediaManager->getAudioTrack();
// this->audioTrack->set_enabled(true);
this->videoTrack = this->mediaManager->getVideoTrack();
}

View File

@@ -121,7 +121,7 @@ int acgist::MediaManager::releaseLocalClient() {
}
bool acgist::MediaManager::startCapture() {
this->startAudioCapture();
// this->startAudioCapture();
this->startVideoCapture();
return true;
}

View File

@@ -118,7 +118,7 @@ int acgist::Room::produceMedia() {
this->createRecvTransport();
}
if(this->audioProduce) {
this->produceAudio();
// this->produceAudio();
}
if(this->videoProduce) {
this->produceVideo();

View File

@@ -13,7 +13,7 @@ static void OnError(OH_AVScreenCapture* capture, int32_t errorCode) {
}
static void OnAudioBufferAvailable(OH_AVScreenCapture* capture, bool isReady, OH_AudioCaptureSourceType type) {
// OH_LOG_DEBUG(LOG_APP, "屏幕采集音频数据帧");
// OH_LOG_DEBUG(LOG_APP, "屏幕采集音频数据帧%{public}d", isReady);
if (isReady) {
OH_AudioBuffer* buffer = new OH_AudioBuffer;
int32_t ret = OH_AVScreenCapture_AcquireAudioBuffer(capture, &buffer, type);
@@ -22,12 +22,12 @@ static void OnAudioBufferAvailable(OH_AVScreenCapture* capture, bool isReady, OH
(void) buffer->timestamp;
delete buffer;
buffer = nullptr;
OH_AVScreenCapture_ReleaseAudioBuffer(capture, type);
}
OH_AVScreenCapture_ReleaseAudioBuffer(capture, type);
}
static void OnVideoBufferAvailable(OH_AVScreenCapture* capture, bool isReady) {
OH_LOG_DEBUG(LOG_APP, "屏幕采集视频数据帧");
OH_LOG_DEBUG(LOG_APP, "屏幕采集视频数据帧%{public}d", isReady);
if (isReady) {
int32_t fence = 0;
int64_t timestamp = 0;
@@ -60,10 +60,9 @@ static void OnVideoBufferAvailable(OH_AVScreenCapture* capture, bool isReady) {
// .set_timestamp_ms(rtc::TimeMillis())
// .set_rotation(webrtc::kVideoRotation_90)
// .build();
OH_NativeBuffer_Unmap(buffer);
OH_AVScreenCapture_ReleaseVideoBuffer(capture);
}
OH_AVScreenCapture_ReleaseVideoBuffer(capture);
}
acgist::ScreenCapturer::ScreenCapturer() {

View File

@@ -12,5 +12,8 @@ int32_t channelCount = 2;
int32_t bitsPerSample = 16;
std::string clientId = "";
std::string name = "";
std::string surfaceId = "";
napi_env env = nullptr;
ImageReceiverNative* imageReceiverNative = nullptr;
}

View File

@@ -1,7 +1,10 @@
import image from "@ohos.multimedia.image";
export const init: (
json : string,
push : (signal: string, body: string, id: number) => void,
request: (signal: string, body: string, id: number) => void
json : string,
push : (signal: string, body: string, id: number) => void,
request : (signal: string, body: string, id: number) => void,
imageReceiver: image.ImageReceiver,
) => number;
export const shutdown : (json: string) => number;
export const callback : (json: string) => number;

View File

@@ -1,7 +1,6 @@
import { taoyaoSignal } from "../taoyao/TaoyaoSignal";
import fs from '@ohos.file.fs';
import image from "@ohos.multimedia.image";
import { BusinessError } from "@ohos.base";
import common from "@ohos.app.ability.common";
import abilityAccessCtrl, { Permissions } from "@ohos.abilityAccessCtrl";
@@ -45,47 +44,61 @@ struct Index {
build() {
Column() {
Column() {
XComponent({
id : taoyaoSignal.xComponentController.getXComponentSurfaceId(),
type : "surface",
controller : taoyaoSignal.xComponentController,
libraryname: "libtaoyao",
})
.onLoad(() => {
taoyaoSignal.xComponentController.setXComponentSurfaceSize({ surfaceWidth: 640, surfaceHeight: 480 });
})
.width(640)
.height(480);
}
.width(640)
.height(480);
Column() {
Button("连接信令")
.fontSize(20)
.fontWeight(FontWeight.Bold)
.onClick(() => {
taoyaoSignal.connect();
});
.fontSize(16)
.fontWeight(FontWeight.Bold)
.onClick(() => {
taoyaoSignal.connect();
});
}
.width("100%")
.height("20%");
.height("64");
Column() {
Button("断开信令")
.fontSize(20)
.fontWeight(FontWeight.Bold)
.onClick(() => {
taoyaoSignal.close();
});
.fontSize(16)
.fontWeight(FontWeight.Bold)
.onClick(() => {
taoyaoSignal.close();
});
}
.width("100%")
.height("20%");
.height("64");
Column() {
Button("加载系统")
.fontSize(20)
.fontWeight(FontWeight.Bold)
.onClick(() => {
// const imageReceiver = image.createImageReceiver({ width: 640, height: 480 }, image.ImageFormat.JPEG, 8);
taoyaoSignal.init();
});
.fontSize(16)
.fontWeight(FontWeight.Bold)
.onClick(() => {
taoyaoSignal.init();
});
}
.width("100%")
.height("20%");
.height("64");
Column() {
Button("卸载系统")
.fontSize(20)
.fontWeight(FontWeight.Bold)
.onClick(() => {
taoyaoSignal.shutdown();
});
.fontSize(16)
.fontWeight(FontWeight.Bold)
.onClick(() => {
taoyaoSignal.shutdown();
});
}
.width("100%")
.height("20%");
.height("64");
Column() {
Text(
`
@@ -93,10 +106,10 @@ struct Index {
信令地址:${setting.signalAddress}
`
)
.fontSize(20)
.fontSize(10)
}
.width("100%")
.height("20%");
.height("40");
}
.height("100%");
}

View File

@@ -18,6 +18,8 @@ class Config {
password : string = "taoyao";
// 当前终端索引
clientIndex: number = 99999;
// SurfaceId
surfaceId : string = "";
};

View File

@@ -8,6 +8,7 @@
import hilog from "@ohos.hilog";
import List from '@ohos.util.List';
import image from "@ohos.multimedia.image";
import { BusinessError } from '@ohos.base';
import webSocket from "@ohos.net.webSocket";
@@ -35,6 +36,10 @@ class TaoyaoSignal {
minIndex: number = 0;
// 最大消息索引
maxIndex: number = 666;
// 图片接收
imageReceiver: image.ImageReceiver = image.createImageReceiver({ width: 640, height: 480 }, image.ImageFormat.JPEG, 2);
// 视频预览
xComponentController: XComponentController = new XComponentController;
/**
* 加载系统
@@ -42,7 +47,8 @@ class TaoyaoSignal {
*/
init() {
hilog.info(0x0000, "TaoyaoSignal", "加载系统");
taoyaoModule.init(JSON.stringify(setting.config), this.nativePush, this.nativeRequest);
setting.config.surfaceId = this.xComponentController.getXComponentSurfaceId();
taoyaoModule.init(JSON.stringify(setting.config), this.nativePush, this.nativeRequest, this.imageReceiver);
}
/**