EasyAR自定义相机RTSP视频流(CustomCamera)
EasyAR可以使用视频源作为输入源,官方给出了示例和文档,但是对于大部分Unity开发人员来说看了文档还是一头雾水。
在Android Studio中将custom-camera.jar添加到libs中,就可以查看源代码了
分析其源代码,主要是ExternalCameraSample类中的open函数和start函数。
open即打开相机或视频流,start(callback)主要用于取图像帧,当有新的Frame时,调用callback,将最新的帧数据传入一个ByteArrayWrapper的结构中,在Unity中再将ByteArrayWrapper转换为InputFrame,即可进行识别。
用java模拟调用端的代码如下
// Wire the play button: open the stream, then start pulling preview frames.
// Both ExternalCameraSample.Callback and View.OnClickListener are single-method
// interfaces, so lambdas are behaviorally equivalent to the anonymous classes.
Button btnPlay = findViewById(R.id.btnPlay);
btnPlay.setOnClickListener(view -> {
    externalCameraSample.open();
    externalCameraSample.start(wrapper -> {
        // Log the byte length of every incoming frame buffer.
        String str = String.valueOf(wrapper.BufferLength);
        Log.d("easyar", str);
    });
});
修改ExternalCameraSample中open和start,重点是ByteArrayWrapper赋值
以下修改后的Java代码(C++代码略,需要自己写)
/**
 * Feeds an RTSP video stream (decoded by the native player) into EasyAR as a
 * custom camera source. Unity calls open()/start()/stop() through JNI.
 */
public class ExternalCameraSample {
    // Stream opened by the parameterless open() overload (original hard-coded value).
    private static final String DEFAULT_URL = "rtsp://admin:admin@192.168.43.110:554/stream1";

    private NativeLib nativeLib;
    private ExternalCameraParameters mCameraParameters;
    // Frame-pump timer; kept as a field so stop() can cancel it.
    private Timer frameTimer;
    private boolean suc = false;

    public ExternalCameraSample() {
        nativeLib = new NativeLib();
    }

    /** Opens the default RTSP stream. Returns true when the native player started. */
    public boolean open() {
        return open(DEFAULT_URL);
    }

    /** Generalized overload: open an arbitrary stream URL on native player slot 0. */
    public boolean open(String url) {
        suc = nativeLib.StartPlay(0, url);
        return suc;
    }

    // NOTE(review): unused in this class; kept for compatibility with the original sample.
    private float getRatioError(float x, float x0) {
        float a = x / Math.max(x0, 1.0F) - 1.0F;
        float b = x0 / Math.max(x, 1.0F) - 1.0F;
        return a * a + b * b;
    }

    // True once open() succeeded.
    private boolean ready() {
        return suc;
    }

    /**
     * Starts a 1 ms periodic timer that pulls the latest frame from the native
     * player, fills width/height/timestamp, and hands the frame bytes to the
     * callback wrapped in a ByteArrayWrapper.
     */
    public boolean start(final ExternalCameraSample.Callback callback) {
        if (!this.ready()) {
            return false;
        }
        // Fix: cancel any previous pump so repeated start() calls do not stack timers.
        if (frameTimer != null) {
            frameTimer.cancel();
        }
        frameTimer = new Timer();
        frameTimer.schedule(new TimerTask() {
            @Override
            public void run() {
                nativeLib.native_updateFrame(0);
                if (mCameraParameters == null) {
                    mCameraParameters = new ExternalCameraParameters();
                    mCameraParameters.setCameraType(1); // 1 = back camera (matches Unity side)
                }
                mCameraParameters.setWidth(nativeLib.native_getWidth(0));
                mCameraParameters.setHeight(nativeLib.native_getHeight(0));
                mCameraParameters.setTimestamp(SystemClock.elapsedRealtimeNanos());
                ExternalCameraSample.ByteArrayWrapper wrapper = new ExternalCameraSample.ByteArrayWrapper();
                wrapper.Buffer = (byte[]) nativeLib.native_getFrameData(0);
                wrapper.BufferLength = nativeLib.native_getBytesLength(0);
                wrapper.camParams = ExternalCameraSample.this.mCameraParameters;
                callback.onPreviewFrame(wrapper);
            }
        }, 1, 1);
        return true;
    }

    /** Stops frame delivery and the native player. Always returns true. */
    public boolean stop() {
        // Fix: the original leaked the Timer, which kept calling native_updateFrame
        // after stop(); cancel it here.
        if (frameTimer != null) {
            frameTimer.cancel();
            frameTimer = null;
        }
        if (!this.ready()) {
            return true;
        }
        nativeLib.native_stopPlay(0);
        return true;
    }

    public ExternalCameraParameters getCameraParameters() {
        return this.mCameraParameters;
    }

    // NOTE(review): 2 presumably corresponds to RGBA8888 (the Unity side uses
    // PixelFormat.RGBA8888) — confirm against EasyAR's PixelFormat enum.
    public int getPixelFormat() {
        return 2;
    }

    /** Invoked on the timer thread with every new frame. */
    public interface Callback {
        void onPreviewFrame(ExternalCameraSample.ByteArrayWrapper var1);
    }

    /** Plain data holder crossing the Java/Unity boundary. */
    public static class ByteArrayWrapper {
        public byte[] Buffer;
        public int BufferLength;
        public ExternalCameraParameters camParams;

        public ByteArrayWrapper() {
        }
    }
}
JNI部分:从C++向Java返回字节数组
extern "C"
JNIEXPORT jbyteArray JNICALL
Java_com_example_nativelib_NativeLib_native_1getFrameData(JNIEnv *env, jobject thiz, jint index) {
    // Copy the current frame of player[index] into a freshly allocated Java byte[].
    // NOTE(review): assumes index is a valid slot of the global player array — confirm at call sites.
    jsize numBytes = (jsize) player[index].m_numBytes;
    jbyteArray jarrRV = env->NewByteArray(numBytes);
    if (jarrRV == nullptr) {
        // Fix: NewByteArray returns null on allocation failure (OutOfMemoryError pending);
        // calling SetByteArrayRegion on it would crash. Let the JVM raise the error on return.
        return nullptr;
    }
    env->SetByteArrayRegion(jarrRV, 0, numBytes, (jbyte *) player[index].m_imgData);
    return jarrRV;
}
Unity代码(在示例上做了少量修改)
//================================================================================================================================
//
// Copyright (c) 2015-2023 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
#if !UNITY_EDITOR && (UNITY_ANDROID || UNITY_IOS)
using AOT;
using System.Runtime.InteropServices;
#endif
using System;
using UnityEngine;
using UnityEngine.UI;
namespace easyar
{
/// <summary>
/// EasyAR frame source fed by a Java-side ExternalCameraSample (RTSP stream).
/// Frames pushed from Java via CameraCallback are wrapped into InputFrames
/// and handed to the session sink.
/// </summary>
public class CustomCameraSource : FrameSource
{
    private bool willOpen = false;
    private AndroidJavaObject externalCamera;
    private CameraCallback cameraCallback;

    public override Optional<InputFrameSourceType> Type { get => InputFrameSourceType.General; }

    // NOTE(review): IPhonePlayer is reported available, but only an Android
    // implementation (AndroidJavaObject) exists below — confirm intent.
    public override Optional<bool> IsAvailable { get => Application.platform == RuntimePlatform.Android || Application.platform == RuntimePlatform.IPhonePlayer; }

    protected override void OnEnable()
    {
        base.OnEnable();
        if (externalCamera != null)
            externalCamera.Call<bool>("start", cameraCallback);
    }

    protected override void OnDisable()
    {
        base.OnDisable();
        if (externalCamera != null)
            externalCamera.Call<bool>("stop");
    }

    protected virtual void OnDestroy()
    {
        Close();
    }

    public override void OnAssemble(ARSession session)
    {
        base.OnAssemble(session);
        Open();
    }

    /// <summary>
    /// Requests camera permission and, once granted, creates the Java-side
    /// ExternalCameraSample, opens the stream and starts frame delivery.
    /// </summary>
    public void Open()
    {
        if (Application.platform != RuntimePlatform.Android && Application.platform != RuntimePlatform.IPhonePlayer)
        {
            throw new UIPopupException(typeof(CustomCameraSource) + " not available under " + Application.platform);
        }
        willOpen = true;
        CameraDevice.requestPermissions(EasyARController.Scheduler, (Action<PermissionStatus, string>)((status, msg) =>
        {
            if (!willOpen)
            {
                return;
            }
            // Fix: the original ignored the permission result and opened the
            // camera even when the request was denied.
            if (status != PermissionStatus.Granted)
            {
                throw new UIPopupException("Camera permission not granted: " + msg);
            }
            externalCamera = new AndroidJavaObject("com.example.nativelib.ExternalCameraSample");
            externalCamera.Call<bool>("open");
            cameraCallback = new CameraCallback(dataWrapper =>
            {
                if (sink == null)
                {
                    return;
                }
                using (var param = dataWrapper.Get<AndroidJavaObject>("camParams"))
                {
                    var byteArray = dataWrapper.Get<AndroidJavaObject>("Buffer");
                    var jniByteArray = byteArray.GetRawObject();
                    // Wrap the Java byte[] without copying; release the JNI
                    // reference when the buffer is disposed.
                    var buffer = JniUtility.wrapByteArray(jniByteArray, true, () => { byteArray.Dispose(); });
                    var format = PixelFormat.RGBA8888;  // pixel format of the decoded stream
                    int orientation = 90;               // image rotation, 0~360
                    int cameraType = 1;                 // 1 = back camera, 2 = front camera
                    double timestamp = param.Call<long>("getTimestamp") * 1e-9; // nanoseconds -> seconds
                    var imageWidth = param.Call<int>("getWidth");
                    var imageHeight = param.Call<int>("getHeight");
                    var imageSize = new Vector2(imageWidth, imageHeight);
                    HandleSink(buffer, format, imageSize, orientation, cameraType, timestamp);
                }
            });
            if (enabled)
            {
                OnEnable();
            }
        }));
    }

    /// <summary>Stops frame delivery and releases the Java-side camera object.</summary>
    public void Close()
    {
        willOpen = false;
        OnDisable();
        if (externalCamera != null)
        {
            externalCamera.Dispose();
            // Fix: clear the reference so OnEnable/OnDisable never call into a
            // disposed AndroidJavaObject afterwards.
            externalCamera = null;
        }
    }

    // Converts one raw frame into an EasyAR InputFrame and pushes it into the sink.
    private void HandleSink(Buffer imageBuffer, PixelFormat format, Vector2 imageSize, int orientation, int cameraType, double timestamp)
    {
        // Fix: dispose imageBuffer via using (outermost, so it is released last,
        // matching the original order) even if frame creation throws.
        using (imageBuffer)
        using (var cameraParams = CameraParameters.createWithDefaultIntrinsics(new Vec2I((int)imageSize.x, (int)imageSize.y), (CameraDeviceType)cameraType, orientation))
        using (var image = new Image(imageBuffer, format, (int)imageSize.x, (int)imageSize.y))
        using (var frame = InputFrame.createWithImageAndCameraParametersAndTemporal(image, cameraParams, timestamp))
        {
            if (sink != null)
                sink.handle(frame);
        }
    }

    // Bridges Java ExternalCameraSample.Callback.onPreviewFrame into C#.
    private class CameraCallback : AndroidJavaProxy
    {
        private Action<AndroidJavaObject> onPreviewFrameCallback;

        public CameraCallback(Action<AndroidJavaObject> onPreviewFrameCallback) : base("com.example.nativelib.ExternalCameraSample$Callback")
        {
            this.onPreviewFrameCallback = onPreviewFrameCallback;
        }

        public void onPreviewFrame(AndroidJavaObject dataWrapper)
        {
            // NOTE: Workaround callback parameter not disposed in some Unity versions like 2022.2.
            // This looks like a bug in Unity because usually the caller is responsible for disposing the callback parameter.
            // And the behavior change is not compatible which will cause serious memory leak.
            using (dataWrapper) // workaround
            {
                onPreviewFrameCallback(dataWrapper);
            }
        }
    }
}
}
运行效果
原文地址:https://blog.csdn.net/zouxin_88/article/details/142565203
免责声明:本站文章内容转载自网络资源,如本站内容侵犯了原著者的合法权益,可联系本站删除。更多内容请关注自学内容网(zxcms.com)!