/****************************************************************************
* Copyright 2019 Nreal Technology Limited. All rights reserved.
*
* This file is part of NRSDK.
*
* https://www.nreal.ai/
*
*****************************************************************************/
namespace NRKernal
{
using System;
using UnityEngine;
using System.Collections.Generic;
using System.Threading.Tasks;
/// A native camera proxy.
public class NativeCameraProxy
{
/// The identifier.
public static string ID = "NativeCameraProxy";
/// Gets or sets the camera data provider.
/// The camera data provider.
protected ICameraDataProvider CameraDataProvider { get; private set; }
/// The texture pointer.
protected IntPtr m_TexturePtr = IntPtr.Zero;
/// The current frame.
protected FrameRawData m_CurrentFrame;
/// Gets the resolution.
/// The resolution.
public virtual NativeResolution Resolution { get; }
/// True if initialized, false if not.
private bool m_IsInitialized = false;
/// True if the format has been set, false if not.
private bool m_IsImageFormatSet = false;
/// The Unity frame count at which the last camera frame was fetched.
protected int m_LastFrame = -1;
/// True if playing, false if not.
protected bool m_IsPlaying = false;
/// Gets a value indicating whether the camera is playing.
/// True if the camera is playing, false if not.
public bool IsCamPlaying
{
get
{
return m_IsPlaying;
}
}
/// Interface of external frame consumer.
public interface IExternFrameConsumer
{
void UpdateFrame(FrameRawData frame);
}
protected IExternFrameConsumer m_ExternFrameConsumer;
public void RegisterFrameConsumer(IExternFrameConsumer consumer)
{
m_ExternFrameConsumer = consumer;
}
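// Illustrative sketch (not part of the SDK): a minimal IExternFrameConsumer that
// receives every raw frame as it is queued; register it on a proxy instance via
// RegisterFrameConsumer. The MyFrameConsumer and proxy names are hypothetical.
//
//   public class MyFrameConsumer : NativeCameraProxy.IExternFrameConsumer
//   {
//       public void UpdateFrame(FrameRawData frame)
//       {
//           // frame.data holds the raw image bytes of this frame.
//       }
//   }
//
//   proxy.RegisterFrameConsumer(new MyFrameConsumer());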
/// A queue with a fixed size limit; frames that overflow the limit are recycled to the object pool.
public class FixedSizedQueue
{
/// The queue.
private Queue<FrameRawData> m_Queue = new Queue<FrameRawData>();
/// The lock object.
private object m_LockObj = new object();
/// The object pool.
private ObjectPool m_ObjectPool;
/// Constructor.
/// The pool.
public FixedSizedQueue(ObjectPool pool)
{
m_ObjectPool = pool;
}
/// Gets or sets the limit.
/// The limit.
public int Limit { get; set; }
/// Gets the number of frames currently in the queue.
/// The count.
public int Count
{
get
{
lock (m_LockObj)
{
return m_Queue.Count;
}
}
}
/// Adds an object onto the end of this queue.
/// The object.
public void Enqueue(FrameRawData obj)
{
lock (m_LockObj)
{
m_Queue.Enqueue(obj);
if (m_Queue.Count > Limit)
{
var frame = m_Queue.Dequeue();
m_ObjectPool.Put(frame);
}
}
}
/// Removes the head object from this queue and recycles it to the object pool.
/// The head object from this queue.
public FrameRawData Dequeue()
{
lock (m_LockObj)
{
var frame = m_Queue.Dequeue();
m_ObjectPool.Put(frame);
return frame;
}
}
public void Clear()
{
m_ObjectPool.Clear();
m_Queue.Clear();
}
}
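// Behavior sketch (illustrative): frames pushed beyond Limit recycle the oldest
// entry back into the object pool, and Dequeue likewise returns the frame to the
// pool while handing it to the caller. The pool and frame variables are hypothetical.
//
//   var pool = new ObjectPool();
//   var queue = new FixedSizedQueue(pool) { Limit = 5 };
//   queue.Enqueue(frame);                    // once Count exceeds 5, the oldest frame is recycled
//   if (queue.Count > 0)
//   {
//       FrameRawData next = queue.Dequeue(); // also put back into the pool for reuse
//   }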
/// The camera frames.
protected FixedSizedQueue m_CameraFrames;
/// The frame pool.
protected ObjectPool FramePool;
/// The active textures.
protected List<CameraModelView> m_ActiveTextures;
public List<CameraModelView> ActiveTextures
{
get
{
return m_ActiveTextures;
}
}
/// Specialized constructor for use only by derived class.
/// The provider.
protected NativeCameraProxy(ICameraDataProvider provider)
{
CameraDataProvider = provider;
this.Initialize();
}
/// Initializes this object.
public virtual void Initialize()
{
if (m_IsInitialized)
{
return;
}
NRDebugger.Info("[NativeCameraProxy] Initialize");
if (FramePool == null)
{
FramePool = new ObjectPool();
FramePool.InitCount = 10;
}
if (m_CameraFrames == null)
{
m_CameraFrames = new FixedSizedQueue(FramePool);
m_CameraFrames.Limit = 5;
}
m_ActiveTextures = new List<CameraModelView>();
CameraDataProvider.Create();
m_IsInitialized = true;
}
/// Registers the capture callback.
/// The callback.
protected void RegistCaptureCallback(CameraImageCallback callback)
{
CameraDataProvider.SetCaptureCallback(callback);
}
/// Registers the given texture so it receives camera frames.
/// The texture to register.
public void Regist(CameraModelView tex)
{
if (m_ActiveTextures == null)
{
m_ActiveTextures = new List<CameraModelView>();
}
if (!m_ActiveTextures.Contains(tex))
{
m_ActiveTextures.Add(tex);
}
NRDebugger.Debug("[NativeCamera] Regist:" + m_ActiveTextures.Count);
}
public static CameraImageFormat GetActiveCameraImageFormat(string id)
{
NativeCameraProxy controller = CameraProxyFactory.GetInstance(id);
if (controller != null && controller.ActiveTextures != null && controller.ActiveTextures.Count > 0)
{
NRDebugger.Info("[NativeCamera] Use the first texture format:" + controller.ActiveTextures[0].ImageFormat.ToString());
return controller.ActiveTextures[0].ImageFormat;
}
else
{
NRDebugger.Info("[NativeCamera] Use the default texture format: RGB_888");
return CameraImageFormat.RGB_888;
}
}
/// Removes the given texture.
/// The texture to remove.
public void Remove(CameraModelView tex)
{
if (m_ActiveTextures != null && m_ActiveTextures.Contains(tex))
{
m_ActiveTextures.Remove(tex);
}
NRDebugger.Debug("[NativeCamera] Remove:" + m_ActiveTextures.Count);
}
/// Sets image format.
/// Describes the format to use.
public void SetImageFormat(CameraImageFormat format)
{
if (!m_IsInitialized)
{
Initialize();
}
if (m_IsImageFormatSet)
{
return;
}
m_IsImageFormatSet = CameraDataProvider.SetImageFormat(format);
NRDebugger.Info("[NativeCameraProxy] SetImageFormat : " + format.ToString());
}
/// Starts camera capture.
public void Play()
{
if (!m_IsInitialized)
{
Initialize();
}
if (m_IsPlaying)
{
return;
}
m_IsPlaying = true;
NRDebugger.Info("[NativeCameraProxy] StartCapture begin.");
//System.Diagnostics.Stopwatch stopwatch = new System.Diagnostics.Stopwatch();
//stopwatch.Start();
CameraDataProvider.StartCapture();
NRDebugger.Info("[NativeCameraProxy] StartCapture end.");
//NRDebugger.Info("[NativeCameraProxy] Start to play, result:{0} cost:{1}ms", m_IsPlaying, stopwatch.ElapsedMilliseconds);
}
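// Typical start sequence (illustrative; the "RGBCamera" id and the rgbTex variable
// are hypothetical placeholders):
//
//   NativeCameraProxy proxy = CameraProxyFactory.GetInstance("RGBCamera");
//   proxy.Regist(rgbTex);                                   // rgbTex : CameraModelView
//   proxy.SetImageFormat(NativeCameraProxy.GetActiveCameraImageFormat("RGBCamera"));
//   proxy.Play();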
/// Queries whether a camera frame is available.
/// True if a frame is available, false if not.
public bool HasFrame()
{
// Report true for the rest of this Unity frame, so callers that fetch the same frame more than once can still acquire it.
if (Time.frameCount == m_LastFrame)
{
return true;
}
return m_IsPlaying && m_CameraFrames.Count > 0;
}
/// Gets the frame.
/// The frame.
public FrameRawData GetFrame()
{
if (Time.frameCount != m_LastFrame && m_CameraFrames.Count > 0)
{
// Get the newest frame of the queue.
m_CurrentFrame = m_CameraFrames.Dequeue();
m_LastFrame = Time.frameCount;
}
return m_CurrentFrame;
}
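// Per-frame polling sketch (illustrative, e.g. from a MonoBehaviour Update(); the
// proxy variable is hypothetical). HasFrame/GetFrame are keyed to Time.frameCount,
// so multiple callers within the same Unity frame receive the same FrameRawData.
//
//   if (proxy.HasFrame())
//   {
//       FrameRawData frame = proxy.GetFrame();
//       // consume frame.data here
//   }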
/// Updates the frame.
/// Handle of the camera.
/// Handle of the camera image.
/// The userdata.
public virtual void UpdateFrame(UInt64 camera_handle, UInt64 camera_image_handle, UInt64 userdata) { }
/// Queue frame.
/// The textureptr.
/// The size.
/// The timestamp.
protected void QueueFrame(IntPtr textureptr, int size, UInt64 timestamp)
{
if (!m_IsPlaying)
{
NRDebugger.Error("camera was not stopped properly, it still sending data.");
return;
}
FrameRawData frame = FramePool.Get<FrameRawData>();
bool result = FrameRawData.MakeSafe(textureptr, size, timestamp, ref frame);
if (result)
{
m_CameraFrames.Enqueue(frame);
if (m_ExternFrameConsumer != null)
{
m_ExternFrameConsumer.UpdateFrame(frame);
}
}
else
{
FramePool.Put(frame);
}
}
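// Flow sketch (illustrative): a concrete proxy overrides UpdateFrame, resolves the
// raw buffer for camera_image_handle through its ICameraDataProvider (provider calls
// omitted here), and hands the result to QueueFrame:
//
//   public override void UpdateFrame(UInt64 camera_handle, UInt64 camera_image_handle, UInt64 userdata)
//   {
//       // ... obtain m_TexturePtr, size and timestamp for camera_image_handle via CameraDataProvider ...
//       QueueFrame(m_TexturePtr, size, timestamp);
//   }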
/// Stop the camera.
public virtual void Stop()
{
if (!m_IsPlaying)
{
return;
}
// If there are no active textures left, stop capture and release the camera resources.
if (m_ActiveTextures.Count == 0)
{
m_IsPlaying = false;
NRDebugger.Info("[NativeCameraProxy] StopCapture begin.");
//System.Diagnostics.Stopwatch stopwatch = new System.Diagnostics.Stopwatch();
//stopwatch.Start();
CameraDataProvider.StopCapture();
NRDebugger.Info("[NativeCameraProxy] StopCapture end.");
//NRDebugger.Info("[NativeCameraProxy] Stop rgbcamera, result:{0} cost:{1}ms", m_IsPlaying, stopwatch.ElapsedMilliseconds);
Release();
}
}
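// Teardown sketch (illustrative; the proxy and rgbTex names are hypothetical):
// unregister the consuming texture first so that Stop can actually release the
// native camera, since capture only stops once no CameraModelView is registered.
//
//   proxy.Remove(rgbTex);
//   proxy.Stop();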
/// Release the camera.
protected virtual void Release()
{
if (CameraDataProvider == null)
{
return;
}
NRDebugger.Info("[NativeCameraProxy] Release");
CameraDataProvider.Release();
m_CameraFrames.Clear();
m_CameraFrames = null;
m_CurrentFrame.data = null;
m_ActiveTextures.Clear();
m_ActiveTextures = null;
m_IsInitialized = false;
m_IsImageFormatSet = false;
m_IsPlaying = false;
}
}
}