using NRKernal.Record;
using System.Linq;
using UnityEngine;
namespace NRKernal.Experimental.NRExamples
{
    /// <summary> An example that captures video and streams it to an RTP server. </summary>
    [HelpURL("https://developer.nreal.ai/develop/unity/video-capture")]
    public class VideoCapture2RTPExample : MonoBehaviour
    {
        /// <summary> The previewer used to display the capture preview texture. </summary>
        public NRPreviewer Previewer;

        /// <summary> Gets the url of the RTP server that the video stream is pushed to. </summary>
        /// <value> The full RTP url, including host and port. </value>
        public string RTPPath
        {
            get
            {
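                // Replace this hardcoded address with the ip/port of the machine that receives the RTP stream.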
return @"rtp://192.168.31.6:5555";
}
}
/// The video capture.
NRVideoCapture m_VideoCapture = null;

        /// <summary> Creates the video capture instance on startup. </summary>
        void Start()
        {
            CreateVideoCaptureTest();
        }

        /// <summary> Polls input and starts or stops the capture accordingly. </summary>
        void Update()
        {
            if (m_VideoCapture == null)
            {
                return;
            }
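
            // Press R on the keyboard or pull the controller trigger to start recording and show the preview.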
            if (Input.GetKeyDown(KeyCode.R) || NRInput.GetButtonDown(ControllerButton.TRIGGER))
            {
                StartVideoCapture();
                Previewer.SetData(m_VideoCapture.PreviewTexture, true);
            }
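
            // Press T on the keyboard or the controller HOME button to stop recording and hide the preview.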
            if (Input.GetKeyDown(KeyCode.T) || NRInput.GetButtonDown(ControllerButton.HOME))
            {
                StopVideoCapture();
                Previewer.SetData(m_VideoCapture.PreviewTexture, false);
            }
        }

        /// <summary> Creates the video capture object asynchronously. </summary>
        void CreateVideoCaptureTest()
        {
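            // The capture instance is delivered through a callback; it is null if creation failed.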
            NRVideoCapture.CreateAsync(false, delegate (NRVideoCapture videoCapture)
            {
                if (videoCapture != null)
                {
                    m_VideoCapture = videoCapture;
                }
                else
                {
                    NRDebugger.Error("Failed to create VideoCapture Instance!");
                }
            });
        }

        /// <summary> Starts video capture with the best supported resolution and frame rate. </summary>
        void StartVideoCapture()
        {
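            // Pick the largest supported resolution, then the highest frame rate offered at that resolution.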
            Resolution cameraResolution = NRVideoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
            NRDebugger.Info("CameraResolution: " + cameraResolution);

            int cameraFramerate = NRVideoCapture.GetSupportedFrameRatesForResolution(cameraResolution).OrderByDescending((fps) => fps).First();
            NRDebugger.Info("CameraFramerate: " + cameraFramerate);

            if (m_VideoCapture != null)
            {
                NRDebugger.Info("Created VideoCapture Instance!");
                CameraParameters cameraParameters = new CameraParameters();
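                // Record at the chosen resolution/frame rate, blending the virtual content over the RGB camera image;
                // hologramOpacity = 1 keeps the virtual layer fully opaque in the output stream.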
                cameraParameters.hologramOpacity = 1f;
                cameraParameters.frameRate = cameraFramerate;
                cameraParameters.cameraResolutionWidth = cameraResolution.width;
                cameraParameters.cameraResolutionHeight = cameraResolution.height;
                cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
                cameraParameters.blendMode = BlendMode.Blend;

                m_VideoCapture.StartVideoModeAsync(cameraParameters, OnStartedVideoCaptureMode, true);
            }
        }

        /// <summary> Stops video capture. </summary>
        void StopVideoCapture()
        {
            NRDebugger.Info("Stop Video Capture!");
            m_VideoCapture.StopRecordingAsync(OnStoppedRecordingVideo);
        }

        /// <summary> Called when video capture mode has been started. </summary>
        /// <param name="result"> The result of the operation.</param>
        void OnStartedVideoCaptureMode(NRVideoCapture.VideoCaptureResult result)
        {
            NRDebugger.Info("Started Video Capture Mode!");
            m_VideoCapture.StartRecordingAsync(RTPPath, OnStartedRecordingVideo);
        }

        /// <summary> Called when video capture mode has been stopped. </summary>
        /// <param name="result"> The result of the operation.</param>
        void OnStoppedVideoCaptureMode(NRVideoCapture.VideoCaptureResult result)
        {
            NRDebugger.Info("Stopped Video Capture Mode!");
        }

        /// <summary> Called when video recording has started. </summary>
        /// <param name="result"> The result of the operation.</param>
        void OnStartedRecordingVideo(NRVideoCapture.VideoCaptureResult result)
        {
            NRDebugger.Info("Started Recording Video!");
        }

        /// <summary> Called when video recording has stopped. </summary>
        /// <param name="result"> The result of the operation.</param>
        void OnStoppedRecordingVideo(NRVideoCapture.VideoCaptureResult result)
        {
            NRDebugger.Info("Stopped Recording Video!");
            m_VideoCapture.StopVideoModeAsync(OnStoppedVideoCaptureMode);
        }
    }
}