#if !UNITY_WSA_10_0

using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UtilsModule;
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;

namespace OpenCVForUnity.DnnModule
{
    // C++: class Net
    /**
     * This class allows creating and manipulating comprehensive artificial neural networks.
     *
     * A neural network is represented as a directed acyclic graph (DAG), where vertices are Layer instances
     * and edges specify the relationships between layer inputs and outputs.
     *
     * Each network layer has a unique integer id and a unique string name inside its network.
     * LayerId can store either a layer name or a layer id.
     *
     * This class supports reference counting of its instances, i.e. copies point to the same instance.
     */
    public class Net : DisposableOpenCVObject
    {

        protected override void Dispose(bool disposing)
        {
            try
            {
                if (disposing)
                {
                }
                if (IsEnabledDispose)
                {
                    if (nativeObj != IntPtr.Zero)
                        dnn_Net_delete(nativeObj);
                    nativeObj = IntPtr.Zero;
                }
            }
            finally
            {
                base.Dispose(disposing);
            }
        }

        protected internal Net(IntPtr addr) : base(addr) { }

        public IntPtr getNativeObjAddr() { return nativeObj; }

        // internal usage only
        public static Net __fromPtr__(IntPtr addr) { return new Net(addr); }

        //
        // C++: cv::dnn::Net::Net()
        //

        public Net()
        {
            nativeObj = DisposableObject.ThrowIfNullIntPtr(dnn_Net_Net_10());
        }
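
        // Illustrative usage sketch (not part of the generated bindings): Net derives from
        // DisposableOpenCVObject, so wrapping it in a using block releases the native
        // cv::dnn::Net deterministically.
        private static void Example_EmptyNet()
        {
            using (Net net = new Net())
            {
                // A default-constructed net has no layers yet.
                if (net.empty())
                {
                    // build or load layers before calling forward()
                }
            }
        }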

        //
        // C++: static Net cv::dnn::Net::readFromModelOptimizer(String xml, String bin)
        //

        /**
         * Create a network from Intel's Model Optimizer intermediate representation (IR).
         * param xml XML configuration file with network's topology.
         * param bin Binary file with trained weights.
         * Networks imported from Intel's Model Optimizer are launched in Intel's Inference Engine
         * backend.
         * return automatically generated
         */
        public static Net readFromModelOptimizer(string xml, string bin)
        {
            return new Net(DisposableObject.ThrowIfNullIntPtr(dnn_Net_readFromModelOptimizer_10(xml, bin)));
        }
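
        // Illustrative sketch: loading an OpenVINO IR pair from disk. "model.xml" and
        // "model.bin" are hypothetical paths.
        private static Net Example_ReadFromModelOptimizerFiles()
        {
            Net net = Net.readFromModelOptimizer("model.xml", "model.bin");
            if (net.empty())
                throw new InvalidOperationException("IR model failed to load.");
            return net; // ready for setInput()/forward()
        }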

        //
        // C++: static Net cv::dnn::Net::readFromModelOptimizer(vector_uchar bufferModelConfig, vector_uchar bufferWeights)
        //

        /**
         * Create a network from Intel's Model Optimizer in-memory buffers with intermediate representation (IR).
         * param bufferModelConfig buffer with model's configuration.
         * param bufferWeights buffer with model's trained weights.
         * return Net object.
         */
        public static Net readFromModelOptimizer(MatOfByte bufferModelConfig, MatOfByte bufferWeights)
        {
            if (bufferModelConfig != null) bufferModelConfig.ThrowIfDisposed();
            if (bufferWeights != null) bufferWeights.ThrowIfDisposed();
            Mat bufferModelConfig_mat = bufferModelConfig;
            Mat bufferWeights_mat = bufferWeights;
            return new Net(DisposableObject.ThrowIfNullIntPtr(dnn_Net_readFromModelOptimizer_11(bufferModelConfig_mat.nativeObj, bufferWeights_mat.nativeObj)));
        }
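
        // Illustrative sketch: loading the same IR from in-memory buffers (e.g. bytes fetched
        // at runtime), assuming CoreModule's MatOfByte(byte[]) constructor.
        private static Net Example_ReadFromModelOptimizerBuffers(byte[] xmlBytes, byte[] binBytes)
        {
            using (MatOfByte bufferModelConfig = new MatOfByte(xmlBytes))
            using (MatOfByte bufferWeights = new MatOfByte(binBytes))
            {
                return Net.readFromModelOptimizer(bufferModelConfig, bufferWeights);
            }
        }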

        //
        // C++: bool cv::dnn::Net::empty()
        //

        /**
         * Returns true if there are no layers in the network.
         * return automatically generated
         */
        public bool empty()
        {
            ThrowIfDisposed();
            return dnn_Net_empty_10(nativeObj);
        }

        //
        // C++: String cv::dnn::Net::dump()
        //

        /**
         * Dump the net to a String.
         * return String with the structure, hyperparameters, backend, target and fusion.
         * Call this method after setInput(). To see the correct backend, target and fusion, run it after forward().
         */
        public string dump()
        {
            ThrowIfDisposed();
            string retVal = Marshal.PtrToStringAnsi(DisposableObject.ThrowIfNullIntPtr(dnn_Net_dump_10(nativeObj)));
            return retVal;
        }

        //
        // C++: void cv::dnn::Net::dumpToFile(String path)
        //

        /**
         * Dump the net structure, hyperparameters, backend, target and fusion to a dot file.
         * param path path to the output file with .dot extension
         * SEE: dump()
         */
        public void dumpToFile(string path)
        {
            ThrowIfDisposed();
            dnn_Net_dumpToFile_10(nativeObj, path);
        }

        //
        // C++: int cv::dnn::Net::getLayerId(String layer)
        //

        /**
         * Converts the string name of a layer to its integer identifier.
         * return id of the layer, or -1 if the layer wasn't found.
         * param layer automatically generated
         */
        public int getLayerId(string layer)
        {
            ThrowIfDisposed();
            return dnn_Net_getLayerId_10(nativeObj, layer);
        }

        //
        // C++: vector_String cv::dnn::Net::getLayerNames()
        //

        public List<string> getLayerNames()
        {
            ThrowIfDisposed();
            List<string> retVal = new List<string>();
            Mat retValMat = new Mat(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getLayerNames_10(nativeObj)));
            Converters.Mat_to_vector_String(retValMat, retVal);
            return retVal;
        }
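
        // Illustrative sketch: walking every layer by name and resolving it to its id and
        // Layer object.
        private static void Example_EnumerateLayers(Net net)
        {
            foreach (string name in net.getLayerNames())
            {
                int id = net.getLayerId(name);  // -1 if the name is unknown
                Layer layer = net.getLayer(id); // exposes the layer's type, name and blobs
            }
        }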

        //
        // C++: Ptr_Layer cv::dnn::Net::getLayer(int layerId)
        //

        /**
         * Returns a pointer to the layer with the specified id or name which the network uses.
         * param layerId automatically generated
         * return automatically generated
         */
        public Layer getLayer(int layerId)
        {
            ThrowIfDisposed();
            return Layer.__fromPtr__(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getLayer_10(nativeObj, layerId)));
        }

        //
        // C++: Ptr_Layer cv::dnn::Net::getLayer(String layerName)
        //

        /**
         *
         * deprecated Use int getLayerId(const String &layer)
         * param layerName automatically generated
         * return automatically generated
         */
        [Obsolete("This method is deprecated.")]
        public Layer getLayer(string layerName)
        {
            ThrowIfDisposed();
            return Layer.__fromPtr__(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getLayer_11(nativeObj, layerName)));
        }

        //
        // C++: Ptr_Layer cv::dnn::Net::getLayer(LayerId layerId)
        //

        /**
         *
         * deprecated to be removed
         * param layerId automatically generated
         * return automatically generated
         */
        [Obsolete("This method is deprecated.")]
        public Layer getLayer(DictValue layerId)
        {
            ThrowIfDisposed();
            if (layerId != null) layerId.ThrowIfDisposed();
            return Layer.__fromPtr__(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getLayer_12(nativeObj, layerId.getNativeObjAddr())));
        }

        //
        // C++: void cv::dnn::Net::connect(String outPin, String inpPin)
        //

        /**
         * Connects the output of the first layer to the input of the second layer.
         * param outPin descriptor of the first layer output.
         * param inpPin descriptor of the second layer input.
         *
         * Descriptors have the template &lt;layer_name&gt;[.input_number]:
         * - the first part of the template, layer_name, is the string name of the added layer.
         *   If this part is empty then the network input pseudo-layer will be used;
         * - the second, optional part of the template, input_number, is either the number of the
         *   layer input or its label. If this part is omitted then the first layer input will be used.
         *
         * SEE: setNetInputs(), Layer::inputNameToIndex(), Layer::outputNameToIndex()
         */
        public void connect(string outPin, string inpPin)
        {
            ThrowIfDisposed();
            dnn_Net_connect_10(nativeObj, outPin, inpPin);
        }

        //
        // C++: void cv::dnn::Net::setInputsNames(vector_String inputBlobNames)
        //

        /**
         * Sets the output names of the network input pseudo-layer.
         *
         * Each net always has its own special network input pseudo-layer with id=0.
         * This layer only stores the user blobs and doesn't perform any computations.
         * In fact, this layer provides the only way to pass user data into the network.
         * As any other layer, this layer can label its outputs, and this function provides an easy way to do that.
         * param inputBlobNames automatically generated
         */
        public void setInputsNames(List<string> inputBlobNames)
        {
            ThrowIfDisposed();
            Mat inputBlobNames_mat = Converters.vector_String_to_Mat(inputBlobNames);
            dnn_Net_setInputsNames_10(nativeObj, inputBlobNames_mat.nativeObj);
        }

        //
        // C++: void cv::dnn::Net::setInputShape(String inputName, MatShape shape)
        //

        /**
         * Specify shape of network input.
         * param inputName automatically generated
         * param shape automatically generated
         */
        public void setInputShape(string inputName, MatOfInt shape)
        {
            ThrowIfDisposed();
            if (shape != null) shape.ThrowIfDisposed();
            Mat shape_mat = shape;
            dnn_Net_setInputShape_10(nativeObj, inputName, shape_mat.nativeObj);
        }

        //
        // C++: Mat cv::dnn::Net::forward(String outputName = String())
        //

        /**
         * Runs a forward pass to compute the output of the layer with name {code outputName}.
         * param outputName name of the layer whose output is needed
         * return blob for the first output of the specified layer.
         * By default runs a forward pass for the whole network.
         */
        public Mat forward(string outputName)
        {
            ThrowIfDisposed();
            return new Mat(DisposableObject.ThrowIfNullIntPtr(dnn_Net_forward_10(nativeObj, outputName)));
        }

        /**
         * Runs a forward pass to compute the output of the layer with name {code outputName}.
         * return blob for the first output of the specified layer.
         * By default runs a forward pass for the whole network.
         */
        public Mat forward()
        {
            ThrowIfDisposed();
            return new Mat(DisposableObject.ThrowIfNullIntPtr(dnn_Net_forward_11(nativeObj)));
        }
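
        // Illustrative sketch: the minimal inference loop. The input blob would normally come
        // from Dnn.blobFromImage (see the setInput example further down).
        private static Mat Example_SingleOutputForward(Net net, Mat inputBlob)
        {
            net.setInput(inputBlob);
            return net.forward(); // first output of the whole network
        }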

        //
        // C++: AsyncArray cv::dnn::Net::forwardAsync(String outputName = String())
        //

        // Return type 'AsyncArray' is not supported, skipping the function

        //
        // C++: void cv::dnn::Net::forward(vector_Mat& outputBlobs, String outputName = String())
        //

        /**
         * Runs a forward pass to compute the output of the layer with name {code outputName}.
         * param outputBlobs contains all output blobs for the specified layer.
         * param outputName name of the layer whose output is needed
         * If {code outputName} is empty, runs a forward pass for the whole network.
         */
        public void forward(List<Mat> outputBlobs, string outputName)
        {
            ThrowIfDisposed();
            Mat outputBlobs_mat = new Mat();
            dnn_Net_forward_12(nativeObj, outputBlobs_mat.nativeObj, outputName);
            Converters.Mat_to_vector_Mat(outputBlobs_mat, outputBlobs);
            outputBlobs_mat.release();
        }

        /**
         * Runs a forward pass to compute the output of the layer with name {code outputName}.
         * param outputBlobs contains all output blobs for the specified layer.
         * If {code outputName} is empty, runs a forward pass for the whole network.
         */
        public void forward(List<Mat> outputBlobs)
        {
            ThrowIfDisposed();
            Mat outputBlobs_mat = new Mat();
            dnn_Net_forward_13(nativeObj, outputBlobs_mat.nativeObj);
            Converters.Mat_to_vector_Mat(outputBlobs_mat, outputBlobs);
            outputBlobs_mat.release();
        }

        //
        // C++: void cv::dnn::Net::forward(vector_Mat& outputBlobs, vector_String outBlobNames)
        //

        /**
         * Runs a forward pass to compute the outputs of the layers listed in {code outBlobNames}.
         * param outputBlobs contains blobs for the first outputs of the specified layers.
         * param outBlobNames names of the layers whose outputs are needed
         */
        public void forward(List<Mat> outputBlobs, List<string> outBlobNames)
        {
            ThrowIfDisposed();
            Mat outputBlobs_mat = new Mat();
            Mat outBlobNames_mat = Converters.vector_String_to_Mat(outBlobNames);
            dnn_Net_forward_14(nativeObj, outputBlobs_mat.nativeObj, outBlobNames_mat.nativeObj);
            Converters.Mat_to_vector_Mat(outputBlobs_mat, outputBlobs);
            outputBlobs_mat.release();
        }

        //
        // C++: void cv::dnn::Net::forward(vector_vector_Mat& outputBlobs, vector_String outBlobNames)
        //

        // Unknown type 'vector_vector_Mat' (O), skipping the function

        //
        // C++: Net cv::dnn::Net::quantize(vector_Mat calibData, int inputsDtype, int outputsDtype, bool perChannel = true)
        //

        /**
         * Returns a quantized Net from a floating-point Net.
         * param calibData Calibration data to compute the quantization parameters.
         * param inputsDtype Datatype of the quantized net's inputs. Can be CV_32F or CV_8S.
         * param outputsDtype Datatype of the quantized net's outputs. Can be CV_32F or CV_8S.
         * param perChannel Quantization granularity of the quantized Net. The default is true, which means the model
         * is quantized in a per-channel way (channel-wise). Set it to false to quantize the model in a per-tensor way (tensor-wise).
         * return automatically generated
         */
        public Net quantize(List<Mat> calibData, int inputsDtype, int outputsDtype, bool perChannel)
        {
            ThrowIfDisposed();
            Mat calibData_mat = Converters.vector_Mat_to_Mat(calibData);
            return new Net(DisposableObject.ThrowIfNullIntPtr(dnn_Net_quantize_10(nativeObj, calibData_mat.nativeObj, inputsDtype, outputsDtype, perChannel)));
        }

        /**
         * Returns a quantized Net from a floating-point Net.
         * param calibData Calibration data to compute the quantization parameters.
         * param inputsDtype Datatype of the quantized net's inputs. Can be CV_32F or CV_8S.
         * param outputsDtype Datatype of the quantized net's outputs. Can be CV_32F or CV_8S.
         * The quantization granularity defaults to per-channel (channel-wise).
         * return automatically generated
         */
        public Net quantize(List<Mat> calibData, int inputsDtype, int outputsDtype)
        {
            ThrowIfDisposed();
            Mat calibData_mat = Converters.vector_Mat_to_Mat(calibData);
            return new Net(DisposableObject.ThrowIfNullIntPtr(dnn_Net_quantize_11(nativeObj, calibData_mat.nativeObj, inputsDtype, outputsDtype)));
        }
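
        // Illustrative sketch: producing an int8 net while keeping float inputs and outputs.
        // CvType.CV_32F comes from CoreModule; calibData is a small set of representative
        // input blobs used to compute the quantization parameters.
        private static Net Example_QuantizeToInt8(Net net, List<Mat> calibData)
        {
            return net.quantize(calibData, CvType.CV_32F, CvType.CV_32F);
        }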

        //
        // C++: void cv::dnn::Net::getInputDetails(vector_float& scales, vector_int& zeropoints)
        //

        /**
         * Returns input scale and zeropoint for a quantized Net.
         * param scales output parameter for returning input scales.
         * param zeropoints output parameter for returning input zeropoints.
         */
        public void getInputDetails(MatOfFloat scales, MatOfInt zeropoints)
        {
            ThrowIfDisposed();
            if (scales != null) scales.ThrowIfDisposed();
            if (zeropoints != null) zeropoints.ThrowIfDisposed();
            Mat scales_mat = scales;
            Mat zeropoints_mat = zeropoints;
            dnn_Net_getInputDetails_10(nativeObj, scales_mat.nativeObj, zeropoints_mat.nativeObj);
        }

        //
        // C++: void cv::dnn::Net::getOutputDetails(vector_float& scales, vector_int& zeropoints)
        //

        /**
         * Returns output scale and zeropoint for a quantized Net.
         * param scales output parameter for returning output scales.
         * param zeropoints output parameter for returning output zeropoints.
         */
        public void getOutputDetails(MatOfFloat scales, MatOfInt zeropoints)
        {
            ThrowIfDisposed();
            if (scales != null) scales.ThrowIfDisposed();
            if (zeropoints != null) zeropoints.ThrowIfDisposed();
            Mat scales_mat = scales;
            Mat zeropoints_mat = zeropoints;
            dnn_Net_getOutputDetails_10(nativeObj, scales_mat.nativeObj, zeropoints_mat.nativeObj);
        }
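
        // Illustrative sketch: reading the quantization parameters of a quantized net back
        // into managed arrays via CoreModule's toArray() helpers.
        private static void Example_ReadQuantizationDetails(Net quantizedNet)
        {
            using (MatOfFloat scales = new MatOfFloat())
            using (MatOfInt zeropoints = new MatOfInt())
            {
                quantizedNet.getInputDetails(scales, zeropoints);
                float[] s = scales.toArray();   // one scale per network input
                int[] z = zeropoints.toArray(); // one zeropoint per network input
            }
        }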

        //
        // C++: void cv::dnn::Net::setHalideScheduler(String scheduler)
        //

        /**
         * Compile Halide layers.
         * param scheduler Path to a YAML file with scheduling directives.
         * SEE: setPreferableBackend
         *
         * Schedules layers that support the Halide backend, then compiles them for a
         * specific target. For layers that are not represented in the scheduling file,
         * or if no manual scheduling is used at all, automatic scheduling will be applied.
         */
        public void setHalideScheduler(string scheduler)
        {
            ThrowIfDisposed();
            dnn_Net_setHalideScheduler_10(nativeObj, scheduler);
        }

        //
        // C++: void cv::dnn::Net::setPreferableBackend(int backendId)
        //

        /**
         * Ask the network to use a specific computation backend where supported.
         * param backendId backend identifier.
         * SEE: Backend
         *
         * If OpenCV is compiled with Intel's Inference Engine library, DNN_BACKEND_DEFAULT
         * means DNN_BACKEND_INFERENCE_ENGINE. Otherwise it equals DNN_BACKEND_OPENCV.
         */
        public void setPreferableBackend(int backendId)
        {
            ThrowIfDisposed();
            dnn_Net_setPreferableBackend_10(nativeObj, backendId);
        }

        //
        // C++: void cv::dnn::Net::setPreferableTarget(int targetId)
        //

        /**
         * Ask the network to make computations on a specific target device.
         * param targetId target identifier.
         * SEE: Target
         *
         * List of supported backend / target combinations:
         * |                        | DNN_BACKEND_OPENCV | DNN_BACKEND_INFERENCE_ENGINE | DNN_BACKEND_HALIDE | DNN_BACKEND_CUDA |
         * |------------------------|--------------------|------------------------------|--------------------|------------------|
         * | DNN_TARGET_CPU         |         +          |              +               |         +          |                  |
         * | DNN_TARGET_OPENCL      |         +          |              +               |         +          |                  |
         * | DNN_TARGET_OPENCL_FP16 |         +          |              +               |                    |                  |
         * | DNN_TARGET_MYRIAD      |                    |              +               |                    |                  |
         * | DNN_TARGET_FPGA        |                    |              +               |                    |                  |
         * | DNN_TARGET_CUDA        |                    |                              |                    |        +         |
         * | DNN_TARGET_CUDA_FP16   |                    |                              |                    |        +         |
         * | DNN_TARGET_HDDL        |                    |              +               |                    |                  |
         */
        public void setPreferableTarget(int targetId)
        {
            ThrowIfDisposed();
            dnn_Net_setPreferableTarget_10(nativeObj, targetId);
        }
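
        // Illustrative sketch: choosing a backend/target pair from the table above.
        // DNN_BACKEND_OPENCV + DNN_TARGET_CPU (constants from this module's Dnn class)
        // is the portable default.
        private static void Example_SelectBackendAndTarget(Net net)
        {
            net.setPreferableBackend(Dnn.DNN_BACKEND_OPENCV);
            net.setPreferableTarget(Dnn.DNN_TARGET_CPU);
        }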

        //
        // C++: void cv::dnn::Net::setInput(Mat blob, String name = "", double scalefactor = 1.0, Scalar mean = Scalar())
        //

        /**
         * Sets the new input value for the network.
         * param blob A new blob. Should have CV_32F or CV_8U depth.
         * param name A name of the input layer.
         * param scalefactor An optional normalization scale.
         * param mean Optional mean subtraction values.
         * SEE: connect(String, String) to know the format of the descriptor.
         *
         * If scale or mean values are specified, a final input blob is computed
         * as:
         * \(input(n,c,h,w) = scalefactor \times (blob(n,c,h,w) - mean_c)\)
         */
        public void setInput(Mat blob, string name, double scalefactor, Scalar mean)
        {
            ThrowIfDisposed();
            if (blob != null) blob.ThrowIfDisposed();
            dnn_Net_setInput_10(nativeObj, blob.nativeObj, name, scalefactor, mean.val[0], mean.val[1], mean.val[2], mean.val[3]);
        }

        /**
         * Sets the new input value for the network.
         * param blob A new blob. Should have CV_32F or CV_8U depth.
         * param name A name of the input layer.
         * param scalefactor An optional normalization scale.
         * SEE: connect(String, String) to know the format of the descriptor.
         *
         * If scale or mean values are specified, a final input blob is computed
         * as:
         * \(input(n,c,h,w) = scalefactor \times (blob(n,c,h,w) - mean_c)\)
         */
        public void setInput(Mat blob, string name, double scalefactor)
        {
            ThrowIfDisposed();
            if (blob != null) blob.ThrowIfDisposed();
            dnn_Net_setInput_11(nativeObj, blob.nativeObj, name, scalefactor);
        }

        /**
         * Sets the new input value for the network.
         * param blob A new blob. Should have CV_32F or CV_8U depth.
         * param name A name of the input layer.
         * SEE: connect(String, String) to know the format of the descriptor.
         *
         * If scale or mean values are specified, a final input blob is computed
         * as:
         * \(input(n,c,h,w) = scalefactor \times (blob(n,c,h,w) - mean_c)\)
         */
        public void setInput(Mat blob, string name)
        {
            ThrowIfDisposed();
            if (blob != null) blob.ThrowIfDisposed();
            dnn_Net_setInput_12(nativeObj, blob.nativeObj, name);
        }

        /**
         * Sets the new input value for the network.
         * param blob A new blob. Should have CV_32F or CV_8U depth.
         * SEE: connect(String, String) to know the format of the descriptor.
         *
         * If scale or mean values are specified, a final input blob is computed
         * as:
         * \(input(n,c,h,w) = scalefactor \times (blob(n,c,h,w) - mean_c)\)
         */
        public void setInput(Mat blob)
        {
            ThrowIfDisposed();
            if (blob != null) blob.ThrowIfDisposed();
            dnn_Net_setInput_13(nativeObj, blob.nativeObj);
        }
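
        // Illustrative sketch: preparing an image blob with Dnn.blobFromImage and feeding it
        // to the net. The 1/255 scale, 224x224 size, zero mean and BGR-to-RGB swap are
        // model-dependent values shown only for illustration.
        private static void Example_SetImageInput(Net net, Mat bgrImage)
        {
            using (Mat blob = Dnn.blobFromImage(bgrImage, 1.0 / 255.0, new Size(224, 224),
                                                new Scalar(0, 0, 0), true, false))
            {
                net.setInput(blob); // the native net keeps its own reference to the blob data
            }
        }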

        //
        // C++: void cv::dnn::Net::setParam(int layer, int numParam, Mat blob)
        //

        /**
         * Sets the new value for the learned param of the layer.
         * param layer name or id of the layer.
         * param numParam index of the layer parameter in the Layer::blobs array.
         * param blob the new value.
         * SEE: Layer::blobs
         * <b>Note:</b> If shape of the new blob differs from the previous shape,
         * then the following forward pass may fail.
         */
        public void setParam(int layer, int numParam, Mat blob)
        {
            ThrowIfDisposed();
            if (blob != null) blob.ThrowIfDisposed();
            dnn_Net_setParam_10(nativeObj, layer, numParam, blob.nativeObj);
        }

        //
        // C++: void cv::dnn::Net::setParam(String layerName, int numParam, Mat blob)
        //

        public void setParam(string layerName, int numParam, Mat blob)
        {
            ThrowIfDisposed();
            if (blob != null) blob.ThrowIfDisposed();
            dnn_Net_setParam_11(nativeObj, layerName, numParam, blob.nativeObj);
        }

        //
        // C++: Mat cv::dnn::Net::getParam(int layer, int numParam = 0)
        //

        /**
         * Returns parameter blob of the layer.
         * param layer name or id of the layer.
         * param numParam index of the layer parameter in the Layer::blobs array.
         * SEE: Layer::blobs
         * return automatically generated
         */
        public Mat getParam(int layer, int numParam)
        {
            ThrowIfDisposed();
            return new Mat(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getParam_10(nativeObj, layer, numParam)));
        }

        /**
         * Returns parameter blob of the layer.
         * param layer name or id of the layer.
         * SEE: Layer::blobs
         * return automatically generated
         */
        public Mat getParam(int layer)
        {
            ThrowIfDisposed();
            return new Mat(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getParam_11(nativeObj, layer)));
        }

        //
        // C++: Mat cv::dnn::Net::getParam(String layerName, int numParam = 0)
        //

        public Mat getParam(string layerName, int numParam)
        {
            ThrowIfDisposed();
            return new Mat(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getParam_12(nativeObj, layerName, numParam)));
        }

        public Mat getParam(string layerName)
        {
            ThrowIfDisposed();
            return new Mat(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getParam_13(nativeObj, layerName)));
        }

        //
        // C++: vector_int cv::dnn::Net::getUnconnectedOutLayers()
        //

        /**
         * Returns indexes of layers with unconnected outputs.
         *
         * FIXIT: Rework API to registerOutput() approach, deprecate this call
         * return automatically generated
         */
        public MatOfInt getUnconnectedOutLayers()
        {
            ThrowIfDisposed();
            return MatOfInt.fromNativeAddr(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getUnconnectedOutLayers_10(nativeObj)));
        }

        //
        // C++: vector_String cv::dnn::Net::getUnconnectedOutLayersNames()
        //

        /**
         * Returns names of layers with unconnected outputs.
         *
         * FIXIT: Rework API to registerOutput() approach, deprecate this call
         * return automatically generated
         */
        public List<string> getUnconnectedOutLayersNames()
        {
            ThrowIfDisposed();
            List<string> retVal = new List<string>();
            Mat retValMat = new Mat(DisposableObject.ThrowIfNullIntPtr(dnn_Net_getUnconnectedOutLayersNames_10(nativeObj)));
            Converters.Mat_to_vector_String(retValMat, retVal);
            return retVal;
        }
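
        // Illustrative sketch: the common detection-style pattern of forwarding to every
        // unconnected output in a single pass.
        private static List<Mat> Example_ForwardToAllOutputs(Net net, Mat inputBlob)
        {
            net.setInput(inputBlob);
            List<Mat> outs = new List<Mat>();
            net.forward(outs, net.getUnconnectedOutLayersNames());
            return outs; // one Mat per unconnected output layer
        }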

        //
        // C++: void cv::dnn::Net::getLayersShapes(vector_MatShape netInputShapes, vector_int& layersIds, vector_vector_MatShape& inLayersShapes, vector_vector_MatShape& outLayersShapes)
        //

        // Unknown type 'vector_vector_MatShape' (O), skipping the function

        //
        // C++: void cv::dnn::Net::getLayersShapes(MatShape netInputShape, vector_int& layersIds, vector_vector_MatShape& inLayersShapes, vector_vector_MatShape& outLayersShapes)
        //

        // Unknown type 'vector_vector_MatShape' (O), skipping the function

        //
        // C++: int64 cv::dnn::Net::getFLOPS(vector_MatShape netInputShapes)
        //

        /**
         * Computes FLOPs for the whole loaded model with the specified input shapes.
         * param netInputShapes vector of shapes for all net inputs.
         * return computed FLOPs.
         */
        public long getFLOPS(List<MatOfInt> netInputShapes)
        {
            ThrowIfDisposed();
            Mat netInputShapes_mat = Converters.vector_MatShape_to_Mat(netInputShapes);
            return dnn_Net_getFLOPS_10(nativeObj, netInputShapes_mat.nativeObj);
        }

        //
        // C++: int64 cv::dnn::Net::getFLOPS(MatShape netInputShape)
        //

        public long getFLOPS(MatOfInt netInputShape)
        {
            ThrowIfDisposed();
            if (netInputShape != null) netInputShape.ThrowIfDisposed();
            Mat netInputShape_mat = netInputShape;
            return dnn_Net_getFLOPS_11(nativeObj, netInputShape_mat.nativeObj);
        }

        //
        // C++: int64 cv::dnn::Net::getFLOPS(int layerId, vector_MatShape netInputShapes)
        //

        public long getFLOPS(int layerId, List<MatOfInt> netInputShapes)
        {
            ThrowIfDisposed();
            Mat netInputShapes_mat = Converters.vector_MatShape_to_Mat(netInputShapes);
            return dnn_Net_getFLOPS_12(nativeObj, layerId, netInputShapes_mat.nativeObj);
        }

        //
        // C++: int64 cv::dnn::Net::getFLOPS(int layerId, MatShape netInputShape)
        //

        public long getFLOPS(int layerId, MatOfInt netInputShape)
        {
            ThrowIfDisposed();
            if (netInputShape != null) netInputShape.ThrowIfDisposed();
            Mat netInputShape_mat = netInputShape;
            return dnn_Net_getFLOPS_13(nativeObj, layerId, netInputShape_mat.nativeObj);
        }
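
        // Illustrative sketch: FLOPs for a hypothetical NCHW input shape of 1x3x224x224,
        // assuming CoreModule's MatOfInt(params int[]) constructor.
        private static long Example_GetFlops(Net net)
        {
            using (MatOfInt netInputShape = new MatOfInt(1, 3, 224, 224))
            {
                return net.getFLOPS(netInputShape);
            }
        }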

        //
        // C++: void cv::dnn::Net::getLayerTypes(vector_String& layersTypes)
        //

        /**
         * Returns the list of layer types used in the model.
         * param layersTypes output parameter for returning types.
         */
        public void getLayerTypes(List<string> layersTypes)
        {
            ThrowIfDisposed();
            Mat layersTypes_mat = new Mat();
            dnn_Net_getLayerTypes_10(nativeObj, layersTypes_mat.nativeObj);
            Converters.Mat_to_vector_String(layersTypes_mat, layersTypes);
            layersTypes_mat.release();
        }

        //
        // C++: int cv::dnn::Net::getLayersCount(String layerType)
        //

        /**
         * Returns the count of layers of the specified type.
         * param layerType type.
         * return count of layers
         */
        public int getLayersCount(string layerType)
        {
            ThrowIfDisposed();
            return dnn_Net_getLayersCount_10(nativeObj, layerType);
        }

        //
        // C++: void cv::dnn::Net::getMemoryConsumption(MatShape netInputShape, size_t& weights, size_t& blobs)
        //

        public void getMemoryConsumption(MatOfInt netInputShape, long[] weights, long[] blobs)
        {
            ThrowIfDisposed();
            if (netInputShape != null) netInputShape.ThrowIfDisposed();
            Mat netInputShape_mat = netInputShape;
            double[] weights_out = new double[1];
            double[] blobs_out = new double[1];
            dnn_Net_getMemoryConsumption_10(nativeObj, netInputShape_mat.nativeObj, weights_out, blobs_out);
            if (weights != null) weights[0] = (long)weights_out[0];
            if (blobs != null) blobs[0] = (long)blobs_out[0];
        }

        //
        // C++: void cv::dnn::Net::getMemoryConsumption(int layerId, vector_MatShape netInputShapes, size_t& weights, size_t& blobs)
        //

        public void getMemoryConsumption(int layerId, List<MatOfInt> netInputShapes, long[] weights, long[] blobs)
        {
            ThrowIfDisposed();
            Mat netInputShapes_mat = Converters.vector_MatShape_to_Mat(netInputShapes);
            double[] weights_out = new double[1];
            double[] blobs_out = new double[1];
            dnn_Net_getMemoryConsumption_11(nativeObj, layerId, netInputShapes_mat.nativeObj, weights_out, blobs_out);
            if (weights != null) weights[0] = (long)weights_out[0];
            if (blobs != null) blobs[0] = (long)blobs_out[0];
        }

        //
        // C++: void cv::dnn::Net::getMemoryConsumption(int layerId, MatShape netInputShape, size_t& weights, size_t& blobs)
        //

        public void getMemoryConsumption(int layerId, MatOfInt netInputShape, long[] weights, long[] blobs)
        {
            ThrowIfDisposed();
            if (netInputShape != null) netInputShape.ThrowIfDisposed();
            Mat netInputShape_mat = netInputShape;
            double[] weights_out = new double[1];
            double[] blobs_out = new double[1];
            dnn_Net_getMemoryConsumption_12(nativeObj, layerId, netInputShape_mat.nativeObj, weights_out, blobs_out);
            if (weights != null) weights[0] = (long)weights_out[0];
            if (blobs != null) blobs[0] = (long)blobs_out[0];
        }
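
        // Illustrative sketch: estimating memory for the same hypothetical 1x3x224x224 input.
        // Weight and blob sizes are reported in bytes through single-element out arrays.
        private static long Example_GetMemoryConsumption(Net net)
        {
            long[] weights = new long[1];
            long[] blobs = new long[1];
            using (MatOfInt netInputShape = new MatOfInt(1, 3, 224, 224))
            {
                net.getMemoryConsumption(netInputShape, weights, blobs);
            }
            return weights[0] + blobs[0]; // total bytes for parameters plus intermediate blobs
        }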

        //
        // C++: void cv::dnn::Net::enableFusion(bool fusion)
        //

        /**
         * Enables or disables layer fusion in the network.
         * param fusion true to enable the fusion, false to disable. The fusion is enabled by default.
         */
        public void enableFusion(bool fusion)
        {
            ThrowIfDisposed();
            dnn_Net_enableFusion_10(nativeObj, fusion);
        }

        //
        // C++: void cv::dnn::Net::enableWinograd(bool useWinograd)
        //

        /**
         * Enables or disables the Winograd compute branch. The Winograd compute branch can speed up
         * 3x3 Convolution at a small loss of accuracy.
         * param useWinograd true to enable the Winograd compute branch. The default is true.
         */
        public void enableWinograd(bool useWinograd)
        {
            ThrowIfDisposed();
            dnn_Net_enableWinograd_10(nativeObj, useWinograd);
        }

        //
        // C++: int64 cv::dnn::Net::getPerfProfile(vector_double& timings)
        //

        /**
         * Returns the overall time for inference and timings (in ticks) for layers.
         *
         * Indexes in the returned vector correspond to layer ids. Some layers can be fused with others,
         * in which case a zero tick count is returned for the skipped layers. Supported by DNN_BACKEND_OPENCV on DNN_TARGET_CPU only.
         *
         * param timings vector for tick timings for all layers.
         * return overall ticks for model inference.
         */
        public long getPerfProfile(MatOfDouble timings)
        {
            ThrowIfDisposed();
            if (timings != null) timings.ThrowIfDisposed();
            Mat timings_mat = timings;
            return dnn_Net_getPerfProfile_10(nativeObj, timings_mat.nativeObj);
        }
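
        // Illustrative sketch: converting the returned tick count to milliseconds with
        // Core.getTickFrequency() from CoreModule.
        private static double Example_GetInferenceTimeMs(Net net)
        {
            using (MatOfDouble timings = new MatOfDouble())
            {
                long ticks = net.getPerfProfile(timings);
                return ticks * 1000.0 / Core.getTickFrequency();
            }
        }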

#if (UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR
        const string LIBNAME = "__Internal";
#else
        const string LIBNAME = "opencvforunity";
#endif

        // C++: cv::dnn::Net::Net()
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_Net_10();

        // C++: static Net cv::dnn::Net::readFromModelOptimizer(String xml, String bin)
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_readFromModelOptimizer_10(string xml, string bin);

        // C++: static Net cv::dnn::Net::readFromModelOptimizer(vector_uchar bufferModelConfig, vector_uchar bufferWeights)
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_readFromModelOptimizer_11(IntPtr bufferModelConfig_mat_nativeObj, IntPtr bufferWeights_mat_nativeObj);

        // C++: bool cv::dnn::Net::empty()
        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool dnn_Net_empty_10(IntPtr nativeObj);

        // C++: String cv::dnn::Net::dump()
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_dump_10(IntPtr nativeObj);

        // C++: void cv::dnn::Net::dumpToFile(String path)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_dumpToFile_10(IntPtr nativeObj, string path);

        // C++: int cv::dnn::Net::getLayerId(String layer)
        [DllImport(LIBNAME)]
        private static extern int dnn_Net_getLayerId_10(IntPtr nativeObj, string layer);

        // C++: vector_String cv::dnn::Net::getLayerNames()
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getLayerNames_10(IntPtr nativeObj);

        // C++: Ptr_Layer cv::dnn::Net::getLayer(int layerId)
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getLayer_10(IntPtr nativeObj, int layerId);

        // C++: Ptr_Layer cv::dnn::Net::getLayer(String layerName)
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getLayer_11(IntPtr nativeObj, string layerName);

        // C++: Ptr_Layer cv::dnn::Net::getLayer(LayerId layerId)
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getLayer_12(IntPtr nativeObj, IntPtr layerId_nativeObj);

        // C++: void cv::dnn::Net::connect(String outPin, String inpPin)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_connect_10(IntPtr nativeObj, string outPin, string inpPin);

        // C++: void cv::dnn::Net::setInputsNames(vector_String inputBlobNames)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setInputsNames_10(IntPtr nativeObj, IntPtr inputBlobNames_mat_nativeObj);

        // C++: void cv::dnn::Net::setInputShape(String inputName, MatShape shape)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setInputShape_10(IntPtr nativeObj, string inputName, IntPtr shape_mat_nativeObj);

        // C++: Mat cv::dnn::Net::forward(String outputName = String())
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_forward_10(IntPtr nativeObj, string outputName);
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_forward_11(IntPtr nativeObj);

        // C++: void cv::dnn::Net::forward(vector_Mat& outputBlobs, String outputName = String())
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_forward_12(IntPtr nativeObj, IntPtr outputBlobs_mat_nativeObj, string outputName);
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_forward_13(IntPtr nativeObj, IntPtr outputBlobs_mat_nativeObj);

        // C++: void cv::dnn::Net::forward(vector_Mat& outputBlobs, vector_String outBlobNames)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_forward_14(IntPtr nativeObj, IntPtr outputBlobs_mat_nativeObj, IntPtr outBlobNames_mat_nativeObj);

        // C++: Net cv::dnn::Net::quantize(vector_Mat calibData, int inputsDtype, int outputsDtype, bool perChannel = true)
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_quantize_10(IntPtr nativeObj, IntPtr calibData_mat_nativeObj, int inputsDtype, int outputsDtype, [MarshalAs(UnmanagedType.U1)] bool perChannel);
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_quantize_11(IntPtr nativeObj, IntPtr calibData_mat_nativeObj, int inputsDtype, int outputsDtype);

        // C++: void cv::dnn::Net::getInputDetails(vector_float& scales, vector_int& zeropoints)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_getInputDetails_10(IntPtr nativeObj, IntPtr scales_mat_nativeObj, IntPtr zeropoints_mat_nativeObj);

        // C++: void cv::dnn::Net::getOutputDetails(vector_float& scales, vector_int& zeropoints)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_getOutputDetails_10(IntPtr nativeObj, IntPtr scales_mat_nativeObj, IntPtr zeropoints_mat_nativeObj);

        // C++: void cv::dnn::Net::setHalideScheduler(String scheduler)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setHalideScheduler_10(IntPtr nativeObj, string scheduler);

        // C++: void cv::dnn::Net::setPreferableBackend(int backendId)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setPreferableBackend_10(IntPtr nativeObj, int backendId);

        // C++: void cv::dnn::Net::setPreferableTarget(int targetId)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setPreferableTarget_10(IntPtr nativeObj, int targetId);

        // C++: void cv::dnn::Net::setInput(Mat blob, String name = "", double scalefactor = 1.0, Scalar mean = Scalar())
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setInput_10(IntPtr nativeObj, IntPtr blob_nativeObj, string name, double scalefactor, double mean_val0, double mean_val1, double mean_val2, double mean_val3);
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setInput_11(IntPtr nativeObj, IntPtr blob_nativeObj, string name, double scalefactor);
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setInput_12(IntPtr nativeObj, IntPtr blob_nativeObj, string name);
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setInput_13(IntPtr nativeObj, IntPtr blob_nativeObj);

        // C++: void cv::dnn::Net::setParam(int layer, int numParam, Mat blob)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setParam_10(IntPtr nativeObj, int layer, int numParam, IntPtr blob_nativeObj);

        // C++: void cv::dnn::Net::setParam(String layerName, int numParam, Mat blob)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_setParam_11(IntPtr nativeObj, string layerName, int numParam, IntPtr blob_nativeObj);

        // C++: Mat cv::dnn::Net::getParam(int layer, int numParam = 0)
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getParam_10(IntPtr nativeObj, int layer, int numParam);
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getParam_11(IntPtr nativeObj, int layer);

        // C++: Mat cv::dnn::Net::getParam(String layerName, int numParam = 0)
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getParam_12(IntPtr nativeObj, string layerName, int numParam);
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getParam_13(IntPtr nativeObj, string layerName);

        // C++: vector_int cv::dnn::Net::getUnconnectedOutLayers()
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getUnconnectedOutLayers_10(IntPtr nativeObj);

        // C++: vector_String cv::dnn::Net::getUnconnectedOutLayersNames()
        [DllImport(LIBNAME)]
        private static extern IntPtr dnn_Net_getUnconnectedOutLayersNames_10(IntPtr nativeObj);

        // C++: int64 cv::dnn::Net::getFLOPS(vector_MatShape netInputShapes)
        [DllImport(LIBNAME)]
        private static extern long dnn_Net_getFLOPS_10(IntPtr nativeObj, IntPtr netInputShapes_mat_nativeObj);

        // C++: int64 cv::dnn::Net::getFLOPS(MatShape netInputShape)
        [DllImport(LIBNAME)]
        private static extern long dnn_Net_getFLOPS_11(IntPtr nativeObj, IntPtr netInputShape_mat_nativeObj);

        // C++: int64 cv::dnn::Net::getFLOPS(int layerId, vector_MatShape netInputShapes)
        [DllImport(LIBNAME)]
        private static extern long dnn_Net_getFLOPS_12(IntPtr nativeObj, int layerId, IntPtr netInputShapes_mat_nativeObj);

        // C++: int64 cv::dnn::Net::getFLOPS(int layerId, MatShape netInputShape)
        [DllImport(LIBNAME)]
        private static extern long dnn_Net_getFLOPS_13(IntPtr nativeObj, int layerId, IntPtr netInputShape_mat_nativeObj);

        // C++: void cv::dnn::Net::getLayerTypes(vector_String& layersTypes)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_getLayerTypes_10(IntPtr nativeObj, IntPtr layersTypes_mat_nativeObj);

        // C++: int cv::dnn::Net::getLayersCount(String layerType)
        [DllImport(LIBNAME)]
        private static extern int dnn_Net_getLayersCount_10(IntPtr nativeObj, string layerType);

        // C++: void cv::dnn::Net::getMemoryConsumption(MatShape netInputShape, size_t& weights, size_t& blobs)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_getMemoryConsumption_10(IntPtr nativeObj, IntPtr netInputShape_mat_nativeObj, double[] weights_out, double[] blobs_out);

        // C++: void cv::dnn::Net::getMemoryConsumption(int layerId, vector_MatShape netInputShapes, size_t& weights, size_t& blobs)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_getMemoryConsumption_11(IntPtr nativeObj, int layerId, IntPtr netInputShapes_mat_nativeObj, double[] weights_out, double[] blobs_out);

        // C++: void cv::dnn::Net::getMemoryConsumption(int layerId, MatShape netInputShape, size_t& weights, size_t& blobs)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_getMemoryConsumption_12(IntPtr nativeObj, int layerId, IntPtr netInputShape_mat_nativeObj, double[] weights_out, double[] blobs_out);

        // C++: void cv::dnn::Net::enableFusion(bool fusion)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_enableFusion_10(IntPtr nativeObj, [MarshalAs(UnmanagedType.U1)] bool fusion);

        // C++: void cv::dnn::Net::enableWinograd(bool useWinograd)
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_enableWinograd_10(IntPtr nativeObj, [MarshalAs(UnmanagedType.U1)] bool useWinograd);

        // C++: int64 cv::dnn::Net::getPerfProfile(vector_double& timings)
        [DllImport(LIBNAME)]
        private static extern long dnn_Net_getPerfProfile_10(IntPtr nativeObj, IntPtr timings_mat_nativeObj);

        // native support for java finalize()
        [DllImport(LIBNAME)]
        private static extern void dnn_Net_delete(IntPtr nativeObj);

    }
}

#endif