Skip to content

Commit

Permalink
Merge pull request #470 from shimat/dnn_Net_feature
Browse files Browse the repository at this point in the history
add dnn::Net functions
  • Loading branch information
shimat authored Apr 12, 2018
2 parents 0eff3c1 + 4b4f581 commit 66150c2
Show file tree
Hide file tree
Showing 3 changed files with 110 additions and 2 deletions.
68 changes: 68 additions & 0 deletions src/OpenCvSharp/Modules/dnn/Net.cs
Original file line number Diff line number Diff line change
Expand Up @@ -253,6 +253,42 @@ public void Forward(IEnumerable<Mat> outputBlobs, IEnumerable<string> outBlobNam
GC.KeepAlive(this);
}

/// <summary>
/// Compile Halide layers.
/// Schedules layers that support the Halide backend, then compiles them for a
/// specific target. For layers not represented in the scheduling file, or if no
/// manual scheduling is used at all, automatic scheduling will be applied.
/// </summary>
/// <param name="scheduler">Path to a YAML file with scheduling directives.</param>
/// <exception cref="ArgumentNullException"><paramref name="scheduler"/> is null.</exception>
public void SetHalideScheduler(string scheduler)
{
    ThrowIfDisposed();
    // A null string marshals as a null char* and would be handed to cv::String
    // on the native side; fail fast with a managed exception instead.
    if (scheduler == null)
        throw new ArgumentNullException(nameof(scheduler));
    NativeMethods.dnn_Net_setHalideScheduler(ptr, scheduler);
    // Keep this wrapper (and therefore ptr) alive for the duration of the native call.
    GC.KeepAlive(this);
}

/// <summary>
/// Ask network to use specific computation backend where it supported.
/// </summary>
/// <param name="backendId">Backend identifier (raw int forwarded to the native
/// dnn_Net_setPreferableBackend; presumably a cv::dnn::Backend value — confirm
/// against the native headers).</param>
public void SetPreferableBackend(int backendId)
{
    ThrowIfDisposed();
    NativeMethods.dnn_Net_setPreferableBackend(ptr, backendId);
    // Keep this wrapper (and therefore ptr) alive for the duration of the native call.
    GC.KeepAlive(this);
}

/// <summary>
/// Ask network to make computations on specific target device.
/// </summary>
/// <param name="targetId">Target identifier (raw int forwarded to the native
/// dnn_Net_setPreferableTarget; presumably a cv::dnn::Target value — confirm
/// against the native headers).</param>
public void SetPreferableTarget(int targetId)
{
    ThrowIfDisposed();
    NativeMethods.dnn_Net_setPreferableTarget(ptr, targetId);
    // Keep this wrapper (and therefore ptr) alive for the duration of the native call.
    GC.KeepAlive(this);
}

/// <summary>
/// Sets the new value for the layer output blob
/// </summary>
Expand All @@ -272,6 +308,38 @@ public void SetInput(Mat blob, string name = "")
GC.KeepAlive(this);
}

/// <summary>
/// Enables or disables layer fusion in the network.
/// </summary>
/// <param name="fusion">true to enable the fusion, false to disable. The fusion is enabled by default.</param>
public void EnableFusion(bool fusion)
{
    ThrowIfDisposed();
    // The native export takes an int flag rather than a bool.
    var nativeFlag = fusion ? 1 : 0;
    NativeMethods.dnn_Net_enableFusion(ptr, nativeFlag);
    // Keep this wrapper (and therefore ptr) alive for the duration of the native call.
    GC.KeepAlive(this);
}

/// <summary>
/// Returns overall time for inference and timings (in ticks) for layers.
/// Indexes in the returned vector correspond to layer ids. Some layers can be
/// fused with others; in that case a zero ticks count will be returned for the
/// skipped layers.
/// </summary>
/// <param name="timings">Receives the tick timings for all layers.</param>
/// <returns>Overall ticks for model inference.</returns>
public long GetPerfProfile(out double[] timings)
{
    ThrowIfDisposed();

    using (var layerTimings = new VectorOfDouble())
    {
        // The native side fills the vector; copy it out before disposal.
        var totalTicks = NativeMethods.dnn_Net_getPerfProfile(ptr, layerTimings.CvPtr);
        // Keep this wrapper (and therefore ptr) alive for the duration of the native call.
        GC.KeepAlive(this);
        timings = layerTimings.ToArray();
        return totalTicks;
    }
}

#endregion
}
}
15 changes: 15 additions & 0 deletions src/OpenCvSharp/PInvoke/dnn/NativeMethods_dnn_Net.cs
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,23 @@ public static extern void dnn_Net_forward2(
[DllImport(DllExtern, CallingConvention = CallingConvention.Cdecl, ExactSpelling = true, BestFitMapping = false, ThrowOnUnmappableChar = true)]
public static extern void dnn_Net_forward3(
IntPtr net, IntPtr[] outputBlobs, int outputBlobsLength, string[] outBlobNames, int outBlobNamesLength);

// cv::dnn::Net::setHalideScheduler — scheduler is marshaled as an ANSI (LPStr) path to a YAML scheduling file.
[DllImport(DllExtern, CallingConvention = CallingConvention.Cdecl, ExactSpelling = true, BestFitMapping = false, ThrowOnUnmappableChar = true)]
public static extern void dnn_Net_setHalideScheduler(IntPtr net, [MarshalAs(UnmanagedType.LPStr)] string scheduler);

// cv::dnn::Net::setPreferableBackend — backendId is passed through as a raw int.
[DllImport(DllExtern, CallingConvention = CallingConvention.Cdecl, ExactSpelling = true, BestFitMapping = false, ThrowOnUnmappableChar = true)]
public static extern void dnn_Net_setPreferableBackend(IntPtr net, int backendId);

// cv::dnn::Net::setPreferableTarget — targetId is passed through as a raw int.
[DllImport(DllExtern, CallingConvention = CallingConvention.Cdecl, ExactSpelling = true, BestFitMapping = false, ThrowOnUnmappableChar = true)]
public static extern void dnn_Net_setPreferableTarget(IntPtr net, int targetId);

// cv::dnn::Net::setInput — name is an optional ANSI layer name (empty selects the default input).
[DllImport(DllExtern, CallingConvention = CallingConvention.Cdecl, ExactSpelling = true, BestFitMapping = false, ThrowOnUnmappableChar = true)]
public static extern void dnn_Net_setInput(IntPtr net, IntPtr blob, [MarshalAs(UnmanagedType.LPStr)] string name);

// cv::dnn::Net::enableFusion — fusion is an int flag (non-zero = enabled) since bool is not blittable.
[DllImport(DllExtern, CallingConvention = CallingConvention.Cdecl, ExactSpelling = true, BestFitMapping = false, ThrowOnUnmappableChar = true)]
public static extern void dnn_Net_enableFusion(IntPtr net, int fusion);

// cv::dnn::Net::getPerfProfile — timings is a native std::vector<double>* filled by the callee; returns overall ticks.
[DllImport(DllExtern, CallingConvention = CallingConvention.Cdecl, ExactSpelling = true, BestFitMapping = false, ThrowOnUnmappableChar = true)]
public static extern Int64 dnn_Net_getPerfProfile(IntPtr net, IntPtr timings);
}
}
29 changes: 27 additions & 2 deletions src/OpenCvSharpExtern/dnn_Net.h
Original file line number Diff line number Diff line change
Expand Up @@ -81,10 +81,35 @@ CVAPI(void) dnn_Net_forward3(
net->forward(outputBlobsVec, outBlobNamesVec);
}

// cv::dnn::Net::setHalideScheduler wrapper.
// scheduler: path to a YAML file with scheduling directives. May arrive as
// nullptr from the managed side; constructing cv::String from a null char* is
// undefined behavior, so normalize it to an empty string first (same pattern
// as dnn_Net_setInput below).
CVAPI(void) dnn_Net_setHalideScheduler(cv::dnn::Net* net, const char *scheduler)
{
    const cv::String schedulerStr = (scheduler == nullptr) ? "" : cv::String(scheduler);
    net->setHalideScheduler(schedulerStr);
}

// cv::dnn::Net::setPreferableBackend wrapper.
// backendId: raw int forwarded unchanged to OpenCV (presumably a
// cv::dnn::Backend value — confirm against the OpenCV headers).
CVAPI(void) dnn_Net_setPreferableBackend(cv::dnn::Net* net, int backendId)
{
    net->setPreferableBackend(backendId);
}

// cv::dnn::Net::setPreferableTarget wrapper.
// targetId: raw int forwarded unchanged to OpenCV (presumably a
// cv::dnn::Target value — confirm against the OpenCV headers).
CVAPI(void) dnn_Net_setPreferableTarget(cv::dnn::Net* net, int targetId)
{
    net->setPreferableTarget(targetId);
}

// cv::dnn::Net::setInput wrapper.
// blob: input blob to bind; name: optional layer name (nullptr from the
// managed side means "use the default input").
CVAPI(void) dnn_Net_setInput(cv::dnn::Net* net, const cv::Mat *blob, const char *name)
{
    // Normalize a null name to an empty string and pass the NORMALIZED value.
    // The original body duplicated these two lines (redefinition of nameStr)
    // and then passed the raw, possibly-null `name`, defeating the guard.
    const cv::String nameStr = (name == nullptr) ? "" : cv::String(name);
    net->setInput(*blob, nameStr);
}

// cv::dnn::Net::enableFusion wrapper.
// fusion: int flag from the P/Invoke layer; non-zero enables layer fusion.
CVAPI(void) dnn_Net_enableFusion(cv::dnn::Net* net, int fusion)
{
    const bool enabled = (fusion != 0);
    net->enableFusion(enabled);
}

// cv::dnn::Net::getPerfProfile wrapper.
// timings: caller-owned vector filled with per-layer tick counts.
// Returns the overall tick count for model inference.
CVAPI(int64) dnn_Net_getPerfProfile(cv::dnn::Net* net, std::vector<double> *timings)
{
    return net->getPerfProfile(*timings);
}

#endif

0 comments on commit 66150c2

Please sign in to comment.