module performance.measure;

import std.stdio;
import std.range;
import std.algorithm;
import std.datetime;
import std.string;
import std.traits;
import std.typecons;
import std.parallelism : taskPool;

import core.time;
import core.thread;

import performance.common;

import dcv.core;
import dcv.imgproc;
import dcv.features;
import dcv.multiview;
import dcv.tracking;
import dcv.io;

import mir.ndslice;

alias map = std.algorithm.iteration.map;
alias each = std.algorithm.iteration.each;
alias iota = std.range.iota;

immutable imsize = 128;
size_t iterations = 1_000;

alias BenchmarkFunction = long function();

BenchmarkFunction[string] funcs;

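/// Entry point used by the profiling runner: stores the requested iteration
/// count, registers every benchmark whose name matches `test`, and writes the
/// collected timings to profile.csv under exeDir.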
void measure(string test, size_t iterations)
{
    .iterations = iterations;
    registerBenchmarks(test);
    Thread.getThis.sleep(dur!"msecs"(1000));
    runBenchmarks(exeDir ~ "/profile.csv");
}

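/// Registers a single benchmark function under a dotted name derived from its
/// fully qualified symbol, e.g. run_dcv_imgproc_color_rgb2gray becomes
/// dcv.imgproc.color.rgb2gray.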
void registerBenchmark(alias fun)()
{
    auto fnName = fullyQualifiedName!fun.replace("performance.measure.", "").replace("run_", "").replace("_", ".");
    funcs[fnName] = &fun;
}

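/// Scans this module for functions whose names start with "run_" and registers
/// those whose dotted name contains `test`; an empty filter registers them all.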
void registerBenchmarks(string test)
{
    foreach (m; __traits(allMembers, performance.measure))
    {
        static if (m.length > 4 && m[0 .. 4].equal("run_"))
        {
            if (test.empty || !find(m.replace("_", "."), test).empty)
                registerBenchmark!(__traits(getMember, performance.measure, m));
        }
    }
}

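/// Runs the registered benchmarks in alphabetical order, echoes each timing to
/// stdout, and writes "name,microseconds" rows to the given CSV file.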
void runBenchmarks(string outputPath)
{
    import std.file;
    import std.format;

    string output;

    auto fnNames = sort(funcs.keys);
    foreach (name; fnNames)
    {
        auto fn = funcs[name];

        std.stdio.write(name, ":");
        stdout.flush();
        auto res = fn();
        std.stdio.writeln(res.usecs);

        output ~= format("%s,%d\n", name, res);
    }
    write(outputPath, output);
}

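/// Times `iterations` consecutive calls of fn(args) and returns the total
/// elapsed time in microseconds.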
auto evalBenchmark(Fn, Args...)(Fn fn, Args args)
{
    StopWatch s;
    s.start;
    foreach (i; iota(iterations))
    {
        fn(args);
    }
    return s.peek.usecs;
}

// Profiling functions ------------------------------------------------------------------
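// Each run_* function below allocates its inputs once, wraps the call under
// test in a small helper where needed, and returns the accumulated time
// reported by evalBenchmark.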

auto run_dcv_features_corner_harris_harrisCorners_3()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    return evalBenchmark(&harrisCorners!(float, float, Contiguous), image, 3, 0.64, 0.84, result, taskPool);
}

auto run_dcv_features_corner_harris_harrisCorners_5()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    return evalBenchmark(&harrisCorners!(float, float, Contiguous), image, 5, 0.64, 0.84, result, taskPool);
}

auto run_dcv_features_corner_harris_shiTomasiCorners_3()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    return evalBenchmark(&shiTomasiCorners!(float, float, Contiguous), image, 3, 0.84, result, taskPool);
}

auto run_dcv_features_corner_harris_shiTomasiCorners_5()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    return evalBenchmark(&shiTomasiCorners!(float, float, Contiguous), image, 5, 0.84, result, taskPool);
}

auto run_dcv_features_corner_fast_FASTDetector()
{
    FASTDetector detector = new FASTDetector;
    auto image = new Image(imsize, imsize, ImageFormat.IF_MONO, BitDepth.BD_8);

    auto detect(FASTDetector detector, Image image)
    {
        detector.detect(image);
    }

    return evalBenchmark(&detect, detector, image);
}

auto run_dcv_features_rht_RhtLines()
{
    import dcv.features.rht;

    auto image = imread(getExampleDataPath() ~ "/img.png", ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;
    auto evalRht(Slice!(Contiguous, [2], ubyte*) image)
    {
        auto lines = RhtLines().epouchs(10).iterations(10).minCurve(50);
        auto collectedLines = lines(image).array;
    }

    int err;
    return evalBenchmark(&evalRht, image.reshape([image.length!0, image.length!1], err).scale([0.15, 0.15]));
}

auto run_dcv_features_rht_RhtCircles()
{
    import dcv.features.rht;

    auto image = imread(getExampleDataPath() ~ "/img.png", ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;
    auto evalRht(Slice!(Contiguous, [2], ubyte*) image)
    {
        auto circles = RhtCircles().epouchs(10).iterations(10).minCurve(50);
        auto collectedCircles = circles(image).array;
    }

    int err;
    return evalBenchmark(&evalRht, image.reshape([image.length!0, image.length!1], err).scale([0.15, 0.15]));
}

auto run_dcv_features_utils_extractCorners()
{
    auto image = imread(getExampleDataPath() ~ "/../features/result/harrisResponse.png",
            ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;
    int err;
    return evalBenchmark(&extractCorners!ubyte, image.reshape([image.length!0, image.length!1], err), -1, cast(ubyte)0);
}

auto run_dcv_imgproc_color_rgb2gray()
{
    auto rgb = slice!ubyte(imsize, imsize, 3);
    auto gray = slice!ubyte(imsize, imsize);
    return evalBenchmark(&rgb2gray!ubyte, rgb, gray, Rgb2GrayConvertion.LUMINANCE_PRESERVE);
}

auto run_dcv_imgproc_color_gray2rgb()
{
    auto rgb = slice!ubyte(imsize, imsize, 3);
    auto gray = slice!ubyte(imsize, imsize);
    return evalBenchmark(&gray2rgb!ubyte, gray, rgb);
}

auto run_dcv_imgproc_color_rgb2hsv()
{
    auto rgb = slice!ubyte(imsize, imsize, 3);
    auto hsv = slice!float(imsize, imsize, 3);
    return evalBenchmark(&rgb2hsv!(float, ubyte), rgb, hsv);
}

auto run_dcv_imgproc_color_hsv2rgb()
{
    import std.random;
    import mir.ndslice.algorithm : each;
    auto rgb = slice!ubyte(imsize, imsize, 3);
    auto hsv = slice!float(imsize, imsize, 3);
    hsv.each!((ref v) { v = cast(float)uniform01; });
    return evalBenchmark(&hsv2rgb!(ubyte, float), hsv, rgb);
}

auto run_dcv_imgproc_color_rgb2yuv()
{
    auto rgb = slice!ubyte(imsize, imsize, 3);
    auto yuv = slice!ubyte(imsize, imsize, 3);
    return evalBenchmark(&rgb2yuv!ubyte, rgb, yuv);
}

auto run_dcv_imgproc_color_yuv2rgb()
{
    auto rgb = slice!ubyte(imsize, imsize, 3);
    auto yuv = slice!ubyte(imsize, imsize, 3);
    return evalBenchmark(&yuv2rgb!ubyte, yuv, rgb);
}

auto run_dcv_imgproc_convolution_conv_1D_3()
{
    auto vector = slice!float(imsize * imsize);
    auto result = slice!float(imsize * imsize);
    auto kernel = slice!float(3);
    return evalBenchmark(&conv!(neumann, typeof(vector), typeof(kernel), typeof(kernel)), vector, kernel, result,
            emptySlice!([1], float), taskPool);
}

auto run_dcv_imgproc_convolution_conv_1D_5()
{
    auto vector = slice!float(imsize * imsize);
    auto result = slice!float(imsize * imsize);
    auto kernel = slice!float(5);
    return evalBenchmark(&conv!(neumann, typeof(vector), typeof(kernel), typeof(kernel)), vector, kernel, result,
            emptySlice!([1], float), taskPool);
}

auto run_dcv_imgproc_convolution_conv_1D_7()
{
    auto vector = slice!float(imsize * imsize);
    auto result = slice!float(imsize * imsize);
    auto kernel = slice!float(7);
    return evalBenchmark(&conv!(neumann, typeof(vector), typeof(kernel), typeof(kernel)), vector, kernel, result,
            emptySlice!([1], float), taskPool);
}

auto run_dcv_imgproc_convolution_conv_2D_3x3()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    auto kernel = slice!float(3, 3);
    return evalBenchmark(&conv!(neumann, typeof(image), typeof(kernel), typeof(kernel)), image, kernel, result,
            emptySlice!([2], float), taskPool);
}

auto run_dcv_imgproc_convolution_conv_2D_5x5()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    auto kernel = slice!float(5, 5);
    return evalBenchmark(&conv!(neumann, typeof(image), typeof(kernel), typeof(kernel)), image, kernel, result,
            emptySlice!([2], float), taskPool);
}

auto run_dcv_imgproc_convolution_conv_2D_7x7()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    auto kernel = slice!float(7, 7);
    return evalBenchmark(&conv!(neumann, typeof(image), typeof(kernel), typeof(kernel)), image, kernel, result,
            emptySlice!([2], float), taskPool);
}

auto run_dcv_imgproc_convolution_conv_3D_3x3()
{
    auto image = slice!float(imsize, imsize, 3);
    auto result = slice!float(imsize, imsize, 3);
    auto kernel = slice!float(3, 3);
    return evalBenchmark(&conv!(neumann, typeof(image), typeof(kernel), typeof(kernel)), image, kernel, result,
            emptySlice!([2], float), taskPool);
}

auto run_dcv_imgproc_convolution_conv_3D_5x5()
{
    auto image = slice!float(imsize, imsize, 3);
    auto result = slice!float(imsize, imsize, 3);
    auto kernel = slice!float(5, 5);
    return evalBenchmark(&conv!(neumann, typeof(image), typeof(kernel), typeof(kernel)), image, kernel, result,
            emptySlice!([2], float), taskPool);
}

auto run_dcv_imgproc_filter_filterNonMaximum()
{
    auto image = slice!float(imsize, imsize);
    return evalBenchmark(&filterNonMaximum!(Contiguous, float*), image, 10);
}

auto run_dcv_imgproc_filter_calcPartialDerivatives()
{
    auto image = slice!float(imsize, imsize);
    auto fx = slice!float(imsize, imsize);
    auto fy = slice!float(imsize, imsize);
    return evalBenchmark(&calcPartialDerivatives!(typeof(image), float), image, fx, fy, taskPool);
}

auto run_dcv_imgproc_filter_calcGradients()
{
    auto image = slice!float(imsize, imsize);
    auto mag = slice!float(imsize, imsize);
    auto orient = slice!float(imsize, imsize);
    return evalBenchmark(&calcGradients!(typeof(image), float), image, mag, orient, EdgeKernel.SIMPLE, taskPool);
}

auto run_dcv_imgproc_filter_nonMaximaSupression()
{
    auto mag = slice!float(imsize, imsize);
    auto orient = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    return evalBenchmark(&nonMaximaSupression!(typeof(mag), float), mag, orient, result, taskPool);
}

auto run_dcv_imgproc_filter_canny()
{
    // TODO implement random sampling image generation
    auto image = slice!float(imsize, imsize);
    auto result = slice!ubyte(imsize, imsize);
    auto runCanny(typeof(image) image, typeof(result) result)
    {
        canny!ubyte(image, 0, 1, EdgeKernel.SOBEL, result, taskPool);
    }
    //return evalBenchmark(&canny!(float,ubyte), image, cast(ubyte)0, cast(ubyte)1, EdgeKernel.SOBEL, result);
    return evalBenchmark(&runCanny, image, result);
}

auto run_dcv_imgproc_filter_bilateralFilter_3()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    return evalBenchmark(&bilateralFilter!(float, neumann, Contiguous, [2], float*), image, 0.84, 0.84, 3, result, taskPool);
}

auto run_dcv_imgproc_filter_bilateralFilter_5()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    return evalBenchmark(&bilateralFilter!(float, neumann, Contiguous, [2], float*), image, 0.84, 0.84, 5, result, taskPool);
}

auto run_dcv_imgproc_filter_medianFilter_3()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    return evalBenchmark(&medianFilter!(neumann, float, float, Contiguous, [2]), image, 3, result, taskPool);
}

auto run_dcv_imgproc_filter_medianFilter_5()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    return evalBenchmark(&medianFilter!(neumann, float, float, Contiguous, [2]), image, 5, result, taskPool);
}

auto run_dcv_imgproc_filter_histEqualize()
{
    auto image = slice!ubyte(imsize, imsize);
    auto result = slice!ubyte(imsize, imsize);
    int[256] histogram;
    return evalBenchmark(&histEqualize!(ubyte, int[256], Contiguous, [2]), image, histogram, result);
}

auto run_dcv_imgproc_filter_erode()
{
    auto image = slice!ubyte(imsize, imsize);
    auto result = slice!ubyte(imsize, imsize);
    return evalBenchmark(&erode!(neumann, ubyte, Contiguous), image, radialKernel!ubyte(3), result, taskPool);
}

auto run_dcv_imgproc_filter_dilate()
{
    auto image = slice!ubyte(imsize, imsize);
    auto result = slice!ubyte(imsize, imsize);
    return evalBenchmark(&dilate!(neumann, ubyte, Contiguous), image, radialKernel!ubyte(3), result, taskPool);
}

auto run_dcv_imgproc_filter_open()
{
    auto image = slice!ubyte(imsize, imsize);
    auto result = slice!ubyte(imsize, imsize);
    return evalBenchmark(&open!(neumann, ubyte, Contiguous), image, radialKernel!ubyte(3), result, taskPool);
}

auto run_dcv_imgproc_filter_close()
{
    auto image = slice!ubyte(imsize, imsize);
    auto result = slice!ubyte(imsize, imsize);
    return evalBenchmark(&close!(neumann, ubyte, Contiguous), image, radialKernel!ubyte(3), result, taskPool);
}

auto run_dcv_imgproc_imgmanip_resize_upsize()
{
    auto image = slice!float(imsize, imsize);
    size_t[2] resultSize = [cast(size_t)(imsize * 1.5), cast(size_t)(imsize * 1.5f)];
    return evalBenchmark(&resize!(linear, Contiguous, [2], float, 2), image, resultSize, taskPool);
}

auto run_dcv_imgproc_imgmanip_resize_downsize()
{
    auto image = slice!float(imsize, imsize);
    size_t[2] resultSize = [cast(size_t)(imsize * 0.5), cast(size_t)(imsize * 0.5f)];
    return evalBenchmark(&resize!(linear, Contiguous, [2], float, 2), image, resultSize, taskPool);
}

auto run_dcv_imgproc_imgmanip_scale_upsize()
{
    auto image = slice!float(imsize, imsize);
    float[2] scaleFactor = [1.5f, 1.5f];
    return evalBenchmark(&scale!(linear, float, float, Contiguous, [2], 2), image, scaleFactor, taskPool);
}

auto run_dcv_imgproc_imgmanip_scale_downsize()
{
    auto image = slice!float(imsize, imsize);
    float[2] scaleFactor = [0.5f, 0.5f];
    return evalBenchmark(&scale!(linear, float, float, Contiguous, [2], 2), image, scaleFactor, taskPool);
}

auto run_dcv_imgproc_imgmanip_transformAffine()
{
    auto image = slice!float(imsize, imsize);
    auto matrix = [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]];
    size_t[2] outSize = [0, 0];
    return evalBenchmark(&transformAffine!(linear, float, typeof(matrix), Contiguous, [2]), image, matrix, outSize);
}

auto run_dcv_imgproc_imgmanip_transformPerspective()
{
    auto image = slice!float(imsize, imsize);
    auto matrix = [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]];
    size_t[2] outSize = [0, 0];
    return evalBenchmark(&transformPerspective!(linear, float, typeof(matrix), Contiguous, [2]), image, matrix, outSize);
}

auto run_dcv_imgproc_imgmanip_warp()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    auto warpMap = slice!float(imsize, imsize, 2);
    return evalBenchmark(&warp!(linear, typeof(image), typeof(warpMap)), image, warpMap, result);
}

auto run_dcv_imgproc_imgmanip_remap()
{
    auto image = slice!float(imsize, imsize);
    auto result = slice!float(imsize, imsize);
    auto remapMap = slice!float(imsize, imsize, 2);
    return evalBenchmark(&remap!(linear, typeof(image), typeof(remapMap)), image, remapMap, result);
}

auto run_dcv_imgproc_threshold_threshold()
{
    auto image = slice!ubyte(imsize, imsize);
    auto result = slice!ubyte(imsize, imsize);
    auto runThreshold(typeof(image) image, typeof(result) result)
    {
        threshold(image, 0, 1, result);
    }
    //return evalBenchmark(&threshold!(ubyte, ubyte, 2), image, 0, 1, result);
    return evalBenchmark(&runThreshold, image, result);
}

auto run_dcv_multiview_stereo_matching_semiGlobalMatchingPipeline()
{
    import dcv.multiview.stereo.matching;

    auto left = imread(getExampleDataPath() ~ "/stereo/left.png", ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8))
        .sliced;
    auto right = imread(getExampleDataPath() ~ "/stereo/right.png", ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8))
        .sliced;

    int err;
    auto ls = left.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);
    auto rs = right.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);

    auto props = StereoPipelineProperties(ls.width, ls.height, ls.channels);
    auto matcher = semiGlobalMatchingPipeline(props, 2, 5, 10);
    auto runStereo(typeof(matcher) matcher, Image left, Image right)
    {
        matcher.evaluate(left, right);
    }

    return evalBenchmark(&runStereo, matcher, ls, rs);
}

auto run_dcv_tracking_opticalflow_hornschunck_HornSchunckFlow()
{
    auto left = imread(getExampleDataPath() ~ "/optflow/Army/frame10.png",
            ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;
    auto right = imread(getExampleDataPath() ~ "/optflow/Army/frame11.png",
            ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;

    int err;
    auto ls = left.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);
    auto rs = right.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);

    HornSchunckFlow flowAlgorithm = new HornSchunckFlow;
    auto flow = slice!float(ls.height, ls.width, 2);

    auto runFlow(HornSchunckFlow flowAlgorithm, Image left, Image right, DenseFlow flow)
    {
        flowAlgorithm.evaluate(left, right, flow);
    }

    return evalBenchmark(&runFlow, flowAlgorithm, ls, rs, flow);
}

auto run_dcv_tracking_opticalflow_lucaskanade_LucasKanadeFlow()
{
    auto left = imread(getExampleDataPath() ~ "/optflow/Army/frame10.png",
            ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;
    auto right = imread(getExampleDataPath() ~ "/optflow/Army/frame11.png",
            ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;

    int err;
    auto ls = left.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);
    auto rs = right.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);

    immutable pointCount = 25;

    LucasKanadeFlow flowAlgorithm = new LucasKanadeFlow;
    float[2][] points;
    float[2][] flow;

    flow.length = pointCount;

    // Build a 5x5 grid of tracking points, spaced 10 pixels apart.
    float x = 0.0f, y = 0.0f;
    5.iota.each!((i) {
        x = 0.0f;
        5.iota.each!((j) { float[2] xy = [x, y]; points ~= xy; x += 10.0f; });
        y += 10.0f;
    });

    float[2][] searchRegions = pointCount.iota.map!( i => cast(float[2])[3.0f, 3.0f]).array;

    auto runFlow(LucasKanadeFlow flowAlgorithm, Image left, Image right, in float[2][] points,
            in float[2][] searchRegions, float[2][] flow)
    {
        flowAlgorithm.evaluate(left, right, points, searchRegions, flow, false);
    }

    return evalBenchmark(&runFlow, flowAlgorithm, ls, rs, points, searchRegions, flow);
}

auto run_dcv_tracking_opticalflow_pyramidflow_DensePyramidFlow_HornSchunckFlow()
{
    auto left = imread(getExampleDataPath() ~ "/optflow/Army/frame10.png",
            ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;
    auto right = imread(getExampleDataPath() ~ "/optflow/Army/frame11.png",
            ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;
    int err;
    auto ls = left.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);
    auto rs = right.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);
    DensePyramidFlow flowAlgorithm = new DensePyramidFlow(new HornSchunckFlow, 3);
    auto flow = slice!float(ls.height, ls.width, 2);
    auto runFlow(DensePyramidFlow flowAlgorithm, Image left, Image right, DenseFlow flow)
    {
        flowAlgorithm.evaluate(left, right, flow);
    }

    return evalBenchmark(&runFlow, flowAlgorithm, ls, rs, flow);
}

auto run_dcv_tracking_opticalflow_pyramidflow_SparsePyramidFlow_LucasKanadeFlow()
{
    auto left = imread(getExampleDataPath() ~ "/optflow/Army/frame10.png",
            ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;
    auto right = imread(getExampleDataPath() ~ "/optflow/Army/frame11.png",
            ReadParams(ImageFormat.IF_MONO, BitDepth.BD_8)).sliced;

    int err;
    auto ls = left.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);
    auto rs = right.reshape([left.length!0, left.length!1], err).scale([0.25, 0.25]).asImage(ImageFormat.IF_MONO);

    immutable pointCount = 25;

    SparsePyramidFlow flowAlgorithm = new SparsePyramidFlow(new LucasKanadeFlow, 3);
    float[2][] points;
    float[2][] flow;

    flow.length = pointCount;

    // Build a 5x5 grid of tracking points, spaced 10 pixels apart.
    float x = 0.0f, y = 0.0f;
    5.iota.each!((i) {
        x = 0.0f;
        5.iota.each!((j) { float[2] xy = [x, y]; points ~= xy; x += 10.0f; });
        y += 10.0f;
    });

    float[2][] searchRegions = pointCount.iota.map!( i => cast(float[2])[3.0f, 3.0f]).array;

    auto runFlow(SparsePyramidFlow flowAlgorithm, Image left, Image right, in float[2][] points,
            in float[2][] searchRegions, float[2][] flow)
    {
        flowAlgorithm.evaluate(left, right, points, searchRegions, flow, false);
    }

    return evalBenchmark(&runFlow, flowAlgorithm, ls, rs, points, searchRegions, flow);
}