Merge pull request #22651 from mshabunin:script-doc

ts: basic documentation for utility scripts

parent: e309a06b47
commit: c0a84dcc85
@@ -1,4 +1,46 @@
#!/usr/bin/env python

""" OpenCV performance test results charts generator.

This script formats the results of a performance test as a table or a series of tables according to
test parameters.

### Description

Performance data is stored in the GTest log file created by performance tests. The default name is
`test_details.xml`. It can be changed with the `--gtest_output=xml:<location>/<filename>.xml` test
option. See https://github.com/opencv/opencv/wiki/HowToUsePerfTests for more details.

The script accepts an XML file with performance test results as input. Only one test (aka testsuite)
containing multiple cases (aka testcase) with different parameters can be used. The test should have
two or more parameters, for example resolution (640x480), data type (8UC1), mode (NORM_TYPE), etc.
Parameters #2 and #1 are used as table row and column by default; this mapping can be changed with
the `-x` and `-y` options. Each parameter combination besides the two selected for row and column is
represented as a separate table, i.e. one table (RES x TYPE) for `NORM_L1`, another for
`NORM_L2`, etc.

A test can be selected either with the `--gtest_filter` option when running the test, or with the
`--filter` script option.

### Options:

-f REGEX, --filter=REGEX - regular expression used to select a test
-x ROW, -y COL           - choose different parameters for rows and columns
-u UNITS, --units=UNITS  - units for output values (s, ms (default), us, ns or ticks)
-m NAME, --metric=NAME   - output metric (mean, median, stddev, etc.)
-o FMT, --output=FMT     - output format ('txt', 'html' or 'auto')

### Example:

./chart.py -f sum opencv_perf_core.xml

Geometric mean for
sum::Size_MatType::(Y, X)

  X\Y    127x61   640x480   1280x720   1920x1080
 8UC1   0.03 ms   1.21 ms    3.61 ms     8.11 ms
 8UC4   0.10 ms   3.56 ms   10.67 ms    23.90 ms
32FC1   0.05 ms   1.77 ms    5.23 ms    11.72 ms
"""

from __future__ import print_function
import testlog_parser, sys, os, xml, re
@@ -180,7 +222,7 @@ if __name__ == "__main__":
        exit(1)

    for i in range(argsnum):
-        arglists[i] = sorted([str(key) for key in arglists[i].iterkeys()], key=alphanum_keyselector)
+        arglists[i] = sorted([str(key) for key in arglists[i].keys()], key=alphanum_keyselector)

    if options.generateHtml and options.format != "moinwiki":
        htmlPrintHeader(sys.stdout, "Report %s for %s" % (args[0], sname))
3  modules/ts/misc/color.py
Executable file → Normal file
@@ -1,5 +1,6 @@
#!/usr/bin/env python

""" Utility package used by other test result formatting scripts.
"""
import math, os, sys

webcolors = {
@@ -1,4 +1,9 @@
#!/usr/bin/env python
""" Combines multiple uniform HTML documents with tables into a single one.

The HTML header from the first document is used in the output document. The largest
`<tbody>...</tbody>` part from each document is extracted, and all of them are joined together.
"""

from optparse import OptionParser
import glob, sys, os, re
24  modules/ts/misc/perf_tests_timing.py
Normal file → Executable file
@@ -1,4 +1,26 @@
#!/usr/bin/env python
""" Prints total execution time and the number of total/failed tests.

Performance data is stored in the GTest log file created by performance tests. The default name is
`test_details.xml`. It can be changed with the `--gtest_output=xml:<location>/<filename>.xml` test
option. See https://github.com/opencv/opencv/wiki/HowToUsePerfTests for more details.

This script uses the XML test log to produce basic runtime statistics as a text or HTML table.

### Example:

./perf_tests_timing.py opencv_perf_core.xml

Overall time: 222.71 min

Module  Testsuit                                           Time (min)   Num of tests   Failed
opencv  Gemm::OCL_GemmFixture                                 113.669             24
opencv  dft::Size_MatType_FlagsType_NzeroRows                  21.127            180
opencv  Dft::OCL_DftFixture                                    11.153            144
opencv  convertTo::Size_DepthSrc_DepthDst_Channels_alpha        7.992            392
opencv  Normalize::OCL_NormalizeFixture                         5.412             96
...     ...                                                       ...            ...
"""

from __future__ import print_function
import testlog_parser, sys, os, xml, glob, re
@@ -78,7 +100,7 @@ if __name__ == "__main__":
        suit_time = 0
        suit_num = 0
        fails_num = 0
-        for name in sorted(test_cases.iterkeys(), key=alphanum_keyselector):
+        for name in sorted(test_cases.keys(), key=alphanum_keyselector):
            cases = test_cases[name]

            groupName = next(c for c in cases if c).shortName()
modules/ts/misc/report.py

@@ -1,4 +1,38 @@
#!/usr/bin/env python
""" Print performance test run statistics.

Performance data is stored in the GTest log file created by performance tests. The default name is
`test_details.xml`. It can be changed with the `--gtest_output=xml:<location>/<filename>.xml` test
option. See https://github.com/opencv/opencv/wiki/HowToUsePerfTests for more details.

This script produces configurable performance report tables in text and HTML formats. It allows
filtering test cases by name and parameter string and selecting specific performance metric columns.
One or multiple test result files can be used as input.

### Example

./report.py -c min,mean,median -f '(LUT|Match).*640' opencv_perf_core.xml opencv_perf_features2d.xml

opencv_perf_features2d.xml, opencv_perf_core.xml

Name of Test                                                        Min         Mean       Median
KnnMatch::OCL_BruteForceMatcherFixture::(640x480, 32FC1)     1365.04 ms   1368.18 ms   1368.52 ms
LUT::OCL_LUTFixture::(640x480, 32FC1)                           2.57 ms      2.62 ms      2.64 ms
LUT::OCL_LUTFixture::(640x480, 32FC4)                          21.15 ms     21.25 ms     21.24 ms
LUT::OCL_LUTFixture::(640x480, 8UC1)                            2.22 ms      2.28 ms      2.29 ms
LUT::OCL_LUTFixture::(640x480, 8UC4)                           19.12 ms     19.24 ms     19.19 ms
LUT::SizePrm::640x480                                           2.22 ms      2.27 ms      2.29 ms
Match::OCL_BruteForceMatcherFixture::(640x480, 32FC1)        1364.15 ms   1367.73 ms   1365.45 ms
RadiusMatch::OCL_BruteForceMatcherFixture::(640x480, 32FC1)  1372.68 ms   1375.52 ms   1375.42 ms

### Options

-o FMT, --output=FMT     - output results in text format (can be 'txt', 'html' or 'auto' - default)
-u UNITS, --units=UNITS  - units for output values (s, ms (default), us, ns or ticks)
-c COLS, --columns=COLS  - comma-separated list of columns to show
-f REGEX, --filter=REGEX - regex to filter tests
--show-all               - also include empty and "notrun" lines
"""

from __future__ import print_function
import testlog_parser, sys, os, xml, re, glob
modules/ts/misc/run.py

@@ -1,4 +1,41 @@
#!/usr/bin/env python
""" Test runner and results collector for OpenCV

This script abstracts the execution procedure for OpenCV tests. The target scenario is running
automated tests in a continuous integration system.
See https://github.com/opencv/opencv/wiki/HowToUsePerfTests for more details.

### Main features

- Collect test executables, distinguishing between accuracy and performance, main and contrib test sets
- Pass through common GTest and OpenCV test options and handle some of them internally
- Set up the testing environment and handle some OpenCV-specific environment variables
- Test Java and Python bindings
- Test on a remote Android device
- Support Valgrind, QEMU wrapping and trace collection

### Main options

-t MODULES, --tests MODULES      - Comma-separated list of modules to test (example: -t core,imgproc,java)
-b MODULES, --blacklist MODULES  - Comma-separated list of modules to exclude from testing (example: -b java)
-a, --accuracy                   - Look for accuracy tests instead of performance tests
--check                          - Shortcut for '--perf_min_samples=1 --perf_force_samples=1'
-w PATH, --cwd PATH              - Working directory for tests (default is current)
-n, --dry_run                    - Do not run anything
-v, --verbose                    - Print more debug information

### Example

./run.py -a -t core --gtest_filter=*CopyTo*

Run: /work/build-opencv/bin/opencv_test_core --gtest_filter=*CopyTo* --gtest_output=xml:core_20221017-195300.xml --gtest_color=yes
CTEST_FULL_OUTPUT
...
regular test output
...
[ PASSED ] 113 tests.
Collected: ['core_20221017-195300.xml']
"""

import os
import argparse
@@ -1,4 +1,7 @@
#!/usr/bin/env python
""" Utility package for run.py
"""

import os
import re
import getpass
@@ -1,4 +1,7 @@
#!/usr/bin/env python
""" Utility package for run.py
"""

from __future__ import print_function
import xml.etree.ElementTree as ET
from glob import glob
@@ -1,4 +1,7 @@
#!/usr/bin/env python
""" Utility package for run.py
"""

import os
import re
import sys
@@ -1,4 +1,7 @@
#!/usr/bin/env python
""" Utility package for run.py
"""

import sys
import os
import platform
modules/ts/misc/summary.py

@@ -1,4 +1,36 @@
#!/usr/bin/env python
""" Format performance test results and compare metrics between test runs

Performance data is stored in the GTest log file created by performance tests. The default name is
`test_details.xml`. It can be changed with the `--gtest_output=xml:<location>/<filename>.xml` test
option. See https://github.com/opencv/opencv/wiki/HowToUsePerfTests for more details.

This script allows comparing performance data collected during separate test runs and presenting it
as a text, Markdown or HTML table.

### Major options

-o FMT, --output=FMT     - output format ('txt', 'html', 'markdown', 'tabs' or 'auto')
-f REGEX, --filter=REGEX - regex to filter tests
-m NAME, --metric=NAME   - output metric
-u UNITS, --units=UNITS  - units for output values (s, ms (default), us, ns or ticks)

### Example

./summary.py -f LUT.*640 core1.xml core2.xml

Geometric mean (ms)

Name of Test                            core1   core2   core2 vs core1 (x-factor)
LUT::OCL_LUTFixture::(640x480, 8UC1)    2.278   0.737   3.09
LUT::OCL_LUTFixture::(640x480, 32FC1)   2.622   0.805   3.26
LUT::OCL_LUTFixture::(640x480, 8UC4)   19.243   3.624   5.31
LUT::OCL_LUTFixture::(640x480, 32FC4)  21.254   4.296   4.95
LUT::SizePrm::640x480                   2.268   0.687   3.30
"""

from __future__ import print_function
import testlog_parser, sys, os, xml, glob, re
4  modules/ts/misc/table_formatter.py
Executable file → Normal file
@@ -1,4 +1,8 @@
#!/usr/bin/env python
""" Prints data in a table format.

This module serves as a utility for other scripts.
"""

from __future__ import print_function
import sys, re, os.path, stat, math
3  modules/ts/misc/testlog_parser.py
Executable file → Normal file
@@ -1,5 +1,8 @@
#!/usr/bin/env python
""" Parse the XML test log file.

This module serves as a utility for other scripts.
"""
from __future__ import print_function
import collections
import re
28  modules/ts/misc/trace_profiler.py
Normal file → Executable file
@@ -1,3 +1,31 @@
#!/usr/bin/env python
""" Parse OpenCV trace logs and present summarized statistics in a table

To collect trace logs, use OpenCV built with tracing support (enabled by default), set the
`OPENCV_TRACE=1` environment variable and run your application. An `OpenCVTrace.txt` file will be
created in the current folder.
See https://github.com/opencv/opencv/wiki/Profiling-OpenCV-Applications for more details.

### Usage

./trace_profiler.py <TraceLogFile> <num>

<TraceLogFile> - usually OpenCVTrace.txt
<num>          - number of functions to show (depth)

### Example

./trace_profiler.py OpenCVTrace.txt 2

ID name                                           count thr     min ...   t-min ...
 1 main#test_main.cpp:6                               1   1  88.484 ... 200.210 ...
 2 UMatBasicTests_copyTo#test_umat.cpp:176|main      40   1   0.125 ...   0.173 ...
"""

from __future__ import print_function

import os