Merge pull request #22248 from cudawarped:ffmpeg_rtsp_low_fps

* Allow the number of threads FFmpeg uses to be selected during VideoCapture::open().
Reset the interrupt timer in grab() if
err = avformat_find_stream_info(ic, NULL);
is interrupted but open() is successful.

* Correct the returned number of threads and amend test cases.

* Update container test case.

* Revert the changes added to the existing videoio_container test case and include a test combining the thread-count change and raw read in the newly added videoio_read test case.
This commit is contained in:
cudawarped 2022-09-12 07:12:28 +01:00 committed by GitHub
parent 7a5122121b
commit 46d988e2cb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 60 additions and 1 deletions

View File

@ -204,6 +204,7 @@ enum VideoCaptureProperties {
CAP_PROP_LRF_HAS_KEY_FRAME = 67, //!< FFmpeg back-end only - Indicates whether the Last Raw Frame (LRF), output from VideoCapture::read() when VideoCapture is initialized with VideoCapture::open(CAP_FFMPEG, {CAP_PROP_FORMAT, -1}) or VideoCapture::set(CAP_PROP_FORMAT,-1) is called before the first call to VideoCapture::read(), contains encoded data for a key frame.
CAP_PROP_CODEC_EXTRADATA_INDEX = 68, //!< Positive index indicates that returning extra data is supported by the video back end. This can be retrieved as cap.retrieve(data, <returned index>). E.g. When reading from a h264 encoded RTSP stream, the FFmpeg backend could return the SPS and/or PPS if available (if sent in reply to a DESCRIBE request), from calls to cap.retrieve(data, <returned index>).
CAP_PROP_FRAME_TYPE = 69, //!< (read-only) FFmpeg back-end only - Frame type ascii code (73 = 'I', 80 = 'P', 66 = 'B' or 63 = '?' if unknown) of the most recently read frame.
CAP_PROP_N_THREADS = 70, //!< (**open-only**) Set the maximum number of threads to use. Use 0 to use as many threads as CPU cores (applicable for FFmpeg back-end only).
#ifndef CV_DOXYGEN
CV__CAP_PROP_LATEST
#endif

View File

@ -987,7 +987,8 @@ inline void fill_codec_context(AVCodecContext * enc, AVDictionary * dict)
//#ifdef FF_API_THREAD_INIT
// avcodec_thread_init(enc, get_number_of_cpus());
//#else
enc->thread_count = get_number_of_cpus();
const int nCpus = get_number_of_cpus();
enc->thread_count = enc->thread_count ? enc->thread_count: nCpus;
//#endif
AVDictionaryEntry* avdiscard_entry = av_dict_get(dict, "avdiscard", NULL, 0);
@ -1024,6 +1025,7 @@ bool CvCapture_FFMPEG::open(const char* _filename, const VideoCaptureParameters&
unsigned i;
bool valid = false;
int nThreads = 0;
close();
@ -1081,6 +1083,10 @@ bool CvCapture_FFMPEG::open(const char* _filename, const VideoCaptureParameters&
read_timeout = params.get<int>(CAP_PROP_READ_TIMEOUT_MSEC);
}
#endif
if (params.has(CAP_PROP_N_THREADS))
{
nThreads = params.get<int>(CAP_PROP_N_THREADS);
}
if (params.warnUnusedParameters())
{
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: unsupported parameters in .open(), see logger INFO channel for details. Bailout");
@ -1248,6 +1254,7 @@ bool CvCapture_FFMPEG::open(const char* _filename, const VideoCaptureParameters&
#endif
continue;
}
context->thread_count = nThreads;
fill_codec_context(context, dict);
#ifdef CV_FFMPEG_CODECPAR
avcodec_parameters_to_context(context, par);
@ -1444,6 +1451,7 @@ bool CvCapture_FFMPEG::grabFrame()
#if USE_AV_INTERRUPT_CALLBACK
// activate interrupt callback
interrupt_metadata.timeout = 0;
get_monotonic_time(&interrupt_metadata.value);
interrupt_metadata.timeout_after_ms = read_timeout;
#endif
@ -1774,6 +1782,8 @@ double CvCapture_FFMPEG::getProperty( int property_id ) const
case CAP_PROP_STREAM_OPEN_TIME_USEC:
//ic->start_time_realtime is in microseconds
return ((double)ic->start_time_realtime);
case CAP_PROP_N_THREADS:
return static_cast<double>(context->thread_count);
default:
break;
}

View File

@ -95,6 +95,54 @@ TEST(videoio_ffmpeg, image)
//==========================================================================
// Thread counts to request via CAP_PROP_N_THREADS: 0 = backend default
// (as many threads as CPU cores), plus explicit small and oversized values.
#define THREADS testing::ValuesIn({ 0,1,2,2000 })
// Whether to switch the capture to raw (undecoded) packet extraction.
#define RAW_READ testing::ValuesIn({true, false})
// Per-file parameters: {file name, expected frame count, decoder reports a
// fixed thread count regardless of the requested value}.
typedef tuple<string, int, bool> videoio_read_params_t;
// Full test parameter: {per-file params, requested thread count, raw read flag}.
typedef testing::TestWithParam< testing::tuple<videoio_read_params_t, int, bool>> videoio_read;
// Verify that CAP_PROP_N_THREADS passed to VideoCapture::open() is honored
// (or that the backend default is reported when it cannot be), and that the
// stream still decodes the expected number of frames.
TEST_P(videoio_read, threads)
{
    const VideoCaptureAPIs api = CAP_FFMPEG;
    if (!videoio_registry::hasBackend(api))
        throw SkipTestException("Backend was not found");
    // Unpack the combined test parameters.
    const videoio_read_params_t streamParams = get<0>(GetParam());
    const string fileName = get<0>(streamParams);
    const int expectedFrames = get<1>(streamParams);
    const bool fixedThreadCount = get<2>(streamParams);
    const int requestedThreads = get<1>(GetParam());
    const bool readRaw = get<2>(GetParam());
    VideoCapture cap(findDataFile(fileName), api, { CAP_PROP_N_THREADS, requestedThreads });
    if (!cap.isOpened())
        throw SkipTestException("Video stream is not supported");
    // An explicit, honored request should be echoed back; otherwise the
    // reported value must match a default-configured capture of the same file.
    if (requestedThreads != 0 && !fixedThreadCount)
        EXPECT_EQ(cap.get(CAP_PROP_N_THREADS), requestedThreads);
    else
        EXPECT_EQ(cap.get(CAP_PROP_N_THREADS), VideoCapture(findDataFile(fileName), api).get(CAP_PROP_N_THREADS));
    if (readRaw && !cap.set(CAP_PROP_FORMAT, -1)) // turn off video decoder (extract stream)
        throw SkipTestException("Fetching of RAW video streams is not supported");
    Mat frame;
    int framesRead = 0;
    for (;;)
    {
        if (!cap.read(frame))
            break;
        ASSERT_FALSE(frame.empty());
        ++framesRead;
    }
    ASSERT_EQ(framesRead, expectedFrames);
}
// Test streams: {file name, expected frame count, decoder uses a fixed thread
// count (the requested CAP_PROP_N_THREADS value is not reported back)}.
// NOTE(review): the commented-out entries appear to be disabled pending
// availability/validation of those container formats — confirm before enabling.
const videoio_read_params_t videoio_read_params[] =
{
videoio_read_params_t("video/big_buck_bunny.h264", 125, false),
//videoio_read_params_t("video/big_buck_bunny.h265", 125, false),
videoio_read_params_t("video/big_buck_bunny.mjpg.avi", 125, true),
//videoio_read_params_t("video/big_buck_bunny.mov", 125, false),
//videoio_read_params_t("video/big_buck_bunny.mp4", 125, false),
//videoio_read_params_t("video/big_buck_bunny.mpg", 125, false),
//videoio_read_params_t("video/big_buck_bunny.wmv", 125, true),
};
// Instantiate over the cross product of files, thread counts, and raw-read modes.
INSTANTIATE_TEST_CASE_P(/**/, videoio_read, testing::Combine(testing::ValuesIn(videoio_read_params), THREADS, RAW_READ));
//==========================================================================
typedef tuple<VideoCaptureAPIs, string, string, string, string, string> videoio_container_params_t;
typedef testing::TestWithParam< videoio_container_params_t > videoio_container;