-From 070ac586846f4ae2bbe88946afcc0f5ef109c4a5 Mon Sep 17 00:00:00 2001
+From 677c6d3e58064e9d10472e5ca75380b31631d12a Mon Sep 17 00:00:00 2001
From: tobtoht <tob@featherwallet.org>
-Date: Mon, 2 Jan 2023 20:03:18 +0100
+Date: Tue, 3 Jan 2023 13:00:51 +0100
Subject: [PATCH] qtmultimedia fixes
---
- cmake/FindWMF.cmake | 10 +-
- src/multimedia/configure.cmake | 3 +-
- .../windows/qwindowsmediadevices.cpp | 2 +-
- src/multimedia/windows/qwindowsresampler.cpp | 2 +-
- .../darwin/camera/qavfcamerabase.mm | 1 +
- .../multimedia/darwin/qdarwinintegration.mm | 3 +-
- src/plugins/multimedia/ffmpeg/CMakeLists.txt | 87 +-
- src/plugins/multimedia/ffmpeg/qavfcamera.mm | 422 ------
- src/plugins/multimedia/ffmpeg/qavfcamera_p.h | 87 --
- src/plugins/multimedia/ffmpeg/qffmpeg_p.h | 60 -
- .../multimedia/ffmpeg/qffmpegaudiodecoder.cpp | 265 ----
- .../multimedia/ffmpeg/qffmpegaudiodecoder_p.h | 69 -
- .../multimedia/ffmpeg/qffmpegaudioinput.cpp | 189 ---
- .../multimedia/ffmpeg/qffmpegaudioinput_p.h | 54 -
- .../multimedia/ffmpeg/qffmpegclock.cpp | 193 ---
- .../multimedia/ffmpeg/qffmpegclock_p.h | 113 --
- .../multimedia/ffmpeg/qffmpegdecoder.cpp | 1272 -----------------
- .../multimedia/ffmpeg/qffmpegdecoder_p.h | 501 -------
- .../multimedia/ffmpeg/qffmpegencoder.cpp | 557 --------
- .../multimedia/ffmpeg/qffmpegencoder_p.h | 197 ---
- .../ffmpeg/qffmpegencoderoptions.cpp | 272 ----
- .../ffmpeg/qffmpegencoderoptions_p.h | 32 -
- .../multimedia/ffmpeg/qffmpeghwaccel.cpp | 372 -----
- .../ffmpeg/qffmpeghwaccel_d3d11.cpp | 158 --
- .../ffmpeg/qffmpeghwaccel_d3d11_p.h | 43 -
- .../ffmpeg/qffmpeghwaccel_mediacodec.cpp | 70 -
- .../ffmpeg/qffmpeghwaccel_mediacodec_p.h | 35 -
- .../multimedia/ffmpeg/qffmpeghwaccel_p.h | 121 --
- .../ffmpeg/qffmpeghwaccel_vaapi.cpp | 346 -----
- .../ffmpeg/qffmpeghwaccel_vaapi_p.h | 48 -
- .../ffmpeg/qffmpeghwaccel_videotoolbox.mm | 281 ----
- .../ffmpeg/qffmpeghwaccel_videotoolbox_p.h | 63 -
- .../ffmpeg/qffmpegmediacapturesession.cpp | 15 +-
- .../ffmpeg/qffmpegmediacapturesession_p.h | 1 -
- .../ffmpeg/qffmpegmediaformatinfo.cpp | 474 ------
- .../ffmpeg/qffmpegmediaformatinfo_p.h | 18 -
- .../ffmpeg/qffmpegmediaintegration.cpp | 31 -
- .../ffmpeg/qffmpegmediaintegration_p.h | 8 -
- .../ffmpeg/qffmpegmediametadata.cpp | 105 --
- .../ffmpeg/qffmpegmediametadata_p.h | 5 -
- .../multimedia/ffmpeg/qffmpegmediaplayer.cpp | 236 ---
- .../multimedia/ffmpeg/qffmpegmediaplayer_p.h | 98 --
- .../ffmpeg/qffmpegmediarecorder.cpp | 157 --
- .../ffmpeg/qffmpegmediarecorder_p.h | 68 -
- .../multimedia/ffmpeg/qffmpegresampler.cpp | 95 --
- .../multimedia/ffmpeg/qffmpegresampler_p.h | 46 -
- .../multimedia/ffmpeg/qffmpegthread.cpp | 57 -
- .../multimedia/ffmpeg/qffmpegthread_p.h | 68 -
- .../multimedia/ffmpeg/qffmpegvideobuffer.cpp | 356 -----
- .../multimedia/ffmpeg/qffmpegvideobuffer_p.h | 72 -
- .../ffmpeg/qffmpegvideoframeencoder.cpp | 374 -----
- .../ffmpeg/qffmpegvideoframeencoder_p.h | 76 -
- .../multimedia/ffmpeg/qffmpegvideosink.cpp | 14 -
- .../multimedia/ffmpeg/qffmpegvideosink_p.h | 10 +-
- src/plugins/multimedia/ffmpeg/qv4l2camera.cpp | 4 +-
- .../multimedia/ffmpeg/qwindowscamera.cpp | 4 +-
- .../multimedia/windows/common/mfmetadata_p.h | 2 +-
- .../windows/decoder/mfaudiodecodercontrol.cpp | 2 +-
- .../qwindowsmediadevicereader_p.h | 4 +-
- .../mediacapture/qwindowsmediaencoder.cpp | 2 +-
- .../windows/player/mfplayercontrol_p.h | 2 +-
- .../windows/player/mfplayersession.cpp | 2 +-
- .../multimedia/windows/player/mftvideo.cpp | 2 +-
- .../windows/qwindowsvideodevices.cpp | 4 +-
- 64 files changed, 33 insertions(+), 8307 deletions(-)
- delete mode 100644 src/plugins/multimedia/ffmpeg/qavfcamera.mm
- delete mode 100644 src/plugins/multimedia/ffmpeg/qavfcamera_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeg_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegaudioinput.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegclock.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegclock_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegdecoder.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegdecoder_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegencoder.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegencoder_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegencoderoptions.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegencoderoptions_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox.mm
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegthread.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegthread_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegvideoframeencoder.cpp
- delete mode 100644 src/plugins/multimedia/ffmpeg/qffmpegvideoframeencoder_p.h
+ cmake/FindWMF.cmake | 10 +++++-----
+ src/multimedia/configure.cmake | 3 +--
+ src/multimedia/windows/qwindowsmediadevices.cpp | 2 +-
+ src/multimedia/windows/qwindowsresampler.cpp | 2 +-
+ src/plugins/multimedia/darwin/qdarwinintegration.mm | 3 +--
+ src/plugins/multimedia/windows/common/mfmetadata_p.h | 2 +-
+ .../windows/decoder/mfaudiodecodercontrol.cpp | 2 +-
+ .../windows/mediacapture/qwindowsmediadevicereader_p.h | 4 ++--
+ .../windows/mediacapture/qwindowsmediaencoder.cpp | 2 +-
+ .../multimedia/windows/player/mfplayercontrol_p.h | 2 +-
+ .../multimedia/windows/player/mfplayersession.cpp | 2 +-
+ src/plugins/multimedia/windows/player/mftvideo.cpp | 2 +-
+ .../multimedia/windows/qwindowsvideodevices.cpp | 4 ++--
+ 13 files changed, 19 insertions(+), 21 deletions(-)
diff --git a/cmake/FindWMF.cmake b/cmake/FindWMF.cmake
-index 7c6923c1e..b69274be5 100644
+index 2f4633c34..35a276cb4 100644
--- a/cmake/FindWMF.cmake
+++ b/cmake/FindWMF.cmake
-@@ -22,11 +22,11 @@ find_library(WMF_UUID_LIBRARY uuid HINTS ${CMAKE_C_IMPLICIT_LINK_DIRECTORIES})
+@@ -25,11 +25,11 @@ find_library(WMF_UUID_LIBRARY uuid HINTS ${CMAKE_C_IMPLICIT_LINK_DIRECTORIES})
find_library(WMF_MSDMO_LIBRARY msdmo HINTS ${CMAKE_C_IMPLICIT_LINK_DIRECTORIES})
find_library(WMF_OLE32_LIBRARY ole32 HINTS ${CMAKE_C_IMPLICIT_LINK_DIRECTORIES})
find_library(WMF_OLEAUT32_LIBRARY oleaut32 HINTS ${CMAKE_C_IMPLICIT_LINK_DIRECTORIES})
set(WMF_LIBRARIES ${WMF_STRMIIDS_LIBRARY} ${WMF_AMSTRMID_LIBRARY} ${WMF_DMOGUIDS_LIBRARY} ${WMF_UUID_LIBRARY}
diff --git a/src/multimedia/configure.cmake b/src/multimedia/configure.cmake
-index efcadfc5c..29b056003 100644
+index 7fdb0af3a..868e91ba4 100644
--- a/src/multimedia/configure.cmake
+++ b/src/multimedia/configure.cmake
-@@ -21,7 +21,6 @@ qt_find_package(WrapPulseAudio PROVIDED_TARGETS WrapPulseAudio::WrapPulseAudio M
+@@ -24,7 +24,6 @@ qt_find_package(WrapPulseAudio PROVIDED_TARGETS WrapPulseAudio::WrapPulseAudio M
qt_find_package(WMF PROVIDED_TARGETS WMF::WMF MODULE_NAME multimedia QMAKE_LIB wmf)
qt_find_package(EGL)
qt_find_package(VAAPI COMPONENTS VA DRM PROVIDED_TARGETS VAAPI::VA VAAPI::DRM MODULE_NAME multimedia QMAKE_LIB vaapi)
#### Tests
-@@ -73,7 +72,7 @@ qt_feature("ffmpeg" PRIVATE
+@@ -76,7 +75,7 @@ qt_feature("ffmpeg" PRIVATE
LABEL "FFmpeg"
ENABLE INPUT_ffmpeg STREQUAL 'yes'
DISABLE INPUT_ffmpeg STREQUAL 'no'
qt_feature("alsa" PUBLIC PRIVATE
LABEL "ALSA (experimental)"
diff --git a/src/multimedia/windows/qwindowsmediadevices.cpp b/src/multimedia/windows/qwindowsmediadevices.cpp
-index c91597102..8c2df5816 100644
+index fcad8bab7..110d84c82 100644
--- a/src/multimedia/windows/qwindowsmediadevices.cpp
+++ b/src/multimedia/windows/qwindowsmediadevices.cpp
@@ -13,7 +13,7 @@
#include <mftransform.h>
#include <mferror.h>
-diff --git a/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
-index a11290a8d..b40c1133e 100644
---- a/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
-+++ b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
-@@ -7,6 +7,7 @@
- #include <private/qcameradevice_p.h>
- #include "qavfhelpers_p.h"
- #include <private/qplatformmediaintegration_p.h>
-+#include <QtCore/qset.h>
-
- QT_USE_NAMESPACE
-
diff --git a/src/plugins/multimedia/darwin/qdarwinintegration.mm b/src/plugins/multimedia/darwin/qdarwinintegration.mm
index 5e26fe5c4..3e82655b0 100644
--- a/src/plugins/multimedia/darwin/qdarwinintegration.mm
#endif
m_videoDevices = new QAVFVideoDevices(this);
}
-diff --git a/src/plugins/multimedia/ffmpeg/CMakeLists.txt b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
-index 5d6c0a8c3..6c83b9cb2 100644
---- a/src/plugins/multimedia/ffmpeg/CMakeLists.txt
-+++ b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
-@@ -1,107 +1,32 @@
--qt_find_package(EGL)
--qt_find_package(VAAPI COMPONENTS VA DRM PROVIDED_TARGETS VAAPI::VA VAAPI::DRM MODULE_NAME multimedia QMAKE_LIB vaapi)
--
--qt_internal_find_apple_system_framework(FWCoreMedia CoreMedia) # special case
--qt_internal_find_apple_system_framework(FWCoreAudio CoreAudio) # special case
--qt_internal_find_apple_system_framework(FWAudioUnit AudioUnit) # special case
--qt_internal_find_apple_system_framework(FWVideoToolbox VideoToolbox) # special case
--qt_internal_find_apple_system_framework(FWAVFoundation AVFoundation) # special case
--
- qt_internal_add_plugin(QFFmpegMediaPlugin
- OUTPUT_NAME ffmpegmediaplugin
- PLUGIN_TYPE multimedia
- SOURCES
-- qffmpeg_p.h
-- qffmpegaudiodecoder.cpp qffmpegaudiodecoder_p.h
-- qffmpegaudioinput.cpp qffmpegaudioinput_p.h
-- qffmpegclock.cpp qffmpegclock_p.h
-- qffmpegdecoder.cpp qffmpegdecoder_p.h
-- qffmpeghwaccel.cpp qffmpeghwaccel_p.h
-- qffmpegencoderoptions.cpp qffmpegencoderoptions_p.h
- qffmpegmediametadata.cpp qffmpegmediametadata_p.h
-- qffmpegmediaplayer.cpp qffmpegmediaplayer_p.h
- qffmpegvideosink.cpp qffmpegvideosink_p.h
- qffmpegmediaformatinfo.cpp qffmpegmediaformatinfo_p.h
- qffmpegmediaintegration.cpp qffmpegmediaintegration_p.h
-- qffmpegvideobuffer.cpp qffmpegvideobuffer_p.h
- qffmpegimagecapture.cpp qffmpegimagecapture_p.h
- qffmpegmediacapturesession.cpp qffmpegmediacapturesession_p.h
-- qffmpegmediarecorder.cpp qffmpegmediarecorder_p.h
-- qffmpegencoder.cpp qffmpegencoder_p.h
-- qffmpegthread.cpp qffmpegthread_p.h
-- qffmpegresampler.cpp qffmpegresampler_p.h
-- qffmpegvideoframeencoder.cpp qffmpegvideoframeencoder_p.h
- DEFINES
- QT_COMPILING_FFMPEG
- LIBRARIES
- Qt::MultimediaPrivate
- Qt::CorePrivate
-- FFmpeg::avformat FFmpeg::avcodec FFmpeg::swresample FFmpeg::swscale FFmpeg::avutil
--)
--
--qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_ffmpeg AND QT_FEATURE_vaapi
-- SOURCES
-- qffmpeghwaccel_vaapi.cpp qffmpeghwaccel_vaapi_p.h
-- LIBRARIES
-- VAAPI::VAAPI
-- EGL::EGL
- )
-
--qt_internal_extend_target(QFFmpegMediaPlugin CONDITION APPLE
-+qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_linux_v4l
- SOURCES
-- ../darwin/qavfhelpers.mm ../darwin/qavfhelpers_p.h
-- ../darwin/camera/qavfcamerabase_p.h ../darwin/camera/qavfcamerabase.mm
-- ../darwin/camera/avfcamerautility_p.h ../darwin/camera/avfcamerautility.mm
-- qffmpeghwaccel_videotoolbox.mm qffmpeghwaccel_videotoolbox_p.h
-- qavfcamera.mm qavfcamera_p.h
-- INCLUDE_DIRECTORIES
-- ../darwin
-- ../darwin/camera
-- LIBRARIES
-- ${FWAudioToolbox}
-- ${FWCoreAudio}
-- ${FWCoreFoundation}
-- ${FWCoreMedia}
-- ${FWCoreVideo}
-- ${FWVideoToolbox}
-- AVFoundation::AVFoundation
-+ qv4l2camera.cpp qv4l2camera_p.h
- )
-
- qt_internal_extend_target(QFFmpegMediaPlugin CONDITION WIN32
-- SOURCES
-+ SOURCES
- ../windows/qwindowsvideodevices.cpp ../windows/qwindowsvideodevices_p.h
- qwindowscamera.cpp qwindowscamera_p.h
-- qffmpeghwaccel_d3d11.cpp qffmpeghwaccel_d3d11_p.h
-- INCLUDE_DIRECTORIES
-+ INCLUDE_DIRECTORIES
- ../windows
-- LIBRARIES
-+ LIBRARIES
- WMF::WMF
- mfreadwrite
--)
--
--qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_linux_v4l
-- SOURCES
-- qv4l2camera.cpp qv4l2camera_p.h
--)
--
--if (ANDROID)
-- qt_internal_extend_target(QFFmpegMediaPlugin
-- SOURCES
-- qffmpeghwaccel_mediacodec.cpp qffmpeghwaccel_mediacodec_p.h
-- ../android/wrappers/jni/androidsurfacetexture_p.h
-- ../android/wrappers/jni/androidsurfacetexture.cpp
-- INCLUDE_DIRECTORIES
-- ${FFMPEG_DIR}/include
-- ../android/wrappers/jni/
-- )
--
-- set_property(TARGET QFFmpegMediaPlugin APPEND PROPERTY QT_ANDROID_LIB_DEPENDENCIES
-- plugins/multimedia/libplugins_multimedia_ffmpegmediaplugin.so
-- )
--
-- set_property(TARGET QFFmpegMediaPlugin APPEND PROPERTY QT_ANDROID_PERMISSIONS
-- android.permission.CAMERA android.permission.RECORD_AUDIO
-- android.permission.BLUETOOTH
-- android.permission.MODIFY_AUDIO_SETTINGS
-- )
--endif()
-+)
-\ No newline at end of file
-diff --git a/src/plugins/multimedia/ffmpeg/qavfcamera.mm b/src/plugins/multimedia/ffmpeg/qavfcamera.mm
-deleted file mode 100644
-index 37dd4b262..000000000
---- a/src/plugins/multimedia/ffmpeg/qavfcamera.mm
-+++ /dev/null
-@@ -1,422 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#include <qavfcamera_p.h>
--#include <qpointer.h>
--#include <qmediacapturesession.h>
--#include <private/qplatformmediacapture_p.h>
--#include "avfcamerautility_p.h"
--#include "qavfhelpers_p.h"
--#include <qvideosink.h>
--#include <private/qrhi_p.h>
--#define AVMediaType XAVMediaType
--#include "qffmpegvideobuffer_p.h"
--#include "qffmpegvideosink_p.h"
--extern "C" {
--#include <libavutil/hwcontext_videotoolbox.h>
--#include <libavutil/hwcontext.h>
--}
--#undef AVMediaType
--
--
--
--#import <AVFoundation/AVFoundation.h>
--#include <CoreVideo/CoreVideo.h>
--
--static void releaseHwFrame(void */*opaque*/, uint8_t *data)
--{
-- CVPixelBufferRelease(CVPixelBufferRef(data));
--}
--
--// Make sure this is compatible with the layout used in ffmpeg's hwcontext_videotoolbox
--static AVFrame *allocHWFrame(AVBufferRef *hwContext, const CVPixelBufferRef &pixbuf)
--{
-- AVHWFramesContext *ctx = (AVHWFramesContext*)hwContext->data;
-- AVFrame *frame = av_frame_alloc();
-- frame->hw_frames_ctx = av_buffer_ref(hwContext);
-- frame->extended_data = frame->data;
--
-- frame->buf[0] = av_buffer_create((uint8_t *)pixbuf, 1, releaseHwFrame, NULL, 0);
-- frame->data[3] = (uint8_t *)pixbuf;
-- CVPixelBufferRetain(pixbuf);
-- frame->width = ctx->width;
-- frame->height = ctx->height;
-- frame->format = AV_PIX_FMT_VIDEOTOOLBOX;
-- if (frame->width != (int)CVPixelBufferGetWidth(pixbuf) ||
-- frame->height != (int)CVPixelBufferGetHeight(pixbuf)) {
-- // This can happen while changing camera format
-- av_frame_free(&frame);
-- return nullptr;
-- }
-- return frame;
--}
--
--static AVAuthorizationStatus m_cameraAuthorizationStatus = AVAuthorizationStatusNotDetermined;
--
--@interface QAVFSampleBufferDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
--
--- (QAVFSampleBufferDelegate *) initWithCamera:(QAVFCamera *)renderer;
--
--- (void) captureOutput:(AVCaptureOutput *)captureOutput
-- didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
-- fromConnection:(AVCaptureConnection *)connection;
--
--- (void) setHWAccel:(std::unique_ptr<QFFmpeg::HWAccel> &&)accel;
--
--@end
--
--@implementation QAVFSampleBufferDelegate
--{
--@private
-- QAVFCamera *m_camera;
-- AVBufferRef *hwFramesContext;
-- std::unique_ptr<QFFmpeg::HWAccel> m_accel;
-- qint64 startTime;
-- qint64 baseTime;
--}
--
--- (QAVFSampleBufferDelegate *) initWithCamera:(QAVFCamera *)renderer
--{
-- if (!(self = [super init]))
-- return nil;
--
-- m_camera = renderer;
-- hwFramesContext = nullptr;
-- startTime = 0;
-- baseTime = 0;
-- return self;
--}
--
--- (void)captureOutput:(AVCaptureOutput *)captureOutput
-- didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
-- fromConnection:(AVCaptureConnection *)connection
--{
-- Q_UNUSED(connection);
-- Q_UNUSED(captureOutput);
--
-- // NB: on iOS captureOutput/connection can be nil (when recording a video -
-- // avfmediaassetwriter).
--
-- CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
--
-- CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-- qint64 frameTime = time.timescale ? time.value*1000/time.timescale : 0;
-- if (baseTime == 0) {
-- // drop the first frame to get a valid frame start time
-- baseTime = frameTime;
-- startTime = 0;
-- return;
-- }
--
-- if (!m_accel)
-- return;
--
-- AVFrame *avFrame = allocHWFrame(m_accel->hwFramesContextAsBuffer(), imageBuffer);
-- if (!avFrame)
-- return;
--
--#ifdef USE_SW_FRAMES
-- auto *swFrame = av_frame_alloc();
-- /* retrieve data from GPU to CPU */
-- int ret = av_hwframe_transfer_data(swFrame, avFrame, 0);
-- if (ret < 0) {
-- qWarning() << "Error transferring the data to system memory\n";
-- av_frame_unref(swFrame);
-- } else {
-- av_frame_unref(avFrame);
-- avFrame = swFrame;
-- }
--#endif
--
-- QVideoFrameFormat format = QAVFHelpers::videoFormatForImageBuffer(imageBuffer);
-- if (!format.isValid()) {
-- av_frame_unref(avFrame);
-- return;
-- }
--
-- avFrame->pts = startTime;
--
-- QFFmpegVideoBuffer *buffer = new QFFmpegVideoBuffer(avFrame);
-- QVideoFrame frame(buffer, format);
-- frame.setStartTime(startTime);
-- frame.setEndTime(frameTime);
-- startTime = frameTime;
--
-- m_camera->syncHandleFrame(frame);
--}
--
--- (void) setHWAccel:(std::unique_ptr<QFFmpeg::HWAccel> &&)accel
--{
-- m_accel = std::move(accel);
--}
--
--@end
--
--QT_BEGIN_NAMESPACE
--
--QAVFCamera::QAVFCamera(QCamera *parent)
-- : QAVFCameraBase(parent)
--{
-- m_captureSession = [[AVCaptureSession alloc] init];
-- m_sampleBufferDelegate = [[QAVFSampleBufferDelegate alloc] initWithCamera:this];
--}
--
--QAVFCamera::~QAVFCamera()
--{
-- [m_sampleBufferDelegate release];
-- [m_videoInput release];
-- [m_videoDataOutput release];
-- [m_captureSession release];
--}
--
--void QAVFCamera::requestCameraPermissionIfNeeded()
--{
-- if (m_cameraAuthorizationStatus == AVAuthorizationStatusAuthorized)
-- return;
--
-- switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo])
-- {
-- case AVAuthorizationStatusAuthorized:
-- {
-- m_cameraAuthorizationStatus = AVAuthorizationStatusAuthorized;
-- break;
-- }
-- case AVAuthorizationStatusNotDetermined:
-- {
-- m_cameraAuthorizationStatus = AVAuthorizationStatusNotDetermined;
-- QPointer<QAVFCamera> guard(this);
-- [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
-- dispatch_async(dispatch_get_main_queue(), ^{
-- if (guard)
-- cameraAuthorizationChanged(granted);
-- });
-- }];
-- break;
-- }
-- case AVAuthorizationStatusDenied:
-- case AVAuthorizationStatusRestricted:
-- {
-- m_cameraAuthorizationStatus = AVAuthorizationStatusDenied;
-- return;
-- }
-- }
--}
--
--void QAVFCamera::cameraAuthorizationChanged(bool authorized)
--{
-- if (authorized) {
-- m_cameraAuthorizationStatus = AVAuthorizationStatusAuthorized;
-- } else {
-- m_cameraAuthorizationStatus = AVAuthorizationStatusDenied;
-- qWarning() << "User has denied access to camera";
-- }
--}
--
--void QAVFCamera::updateVideoInput()
--{
-- requestCameraPermissionIfNeeded();
-- if (m_cameraAuthorizationStatus != AVAuthorizationStatusAuthorized)
-- return;
--
-- [m_captureSession beginConfiguration];
--
-- attachVideoInputDevice();
--
-- if (!m_videoDataOutput) {
-- m_videoDataOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
--
-- // Configure video output
-- m_delegateQueue = dispatch_queue_create("vf_queue", nullptr);
-- [m_videoDataOutput
-- setSampleBufferDelegate:m_sampleBufferDelegate
-- queue:m_delegateQueue];
--
-- [m_captureSession addOutput:m_videoDataOutput];
-- }
-- [m_captureSession commitConfiguration];
-- deviceOrientationChanged();
--}
--
--void QAVFCamera::deviceOrientationChanged(int angle)
--{
-- AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
-- if (connection == nil || !m_videoDataOutput)
-- return;
--
-- if (!connection.supportsVideoOrientation)
-- return;
--
-- if (angle < 0)
-- angle = m_orientationHandler.currentOrientation();
--
-- AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
-- switch (angle) {
-- default:
-- break;
-- case 90:
-- orientation = AVCaptureVideoOrientationLandscapeRight;
-- break;
-- case 180:
-- // this keeps the last orientation, don't do anything
-- return;
-- case 270:
-- orientation = AVCaptureVideoOrientationLandscapeLeft;
-- break;
-- }
--
-- connection.videoOrientation = orientation;
--}
--
--void QAVFCamera::attachVideoInputDevice()
--{
-- if (m_videoInput) {
-- [m_captureSession removeInput:m_videoInput];
-- [m_videoInput release];
-- m_videoInput = nullptr;
-- }
--
-- QByteArray deviceId = m_cameraDevice.id();
-- if (deviceId.isEmpty())
-- return;
--
-- AVCaptureDevice *videoDevice = [AVCaptureDevice deviceWithUniqueID:
-- [NSString stringWithUTF8String: deviceId.constData()]];
--
-- if (!videoDevice)
-- return;
--
-- m_videoInput = [AVCaptureDeviceInput
-- deviceInputWithDevice:videoDevice
-- error:nil];
-- if (m_videoInput && [m_captureSession canAddInput:m_videoInput]) {
-- [m_videoInput retain];
-- [m_captureSession addInput:m_videoInput];
-- } else {
-- qWarning() << "Failed to create video device input";
-- }
--}
--
--AVCaptureDevice *QAVFCamera::device() const
--{
-- return m_videoInput ? m_videoInput.device : nullptr;
--}
--
--bool QAVFCamera::isActive() const
--{
-- return m_active;
--}
--
--void QAVFCamera::setActive(bool active)
--{
-- if (m_active == active)
-- return;
-- requestCameraPermissionIfNeeded();
-- if (m_cameraAuthorizationStatus != AVAuthorizationStatusAuthorized)
-- return;
--
-- m_active = active;
--
-- if (active) {
-- // According to the doc, the capture device must be locked before
-- // startRunning to prevent the format we set to be overridden by the
-- // session preset.
-- [m_videoInput.device lockForConfiguration:nil];
-- [m_captureSession startRunning];
-- [m_videoInput.device unlockForConfiguration];
-- } else {
-- [m_captureSession stopRunning];
-- }
--
-- emit activeChanged(active);
--}
--
--void QAVFCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
--{
-- m_session = session ? session->captureSession() : nullptr;
--}
--
--void QAVFCamera::setCamera(const QCameraDevice &camera)
--{
-- if (m_cameraDevice == camera)
-- return;
--
-- m_cameraDevice = camera;
--
-- requestCameraPermissionIfNeeded();
-- if (m_cameraAuthorizationStatus == AVAuthorizationStatusAuthorized)
-- updateVideoInput();
-- setCameraFormat({});
--}
--
--bool QAVFCamera::setCameraFormat(const QCameraFormat &format)
--{
-- if (m_cameraFormat == format && !format.isNull())
-- return true;
--
-- QAVFCameraBase::setCameraFormat(format);
-- updateCameraFormat();
-- return true;
--}
--
--void QAVFCamera::updateCameraFormat()
--{
-- AVCaptureDevice *captureDevice = device();
-- if (!captureDevice)
-- return;
--
-- uint avPixelFormat = 0;
-- AVCaptureDeviceFormat *newFormat = qt_convert_to_capture_device_format(captureDevice, m_cameraFormat);
-- if (newFormat) {
-- qt_set_active_format(captureDevice, newFormat, false);
-- avPixelFormat = setPixelFormat(m_cameraFormat.pixelFormat());
-- }
--
-- auto hwAccel = QFFmpeg::HWAccel::create(AV_HWDEVICE_TYPE_VIDEOTOOLBOX);
-- if (hwAccel) {
-- hwAccel->createFramesContext(av_map_videotoolbox_format_to_pixfmt(avPixelFormat),
-- m_cameraFormat.resolution());
-- hwPixelFormat = hwAccel->hwFormat();
-- } else {
-- hwPixelFormat = AV_PIX_FMT_NONE;
-- }
-- [m_sampleBufferDelegate setHWAccel:std::move(hwAccel)];
--}
--
--uint QAVFCamera::setPixelFormat(const QVideoFrameFormat::PixelFormat pixelFormat)
--{
-- // Default to 32BGRA pixel formats on the viewfinder, in case the requested
-- // format can't be used (shouldn't happen unless the developers sets a wrong camera
-- // format on the camera).
-- unsigned avPixelFormat = kCVPixelFormatType_32BGRA;
-- if (!QAVFHelpers::toCVPixelFormat(pixelFormat, avPixelFormat))
-- qWarning() << "QCamera::setCameraFormat: couldn't convert requested pixel format, using ARGB32";
--
-- bool isSupported = false;
-- NSArray *supportedPixelFormats = m_videoDataOutput.availableVideoCVPixelFormatTypes;
-- for (NSNumber *currentPixelFormat in supportedPixelFormats)
-- {
-- if ([currentPixelFormat unsignedIntValue] == avPixelFormat) {
-- isSupported = true;
-- break;
-- }
-- }
--
-- if (isSupported) {
-- NSDictionary* outputSettings = @{
-- (NSString *)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithUnsignedInt:avPixelFormat],
-- (NSString *)kCVPixelBufferMetalCompatibilityKey: @true
-- };
-- m_videoDataOutput.videoSettings = outputSettings;
-- } else {
-- qWarning() << "QCamera::setCameraFormat: requested pixel format not supported. Did you use a camera format from another camera?";
-- }
-- return avPixelFormat;
--}
--
--void QAVFCamera::syncHandleFrame(const QVideoFrame &frame)
--{
-- Q_EMIT newVideoFrame(frame);
--}
--
--QT_END_NAMESPACE
--
--#include "moc_qavfcamera_p.cpp"
-diff --git a/src/plugins/multimedia/ffmpeg/qavfcamera_p.h b/src/plugins/multimedia/ffmpeg/qavfcamera_p.h
-deleted file mode 100644
-index 40a53dc7c..000000000
---- a/src/plugins/multimedia/ffmpeg/qavfcamera_p.h
-+++ /dev/null
-@@ -1,87 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#ifndef QAVFCAMERA_H
--#define QAVFCAMERA_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qavfcamerabase_p.h"
--#include <private/qplatformmediaintegration_p.h>
--#include <private/qvideooutputorientationhandler_p.h>
--#define AVMediaType XAVMediaType
--#include "qffmpeghwaccel_p.h"
--#undef AVMediaType
--
--#include <qfilesystemwatcher.h>
--#include <qsocketnotifier.h>
--#include <qmutex.h>
--
--#include <dispatch/dispatch.h>
--
--Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureSession);
--Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDeviceInput);
--Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureVideoDataOutput);
--Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDevice);
--Q_FORWARD_DECLARE_OBJC_CLASS(QAVFSampleBufferDelegate);
--
--QT_BEGIN_NAMESPACE
--
--class QFFmpegVideoSink;
--
--class QAVFCamera : public QAVFCameraBase
--{
-- Q_OBJECT
--
--public:
-- explicit QAVFCamera(QCamera *parent);
-- ~QAVFCamera();
--
-- bool isActive() const override;
-- void setActive(bool active) override;
--
-- void setCaptureSession(QPlatformMediaCaptureSession *) override;
--
-- void setCamera(const QCameraDevice &camera) override;
-- bool setCameraFormat(const QCameraFormat &format) override;
--
-- void syncHandleFrame(const QVideoFrame &frame);
--
-- void deviceOrientationChanged(int angle = -1);
--
-- std::optional<int> ffmpegHWPixelFormat() const override { return hwPixelFormat; }
--
--private:
-- void requestCameraPermissionIfNeeded();
-- void cameraAuthorizationChanged(bool authorized);
-- void updateCameraFormat();
-- void updateVideoInput();
-- void attachVideoInputDevice();
-- uint setPixelFormat(const QVideoFrameFormat::PixelFormat pixelFormat);
--
-- AVCaptureDevice *device() const;
--
-- QMediaCaptureSession *m_session = nullptr;
-- AVCaptureSession *m_captureSession = nullptr;
-- AVCaptureDeviceInput *m_videoInput = nullptr;
-- AVCaptureVideoDataOutput *m_videoDataOutput = nullptr;
-- QAVFSampleBufferDelegate *m_sampleBufferDelegate = nullptr;
-- dispatch_queue_t m_delegateQueue;
-- QVideoOutputOrientationHandler m_orientationHandler;
-- AVPixelFormat hwPixelFormat = AV_PIX_FMT_NONE;
--};
--
--QT_END_NAMESPACE
--
--
--#endif // QFFMPEGCAMERA_H
--
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeg_p.h b/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
-deleted file mode 100644
-index 6a1d6ab38..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
-+++ /dev/null
-@@ -1,60 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEG_P_H
--#define QFFMPEG_P_H
--
--#include <private/qtmultimediaglobal_p.h>
--#include <qstring.h>
--
--extern "C" {
--#include <libavformat/avformat.h>
--#include <libavcodec/avcodec.h>
--#include <libswresample/swresample.h>
--#include <libavutil/avutil.h>
--#include <libswscale/swscale.h>
--}
--
--#define QT_FFMPEG_OLD_CHANNEL_LAYOUT (LIBAVCODEC_VERSION_INT < AV_VERSION_INT(59,24,100))
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg
--{
--
--inline std::optional<qint64> mul(qint64 a, AVRational b)
--{
-- return b.den != 0 ? (a * b.num + b.den / 2) / b.den : std::optional<qint64>{};
--}
--
--inline std::optional<qreal> mul(qreal a, AVRational b)
--{
-- return b.den != 0 ? a * qreal(b.num) / qreal(b.den) : std::optional<qreal>{};
--}
--
--inline std::optional<qint64> timeStampMs(qint64 ts, AVRational base)
--{
-- return mul(1'000 * ts, base);
--}
--
--inline std::optional<qint64> timeStampUs(qint64 ts, AVRational base)
--{
-- return mul(1'000'000 * ts, base);
--}
--
--inline std::optional<float> toFloat(AVRational r)
--{
-- return r.den != 0 ? float(r.num) / float(r.den) : std::optional<float>{};
--}
--
--inline QString err2str(int errnum)
--{
-- char buffer[AV_ERROR_MAX_STRING_SIZE + 1] = {};
-- av_make_error_string(buffer, AV_ERROR_MAX_STRING_SIZE, errnum);
-- return QString::fromLocal8Bit(buffer);
--}
--
--QT_END_NAMESPACE
--
--}
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp
-deleted file mode 100644
-index 4dff93d12..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp
-+++ /dev/null
-@@ -1,265 +0,0 @@
--// Copyright (C) 2020 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--//#define DEBUG_DECODER
--
--#include "qffmpegaudiodecoder_p.h"
--#include "qffmpegdecoder_p.h"
--#include "qffmpegmediaformatinfo_p.h"
--#include "qffmpegresampler_p.h"
--#include "qaudiobuffer.h"
--
--#include <qloggingcategory.h>
--
--Q_LOGGING_CATEGORY(qLcAudioDecoder, "qt.multimedia.ffmpeg.audioDecoder")
--
--#define MAX_BUFFERS_IN_QUEUE 4
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg
--{
--
--class SteppingAudioRenderer : public Renderer
--{
--public:
-- SteppingAudioRenderer(AudioDecoder *decoder, const QAudioFormat &format);
-- ~SteppingAudioRenderer()
-- {
-- }
--
-- void loop() override;
-- AudioDecoder *m_decoder;
-- QAudioFormat m_format;
-- std::unique_ptr<Resampler> resampler;
-- bool atEndEmitted = false;
--};
--
--class AudioDecoder : public Decoder
--{
-- Q_OBJECT
--public:
-- explicit AudioDecoder(QFFmpegAudioDecoder *audioDecoder) : Decoder(), audioDecoder(audioDecoder)
-- {}
--
-- void setup(const QAudioFormat &format)
-- {
-- connect(this, &AudioDecoder::newAudioBuffer, audioDecoder, &QFFmpegAudioDecoder::newAudioBuffer);
-- connect(this, &AudioDecoder::isAtEnd, audioDecoder, &QFFmpegAudioDecoder::done);
-- m_format = format;
-- audioRenderer = new SteppingAudioRenderer(this, format);
-- audioRenderer->start();
-- auto *stream = demuxer->addStream(avStreamIndex(QPlatformMediaPlayer::AudioStream));
-- audioRenderer->setStream(stream);
-- }
--
-- void nextBuffer()
-- {
-- audioRenderer->setPaused(false);
-- }
--
--Q_SIGNALS:
-- void newAudioBuffer(const QAudioBuffer &b);
-- void isAtEnd();
--
--private:
-- QFFmpegAudioDecoder *audioDecoder = nullptr;
-- QAudioFormat m_format;
--};
--
--SteppingAudioRenderer::SteppingAudioRenderer(AudioDecoder *decoder, const QAudioFormat &format)
-- : Renderer(QPlatformMediaPlayer::AudioStream)
-- , m_decoder(decoder)
-- , m_format(format)
--{
--}
--
--
--void SteppingAudioRenderer::loop()
--{
-- if (!streamDecoder) {
-- qCDebug(qLcAudioDecoder) << "no stream";
-- timeOut = -1; // Avoid CPU load before play()
-- return;
-- }
--
-- Frame frame = streamDecoder->takeFrame();
-- if (!frame.isValid()) {
-- if (streamDecoder->isAtEnd()) {
-- if (!atEndEmitted)
-- emit m_decoder->isAtEnd();
-- atEndEmitted = true;
-- paused = true;
-- doneStep();
-- timeOut = -1;
-- return;
-- }
-- timeOut = 10;
-- streamDecoder->wake();
-- return;
-- }
-- qCDebug(qLcAudioDecoder) << " got frame";
--
-- doneStep();
--
-- if (!resampler)
-- resampler.reset(new Resampler(frame.codec(), m_format));
--
-- auto buffer = resampler->resample(frame.avFrame());
-- paused = true;
-- timeOut = -1;
--
-- emit m_decoder->newAudioBuffer(buffer);
--}
--
--}
--
--
--QFFmpegAudioDecoder::QFFmpegAudioDecoder(QAudioDecoder *parent)
-- : QPlatformAudioDecoder(parent)
--{
--}
--
--QFFmpegAudioDecoder::~QFFmpegAudioDecoder()
--{
-- delete decoder;
--}
--
--QUrl QFFmpegAudioDecoder::source() const
--{
-- return m_url;
--}
--
--void QFFmpegAudioDecoder::setSource(const QUrl &fileName)
--{
-- stop();
-- m_sourceDevice = nullptr;
--
-- if (m_url == fileName)
-- return;
-- m_url = fileName;
--
-- emit sourceChanged();
--}
--
--QIODevice *QFFmpegAudioDecoder::sourceDevice() const
--{
-- return m_sourceDevice;
--}
--
--void QFFmpegAudioDecoder::setSourceDevice(QIODevice *device)
--{
-- stop();
-- m_url.clear();
-- bool isSignalRequired = (m_sourceDevice != device);
-- m_sourceDevice = device;
-- if (isSignalRequired)
-- sourceChanged();
--}
--
--void QFFmpegAudioDecoder::start()
--{
-- qCDebug(qLcAudioDecoder) << "start";
-- delete decoder;
-- decoder = new QFFmpeg::AudioDecoder(this);
-- decoder->setMedia(m_url, m_sourceDevice);
-- if (error() != QAudioDecoder::NoError)
-- goto error;
--
-- decoder->setup(m_audioFormat);
-- if (error() != QAudioDecoder::NoError)
-- goto error;
-- decoder->play();
-- if (error() != QAudioDecoder::NoError)
-- goto error;
-- decoder->nextBuffer();
-- if (error() != QAudioDecoder::NoError)
-- goto error;
--
-- connect(decoder, &QFFmpeg::Decoder::errorOccured, this, &QFFmpegAudioDecoder::errorSignal);
-- durationChanged(duration());
-- setIsDecoding(true);
-- return;
--
-- error:
-- durationChanged(-1);
-- positionChanged(-1);
-- delete decoder;
-- decoder = nullptr;
--
--}
--
--void QFFmpegAudioDecoder::stop()
--{
-- qCDebug(qLcAudioDecoder) << ">>>>> stop";
-- if (decoder) {
-- decoder->stop();
-- done();
-- }
--}
--
--QAudioFormat QFFmpegAudioDecoder::audioFormat() const
--{
-- return m_audioFormat;
--}
--
--void QFFmpegAudioDecoder::setAudioFormat(const QAudioFormat &format)
--{
-- if (m_audioFormat == format)
-- return;
--
-- m_audioFormat = format;
-- formatChanged(m_audioFormat);
--}
--
--QAudioBuffer QFFmpegAudioDecoder::read()
--{
-- auto b = m_audioBuffer;
-- qCDebug(qLcAudioDecoder) << "reading buffer" << b.startTime();
-- m_audioBuffer = {};
-- bufferAvailableChanged(false);
-- if (decoder)
-- decoder->nextBuffer();
-- return b;
--}
--
--void QFFmpegAudioDecoder::newAudioBuffer(const QAudioBuffer &b)
--{
-- qCDebug(qLcAudioDecoder) << "new audio buffer" << b.startTime();
-- m_audioBuffer = b;
-- const qint64 pos = b.startTime();
-- positionChanged(pos/1000);
-- bufferAvailableChanged(b.isValid());
-- bufferReady();
--}
--
--void QFFmpegAudioDecoder::done()
--{
-- qCDebug(qLcAudioDecoder) << ">>>>> DONE!";
-- finished();
--}
--
--void QFFmpegAudioDecoder::errorSignal(int err, const QString &errorString)
--{
-- // unfortunately the error enums for QAudioDecoder and QMediaPlayer aren't identical.
-- // Map them.
-- switch (QMediaPlayer::Error(err)) {
-- case QMediaPlayer::NoError:
-- error(QAudioDecoder::NoError, errorString);
-- break;
-- case QMediaPlayer::ResourceError:
-- error(QAudioDecoder::ResourceError, errorString);
-- break;
-- case QMediaPlayer::FormatError:
-- error(QAudioDecoder::FormatError, errorString);
-- break;
-- case QMediaPlayer::NetworkError:
-- // fall through, Network error doesn't exist in QAudioDecoder
-- case QMediaPlayer::AccessDeniedError:
-- error(QAudioDecoder::AccessDeniedError, errorString);
-- break;
-- }
--}
--
--QT_END_NAMESPACE
--
--#include "qffmpegaudiodecoder.moc"
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder_p.h b/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder_p.h
-deleted file mode 100644
-index 0196f88a7..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder_p.h
-+++ /dev/null
-@@ -1,69 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#ifndef QFFMPEGAUDIODECODER_H
--#define QFFMPEGAUDIODECODER_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "private/qplatformaudiodecoder_p.h"
--#include <qffmpeg_p.h>
--
--#include <qmutex.h>
--#include <qurl.h>
--#include <qqueue.h>
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg {
--class AudioDecoder;
--}
--
--class QFFmpegAudioDecoder : public QPlatformAudioDecoder
--{
-- Q_OBJECT
--
--public:
-- QFFmpegAudioDecoder(QAudioDecoder *parent);
-- virtual ~QFFmpegAudioDecoder();
--
-- QUrl source() const override;
-- void setSource(const QUrl &fileName) override;
--
-- QIODevice *sourceDevice() const override;
-- void setSourceDevice(QIODevice *device) override;
--
-- void start() override;
-- void stop() override;
--
-- QAudioFormat audioFormat() const override;
-- void setAudioFormat(const QAudioFormat &format) override;
--
-- QAudioBuffer read() override;
--
--public Q_SLOTS:
-- void newAudioBuffer(const QAudioBuffer &b);
-- void done();
-- void errorSignal(int err, const QString &errorString);
--
--private:
-- QUrl m_url;
-- QIODevice *m_sourceDevice = nullptr;
-- QFFmpeg::AudioDecoder *decoder = nullptr;
-- QAudioFormat m_audioFormat;
--
-- QAudioBuffer m_audioBuffer;
--};
--
--QT_END_NAMESPACE
--
--#endif // QFFMPEGAUDIODECODER_H
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput.cpp b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput.cpp
-deleted file mode 100644
-index 5c769d524..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput.cpp
-+++ /dev/null
-@@ -1,189 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#include "qffmpegaudioinput_p.h"
--#include <qiodevice.h>
--#include <qaudiosource.h>
--#include <qaudiobuffer.h>
--#include <qdebug.h>
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg {
--
--class AudioSourceIO : public QIODevice
--{
-- Q_OBJECT
-- public:
-- AudioSourceIO(QFFmpegAudioInput *audioInput)
-- : QIODevice()
-- , input(audioInput)
-- {
-- m_muted = input->muted;
-- m_volume = input->volume;
-- updateVolume();
-- open(QIODevice::WriteOnly);
-- }
-- ~AudioSourceIO()
-- {
-- delete m_src;
-- }
--
-- void setDevice(const QAudioDevice &device)
-- {
-- QMutexLocker locker(&mutex);
-- if (m_device == device)
-- return;
-- m_device = device;
-- QMetaObject::invokeMethod(this, "updateSource");
-- }
-- void setFrameSize(int s)
-- {
-- QMutexLocker locker(&mutex);
-- frameSize = s;
-- bufferSize = m_format.bytesForFrames(frameSize);
-- }
-- void setRunning(bool r) {
-- QMutexLocker locker(&mutex);
-- if (m_running == r)
-- return;
-- m_running = r;
-- QMetaObject::invokeMethod(this, "updateRunning");
-- }
--
-- void setVolume(float vol) {
-- QMutexLocker locker(&mutex);
-- m_volume = vol;
-- QMetaObject::invokeMethod(this, "updateVolume");
-- }
-- void setMuted(bool muted) {
-- QMutexLocker locker(&mutex);
-- m_muted = muted;
-- QMetaObject::invokeMethod(this, "updateVolume");
-- }
--
--
--protected:
-- qint64 readData(char *, qint64) override
-- {
-- return 0;
-- }
-- qint64 writeData(const char *data, qint64 len) override
-- {
-- int l = len;
-- while (len > 0) {
-- int toAppend = qMin(len, bufferSize - pcm.size());
-- pcm.append(data, toAppend);
-- data += toAppend;
-- len -= toAppend;
-- if (pcm.size() == bufferSize)
-- sendBuffer();
-- }
--
-- return l;
-- }
--
--private Q_SLOTS:
-- void updateSource() {
-- QMutexLocker locker(&mutex);
-- m_format = m_device.preferredFormat();
-- if (m_src) {
-- delete m_src;
-- pcm.clear();
-- }
-- m_src = new QAudioSource(m_device, m_format);
-- updateVolume();
-- if (m_running)
-- m_src->start(this);
-- }
-- void updateVolume()
-- {
-- if (m_src)
-- m_src->setVolume(m_muted ? 0. : m_volume);
-- }
-- void updateRunning()
-- {
-- QMutexLocker locker(&mutex);
-- if (m_running) {
-- if (!m_src)
-- updateSource();
-- m_src->start(this);
-- } else {
-- m_src->stop();
-- }
-- }
--
--private:
--
-- void sendBuffer()
-- {
-- QAudioFormat fmt = m_src->format();
-- qint64 time = fmt.durationForBytes(processed);
-- QAudioBuffer buffer(pcm, fmt, time);
-- emit input->newAudioBuffer(buffer);
-- processed += bufferSize;
-- pcm.clear();
-- }
--
-- QMutex mutex;
-- QAudioDevice m_device;
-- float m_volume = 1.;
-- bool m_muted = false;
-- bool m_running = false;
--
-- QFFmpegAudioInput *input = nullptr;
-- QAudioSource *m_src = nullptr;
-- QAudioFormat m_format;
-- int frameSize = 0;
-- int bufferSize = 0;
-- qint64 processed = 0;
-- QByteArray pcm;
--};
--
--}
--
--QFFmpegAudioInput::QFFmpegAudioInput(QAudioInput *qq)
-- : QPlatformAudioInput(qq)
--{
-- qRegisterMetaType<QAudioBuffer>();
--
-- inputThread = new QThread;
-- audioIO = new QFFmpeg::AudioSourceIO(this);
-- audioIO->moveToThread(inputThread);
-- inputThread->start();
--}
--
--QFFmpegAudioInput::~QFFmpegAudioInput()
--{
-- inputThread->exit();
-- inputThread->wait();
-- delete inputThread;
--}
--
--void QFFmpegAudioInput::setAudioDevice(const QAudioDevice &device)
--{
-- audioIO->setDevice(device);
--}
--
--void QFFmpegAudioInput::setMuted(bool muted)
--{
-- audioIO->setMuted(muted);
--}
--
--void QFFmpegAudioInput::setVolume(float volume)
--{
-- audioIO->setVolume(volume);
--}
--
--void QFFmpegAudioInput::setFrameSize(int s)
--{
-- audioIO->setFrameSize(s);
--}
--
--void QFFmpegAudioInput::setRunning(bool b)
--{
-- audioIO->setRunning(b);
--}
--
--QT_END_NAMESPACE
--
--#include "qffmpegaudioinput.moc"
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
-deleted file mode 100644
-index f81549748..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
-+++ /dev/null
-@@ -1,54 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGAUDIOINPUT_H
--#define QFFMPEGAUDIOINPUT_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include <private/qplatformaudioinput_p.h>
--#include "qffmpegthread_p.h"
--#include <qaudioinput.h>
--
--QT_BEGIN_NAMESPACE
--
--class QAudioSource;
--class QAudioBuffer;
--namespace QFFmpeg {
--class AudioSourceIO;
--}
--
--class QFFmpegAudioInput : public QObject, public QPlatformAudioInput
--{
-- Q_OBJECT
--public:
-- QFFmpegAudioInput(QAudioInput *qq);
-- ~QFFmpegAudioInput();
--
-- void setAudioDevice(const QAudioDevice &/*device*/) override;
-- void setMuted(bool /*muted*/) override;
-- void setVolume(float /*volume*/) override;
--
-- void setFrameSize(int s);
-- void setRunning(bool b);
--
--Q_SIGNALS:
-- void newAudioBuffer(const QAudioBuffer &buffer);
--
--private:
-- QThread *inputThread = nullptr;
-- QFFmpeg::AudioSourceIO *audioIO = nullptr;
--};
--
--QT_END_NAMESPACE
--
--
--#endif // QPLATFORMAUDIOINPUT_H
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegclock.cpp b/src/plugins/multimedia/ffmpeg/qffmpegclock.cpp
-deleted file mode 100644
-index a3ca04d93..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegclock.cpp
-+++ /dev/null
-@@ -1,193 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#include <qffmpegclock_p.h>
--#include <qloggingcategory.h>
--
--Q_LOGGING_CATEGORY(qLcClock, "qt.multimedia.ffmpeg.clock")
--
--QT_BEGIN_NAMESPACE
--
--static bool compareClocks(const QFFmpeg::Clock *a, const QFFmpeg::Clock *b)
--{
-- if (!b)
-- return false;
--
-- if (!a)
-- return true;
--
-- return a->type() < b->type();
--}
--
--QFFmpeg::Clock::Clock(ClockController *controller)
-- : controller(controller)
--{
-- Q_ASSERT(controller);
-- controller->addClock(this);
--}
--
--QFFmpeg::Clock::~Clock()
--{
-- if (controller)
-- controller->removeClock(this);
--}
--
--qint64 QFFmpeg::Clock::currentTime() const
--{
-- return controller ? controller->currentTime() : 0;
--}
--
--void QFFmpeg::Clock::syncTo(qint64 time)
--{
-- qCDebug(qLcClock) << "syncTo" << time << isMaster();
--}
--
--void QFFmpeg::Clock::setPlaybackRate(float rate, qint64 currentTime)
--{
-- qCDebug(qLcClock) << "Clock::setPlaybackRate" << rate;
-- Q_UNUSED(rate)
-- Q_UNUSED(currentTime)
--}
--
--void QFFmpeg::Clock::setPaused(bool paused)
--{
-- qCDebug(qLcClock) << "Clock::setPaused" << paused;
-- Q_UNUSED(paused)
--}
--
--qint64 QFFmpeg::Clock::timeUpdated(qint64 currentTime)
--{
-- if (controller)
-- return controller->timeUpdated(this, currentTime);
-- return currentTime;
--}
--
--qint64 QFFmpeg::Clock::usecsTo(qint64 currentTime, qint64 displayTime)
--{
-- if (!controller || controller->m_isPaused)
-- return -1;
-- const qint64 t = qRound64((displayTime - currentTime) / playbackRate());
-- return t < 0 ? 0 : t;
--}
--
--QFFmpeg::Clock::Type QFFmpeg::Clock::type() const
--{
-- return SystemClock;
--}
--
--QFFmpeg::ClockController::~ClockController()
--{
-- for (auto *p : qAsConst(m_clocks))
-- p->setController(nullptr);
--}
--
--qint64 QFFmpeg::ClockController::timeUpdated(Clock *clock, qint64 time)
--{
-- QMutexLocker l(&m_mutex);
-- if (!isMaster(clock)) {
-- // If the clock isn't the master clock, simply return the current time
-- // so we can make adjustments as needed
-- return currentTimeNoLock();
-- }
--
-- // if the clock is the master, adjust our base timing
-- m_baseTime = time;
-- m_elapsedTimer.restart();
--
-- return time;
--}
--
--void QFFmpeg::ClockController::addClock(Clock *clock)
--{
-- qCDebug(qLcClock) << "addClock" << clock;
-- Q_ASSERT(clock != nullptr);
--
-- if (m_clocks.contains(clock))
-- return;
--
-- m_clocks.append(clock);
-- m_master = std::max(m_master.loadAcquire(), clock, compareClocks);
--
-- clock->syncTo(currentTime());
-- clock->setPaused(m_isPaused);
--}
--
--void QFFmpeg::ClockController::removeClock(Clock *clock)
--{
-- qCDebug(qLcClock) << "removeClock" << clock;
-- m_clocks.removeAll(clock);
-- if (m_master == clock) {
-- // find a new master clock
-- m_master = m_clocks.empty()
-- ? nullptr
-- : *std::max_element(m_clocks.begin(), m_clocks.end(), compareClocks);
-- }
--}
--
--bool QFFmpeg::ClockController::isMaster(const Clock *clock) const
--{
-- return m_master.loadAcquire() == clock;
--}
--
--qint64 QFFmpeg::ClockController::currentTimeNoLock() const
--{
-- return m_isPaused ? m_baseTime : m_baseTime + m_elapsedTimer.elapsed() / m_playbackRate;
--}
--
--qint64 QFFmpeg::ClockController::currentTime() const
--{
-- QMutexLocker l(&m_mutex);
-- return currentTimeNoLock();
--}
--
--void QFFmpeg::ClockController::syncTo(qint64 usecs)
--{
-- {
-- QMutexLocker l(&m_mutex);
-- qCDebug(qLcClock) << "syncTo" << usecs;
-- m_baseTime = usecs;
-- m_seekTime = usecs;
-- m_elapsedTimer.restart();
-- }
--
-- for (auto *p : qAsConst(m_clocks))
-- p->syncTo(usecs);
--}
--
--void QFFmpeg::ClockController::setPlaybackRate(float rate)
--{
-- qint64 baseTime = 0;
-- {
-- qCDebug(qLcClock) << "setPlaybackRate" << rate;
--
-- QMutexLocker l(&m_mutex);
--
-- m_baseTime = baseTime = currentTimeNoLock();
-- m_elapsedTimer.restart();
-- m_playbackRate = rate;
-- }
--
-- for (auto *p : qAsConst(m_clocks))
-- p->setPlaybackRate(rate, baseTime);
--}
--
--void QFFmpeg::ClockController::setPaused(bool paused)
--{
-- {
-- QMutexLocker l(&m_mutex);
-- if (m_isPaused == paused)
-- return;
-- qCDebug(qLcClock) << "setPaused" << paused;
-- m_isPaused = paused;
-- if (m_isPaused) {
-- m_baseTime = currentTimeNoLock();
-- m_seekTime = m_baseTime;
-- } else {
-- m_elapsedTimer.restart();
-- }
-- }
--
-- for (auto *p : qAsConst(m_clocks))
-- p->setPaused(paused);
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegclock_p.h b/src/plugins/multimedia/ffmpeg/qffmpegclock_p.h
-deleted file mode 100644
-index f8cc0bdf3..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegclock_p.h
-+++ /dev/null
-@@ -1,113 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGCLOCK_P_H
--#define QFFMPEGCLOCK_P_H
--
--#include "qffmpeg_p.h"
--
--#include <qatomic.h>
--#include <qelapsedtimer.h>
--#include <qlist.h>
--#include <qmutex.h>
--#include <qmetaobject.h>
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg {
--
--class ClockController;
--
--// Clock runs in displayTime, ie. if playbackRate is not 1, it runs faster or slower
--// than a regular clock. All methods take displayTime
--// Exception: usecsTo() will return the real time that should pass until we will
--// hit the requested display time
--class Clock
--{
-- ClockController *controller = nullptr;
--public:
-- enum Type {
-- SystemClock,
-- AudioClock
-- };
-- Clock(ClockController *controller);
-- virtual ~Clock();
-- virtual Type type() const;
--
-- float playbackRate() const;
-- bool isMaster() const;
--
-- // all times in usecs
-- qint64 currentTime() const;
-- qint64 seekTime() const;
-- qint64 usecsTo(qint64 currentTime, qint64 displayTime);
--
--protected:
-- virtual void syncTo(qint64 usecs);
-- virtual void setPlaybackRate(float rate, qint64 currentTime);
-- virtual void setPaused(bool paused);
--
-- qint64 timeUpdated(qint64 currentTime);
--
--private:
-- friend class ClockController;
-- void setController(ClockController *c)
-- {
-- controller = c;
-- }
--};
--
--class ClockController
--{
-- mutable QMutex m_mutex;
-- QList<Clock *> m_clocks;
-- QAtomicPointer<Clock> m_master = nullptr;
--
-- QElapsedTimer m_elapsedTimer;
-- qint64 m_baseTime = 0;
-- qint64 m_seekTime = 0;
-- float m_playbackRate = 1.;
-- bool m_isPaused = true;
--
-- qint64 currentTimeNoLock() const;
--
-- friend class Clock;
-- qint64 timeUpdated(Clock *clock, qint64 time);
-- void addClock(Clock *provider);
-- void removeClock(Clock *provider);
-- bool isMaster(const Clock *clock) const;
--
--public:
-- ClockController() = default;
-- ~ClockController();
--
--
-- qint64 currentTime() const;
--
-- void syncTo(qint64 usecs);
--
-- void setPlaybackRate(float s);
-- float playbackRate() const { return m_playbackRate; }
-- void setPaused(bool paused);
--};
--
--inline float Clock::playbackRate() const
--{
-- return controller ? controller->m_playbackRate : 1.;
--}
--
--inline bool Clock::isMaster() const
--{
-- return controller && controller->isMaster(this);
--}
--
--inline qint64 Clock::seekTime() const
--{
-- return controller ? controller->m_seekTime : 0;
--}
--
--
--}
--
--QT_END_NAMESPACE
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegdecoder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegdecoder.cpp
-deleted file mode 100644
-index 89a95f5a3..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegdecoder.cpp
-+++ /dev/null
-@@ -1,1272 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpegdecoder_p.h"
--#include "qffmpegmediaformatinfo_p.h"
--#include "qffmpeg_p.h"
--#include "qffmpegmediametadata_p.h"
--#include "qffmpegvideobuffer_p.h"
--#include "private/qplatformaudiooutput_p.h"
--#include "qffmpeghwaccel_p.h"
--#include "qffmpegvideosink_p.h"
--#include "qvideosink.h"
--#include "qaudiosink.h"
--#include "qaudiooutput.h"
--#include "qffmpegaudiodecoder_p.h"
--#include "qffmpegresampler_p.h"
--
--#include <qlocale.h>
--#include <qtimer.h>
--
--#include <qloggingcategory.h>
--
--extern "C" {
--#include <libavutil/hwcontext.h>
--}
--
--QT_BEGIN_NAMESPACE
--
--using namespace QFFmpeg;
--
--Q_LOGGING_CATEGORY(qLcDemuxer, "qt.multimedia.ffmpeg.demuxer")
--Q_LOGGING_CATEGORY(qLcDecoder, "qt.multimedia.ffmpeg.decoder")
--Q_LOGGING_CATEGORY(qLcVideoRenderer, "qt.multimedia.ffmpeg.videoRenderer")
--Q_LOGGING_CATEGORY(qLcAudioRenderer, "qt.multimedia.ffmpeg.audioRenderer")
--
--Codec::Data::Data(UniqueAVCodecContext &&context, AVStream *stream, std::unique_ptr<QFFmpeg::HWAccel> &&hwAccel)
-- : context(std::move(context))
-- , stream(stream)
-- , hwAccel(std::move(hwAccel))
--{
--}
--
--Codec::Data::~Data()
--{
-- avcodec_close(context.get());
--}
--
--QMaybe<Codec> Codec::create(AVStream *stream)
--{
-- if (!stream)
-- return { "Invalid stream" };
--
-- const AVCodec *decoder =
-- QFFmpeg::HWAccel::hardwareDecoderForCodecId(stream->codecpar->codec_id);
-- if (!decoder)
-- return { "Failed to find a valid FFmpeg decoder" };
--
-- //avcodec_free_context
-- UniqueAVCodecContext context(avcodec_alloc_context3(decoder));
-- if (!context)
-- return { "Failed to allocate a FFmpeg codec context" };
--
-- if (context->codec_type != AVMEDIA_TYPE_AUDIO &&
-- context->codec_type != AVMEDIA_TYPE_VIDEO &&
-- context->codec_type != AVMEDIA_TYPE_SUBTITLE) {
-- return { "Unknown codec type" };
-- }
--
-- int ret = avcodec_parameters_to_context(context.get(), stream->codecpar);
-- if (ret < 0)
-- return { "Failed to set FFmpeg codec parameters" };
--
-- std::unique_ptr<QFFmpeg::HWAccel> hwAccel;
-- if (decoder->type == AVMEDIA_TYPE_VIDEO) {
-- hwAccel = QFFmpeg::HWAccel::create(decoder);
-- if (hwAccel)
-- context->hw_device_ctx = av_buffer_ref(hwAccel->hwDeviceContextAsBuffer());
-- }
-- // ### This still gives errors about wrong HW formats (as we accept all of them)
-- // But it would be good to get so we can filter out pixel format we don't support natively
-- context->get_format = QFFmpeg::getFormat;
--
-- /* Init the decoder, with reference counting and threading */
-- AVDictionary *opts = nullptr;
-- av_dict_set(&opts, "refcounted_frames", "1", 0);
-- av_dict_set(&opts, "threads", "auto", 0);
-- ret = avcodec_open2(context.get(), decoder, &opts);
-- if (ret < 0)
-- return "Failed to open FFmpeg codec context " + err2str(ret);
--
-- return Codec(new Data(std::move(context), stream, std::move(hwAccel)));
--}
--
--
--Demuxer::Demuxer(Decoder *decoder, AVFormatContext *context)
-- : Thread()
-- , decoder(decoder)
-- , context(context)
--{
-- QString objectName = QLatin1String("Demuxer");
-- setObjectName(objectName);
--
-- streamDecoders.resize(context->nb_streams);
--}
--
--Demuxer::~Demuxer()
--{
-- if (context) {
-- if (context->pb) {
-- avio_context_free(&context->pb);
-- context->pb = nullptr;
-- }
-- avformat_free_context(context);
-- }
--}
--
--StreamDecoder *Demuxer::addStream(int streamIndex)
--{
-- if (streamIndex < 0 || streamIndex >= (int)context->nb_streams)
-- return nullptr;
--
-- AVStream *avStream = context->streams[streamIndex];
-- if (!avStream)
-- return nullptr;
--
-- QMutexLocker locker(&mutex);
-- auto maybeCodec = Codec::create(avStream);
-- if (!maybeCodec) {
-- decoder->errorOccured(QMediaPlayer::FormatError, "Cannot open codec; " + maybeCodec.error());
-- return nullptr;
-- }
-- auto *stream = new StreamDecoder(this, maybeCodec.value());
-- Q_ASSERT(!streamDecoders.at(streamIndex));
-- streamDecoders[streamIndex] = stream;
-- stream->start();
-- updateEnabledStreams();
-- return stream;
--}
--
--void Demuxer::removeStream(int streamIndex)
--{
-- if (streamIndex < 0)
-- return;
-- QMutexLocker locker(&mutex);
-- Q_ASSERT(streamIndex < (int)context->nb_streams);
-- Q_ASSERT(streamDecoders.at(streamIndex) != nullptr);
-- streamDecoders[streamIndex] = nullptr;
-- updateEnabledStreams();
--}
--
--void Demuxer::stopDecoding()
--{
-- qCDebug(qLcDemuxer) << "StopDecoding";
-- QMutexLocker locker(&mutex);
-- sendFinalPacketToStreams();
--}
--int Demuxer::seek(qint64 pos)
--{
-- QMutexLocker locker(&mutex);
-- for (StreamDecoder *d : qAsConst(streamDecoders)) {
-- if (d)
-- d->mutex.lock();
-- }
-- for (StreamDecoder *d : qAsConst(streamDecoders)) {
-- if (d)
-- d->flush();
-- }
-- for (StreamDecoder *d : qAsConst(streamDecoders)) {
-- if (d)
-- d->mutex.unlock();
-- }
-- qint64 seekPos = pos*AV_TIME_BASE/1000000; // usecs to AV_TIME_BASE
-- av_seek_frame(context, -1, seekPos, AVSEEK_FLAG_BACKWARD);
-- last_pts = -1;
-- loop();
-- qCDebug(qLcDemuxer) << "Demuxer::seek" << pos << last_pts;
-- return last_pts;
--}
--
--void Demuxer::updateEnabledStreams()
--{
-- if (isStopped())
-- return;
-- for (uint i = 0; i < context->nb_streams; ++i) {
-- AVDiscard discard = AVDISCARD_DEFAULT;
-- if (!streamDecoders.at(i))
-- discard = AVDISCARD_ALL;
-- context->streams[i]->discard = discard;
-- }
--}
--
--void Demuxer::sendFinalPacketToStreams()
--{
-- if (m_isStopped.loadAcquire())
-- return;
-- for (auto *streamDecoder : qAsConst(streamDecoders)) {
-- qCDebug(qLcDemuxer) << "Demuxer: sending last packet to stream" << streamDecoder;
-- if (!streamDecoder)
-- continue;
-- streamDecoder->addPacket(nullptr);
-- }
-- m_isStopped.storeRelease(true);
--}
--
--void Demuxer::init()
--{
-- qCDebug(qLcDemuxer) << "Demuxer started";
--}
--
--void Demuxer::cleanup()
--{
-- qCDebug(qLcDemuxer) << "Demuxer::cleanup";
--#ifndef QT_NO_DEBUG
-- for (auto *streamDecoder : qAsConst(streamDecoders)) {
-- Q_ASSERT(!streamDecoder);
-- }
--#endif
-- avformat_close_input(&context);
-- Thread::cleanup();
--}
--
--bool Demuxer::shouldWait() const
--{
-- if (m_isStopped)
-- return true;
--// qCDebug(qLcDemuxer) << "XXXX Demuxer::shouldWait" << this << data->seek_pos.loadRelaxed();
-- // require a minimum of 200ms of data
-- qint64 queueSize = 0;
-- bool buffersFull = true;
-- for (auto *d : streamDecoders) {
-- if (!d)
-- continue;
-- if (d->queuedDuration() < 200)
-- buffersFull = false;
-- queueSize += d->queuedPacketSize();
-- }
--// qCDebug(qLcDemuxer) << " queue size" << queueSize << MaxQueueSize;
-- if (queueSize > MaxQueueSize)
-- return true;
--// qCDebug(qLcDemuxer) << " waiting!";
-- return buffersFull;
--
--}
--
--void Demuxer::loop()
--{
-- AVPacket *packet = av_packet_alloc();
-- if (av_read_frame(context, packet) < 0) {
-- sendFinalPacketToStreams();
-- av_packet_free(&packet);
-- return;
-- }
--
-- if (last_pts < 0 && packet->pts != AV_NOPTS_VALUE) {
-- auto *stream = context->streams[packet->stream_index];
-- auto pts = timeStampMs(packet->pts, stream->time_base);
-- if (pts)
-- last_pts = *pts;
-- }
--
-- auto *streamDecoder = streamDecoders.at(packet->stream_index);
-- if (!streamDecoder) {
-- av_packet_free(&packet);
-- return;
-- }
-- streamDecoder->addPacket(packet);
--}
--
--
--StreamDecoder::StreamDecoder(Demuxer *demuxer, const Codec &codec)
-- : Thread()
-- , demuxer(demuxer)
-- , codec(codec)
--{
-- QString objectName;
-- switch (codec.context()->codec_type) {
-- case AVMEDIA_TYPE_AUDIO:
-- objectName = QLatin1String("AudioDecoderThread");
-- // Queue size: 3 frames for video/subtitle, 9 for audio
-- frameQueue.maxSize = 9;
-- break;
-- case AVMEDIA_TYPE_VIDEO:
-- objectName = QLatin1String("VideoDecoderThread");
-- break;
-- case AVMEDIA_TYPE_SUBTITLE:
-- objectName = QLatin1String("SubtitleDecoderThread");
-- break;
-- default:
-- Q_UNREACHABLE();
-- }
-- setObjectName(objectName);
--}
--
--void StreamDecoder::addPacket(AVPacket *packet)
--{
-- {
-- QMutexLocker locker(&packetQueue.mutex);
--// qCDebug(qLcDecoder) << "enqueuing packet of type" << type()
--// << "size" << packet->size
--// << "stream index" << packet->stream_index
--// << "pts" << codec.toMs(packet->pts)
--// << "duration" << codec.toMs(packet->duration);
-- packetQueue.queue.enqueue(Packet(packet));
-- if (packet) {
-- packetQueue.size += packet->size;
-- packetQueue.duration += codec.toMs(packet->duration);
-- }
-- eos.storeRelease(false);
-- }
-- wake();
--}
--
--void StreamDecoder::flush()
--{
-- qCDebug(qLcDecoder) << ">>>> flushing stream decoder" << type();
-- avcodec_flush_buffers(codec.context());
-- {
-- QMutexLocker locker(&packetQueue.mutex);
-- packetQueue.queue.clear();
-- packetQueue.size = 0;
-- packetQueue.duration = 0;
-- }
-- {
-- QMutexLocker locker(&frameQueue.mutex);
-- frameQueue.queue.clear();
-- }
-- qCDebug(qLcDecoder) << ">>>> done flushing stream decoder" << type();
--}
--
--void StreamDecoder::setRenderer(Renderer *r)
--{
-- QMutexLocker locker(&mutex);
-- m_renderer = r;
-- if (m_renderer)
-- m_renderer->wake();
--}
--
--void StreamDecoder::killHelper()
--{
-- m_renderer = nullptr;
-- demuxer->removeStream(codec.streamIndex());
--}
--
--Packet StreamDecoder::peekPacket()
--{
-- QMutexLocker locker(&packetQueue.mutex);
-- if (packetQueue.queue.isEmpty()) {
-- if (demuxer)
-- demuxer->wake();
-- return {};
-- }
-- auto packet = packetQueue.queue.first();
--
-- if (demuxer)
-- demuxer->wake();
-- return packet;
--}
--
--Packet StreamDecoder::takePacket()
--{
-- QMutexLocker locker(&packetQueue.mutex);
-- if (packetQueue.queue.isEmpty()) {
-- if (demuxer)
-- demuxer->wake();
-- return {};
-- }
-- auto packet = packetQueue.queue.dequeue();
-- if (packet.avPacket()) {
-- packetQueue.size -= packet.avPacket()->size;
-- packetQueue.duration -= codec.toMs(packet.avPacket()->duration);
-- }
--// qCDebug(qLcDecoder) << "<<<< dequeuing packet of type" << type()
--// << "size" << packet.avPacket()->size
--// << "stream index" << packet.avPacket()->stream_index
--// << "pts" << codec.toMs(packet.avPacket()->pts)
--// << "duration" << codec.toMs(packet.avPacket()->duration)
--// << "ts" << decoder->clockController.currentTime();
-- if (demuxer)
-- demuxer->wake();
-- return packet;
--}
--
--void StreamDecoder::addFrame(const Frame &f)
--{
-- Q_ASSERT(f.isValid());
-- QMutexLocker locker(&frameQueue.mutex);
-- frameQueue.queue.append(std::move(f));
-- if (m_renderer)
-- m_renderer->wake();
--}
--
--Frame StreamDecoder::takeFrame()
--{
-- QMutexLocker locker(&frameQueue.mutex);
-- // wake up the decoder so it delivers more frames
-- if (frameQueue.queue.isEmpty()) {
-- wake();
-- return {};
-- }
-- auto f = frameQueue.queue.dequeue();
-- wake();
-- return f;
--}
--
--void StreamDecoder::init()
--{
-- qCDebug(qLcDecoder) << "Starting decoder";
--}
--
--bool StreamDecoder::shouldWait() const
--{
-- if (eos.loadAcquire() || (hasNoPackets() && decoderHasNoFrames) || hasEnoughFrames())
-- return true;
-- return false;
--}
--
--void StreamDecoder::loop()
--{
-- if (codec.context()->codec->type == AVMEDIA_TYPE_SUBTITLE)
-- decodeSubtitle();
-- else
-- decode();
--}
--
--void StreamDecoder::decode()
--{
-- Q_ASSERT(codec.context());
--
-- AVFrame *frame = av_frame_alloc();
--// if (type() == 0)
--// qDebug() << "receiving frame";
-- int res = avcodec_receive_frame(codec.context(), frame);
--
-- if (res >= 0) {
-- qint64 pts;
-- if (frame->pts != AV_NOPTS_VALUE)
-- pts = codec.toUs(frame->pts);
-- else
-- pts = codec.toUs(frame->best_effort_timestamp);
-- addFrame(Frame{frame, codec, pts});
-- } else if (res == AVERROR(EOF) || res == AVERROR_EOF) {
-- eos.storeRelease(true);
-- av_frame_free(&frame);
-- timeOut = -1;
-- return;
-- } else if (res != AVERROR(EAGAIN)) {
-- qWarning() << "error in decoder" << res << err2str(res);
-- av_frame_free(&frame);
-- return;
-- } else {
-- // EAGAIN
-- decoderHasNoFrames = true;
-- av_frame_free(&frame);
-- }
--
-- Packet packet = peekPacket();
-- if (!packet.isValid()) {
-- timeOut = -1;
-- return;
-- }
--
-- res = avcodec_send_packet(codec.context(), packet.avPacket());
-- if (res != AVERROR(EAGAIN)) {
-- takePacket();
-- }
-- decoderHasNoFrames = false;
--}
--
--void StreamDecoder::decodeSubtitle()
--{
-- // qCDebug(qLcDecoder) << " decoding subtitle" << "has delay:" << (codec->codec->capabilities & AV_CODEC_CAP_DELAY);
-- AVSubtitle subtitle;
-- memset(&subtitle, 0, sizeof(subtitle));
-- int gotSubtitle = 0;
-- Packet packet = takePacket();
-- if (!packet.isValid())
-- return;
--
-- int res = avcodec_decode_subtitle2(codec.context(), &subtitle, &gotSubtitle, packet.avPacket());
-- // qCDebug(qLcDecoder) << " subtitle got:" << res << gotSubtitle << subtitle.format << Qt::hex << (quint64)subtitle.pts;
-- if (res >= 0 && gotSubtitle) {
-- // apparently the timestamps in the AVSubtitle structure are not always filled in
-- // if they are missing, use the packets pts and duration values instead
-- qint64 start, end;
-- if (subtitle.pts == AV_NOPTS_VALUE) {
-- start = codec.toUs(packet.avPacket()->pts);
-- end = start + codec.toUs(packet.avPacket()->duration);
-- } else {
-- auto pts = timeStampUs(subtitle.pts, AVRational{1, AV_TIME_BASE});
-- start = *pts + qint64(subtitle.start_display_time)*1000;
-- end = *pts + qint64(subtitle.end_display_time)*1000;
-- }
-- // qCDebug(qLcDecoder) << " got subtitle (" << start << "--" << end << "):";
-- QString text;
-- for (uint i = 0; i < subtitle.num_rects; ++i) {
-- const auto *r = subtitle.rects[i];
-- // qCDebug(qLcDecoder) << " subtitletext:" << r->text << "/" << r->ass;
-- if (i)
-- text += QLatin1Char('\n');
-- if (r->text)
-- text += QString::fromUtf8(r->text);
-- else {
-- const char *ass = r->ass;
-- int nCommas = 0;
-- while (*ass) {
-- if (nCommas == 9)
-- break;
-- if (*ass == ',')
-- ++nCommas;
-- ++ass;
-- }
-- text += QString::fromUtf8(ass);
-- }
-- }
-- text.replace(QLatin1String("\\N"), QLatin1String("\n"));
-- text.replace(QLatin1String("\\n"), QLatin1String("\n"));
-- text.replace(QLatin1String("\r\n"), QLatin1String("\n"));
-- if (text.endsWith(QLatin1Char('\n')))
-- text.chop(1);
--
--// qCDebug(qLcDecoder) << " >>> subtitle adding" << text << start << end;
-- Frame sub{text, start, end - start};
-- addFrame(sub);
-- }
--}
--
--QPlatformMediaPlayer::TrackType StreamDecoder::type() const
--{
-- switch (codec.stream()->codecpar->codec_type) {
-- case AVMEDIA_TYPE_AUDIO:
-- return QPlatformMediaPlayer::AudioStream;
-- case AVMEDIA_TYPE_VIDEO:
-- return QPlatformMediaPlayer::VideoStream;
-- case AVMEDIA_TYPE_SUBTITLE:
-- return QPlatformMediaPlayer::SubtitleStream;
-- default:
-- return QPlatformMediaPlayer::NTrackTypes;
-- }
--}
--
--Renderer::Renderer(QPlatformMediaPlayer::TrackType type)
-- : Thread()
-- , type(type)
--{
-- QString objectName;
-- if (type == QPlatformMediaPlayer::AudioStream)
-- objectName = QLatin1String("AudioRenderThread");
-- else
-- objectName = QLatin1String("VideoRenderThread");
-- setObjectName(objectName);
--}
--
--void Renderer::setStream(StreamDecoder *stream)
--{
-- QMutexLocker locker(&mutex);
-- if (streamDecoder == stream)
-- return;
-- if (streamDecoder)
-- streamDecoder->kill();
-- streamDecoder = stream;
-- if (streamDecoder)
-- streamDecoder->setRenderer(this);
-- streamChanged();
-- wake();
--}
--
--void Renderer::killHelper()
--{
-- if (streamDecoder)
-- streamDecoder->kill();
-- streamDecoder = nullptr;
--}
--
--bool Renderer::shouldWait() const
--{
-- if (!streamDecoder)
-- return true;
-- if (!paused)
-- return false;
-- if (step)
-- return false;
-- return true;
--}
--
--
--void ClockedRenderer::setPaused(bool paused)
--{
-- Clock::setPaused(paused);
-- Renderer::setPaused(paused);
--}
--
--VideoRenderer::VideoRenderer(Decoder *decoder, QVideoSink *sink)
-- : ClockedRenderer(decoder, QPlatformMediaPlayer::VideoStream)
-- , sink(sink)
--{}
--
--void VideoRenderer::killHelper()
--{
-- if (subtitleStreamDecoder)
-- subtitleStreamDecoder->kill();
-- subtitleStreamDecoder = nullptr;
-- if (streamDecoder)
-- streamDecoder->kill();
-- streamDecoder = nullptr;
--}
--
--void VideoRenderer::setSubtitleStream(StreamDecoder *stream)
--{
-- QMutexLocker locker(&mutex);
-- qCDebug(qLcVideoRenderer) << "setting subtitle stream to" << stream;
-- if (stream == subtitleStreamDecoder)
-- return;
-- if (subtitleStreamDecoder)
-- subtitleStreamDecoder->kill();
-- subtitleStreamDecoder = stream;
-- if (subtitleStreamDecoder)
-- subtitleStreamDecoder->setRenderer(this);
-- sink->setSubtitleText({});
-- wake();
--}
--
--void VideoRenderer::init()
--{
-- qCDebug(qLcVideoRenderer) << "starting video renderer";
-- ClockedRenderer::init();
--}
--
--void VideoRenderer::loop()
--{
-- if (!streamDecoder) {
-- timeOut = -1; // Avoid 100% CPU load before play()
-- return;
-- }
--
-- Frame frame = streamDecoder->takeFrame();
-- if (!frame.isValid()) {
-- if (streamDecoder->isAtEnd()) {
-- timeOut = -1;
-- eos.storeRelease(true);
-- mutex.unlock();
-- emit atEnd();
-- mutex.lock();
-- return;
-- }
-- timeOut = 1;
--// qDebug() << "no valid frame" << timer.elapsed();
-- return;
-- }
-- eos.storeRelease(false);
--// qCDebug(qLcVideoRenderer) << "received video frame" << frame.pts();
-- if (frame.pts() < seekTime()) {
-- qCDebug(qLcVideoRenderer) << " discarding" << frame.pts() << seekTime();
-- return;
-- }
--
-- AVStream *stream = frame.codec()->stream();
-- qint64 startTime = frame.pts();
-- qint64 duration = (1000000*stream->avg_frame_rate.den + (stream->avg_frame_rate.num>>1))
-- /stream->avg_frame_rate.num;
--
-- if (sink) {
-- qint64 startTime = frame.pts();
--// qDebug() << "RHI:" << accel.isNull() << accel.rhi() << sink->rhi();
--
-- // in practice this only happens with mediacodec
-- if (frame.codec()->hwAccel() && !frame.avFrame()->hw_frames_ctx) {
-- HWAccel *hwaccel = frame.codec()->hwAccel();
-- AVFrame *avframe = frame.avFrame();
-- if (!hwaccel->hwFramesContext())
-- hwaccel->createFramesContext(AVPixelFormat(avframe->format),
-- { avframe->width, avframe->height });
--
-- avframe->hw_frames_ctx = av_buffer_ref(hwaccel->hwFramesContextAsBuffer());
-- }
--
-- QFFmpegVideoBuffer *buffer = new QFFmpegVideoBuffer(frame.takeAVFrame());
-- QVideoFrameFormat format(buffer->size(), buffer->pixelFormat());
-- format.setColorSpace(buffer->colorSpace());
-- format.setColorTransfer(buffer->colorTransfer());
-- format.setColorRange(buffer->colorRange());
-- format.setMaxLuminance(buffer->maxNits());
-- QVideoFrame videoFrame(buffer, format);
-- videoFrame.setStartTime(startTime);
-- videoFrame.setEndTime(startTime + duration);
--// qDebug() << "Creating video frame" << startTime << (startTime + duration) << subtitleStreamDecoder;
--
-- // add in subtitles
-- const Frame *currentSubtitle = nullptr;
-- if (subtitleStreamDecoder)
-- currentSubtitle = subtitleStreamDecoder->lockAndPeekFrame();
--
-- if (currentSubtitle && currentSubtitle->isValid()) {
--// qDebug() << "frame: subtitle" << currentSubtitle->text() << currentSubtitle->pts() << currentSubtitle->duration();
-- qCDebug(qLcVideoRenderer) << " " << currentSubtitle->pts() << currentSubtitle->duration() << currentSubtitle->text();
-- if (currentSubtitle->pts() <= startTime && currentSubtitle->end() > startTime) {
--// qCDebug(qLcVideoRenderer) << " setting text";
-- sink->setSubtitleText(currentSubtitle->text());
-- }
-- if (currentSubtitle->end() < startTime) {
--// qCDebug(qLcVideoRenderer) << " removing subtitle item";
-- sink->setSubtitleText({});
-- subtitleStreamDecoder->removePeekedFrame();
-- }
-- } else {
-- sink->setSubtitleText({});
-- }
-- if (subtitleStreamDecoder)
-- subtitleStreamDecoder->unlockAndReleaseFrame();
--
--// qCDebug(qLcVideoRenderer) << " sending a video frame" << startTime << duration << decoder->baseTimer.elapsed();
-- sink->setVideoFrame(videoFrame);
-- doneStep();
-- }
-- const Frame *nextFrame = streamDecoder->lockAndPeekFrame();
-- qint64 nextFrameTime = 0;
-- if (nextFrame)
-- nextFrameTime = nextFrame->pts();
-- else
-- nextFrameTime = startTime + duration;
-- streamDecoder->unlockAndReleaseFrame();
-- qint64 mtime = timeUpdated(startTime);
-- timeOut = usecsTo(mtime, nextFrameTime) / 1000;
-- // qCDebug(qLcVideoRenderer) << " next video frame in" << startTime << nextFrameTime <<
-- // currentTime() << timeOut;
--}
--
--AudioRenderer::AudioRenderer(Decoder *decoder, QAudioOutput *output)
-- : ClockedRenderer(decoder, QPlatformMediaPlayer::AudioStream)
-- , output(output)
--{
-- connect(output, &QAudioOutput::deviceChanged, this, &AudioRenderer::updateAudio);
-- connect(output, &QAudioOutput::volumeChanged, this, &AudioRenderer::setSoundVolume);
--}
--
--void AudioRenderer::syncTo(qint64 usecs)
--{
-- QMutexLocker locker(&mutex);
--
-- Clock::syncTo(usecs);
-- audioBaseTime = usecs;
-- processedBase = processedUSecs;
--}
--
--void AudioRenderer::setPlaybackRate(float rate, qint64 currentTime)
--{
-- QMutexLocker locker(&mutex);
--
-- audioBaseTime = currentTime;
-- processedBase = processedUSecs;
-- Clock::setPlaybackRate(rate, currentTime);
-- deviceChanged = true;
--}
--
--void AudioRenderer::updateOutput(const Codec *codec)
--{
-- qCDebug(qLcAudioRenderer) << ">>>>>> updateOutput" << currentTime() << seekTime() << processedUSecs << isMaster();
-- freeOutput();
-- qCDebug(qLcAudioRenderer) << " " << currentTime() << seekTime() << processedUSecs;
--
-- AVStream *audioStream = codec->stream();
--
-- auto dev = output->device();
-- format = QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(audioStream->codecpar);
-- format.setChannelConfig(dev.channelConfiguration());
--
-- initResempler(codec);
--
-- audioSink = new QAudioSink(dev, format);
-- audioSink->setVolume(output->volume());
--
-- audioSink->setBufferSize(format.bytesForDuration(100000));
-- audioDevice = audioSink->start();
--
-- latencyUSecs = format.durationForBytes(audioSink->bufferSize()); // ### ideally get full latency
-- qCDebug(qLcAudioRenderer) << " -> have an audio sink" << audioDevice;
--}
--
--void AudioRenderer::initResempler(const Codec *codec)
--{
-- // init resampler. It's ok to always do this, as the resampler will be a no-op if
-- // formats agree.
-- AVSampleFormat requiredFormat = QFFmpegMediaFormatInfo::avSampleFormat(format.sampleFormat());
--
--#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
-- qCDebug(qLcAudioRenderer) << "init resampler" << requiredFormat
-- << codec->stream()->codecpar->channels;
--#else
-- qCDebug(qLcAudioRenderer) << "init resampler" << requiredFormat
-- << codec->stream()->codecpar->ch_layout.nb_channels;
--#endif
--
-- auto resamplerFormat = format;
-- resamplerFormat.setSampleRate(qRound(format.sampleRate() / playbackRate()));
-- resampler.reset(new Resampler(codec, resamplerFormat));
--}
--
--void AudioRenderer::freeOutput()
--{
-- if (audioSink) {
-- audioSink->reset();
-- delete audioSink;
-- audioSink = nullptr;
-- audioDevice = nullptr;
-- }
--
-- bufferedData = {};
-- bufferWritten = 0;
--
-- audioBaseTime = currentTime();
-- processedBase = 0;
-- processedUSecs = writtenUSecs = 0;
--}
--
--void AudioRenderer::init()
--{
-- qCDebug(qLcAudioRenderer) << "Starting audio renderer";
-- ClockedRenderer::init();
--}
--
--void AudioRenderer::cleanup()
--{
-- freeOutput();
--}
--
--void AudioRenderer::loop()
--{
-- if (!streamDecoder) {
-- timeOut = -1; // Avoid 100% CPU load before play()
-- return;
-- }
--
-- if (deviceChanged)
-- freeOutput();
-- deviceChanged = false;
-- doneStep();
--
-- qint64 bytesWritten = 0;
-- if (bufferedData.isValid()) {
-- bytesWritten = audioDevice->write(bufferedData.constData<char>() + bufferWritten, bufferedData.byteCount() - bufferWritten);
-- bufferWritten += bytesWritten;
-- if (bufferWritten == bufferedData.byteCount()) {
-- bufferedData = {};
-- bufferWritten = 0;
-- }
-- processedUSecs = audioSink->processedUSecs();
-- } else {
-- Frame frame = streamDecoder->takeFrame();
-- if (!frame.isValid()) {
-- if (streamDecoder->isAtEnd()) {
-- if (audioSink)
-- processedUSecs = audioSink->processedUSecs();
-- timeOut = -1;
-- eos.storeRelease(true);
-- mutex.unlock();
-- emit atEnd();
-- mutex.lock();
-- return;
-- }
-- timeOut = 1;
-- return;
-- }
-- eos.storeRelease(false);
-- if (!audioSink)
-- updateOutput(frame.codec());
--
-- qint64 startTime = frame.pts();
-- if (startTime < seekTime())
-- return;
--
-- if (!paused) {
-- auto buffer = resampler->resample(frame.avFrame());
--
-- if (output->isMuted())
-- // This is somewhat inefficient, but it'll work
-- memset(buffer.data<char>(), 0, buffer.byteCount());
--
-- bytesWritten = audioDevice->write(buffer.constData<char>(), buffer.byteCount());
-- if (bytesWritten < buffer.byteCount()) {
-- bufferedData = buffer;
-- bufferWritten = bytesWritten;
-- }
--
-- processedUSecs = audioSink->processedUSecs();
-- }
-- }
--
-- qint64 duration = format.durationForBytes(bytesWritten);
-- writtenUSecs += duration;
--
-- timeOut = (writtenUSecs - processedUSecs - latencyUSecs)/1000;
-- if (timeOut < 0)
-- // Don't use a zero timeout if the sink didn't want any more data, rather wait for 10ms.
-- timeOut = bytesWritten > 0 ? 0 : 10;
--
--// if (!bufferedData.isEmpty())
--// qDebug() << ">>>>>>>>>>>>>>>>>>>>>>>> could not write all data" << (bufferedData.size() - bufferWritten);
--// qDebug() << "Audio: processed" << processedUSecs << "written" << writtenUSecs
--// << "delta" << (writtenUSecs - processedUSecs) << "timeOut" << timeOut;
--// qCDebug(qLcAudioRenderer) << " updating time to" << currentTimeNoLock();
-- timeUpdated(audioBaseTime + qRound((processedUSecs - processedBase) * playbackRate()));
--}
--
--void AudioRenderer::streamChanged()
--{
-- // mutex is already locked
-- deviceChanged = true;
--}
--
--void AudioRenderer::updateAudio()
--{
-- QMutexLocker locker(&mutex);
-- deviceChanged = true;
--}
--
--void AudioRenderer::setSoundVolume(float volume)
--{
-- QMutexLocker locker(&mutex);
-- if (audioSink)
-- audioSink->setVolume(volume);
--}
--
--Decoder::Decoder()
--{
--}
--
--Decoder::~Decoder()
--{
-- pause();
-- if (videoRenderer)
-- videoRenderer->kill();
-- if (audioRenderer)
-- audioRenderer->kill();
-- if (demuxer)
-- demuxer->kill();
--}
--
--static int read(void *opaque, uint8_t *buf, int buf_size)
--{
-- auto *dev = static_cast<QIODevice *>(opaque);
-- if (dev->atEnd())
-- return AVERROR_EOF;
-- return dev->read(reinterpret_cast<char *>(buf), buf_size);
--}
--
--static int64_t seek(void *opaque, int64_t offset, int whence)
--{
-- QIODevice *dev = static_cast<QIODevice *>(opaque);
--
-- if (dev->isSequential())
-- return AVERROR(EINVAL);
--
-- if (whence & AVSEEK_SIZE)
-- return dev->size();
--
-- whence &= ~AVSEEK_FORCE;
--
-- if (whence == SEEK_CUR)
-- offset += dev->pos();
-- else if (whence == SEEK_END)
-- offset += dev->size();
--
-- if (!dev->seek(offset))
-- return AVERROR(EINVAL);
-- return offset;
--}
--
--static void insertVideoData(QMediaMetaData &metaData, AVStream *stream)
--{
-- Q_ASSERT(stream);
-- auto *codecPar = stream->codecpar;
-- metaData.insert(QMediaMetaData::VideoBitRate, (int)codecPar->bit_rate);
-- metaData.insert(QMediaMetaData::VideoCodec, QVariant::fromValue(QFFmpegMediaFormatInfo::videoCodecForAVCodecId(codecPar->codec_id)));
-- metaData.insert(QMediaMetaData::Resolution, QSize(codecPar->width, codecPar->height));
-- auto fr = toFloat(stream->avg_frame_rate);
-- if (fr)
-- metaData.insert(QMediaMetaData::VideoFrameRate, *fr);
--};
--
--static void insertAudioData(QMediaMetaData &metaData, AVStream *stream)
--{
-- Q_ASSERT(stream);
-- auto *codecPar = stream->codecpar;
-- metaData.insert(QMediaMetaData::AudioBitRate, (int)codecPar->bit_rate);
-- metaData.insert(QMediaMetaData::AudioCodec,
-- QVariant::fromValue(QFFmpegMediaFormatInfo::audioCodecForAVCodecId(codecPar->codec_id)));
--};
--
--static int getDefaultStreamIndex(QList<Decoder::StreamInfo> &streams)
--{
-- if (streams.empty())
-- return -1;
-- for (qsizetype i = 0; i < streams.size(); i++)
-- if (streams[i].isDefault)
-- return i;
-- return 0;
--}
--
--static void readStreams(const AVFormatContext *context,
-- QList<Decoder::StreamInfo> (&map)[QPlatformMediaPlayer::NTrackTypes], qint64 &maxDuration)
--{
-- maxDuration = 0;
--
-- for (unsigned int i = 0; i < context->nb_streams; ++i) {
-- auto *stream = context->streams[i];
-- if (!stream)
-- continue;
--
-- auto *codecPar = stream->codecpar;
-- if (!codecPar)
-- continue;
--
-- QMediaMetaData metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
-- bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;
-- QPlatformMediaPlayer::TrackType type = QPlatformMediaPlayer::VideoStream;
--
-- switch (codecPar->codec_type) {
-- case AVMEDIA_TYPE_UNKNOWN:
-- case AVMEDIA_TYPE_DATA: ///< Opaque data information usually continuous
-- case AVMEDIA_TYPE_ATTACHMENT: ///< Opaque data information usually sparse
-- case AVMEDIA_TYPE_NB:
-- continue;
-- case AVMEDIA_TYPE_VIDEO:
-- type = QPlatformMediaPlayer::VideoStream;
-- insertVideoData(metaData, stream);
-- break;
-- case AVMEDIA_TYPE_AUDIO:
-- type = QPlatformMediaPlayer::AudioStream;
-- insertAudioData(metaData, stream);
-- break;
-- case AVMEDIA_TYPE_SUBTITLE:
-- type = QPlatformMediaPlayer::SubtitleStream;
-- break;
-- }
--
-- map[type].append({ (int)i, isDefault, metaData });
-- auto maybeDuration = mul(1'000'000ll * stream->duration, stream->time_base);
-- if (maybeDuration)
-- maxDuration = qMax(maxDuration, *maybeDuration);
-- }
--}
--
--void Decoder::setMedia(const QUrl &media, QIODevice *stream)
--{
-- QByteArray url = media.toEncoded(QUrl::PreferLocalFile);
--
-- AVFormatContext *context = nullptr;
-- if (stream) {
-- if (!stream->isOpen()) {
-- if (!stream->open(QIODevice::ReadOnly)) {
-- emit errorOccured(QMediaPlayer::ResourceError,
-- QLatin1String("Could not open source device."));
-- return;
-- }
-- }
-- if (!stream->isSequential())
-- stream->seek(0);
-- context = avformat_alloc_context();
-- constexpr int bufferSize = 32768;
-- unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
-- context->pb = avio_alloc_context(buffer, bufferSize, false, stream, ::read, nullptr, ::seek);
-- }
--
-- int ret = avformat_open_input(&context, url.constData(), nullptr, nullptr);
-- if (ret < 0) {
-- auto code = QMediaPlayer::ResourceError;
-- if (ret == AVERROR(EACCES))
-- code = QMediaPlayer::AccessDeniedError;
-- else if (ret == AVERROR(EINVAL))
-- code = QMediaPlayer::FormatError;
--
-- emit errorOccured(code, QMediaPlayer::tr("Could not open file"));
-- return;
-- }
--
-- ret = avformat_find_stream_info(context, nullptr);
-- if (ret < 0) {
-- emit errorOccured(QMediaPlayer::FormatError,
-- QMediaPlayer::tr("Could not find stream information for media file"));
-- avformat_free_context(context);
-- return;
-- }
--
--#ifndef QT_NO_DEBUG
-- av_dump_format(context, 0, url.constData(), 0);
--#endif
--
-- readStreams(context, m_streamMap, m_duration);
--
-- m_requestedStreams[QPlatformMediaPlayer::VideoStream] = getDefaultStreamIndex(m_streamMap[QPlatformMediaPlayer::VideoStream]);
-- m_requestedStreams[QPlatformMediaPlayer::AudioStream] = getDefaultStreamIndex(m_streamMap[QPlatformMediaPlayer::AudioStream]);
-- m_requestedStreams[QPlatformMediaPlayer::SubtitleStream] = -1;
--
-- m_metaData = QFFmpegMetaData::fromAVMetaData(context->metadata);
-- m_metaData.insert(QMediaMetaData::FileFormat,
-- QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(context->iformat)));
--
-- if (m_requestedStreams[QPlatformMediaPlayer::VideoStream] >= 0)
-- insertVideoData(m_metaData, context->streams[avStreamIndex(QPlatformMediaPlayer::VideoStream)]);
--
-- if (m_requestedStreams[QPlatformMediaPlayer::AudioStream] >= 0)
-- insertAudioData(m_metaData, context->streams[avStreamIndex(QPlatformMediaPlayer::AudioStream)]);
--
-- m_isSeekable = !(context->ctx_flags & AVFMTCTX_UNSEEKABLE);
--
-- demuxer = new Demuxer(this, context);
-- demuxer->start();
--}
--
--int Decoder::activeTrack(QPlatformMediaPlayer::TrackType type)
--{
-- return m_requestedStreams[type];
--}
--
--void Decoder::setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber)
--{
-- if (streamNumber < 0 || streamNumber >= m_streamMap[type].size())
-- streamNumber = -1;
-- if (m_requestedStreams[type] == streamNumber)
-- return;
-- m_requestedStreams[type] = streamNumber;
-- changeAVTrack(type);
--}
--
--void Decoder::setState(QMediaPlayer::PlaybackState state)
--{
-- if (m_state == state)
-- return;
--
-- switch (state) {
-- case QMediaPlayer::StoppedState:
-- qCDebug(qLcDecoder) << "Decoder::stop";
-- setPaused(true);
-- if (demuxer)
-- demuxer->stopDecoding();
-- seek(0);
-- if (videoSink)
-- videoSink->setVideoFrame({});
-- qCDebug(qLcDecoder) << "Decoder::stop: done";
-- break;
-- case QMediaPlayer::PausedState:
-- qCDebug(qLcDecoder) << "Decoder::pause";
-- setPaused(true);
-- if (demuxer) {
-- demuxer->startDecoding();
-- demuxer->wake();
-- if (m_state == QMediaPlayer::StoppedState)
-- triggerStep();
-- }
-- break;
-- case QMediaPlayer::PlayingState:
-- qCDebug(qLcDecoder) << "Decoder::play";
-- setPaused(false);
-- if (demuxer)
-- demuxer->startDecoding();
-- break;
-- }
-- m_state = state;
--}
--
--void Decoder::setPaused(bool b)
--{
-- clockController.setPaused(b);
--}
--
--void Decoder::triggerStep()
--{
-- if (audioRenderer)
-- audioRenderer->singleStep();
-- if (videoRenderer)
-- videoRenderer->singleStep();
--}
--
--void Decoder::setVideoSink(QVideoSink *sink)
--{
-- qCDebug(qLcDecoder) << "setVideoSink" << sink;
-- if (sink == videoSink)
-- return;
-- videoSink = sink;
-- if (!videoSink || m_requestedStreams[QPlatformMediaPlayer::VideoStream] < 0) {
-- if (videoRenderer) {
-- videoRenderer->kill();
-- videoRenderer = nullptr;
-- }
-- } else if (!videoRenderer) {
-- videoRenderer = new VideoRenderer(this, sink);
-- connect(videoRenderer, &Renderer::atEnd, this, &Decoder::streamAtEnd);
-- videoRenderer->start();
-- StreamDecoder *stream = demuxer->addStream(avStreamIndex(QPlatformMediaPlayer::VideoStream));
-- videoRenderer->setStream(stream);
-- stream = demuxer->addStream(avStreamIndex(QPlatformMediaPlayer::SubtitleStream));
-- videoRenderer->setSubtitleStream(stream);
-- }
--}
--
--void Decoder::setAudioSink(QPlatformAudioOutput *output)
--{
-- if (audioOutput == output)
-- return;
--
-- qCDebug(qLcDecoder) << "setAudioSink" << audioOutput;
-- audioOutput = output;
-- if (!output || m_requestedStreams[QPlatformMediaPlayer::AudioStream] < 0) {
-- if (audioRenderer) {
-- audioRenderer->kill();
-- audioRenderer = nullptr;
-- }
-- } else if (!audioRenderer) {
-- audioRenderer = new AudioRenderer(this, output->q);
-- connect(audioRenderer, &Renderer::atEnd, this, &Decoder::streamAtEnd);
-- audioRenderer->start();
-- auto *stream = demuxer->addStream(avStreamIndex(QPlatformMediaPlayer::AudioStream));
-- audioRenderer->setStream(stream);
-- }
--}
--
--void Decoder::changeAVTrack(QPlatformMediaPlayer::TrackType type)
--{
-- if (!demuxer)
-- return;
-- qCDebug(qLcDecoder) << " applying to renderer.";
-- if (m_state == QMediaPlayer::PlayingState)
-- setPaused(true);
-- auto *streamDecoder = demuxer->addStream(avStreamIndex(type));
-- switch (type) {
-- case QPlatformMediaPlayer::AudioStream:
-- audioRenderer->setStream(streamDecoder);
-- break;
-- case QPlatformMediaPlayer::VideoStream:
-- videoRenderer->setStream(streamDecoder);
-- break;
-- case QPlatformMediaPlayer::SubtitleStream:
-- videoRenderer->setSubtitleStream(streamDecoder);
-- break;
-- default:
-- Q_UNREACHABLE();
-- }
-- demuxer->seek(clockController.currentTime());
-- if (m_state == QMediaPlayer::PlayingState)
-- setPaused(false);
-- else
-- triggerStep();
--}
--
--void Decoder::seek(qint64 pos)
--{
-- if (!demuxer)
-- return;
-- pos = qBound(0, pos, m_duration);
-- demuxer->seek(pos);
-- clockController.syncTo(pos);
-- demuxer->wake();
-- if (m_state == QMediaPlayer::PausedState)
-- triggerStep();
--}
--
--void Decoder::setPlaybackRate(float rate)
--{
-- clockController.setPlaybackRate(rate);
--}
--
--void Decoder::streamAtEnd()
--{
-- if (audioRenderer && !audioRenderer->isAtEnd())
-- return;
-- if (videoRenderer && !videoRenderer->isAtEnd())
-- return;
-- pause();
--
-- emit endOfStream();
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegdecoder_p.h b/src/plugins/multimedia/ffmpeg/qffmpegdecoder_p.h
-deleted file mode 100644
-index 2ee61a68e..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegdecoder_p.h
-+++ /dev/null
-@@ -1,501 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGDECODER_P_H
--#define QFFMPEGDECODER_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qffmpegthread_p.h"
--#include "qffmpeg_p.h"
--#include "qffmpegmediaplayer_p.h"
--#include "qffmpeghwaccel_p.h"
--#include "qffmpegclock_p.h"
--#include "qaudiobuffer.h"
--#include "qffmpegresampler_p.h"
--
--#include <private/qmultimediautils_p.h>
--#include <qshareddata.h>
--#include <qtimer.h>
--#include <qqueue.h>
--
--QT_BEGIN_NAMESPACE
--
--class QAudioSink;
--class QFFmpegAudioDecoder;
--class QFFmpegMediaPlayer;
--
--namespace QFFmpeg
--{
--
--class Resampler;
--
--// queue up max 16M of encoded data, that should always be enough
--// (it's around 2 secs of 4K HDR video, longer for almost all other formats)
--enum { MaxQueueSize = 16*1024*1024 };
--
--struct Packet
--{
-- struct Data {
-- Data(AVPacket *p)
-- : packet(p)
-- {}
-- ~Data() {
-- if (packet)
-- av_packet_free(&packet);
-- }
-- QAtomicInt ref;
-- AVPacket *packet = nullptr;
-- };
-- Packet() = default;
-- Packet(AVPacket *p)
-- : d(new Data(p))
-- {}
--
-- bool isValid() const { return !!d; }
-- AVPacket *avPacket() const { return d->packet; }
--private:
-- QExplicitlySharedDataPointer<Data> d;
--};
--
--struct Codec
--{
-- struct AVCodecFreeContext { void operator()(AVCodecContext *ctx) { avcodec_free_context(&ctx); } };
-- using UniqueAVCodecContext = std::unique_ptr<AVCodecContext, AVCodecFreeContext>;
-- struct Data {
-- Data(UniqueAVCodecContext &&context, AVStream *stream, std::unique_ptr<QFFmpeg::HWAccel> &&hwAccel);
-- ~Data();
-- QAtomicInt ref;
-- UniqueAVCodecContext context;
-- AVStream *stream = nullptr;
-- std::unique_ptr<QFFmpeg::HWAccel> hwAccel;
-- };
--
-- static QMaybe<Codec> create(AVStream *);
--
-- AVCodecContext *context() const { return d->context.get(); }
-- AVStream *stream() const { return d->stream; }
-- uint streamIndex() const { return d->stream->index; }
-- HWAccel *hwAccel() const { return d->hwAccel.get(); }
-- qint64 toMs(qint64 ts) const { return timeStampMs(ts, d->stream->time_base).value_or(0); }
-- qint64 toUs(qint64 ts) const { return timeStampUs(ts, d->stream->time_base).value_or(0); }
--
--private:
-- Codec(Data *data) : d(data) {}
-- QExplicitlySharedDataPointer<Data> d;
--};
--
--
--struct Frame
--{
-- struct Data {
-- Data(AVFrame *f, const Codec &codec, qint64 pts)
-- : codec(codec)
-- , frame(f)
-- , pts(pts)
-- {}
-- Data(const QString &text, qint64 pts, qint64 duration)
-- : text(text), pts(pts), duration(duration)
-- {}
-- ~Data() {
-- if (frame)
-- av_frame_free(&frame);
-- }
-- QAtomicInt ref;
-- std::optional<Codec> codec;
-- AVFrame *frame = nullptr;
-- QString text;
-- qint64 pts = -1;
-- qint64 duration = -1;
-- };
-- Frame() = default;
-- Frame(AVFrame *f, const Codec &codec, qint64 pts)
-- : d(new Data(f, codec, pts))
-- {}
-- Frame(const QString &text, qint64 pts, qint64 duration)
-- : d(new Data(text, pts, duration))
-- {}
-- bool isValid() const { return !!d; }
--
-- AVFrame *avFrame() const { return d->frame; }
-- AVFrame *takeAVFrame() const {
-- AVFrame *f = d->frame;
-- d->frame = nullptr;
-- return f;
-- }
-- const Codec *codec() const { return d->codec ? &d->codec.value() : nullptr; }
-- qint64 pts() const { return d->pts; }
-- qint64 duration() const { return d->duration; }
-- qint64 end() const { return d->pts + d->duration; }
-- QString text() const { return d->text; }
--private:
-- QExplicitlySharedDataPointer<Data> d;
--};
--
--class Demuxer;
--class StreamDecoder;
--class Renderer;
--class AudioRenderer;
--class VideoRenderer;
--
--class Decoder : public QObject
--{
-- Q_OBJECT
--public:
-- Decoder();
-- ~Decoder();
--
-- void setMedia(const QUrl &media, QIODevice *stream);
--
-- void init();
-- void setState(QMediaPlayer::PlaybackState state);
-- void play() {
-- setState(QMediaPlayer::PlayingState);
-- }
-- void pause() {
-- setState(QMediaPlayer::PausedState);
-- }
-- void stop() {
-- setState(QMediaPlayer::StoppedState);
-- }
--
-- void triggerStep();
--
-- void setVideoSink(QVideoSink *sink);
-- void setAudioSink(QPlatformAudioOutput *output);
--
-- void changeAVTrack(QPlatformMediaPlayer::TrackType type);
--
-- void seek(qint64 pos);
-- void setPlaybackRate(float rate);
--
-- int activeTrack(QPlatformMediaPlayer::TrackType type);
-- void setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber);
--
-- bool isSeekable() const
-- {
-- return m_isSeekable;
-- }
--
--signals:
-- void endOfStream();
-- void errorOccured(int error, const QString &errorString);
-- void positionChanged(qint64 time);
--
--public slots:
-- void streamAtEnd();
--
--public:
-- struct StreamInfo {
-- int avStreamIndex = -1;
-- bool isDefault = false;
-- QMediaMetaData metaData;
-- };
--
-- // Accessed from multiple threads, but API is threadsafe
-- ClockController clockController;
--
--private:
-- void setPaused(bool b);
--
--protected:
-- friend QFFmpegMediaPlayer;
--
-- QMediaPlayer::PlaybackState m_state = QMediaPlayer::StoppedState;
-- bool m_isSeekable = false;
--
-- Demuxer *demuxer = nullptr;
-- QVideoSink *videoSink = nullptr;
-- Renderer *videoRenderer = nullptr;
-- QPlatformAudioOutput *audioOutput = nullptr;
-- Renderer *audioRenderer = nullptr;
--
-- QList<StreamInfo> m_streamMap[QPlatformMediaPlayer::NTrackTypes];
-- int m_requestedStreams[QPlatformMediaPlayer::NTrackTypes] = { -1, -1, -1 };
-- qint64 m_duration = 0;
-- QMediaMetaData m_metaData;
--
-- int avStreamIndex(QPlatformMediaPlayer::TrackType type)
-- {
-- int i = m_requestedStreams[type];
-- return i < 0 || i >= m_streamMap[type].size() ? -1 : m_streamMap[type][i].avStreamIndex;
-- }
--};
--
--class Demuxer : public Thread
--{
-- Q_OBJECT
--public:
-- Demuxer(Decoder *decoder, AVFormatContext *context);
-- ~Demuxer();
--
-- StreamDecoder *addStream(int streamIndex);
-- void removeStream(int streamIndex);
--
-- bool isStopped() const
-- {
-- return m_isStopped.loadRelaxed();
-- }
-- void startDecoding()
-- {
-- m_isStopped.storeRelaxed(false);
-- updateEnabledStreams();
-- wake();
-- }
-- void stopDecoding();
--
-- int seek(qint64 pos);
--
--private:
-- void updateEnabledStreams();
-- void sendFinalPacketToStreams();
--
-- void init() override;
-- void cleanup() override;
-- bool shouldWait() const override;
-- void loop() override;
--
-- Decoder *decoder;
-- AVFormatContext *context = nullptr;
-- QList<StreamDecoder *> streamDecoders;
--
-- QAtomicInteger<bool> m_isStopped = true;
-- qint64 last_pts = -1;
--};
--
--
--class StreamDecoder : public Thread
--{
-- Q_OBJECT
--protected:
-- Demuxer *demuxer = nullptr;
-- Renderer *m_renderer = nullptr;
--
-- struct PacketQueue {
-- mutable QMutex mutex;
-- QQueue<Packet> queue;
-- qint64 size = 0;
-- qint64 duration = 0;
-- };
-- PacketQueue packetQueue;
--
-- struct FrameQueue {
-- mutable QMutex mutex;
-- QQueue<Frame> queue;
-- int maxSize = 3;
-- };
-- FrameQueue frameQueue;
-- QAtomicInteger<bool> eos = false;
-- bool decoderHasNoFrames = false;
--
--public:
-- StreamDecoder(Demuxer *demuxer, const Codec &codec);
--
-- void addPacket(AVPacket *packet);
--
-- qint64 queuedPacketSize() const {
-- QMutexLocker locker(&packetQueue.mutex);
-- return packetQueue.size;
-- }
-- qint64 queuedDuration() const {
-- QMutexLocker locker(&packetQueue.mutex);
-- return packetQueue.duration;
-- }
--
-- const Frame *lockAndPeekFrame()
-- {
-- frameQueue.mutex.lock();
-- return frameQueue.queue.isEmpty() ? nullptr : &frameQueue.queue.first();
-- }
-- void removePeekedFrame()
-- {
-- frameQueue.queue.takeFirst();
-- wake();
-- }
-- void unlockAndReleaseFrame()
-- {
-- frameQueue.mutex.unlock();
-- }
-- Frame takeFrame();
--
-- void flush();
--
-- Codec codec;
--
-- void setRenderer(Renderer *r);
-- Renderer *renderer() const { return m_renderer; }
--
-- bool isAtEnd() const { return eos.loadAcquire(); }
--
-- void killHelper() override;
--
--private:
-- Packet takePacket();
-- Packet peekPacket();
--
-- void addFrame(const Frame &f);
--
-- bool hasEnoughFrames() const
-- {
-- QMutexLocker locker(&frameQueue.mutex);
-- return frameQueue.queue.size() >= frameQueue.maxSize;
-- }
-- bool hasNoPackets() const
-- {
-- QMutexLocker locker(&packetQueue.mutex);
-- return packetQueue.queue.isEmpty();
-- }
--
-- void init() override;
-- bool shouldWait() const override;
-- void loop() override;
--
-- void decode();
-- void decodeSubtitle();
--
-- QPlatformMediaPlayer::TrackType type() const;
--};
--
--class Renderer : public Thread
--{
-- Q_OBJECT
--protected:
-- QPlatformMediaPlayer::TrackType type;
--
-- bool step = false;
-- bool paused = true;
-- StreamDecoder *streamDecoder = nullptr;
-- QAtomicInteger<bool> eos = false;
--
--public:
-- Renderer(QPlatformMediaPlayer::TrackType type);
--
-- void setPaused(bool p) {
-- QMutexLocker locker(&mutex);
-- paused = p;
-- if (!p)
-- wake();
-- }
-- void singleStep() {
-- QMutexLocker locker(&mutex);
-- if (!paused)
-- return;
-- step = true;
-- wake();
-- }
-- void doneStep() {
-- step = false;
-- }
-- bool isAtEnd() { return !streamDecoder || eos.loadAcquire(); }
--
-- void setStream(StreamDecoder *stream);
-- virtual void setSubtitleStream(StreamDecoder *) {}
--
-- void killHelper() override;
--
-- virtual void streamChanged() {}
--
--Q_SIGNALS:
-- void atEnd();
--
--protected:
-- bool shouldWait() const override;
--
--public:
--};
--
--class ClockedRenderer : public Renderer, public Clock
--{
--public:
-- ClockedRenderer(Decoder *decoder, QPlatformMediaPlayer::TrackType type)
-- : Renderer(type)
-- , Clock(&decoder->clockController)
-- {
-- }
-- ~ClockedRenderer()
-- {
-- }
-- void setPaused(bool paused) override;
--};
--
--class VideoRenderer : public ClockedRenderer
--{
-- Q_OBJECT
--
-- StreamDecoder *subtitleStreamDecoder = nullptr;
--public:
-- VideoRenderer(Decoder *decoder, QVideoSink *sink);
--
-- void killHelper() override;
--
-- void setSubtitleStream(StreamDecoder *stream) override;
--private:
--
-- void init() override;
-- void loop() override;
--
-- QVideoSink *sink;
--};
--
--class AudioRenderer : public ClockedRenderer
--{
-- Q_OBJECT
--public:
-- AudioRenderer(Decoder *decoder, QAudioOutput *output);
-- ~AudioRenderer() = default;
--
-- // Clock interface
-- void syncTo(qint64 usecs) override;
-- void setPlaybackRate(float rate, qint64 currentTime) override;
--
--private slots:
-- void updateAudio();
-- void setSoundVolume(float volume);
--
--private:
-- void updateOutput(const Codec *codec);
-- void initResempler(const Codec *codec);
-- void freeOutput();
--
-- void init() override;
-- void cleanup() override;
-- void loop() override;
-- void streamChanged() override;
-- Type type() const override { return AudioClock; }
--
-- int outputSamples(int inputSamples) {
-- return qRound(inputSamples/playbackRate());
-- }
--
-- // Used for timing update calculations based on processed data
-- qint64 audioBaseTime = 0;
-- qint64 processedBase = 0;
-- qint64 processedUSecs = 0;
--
-- bool deviceChanged = false;
-- QAudioOutput *output = nullptr;
-- qint64 writtenUSecs = 0;
-- qint64 latencyUSecs = 0;
--
-- QAudioFormat format;
-- QAudioSink *audioSink = nullptr;
-- QIODevice *audioDevice = nullptr;
-- std::unique_ptr<Resampler> resampler;
-- QAudioBuffer bufferedData;
-- qsizetype bufferWritten = 0;
--};
--
--}
--
--QT_END_NAMESPACE
--
--#endif
--
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegencoder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegencoder.cpp
-deleted file mode 100644
-index 86e33c83c..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegencoder.cpp
-+++ /dev/null
-@@ -1,557 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#include "qffmpegencoder_p.h"
--#include "qffmpegmediaformatinfo_p.h"
--#include "qffmpegvideoframeencoder_p.h"
--#include "private/qmultimediautils_p.h"
--
--#include <qdebug.h>
--#include <qiodevice.h>
--#include <qaudiosource.h>
--#include <qaudiobuffer.h>
--#include "qffmpegaudioinput_p.h"
--#include <private/qplatformcamera_p.h>
--#include "qffmpegvideobuffer_p.h"
--#include "qffmpegmediametadata_p.h"
--#include "qffmpegencoderoptions_p.h"
--
--#include <qloggingcategory.h>
--
--extern "C" {
--#include <libavutil/pixdesc.h>
--#include <libavutil/common.h>
--}
--
--QT_BEGIN_NAMESPACE
--
--Q_LOGGING_CATEGORY(qLcFFmpegEncoder, "qt.multimedia.ffmpeg.encoder")
--
--namespace QFFmpeg
--{
--
--Encoder::Encoder(const QMediaEncoderSettings &settings, const QUrl &url)
-- : settings(settings)
--{
-- const AVOutputFormat *avFormat = QFFmpegMediaFormatInfo::outputFormatForFileFormat(settings.fileFormat());
--
-- formatContext = avformat_alloc_context();
-- formatContext->oformat = const_cast<AVOutputFormat *>(avFormat); // constness varies
--
-- QByteArray encoded = url.toEncoded();
-- formatContext->url = (char *)av_malloc(encoded.size() + 1);
-- memcpy(formatContext->url, encoded.constData(), encoded.size() + 1);
-- formatContext->pb = nullptr;
-- avio_open2(&formatContext->pb, formatContext->url, AVIO_FLAG_WRITE, nullptr, nullptr);
-- qCDebug(qLcFFmpegEncoder) << "opened" << formatContext->url;
--
-- muxer = new Muxer(this);
--}
--
--Encoder::~Encoder()
--{
--}
--
--void Encoder::addAudioInput(QFFmpegAudioInput *input)
--{
-- audioEncode = new AudioEncoder(this, input, settings);
-- connect(input, &QFFmpegAudioInput::newAudioBuffer, this, &Encoder::newAudioBuffer);
-- input->setRunning(true);
--}
--
--void Encoder::addVideoSource(QPlatformCamera *source)
--{
-- videoEncode = new VideoEncoder(this, source, settings);
-- connect(source, &QPlatformCamera::newVideoFrame, this, &Encoder::newVideoFrame);
--}
--
--void Encoder::start()
--{
-- qCDebug(qLcFFmpegEncoder) << "Encoder::start!";
--
-- formatContext->metadata = QFFmpegMetaData::toAVMetaData(metaData);
--
-- int res = avformat_write_header(formatContext, nullptr);
-- if (res < 0)
-- qWarning() << "could not write header" << res;
--
-- muxer->start();
-- if (audioEncode)
-- audioEncode->start();
-- if (videoEncode)
-- videoEncode->start();
-- isRecording = true;
--}
--
--void EncodingFinalizer::run()
--{
-- if (encoder->audioEncode)
-- encoder->audioEncode->kill();
-- if (encoder->videoEncode)
-- encoder->videoEncode->kill();
-- encoder->muxer->kill();
--
-- int res = av_write_trailer(encoder->formatContext);
-- if (res < 0)
-- qWarning() << "could not write trailer" << res;
--
-- avformat_free_context(encoder->formatContext);
-- qDebug() << " done finalizing.";
-- emit encoder->finalizationDone();
-- delete encoder;
-- deleteLater();
--}
--
--void Encoder::finalize()
--{
-- qDebug() << ">>>>>>>>>>>>>>> finalize";
--
-- isRecording = false;
-- auto *finalizer = new EncodingFinalizer(this);
-- finalizer->start();
--}
--
--void Encoder::setPaused(bool p)
--{
-- if (audioEncode)
-- audioEncode->setPaused(p);
-- if (videoEncode)
-- videoEncode->setPaused(p);
--}
--
--void Encoder::setMetaData(const QMediaMetaData &metaData)
--{
-- this->metaData = metaData;
--}
--
--void Encoder::newAudioBuffer(const QAudioBuffer &buffer)
--{
-- if (audioEncode && isRecording)
-- audioEncode->addBuffer(buffer);
--}
--
--void Encoder::newVideoFrame(const QVideoFrame &frame)
--{
-- if (videoEncode && isRecording)
-- videoEncode->addFrame(frame);
--}
--
--void Encoder::newTimeStamp(qint64 time)
--{
-- QMutexLocker locker(&timeMutex);
-- if (time > timeRecorded) {
-- timeRecorded = time;
-- emit durationChanged(time);
-- }
--}
--
--Muxer::Muxer(Encoder *encoder)
-- : encoder(encoder)
--{
-- setObjectName(QLatin1String("Muxer"));
--}
--
--void Muxer::addPacket(AVPacket *packet)
--{
--// qCDebug(qLcFFmpegEncoder) << "Muxer::addPacket" << packet->pts << packet->stream_index;
-- QMutexLocker locker(&queueMutex);
-- packetQueue.enqueue(packet);
-- wake();
--}
--
--AVPacket *Muxer::takePacket()
--{
-- QMutexLocker locker(&queueMutex);
-- if (packetQueue.isEmpty())
-- return nullptr;
--// qCDebug(qLcFFmpegEncoder) << "Muxer::takePacket" << packetQueue.first()->pts;
-- return packetQueue.dequeue();
--}
--
--void Muxer::init()
--{
--}
--
--void Muxer::cleanup()
--{
--}
--
--bool QFFmpeg::Muxer::shouldWait() const
--{
-- QMutexLocker locker(&queueMutex);
-- return packetQueue.isEmpty();
--}
--
--void Muxer::loop()
--{
-- auto *packet = takePacket();
--// qCDebug(qLcFFmpegEncoder) << "writing packet to file" << packet->pts << packet->duration << packet->stream_index;
-- av_interleaved_write_frame(encoder->formatContext, packet);
--}
--
--
--static AVSampleFormat bestMatchingSampleFormat(AVSampleFormat requested, const AVSampleFormat *available)
--{
-- if (!available)
-- return requested;
--
-- const AVSampleFormat *f = available;
-- AVSampleFormat best = *f;
--/*
-- enum {
-- First,
-- Planar,
-- Exact,
-- } score = First;
--*/
-- for (; *f != AV_SAMPLE_FMT_NONE; ++f) {
-- qCDebug(qLcFFmpegEncoder) << "format:" << *f;
-- if (*f == requested) {
-- best = *f;
--// score = Exact;
-- break;
-- }
--
-- if (av_get_planar_sample_fmt(requested) == *f) {
--// score = Planar;
-- best = *f;
-- }
-- }
-- return best;
--}
--
--AudioEncoder::AudioEncoder(Encoder *encoder, QFFmpegAudioInput *input, const QMediaEncoderSettings &settings)
-- : input(input)
--{
-- this->encoder = encoder;
--
-- setObjectName(QLatin1String("AudioEncoder"));
-- qCDebug(qLcFFmpegEncoder) << "AudioEncoder" << settings.audioCodec();
--
-- format = input->device.preferredFormat();
-- auto codecID = QFFmpegMediaFormatInfo::codecIdForAudioCodec(settings.audioCodec());
-- Q_ASSERT(avformat_query_codec(encoder->formatContext->oformat, codecID, FF_COMPLIANCE_NORMAL));
--
-- auto *avCodec = avcodec_find_encoder(codecID);
--
-- AVSampleFormat requested = QFFmpegMediaFormatInfo::avSampleFormat(format.sampleFormat());
-- AVSampleFormat bestSampleFormat = bestMatchingSampleFormat(requested, avCodec->sample_fmts);
--
-- stream = avformat_new_stream(encoder->formatContext, nullptr);
-- stream->id = encoder->formatContext->nb_streams - 1;
-- stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
-- stream->codecpar->codec_id = codecID;
--#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
-- stream->codecpar->channel_layout = av_get_default_channel_layout(format.channelCount());
-- stream->codecpar->channels = format.channelCount();
--#else
-- av_channel_layout_default(&stream->codecpar->ch_layout, format.channelCount());
--#endif
-- stream->codecpar->sample_rate = format.sampleRate();
-- stream->codecpar->frame_size = 1024;
-- stream->codecpar->format = bestSampleFormat;
-- stream->time_base = AVRational{ 1, format.sampleRate() };
--
-- Q_ASSERT(avCodec);
-- codec = avcodec_alloc_context3(avCodec);
-- avcodec_parameters_to_context(codec, stream->codecpar);
--
-- AVDictionary *opts = nullptr;
-- applyAudioEncoderOptions(settings, avCodec->name, codec, &opts);
--
-- int res = avcodec_open2(codec, avCodec, &opts);
-- qCDebug(qLcFFmpegEncoder) << "audio codec opened" << res;
-- qCDebug(qLcFFmpegEncoder) << "audio codec params: fmt=" << codec->sample_fmt << "rate=" << codec->sample_rate;
--
-- if (codec->sample_fmt != requested) {
--#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
-- resampler = swr_alloc_set_opts(nullptr, // we're allocating a new context
-- codec->channel_layout, // out_ch_layout
-- codec->sample_fmt, // out_sample_fmt
-- codec->sample_rate, // out_sample_rate
-- av_get_default_channel_layout(format.channelCount()), // in_ch_layout
-- requested, // in_sample_fmt
-- format.sampleRate(), // in_sample_rate
-- 0, // log_offset
-- nullptr);
--#else
-- AVChannelLayout in_ch_layout = {};
-- av_channel_layout_default(&in_ch_layout, format.channelCount());
-- swr_alloc_set_opts2(&resampler, // we're allocating a new context
-- &codec->ch_layout, codec->sample_fmt, codec->sample_rate,
-- &in_ch_layout, requested, format.sampleRate(),
-- 0, nullptr);
--#endif
--
-- swr_init(resampler);
-- }
--}
--
--void AudioEncoder::addBuffer(const QAudioBuffer &buffer)
--{
-- QMutexLocker locker(&queueMutex);
-- if (!paused.loadRelaxed()) {
-- audioBufferQueue.enqueue(buffer);
-- wake();
-- }
--}
--
--QAudioBuffer AudioEncoder::takeBuffer()
--{
-- QMutexLocker locker(&queueMutex);
-- if (audioBufferQueue.isEmpty())
-- return QAudioBuffer();
-- return audioBufferQueue.dequeue();
--}
--
--void AudioEncoder::init()
--{
-- if (input) {
-- input->setFrameSize(codec->frame_size);
-- }
-- qCDebug(qLcFFmpegEncoder) << "AudioEncoder::init started audio device thread.";
--}
--
--void AudioEncoder::cleanup()
--{
-- while (!audioBufferQueue.isEmpty())
-- loop();
-- while (avcodec_send_frame(codec, nullptr) == AVERROR(EAGAIN))
-- retrievePackets();
-- retrievePackets();
--}
--
--bool AudioEncoder::shouldWait() const
--{
-- QMutexLocker locker(&queueMutex);
-- return audioBufferQueue.isEmpty();
--}
--
--void AudioEncoder::retrievePackets()
--{
-- while (1) {
-- AVPacket *packet = av_packet_alloc();
-- int ret = avcodec_receive_packet(codec, packet);
-- if (ret < 0) {
-- av_packet_unref(packet);
-- if (ret != AVERROR(EOF))
-- break;
-- if (ret != AVERROR(EAGAIN)) {
-- char errStr[1024];
-- av_strerror(ret, errStr, 1024);
-- qCDebug(qLcFFmpegEncoder) << "receive packet" << ret << errStr;
-- }
-- break;
-- }
--
-- // qCDebug(qLcFFmpegEncoder) << "writing video packet" << packet->size << packet->pts << timeStamp(packet->pts, stream->time_base) << packet->stream_index;
-- packet->stream_index = stream->id;
-- encoder->muxer->addPacket(packet);
-- }
--}
--
--void AudioEncoder::loop()
--{
-- QAudioBuffer buffer = takeBuffer();
-- if (!buffer.isValid() || paused.loadAcquire())
-- return;
--
--// qCDebug(qLcFFmpegEncoder) << "new audio buffer" << buffer.byteCount() << buffer.format() << buffer.frameCount() << codec->frame_size;
-- retrievePackets();
--
-- AVFrame *frame = av_frame_alloc();
-- frame->format = codec->sample_fmt;
--#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
-- frame->channel_layout = codec->channel_layout;
-- frame->channels = codec->channels;
--#else
-- frame->ch_layout = codec->ch_layout;
--#endif
-- frame->sample_rate = codec->sample_rate;
-- frame->nb_samples = buffer.frameCount();
-- if (frame->nb_samples)
-- av_frame_get_buffer(frame, 0);
--
-- if (resampler) {
-- const uint8_t *data = buffer.constData<uint8_t>();
-- swr_convert(resampler, frame->extended_data, frame->nb_samples, &data, frame->nb_samples);
-- } else {
-- memcpy(frame->buf[0]->data, buffer.constData<uint8_t>(), buffer.byteCount());
-- }
--
-- frame->pts = samplesWritten;
-- samplesWritten += buffer.frameCount();
--
-- qint64 time = format.durationForFrames(samplesWritten);
-- encoder->newTimeStamp(time/1000);
--
--// qCDebug(qLcFFmpegEncoder) << "sending audio frame" << buffer.byteCount() << frame->pts << ((double)buffer.frameCount()/frame->sample_rate);
-- int ret = avcodec_send_frame(codec, frame);
-- if (ret < 0) {
-- char errStr[1024];
-- av_strerror(ret, errStr, 1024);
--// qCDebug(qLcFFmpegEncoder) << "error sending frame" << ret << errStr;
-- }
--}
--
--VideoEncoder::VideoEncoder(Encoder *encoder, QPlatformCamera *camera, const QMediaEncoderSettings &settings)
-- : m_encoderSettings(settings)
-- , m_camera(camera)
--{
-- this->encoder = encoder;
--
-- setObjectName(QLatin1String("VideoEncoder"));
-- qCDebug(qLcFFmpegEncoder) << "VideoEncoder" << settings.videoCodec();
--
-- auto format = m_camera->cameraFormat();
-- std::optional<AVPixelFormat> hwFormat = camera->ffmpegHWPixelFormat()
-- ? AVPixelFormat(*camera->ffmpegHWPixelFormat())
-- : std::optional<AVPixelFormat>{};
--
-- AVPixelFormat swFormat = QFFmpegVideoBuffer::toAVPixelFormat(format.pixelFormat());
-- AVPixelFormat pixelFormat = hwFormat ? *hwFormat : swFormat;
-- frameEncoder = new VideoFrameEncoder(settings, format.resolution(), format.maxFrameRate(), pixelFormat, swFormat);
--
-- frameEncoder->initWithFormatContext(encoder->formatContext);
--}
--
--VideoEncoder::~VideoEncoder()
--{
-- delete frameEncoder;
--}
--
--void VideoEncoder::addFrame(const QVideoFrame &frame)
--{
-- QMutexLocker locker(&queueMutex);
-- if (!paused.loadRelaxed()) {
-- videoFrameQueue.enqueue(frame);
-- wake();
-- }
--}
--
--QVideoFrame VideoEncoder::takeFrame()
--{
-- QMutexLocker locker(&queueMutex);
-- if (videoFrameQueue.isEmpty())
-- return QVideoFrame();
-- return videoFrameQueue.dequeue();
--}
--
--void VideoEncoder::retrievePackets()
--{
-- if (!frameEncoder)
-- return;
-- while (AVPacket *packet = frameEncoder->retrievePacket())
-- encoder->muxer->addPacket(packet);
--}
--
--void VideoEncoder::init()
--{
-- qCDebug(qLcFFmpegEncoder) << "VideoEncoder::init started video device thread.";
-- bool ok = frameEncoder->open();
-- if (!ok)
-- encoder->error(QMediaRecorder::ResourceError, "Could not initialize encoder");
--}
--
--void VideoEncoder::cleanup()
--{
-- while (!videoFrameQueue.isEmpty())
-- loop();
-- if (frameEncoder) {
-- while (frameEncoder->sendFrame(nullptr) == AVERROR(EAGAIN))
-- retrievePackets();
-- retrievePackets();
-- }
--}
--
--bool VideoEncoder::shouldWait() const
--{
-- QMutexLocker locker(&queueMutex);
-- return videoFrameQueue.isEmpty();
--}
--
--struct QVideoFrameHolder
--{
-- QVideoFrame f;
-- QImage i;
--};
--
--static void freeQVideoFrame(void *opaque, uint8_t *)
--{
-- delete reinterpret_cast<QVideoFrameHolder *>(opaque);
--}
--
--void VideoEncoder::loop()
--{
-- if (paused.loadAcquire())
-- return;
--
-- retrievePackets();
--
-- auto frame = takeFrame();
-- if (!frame.isValid())
-- return;
--
-- if (frameEncoder->isNull())
-- return;
--
--// qCDebug(qLcFFmpegEncoder) << "new video buffer" << frame.startTime();
--
-- AVFrame *avFrame = nullptr;
--
-- auto *videoBuffer = dynamic_cast<QFFmpegVideoBuffer *>(frame.videoBuffer());
-- if (videoBuffer) {
-- // ffmpeg video buffer, let's use the native AVFrame stored in there
-- auto *hwFrame = videoBuffer->getHWFrame();
-- if (hwFrame && hwFrame->format == frameEncoder->sourceFormat())
-- avFrame = av_frame_clone(hwFrame);
-- }
--
-- if (!avFrame) {
-- frame.map(QVideoFrame::ReadOnly);
-- auto size = frame.size();
-- avFrame = av_frame_alloc();
-- avFrame->format = frameEncoder->sourceFormat();
-- avFrame->width = size.width();
-- avFrame->height = size.height();
-- av_frame_get_buffer(avFrame, 0);
--
-- for (int i = 0; i < 4; ++i) {
-- avFrame->data[i] = const_cast<uint8_t *>(frame.bits(i));
-- avFrame->linesize[i] = frame.bytesPerLine(i);
-- }
--
-- QImage img;
-- if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
-- // the QImage is cached inside the video frame, so we can take the pointer to the image data here
-- img = frame.toImage();
-- avFrame->data[0] = (uint8_t *)img.bits();
-- avFrame->linesize[0] = img.bytesPerLine();
-- }
--
-- Q_ASSERT(avFrame->data[0]);
-- // ensure the video frame and it's data is alive as long as it's being used in the encoder
-- avFrame->opaque_ref = av_buffer_create(nullptr, 0, freeQVideoFrame, new QVideoFrameHolder{frame, img}, 0);
-- }
--
-- if (baseTime.loadAcquire() < 0) {
-- baseTime.storeRelease(frame.startTime() - lastFrameTime);
--// qCDebug(qLcFFmpegEncoder) << ">>>> adjusting base time to" << baseTime.loadAcquire() << frame.startTime() << lastFrameTime;
-- }
--
-- qint64 time = frame.startTime() - baseTime.loadAcquire();
-- lastFrameTime = frame.endTime() - baseTime.loadAcquire();
-- avFrame->pts = frameEncoder->getPts(time);
--
-- encoder->newTimeStamp(time/1000);
--
--// qCDebug(qLcFFmpegEncoder) << ">>> sending frame" << avFrame->pts << time;
-- int ret = frameEncoder->sendFrame(avFrame);
-- if (ret < 0) {
-- qCDebug(qLcFFmpegEncoder) << "error sending frame" << ret << err2str(ret);
-- encoder->error(QMediaRecorder::ResourceError, err2str(ret));
-- }
--}
--
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegencoder_p.h b/src/plugins/multimedia/ffmpeg/qffmpegencoder_p.h
-deleted file mode 100644
-index b673b718c..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegencoder_p.h
-+++ /dev/null
-@@ -1,197 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGENCODER_P_H
--#define QFFMPEGENCODER_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qffmpegthread_p.h"
--#include "qffmpeg_p.h"
--#include "qffmpeghwaccel_p.h"
--
--#include <private/qplatformmediarecorder_p.h>
--#include <qaudioformat.h>
--#include <qaudiobuffer.h>
--
--#include <qqueue.h>
--
--QT_BEGIN_NAMESPACE
--
--class QFFmpegAudioInput;
--class QVideoFrame;
--class QPlatformCamera;
--
--namespace QFFmpeg
--{
--
--class Encoder;
--class Muxer;
--class AudioEncoder;
--class VideoEncoder;
--class VideoFrameEncoder;
--
--class EncodingFinalizer : public QThread
--{
--public:
-- EncodingFinalizer(Encoder *e)
-- : encoder(e)
-- {}
-- void run() override;
--
-- Encoder *encoder = nullptr;
--};
--
--class Encoder : public QObject
--{
-- Q_OBJECT
--public:
-- Encoder(const QMediaEncoderSettings &settings, const QUrl &url);
-- ~Encoder();
--
-- void addAudioInput(QFFmpegAudioInput *input);
-- void addVideoSource(QPlatformCamera *source);
--
-- void start();
-- void finalize();
--
-- void setPaused(bool p);
--
-- void setMetaData(const QMediaMetaData &metaData);
--
--public Q_SLOTS:
-- void newAudioBuffer(const QAudioBuffer &buffer);
-- void newVideoFrame(const QVideoFrame &frame);
-- void newTimeStamp(qint64 time);
--
--Q_SIGNALS:
-- void durationChanged(qint64 duration);
-- void error(QMediaRecorder::Error code, const QString &description);
-- void finalizationDone();
--
--public:
--
-- QMediaEncoderSettings settings;
-- QMediaMetaData metaData;
-- AVFormatContext *formatContext = nullptr;
-- Muxer *muxer = nullptr;
-- bool isRecording = false;
--
-- AudioEncoder *audioEncode = nullptr;
-- VideoEncoder *videoEncode = nullptr;
--
-- QMutex timeMutex;
-- qint64 timeRecorded = 0;
--};
--
--
--class Muxer : public Thread
--{
-- mutable QMutex queueMutex;
-- QQueue<AVPacket *> packetQueue;
--public:
-- Muxer(Encoder *encoder);
--
-- void addPacket(AVPacket *);
--
--private:
-- AVPacket *takePacket();
--
-- void init() override;
-- void cleanup() override;
-- bool shouldWait() const override;
-- void loop() override;
--
-- Encoder *encoder;
--};
--
--class EncoderThread : public Thread
--{
--public:
-- virtual void setPaused(bool b)
-- {
-- paused.storeRelease(b);
-- }
--
--protected:
-- QAtomicInteger<bool> paused = false;
-- Encoder *encoder = nullptr;
--};
--
--class AudioEncoder : public EncoderThread
--{
-- mutable QMutex queueMutex;
-- QQueue<QAudioBuffer> audioBufferQueue;
--public:
-- AudioEncoder(Encoder *encoder, QFFmpegAudioInput *input, const QMediaEncoderSettings &settings);
--
-- void addBuffer(const QAudioBuffer &buffer);
--
-- QFFmpegAudioInput *audioInput() const { return input; }
--
--private:
-- QAudioBuffer takeBuffer();
-- void retrievePackets();
--
-- void init() override;
-- void cleanup() override;
-- bool shouldWait() const override;
-- void loop() override;
--
-- AVStream *stream = nullptr;
-- AVCodecContext *codec = nullptr;
-- QFFmpegAudioInput *input;
-- QAudioFormat format;
--
-- SwrContext *resampler = nullptr;
-- qint64 samplesWritten = 0;
--};
--
--
--class VideoEncoder : public EncoderThread
--{
-- mutable QMutex queueMutex;
-- QQueue<QVideoFrame> videoFrameQueue;
--public:
-- VideoEncoder(Encoder *encoder, QPlatformCamera *camera, const QMediaEncoderSettings &settings);
-- ~VideoEncoder();
--
-- void addFrame(const QVideoFrame &frame);
--
-- void setPaused(bool b) override
-- {
-- EncoderThread::setPaused(b);
-- if (b)
-- baseTime.storeRelease(-1);
-- }
--
--private:
-- QVideoFrame takeFrame();
-- void retrievePackets();
--
-- void init() override;
-- void cleanup() override;
-- bool shouldWait() const override;
-- void loop() override;
--
-- QMediaEncoderSettings m_encoderSettings;
-- QPlatformCamera *m_camera = nullptr;
-- VideoFrameEncoder *frameEncoder = nullptr;
--
-- QAtomicInteger<qint64> baseTime = -1;
-- qint64 lastFrameTime = 0;
--};
--
--}
--
--QT_END_NAMESPACE
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions.cpp b/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions.cpp
-deleted file mode 100644
-index 2535048c3..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions.cpp
-+++ /dev/null
-@@ -1,272 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#include "qffmpegencoderoptions_p.h"
--
--#if QT_CONFIG(vaapi)
--#include <va/va.h>
--#endif
--
--QT_BEGIN_NAMESPACE
--
--// unfortunately there is no common way to specify options for the encoders. The code here tries to map our settings sensibly
--// to options available in different encoders
--
--// For constant quality options, we're trying to map things to approx those bit rates for 1080p@30fps (in Mbps):
--// VeryLow Low Normal High VeryHigh
--// H264: 0.8M 1.5M 3.5M 6M 10M
--// H265: 0.5M 1.0M 2.5M 4M 7M
--
--[[maybe_unused]]
--static int bitrateForSettings(const QMediaEncoderSettings &settings, bool hdr = false)
--{
-- // calculate an acceptable bitrate depending on video codec, resolution, framerate and requested quality
-- // The calculations are rather heuristic here, trying to take into account how well codecs compress using
-- // the tables above.
--
-- // The table here is for 30FPS
-- const double bitsPerPixel[int(QMediaFormat::VideoCodec::LastVideoCodec)+1][QMediaRecorder::VeryHighQuality+1] = {
-- { 1.2, 2.25, 5, 9, 15 }, // MPEG1,
-- { 0.8, 1.5, 3.5, 6, 10 }, // MPEG2
-- { 0.4, 0.75, 1.75, 3, 5 }, // MPEG4
-- { 0.4, 0.75, 1.75, 3, 5 }, // H264
-- { 0.3, 0.5, 0.2, 2, 3 }, // H265
-- { 0.4, 0.75, 1.75, 3, 5 }, // VP8
-- { 0.3, 0.5, 0.2, 2, 3 }, // VP9
-- { 0.2, 0.4, 0.9, 1.5, 2.5 }, // AV1
-- { 0.4, 0.75, 1.75, 3, 5 }, // Theora
-- { 0.8, 1.5, 3.5, 6, 10 }, // WMV
-- { 16, 24, 32, 40, 48 }, // MotionJPEG
-- };
--
-- QSize s = settings.videoResolution();
-- double bitrate = bitsPerPixel[int(settings.videoCodec())][settings.quality()]*s.width()*s.height();
--
-- if (settings.videoCodec() != QMediaFormat::VideoCodec::MotionJPEG) {
-- // We assume that doubling the framerate requires 1.5 times the amount of data (not twice, as intraframe
-- // differences will be smaller). 4 times the frame rate uses thus 2.25 times the data, etc.
-- float rateMultiplier = log2(settings.videoFrameRate()/30.);
-- bitrate *= pow(1.5, rateMultiplier);
-- } else {
-- // MotionJPEG doesn't optimize between frames, so we have a linear dependency on framerate
-- bitrate *= settings.videoFrameRate()/30.;
-- }
--
-- // HDR requires 10bits per pixel instead of 8, so apply a factor of 1.25.
-- if (hdr)
-- bitrate *= 1.25;
-- return bitrate;
--}
--
--static void apply_x264(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts)
--{
-- if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
-- codec->bit_rate = settings.videoBitRate();
-- } else {
-- const char *scales[] = {
-- "29", "26", "23", "21", "19"
-- };
-- av_dict_set(opts, "crf", scales[settings.quality()], 0);
-- }
--}
--
--static void apply_x265(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts)
--{
-- if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
-- codec->bit_rate = settings.videoBitRate();
-- } else {
-- const char *scales[QMediaRecorder::VeryHighQuality+1] = {
-- "40", "34", "28", "26", "24",
-- };
-- av_dict_set(opts, "crf", scales[settings.quality()], 0);
-- }
--}
--
--static void apply_libvpx(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts)
--{
-- if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
-- codec->bit_rate = settings.videoBitRate();
-- } else {
-- const char *scales[QMediaRecorder::VeryHighQuality+1] = {
-- "38", "34", "31", "28", "25",
-- };
-- av_dict_set(opts, "crf", scales[settings.quality()], 0);
-- av_dict_set(opts, "b", 0, 0);
-- }
-- av_dict_set(opts, "row-mt", "1", 0); // better multithreading
--}
--
--#ifdef Q_OS_DARWIN
--static void apply_videotoolbox(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **)
--{
-- if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
-- codec->bit_rate = settings.videoBitRate();
-- } else {
-- // only use quality on macOS/ARM, as FFmpeg doesn't support it on the other platforms and would throw
-- // an error when initializing the codec
--#if defined(Q_OS_MACOS) && defined(Q_PROCESSOR_ARM_64)
-- // Videotoolbox describes quality as a number from 0 to 1, with low == 0.25, normal 0.5, high 0.75 and lossless = 1
-- // ffmpeg uses a different scale going from 0 to 11800.
-- // Values here are adjusted to agree approximately with the target bit rates listed above
-- const int scales[] = {
-- 3000, 4800, 5900, 6900, 7700,
-- };
-- codec->global_quality = scales[settings.quality()];
-- codec->flags |= AV_CODEC_FLAG_QSCALE;
--#else
-- codec->bit_rate = bitrateForSettings(settings);
--#endif
-- }
--}
--#endif
--
--#if QT_CONFIG(vaapi)
--static void apply_vaapi(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **/*opts*/)
--{
-- // See also vaapi_encode_init_rate_control() in libavcodec
-- if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding) {
-- codec->bit_rate = settings.videoBitRate();
-- codec->rc_max_rate = settings.videoBitRate();
-- } else if (settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
-- codec->bit_rate = settings.videoBitRate();
-- } else {
-- const int *quality = nullptr;
-- // unfortunately, all VA codecs use different quality scales :/
-- switch (settings.videoCodec()) {
-- case QMediaFormat::VideoCodec::MPEG2: {
-- static const int q[] = { 20, 15, 10, 8, 6 };
-- quality = q;
-- break;
-- }
-- case QMediaFormat::VideoCodec::MPEG4:
-- case QMediaFormat::VideoCodec::H264: {
-- static const int q[] = { 29, 26, 23, 21, 19 };
-- quality = q;
-- break;
-- }
-- case QMediaFormat::VideoCodec::H265: {
-- static const int q[] = { 40, 34, 28, 26, 24 };
-- quality = q;
-- break;
-- }
-- case QMediaFormat::VideoCodec::VP8: {
-- static const int q[] = { 56, 48, 40, 34, 28 };
-- quality = q;
-- break;
-- }
-- case QMediaFormat::VideoCodec::VP9: {
-- static const int q[] = { 124, 112, 100, 88, 76 };
-- quality = q;
-- break;
-- }
-- case QMediaFormat::VideoCodec::MotionJPEG: {
-- static const int q[] = { 40, 60, 80, 90, 95 };
-- quality = q;
-- break;
-- }
-- case QMediaFormat::VideoCodec::AV1:
-- case QMediaFormat::VideoCodec::Theora:
-- case QMediaFormat::VideoCodec::WMV:
-- default:
-- break;
-- }
--
-- if (quality) {
-- qDebug() << "using quality" << settings.quality() << quality[settings.quality()];
-- codec->global_quality = quality[settings.quality()];
-- }
-- }
--}
--#endif
--
--#ifdef Q_OS_WINDOWS
--static void apply_mf(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts)
--{
-- if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
-- codec->bit_rate = settings.videoBitRate();
-- av_dict_set(opts, "rate_control", "cbr", 0);
-- } else {
-- av_dict_set(opts, "rate_control", "quality", 0);
-- const char *scales[] = {
-- "25", "50", "75", "90", "100"
-- };
-- av_dict_set(opts, "quality", scales[settings.quality()], 0);
-- }
--}
--#endif
--
--namespace QFFmpeg {
--
--using ApplyOptions = void (*)(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts);
--
--const struct {
-- const char *name;
-- ApplyOptions apply;
--} videoCodecOptionTable[] = {
-- { "libx264", apply_x264 },
-- { "libx265xx", apply_x265 },
-- { "libvpx", apply_libvpx },
-- { "libvpx_vp9", apply_libvpx },
--#ifdef Q_OS_DARWIN
-- { "h264_videotoolbox", apply_videotoolbox },
-- { "hevc_videotoolbox", apply_videotoolbox },
-- { "prores_videotoolbox", apply_videotoolbox },
-- { "vp9_videotoolbox", apply_videotoolbox },
--#endif
--#if QT_CONFIG(vaapi)
-- { "mpeg2_vaapi", apply_vaapi },
-- { "mjpeg_vaapi", apply_vaapi },
-- { "h264_vaapi", apply_vaapi },
-- { "hevc_vaapi", apply_vaapi },
-- { "vp8_vaapi", apply_vaapi },
-- { "vp9_vaapi", apply_vaapi },
--#endif
--#ifdef Q_OS_WINDOWS
-- { "hevc_mf", apply_mf },
-- { "h264_mf", apply_mf },
--#endif
-- { nullptr, nullptr }
--};
--
--const struct {
-- const char *name;
-- ApplyOptions apply;
--} audioCodecOptionTable[] = {
-- { nullptr, nullptr }
--};
--
--void applyVideoEncoderOptions(const QMediaEncoderSettings &settings, const QByteArray &codecName, AVCodecContext *codec, AVDictionary **opts)
--{
-- av_dict_set(opts, "threads", "auto", 0); // we always want automatic threading
--
-- auto *table = videoCodecOptionTable;
-- while (table->name) {
-- if (codecName == table->name) {
-- table->apply(settings, codec, opts);
-- return;
-- }
--
-- ++table;
-- }
--}
--
--void applyAudioEncoderOptions(const QMediaEncoderSettings &settings, const QByteArray &codecName, AVCodecContext *codec, AVDictionary **opts)
--{
-- codec->thread_count = -1; // we always want automatic threading
-- if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding)
-- codec->bit_rate = settings.audioBitRate();
--
-- auto *table = audioCodecOptionTable;
-- while (table->name) {
-- if (codecName == table->name) {
-- table->apply(settings, codec, opts);
-- return;
-- }
--
-- ++table;
-- }
--
--}
--
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions_p.h b/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions_p.h
-deleted file mode 100644
-index 005ad7652..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions_p.h
-+++ /dev/null
-@@ -1,32 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGENCODEROPTIONS_P_H
--#define QFFMPEGENCODEROPTIONS_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qffmpeghwaccel_p.h"
--#include "qvideoframeformat.h"
--#include "private/qplatformmediarecorder_p.h"
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg {
--
--void applyVideoEncoderOptions(const QMediaEncoderSettings &settings, const QByteArray &codecName, AVCodecContext *codec, AVDictionary **opts);
--void applyAudioEncoderOptions(const QMediaEncoderSettings &settings, const QByteArray &codecName, AVCodecContext *codec, AVDictionary **opts);
--
--}
--
--QT_END_NAMESPACE
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
-deleted file mode 100644
-index e5d90239b..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
-+++ /dev/null
-@@ -1,372 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpeghwaccel_p.h"
--#if QT_CONFIG(vaapi)
--#include "qffmpeghwaccel_vaapi_p.h"
--#endif
--#ifdef Q_OS_DARWIN
--#include "qffmpeghwaccel_videotoolbox_p.h"
--#endif
--#if QT_CONFIG(wmf)
--#include "qffmpeghwaccel_d3d11_p.h"
--#endif
--#ifdef Q_OS_ANDROID
--# include "qffmpeghwaccel_mediacodec_p.h"
--#endif
--#include "qffmpeg_p.h"
--#include "qffmpegvideobuffer_p.h"
--
--#include <private/qrhi_p.h>
--#include <qdebug.h>
--
--/* Infrastructure for HW acceleration goes into this file. */
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg {
--
--static const AVHWDeviceType preferredHardwareAccelerators[] = {
--#if defined(Q_OS_LINUX)
-- AV_HWDEVICE_TYPE_VAAPI,
--// AV_HWDEVICE_TYPE_DRM,
--#elif defined (Q_OS_WIN)
-- AV_HWDEVICE_TYPE_D3D11VA,
--#elif defined (Q_OS_DARWIN)
-- AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
--#elif defined (Q_OS_ANDROID)
-- AV_HWDEVICE_TYPE_MEDIACODEC,
--#endif
--};
--
--static AVBufferRef *loadHWContext(const AVHWDeviceType type)
--{
-- AVBufferRef *hwContext = nullptr;
-- int ret = av_hwdevice_ctx_create(&hwContext, type, nullptr, nullptr, 0);
-- qDebug() << " Checking HW context:" << av_hwdevice_get_type_name(type);
-- if (ret == 0) {
-- qDebug() << " Using above hw context.";
-- return hwContext;
-- }
-- qDebug() << " Could not create hw context:" << ret << strerror(-ret);
-- return nullptr;
--}
--
--static AVBufferRef *hardwareContextForCodec(const AVCodec *codec)
--{
-- qDebug() << "Checking HW acceleration for decoder" << codec->name;
--
-- // First try our preferred accelerators. Those are the ones where we can
-- // set up a zero copy pipeline
-- for (auto type : preferredHardwareAccelerators) {
-- for (int i = 0;; ++i) {
-- const AVCodecHWConfig *config = avcodec_get_hw_config(codec, i);
-- if (!config)
-- break;
-- if (config->device_type == type) {
-- auto *hwContext = loadHWContext(config->device_type);
-- if (hwContext)
-- return hwContext;
-- break;
-- }
-- }
-- }
--
-- // Ok, let's see if we can get any HW acceleration at all. It'll still involve one buffer copy,
-- // as we can't move the data into RHI textures without a CPU copy
-- for (int i = 0;; ++i) {
-- const AVCodecHWConfig *config = avcodec_get_hw_config(codec, i);
-- if (!config)
-- break;
--
-- auto *hwContext = loadHWContext(config->device_type);
-- if (hwContext)
-- return hwContext;
-- }
-- qDebug() << " No HW accelerators found, using SW decoding.";
-- return nullptr;
--
--}
--
--// Used for the AVCodecContext::get_format callback
--AVPixelFormat getFormat(AVCodecContext *s, const AVPixelFormat *fmt)
--{
-- // First check HW accelerated codecs, the HW device context must be set
-- if (s->hw_device_ctx) {
-- auto *device_ctx = (AVHWDeviceContext*)s->hw_device_ctx->data;
-- for (int i = 0; const AVCodecHWConfig *config = avcodec_get_hw_config(s->codec, i); i++) {
-- if (!(config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX))
-- continue;
-- if (device_ctx->type != config->device_type)
-- continue;
-- for (int n = 0; fmt[n] != AV_PIX_FMT_NONE; n++) {
-- if (config->pix_fmt == fmt[n]) {
--#if QT_CONFIG(wmf)
-- if (fmt[n] == AV_PIX_FMT_D3D11)
-- QFFmpeg::D3D11TextureConverter::SetupDecoderTextures(s);
--#endif
--#ifdef Q_OS_ANDROID
-- if (fmt[n] == AV_PIX_FMT_MEDIACODEC)
-- QFFmpeg::MediaCodecTextureConverter::setupDecoderSurface(s);
--#endif
-- return fmt[n];
-- }
-- }
-- }
-- }
--
-- // prefer video formats we can handle directly
-- for (int n = 0; fmt[n] != AV_PIX_FMT_NONE; n++) {
-- bool needsConversion = true;
-- QFFmpegVideoBuffer::toQtPixelFormat(fmt[n], &needsConversion);
-- if (!needsConversion)
-- return fmt[n];
-- }
--
-- // take the native format, this will involve one additional format conversion on the CPU side
-- return *fmt;
--}
--
--TextureConverter::Data::~Data()
--{
-- delete backend;
--}
--
--HWAccel::~HWAccel()
--{
-- if (m_hwDeviceContext)
-- av_buffer_unref(&m_hwDeviceContext);
-- if (m_hwFramesContext)
-- av_buffer_unref(&m_hwFramesContext);
--}
--
--std::unique_ptr<HWAccel> HWAccel::create(const AVCodec *codec)
--{
-- if (codec->type == AVMEDIA_TYPE_VIDEO) {
-- if (auto *ctx = hardwareContextForCodec(codec))
-- return std::unique_ptr<HWAccel>(new HWAccel(ctx));
-- }
-- return {};
--}
--
--std::unique_ptr<HWAccel> HWAccel::create(AVHWDeviceType deviceType)
--{
-- if (auto *ctx = loadHWContext(deviceType))
-- return std::unique_ptr<HWAccel>(new HWAccel(ctx));
-- else
-- return {};
--}
--
--AVPixelFormat HWAccel::format(AVFrame *frame)
--{
-- if (!frame->hw_frames_ctx)
-- return AVPixelFormat(frame->format);
--
-- auto *hwFramesContext = (AVHWFramesContext *)frame->hw_frames_ctx->data;
-- Q_ASSERT(hwFramesContext);
-- return AVPixelFormat(hwFramesContext->sw_format);
--}
--
--std::pair<const AVHWDeviceType*, qsizetype> HWAccel::preferredDeviceTypes()
--{
-- return { preferredHardwareAccelerators,
-- sizeof(preferredHardwareAccelerators) / sizeof(AVHWDeviceType) };
--}
--
--AVHWDeviceContext *HWAccel::hwDeviceContext() const
--{
-- return m_hwDeviceContext ? (AVHWDeviceContext *)m_hwDeviceContext->data : nullptr;
--}
--
--AVPixelFormat HWAccel::hwFormat() const
--{
-- switch (deviceType()) {
-- case AV_HWDEVICE_TYPE_VIDEOTOOLBOX:
-- return AV_PIX_FMT_VIDEOTOOLBOX;
-- case AV_HWDEVICE_TYPE_VAAPI:
-- return AV_PIX_FMT_VAAPI;
-- case AV_HWDEVICE_TYPE_MEDIACODEC:
-- return AV_PIX_FMT_MEDIACODEC;
-- default:
-- return AV_PIX_FMT_NONE;
-- }
--}
--
--const AVCodec *HWAccel::hardwareDecoderForCodecId(AVCodecID id)
--{
-- const AVCodec *codec = nullptr;
--#ifdef Q_OS_ANDROID
-- const auto getDecoder = [](AVCodecID id) {
-- switch (id) {
-- case AV_CODEC_ID_H264:
-- return avcodec_find_decoder_by_name("h264_mediacodec");
-- case AV_CODEC_ID_HEVC:
-- return avcodec_find_decoder_by_name("hevc_mediacodec");
-- case AV_CODEC_ID_MPEG2VIDEO:
-- return avcodec_find_decoder_by_name("mpeg2_mediacodec");
-- case AV_CODEC_ID_MPEG4:
-- return avcodec_find_decoder_by_name("mpeg4_mediacodec");
-- case AV_CODEC_ID_VP8:
-- return avcodec_find_decoder_by_name("vp8_mediacodec");
-- case AV_CODEC_ID_VP9:
-- return avcodec_find_decoder_by_name("vp9_mediacodec");
-- default:
-- return avcodec_find_decoder(id);
-- }
-- };
-- codec = getDecoder(id);
--#endif
--
-- if (!codec)
-- codec = avcodec_find_decoder(id);
--
-- return codec;
--}
--
--const AVCodec *HWAccel::hardwareEncoderForCodecId(AVCodecID id) const
--{
-- const char *codec = nullptr;
-- switch (deviceType()) {
--#ifdef Q_OS_DARWIN
-- case AV_HWDEVICE_TYPE_VIDEOTOOLBOX:
-- switch (id) {
-- case AV_CODEC_ID_H264:
-- codec = "h264_videotoolbox";
-- break;
-- case AV_CODEC_ID_HEVC:
-- codec = "hevc_videotoolbox";
-- break;
-- case AV_CODEC_ID_PRORES:
-- codec = "prores_videotoolbox";
-- break;
-- case AV_CODEC_ID_VP9:
-- codec = "vp9_videotoolbox";
-- break;
-- default:
-- break;
-- }
-- break;
--#endif
-- case AV_HWDEVICE_TYPE_VAAPI:
-- switch (id) {
-- case AV_CODEC_ID_H264:
-- codec = "h264_vaapi";
-- break;
-- case AV_CODEC_ID_HEVC:
-- codec = "hevc_vaapi";
-- break;
-- case AV_CODEC_ID_MJPEG:
-- codec = "mjpeg_vaapi";
-- break;
-- case AV_CODEC_ID_MPEG2VIDEO:
-- codec = "mpeg2_vaapi";
-- break;
-- case AV_CODEC_ID_VP8:
-- codec = "vp8_vaapi";
-- break;
-- case AV_CODEC_ID_VP9:
-- codec = "vp9_vaapi";
-- break;
-- default:
-- break;
-- }
-- break;
-- default:
-- break;
-- }
-- if (!codec)
-- return nullptr;
-- const AVCodec *c = avcodec_find_encoder_by_name(codec);
-- qDebug() << "searching for HW codec" << codec << "got" << c;
-- return c;
--}
--
--std::unique_ptr<HWAccel> HWAccel::findHardwareAccelForCodecID(AVCodecID id)
--{
-- for (auto type : preferredHardwareAccelerators) {
-- auto accel = HWAccel::create(type);
-- if (accel && accel->hardwareEncoderForCodecId(id))
-- return accel;
-- }
-- return {};
--}
--
--AVHWDeviceType HWAccel::deviceType() const
--{
-- return m_hwDeviceContext ? hwDeviceContext()->type : AV_HWDEVICE_TYPE_NONE;
--}
--
--void HWAccel::createFramesContext(AVPixelFormat swFormat, const QSize &size)
--{
-- if (m_hwDeviceContext)
-- return;
-- m_hwFramesContext = av_hwframe_ctx_alloc(m_hwDeviceContext);
-- auto *c = (AVHWFramesContext *)m_hwFramesContext->data;
-- c->format = hwFormat();
-- c->sw_format = swFormat;
-- c->width = size.width();
-- c->height = size.height();
-- qDebug() << "init frames context";
-- int err = av_hwframe_ctx_init(m_hwFramesContext);
-- if (err < 0)
-- qWarning() << "failed to init HW frame context" << err << err2str(err);
-- else
-- qDebug() << "Initialized frames context" << size << c->format << c->sw_format;
--}
--
--AVHWFramesContext *HWAccel::hwFramesContext() const
--{
-- return m_hwFramesContext ? (AVHWFramesContext *)m_hwFramesContext->data : nullptr;
--}
--
--
--TextureConverter::TextureConverter(QRhi *rhi)
-- : d(new Data)
--{
-- d->rhi = rhi;
--}
--
--TextureSet *TextureConverter::getTextures(AVFrame *frame)
--{
-- if (!frame || isNull())
-- return nullptr;
--
-- Q_ASSERT(frame->format == d->format);
-- return d->backend->getTextures(frame);
--}
--
--void TextureConverter::updateBackend(AVPixelFormat fmt)
--{
-- d->backend = nullptr;
-- if (!d->rhi)
-- return;
-- switch (fmt) {
--#if QT_CONFIG(vaapi)
-- case AV_PIX_FMT_VAAPI:
-- d->backend = new VAAPITextureConverter(d->rhi);
-- break;
--#endif
--#ifdef Q_OS_DARWIN
-- case AV_PIX_FMT_VIDEOTOOLBOX:
-- d->backend = new VideoToolBoxTextureConverter(d->rhi);
-- break;
--#endif
--#if QT_CONFIG(wmf)
-- case AV_PIX_FMT_D3D11:
-- d->backend = new D3D11TextureConverter(d->rhi);
-- break;
--#endif
--#ifdef Q_OS_ANDROID
-- case AV_PIX_FMT_MEDIACODEC:
-- d->backend = new MediaCodecTextureConverter(d->rhi);
-- break;
--#endif
-- default:
-- break;
-- }
-- d->format = fmt;
--}
--
--} // namespace QFFmpeg
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp
-deleted file mode 100644
-index f0a6c7b91..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp
-+++ /dev/null
-@@ -1,158 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpeghwaccel_d3d11_p.h"
--
--#include <qvideoframeformat.h>
--#include "qffmpegvideobuffer_p.h"
--
--
--#include <private/qvideotexturehelper_p.h>
--#include <private/qwindowsiupointer_p.h>
--#include <private/qrhi_p.h>
--#include <private/qrhid3d11_p.h>
--
--#include <qopenglfunctions.h>
--#include <qdebug.h>
--#include <qloggingcategory.h>
--
--#include <libavutil/hwcontext_d3d11va.h>
--
--QT_BEGIN_NAMESPACE
--
--Q_LOGGING_CATEGORY(qLcMediaFFmpegHWAccel, "qt.multimedia.hwaccel")
--
--namespace QFFmpeg {
--
--class D3D11TextureSet : public TextureSet
--{
--public:
-- D3D11TextureSet(QWindowsIUPointer<ID3D11Texture2D> &&tex)
-- : m_tex(tex)
-- {}
--
-- qint64 textureHandle(int plane) override
-- {
-- return qint64(m_tex.get());
-- }
--
--private:
-- QWindowsIUPointer<ID3D11Texture2D> m_tex;
--};
--
--
--D3D11TextureConverter::D3D11TextureConverter(QRhi *rhi)
-- : TextureConverterBackend(rhi)
--{
--}
--
--static QWindowsIUPointer<ID3D11Texture2D> getSharedTextureForDevice(ID3D11Device *dev, ID3D11Texture2D *tex)
--{
-- QWindowsIUPointer<IDXGIResource> dxgiResource;
-- HRESULT hr = tex->QueryInterface(__uuidof(IDXGIResource), reinterpret_cast<void **>(dxgiResource.address()));
-- if (FAILED(hr)) {
-- qCDebug(qLcMediaFFmpegHWAccel) << "Failed to obtain resource handle from FFMpeg texture" << hr;
-- return {};
-- }
-- HANDLE shared = nullptr;
-- hr = dxgiResource->GetSharedHandle(&shared);
-- if (FAILED(hr)) {
-- qCDebug(qLcMediaFFmpegHWAccel) << "Failed to obtain shared handle for FFmpeg texture" << hr;
-- return {};
-- }
--
-- QWindowsIUPointer<ID3D11Texture2D> sharedTex;
-- hr = dev->OpenSharedResource(shared, __uuidof(ID3D11Texture2D), reinterpret_cast<void **>(sharedTex.address()));
-- if (FAILED(hr))
-- qCDebug(qLcMediaFFmpegHWAccel) << "Failed to share FFmpeg texture" << hr;
-- return sharedTex;
--}
--
--static QWindowsIUPointer<ID3D11Texture2D> copyTextureFromArray(ID3D11Device *dev, ID3D11Texture2D *array, int index)
--{
-- D3D11_TEXTURE2D_DESC arrayDesc = {};
-- array->GetDesc(&arrayDesc);
--
-- D3D11_TEXTURE2D_DESC texDesc = {};
-- texDesc.Width = arrayDesc.Width;
-- texDesc.Height = arrayDesc.Height;
-- texDesc.Format = arrayDesc.Format;
-- texDesc.ArraySize = 1;
-- texDesc.MipLevels = 1;
-- texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
-- texDesc.MiscFlags = 0;
-- texDesc.SampleDesc = { 1, 0};
--
-- QWindowsIUPointer<ID3D11Texture2D> texCopy;
-- HRESULT hr = dev->CreateTexture2D(&texDesc, nullptr, texCopy.address());
-- if (FAILED(hr)) {
-- qCDebug(qLcMediaFFmpegHWAccel) << "Failed to create texture" << hr;
-- return {};
-- }
--
-- QWindowsIUPointer<ID3D11DeviceContext> ctx;
-- dev->GetImmediateContext(ctx.address());
-- ctx->CopySubresourceRegion(texCopy.get(), 0, 0, 0, 0, array, index, nullptr);
--
-- return texCopy;
--}
--
--TextureSet *D3D11TextureConverter::getTextures(AVFrame *frame)
--{
-- if (!frame || !frame->hw_frames_ctx || frame->format != AV_PIX_FMT_D3D11)
-- return nullptr;
--
-- auto *fCtx = (AVHWFramesContext *)frame->hw_frames_ctx->data;
-- auto *ctx = fCtx->device_ctx;
-- if (!ctx || ctx->type != AV_HWDEVICE_TYPE_D3D11VA)
-- return nullptr;
--
-- auto nh = static_cast<const QRhiD3D11NativeHandles *>(rhi->nativeHandles());
-- if (!nh)
-- return nullptr;
--
-- auto ffmpegTex = (ID3D11Texture2D *)frame->data[0];
-- int index = (intptr_t)frame->data[1];
--
-- if (rhi->backend() == QRhi::D3D11) {
-- auto dev = reinterpret_cast<ID3D11Device *>(nh->dev);
-- if (!dev)
-- return nullptr;
-- auto sharedTex = getSharedTextureForDevice(dev, ffmpegTex);
-- if (sharedTex) {
-- auto tex = copyTextureFromArray(dev, sharedTex.get(), index);
-- if (tex) {
-- QVideoFrameFormat::PixelFormat format = QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat(fCtx->sw_format));
-- return new D3D11TextureSet(std::move(tex));
-- }
-- }
-- }
--
-- return nullptr;
--}
--
--void D3D11TextureConverter::SetupDecoderTextures(AVCodecContext *s)
--{
-- int ret = avcodec_get_hw_frames_parameters(s,
-- s->hw_device_ctx,
-- AV_PIX_FMT_D3D11,
-- &s->hw_frames_ctx);
-- if (ret < 0) {
-- qCDebug(qLcMediaFFmpegHWAccel) << "Failed to allocate HW frames context" << ret;
-- return;
-- }
--
-- auto *frames_ctx = (AVHWFramesContext *)s->hw_frames_ctx->data;
-- auto *hwctx = (AVD3D11VAFramesContext *)frames_ctx->hwctx;
-- hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
-- hwctx->BindFlags = D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE;
-- ret = av_hwframe_ctx_init(s->hw_frames_ctx);
-- if (ret < 0) {
-- qCDebug(qLcMediaFFmpegHWAccel) << "Failed to initialize HW frames context" << ret;
-- av_buffer_unref(&s->hw_frames_ctx);
-- }
--}
--
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h
-deleted file mode 100644
-index 2e9c77f5b..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h
-+++ /dev/null
-@@ -1,43 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGHWACCEL_D3D11_P_H
--#define QFFMPEGHWACCEL_D3D11_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qffmpeghwaccel_p.h"
--
--#if QT_CONFIG(wmf)
--
--QT_BEGIN_NAMESPACE
--
--class QRhi;
--
--namespace QFFmpeg {
--
--class D3D11TextureConverter : public TextureConverterBackend
--{
--public:
-- D3D11TextureConverter(QRhi *rhi);
--
-- TextureSet *getTextures(AVFrame *frame) override;
--
-- static void SetupDecoderTextures(AVCodecContext *s);
--};
--
--}
--
--QT_END_NAMESPACE
--
--#endif
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp
-deleted file mode 100644
-index 20a06c3ab..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp
-+++ /dev/null
-@@ -1,70 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpeghwaccel_mediacodec_p.h"
--
--#include <androidsurfacetexture_p.h>
--#include <QtGui/private/qrhi_p.h>
--
--extern "C" {
--#include <libavcodec/mediacodec.h>
--}
--
--#if !defined(Q_OS_ANDROID)
--# error "Configuration error"
--#endif
--
--namespace QFFmpeg {
--
--Q_GLOBAL_STATIC(AndroidSurfaceTexture, androidSurfaceTexture, 0);
--
--class MediaCodecTextureSet : public TextureSet
--{
--public:
-- MediaCodecTextureSet(qint64 textureHandle) : handle(textureHandle) { }
--
-- qint64 textureHandle(int plane) override { return (plane == 0) ? handle : 0; }
--
--private:
-- qint64 handle;
--};
--
--void MediaCodecTextureConverter::setupDecoderSurface(AVCodecContext *avCodecContext)
--{
-- AVMediaCodecContext *mediacodecContext = av_mediacodec_alloc_context();
-- av_mediacodec_default_init(avCodecContext, mediacodecContext, androidSurfaceTexture->surface());
--}
--
--TextureSet *MediaCodecTextureConverter::getTextures(AVFrame *frame)
--{
-- if (!androidSurfaceTexture->isValid())
-- return {};
--
-- if (!externalTexture) {
-- androidSurfaceTexture->detachFromGLContext();
-- externalTexture = std::unique_ptr<QRhiTexture>(
-- rhi->newTexture(QRhiTexture::Format::RGBA8, { frame->width, frame->height }, 1,
-- QRhiTexture::ExternalOES));
--
-- if (!externalTexture->create()) {
-- qWarning() << "Failed to create the external texture!";
-- return {};
-- }
--
-- quint64 textureHandle = externalTexture->nativeTexture().object;
-- androidSurfaceTexture->attachToGLContext(textureHandle);
-- }
--
-- // release a MediaCodec buffer and render it to the surface
-- AVMediaCodecBuffer *buffer = (AVMediaCodecBuffer *)frame->data[3];
-- int result = av_mediacodec_release_buffer(buffer, 1);
-- if (result < 0) {
-- qWarning() << "Failed to render buffer to surface.";
-- return {};
-- }
--
-- androidSurfaceTexture->updateTexImage();
--
-- return new MediaCodecTextureSet(externalTexture->nativeTexture().object);
--}
--}
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec_p.h
-deleted file mode 100644
-index 95982ba4d..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec_p.h
-+++ /dev/null
-@@ -1,35 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#ifndef QFFMPEGHWACCEL_MEDIACODEC_P_H
--#define QFFMPEGHWACCEL_MEDIACODEC_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qffmpeghwaccel_p.h"
--#include <memory>
--
--namespace QFFmpeg {
--struct Frame;
--
--class MediaCodecTextureConverter : public TextureConverterBackend
--{
--public:
-- MediaCodecTextureConverter(QRhi *rhi) : TextureConverterBackend(rhi){};
-- TextureSet *getTextures(AVFrame *frame) override;
--
-- static void setupDecoderSurface(AVCodecContext *s);
--private:
-- std::unique_ptr<QRhiTexture> externalTexture;
--};
--}
--#endif // QFFMPEGHWACCEL_MEDIACODEC_P_H
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
-deleted file mode 100644
-index 81bb163bb..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
-+++ /dev/null
-@@ -1,121 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGHWACCEL_P_H
--#define QFFMPEGHWACCEL_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qffmpeg_p.h"
--#include "qvideoframeformat.h"
--#include <private/qabstractvideobuffer_p.h>
--#include <qshareddata.h>
--#include <memory>
--
--QT_BEGIN_NAMESPACE
--
--class QRhi;
--class QRhiTexture;
--class QFFmpegVideoBuffer;
--
--namespace QFFmpeg {
--
--// used for the get_format callback for the decoder
--enum AVPixelFormat getFormat(struct AVCodecContext *s, const enum AVPixelFormat * fmt);
--
--class HWAccel;
--
--class TextureSet {
--public:
-- // ### Should add QVideoFrameFormat::PixelFormat here
-- virtual ~TextureSet() {}
-- virtual qint64 textureHandle(int /*plane*/) { return 0; }
--};
--
--class TextureConverterBackend
--{
--public:
-- TextureConverterBackend(QRhi *rhi)
-- : rhi(rhi)
-- {}
-- virtual ~TextureConverterBackend() {}
-- virtual TextureSet *getTextures(AVFrame * /*frame*/) { return nullptr; }
--
-- QRhi *rhi = nullptr;
--};
--
--class TextureConverter
--{
-- class Data final
-- {
-- public:
-- ~Data();
-- QAtomicInt ref = 0;
-- QRhi *rhi = nullptr;
-- AVPixelFormat format = AV_PIX_FMT_NONE;
-- TextureConverterBackend *backend = nullptr;
-- };
--public:
-- TextureConverter(QRhi *rhi = nullptr);
--
-- void init(AVFrame *frame) {
-- AVPixelFormat fmt = frame ? AVPixelFormat(frame->format) : AV_PIX_FMT_NONE;
-- if (fmt != d->format)
-- updateBackend(fmt);
-- }
-- TextureSet *getTextures(AVFrame *frame);
-- bool isNull() const { return !d->backend || !d->backend->rhi; }
--
--private:
-- void updateBackend(AVPixelFormat format);
--
-- QExplicitlySharedDataPointer<Data> d;
--};
--
--class HWAccel
--{
-- AVBufferRef *m_hwDeviceContext = nullptr;
-- AVBufferRef *m_hwFramesContext = nullptr;
--
--public:
-- ~HWAccel();
--
-- static std::unique_ptr<HWAccel> create(const AVCodec *decoder);
-- static std::unique_ptr<HWAccel> create(AVHWDeviceType deviceType);
-- static std::unique_ptr<HWAccel> findHardwareAccelForCodecID(AVCodecID id);
--
-- static const AVCodec *hardwareDecoderForCodecId(AVCodecID id);
-- const AVCodec *hardwareEncoderForCodecId(AVCodecID id) const;
--
-- AVHWDeviceType deviceType() const;
--
-- AVBufferRef *hwDeviceContextAsBuffer() const { return m_hwDeviceContext; }
-- AVHWDeviceContext *hwDeviceContext() const;
-- AVPixelFormat hwFormat() const;
--
-- void createFramesContext(AVPixelFormat swFormat, const QSize &size);
-- AVBufferRef *hwFramesContextAsBuffer() const { return m_hwFramesContext; }
-- AVHWFramesContext *hwFramesContext() const;
--
-- static AVPixelFormat format(AVFrame *frame);
-- static std::pair<const AVHWDeviceType*, qsizetype> preferredDeviceTypes();
--
--private:
-- HWAccel(AVBufferRef *hwDeviceContext, AVBufferRef *hwFrameContext = nullptr)
-- : m_hwDeviceContext(hwDeviceContext), m_hwFramesContext(hwFrameContext)
-- {}
--};
--
--}
--
--QT_END_NAMESPACE
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
-deleted file mode 100644
-index 7b9976fe0..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
-+++ /dev/null
-@@ -1,346 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpeghwaccel_vaapi_p.h"
--
--#if !QT_CONFIG(vaapi)
--#error "Configuration error"
--#endif
--
--#include <va/va.h>
--
--#include <qvideoframeformat.h>
--#include "qffmpegvideobuffer_p.h"
--#include "private/qvideotexturehelper_p.h"
--
--#include <private/qrhi_p.h>
--#include <private/qrhigles2_p.h>
--
--#include <qguiapplication.h>
--#include <qpa/qplatformnativeinterface.h>
--
--#include <qopenglfunctions.h>
--
--//#define VA_EXPORT_USE_LAYERS
--
--#if __has_include("drm/drm_fourcc.h")
--#include <drm/drm_fourcc.h>
--#elif __has_include("libdrm/drm_fourcc.h")
--#include <libdrm/drm_fourcc.h>
--#else
--// keep things building without drm_fourcc.h
--#define fourcc_code(a, b, c, d) ((uint32_t)(a) | ((uint32_t)(b) << 8) | \
-- ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))
--
--#define DRM_FORMAT_RGBA8888 fourcc_code('R', 'A', '2', '4') /* [31:0] R:G:B:A 8:8:8:8 little endian */
--#define DRM_FORMAT_RGB888 fourcc_code('R', 'G', '2', '4') /* [23:0] R:G:B little endian */
--#define DRM_FORMAT_RG88 fourcc_code('R', 'G', '8', '8') /* [15:0] R:G 8:8 little endian */
--#define DRM_FORMAT_ABGR8888 fourcc_code('A', 'B', '2', '4') /* [31:0] A:B:G:R 8:8:8:8 little endian */
--#define DRM_FORMAT_BGR888 fourcc_code('B', 'G', '2', '4') /* [23:0] B:G:R little endian */
--#define DRM_FORMAT_GR88 fourcc_code('G', 'R', '8', '8') /* [15:0] G:R 8:8 little endian */
--#define DRM_FORMAT_R8 fourcc_code('R', '8', ' ', ' ') /* [7:0] R */
--#define DRM_FORMAT_R16 fourcc_code('R', '1', '6', ' ') /* [15:0] R little endian */
--#define DRM_FORMAT_RGB565 fourcc_code('R', 'G', '1', '6') /* [15:0] R:G:B 5:6:5 little endian */
--#define DRM_FORMAT_RG1616 fourcc_code('R', 'G', '3', '2') /* [31:0] R:G 16:16 little endian */
--#define DRM_FORMAT_GR1616 fourcc_code('G', 'R', '3', '2') /* [31:0] G:R 16:16 little endian */
--#define DRM_FORMAT_BGRA1010102 fourcc_code('B', 'A', '3', '0') /* [31:0] B:G:R:A 10:10:10:2 little endian */
--#endif
--
--extern "C" {
--#include <libavutil/hwcontext_vaapi.h>
--}
--
--#include <va/va_drm.h>
--#include <va/va_drmcommon.h>
--
--#include <EGL/egl.h>
--#include <EGL/eglext.h>
--
--#include <unistd.h>
--
--#include <qdebug.h>
--
--namespace QFFmpeg {
--
--static const quint32 *fourccFromPixelFormat(const QVideoFrameFormat::PixelFormat format)
--{
--#if G_BYTE_ORDER == G_LITTLE_ENDIAN
-- const quint32 rgba_fourcc = DRM_FORMAT_ABGR8888;
-- const quint32 rg_fourcc = DRM_FORMAT_GR88;
-- const quint32 rg16_fourcc = DRM_FORMAT_GR1616;
--#else
-- const quint32 rgba_fourcc = DRM_FORMAT_RGBA8888;
-- const quint32 rg_fourcc = DRM_FORMAT_RG88;
-- const quint32 rg16_fourcc = DRM_FORMAT_RG1616;
--#endif
--
--// qDebug() << "Getting DRM fourcc for pixel format" << format;
--
-- switch (format) {
-- case QVideoFrameFormat::Format_Invalid:
-- case QVideoFrameFormat::Format_IMC1:
-- case QVideoFrameFormat::Format_IMC2:
-- case QVideoFrameFormat::Format_IMC3:
-- case QVideoFrameFormat::Format_IMC4:
-- case QVideoFrameFormat::Format_SamplerExternalOES:
-- case QVideoFrameFormat::Format_Jpeg:
-- case QVideoFrameFormat::Format_SamplerRect:
-- return nullptr;
--
-- case QVideoFrameFormat::Format_ARGB8888:
-- case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
-- case QVideoFrameFormat::Format_XRGB8888:
-- case QVideoFrameFormat::Format_BGRA8888:
-- case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
-- case QVideoFrameFormat::Format_BGRX8888:
-- case QVideoFrameFormat::Format_ABGR8888:
-- case QVideoFrameFormat::Format_XBGR8888:
-- case QVideoFrameFormat::Format_RGBA8888:
-- case QVideoFrameFormat::Format_RGBX8888:
-- case QVideoFrameFormat::Format_AYUV:
-- case QVideoFrameFormat::Format_AYUV_Premultiplied:
-- case QVideoFrameFormat::Format_UYVY:
-- case QVideoFrameFormat::Format_YUYV:
-- {
-- static constexpr quint32 format[] = { rgba_fourcc, 0, 0, 0 };
-- return format;
-- }
--
-- case QVideoFrameFormat::Format_Y8:
-- {
-- static constexpr quint32 format[] = { DRM_FORMAT_R8, 0, 0, 0 };
-- return format;
-- }
-- case QVideoFrameFormat::Format_Y16:
-- {
-- static constexpr quint32 format[] = { DRM_FORMAT_R16, 0, 0, 0 };
-- return format;
-- }
--
-- case QVideoFrameFormat::Format_YUV420P:
-- case QVideoFrameFormat::Format_YUV422P:
-- case QVideoFrameFormat::Format_YV12:
-- {
-- static constexpr quint32 format[] = { DRM_FORMAT_R8, DRM_FORMAT_R8, DRM_FORMAT_R8, 0 };
-- return format;
-- }
-- case QVideoFrameFormat::Format_YUV420P10:
-- {
-- static constexpr quint32 format[] = { DRM_FORMAT_R16, DRM_FORMAT_R16, DRM_FORMAT_R16, 0 };
-- return format;
-- }
--
-- case QVideoFrameFormat::Format_NV12:
-- case QVideoFrameFormat::Format_NV21:
-- {
-- static constexpr quint32 format[] = { DRM_FORMAT_R8, rg_fourcc, 0, 0 };
-- return format;
-- }
--
-- case QVideoFrameFormat::Format_P010:
-- case QVideoFrameFormat::Format_P016:
-- {
-- static constexpr quint32 format[] = { DRM_FORMAT_R16, rg16_fourcc, 0, 0 };
-- return format;
-- }
-- }
-- return nullptr;
--}
--
--class VAAPITextureSet : public TextureSet
--{
--public:
-- ~VAAPITextureSet();
-- qint64 textureHandle(int plane) override {
-- return textures[plane];
-- }
--
-- QRhi *rhi = nullptr;
-- QOpenGLContext *glContext = nullptr;
-- int nPlanes = 0;
-- GLuint textures[4] = {};
--};
--
--
--VAAPITextureConverter::VAAPITextureConverter(QRhi *rhi)
-- : TextureConverterBackend(nullptr)
--{
-- qDebug() << ">>>> Creating VAAPI HW accelerator";
--
-- if (!rhi || rhi->backend() != QRhi::OpenGLES2) {
-- qWarning() << "VAAPITextureConverter: No rhi or non openGL based RHI";
-- this->rhi = nullptr;
-- return;
-- }
--
-- auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
-- glContext = nativeHandles->context;
-- if (!glContext) {
-- qDebug() << " no GL context, disabling";
-- return;
-- }
-- const QString platform = QGuiApplication::platformName();
-- QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();
-- eglDisplay = pni->nativeResourceForIntegration("egldisplay");
-- qDebug() << " platform is" << platform << eglDisplay;
--
-- if (!eglDisplay) {
-- qDebug() << " no egl display, disabling";
-- return;
-- }
-- eglImageTargetTexture2D = eglGetProcAddress("glEGLImageTargetTexture2DOES");
-- if (!eglDisplay) {
-- qDebug() << " no eglImageTargetTexture2D, disabling";
-- return;
-- }
--
-- // everything ok, indicate that we can do zero copy
-- this->rhi = rhi;
--}
--
--VAAPITextureConverter::~VAAPITextureConverter()
--{
--}
--
--//#define VA_EXPORT_USE_LAYERS
--TextureSet *VAAPITextureConverter::getTextures(AVFrame *frame)
--{
--// qDebug() << "VAAPIAccel::getTextures";
-- if (frame->format != AV_PIX_FMT_VAAPI || !eglDisplay) {
-- qDebug() << "format/egl error" << frame->format << eglDisplay;
-- return nullptr;
-- }
--
-- if (!frame->hw_frames_ctx)
-- return nullptr;
--
-- auto *fCtx = (AVHWFramesContext *)frame->hw_frames_ctx->data;
-- auto *ctx = fCtx->device_ctx;
-- if (!ctx)
-- return nullptr;
--
-- auto *vaCtx = (AVVAAPIDeviceContext *)ctx->hwctx;
-- auto vaDisplay = vaCtx->display;
-- if (!vaDisplay) {
-- qDebug() << " no VADisplay, disabling";
-- return nullptr;
-- }
--
-- VASurfaceID vaSurface = (uintptr_t)frame->data[3];
--
-- VADRMPRIMESurfaceDescriptor prime;
-- if (vaExportSurfaceHandle(vaDisplay, vaSurface,
-- VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
-- VA_EXPORT_SURFACE_READ_ONLY |
--#ifdef VA_EXPORT_USE_LAYERS
-- VA_EXPORT_SURFACE_SEPARATE_LAYERS,
--#else
-- VA_EXPORT_SURFACE_COMPOSED_LAYERS,
--#endif
-- &prime) != VA_STATUS_SUCCESS)
-- {
-- qWarning() << "vaExportSurfaceHandle failed";
-- return nullptr;
-- }
-- // ### Check that prime.fourcc is what we expect
-- vaSyncSurface(vaDisplay, vaSurface);
--
--// qDebug() << "VAAPIAccel: vaSufraceDesc: width/height" << prime.width << prime.height << "num objects"
--// << prime.num_objects << "num layers" << prime.num_layers;
--
-- QOpenGLFunctions functions(glContext);
--
-- AVPixelFormat fmt = HWAccel::format(frame);
-- bool needsConversion;
-- auto qtFormat = QFFmpegVideoBuffer::toQtPixelFormat(fmt, &needsConversion);
-- auto *drm_formats = fourccFromPixelFormat(qtFormat);
-- if (!drm_formats || needsConversion) {
-- qWarning() << "can't use DMA transfer for pixel format" << fmt << qtFormat;
-- return nullptr;
-- }
--
-- auto *desc = QVideoTextureHelper::textureDescription(qtFormat);
-- int nPlanes = 0;
-- for (; nPlanes < 5; ++nPlanes) {
-- if (drm_formats[nPlanes] == 0)
-- break;
-- }
-- Q_ASSERT(nPlanes == desc->nplanes);
-- nPlanes = desc->nplanes;
--// qDebug() << "VAAPIAccel: nPlanes" << nPlanes;
--
-- rhi->makeThreadLocalNativeContextCurrent();
--
-- EGLImage images[4];
-- GLuint glTextures[4] = {};
-- functions.glGenTextures(nPlanes, glTextures);
-- for (int i = 0; i < nPlanes; ++i) {
--#ifdef VA_EXPORT_USE_LAYERS
--#define LAYER i
--#define PLANE 0
-- if (prime.layers[i].drm_format != drm_formats[i]) {
-- qWarning() << "expected DRM format check failed expected"
-- << Qt::hex << drm_formats[i] << "got" << prime.layers[i].drm_format;
-- }
--#else
--#define LAYER 0
--#define PLANE i
--#endif
--
-- EGLAttrib img_attr[] = {
-- EGL_LINUX_DRM_FOURCC_EXT, (EGLint)drm_formats[i],
-- EGL_WIDTH, desc->widthForPlane(frame->width, i),
-- EGL_HEIGHT, desc->heightForPlane(frame->height, i),
-- EGL_DMA_BUF_PLANE0_FD_EXT, prime.objects[prime.layers[LAYER].object_index[PLANE]].fd,
-- EGL_DMA_BUF_PLANE0_OFFSET_EXT, (EGLint)prime.layers[LAYER].offset[PLANE],
-- EGL_DMA_BUF_PLANE0_PITCH_EXT, (EGLint)prime.layers[LAYER].pitch[PLANE],
-- EGL_NONE
-- };
-- images[i] = eglCreateImage(eglDisplay, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, nullptr, img_attr);
-- if (!images[i]) {
-- qWarning() << "eglCreateImage failed for plane" << i << Qt::hex << eglGetError();
-- return nullptr;
-- }
-- functions.glActiveTexture(GL_TEXTURE0 + i);
-- functions.glBindTexture(GL_TEXTURE_2D, glTextures[i]);
--
-- PFNGLEGLIMAGETARGETTEXTURE2DOESPROC eglImageTargetTexture2D = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)this->eglImageTargetTexture2D;
-- eglImageTargetTexture2D(GL_TEXTURE_2D, images[i]);
-- if (glGetError()) {
-- qWarning() << "eglImageTargetTexture2D failed";
-- }
-- }
--
-- for (int i = 0; i < (int)prime.num_objects; ++i)
-- close(prime.objects[i].fd);
--
-- for (int i = 0; i < nPlanes; ++i) {
-- functions.glActiveTexture(GL_TEXTURE0 + i);
-- functions.glBindTexture(GL_TEXTURE_2D, 0);
-- eglDestroyImage(eglDisplay, images[i]);
-- }
--
-- VAAPITextureSet *textureSet = new VAAPITextureSet;
-- textureSet->nPlanes = nPlanes;
-- textureSet->rhi = rhi;
-- textureSet->glContext = glContext;
--
-- for (int i = 0; i < 4; ++i)
-- textureSet->textures[i] = glTextures[i];
--// qDebug() << "VAAPIAccel: got textures" << textures[0] << textures[1] << textures[2] << textures[3];
--
-- return textureSet;
--}
--
--VAAPITextureSet::~VAAPITextureSet()
--{
-- if (rhi) {
-- rhi->makeThreadLocalNativeContextCurrent();
-- QOpenGLFunctions functions(glContext);
-- functions.glDeleteTextures(nPlanes, textures);
-- }
--}
--
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi_p.h
-deleted file mode 100644
-index 03084cc72..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi_p.h
-+++ /dev/null
-@@ -1,48 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGHWACCEL_VAAPI_P_H
--#define QFFMPEGHWACCEL_VAAPI_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qffmpeghwaccel_p.h"
--
--#if QT_CONFIG(vaapi)
--
--#include <qshareddata.h>
--
--QT_BEGIN_NAMESPACE
--
--class QRhi;
--class QOpenGLContext;
--
--namespace QFFmpeg {
--
--class VAAPITextureConverter : public TextureConverterBackend
--{
--public:
-- VAAPITextureConverter(QRhi *rhi);
-- ~VAAPITextureConverter();
--
-- TextureSet *getTextures(AVFrame *frame) override;
--
-- Qt::HANDLE eglDisplay = nullptr;
-- QOpenGLContext *glContext = nullptr;
-- QFunctionPointer eglImageTargetTexture2D = nullptr;
--};
--}
--
--QT_END_NAMESPACE
--
--#endif
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox.mm b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox.mm
-deleted file mode 100644
-index db64f2003..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox.mm
-+++ /dev/null
-@@ -1,281 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpeghwaccel_videotoolbox_p.h"
--
--#if !defined(Q_OS_DARWIN)
--#error "Configuration error"
--#endif
--
--#include <qvideoframeformat.h>
--#include <qffmpegvideobuffer_p.h>
--#include "private/qvideotexturehelper_p.h"
--
--#include <private/qrhi_p.h>
--#include <private/qrhimetal_p.h>
--#include <private/qrhigles2_p.h>
--
--#include <CoreVideo/CVMetalTexture.h>
--#include <CoreVideo/CVMetalTextureCache.h>
--
--#include <qopenglcontext.h>
--#ifdef Q_OS_MACOS
--#import <AppKit/AppKit.h>
--#endif
--#import <Metal/Metal.h>
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg
--{
--
--static CVMetalTextureCacheRef &mtc(void *&cache) { return reinterpret_cast<CVMetalTextureCacheRef &>(cache); }
--
--class VideoToolBoxTextureSet : public TextureSet
--{
--public:
-- ~VideoToolBoxTextureSet();
-- qint64 textureHandle(int plane) override;
--
-- QRhi *rhi = nullptr;
-- CVMetalTextureRef cvMetalTexture[3] = {};
--
--#if defined(Q_OS_MACOS)
-- CVOpenGLTextureRef cvOpenGLTexture = nullptr;
--#elif defined(Q_OS_IOS)
-- CVOpenGLESTextureRef cvOpenGLESTexture = nullptr;
--#endif
--
-- CVImageBufferRef m_buffer = nullptr;
--};
--
--VideoToolBoxTextureConverter::VideoToolBoxTextureConverter(QRhi *rhi)
-- : TextureConverterBackend(rhi)
--{
-- if (!rhi)
-- return;
--
-- if (rhi->backend() == QRhi::Metal) {
-- qDebug() << " using metal backend";
-- const auto *metal = static_cast<const QRhiMetalNativeHandles *>(rhi->nativeHandles());
--
-- // Create a Metal Core Video texture cache from the pixel buffer.
-- Q_ASSERT(!cvMetalTextureCache);
-- if (CVMetalTextureCacheCreate(
-- kCFAllocatorDefault,
-- nil,
-- (id<MTLDevice>)metal->dev,
-- nil,
-- &mtc(cvMetalTextureCache)) != kCVReturnSuccess) {
-- qWarning() << "Metal texture cache creation failed";
-- rhi = nullptr;
-- }
-- } else if (rhi->backend() == QRhi::OpenGLES2) {
--#if QT_CONFIG(opengl)
--#ifdef Q_OS_MACOS
-- const auto *gl = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
--
-- auto nsGLContext = gl->context->nativeInterface<QNativeInterface::QCocoaGLContext>()->nativeContext();
-- auto nsGLPixelFormat = nsGLContext.pixelFormat.CGLPixelFormatObj;
--
-- // Create an OpenGL CoreVideo texture cache from the pixel buffer.
-- if (CVOpenGLTextureCacheCreate(
-- kCFAllocatorDefault,
-- nullptr,
-- reinterpret_cast<CGLContextObj>(nsGLContext.CGLContextObj),
-- nsGLPixelFormat,
-- nil,
-- &cvOpenGLTextureCache)) {
-- qWarning() << "OpenGL texture cache creation failed";
-- rhi = nullptr;
-- }
--#endif
--#ifdef Q_OS_IOS
-- // Create an OpenGL CoreVideo texture cache from the pixel buffer.
-- if (CVOpenGLESTextureCacheCreate(
-- kCFAllocatorDefault,
-- nullptr,
-- [EAGLContext currentContext],
-- nullptr,
-- &cvOpenGLESTextureCache)) {
-- qWarning() << "OpenGL texture cache creation failed";
-- rhi = nullptr;
-- }
--#endif
--#else
-- rhi = nullptr;
--#endif // QT_CONFIG(opengl)
-- }
--}
--
--VideoToolBoxTextureConverter::~VideoToolBoxTextureConverter()
--{
-- freeTextureCaches();
--}
--
--void VideoToolBoxTextureConverter::freeTextureCaches()
--{
-- if (cvMetalTextureCache)
-- CFRelease(cvMetalTextureCache);
-- cvMetalTextureCache = nullptr;
--#if defined(Q_OS_MACOS)
-- if (cvOpenGLTextureCache)
-- CFRelease(cvOpenGLTextureCache);
-- cvOpenGLTextureCache = nullptr;
--#elif defined(Q_OS_IOS)
-- if (cvOpenGLESTextureCache)
-- CFRelease(cvOpenGLESTextureCache);
-- cvOpenGLESTextureCache = nullptr;
--#endif
--}
--
--static MTLPixelFormat rhiTextureFormatToMetalFormat(QRhiTexture::Format f)
--{
-- switch (f) {
-- default:
-- case QRhiTexture::UnknownFormat:
-- return MTLPixelFormatInvalid;
-- case QRhiTexture::RGBA8:
-- return MTLPixelFormatRGBA8Unorm;
-- case QRhiTexture::BGRA8:
-- return MTLPixelFormatBGRA8Unorm;
-- case QRhiTexture::R8:
-- return MTLPixelFormatR8Unorm;
-- case QRhiTexture::RG8:
-- return MTLPixelFormatRG8Unorm;
-- case QRhiTexture::R16:
-- return MTLPixelFormatR16Unorm;
-- case QRhiTexture::RG16:
-- return MTLPixelFormatRG16Unorm;
--
-- case QRhiTexture::RGBA16F:
-- return MTLPixelFormatRGBA16Float;
-- case QRhiTexture::RGBA32F:
-- return MTLPixelFormatRGBA32Float;
-- case QRhiTexture::R16F:
-- return MTLPixelFormatR16Float;
-- case QRhiTexture::R32F:
-- return MTLPixelFormatR32Float;
-- }
--}
--
--TextureSet *VideoToolBoxTextureConverter::getTextures(AVFrame *frame)
--{
-- if (!rhi)
-- return nullptr;
--
-- bool needsConversion = false;
-- QVideoFrameFormat::PixelFormat pixelFormat = QFFmpegVideoBuffer::toQtPixelFormat(HWAccel::format(frame), &needsConversion);
-- if (needsConversion) {
-- qDebug() << "XXXXXXXXXXXX pixel format needs conversion" << pixelFormat << HWAccel::format(frame);
-- return nullptr;
-- }
--
-- CVPixelBufferRef buffer = (CVPixelBufferRef)frame->data[3];
--
-- VideoToolBoxTextureSet *textureSet = new VideoToolBoxTextureSet;
-- textureSet->m_buffer = buffer;
-- textureSet->rhi = rhi;
-- CVPixelBufferRetain(buffer);
--
-- auto *textureDescription = QVideoTextureHelper::textureDescription(pixelFormat);
-- int bufferPlanes = CVPixelBufferGetPlaneCount(buffer);
--// qDebug() << "XXXXX getTextures" << pixelFormat << bufferPlanes << buffer;
--
-- if (rhi->backend() == QRhi::Metal) {
-- for (int plane = 0; plane < bufferPlanes; ++plane) {
-- size_t width = CVPixelBufferGetWidth(buffer);
-- size_t height = CVPixelBufferGetHeight(buffer);
-- width = textureDescription->widthForPlane(width, plane);
-- height = textureDescription->heightForPlane(height, plane);
--
-- // Create a CoreVideo pixel buffer backed Metal texture image from the texture cache.
-- auto ret = CVMetalTextureCacheCreateTextureFromImage(
-- kCFAllocatorDefault,
-- mtc(cvMetalTextureCache),
-- buffer, nil,
-- rhiTextureFormatToMetalFormat(textureDescription->textureFormat[plane]),
-- width, height,
-- plane,
-- &textureSet->cvMetalTexture[plane]);
--
-- if (ret != kCVReturnSuccess)
-- qWarning() << "texture creation failed" << ret;
--// auto t = CVMetalTextureGetTexture(textureSet->cvMetalTexture[plane]);
--// qDebug() << " metal texture for plane" << plane << "is" << quint64(textureSet->cvMetalTexture[plane]) << width << height;
--// qDebug() << " " << t.iosurfacePlane << t.pixelFormat << t.width << t.height;
-- }
-- } else if (rhi->backend() == QRhi::OpenGLES2) {
--#if QT_CONFIG(opengl)
--#ifdef Q_OS_MACOS
-- CVOpenGLTextureCacheFlush(cvOpenGLTextureCache, 0);
-- // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache.
-- const CVReturn cvret = CVOpenGLTextureCacheCreateTextureFromImage(
-- kCFAllocatorDefault,
-- cvOpenGLTextureCache,
-- buffer,
-- nil,
-- &textureSet->cvOpenGLTexture);
-- if (cvret != kCVReturnSuccess)
-- qWarning() << "OpenGL texture creation failed" << cvret;
--
-- Q_ASSERT(CVOpenGLTextureGetTarget(textureSet->cvOpenGLTexture) == GL_TEXTURE_RECTANGLE);
--#endif
--#ifdef Q_OS_IOS
-- CVOpenGLESTextureCacheFlush(cvOpenGLESTextureCache, 0);
-- // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache.
-- const CVReturn cvret = CVOpenGLESTextureCacheCreateTextureFromImage(
-- kCFAllocatorDefault,
-- cvOpenGLESTextureCache,
-- buffer,
-- nil,
-- GL_TEXTURE_2D,
-- GL_RGBA,
-- CVPixelBufferGetWidth(buffer),
-- CVPixelBufferGetHeight(buffer),
-- GL_RGBA,
-- GL_UNSIGNED_BYTE,
-- 0,
-- &textureSet->cvOpenGLESTexture);
-- if (cvret != kCVReturnSuccess)
-- qWarning() << "OpenGL ES texture creation failed" << cvret;
--#endif
--#endif
-- }
--
-- return textureSet;
--}
--
--VideoToolBoxTextureSet::~VideoToolBoxTextureSet()
--{
-- for (int i = 0; i < 4; ++i)
-- if (cvMetalTexture[i])
-- CFRelease(cvMetalTexture[i]);
--#if defined(Q_OS_MACOS)
-- if (cvOpenGLTexture)
-- CVOpenGLTextureRelease(cvOpenGLTexture);
--#elif defined(Q_OS_IOS)
-- if (cvOpenGLESTexture)
-- CFRelease(cvOpenGLESTexture);
--#endif
-- CVPixelBufferRelease(m_buffer);
--}
--
--qint64 VideoToolBoxTextureSet::textureHandle(int plane)
--{
-- if (rhi->backend() == QRhi::Metal)
-- return cvMetalTexture[plane] ? qint64(CVMetalTextureGetTexture(cvMetalTexture[plane])) : 0;
--#if QT_CONFIG(opengl)
-- Q_ASSERT(plane == 0);
--#ifdef Q_OS_MACOS
-- return CVOpenGLTextureGetName(cvOpenGLTexture);
--#endif
--#ifdef Q_OS_IOS
-- return CVOpenGLESTextureGetName(cvOpenGLESTexture);
--#endif
--#endif
--}
--
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox_p.h
-deleted file mode 100644
-index 44fa32dd2..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox_p.h
-+++ /dev/null
-@@ -1,63 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGHWACCEL_VIDEOTOOLBOX_P_H
--#define QFFMPEGHWACCEL_VIDEOTOOLBOX_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qffmpeghwaccel_p.h"
--
--#ifdef Q_OS_DARWIN
--
--#include <CoreVideo/CVBase.h>
--#include <CoreVideo/CVPixelBuffer.h>
--#include <CoreVideo/CVImageBuffer.h>
--
--#include <CoreVideo/CVMetalTexture.h>
--#if defined(Q_OS_MACOS)
--#include <CoreVideo/CVOpenGLTextureCache.h>
--#elif defined(Q_OS_IOS)
--#include <CoreVideo/CVOpenGLESTextureCache.h>
--#endif
--
--QT_BEGIN_NAMESPACE
--
--class QRhi;
--
--namespace QFFmpeg {
--
--class VideoToolBoxTextureConverter : public TextureConverterBackend
--{
--public:
-- VideoToolBoxTextureConverter(QRhi *rhi);
-- ~VideoToolBoxTextureConverter();
-- TextureSet *getTextures(AVFrame *frame) override;
--
--private:
-- void freeTextureCaches();
--
-- // can not forward declare that type from C++ :/
-- void *cvMetalTextureCache = nullptr;
--#if defined(Q_OS_MACOS)
-- CVOpenGLTextureCacheRef cvOpenGLTextureCache = nullptr;
--#elif defined(Q_OS_IOS)
-- CVOpenGLESTextureCacheRef cvOpenGLESTextureCache = nullptr;
--#endif
--};
--
--}
--
--QT_END_NAMESPACE
--
--#endif
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
-index 3d5fbc039..b6865761c 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
-@@ -6,7 +6,6 @@
- #include "private/qplatformaudioinput_p.h"
- #include "private/qplatformaudiooutput_p.h"
- #include "qffmpegimagecapture_p.h"
--#include "qffmpegmediarecorder_p.h"
- #include "private/qplatformcamera_p.h"
- #include "qvideosink.h"
-
-@@ -73,22 +72,12 @@ void QFFmpegMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCap
-
- void QFFmpegMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *recorder)
- {
-- auto *r = static_cast<QFFmpegMediaRecorder *>(recorder);
-- if (m_mediaRecorder == r)
-- return;
--
-- if (m_mediaRecorder)
-- m_mediaRecorder->setCaptureSession(nullptr);
-- m_mediaRecorder = r;
-- if (m_mediaRecorder)
-- m_mediaRecorder->setCaptureSession(this);
--
-- emit encoderChanged();
-+ return;
- }
-
- QPlatformMediaRecorder *QFFmpegMediaCaptureSession::mediaRecorder()
- {
-- return m_mediaRecorder;
-+ return nullptr;
- }
-
- void QFFmpegMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
-index 9e9c77551..858a537cc 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
-@@ -54,7 +54,6 @@ private:
- QPlatformCamera *m_camera = nullptr;
- QPlatformAudioInput *m_audioInput = nullptr;
- QFFmpegImageCapture *m_imageCapture = nullptr;
-- QFFmpegMediaRecorder *m_mediaRecorder = nullptr;
- QPlatformAudioOutput *m_audioOutput = nullptr;
- QVideoSink *m_videoSink = nullptr;
- };
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp
-index 2561d564d..00b838d50 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp
-@@ -2,236 +2,13 @@
- // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
- #include "qffmpegmediaformatinfo_p.h"
--#include "qffmpeg_p.h"
- #include "qaudioformat.h"
- #include "qimagewriter.h"
-
- QT_BEGIN_NAMESPACE
-
--static struct {
-- AVCodecID id;
-- QMediaFormat::VideoCodec codec;
--} videoCodecMap [] = {
-- { AV_CODEC_ID_MPEG1VIDEO, QMediaFormat::VideoCodec::MPEG1 },
-- { AV_CODEC_ID_MPEG2VIDEO, QMediaFormat::VideoCodec::MPEG2 },
-- { AV_CODEC_ID_MPEG4, QMediaFormat::VideoCodec::MPEG4 },
-- { AV_CODEC_ID_H264, QMediaFormat::VideoCodec::H264 },
-- { AV_CODEC_ID_HEVC, QMediaFormat::VideoCodec::H265 },
-- { AV_CODEC_ID_VP8, QMediaFormat::VideoCodec::VP8 },
-- { AV_CODEC_ID_VP9, QMediaFormat::VideoCodec::VP9 },
-- { AV_CODEC_ID_AV1, QMediaFormat::VideoCodec::AV1 },
-- { AV_CODEC_ID_THEORA, QMediaFormat::VideoCodec::Theora },
-- { AV_CODEC_ID_WMV3, QMediaFormat::VideoCodec::WMV },
-- { AV_CODEC_ID_MJPEG, QMediaFormat::VideoCodec::MotionJPEG }
--};
--
--static AVCodecID codecId(QMediaFormat::VideoCodec codec)
--{
-- for (const auto &c : videoCodecMap) {
-- if (c.codec == codec)
-- return c.id;
-- }
-- return AV_CODEC_ID_NONE;
--}
--
--static struct {
-- AVCodecID id;
-- QMediaFormat::AudioCodec codec;
--} audioCodecMap [] = {
-- { AV_CODEC_ID_MP3, QMediaFormat::AudioCodec::MP3 },
-- { AV_CODEC_ID_AAC, QMediaFormat::AudioCodec::AAC },
-- { AV_CODEC_ID_AC3, QMediaFormat::AudioCodec::AC3 },
-- { AV_CODEC_ID_EAC3, QMediaFormat::AudioCodec::EAC3 },
-- { AV_CODEC_ID_FLAC, QMediaFormat::AudioCodec::FLAC },
-- { AV_CODEC_ID_TRUEHD, QMediaFormat::AudioCodec::DolbyTrueHD },
-- { AV_CODEC_ID_OPUS, QMediaFormat::AudioCodec::Opus },
-- { AV_CODEC_ID_VORBIS, QMediaFormat::AudioCodec::Vorbis },
-- { AV_CODEC_ID_PCM_S16LE, QMediaFormat::AudioCodec::Wave },
-- { AV_CODEC_ID_WMAPRO, QMediaFormat::AudioCodec::WMA },
-- { AV_CODEC_ID_ALAC, QMediaFormat::AudioCodec::ALAC }
--};
--
--static AVCodecID codecId(QMediaFormat::AudioCodec codec)
--{
-- for (const auto &c : audioCodecMap) {
-- if (c.codec == codec)
-- return c.id;
-- }
-- return AV_CODEC_ID_NONE;
--}
--
--// mimetypes are mostly copied from qmediaformat.cpp. Unfortunately, FFmpeg uses
--// in some cases slightly different mimetypes
--static const struct
--{
-- QMediaFormat::FileFormat fileFormat;
-- const char *mimeType;
-- const char *name; // disambiguate if we have several muxers/demuxers
--} map[QMediaFormat::LastFileFormat + 1] = {
-- { QMediaFormat::WMV, "video/x-ms-asf", "asf" },
-- { QMediaFormat::AVI, "video/x-msvideo", nullptr },
-- { QMediaFormat::Matroska, "video/x-matroska", nullptr },
-- { QMediaFormat::MPEG4, "video/mp4", "mp4" },
-- { QMediaFormat::Ogg, "video/ogg", nullptr },
-- // QuickTime is the same as MP4
-- { QMediaFormat::WebM, "video/webm", "webm" },
-- // Audio Formats
-- // Mpeg4Audio is the same as MP4 without the video codecs
-- { QMediaFormat::AAC, "audio/aac", nullptr },
-- // WMA is the same as WMV
-- { QMediaFormat::FLAC, "audio/x-flac", nullptr },
-- { QMediaFormat::MP3, "audio/mpeg", "mp3" },
-- { QMediaFormat::Wave, "audio/x-wav", nullptr },
-- { QMediaFormat::UnspecifiedFormat, nullptr, nullptr }
--};
--
--template <typename AVFormat>
--static QMediaFormat::FileFormat formatForAVFormat(AVFormat *format)
--{
--
-- if (!format->mime_type || !*format->mime_type)
-- return QMediaFormat::UnspecifiedFormat;
--
-- auto *m = map;
-- while (m->fileFormat != QMediaFormat::UnspecifiedFormat) {
-- if (m->mimeType && !strcmp(m->mimeType, format->mime_type)) {
-- // check if the name matches. This is used to disambiguate where FFmpeg provides
-- // multiple muxers or demuxers
-- if (!m->name || !strcmp(m->name, format->name))
-- return m->fileFormat;
-- }
-- ++m;
-- }
--
-- return QMediaFormat::UnspecifiedFormat;
--}
--
--static const AVOutputFormat *avFormatForFormat(QMediaFormat::FileFormat format)
--{
-- if (format == QMediaFormat::QuickTime || format == QMediaFormat::Mpeg4Audio)
-- format = QMediaFormat::MPEG4;
-- if (format == QMediaFormat::WMA)
-- format = QMediaFormat::WMV;
--
-- auto *m = map;
-- while (m->fileFormat != QMediaFormat::UnspecifiedFormat) {
-- if (m->fileFormat == format)
-- return av_guess_format(m->name, nullptr, m->mimeType);
-- ++m;
-- }
--
-- return nullptr;
--}
--
--
- QFFmpegMediaFormatInfo::QFFmpegMediaFormatInfo()
- {
-- qDebug() << ">>>> listing codecs";
--
-- QList<QMediaFormat::AudioCodec> audioEncoders;
-- QList<QMediaFormat::AudioCodec> extraAudioDecoders;
-- QList<QMediaFormat::VideoCodec> videoEncoders;
-- QList<QMediaFormat::VideoCodec> extraVideoDecoders;
--
-- const AVCodecDescriptor *descriptor = nullptr;
-- while ((descriptor = avcodec_descriptor_next(descriptor))) {
-- bool canEncode = (avcodec_find_encoder(descriptor->id) != nullptr);
-- bool canDecode = (avcodec_find_decoder(descriptor->id) != nullptr);
-- auto videoCodec = videoCodecForAVCodecId(descriptor->id);
-- auto audioCodec = audioCodecForAVCodecId(descriptor->id);
-- if (descriptor->type == AVMEDIA_TYPE_VIDEO && videoCodec != QMediaFormat::VideoCodec::Unspecified) {
-- if (canEncode) {
-- if (!videoEncoders.contains(videoCodec))
-- videoEncoders.append(videoCodec);
-- } else if (canDecode) {
-- if (!extraVideoDecoders.contains(videoCodec))
-- extraVideoDecoders.append(videoCodec);
-- }
-- }
--
-- else if (descriptor->type == AVMEDIA_TYPE_AUDIO && audioCodec != QMediaFormat::AudioCodec::Unspecified) {
-- if (canEncode) {
-- if (!audioEncoders.contains(audioCodec))
-- audioEncoders.append(audioCodec);
-- } else if (canDecode) {
-- if (!extraAudioDecoders.contains(audioCodec))
-- extraAudioDecoders.append(audioCodec);
-- }
-- }
-- }
--
-- // get demuxers
--// qDebug() << ">>>> Muxers";
-- void *opaque = nullptr;
-- const AVOutputFormat *outputFormat = nullptr;
-- while ((outputFormat = av_muxer_iterate(&opaque))) {
-- auto mediaFormat = formatForAVFormat(outputFormat);
-- if (mediaFormat == QMediaFormat::UnspecifiedFormat)
-- continue;
--// qDebug() << " mux:" << outputFormat->name << outputFormat->long_name << outputFormat->mime_type << outputFormat->extensions << mediaFormat;
--
-- CodecMap encoder;
-- encoder.format = mediaFormat;
--
-- for (auto codec : audioEncoders) {
-- auto id = codecId(codec);
-- // only add the codec if it can be used with this container
-- if (avformat_query_codec(outputFormat, id, FF_COMPLIANCE_NORMAL) == 1) {
-- // add codec for container
--// qDebug() << " " << codec << Qt::hex << av_codec_get_tag(outputFormat->codec_tag, id);
-- encoder.audio.append(codec);
-- }
-- }
-- for (auto codec : videoEncoders) {
-- auto id = codecId(codec);
-- // only add the codec if it can be used with this container
-- if (avformat_query_codec(outputFormat, id, FF_COMPLIANCE_NORMAL) == 1) {
-- // add codec for container
--// qDebug() << " " << codec << Qt::hex << av_codec_get_tag(outputFormat->codec_tag, id);
-- encoder.video.append(codec);
-- }
-- }
--
-- // sanity checks and handling special cases
-- if (encoder.audio.isEmpty() && encoder.video.isEmpty())
-- continue;
-- switch (encoder.format) {
-- case QMediaFormat::WMV:
-- // add WMA
-- encoders.append({ QMediaFormat::WMA, encoder.audio, {} });
-- break;
-- case QMediaFormat::MPEG4:
-- // add Mpeg4Audio and QuickTime
-- encoders.append({ QMediaFormat::QuickTime, encoder.audio, encoder.video });
-- encoders.append({ QMediaFormat::Mpeg4Audio, encoder.audio, {} });
-- break;
-- case QMediaFormat::Wave:
-- // FFmpeg allows other encoded formats in WAV containers, but we do not want that
-- if (!encoder.audio.contains(QMediaFormat::AudioCodec::Wave))
-- continue;
-- encoder.audio = { QMediaFormat::AudioCodec::Wave };
-- break;
-- default:
-- break;
-- }
-- encoders.append(encoder);
-- }
--
-- // FFmpeg doesn't allow querying supported codecs for decoders
-- // we take a simple approximation stating that we can decode what we
-- // can encode. That's a safe subset.
-- decoders = encoders;
--
--// qDebug() << "extraDecoders:" << extraAudioDecoders << extraVideoDecoders;
-- // FFmpeg can currently only decode WMA and WMV, not encode
-- if (extraAudioDecoders.contains(QMediaFormat::AudioCodec::WMA)) {
-- decoders[QMediaFormat::WMA].audio.append(QMediaFormat::AudioCodec::WMA);
-- decoders[QMediaFormat::WMV].audio.append(QMediaFormat::AudioCodec::WMA);
-- }
-- if (extraVideoDecoders.contains(QMediaFormat::VideoCodec::WMV)) {
-- decoders[QMediaFormat::WMV].video.append(QMediaFormat::VideoCodec::WMV);
-- }
--
- // Add image formats we support. We currently simply use Qt's built-in image write
- // to save images. That doesn't give us HDR support or support for larger bit depths,
- // but most cameras can currently not generate those anyway.
-@@ -251,256 +28,5 @@ QFFmpegMediaFormatInfo::QFFmpegMediaFormatInfo()
-
- QFFmpegMediaFormatInfo::~QFFmpegMediaFormatInfo() = default;
-
--QMediaFormat::AudioCodec QFFmpegMediaFormatInfo::audioCodecForAVCodecId(AVCodecID id)
--{
-- for (const auto &c : audioCodecMap) {
-- if (c.id == id)
-- return c.codec;
-- }
-- return QMediaFormat::AudioCodec::Unspecified;
--}
--
--QMediaFormat::VideoCodec QFFmpegMediaFormatInfo::videoCodecForAVCodecId(AVCodecID id)
--{
-- for (const auto &c : videoCodecMap) {
-- if (c.id == id)
-- return c.codec;
-- }
-- return QMediaFormat::VideoCodec::Unspecified;
--}
--
--QMediaFormat::FileFormat
--QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(const AVInputFormat *format)
--{
-- // Seems like FFmpeg uses different names for muxers and demuxers of the same format.
-- // that makes it somewhat cumbersome to detect things correctly.
-- // The input formats have a comma separated list of short names. We check the first one of those
-- // as the docs specify that you only append to the list
-- static const struct
-- {
-- QMediaFormat::FileFormat fileFormat;
-- const char *name;
-- } map[QMediaFormat::LastFileFormat + 1] = {
-- { QMediaFormat::WMV, "asf" },
-- { QMediaFormat::AVI, "avi" },
-- { QMediaFormat::Matroska, "matroska" },
-- { QMediaFormat::MPEG4, "mov" },
-- { QMediaFormat::Ogg, "ogg" },
-- { QMediaFormat::WebM, "webm" },
-- // Audio Formats
-- // Mpeg4Audio is the same as MP4 without the video codecs
-- { QMediaFormat::AAC, "aac"},
-- // WMA is the same as WMV
-- { QMediaFormat::FLAC, "flac" },
-- { QMediaFormat::MP3, "mp3" },
-- { QMediaFormat::Wave, "wav" },
-- { QMediaFormat::UnspecifiedFormat, nullptr }
-- };
--
-- if (!format->name)
-- return QMediaFormat::UnspecifiedFormat;
--
-- auto *m = map;
-- while (m->fileFormat != QMediaFormat::UnspecifiedFormat) {
-- if (!strncmp(m->name, format->name, strlen(m->name)))
-- return m->fileFormat;
-- ++m;
-- }
--
-- return QMediaFormat::UnspecifiedFormat;
--}
--
--const AVOutputFormat *
--QFFmpegMediaFormatInfo::outputFormatForFileFormat(QMediaFormat::FileFormat format)
--{
-- return avFormatForFormat(format);
--}
--
--AVCodecID QFFmpegMediaFormatInfo::codecIdForVideoCodec(QMediaFormat::VideoCodec codec)
--{
-- return codecId(codec);
--}
--
--AVCodecID QFFmpegMediaFormatInfo::codecIdForAudioCodec(QMediaFormat::AudioCodec codec)
--{
-- return codecId(codec);
--}
--
--QAudioFormat::SampleFormat QFFmpegMediaFormatInfo::sampleFormat(AVSampleFormat format)
--{
-- switch (format) {
-- case AV_SAMPLE_FMT_NONE:
-- default:
-- return QAudioFormat::Unknown;
-- case AV_SAMPLE_FMT_U8: ///< unsigned 8 bits
-- case AV_SAMPLE_FMT_U8P: ///< unsigned 8 bits: planar
-- return QAudioFormat::UInt8;
-- case AV_SAMPLE_FMT_S16: ///< signed 16 bits
-- case AV_SAMPLE_FMT_S16P: ///< signed 16 bits: planar
-- return QAudioFormat::Int16;
-- case AV_SAMPLE_FMT_S32: ///< signed 32 bits
-- case AV_SAMPLE_FMT_S32P: ///< signed 32 bits: planar
-- return QAudioFormat::Int32;
-- case AV_SAMPLE_FMT_FLT: ///< float
-- case AV_SAMPLE_FMT_FLTP: ///< float: planar
-- return QAudioFormat::Float;
-- case AV_SAMPLE_FMT_DBL: ///< double
-- case AV_SAMPLE_FMT_DBLP: ///< double: planar
-- case AV_SAMPLE_FMT_S64: ///< signed 64 bits
-- case AV_SAMPLE_FMT_S64P: ///< signed 64 bits, planar
-- // let's use float
-- return QAudioFormat::Float;
-- }
--}
--
--AVSampleFormat QFFmpegMediaFormatInfo::avSampleFormat(QAudioFormat::SampleFormat format)
--{
-- switch (format) {
-- case QAudioFormat::UInt8:
-- return AV_SAMPLE_FMT_U8;
-- case QAudioFormat::Int16:
-- return AV_SAMPLE_FMT_S16;
-- case QAudioFormat::Int32:
-- return AV_SAMPLE_FMT_S32;
-- case QAudioFormat::Float:
-- return AV_SAMPLE_FMT_FLT;
-- default:
-- return AV_SAMPLE_FMT_NONE;
-- }
--}
--
--int64_t QFFmpegMediaFormatInfo::avChannelLayout(QAudioFormat::ChannelConfig channelConfig)
--{
-- int64_t avChannelLayout = 0;
-- if (channelConfig & (1 << QAudioFormat::FrontLeft))
-- avChannelLayout |= AV_CH_FRONT_LEFT;
-- if (channelConfig & (1 << QAudioFormat::FrontRight))
-- avChannelLayout |= AV_CH_FRONT_RIGHT;
-- if (channelConfig & (1 << QAudioFormat::FrontCenter))
-- avChannelLayout |= AV_CH_FRONT_CENTER;
-- if (channelConfig & (1 << QAudioFormat::LFE))
-- avChannelLayout |= AV_CH_LOW_FREQUENCY;
-- if (channelConfig & (1 << QAudioFormat::BackLeft))
-- avChannelLayout |= AV_CH_BACK_LEFT;
-- if (channelConfig & (1 << QAudioFormat::BackRight))
-- avChannelLayout |= AV_CH_BACK_RIGHT;
-- if (channelConfig & (1 << QAudioFormat::FrontLeftOfCenter))
-- avChannelLayout |= AV_CH_FRONT_LEFT_OF_CENTER;
-- if (channelConfig & (1 << QAudioFormat::FrontRightOfCenter))
-- avChannelLayout |= AV_CH_FRONT_RIGHT_OF_CENTER;
-- if (channelConfig & (1 << QAudioFormat::BackCenter))
-- avChannelLayout |= AV_CH_BACK_CENTER;
-- if (channelConfig & (1 << QAudioFormat::LFE2))
-- avChannelLayout |= AV_CH_LOW_FREQUENCY_2;
-- if (channelConfig & (1 << QAudioFormat::SideLeft))
-- avChannelLayout |= AV_CH_SIDE_LEFT;
-- if (channelConfig & (1 << QAudioFormat::SideRight))
-- avChannelLayout |= AV_CH_SIDE_RIGHT;
-- if (channelConfig & (1 << QAudioFormat::TopFrontLeft))
-- avChannelLayout |= AV_CH_TOP_FRONT_LEFT;
-- if (channelConfig & (1 << QAudioFormat::TopFrontRight))
-- avChannelLayout |= AV_CH_TOP_FRONT_RIGHT;
-- if (channelConfig & (1 << QAudioFormat::TopFrontCenter))
-- avChannelLayout |= AV_CH_TOP_FRONT_CENTER;
-- if (channelConfig & (1 << QAudioFormat::TopCenter))
-- avChannelLayout |= AV_CH_TOP_CENTER;
-- if (channelConfig & (1 << QAudioFormat::TopBackLeft))
-- avChannelLayout |= AV_CH_TOP_BACK_LEFT;
-- if (channelConfig & (1 << QAudioFormat::TopBackRight))
-- avChannelLayout |= AV_CH_TOP_BACK_RIGHT;
-- if (channelConfig & (1 << QAudioFormat::TopBackCenter))
-- avChannelLayout |= AV_CH_TOP_BACK_CENTER;
-- // The defines used below got added together for FFmpeg 4.4
--#ifdef AV_CH_TOP_SIDE_LEFT
-- if (channelConfig & (1 << QAudioFormat::TopSideLeft))
-- avChannelLayout |= AV_CH_TOP_SIDE_LEFT;
-- if (channelConfig & (1 << QAudioFormat::TopSideRight))
-- avChannelLayout |= AV_CH_TOP_SIDE_RIGHT;
-- if (channelConfig & (1 << QAudioFormat::BottomFrontCenter))
-- avChannelLayout |= AV_CH_BOTTOM_FRONT_CENTER;
-- if (channelConfig & (1 << QAudioFormat::BottomFrontLeft))
-- avChannelLayout |= AV_CH_BOTTOM_FRONT_LEFT;
-- if (channelConfig & (1 << QAudioFormat::BottomFrontRight))
-- avChannelLayout |= AV_CH_BOTTOM_FRONT_RIGHT;
--#endif
-- return avChannelLayout;
--}
--
--QAudioFormat::ChannelConfig QFFmpegMediaFormatInfo::channelConfigForAVLayout(int64_t avChannelLayout)
--{
-- quint32 channelConfig = 0;
-- if (avChannelLayout & AV_CH_FRONT_LEFT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontLeft);
-- if (avChannelLayout & AV_CH_FRONT_RIGHT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontRight);
-- if (avChannelLayout & AV_CH_FRONT_CENTER)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontCenter);
-- if (avChannelLayout & AV_CH_LOW_FREQUENCY)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::LFE);
-- if (avChannelLayout & AV_CH_BACK_LEFT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BackLeft);
-- if (avChannelLayout & AV_CH_BACK_RIGHT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BackRight);
-- if (avChannelLayout & AV_CH_FRONT_LEFT_OF_CENTER)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontLeftOfCenter);
-- if (avChannelLayout & AV_CH_FRONT_RIGHT_OF_CENTER)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontRightOfCenter);
-- if (avChannelLayout & AV_CH_BACK_CENTER)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BackCenter);
-- if (avChannelLayout & AV_CH_LOW_FREQUENCY_2)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::LFE2);
-- if (avChannelLayout & AV_CH_SIDE_LEFT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::SideLeft);
-- if (avChannelLayout & AV_CH_SIDE_RIGHT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::SideRight);
-- if (avChannelLayout & AV_CH_TOP_FRONT_LEFT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopFrontLeft);
-- if (avChannelLayout & AV_CH_TOP_FRONT_RIGHT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopFrontRight);
-- if (avChannelLayout & AV_CH_TOP_FRONT_CENTER)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopFrontCenter);
-- if (avChannelLayout & AV_CH_TOP_CENTER)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopCenter);
-- if (avChannelLayout & AV_CH_TOP_BACK_LEFT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopBackLeft);
-- if (avChannelLayout & AV_CH_TOP_BACK_RIGHT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopBackRight);
-- if (avChannelLayout & AV_CH_TOP_BACK_CENTER)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopBackCenter);
-- // The defines used below got added together for FFmpeg 4.4
--#ifdef AV_CH_TOP_SIDE_LEFT
-- if (avChannelLayout & AV_CH_TOP_SIDE_LEFT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopSideLeft);
-- if (avChannelLayout & AV_CH_TOP_SIDE_RIGHT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopSideRight);
-- if (avChannelLayout & AV_CH_BOTTOM_FRONT_CENTER)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BottomFrontCenter);
-- if (avChannelLayout & AV_CH_BOTTOM_FRONT_LEFT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BottomFrontLeft);
-- if (avChannelLayout & AV_CH_BOTTOM_FRONT_RIGHT)
-- channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BottomFrontRight);
--#endif
-- return QAudioFormat::ChannelConfig(channelConfig);
--}
--
--QAudioFormat QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(AVCodecParameters *codecpar)
--{
-- QAudioFormat format;
-- format.setSampleFormat(sampleFormat(AVSampleFormat(codecpar->format)));
-- format.setSampleRate(codecpar->sample_rate);
--#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
-- uint64_t channelLayout = codecpar->channel_layout;
-- if (!channelLayout)
-- channelLayout = avChannelLayout(QAudioFormat::defaultChannelConfigForChannelCount(codecpar->channels));
--#else
-- uint64_t channelLayout = 0;
-- if (codecpar->ch_layout.order == AV_CHANNEL_ORDER_NATIVE)
-- channelLayout = codecpar->ch_layout.u.mask;
-- else
-- channelLayout = avChannelLayout(QAudioFormat::defaultChannelConfigForChannelCount(codecpar->ch_layout.nb_channels));
--#endif
-- format.setChannelConfig(channelConfigForAVLayout(channelLayout));
-- return format;
--}
-
- QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo_p.h
-index 52fcf6f72..e34005bbf 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo_p.h
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo_p.h
-@@ -19,7 +19,6 @@
- #include <qhash.h>
- #include <qlist.h>
- #include <qaudioformat.h>
--#include "qffmpeg_p.h"
-
- QT_BEGIN_NAMESPACE
-
-@@ -28,23 +27,6 @@ class QFFmpegMediaFormatInfo : public QPlatformMediaFormatInfo
- public:
- QFFmpegMediaFormatInfo();
- ~QFFmpegMediaFormatInfo();
--
-- static QMediaFormat::VideoCodec videoCodecForAVCodecId(AVCodecID id);
-- static QMediaFormat::AudioCodec audioCodecForAVCodecId(AVCodecID id);
-- static QMediaFormat::FileFormat fileFormatForAVInputFormat(const AVInputFormat *format);
--
-- static const AVOutputFormat *outputFormatForFileFormat(QMediaFormat::FileFormat format);
--
-- static AVCodecID codecIdForVideoCodec(QMediaFormat::VideoCodec codec);
-- static AVCodecID codecIdForAudioCodec(QMediaFormat::AudioCodec codec);
--
-- static QAudioFormat::SampleFormat sampleFormat(AVSampleFormat format);
-- static AVSampleFormat avSampleFormat(QAudioFormat::SampleFormat format);
--
-- static int64_t avChannelLayout(QAudioFormat::ChannelConfig channelConfig);
-- static QAudioFormat::ChannelConfig channelConfigForAVLayout(int64_t avChannelLayout);
--
-- static QAudioFormat audioFormatFromCodecParameters(AVCodecParameters *codecPar);
- };
-
- QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
-index e55444cc2..c07c0ebc7 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
-@@ -5,13 +5,9 @@
- #include <qcameradevice.h>
- #include "qffmpegmediaintegration_p.h"
- #include "qffmpegmediaformatinfo_p.h"
--#include "qffmpegmediaplayer_p.h"
- #include "qffmpegvideosink_p.h"
- #include "qffmpegmediacapturesession_p.h"
--#include "qffmpegmediarecorder_p.h"
- #include "qffmpegimagecapture_p.h"
--#include "qffmpegaudioinput_p.h"
--#include "qffmpegaudiodecoder_p.h"
-
- #ifdef Q_OS_MACOS
- #include <VideoToolbox/VideoToolbox.h>
-@@ -24,13 +20,6 @@
- #include "qwindowsvideodevices_p.h"
- #endif
-
--#ifdef Q_OS_ANDROID
--# include "jni.h"
--extern "C" {
--# include <libavcodec/jni.h>
--}
--#endif
--
- #if QT_CONFIG(linux_v4l)
- #include "qv4l2camera_p.h"
- #endif
-@@ -86,21 +75,11 @@ QPlatformMediaFormatInfo *QFFmpegMediaIntegration::formatInfo()
- return m_formatsInfo;
- }
-
--QMaybe<QPlatformAudioDecoder *> QFFmpegMediaIntegration::createAudioDecoder(QAudioDecoder *decoder)
--{
-- return new QFFmpegAudioDecoder(decoder);
--}
--
- QMaybe<QPlatformMediaCaptureSession *> QFFmpegMediaIntegration::createCaptureSession()
- {
- return new QFFmpegMediaCaptureSession();
- }
-
--QMaybe<QPlatformMediaPlayer *> QFFmpegMediaIntegration::createPlayer(QMediaPlayer *player)
--{
-- return new QFFmpegMediaPlayer(player);
--}
--
- QMaybe<QPlatformCamera *> QFFmpegMediaIntegration::createCamera(QCamera *camera)
- {
- #ifdef Q_OS_DARWIN
-@@ -115,11 +94,6 @@ QMaybe<QPlatformCamera *> QFFmpegMediaIntegration::createCamera(QCamera *camera)
- #endif
- }
-
--QMaybe<QPlatformMediaRecorder *> QFFmpegMediaIntegration::createRecorder(QMediaRecorder *recorder)
--{
-- return new QFFmpegMediaRecorder(recorder);
--}
--
- QMaybe<QPlatformImageCapture *> QFFmpegMediaIntegration::createImageCapture(QImageCapture *imageCapture)
- {
- return new QFFmpegImageCapture(imageCapture);
-@@ -130,11 +104,6 @@ QMaybe<QPlatformVideoSink *> QFFmpegMediaIntegration::createVideoSink(QVideoSink
- return new QFFmpegVideoSink(sink);
- }
-
--QMaybe<QPlatformAudioInput *> QFFmpegMediaIntegration::createAudioInput(QAudioInput *input)
--{
-- return new QFFmpegAudioInput(input);
--}
--
- #ifdef Q_OS_ANDROID
- Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)
- {
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
-index 35c062f16..8b44da741 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
-@@ -30,19 +30,11 @@ public:
- static QFFmpegMediaIntegration *instance() { return static_cast<QFFmpegMediaIntegration *>(QPlatformMediaIntegration::instance()); }
- QPlatformMediaFormatInfo *formatInfo() override;
-
--
-- QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *decoder) override;
- QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
-- QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *player) override;
- QMaybe<QPlatformCamera *> createCamera(QCamera *) override;
-- QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *) override;
- QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *) override;
--
- QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *sink) override;
-
-- QMaybe<QPlatformAudioInput *> createAudioInput(QAudioInput *input) override;
--// QPlatformAudioOutput *createAudioOutput(QAudioOutput *) override;
--
- QFFmpegMediaFormatInfo *m_formatsInfo = nullptr;
- };
-
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp
-index fecce3f1b..dda577d44 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp
-@@ -64,114 +64,9 @@ static const char *keyToTag(QMediaMetaData::Key key)
- return nullptr;
- }
-
--//internal
--void QFFmpegMetaData::addEntry(QMediaMetaData &metaData, AVDictionaryEntry *entry)
--{
--// qDebug() << " checking:" << entry->key << entry->value;
-- QByteArray tag(entry->key);
-- QMediaMetaData::Key key = tagToKey(tag.toLower());
-- if (key == QMediaMetaData::Key(-1))
-- return;
--// qDebug() << " adding" << key;
--
-- auto *map = &metaData;
--
-- int metaTypeId = keyType(key).id();
-- switch (metaTypeId) {
-- case qMetaTypeId<QString>():
-- map->insert(key, QString::fromUtf8(entry->value));
-- return;
-- case qMetaTypeId<QStringList>():
-- map->insert(key, QString::fromUtf8(entry->value).split(QLatin1Char(',')));
-- return;
-- case qMetaTypeId<QDateTime>(): {
-- QDateTime date;
-- if (!qstrcmp(entry->key, "year")) {
-- if (map->keys().contains(QMediaMetaData::Date))
-- return;
-- date = QDateTime(QDate(QByteArray(entry->value).toInt(), 1, 1), QTime(0, 0, 0));
-- } else {
-- date = QDateTime::fromString(QString::fromUtf8(entry->value), Qt::ISODate);
-- }
-- map->insert(key, date);
-- return;
-- }
-- case qMetaTypeId<QUrl>():
-- map->insert(key, QUrl::fromEncoded(entry->value));
-- return;
-- case qMetaTypeId<qint64>():
-- map->insert(key, (qint64)QByteArray(entry->value).toLongLong());
-- return;
-- case qMetaTypeId<int>():
-- map->insert(key, QByteArray(entry->value).toInt());
-- return;
-- case qMetaTypeId<qreal>():
-- map->insert(key, (qreal)QByteArray(entry->value).toDouble());
-- return;
-- default:
-- break;
-- }
-- if (metaTypeId == qMetaTypeId<QLocale::Language>()) {
-- map->insert(key, QVariant::fromValue(QLocale::codeToLanguage(QString::fromUtf8(entry->value), QLocale::ISO639Part2)));
-- }
--}
--
--
--QMediaMetaData QFFmpegMetaData::fromAVMetaData(const AVDictionary *tags)
--{
-- QMediaMetaData metaData;
-- AVDictionaryEntry *entry = nullptr;
-- while ((entry = av_dict_get(tags, "", entry, AV_DICT_IGNORE_SUFFIX)))
-- addEntry(metaData, entry);
--
-- return metaData;
--}
--
- QByteArray QFFmpegMetaData::value(const QMediaMetaData &metaData, QMediaMetaData::Key key)
- {
--// qDebug() << " checking:" << entry->key << entry->value;
--
-- const int metaTypeId = keyType(key).id();
-- const QVariant val = metaData.value(key);
-- switch (metaTypeId) {
-- case qMetaTypeId<QString>():
-- return val.toString().toUtf8();
-- case qMetaTypeId<QStringList>():
-- return val.toStringList().join(u",").toUtf8();
-- case qMetaTypeId<QDateTime>():
-- return val.toDateTime().toString(Qt::ISODate).toUtf8();
-- case qMetaTypeId<QUrl>():
-- return val.toUrl().toEncoded();
-- case qMetaTypeId<qint64>():
-- case qMetaTypeId<int>():
-- return QByteArray::number(val.toLongLong());
-- case qMetaTypeId<qreal>():
-- return QByteArray::number(val.toDouble());
-- default:
-- break;
-- }
-- if (metaTypeId == qMetaTypeId<QLocale::Language>())
-- return QLocale::languageToCode(val.value<QLocale::Language>(), QLocale::ISO639Part2).toUtf8();
- return {};
- }
-
--
--AVDictionary *QFFmpegMetaData::toAVMetaData(const QMediaMetaData &metaData)
--{
-- const QList<Key> keys = metaData.keys();
-- AVDictionary *dict = nullptr;
-- for (const auto &k : keys) {
-- const char *key = ::keyToTag(k);
-- if (!key)
-- continue;
-- QByteArray val = value(metaData, k);
-- if (val.isEmpty())
-- continue;
-- av_dict_set(&dict, key, val.constData(), 0);
-- }
-- return dict;
--}
--
--
--
- QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediametadata_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata_p.h
-index 201287495..95b069b64 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediametadata_p.h
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata_p.h
-@@ -16,18 +16,13 @@
- //
-
- #include <qmediametadata.h>
--#include <qffmpeg_p.h>
-
- QT_BEGIN_NAMESPACE
-
- class QFFmpegMetaData : public QMediaMetaData
- {
- public:
-- static void addEntry(QMediaMetaData &metaData, AVDictionaryEntry *entry);
-- static QMediaMetaData fromAVMetaData(const AVDictionary *tags);
--
- static QByteArray value(const QMediaMetaData &metaData, QMediaMetaData::Key key);
-- static AVDictionary *toAVMetaData(const QMediaMetaData &metaData);
- };
-
- QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
-deleted file mode 100644
-index 5e6062f42..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
-+++ /dev/null
-@@ -1,236 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpegmediaplayer_p.h"
--#include "qffmpegdecoder_p.h"
--#include "qffmpegmediaformatinfo_p.h"
--#include "qlocale.h"
--#include "qffmpeg_p.h"
--#include "qffmpegmediametadata_p.h"
--#include "qffmpegvideobuffer_p.h"
--#include "private/qplatformaudiooutput_p.h"
--#include "qvideosink.h"
--#include "qaudiosink.h"
--#include "qaudiooutput.h"
--
--#include <qlocale.h>
--#include <qthread.h>
--#include <qatomic.h>
--#include <qwaitcondition.h>
--#include <qmutex.h>
--#include <qtimer.h>
--#include <qqueue.h>
--
--#include <qloggingcategory.h>
--
--QT_BEGIN_NAMESPACE
--
--using namespace QFFmpeg;
--
--QFFmpegMediaPlayer::QFFmpegMediaPlayer(QMediaPlayer *player)
-- : QPlatformMediaPlayer(player)
--{
-- positionUpdateTimer.setInterval(100);
-- positionUpdateTimer.setTimerType(Qt::PreciseTimer);
-- connect(&positionUpdateTimer, &QTimer::timeout, this, &QFFmpegMediaPlayer::updatePosition);
--}
--
--QFFmpegMediaPlayer::~QFFmpegMediaPlayer()
--{
-- delete decoder;
--}
--
--qint64 QFFmpegMediaPlayer::duration() const
--{
-- return decoder ? decoder->m_duration/1000 : 0;
--}
--
--void QFFmpegMediaPlayer::setPosition(qint64 position)
--{
-- if (decoder) {
-- decoder->seek(position * 1000);
-- updatePosition();
-- }
-- if (state() == QMediaPlayer::StoppedState)
-- mediaStatusChanged(QMediaPlayer::LoadedMedia);
--}
--
--void QFFmpegMediaPlayer::updatePosition()
--{
-- positionChanged(decoder ? decoder->clockController.currentTime() / 1000 : 0);
--}
--
--void QFFmpegMediaPlayer::endOfStream()
--{
-- positionChanged(duration());
-- stateChanged(QMediaPlayer::StoppedState);
-- mediaStatusChanged(QMediaPlayer::EndOfMedia);
--}
--
--float QFFmpegMediaPlayer::bufferProgress() const
--{
-- return 1.;
--}
--
--QMediaTimeRange QFFmpegMediaPlayer::availablePlaybackRanges() const
--{
-- return {};
--}
--
--qreal QFFmpegMediaPlayer::playbackRate() const
--{
-- return m_playbackRate;
--}
--
--void QFFmpegMediaPlayer::setPlaybackRate(qreal rate)
--{
-- if (m_playbackRate == rate)
-- return;
-- m_playbackRate = rate;
-- if (decoder)
-- decoder->setPlaybackRate(rate);
--}
--
--QUrl QFFmpegMediaPlayer::media() const
--{
-- return m_url;
--}
--
--const QIODevice *QFFmpegMediaPlayer::mediaStream() const
--{
-- return m_device;
--}
--
--void QFFmpegMediaPlayer::setMedia(const QUrl &media, QIODevice *stream)
--{
-- m_url = media;
-- m_device = stream;
-- if (decoder)
-- delete decoder;
-- decoder = nullptr;
--
-- positionChanged(0);
--
-- if (media.isEmpty() && !stream) {
-- seekableChanged(false);
-- audioAvailableChanged(false);
-- videoAvailableChanged(false);
-- metaDataChanged();
-- mediaStatusChanged(QMediaPlayer::NoMedia);
-- return;
-- }
--
-- mediaStatusChanged(QMediaPlayer::LoadingMedia);
-- decoder = new Decoder;
-- connect(decoder, &Decoder::endOfStream, this, &QFFmpegMediaPlayer::endOfStream);
-- connect(decoder, &Decoder::errorOccured, this, &QFFmpegMediaPlayer::error);
-- decoder->setMedia(media, stream);
-- decoder->setAudioSink(m_audioOutput);
-- decoder->setVideoSink(m_videoSink);
--
-- durationChanged(duration());
-- tracksChanged();
-- metaDataChanged();
-- seekableChanged(decoder->isSeekable());
--
-- audioAvailableChanged(!decoder->m_streamMap[QPlatformMediaPlayer::AudioStream].isEmpty());
-- videoAvailableChanged(!decoder->m_streamMap[QPlatformMediaPlayer::VideoStream].isEmpty());
--
--
-- QMetaObject::invokeMethod(this, "delayedLoadedStatus", Qt::QueuedConnection);
--}
--
--void QFFmpegMediaPlayer::play()
--{
-- if (!decoder)
-- return;
--
-- if (mediaStatus() == QMediaPlayer::EndOfMedia && state() == QMediaPlayer::StoppedState) {
-- decoder->seek(0);
-- positionChanged(0);
-- }
-- decoder->play();
-- positionUpdateTimer.start();
-- stateChanged(QMediaPlayer::PlayingState);
-- mediaStatusChanged(QMediaPlayer::BufferedMedia);
--}
--
--void QFFmpegMediaPlayer::pause()
--{
-- if (!decoder)
-- return;
-- if (mediaStatus() == QMediaPlayer::EndOfMedia && state() == QMediaPlayer::StoppedState) {
-- decoder->seek(0);
-- positionChanged(0);
-- }
-- decoder->pause();
-- positionUpdateTimer.stop();
-- stateChanged(QMediaPlayer::PausedState);
-- mediaStatusChanged(QMediaPlayer::BufferedMedia);
--}
--
--void QFFmpegMediaPlayer::stop()
--{
-- if (!decoder)
-- return;
-- decoder->stop();
-- positionUpdateTimer.stop();
-- positionChanged(0);
-- stateChanged(QMediaPlayer::StoppedState);
-- mediaStatusChanged(QMediaPlayer::LoadedMedia);
--}
--
--void QFFmpegMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
--{
-- if (m_audioOutput == output)
-- return;
--
-- m_audioOutput = output;
-- if (decoder)
-- decoder->setAudioSink(output);
--}
--
--QMediaMetaData QFFmpegMediaPlayer::metaData() const
--{
-- return decoder ? decoder->m_metaData : QMediaMetaData{};
--}
--
--void QFFmpegMediaPlayer::setVideoSink(QVideoSink *sink)
--{
-- if (m_videoSink == sink)
-- return;
--
-- m_videoSink = sink;
-- if (decoder)
-- decoder->setVideoSink(sink);
--}
--
--QVideoSink *QFFmpegMediaPlayer::videoSink() const
--{
-- return m_videoSink;
--}
--
--int QFFmpegMediaPlayer::trackCount(TrackType type)
--{
-- return decoder ? decoder->m_streamMap[type].count() : 0;
--}
--
--QMediaMetaData QFFmpegMediaPlayer::trackMetaData(TrackType type, int streamNumber)
--{
-- if (!decoder || streamNumber < 0 || streamNumber >= decoder->m_streamMap[type].count())
-- return {};
-- return decoder->m_streamMap[type].at(streamNumber).metaData;
--}
--
--int QFFmpegMediaPlayer::activeTrack(TrackType type)
--{
-- return decoder ? decoder->m_requestedStreams[type] : -1;
--}
--
--void QFFmpegMediaPlayer::setActiveTrack(TrackType type, int streamNumber)
--{
-- if (decoder)
-- decoder->setActiveTrack(type, streamNumber);
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
-deleted file mode 100644
-index 8e2753c82..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
-+++ /dev/null
-@@ -1,98 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#ifndef QFFMPEGMEDIAPLAYER_H
--#define QFFMPEGMEDIAPLAYER_H
--
--#include <private/qplatformmediaplayer_p.h>
--#include <qmediametadata.h>
--#include <qtimer.h>
--#include "qffmpeg_p.h"
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg {
--class Decoder;
--}
--class QPlatformAudioOutput;
--
--class QFFmpegMediaPlayer : public QObject, public QPlatformMediaPlayer
--{
-- Q_OBJECT
--public:
-- QFFmpegMediaPlayer(QMediaPlayer *player);
-- ~QFFmpegMediaPlayer();
--
-- qint64 duration() const override;
--
-- void setPosition(qint64 position) override;
--
-- float bufferProgress() const override;
--
-- QMediaTimeRange availablePlaybackRanges() const override;
--
-- qreal playbackRate() const override;
-- void setPlaybackRate(qreal rate) override;
--
-- QUrl media() const override;
-- const QIODevice *mediaStream() const override;
-- void setMedia(const QUrl &media, QIODevice *stream) override;
--
-- void play() override;
-- void pause() override;
-- void stop() override;
--
--// bool streamPlaybackSupported() const { return false; }
--
-- void setAudioOutput(QPlatformAudioOutput *) override;
--
-- QMediaMetaData metaData() const override;
--
-- void setVideoSink(QVideoSink *sink) override;
-- QVideoSink *videoSink() const;
--
-- int trackCount(TrackType) override;
-- QMediaMetaData trackMetaData(TrackType type, int streamNumber) override;
-- int activeTrack(TrackType) override;
-- void setActiveTrack(TrackType, int streamNumber) override;
--
-- Q_INVOKABLE void delayedLoadedStatus() { mediaStatusChanged(QMediaPlayer::LoadedMedia); }
--
--private slots:
-- void updatePosition();
-- void endOfStream();
-- void error(int error, const QString &errorString)
-- {
-- QPlatformMediaPlayer::error(error, errorString);
-- }
--
--private:
-- friend class QFFmpeg::Decoder;
--
-- QTimer positionUpdateTimer;
--
-- QFFmpeg::Decoder *decoder = nullptr;
-- QPlatformAudioOutput *m_audioOutput = nullptr;
-- QVideoSink *m_videoSink = nullptr;
--
-- QUrl m_url;
-- QIODevice *m_device = nullptr;
-- float m_playbackRate = 1.;
--};
--
--QT_END_NAMESPACE
--
--
--#endif // QMEDIAPLAYERCONTROL_H
--
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
-deleted file mode 100644
-index 870b8ad38..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
-+++ /dev/null
-@@ -1,157 +0,0 @@
--// Copyright (C) 2016 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpegmediarecorder_p.h"
--#include "qaudiodevice.h"
--#include <private/qmediastoragelocation_p.h>
--#include <private/qplatformcamera_p.h>
--#include "qaudiosource.h"
--#include "qffmpegaudioinput_p.h"
--#include "qaudiobuffer.h"
--#include "qffmpegencoder_p.h"
--#include "qffmpegmediaformatinfo_p.h"
--
--#include <qdebug.h>
--#include <qeventloop.h>
--#include <qstandardpaths.h>
--#include <qmimetype.h>
--#include <qloggingcategory.h>
--
--Q_LOGGING_CATEGORY(qLcMediaEncoder, "qt.multimedia.encoder")
--
--QFFmpegMediaRecorder::QFFmpegMediaRecorder(QMediaRecorder *parent)
-- : QPlatformMediaRecorder(parent)
--{
--}
--
--QFFmpegMediaRecorder::~QFFmpegMediaRecorder()
--{
-- if (encoder)
-- encoder->finalize();
--}
--
--bool QFFmpegMediaRecorder::isLocationWritable(const QUrl &) const
--{
-- return true;
--}
--
--void QFFmpegMediaRecorder::handleSessionError(QMediaRecorder::Error code, const QString &description)
--{
-- error(code, description);
-- stop();
--}
--
--void QFFmpegMediaRecorder::record(QMediaEncoderSettings &settings)
--{
-- if (!m_session || state() != QMediaRecorder::StoppedState)
-- return;
--
-- const auto hasVideo = m_session->camera() && m_session->camera()->isActive();
-- const auto hasAudio = m_session->audioInput() != nullptr;
--
-- if (!hasVideo && !hasAudio) {
-- error(QMediaRecorder::ResourceError, QMediaRecorder::tr("No camera or audio input"));
-- return;
-- }
--
-- const auto audioOnly = settings.videoCodec() == QMediaFormat::VideoCodec::Unspecified;
--
-- auto primaryLocation = audioOnly ? QStandardPaths::MusicLocation : QStandardPaths::MoviesLocation;
-- auto container = settings.mimeType().preferredSuffix();
-- auto location = QMediaStorageLocation::generateFileName(outputLocation().toLocalFile(), primaryLocation, container);
--
-- QUrl actualSink = QUrl::fromLocalFile(QDir::currentPath()).resolved(location);
-- qCDebug(qLcMediaEncoder) << "recording new video to" << actualSink;
-- qDebug() << "requested format:" << settings.fileFormat() << settings.audioCodec();
--
-- Q_ASSERT(!actualSink.isEmpty());
--
-- encoder = new QFFmpeg::Encoder(settings, actualSink);
-- encoder->setMetaData(m_metaData);
-- connect(encoder, &QFFmpeg::Encoder::durationChanged, this, &QFFmpegMediaRecorder::newDuration);
-- connect(encoder, &QFFmpeg::Encoder::finalizationDone, this, &QFFmpegMediaRecorder::finalizationDone);
-- connect(encoder, &QFFmpeg::Encoder::error, this, &QFFmpegMediaRecorder::handleSessionError);
--
-- auto *audioInput = m_session->audioInput();
-- if (audioInput)
-- encoder->addAudioInput(static_cast<QFFmpegAudioInput *>(audioInput));
--
-- auto *camera = m_session->camera();
-- if (camera)
-- encoder->addVideoSource(camera);
--
-- durationChanged(0);
-- stateChanged(QMediaRecorder::RecordingState);
-- actualLocationChanged(QUrl::fromLocalFile(location));
--
-- encoder->start();
--}
--
--void QFFmpegMediaRecorder::pause()
--{
-- if (!m_session || state() != QMediaRecorder::RecordingState)
-- return;
--
-- Q_ASSERT(encoder);
-- encoder->setPaused(true);
--
-- stateChanged(QMediaRecorder::PausedState);
--}
--
--void QFFmpegMediaRecorder::resume()
--{
-- if (!m_session || state() != QMediaRecorder::PausedState)
-- return;
--
-- Q_ASSERT(encoder);
-- encoder->setPaused(false);
--
-- stateChanged(QMediaRecorder::RecordingState);
--}
--
--void QFFmpegMediaRecorder::stop()
--{
-- if (!m_session || state() == QMediaRecorder::StoppedState)
-- return;
-- auto * input = m_session ? m_session->audioInput() : nullptr;
-- if (input)
-- static_cast<QFFmpegAudioInput *>(input)->setRunning(false);
-- qCDebug(qLcMediaEncoder) << "stop";
-- // ### all of the below should be done asynchronous. finalize() should do it's work in a thread
-- // to avoid blocking the UI in case of slow codecs
-- if (encoder) {
-- encoder->finalize();
-- encoder = nullptr;
-- }
--}
--
--void QFFmpegMediaRecorder::finalizationDone()
--{
-- stateChanged(QMediaRecorder::StoppedState);
--}
--
--void QFFmpegMediaRecorder::setMetaData(const QMediaMetaData &metaData)
--{
-- if (!m_session)
-- return;
-- m_metaData = metaData;
--}
--
--QMediaMetaData QFFmpegMediaRecorder::metaData() const
--{
-- return m_metaData;
--}
--
--void QFFmpegMediaRecorder::setCaptureSession(QPlatformMediaCaptureSession *session)
--{
-- auto *captureSession = static_cast<QFFmpegMediaCaptureSession *>(session);
-- if (m_session == captureSession)
-- return;
--
-- if (m_session)
-- stop();
--
-- m_session = captureSession;
-- if (!m_session)
-- return;
--}
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
-deleted file mode 100644
-index 26f4c16ad..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
-+++ /dev/null
-@@ -1,68 +0,0 @@
--// Copyright (C) 2016 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--
--#ifndef QFFMPEGMEDIARECODER_H
--#define QFFMPEGMEDIARECODER_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include <private/qplatformmediarecorder_p.h>
--#include "qffmpegmediacapturesession_p.h"
--
--#include "qffmpeg_p.h"
--
--QT_BEGIN_NAMESPACE
--
--class QAudioSource;
--class QAudioSourceIO;
--class QAudioBuffer;
--class QMediaMetaData;
--
--namespace QFFmpeg {
--class Encoder;
--}
--
--class QFFmpegMediaRecorder : public QObject, public QPlatformMediaRecorder
--{
-- Q_OBJECT
--public:
-- QFFmpegMediaRecorder(QMediaRecorder *parent);
-- virtual ~QFFmpegMediaRecorder();
--
-- bool isLocationWritable(const QUrl &sink) const override;
--
-- void record(QMediaEncoderSettings &settings) override;
-- void pause() override;
-- void resume() override;
-- void stop() override;
--
-- void setMetaData(const QMediaMetaData &) override;
-- QMediaMetaData metaData() const override;
--
-- void setCaptureSession(QPlatformMediaCaptureSession *session);
--
--private Q_SLOTS:
-- void newDuration(qint64 d) { durationChanged(d); }
-- void finalizationDone();
-- void handleSessionError(QMediaRecorder::Error code, const QString &description);
--
--private:
-- QFFmpegMediaCaptureSession *m_session = nullptr;
-- QMediaMetaData m_metaData;
--
-- QFFmpeg::Encoder *encoder = nullptr;
--};
--
--QT_END_NAMESPACE
--
--#endif // QFFMPEGMEDIARECODER_H
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp b/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
-deleted file mode 100644
-index bb15aa0e1..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
-+++ /dev/null
-@@ -1,95 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#include "qffmpegresampler_p.h"
--#include "qffmpegdecoder_p.h"
--#include "qffmpegmediaformatinfo_p.h"
--#include <qloggingcategory.h>
--
--extern "C" {
--#include <libavutil/opt.h>
--}
--
--Q_LOGGING_CATEGORY(qLcResampler, "qt.multimedia.ffmpeg.resampler")
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg
--{
--
--Resampler::Resampler(const Codec *codec, const QAudioFormat &outputFormat)
-- : m_outputFormat(outputFormat)
--{
-- qCDebug(qLcResampler) << "createResampler";
-- const AVStream *audioStream = codec->stream();
-- const auto *codecpar = audioStream->codecpar;
--
-- if (!m_outputFormat.isValid())
-- // want the native format
-- m_outputFormat = QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(audioStream->codecpar);
--
-- QAudioFormat::ChannelConfig config = m_outputFormat.channelConfig();
-- if (config == QAudioFormat::ChannelConfigUnknown)
-- config = QAudioFormat::defaultChannelConfigForChannelCount(m_outputFormat.channelCount());
--
--
-- qCDebug(qLcResampler) << "init resampler" << m_outputFormat.sampleRate() << config << codecpar->sample_rate;
--#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
-- auto inConfig = codecpar->channel_layout;
-- if (inConfig == 0)
-- inConfig = QFFmpegMediaFormatInfo::avChannelLayout(QAudioFormat::defaultChannelConfigForChannelCount(codecpar->channels));
-- resampler = swr_alloc_set_opts(nullptr, // we're allocating a new context
-- QFFmpegMediaFormatInfo::avChannelLayout(config), // out_ch_layout
-- QFFmpegMediaFormatInfo::avSampleFormat(m_outputFormat.sampleFormat()), // out_sample_fmt
-- m_outputFormat.sampleRate(), // out_sample_rate
-- inConfig, // in_ch_layout
-- AVSampleFormat(codecpar->format), // in_sample_fmt
-- codecpar->sample_rate, // in_sample_rate
-- 0, // log_offset
-- nullptr);
--#else
-- AVChannelLayout in_ch_layout = codecpar->ch_layout;
-- AVChannelLayout out_ch_layout = {};
-- av_channel_layout_from_mask(&out_ch_layout, QFFmpegMediaFormatInfo::avChannelLayout(config));
-- swr_alloc_set_opts2(&resampler, // we're allocating a new context
-- &out_ch_layout,
-- QFFmpegMediaFormatInfo::avSampleFormat(m_outputFormat.sampleFormat()),
-- m_outputFormat.sampleRate(),
-- &in_ch_layout,
-- AVSampleFormat(codecpar->format),
-- codecpar->sample_rate,
-- 0,
-- nullptr);
--#endif
-- // if we're not the master clock, we might need to handle clock adjustments, initialize for that
-- av_opt_set_double(resampler, "async", m_outputFormat.sampleRate()/50, 0);
--
-- swr_init(resampler);
--}
--
--Resampler::~Resampler()
--{
-- swr_free(&resampler);
--}
--
--QAudioBuffer Resampler::resample(const AVFrame *frame)
--{
-- const int outSamples = swr_get_out_samples(resampler, frame->nb_samples);
-- QByteArray samples(m_outputFormat.bytesForFrames(outSamples), Qt::Uninitialized);
-- auto **in = const_cast<const uint8_t **>(frame->extended_data);
-- auto *out = reinterpret_cast<uint8_t *>(samples.data());
-- const int out_samples = swr_convert(resampler, &out, outSamples,
-- in, frame->nb_samples);
-- samples.resize(m_outputFormat.bytesForFrames(out_samples));
--
-- qint64 startTime = m_outputFormat.durationForFrames(m_samplesProcessed);
-- m_samplesProcessed += out_samples;
--
-- qCDebug(qLcResampler) << " new frame" << startTime << "in_samples" << frame->nb_samples << out_samples << outSamples;
-- QAudioBuffer buffer(samples, m_outputFormat, startTime);
-- return buffer;
--}
--
--
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h b/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
-deleted file mode 100644
-index 4b5b59537..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
-+++ /dev/null
-@@ -1,46 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGRESAMPLER_P_H
--#define QFFMPEGRESAMPLER_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qaudiobuffer.h"
--#include "qffmpeg_p.h"
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg
--{
--
--struct Codec;
--
--class Resampler
--{
--public:
-- Resampler(const Codec *codec, const QAudioFormat &outputFormat);
-- ~Resampler();
--
-- QAudioBuffer resample(const AVFrame *frame);
-- qint64 samplesProcessed() const { return m_samplesProcessed; }
--
--private:
-- QAudioFormat m_outputFormat;
-- SwrContext *resampler = nullptr;
-- qint64 m_samplesProcessed = 0;
--};
--
--}
--
--QT_END_NAMESPACE
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp b/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp
-deleted file mode 100644
-index 804ba424f..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp
-+++ /dev/null
-@@ -1,57 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpegthread_p.h"
--
--#include <qloggingcategory.h>
--
--QT_BEGIN_NAMESPACE
--
--using namespace QFFmpeg;
--
--void Thread::kill()
--{
-- {
-- QMutexLocker locker(&mutex);
-- exit.storeRelease(true);
-- killHelper();
-- }
-- wake();
-- wait();
-- delete this;
--}
--
--void Thread::maybePause()
--{
-- while (timeOut > 0 || shouldWait()) {
-- if (exit.loadAcquire())
-- break;
--
-- QElapsedTimer timer;
-- timer.start();
-- if (condition.wait(&mutex, QDeadlineTimer(timeOut, Qt::PreciseTimer))) {
-- if (timeOut >= 0) {
-- timeOut -= timer.elapsed();
-- if (timeOut < 0)
-- timeOut = -1;
-- }
-- } else {
-- timeOut = -1;
-- }
-- }
--}
--
--void Thread::run()
--{
-- init();
-- QMutexLocker locker(&mutex);
-- while (1) {
-- maybePause();
-- if (exit.loadAcquire())
-- break;
-- loop();
-- }
-- cleanup();
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h b/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h
-deleted file mode 100644
-index e5c87e237..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h
-+++ /dev/null
-@@ -1,68 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGTHREAD_P_H
--#define QFFMPEGTHREAD_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include <private/qtmultimediaglobal_p.h>
--
--#include <qmutex.h>
--#include <qwaitcondition.h>
--#include <qthread.h>
--
--QT_BEGIN_NAMESPACE
--
--class QAudioSink;
--
--namespace QFFmpeg
--{
--
--class Thread : public QThread
--{
--public:
-- mutable QMutex mutex;
-- qint64 timeOut = -1;
--private:
-- QWaitCondition condition;
--
--protected:
-- QAtomicInteger<bool> exit = false;
--
--public:
-- // public API is thread-safe
--
-- void kill();
-- virtual void killHelper() {}
--
-- void wake() {
-- condition.wakeAll();
-- }
--
--protected:
-- virtual void init() {}
-- virtual void cleanup() {}
-- // loop() should never block, all blocking has to happen in shouldWait()
-- virtual void loop() = 0;
-- virtual bool shouldWait() const { return false; }
--
--private:
-- void maybePause();
--
-- void run() override;
--};
--
--}
--
--QT_END_NAMESPACE
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
-deleted file mode 100644
-index b17c04938..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
-+++ /dev/null
-@@ -1,356 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpegvideobuffer_p.h"
--#include "private/qvideotexturehelper_p.h"
--#include "qffmpeghwaccel_p.h"
--
--extern "C" {
--#include <libavutil/pixdesc.h>
--#include <libavutil/hdr_dynamic_metadata.h>
--#include <libavutil/mastering_display_metadata.h>
--}
--
--QT_BEGIN_NAMESPACE
--
--QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrame *frame)
-- : QAbstractVideoBuffer(QVideoFrame::NoHandle)
-- , frame(frame)
--{
-- if (frame->hw_frames_ctx) {
-- hwFrame = frame;
-- m_pixelFormat = toQtPixelFormat(QFFmpeg::HWAccel::format(frame));
-- return;
-- }
--
-- swFrame = frame;
-- m_pixelFormat = toQtPixelFormat(AVPixelFormat(swFrame->format));
--
-- convertSWFrame();
--}
--
--QFFmpegVideoBuffer::~QFFmpegVideoBuffer()
--{
-- delete textures;
-- if (swFrame)
-- av_frame_free(&swFrame);
-- if (hwFrame)
-- av_frame_free(&hwFrame);
--}
--
--void QFFmpegVideoBuffer::convertSWFrame()
--{
-- Q_ASSERT(swFrame);
-- bool needsConversion = false;
-- auto pixelFormat = toQtPixelFormat(AVPixelFormat(swFrame->format), &needsConversion);
--// qDebug() << "SW frame format:" << pixelFormat << swFrame->format << needsConversion;
--
-- if (pixelFormat != m_pixelFormat) {
-- AVPixelFormat newFormat = toAVPixelFormat(m_pixelFormat);
-- // convert the format into something we can handle
-- SwsContext *c = sws_getContext(swFrame->width, swFrame->height, AVPixelFormat(swFrame->format),
-- swFrame->width, swFrame->height, newFormat,
-- SWS_BICUBIC, nullptr, nullptr, nullptr);
--
-- AVFrame *newFrame = av_frame_alloc();
-- newFrame->width = swFrame->width;
-- newFrame->height = swFrame->height;
-- newFrame->format = newFormat;
-- av_frame_get_buffer(newFrame, 0);
--
-- sws_scale(c, swFrame->data, swFrame->linesize, 0, swFrame->height, newFrame->data, newFrame->linesize);
-- av_frame_free(&swFrame);
-- swFrame = newFrame;
-- sws_freeContext(c);
-- }
--}
--
--void QFFmpegVideoBuffer::setTextureConverter(const QFFmpeg::TextureConverter &converter)
--{
-- textureConverter = converter;
-- textureConverter.init(hwFrame);
-- m_type = converter.isNull() ? QVideoFrame::NoHandle : QVideoFrame::RhiTextureHandle;
--}
--
--QVideoFrameFormat::ColorSpace QFFmpegVideoBuffer::colorSpace() const
--{
-- switch (frame->colorspace) {
-- default:
-- case AVCOL_SPC_UNSPECIFIED:
-- case AVCOL_SPC_RESERVED:
-- case AVCOL_SPC_FCC:
-- case AVCOL_SPC_SMPTE240M:
-- case AVCOL_SPC_YCGCO:
-- case AVCOL_SPC_SMPTE2085:
-- case AVCOL_SPC_CHROMA_DERIVED_NCL:
-- case AVCOL_SPC_CHROMA_DERIVED_CL:
-- case AVCOL_SPC_ICTCP: // BT.2100 ICtCp
-- return QVideoFrameFormat::ColorSpace_Undefined;
-- case AVCOL_SPC_RGB:
-- return QVideoFrameFormat::ColorSpace_AdobeRgb;
-- case AVCOL_SPC_BT709:
-- return QVideoFrameFormat::ColorSpace_BT709;
-- case AVCOL_SPC_BT470BG: // BT601
-- case AVCOL_SPC_SMPTE170M: // Also BT601
-- return QVideoFrameFormat::ColorSpace_BT601;
-- case AVCOL_SPC_BT2020_NCL: // Non constant luminence
-- case AVCOL_SPC_BT2020_CL: // Constant luminence
-- return QVideoFrameFormat::ColorSpace_BT2020;
-- }
--}
--
--QVideoFrameFormat::ColorTransfer QFFmpegVideoBuffer::colorTransfer() const
--{
-- switch (frame->color_trc) {
-- case AVCOL_TRC_BT709:
-- // The following three cases have transfer characteristics identical to BT709
-- case AVCOL_TRC_BT1361_ECG:
-- case AVCOL_TRC_BT2020_10:
-- case AVCOL_TRC_BT2020_12:
-- case AVCOL_TRC_SMPTE240M: // almost identical to bt709
-- return QVideoFrameFormat::ColorTransfer_BT709;
-- case AVCOL_TRC_GAMMA22:
-- case AVCOL_TRC_SMPTE428 : // No idea, let's hope for the best...
-- case AVCOL_TRC_IEC61966_2_1: // sRGB, close enough to 2.2...
-- case AVCOL_TRC_IEC61966_2_4: // not quite, but probably close enough
-- return QVideoFrameFormat::ColorTransfer_Gamma22;
-- case AVCOL_TRC_GAMMA28:
-- return QVideoFrameFormat::ColorTransfer_Gamma28;
-- case AVCOL_TRC_SMPTE170M:
-- return QVideoFrameFormat::ColorTransfer_BT601;
-- case AVCOL_TRC_LINEAR:
-- return QVideoFrameFormat::ColorTransfer_Linear;
-- case AVCOL_TRC_SMPTE2084:
-- return QVideoFrameFormat::ColorTransfer_ST2084;
-- case AVCOL_TRC_ARIB_STD_B67:
-- return QVideoFrameFormat::ColorTransfer_STD_B67;
-- default:
-- break;
-- }
-- return QVideoFrameFormat::ColorTransfer_Unknown;
--}
--
--QVideoFrameFormat::ColorRange QFFmpegVideoBuffer::colorRange() const
--{
-- switch (frame->color_range) {
-- case AVCOL_RANGE_MPEG:
-- return QVideoFrameFormat::ColorRange_Video;
-- case AVCOL_RANGE_JPEG:
-- return QVideoFrameFormat::ColorRange_Full;
-- default:
-- return QVideoFrameFormat::ColorRange_Unknown;
-- }
--}
--
--float QFFmpegVideoBuffer::maxNits()
--{
-- float maxNits = -1;
-- for (int i = 0; i <frame->nb_side_data; ++i) {
-- AVFrameSideData *sd = frame->side_data[i];
-- // TODO: Longer term we might want to also support HDR10+ dynamic metadata
-- if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
-- auto *data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
-- auto maybeLum = QFFmpeg::mul(10'000., data->max_luminance);
-- if (maybeLum)
-- maxNits = float(maybeLum.value());
-- }
-- }
-- return maxNits;
--}
--
--QVideoFrame::MapMode QFFmpegVideoBuffer::mapMode() const
--{
-- return m_mode;
--}
--
--QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
--{
-- if (!swFrame) {
-- Q_ASSERT(hwFrame && hwFrame->hw_frames_ctx);
-- swFrame = av_frame_alloc();
-- /* retrieve data from GPU to CPU */
-- int ret = av_hwframe_transfer_data(swFrame, hwFrame, 0);
-- if (ret < 0) {
-- qWarning() << "Error transferring the data to system memory\n";
-- return {};
-- }
-- convertSWFrame();
-- }
--
-- m_mode = mode;
--
--// qDebug() << "MAP:";
-- MapData mapData;
-- auto *desc = QVideoTextureHelper::textureDescription(pixelFormat());
-- mapData.nPlanes = desc->nplanes;
-- for (int i = 0; i < mapData.nPlanes; ++i) {
-- mapData.data[i] = swFrame->data[i];
-- mapData.bytesPerLine[i] = swFrame->linesize[i];
-- mapData.size[i] = mapData.bytesPerLine[i]*desc->heightForPlane(swFrame->height, i);
--// qDebug() << " " << i << mapData.data[i] << mapData.size[i];
-- }
-- return mapData;
--}
--
--void QFFmpegVideoBuffer::unmap()
--{
-- // nothing to do here for SW buffers
--}
--
--std::unique_ptr<QVideoFrameTextures> QFFmpegVideoBuffer::mapTextures(QRhi *)
--{
-- if (textures)
-- return {};
-- if (!hwFrame)
-- return {};
-- textures = textureConverter.getTextures(hwFrame);
-- if (!textures)
-- qWarning() << " failed to get textures for frame" << textureConverter.isNull();
-- return {};
--}
--
--quint64 QFFmpegVideoBuffer::textureHandle(int plane) const
--{
-- return textures ? textures->textureHandle(plane) : 0;
--}
--
--QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::pixelFormat() const
--{
-- return m_pixelFormat;
--}
--
--QSize QFFmpegVideoBuffer::size() const
--{
-- return QSize(frame->width, frame->height);
--}
--
--QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion)
--{
-- if (needsConversion)
-- *needsConversion = false;
--
-- switch (avPixelFormat) {
-- default:
-- break;
-- case AV_PIX_FMT_ARGB:
-- return QVideoFrameFormat::Format_ARGB8888;
-- case AV_PIX_FMT_0RGB:
-- return QVideoFrameFormat::Format_XRGB8888;
-- case AV_PIX_FMT_BGRA:
-- return QVideoFrameFormat::Format_BGRA8888;
-- case AV_PIX_FMT_BGR0:
-- return QVideoFrameFormat::Format_BGRX8888;
-- case AV_PIX_FMT_ABGR:
-- return QVideoFrameFormat::Format_ABGR8888;
-- case AV_PIX_FMT_0BGR:
-- return QVideoFrameFormat::Format_XBGR8888;
-- case AV_PIX_FMT_RGBA:
-- return QVideoFrameFormat::Format_RGBA8888;
-- case AV_PIX_FMT_RGB0:
-- return QVideoFrameFormat::Format_RGBX8888;
--
-- case AV_PIX_FMT_YUV422P:
-- return QVideoFrameFormat::Format_YUV422P;
-- case AV_PIX_FMT_YUV420P:
-- return QVideoFrameFormat::Format_YUV420P;
-- case AV_PIX_FMT_YUV420P10:
-- return QVideoFrameFormat::Format_YUV420P10;
-- case AV_PIX_FMT_UYVY422:
-- return QVideoFrameFormat::Format_UYVY;
-- case AV_PIX_FMT_YUYV422:
-- return QVideoFrameFormat::Format_YUYV;
-- case AV_PIX_FMT_NV12:
-- return QVideoFrameFormat::Format_NV12;
-- case AV_PIX_FMT_NV21:
-- return QVideoFrameFormat::Format_NV21;
-- case AV_PIX_FMT_GRAY8:
-- return QVideoFrameFormat::Format_Y8;
-- case AV_PIX_FMT_GRAY16:
-- return QVideoFrameFormat::Format_Y16;
--
-- case AV_PIX_FMT_P010:
-- return QVideoFrameFormat::Format_P010;
-- case AV_PIX_FMT_P016:
-- return QVideoFrameFormat::Format_P016;
-- case AV_PIX_FMT_MEDIACODEC:
-- return QVideoFrameFormat::Format_SamplerExternalOES;
-- }
--
-- if (needsConversion)
-- *needsConversion = true;
--
-- const AVPixFmtDescriptor *descriptor = av_pix_fmt_desc_get(avPixelFormat);
--
-- if (descriptor->flags & AV_PIX_FMT_FLAG_RGB)
-- return QVideoFrameFormat::Format_RGBA8888;
--
-- if (descriptor->comp[0].depth > 8)
-- return QVideoFrameFormat::Format_P016;
-- return QVideoFrameFormat::Format_YUV420P;
--}
--
--AVPixelFormat QFFmpegVideoBuffer::toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
--{
-- switch (pixelFormat) {
-- default:
-- case QVideoFrameFormat::Format_Invalid:
-- case QVideoFrameFormat::Format_AYUV:
-- case QVideoFrameFormat::Format_AYUV_Premultiplied:
-- case QVideoFrameFormat::Format_YV12:
-- case QVideoFrameFormat::Format_IMC1:
-- case QVideoFrameFormat::Format_IMC2:
-- case QVideoFrameFormat::Format_IMC3:
-- case QVideoFrameFormat::Format_IMC4:
-- return AV_PIX_FMT_NONE;
-- case QVideoFrameFormat::Format_Jpeg:
-- // We're using the data from the converted QImage here, which is in BGRA.
-- return AV_PIX_FMT_BGRA;
-- case QVideoFrameFormat::Format_ARGB8888:
-- case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
-- return AV_PIX_FMT_ARGB;
-- case QVideoFrameFormat::Format_XRGB8888:
-- return AV_PIX_FMT_0RGB;
-- case QVideoFrameFormat::Format_BGRA8888:
-- case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
-- return AV_PIX_FMT_BGRA;
-- case QVideoFrameFormat::Format_BGRX8888:
-- return AV_PIX_FMT_BGR0;
-- case QVideoFrameFormat::Format_ABGR8888:
-- return AV_PIX_FMT_ABGR;
-- case QVideoFrameFormat::Format_XBGR8888:
-- return AV_PIX_FMT_0BGR;
-- case QVideoFrameFormat::Format_RGBA8888:
-- return AV_PIX_FMT_RGBA;
-- case QVideoFrameFormat::Format_RGBX8888:
-- return AV_PIX_FMT_RGB0;
--
-- case QVideoFrameFormat::Format_YUV422P:
-- return AV_PIX_FMT_YUV422P;
-- case QVideoFrameFormat::Format_YUV420P:
-- return AV_PIX_FMT_YUV420P;
-- case QVideoFrameFormat::Format_YUV420P10:
-- return AV_PIX_FMT_YUV420P10;
-- case QVideoFrameFormat::Format_UYVY:
-- return AV_PIX_FMT_UYVY422;
-- case QVideoFrameFormat::Format_YUYV:
-- return AV_PIX_FMT_YUYV422;
-- case QVideoFrameFormat::Format_NV12:
-- return AV_PIX_FMT_NV12;
-- case QVideoFrameFormat::Format_NV21:
-- return AV_PIX_FMT_NV21;
-- case QVideoFrameFormat::Format_Y8:
-- return AV_PIX_FMT_GRAY8;
-- case QVideoFrameFormat::Format_Y16:
-- return AV_PIX_FMT_GRAY16;
--
-- case QVideoFrameFormat::Format_P010:
-- return AV_PIX_FMT_P010;
-- case QVideoFrameFormat::Format_P016:
-- return AV_PIX_FMT_P016;
--
-- case QVideoFrameFormat::Format_SamplerExternalOES:
-- return AV_PIX_FMT_MEDIACODEC;
-- }
--}
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
-deleted file mode 100644
-index a981ec245..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
-+++ /dev/null
-@@ -1,72 +0,0 @@
--// Copyright (C) 2021 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#ifndef QFFMPEGVIDEOBUFFER_P_H
--#define QFFMPEGVIDEOBUFFER_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include <private/qtmultimediaglobal_p.h>
--#include <private/qabstractvideobuffer_p.h>
--#include <qvideoframe.h>
--#include <QtCore/qvariant.h>
--
--#include "qffmpeg_p.h"
--#include "qffmpeghwaccel_p.h"
--
--QT_BEGIN_NAMESPACE
--
--class QFFmpegVideoBuffer : public QAbstractVideoBuffer
--{
--public:
--
-- QFFmpegVideoBuffer(AVFrame *frame);
-- ~QFFmpegVideoBuffer();
--
-- QVideoFrame::MapMode mapMode() const override;
-- MapData map(QVideoFrame::MapMode mode) override;
-- void unmap() override;
--
-- virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) override;
-- virtual quint64 textureHandle(int plane) const override;
--
-- QVideoFrameFormat::PixelFormat pixelFormat() const;
-- QSize size() const;
--
-- static QVideoFrameFormat::PixelFormat toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion = nullptr);
-- static AVPixelFormat toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat);
--
-- void convertSWFrame();
--
-- AVFrame *getHWFrame() const { return hwFrame; }
--
-- void setTextureConverter(const QFFmpeg::TextureConverter &converter);
--
-- QVideoFrameFormat::ColorSpace colorSpace() const;
-- QVideoFrameFormat::ColorTransfer colorTransfer() const;
-- QVideoFrameFormat::ColorRange colorRange() const;
--
-- float maxNits();
--
--private:
-- QVideoFrameFormat::PixelFormat m_pixelFormat;
-- AVFrame *frame = nullptr;
-- AVFrame *hwFrame = nullptr;
-- AVFrame *swFrame = nullptr;
-- QFFmpeg::TextureConverter textureConverter;
-- QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
-- QFFmpeg::TextureSet *textures = nullptr;
--};
--
--QT_END_NAMESPACE
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideoframeencoder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideoframeencoder.cpp
-deleted file mode 100644
-index 6cb34f56c..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegvideoframeencoder.cpp
-+++ /dev/null
-@@ -1,374 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--
--#include "qffmpegvideoframeencoder_p.h"
--#include "qffmpegvideobuffer_p.h"
--#include "qffmpegmediaformatinfo_p.h"
--#include "qffmpegencoderoptions_p.h"
--#include "private/qplatformmediarecorder_p.h"
--#include "private/qmultimediautils_p.h"
--#include <qloggingcategory.h>
--
--extern "C" {
--#include <libavutil/pixdesc.h>
--}
--
--/* Infrastructure for HW acceleration goes into this file. */
--
--QT_BEGIN_NAMESPACE
--
--Q_LOGGING_CATEGORY(qLcVideoFrameEncoder, "qt.multimedia.ffmpeg.videoencoder")
--
--namespace QFFmpeg {
--
--VideoFrameEncoder::Data::~Data()
--{
-- if (converter)
-- sws_freeContext(converter);
-- avcodec_free_context(&codecContext);
--}
--
--VideoFrameEncoder::VideoFrameEncoder(const QMediaEncoderSettings &encoderSettings,
-- const QSize &sourceSize, float frameRate, AVPixelFormat sourceFormat, AVPixelFormat swFormat)
-- : d(new Data)
--{
-- d->settings = encoderSettings;
-- d->frameRate = frameRate;
-- d->sourceSize = sourceSize;
--
-- if (!d->settings.videoResolution().isValid())
-- d->settings.setVideoResolution(d->sourceSize);
--
-- d->sourceFormat = sourceFormat;
-- d->sourceSWFormat = swFormat;
--
-- auto qVideoCodec = encoderSettings.videoCodec();
-- auto codecID = QFFmpegMediaFormatInfo::codecIdForVideoCodec(qVideoCodec);
--
--#ifndef QT_DISABLE_HW_ENCODING
-- auto [preferredTypes, size] = HWAccel::preferredDeviceTypes();
-- for (qsizetype i = 0; i < size; i++) {
-- auto accel = HWAccel::create(preferredTypes[i]);
-- if (!accel)
-- continue;
--
-- auto matchesSizeConstraints = [&]() -> bool {
-- auto *constraints = av_hwdevice_get_hwframe_constraints(accel->hwDeviceContextAsBuffer(), nullptr);
-- if (!constraints)
-- return true;
-- // Check size constraints
-- bool result = (d->sourceSize.width() >= constraints->min_width && d->sourceSize.height() >= constraints->min_height &&
-- d->sourceSize.width() <= constraints->max_width && d->sourceSize.height() <= constraints->max_height);
-- av_hwframe_constraints_free(&constraints);
-- return result;
-- };
--
-- if (!matchesSizeConstraints())
-- continue;
--
-- d->codec = accel->hardwareEncoderForCodecId(codecID);
-- if (!d->codec)
-- continue;
-- d->accel = std::move(accel);
-- break;
-- }
--#endif
--
-- if (!d->accel) {
-- d->codec = avcodec_find_encoder(codecID);
-- if (!d->codec) {
-- qWarning() << "Could not find encoder for codecId" << codecID;
-- d = {};
-- return;
-- }
-- }
-- auto supportsFormat = [&](AVPixelFormat fmt) {
-- auto *f = d->codec->pix_fmts;
-- while (*f != -1) {
-- if (*f == fmt)
-- return true;
-- ++f;
-- }
-- return false;
-- };
--
-- d->targetFormat = d->sourceFormat;
--
-- if (!supportsFormat(d->sourceFormat)) {
-- if (supportsFormat(swFormat))
-- d->targetFormat = swFormat;
-- else
-- // Take first format the encoder supports. Might want to improve upon this
-- d->targetFormat = *d->codec->pix_fmts;
-- }
--
-- auto desc = av_pix_fmt_desc_get(d->sourceFormat);
-- d->sourceFormatIsHWFormat = desc->flags & AV_PIX_FMT_FLAG_HWACCEL;
-- desc = av_pix_fmt_desc_get(d->targetFormat);
-- d->targetFormatIsHWFormat = desc->flags & AV_PIX_FMT_FLAG_HWACCEL;
--
-- bool needToScale = d->sourceSize != d->settings.videoResolution();
-- bool zeroCopy = d->sourceFormatIsHWFormat && d->sourceFormat == d->targetFormat && !needToScale;
--
-- if (zeroCopy)
-- // no need to initialize any converters
-- return;
--
-- if (d->sourceFormatIsHWFormat) {
-- // if source and target formats don't agree, but the source is a HW format or sizes do't agree, we need to download
-- if (d->sourceFormat != d->targetFormat || needToScale)
-- d->downloadFromHW = true;
-- } else {
-- d->sourceSWFormat = d->sourceFormat;
-- }
--
-- if (d->targetFormatIsHWFormat) {
-- Q_ASSERT(d->accel);
-- // if source and target formats don't agree, but the target is a HW format, we need to upload
-- if (d->sourceFormat != d->targetFormat || needToScale) {
-- d->uploadToHW = true;
--
-- // determine the format used by the encoder.
-- // We prefer YUV422 based formats such as NV12 or P010. Selection trues to find the best matching
-- // format for the encoder depending on the bit depth of the source format
-- auto desc = av_pix_fmt_desc_get(d->sourceSWFormat);
-- int sourceDepth = desc->comp[0].depth;
--
-- d->targetSWFormat = AV_PIX_FMT_NONE;
--
-- auto *constraints = av_hwdevice_get_hwframe_constraints(d->accel->hwDeviceContextAsBuffer(), nullptr);
-- auto *f = constraints->valid_sw_formats;
-- int score = INT_MIN;
-- while (*f != AV_PIX_FMT_NONE) {
-- auto calcScore = [&](AVPixelFormat fmt) -> int {
-- auto *desc = av_pix_fmt_desc_get(fmt);
-- int s = 0;
-- if (fmt == d->sourceSWFormat)
-- // prefer exact matches
-- s += 10;
-- if (desc->comp[0].depth == sourceDepth)
-- s += 100;
-- else if (desc->comp[0].depth < sourceDepth)
-- s -= 100;
-- if (desc->log2_chroma_h == 1)
-- s += 1;
-- if (desc->log2_chroma_w == 1)
-- s += 1;
-- if (desc->flags & AV_PIX_FMT_FLAG_BE)
-- s -= 10;
-- if (desc->flags & AV_PIX_FMT_FLAG_PAL)
-- // we don't want paletted formats
-- s -= 10000;
-- if (desc->flags & AV_PIX_FMT_FLAG_RGB)
-- // we don't want RGB formats
-- s -= 1000;
-- if (desc->flags & AV_PIX_FMT_FLAG_HWACCEL)
-- // we really don't want HW accelerated formats here
-- s -= 1000000;
-- qCDebug(qLcVideoFrameEncoder) << "checking format" << fmt << Qt::hex << desc->flags << desc->comp[0].depth
-- << desc->log2_chroma_h << desc->log2_chroma_w << "score:" << s;
-- return s;
-- };
--
-- int s = calcScore(*f);
-- if (s > score) {
-- d->targetSWFormat = *f;
-- score = s;
-- }
-- ++f;
-- }
-- if (d->targetSWFormat == AV_PIX_FMT_NONE) // shouldn't happen
-- d->targetSWFormat = *constraints->valid_sw_formats;
--
-- qCDebug(qLcVideoFrameEncoder) << "using format" << d->targetSWFormat << "as transfer format.";
--
-- av_hwframe_constraints_free(&constraints);
-- // need to create a frames context to convert the input data
-- d->accel->createFramesContext(d->targetSWFormat, sourceSize);
-- }
-- } else {
-- d->targetSWFormat = d->targetFormat;
-- }
--
-- if (d->sourceSWFormat != d->targetSWFormat || needToScale) {
-- auto resolution = d->settings.videoResolution();
-- qCDebug(qLcVideoFrameEncoder) << "camera and encoder use different formats:" << d->sourceSWFormat << d->targetSWFormat;
-- d->converter = sws_getContext(d->sourceSize.width(), d->sourceSize.height(), d->sourceSWFormat,
-- resolution.width(), resolution.height(), d->targetSWFormat,
-- SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
-- }
--}
--
--VideoFrameEncoder::~VideoFrameEncoder()
--{
--}
--
--void QFFmpeg::VideoFrameEncoder::initWithFormatContext(AVFormatContext *formatContext)
--{
-- d->stream = avformat_new_stream(formatContext, nullptr);
-- d->stream->id = formatContext->nb_streams - 1;
-- //qCDebug(qLcVideoFrameEncoder) << "Video stream: index" << d->stream->id;
-- d->stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
-- d->stream->codecpar->codec_id = d->codec->id;
--
-- // Apples HEVC decoders don't like the hev1 tag ffmpeg uses by default, use hvc1 as the more commonly accepted tag
-- if (d->codec->id == AV_CODEC_ID_HEVC)
-- d->stream->codecpar->codec_tag = MKTAG('h','v','c','1');
--
-- // ### Fix hardcoded values
-- d->stream->codecpar->format = d->targetFormat;
-- d->stream->codecpar->width = d->settings.videoResolution().width();
-- d->stream->codecpar->height = d->settings.videoResolution().height();
-- d->stream->codecpar->sample_aspect_ratio = AVRational{1, 1};
-- float requestedRate = d->frameRate;
-- d->stream->time_base = AVRational{ 1, (int)(requestedRate*1000) };
--
-- float delta = 1e10;
-- if (d->codec->supported_framerates) {
-- // codec only supports fixed frame rates
-- auto *best = d->codec->supported_framerates;
-- qCDebug(qLcVideoFrameEncoder) << "Finding fixed rate:";
-- for (auto *f = d->codec->supported_framerates; f->num != 0; f++) {
-- auto maybeRate = toFloat(*f);
-- if (!maybeRate)
-- continue;
-- float d = qAbs(*maybeRate - requestedRate);
-- qCDebug(qLcVideoFrameEncoder) << " " << f->num << f->den << d;
-- if (d < delta) {
-- best = f;
-- delta = d;
-- }
-- }
-- qCDebug(qLcVideoFrameEncoder) << "Fixed frame rate required. Requested:" << requestedRate << "Using:" << best->num << "/" << best->den;
-- d->stream->time_base = *best;
-- requestedRate = toFloat(*best).value_or(0.f);
-- }
--
-- Q_ASSERT(d->codec);
-- d->codecContext = avcodec_alloc_context3(d->codec);
-- if (!d->codecContext) {
-- qWarning() << "Could not allocate codec context";
-- d = {};
-- return;
-- }
--
-- avcodec_parameters_to_context(d->codecContext, d->stream->codecpar);
-- d->codecContext->time_base = d->stream->time_base;
-- qCDebug(qLcVideoFrameEncoder) << "requesting time base" << d->codecContext->time_base.num << d->codecContext->time_base.den;
-- auto [num, den] = qRealToFraction(requestedRate);
-- d->codecContext->framerate = { num, den };
-- if (d->accel) {
-- auto deviceContext = d->accel->hwDeviceContextAsBuffer();
-- if (deviceContext)
-- d->codecContext->hw_device_ctx = av_buffer_ref(deviceContext);
-- auto framesContext = d->accel->hwFramesContextAsBuffer();
-- if (framesContext)
-- d->codecContext->hw_frames_ctx = av_buffer_ref(framesContext);
-- }
--}
--
--bool VideoFrameEncoder::open()
--{
-- AVDictionary *opts = nullptr;
-- applyVideoEncoderOptions(d->settings, d->codec->name, d->codecContext, &opts);
-- int res = avcodec_open2(d->codecContext, d->codec, &opts);
-- if (res < 0) {
-- avcodec_free_context(&d->codecContext);
-- qWarning() << "Couldn't open codec for writing" << err2str(res);
-- return false;
-- }
-- qCDebug(qLcVideoFrameEncoder) << "video codec opened" << res << "time base" << d->codecContext->time_base.num << d->codecContext->time_base.den;
-- d->stream->time_base = d->codecContext->time_base;
-- return true;
--}
--
--qint64 VideoFrameEncoder::getPts(qint64 us)
--{
-- Q_ASSERT(d);
-- qint64 div = 1'000'000 * d->stream->time_base.num;
-- return div != 0 ? (us * d->stream->time_base.den + div / 2) / div : 0;
--}
--
--int VideoFrameEncoder::sendFrame(AVFrame *frame)
--{
-- if (!frame)
-- return avcodec_send_frame(d->codecContext, frame);
-- auto pts = frame->pts;
--
-- if (d->downloadFromHW) {
-- auto *f = av_frame_alloc();
-- f->format = d->sourceSWFormat;
-- int err = av_hwframe_transfer_data(f, frame, 0);
-- if (err < 0) {
-- qCDebug(qLcVideoFrameEncoder) << "Error transferring frame data to surface." << err2str(err);
-- return err;
-- }
-- av_frame_free(&frame);
-- frame = f;
-- }
--
-- if (d->converter) {
-- auto *f = av_frame_alloc();
-- f->format = d->targetSWFormat;
-- f->width = d->settings.videoResolution().width();
-- f->height = d->settings.videoResolution().height();
-- av_frame_get_buffer(f, 0);
-- sws_scale(d->converter, frame->data, frame->linesize, 0, f->height, f->data, f->linesize);
-- av_frame_free(&frame);
-- frame = f;
-- }
--
-- if (d->uploadToHW) {
-- auto *hwFramesContext = d->accel->hwFramesContextAsBuffer();
-- Q_ASSERT(hwFramesContext);
-- auto *f = av_frame_alloc();
-- if (!f)
-- return AVERROR(ENOMEM);
-- int err = av_hwframe_get_buffer(hwFramesContext, f, 0);
-- if (err < 0) {
-- qCDebug(qLcVideoFrameEncoder) << "Error getting HW buffer" << err2str(err);
-- return err;
-- } else {
-- qCDebug(qLcVideoFrameEncoder) << "got HW buffer";
-- }
-- if (!f->hw_frames_ctx) {
-- qCDebug(qLcVideoFrameEncoder) << "no hw frames context";
-- return AVERROR(ENOMEM);
-- }
-- err = av_hwframe_transfer_data(f, frame, 0);
-- if (err < 0) {
-- qCDebug(qLcVideoFrameEncoder) << "Error transferring frame data to surface." << err2str(err);
-- return err;
-- }
-- av_frame_free(&frame);
-- frame = f;
-- }
--
-- qCDebug(qLcVideoFrameEncoder) << "sending frame" << pts;
-- frame->pts = pts;
-- int ret = avcodec_send_frame(d->codecContext, frame);
-- av_frame_free(&frame);
-- return ret;
--}
--
--AVPacket *VideoFrameEncoder::retrievePacket()
--{
-- if (!d || !d->codecContext)
-- return nullptr;
-- AVPacket *packet = av_packet_alloc();
-- int ret = avcodec_receive_packet(d->codecContext, packet);
-- if (ret < 0) {
-- av_packet_free(&packet);
-- if (ret != AVERROR(EOF) && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
-- qCDebug(qLcVideoFrameEncoder) << "Error receiving packet" << ret << err2str(ret);
-- return nullptr;
-- }
-- auto ts = timeStampMs(packet->pts, d->stream->time_base);
-- qCDebug(qLcVideoFrameEncoder) << "got a packet" << packet->pts << (ts ? *ts : 0);
-- packet->stream_index = d->stream->id;
-- return packet;
--}
--
--} // namespace QFFmpeg
--
--QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideoframeencoder_p.h b/src/plugins/multimedia/ffmpeg/qffmpegvideoframeencoder_p.h
-deleted file mode 100644
-index f71460799..000000000
---- a/src/plugins/multimedia/ffmpeg/qffmpegvideoframeencoder_p.h
-+++ /dev/null
-@@ -1,76 +0,0 @@
--// Copyright (C) 2022 The Qt Company Ltd.
--// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
--#ifndef QFFMPEGVIDEOFRAMEENCODER_P_H
--#define QFFMPEGVIDEOFRAMEENCODER_P_H
--
--//
--// W A R N I N G
--// -------------
--//
--// This file is not part of the Qt API. It exists purely as an
--// implementation detail. This header file may change from version to
--// version without notice, or even be removed.
--//
--// We mean it.
--//
--
--#include "qffmpeghwaccel_p.h"
--#include "qvideoframeformat.h"
--#include "private/qplatformmediarecorder_p.h"
--
--QT_BEGIN_NAMESPACE
--
--namespace QFFmpeg {
--
--class VideoFrameEncoder
--{
-- class Data final
-- {
-- public:
-- ~Data();
-- QAtomicInt ref = 0;
-- QMediaEncoderSettings settings;
-- float frameRate = 0.;
-- QSize sourceSize;
--
-- std::unique_ptr<HWAccel> accel;
-- const AVCodec *codec = nullptr;
-- AVStream *stream = nullptr;
-- AVCodecContext *codecContext = nullptr;
-- SwsContext *converter = nullptr;
-- AVPixelFormat sourceFormat = AV_PIX_FMT_NONE;
-- AVPixelFormat sourceSWFormat = AV_PIX_FMT_NONE;
-- AVPixelFormat targetFormat = AV_PIX_FMT_NONE;
-- AVPixelFormat targetSWFormat = AV_PIX_FMT_NONE;
-- bool sourceFormatIsHWFormat = false;
-- bool targetFormatIsHWFormat = false;
-- bool downloadFromHW = false;
-- bool uploadToHW = false;
-- };
--
-- QExplicitlySharedDataPointer<Data> d;
--public:
-- VideoFrameEncoder() = default;
-- VideoFrameEncoder(const QMediaEncoderSettings &encoderSettings, const QSize &sourceSize, float frameRate, AVPixelFormat sourceFormat, AVPixelFormat swFormat);
-- ~VideoFrameEncoder();
--
-- void initWithFormatContext(AVFormatContext *formatContext);
-- bool open();
--
-- bool isNull() const { return !d; }
--
-- AVPixelFormat sourceFormat() const { return d ? d->sourceFormat : AV_PIX_FMT_NONE; }
-- AVPixelFormat targetFormat() const { return d ? d->targetFormat : AV_PIX_FMT_NONE; }
--
-- qint64 getPts(qint64 ms);
--
-- int sendFrame(AVFrame *frame);
-- AVPacket *retrievePacket();
--};
--
--
--}
--
--QT_END_NAMESPACE
--
--#endif
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
-index 3cb31b473..93e7ceeed 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
-@@ -1,7 +1,6 @@
- // Copyright (C) 2021 The Qt Company Ltd.
- // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
- #include <qffmpegvideosink_p.h>
--#include <qffmpegvideobuffer_p.h>
-
- QT_BEGIN_NAMESPACE
-
-@@ -10,21 +9,8 @@ QFFmpegVideoSink::QFFmpegVideoSink(QVideoSink *sink)
- {
- }
-
--void QFFmpegVideoSink::setRhi(QRhi *rhi)
--{
-- if (m_rhi == rhi)
-- return;
-- m_rhi = rhi;
-- textureConverter = QFFmpeg::TextureConverter(rhi);
-- emit rhiChanged(rhi);
--}
--
- void QFFmpegVideoSink::setVideoFrame(const QVideoFrame &frame)
- {
-- auto *buffer = dynamic_cast<QFFmpegVideoBuffer *>(frame.videoBuffer());
-- if (buffer)
-- buffer->setTextureConverter(textureConverter);
--
- QPlatformVideoSink::setVideoFrame(frame);
- }
-
-diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideosink_p.h b/src/plugins/multimedia/ffmpeg/qffmpegvideosink_p.h
-index dbd9ac7f2..cbaa810d7 100644
---- a/src/plugins/multimedia/ffmpeg/qffmpegvideosink_p.h
-+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideosink_p.h
-@@ -16,7 +16,7 @@
- //
-
- #include <private/qplatformvideosink_p.h>
--#include <qffmpeghwaccel_p.h>
-+//#include <qffmpeghwaccel_p.h>
-
- QT_BEGIN_NAMESPACE
-
-@@ -29,16 +29,8 @@ class QFFmpegVideoSink : public QPlatformVideoSink
-
- public:
- QFFmpegVideoSink(QVideoSink *sink);
-- void setRhi(QRhi *rhi) override;
-
- void setVideoFrame(const QVideoFrame &frame) override;
--
--Q_SIGNALS:
-- void rhiChanged(QRhi *rhi);
--
--private:
-- QFFmpeg::TextureConverter textureConverter;
-- QRhi *m_rhi = nullptr;
- };
-
- QT_END_NAMESPACE
-diff --git a/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp b/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
-index 4189bdedf..c61c452f3 100644
---- a/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
-+++ b/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
-@@ -147,7 +147,7 @@ void QV4L2CameraDevices::doCheckCameras()
-
- while (!ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) {
- if (frameSize.type != V4L2_FRMSIZE_TYPE_DISCRETE)
-- continue;
-+ break;
-
- QSize resolution(frameSize.discrete.width, frameSize.discrete.height);
- float min = 1e10;
-@@ -160,7 +160,7 @@ void QV4L2CameraDevices::doCheckCameras()
-
- while (!ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) {
- if (frameInterval.type != V4L2_FRMIVAL_TYPE_DISCRETE)
-- continue;
-+ break;
- ++frameInterval.index;
- float rate = float(frameInterval.discrete.denominator)/float(frameInterval.discrete.numerator);
- if (rate > max)
-diff --git a/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp b/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
-index 790c49858..4a4e7a921 100644
---- a/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
-+++ b/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
-@@ -11,8 +11,8 @@
-
- #include <mfapi.h>
- #include <mfidl.h>
--#include <Mferror.h>
--#include <Mfreadwrite.h>
-+#include <mferror.h>
-+#include <mfreadwrite.h>
-
- #include <system_error>
-
diff --git a/src/plugins/multimedia/windows/common/mfmetadata_p.h b/src/plugins/multimedia/windows/common/mfmetadata_p.h
index 81a03b126..9ff196240 100644
--- a/src/plugins/multimedia/windows/common/mfmetadata_p.h
QT_USE_NAMESPACE
diff --git a/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp
-index 45bc70d65..0e27a2779 100644
+index a82b5ef30..49aa64b25 100644
--- a/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp
+++ b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp
@@ -4,7 +4,7 @@
#include <QtCore/qobject.h>
#include <QtCore/qmutex.h>
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
-index d5eb07980..dc87afc4b 100644
+index 4a031043d..1d901c036 100644
--- a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
@@ -8,7 +8,7 @@
#include <QtCore/qcoreevent.h>
diff --git a/src/plugins/multimedia/windows/player/mfplayersession.cpp b/src/plugins/multimedia/windows/player/mfplayersession.cpp
-index 58efaa87e..5d3372152 100644
+index 2084a168a..1aa25317f 100644
--- a/src/plugins/multimedia/windows/player/mfplayersession.cpp
+++ b/src/plugins/multimedia/windows/player/mfplayersession.cpp
@@ -32,7 +32,7 @@
//#define DEBUG_MEDIAFOUNDATION
diff --git a/src/plugins/multimedia/windows/player/mftvideo.cpp b/src/plugins/multimedia/windows/player/mftvideo.cpp
-index 601c51e42..06a8769a7 100644
+index a2ef1db0b..4cb5d2c9d 100644
--- a/src/plugins/multimedia/windows/player/mftvideo.cpp
+++ b/src/plugins/multimedia/windows/player/mftvideo.cpp
@@ -7,7 +7,7 @@
--- /dev/null
+diff --git a/src/plugins/multimedia/CMakeLists.txt b/src/plugins/multimedia/CMakeLists.txt
+index 978710112..1cb2cc730 100644
+--- a/src/plugins/multimedia/CMakeLists.txt
++++ b/src/plugins/multimedia/CMakeLists.txt
+@@ -2,7 +2,7 @@
+ # SPDX-License-Identifier: BSD-3-Clause
+
+ if (QT_FEATURE_ffmpeg)
+- add_subdirectory(ffmpeg)
++ add_subdirectory(v4l2)
+ endif ()
+ if (QT_FEATURE_gstreamer)
+ add_subdirectory(gstreamer)
+diff --git a/src/plugins/multimedia/v4l2/CMakeLists.txt b/src/plugins/multimedia/v4l2/CMakeLists.txt
+new file mode 100644
+index 000000000..7c7e1a8da
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/CMakeLists.txt
+@@ -0,0 +1,22 @@
++qt_internal_add_plugin(QFFmpegMediaPlugin
++ OUTPUT_NAME ffmpegmediaplugin
++ PLUGIN_TYPE multimedia
++ SOURCES
++ qffmpegmediametadata.cpp qffmpegmediametadata_p.h
++ qffmpegvideosink.cpp qffmpegvideosink_p.h
++ qffmpegmediaformatinfo.cpp qffmpegmediaformatinfo_p.h
++ qffmpegmediaintegration.cpp qffmpegmediaintegration_p.h
++ qffmpegimagecapture.cpp qffmpegimagecapture_p.h
++ qffmpegmediacapturesession.cpp qffmpegmediacapturesession_p.h
++ DEFINES
++ QT_COMPILING_FFMPEG
++ LIBRARIES
++ Qt::MultimediaPrivate
++ Qt::CorePrivate
++)
++
++qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_linux_v4l
++ SOURCES
++ qv4l2camera.cpp qv4l2camera_p.h
++)
++
+diff --git a/src/plugins/multimedia/v4l2/ffmpeg.json b/src/plugins/multimedia/v4l2/ffmpeg.json
+new file mode 100644
+index 000000000..d8e7e4456
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/ffmpeg.json
+@@ -0,0 +1,3 @@
++{
++ "Keys": [ "ffmpeg" ]
++}
+diff --git a/src/plugins/multimedia/v4l2/qffmpegimagecapture.cpp b/src/plugins/multimedia/v4l2/qffmpegimagecapture.cpp
+new file mode 100644
+index 000000000..9ee4e1db8
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegimagecapture.cpp
+@@ -0,0 +1,269 @@
++// Copyright (C) 2016 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#include "qffmpegimagecapture_p.h"
++#include <private/qplatformmediaformatinfo_p.h>
++#include <private/qplatformcamera_p.h>
++#include <private/qplatformimagecapture_p.h>
++#include <qvideoframeformat.h>
++#include <private/qmediastoragelocation_p.h>
++#include <qimagewriter.h>
++
++#include <QtCore/QDebug>
++#include <QtCore/QDir>
++#include <qstandardpaths.h>
++
++#include <qloggingcategory.h>
++
++QT_BEGIN_NAMESPACE
++
++Q_LOGGING_CATEGORY(qLcImageCapture, "qt.multimedia.imageCapture")
++
++QFFmpegImageCapture::QFFmpegImageCapture(QImageCapture *parent)
++ : QPlatformImageCapture(parent)
++{
++}
++
++QFFmpegImageCapture::~QFFmpegImageCapture()
++{
++}
++
++bool QFFmpegImageCapture::isReadyForCapture() const
++{
++ return m_isReadyForCapture;
++}
++
++static const char *extensionForFormat(QImageCapture::FileFormat format)
++{
++ const char *fmt = "jpg";
++ switch (format) {
++ case QImageCapture::UnspecifiedFormat:
++ case QImageCapture::JPEG:
++ fmt = "jpg";
++ break;
++ case QImageCapture::PNG:
++ fmt = "png";
++ break;
++ case QImageCapture::WebP:
++ fmt = "webp";
++ break;
++ case QImageCapture::Tiff:
++ fmt = "tiff";
++ break;
++ }
++ return fmt;
++}
++
++int QFFmpegImageCapture::capture(const QString &fileName)
++{
++ QString path = QMediaStorageLocation::generateFileName(fileName, QStandardPaths::PicturesLocation, QLatin1String(extensionForFormat(m_settings.format())));
++ return doCapture(path);
++}
++
++int QFFmpegImageCapture::captureToBuffer()
++{
++ return doCapture(QString());
++}
++
++int QFFmpegImageCapture::doCapture(const QString &fileName)
++{
++ qCDebug(qLcImageCapture) << "do capture";
++ if (!m_session) {
++ //emit error in the next event loop,
++ //so application can associate it with returned request id.
++ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
++ Q_ARG(int, -1),
++ Q_ARG(int, QImageCapture::ResourceError),
++ Q_ARG(QString, QPlatformImageCapture::msgImageCaptureNotSet()));
++
++ qCDebug(qLcImageCapture) << "error 1";
++ return -1;
++ }
++ if (!m_camera) {
++ //emit error in the next event loop,
++ //so application can associate it with returned request id.
++ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
++ Q_ARG(int, -1),
++ Q_ARG(int, QImageCapture::ResourceError),
++ Q_ARG(QString,tr("No camera available.")));
++
++ qCDebug(qLcImageCapture) << "error 2";
++ return -1;
++ }
++ if (passImage) {
++ //emit error in the next event loop,
++ //so application can associate it with returned request id.
++ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
++ Q_ARG(int, -1),
++ Q_ARG(int, QImageCapture::NotReadyError),
++ Q_ARG(QString, QPlatformImageCapture::msgCameraNotReady()));
++
++ qCDebug(qLcImageCapture) << "error 3";
++ return -1;
++ }
++ m_lastId++;
++
++ pendingImages.enqueue({m_lastId, fileName, QMediaMetaData{}});
++ // let one image pass the pipeline
++ passImage = true;
++
++ updateReadyForCapture();
++ return m_lastId;
++}
++
++void QFFmpegImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
++{
++ auto *captureSession = static_cast<QFFmpegMediaCaptureSession *>(session);
++ if (m_session == captureSession)
++ return;
++
++ if (m_session) {
++ disconnect(m_session, nullptr, this, nullptr);
++ m_lastId = 0;
++ pendingImages.clear();
++ passImage = false;
++ cameraActive = false;
++ }
++
++ m_session = captureSession;
++ if (m_session)
++ connect(m_session, &QPlatformMediaCaptureSession::cameraChanged, this, &QFFmpegImageCapture::onCameraChanged);
++
++ onCameraChanged();
++ updateReadyForCapture();
++}
++
++void QFFmpegImageCapture::updateReadyForCapture()
++{
++ bool ready = m_session && !passImage && cameraActive;
++ if (ready == m_isReadyForCapture)
++ return;
++ m_isReadyForCapture = ready;
++ emit readyForCaptureChanged(m_isReadyForCapture);
++}
++
++void QFFmpegImageCapture::cameraActiveChanged(bool active)
++{
++ qCDebug(qLcImageCapture) << "cameraActiveChanged" << cameraActive << active;
++ if (cameraActive == active)
++ return;
++ cameraActive = active;
++ qCDebug(qLcImageCapture) << "isReady" << isReadyForCapture();
++ updateReadyForCapture();
++}
++
++void QFFmpegImageCapture::newVideoFrame(const QVideoFrame &frame)
++{
++ if (!passImage)
++ return;
++
++ passImage = false;
++ Q_ASSERT(!pendingImages.isEmpty());
++ auto pending = pendingImages.dequeue();
++
++ emit imageExposed(pending.id);
++ // ### Add metadata from the AVFrame
++ emit imageMetadataAvailable(pending.id, pending.metaData);
++ emit imageAvailable(pending.id, frame);
++ QImage image = frame.toImage();
++ if (m_settings.resolution().isValid() && m_settings.resolution() != image.size())
++ image = image.scaled(m_settings.resolution());
++
++ emit imageCaptured(pending.id, image);
++ if (!pending.filename.isEmpty()) {
++ const char *fmt = nullptr;
++ switch (m_settings.format()) {
++ case QImageCapture::UnspecifiedFormat:
++ case QImageCapture::JPEG:
++ fmt = "jpeg";
++ break;
++ case QImageCapture::PNG:
++ fmt = "png";
++ break;
++ case QImageCapture::WebP:
++ fmt = "webp";
++ break;
++ case QImageCapture::Tiff:
++ fmt = "tiff";
++ break;
++ }
++ int quality = -1;
++ switch (m_settings.quality()) {
++ case QImageCapture::VeryLowQuality:
++ quality = 25;
++ break;
++ case QImageCapture::LowQuality:
++ quality = 50;
++ break;
++ case QImageCapture::NormalQuality:
++ break;
++ case QImageCapture::HighQuality:
++ quality = 75;
++ break;
++ case QImageCapture::VeryHighQuality:
++ quality = 99;
++ break;
++ }
++
++ QImageWriter writer(pending.filename, fmt);
++ writer.setQuality(quality);
++
++ if (writer.write(image)) {
++ emit imageSaved(pending.id, pending.filename);
++ } else {
++ QImageCapture::Error err = QImageCapture::ResourceError;
++ if (writer.error() == QImageWriter::UnsupportedFormatError)
++ err = QImageCapture::FormatError;
++ emit error(pending.id, err, writer.errorString());
++ }
++ }
++ updateReadyForCapture();
++}
++
++void QFFmpegImageCapture::onCameraChanged()
++{
++ auto *camera = m_session ? m_session->camera() : nullptr;
++ if (m_camera == camera)
++ return;
++
++ if (m_camera)
++ disconnect(m_camera);
++
++ m_camera = camera;
++
++ if (camera) {
++ cameraActiveChanged(camera->isActive());
++ connect(camera, &QPlatformCamera::activeChanged, this, &QFFmpegImageCapture::cameraActiveChanged);
++ connect(camera, &QPlatformCamera::newVideoFrame, this, &QFFmpegImageCapture::newVideoFrame);
++ } else {
++ cameraActiveChanged(false);
++ }
++}
++
++QImageEncoderSettings QFFmpegImageCapture::imageSettings() const
++{
++ return m_settings;
++}
++
++void QFFmpegImageCapture::setImageSettings(const QImageEncoderSettings &settings)
++{
++ auto s = settings;
++ const auto supportedFormats = QPlatformMediaIntegration::instance()->formatInfo()->imageFormats;
++ if (supportedFormats.isEmpty()) {
++ emit error(-1, QImageCapture::FormatError, "No image formats supported, can't capture.");
++ return;
++ }
++ if (s.format() == QImageCapture::UnspecifiedFormat) {
++ auto f = QImageCapture::JPEG;
++ if (!supportedFormats.contains(f))
++ f = supportedFormats.first();
++ s.setFormat(f);
++ } else if (!supportedFormats.contains(settings.format())) {
++ emit error(-1, QImageCapture::FormatError, "Image format not supported.");
++ return;
++ }
++
++ m_settings = settings;
++}
++
++QT_END_NAMESPACE
+diff --git a/src/plugins/multimedia/v4l2/qffmpegimagecapture_p.h b/src/plugins/multimedia/v4l2/qffmpegimagecapture_p.h
+new file mode 100644
+index 000000000..de54fe7cb
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegimagecapture_p.h
+@@ -0,0 +1,72 @@
++// Copyright (C) 2016 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++
++#ifndef QFFMPEGIMAGECAPTURE_H
++#define QFFMPEGIMAGECAPTURE_H
++
++//
++// W A R N I N G
++// -------------
++//
++// This file is not part of the Qt API. It exists purely as an
++// implementation detail. This header file may change from version to
++// version without notice, or even be removed.
++//
++// We mean it.
++//
++
++#include <private/qplatformimagecapture_p.h>
++#include "qffmpegmediacapturesession_p.h"
++
++#include <qqueue.h>
++
++QT_BEGIN_NAMESPACE
++
++class QFFmpegImageCapture : public QPlatformImageCapture
++
++{
++ Q_OBJECT
++public:
++ QFFmpegImageCapture(QImageCapture *parent);
++ virtual ~QFFmpegImageCapture();
++
++ bool isReadyForCapture() const override;
++ int capture(const QString &fileName) override;
++ int captureToBuffer() override;
++
++ QImageEncoderSettings imageSettings() const override;
++ void setImageSettings(const QImageEncoderSettings &settings) override;
++
++ void setCaptureSession(QPlatformMediaCaptureSession *session);
++
++ void updateReadyForCapture();
++
++public Q_SLOTS:
++ void cameraActiveChanged(bool active);
++ void newVideoFrame(const QVideoFrame &frame);
++ void onCameraChanged();
++
++private:
++ int doCapture(const QString &fileName);
++
++ QFFmpegMediaCaptureSession *m_session = nullptr;
++ int m_lastId = 0;
++ QImageEncoderSettings m_settings;
++ QPlatformCamera *m_camera = nullptr;
++
++ struct PendingImage {
++ int id;
++ QString filename;
++ QMediaMetaData metaData;
++ };
++
++ QQueue<PendingImage> pendingImages;
++ bool passImage = false;
++ bool cameraActive = false;
++ bool m_isReadyForCapture = false;
++};
++
++QT_END_NAMESPACE
++
++#endif // QFFMPEGIMAGECAPTURE_H
+diff --git a/src/plugins/multimedia/v4l2/qffmpegmediacapturesession.cpp b/src/plugins/multimedia/v4l2/qffmpegmediacapturesession.cpp
+new file mode 100644
+index 000000000..b6865761c
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegmediacapturesession.cpp
+@@ -0,0 +1,114 @@
++// Copyright (C) 2016 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#include "qffmpegmediacapturesession_p.h"
++
++#include "private/qplatformaudioinput_p.h"
++#include "private/qplatformaudiooutput_p.h"
++#include "qffmpegimagecapture_p.h"
++#include "private/qplatformcamera_p.h"
++#include "qvideosink.h"
++
++#include <qloggingcategory.h>
++
++QT_BEGIN_NAMESPACE
++
++Q_LOGGING_CATEGORY(qLcMediaCapture, "qt.multimedia.capture")
++
++
++
++QFFmpegMediaCaptureSession::QFFmpegMediaCaptureSession()
++{
++}
++
++QFFmpegMediaCaptureSession::~QFFmpegMediaCaptureSession()
++{
++}
++
++QPlatformCamera *QFFmpegMediaCaptureSession::camera()
++{
++ return m_camera;
++}
++
++void QFFmpegMediaCaptureSession::setCamera(QPlatformCamera *camera)
++{
++ if (m_camera == camera)
++ return;
++ if (m_camera) {
++ m_camera->disconnect(this);
++ m_camera->setCaptureSession(nullptr);
++ }
++
++ m_camera = camera;
++
++ if (m_camera) {
++ connect(m_camera, &QPlatformCamera::newVideoFrame, this, &QFFmpegMediaCaptureSession::newVideoFrame);
++ m_camera->setCaptureSession(this);
++ }
++
++ emit cameraChanged();
++}
++
++QPlatformImageCapture *QFFmpegMediaCaptureSession::imageCapture()
++{
++ return m_imageCapture;
++}
++
++void QFFmpegMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture)
++{
++ if (m_imageCapture == imageCapture)
++ return;
++
++ if (m_imageCapture)
++ m_imageCapture->setCaptureSession(nullptr);
++
++ m_imageCapture = static_cast<QFFmpegImageCapture *>(imageCapture);
++
++ if (m_imageCapture)
++ m_imageCapture->setCaptureSession(this);
++
++ emit imageCaptureChanged();
++}
++
++void QFFmpegMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *recorder)
++{
++ return;
++}
++
++QPlatformMediaRecorder *QFFmpegMediaCaptureSession::mediaRecorder()
++{
++ return nullptr;
++}
++
++void QFFmpegMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
++{
++ if (m_audioInput == input)
++ return;
++
++ m_audioInput = input;
++}
++
++void QFFmpegMediaCaptureSession::setVideoPreview(QVideoSink *sink)
++{
++ if (m_videoSink == sink)
++ return;
++
++ m_videoSink = sink;
++}
++
++void QFFmpegMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
++{
++ if (m_audioOutput == output)
++ return;
++
++ m_audioOutput = output;
++}
++
++void QFFmpegMediaCaptureSession::newVideoFrame(const QVideoFrame &frame)
++{
++ if (m_videoSink)
++ m_videoSink->setVideoFrame(frame);
++}
++
++
++QT_END_NAMESPACE
+diff --git a/src/plugins/multimedia/v4l2/qffmpegmediacapturesession_p.h b/src/plugins/multimedia/v4l2/qffmpegmediacapturesession_p.h
+new file mode 100644
+index 000000000..858a537cc
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegmediacapturesession_p.h
+@@ -0,0 +1,63 @@
++// Copyright (C) 2016 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#ifndef QFFMPEGMEDIACAPTURESESSION_H
++#define QFFMPEGMEDIACAPTURESESSION_H
++
++//
++// W A R N I N G
++// -------------
++//
++// This file is not part of the Qt API. It exists purely as an
++// implementation detail. This header file may change from version to
++// version without notice, or even be removed.
++//
++// We mean it.
++//
++
++#include <private/qplatformmediacapture_p.h>
++#include <private/qplatformmediaintegration_p.h>
++
++QT_BEGIN_NAMESPACE
++
++class QFFmpegMediaRecorder;
++class QFFmpegImageCapture;
++class QVideoFrame;
++
++class QFFmpegMediaCaptureSession : public QPlatformMediaCaptureSession
++{
++ Q_OBJECT
++
++public:
++ QFFmpegMediaCaptureSession();
++ virtual ~QFFmpegMediaCaptureSession();
++
++ QPlatformCamera *camera() override;
++ void setCamera(QPlatformCamera *camera) override;
++
++ QPlatformImageCapture *imageCapture() override;
++ void setImageCapture(QPlatformImageCapture *imageCapture) override;
++
++ QPlatformMediaRecorder *mediaRecorder() override;
++ void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
++
++ void setAudioInput(QPlatformAudioInput *input) override;
++ QPlatformAudioInput *audioInput() { return m_audioInput; }
++
++ void setVideoPreview(QVideoSink *sink) override;
++ void setAudioOutput(QPlatformAudioOutput *output) override;
++
++public Q_SLOTS:
++ void newVideoFrame(const QVideoFrame &frame);
++
++private:
++ QPlatformCamera *m_camera = nullptr;
++ QPlatformAudioInput *m_audioInput = nullptr;
++ QFFmpegImageCapture *m_imageCapture = nullptr;
++ QPlatformAudioOutput *m_audioOutput = nullptr;
++ QVideoSink *m_videoSink = nullptr;
++};
++
++QT_END_NAMESPACE
++
++#endif // QFFMPEGMEDIACAPTURESESSION_H
+diff --git a/src/plugins/multimedia/v4l2/qffmpegmediaformatinfo.cpp b/src/plugins/multimedia/v4l2/qffmpegmediaformatinfo.cpp
+new file mode 100644
+index 000000000..00b838d50
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegmediaformatinfo.cpp
+@@ -0,0 +1,32 @@
++// Copyright (C) 2021 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#include "qffmpegmediaformatinfo_p.h"
++#include "qaudioformat.h"
++#include "qimagewriter.h"
++
++QT_BEGIN_NAMESPACE
++
++QFFmpegMediaFormatInfo::QFFmpegMediaFormatInfo()
++{
++ // Add image formats we support. We currently simply use Qt's built-in image write
++ // to save images. That doesn't give us HDR support or support for larger bit depths,
++ // but most cameras can currently not generate those anyway.
++ const auto imgFormats = QImageWriter::supportedImageFormats();
++ for (const auto &f : imgFormats) {
++ if (f == "png")
++ imageFormats.append(QImageCapture::PNG);
++ else if (f == "jpeg")
++ imageFormats.append(QImageCapture::JPEG);
++ else if (f == "tiff")
++ imageFormats.append(QImageCapture::Tiff);
++ else if (f == "webp")
++ imageFormats.append(QImageCapture::WebP);
++ }
++
++}
++
++QFFmpegMediaFormatInfo::~QFFmpegMediaFormatInfo() = default;
++
++
++QT_END_NAMESPACE
+diff --git a/src/plugins/multimedia/v4l2/qffmpegmediaformatinfo_p.h b/src/plugins/multimedia/v4l2/qffmpegmediaformatinfo_p.h
+new file mode 100644
+index 000000000..e34005bbf
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegmediaformatinfo_p.h
+@@ -0,0 +1,34 @@
++// Copyright (C) 2021 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#ifndef QFFmpegMediaFormatInfo_H
++#define QFFmpegMediaFormatInfo_H
++
++//
++// W A R N I N G
++// -------------
++//
++// This file is not part of the Qt API. It exists purely as an
++// implementation detail. This header file may change from version to
++// version without notice, or even be removed.
++//
++// We mean it.
++//
++
++#include <private/qplatformmediaformatinfo_p.h>
++#include <qhash.h>
++#include <qlist.h>
++#include <qaudioformat.h>
++
++QT_BEGIN_NAMESPACE
++
++class QFFmpegMediaFormatInfo : public QPlatformMediaFormatInfo
++{
++public:
++ QFFmpegMediaFormatInfo();
++ ~QFFmpegMediaFormatInfo();
++};
++
++QT_END_NAMESPACE
++
++#endif
+diff --git a/src/plugins/multimedia/v4l2/qffmpegmediaintegration.cpp b/src/plugins/multimedia/v4l2/qffmpegmediaintegration.cpp
+new file mode 100644
+index 000000000..c07c0ebc7
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegmediaintegration.cpp
+@@ -0,0 +1,130 @@
++// Copyright (C) 2021 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#include <QtMultimedia/private/qplatformmediaplugin_p.h>
++#include <qcameradevice.h>
++#include "qffmpegmediaintegration_p.h"
++#include "qffmpegmediaformatinfo_p.h"
++#include "qffmpegvideosink_p.h"
++#include "qffmpegmediacapturesession_p.h"
++#include "qffmpegimagecapture_p.h"
++
++#ifdef Q_OS_MACOS
++#include <VideoToolbox/VideoToolbox.h>
++#endif
++
++#ifdef Q_OS_DARWIN
++#include "qavfcamera_p.h"
++#elif defined(Q_OS_WINDOWS)
++#include "qwindowscamera_p.h"
++#include "qwindowsvideodevices_p.h"
++#endif
++
++#if QT_CONFIG(linux_v4l)
++#include "qv4l2camera_p.h"
++#endif
++
++QT_BEGIN_NAMESPACE
++
++class QFFmpegMediaPlugin : public QPlatformMediaPlugin // plugin entry point; metadata comes from ffmpeg.json
++{
++ Q_OBJECT
++ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "ffmpeg.json")
++
++public:
++ QFFmpegMediaPlugin()
++ : QPlatformMediaPlugin()
++ {}
++
++ QPlatformMediaIntegration* create(const QString &name) override // only answers to the key "ffmpeg"; caller owns the returned integration
++ {
++ if (name == QLatin1String("ffmpeg"))
++ return new QFFmpegMediaIntegration;
++ return nullptr;
++ }
++};
++
++QFFmpegMediaIntegration::QFFmpegMediaIntegration()
++{
++ m_formatsInfo = new QFFmpegMediaFormatInfo(); // owned; released in the destructor below
++
++#if QT_CONFIG(linux_v4l)
++ m_videoDevices = new QV4L2CameraDevices(this);
++#endif
++#ifdef Q_OS_DARWIN
++ m_videoDevices = new QAVFVideoDevices(this); // NOTE(review): if linux_v4l were ever set together with Darwin/Windows this would leak the first assignment — presumably mutually exclusive, confirm
++#elif defined(Q_OS_WINDOWS)
++ m_videoDevices = new QWindowsVideoDevices(this);
++#endif
++
++#ifndef QT_NO_DEBUG
++ qDebug() << "Available HW decoding frameworks:"; // debug builds only: list libavutil's hw device types
++ AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
++ while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
++ qDebug() << " " << av_hwdevice_get_type_name(type);
++#endif
++}
++
++QFFmpegMediaIntegration::~QFFmpegMediaIntegration()
++{
++ delete m_formatsInfo;
++}
++
++QPlatformMediaFormatInfo *QFFmpegMediaIntegration::formatInfo()
++{
++ return m_formatsInfo; // non-owning pointer; lifetime bound to this integration
++}
++
++QMaybe<QPlatformMediaCaptureSession *> QFFmpegMediaIntegration::createCaptureSession()
++{
++ return new QFFmpegMediaCaptureSession();
++}
++
++QMaybe<QPlatformCamera *> QFFmpegMediaIntegration::createCamera(QCamera *camera)
++{
++#ifdef Q_OS_DARWIN
++ return new QAVFCamera(camera); // note: Darwin wins over linux_v4l here, unlike the ctor which checks linux_v4l first
++#elif QT_CONFIG(linux_v4l)
++ return new QV4L2Camera(camera);
++#elif defined(Q_OS_WINDOWS)
++ return new QWindowsCamera(camera);
++#else
++ Q_UNUSED(camera);
++ return nullptr;//new QFFmpegCamera(camera);
++#endif
++}
++
++QMaybe<QPlatformImageCapture *> QFFmpegMediaIntegration::createImageCapture(QImageCapture *imageCapture)
++{
++ return new QFFmpegImageCapture(imageCapture);
++}
++
++QMaybe<QPlatformVideoSink *> QFFmpegMediaIntegration::createVideoSink(QVideoSink *sink)
++{
++ return new QFFmpegVideoSink(sink);
++}
++
++#ifdef Q_OS_ANDROID
++Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)
++{
++ static bool initialized = false; // guard against the loader invoking us more than once
++ if (initialized)
++ return JNI_VERSION_1_6;
++ initialized = true;
++
++ QT_USE_NAMESPACE
++ void *environment;
++ if (vm->GetEnv(&environment, JNI_VERSION_1_6))
++ return JNI_ERR;
++
++ // setting our javavm into ffmpeg.
++ if (av_jni_set_java_vm(vm, nullptr))
++ return JNI_ERR;
++
++ return JNI_VERSION_1_6;
++}
++#endif
++
++QT_END_NAMESPACE
++
++#include "qffmpegmediaintegration.moc"
+diff --git a/src/plugins/multimedia/v4l2/qffmpegmediaintegration_p.h b/src/plugins/multimedia/v4l2/qffmpegmediaintegration_p.h
+new file mode 100644
+index 000000000..8b44da741
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegmediaintegration_p.h
+@@ -0,0 +1,43 @@
++// Copyright (C) 2021 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#ifndef QGSTREAMERINTEGRATION_H
++#define QGSTREAMERINTEGRATION_H
++
++//
++// W A R N I N G
++// -------------
++//
++// This file is not part of the Qt API. It exists purely as an
++// implementation detail. This header file may change from version to
++// version without notice, or even be removed.
++//
++// We mean it.
++//
++
++#include <private/qplatformmediaintegration_p.h>
++
++QT_BEGIN_NAMESPACE
++
++class QFFmpegMediaFormatInfo;
++
++class QFFmpegMediaIntegration : public QPlatformMediaIntegration // cut-down ffmpeg integration: capture/camera/sink only (playback classes removed by this patch)
++{
++public:
++ QFFmpegMediaIntegration();
++ ~QFFmpegMediaIntegration();
++
++ static QFFmpegMediaIntegration *instance() { return static_cast<QFFmpegMediaIntegration *>(QPlatformMediaIntegration::instance()); }
++ QPlatformMediaFormatInfo *formatInfo() override;
++
++ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
++ QMaybe<QPlatformCamera *> createCamera(QCamera *) override;
++ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *) override;
++ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *sink) override;
++
++ QFFmpegMediaFormatInfo *m_formatsInfo = nullptr; // owned; deleted in ~QFFmpegMediaIntegration
++};
++
++QT_END_NAMESPACE
++
++#endif
+diff --git a/src/plugins/multimedia/v4l2/qffmpegmediametadata.cpp b/src/plugins/multimedia/v4l2/qffmpegmediametadata.cpp
+new file mode 100644
+index 000000000..dda577d44
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegmediametadata.cpp
+@@ -0,0 +1,72 @@
++// Copyright (C) 2022 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#include "qffmpegmediametadata_p.h"
++#include <QDebug>
++#include <QtCore/qdatetime.h>
++#include <qstringlist.h>
++#include <qurl.h>
++#include <qlocale.h>
++
++QT_BEGIN_NAMESPACE
++
++namespace {
++
++struct { // sentinel-terminated mapping between ffmpeg metadata tag names and QMediaMetaData keys
++ const char *tag;
++ QMediaMetaData::Key key;
++} ffmpegTagToMetaDataKey[] = {
++ { "title", QMediaMetaData::Title },
++ { "comment", QMediaMetaData::Comment },
++ { "description", QMediaMetaData::Description },
++ { "genre", QMediaMetaData::Genre },
++ { "date", QMediaMetaData::Date }, // several ffmpeg tags fold into one Date key; tag->key lookup returns the first match
++ { "year", QMediaMetaData::Date },
++ { "creation_time", QMediaMetaData::Date },
++
++ { "language", QMediaMetaData::Language },
++
++ { "copyright", QMediaMetaData::Copyright },
++
++ // Music
++ { "album", QMediaMetaData::AlbumTitle },
++ { "album_artist", QMediaMetaData::AlbumArtist },
++ { "artist", QMediaMetaData::ContributingArtist },
++ { "track", QMediaMetaData::TrackNumber },
++
++ // Movie
++ { "performer", QMediaMetaData::LeadPerformer },
++
++ { nullptr, QMediaMetaData::Title } // sentinel: null tag terminates iteration
++};
++
++}
++
++static QMediaMetaData::Key tagToKey(const char *tag) // linear scan of the table above; Key(-1) means "unknown tag"
++{
++ auto *map = ffmpegTagToMetaDataKey;
++ while (map->tag) {
++ if (!strcmp(map->tag, tag))
++ return map->key;
++ ++map;
++ }
++ return QMediaMetaData::Key(-1);
++}
++
++static const char *keyToTag(QMediaMetaData::Key key) // reverse lookup; returns the first (canonical) tag for a key, or nullptr
++{
++ auto *map = ffmpegTagToMetaDataKey;
++ while (map->tag) {
++ if (map->key == key)
++ return map->tag;
++ ++map;
++ }
++ return nullptr;
++}
++
++QByteArray QFFmpegMetaData::value(const QMediaMetaData &metaData, QMediaMetaData::Key key)
++{
++ return {}; // NOTE(review): stubbed out in this trimmed patch (ignores both arguments); tagToKey/keyToTag above are left unused — confirm this is intentional
++}
++
++QT_END_NAMESPACE
+diff --git a/src/plugins/multimedia/v4l2/qffmpegmediametadata_p.h b/src/plugins/multimedia/v4l2/qffmpegmediametadata_p.h
+new file mode 100644
+index 000000000..95b069b64
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegmediametadata_p.h
+@@ -0,0 +1,30 @@
++// Copyright (C) 2022 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#ifndef QFFMPEGMEDIAMETADATA_H
++#define QFFMPEGMEDIAMETADATA_H
++
++//
++// W A R N I N G
++// -------------
++//
++// This file is not part of the Qt API. It exists purely as an
++// implementation detail. This header file may change from version to
++// version without notice, or even be removed.
++//
++// We mean it.
++//
++
++#include <qmediametadata.h>
++
++QT_BEGIN_NAMESPACE
++
++class QFFmpegMetaData : public QMediaMetaData // static-helper wrapper; adds no state over QMediaMetaData
++{
++public:
++ static QByteArray value(const QMediaMetaData &metaData, QMediaMetaData::Key key); // currently a stub returning an empty QByteArray (see .cpp)
++};
++
++QT_END_NAMESPACE
++
++#endif // QFFMPEGMEDIAMETADATA_H
+diff --git a/src/plugins/multimedia/v4l2/qffmpegvideosink.cpp b/src/plugins/multimedia/v4l2/qffmpegvideosink.cpp
+new file mode 100644
+index 000000000..93e7ceeed
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegvideosink.cpp
+@@ -0,0 +1,17 @@
++// Copyright (C) 2021 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++#include <qffmpegvideosink_p.h>
++
++QT_BEGIN_NAMESPACE
++
++QFFmpegVideoSink::QFFmpegVideoSink(QVideoSink *sink)
++ : QPlatformVideoSink(sink)
++{
++}
++
++void QFFmpegVideoSink::setVideoFrame(const QVideoFrame &frame)
++{
++ QPlatformVideoSink::setVideoFrame(frame); // pure pass-through; hw-accel texture conversion was removed by this patch
++}
++
++QT_END_NAMESPACE
+diff --git a/src/plugins/multimedia/v4l2/qffmpegvideosink_p.h b/src/plugins/multimedia/v4l2/qffmpegvideosink_p.h
+new file mode 100644
+index 000000000..cbaa810d7
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qffmpegvideosink_p.h
+@@ -0,0 +1,39 @@
++// Copyright (C) 2021 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#ifndef QFFMPEGVIDEOSINK_H
++#define QFFMPEGVIDEOSINK_H
++
++//
++// W A R N I N G
++// -------------
++//
++// This file is not part of the Qt API. It exists purely as an
++// implementation detail. This header file may change from version to
++// version without notice, or even be removed.
++//
++// We mean it.
++//
++
++#include <private/qplatformvideosink_p.h>
++//#include <qffmpeghwaccel_p.h>
++
++QT_BEGIN_NAMESPACE
++
++// Required for QDoc workaround
++class QString;
++
++class QFFmpegVideoSink : public QPlatformVideoSink // minimal sink: forwards frames to the base class unchanged
++{
++ Q_OBJECT
++
++public:
++ QFFmpegVideoSink(QVideoSink *sink);
++
++ void setVideoFrame(const QVideoFrame &frame) override;
++};
++
++QT_END_NAMESPACE
++
++
++#endif
+diff --git a/src/plugins/multimedia/v4l2/qv4l2camera.cpp b/src/plugins/multimedia/v4l2/qv4l2camera.cpp
+new file mode 100644
+index 000000000..0f7a8c91a
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qv4l2camera.cpp
+@@ -0,0 +1,940 @@
++// Copyright (C) 2021 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#include "qv4l2camera_p.h"
++
++#include <qdir.h>
++#include <qmutex.h>
++#include <qendian.h>
++#include <private/qcameradevice_p.h>
++#include <private/qabstractvideobuffer_p.h>
++#include <private/qvideotexturehelper_p.h>
++#include <private/qmultimediautils_p.h>
++#include <private/qplatformmediadevices_p.h>
++
++#include <sys/types.h>
++#include <sys/stat.h>
++#include <sys/ioctl.h>
++#include <unistd.h>
++#include <fcntl.h>
++#include <private/qcore_unix_p.h>
++#include <sys/mman.h>
++
++#include <linux/videodev2.h>
++
++#include <qloggingcategory.h>
++
++QT_BEGIN_NAMESPACE
++
++Q_LOGGING_CATEGORY(qLV4L2Camera, "qt.multimedia.ffmpeg.v4l2camera");
++
++QV4L2CameraDevices::QV4L2CameraDevices(QPlatformMediaIntegration *integration)
++ : QPlatformVideoDevices(integration)
++{
++ deviceWatcher.addPath(QLatin1String("/dev")); // re-enumerate cameras whenever /dev changes (hotplug)
++ connect(&deviceWatcher, &QFileSystemWatcher::directoryChanged, this, &QV4L2CameraDevices::checkCameras);
++ doCheckCameras();
++}
++
++QList<QCameraDevice> QV4L2CameraDevices::videoDevices() const
++{
++ return cameras;
++}
++
++void QV4L2CameraDevices::checkCameras()
++{
++ doCheckCameras();
++ videoInputsChanged(); // notify listeners after a rescan
++}
++
++const struct { // sentinel-terminated QVideoFrameFormat <-> V4L2 fourcc mapping
++ QVideoFrameFormat::PixelFormat fmt;
++ uint32_t v4l2Format;
++} formatMap[] = {
++ // ### How do we handle V4L2_PIX_FMT_H264 and V4L2_PIX_FMT_MPEG4?
++ { QVideoFrameFormat::Format_YUV420P, V4L2_PIX_FMT_YUV420 },
++ { QVideoFrameFormat::Format_YUV422P, V4L2_PIX_FMT_YUV422P },
++ { QVideoFrameFormat::Format_YUYV, V4L2_PIX_FMT_YUYV },
++ { QVideoFrameFormat::Format_UYVY, V4L2_PIX_FMT_UYVY },
++ { QVideoFrameFormat::Format_XBGR8888, V4L2_PIX_FMT_XBGR32 },
++ { QVideoFrameFormat::Format_XRGB8888, V4L2_PIX_FMT_XRGB32 },
++ { QVideoFrameFormat::Format_ABGR8888, V4L2_PIX_FMT_ABGR32 },
++ { QVideoFrameFormat::Format_ARGB8888, V4L2_PIX_FMT_ARGB32 },
++ { QVideoFrameFormat::Format_BGRX8888, V4L2_PIX_FMT_BGR32 },
++ { QVideoFrameFormat::Format_RGBX8888, V4L2_PIX_FMT_RGB32 },
++ { QVideoFrameFormat::Format_BGRA8888, V4L2_PIX_FMT_BGRA32 },
++ { QVideoFrameFormat::Format_RGBA8888, V4L2_PIX_FMT_RGBA32 },
++ { QVideoFrameFormat::Format_Y8, V4L2_PIX_FMT_GREY },
++ { QVideoFrameFormat::Format_Y16, V4L2_PIX_FMT_Y16 },
++ { QVideoFrameFormat::Format_NV12, V4L2_PIX_FMT_NV12 },
++ { QVideoFrameFormat::Format_NV21, V4L2_PIX_FMT_NV21 },
++ { QVideoFrameFormat::Format_Jpeg, V4L2_PIX_FMT_MJPEG }, // two v4l2 jpeg fourccs map to the same Qt format; reverse lookup picks MJPEG first
++ { QVideoFrameFormat::Format_Jpeg, V4L2_PIX_FMT_JPEG },
++ { QVideoFrameFormat::Format_Invalid, 0 }, // sentinel: v4l2Format == 0 terminates iteration
++};
++
++static QVideoFrameFormat::PixelFormat formatForV4L2Format(uint32_t v4l2Format)
++{
++ auto *f = formatMap;
++ while (f->v4l2Format) {
++ if (f->v4l2Format == v4l2Format)
++ return f->fmt;
++ ++f;
++ }
++ return QVideoFrameFormat::Format_Invalid;
++}
++
++static uint32_t v4l2FormatForPixelFormat(QVideoFrameFormat::PixelFormat format)
++{
++ auto *f = formatMap;
++ while (f->v4l2Format) {
++ if (f->fmt == format)
++ return f->v4l2Format;
++ ++f;
++ }
++ return 0;
++}
++
++
++void QV4L2CameraDevices::doCheckCameras()
++{
++ // Enumerate /dev/video* nodes, keep streaming-capable capture devices,
++ // and collect their discrete formats / resolutions / frame rates.
++ cameras.clear();
++
++ QDir dir(QLatin1String("/dev"));
++ const auto devices = dir.entryList(QDir::System);
++
++ bool first = true;
++
++ for (auto device : devices) {
++// qCDebug(qLV4L2Camera) << "device:" << device;
++ if (!device.startsWith(QLatin1String("video")))
++ continue;
++
++ QByteArray file = QFile::encodeName(dir.filePath(device));
++ int fd = open(file.constData(), O_RDONLY);
++ if (fd < 0)
++ continue;
++
++ QCameraDevicePrivate *camera = nullptr;
++ v4l2_fmtdesc formatDesc = {};
++
++ struct v4l2_capability cap;
++ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
++ goto fail;
++
++ if (cap.device_caps & V4L2_CAP_META_CAPTURE) // skip metadata-only nodes
++ goto fail;
++ if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
++ goto fail;
++ if (!(cap.capabilities & V4L2_CAP_STREAMING))
++ goto fail;
++
++ camera = new QCameraDevicePrivate;
++ camera->id = file;
++ camera->description = QString::fromUtf8((const char *)cap.card);
++// qCDebug(qLV4L2Camera) << "found camera" << camera->id << camera->description;
++
++ formatDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++
++ while (!ioctl(fd, VIDIOC_ENUM_FMT, &formatDesc)) {
++ auto pixelFmt = formatForV4L2Format(formatDesc.pixelformat);
++ qCDebug(qLV4L2Camera) << " " << pixelFmt;
++
++ if (pixelFmt == QVideoFrameFormat::Format_Invalid) {
++ ++formatDesc.index;
++ continue;
++ }
++
++// qCDebug(qLV4L2Camera) << "frame sizes:";
++ v4l2_frmsizeenum frameSize = {};
++ frameSize.pixel_format = formatDesc.pixelformat;
++
++ while (!ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) {
++ if (frameSize.type != V4L2_FRMSIZE_TYPE_DISCRETE)
++ break; // FIX: was 'continue', which skipped the index increment below and re-issued the same ioctl forever on STEPWISE/CONTINUOUS drivers
++
++ QSize resolution(frameSize.discrete.width, frameSize.discrete.height);
++ float min = 1e10;
++ float max = 0;
++
++ v4l2_frmivalenum frameInterval = {};
++ frameInterval.pixel_format = formatDesc.pixelformat;
++ frameInterval.width = frameSize.discrete.width;
++ frameInterval.height = frameSize.discrete.height;
++
++ while (!ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) {
++ if (frameInterval.type != V4L2_FRMIVAL_TYPE_DISCRETE)
++ break; // FIX: was 'continue', which skipped ++frameInterval.index and spun forever on non-discrete intervals
++ ++frameInterval.index;
++ float rate = float(frameInterval.discrete.denominator)/float(frameInterval.discrete.numerator);
++ if (rate > max)
++ max = rate;
++ if (rate < min)
++ min = rate;
++ }
++
++// qCDebug(qLV4L2Camera) << " " << resolution << min << max;
++ ++frameSize.index;
++
++ if (min <= max) {
++ QCameraFormatPrivate *fmt = new QCameraFormatPrivate;
++ fmt->pixelFormat = pixelFmt;
++ fmt->resolution = resolution;
++ fmt->minFrameRate = min;
++ fmt->maxFrameRate = max;
++ camera->videoFormats.append(fmt->create());
++ camera->photoResolutions.append(resolution);
++ }
++ }
++
++ ++formatDesc.index;
++ }
++
++ // first camera is default
++ camera->isDefault = first;
++ first = false;
++
++ cameras.append(camera->create());
++
++ close(fd);
++ continue;
++
++ fail:
++ if (camera)
++ delete camera;
++ close(fd);
++ }
++}
++
++class QV4L2VideoBuffer : public QAbstractVideoBuffer // wraps one mmap'ed V4L2 buffer; re-queues it to the driver on destruction
++{
++public:
++ QV4L2VideoBuffer(QV4L2CameraBuffers *d, int index)
++ : QAbstractVideoBuffer(QVideoFrame::NoHandle, nullptr)
++ , index(index)
++ , d(d)
++ {}
++ ~QV4L2VideoBuffer()
++ {
++ d->release(index); // VIDIOC_QBUF the slot back so the driver can refill it
++ }
++
++ QVideoFrame::MapMode mapMode() const override { return m_mode; }
++ MapData map(QVideoFrame::MapMode mode) override {
++ m_mode = mode;
++ return d->v4l2FileDescriptor >= 0 ? data : MapData{}; // empty map if the device was closed underneath us
++ }
++ void unmap() override {
++ m_mode = QVideoFrame::NotMapped;
++ }
++
++ QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
++ MapData data; // plane pointers into the mmap'ed region, filled by readFrame()
++ int index = 0; // V4L2 buffer slot this frame came from
++ QExplicitlySharedDataPointer<QV4L2CameraBuffers> d; // keeps the buffer pool alive while frames are in flight
++};
++
++QV4L2CameraBuffers::~QV4L2CameraBuffers()
++{
++ QMutexLocker locker(&mutex);
++ Q_ASSERT(v4l2FileDescriptor < 0); // closeV4L2Fd() must have run before the last reference drops
++ unmapBuffers();
++}
++
++
++
++void QV4L2CameraBuffers::release(int index)
++{
++ QMutexLocker locker(&mutex); // may be called from a frame's destructor on any thread
++ if (v4l2FileDescriptor < 0 || index >= mappedBuffers.size())
++ return;
++
++ struct v4l2_buffer buf = {};
++
++ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++ buf.memory = V4L2_MEMORY_MMAP;
++ buf.index = index;
++
++ if (ioctl(v4l2FileDescriptor, VIDIOC_QBUF, &buf) < 0)
++ qWarning() << "Couldn't release V4L2 buffer" << errno << strerror(errno) << index;
++}
++
++void QV4L2CameraBuffers::unmapBuffers()
++{
++ for (const auto &b : std::as_const(mappedBuffers))
++ munmap(b.data, b.size);
++ mappedBuffers.clear();
++}
++
++QV4L2Camera::QV4L2Camera(QCamera *camera)
++ : QPlatformCamera(camera)
++{
++}
++
++QV4L2Camera::~QV4L2Camera()
++{
++ setActive(false);
++ stopCapturing();
++ closeV4L2Fd();
++}
++
++bool QV4L2Camera::isActive() const
++{
++ return m_active;
++}
++
++void QV4L2Camera::setActive(bool active)
++{
++ if (m_active == active)
++ return;
++ if (m_cameraDevice.isNull() && active) // cannot start without a device selected
++ return;
++
++ if (m_cameraFormat.isNull())
++ resolveCameraFormat({}); // pick the best available format if none was chosen
++
++ m_active = active;
++ if (m_active) {
++ setV4L2CameraFormat();
++ initMMap();
++ startCapturing();
++ } else {
++ stopCapturing();
++ }
++ emit newVideoFrame({}); // push an empty frame so sinks clear their last image
++
++ emit activeChanged(active);
++}
++
++void QV4L2Camera::setCamera(const QCameraDevice &camera)
++{
++ if (m_cameraDevice == camera)
++ return;
++ if (m_active)
++ stopCapturing();
++
++ closeV4L2Fd();
++
++ m_cameraDevice = camera;
++ resolveCameraFormat({});
++
++ initV4L2Controls(); // re-opens the fd and probes the new device's controls
++
++ if (m_active) { // restart the stream on the new device
++ setV4L2CameraFormat();
++ initMMap();
++ startCapturing();
++ }
++}
++
++bool QV4L2Camera::setCameraFormat(const QCameraFormat &format)
++{
++ if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
++ return false;
++
++ if (!resolveCameraFormat(format))
++ return true; // format unchanged: nothing to do, but not an error
++
++ if (m_active) { // full restart: V4L2 requires the stream off to change formats
++ stopCapturing();
++ closeV4L2Fd();
++ initV4L2Controls();
++ setV4L2CameraFormat();
++ initMMap();
++ startCapturing();
++ }
++
++ return true;
++}
++
++bool QV4L2Camera::resolveCameraFormat(const QCameraFormat &format)
++{
++ // Returns true only if the effective format actually changed.
++ auto fmt = format;
++ if (fmt.isNull())
++ fmt = findBestCameraFormat(m_cameraDevice);
++
++ if (fmt == m_cameraFormat)
++ return false;
++
++ m_cameraFormat = fmt;
++ return true;
++}
++
++void QV4L2Camera::setFocusMode(QCamera::FocusMode mode)
++{
++ if (mode == focusMode())
++ return;
++
++ bool focusDist = supportedFeatures() & QCamera::Feature::FocusDistance;
++ if (!focusDist && !v4l2RangedFocus) // device exposes neither absolute focus nor focus ranges
++ return;
++
++ switch (mode) {
++ default:
++ case QCamera::FocusModeAuto:
++ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 1);
++ if (v4l2RangedFocus)
++ setV4L2Parameter(V4L2_CID_AUTO_FOCUS_RANGE, V4L2_AUTO_FOCUS_RANGE_AUTO);
++ break;
++ case QCamera::FocusModeAutoNear:
++ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 1);
++ if (v4l2RangedFocus)
++ setV4L2Parameter(V4L2_CID_AUTO_FOCUS_RANGE, V4L2_AUTO_FOCUS_RANGE_MACRO);
++ else if (focusDist)
++ setV4L2Parameter(V4L2_CID_FOCUS_ABSOLUTE, v4l2MinFocus); // no macro range: approximate by focusing as near as possible
++ break;
++ case QCamera::FocusModeAutoFar:
++ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 1);
++ if (v4l2RangedFocus)
++ setV4L2Parameter(V4L2_CID_AUTO_FOCUS_RANGE, V4L2_AUTO_FOCUS_RANGE_INFINITY);
++ break;
++ case QCamera::FocusModeInfinity:
++ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 0);
++ setV4L2Parameter(V4L2_CID_FOCUS_ABSOLUTE, v4l2MaxFocus);
++ break;
++ case QCamera::FocusModeManual:
++ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 0);
++ setFocusDistance(focusDistance()); // re-apply the current distance now that autofocus is off
++ break;
++ }
++ focusModeChanged(mode);
++}
++
++void QV4L2Camera::setFocusDistance(float d)
++{
++ // d is normalized 0..1; map linearly onto the device's absolute focus range.
++ int distance = v4l2MinFocus + int((v4l2MaxFocus - v4l2MinFocus)*d);
++ setV4L2Parameter(V4L2_CID_FOCUS_ABSOLUTE, distance);
++ focusDistanceChanged(d);
++}
++
++void QV4L2Camera::zoomTo(float factor, float)
++{
++ if (v4l2MaxZoom == v4l2MinZoom)
++ return;
++ factor = qBound(1., factor, 2.); // device zoom units are arbitrary; normalized to factor range [1, 2]
++ int zoom = v4l2MinZoom + (factor - 1.)*(v4l2MaxZoom - v4l2MinZoom);
++ setV4L2Parameter(V4L2_CID_ZOOM_ABSOLUTE, zoom);
++ zoomFactorChanged(factor);
++}
++
++bool QV4L2Camera::isFocusModeSupported(QCamera::FocusMode mode) const
++{
++ if (supportedFeatures() & QCamera::Feature::FocusDistance &&
++ (mode == QCamera::FocusModeManual || mode == QCamera::FocusModeAutoNear || mode == QCamera::FocusModeInfinity))
++ return true;
++
++ return mode == QCamera::FocusModeAuto;
++}
++
++void QV4L2Camera::setFlashMode(QCamera::FlashMode mode)
++{
++ if (!v4l2FlashSupported || mode == QCamera::FlashOn) // FlashOn (always-on) is not supported, only auto/off
++ return;
++ setV4L2Parameter(V4L2_CID_FLASH_LED_MODE, mode == QCamera::FlashAuto ? V4L2_FLASH_LED_MODE_FLASH : V4L2_FLASH_LED_MODE_NONE);
++ flashModeChanged(mode);
++}
++
++bool QV4L2Camera::isFlashModeSupported(QCamera::FlashMode mode) const
++{
++ if (v4l2FlashSupported && mode == QCamera::FlashAuto)
++ return true;
++ return mode == QCamera::FlashOff; // off is always "supported"
++}
++
++bool QV4L2Camera::isFlashReady() const
++{
++ struct v4l2_queryctrl queryControl;
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_AUTO_WHITE_BALANCE; // NOTE(review): queries the auto-white-balance control, not a flash control — looks like a placeholder readiness probe; confirm intent
++
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0)
++ return true;
++
++ return false;
++}
++
++void QV4L2Camera::setTorchMode(QCamera::TorchMode mode)
++{
++ // FIX: the guard previously rejected TorchOn — the only on-mode this backend
++ // supports (see isTorchModeSupported) — which made the TORCH branch below
++ // unreachable. TorchAuto is the unsupported mode and is rejected instead.
++ if (!v4l2TorchSupported || mode == QCamera::TorchAuto)
++ return;
++ setV4L2Parameter(V4L2_CID_FLASH_LED_MODE, mode == QCamera::TorchOn ? V4L2_FLASH_LED_MODE_TORCH : V4L2_FLASH_LED_MODE_NONE);
++ torchModeChanged(mode);
++}
++
++bool QV4L2Camera::isTorchModeSupported(QCamera::TorchMode mode) const
++{
++ if (mode == QCamera::TorchOn)
++ return v4l2TorchSupported;
++ return mode == QCamera::TorchOff; // off always supported; TorchAuto never
++}
++
++void QV4L2Camera::setExposureMode(QCamera::ExposureMode mode)
++{
++ if (v4l2AutoExposureSupported && v4l2ManualExposureSupported) {
++ if (mode != QCamera::ExposureAuto && mode != QCamera::ExposureManual)
++ return;
++ // FIX: was 'QCamera::ExposureAuto ? ... : ...' — testing the enum constant
++ // itself, so the chosen V4L2 value never depended on the requested mode.
++ int value = mode == QCamera::ExposureAuto ? V4L2_EXPOSURE_AUTO : V4L2_EXPOSURE_MANUAL;
++ setV4L2Parameter(V4L2_CID_EXPOSURE_AUTO, value);
++ exposureModeChanged(mode);
++ return;
++ }
++}
++
++bool QV4L2Camera::isExposureModeSupported(QCamera::ExposureMode mode) const
++{
++ if (mode == QCamera::ExposureAuto)
++ return true; // auto is always reported as supported
++ if (v4l2ManualExposureSupported && v4l2AutoExposureSupported)
++ return mode == QCamera::ExposureManual;
++ return false;
++}
++
++void QV4L2Camera::setExposureCompensation(float compensation)
++{
++ if ((v4l2MinExposureAdjustment != 0 || v4l2MaxExposureAdjustment != 0)) {
++ int value = qBound(v4l2MinExposureAdjustment, (int)(compensation*1000), v4l2MaxExposureAdjustment); // V4L2 bias is in 0.001 EV units
++ setV4L2Parameter(V4L2_CID_AUTO_EXPOSURE_BIAS, value);
++ exposureCompensationChanged(value/1000.); // report the clamped value actually applied
++ return;
++ }
++}
++
++void QV4L2Camera::setManualIsoSensitivity(int iso)
++{
++ if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity))
++ return;
++ setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY_AUTO, iso <= 0 ? V4L2_ISO_SENSITIVITY_AUTO : V4L2_ISO_SENSITIVITY_MANUAL); // iso <= 0 selects auto ISO
++ if (iso > 0) {
++ iso = qBound(minIso(), iso, maxIso());
++ setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY, iso);
++ }
++ return;
++}
++
++int QV4L2Camera::isoSensitivity() const
++{
++ if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity))
++ return -1;
++ return getV4L2Parameter(V4L2_CID_ISO_SENSITIVITY);
++}
++
++void QV4L2Camera::setManualExposureTime(float secs)
++{
++ if (v4l2ManualExposureSupported && v4l2AutoExposureSupported) {
++ int exposure = qBound(v4l2MinExposure, qRound(secs*10000.), v4l2MaxExposure); // V4L2 exposure is in 100 microsecond units
++ setV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE, exposure);
++ exposureTimeChanged(exposure/10000.);
++ return;
++ }
++}
++
++float QV4L2Camera::exposureTime() const
++{
++ return getV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE)/10000.; // convert 100us units back to seconds
++}
++
++bool QV4L2Camera::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
++{
++ if (v4l2AutoWhiteBalanceSupported && v4l2ColorTemperatureSupported) // any preset can be mapped to a color temperature
++ return true;
++
++ return mode == QCamera::WhiteBalanceAuto;
++}
++
++void QV4L2Camera::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
++{
++ Q_ASSERT(isWhiteBalanceModeSupported(mode));
++
++ int temperature = colorTemperatureForWhiteBalance(mode);
++ int t = setV4L2ColorTemperature(temperature);
++ if (t == 0) // device rejected the temperature: fall back to auto
++ mode = QCamera::WhiteBalanceAuto;
++ whiteBalanceModeChanged(mode);
++}
++
++void QV4L2Camera::setColorTemperature(int temperature)
++{
++ if (temperature == 0) { // 0 means "let the device decide"
++ setWhiteBalanceMode(QCamera::WhiteBalanceAuto);
++ return;
++ }
++
++ Q_ASSERT(isWhiteBalanceModeSupported(QCamera::WhiteBalanceManual));
++
++ int t = setV4L2ColorTemperature(temperature);
++ if (t)
++ colorTemperatureChanged(t); // t is the clamped value actually applied
++}
++
++void QV4L2Camera::readFrame()
++{
++ // Dequeue one filled buffer, wrap it in a zero-copy QVideoFrame and emit it.
++ if (!d)
++ return;
++
++ v4l2_buffer buf = {};
++ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++ buf.memory = V4L2_MEMORY_MMAP;
++
++ if (ioctl(d->v4l2FileDescriptor, VIDIOC_DQBUF, &buf) < 0) {
++ if (errno == ENODEV) {
++ // camera got removed while being active
++ stopCapturing();
++ closeV4L2Fd();
++ return;
++ }
++ if (errno != EAGAIN) qWarning() << "error calling VIDIOC_DQBUF" << errno << strerror(errno);
++ return; // FIX: previously fell through after a failed dequeue, wrapping buf.index (zero-initialized) and re-queuing a stale buffer
++ }
++
++ Q_ASSERT(qsizetype(buf.index) < d->mappedBuffers.size());
++ int i = buf.index;
++
++// auto textureDesc = QVideoTextureHelper::textureDescription(m_format.pixelFormat());
++
++ QV4L2VideoBuffer *buffer = new QV4L2VideoBuffer(d.get(), i); // owned by the QVideoFrame below
++ buffer->data.nPlanes = 1;
++ buffer->data.bytesPerLine[0] = bytesPerLine;
++ buffer->data.data[0] = (uchar *)d->mappedBuffers.at(i).data;
++ buffer->data.size[0] = d->mappedBuffers.at(i).size;
++ QVideoFrameFormat fmt(m_cameraFormat.resolution(), m_cameraFormat.pixelFormat());
++ fmt.setColorSpace(colorSpace);
++// qCDebug(qLV4L2Camera) << "got a frame" << d->mappedBuffers.at(i).data << d->mappedBuffers.at(i).size << fmt << i;
++ QVideoFrame frame(buffer, fmt);
++
++ if (firstFrameTime.tv_sec == -1) // timestamps are reported relative to the first captured frame
++ firstFrameTime = buf.timestamp;
++ qint64 secs = buf.timestamp.tv_sec - firstFrameTime.tv_sec;
++ qint64 usecs = buf.timestamp.tv_usec - firstFrameTime.tv_usec;
++ frame.setStartTime(secs*1000000 + usecs);
++ frame.setEndTime(frame.startTime() + frameDuration);
++
++ emit newVideoFrame(frame);
++}
++
++void QV4L2Camera::setCameraBusy()
++{
++ cameraBusy = true; // reached when VIDIOC_S_FMT fails with EBUSY (device held by another process)
++ error(QCamera::CameraError, tr("Camera is in use."));
++}
++
++void QV4L2Camera::initV4L2Controls()
++{
++ // (Re)open the device node and probe which V4L2 controls it exposes,
++ // caching their ranges and reporting the corresponding QCamera features.
++ v4l2AutoWhiteBalanceSupported = false;
++ v4l2ColorTemperatureSupported = false;
++ v4l2RangedFocus = false;
++ v4l2FlashSupported = false;
++ v4l2TorchSupported = false;
++ QCamera::Features features;
++
++
++ const QByteArray deviceName = m_cameraDevice.id();
++ Q_ASSERT(!deviceName.isEmpty());
++
++ closeV4L2Fd();
++ Q_ASSERT(!d);
++
++ d = new QV4L2CameraBuffers;
++
++ d->v4l2FileDescriptor = qt_safe_open(deviceName.constData(), O_RDWR);
++ if (d->v4l2FileDescriptor == -1) {
++ qWarning() << "Unable to open the camera" << deviceName
++ << "for read to query the parameter info:" << qt_error_string(errno);
++ return;
++ }
++ qCDebug(qLV4L2Camera) << "FD=" << d->v4l2FileDescriptor;
++
++ struct v4l2_queryctrl queryControl;
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_AUTO_WHITE_BALANCE;
++
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ v4l2AutoWhiteBalanceSupported = true;
++ setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, true); // default to auto white balance
++ }
++
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ v4l2MinColorTemp = queryControl.minimum;
++ v4l2MaxColorTemp = queryControl.maximum;
++ v4l2ColorTemperatureSupported = true;
++ features |= QCamera::Feature::ColorTemperature;
++ }
++
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_EXPOSURE_AUTO;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ v4l2AutoExposureSupported = true;
++ }
++
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_EXPOSURE_ABSOLUTE;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ v4l2ManualExposureSupported = true;
++ v4l2MinExposure = queryControl.minimum;
++ v4l2MaxExposure = queryControl.maximum;
++ features |= QCamera::Feature::ManualExposureTime;
++ }
++
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_AUTO_EXPOSURE_BIAS;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ v4l2MinExposureAdjustment = queryControl.minimum;
++ v4l2MaxExposureAdjustment = queryControl.maximum;
++ features |= QCamera::Feature::ExposureCompensation;
++ }
++
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_ISO_SENSITIVITY_AUTO;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ queryControl.id = V4L2_CID_ISO_SENSITIVITY;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ features |= QCamera::Feature::IsoSensitivity;
++ minIsoChanged(queryControl.minimum);
++ maxIsoChanged(queryControl.maximum); // FIX: was queryControl.minimum, reporting the minimum as the maximum ISO
++ }
++ }
++
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_FOCUS_ABSOLUTE;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ v4l2MinFocus = queryControl.minimum; // FIX: was overwriting v4l2MinExposureAdjustment (copy/paste), leaving the focus range unset
++ v4l2MaxFocus = queryControl.maximum; // FIX: was overwriting v4l2MaxExposureAdjustment
++ features |= QCamera::Feature::FocusDistance;
++ }
++
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_AUTO_FOCUS_RANGE;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ v4l2RangedFocus = true;
++ }
++
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_FLASH_LED_MODE;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ v4l2FlashSupported = queryControl.minimum <= V4L2_FLASH_LED_MODE_FLASH && queryControl.maximum >= V4L2_FLASH_LED_MODE_FLASH;
++ v4l2TorchSupported = queryControl.minimum <= V4L2_FLASH_LED_MODE_TORCH && queryControl.maximum >= V4L2_FLASH_LED_MODE_TORCH;
++ }
++
++ v4l2MinZoom = 0;
++ v4l2MaxZoom = 0;
++ ::memset(&queryControl, 0, sizeof(queryControl));
++ queryControl.id = V4L2_CID_ZOOM_ABSOLUTE;
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
++ v4l2MinZoom = queryControl.minimum;
++ v4l2MaxZoom = queryControl.maximum;
++ }
++ // zoom factors are in arbitrary units, so we simply normalize them to go from 1 to 2
++ // if they are different
++ minimumZoomFactorChanged(1);
++ maximumZoomFactorChanged(v4l2MinZoom != v4l2MaxZoom ? 2 : 1);
++
++ supportedFeaturesChanged(features);
++}
++
++void QV4L2Camera::closeV4L2Fd()
++{
++ if (d && d->v4l2FileDescriptor >= 0) {
++ QMutexLocker locker(&d->mutex); // in-flight frames check the fd under this mutex before mapping
++ d->unmapBuffers();
++ qt_safe_close(d->v4l2FileDescriptor);
++ d->v4l2FileDescriptor = -1;
++ }
++ d = nullptr; // drops our reference; outstanding QVideoFrames keep the pool alive via their own refs
++}
++
++int QV4L2Camera::setV4L2ColorTemperature(int temperature)
++{
++ // Returns the temperature actually applied, or 0 if the device is in auto mode
++ // or rejected the value. temperature == 0 requests auto white balance.
++ struct v4l2_control control;
++ ::memset(&control, 0, sizeof(control));
++
++ if (v4l2AutoWhiteBalanceSupported) {
++ setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, temperature == 0 ? true : false);
++ } else if (temperature == 0) {
++ temperature = 5600; // no auto mode available: fall back to daylight
++ }
++
++ if (temperature != 0 && v4l2ColorTemperatureSupported) {
++ temperature = qBound(v4l2MinColorTemp, temperature, v4l2MaxColorTemp);
++ if (!setV4L2Parameter(V4L2_CID_WHITE_BALANCE_TEMPERATURE, qBound(v4l2MinColorTemp, temperature, v4l2MaxColorTemp)))
++ temperature = 0;
++ } else {
++ temperature = 0;
++ }
++
++ return temperature;
++}
++
++bool QV4L2Camera::setV4L2Parameter(quint32 id, qint32 value)
++{
++ struct v4l2_control control{id, value};
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_S_CTRL, &control) != 0) {
++ qWarning() << "Unable to set the V4L2 Parameter" << Qt::hex << id << "to" << value << qt_error_string(errno);
++ return false;
++ }
++ return true;
++}
++
++int QV4L2Camera::getV4L2Parameter(quint32 id) const
++{
++ struct v4l2_control control{id, 0};
++ if (::ioctl(d->v4l2FileDescriptor, VIDIOC_G_CTRL, &control) != 0) {
++ qWarning() << "Unable to get the V4L2 Parameter" << Qt::hex << id << qt_error_string(errno);
++ return 0; // 0 doubles as the error value; callers cannot distinguish a real 0
++ }
++ return control.value;
++}
++
++// Pushes the selected m_cameraFormat to the driver: resolution and pixel
++// format via VIDIOC_S_FMT, frame rate via VIDIOC_S_PARM. Caches the
++// resulting stride (bytesPerLine), the reported color space and the frame
++// duration in microseconds. Marks the camera busy and bails out when the
++// device is claimed by another process (EBUSY).
++void QV4L2Camera::setV4L2CameraFormat()
++{
++    Q_ASSERT(!m_cameraFormat.isNull());
++    qCDebug(qLV4L2Camera) << "setV4L2CameraFormat" << this << m_cameraDevice.id()
++                          << m_cameraFormat.pixelFormat() << m_cameraFormat.resolution();
++
++    v4l2_format fmt = {};
++    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++
++    auto size = m_cameraFormat.resolution();
++    fmt.fmt.pix.width = size.width();
++    fmt.fmt.pix.height = size.height();
++    fmt.fmt.pix.pixelformat = v4l2FormatForPixelFormat(m_cameraFormat.pixelFormat());
++    fmt.fmt.pix.field = V4L2_FIELD_ANY;
++
++    qCDebug(qLV4L2Camera) << "setting camera format to" << size;
++
++    if (ioctl(d->v4l2FileDescriptor, VIDIOC_S_FMT, &fmt) < 0) {
++        if (errno == EBUSY) {
++            setCameraBusy();
++            return;
++        }
++        qWarning() << "Couldn't set video format on v4l2 camera" << strerror(errno);
++    }
++
++    // The driver writes back the format it actually selected.
++    bytesPerLine = fmt.fmt.pix.bytesperline;
++
++    switch (v4l2_colorspace(fmt.fmt.pix.colorspace)) {
++    default:
++    case V4L2_COLORSPACE_DCI_P3:
++        colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
++        break;
++    case V4L2_COLORSPACE_REC709:
++        colorSpace = QVideoFrameFormat::ColorSpace_BT709;
++        break;
++    case V4L2_COLORSPACE_JPEG:
++        colorSpace = QVideoFrameFormat::ColorSpace_AdobeRgb;
++        break;
++    case V4L2_COLORSPACE_SRGB:
++        // ##### is this correct???
++        colorSpace = QVideoFrameFormat::ColorSpace_BT601;
++        break;
++    case V4L2_COLORSPACE_BT2020:
++        colorSpace = QVideoFrameFormat::ColorSpace_BT2020;
++        break;
++    }
++
++    v4l2_streamparm streamParam = {};
++    streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++
++    streamParam.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
++    auto [num, den] = qRealToFraction(1./m_cameraFormat.maxFrameRate());
++    streamParam.parm.capture.timeperframe = { (uint)num, (uint)den };
++    ioctl(d->v4l2FileDescriptor, VIDIOC_S_PARM, &streamParam);
++
++    // The driver may have adjusted (or zeroed) the interval; guard against a
++    // zero denominator to avoid a division by zero with broken drivers.
++    if (streamParam.parm.capture.timeperframe.denominator)
++        frameDuration = 1000000*streamParam.parm.capture.timeperframe.numerator
++                        /streamParam.parm.capture.timeperframe.denominator;
++}
++
++// Requests a set of capture buffers from the driver (V4L2 streaming I/O)
++// and mmap()s each one into the process with MAP_SHARED. Does nothing while
++// the camera is marked busy. On partial failure the function returns early;
++// buffers mapped so far remain in d->mappedBuffers and are released later
++// via QV4L2CameraBuffers::unmapBuffers().
++void QV4L2Camera::initMMap()
++{
++    if (cameraBusy)
++        return;
++
++    v4l2_requestbuffers req = {};
++    req.count = 4;
++    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++    req.memory = V4L2_MEMORY_MMAP;
++
++    if (ioctl(d->v4l2FileDescriptor, VIDIOC_REQBUFS, &req) < 0) {
++        if (errno == EBUSY)
++            setCameraBusy();
++        qWarning() << "requesting mmap'ed buffers failed" << strerror(errno);
++        return;
++    }
++
++    // The driver may grant fewer buffers than the 4 we asked for; streaming
++    // needs at least two to ping-pong.
++    if (req.count < 2) {
++        qWarning() << "Can't map 2 or more buffers";
++        return;
++    }
++
++    for (uint32_t n = 0; n < req.count; ++n) {
++        v4l2_buffer buf = {};
++        buf.index = n;
++        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++        buf.memory = V4L2_MEMORY_MMAP;
++
++        // Query the buffer's offset and length so it can be mapped.
++        if (ioctl(d->v4l2FileDescriptor, VIDIOC_QUERYBUF, &buf) != 0) {
++            qWarning() << "Can't map buffer" << n;
++            return;
++        }
++
++        QV4L2CameraBuffers::MappedBuffer buffer;
++        buffer.size = buf.length;
++        buffer.data = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,
++                           d->v4l2FileDescriptor, buf.m.offset);
++
++        if (buffer.data == MAP_FAILED) {
++            qWarning() << "mmap failed" << n << buf.length << buf.m.offset;
++            return;
++        }
++
++        d->mappedBuffers.append(buffer);
++    }
++
++}
++
++// Stops streaming: destroys the read notifier first so no further frames are
++// dispatched, then issues VIDIOC_STREAMOFF. ENODEV (device unplugged) is not
++// worth a warning. Clears the busy flag in all cases.
++void QV4L2Camera::stopCapturing()
++{
++    if (!d)
++        return;
++
++    delete notifier;
++    notifier = nullptr;
++
++    v4l2_buf_type bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++    if (ioctl(d->v4l2FileDescriptor, VIDIOC_STREAMOFF, &bufType) < 0 && errno != ENODEV)
++        qWarning() << "failed to stop capture";
++
++    cameraBusy = false;
++}
++
++// Queues every mmap'ed buffer with the driver, starts streaming, and installs
++// a socket notifier so readFrame() runs whenever a frame becomes readable on
++// the camera fd. Resets the first-frame timestamp marker.
++void QV4L2Camera::startCapturing()
++{
++    if (cameraBusy)
++        return;
++
++    // #### better to use the user data method instead of mmap???
++    const qsizetype bufferCount = d->mappedBuffers.size();
++    for (qsizetype index = 0; index < bufferCount; ++index) {
++        v4l2_buffer buf = {};
++        buf.index = index;
++        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++        buf.memory = V4L2_MEMORY_MMAP;
++        if (ioctl(d->v4l2FileDescriptor, VIDIOC_QBUF, &buf) < 0) {
++            qWarning() << "failed to set up mapped buffer";
++            return;
++        }
++    }
++
++    v4l2_buf_type bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
++    if (ioctl(d->v4l2FileDescriptor, VIDIOC_STREAMON, &bufType) < 0)
++        qWarning() << "failed to start capture";
++
++    notifier = new QSocketNotifier(d->v4l2FileDescriptor, QSocketNotifier::Read);
++    connect(notifier, &QSocketNotifier::activated, this, &QV4L2Camera::readFrame);
++
++    firstFrameTime = { -1, -1 };
++}
++
++QT_END_NAMESPACE
+diff --git a/src/plugins/multimedia/v4l2/qv4l2camera_p.h b/src/plugins/multimedia/v4l2/qv4l2camera_p.h
+new file mode 100644
+index 000000000..714b4c1db
+--- /dev/null
++++ b/src/plugins/multimedia/v4l2/qv4l2camera_p.h
+@@ -0,0 +1,160 @@
++// Copyright (C) 2021 The Qt Company Ltd.
++// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
++
++#ifndef QFFMPEGCAMERA_H
++#define QFFMPEGCAMERA_H
++
++//
++// W A R N I N G
++// -------------
++//
++// This file is not part of the Qt API. It exists purely as an
++// implementation detail. This header file may change from version to
++// version without notice, or even be removed.
++//
++// We mean it.
++//
++
++#include <private/qplatformcamera_p.h>
++#include <private/qplatformvideodevices_p.h>
++#include <private/qplatformmediaintegration_p.h>
++
++#include <qfilesystemwatcher.h>
++#include <qsocketnotifier.h>
++#include <qmutex.h>
++
++QT_BEGIN_NAMESPACE
++
++// Enumerates the V4L2 capture devices available on the system and keeps a
++// cached list of them. Holds a QFileSystemWatcher, presumably watching /dev
++// so checkCameras() can refresh the list on hotplug — confirm in the .cpp.
++class QV4L2CameraDevices : public QObject,
++                           public QPlatformVideoDevices
++{
++    Q_OBJECT
++public:
++    QV4L2CameraDevices(QPlatformMediaIntegration *integration);
++
++    // Returns the cached camera list.
++    QList<QCameraDevice> videoDevices() const override;
++
++public Q_SLOTS:
++    void checkCameras();
++
++private:
++    // Performs the actual device scan that backs checkCameras().
++    void doCheckCameras();
++
++    QList<QCameraDevice> cameras;          // cached device snapshot
++    QFileSystemWatcher deviceWatcher;
++};
++
++// Shared state for a camera's mmap'ed capture buffers and its open file
++// descriptor. Explicitly reference-counted (see `ref`) so it can outlive the
++// QV4L2Camera while video frames still point into the mappings; `mutex`
++// guards concurrent access to the buffer list and fd.
++struct QV4L2CameraBuffers
++{
++public:
++    ~QV4L2CameraBuffers();
++
++    // Hands buffer `index` back after a frame is done with it — see the
++    // implementation for the exact requeue/unmap behavior.
++    void release(int index);
++    // Unmaps all mappedBuffers entries and clears the list.
++    void unmapBuffers();
++
++    QAtomicInt ref;
++    QMutex mutex;
++    // One mmap'ed driver buffer: start address and byte length.
++    struct MappedBuffer {
++        void *data;
++        qsizetype size;
++    };
++    QList<MappedBuffer> mappedBuffers;
++    int v4l2FileDescriptor = -1;           // -1 while no device is open
++};
++
++// Linux V4L2 camera backend for QPlatformCamera. Talks to a /dev/video*
++// device through ioctl()s, captures frames via mmap'ed streaming I/O, and
++// maps the driver's controls (white balance, exposure, focus, flash/torch,
++// zoom) onto the Qt camera API. Control capabilities and ranges are probed
++// once in initV4L2Controls() and cached in the v4l2* members below.
++class Q_MULTIMEDIA_EXPORT QV4L2Camera : public QPlatformCamera
++{
++    Q_OBJECT
++
++public:
++    explicit QV4L2Camera(QCamera *parent);
++    ~QV4L2Camera();
++
++    bool isActive() const override;
++    void setActive(bool active) override;
++
++    void setCamera(const QCameraDevice &camera) override;
++    bool setCameraFormat(const QCameraFormat &format) override;
++    // Picks a concrete format for the device; see the .cpp for the policy.
++    bool resolveCameraFormat(const QCameraFormat &format);
++
++    bool isFocusModeSupported(QCamera::FocusMode mode) const override;
++    void setFocusMode(QCamera::FocusMode /*mode*/) override;
++
++//    void setCustomFocusPoint(const QPointF &/*point*/) override;
++    void setFocusDistance(float) override;
++    void zoomTo(float /*newZoomFactor*/, float /*rate*/ = -1.) override;
++
++    void setFlashMode(QCamera::FlashMode /*mode*/) override;
++    bool isFlashModeSupported(QCamera::FlashMode mode) const override;
++    bool isFlashReady() const override;
++
++    void setTorchMode(QCamera::TorchMode /*mode*/) override;
++    bool isTorchModeSupported(QCamera::TorchMode mode) const override;
++
++    void setExposureMode(QCamera::ExposureMode) override;
++    bool isExposureModeSupported(QCamera::ExposureMode mode) const override;
++    void setExposureCompensation(float) override;
++    int isoSensitivity() const override;
++    void setManualIsoSensitivity(int) override;
++    void setManualExposureTime(float) override;
++    float exposureTime() const override;
++
++    bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
++    void setWhiteBalanceMode(QCamera::WhiteBalanceMode /*mode*/) override;
++    void setColorTemperature(int /*temperature*/) override;
++
++    // Returns a capture buffer to the shared buffer state when a frame is done.
++    void releaseBuffer(int index);
++
++private Q_SLOTS:
++    // Dequeues and delivers a frame; connected to `notifier` while capturing.
++    void readFrame();
++
++private:
++    void setCameraBusy();
++
++    bool m_active = false;
++
++    QCameraDevice m_cameraDevice;
++
++    // Probes driver control support/ranges and fills the v4l2* members.
++    void initV4L2Controls();
++    void closeV4L2Fd();
++    int setV4L2ColorTemperature(int temperature);
++    bool setV4L2Parameter(quint32 id, qint32 value);
++    int getV4L2Parameter(quint32 id) const;
++
++    void setV4L2CameraFormat();
++    void initMMap();
++    void startCapturing();
++    void stopCapturing();
++
++    QSocketNotifier *notifier = nullptr;   // signals frame availability on the camera fd
++    QExplicitlySharedDataPointer<QV4L2CameraBuffers> d;   // shared fd + mmap'ed buffers
++
++    // Driver capabilities and control ranges, cached by initV4L2Controls().
++    bool v4l2AutoWhiteBalanceSupported = false;
++    bool v4l2ColorTemperatureSupported = false;
++    bool v4l2AutoExposureSupported = false;
++    bool v4l2ManualExposureSupported = false;
++    qint32 v4l2MinColorTemp = 5600; // Daylight...
++    qint32 v4l2MaxColorTemp = 5600;
++    qint32 v4l2MinExposure = 0;
++    qint32 v4l2MaxExposure = 0;
++    qint32 v4l2MinExposureAdjustment = 0;
++    qint32 v4l2MaxExposureAdjustment = 0;
++    qint32 v4l2MinFocus = 0;
++    qint32 v4l2MaxFocus = 0;
++    qint32 v4l2RangedFocus = false;        // NOTE(review): boolean stored in qint32 — confirm intent
++    bool v4l2FlashSupported = false;
++    bool v4l2TorchSupported = false;
++    int v4l2MinZoom = 0;                   // raw driver units (V4L2_CID_ZOOM_ABSOLUTE range)
++    int v4l2MaxZoom = 0;
++    timeval firstFrameTime = {-1, -1};     // {-1,-1} until the first frame arrives
++    int bytesPerLine = -1;                 // stride reported by VIDIOC_S_FMT
++    QVideoFrameFormat::ColorSpace colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
++    qint64 frameDuration = -1;             // microseconds per frame
++    bool cameraBusy = false;               // device claimed by another process (EBUSY)
++};
++
++QT_END_NAMESPACE
++
++
++#endif // QFFMPEGCAMERA_H
++