/*********************************************************
 * Copyright (C) 2020, Val Doroshchuk <valbok@gmail.com> *
 *                                                       *
 * This file is part of QtAVPlayer.                      *
 * Free Qt Media Player based on FFmpeg.                 *
 *********************************************************/

#include "qavvideoframe.h"
#include "qavvideobuffer_cpu_p.h"
#include "qavframe_p.h"
#include "qavvideocodec_p.h"
#include "qavhwdevice_p.h"
#include <QSize>
#ifdef QT_AVPLAYER_MULTIMEDIA
#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
#include <QAbstractVideoSurface>
#else
#include <QtMultimedia/private/qabstractvideobuffer_p.h>
#include <QtMultimedia/private/qvideotexturehelper_p.h>
#endif
#endif
#include <QDebug>

extern "C" {
#include <libswscale/swscale.h>
#include <libavutil/pixdesc.h>
#include <libavutil/imgutils.h>
#include <libavutil/mastering_display_metadata.h>
}

QT_BEGIN_NAMESPACE

static const QAVVideoCodec *videoCodec(const QAVCodec *c)
{
    return reinterpret_cast<const QAVVideoCodec *>(c);
}

class QAVVideoFramePrivate : public QAVFramePrivate
{
    Q_DECLARE_PUBLIC(QAVVideoFrame)
public:
    QAVVideoFramePrivate(QAVVideoFrame *q) : q_ptr(q) { }

    QAVVideoBuffer &videoBuffer() const
    {
        if (!buffer) {
            auto c = videoCodec(stream.codec().data());
            auto buf = c && c->device() && frame->format == c->device()->format()
                ? c->device()->videoBuffer(*q_ptr)
                : new QAVVideoBuffer_CPU(*q_ptr);
            const_cast<QAVVideoFramePrivate *>(this)->buffer.reset(buf);
        }

        return *buffer;
    }

    QAVVideoFrame *q_ptr = nullptr;
    QScopedPointer<QAVVideoBuffer> buffer;
};

QAVVideoFrame::QAVVideoFrame()
    : QAVFrame(*new QAVVideoFramePrivate(this))
{
}

QAVVideoFrame::QAVVideoFrame(const QAVFrame &other)
    : QAVVideoFrame()
{
    operator=(other);
}

QAVVideoFrame::QAVVideoFrame(const QAVVideoFrame &other)
    : QAVVideoFrame()
{
    operator=(other);
}

QAVVideoFrame::QAVVideoFrame(const QSize &size, AVPixelFormat fmt)
    : QAVVideoFrame()
{
    frame()->format = fmt;
    frame()->width = size.width();
    frame()->height = size.height();
    av_frame_get_buffer(frame(), 1);
}

QAVVideoFrame &QAVVideoFrame::operator=(const QAVFrame &other)
{
    Q_D(QAVVideoFrame);
    QAVFrame::operator=(other);
    d->buffer.reset();
    return *this;
}

QAVVideoFrame &QAVVideoFrame::operator=(const QAVVideoFrame &other)
{
    Q_D(QAVVideoFrame);
    QAVFrame::operator=(other);
    d->buffer.reset();
    return *this;
}

QSize QAVVideoFrame::size() const
{
    Q_D(const QAVFrame);
    return {d->frame->width, d->frame->height};
}

QAVVideoFrame::MapData QAVVideoFrame::map() const
{
    Q_D(const QAVVideoFrame);
    return d->videoBuffer().map();
}

QAVVideoFrame::HandleType QAVVideoFrame::handleType() const
{
    Q_D(const QAVVideoFrame);
    return d->videoBuffer().handleType();
}

#if QT_VERSION >= QT_VERSION_CHECK(6, 0, 0)
QVariant QAVVideoFrame::handle(QRhi *rhi) const
{
    Q_D(const QAVVideoFrame);
    return d->videoBuffer().handle(rhi);
}
#else
QVariant QAVVideoFrame::handle() const
{
    Q_D(const QAVVideoFrame);
    return d->videoBuffer().handle();
}
#endif

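// Reports the FFmpeg pixel format stored in the underlying AVFrame.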
AVPixelFormat QAVVideoFrame::format() const
{
    return static_cast<AVPixelFormat>(frame()->format);
}

QString QAVVideoFrame::formatName() const
{
    return QLatin1String(av_pix_fmt_desc_get(QAVVideoFrame::format())->name);
}

QAVVideoFrame QAVVideoFrame::convertTo(AVPixelFormat fmt) const
{
    if (fmt == frame()->format)
        return *this;

    auto mapData = map();
    if (mapData.format == AV_PIX_FMT_NONE) {
        qWarning() << __FUNCTION__ << "Could not map:" << formatName();
        return QAVVideoFrame();
    }
    auto ctx = sws_getContext(size().width(), size().height(), mapData.format,
                              size().width(), size().height(), fmt,
                              SWS_BICUBIC, NULL, NULL, NULL);
    if (ctx == nullptr) {
        qWarning() << __FUNCTION__ << ": Could not get sws context:" << formatName();
        return QAVVideoFrame();
    }

    int ret = sws_setColorspaceDetails(ctx, sws_getCoefficients(SWS_CS_ITU601),
                                       0, sws_getCoefficients(SWS_CS_ITU709), 0, 0, 1 << 16, 1 << 16);
    if (ret == -1) {
        qWarning() << __FUNCTION__ << "Colorspace not supported";
        return QAVVideoFrame();
    }

    QAVVideoFrame result(size(), fmt);
    result.d_ptr->stream = d_ptr->stream;
    sws_scale(ctx, mapData.data, mapData.bytesPerLine, 0, result.size().height(),
              result.frame()->data, result.frame()->linesize);
    sws_freeContext(ctx);

    return result;
}

#ifdef QT_AVPLAYER_MULTIMEDIA
#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
class PlanarVideoBuffer : public QAbstractPlanarVideoBuffer
{
public:
    PlanarVideoBuffer(const QAVVideoFrame &frame, HandleType type = NoHandle)
        : QAbstractPlanarVideoBuffer(type), m_frame(frame)
    {
    }

    QVariant handle() const override
    {
        return m_frame.handle();
    }

    MapMode mapMode() const override { return m_mode; }
    using QAbstractPlanarVideoBuffer::map;
    int map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4]) override
    {
        if (m_mode != NotMapped || mode == NotMapped)
            return 0;

        auto mapData = m_frame.map();
        m_mode = mode;
        if (numBytes)
            *numBytes = mapData.size;

        int i = 0;
        for (; i < 4; ++i) {
            if (!mapData.bytesPerLine[i])
                break;

            bytesPerLine[i] = mapData.bytesPerLine[i];
            data[i] = mapData.data[i];
        }

        return i;
    }
    void unmap() override { m_mode = NotMapped; }

private:
    QAVVideoFrame m_frame;
    MapMode m_mode = NotMapped;
};
#else
class PlanarVideoBuffer : public QAbstractVideoBuffer
{
public:
    PlanarVideoBuffer(const QAVVideoFrame &frame, QVideoFrameFormat::PixelFormat format
        , QVideoFrame::HandleType type = QVideoFrame::NoHandle)
        : QAbstractVideoBuffer(type)
        , m_frame(frame)
        , m_pixelFormat(format)
    {
    }

    quint64 textureHandle(int plane) const override
    {
        if (m_textures.isNull())
            const_cast<PlanarVideoBuffer *>(this)->m_textures = m_frame.handle(m_rhi);
        if (m_textures.canConvert<QList<QVariant>>()) {
            auto textures = m_textures.toList();
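            // The handle is either a list with one texture per plane or a single shared texture.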
            auto r = plane < textures.size() ? textures[plane].toULongLong() : 0;
            return r;
        }
        return m_textures.toULongLong();
    }

    QVideoFrame::MapMode mapMode() const override { return m_mode; }
    MapData map(QVideoFrame::MapMode mode) override
    {
        MapData res;
        if (m_mode != QVideoFrame::NotMapped || mode == QVideoFrame::NotMapped)
            return res;

        m_mode = mode;
        auto mapData = m_frame.map();
        auto *desc = QVideoTextureHelper::textureDescription(m_pixelFormat);
        res.nPlanes = desc->nplanes;
        for (int i = 0; i < res.nPlanes; ++i) {
            if (!mapData.bytesPerLine[i])
                break;

            res.data[i] = mapData.data[i];
            res.bytesPerLine[i] = mapData.bytesPerLine[i];
            // TODO: Reimplement heightForPlane
            res.size[i] = mapData.bytesPerLine[i] * desc->heightForPlane(m_frame.size().height(), i);
        }
        return res;
    }
    void unmap() override { m_mode = QVideoFrame::NotMapped; }

#if QT_VERSION >= QT_VERSION_CHECK(6, 4, 0)
    std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
    {
        m_rhi = rhi;
        if (m_textures.isNull())
            m_textures = m_frame.handle(m_rhi);
        return nullptr;
    }

    static QVideoFrameFormat::ColorSpace colorSpace(const AVFrame *frame)
    {
        switch (frame->colorspace) {
        default:
        case AVCOL_SPC_UNSPECIFIED:
        case AVCOL_SPC_RESERVED:
        case AVCOL_SPC_FCC:
        case AVCOL_SPC_SMPTE240M:
        case AVCOL_SPC_YCGCO:
        case AVCOL_SPC_SMPTE2085:
        case AVCOL_SPC_CHROMA_DERIVED_NCL:
        case AVCOL_SPC_CHROMA_DERIVED_CL:
        case AVCOL_SPC_ICTCP: // BT.2100 ICtCp
            return QVideoFrameFormat::ColorSpace_Undefined;
        case AVCOL_SPC_RGB:
            return QVideoFrameFormat::ColorSpace_AdobeRgb;
        case AVCOL_SPC_BT709:
            return QVideoFrameFormat::ColorSpace_BT709;
        case AVCOL_SPC_BT470BG: // BT601
        case AVCOL_SPC_SMPTE170M: // Also BT601
            return QVideoFrameFormat::ColorSpace_BT601;
        case AVCOL_SPC_BT2020_NCL: // Non-constant luminance
        case AVCOL_SPC_BT2020_CL: // Constant luminance
            return QVideoFrameFormat::ColorSpace_BT2020;
        }
    }

    static QVideoFrameFormat::ColorTransfer colorTransfer(const AVFrame *frame)
    {
        switch (frame->color_trc) {
        case AVCOL_TRC_BT709:
        // The following three cases have transfer characteristics identical to BT709
        case AVCOL_TRC_BT1361_ECG:
        case AVCOL_TRC_BT2020_10:
        case AVCOL_TRC_BT2020_12:
        case AVCOL_TRC_SMPTE240M: // almost identical to bt709
            return QVideoFrameFormat::ColorTransfer_BT709;
        case AVCOL_TRC_GAMMA22:
        case AVCOL_TRC_SMPTE428: // No idea, let's hope for the best...
        case AVCOL_TRC_IEC61966_2_1: // sRGB, close enough to 2.2...
        case AVCOL_TRC_IEC61966_2_4: // not quite, but probably close enough
            return QVideoFrameFormat::ColorTransfer_Gamma22;
        case AVCOL_TRC_GAMMA28:
            return QVideoFrameFormat::ColorTransfer_Gamma28;
        case AVCOL_TRC_SMPTE170M:
            return QVideoFrameFormat::ColorTransfer_BT601;
        case AVCOL_TRC_LINEAR:
            return QVideoFrameFormat::ColorTransfer_Linear;
        case AVCOL_TRC_SMPTE2084:
            return QVideoFrameFormat::ColorTransfer_ST2084;
        case AVCOL_TRC_ARIB_STD_B67:
            return QVideoFrameFormat::ColorTransfer_STD_B67;
        default:
            break;
        }
        return QVideoFrameFormat::ColorTransfer_Unknown;
    }

    static QVideoFrameFormat::ColorRange colorRange(const AVFrame *frame)
    {
        switch (frame->color_range) {
        case AVCOL_RANGE_MPEG:
            return QVideoFrameFormat::ColorRange_Video;
        case AVCOL_RANGE_JPEG:
            return QVideoFrameFormat::ColorRange_Full;
        default:
            return QVideoFrameFormat::ColorRange_Unknown;
        }
    }

    static float maxNits(const AVFrame *frame)
    {
        float maxNits = -1;
        for (int i = 0; i < frame->nb_side_data; ++i) {
            AVFrameSideData *sd = frame->side_data[i];
            // TODO: Longer term we might want to also support HDR10+ dynamic metadata
            if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
                auto data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
                auto b = data->max_luminance;
                auto maybeLum = b.den != 0 ? 10'000.0 * qreal(b.num) / qreal(b.den) : std::optional<qreal>{};
                if (maybeLum)
                    maxNits = float(maybeLum.value());
            }
        }
        return maxNits;
    }
#endif

private:
    QAVVideoFrame m_frame;
    QVideoFrameFormat::PixelFormat m_pixelFormat = QVideoFrameFormat::Format_Invalid;
    QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
    QVariant m_textures;
#if QT_VERSION < QT_VERSION_CHECK(6, 4, 0)
    QRhi *m_rhi = nullptr;
#endif
};

#endif // #if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)

QAVVideoFrame::operator QVideoFrame() const
{
    QAVVideoFrame result = *this;
    if (!result)
        return QVideoFrame();

#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
    using VideoFrame = QVideoFrame;
#else
    using VideoFrame = QVideoFrameFormat;
#endif

    VideoFrame::PixelFormat format = VideoFrame::Format_Invalid;
    switch (frame()->format) {
    case AV_PIX_FMT_RGB32:
#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
        format = VideoFrame::Format_RGB32;
#else
        format = QVideoFrameFormat::Format_BGRA8888;
#endif
        break;
    case AV_PIX_FMT_YUV420P:
        format = VideoFrame::Format_YUV420P;
        break;
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUV422P:
#if QT_VERSION < QT_VERSION_CHECK(5, 15, 0)
        result = convertTo(AV_PIX_FMT_YUV420P);
        format = VideoFrame::Format_YUV420P;
#else
        format = VideoFrame::Format_YUV422P;
#endif
        break;
    case AV_PIX_FMT_VAAPI:
    case AV_PIX_FMT_VDPAU:
#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
        format = VideoFrame::Format_BGRA32;
#else
        format = QVideoFrameFormat::Format_RGBA8888;
#endif
        break;
    case AV_PIX_FMT_D3D11:
    case AV_PIX_FMT_VIDEOTOOLBOX:
    case AV_PIX_FMT_NV12:
        format = VideoFrame::Format_NV12;
        break;
#if QT_VERSION >= QT_VERSION_CHECK(6, 0, 0)
    case AV_PIX_FMT_MEDIACODEC:
        format = VideoFrame::Format_SamplerExternalOES;
        break;
#endif
    default:
        // TODO: Add more supported formats instead of converting
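        // Any other format falls back to a CPU conversion to YUV420P via sws_scale.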
        result = convertTo(AV_PIX_FMT_YUV420P);
        format = VideoFrame::Format_YUV420P;
        break;
    }

#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
    using HandleType = QAbstractVideoBuffer::HandleType;
#else
    using HandleType = QVideoFrame::HandleType;
#endif

    HandleType type = HandleType::NoHandle;
    switch (handleType()) {
    case GLTextureHandle:
#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
        type = HandleType::GLTextureHandle;
#else
        type = HandleType::RhiTextureHandle;
#endif
        break;
    case MTLTextureHandle:
    case D3D11Texture2DHandle:
#if QT_VERSION >= QT_VERSION_CHECK(6, 0, 0)
        type = HandleType::RhiTextureHandle;
#endif
        break;
    default:
        break;
    }

#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
    return QVideoFrame(new PlanarVideoBuffer(result, type), size(), format);
#else
    QVideoFrameFormat videoFormat(size(), format);
#if QT_VERSION >= QT_VERSION_CHECK(6, 4, 0)
    videoFormat.setColorSpace(PlanarVideoBuffer::colorSpace(frame()));
    videoFormat.setColorTransfer(PlanarVideoBuffer::colorTransfer(frame()));
    videoFormat.setColorRange(PlanarVideoBuffer::colorRange(frame()));
    videoFormat.setMaxLuminance(PlanarVideoBuffer::maxNits(frame()));
#endif
    return QVideoFrame(new PlanarVideoBuffer(result, format, type), videoFormat);
#endif
}
#endif // #ifdef QT_AVPLAYER_MULTIMEDIA

QT_END_NAMESPACE