// File indexing completed on 2024-03-24 17:02:23

0001 /*
0002     SPDX-FileCopyrightText: 2022 Aleix Pol Gonzalez <aleixpol@kde.org>
0003 
0004     SPDX-License-Identifier: LGPL-2.1-only OR LGPL-3.0-only OR LicenseRef-KDE-Accepted-LGPL
0005 */
0006 
0007 #include "pipewirerecord.h"
0008 #include "glhelpers.h"
0009 #include "pipewirerecord_p.h"
0010 #include "pipewiresourcestream.h"
0011 #include <epoxy/egl.h>
0012 #include <epoxy/gl.h>
0013 #include <logging_record.h>
0014 
0015 #include <QDateTime>
0016 #include <QGuiApplication>
0017 #include <QImage>
0018 #include <QMutex>
0019 #include <QPainter>
0020 #include <QThreadPool>
0021 #include <QTimer>
0022 #include <qpa/qplatformnativeinterface.h>
0023 
0024 #include <KShell>
0025 
0026 #include <unistd.h>
0027 
0028 extern "C" {
0029 #include <libavcodec/avcodec.h>
0030 #include <libavformat/avformat.h>
0031 #include <libavutil/imgutils.h>
0032 #include <libavutil/opt.h>
0033 #include <libavutil/timestamp.h>
0034 #include <libswscale/swscale.h>
0035 }
0036 
#ifdef av_err2str
#undef av_err2str
#include <string>
// Replacement for the av_err2str() macro, which relies on C compound
// literals that are not valid C++. The buffer is static (no external
// linkage leaking from this TU) and thread_local because both the GUI
// thread and the writer thread may format errors concurrently.
static thread_local char str[AV_ERROR_MAX_STRING_SIZE];
av_always_inline char *av_err2str(int errnum)
{
    return av_make_error_string(str, AV_ERROR_MAX_STRING_SIZE, errnum);
}
#endif // av_err2str
0046 
#ifdef av_ts2str
#undef av_ts2str
// C++-safe replacement for the av_ts2str() compound-literal macro.
// static + thread_local for the same reasons as the av_err2str buffer.
// NOTE: a single buffer means two uses in one expression alias each
// other; log_packet() below therefore uses its own local buffers.
static thread_local char buf[AV_TS_MAX_STRING_SIZE];
#define av_ts2str(ts) av_ts_make_string(buf, ts)
#endif // av_ts2str
0052 
#ifdef av_ts2timestr
#undef av_ts2timestr
// C++-safe replacement for the av_ts2timestr() compound-literal macro.
// Same single-buffer caveat as av_ts2str() above.
static thread_local char timebuf[AV_TS_MAX_STRING_SIZE];
#define av_ts2timestr(ts, tb) av_ts_make_time_string(timebuf, ts, tb)
#endif // av_ts2timestr
0058 
0059 class CustomAVFrame
0060 {
0061 public:
0062     CustomAVFrame()
0063         : m_avFrame(av_frame_alloc())
0064     {
0065     }
0066 
0067     ~CustomAVFrame()
0068     {
0069         av_freep(m_avFrame->data);
0070         av_frame_free(&m_avFrame);
0071     }
0072 
0073     int alloc(int width, int height, AVPixelFormat pix_fmt)
0074     {
0075         m_avFrame->format = pix_fmt;
0076         m_avFrame->width = width;
0077         m_avFrame->height = height;
0078         return av_image_alloc(m_avFrame->data, m_avFrame->linesize, width, height, pix_fmt, 32);
0079     }
0080 
0081     AVFrame *m_avFrame;
0082 };
0083 
0084 static AVPixelFormat convertQImageFormatToAVPixelFormat(QImage::Format format)
0085 {
0086     // Listing those handed by SpaToQImageFormat
0087     switch (format) {
0088     case QImage::Format_BGR888:
0089         return AV_PIX_FMT_BGR24;
0090     case QImage::Format_RGBX8888:
0091     case QImage::Format_RGBA8888_Premultiplied:
0092         return AV_PIX_FMT_RGBA;
0093     case QImage::Format_RGB32:
0094         return AV_PIX_FMT_RGB32;
0095     default:
0096         qDebug() << "Unexpected pixel format" << format;
0097         return AV_PIX_FMT_RGB32;
0098     }
0099 }
0100 
// Register the optional types carried by the producedFrame signal so they can
// travel through queued (cross-thread) signal/slot connections.
Q_DECLARE_METATYPE(std::optional<int>);
Q_DECLARE_METATYPE(std::optional<std::chrono::nanoseconds>);
0103 
0104 PipeWireRecord::PipeWireRecord(QObject *parent)
0105     : QObject(parent)
0106     , d(new PipeWireRecordPrivate)
0107 {
0108     d->m_encoder = "libvpx";
0109     av_log_set_level(AV_LOG_DEBUG);
0110     qRegisterMetaType<std::optional<int>>();
0111     qRegisterMetaType<std::optional<std::chrono::nanoseconds>>();
0112 }
0113 
0114 PipeWireRecord::~PipeWireRecord()
0115 {
0116     setActive(false);
0117     if (d->m_fd) {
0118         close(*d->m_fd);
0119     }
0120 
0121     if (d->m_recordThread) {
0122         d->m_recordThread->wait();
0123     }
0124 }
0125 
0126 PipeWireRecord::State PipeWireRecord::state() const
0127 {
0128     if (d->m_active)
0129         return Recording;
0130     else if (d->m_recordThread || !d->m_produceThreadFinished)
0131         return Rendering;
0132 
0133     return Idle;
0134 }
0135 
0136 void PipeWireRecord::setNodeId(uint nodeId)
0137 {
0138     if (nodeId == d->m_nodeId)
0139         return;
0140 
0141     d->m_nodeId = nodeId;
0142     refresh();
0143     Q_EMIT nodeIdChanged(nodeId);
0144 }
0145 
0146 void PipeWireRecord::setFd(uint fd)
0147 {
0148     if (fd == d->m_fd)
0149         return;
0150 
0151     if (d->m_fd) {
0152         close(*d->m_fd);
0153     }
0154     d->m_fd = fd;
0155     refresh();
0156     Q_EMIT fdChanged(fd);
0157 }
0158 
0159 void PipeWireRecord::setActive(bool active)
0160 {
0161     if (d->m_active == active)
0162         return;
0163 
0164     d->m_active = active;
0165     refresh();
0166     Q_EMIT activeChanged(active);
0167 }
0168 
0169 void PipeWireRecord::setOutput(const QString &_output)
0170 {
0171     const QString output = KShell::tildeExpand(_output);
0172 
0173     if (d->m_output == output)
0174         return;
0175 
0176     d->m_output = output;
0177     refresh();
0178     Q_EMIT outputChanged(output);
0179 }
0180 
// Worker-side producer: owns the PipeWire source stream and (once parameters
// are known) the FFmpeg encoding pipeline. Lives on PipeWireRecordProduceThread.
PipeWireRecordProduce::PipeWireRecordProduce(const QByteArray &encoder, uint nodeId, uint fd, const QString &output)
    : QObject()
    , m_output(output)
    , m_nodeId(nodeId)
    , m_encoder(encoder)
{
    m_stream.reset(new PipeWireSourceStream(nullptr));
    bool created = m_stream->createStream(m_nodeId, fd);
    if (!created || !m_stream->error().isEmpty()) {
        // A null m_stream is how PipeWireRecordProduceThread::run() detects
        // that construction failed; keep the error text for the signal.
        qCWarning(PIPEWIRERECORD_LOGGING) << "failed to set up stream for" << m_nodeId << m_stream->error();
        m_error = m_stream->error();
        m_stream.reset(nullptr);
        return;
    }
    // The encoder can only be configured once the stream has negotiated its
    // size/framerate; stateChanged drives the eventual teardown.
    connect(m_stream.get(), &PipeWireSourceStream::streamParametersChanged, this, &PipeWireRecordProduce::setupStream);
    connect(m_stream.get(), &PipeWireSourceStream::stateChanged, this, &PipeWireRecordProduce::stateChanged);
}
0198 
// Out-of-line although empty: presumably required so scoped-pointer members
// declared with forward-declared types in the private header can be destroyed
// here — TODO confirm against pipewirerecord_p.h.
PipeWireRecordProduce::~PipeWireRecordProduce()
{
}
0202 
void PipeWireRecordProduceThread::run()
{
    // The producer lives on this thread's stack for the duration of the event
    // loop; m_producer is only a borrowed pointer into it and is cleared
    // before the stack object is destroyed.
    PipeWireRecordProduce produce(m_encoder, m_nodeId, m_fd, m_output);
    if (!produce.m_stream) {
        // Construction failed (see the PipeWireRecordProduce constructor).
        Q_EMIT errorFound(produce.error());
        return;
    }
    m_producer = &produce;
    qCDebug(PIPEWIRERECORD_LOGGING) << "executing";
    const int ret = exec();
    qCDebug(PIPEWIRERECORD_LOGGING) << "finishing" << ret;
    m_producer = nullptr;
}
0216 
// Requests the producer to stop capturing; actual teardown happens in
// PipeWireRecordProduce::stateChanged() once the stream drains to PAUSED.
void PipeWireRecordProduceThread::deactivate()
{
    // NOTE(review): this is called from another thread while m_producer points
    // into run()'s stack, with no visible synchronization — looks racy if the
    // thread is finishing concurrently; confirm the callers' ordering.
    if (m_producer) {
        m_producer->m_deactivated = true;
        m_producer->m_stream->setActive(false);
    }
}
0224 
0225 void PipeWireRecordProduce::stateChanged(pw_stream_state state)
0226 {
0227     if (state != PW_STREAM_STATE_PAUSED || !m_deactivated) {
0228         return;
0229     }
0230     if (!m_stream) {
0231         qCDebug(PIPEWIRERECORD_LOGGING) << "finished without a stream";
0232         return;
0233     }
0234 
0235     disconnect(m_stream.data(), &PipeWireSourceStream::frameReceived, this, &PipeWireRecordProduce::processFrame);
0236     if (m_writeThread) {
0237         m_writeThread->quit();
0238         bool done = m_writeThread->wait();
0239         Q_ASSERT(done);
0240     }
0241 
0242     qCDebug(PIPEWIRERECORD_LOGGING) << "finished";
0243     if (m_avCodecContext) {
0244         avio_closep(&m_avFormatContext->pb);
0245         avcodec_close(m_avCodecContext);
0246         av_free(m_avCodecContext);
0247         avformat_free_context(m_avFormatContext);
0248     }
0249     QThread::currentThread()->quit();
0250 }
0251 
0252 QString PipeWireRecord::extension()
0253 {
0254     return QStringLiteral("webm");
0255 }
0256 
0257 QString PipeWireRecord::currentExtension() const
0258 {
0259     static QHash<QByteArray, QString> s_extensions = {
0260         {"libx264", QStringLiteral("mp4")},
0261         {"libvpx", QStringLiteral("webm")},
0262     };
0263     return s_extensions.value(d->m_encoder, QStringLiteral("mkv"));
0264 }
0265 
0266 void PipeWireRecordProduce::setupStream()
0267 {
0268     qCDebug(PIPEWIRERECORD_LOGGING) << "Setting up stream";
0269     disconnect(m_stream.get(), &PipeWireSourceStream::streamParametersChanged, this, &PipeWireRecordProduce::setupStream);
0270     avformat_alloc_output_context2(&m_avFormatContext, nullptr, nullptr, m_output.toUtf8().constData());
0271     if (!m_avFormatContext) {
0272         qCWarning(PIPEWIRERECORD_LOGGING) << "Could not deduce output format from file: using WebM." << m_output;
0273         avformat_alloc_output_context2(&m_avFormatContext, nullptr, "webm", m_output.toUtf8().constData());
0274     }
0275     if (!m_avFormatContext) {
0276         qCDebug(PIPEWIRERECORD_LOGGING) << "could not set stream up";
0277         return;
0278     }
0279 
0280     m_codec = avcodec_find_encoder_by_name(m_encoder.constData());
0281     if (!m_codec) {
0282         qCWarning(PIPEWIRERECORD_LOGGING) << "Codec not found";
0283         return;
0284     }
0285 
0286     m_avCodecContext = avcodec_alloc_context3(m_codec);
0287     if (!m_avCodecContext) {
0288         qCWarning(PIPEWIRERECORD_LOGGING) << "Could not allocate video codec context";
0289         return;
0290     }
0291 
0292     const QSize size = m_stream->size();
0293     const Fraction framerate = m_stream->framerate();
0294 
0295     // Have the bitrate depend on the size of the input stream. What looks acceptable on a small
0296     // stream on a big one will look bad.
0297     m_avCodecContext->bit_rate = size.width() * size.height() * 2;
0298 
0299     Q_ASSERT(!size.isEmpty());
0300     m_avCodecContext->width = size.width();
0301     m_avCodecContext->height = size.height();
0302     m_avCodecContext->max_b_frames = 1;
0303     m_avCodecContext->gop_size = 100;
0304     if (m_codec->pix_fmts && m_codec->pix_fmts[0] > 0) {
0305         m_avCodecContext->pix_fmt = m_codec->pix_fmts[0];
0306     } else {
0307         m_avCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
0308     }
0309     m_avCodecContext->time_base = AVRational{1, 1000};
0310 
0311     AVDictionary *options = nullptr;
0312     av_dict_set_int(&options, "threads", qMin(16, QThread::idealThreadCount()), 0);
0313     av_dict_set(&options, "preset", "veryfast", 0);
0314     av_dict_set(&options, "tune-content", "screen", 0);
0315     av_dict_set(&options, "deadline", "realtime", 0);
0316     // In theory a lower number should be faster, but the opposite seems to be true
0317     av_dict_set(&options, "quality", "40", 0);
0318     av_dict_set(&options, "cpu-used", "6", 0);
0319     // Disable motion estimation, not great while dragging windows but speeds up encoding by an order of magnitude
0320     av_dict_set(&options, "flags", "+mv4", 0);
0321     // Disable in-loop filtering
0322     av_dict_set(&options, "-flags", "+loop", 0);
0323 
0324     int ret = avcodec_open2(m_avCodecContext, m_codec, &options);
0325     if (ret < 0) {
0326         qCWarning(PIPEWIRERECORD_LOGGING) << "Could not open codec" << av_err2str(ret);
0327         return;
0328     }
0329 
0330     ret = avio_open(&m_avFormatContext->pb, QFile::encodeName(m_output).constData(), AVIO_FLAG_WRITE);
0331     if (ret < 0) {
0332         qCWarning(PIPEWIRERECORD_LOGGING) << "Could not open" << m_output << av_err2str(ret);
0333         return;
0334     }
0335 
0336     auto avStream = avformat_new_stream(m_avFormatContext, nullptr);
0337     avStream->start_time = 0;
0338     avStream->r_frame_rate.num = framerate.numerator;
0339     avStream->r_frame_rate.den = framerate.denominator;
0340     avStream->avg_frame_rate.num = framerate.numerator;
0341     avStream->avg_frame_rate.den = framerate.denominator;
0342 
0343     ret = avcodec_parameters_from_context(avStream->codecpar, m_avCodecContext);
0344     if (ret < 0) {
0345         qCWarning(PIPEWIRERECORD_LOGGING) << "Error occurred when passing the codec:" << av_err2str(ret);
0346         return;
0347     }
0348 
0349     ret = avformat_write_header(m_avFormatContext, nullptr);
0350     if (ret < 0) {
0351         qCWarning(PIPEWIRERECORD_LOGGING) << "Error occurred when writing header:" << av_err2str(ret);
0352         return;
0353     }
0354 
0355     connect(m_stream.data(), &PipeWireSourceStream::frameReceived, this, &PipeWireRecordProduce::processFrame);
0356     m_writeThread = new PipeWireRecordWriteThread(this, m_avFormatContext, m_avCodecContext);
0357     m_writeThread->start();
0358 }
0359 
// Entry point for every frame delivered by the source stream: updates cursor
// metadata, downloads/accepts the pixels, and triggers composition.
void PipeWireRecordProduce::processFrame(const PipeWireFrame &frame)
{
    // Track cursor metadata separately from the frame pixels so that a
    // cursor-only update can still produce a fresh output frame.
    bool cursorChanged = false;
    if (frame.cursor) {
        cursorChanged = m_cursor.position != frame.cursor->position;
        m_cursor.position = frame.cursor->position;
        m_cursor.hotspot = frame.cursor->hotspot;
        if (!frame.cursor->texture.isNull()) {
            m_cursor.dirty = true;
            m_cursor.texture = frame.cursor->texture;
        }
    }

    if (frame.dmabuf) {
        // GPU path: download the dmabuf into a reusable CPU-side QImage.
        if (m_frameWithoutMetadataCursor.size() != m_stream->size()) {
            m_frameWithoutMetadataCursor = QImage(m_stream->size(), QImage::Format_RGBA8888_Premultiplied);
        }

        if (!m_dmabufHandler.downloadFrame(m_frameWithoutMetadataCursor, frame)) {
            // Import failed for this modifier; ask the stream to renegotiate.
            m_stream->renegotiateModifierFailed(frame.format, frame.dmabuf->modifier);
            return;
        }
        render(frame);
    } else if (frame.image) {
        // Software path: the frame already arrived as a QImage.
        updateTextureImage(*frame.image, frame);
    } else if (cursorChanged && !m_frameWithoutMetadataCursor.isNull()) {
        // Only the cursor moved: re-render the last frame at the new position.
        render(frame);
    }
}
0389 
// Reconciles the desired configuration (output path, active flag, node id)
// with the producer thread: starts recording when everything is set,
// otherwise winds the existing thread down.
void PipeWireRecord::refresh()
{
    if (!d->m_output.isEmpty() && d->m_active && d->m_nodeId > 0) {
        d->m_recordThread.reset(new PipeWireRecordProduceThread(d->m_encoder, d->m_nodeId, d->m_fd.value_or(0), d->m_output));
        connect(d->m_recordThread.get(), &PipeWireRecordProduceThread::errorFound, this, &PipeWireRecord::errorFound);
        connect(d->m_recordThread.get(), &PipeWireRecordProduceThread::finished, this, [this] {
            setActive(false);
        });
        d->m_recordThread->start();
    } else if (d->m_recordThread) {
        // Ask the producer to drain; state() reports Rendering until the
        // thread signals finished and we drop it in the lambda below.
        d->m_recordThread->deactivate();

        connect(d->m_recordThread.get(), &PipeWireRecordProduceThread::finished, this, [this] {
            qCDebug(PIPEWIRERECORD_LOGGING) << "produce thread finished" << d->m_output;
            d->m_recordThread.reset();
            d->m_produceThreadFinished = true;
            Q_EMIT stateChanged();
        });
        d->m_produceThreadFinished = false;
    }
    Q_EMIT stateChanged();
}
0412 
0413 void PipeWireRecordProduce::updateTextureImage(const QImage &image, const PipeWireFrame &frame)
0414 {
0415     m_frameWithoutMetadataCursor = image;
0416     render(frame);
0417 }
0418 
0419 void PipeWireRecordProduce::render(const PipeWireFrame &frame)
0420 {
0421     Q_ASSERT(!m_frameWithoutMetadataCursor.isNull());
0422 
0423     QImage image(m_frameWithoutMetadataCursor);
0424     if (!image.isNull() && m_cursor.position && !m_cursor.texture.isNull()) {
0425         image = m_frameWithoutMetadataCursor.copy();
0426         QPainter p(&image);
0427         p.drawImage(*m_cursor.position, m_cursor.texture);
0428     }
0429 
0430     Q_EMIT producedFrame(image, frame.sequential, frame.presentationTimestamp);
0431 }
0432 
0433 static void log_packet(const AVFormatContext *fmt_ctx, const AVPacket *pkt)
0434 {
0435     AVRational *time_base = &fmt_ctx->streams[pkt->stream_index]->time_base;
0436 
0437     qCDebug(PIPEWIRERECORD_LOGGING,
0438             "pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s "
0439             "stream_index:%d",
0440             av_ts2str(pkt->pts),
0441             av_ts2timestr(pkt->pts, time_base),
0442             av_ts2str(pkt->dts),
0443             av_ts2timestr(pkt->dts, time_base),
0444             av_ts2str(pkt->duration),
0445             av_ts2timestr(pkt->duration, time_base),
0446             pkt->stream_index);
0447 }
0448 
0449 void PipeWireRecord::setEncoder(const QByteArray &encoder)
0450 {
0451     if (d->m_encoder == encoder) {
0452         return;
0453     }
0454     d->m_encoder = encoder;
0455     Q_EMIT encoderChanged();
0456 }
0457 
0458 QByteArray PipeWireRecord::encoder() const
0459 {
0460     return d->m_encoder;
0461 }
0462 
0463 QList<QByteArray> PipeWireRecord::suggestedEncoders() const
0464 {
0465     QList<QByteArray> ret = {"libvpx", "libx264"};
0466     std::remove_if(ret.begin(), ret.end(), [](const QByteArray &encoder) {
0467         return !avcodec_find_encoder_by_name(encoder.constData());
0468     });
0469     return ret;
0470 }
0471 
0472 QString PipeWireRecord::output() const
0473 {
0474     return d->m_output;
0475 }
0476 
0477 bool PipeWireRecord::isActive() const
0478 {
0479     return d->m_active;
0480 }
0481 
0482 uint PipeWireRecord::nodeId() const
0483 {
0484     return d->m_nodeId;
0485 }
0486 
0487 uint PipeWireRecord::fd() const
0488 {
0489     return d->m_fd.value_or(0);
0490 }
0491 
// Writer-side worker: receives composed frames from the producer (cross-thread
// connection, since this object is created on the writer thread in
// PipeWireRecordWriteThread::run) and encodes/muxes them. Borrows, does not
// own, the FFmpeg contexts — they are freed in PipeWireRecordProduce::stateChanged.
PipeWireRecordWrite::PipeWireRecordWrite(PipeWireRecordProduce *produce, AVFormatContext *avFormatContext, AVCodecContext *avCodecContext)
    : QObject()
    , m_packet(av_packet_alloc())
    , m_avFormatContext(avFormatContext)
    , m_avCodecContext(avCodecContext)
{
    connect(produce, &PipeWireRecordProduce::producedFrame, this, &PipeWireRecordWrite::addFrame);
}
0500 
0501 PipeWireRecordWrite::~PipeWireRecordWrite()
0502 {
0503     int ret = av_write_trailer(m_avFormatContext);
0504     if (ret < 0) {
0505         qCWarning(PIPEWIRERECORD_LOGGING) << "failed to write trailer" << av_err2str(ret);
0506     }
0507     av_packet_free(&m_packet);
0508 }
0509 
// Thread hosting the PipeWireRecordWrite worker. Parented to the producer so
// it is quit/waited during teardown (see PipeWireRecordProduce::stateChanged).
PipeWireRecordWriteThread::PipeWireRecordWriteThread(PipeWireRecordProduce *produce, AVFormatContext *avFormatContext, AVCodecContext *avCodecContext)
    : QThread(produce)
    , m_produce(produce)
    , m_avFormatContext(avFormatContext)
    , m_avCodecContext(avCodecContext)
{
}
0517 
0518 void PipeWireRecordWrite::addFrame(const QImage &image, std::optional<int> sequential, std::optional<std::chrono::nanoseconds> presentationTimestamp)
0519 {
0520     if (!sws_context || m_lastReceivedSize != image.size()) {
0521         sws_context = sws_getCachedContext(sws_context,
0522                                            image.width(),
0523                                            image.height(),
0524                                            convertQImageFormatToAVPixelFormat(image.format()),
0525                                            m_avCodecContext->width,
0526                                            m_avCodecContext->height,
0527                                            m_avCodecContext->pix_fmt,
0528                                            0,
0529                                            nullptr,
0530                                            nullptr,
0531                                            nullptr);
0532     }
0533 
0534     CustomAVFrame avFrame;
0535     int ret = avFrame.alloc(m_avCodecContext->width, m_avCodecContext->height, m_avCodecContext->pix_fmt);
0536     if (ret < 0) {
0537         qCWarning(PIPEWIRERECORD_LOGGING) << "Could not allocate raw picture buffer" << av_err2str(ret);
0538         return;
0539     }
0540     const std::uint8_t *buffers[] = {image.constBits(), nullptr};
0541     const int strides[] = {static_cast<int>(image.bytesPerLine()), 0, 0, 0};
0542     sws_scale(sws_context, buffers, strides, 0, m_avCodecContext->height, avFrame.m_avFrame->data, avFrame.m_avFrame->linesize);
0543 
0544     if (presentationTimestamp.has_value()) {
0545         const auto current = std::chrono::duration_cast<std::chrono::milliseconds>(*presentationTimestamp).count();
0546         if ((*m_avFormatContext->streams)->start_time == 0) {
0547             (*m_avFormatContext->streams)->start_time = current;
0548         }
0549 
0550         Q_ASSERT((*m_avFormatContext->streams)->start_time <= current);
0551         avFrame.m_avFrame->pts = current - (*m_avFormatContext->streams)->start_time;
0552     } else {
0553         avFrame.m_avFrame->pts = AV_NOPTS_VALUE;
0554     }
0555 
0556     // Let's add a key frame every 100 frames and also the first frame
0557     if (sequential && (*sequential == 0 || (*sequential - m_lastKeyFrame) > 100)) {
0558         avFrame.m_avFrame->key_frame = 1;
0559         m_lastKeyFrame = *sequential;
0560     }
0561 
0562     if (m_lastPts > 0 && avFrame.m_avFrame->pts <= m_lastPts) {
0563         // Make sure we don't have two frames at the same presentation time
0564         avFrame.m_avFrame->pts = m_lastPts + 1;
0565     }
0566     m_lastPts = avFrame.m_avFrame->pts;
0567 
0568     ret = avcodec_send_frame(m_avCodecContext, avFrame.m_avFrame);
0569     if (ret < 0) {
0570         qCWarning(PIPEWIRERECORD_LOGGING) << "Error sending a frame for encoding:" << av_err2str(ret);
0571         return;
0572     }
0573     for (;;) {
0574         ret = avcodec_receive_packet(m_avCodecContext, m_packet);
0575         if (ret < 0) {
0576             if (ret != AVERROR_EOF && ret != AVERROR(EAGAIN)) {
0577                 qCWarning(PIPEWIRERECORD_LOGGING) << "Error encoding a frame: " << av_err2str(ret) << ret;
0578             }
0579             break;
0580         }
0581 
0582         m_packet->stream_index = (*m_avFormatContext->streams)->index;
0583         av_packet_rescale_ts(m_packet, m_avCodecContext->time_base, (*m_avFormatContext->streams)->time_base);
0584         log_packet(m_avFormatContext, m_packet);
0585         ret = av_interleaved_write_frame(m_avFormatContext, m_packet);
0586         if (ret < 0) {
0587             qCWarning(PIPEWIRERECORD_LOGGING) << "Error while writing output packet:" << av_err2str(ret);
0588         }
0589         av_packet_unref(m_packet);
0590     }
0591 }
0592 
void PipeWireRecordWriteThread::run()
{
    // The writer lives on this thread; frames arrive through the queued
    // producedFrame connection while exec() spins.
    PipeWireRecordWrite writer(m_produce, m_avFormatContext, m_avCodecContext);
    QThread::exec();
    // Event loop done: flush the encoder by sending a null (drain) frame and
    // writing every remaining packet. The trailer is written afterwards, when
    // `writer` goes out of scope at the end of this function.
    AVPacket *pkt = av_packet_alloc();
    avcodec_send_frame(m_avCodecContext, nullptr);

    for (;;) {
        // Negative return means drained (EOF) or a real error; stop either way.
        if (avcodec_receive_packet(m_avCodecContext, pkt) < 0)
            break;

        pkt->stream_index = (*m_avFormatContext->streams)->index;
        // Encoder time base -> container stream time base.
        av_packet_rescale_ts(pkt, m_avCodecContext->time_base, (*m_avFormatContext->streams)->time_base);
        log_packet(m_avFormatContext, pkt);
        int ret = av_interleaved_write_frame(m_avFormatContext, pkt);
        if (ret < 0) {
            qCWarning(PIPEWIRERECORD_LOGGING) << "Error while writing output packet:" << av_err2str(ret);
        }
        av_packet_unref(pkt);
    }
    av_packet_free(&pkt);
}