// SPDX-FileCopyrightText: 2023 Arjen Hiemstra <ahiemstra@heimr.nl>
//
// SPDX-License-Identifier: LGPL-2.1-only OR LGPL-3.0-only OR LicenseRef-KDE-Accepted-LGPL

#include "VideoStream.h"

#include <algorithm>
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <numeric>
#include <thread>
#include <vector>

#include <QDateTime>
#include <QQueue>
#include <QSet>

#include <freerdp/freerdp.h>
#include <freerdp/peer.h>

#include "NetworkDetection.h"
#include "PeerContext_p.h"
#include "RdpConnection.h"

#include "krdp_logging.h"

namespace KRdp
{

namespace clk = std::chrono;

// Maximum number of frames to contain in the queue.
constexpr qsizetype MaxQueueSize = 10;

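// Period over which frame rate estimates are averaged before the requested
// frame rate is updated.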
constexpr clk::system_clock::duration FrameRateEstimateAveragePeriod = clk::seconds(1);

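// Capability information extracted from a single RDPGFX capability set
// advertised by the client.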
struct RdpCapsInformation {
    uint32_t version;
    RDPGFX_CAPSET capSet;
    bool avcSupported : 1 = false;
    bool yuv420Supported : 1 = false;
};

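// Convert an RDPGFX capability version constant to a human-readable string
// for logging.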
const char *capVersionToString(uint32_t version)
{
    switch (version) {
    case RDPGFX_CAPVERSION_107:
        return "RDPGFX_CAPVERSION_107";
    case RDPGFX_CAPVERSION_106:
        return "RDPGFX_CAPVERSION_106";
    case RDPGFX_CAPVERSION_105:
        return "RDPGFX_CAPVERSION_105";
    case RDPGFX_CAPVERSION_104:
        return "RDPGFX_CAPVERSION_104";
    case RDPGFX_CAPVERSION_103:
        return "RDPGFX_CAPVERSION_103";
    case RDPGFX_CAPVERSION_102:
        return "RDPGFX_CAPVERSION_102";
    case RDPGFX_CAPVERSION_101:
        return "RDPGFX_CAPVERSION_101";
    case RDPGFX_CAPVERSION_10:
        return "RDPGFX_CAPVERSION_10";
    case RDPGFX_CAPVERSION_81:
        return "RDPGFX_CAPVERSION_81";
    case RDPGFX_CAPVERSION_8:
        return "RDPGFX_CAPVERSION_8";
    default:
        return "UNKNOWN_VERSION";
    }
}

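// The following free functions are the RDPGFX server callbacks registered
// with FreeRDP. Each forwards to the owning VideoStream instance stored in
// the context's custom pointer.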
BOOL gfxChannelIdAssigned(RdpgfxServerContext *context, uint32_t channelId)
{
    auto stream = reinterpret_cast<VideoStream *>(context->custom);
    if (stream->onChannelIdAssigned(channelId)) {
        return TRUE;
    }
    return FALSE;
}

uint32_t gfxCapsAdvertise(RdpgfxServerContext *context, const RDPGFX_CAPS_ADVERTISE_PDU *capsAdvertise)
{
    auto stream = reinterpret_cast<VideoStream *>(context->custom);
    return stream->onCapsAdvertise(capsAdvertise);
}

uint32_t gfxCacheImportOffer(RdpgfxServerContext *context, const RDPGFX_CACHE_IMPORT_OFFER_PDU *cacheImportOffer)
{
    // No cache entries are imported; reply with an empty, zero-initialized PDU.
    RDPGFX_CACHE_IMPORT_REPLY_PDU cacheImportReply = {};
    return context->CacheImportReply(context, &cacheImportReply);
}

uint32_t gfxFrameAcknowledge(RdpgfxServerContext *context, const RDPGFX_FRAME_ACKNOWLEDGE_PDU *frameAcknowledge)
{
    auto stream = reinterpret_cast<VideoStream *>(context->custom);
    return stream->onFrameAcknowledge(frameAcknowledge);
}

uint32_t gfxQoEFrameAcknowledge(RdpgfxServerContext *, const RDPGFX_QOE_FRAME_ACKNOWLEDGE_PDU *)
{
    return CHANNEL_RC_OK;
}

struct Surface {
    uint16_t id;
    QSize size;
};

struct FrameRateEstimate {
    clk::system_clock::time_point timeStamp;
    int estimate = 0;
};

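// Private data for VideoStream, owning the RDPGFX server context, the frame
// queue and the frame submission thread.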
class KRDP_NO_EXPORT VideoStream::Private
{
public:
    using RdpGfxContextPtr = std::unique_ptr<RdpgfxServerContext, decltype(&rdpgfx_server_context_free)>;

    RdpConnection *session;

    RdpGfxContextPtr gfxContext = RdpGfxContextPtr(nullptr, rdpgfx_server_context_free);

    uint32_t frameId = 0;
    uint32_t channelId = 0;

    uint16_t nextSurfaceId = 1;
    Surface surface;

    bool pendingReset = true;
    bool enabled = false;
    bool capsConfirmed = false;

    std::jthread frameSubmissionThread;
    std::mutex frameQueueMutex;

    QQueue<VideoFrame> frameQueue;
    QSet<uint32_t> pendingFrames;

    int maximumFrameRate = 60;
    int requestedFrameRate = 60;
    QQueue<FrameRateEstimate> frameRateEstimates;
    clk::system_clock::time_point lastFrameRateEstimation;

    std::atomic_int encodedFrames = 0;
    std::atomic_int frameDelay = 0;
};

VideoStream::VideoStream(RdpConnection *session)
    : QObject(nullptr)
    , d(std::make_unique<Private>())
{
    d->session = session;
}

VideoStream::~VideoStream()
{
}

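// Create and open the RDPGFX virtual channel and start the thread that
// submits queued frames to the client.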
bool VideoStream::initialize()
{
    if (d->gfxContext) {
        return true;
    }

    auto peerContext = reinterpret_cast<PeerContext *>(d->session->rdpPeer()->context);

    d->gfxContext = Private::RdpGfxContextPtr{rdpgfx_server_context_new(peerContext->virtualChannelManager), rdpgfx_server_context_free};
    if (!d->gfxContext) {
        qCWarning(KRDP) << "Failed creating RDPGFX context";
        return false;
    }

    d->gfxContext->ChannelIdAssigned = gfxChannelIdAssigned;
    d->gfxContext->CapsAdvertise = gfxCapsAdvertise;
    d->gfxContext->CacheImportOffer = gfxCacheImportOffer;
    d->gfxContext->FrameAcknowledge = gfxFrameAcknowledge;
    d->gfxContext->QoeFrameAcknowledge = gfxQoEFrameAcknowledge;

    d->gfxContext->custom = this;
    d->gfxContext->rdpcontext = d->session->rdpPeer()->context;

    if (!d->gfxContext->Open(d->gfxContext.get())) {
        qCWarning(KRDP) << "Could not open GFX context";
        return false;
    }

    connect(d->session->networkDetection(), &NetworkDetection::rttChanged, this, &VideoStream::updateRequestedFrameRate);

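    // Submit at most one queued frame per iteration, pacing the loop to the
    // currently requested frame rate.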
    d->frameSubmissionThread = std::jthread([this](std::stop_token token) {
        while (!token.stop_requested()) {
            {
                std::unique_lock lock(d->frameQueueMutex);
                if (!d->frameQueue.isEmpty()) {
                    sendFrame(d->frameQueue.takeFirst());
                }
            }

            std::this_thread::sleep_for(std::chrono::milliseconds(1000) / d->requestedFrameRate);
        }
    });

    qCDebug(KRDP) << "Video stream initialized";

    return true;
}

void VideoStream::close()
{
    if (!d->gfxContext) {
        return;
    }

    d->gfxContext->Close(d->gfxContext.get());

    if (d->frameSubmissionThread.joinable()) {
        d->frameSubmissionThread.request_stop();
        d->frameSubmissionThread.join();
    }

    Q_EMIT closed();
}

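// Append a frame to the submission queue, dropping the oldest frames when the
// queue exceeds MaxQueueSize.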
void VideoStream::queueFrame(const KRdp::VideoFrame &frame)
{
    if (d->session->state() != RdpConnection::State::Streaming || !d->enabled) {
        return;
    }

    std::lock_guard lock(d->frameQueueMutex);
    d->frameQueue.append(frame);

    while (d->frameQueue.size() > MaxQueueSize) {
        d->frameQueue.pop_front();
    }
}

void VideoStream::reset()
{
    d->pendingReset = true;
}

bool VideoStream::enabled() const
{
    return d->enabled;
}

void VideoStream::setEnabled(bool enabled)
{
    if (d->enabled == enabled) {
        return;
    }

    d->enabled = enabled;
    Q_EMIT enabledChanged();
}

uint32_t VideoStream::requestedFrameRate() const
{
    return d->requestedFrameRate;
}

bool VideoStream::onChannelIdAssigned(uint32_t channelId)
{
    d->channelId = channelId;

    return true;
}

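// Inspect the capability sets advertised by the client, require support for
// AVC (H.264) with YUV420, and confirm the highest advertised version.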
uint32_t VideoStream::onCapsAdvertise(const RDPGFX_CAPS_ADVERTISE_PDU *capsAdvertise)
{
    auto capsSets = capsAdvertise->capsSets;
    auto count = capsAdvertise->capsSetCount;

    std::vector<RdpCapsInformation> capsInformation;
    capsInformation.reserve(count);

    qCDebug(KRDP) << "Received caps:";
    for (int i = 0; i < count; ++i) {
        auto set = capsSets[i];

        RdpCapsInformation caps;
        caps.version = set.version;
        caps.capSet = set;

        switch (set.version) {
        case RDPGFX_CAPVERSION_107:
        case RDPGFX_CAPVERSION_106:
        case RDPGFX_CAPVERSION_105:
        case RDPGFX_CAPVERSION_104:
            caps.yuv420Supported = true;
            Q_FALLTHROUGH();
        case RDPGFX_CAPVERSION_103:
        case RDPGFX_CAPVERSION_102:
        case RDPGFX_CAPVERSION_101:
        case RDPGFX_CAPVERSION_10:
            if (!(set.flags & RDPGFX_CAPS_FLAG_AVC_DISABLED)) {
                caps.avcSupported = true;
            }
            break;
        case RDPGFX_CAPVERSION_81:
            if (set.flags & RDPGFX_CAPS_FLAG_AVC420_ENABLED) {
                caps.avcSupported = true;
                caps.yuv420Supported = true;
            }
            break;
        case RDPGFX_CAPVERSION_8:
            break;
        }

        qCDebug(KRDP) << " " << capVersionToString(caps.version) << "AVC:" << caps.avcSupported << "YUV420:" << caps.yuv420Supported;

        capsInformation.push_back(caps);
    }

    auto supported = std::any_of(capsInformation.begin(), capsInformation.end(), [](const RdpCapsInformation &caps) {
        return caps.avcSupported && caps.yuv420Supported;
    });

    if (!supported) {
        qCWarning(KRDP) << "Client does not support H.264 in YUV420 mode!";
        d->session->close();
        return CHANNEL_RC_OK;
    }

    auto maxVersion = std::max_element(capsInformation.begin(), capsInformation.end(), [](const auto &first, const auto &second) {
        return first.version < second.version;
    });

    qCDebug(KRDP) << "Selected caps:" << capVersionToString(maxVersion->version);

    RDPGFX_CAPS_CONFIRM_PDU capsConfirmPdu;
    capsConfirmPdu.capsSet = &(maxVersion->capSet);
    d->gfxContext->CapsConfirm(d->gfxContext.get(), &capsConfirmPdu);

    d->capsConfirmed = true;

    return CHANNEL_RC_OK;
}

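// Handle a frame acknowledgement from the client and update the measured
// delay between encoded and decoded frames.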
uint32_t VideoStream::onFrameAcknowledge(const RDPGFX_FRAME_ACKNOWLEDGE_PDU *frameAcknowledge)
{
    auto id = frameAcknowledge->frameId;

    auto itr = d->pendingFrames.find(id);
    if (itr == d->pendingFrames.end()) {
        qCWarning(KRDP) << "Got frame acknowledge for an unknown frame";
        return CHANNEL_RC_OK;
    }

    if (frameAcknowledge->queueDepth & SUSPEND_FRAME_ACKNOWLEDGEMENT) {
        qDebug() << "suspend frame ack";
    }

    d->frameDelay = d->encodedFrames - frameAcknowledge->totalFramesDecoded;
    d->pendingFrames.erase(itr);

    return CHANNEL_RC_OK;
}

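// Reset the graphics pipeline for a new output size: announce the monitor
// layout, create a new surface and map it to the output.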
void VideoStream::performReset(const QSize &size)
{
    RDPGFX_RESET_GRAPHICS_PDU resetGraphicsPdu;
    resetGraphicsPdu.width = size.width();
    resetGraphicsPdu.height = size.height();
    resetGraphicsPdu.monitorCount = 1;

    // A single primary monitor covering the whole output. It only needs to
    // live for the synchronous ResetGraphics() call, so a stack array avoids
    // a heap allocation that would never be freed.
    MONITOR_DEF monitors[1];
    monitors[0].left = 0;
    monitors[0].right = size.width();
    monitors[0].top = 0;
    monitors[0].bottom = size.height();
    monitors[0].flags = MONITOR_PRIMARY;
    resetGraphicsPdu.monitorDefArray = monitors;
    d->gfxContext->ResetGraphics(d->gfxContext.get(), &resetGraphicsPdu);

    RDPGFX_CREATE_SURFACE_PDU createSurfacePdu;
    createSurfacePdu.width = size.width();
    createSurfacePdu.height = size.height();
    uint16_t surfaceId = d->nextSurfaceId++;
    createSurfacePdu.surfaceId = surfaceId;
    createSurfacePdu.pixelFormat = GFX_PIXEL_FORMAT_XRGB_8888;
    d->gfxContext->CreateSurface(d->gfxContext.get(), &createSurfacePdu);

    d->surface = Surface{
        .id = surfaceId,
        .size = size,
    };

    RDPGFX_MAP_SURFACE_TO_OUTPUT_PDU mapSurfaceToOutputPdu;
    mapSurfaceToOutputPdu.outputOriginX = 0;
    mapSurfaceToOutputPdu.outputOriginY = 0;
    mapSurfaceToOutputPdu.surfaceId = surfaceId;
    d->gfxContext->MapSurfaceToOutput(d->gfxContext.get(), &mapSurfaceToOutputPdu);
}

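// Submit a single pre-encoded AVC420 frame to the client, wrapped in a
// start frame / surface command / end frame sequence.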
void VideoStream::sendFrame(const VideoFrame &frame)
{
    if (!d->gfxContext || !d->capsConfirmed) {
        return;
    }

    if (frame.data.size() == 0) {
        return;
    }

    if (d->pendingReset) {
        d->pendingReset = false;
        performReset(frame.size);
    }

    d->session->networkDetection()->startBandwidthMeasure();

    // auto alignedSize = QSize{
    //     frame.size.width() + (frame.size.width() % 16 > 0 ? 16 - frame.size.width() : 0),
    //     frame.size.height() + (frame.size.height() % 16 > 0 ? 16 - frame.size.height() : 0)
    // };
    auto frameId = d->frameId++;

    d->encodedFrames++;

    d->pendingFrames.insert(frameId);

    RDPGFX_START_FRAME_PDU startFramePdu;
    RDPGFX_END_FRAME_PDU endFramePdu;

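    // Pack the current UTC time (hours, minutes, seconds, milliseconds) into
    // the start frame timestamp bit fields.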
    auto now = QDateTime::currentDateTimeUtc().time();
    startFramePdu.timestamp = now.hour() << 22 | now.minute() << 16 | now.second() << 10 | now.msec();

    startFramePdu.frameId = frameId;
    endFramePdu.frameId = frameId;

    RDPGFX_SURFACE_COMMAND surfaceCommand;
    surfaceCommand.surfaceId = d->surface.id;
    surfaceCommand.codecId = RDPGFX_CODECID_AVC420;
    surfaceCommand.format = PIXEL_FORMAT_BGRX32;

    // auto damageRect = frame.damage.boundingRect();

    surfaceCommand.left = 0;
    surfaceCommand.top = 0;
    // surfaceCommand.right = damageRect.x() + damageRect.width();
    // surfaceCommand.bottom = damageRect.y() + damageRect.height();
    surfaceCommand.right = frame.size.width();
    surfaceCommand.bottom = frame.size.height();
    surfaceCommand.length = 0;
    surfaceCommand.data = nullptr;

    RDPGFX_AVC420_BITMAP_STREAM avcStream;
    surfaceCommand.extra = &avcStream;

    avcStream.data = (BYTE *)frame.data.data();
    avcStream.length = frame.data.length();

    avcStream.meta.numRegionRects = 1;
    auto rects = std::make_unique<RECTANGLE_16[]>(1);
    rects[0].left = 0;
    rects[0].top = 0;
    rects[0].right = frame.size.width();
    rects[0].bottom = frame.size.height();
    avcStream.meta.regionRects = rects.get();
    auto qualities = std::make_unique<RDPGFX_H264_QUANT_QUALITY[]>(1);
    avcStream.meta.quantQualityVals = qualities.get();
    qualities[0].qp = 22;
    qualities[0].p = 0;
    qualities[0].qualityVal = 100;

    // for (int i = 0; i < frame.damage.rectCount(); ++i) {
    //     auto rect = *(frame.damage.begin() + i);
    //     rects[i].left = rect.x();
    //     rects[i].top = rect.y();
    //     rects[i].right = rect.x() + rect.width();
    //     rects[i].bottom = rect.y() + rect.height();
    //
    //     qualities[i].qp = 22;
    //     qualities[i].p = 0;
    //     qualities[i].qualityVal = 100;
    // }

    d->gfxContext->StartFrame(d->gfxContext.get(), &startFramePdu);
    d->gfxContext->SurfaceCommand(d->gfxContext.get(), &surfaceCommand);

    // RDPGFX_SURFACE_TO_SURFACE_PDU surfacePdu;
    // surfacePdu.surfaceIdSrc = d->surface.id;
    // surfacePdu.surfaceIdDest = d->surface.id;
    //
    // RDPGFX_POINT16 destinationPosition;
    //
    // for (int i = 0; i < frame.damage.rectCount(); ++i) {
    //     auto rect = *(frame.damage.begin() + i);
    //     destinationPosition.x = rect.x();
    //     destinationPosition.y = rect.y();
    //     surfacePdu.destPts = &destinationPosition;
    //     surfacePdu.destPtsCount = 1;
    //     surfacePdu.rectSrc = rects[i];
    //
    //     d->gfxContext->SurfaceToSurface(d->gfxContext, &surfacePdu);
    // }

    d->gfxContext->EndFrame(d->gfxContext.get(), &endFramePdu);

    d->session->networkDetection()->stopBandwidthMeasure();

    // rdpUpdate *update = d->session->rdpPeer()->context->update;
    //
    // const SURFACE_FRAME_MARKER beginMarker {
    //     .frameAction = SURFACECMD_FRAMEACTION_BEGIN,
    //     .frameId = d->frameId,
    // };
    // update->SurfaceFrameMarker(update->context, &beginMarker);
    //
    // SURFACE_BITS_COMMAND surfaceBits;
    //
    // update->SurfaceBits(update->context, &surfaceBits);
    //
    // const SURFACE_FRAME_MARKER endMarker {
    //     .frameAction = SURFACECMD_FRAMEACTION_END,
    //     .frameId = d->frameId,
    // };
    // update->SurfaceFrameMarker(update->context, &endMarker);
}

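// Estimate a sustainable frame rate from the measured round trip time and the
// client's frame decoding delay, then average the estimates over
// FrameRateEstimateAveragePeriod before updating the requested frame rate.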
void VideoStream::updateRequestedFrameRate()
{
    auto rtt = std::max(clk::duration_cast<clk::milliseconds>(d->session->networkDetection()->averageRTT()), clk::milliseconds(1));
    auto now = clk::system_clock::now();

    FrameRateEstimate estimate;
    estimate.timeStamp = now;
    estimate.estimate = std::min(int(clk::milliseconds(1000) / (rtt * std::max(d->frameDelay.load(), 1))), d->maximumFrameRate);
    d->frameRateEstimates.append(estimate);

    if (now - d->lastFrameRateEstimation < FrameRateEstimateAveragePeriod) {
        return;
    }

    d->lastFrameRateEstimation = now;

    // Discard estimates older than the averaging period.
    d->frameRateEstimates.erase(std::remove_if(d->frameRateEstimates.begin(),
                                               d->frameRateEstimates.end(),
                                               [now](const auto &estimate) {
                                                   return (now - estimate.timeStamp) > FrameRateEstimateAveragePeriod;
                                               }),
                                d->frameRateEstimates.end());

    auto sum = std::accumulate(d->frameRateEstimates.begin(), d->frameRateEstimates.end(), 0, [](int acc, const auto &estimate) {
        return acc + estimate.estimate;
    });
    auto average = sum / d->frameRateEstimates.size();

    if (average != d->requestedFrameRate) {
        d->requestedFrameRate = average;
        Q_EMIT requestedFrameRateChanged();
    }
}
}

#include "moc_VideoStream.cpp"