/*
    SPDX-FileCopyrightText: 2018-2020 Red Hat Inc
    SPDX-FileCopyrightText: 2020 Aleix Pol Gonzalez <aleixpol@kde.org>
    SPDX-FileContributor: Jan Grulich <jgrulich@redhat.com>

    SPDX-License-Identifier: LGPL-2.0-or-later
*/

#include "screencaststream.h"
#include "compositor.h"
#include "core/graphicsbufferallocator.h"
#include "core/outputbackend.h"
#include "core/renderbackend.h"
#include "cursor.h"
#include "kwinscreencast_logging.h"
#include "main.h"
#include "opengl/eglnativefence.h"
#include "opengl/gltexture.h"
#include "opengl/glutils.h"
#include "pipewirecore.h"
#include "platformsupport/scenes/opengl/abstract_egl_backend.h"
#include "platformsupport/scenes/opengl/openglbackend.h"
#include "scene/workspacescene.h"
#include "screencastdmabuftexture.h"
#include "screencastsource.h"

#include <KLocalizedString>

#include <QLoggingCategory>
#include <QPainter>

#include <spa/buffer/meta.h>

#include <fcntl.h>
#include <sys/mman.h>
#include <unistd.h>

#include <libdrm/drm_fourcc.h>

namespace KWin
{

static spa_video_format drmFourCCToSpaVideoFormat(quint32 format)
{
    switch (format) {
    case DRM_FORMAT_ARGB8888:
        return SPA_VIDEO_FORMAT_BGRA;
    case DRM_FORMAT_XRGB8888:
        return SPA_VIDEO_FORMAT_BGRx;
    case DRM_FORMAT_RGBA8888:
        return SPA_VIDEO_FORMAT_ABGR;
    case DRM_FORMAT_RGBX8888:
        return SPA_VIDEO_FORMAT_xBGR;
    case DRM_FORMAT_ABGR8888:
        return SPA_VIDEO_FORMAT_RGBA;
    case DRM_FORMAT_XBGR8888:
        return SPA_VIDEO_FORMAT_RGBx;
    case DRM_FORMAT_BGRA8888:
        return SPA_VIDEO_FORMAT_ARGB;
    case DRM_FORMAT_BGRX8888:
        return SPA_VIDEO_FORMAT_xRGB;
    case DRM_FORMAT_NV12:
        return SPA_VIDEO_FORMAT_NV12;
    case DRM_FORMAT_RGB888:
        return SPA_VIDEO_FORMAT_BGR;
    case DRM_FORMAT_BGR888:
        return SPA_VIDEO_FORMAT_RGB;
    default:
        qCDebug(KWIN_SCREENCAST) << "unknown format" << format;
        return SPA_VIDEO_FORMAT_xRGB;
    }
}

void ScreenCastStream::onStreamStateChanged(pw_stream_state old, pw_stream_state state, const char *error_message)
{
    qCDebug(KWIN_SCREENCAST) << "state changed" << pw_stream_state_as_string(old) << " -> " << pw_stream_state_as_string(state) << error_message;

    m_streaming = false;
    m_pendingBuffer = nullptr;
    m_pendingNotifier.reset();
    m_pendingFence.reset();

    switch (state) {
    case PW_STREAM_STATE_ERROR:
        qCWarning(KWIN_SCREENCAST) << "Stream error: " << error_message;
        break;
    case PW_STREAM_STATE_PAUSED:
        if (nodeId() == 0 && m_pwStream) {
            m_pwNodeId = pw_stream_get_node_id(m_pwStream);
            Q_EMIT streamReady(nodeId());
        }
        break;
    case PW_STREAM_STATE_STREAMING:
        m_streaming = true;
        Q_EMIT startStreaming();
        break;
    case PW_STREAM_STATE_CONNECTING:
        break;
    case PW_STREAM_STATE_UNCONNECTED:
        if (!m_stopped) {
            Q_EMIT stopStreaming();
        }
        break;
    }
}

#define CURSOR_BPP 4
#define CURSOR_META_SIZE(w, h) (sizeof(struct spa_meta_cursor) + sizeof(struct spa_meta_bitmap) + w * h * CURSOR_BPP)
static const int videoDamageRegionCount = 16;

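// Announces the negotiated buffer parameters to PipeWire: 2-16 buffers backed by either
// dmabuf or memfd, plus metadata slots for the cursor, video damage and the frame header.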
void ScreenCastStream::newStreamParams()
{
    qCDebug(KWIN_SCREENCAST) << "announcing stream params. with dmabuf:" << m_dmabufParams.has_value();
    uint8_t paramsBuffer[1024];
    spa_pod_builder pod_builder = SPA_POD_BUILDER_INIT(paramsBuffer, sizeof(paramsBuffer));
    const int buffertypes = m_dmabufParams ? (1 << SPA_DATA_DmaBuf) : (1 << SPA_DATA_MemFd);
    const int bpp = m_videoFormat.format == SPA_VIDEO_FORMAT_RGB || m_videoFormat.format == SPA_VIDEO_FORMAT_BGR ? 3 : 4;
    const int stride = SPA_ROUND_UP_N(m_resolution.width() * bpp, 4);

    struct spa_pod_frame f;
    spa_pod_builder_push_object(&pod_builder, &f, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers);
    spa_pod_builder_add(&pod_builder,
                        SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(16, 2, 16),
                        SPA_PARAM_BUFFERS_dataType, SPA_POD_CHOICE_FLAGS_Int(buffertypes), 0);
    if (!m_dmabufParams) {
        spa_pod_builder_add(&pod_builder,
                            SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(1),
                            SPA_PARAM_BUFFERS_size, SPA_POD_Int(stride * m_resolution.height()),
                            SPA_PARAM_BUFFERS_stride, SPA_POD_Int(stride),
                            SPA_PARAM_BUFFERS_align, SPA_POD_Int(16), 0);
    } else {
        spa_pod_builder_add(&pod_builder,
                            SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(m_dmabufParams->planeCount), 0);
    }
    spa_pod *bufferPod = (spa_pod *)spa_pod_builder_pop(&pod_builder, &f);

    QVarLengthArray<const spa_pod *> params = {
        bufferPod,
        (spa_pod *)spa_pod_builder_add_object(&pod_builder,
                                              SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
                                              SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Cursor),
                                              SPA_PARAM_META_size, SPA_POD_Int(CURSOR_META_SIZE(m_cursor.bitmapSize.width(), m_cursor.bitmapSize.height()))),
        (spa_pod *)spa_pod_builder_add_object(&pod_builder,
                                              SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
                                              SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoDamage),
                                              SPA_PARAM_META_size, SPA_POD_CHOICE_RANGE_Int(sizeof(struct spa_meta_region) * videoDamageRegionCount, sizeof(struct spa_meta_region) * 1, sizeof(struct spa_meta_region) * videoDamageRegionCount)),
        (spa_pod *)spa_pod_builder_add_object(&pod_builder,
                                              SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
                                              SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Header),
                                              SPA_PARAM_META_size, SPA_POD_Int(sizeof(struct spa_meta_header))),
    };

    pw_stream_update_params(m_pwStream, params.data(), params.count());
}

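// Handles format (re)negotiation. When the client offers dmabuf modifiers, test which one
// the backend can actually allocate and answer with a fixated format; without modifiers,
// fall back to memfd and define the buffers right away.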
void ScreenCastStream::onStreamParamChanged(uint32_t id, const struct spa_pod *format)
{
    if (!format || id != SPA_PARAM_Format) {
        return;
    }

    spa_format_video_raw_parse(format, &m_videoFormat);
    auto modifierProperty = spa_pod_find_prop(format, nullptr, SPA_FORMAT_VIDEO_modifier);
    QList<uint64_t> receivedModifiers;
    if (modifierProperty) {
        const struct spa_pod *modifierPod = &modifierProperty->value;

        uint32_t modifiersCount = SPA_POD_CHOICE_N_VALUES(modifierPod);
        uint64_t *modifiers = (uint64_t *)SPA_POD_CHOICE_VALUES(modifierPod);
        receivedModifiers = QList<uint64_t>(modifiers, modifiers + modifiersCount);
        // Remove duplicates
        std::sort(receivedModifiers.begin(), receivedModifiers.end());
        receivedModifiers.erase(std::unique(receivedModifiers.begin(), receivedModifiers.end()), receivedModifiers.end());

        if (!m_dmabufParams || !receivedModifiers.contains(m_dmabufParams->modifier)) {
            if (modifierProperty->flags & SPA_POD_PROP_FLAG_DONT_FIXATE) {
                // DRM_FORMAT_MOD_INVALID should be used as a last option. Do not just remove it;
                // it may be the only item on the list.
                if (receivedModifiers.count() > 1) {
                    receivedModifiers.removeAll(DRM_FORMAT_MOD_INVALID);
                }
                m_dmabufParams = testCreateDmaBuf(m_resolution, m_drmFormat, receivedModifiers);
            } else {
                m_dmabufParams = testCreateDmaBuf(m_resolution, m_drmFormat, {DRM_FORMAT_MOD_INVALID});
            }

            // In case we fail to use any modifier from the list of offered ones, remove these
            // from all our future offerings, otherwise there will be no indication that they cannot
            // be used and clients may keep requesting them over and over.
            if (!m_dmabufParams.has_value()) {
                for (uint64_t modifier : receivedModifiers) {
                    m_modifiers.removeAll(modifier);
                }
                // Also, in case DRM_FORMAT_MOD_INVALID was used and didn't fail, we still need to
                // set it as our modifier, otherwise it would be set to the default value (0), which is
                // also a valid modifier, but not the one we actually want to use.
            } else if (receivedModifiers.count() == 1 && receivedModifiers.constFirst() == DRM_FORMAT_MOD_INVALID) {
                m_dmabufParams->modifier = DRM_FORMAT_MOD_INVALID;
            }

            qCDebug(KWIN_SCREENCAST) << "Stream dmabuf modifiers received, offering our best suited modifier" << m_dmabufParams.has_value();
            char buffer[2048];
            auto params = buildFormats(m_dmabufParams.has_value(), buffer);
            pw_stream_update_params(m_pwStream, params.data(), params.count());
            return;
        }
    } else {
        m_dmabufParams.reset();
    }

    qCDebug(KWIN_SCREENCAST) << "Stream format found, defining buffers";
    newStreamParams();
    m_streaming = true;
}

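// Called when PipeWire needs backing storage for a new pw_buffer: allocates a dmabuf
// texture when the negotiated data type allows it, otherwise a sealed memfd.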
void ScreenCastStream::onStreamAddBuffer(pw_buffer *buffer)
{
    struct spa_data *spa_data = buffer->buffer->datas;

    spa_data->mapoffset = 0;
    spa_data->flags = SPA_DATA_FLAG_READWRITE;

    std::shared_ptr<ScreenCastDmaBufTexture> dmabuff;

    if (spa_data[0].type != SPA_ID_INVALID && spa_data[0].type & (1 << SPA_DATA_DmaBuf)) {
        Q_ASSERT(m_dmabufParams);
        dmabuff = createDmaBufTexture(*m_dmabufParams);
    }

    if (dmabuff) {
        const DmaBufAttributes *dmabufAttribs = dmabuff->buffer()->dmabufAttributes();
        spa_data->maxsize = dmabufAttribs->pitch[0] * m_resolution.height();

        Q_ASSERT(buffer->buffer->n_datas >= uint(dmabufAttribs->planeCount));
        for (int i = 0; i < dmabufAttribs->planeCount; ++i) {
            buffer->buffer->datas[i].type = SPA_DATA_DmaBuf;
            buffer->buffer->datas[i].fd = dmabufAttribs->fd[i].get();
            buffer->buffer->datas[i].data = nullptr;
        }
        m_dmabufDataForPwBuffer.insert(buffer, dmabuff);
#ifdef F_SEAL_SEAL // Disable memfd on systems that don't have it, like BSD < 12
    } else {
        if (!(spa_data[0].type & (1 << SPA_DATA_MemFd))) {
            qCCritical(KWIN_SCREENCAST) << "memfd: Client doesn't support memfd buffer data type";
            return;
        }

        const int bytesPerPixel = m_source->hasAlphaChannel() ? 4 : 3;
        const int stride = SPA_ROUND_UP_N(m_resolution.width() * bytesPerPixel, 4);
        spa_data->maxsize = stride * m_resolution.height();
        spa_data->type = SPA_DATA_MemFd;
        spa_data->fd = memfd_create("kwin-screencast-memfd", MFD_CLOEXEC | MFD_ALLOW_SEALING);
        if (spa_data->fd == -1) {
            qCCritical(KWIN_SCREENCAST) << "memfd: Can't create memfd";
            return;
        }
        spa_data->mapoffset = 0;

        if (ftruncate(spa_data->fd, spa_data->maxsize) < 0) {
            qCCritical(KWIN_SCREENCAST) << "memfd: Can't truncate to" << spa_data->maxsize;
            return;
        }

        unsigned int seals = F_SEAL_GROW | F_SEAL_SHRINK | F_SEAL_SEAL;
        if (fcntl(spa_data->fd, F_ADD_SEALS, seals) == -1) {
            qCWarning(KWIN_SCREENCAST) << "memfd: Failed to add seals";
        }

        spa_data->data = mmap(nullptr,
                              spa_data->maxsize,
                              PROT_READ | PROT_WRITE,
                              MAP_SHARED,
                              spa_data->fd,
                              spa_data->mapoffset);
        if (spa_data->data == MAP_FAILED) {
            qCCritical(KWIN_SCREENCAST) << "memfd: Failed to mmap memory";
        } else {
            qCDebug(KWIN_SCREENCAST) << "memfd: created successfully" << spa_data->data << spa_data->maxsize;
        }
#endif
    }

    m_waitForNewBuffers = false;
}

void ScreenCastStream::onStreamRemoveBuffer(pw_buffer *buffer)
{
    m_dmabufDataForPwBuffer.remove(buffer);

    struct spa_buffer *spa_buffer = buffer->buffer;
    struct spa_data *spa_data = spa_buffer->datas;
    if (spa_data && spa_data->type == SPA_DATA_MemFd) {
        munmap(spa_data->data, spa_data->maxsize);
        close(spa_data->fd);
    } else if (spa_data && spa_data->type == SPA_DATA_DmaBuf) {
        for (int i = 0, c = buffer->buffer->n_datas; i < c; ++i) {
            close(buffer->buffer->datas[i].fd);
        }
    }
}

void ScreenCastStream::onStreamRenegotiateFormat(uint64_t)
{
    m_streaming = false; // pause streaming as we wait for the renegotiation
    char buffer[2048];
    auto params = buildFormats(m_dmabufParams.has_value(), buffer);
    pw_stream_update_params(m_pwStream, params.data(), params.count());
}

ScreenCastStream::ScreenCastStream(ScreenCastSource *source, std::shared_ptr<PipeWireCore> pwCore, QObject *parent)
    : QObject(parent)
    , m_pwCore(pwCore)
    , m_source(source)
    , m_resolution(source->textureSize())
{
    connect(source, &ScreenCastSource::closed, this, [this] {
        m_streaming = false;
        Q_EMIT stopStreaming();
    });

    m_pwStreamEvents.version = PW_VERSION_STREAM_EVENTS;
    m_pwStreamEvents.add_buffer = [](void *data, struct pw_buffer *buffer) {
        auto _this = static_cast<ScreenCastStream *>(data);
        _this->onStreamAddBuffer(buffer);
    };
    m_pwStreamEvents.remove_buffer = [](void *data, struct pw_buffer *buffer) {
        auto _this = static_cast<ScreenCastStream *>(data);
        _this->onStreamRemoveBuffer(buffer);
    };
    m_pwStreamEvents.state_changed = [](void *data, pw_stream_state old, pw_stream_state state, const char *error_message) {
        auto _this = static_cast<ScreenCastStream *>(data);
        _this->onStreamStateChanged(old, state, error_message);
    };
    m_pwStreamEvents.param_changed = [](void *data, uint32_t id, const struct spa_pod *param) {
        auto _this = static_cast<ScreenCastStream *>(data);
        _this->onStreamParamChanged(id, param);
    };

    m_pendingFrame.setSingleShot(true);
    connect(&m_pendingFrame, &QTimer::timeout, this, [this] {
        recordFrame(m_pendingDamages);
    });
}

ScreenCastStream::~ScreenCastStream()
{
    m_stopped = true;
    if (m_pwStream) {
        pw_stream_destroy(m_pwStream);
    }
}

bool ScreenCastStream::init()
{
    if (!m_pwCore->m_error.isEmpty()) {
        m_error = m_pwCore->m_error;
        return false;
    }

    connect(m_pwCore.get(), &PipeWireCore::pipewireFailed, this, &ScreenCastStream::coreFailed);

    if (!createStream()) {
        qCWarning(KWIN_SCREENCAST) << "Failed to create PipeWire stream";
        m_error = i18n("Failed to create PipeWire stream");
        return false;
    }

    m_pwRenegotiate = pw_loop_add_event(
        m_pwCore->pwMainLoop, [](void *data, uint64_t format) {
            auto _this = static_cast<ScreenCastStream *>(data);
            _this->onStreamRenegotiateFormat(format);
        },
        this);

    return true;
}

uint ScreenCastStream::framerate()
{
    if (m_pwStream) {
        return m_videoFormat.max_framerate.num / m_videoFormat.max_framerate.denom;
    }

    return 0;
}

uint ScreenCastStream::nodeId()
{
    return m_pwNodeId;
}

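// Creates and connects the PipeWire stream: picks a DRM format the backend can export as
// dmabuf (falling back to ARGB8888 or to memfd-only), announces the initial formats and
// wires up cursor updates according to the requested cursor mode.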
bool ScreenCastStream::createStream()
{
    const QByteArray objname = "kwin-screencast-" + objectName().toUtf8();
    m_pwStream = pw_stream_new(m_pwCore->pwCore, objname, nullptr);

    const auto supported = Compositor::self()->backend()->supportedFormats();
    auto itModifiers = supported.constFind(m_source->drmFormat());

    // If the offered format is not available for dmabuf, prefer converting to another one rather than resorting to memfd
    if (itModifiers == supported.constEnd() && !supported.isEmpty()) {
        itModifiers = supported.constFind(DRM_FORMAT_ARGB8888);
        if (itModifiers != supported.constEnd()) {
            m_drmFormat = itModifiers.key();
        }
    }

    if (itModifiers == supported.constEnd()) {
        m_drmFormat = m_source->drmFormat();
        m_modifiers = {DRM_FORMAT_MOD_INVALID};
    } else {
        m_drmFormat = itModifiers.key();
        m_modifiers = *itModifiers;
        // Also support modifier-less DmaBufs
        m_modifiers += DRM_FORMAT_MOD_INVALID;
    }
    m_hasDmaBuf = testCreateDmaBuf(m_resolution, m_drmFormat, {DRM_FORMAT_MOD_INVALID}).has_value();

    char buffer[2048];
    QList<const spa_pod *> params = buildFormats(false, buffer);

    pw_stream_add_listener(m_pwStream, &m_streamListener, &m_pwStreamEvents, this);
    auto flags = pw_stream_flags(PW_STREAM_FLAG_DRIVER | PW_STREAM_FLAG_ALLOC_BUFFERS);

    if (pw_stream_connect(m_pwStream, PW_DIRECTION_OUTPUT, SPA_ID_INVALID, flags, params.data(), params.count()) != 0) {
        qCWarning(KWIN_SCREENCAST) << "Could not connect to stream";
        pw_stream_destroy(m_pwStream);
        m_pwStream = nullptr;
        return false;
    }

    if (m_cursor.mode == ScreencastV1Interface::Embedded) {
        connect(Cursors::self(), &Cursors::currentCursorChanged, this, &ScreenCastStream::invalidateCursor);
        connect(Cursors::self(), &Cursors::positionChanged, this, [this] {
            recordFrame({});
        });
    } else if (m_cursor.mode == ScreencastV1Interface::Metadata) {
        connect(Cursors::self(), &Cursors::currentCursorChanged, this, &ScreenCastStream::invalidateCursor);
        connect(Cursors::self(), &Cursors::positionChanged, this, &ScreenCastStream::recordCursor);
    }

    return true;
}

void ScreenCastStream::coreFailed(const QString &errorMessage)
{
    m_error = errorMessage;
    Q_EMIT stopStreaming();
}

void ScreenCastStream::stop()
{
    m_stopped = true;
    delete this;
}

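// Records one frame into a dequeued PipeWire buffer: throttles to the negotiated maximum
// framerate, triggers a renegotiation when the source size changed, renders into the memfd
// or dmabuf backing, embeds or publishes the cursor and fills in the damage/header metadata.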
void ScreenCastStream::recordFrame(const QRegion &_damagedRegion)
{
    QRegion damagedRegion = _damagedRegion;
    Q_ASSERT(!m_stopped);

    if (!m_streaming) {
        m_pendingDamages += damagedRegion;
        return;
    }

    if (m_videoFormat.max_framerate.num != 0 && !m_lastSent.isNull()) {
        auto frameInterval = (1000. * m_videoFormat.max_framerate.denom / m_videoFormat.max_framerate.num);
        auto lastSentAgo = m_lastSent.msecsTo(QDateTime::currentDateTimeUtc());
        if (lastSentAgo < frameInterval) {
            m_pendingDamages += damagedRegion;
            if (!m_pendingFrame.isActive()) {
                m_pendingFrame.start(frameInterval - lastSentAgo);
            }
            return;
        }
    }

    m_pendingDamages = {};
    if (m_pendingBuffer) {
        return;
    }

    if (m_waitForNewBuffers) {
        qCWarning(KWIN_SCREENCAST) << "Waiting for new buffers to be created";
        return;
    }

    const auto size = m_source->textureSize();
    if (size != m_resolution) {
        m_resolution = size;
        m_waitForNewBuffers = true;
        m_dmabufParams = std::nullopt;
        pw_loop_signal_event(m_pwCore->pwMainLoop, m_pwRenegotiate);
        return;
    }

    const char *error = "";
    auto state = pw_stream_get_state(m_pwStream, &error);
    if (state != PW_STREAM_STATE_STREAMING) {
        if (error) {
            qCWarning(KWIN_SCREENCAST) << "Failed to record frame: stream is not active" << error;
        }
        return;
    }

    struct pw_buffer *buffer = pw_stream_dequeue_buffer(m_pwStream);

    if (!buffer) {
        return;
    }

    struct spa_buffer *spa_buffer = buffer->buffer;
    struct spa_data *spa_data = spa_buffer->datas;

    uint8_t *data = (uint8_t *)spa_data->data;
    if (!data && spa_buffer->datas->type != SPA_DATA_DmaBuf) {
        qCWarning(KWIN_SCREENCAST) << "Failed to record frame: invalid buffer data";
        pw_stream_queue_buffer(m_pwStream, buffer);
        return;
    }

    spa_data->chunk->offset = 0;
    spa_data->chunk->flags = SPA_CHUNK_FLAG_NONE;
    static_cast<OpenGLBackend *>(Compositor::self()->backend())->makeCurrent();
    if (data || spa_data[0].type == SPA_DATA_MemFd) {
        const bool hasAlpha = m_source->hasAlphaChannel();
        const int bpp = data && !hasAlpha ? 3 : 4;
        const uint stride = SPA_ROUND_UP_N(size.width() * bpp, 4);

        if ((stride * size.height()) > spa_data->maxsize) {
            qCDebug(KWIN_SCREENCAST) << "Failed to record frame: frame is too big";
            pw_stream_queue_buffer(m_pwStream, buffer);
            return;
        }

        spa_data->chunk->stride = stride;
        spa_data->chunk->size = stride * size.height();

        m_source->render(spa_data, m_videoFormat.format);

        auto cursor = Cursors::self()->currentCursor();
        if (m_cursor.mode == ScreencastV1Interface::Embedded && includesCursor(cursor)) {
            QImage dest(data, size.width(), size.height(), stride, hasAlpha ? QImage::Format_RGBA8888_Premultiplied : QImage::Format_RGB888);
            QPainter painter(&dest);
            const auto position = (cursor->pos() - m_cursor.viewport.topLeft() - cursor->hotspot()) * m_cursor.scale;
            const PlatformCursorImage cursorImage = kwinApp()->cursorImage();
            painter.drawImage(QRect{position.toPoint(), cursorImage.image().size()}, cursorImage.image());
        }
    } else {
        auto &buf = m_dmabufDataForPwBuffer[buffer];
        Q_ASSERT(buf);

        const DmaBufAttributes *dmabufAttribs = buf->buffer()->dmabufAttributes();
        Q_ASSERT(buffer->buffer->n_datas >= uint(dmabufAttribs->planeCount));
        for (int i = 0; i < dmabufAttribs->planeCount; ++i) {
            buffer->buffer->datas[i].chunk->stride = dmabufAttribs->pitch[i];
            buffer->buffer->datas[i].chunk->offset = dmabufAttribs->offset[i];
        }
        spa_data->chunk->size = spa_data->maxsize;

        m_source->render(buf->framebuffer());

        auto cursor = Cursors::self()->currentCursor();
        if (m_cursor.mode == ScreencastV1Interface::Embedded && includesCursor(cursor)) {
            if (m_cursor.invalid) {
                m_cursor.invalid = false;
                const PlatformCursorImage cursorImage = kwinApp()->cursorImage();
                if (cursorImage.isNull()) {
                    m_cursor.texture = nullptr;
                } else {
                    m_cursor.texture = GLTexture::upload(cursorImage.image());
                }
            }
            if (m_cursor.texture) {
                GLFramebuffer::pushFramebuffer(buf->framebuffer());

                auto shader = ShaderManager::instance()->pushShader(ShaderTrait::MapTexture);

                const QRectF cursorRect = scaledRect(cursor->geometry().translated(-m_cursor.viewport.topLeft()), m_cursor.scale);
                QMatrix4x4 mvp;
                mvp.scale(1, -1);
                mvp.ortho(QRectF(QPointF(0, 0), size));
                mvp.translate(cursorRect.x(), cursorRect.y());
                shader->setUniform(GLShader::Mat4Uniform::ModelViewProjectionMatrix, mvp);

                glEnable(GL_BLEND);
                glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
                m_cursor.texture->render(cursorRect.size());
                glDisable(GL_BLEND);

                ShaderManager::instance()->popShader();
                GLFramebuffer::popFramebuffer();

                damagedRegion += QRegion{m_cursor.lastRect.toAlignedRect()} | cursorRect.toAlignedRect();
                m_cursor.lastRect = cursorRect;
            } else {
                damagedRegion += m_cursor.lastRect.toAlignedRect();
                m_cursor.lastRect = {};
            }
        }
    }

    if (m_cursor.mode == ScreencastV1Interface::Metadata) {
        sendCursorData(Cursors::self()->currentCursor(),
                       (spa_meta_cursor *)spa_buffer_find_meta_data(spa_buffer, SPA_META_Cursor, sizeof(spa_meta_cursor)));
    }

    addDamage(spa_buffer, damagedRegion);
    addHeader(spa_buffer);
    tryEnqueue(buffer);
}

void ScreenCastStream::addHeader(spa_buffer *spaBuffer)
{
    spa_meta_header *spaHeader = (spa_meta_header *)spa_buffer_find_meta_data(spaBuffer, SPA_META_Header, sizeof(spaHeader));
    if (spaHeader) {
        spaHeader->flags = 0;
        spaHeader->dts_offset = 0;
        spaHeader->seq = m_sequential++;
        spaHeader->pts = m_source->clock().count();
    }
}

void ScreenCastStream::addDamage(spa_buffer *spaBuffer, const QRegion &damagedRegion)
{
    if (spa_meta *vdMeta = spa_buffer_find_meta(spaBuffer, SPA_META_VideoDamage)) {
        struct spa_meta_region *r = (spa_meta_region *)spa_meta_first(vdMeta);

        // If there are too many rectangles, we just send the bounding rect
        if (damagedRegion.rectCount() > videoDamageRegionCount - 1) {
            if (spa_meta_check(r, vdMeta)) {
                auto rect = damagedRegion.boundingRect();
                r->region = SPA_REGION(rect.x(), rect.y(), quint32(rect.width()), quint32(rect.height()));
                r++;
            }
        } else {
            for (const QRect &rect : damagedRegion) {
                if (spa_meta_check(r, vdMeta)) {
                    r->region = SPA_REGION(rect.x(), rect.y(), quint32(rect.width()), quint32(rect.height()));
                    r++;
                }
            }
        }

        if (spa_meta_check(r, vdMeta)) {
            r->region = SPA_REGION(0, 0, 0, 0);
        }
    }
}

void ScreenCastStream::invalidateCursor()
{
    m_cursor.invalid = true;
}

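// Pushes a cursor-only update: the video data is marked corrupted/empty and only the
// cursor metadata (and frame header) is filled in.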
void ScreenCastStream::recordCursor()
{
    Q_ASSERT(!m_stopped);
    if (!m_streaming) {
        return;
    }

    if (m_pendingBuffer) {
        return;
    }

    const char *error = "";
    auto state = pw_stream_get_state(m_pwStream, &error);
    if (state != PW_STREAM_STATE_STREAMING) {
        if (error) {
            qCWarning(KWIN_SCREENCAST) << "Failed to record cursor position: stream is not active" << error;
        }
        return;
    }

    if (!includesCursor(Cursors::self()->currentCursor()) && !m_cursor.visible) {
        return;
    }

    m_pendingBuffer = pw_stream_dequeue_buffer(m_pwStream);
    if (!m_pendingBuffer) {
        return;
    }

    struct spa_buffer *spa_buffer = m_pendingBuffer->buffer;

    // In PipeWire terms, "corrupted" means "do not look at the frame contents", and here they're empty.
    spa_buffer->datas[0].chunk->flags = SPA_CHUNK_FLAG_CORRUPTED;
    spa_buffer->datas[0].chunk->size = 0;

    sendCursorData(Cursors::self()->currentCursor(),
                   (spa_meta_cursor *)spa_buffer_find_meta_data(spa_buffer, SPA_META_Cursor, sizeof(spa_meta_cursor)));
    addHeader(spa_buffer);
    addDamage(spa_buffer, {});
    enqueue();
}

void ScreenCastStream::tryEnqueue(pw_buffer *buffer)
{
    m_pendingBuffer = buffer;

    // The GPU doesn't necessarily process draw commands as soon as they are issued. Thus,
    // we need to insert a fence into the command stream and enqueue the pipewire buffer
    // only after the fence is signaled; otherwise stream consumers will most likely see
    // a corrupted buffer.
    if (Compositor::self()->scene()->supportsNativeFence()) {
        Q_ASSERT_X(eglGetCurrentContext(), "tryEnqueue", "no current context");
        m_pendingFence = std::make_unique<EGLNativeFence>(kwinApp()->outputBackend()->sceneEglDisplayObject());
        if (!m_pendingFence->isValid()) {
            qCWarning(KWIN_SCREENCAST) << "Failed to create a native EGL fence";
            glFinish();
            enqueue();
        } else {
            m_pendingNotifier = std::make_unique<QSocketNotifier>(m_pendingFence->fileDescriptor().get(), QSocketNotifier::Read);
            connect(m_pendingNotifier.get(), &QSocketNotifier::activated, this, &ScreenCastStream::enqueue);
        }
    } else {
        // The compositing backend doesn't support native fences. We don't have any other choice
        // but to stall the graphics pipeline. Otherwise stream consumers may see an incomplete buffer.
        glFinish();
        enqueue();
    }
}

void ScreenCastStream::enqueue()
{
    Q_ASSERT_X(m_pendingBuffer, "enqueue", "pending buffer must be valid");

    m_pendingFence.reset();
    m_pendingNotifier.reset();

    if (!m_streaming) {
        return;
    }
    pw_stream_queue_buffer(m_pwStream, m_pendingBuffer);

    if (m_pendingBuffer->buffer->datas[0].chunk->flags != SPA_CHUNK_FLAG_CORRUPTED) {
        m_lastSent = QDateTime::currentDateTimeUtc();
    }

    m_pendingBuffer = nullptr;
}

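// Assembles the EnumFormat pods announced to PipeWire: optionally a fixated dmabuf format,
// a dmabuf format with the full modifier list, and always a plain shared-memory fallback.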
QList<const spa_pod *> ScreenCastStream::buildFormats(bool fixate, char buffer[2048])
{
    const auto format = drmFourCCToSpaVideoFormat(m_drmFormat);
    spa_pod_builder podBuilder = SPA_POD_BUILDER_INIT(buffer, 2048);
    spa_fraction defFramerate = SPA_FRACTION(0, 1);
    spa_fraction minFramerate = SPA_FRACTION(1, 1);
    spa_fraction maxFramerate = SPA_FRACTION(m_source->refreshRate() / 1000, 1);

    spa_rectangle resolution = SPA_RECTANGLE(uint32_t(m_resolution.width()), uint32_t(m_resolution.height()));

    QList<const spa_pod *> params;
    params.reserve(fixate + m_hasDmaBuf + 1);
    if (fixate) {
        params.append(buildFormat(&podBuilder, SPA_VIDEO_FORMAT_BGRA, &resolution, &defFramerate, &minFramerate, &maxFramerate, {m_dmabufParams->modifier}, SPA_POD_PROP_FLAG_MANDATORY));
    }
    if (m_hasDmaBuf) {
        params.append(buildFormat(&podBuilder, SPA_VIDEO_FORMAT_BGRA, &resolution, &defFramerate, &minFramerate, &maxFramerate, m_modifiers, SPA_POD_PROP_FLAG_MANDATORY | SPA_POD_PROP_FLAG_DONT_FIXATE));
    }
    params.append(buildFormat(&podBuilder, format, &resolution, &defFramerate, &minFramerate, &maxFramerate, {}, 0));
    return params;
}

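// Builds a single EnumFormat pod for the given format, resolution and framerate range.
// Alpha formats also advertise their alpha-less sibling, and the modifier list is emitted
// as an enum choice whose first entry doubles as the preferred value.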
spa_pod *ScreenCastStream::buildFormat(struct spa_pod_builder *b, enum spa_video_format format, struct spa_rectangle *resolution,
                                       struct spa_fraction *defaultFramerate, struct spa_fraction *minFramerate, struct spa_fraction *maxFramerate,
                                       const QList<uint64_t> &modifiers, quint32 modifiersFlags)
{
    struct spa_pod_frame f[2];
    spa_pod_builder_push_object(b, &f[0], SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat);
    spa_pod_builder_add(b, SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video), 0);
    spa_pod_builder_add(b, SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw), 0);
    spa_pod_builder_add(b, SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(resolution), 0);
    spa_pod_builder_add(b, SPA_FORMAT_VIDEO_framerate, SPA_POD_Fraction(defaultFramerate), 0);
    spa_pod_builder_add(b, SPA_FORMAT_VIDEO_maxFramerate,
                        SPA_POD_CHOICE_RANGE_Fraction(
                            SPA_POD_Fraction(maxFramerate),
                            SPA_POD_Fraction(minFramerate),
                            SPA_POD_Fraction(maxFramerate)),
                        0);

    if (format == SPA_VIDEO_FORMAT_BGRA) {
        /* announce equivalent format without alpha */
        spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_CHOICE_ENUM_Id(3, format, format, SPA_VIDEO_FORMAT_BGRx), 0);
    } else if (format == SPA_VIDEO_FORMAT_RGBA) {
        /* announce equivalent format without alpha */
        spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_CHOICE_ENUM_Id(3, format, format, SPA_VIDEO_FORMAT_RGBx), 0);
    } else {
        spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_Id(format), 0);
    }

    if (!modifiers.isEmpty()) {
        spa_pod_builder_prop(b, SPA_FORMAT_VIDEO_modifier, modifiersFlags);
        spa_pod_builder_push_choice(b, &f[1], SPA_CHOICE_Enum, 0);

        int c = 0;
        for (auto modifier : modifiers) {
            spa_pod_builder_long(b, modifier);
            if (c++ == 0) {
                spa_pod_builder_long(b, modifier);
            }
        }
        spa_pod_builder_pop(b, &f[1]);
    }
    return (spa_pod *)spa_pod_builder_pop(b, &f[0]);
}

bool ScreenCastStream::includesCursor(Cursor *cursor) const
{
    if (Cursors::self()->isCursorHidden()) {
        return false;
    }
    return m_cursor.viewport.intersects(cursor->geometry());
}

void ScreenCastStream::sendCursorData(Cursor *cursor, spa_meta_cursor *spa_meta_cursor)
{
    if (!cursor || !spa_meta_cursor) {
        return;
    }

    if (!includesCursor(cursor)) {
        spa_meta_cursor->id = 0;
        spa_meta_cursor->position.x = -1;
        spa_meta_cursor->position.y = -1;
        spa_meta_cursor->hotspot.x = -1;
        spa_meta_cursor->hotspot.y = -1;
        spa_meta_cursor->bitmap_offset = 0;
        m_cursor.visible = false;
        return;
    }
    m_cursor.visible = true;
    const auto position = (cursor->pos() - m_cursor.viewport.topLeft()) * m_cursor.scale;

    spa_meta_cursor->id = 1;
    spa_meta_cursor->position.x = position.x();
    spa_meta_cursor->position.y = position.y();
    spa_meta_cursor->hotspot.x = cursor->hotspot().x() * m_cursor.scale;
    spa_meta_cursor->hotspot.y = cursor->hotspot().y() * m_cursor.scale;
    spa_meta_cursor->bitmap_offset = 0;

    if (!m_cursor.invalid) {
        return;
    }

    m_cursor.invalid = false;
    spa_meta_cursor->bitmap_offset = sizeof(struct spa_meta_cursor);

    const QSize targetSize = (cursor->rect().size() * m_cursor.scale).toSize();

    struct spa_meta_bitmap *spa_meta_bitmap = SPA_MEMBER(spa_meta_cursor,
                                                         spa_meta_cursor->bitmap_offset,
                                                         struct spa_meta_bitmap);
    spa_meta_bitmap->format = SPA_VIDEO_FORMAT_RGBA;
    spa_meta_bitmap->offset = sizeof(struct spa_meta_bitmap);
    spa_meta_bitmap->size.width = std::min(m_cursor.bitmapSize.width(), targetSize.width());
    spa_meta_bitmap->size.height = std::min(m_cursor.bitmapSize.height(), targetSize.height());
    spa_meta_bitmap->stride = spa_meta_bitmap->size.width * 4;

    uint8_t *bitmap_data = SPA_MEMBER(spa_meta_bitmap, spa_meta_bitmap->offset, uint8_t);
    QImage dest(bitmap_data,
                spa_meta_bitmap->size.width,
                spa_meta_bitmap->size.height,
                spa_meta_bitmap->stride,
                QImage::Format_RGBA8888_Premultiplied);
    dest.fill(Qt::transparent);

    const QImage image = kwinApp()->cursorImage().image();
    if (!image.isNull()) {
        QPainter painter(&dest);
        painter.drawImage(QRect({0, 0}, targetSize), image);
    }
}

void ScreenCastStream::setCursorMode(ScreencastV1Interface::CursorMode mode, qreal scale, const QRectF &viewport)
{
    m_cursor.mode = mode;
    m_cursor.scale = scale;
    m_cursor.viewport = viewport;
}

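// Probes whether a dmabuf with the given size, format and modifiers can actually be
// allocated by the backend, returning the resulting plane count and fixated modifier.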
std::optional<ScreenCastDmaBufTextureParams> ScreenCastStream::testCreateDmaBuf(const QSize &size, quint32 format, const QList<uint64_t> &modifiers)
{
    AbstractEglBackend *backend = dynamic_cast<AbstractEglBackend *>(Compositor::self()->backend());
    if (!backend) {
        return std::nullopt;
    }

    GraphicsBuffer *buffer = backend->graphicsBufferAllocator()->allocate(GraphicsBufferOptions{
        .size = size,
        .format = format,
        .modifiers = modifiers,
    });
    if (!buffer) {
        return std::nullopt;
    }
    auto drop = qScopeGuard([&buffer]() {
        buffer->drop();
    });

    const DmaBufAttributes *attrs = buffer->dmabufAttributes();
    if (!attrs) {
        return std::nullopt;
    }

    return ScreenCastDmaBufTextureParams{
        .planeCount = attrs->planeCount,
        .width = attrs->width,
        .height = attrs->height,
        .format = attrs->format,
        .modifier = attrs->modifier,
    };
}

std::shared_ptr<ScreenCastDmaBufTexture> ScreenCastStream::createDmaBufTexture(const ScreenCastDmaBufTextureParams &params)
{
    AbstractEglBackend *backend = dynamic_cast<AbstractEglBackend *>(Compositor::self()->backend());
    if (!backend) {
        return nullptr;
    }

    GraphicsBuffer *buffer = backend->graphicsBufferAllocator()->allocate(GraphicsBufferOptions{
        .size = QSize(params.width, params.height),
        .format = params.format,
        .modifiers = {params.modifier},
    });
    if (!buffer) {
        return nullptr;
    }

    const DmaBufAttributes *attrs = buffer->dmabufAttributes();
    if (!attrs) {
        buffer->drop();
        return nullptr;
    }

    backend->makeCurrent();
    return std::make_shared<ScreenCastDmaBufTexture>(backend->importDmaBufAsTexture(*attrs), buffer);
}

} // namespace KWin

#include "moc_screencaststream.cpp"