KWin
All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Modules Pages
screencaststream.cpp
Go to the documentation of this file.
1/*
2 SPDX-FileCopyrightText: 2018-2020 Red Hat Inc
3 SPDX-FileCopyrightText: 2020 Aleix Pol Gonzalez <aleixpol@kde.org>
4 SPDX-FileContributor: Jan Grulich <jgrulich@redhat.com>
5
6 SPDX-License-Identifier: LGPL-2.0-or-later
7*/
8
9#include "screencaststream.h"
10#include "compositor.h"
12#include "core/outputbackend.h"
13#include "core/renderbackend.h"
14#include "cursor.h"
15#include "kwinscreencast_logging.h"
16#include "main.h"
18#include "opengl/gltexture.h"
19#include "opengl/glutils.h"
20#include "pipewirecore.h"
25#include "screencastsource.h"
26
27#include <KLocalizedString>
28
29#include <QLoggingCategory>
30#include <QPainter>
31
32#include <spa/buffer/meta.h>
33
34#include <fcntl.h>
35#include <sys/mman.h>
36#include <unistd.h>
37
38#include <libdrm/drm_fourcc.h>
39
40namespace KWin
41{
42
43static spa_video_format drmFourCCToSpaVideoFormat(quint32 format)
44{
45 switch (format) {
46 case DRM_FORMAT_ARGB8888:
47 return SPA_VIDEO_FORMAT_BGRA;
48 case DRM_FORMAT_XRGB8888:
49 return SPA_VIDEO_FORMAT_BGRx;
50 case DRM_FORMAT_RGBA8888:
51 return SPA_VIDEO_FORMAT_ABGR;
52 case DRM_FORMAT_RGBX8888:
53 return SPA_VIDEO_FORMAT_xBGR;
54 case DRM_FORMAT_ABGR8888:
55 return SPA_VIDEO_FORMAT_RGBA;
56 case DRM_FORMAT_XBGR8888:
57 return SPA_VIDEO_FORMAT_RGBx;
58 case DRM_FORMAT_BGRA8888:
59 return SPA_VIDEO_FORMAT_ARGB;
60 case DRM_FORMAT_BGRX8888:
61 return SPA_VIDEO_FORMAT_xRGB;
62 case DRM_FORMAT_NV12:
63 return SPA_VIDEO_FORMAT_NV12;
64 case DRM_FORMAT_RGB888:
65 return SPA_VIDEO_FORMAT_BGR;
66 case DRM_FORMAT_BGR888:
67 return SPA_VIDEO_FORMAT_RGB;
68 default:
69 qCDebug(KWIN_SCREENCAST) << "unknown format" << format;
70 return SPA_VIDEO_FORMAT_xRGB;
71 }
72}
73
// Reacts to PipeWire stream state transitions reported by the stream listener.
void ScreenCastStream::onStreamStateChanged(pw_stream_state old, pw_stream_state state, const char *error_message)
{
    qCDebug(KWIN_SCREENCAST) << "state changed" << pw_stream_state_as_string(old) << " -> " << pw_stream_state_as_string(state) << error_message;

    // Any state change invalidates an in-flight frame: drop the pending
    // buffer along with its GPU fence and the notifier watching that fence.
    m_streaming = false;
    m_pendingBuffer = nullptr;
    m_pendingNotifier.reset();
    m_pendingFence.reset();

    switch (state) {
    case PW_STREAM_STATE_ERROR:
        qCWarning(KWIN_SCREENCAST) << "Stream error: " << error_message;
        break;
    case PW_STREAM_STATE_PAUSED:
        // The node id first becomes available when the stream reaches PAUSED;
        // announce it once so consumers can attach to the stream.
        if (nodeId() == 0 && m_pwStream) {
            m_pwNodeId = pw_stream_get_node_id(m_pwStream);
            Q_EMIT streamReady(nodeId());
        }
        break;
    case PW_STREAM_STATE_STREAMING:
        m_streaming = true;
        Q_EMIT startStreaming();
        break;
    case PW_STREAM_STATE_CONNECTING:
        break;
    case PW_STREAM_STATE_UNCONNECTED:
        // Only propagate a stop if we didn't initiate the teardown ourselves.
        if (!m_stopped) {
            Q_EMIT stopStreaming();
        }
        break;
    }
}
106
// Bytes per pixel of the cursor bitmap shipped in metadata (RGBA8888).
#define CURSOR_BPP 4
// Total size of the cursor metadata blob: cursor header + bitmap header + pixels.
#define CURSOR_META_SIZE(w, h) (sizeof(struct spa_meta_cursor) + sizeof(struct spa_meta_bitmap) + w * h * CURSOR_BPP)
// Maximum number of damage rectangles announced per frame; with more damage
// than this only the bounding rectangle is sent (see addDamage()).
static const int videoDamageRegionCount = 16;
110
// Announces buffer and metadata parameters that match the negotiated format:
// buffer geometry (dmabuf plane count, or memfd size/stride) plus the cursor,
// video-damage and header metadata regions.
void ScreenCastStream::newStreamParams()
{
    qCDebug(KWIN_SCREENCAST) << "announcing stream params. with dmabuf:" << m_dmabufParams.has_value();
    uint8_t paramsBuffer[1024];
    spa_pod_builder pod_builder = SPA_POD_BUILDER_INIT(paramsBuffer, sizeof(paramsBuffer));
    const int buffertypes = m_dmabufParams ? (1 << SPA_DATA_DmaBuf) : (1 << SPA_DATA_MemFd);
    // 3 bytes per pixel only for the packed RGB/BGR formats, otherwise 4.
    const int bpp = m_videoFormat.format == SPA_VIDEO_FORMAT_RGB || m_videoFormat.format == SPA_VIDEO_FORMAT_BGR ? 3 : 4;
    const int stride = SPA_ROUND_UP_N(m_resolution.width() * bpp, 4);

    struct spa_pod_frame f;
    spa_pod_builder_push_object(&pod_builder, &f, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers);
    spa_pod_builder_add(&pod_builder,
                        SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(16, 2, 16),
                        SPA_PARAM_BUFFERS_dataType, SPA_POD_CHOICE_FLAGS_Int(buffertypes), 0);
    if (!m_dmabufParams) {
        // memfd path: a single plane whose size/stride we compute ourselves.
        spa_pod_builder_add(&pod_builder,
                            SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(1),
                            SPA_PARAM_BUFFERS_size, SPA_POD_Int(stride * m_resolution.height()),
                            SPA_PARAM_BUFFERS_stride, SPA_POD_Int(stride),
                            SPA_PARAM_BUFFERS_align, SPA_POD_Int(16), 0);
    } else {
        // dmabuf path: only announce the plane count; per-plane sizes and
        // strides come from the allocated buffers themselves.
        spa_pod_builder_add(&pod_builder,
                            SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(m_dmabufParams->planeCount), 0);
    }
    spa_pod *bufferPod = (spa_pod *)spa_pod_builder_pop(&pod_builder, &f);

    QVarLengthArray<const spa_pod *> params = {
        bufferPod,
        // Cursor metadata region, sized for the configured bitmap dimensions.
        (spa_pod *)spa_pod_builder_add_object(&pod_builder,
                                              SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
                                              SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Cursor),
                                              SPA_PARAM_META_size, SPA_POD_Int(CURSOR_META_SIZE(m_cursor.bitmapSize.width(), m_cursor.bitmapSize.height()))),
        // Video-damage metadata: between 1 and videoDamageRegionCount regions.
        (spa_pod *)spa_pod_builder_add_object(&pod_builder,
                                              SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
                                              SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoDamage),
                                              SPA_PARAM_META_size, SPA_POD_CHOICE_RANGE_Int(sizeof(struct spa_meta_region) * videoDamageRegionCount, sizeof(struct spa_meta_region) * 1, sizeof(struct spa_meta_region) * videoDamageRegionCount)),
        // Per-frame header metadata (sequence number, timestamps).
        (spa_pod *)spa_pod_builder_add_object(&pod_builder,
                                              SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
                                              SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Header),
                                              SPA_PARAM_META_size, SPA_POD_Int(sizeof(struct spa_meta_header))),
    };

    pw_stream_update_params(m_pwStream, params.data(), params.count());
}
155
// Handles the format chosen by the client, performing dmabuf modifier
// negotiation (possibly over several round-trips) before the format is
// finally fixated and buffers are defined.
void ScreenCastStream::onStreamParamChanged(uint32_t id, const struct spa_pod *format)
{
    if (!format || id != SPA_PARAM_Format) {
        return;
    }

    spa_format_video_raw_parse(format, &m_videoFormat);
    auto modifierProperty = spa_pod_find_prop(format, nullptr, SPA_FORMAT_VIDEO_modifier);
    QList<uint64_t> receivedModifiers;
    if (modifierProperty) {
        const struct spa_pod *modifierPod = &modifierProperty->value;

        uint32_t modifiersCount = SPA_POD_CHOICE_N_VALUES(modifierPod);
        uint64_t *modifiers = (uint64_t *)SPA_POD_CHOICE_VALUES(modifierPod);
        receivedModifiers = QList<uint64_t>(modifiers, modifiers + modifiersCount);
        // Remove duplicates
        std::sort(receivedModifiers.begin(), receivedModifiers.end());
        receivedModifiers.erase(std::unique(receivedModifiers.begin(), receivedModifiers.end()), receivedModifiers.end());

        // Renegotiate only if we haven't settled on dmabuf params yet, or the
        // client's list no longer contains the modifier we settled on.
        if (!m_dmabufParams || !receivedModifiers.contains(m_dmabufParams->modifier)) {
            if (modifierProperty->flags & SPA_POD_PROP_FLAG_DONT_FIXATE) {
                // DRM_MOD_INVALID should be used as a last option. Do not just remove it it's the only
                // item on the list
                if (receivedModifiers.count() > 1) {
                    receivedModifiers.removeAll(DRM_FORMAT_MOD_INVALID);
                }
                m_dmabufParams = testCreateDmaBuf(m_resolution, m_drmFormat, receivedModifiers);
            } else {
                // Already fixated by the client: only the implicit modifier
                // can be attempted.
                m_dmabufParams = testCreateDmaBuf(m_resolution, m_drmFormat, {DRM_FORMAT_MOD_INVALID});
            }

            // In case we fail to use any modifier from the list of offered ones, remove these
            // from our all future offerings, otherwise there will be no indication that it cannot
            // be used and clients can go for it over and over
            if (!m_dmabufParams.has_value()) {
                for (uint64_t modifier : receivedModifiers) {
                    m_modifiers.removeAll(modifier);
                }
                // Also in case DRM_FORMAT_MOD_INVALID was used and didn't fail, we still need to
                // set it as our modifier, otherwise it would be set to default value (0) which is
                // also a valid modifier, but not the one we want to actually use
            } else if (receivedModifiers.count() == 1 && receivedModifiers.constFirst() == DRM_FORMAT_MOD_INVALID) {
                m_dmabufParams->modifier = DRM_FORMAT_MOD_INVALID;
            }

            qCDebug(KWIN_SCREENCAST) << "Stream dmabuf modifiers received, offering our best suited modifier" << m_dmabufParams.has_value();
            char buffer[2048];
            auto params = buildFormats(m_dmabufParams.has_value(), buffer);
            pw_stream_update_params(m_pwStream, params.data(), params.count());
            return;
        }
    } else {
        // No modifier property: the client chose the plain (memfd) format.
        m_dmabufParams.reset();
    }

    qCDebug(KWIN_SCREENCAST) << "Stream format found, defining buffers";
    newStreamParams();
    m_streaming = true;
}
215
// Called by PipeWire when a buffer is added to the pool: back it with either
// a freshly allocated dmabuf (GPU path) or a newly created memfd (CPU path).
void ScreenCastStream::onStreamAddBuffer(pw_buffer *buffer)
{
    struct spa_data *spa_data = buffer->buffer->datas;

    spa_data->mapoffset = 0;
    spa_data->flags = SPA_DATA_FLAG_READWRITE;

    std::shared_ptr<ScreenCastDmaBufTexture> dmabuff;

    // At this point `type` still holds the bitmask of data types the client
    // supports; prefer dmabuf when it is offered.
    if (spa_data[0].type != SPA_ID_INVALID && spa_data[0].type & (1 << SPA_DATA_DmaBuf)) {
        Q_ASSERT(m_dmabufParams);
        dmabuff = createDmaBufTexture(*m_dmabufParams);
    }

    if (dmabuff) {
        const DmaBufAttributes *dmabufAttribs = dmabuff->buffer()->dmabufAttributes();
        spa_data->maxsize = dmabufAttribs->pitch[0] * m_resolution.height();

        // Fill one spa_data entry per dmabuf plane.
        Q_ASSERT(buffer->buffer->n_datas >= uint(dmabufAttribs->planeCount));
        for (int i = 0; i < dmabufAttribs->planeCount; ++i) {
            buffer->buffer->datas[i].type = SPA_DATA_DmaBuf;
            buffer->buffer->datas[i].fd = dmabufAttribs->fd[i].get();
            buffer->buffer->datas[i].data = nullptr;
        }
        // Keep the texture/buffer alive for as long as PipeWire uses this pw_buffer.
        m_dmabufDataForPwBuffer.insert(buffer, dmabuff);
#ifdef F_SEAL_SEAL // Disable memfd on systems that don't have it, like BSD < 12
    } else {
        if (!(spa_data[0].type & (1 << SPA_DATA_MemFd))) {
            qCCritical(KWIN_SCREENCAST) << "memfd: Client doesn't support memfd buffer data type";
            return;
        }

        const int bytesPerPixel = m_source->hasAlphaChannel() ? 4 : 3;
        const int stride = SPA_ROUND_UP_N(m_resolution.width() * bytesPerPixel, 4); // 4-byte row alignment
        spa_data->maxsize = stride * m_resolution.height();
        spa_data->type = SPA_DATA_MemFd;
        spa_data->fd = memfd_create("kwin-screencast-memfd", MFD_CLOEXEC | MFD_ALLOW_SEALING);
        if (spa_data->fd == -1) {
            qCCritical(KWIN_SCREENCAST) << "memfd: Can't create memfd";
            return;
        }
        spa_data->mapoffset = 0;

        if (ftruncate(spa_data->fd, spa_data->maxsize) < 0) {
            qCCritical(KWIN_SCREENCAST) << "memfd: Can't truncate to" << spa_data->maxsize;
            return;
        }

        // Seal the file so neither side can resize it under the other;
        // failure to seal is survivable, hence only a warning.
        unsigned int seals = F_SEAL_GROW | F_SEAL_SHRINK | F_SEAL_SEAL;
        if (fcntl(spa_data->fd, F_ADD_SEALS, seals) == -1) {
            qCWarning(KWIN_SCREENCAST) << "memfd: Failed to add seals";
        }

        spa_data->data = mmap(nullptr,
                              spa_data->maxsize,
                              PROT_READ | PROT_WRITE,
                              MAP_SHARED,
                              spa_data->fd,
                              spa_data->mapoffset);
        if (spa_data->data == MAP_FAILED) {
            qCCritical(KWIN_SCREENCAST) << "memfd: Failed to mmap memory";
        } else {
            qCDebug(KWIN_SCREENCAST) << "memfd: created successfully" << spa_data->data << spa_data->maxsize;
        }
#endif
    }

    m_waitForNewBuffers = false;
}
285
// Releases per-buffer resources when PipeWire retires a buffer from the pool.
void ScreenCastStream::onStreamRemoveBuffer(pw_buffer *buffer)
{
    // Drops the shared_ptr keeping the dmabuf texture alive (if any).
    m_dmabufDataForPwBuffer.remove(buffer);

    struct spa_buffer *spa_buffer = buffer->buffer;
    struct spa_data *spa_data = spa_buffer->datas;
    if (spa_data && spa_data->type == SPA_DATA_MemFd) {
        // We created this memfd in onStreamAddBuffer(); unmap and close it.
        munmap(spa_data->data, spa_data->maxsize);
        close(spa_data->fd);
    } else if (spa_data && spa_data->type == SPA_DATA_DmaBuf) {
        // NOTE(review): these fds were set from DmaBufAttributes::fd[i].get()
        // in onStreamAddBuffer(), which suggests the attributes object owns
        // them — closing them here may double-close once the GraphicsBuffer
        // is dropped; confirm ownership semantics of DmaBufAttributes::fd.
        for (int i = 0, c = buffer->buffer->n_datas; i < c; ++i) {
            close(buffer->buffer->datas[i].fd);
        }
    }
}
301
302void ScreenCastStream::onStreamRenegotiateFormat(uint64_t)
303{
304 m_streaming = false; // pause streaming as we wait for the renegotiation
305 char buffer[2048];
306 auto params = buildFormats(m_dmabufParams.has_value(), buffer);
307 pw_stream_update_params(m_pwStream, params.data(), params.count());
308}
309
// Sets up the stream object: wires source lifetime, the C-style PipeWire
// stream callbacks, and the timer used for frame-rate limiting.
ScreenCastStream::ScreenCastStream(ScreenCastSource *source, std::shared_ptr<PipeWireCore> pwCore, QObject *parent)
    : QObject(parent)
    , m_pwCore(pwCore)
    , m_source(source)
    , m_resolution(source->textureSize())
{
    // Stop streaming as soon as the captured source goes away.
    connect(source, &ScreenCastSource::closed, this, [this] {
        m_streaming = false;
        Q_EMIT stopStreaming();
    });

    // Route the C callbacks back into member functions via the user-data pointer.
    m_pwStreamEvents.version = PW_VERSION_STREAM_EVENTS;
    m_pwStreamEvents.add_buffer = [](void *data, struct pw_buffer *buffer) {
        auto _this = static_cast<ScreenCastStream *>(data);
        _this->onStreamAddBuffer(buffer);
    };
    m_pwStreamEvents.remove_buffer = [](void *data, struct pw_buffer *buffer) {
        auto _this = static_cast<ScreenCastStream *>(data);
        _this->onStreamRemoveBuffer(buffer);
    };
    m_pwStreamEvents.state_changed = [](void *data, pw_stream_state old, pw_stream_state state, const char *error_message) {
        auto _this = static_cast<ScreenCastStream *>(data);
        _this->onStreamStateChanged(old, state, error_message);
    };
    m_pwStreamEvents.param_changed = [](void *data, uint32_t id, const struct spa_pod *param) {
        auto _this = static_cast<ScreenCastStream *>(data);
        _this->onStreamParamChanged(id, param);
    };

    // Timer delivering a rate-limited frame once the minimum frame interval
    // has elapsed (armed in recordFrame()).
    m_pendingFrame.setSingleShot(true);
    connect(&m_pendingFrame, &QTimer::timeout, this, [this] {
        recordFrame(m_pendingDamages);
    });
}
344
346{
347 m_stopped = true;
348 if (m_pwStream) {
349 pw_stream_destroy(m_pwStream);
350 }
351}
352
354{
355 if (!m_pwCore->m_error.isEmpty()) {
356 m_error = m_pwCore->m_error;
357 return false;
358 }
359
360 connect(m_pwCore.get(), &PipeWireCore::pipewireFailed, this, &ScreenCastStream::coreFailed);
361
362 if (!createStream()) {
363 qCWarning(KWIN_SCREENCAST) << "Failed to create PipeWire stream";
364 m_error = i18n("Failed to create PipeWire stream");
365 return false;
366 }
367
368 m_pwRenegotiate = pw_loop_add_event(
369 m_pwCore->pwMainLoop, [](void *data, uint64_t format) {
370 auto _this = static_cast<ScreenCastStream *>(data);
371 _this->onStreamRenegotiateFormat(format);
372 },
373 this);
374
375 return true;
376}
377
379{
380 if (m_pwStream) {
381 return m_videoFormat.max_framerate.num / m_videoFormat.max_framerate.denom;
382 }
383
384 return 0;
385}
386
388{
389 return m_pwNodeId;
390}
391
// Creates and connects the PipeWire stream, choosing the DRM format and the
// modifier set we will offer to clients. Returns false on connection failure.
bool ScreenCastStream::createStream()
{
    const QByteArray objname = "kwin-screencast-" + objectName().toUtf8();
    m_pwStream = pw_stream_new(m_pwCore->pwCore, objname, nullptr);

    const auto supported = Compositor::self()->backend()->supportedFormats();
    auto itModifiers = supported.constFind(m_source->drmFormat());

    // If the offered format is not available for dmabuf, prefer converting to another one than resorting to memfd
    if (itModifiers == supported.constEnd() && !supported.isEmpty()) {
        itModifiers = supported.constFind(DRM_FORMAT_ARGB8888);
        if (itModifiers != supported.constEnd()) {
            m_drmFormat = itModifiers.key();
        }
    }

    if (itModifiers == supported.constEnd()) {
        // No backend-supported format at all: keep the source's own format
        // and fall back to the implicit modifier.
        m_drmFormat = m_source->drmFormat();
        m_modifiers = {DRM_FORMAT_MOD_INVALID};
    } else {
        m_drmFormat = itModifiers.key();
        m_modifiers = *itModifiers;
        // Also support modifier-less DmaBufs
        m_modifiers += DRM_FORMAT_MOD_INVALID;
    }
    // Probe whether dmabuf allocation works at all for this format.
    m_hasDmaBuf = testCreateDmaBuf(m_resolution, m_drmFormat, {DRM_FORMAT_MOD_INVALID}).has_value();

    char buffer[2048];
    QList<const spa_pod *> params = buildFormats(false, buffer);

    pw_stream_add_listener(m_pwStream, &m_streamListener, &m_pwStreamEvents, this);
    auto flags = pw_stream_flags(PW_STREAM_FLAG_DRIVER | PW_STREAM_FLAG_ALLOC_BUFFERS);

    if (pw_stream_connect(m_pwStream, PW_DIRECTION_OUTPUT, SPA_ID_INVALID, flags, params.data(), params.count()) != 0) {
        qCWarning(KWIN_SCREENCAST) << "Could not connect to stream";
        pw_stream_destroy(m_pwStream);
        m_pwStream = nullptr;
        return false;
    }

    if (m_cursor.mode == ScreencastV1Interface::Embedded) {
        // Embedded cursor: re-record a frame whenever the cursor moves so the
        // painted-in cursor stays current.
        connect(Cursors::self(), &Cursors::positionChanged, this, [this] {
            recordFrame({});
        });
    } else if (m_cursor.mode == ScreencastV1Interface::Metadata) {
        // NOTE(review): no cursor hookup is visible in this branch here;
        // metadata-mode cursor updates are presumably wired up elsewhere — confirm.
    }

    return true;
}
// Invoked when the shared PipeWire connection fails: records the error text
// and asks for the stream to be stopped.
void ScreenCastStream::coreFailed(const QString &errorMessage)
{
    m_error = errorMessage;
    Q_EMIT stopStreaming();
}
449
451{
452 m_stopped = true;
453 delete this;
454}
455
// Captures one frame from the source into a dequeued PipeWire buffer, using
// either the memfd (CPU) or dmabuf (GPU) path, attaches cursor/damage/header
// metadata and hands the buffer off for enqueueing.
void ScreenCastStream::recordFrame(const QRegion &_damagedRegion)
{
    QRegion damagedRegion = _damagedRegion;
    Q_ASSERT(!m_stopped);

    // Not streaming yet: accumulate damage for when streaming (re)starts.
    if (!m_streaming) {
        m_pendingDamages += damagedRegion;
        return;
    }

    // Frame-rate limiting: if the previous frame was sent too recently,
    // accumulate damage and arm the single-shot timer to record later.
    if (m_videoFormat.max_framerate.num != 0 && !m_lastSent.isNull()) {
        auto frameInterval = (1000. * m_videoFormat.max_framerate.denom / m_videoFormat.max_framerate.num);
        auto lastSentAgo = m_lastSent.msecsTo(QDateTime::currentDateTimeUtc());
        if (lastSentAgo < frameInterval) {
            m_pendingDamages += damagedRegion;
            if (!m_pendingFrame.isActive()) {
                m_pendingFrame.start(frameInterval - lastSentAgo);
            }
            return;
        }
    }

    m_pendingDamages = {};
    // A frame is still in flight (waiting on its GPU fence); skip this one.
    if (m_pendingBuffer) {
        return;
    }

    if (m_waitForNewBuffers) {
        qCWarning(KWIN_SCREENCAST) << "Waiting for new buffers to be created";
        return;
    }

    // Source resized: trigger renegotiation and wait for fresh buffers.
    const auto size = m_source->textureSize();
    if (size != m_resolution) {
        m_resolution = size;
        m_waitForNewBuffers = true;
        m_dmabufParams = std::nullopt;
        pw_loop_signal_event(m_pwCore->pwMainLoop, m_pwRenegotiate);
        return;
    }

    const char *error = "";
    auto state = pw_stream_get_state(m_pwStream, &error);
    if (state != PW_STREAM_STATE_STREAMING) {
        if (error) {
            qCWarning(KWIN_SCREENCAST) << "Failed to record frame: stream is not active" << error;
        }
        return;
    }

    struct pw_buffer *buffer = pw_stream_dequeue_buffer(m_pwStream);

    if (!buffer) {
        return;
    }

    struct spa_buffer *spa_buffer = buffer->buffer;
    struct spa_data *spa_data = spa_buffer->datas;

    uint8_t *data = (uint8_t *)spa_data->data;
    if (!data && spa_buffer->datas->type != SPA_DATA_DmaBuf) {
        qCWarning(KWIN_SCREENCAST) << "Failed to record frame: invalid buffer data";
        pw_stream_queue_buffer(m_pwStream, buffer);
        return;
    }

    spa_data->chunk->offset = 0;
    spa_data->chunk->flags = SPA_CHUNK_FLAG_NONE;
    static_cast<OpenGLBackend *>(Compositor::self()->backend())->makeCurrent();
    if (data || spa_data[0].type == SPA_DATA_MemFd) {
        // memfd path: render into CPU-visible mapped memory.
        const bool hasAlpha = m_source->hasAlphaChannel();
        const int bpp = data && !hasAlpha ? 3 : 4;
        const uint stride = SPA_ROUND_UP_N(size.width() * bpp, 4);

        if ((stride * size.height()) > spa_data->maxsize) {
            qCDebug(KWIN_SCREENCAST) << "Failed to record frame: frame is too big";
            pw_stream_queue_buffer(m_pwStream, buffer);
            return;
        }

        spa_data->chunk->stride = stride;
        spa_data->chunk->size = stride * size.height();

        m_source->render(spa_data, m_videoFormat.format);

        // Embedded cursor: paint it straight into the frame with QPainter.
        auto cursor = Cursors::self()->currentCursor();
        if (m_cursor.mode == ScreencastV1Interface::Embedded && includesCursor(cursor)) {
            QImage dest(data, size.width(), size.height(), stride, hasAlpha ? QImage::Format_RGBA8888_Premultiplied : QImage::Format_RGB888);
            QPainter painter(&dest);
            const auto position = (cursor->pos() - m_cursor.viewport.topLeft() - cursor->hotspot()) * m_cursor.scale;
            const PlatformCursorImage cursorImage = kwinApp()->cursorImage();
            painter.drawImage(QRect{position.toPoint(), cursorImage.image().size()}, cursorImage.image());
        }
    } else {
        // dmabuf path: render on the GPU into the buffer's framebuffer.
        auto &buf = m_dmabufDataForPwBuffer[buffer];
        Q_ASSERT(buf);

        const DmaBufAttributes *dmabufAttribs = buf->buffer()->dmabufAttributes();
        Q_ASSERT(buffer->buffer->n_datas >= uint(dmabufAttribs->planeCount));
        for (int i = 0; i < dmabufAttribs->planeCount; ++i) {
            buffer->buffer->datas[i].chunk->stride = dmabufAttribs->pitch[i];
            buffer->buffer->datas[i].chunk->offset = dmabufAttribs->offset[i];
        }
        spa_data->chunk->size = spa_data->maxsize;

        m_source->render(buf->framebuffer());

        // Embedded cursor on the GPU path: blend the cursor texture on top.
        auto cursor = Cursors::self()->currentCursor();
        if (m_cursor.mode == ScreencastV1Interface::Embedded && includesCursor(cursor)) {
            // Re-upload the cursor texture if the cursor image changed.
            if (m_cursor.invalid) {
                m_cursor.invalid = false;
                const PlatformCursorImage cursorImage = kwinApp()->cursorImage();
                if (cursorImage.isNull()) {
                    m_cursor.texture = nullptr;
                } else {
                    m_cursor.texture = GLTexture::upload(cursorImage.image());
                }
            }
            if (m_cursor.texture) {
                // NOTE(review): the framebuffer pushed here is not popped
                // anywhere in the visible code — confirm the matching
                // GLFramebuffer::popFramebuffer() (and any shader setup using
                // mvp) hasn't been lost.
                GLFramebuffer::pushFramebuffer(buf->framebuffer());

                const QRectF cursorRect = scaledRect(cursor->geometry().translated(-m_cursor.viewport.topLeft()), m_cursor.scale);
                QMatrix4x4 mvp;
                mvp.scale(1, -1);
                mvp.ortho(QRectF(QPointF(0, 0), size));
                mvp.translate(cursorRect.x(), cursorRect.y());

                glEnable(GL_BLEND);
                glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
                m_cursor.texture->render(cursorRect.size());
                glDisable(GL_BLEND);

                // Damage both the cursor's previous and current positions.
                damagedRegion += QRegion{m_cursor.lastRect.toAlignedRect()} | cursorRect.toAlignedRect();
                m_cursor.lastRect = cursorRect;
            } else {
                damagedRegion += m_cursor.lastRect.toAlignedRect();
                m_cursor.lastRect = {};
            }
        }
    }

    // Metadata cursor mode: ship the cursor description alongside the frame.
    if (m_cursor.mode == ScreencastV1Interface::Metadata) {
        sendCursorData(Cursors::self()->currentCursor(),
                       (spa_meta_cursor *)spa_buffer_find_meta_data(spa_buffer, SPA_META_Cursor, sizeof(spa_meta_cursor)));
    }

    addDamage(spa_buffer, damagedRegion);
    addHeader(spa_buffer);
    tryEnqueue(buffer);
}
612
613void ScreenCastStream::addHeader(spa_buffer *spaBuffer)
614{
615 spa_meta_header *spaHeader = (spa_meta_header *)spa_buffer_find_meta_data(spaBuffer, SPA_META_Header, sizeof(spaHeader));
616 if (spaHeader) {
617 spaHeader->flags = 0;
618 spaHeader->dts_offset = 0;
619 spaHeader->seq = m_sequential++;
620 spaHeader->pts = m_source->clock().count();
621 }
622}
623
// Fills the SPA_META_VideoDamage metadata with the damaged rectangles so
// clients can update only the changed parts of the frame.
void ScreenCastStream::addDamage(spa_buffer *spaBuffer, const QRegion &damagedRegion)
{
    if (spa_meta *vdMeta = spa_buffer_find_meta(spaBuffer, SPA_META_VideoDamage)) {
        struct spa_meta_region *r = (spa_meta_region *)spa_meta_first(vdMeta);

        // If there's too many rectangles, we just send the bounding rect
        if (damagedRegion.rectCount() > videoDamageRegionCount - 1) {
            if (spa_meta_check(r, vdMeta)) {
                auto rect = damagedRegion.boundingRect();
                r->region = SPA_REGION(rect.x(), rect.y(), quint32(rect.width()), quint32(rect.height()));
                r++;
            }
        } else {
            for (const QRect &rect : damagedRegion) {
                if (spa_meta_check(r, vdMeta)) {
                    r->region = SPA_REGION(rect.x(), rect.y(), quint32(rect.width()), quint32(rect.height()));
                    r++;
                }
            }
        }

        // Terminate the list with an empty region if there is room left.
        if (spa_meta_check(r, vdMeta)) {
            r->region = SPA_REGION(0, 0, 0, 0);
        }
    }
}
650
652{
653 m_cursor.invalid = true;
654}
655
657{
658 Q_ASSERT(!m_stopped);
659 if (!m_streaming) {
660 return;
661 }
662
663 if (m_pendingBuffer) {
664 return;
665 }
666
667 const char *error = "";
668 auto state = pw_stream_get_state(m_pwStream, &error);
669 if (state != PW_STREAM_STATE_STREAMING) {
670 if (error) {
671 qCWarning(KWIN_SCREENCAST) << "Failed to record cursor position: stream is not active" << error;
672 }
673 return;
674 }
675
676 if (!includesCursor(Cursors::self()->currentCursor()) && !m_cursor.visible) {
677 return;
678 }
679
680 m_pendingBuffer = pw_stream_dequeue_buffer(m_pwStream);
681 if (!m_pendingBuffer) {
682 return;
683 }
684
685 struct spa_buffer *spa_buffer = m_pendingBuffer->buffer;
686
687 // in pipewire terms, corrupted means "do not look at the frame contents" and here they're empty.
688 spa_buffer->datas[0].chunk->flags = SPA_CHUNK_FLAG_CORRUPTED;
689 spa_buffer->datas[0].chunk->size = 0;
690
691 sendCursorData(Cursors::self()->currentCursor(),
692 (spa_meta_cursor *)spa_buffer_find_meta_data(spa_buffer, SPA_META_Cursor, sizeof(spa_meta_cursor)));
693 addHeader(spa_buffer);
694 addDamage(spa_buffer, {});
695 enqueue();
696}
697
// Queues the rendered buffer to PipeWire, deferring until the GPU has
// actually finished writing into it where possible.
void ScreenCastStream::tryEnqueue(pw_buffer *buffer)
{
    m_pendingBuffer = buffer;

    // The GPU doesn't necessarily process draw commands as soon as they are issued. Thus,
    // we need to insert a fence into the command stream and enqueue the pipewire buffer
    // only after the fence is signaled; otherwise stream consumers will most likely see
    // a corrupted buffer.
    if (Compositor::self()->scene()->supportsNativeFence()) {
        Q_ASSERT_X(eglGetCurrentContext(), "tryEnqueue", "no current context");
        m_pendingFence = std::make_unique<EGLNativeFence>(kwinApp()->outputBackend()->sceneEglDisplayObject());
        if (!m_pendingFence->isValid()) {
            qCWarning(KWIN_SCREENCAST) << "Failed to create a native EGL fence";
            glFinish();
            enqueue();
        } else {
            // Enqueue asynchronously once the fence fd becomes readable.
            m_pendingNotifier = std::make_unique<QSocketNotifier>(m_pendingFence->fileDescriptor().get(), QSocketNotifier::Read);
            connect(m_pendingNotifier.get(), &QSocketNotifier::activated, this, &ScreenCastStream::enqueue);
        }
    } else {
        // The compositing backend doesn't support native fences. We don't have any other choice
        // but stall the graphics pipeline. Otherwise stream consumers may see an incomplete buffer.
        glFinish();
        enqueue();
    }
}
724
// Hands the pending buffer over to PipeWire once its contents are complete
// (fence signaled, or pipeline flushed via glFinish in tryEnqueue()).
void ScreenCastStream::enqueue()
{
    Q_ASSERT_X(m_pendingBuffer, "enqueue", "pending buffer must be valid");

    m_pendingFence.reset();
    m_pendingNotifier.reset();

    // NOTE(review): on this early-return path m_pendingBuffer is left set;
    // onStreamStateChanged() clears it on state transitions — confirm that
    // covers every way m_streaming can become false.
    if (!m_streaming) {
        return;
    }
    pw_stream_queue_buffer(m_pwStream, m_pendingBuffer);

    // Corrupted chunks carry no frame content (see the cursor-only path), so
    // they don't count towards frame-rate limiting.
    if (m_pendingBuffer->buffer->datas[0].chunk->flags != SPA_CHUNK_FLAG_CORRUPTED) {
        m_lastSent = QDateTime::currentDateTimeUtc();
    }

    m_pendingBuffer = nullptr;
}
743
// Builds the list of EnumFormat pods we announce to the client:
//  - optionally a format fixated to the already-chosen dmabuf modifier,
//  - a dmabuf-capable format offering our whole modifier list,
//  - a plain (modifier-less) format as the memfd fallback.
// The pods are allocated inside the caller-provided 2 KiB buffer.
QList<const spa_pod *> ScreenCastStream::buildFormats(bool fixate, char buffer[2048])
{
    const auto format = drmFourCCToSpaVideoFormat(m_drmFormat);
    spa_pod_builder podBuilder = SPA_POD_BUILDER_INIT(buffer, 2048);
    spa_fraction defFramerate = SPA_FRACTION(0, 1);
    spa_fraction minFramerate = SPA_FRACTION(1, 1);
    spa_fraction maxFramerate = SPA_FRACTION(m_source->refreshRate() / 1000, 1);

    spa_rectangle resolution = SPA_RECTANGLE(uint32_t(m_resolution.width()), uint32_t(m_resolution.height()));

    QList<const spa_pod *> params;
    params.reserve(fixate + m_hasDmaBuf + 1); // the bools contribute 0 or 1 each
    if (fixate) {
        params.append(buildFormat(&podBuilder, SPA_VIDEO_FORMAT_BGRA, &resolution, &defFramerate, &minFramerate, &maxFramerate, {m_dmabufParams->modifier}, SPA_POD_PROP_FLAG_MANDATORY));
    }
    if (m_hasDmaBuf) {
        params.append(buildFormat(&podBuilder, SPA_VIDEO_FORMAT_BGRA, &resolution, &defFramerate, &minFramerate, &maxFramerate, m_modifiers, SPA_POD_PROP_FLAG_MANDATORY | SPA_POD_PROP_FLAG_DONT_FIXATE));
    }
    params.append(buildFormat(&podBuilder, format, &resolution, &defFramerate, &minFramerate, &maxFramerate, {}, 0));
    return params;
}
765
// Assembles a single SPA EnumFormat pod describing a raw video format with
// the given resolution, framerate range and (optionally) dmabuf modifiers.
spa_pod *ScreenCastStream::buildFormat(struct spa_pod_builder *b, enum spa_video_format format, struct spa_rectangle *resolution,
                                       struct spa_fraction *defaultFramerate, struct spa_fraction *minFramerate, struct spa_fraction *maxFramerate,
                                       const QList<uint64_t> &modifiers, quint32 modifiersFlags)
{
    struct spa_pod_frame f[2];
    spa_pod_builder_push_object(b, &f[0], SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat);
    spa_pod_builder_add(b, SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video), 0);
    spa_pod_builder_add(b, SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw), 0);
    spa_pod_builder_add(b, SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(resolution), 0);
    spa_pod_builder_add(b, SPA_FORMAT_VIDEO_framerate, SPA_POD_Fraction(defaultFramerate), 0);
    spa_pod_builder_add(b, SPA_FORMAT_VIDEO_maxFramerate,
                        SPA_POD_CHOICE_RANGE_Fraction(
                            SPA_POD_Fraction(maxFramerate),
                            SPA_POD_Fraction(minFramerate),
                            SPA_POD_Fraction(maxFramerate)),
                        0);

    if (format == SPA_VIDEO_FORMAT_BGRA) {
        /* announce equivalent format without alpha */
        spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_CHOICE_ENUM_Id(3, format, format, SPA_VIDEO_FORMAT_BGRx), 0);
    } else if (format == SPA_VIDEO_FORMAT_RGBA) {
        /* announce equivalent format without alpha */
        spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_CHOICE_ENUM_Id(3, format, format, SPA_VIDEO_FORMAT_RGBx), 0);
    } else {
        spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_Id(format), 0);
    }

    if (!modifiers.isEmpty()) {
        spa_pod_builder_prop(b, SPA_FORMAT_VIDEO_modifier, modifiersFlags);
        spa_pod_builder_push_choice(b, &f[1], SPA_CHOICE_Enum, 0);

        // An SPA enum choice lists the default value first, followed by all
        // alternatives — hence the first modifier is emitted twice.
        int c = 0;
        for (auto modifier : modifiers) {
            spa_pod_builder_long(b, modifier);
            if (c++ == 0) {
                spa_pod_builder_long(b, modifier);
            }
        }
        spa_pod_builder_pop(b, &f[1]);
    }
    return (spa_pod *)spa_pod_builder_pop(b, &f[0]);
}
808
810{
811 if (Cursors::self()->isCursorHidden()) {
812 return false;
813 }
814 return m_cursor.viewport.intersects(cursor->geometry());
815}
816
// Fills the SPA cursor metadata: position/hotspot always, and the bitmap
// only when the cursor image changed since the last upload (m_cursor.invalid).
void ScreenCastStream::sendCursorData(Cursor *cursor, spa_meta_cursor *spa_meta_cursor)
{
    if (!cursor || !spa_meta_cursor) {
        return;
    }

    if (!includesCursor(cursor)) {
        // Cursor is outside the captured viewport (or hidden): report an
        // invalid cursor (id 0) with out-of-range coordinates.
        spa_meta_cursor->id = 0;
        spa_meta_cursor->position.x = -1;
        spa_meta_cursor->position.y = -1;
        spa_meta_cursor->hotspot.x = -1;
        spa_meta_cursor->hotspot.y = -1;
        spa_meta_cursor->bitmap_offset = 0;
        m_cursor.visible = false;
        return;
    }
    m_cursor.visible = true;
    // Map the cursor position into scaled stream coordinates.
    const auto position = (cursor->pos() - m_cursor.viewport.topLeft()) * m_cursor.scale;

    spa_meta_cursor->id = 1;
    spa_meta_cursor->position.x = position.x();
    spa_meta_cursor->position.y = position.y();
    spa_meta_cursor->hotspot.x = cursor->hotspot().x() * m_cursor.scale;
    spa_meta_cursor->hotspot.y = cursor->hotspot().y() * m_cursor.scale;
    spa_meta_cursor->bitmap_offset = 0;

    // The bitmap is only re-sent when the cursor image was invalidated.
    if (!m_cursor.invalid) {
        return;
    }

    m_cursor.invalid = false;
    spa_meta_cursor->bitmap_offset = sizeof(struct spa_meta_cursor);

    const QSize targetSize = (cursor->rect().size() * m_cursor.scale).toSize();

    struct spa_meta_bitmap *spa_meta_bitmap = SPA_MEMBER(spa_meta_cursor,
                                                         spa_meta_cursor->bitmap_offset,
                                                         struct spa_meta_bitmap);
    spa_meta_bitmap->format = SPA_VIDEO_FORMAT_RGBA;
    spa_meta_bitmap->offset = sizeof(struct spa_meta_bitmap);
    // Clamp to the negotiated metadata bitmap size (see CURSOR_META_SIZE).
    spa_meta_bitmap->size.width = std::min(m_cursor.bitmapSize.width(), targetSize.width());
    spa_meta_bitmap->size.height = std::min(m_cursor.bitmapSize.height(), targetSize.height());
    spa_meta_bitmap->stride = spa_meta_bitmap->size.width * 4;

    // Paint the scaled cursor image into the metadata region in place.
    uint8_t *bitmap_data = SPA_MEMBER(spa_meta_bitmap, spa_meta_bitmap->offset, uint8_t);
    QImage dest(bitmap_data,
                spa_meta_bitmap->size.width,
                spa_meta_bitmap->size.height,
                spa_meta_bitmap->stride,
                QImage::Format_RGBA8888_Premultiplied);
    dest.fill(Qt::transparent);

    const QImage image = kwinApp()->cursorImage().image();
    if (!image.isNull()) {
        QPainter painter(&dest);
        painter.drawImage(QRect({0, 0}, targetSize), image);
    }
}
875
876void ScreenCastStream::setCursorMode(ScreencastV1Interface::CursorMode mode, qreal scale, const QRectF &viewport)
877{
878 m_cursor.mode = mode;
879 m_cursor.scale = scale;
880 m_cursor.viewport = viewport;
881}
882
// Probes whether a dmabuf with the given format/modifiers can be allocated,
// returning the parameters of the successful allocation. The probe buffer is
// dropped again before returning.
std::optional<ScreenCastDmaBufTextureParams> ScreenCastStream::testCreateDmaBuf(const QSize &size, quint32 format, const QList<uint64_t> &modifiers)
{
    // dmabuf export requires an EGL-capable backend.
    AbstractEglBackend *backend = dynamic_cast<AbstractEglBackend *>(Compositor::self()->backend());
    if (!backend) {
        return std::nullopt;
    }

    GraphicsBuffer *buffer = backend->graphicsBufferAllocator()->allocate(GraphicsBufferOptions{
        .size = size,
        .format = format,
        .modifiers = modifiers,
    });
    if (!buffer) {
        return std::nullopt;
    }
    // Release the probe allocation on every exit path below.
    auto drop = qScopeGuard([&buffer]() {
        buffer->drop();
    });

    const DmaBufAttributes *attrs = buffer->dmabufAttributes();
    if (!attrs) {
        return std::nullopt;
    }

    // Report the attributes the allocator actually picked (notably the
    // modifier, which may differ from the ones we asked for).
    return ScreenCastDmaBufTextureParams{
        .planeCount = attrs->planeCount,
        .width = attrs->width,
        .height = attrs->height,
        .format = attrs->format,
        .modifier = attrs->modifier,
    };
}
915
// Allocates a dmabuf-backed graphics buffer with the given parameters and
// imports it as a GL texture for the GPU capture path. Returns nullptr if the
// backend cannot do EGL or allocation fails.
std::shared_ptr<ScreenCastDmaBufTexture> ScreenCastStream::createDmaBufTexture(const ScreenCastDmaBufTextureParams &params)
{
    AbstractEglBackend *backend = dynamic_cast<AbstractEglBackend *>(Compositor::self()->backend());
    if (!backend) {
        return nullptr;
    }

    GraphicsBuffer *buffer = backend->graphicsBufferAllocator()->allocate(GraphicsBufferOptions{
        .size = QSize(params.width, params.height),
        .format = params.format,
        .modifiers = {params.modifier},
    });
    if (!buffer) {
        return nullptr;
    }

    const DmaBufAttributes *attrs = buffer->dmabufAttributes();
    if (!attrs) {
        // No dmabuf attributes: the buffer is unusable for export; drop it.
        buffer->drop();
        return nullptr;
    }

    // Importing requires a current GL context.
    backend->makeCurrent();
    return std::make_shared<ScreenCastDmaBufTexture>(backend->importDmaBufAsTexture(*attrs), buffer);
}
941
942} // namespace KWin
943
944#include "moc_screencaststream.cpp"
RenderBackend * backend() const
Definition compositor.h:68
static Compositor * self()
Replacement for QCursor.
Definition cursor.h:102
QPointF pos()
Definition cursor.cpp:204
QRectF rect() const
Definition cursor.cpp:195
QPointF hotspot() const
Definition cursor.cpp:182
QRectF geometry() const
Definition cursor.cpp:190
static Cursors * self()
Definition cursor.cpp:35
void positionChanged(Cursor *cursor, const QPointF &position)
void currentCursorChanged(Cursor *cursor)
Cursor * currentCursor() const
Definition cursor.h:285
static GLFramebuffer * popFramebuffer()
static void pushFramebuffer(GLFramebuffer *fbo)
static std::unique_ptr< GLTexture > upload(const QImage &image)
virtual GraphicsBuffer * allocate(const GraphicsBufferOptions &options)=0
virtual const DmaBufAttributes * dmabufAttributes() const
The OpenGLBackend creates and holds the OpenGL context and is responsible for Texture from Pixmap.
void pipewireFailed(const QString &message)
QImage image() const
Definition globals.h:217
virtual GraphicsBufferAllocator * graphicsBufferAllocator() const
virtual QHash< uint32_t, QList< uint64_t > > supportedFormats() const
void streamReady(quint32 nodeId)
void setCursorMode(ScreencastV1Interface::CursorMode mode, qreal scale, const QRectF &viewport)
ScreencastV1Interface::CursorMode mode
ScreenCastStream(ScreenCastSource *source, std::shared_ptr< PipeWireCore > pwCore, QObject *parent)
bool includesCursor(Cursor *cursor) const
void recordFrame(const QRegion &damagedRegion)
static ShaderManager * instance()
GLShader * pushShader(ShaderTraits traits)
Session::Type type
Definition session.cpp:17
GLenum format
Definition gltexture.cpp:49
KWIN_EXPORT QRectF scaledRect(const QRectF &rect, qreal scale)
Definition globals.h:243
#define CURSOR_META_SIZE(w, h)
std::array< uint32_t, 4 > offset
std::array< uint32_t, 4 > pitch