10#include "compositor.h"
15#include "kwinscreencast_logging.h"
27#include <KLocalizedString>
29#include <QLoggingCategory>
32#include <spa/buffer/meta.h>
38#include <libdrm/drm_fourcc.h>
43static spa_video_format drmFourCCToSpaVideoFormat(quint32
format)
46 case DRM_FORMAT_ARGB8888:
47 return SPA_VIDEO_FORMAT_BGRA;
48 case DRM_FORMAT_XRGB8888:
49 return SPA_VIDEO_FORMAT_BGRx;
50 case DRM_FORMAT_RGBA8888:
51 return SPA_VIDEO_FORMAT_ABGR;
52 case DRM_FORMAT_RGBX8888:
53 return SPA_VIDEO_FORMAT_xBGR;
54 case DRM_FORMAT_ABGR8888:
55 return SPA_VIDEO_FORMAT_RGBA;
56 case DRM_FORMAT_XBGR8888:
57 return SPA_VIDEO_FORMAT_RGBx;
58 case DRM_FORMAT_BGRA8888:
59 return SPA_VIDEO_FORMAT_ARGB;
60 case DRM_FORMAT_BGRX8888:
61 return SPA_VIDEO_FORMAT_xRGB;
63 return SPA_VIDEO_FORMAT_NV12;
64 case DRM_FORMAT_RGB888:
65 return SPA_VIDEO_FORMAT_BGR;
66 case DRM_FORMAT_BGR888:
67 return SPA_VIDEO_FORMAT_RGB;
69 qCDebug(KWIN_SCREENCAST) <<
"unknown format" <<
format;
70 return SPA_VIDEO_FORMAT_xRGB;
// PipeWire stream state-change callback: logs the transition, drops any
// in-flight pending buffer / fence / socket notifier (they refer to the old
// stream state), then reacts per new state.
// NOTE(review): extraction fragment — the switch header, `break`s, and parts
// of the PAUSED/STREAMING/CONNECTING branch bodies are elided here.
74void ScreenCastStream::onStreamStateChanged(pw_stream_state old, pw_stream_state state,
const char *error_message)
76 qCDebug(KWIN_SCREENCAST) <<
"state changed" << pw_stream_state_as_string(old) <<
" -> " << pw_stream_state_as_string(state) << error_message;
// Any frame that was waiting on an EGL fence is abandoned on state change.
79 m_pendingBuffer =
nullptr;
80 m_pendingNotifier.reset();
81 m_pendingFence.reset();
84 case PW_STREAM_STATE_ERROR:
85 qCWarning(KWIN_SCREENCAST) <<
"Stream error: " << error_message;
// PAUSED: the node id first becomes known here — cache it from the stream.
87 case PW_STREAM_STATE_PAUSED:
88 if (
nodeId() == 0 && m_pwStream) {
89 m_pwNodeId = pw_stream_get_node_id(m_pwStream);
93 case PW_STREAM_STATE_STREAMING:
97 case PW_STREAM_STATE_CONNECTING:
99 case PW_STREAM_STATE_UNCONNECTED:
// Size in bytes of the SPA cursor metadata blob: the spa_meta_cursor header,
// the spa_meta_bitmap header, and a w x h bitmap at CURSOR_BPP bytes/pixel.
// Fix: parenthesize the macro arguments so an expansion such as
// CURSOR_META_SIZE(a + b, h) multiplies the full expressions, not just the
// last operand.
#define CURSOR_META_SIZE(w, h) (sizeof(struct spa_meta_cursor) + sizeof(struct spa_meta_bitmap) + (w) * (h) * CURSOR_BPP)
// Maximum number of damage rectangles advertised via SPA_META_VideoDamage.
static const int videoDamageRegionCount = 16;
// Announces buffer parameters to the PipeWire client: buffer count (2..16,
// preferred 16), data type (dma-buf when negotiated, memfd otherwise), and —
// for the memfd path — size/stride/alignment. Also advertises the metadata
// we can fill per frame: cursor, video damage, and header.
// NOTE(review): extraction fragment — braces, the else of the dmabuf branch,
// and the params-array delimiters are elided here.
111void ScreenCastStream::newStreamParams()
113 qCDebug(KWIN_SCREENCAST) <<
"announcing stream params. with dmabuf:" << m_dmabufParams.has_value();
114 uint8_t paramsBuffer[1024];
115 spa_pod_builder pod_builder = SPA_POD_BUILDER_INIT(paramsBuffer,
sizeof(paramsBuffer));
116 const int buffertypes = m_dmabufParams ? (1 << SPA_DATA_DmaBuf) : (1 << SPA_DATA_MemFd);
// 3 bytes/pixel only for the packed RGB/BGR software formats, otherwise 4.
117 const int bpp = m_videoFormat.format == SPA_VIDEO_FORMAT_RGB || m_videoFormat.format == SPA_VIDEO_FORMAT_BGR ? 3 : 4;
118 const int stride = SPA_ROUND_UP_N(m_resolution.width() * bpp, 4);
120 struct spa_pod_frame f;
121 spa_pod_builder_push_object(&pod_builder, &f, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers);
122 spa_pod_builder_add(&pod_builder,
123 SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(16, 2, 16),
124 SPA_PARAM_BUFFERS_dataType, SPA_POD_CHOICE_FLAGS_Int(buffertypes), 0);
125 if (!m_dmabufParams) {
126 spa_pod_builder_add(&pod_builder,
127 SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(1),
128 SPA_PARAM_BUFFERS_size, SPA_POD_Int(stride * m_resolution.height()),
129 SPA_PARAM_BUFFERS_stride, SPA_POD_Int(stride),
130 SPA_PARAM_BUFFERS_align, SPA_POD_Int(16), 0);
// dma-buf: one block per plane; sizes/strides come from the allocation.
132 spa_pod_builder_add(&pod_builder,
133 SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(m_dmabufParams->planeCount), 0);
135 spa_pod *bufferPod = (spa_pod *)spa_pod_builder_pop(&pod_builder, &f);
137 QVarLengthArray<const spa_pod *> params = {
139 (spa_pod *)spa_pod_builder_add_object(&pod_builder,
140 SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
141 SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Cursor),
142 SPA_PARAM_META_size, SPA_POD_Int(
CURSOR_META_SIZE(m_cursor.bitmapSize.width(), m_cursor.bitmapSize.height()))),
143 (spa_pod *)spa_pod_builder_add_object(&pod_builder,
144 SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
145 SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoDamage),
146 SPA_PARAM_META_size, SPA_POD_CHOICE_RANGE_Int(
sizeof(
struct spa_meta_region) * videoDamageRegionCount,
sizeof(
struct spa_meta_region) * 1,
sizeof(
struct spa_meta_region) * videoDamageRegionCount)),
147 (spa_pod *)spa_pod_builder_add_object(&pod_builder,
148 SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
149 SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Header),
150 SPA_PARAM_META_size, SPA_POD_Int(
sizeof(
struct spa_meta_header))),
153 pw_stream_update_params(m_pwStream, params.data(), params.count());
// Format-negotiation callback: parses the video format the client proposed
// and, when dma-buf modifiers are offered, test-allocates a buffer to pick a
// modifier we can actually render to, then re-announces params until both
// sides converge (DONT_FIXATE means we may still narrow the choice).
// NOTE(review): extraction fragment — several guards, else branches, and the
// local `buffer` declaration used at the tail are elided here.
156void ScreenCastStream::onStreamParamChanged(uint32_t
id,
const struct spa_pod *
format)
158 if (!
format ||
id != SPA_PARAM_Format) {
162 spa_format_video_raw_parse(
format, &m_videoFormat);
163 auto modifierProperty = spa_pod_find_prop(
format,
nullptr, SPA_FORMAT_VIDEO_modifier);
164 QList<uint64_t> receivedModifiers;
165 if (modifierProperty) {
166 const struct spa_pod *modifierPod = &modifierProperty->value;
168 uint32_t modifiersCount = SPA_POD_CHOICE_N_VALUES(modifierPod);
169 uint64_t *modifiers = (uint64_t *)SPA_POD_CHOICE_VALUES(modifierPod);
170 receivedModifiers = QList<uint64_t>(modifiers, modifiers + modifiersCount);
// De-duplicate: the client may repeat the preferred modifier as the
// choice default.
172 std::sort(receivedModifiers.begin(), receivedModifiers.end());
173 receivedModifiers.erase(std::unique(receivedModifiers.begin(), receivedModifiers.end()), receivedModifiers.end());
175 if (!m_dmabufParams || !receivedModifiers.contains(m_dmabufParams->modifier)) {
176 if (modifierProperty->flags & SPA_POD_PROP_FLAG_DONT_FIXATE) {
// Prefer an explicit modifier over the INVALID sentinel when there
// is a real choice.
179 if (receivedModifiers.count() > 1) {
180 receivedModifiers.removeAll(DRM_FORMAT_MOD_INVALID);
182 m_dmabufParams = testCreateDmaBuf(m_resolution, m_drmFormat, receivedModifiers);
184 m_dmabufParams = testCreateDmaBuf(m_resolution, m_drmFormat, {DRM_FORMAT_MOD_INVALID});
// Allocation failed: stop offering the modifiers we cannot honour.
190 if (!m_dmabufParams.has_value()) {
191 for (uint64_t modifier : receivedModifiers) {
192 m_modifiers.removeAll(modifier);
197 }
else if (receivedModifiers.count() == 1 && receivedModifiers.constFirst() == DRM_FORMAT_MOD_INVALID) {
198 m_dmabufParams->modifier = DRM_FORMAT_MOD_INVALID;
201 qCDebug(KWIN_SCREENCAST) <<
"Stream dmabuf modifiers received, offering our best suited modifier" << m_dmabufParams.has_value();
203 auto params = buildFormats(m_dmabufParams.has_value(), buffer);
204 pw_stream_update_params(m_pwStream, params.data(), params.count());
208 m_dmabufParams.reset();
211 qCDebug(KWIN_SCREENCAST) <<
"Stream format found, defining buffers";
// Buffer-allocation callback: backs a freshly announced pw_buffer either with
// a dma-buf texture (zero-copy GPU path, one fd per plane) or with a sealed,
// mmap()ed memfd of stride*height bytes (software copy path).
// NOTE(review): extraction fragment — error-path returns and some of the mmap
// arguments (length, flags, fd) are elided here.
216void ScreenCastStream::onStreamAddBuffer(pw_buffer *buffer)
218 struct spa_data *spa_data = buffer->buffer->datas;
220 spa_data->mapoffset = 0;
221 spa_data->flags = SPA_DATA_FLAG_READWRITE;
223 std::shared_ptr<ScreenCastDmaBufTexture> dmabuff;
// Client advertised dma-buf support and negotiation produced dmabuf params.
225 if (spa_data[0].
type != SPA_ID_INVALID && spa_data[0].
type & (1 << SPA_DATA_DmaBuf)) {
226 Q_ASSERT(m_dmabufParams);
227 dmabuff = createDmaBufTexture(*m_dmabufParams);
231 const DmaBufAttributes *dmabufAttribs = dmabuff->buffer()->dmabufAttributes();
232 spa_data->maxsize = dmabufAttribs->pitch[0] * m_resolution.height();
234 Q_ASSERT(buffer->buffer->n_datas >= uint(dmabufAttribs->planeCount));
235 for (
int i = 0; i < dmabufAttribs->planeCount; ++i) {
236 buffer->buffer->datas[i].type = SPA_DATA_DmaBuf;
237 buffer->buffer->datas[i].fd = dmabufAttribs->fd[i].get();
238 buffer->buffer->datas[i].data =
nullptr;
// Keep the texture alive for as long as PipeWire holds this buffer.
240 m_dmabufDataForPwBuffer.insert(buffer, dmabuff);
243 if (!(spa_data[0].
type & (1 << SPA_DATA_MemFd))) {
244 qCCritical(KWIN_SCREENCAST) <<
"memfd: Client doesn't support memfd buffer data type";
248 const int bytesPerPixel = m_source->hasAlphaChannel() ? 4 : 3;
249 const int stride = SPA_ROUND_UP_N(m_resolution.width() * bytesPerPixel, 4);
250 spa_data->maxsize = stride * m_resolution.height();
251 spa_data->type = SPA_DATA_MemFd;
252 spa_data->fd = memfd_create(
"kwin-screencast-memfd", MFD_CLOEXEC | MFD_ALLOW_SEALING);
253 if (spa_data->fd == -1) {
254 qCCritical(KWIN_SCREENCAST) <<
"memfd: Can't create memfd";
257 spa_data->mapoffset = 0;
259 if (ftruncate(spa_data->fd, spa_data->maxsize) < 0) {
260 qCCritical(KWIN_SCREENCAST) <<
"memfd: Can't truncate to" << spa_data->maxsize;
// Seal size so the consumer cannot resize the mapping under us.
264 unsigned int seals = F_SEAL_GROW | F_SEAL_SHRINK | F_SEAL_SEAL;
265 if (fcntl(spa_data->fd, F_ADD_SEALS, seals) == -1) {
266 qCWarning(KWIN_SCREENCAST) <<
"memfd: Failed to add seals";
269 spa_data->data = mmap(
nullptr,
271 PROT_READ | PROT_WRITE,
274 spa_data->mapoffset);
275 if (spa_data->data == MAP_FAILED) {
276 qCCritical(KWIN_SCREENCAST) <<
"memfd: Failed to mmap memory";
278 qCDebug(KWIN_SCREENCAST) <<
"memfd: created successfully" << spa_data->data << spa_data->maxsize;
// New buffers exist again — recordFrame() may resume.
283 m_waitForNewBuffers =
false;
286void ScreenCastStream::onStreamRemoveBuffer(pw_buffer *buffer)
288 m_dmabufDataForPwBuffer.remove(buffer);
290 struct spa_buffer *spa_buffer = buffer->buffer;
291 struct spa_data *spa_data = spa_buffer->datas;
292 if (spa_data && spa_data->type == SPA_DATA_MemFd) {
293 munmap(spa_data->data, spa_data->maxsize);
295 }
else if (spa_data && spa_data->type == SPA_DATA_DmaBuf) {
296 for (
int i = 0, c = buffer->buffer->n_datas; i < c; ++i) {
297 close(buffer->buffer->datas[i].fd);
302void ScreenCastStream::onStreamRenegotiateFormat(uint64_t)
306 auto params = buildFormats(m_dmabufParams.has_value(), buffer);
307 pw_stream_update_params(m_pwStream, params.data(), params.count());
// NOTE(review): extraction fragment spanning the ScreenCastStream
// constructor (initializer list + pw_stream_events wiring + pending-frame
// timer + error propagation + renegotiate event registration), the
// destructor, and the framerate() accessor. Signatures, lambda closers, and
// several statements are elided; comments are best-effort.
314 , m_resolution(source->textureSize())
// Route the C-style PipeWire callbacks back into member functions via the
// `data` user pointer.
321 m_pwStreamEvents.version = PW_VERSION_STREAM_EVENTS;
322 m_pwStreamEvents.add_buffer = [](
void *data,
struct pw_buffer *buffer) {
324 _this->onStreamAddBuffer(buffer);
326 m_pwStreamEvents.remove_buffer = [](
void *data,
struct pw_buffer *buffer) {
328 _this->onStreamRemoveBuffer(buffer);
330 m_pwStreamEvents.state_changed = [](
void *data, pw_stream_state old, pw_stream_state state,
const char *error_message) {
332 _this->onStreamStateChanged(old, state, error_message);
334 m_pwStreamEvents.param_changed = [](
void *data, uint32_t id,
const struct spa_pod *param) {
336 _this->onStreamParamChanged(
id, param);
// Single-shot timer used by recordFrame() to defer over-rate frames.
339 m_pendingFrame.setSingleShot(
true);
340 connect(&m_pendingFrame, &QTimer::timeout,
this, [
this] {
// Destructor path: tear down the PipeWire stream.
349 pw_stream_destroy(m_pwStream);
// Propagate any error the shared PipeWire core already hit.
355 if (!m_pwCore->m_error.isEmpty()) {
356 m_error = m_pwCore->m_error;
362 if (!createStream()) {
363 qCWarning(KWIN_SCREENCAST) <<
"Failed to create PipeWire stream";
364 m_error = i18n(
"Failed to create PipeWire stream");
// Loop event used to trigger format renegotiation from recordFrame().
368 m_pwRenegotiate = pw_loop_add_event(
369 m_pwCore->pwMainLoop, [](
void *data, uint64_t
format) {
370 auto _this = static_cast<ScreenCastStream *>(data);
371 _this->onStreamRenegotiateFormat(format);
// framerate(): integer part of the negotiated max framerate.
381 return m_videoFormat.max_framerate.num / m_videoFormat.max_framerate.denom;
// Creates and connects the PipeWire output stream: picks a DRM format the
// EGL backend supports (preferring the source's own format, falling back to
// ARGB8888, finally to the source format with the INVALID modifier),
// probes dma-buf support, builds the initial (non-fixated) format offers and
// connects as a driver with self-allocated buffers.
// NOTE(review): extraction fragment — the `supported` lookup source, else
// branches, the `buffer` scratch declaration, and the success return are
// elided here.
392bool ScreenCastStream::createStream()
394 const QByteArray objname =
"kwin-screencast-" + objectName().toUtf8();
395 m_pwStream = pw_stream_new(m_pwCore->pwCore, objname,
nullptr);
398 auto itModifiers = supported.constFind(m_source->drmFormat());
// Source format unsupported: fall back to ARGB8888 if the backend has it.
401 if (itModifiers == supported.constEnd() && !supported.isEmpty()) {
402 itModifiers = supported.constFind(DRM_FORMAT_ARGB8888);
403 if (itModifiers != supported.constEnd()) {
404 m_drmFormat = itModifiers.key();
408 if (itModifiers == supported.constEnd()) {
409 m_drmFormat = m_source->drmFormat();
410 m_modifiers = {DRM_FORMAT_MOD_INVALID};
412 m_drmFormat = itModifiers.key();
413 m_modifiers = *itModifiers;
// Always offer the implicit-modifier fallback as well.
415 m_modifiers += DRM_FORMAT_MOD_INVALID;
417 m_hasDmaBuf = testCreateDmaBuf(m_resolution, m_drmFormat, {DRM_FORMAT_MOD_INVALID}).has_value();
420 QList<const spa_pod *> params = buildFormats(
false, buffer);
422 pw_stream_add_listener(m_pwStream, &m_streamListener, &m_pwStreamEvents,
this);
423 auto flags = pw_stream_flags(PW_STREAM_FLAG_DRIVER | PW_STREAM_FLAG_ALLOC_BUFFERS);
425 if (pw_stream_connect(m_pwStream, PW_DIRECTION_OUTPUT, SPA_ID_INVALID, flags, params.data(), params.count()) != 0) {
426 qCWarning(KWIN_SCREENCAST) <<
"Could not connect to stream";
427 pw_stream_destroy(m_pwStream);
428 m_pwStream =
nullptr;
// Slot invoked when the shared PipeWire core reports a failure: records the
// message so the error is observable by the owner of this stream.
// NOTE(review): extraction fragment — the tail of the function is elided.
444void ScreenCastStream::coreFailed(
const QString &errorMessage)
446 m_error = errorMessage;
// recordFrame(): captures one frame into a dequeued pw_buffer.
// Flow: rate-limit against the negotiated max framerate (deferring via the
// single-shot m_pendingFrame timer), bail while a previous buffer is still
// pending or new buffers are awaited, trigger renegotiation on resolution
// change, then render either through the memfd copy path (with QPainter
// cursor embedding) or the dma-buf framebuffer path (with GL cursor overlay),
// and finally attach cursor/damage/header metadata.
// NOTE(review): extraction fragment — the signature, many guards, returns,
// and the tryEnqueue/sendCursorData call sites are partially elided.
458 QRegion damagedRegion = _damagedRegion;
459 Q_ASSERT(!m_stopped);
462 m_pendingDamages += damagedRegion;
// Frame pacing: if we are ahead of the client's max framerate, accumulate
// damage and re-fire later.
466 if (m_videoFormat.max_framerate.num != 0 && !m_lastSent.isNull()) {
467 auto frameInterval = (1000. * m_videoFormat.max_framerate.denom / m_videoFormat.max_framerate.num);
468 auto lastSentAgo = m_lastSent.msecsTo(QDateTime::currentDateTimeUtc());
469 if (lastSentAgo < frameInterval) {
470 m_pendingDamages += damagedRegion;
471 if (!m_pendingFrame.isActive()) {
472 m_pendingFrame.start(frameInterval - lastSentAgo);
478 m_pendingDamages = {};
479 if (m_pendingBuffer) {
483 if (m_waitForNewBuffers) {
484 qCWarning(KWIN_SCREENCAST) <<
"Waiting for new buffers to be created";
// Resolution changed: invalidate dmabuf params and renegotiate on the
// PipeWire loop before producing any further frames.
488 const auto size = m_source->textureSize();
489 if (size != m_resolution) {
491 m_waitForNewBuffers =
true;
492 m_dmabufParams = std::nullopt;
493 pw_loop_signal_event(m_pwCore->pwMainLoop, m_pwRenegotiate);
497 const char *
error =
"";
498 auto state = pw_stream_get_state(m_pwStream, &
error);
499 if (state != PW_STREAM_STATE_STREAMING) {
501 qCWarning(KWIN_SCREENCAST) <<
"Failed to record frame: stream is not active" <<
error;
506 struct pw_buffer *buffer = pw_stream_dequeue_buffer(m_pwStream);
512 struct spa_buffer *spa_buffer = buffer->buffer;
513 struct spa_data *spa_data = spa_buffer->datas;
515 uint8_t *data = (uint8_t *)spa_data->data;
516 if (!data && spa_buffer->datas->type != SPA_DATA_DmaBuf) {
517 qCWarning(KWIN_SCREENCAST) <<
"Failed to record frame: invalid buffer data";
518 pw_stream_queue_buffer(m_pwStream, buffer);
522 spa_data->chunk->offset = 0;
523 spa_data->chunk->flags = SPA_CHUNK_FLAG_NONE;
// Software (memfd) path: render into the mapped memory, then paint the
// cursor with QPainter when it is embedded in the frame.
525 if (data || spa_data[0].
type == SPA_DATA_MemFd) {
526 const bool hasAlpha = m_source->hasAlphaChannel();
527 const int bpp = data && !hasAlpha ? 3 : 4;
528 const uint stride = SPA_ROUND_UP_N(size.width() * bpp, 4);
530 if ((stride * size.height()) > spa_data->maxsize) {
531 qCDebug(KWIN_SCREENCAST) <<
"Failed to record frame: frame is too big";
532 pw_stream_queue_buffer(m_pwStream, buffer);
536 spa_data->chunk->stride = stride;
537 spa_data->chunk->size = stride * size.height();
539 m_source->render(spa_data, m_videoFormat.format);
543 QImage dest(data, size.width(), size.height(), stride, hasAlpha ? QImage::Format_RGBA8888_Premultiplied : QImage::Format_RGB888);
544 QPainter painter(&dest);
545 const auto position = (cursor->pos() - m_cursor.viewport.topLeft() - cursor->hotspot()) * m_cursor.scale;
547 painter.drawImage(QRect{position.toPoint(), cursorImage.
image().size()}, cursorImage.
image());
// Zero-copy (dma-buf) path: render into the imported framebuffer and
// report per-plane strides/offsets from the allocation.
550 auto &buf = m_dmabufDataForPwBuffer[buffer];
554 Q_ASSERT(buffer->buffer->n_datas >= uint(dmabufAttribs->
planeCount));
555 for (
int i = 0; i < dmabufAttribs->
planeCount; ++i) {
556 buffer->buffer->datas[i].chunk->stride = dmabufAttribs->
pitch[i];
557 buffer->buffer->datas[i].chunk->offset = dmabufAttribs->
offset[i];
559 spa_data->chunk->size = spa_data->maxsize;
561 m_source->render(buf->framebuffer());
// Embedded-cursor GL overlay: re-upload the texture when invalidated,
// then composite it over the framebuffer with alpha blending.
565 if (m_cursor.invalid) {
566 m_cursor.invalid =
false;
568 if (cursorImage.
isNull()) {
569 m_cursor.texture =
nullptr;
574 if (m_cursor.texture) {
579 const QRectF cursorRect =
scaledRect(cursor->geometry().translated(-m_cursor.viewport.topLeft()), m_cursor.scale);
582 mvp.ortho(QRectF(QPointF(0, 0), size));
583 mvp.translate(cursorRect.x(), cursorRect.y());
587 glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
588 m_cursor.texture->render(cursorRect.size());
// Damage both the cursor's old and new rectangles.
594 damagedRegion += QRegion{m_cursor.lastRect.toAlignedRect()} | cursorRect.toAlignedRect();
595 m_cursor.lastRect = cursorRect;
597 damagedRegion += m_cursor.lastRect.toAlignedRect();
598 m_cursor.lastRect = {};
// Metadata-cursor mode: publish position/bitmap via SPA_META_Cursor.
605 (spa_meta_cursor *)spa_buffer_find_meta_data(spa_buffer, SPA_META_Cursor,
sizeof(spa_meta_cursor)));
608 addDamage(spa_buffer, damagedRegion);
609 addHeader(spa_buffer);
613void ScreenCastStream::addHeader(spa_buffer *spaBuffer)
615 spa_meta_header *spaHeader = (spa_meta_header *)spa_buffer_find_meta_data(spaBuffer, SPA_META_Header,
sizeof(spaHeader));
617 spaHeader->flags = 0;
618 spaHeader->dts_offset = 0;
619 spaHeader->seq = m_sequential++;
620 spaHeader->pts = m_source->clock().count();
624void ScreenCastStream::addDamage(spa_buffer *spaBuffer,
const QRegion &damagedRegion)
626 if (spa_meta *vdMeta = spa_buffer_find_meta(spaBuffer, SPA_META_VideoDamage)) {
627 struct spa_meta_region *r = (spa_meta_region *)spa_meta_first(vdMeta);
630 if (damagedRegion.rectCount() > videoDamageRegionCount - 1) {
631 if (spa_meta_check(r, vdMeta)) {
632 auto rect = damagedRegion.boundingRect();
633 r->region = SPA_REGION(rect.x(), rect.y(), quint32(rect.width()), quint32(rect.height()));
637 for (
const QRect &rect : damagedRegion) {
638 if (spa_meta_check(r, vdMeta)) {
639 r->region = SPA_REGION(rect.x(), rect.y(), quint32(rect.width()), quint32(rect.height()));
645 if (spa_meta_check(r, vdMeta)) {
646 r->region = SPA_REGION(0, 0, 0, 0);
// NOTE(review): extraction fragment spanning two members: the cursor
// invalidation setter (marks the cached cursor bitmap/texture stale) and
// recordCursor(), which sends a cursor-only update: dequeues a buffer, marks
// its video data as CORRUPTED/empty so the client only consumes the cursor
// metadata, and attaches header + empty damage.
653 m_cursor.invalid =
true;
658 Q_ASSERT(!m_stopped);
// A frame is already in flight; the cursor will ride along with it.
663 if (m_pendingBuffer) {
667 const char *
error =
"";
668 auto state = pw_stream_get_state(m_pwStream, &
error);
669 if (state != PW_STREAM_STATE_STREAMING) {
671 qCWarning(KWIN_SCREENCAST) <<
"Failed to record cursor position: stream is not active" <<
error;
680 m_pendingBuffer = pw_stream_dequeue_buffer(m_pwStream);
681 if (!m_pendingBuffer) {
685 struct spa_buffer *spa_buffer = m_pendingBuffer->buffer;
// No video payload in this buffer — cursor metadata only.
688 spa_buffer->datas[0].chunk->flags = SPA_CHUNK_FLAG_CORRUPTED;
689 spa_buffer->datas[0].chunk->size = 0;
692 (spa_meta_cursor *)spa_buffer_find_meta_data(spa_buffer, SPA_META_Cursor,
sizeof(spa_meta_cursor)));
693 addHeader(spa_buffer);
694 addDamage(spa_buffer, {});
// Queues a rendered buffer back to PipeWire — but for GPU (dma-buf) frames
// only after the rendering commands have completed: an EGL native fence is
// inserted and its fd watched with a QSocketNotifier; enqueue() fires when
// the fence signals. Frames without a fence path enqueue immediately.
// NOTE(review): extraction fragment — the dmabuf/non-dmabuf branching and the
// fallback (glFinish-style) path are elided here.
698void ScreenCastStream::tryEnqueue(pw_buffer *buffer)
700 m_pendingBuffer = buffer;
707 Q_ASSERT_X(eglGetCurrentContext(),
"tryEnqueue",
"no current context");
708 m_pendingFence = std::make_unique<EGLNativeFence>(kwinApp()->outputBackend()->sceneEglDisplayObject());
709 if (!m_pendingFence->isValid()) {
710 qCWarning(KWIN_SCREENCAST) <<
"Failed to create a native EGL fence";
// The fence fd becomes readable once the GPU work is done.
714 m_pendingNotifier = std::make_unique<QSocketNotifier>(m_pendingFence->fileDescriptor().get(), QSocketNotifier::Read);
715 connect(m_pendingNotifier.get(), &QSocketNotifier::activated,
this, &ScreenCastStream::enqueue);
// Hands the pending buffer to PipeWire once it is safe to do so (called
// directly, or from the fence notifier set up in tryEnqueue). Updates the
// frame-pacing timestamp only for real frames, not cursor-only (CORRUPTED)
// updates, so cursor moves don't starve video frames of the rate budget.
725void ScreenCastStream::enqueue()
727 Q_ASSERT_X(m_pendingBuffer,
"enqueue",
"pending buffer must be valid");
// Fence has signalled (or was never needed); drop the wait machinery.
729 m_pendingFence.reset();
730 m_pendingNotifier.reset();
735 pw_stream_queue_buffer(m_pwStream, m_pendingBuffer);
737 if (m_pendingBuffer->buffer->datas[0].chunk->flags != SPA_CHUNK_FLAG_CORRUPTED) {
738 m_lastSent = QDateTime::currentDateTimeUtc();
741 m_pendingBuffer =
nullptr;
// Builds the list of EnumFormat pods offered to the client, into the caller's
// 2048-byte scratch buffer: optionally a fixated dma-buf format (single
// modifier, mandatory), a non-fixated dma-buf offer (full modifier list,
// DONT_FIXATE), and always a plain shared-memory format without modifiers.
// NOTE(review): extraction fragment — the `if (fixate)` / `if (m_hasDmaBuf)`
// guards around the first two appends and the return are elided here.
744QList<const spa_pod *> ScreenCastStream::buildFormats(
bool fixate,
char buffer[2048])
746 const auto format = drmFourCCToSpaVideoFormat(m_drmFormat);
747 spa_pod_builder podBuilder = SPA_POD_BUILDER_INIT(buffer, 2048);
748 spa_fraction defFramerate = SPA_FRACTION(0, 1);
749 spa_fraction minFramerate = SPA_FRACTION(1, 1);
// refreshRate() is in mHz; PipeWire wants an integer fraction.
750 spa_fraction maxFramerate = SPA_FRACTION(m_source->refreshRate() / 1000, 1);
752 spa_rectangle resolution = SPA_RECTANGLE(uint32_t(m_resolution.width()), uint32_t(m_resolution.height()));
754 QList<const spa_pod *> params;
755 params.reserve(fixate + m_hasDmaBuf + 1);
757 params.append(buildFormat(&podBuilder, SPA_VIDEO_FORMAT_BGRA, &resolution, &defFramerate, &minFramerate, &maxFramerate, {m_dmabufParams->modifier}, SPA_POD_PROP_FLAG_MANDATORY));
760 params.append(buildFormat(&podBuilder, SPA_VIDEO_FORMAT_BGRA, &resolution, &defFramerate, &minFramerate, &maxFramerate, m_modifiers, SPA_POD_PROP_FLAG_MANDATORY | SPA_POD_PROP_FLAG_DONT_FIXATE));
762 params.append(buildFormat(&podBuilder,
format, &resolution, &defFramerate, &minFramerate, &maxFramerate, {}, 0));
// Builds a single SPA EnumFormat pod: media type/subtype, size, framerate
// (fixed default plus a min..max choice range), the pixel format (offered as
// a 3-way enum alternating the x-variant for BGRA/RGBA, or fixed otherwise),
// and — when modifiers are given — a modifier choice enum with the listed
// flags (MANDATORY, optionally DONT_FIXATE).
// NOTE(review): extraction fragment — trailing 0 terminators, the guard
// structure around the two spa_pod_builder_long calls (the first modifier is
// apparently also written as the choice default), and braces are elided.
766spa_pod *ScreenCastStream::buildFormat(
struct spa_pod_builder *b,
enum spa_video_format
format,
struct spa_rectangle *resolution,
767 struct spa_fraction *defaultFramerate,
struct spa_fraction *minFramerate,
struct spa_fraction *maxFramerate,
768 const QList<uint64_t> &modifiers, quint32 modifiersFlags)
770 struct spa_pod_frame f[2];
771 spa_pod_builder_push_object(b, &f[0], SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat);
772 spa_pod_builder_add(b, SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video), 0);
773 spa_pod_builder_add(b, SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw), 0);
774 spa_pod_builder_add(b, SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(resolution), 0);
775 spa_pod_builder_add(b, SPA_FORMAT_VIDEO_framerate, SPA_POD_Fraction(defaultFramerate), 0);
776 spa_pod_builder_add(b, SPA_FORMAT_VIDEO_maxFramerate,
777 SPA_POD_CHOICE_RANGE_Fraction(
778 SPA_POD_Fraction(maxFramerate),
779 SPA_POD_Fraction(minFramerate),
780 SPA_POD_Fraction(maxFramerate)),
// Offer the x-variant (no alpha) alongside the alpha format so clients
// that cannot handle alpha can still negotiate.
783 if (
format == SPA_VIDEO_FORMAT_BGRA) {
785 spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_CHOICE_ENUM_Id(3,
format,
format, SPA_VIDEO_FORMAT_BGRx), 0);
786 }
else if (
format == SPA_VIDEO_FORMAT_RGBA) {
788 spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_CHOICE_ENUM_Id(3,
format,
format, SPA_VIDEO_FORMAT_RGBx), 0);
790 spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_Id(
format), 0);
793 if (!modifiers.isEmpty()) {
794 spa_pod_builder_prop(b, SPA_FORMAT_VIDEO_modifier, modifiersFlags);
795 spa_pod_builder_push_choice(b, &f[1], SPA_CHOICE_Enum, 0);
798 for (
auto modifier : modifiers) {
799 spa_pod_builder_long(b, modifier);
801 spa_pod_builder_long(b, modifier);
804 spa_pod_builder_pop(b, &f[1]);
806 return (spa_pod *)spa_pod_builder_pop(b, &f[0]);
// NOTE(review): extraction fragment spanning three members:
//  - includesCursor(): whether the cursor intersects the captured viewport;
//  - sendCursorData(): fills SPA_META_Cursor metadata — hidden cursor gets
//    id 0 and -1 coordinates; a visible cursor gets viewport-relative,
//    scale-adjusted position/hotspot, plus an RGBA bitmap re-render whenever
//    the cached cursor image was invalidated;
//  - setCursorMode(): stores the requested mode and scale.
// Braces, early returns, and the visibility guard are elided here.
814 return m_cursor.viewport.intersects(cursor->
geometry());
817void ScreenCastStream::sendCursorData(
Cursor *cursor, spa_meta_cursor *spa_meta_cursor)
819 if (!cursor || !spa_meta_cursor) {
// Cursor hidden or outside the viewport: id 0 means "no cursor".
824 spa_meta_cursor->id = 0;
825 spa_meta_cursor->position.x = -1;
826 spa_meta_cursor->position.y = -1;
827 spa_meta_cursor->hotspot.x = -1;
828 spa_meta_cursor->hotspot.y = -1;
829 spa_meta_cursor->bitmap_offset = 0;
830 m_cursor.visible =
false;
833 m_cursor.visible =
true;
834 const auto position = (cursor->
pos() - m_cursor.viewport.topLeft()) * m_cursor.scale;
836 spa_meta_cursor->id = 1;
837 spa_meta_cursor->position.x = position.x();
838 spa_meta_cursor->position.y = position.y();
839 spa_meta_cursor->hotspot.x = cursor->
hotspot().x() * m_cursor.scale;
840 spa_meta_cursor->hotspot.y = cursor->
hotspot().y() * m_cursor.scale;
841 spa_meta_cursor->bitmap_offset = 0;
// Bitmap unchanged since last send: position-only update.
843 if (!m_cursor.invalid) {
847 m_cursor.invalid =
false;
848 spa_meta_cursor->bitmap_offset =
sizeof(
struct spa_meta_cursor);
850 const QSize targetSize = (cursor->
rect().size() * m_cursor.scale).toSize();
852 struct spa_meta_bitmap *spa_meta_bitmap = SPA_MEMBER(spa_meta_cursor,
853 spa_meta_cursor->bitmap_offset,
854 struct spa_meta_bitmap);
855 spa_meta_bitmap->format = SPA_VIDEO_FORMAT_RGBA;
856 spa_meta_bitmap->offset =
sizeof(
struct spa_meta_bitmap);
// Clamp to the bitmap area negotiated via CURSOR_META_SIZE.
857 spa_meta_bitmap->size.width = std::min(m_cursor.bitmapSize.width(), targetSize.width());
858 spa_meta_bitmap->size.height = std::min(m_cursor.bitmapSize.height(), targetSize.height());
859 spa_meta_bitmap->stride = spa_meta_bitmap->size.width * 4;
// Paint the cursor image straight into the metadata memory.
861 uint8_t *bitmap_data = SPA_MEMBER(spa_meta_bitmap, spa_meta_bitmap->offset, uint8_t);
862 QImage dest(bitmap_data,
863 spa_meta_bitmap->size.width,
864 spa_meta_bitmap->size.height,
865 spa_meta_bitmap->stride,
866 QImage::Format_RGBA8888_Premultiplied);
867 dest.fill(Qt::transparent);
869 const QImage image = kwinApp()->cursorImage().image();
870 if (!image.isNull()) {
871 QPainter painter(&dest);
872 painter.drawImage(QRect({0, 0}, targetSize), image);
// setCursorMode(): remember how the cursor should be delivered.
878 m_cursor.mode =
mode;
879 m_cursor.scale =
scale;
// Probes whether a dma-buf of the given size/format can actually be
// allocated with one of the candidate modifiers; on success returns the
// allocation's effective parameters (the driver picks the concrete modifier).
// NOTE(review): extraction fragment — the allocator call, the failure return,
// the scope-guard body releasing the probe buffer, and several designated
// initializers are elided here.
883std::optional<ScreenCastDmaBufTextureParams> ScreenCastStream::testCreateDmaBuf(
const QSize &size, quint32
format,
const QList<uint64_t> &modifiers)
893 .modifiers = modifiers,
// The probe buffer is only needed to learn the parameters; drop it.
898 auto drop = qScopeGuard([&buffer]() {
907 return ScreenCastDmaBufTextureParams{
909 .width = attrs->width,
910 .height = attrs->height,
911 .format = attrs->format,
912 .modifier = attrs->modifier,
// Allocates a graphics buffer matching the previously negotiated dma-buf
// parameters and imports it into the GL backend as a renderable texture,
// pairing both in a ScreenCastDmaBufTexture.
// NOTE(review): extraction fragment — the allocator call, failure handling,
// and backend lookup are elided; "&para;ms" below is mojibake for "&params"
// introduced by the extraction, left byte-identical on purpose.
916std::shared_ptr<ScreenCastDmaBufTexture> ScreenCastStream::createDmaBufTexture(
const ScreenCastDmaBufTextureParams &para;ms)
924 .size = QSize(params.width, params.height),
925 .format = params.format,
926 .modifiers = {params.modifier},
// Importing requires a current GL context.
938 backend->makeCurrent();
939 return std::make_shared<ScreenCastDmaBufTexture>(backend->importDmaBufAsTexture(*attrs), buffer);
944#include "moc_screencaststream.cpp"
RenderBackend * backend() const
static Compositor * self()
void positionChanged(Cursor *cursor, const QPointF &position)
void currentCursorChanged(Cursor *cursor)
Cursor * currentCursor() const
static GLFramebuffer * popFramebuffer()
static void pushFramebuffer(GLFramebuffer *fbo)
@ ModelViewProjectionMatrix
static std::unique_ptr< GLTexture > upload(const QImage &image)
virtual GraphicsBuffer * allocate(const GraphicsBufferOptions &options)=0
virtual const DmaBufAttributes * dmabufAttributes() const
The OpenGLBackend creates and holds the OpenGL context and is responsible for Texture from Pixmap.
void pipewireFailed(const QString &message)
virtual GraphicsBufferAllocator * graphicsBufferAllocator() const
virtual QHash< uint32_t, QList< uint64_t > > supportedFormats() const
void streamReady(quint32 nodeId)
void setCursorMode(ScreencastV1Interface::CursorMode mode, qreal scale, const QRectF &viewport)
ScreencastV1Interface::CursorMode mode
ScreenCastStream(ScreenCastSource *source, std::shared_ptr< PipeWireCore > pwCore, QObject *parent)
bool includesCursor(Cursor *cursor) const
void recordFrame(const QRegion &damagedRegion)
static ShaderManager * instance()
GLShader * pushShader(ShaderTraits traits)
KWIN_EXPORT QRectF scaledRect(const QRectF &rect, qreal scale)
#define CURSOR_META_SIZE(w, h)
std::array< uint32_t, 4 > offset
std::array< uint32_t, 4 > pitch