author     Dmytro Poplavskiy <dmytro.poplavskiy@nokia.com>   2010-02-05 01:55:41 (GMT)
committer  Dmytro Poplavskiy <dmytro.poplavskiy@nokia.com>   2010-02-05 01:55:41 (GMT)
commit     5f1c9dd14e1a6f6fc479dc8103a1166c71b61746 (patch)
tree       4273f506d02a469e30f04d5ecffd49fb6ee3896c
parent     623fa99d781a5cc2ddbd330f949764333fcd7ab1 (diff)
Added support for XvImage shm-based video frames to the gstreamer backend.

The GStreamer backend now uses XvShmPutImage with the overlay video output
when possible, so frame data is not copied, and provides XvImage-based
video frames to the renderer video output surface.
7 files changed, 709 insertions, 94 deletions
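
Note (added for context, not part of the commit): the zero-copy path works by allocating GStreamer buffers inside X shared memory segments, tagging the resulting frames with a custom XvImage handle type, and letting the X11 video surface hand them straight to the Xv port with XvShmPutImage instead of mapping and copying the pixels. The sketch below only illustrates that presentation decision; presentXvFrame, port, window, gc and the rectangle parameters are hypothetical names, and XvHandleType(4) simply mirrors the constant value used in the patch.

// Illustration only -- a minimal sketch of the presentation path this commit
// enables, not code from the patch itself.  presentXvFrame, port, window, gc
// and the rectangles are hypothetical; XvHandleType(4) mirrors the value the
// patch uses for its custom handle type.
#include <QtCore/qrect.h>
#include <QtCore/qvariant.h>
#include <QtGui/qx11info_x11.h>
#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/qabstractvideobuffer.h>
#include <X11/Xlib.h>
#include <X11/extensions/XShm.h>
#include <X11/extensions/Xvlib.h>

Q_DECLARE_METATYPE(XvImage*)

static const QAbstractVideoBuffer::HandleType XvHandleType =
        QAbstractVideoBuffer::HandleType(4);

// Present one frame on an Xv port: zero-copy when the frame carries an
// XvImage handle, otherwise fall back to mapping the frame data.
static bool presentXvFrame(QVideoFrame frame, XvPortID port, Window window, GC gc,
                           const QRect &src, const QRect &dst)
{
    if (frame.handleType() == XvHandleType) {
        // The buffer already lives in an X shared memory segment, so the
        // server can read it directly -- no copy of the pixel data.
        XvImage *img = frame.handle().value<XvImage*>();
        if (!img)
            return false;

        XvShmPutImage(QX11Info::display(), port, window, gc, img,
                      src.x(), src.y(), src.width(), src.height(),
                      dst.x(), dst.y(), dst.width(), dst.height(),
                      False);
        return true;
    }

    // Ordinary system-memory frame: map it and let XvPutImage copy the
    // pixels through the X protocol (the pre-existing, slower path).
    if (!frame.map(QAbstractVideoBuffer::ReadOnly))
        return false;
    // ... wrap frame.bits() in an XvImage and call XvPutImage() here ...
    frame.unmap();
    return true;
}

Avoiding the per-frame copy through the X protocol is the main point of the change; the rest of the patch is the buffer pool and sink plumbing that makes such handles available to the surface.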
diff --git a/src/plugins/mediaservices/gstreamer/qgstvideobuffer.cpp b/src/plugins/mediaservices/gstreamer/qgstvideobuffer.cpp
index 384bdff..9519db6 100644
--- a/src/plugins/mediaservices/gstreamer/qgstvideobuffer.cpp
+++ b/src/plugins/mediaservices/gstreamer/qgstvideobuffer.cpp
@@ -51,6 +51,18 @@ QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine)
     gst_buffer_ref(m_buffer);
 }
 
+QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine,
+                QGstVideoBuffer::HandleType handleType,
+                const QVariant &handle)
+    : QAbstractVideoBuffer(handleType)
+    , m_buffer(buffer)
+    , m_bytesPerLine(bytesPerLine)
+    , m_mode(NotMapped)
+    , m_handle(handle)
+{
+    gst_buffer_ref(m_buffer);
+}
+
 QGstVideoBuffer::~QGstVideoBuffer()
 {
     gst_buffer_unref(m_buffer);
diff --git a/src/plugins/mediaservices/gstreamer/qgstvideobuffer.h b/src/plugins/mediaservices/gstreamer/qgstvideobuffer.h
index 36b7c08..5133e2e 100644
--- a/src/plugins/mediaservices/gstreamer/qgstvideobuffer.h
+++ b/src/plugins/mediaservices/gstreamer/qgstvideobuffer.h
@@ -43,6 +43,7 @@
 #define QGSTVIDEOBUFFER_H
 
 #include <QtMultimedia/QAbstractVideoBuffer>
+#include <QtCore/qvariant.h>
 
 #include <gst/gst.h>
 
@@ -55,6 +56,8 @@ class QGstVideoBuffer : public QAbstractVideoBuffer
 {
 public:
     QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine);
+    QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine,
+                    HandleType handleType, const QVariant &handle);
     ~QGstVideoBuffer();
 
     MapMode mapMode() const;
@@ -62,10 +65,12 @@ public:
     uchar *map(MapMode mode, int *numBytes, int *bytesPerLine);
     void unmap();
 
+    QVariant handle() const { return m_handle; }
 private:
     GstBuffer *m_buffer;
     int m_bytesPerLine;
     MapMode m_mode;
+    QVariant m_handle;
 };
 
 QT_END_NAMESPACE
diff --git a/src/plugins/mediaservices/gstreamer/qgstxvimagebuffer.cpp b/src/plugins/mediaservices/gstreamer/qgstxvimagebuffer.cpp
new file mode 100644
index 0000000..0e47c98
--- /dev/null
+++ b/src/plugins/mediaservices/gstreamer/qgstxvimagebuffer.cpp
@@ -0,0 +1,276 @@
+/****************************************************************************
+**
+** Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
+** All rights reserved.
+** Contact: Nokia Corporation (qt-info@nokia.com)
+**
+** This file is part of the plugins of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** No Commercial Usage
+** This file contains pre-release code and may not be distributed.
+** You may use this file in accordance with the terms and conditions
+** contained in the Technology Preview License Agreement accompanying
+** this package.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights.  These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** If you have questions regarding the use of this file, please contact
+** Nokia at qt-info@nokia.com.
+**
+**
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include <QtCore/qdebug.h>
+#include <QtCore/qthread.h>
+#include <QtCore/qvariant.h>
+#include <QtGui/qx11info_x11.h>
+
+#include "qgstxvimagebuffer.h"
+#include "qvideosurfacegstsink.h"
+
+GstBufferClass *QGstXvImageBuffer::parent_class = NULL;
+
+GType QGstXvImageBuffer::get_type(void)
+{
+    static GType buffer_type = 0;
+
+    if (buffer_type == 0) {
+        static const GTypeInfo buffer_info = {
+            sizeof (GstBufferClass),
+            NULL,
+            NULL,
+            QGstXvImageBuffer::class_init,
+            NULL,
+            NULL,
+            sizeof(QGstXvImageBuffer),
+            0,
+            (GInstanceInitFunc)QGstXvImageBuffer::buffer_init,
+            NULL
+        };
+        buffer_type = g_type_register_static(GST_TYPE_BUFFER,
+            "QGstXvImageBuffer", &buffer_info, GTypeFlags(0));
+    }
+    return buffer_type;
+}
+
+void QGstXvImageBuffer::class_init(gpointer g_class, gpointer class_data)
+{
+    Q_UNUSED(class_data);
+    GST_MINI_OBJECT_CLASS(g_class)->finalize =
+            (GstMiniObjectFinalizeFunction)buffer_finalize;
+    parent_class = (GstBufferClass*)g_type_class_peek_parent(g_class);
+}
+
+void QGstXvImageBuffer::buffer_init(QGstXvImageBuffer *xvImage, gpointer g_class)
+{
+    Q_UNUSED(g_class);
+    xvImage->pool = 0;
+    xvImage->shmInfo.shmaddr = ((char *) -1);
+    xvImage->shmInfo.shmid = -1;
+    xvImage->markedForDeletion = false;
+}
+
+void QGstXvImageBuffer::buffer_finalize(QGstXvImageBuffer * xvImage)
+{
+    if (xvImage->pool) {
+        if (xvImage->markedForDeletion)
+            xvImage->pool->destroyBuffer(xvImage);
+        else
+            xvImage->pool->recycleBuffer(xvImage);
+    }
+}
+
+
+QGstXvImageBufferPool::QGstXvImageBufferPool(QObject *parent)
+    :QObject(parent)
+{
+}
+
+QGstXvImageBufferPool::~QGstXvImageBufferPool()
+{
+}
+
+bool QGstXvImageBufferPool::isFormatSupported(const QVideoSurfaceFormat &surfaceFormat)
+{
+    bool ok = true;
+    surfaceFormat.property("portId").toULongLong(&ok);
+    if (!ok)
+        return false;
+
+    int xvFormatId = surfaceFormat.property("xvFormatId").toInt(&ok);
+    if (!ok || xvFormatId < 0)
+        return false;
+
+    int dataSize = surfaceFormat.property("dataSize").toInt(&ok);
+    if (!ok || dataSize<=0)
+        return false;
+
+    return true;
+}
+
+QGstXvImageBuffer *QGstXvImageBufferPool::takeBuffer(const QVideoSurfaceFormat &format, GstCaps *caps)
+{
+    m_poolMutex.lock();
+
+    m_caps = caps;
+    if (format != m_format) {
+        doClear();
+        m_format = format;
+    }
+
+
+    if (m_pool.isEmpty()) {
+        //qDebug() << "QGstXvImageBufferPool::takeBuffer: no buffer available, allocate the new one";
+        if (QThread::currentThread() == thread()) {
+            m_poolMutex.unlock();
+            queuedAlloc();
+            m_poolMutex.lock();
+        } else {
+            QMetaObject::invokeMethod(this, "queuedAlloc", Qt::QueuedConnection);
+            m_allocWaitCondition.wait(&m_poolMutex, 300);
+        }
+    }
+    QGstXvImageBuffer *res = 0;
+
+    if (!m_pool.isEmpty()) {
+        res = m_pool.takeLast();
+    }
+
+    m_poolMutex.unlock();
+
+    return res;
+}
+
+void QGstXvImageBufferPool::queuedAlloc()
+{
+    QMutexLocker lock(&m_poolMutex);
+
+    Q_ASSERT(QThread::currentThread() == thread());
+
+    QGstXvImageBuffer *xvBuffer = (QGstXvImageBuffer *)gst_mini_object_new(QGstXvImageBuffer::get_type());
+
+    quint64 portId = m_format.property("portId").toULongLong();
+    int xvFormatId = m_format.property("xvFormatId").toInt();
+
+    xvBuffer->xvImage = XvShmCreateImage(
+            QX11Info::display(),
+            portId,
+            xvFormatId,
+            0,
+            m_format.frameWidth(),
+            m_format.frameHeight(),
+            &xvBuffer->shmInfo
+            );
+
+    if (!xvBuffer->xvImage) {
+        qDebug() << "QGstXvImageBufferPool: XvShmCreateImage failed";
+        m_allocWaitCondition.wakeOne();
+        return;
+    }
+
+    xvBuffer->shmInfo.shmid = shmget(IPC_PRIVATE, xvBuffer->xvImage->data_size, IPC_CREAT | 0777);
+    xvBuffer->shmInfo.shmaddr = xvBuffer->xvImage->data = (char*)shmat(xvBuffer->shmInfo.shmid, 0, 0);
+    xvBuffer->shmInfo.readOnly = False;
+
+    if (!XShmAttach(QX11Info::display(), &xvBuffer->shmInfo)) {
+        qDebug() << "QGstXvImageBufferPool: XShmAttach failed";
+        m_allocWaitCondition.wakeOne();
+        return;
+    }
+
+    shmctl (xvBuffer->shmInfo.shmid, IPC_RMID, NULL);
+
+    xvBuffer->pool = this;
+    GST_MINI_OBJECT_CAST(xvBuffer)->flags = 0;
+    gst_buffer_set_caps(GST_BUFFER_CAST(xvBuffer), m_caps);
+    GST_BUFFER_DATA(xvBuffer) = (uchar*)xvBuffer->xvImage->data;
+    GST_BUFFER_SIZE(xvBuffer) = xvBuffer->xvImage->data_size;
+
+    m_allBuffers.append(xvBuffer);
+    m_pool.append(xvBuffer);
+
+    m_allocWaitCondition.wakeOne();
+}
+
+
+void QGstXvImageBufferPool::clear()
+{
+    QMutexLocker lock(&m_poolMutex);
+    doClear();
+}
+
+void QGstXvImageBufferPool::doClear()
+{
+    foreach (QGstXvImageBuffer *xvBuffer, m_allBuffers) {
+        xvBuffer->markedForDeletion = true;
+    }
+    m_allBuffers.clear();
+
+    foreach (QGstXvImageBuffer *xvBuffer, m_pool) {
+        gst_buffer_unref(GST_BUFFER(xvBuffer));
+    }
+    m_pool.clear();
+
+    m_format = QVideoSurfaceFormat();
+}
+
+void QGstXvImageBufferPool::queuedDestroy()
+{
+    QMutexLocker lock(&m_destroyMutex);
+
+    foreach(XvShmImage xvImage, m_imagesToDestroy) {
+        if (xvImage.shmInfo.shmaddr != ((void *) -1)) {
+            XShmDetach(QX11Info::display(), &xvImage.shmInfo);
+            XSync(QX11Info::display(), false);
+
+            shmdt(xvImage.shmInfo.shmaddr);
+        }
+
+        if (xvImage.xvImage)
+            XFree(xvImage.xvImage);
+    }
+
+    m_imagesToDestroy.clear();
+
+    XSync(QX11Info::display(), false);
+}
+
+void QGstXvImageBufferPool::recycleBuffer(QGstXvImageBuffer *xvBuffer)
+{
+    QMutexLocker lock(&m_poolMutex);
+    gst_buffer_ref(GST_BUFFER_CAST(xvBuffer));
+    m_pool.append(xvBuffer);
+}
+
+void QGstXvImageBufferPool::destroyBuffer(QGstXvImageBuffer *xvBuffer)
+{
+    XvShmImage imageToDestroy;
+    imageToDestroy.xvImage = xvBuffer->xvImage;
+    imageToDestroy.shmInfo = xvBuffer->shmInfo;
+
+    m_destroyMutex.lock();
+    m_imagesToDestroy.append(imageToDestroy);
+    m_destroyMutex.unlock();
+
+    if (m_imagesToDestroy.size() == 1)
+        QMetaObject::invokeMethod(this, "queuedDestroy", Qt::QueuedConnection);
+}
diff --git a/src/plugins/mediaservices/gstreamer/qgstxvimagebuffer.h b/src/plugins/mediaservices/gstreamer/qgstxvimagebuffer.h
new file mode 100644
index 0000000..beeb01f
--- /dev/null
+++ b/src/plugins/mediaservices/gstreamer/qgstxvimagebuffer.h
@@ -0,0 +1,131 @@
+/****************************************************************************
+**
+** Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
+** All rights reserved.
+** Contact: Nokia Corporation (qt-info@nokia.com)
+**
+** This file is part of the plugins of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** No Commercial Usage
+** This file contains pre-release code and may not be distributed.
+** You may use this file in accordance with the terms and conditions
+** contained in the Technology Preview License Agreement accompanying
+** this package.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights.  These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** If you have questions regarding the use of this file, please contact
+** Nokia at qt-info@nokia.com.
+**
+**
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef QGSTXVIMAGEBUFFER_H
+#define QGSTXVIMAGEBUFFER_H
+
+#include <QtMultimedia/qabstractvideobuffer.h>
+#include <QtMultimedia/qvideosurfaceformat.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qwaitcondition.h>
+#include <QtCore/qqueue.h>
+
+#include <X11/Xlib.h>
+#include <sys/ipc.h>
+#include <sys/shm.h>
+#include <X11/extensions/XShm.h>
+#include <X11/Xlib.h>
+#include <X11/extensions/Xv.h>
+#include <X11/extensions/Xvlib.h>
+
+
+#include <gst/gst.h>
+
+QT_BEGIN_HEADER
+
+QT_BEGIN_NAMESPACE
+
+
+class QGstXvImageBufferPool;
+
+struct QGstXvImageBuffer {
+    GstBuffer buffer;
+    QGstXvImageBufferPool *pool;
+    XvImage *xvImage;
+    XShmSegmentInfo shmInfo;
+    bool markedForDeletion;
+
+    static GType get_type(void);
+    static void class_init(gpointer g_class, gpointer class_data);
+    static void buffer_init(QGstXvImageBuffer *xvimage, gpointer g_class);
+    static void buffer_finalize(QGstXvImageBuffer * xvimage);
+    static GstBufferClass *parent_class;
+};
+
+const QAbstractVideoBuffer::HandleType XvHandleType = QAbstractVideoBuffer::HandleType(4);
+
+Q_DECLARE_METATYPE(XvImage*)
+
+
+class QGstXvImageBufferPool : public QObject {
+Q_OBJECT
+friend class QGstXvImageBuffer;
+public:
+    QGstXvImageBufferPool(QObject *parent = 0);
+    virtual ~QGstXvImageBufferPool();
+
+    bool isFormatSupported(const QVideoSurfaceFormat &format);
+
+    QGstXvImageBuffer *takeBuffer(const QVideoSurfaceFormat &format, GstCaps *caps);
+    void clear();
+
+private slots:
+    void queuedAlloc();
+    void queuedDestroy();
+
+    void doClear();
+
+    void recycleBuffer(QGstXvImageBuffer *);
+    void destroyBuffer(QGstXvImageBuffer *);
+
+private:
+    struct XvShmImage {
+        XvImage *xvImage;
+        XShmSegmentInfo shmInfo;
+    };
+
+    QMutex m_poolMutex;
+    QMutex m_allocMutex;
+    QWaitCondition m_allocWaitCondition;
+    QMutex m_destroyMutex;
+    QVideoSurfaceFormat m_format;
+    GstCaps *m_caps;
+    QList<QGstXvImageBuffer*> m_pool;
+    QList<QGstXvImageBuffer*> m_allBuffers;
+    QList<XvShmImage> m_imagesToDestroy;
+};
+
+QT_END_NAMESPACE
+
+QT_END_HEADER
+
+
+#endif
diff --git a/src/plugins/mediaservices/gstreamer/qvideosurfacegstsink.cpp b/src/plugins/mediaservices/gstreamer/qvideosurfacegstsink.cpp
index 41e3f77..ca91522 100644
--- a/src/plugins/mediaservices/gstreamer/qvideosurfacegstsink.cpp
+++ b/src/plugins/mediaservices/gstreamer/qvideosurfacegstsink.cpp
@@ -39,16 +39,19 @@
 **
 ****************************************************************************/
 
-#include "qvideosurfacegstsink.h"
-
-#include "qgstvideobuffer.h"
-
 #include <QtMultimedia/QAbstractVideoSurface>
 #include <QtMultimedia/QVideoFrame>
 #include <QDebug>
 #include <QMap>
 #include <QDebug>
 #include <QThread>
+#include <QtGui/qx11info_x11.h>
+
+#include "qvideosurfacegstsink.h"
+
+#include "qgstvideobuffer.h"
+#include "qgstxvimagebuffer.h"
+
 
 Q_DECLARE_METATYPE(QVideoSurfaceFormat)
 
@@ -62,11 +65,20 @@ QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(QAbstractVideoSurface *surfac
     connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(supportedFormatsChanged()));
 }
 
-QList<QVideoFrame::PixelFormat> QVideoSurfaceGstDelegate::supportedPixelFormats() const
+QList<QVideoFrame::PixelFormat> QVideoSurfaceGstDelegate::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const
 {
     QMutexLocker locker(const_cast<QMutex *>(&m_mutex));
 
-    return m_supportedPixelFormats;
+    if (handleType == QAbstractVideoBuffer::NoHandle)
+        return m_supportedPixelFormats;
+    else
+        return m_surface->supportedPixelFormats(handleType);
+}
+
+QVideoSurfaceFormat QVideoSurfaceGstDelegate::surfaceFormat() const
+{
+    QMutexLocker locker(const_cast<QMutex *>(&m_mutex));
+    return m_format;
 }
 
 bool QVideoSurfaceGstDelegate::start(const QVideoSurfaceFormat &format, int bytesPerLine)
@@ -84,6 +96,8 @@ bool QVideoSurfaceGstDelegate::start(const QVideoSurfaceFormat &format, int byte
         m_setupCondition.wait(&m_mutex);
     }
 
+    m_format = m_surface->surfaceFormat();
+
     return m_started;
 }
 
@@ -103,12 +117,27 @@ void QVideoSurfaceGstDelegate::stop()
 
     m_started = false;
 }
 
+bool QVideoSurfaceGstDelegate::isActive()
+{
+    QMutexLocker locker(&m_mutex);
+    return m_surface->isActive();
+}
+
 GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
 {
     QMutexLocker locker(&m_mutex);
 
+    QGstVideoBuffer *videoBuffer = 0;
+
+    if (G_TYPE_CHECK_INSTANCE_TYPE(buffer, QGstXvImageBuffer::get_type())) {
+        QGstXvImageBuffer *xvBuffer = reinterpret_cast<QGstXvImageBuffer *>(buffer);
+        QVariant handle = QVariant::fromValue(xvBuffer->xvImage);
+        videoBuffer = new QGstVideoBuffer(buffer, m_bytesPerLine, XvHandleType, handle);
+    } else
+        videoBuffer = new QGstVideoBuffer(buffer, m_bytesPerLine);
+
     m_frame = QVideoFrame(
-            new QGstVideoBuffer(buffer, m_bytesPerLine),
+            videoBuffer,
             m_format.frameSize(),
             m_format.pixelFormat());
 
@@ -134,7 +163,6 @@ GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
     }
 }
 
-
 void QVideoSurfaceGstDelegate::queuedStart()
 {
     QMutexLocker locker(&m_mutex);
 
@@ -196,8 +224,8 @@ static const YuvFormat qt_yuvColorLookup[] =
 {
     { QVideoFrame::Format_YUV420P, GST_MAKE_FOURCC('I','4','2','0'), 8 },
     { QVideoFrame::Format_YV12, GST_MAKE_FOURCC('Y','V','1','2'), 8 },
-    { QVideoFrame::Format_UYVY, GST_MAKE_FOURCC('U','Y','V','Y'), 8 },
-    { QVideoFrame::Format_YUYV, GST_MAKE_FOURCC('Y','U','Y','V'), 8 },
+    { QVideoFrame::Format_UYVY, GST_MAKE_FOURCC('U','Y','V','Y'), 16 },
+    { QVideoFrame::Format_YUYV, GST_MAKE_FOURCC('Y','U','Y','V'), 16 },
     { QVideoFrame::Format_YUYV, GST_MAKE_FOURCC('Y','U','Y','2'), 8 },
     { QVideoFrame::Format_NV12, GST_MAKE_FOURCC('N','V','1','2'), 8 },
     { QVideoFrame::Format_NV21, GST_MAKE_FOURCC('N','V','2','1'), 8 }
 
@@ -210,16 +238,18 @@ static int indexOfYuvColor(QVideoFrame::PixelFormat format)
     for (int i = 0; i < count; ++i)
         if (qt_yuvColorLookup[i].pixelFormat == format)
             return i;
+    return -1;
 }
 
 static int indexOfYuvColor(guint32 fourcc)
 {
-    const int count = sizeof(YuvFormat) / sizeof(YuvFormat);
+    const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);
 
     for (int i = 0; i < count; ++i)
         if (qt_yuvColorLookup[i].fourcc == fourcc)
             return i;
+    return -1;
 }
 
@@ -313,7 +343,7 @@ void QVideoSurfaceGstSink::class_init(gpointer g_class, gpointer class_data)
     GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
     base_sink_class->get_caps = QVideoSurfaceGstSink::get_caps;
     base_sink_class->set_caps = QVideoSurfaceGstSink::set_caps;
-    // base_sink_class->buffer_alloc = QVideoSurfaceGstSink::buffer_alloc; // Not implemented.
+    base_sink_class->buffer_alloc = QVideoSurfaceGstSink::buffer_alloc;
     base_sink_class->start = QVideoSurfaceGstSink::start;
     base_sink_class->stop = QVideoSurfaceGstSink::stop;
     // base_sink_class->unlock = QVideoSurfaceGstSink::unlock; // Not implemented.
@@ -352,11 +382,27 @@ void QVideoSurfaceGstSink::instance_init(GTypeInstance *instance, gpointer g_cla
     Q_UNUSED(g_class);
 
     sink->delegate = 0;
+    sink->pool = new QGstXvImageBufferPool();
+    sink->lastRequestedCaps = 0;
+    sink->lastBufferCaps = 0;
+    sink->lastSurfaceFormat = new QVideoSurfaceFormat;
 }
 
 void QVideoSurfaceGstSink::finalize(GObject *object)
 {
-    Q_UNUSED(object);
+    VO_SINK(object);
+    delete sink->pool;
+    sink->pool = 0;
+    delete sink->lastSurfaceFormat;
+    sink->lastSurfaceFormat = 0;
+
+    if (sink->lastBufferCaps)
+        gst_caps_unref(sink->lastBufferCaps);
+    sink->lastBufferCaps = 0;
+
+    if (sink->lastRequestedCaps)
+        gst_caps_unref(sink->lastRequestedCaps);
+    sink->lastRequestedCaps = 0;
 }
 
 GstStateChangeReturn QVideoSurfaceGstSink::change_state(
@@ -421,92 +467,184 @@
 gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
 {
     VO_SINK(base);
 
+    //qDebug() << "set_caps";
+    //qDebug() << gst_caps_to_string(caps);
+
     if (!caps) {
         sink->delegate->stop();
 
         return TRUE;
     } else {
-        const GstStructure *structure = gst_caps_get_structure(caps, 0);
+        int bytesPerLine = 0;
+        QVideoSurfaceFormat format = formatForCaps(caps, &bytesPerLine);
 
-        //qDebug() << gst_caps_to_string(caps);
+        if (sink->delegate->isActive()) {
+            QVideoSurfaceFormat surfaceFormst = sink->delegate->surfaceFormat();
 
-        QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
-        int bitsPerPixel = 0;
+            if (format.pixelFormat() == surfaceFormst.pixelFormat() &&
+                format.frameSize() == surfaceFormst.frameSize())
+                return TRUE;
+            else
+                sink->delegate->stop();
+        }
 
-        QSize size;
-        gst_structure_get_int(structure, "width", &size.rwidth());
-        gst_structure_get_int(structure, "height", &size.rheight());
+        if (sink->lastRequestedCaps)
+            gst_caps_unref(sink->lastRequestedCaps);
+        sink->lastRequestedCaps = 0;
 
-        if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
-            guint32 fourcc = 0;
-            gst_structure_get_fourcc(structure, "format", &fourcc);
+        //qDebug() << "Staring video surface:";
+        //qDebug() << format;
+        //qDebug() << bytesPerLine;
 
-            int index = indexOfYuvColor(fourcc);
-            if (index != -1) {
-                pixelFormat = qt_yuvColorLookup[index].pixelFormat;
-                bitsPerPixel = qt_yuvColorLookup[index].bitsPerPixel;
-            }
-        } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
-            int depth = 0;
-            int endianness = 0;
-            int red = 0;
-            int green = 0;
-            int blue = 0;
-            int alpha = 0;
-
-            gst_structure_get_int(structure, "bpp", &bitsPerPixel);
-            gst_structure_get_int(structure, "depth", &depth);
-            gst_structure_get_int(structure, "endianness", &endianness);
-            gst_structure_get_int(structure, "red_mask", &red);
-            gst_structure_get_int(structure, "green_mask", &green);
-            gst_structure_get_int(structure, "blue_mask", &blue);
-            gst_structure_get_int(structure, "alpha_mask", &alpha);
-
-            int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
-
-            if (index != -1)
-                pixelFormat = qt_rgbColorLookup[index].pixelFormat;
-        }
+        if (sink->delegate->start(format, bytesPerLine))
+            return TRUE;
 
-        if (pixelFormat != QVideoFrame::Format_Invalid) {
-            QVideoSurfaceFormat format(size, pixelFormat);
+    }
 
-            QPair<int, int> rate;
-            gst_structure_get_fraction(structure, "framerate", &rate.first, &rate.second);
"framerate", &rate.first, &rate.second); + return FALSE; +} - if (rate.second) - format.setFrameRate(qreal(rate.first)/rate.second); +QVideoSurfaceFormat QVideoSurfaceGstSink::formatForCaps(GstCaps *caps, int *bytesPerLine) +{ + const GstStructure *structure = gst_caps_get_structure(caps, 0); - gint aspectNum = 0; - gint aspectDenum = 0; - if (gst_structure_get_fraction( - structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) { - if (aspectDenum > 0) - format.setPixelAspectRatio(aspectNum, aspectDenum); - } + QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid; + int bitsPerPixel = 0; - int bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3; + QSize size; + gst_structure_get_int(structure, "width", &size.rwidth()); + gst_structure_get_int(structure, "height", &size.rheight()); - if (sink->delegate->start(format, bytesPerLine)) - return TRUE; + if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) { + guint32 fourcc = 0; + gst_structure_get_fourcc(structure, "format", &fourcc); + + int index = indexOfYuvColor(fourcc); + if (index != -1) { + pixelFormat = qt_yuvColorLookup[index].pixelFormat; + bitsPerPixel = qt_yuvColorLookup[index].bitsPerPixel; } + } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) { + int depth = 0; + int endianness = 0; + int red = 0; + int green = 0; + int blue = 0; + int alpha = 0; + + gst_structure_get_int(structure, "bpp", &bitsPerPixel); + gst_structure_get_int(structure, "depth", &depth); + gst_structure_get_int(structure, "endianness", &endianness); + gst_structure_get_int(structure, "red_mask", &red); + gst_structure_get_int(structure, "green_mask", &green); + gst_structure_get_int(structure, "blue_mask", &blue); + gst_structure_get_int(structure, "alpha_mask", &alpha); + + int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha); + + if (index != -1) + pixelFormat = qt_rgbColorLookup[index].pixelFormat; + } + + if (pixelFormat != QVideoFrame::Format_Invalid) { + QVideoSurfaceFormat format(size, pixelFormat); + + QPair<int, int> rate; + gst_structure_get_fraction(structure, "framerate", &rate.first, &rate.second); + + if (rate.second) + format.setFrameRate(qreal(rate.first)/rate.second); + + gint aspectNum = 0; + gint aspectDenum = 0; + if (gst_structure_get_fraction( + structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) { + if (aspectDenum > 0) + format.setPixelAspectRatio(aspectNum, aspectDenum); + } + + if (bytesPerLine) + *bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3; + return format; } - return FALSE; + return QVideoSurfaceFormat(); } + GstFlowReturn QVideoSurfaceGstSink::buffer_alloc( GstBaseSink *base, guint64 offset, guint size, GstCaps *caps, GstBuffer **buffer) { - // Should implement this when the buffer pool situation is sorted. 
-    Q_UNUSED(base);
+    VO_SINK(base);
+
     Q_UNUSED(offset);
     Q_UNUSED(size);
-    Q_UNUSED(caps);
-    Q_UNUSED(buffer);
-    return GST_FLOW_NOT_SUPPORTED;
+    *buffer = 0;
+
+    if (sink->lastRequestedCaps && gst_caps_is_equal(sink->lastRequestedCaps, caps)) {
+        //qDebug() << "reusing last caps";
+        *buffer = GST_BUFFER(sink->pool->takeBuffer(*sink->lastSurfaceFormat, sink->lastBufferCaps));
+        return GST_FLOW_OK;
+    }
+
+    if (sink->delegate->supportedPixelFormats(XvHandleType).isEmpty()) {
+        //qDebug() << "sink doesn't support Xv buffers, skip buffers allocation";
+        return GST_FLOW_OK;
+    }
+
+    GstCaps *intersection = gst_caps_intersect(get_caps(GST_BASE_SINK(sink)), caps);
+
+    if (gst_caps_is_empty (intersection)) {
+        gst_caps_unref(intersection);
+        return GST_FLOW_NOT_NEGOTIATED;
+    }
+
+    if (sink->delegate->isActive()) {
+        //if format was changed, restart the surface
+        QVideoSurfaceFormat format = formatForCaps(intersection);
+        QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();
+
+        if (format.pixelFormat() != surfaceFormat.pixelFormat() ||
+            format.frameSize() != surfaceFormat.frameSize()) {
+            //qDebug() << "new format requested, restart video surface";
+            sink->delegate->stop();
+        }
+    }
+
+    if (!sink->delegate->isActive()) {
+        int bytesPerLine = 0;
+        QVideoSurfaceFormat format = formatForCaps(intersection, &bytesPerLine);
+
+        if (!sink->delegate->start(format, bytesPerLine)) {
+            qDebug() << "failed to start video surface";
+            return GST_FLOW_NOT_NEGOTIATED;
+        }
+    }
+
+    QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();
+
+    if (!sink->pool->isFormatSupported(surfaceFormat)) {
+        //qDebug() << "sink doesn't provide Xv buffer details, skip buffers allocation";
+        return GST_FLOW_OK;
+    }
+
+    if (sink->lastRequestedCaps)
+        gst_caps_unref(sink->lastRequestedCaps);
+    sink->lastRequestedCaps = caps;
+    gst_caps_ref(sink->lastRequestedCaps);
+
+    if (sink->lastBufferCaps)
+        gst_caps_unref(sink->lastBufferCaps);
+    sink->lastBufferCaps = intersection;
+    gst_caps_ref(sink->lastBufferCaps);
+
+    *sink->lastSurfaceFormat = surfaceFormat;
+
+    *buffer = GST_BUFFER(sink->pool->takeBuffer(surfaceFormat, intersection));
+
+    return GST_FLOW_OK;
 }
 
 gboolean QVideoSurfaceGstSink::start(GstBaseSink *base)
diff --git a/src/plugins/mediaservices/gstreamer/qvideosurfacegstsink.h b/src/plugins/mediaservices/gstreamer/qvideosurfacegstsink.h
index f02f9a5..f59a43c 100644
--- a/src/plugins/mediaservices/gstreamer/qvideosurfacegstsink.h
+++ b/src/plugins/mediaservices/gstreamer/qvideosurfacegstsink.h
@@ -50,6 +50,7 @@
 #include <QtCore/qwaitcondition.h>
 #include <QtMultimedia/qvideosurfaceformat.h>
 #include <QtMultimedia/qvideoframe.h>
+#include <QtMultimedia/qabstractvideobuffer.h>
 
 QT_BEGIN_HEADER
 
@@ -58,17 +59,27 @@ QT_BEGIN_NAMESPACE
 
 class QAbstractVideoSurface;
 
+class QGstXvImageBuffer;
+class QGstXvImageBufferPool;
+
+
 class QVideoSurfaceGstDelegate : public QObject
 {
     Q_OBJECT
 public:
     QVideoSurfaceGstDelegate(QAbstractVideoSurface *surface);
 
-    QList<QVideoFrame::PixelFormat> supportedPixelFormats() const;
+    QList<QVideoFrame::PixelFormat> supportedPixelFormats(
+            QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle) const;
+
+    QVideoSurfaceFormat surfaceFormat() const;
+
     bool start(const QVideoSurfaceFormat &format, int bytesPerLine);
     void stop();
 
+    bool isActive();
+
     GstFlowReturn render(GstBuffer *buffer);
 
 private slots:
@@ -97,6 +108,7 @@ public:
     GstVideoSink parent;
 
     static QVideoSurfaceGstSink *createSink(QAbstractVideoSurface *surface);
+    static QVideoSurfaceFormat formatForCaps(GstCaps *caps, int *bytesPerLine = 0);
 
private:
     static GType get_type();
 
@@ -125,6 +137,10 @@ private:
     QVideoSurfaceGstDelegate *delegate;
+    QGstXvImageBufferPool *pool;
+    GstCaps *lastRequestedCaps;
+    GstCaps *lastBufferCaps;
+    QVideoSurfaceFormat *lastSurfaceFormat;
 };
 
diff --git a/src/plugins/mediaservices/gstreamer/qx11videosurface.cpp b/src/plugins/mediaservices/gstreamer/qx11videosurface.cpp
index 30b7e55..7a98384 100644
--- a/src/plugins/mediaservices/gstreamer/qx11videosurface.cpp
+++ b/src/plugins/mediaservices/gstreamer/qx11videosurface.cpp
@@ -39,11 +39,17 @@
 **
 ****************************************************************************/
 
+#include <QtCore/qvariant.h>
+#include <QtCore/qdebug.h>
 #include <QtGui/qx11info_x11.h>
 #include <QtMultimedia/qvideosurfaceformat.h>
 
 #include "qx11videosurface.h"
 
+Q_DECLARE_METATYPE(XvImage*);
+
+static QAbstractVideoBuffer::HandleType XvHandleType = QAbstractVideoBuffer::HandleType(4);
+
 struct XvFormatRgb
 {
     QVideoFrame::PixelFormat pixelFormat;
 
@@ -284,7 +290,7 @@ int QX11VideoSurface::redistribute(
 QList<QVideoFrame::PixelFormat> QX11VideoSurface::supportedPixelFormats(
         QAbstractVideoBuffer::HandleType handleType) const
 {
-    return handleType == QAbstractVideoBuffer::NoHandle
+    return handleType == QAbstractVideoBuffer::NoHandle || handleType == XvHandleType
             ? m_supportedPixelFormats
             : QList<QVideoFrame::PixelFormat>();
 }
 
@@ -319,7 +325,12 @@ bool QX11VideoSurface::start(const QVideoSurfaceFormat &format)
         m_viewport = format.viewport();
         m_image = image;
 
-        return QAbstractVideoSurface::start(format);
+        QVideoSurfaceFormat newFormat = format;
+        newFormat.setProperty("portId", QVariant(quint64(m_portId)));
+        newFormat.setProperty("xvFormatId", xvFormatId);
+        newFormat.setProperty("dataSize", image->data_size);
+
+        return QAbstractVideoSurface::start(newFormat);
     }
 }
 
@@ -359,31 +370,57 @@ bool QX11VideoSurface::present(const QVideoFrame &frame)
     } else {
         bool presented = false;
 
-        if (m_image->data_size > frame.mappedBytes()) {
+        if (frame.handleType() != XvHandleType &&
+            m_image->data_size > frame.mappedBytes()) {
             qWarning("Insufficient frame buffer size");
             setError(IncorrectFormatError);
-        } else if (m_image->num_planes > 0 && m_image->pitches[0] != frame.bytesPerLine()) {
+        } else if (frame.handleType() != XvHandleType &&
+                   m_image->num_planes > 0 &&
+                   m_image->pitches[0] != frame.bytesPerLine()) {
             qWarning("Incompatible frame pitches");
             setError(IncorrectFormatError);
         } else {
-            m_image->data = reinterpret_cast<char *>(frameCopy.bits());
-
-            XvPutImage(
-                    QX11Info::display(),
-                    m_portId,
-                    m_winId,
-                    m_gc,
-                    m_image,
-                    m_viewport.x(),
-                    m_viewport.y(),
-                    m_viewport.width(),
-                    m_viewport.height(),
-                    m_displayRect.x(),
-                    m_displayRect.y(),
-                    m_displayRect.width(),
-                    m_displayRect.height());
-
-            m_image->data = 0;
+            if (frame.handleType() != XvHandleType) {
+                m_image->data = reinterpret_cast<char *>(frameCopy.bits());
+
+                //qDebug() << "copy frame";
+                XvPutImage(
+                        QX11Info::display(),
+                        m_portId,
+                        m_winId,
+                        m_gc,
+                        m_image,
+                        m_viewport.x(),
+                        m_viewport.y(),
+                        m_viewport.width(),
+                        m_viewport.height(),
+                        m_displayRect.x(),
+                        m_displayRect.y(),
+                        m_displayRect.width(),
+                        m_displayRect.height());
+
+                m_image->data = 0;
+            } else {
+                XvImage *img = frame.handle().value<XvImage*>();
+
+                //qDebug() << "render directly";
+                if (img)
+                    XvShmPutImage(
+                            QX11Info::display(),
+                            m_portId,
+                            m_winId,
+                            m_gc,
+                            img,
+                            m_viewport.x(),
+                            m_viewport.y(),
+                            m_viewport.width(),
+                            m_viewport.height(),
+                            m_displayRect.x(),
+                            m_displayRect.y(),
+                            m_displayRect.width(),
+                            m_displayRect.height(),
+                            false);
+            }
             presented = true;
         }