m_textureUploaded = false;
}
}
m_checkedPrograms = false;
}
#endif // QT_NO_OPENGL
void freeResources()
{
QMutexLocker locker(&m_mutex);
m_sampleBuffer = ComPointer<IMediaSample>();
#ifndef QT_NO_OPENGL
freeGLResources();
#endif // QT_NO_OPENGL
m_textureUploaded = false;
}
void endOfStream()
{
//received from the input pin
::SetEvent(m_receiveCanWait); //unblocks the flow
//we send the message to the graph
ComPointer<IMediaEventSink> sink(graph(), IID_IMediaEventSink);
if (sink) {
sink->Notify(EC_COMPLETE, S_OK,
reinterpret_cast<LONG_PTR>(static_cast<IBaseFilter*>(this)));
}
}
void freeMediaSample()
{
QMutexLocker locker(&m_mutex);
m_sampleBuffer = ComPointer<IMediaSample>();
}
void beginFlush()
{
freeMediaSample();
::SetEvent(m_receiveCanWait); //unblocks the flow
}
void endFlush()
{
if (m_inputPin->connected() == 0) {
::SetEvent(m_receiveCanWait); //unblock the flow in receive
} else {
::ResetEvent(m_receiveCanWait); //block the flow again
}
}
STDMETHODIMP Stop()
{
HRESULT hr = QBaseFilter::Stop();
beginFlush();
return hr;
}
STDMETHODIMP Pause()
{
HRESULT hr = QBaseFilter::Pause();
if (m_inputPin->connected() == 0) {
::SetEvent(m_receiveCanWait); //unblock the flow in receive
} else {
::ResetEvent(m_receiveCanWait); //this will block
}
return hr;
}
STDMETHODIMP Run(REFERENCE_TIME start)
{
HRESULT hr = QBaseFilter::Run(start);
m_start = start;
if (m_inputPin->connected() == 0) {
endOfStream();
} else {
::SetEvent(m_receiveCanWait); //unblocks the flow (the event gets reset and blocks again later)
}
#ifdef FPS_COUNTER
fpsTime.restart();
nbFramesProcessed = 0;
nbFramesDisplayed = 0;
#endif
return hr;
}
HRESULT processSample(IMediaSample *sample);
void applyMixerSettings(qreal brightness, qreal contrast, qreal hue, qreal saturation)
{
//let's normalize the values
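//Phonon passes each setting in the range [-1., 1.] with 0. as the neutral
//value; we map them to the ranges the conversion code and the fragment
//programs expect: brightness -> [-128, 128], contrast -> [0., 2.],
//hue -> [-pi, pi], saturation -> [0., 2.]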
m_brightness = brightness * 128;
m_contrast = contrast + 1.;
m_hue = hue * M_PI;
m_saturation = saturation + 1.;
}
QImage currentImage() const
{
return m_currentImage;
}
void setCurrentImage(const QImage &image)
{
QMutexLocker locker(&m_mutex);
m_currentImage = image;
}
//the following function is called from the GUI thread
void repaintCurrentFrame(QPainter &painter, const QRect &r);
protected:
static void convertYV12toRGB(const uchar *data, const QSize &s, QImage &dest,
qreal brightness, qreal contrast, qreal hue, qreal saturation);
static void convertYUY2toRGB(const uchar *data, const QSize &s, QImage &dest,
qreal brightness, qreal contrast, qreal hue, qreal saturation);
static void normalizeRGB(const uchar *data, const QSize &s, QImage &destImage);
private:
QPin *const m_inputPin;
ComPointer<IMediaSample> m_sampleBuffer;
QImage m_currentImage;
VideoRendererSoft *m_renderer;
mutable QMutex m_mutex;
REFERENCE_TIME m_start;
HANDLE m_renderEvent, //signaled when a sample is due for rendering
m_receiveCanWait; //when signaled, unblocks the flow in processSample
QSize m_size;
bool m_textureUploaded;
//mixer settings
qreal m_brightness,
m_contrast,
m_hue,
m_saturation;
#ifdef FPS_COUNTER
QTime fpsTime;
int nbFramesProcessed;
int nbFramesDisplayed;
#endif
#ifndef QT_NO_OPENGL
enum Program
{
YV12toRGB = 0,
YUY2toRGB = 1,
ProgramCount = 2
};
void updateTexture();
bool checkGLPrograms();
// ARB_fragment_program
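//on Windows these extension entry points are not exported by opengl32.dll,
//so they have to be resolved at run time through wglGetProcAddress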
typedef void (APIENTRY *_glProgramStringARB) (GLenum, GLenum, GLsizei, const GLvoid *);
typedef void (APIENTRY *_glBindProgramARB) (GLenum, GLuint);
typedef void (APIENTRY *_glDeleteProgramsARB) (GLsizei, const GLuint *);
typedef void (APIENTRY *_glGenProgramsARB) (GLsizei, GLuint *);
typedef void (APIENTRY *_glProgramLocalParameter4fARB) (GLenum, GLuint, GLfloat, GLfloat, GLfloat, GLfloat);
typedef void (APIENTRY *_glActiveTexture) (GLenum);
_glProgramStringARB glProgramStringARB;
_glBindProgramARB glBindProgramARB;
_glDeleteProgramsARB glDeleteProgramsARB;
_glGenProgramsARB glGenProgramsARB;
_glProgramLocalParameter4fARB glProgramLocalParameter4fARB;
_glActiveTexture glActiveTexture;
bool m_checkedPrograms;
bool m_usingOpenGL;
GLuint m_program[ProgramCount];
GLuint m_texture[3];
#endif
};
class VideoRendererSoftPin : public QMemInputPin
{
public:
VideoRendererSoftPin(VideoRendererSoftFilter *parent) :
QMemInputPin(parent, videoMediaTypes(), false /*no transformation of the samples*/),
m_renderer(parent)
{
}
STDMETHODIMP EndOfStream()
{
m_renderer->endOfStream();
return QMemInputPin::EndOfStream();
}
STDMETHODIMP ReceiveCanBlock()
{
//yes, it can block
return S_OK;
}
STDMETHODIMP BeginFlush()
{
m_renderer->beginFlush();
return QMemInputPin::BeginFlush();
}
STDMETHODIMP EndFlush()
{
m_renderer->endFlush();
return QMemInputPin::EndFlush();
}
STDMETHODIMP GetAllocatorRequirements(ALLOCATOR_PROPERTIES *prop)
{
if (!prop) {
return E_POINTER;
}
//we need 2 buffers
prop->cBuffers = 2;
return S_OK;
}
STDMETHODIMP NotifyAllocator(IMemAllocator *alloc, BOOL readonly)
{
if (!alloc) {
return E_POINTER;
}
ALLOCATOR_PROPERTIES prop;
HRESULT hr = alloc->GetProperties(&prop);
if (SUCCEEDED(hr) && prop.cBuffers == 1) {
//we ask to get 2 buffers so that we don't block the flow
//when we addref the mediasample
prop.cBuffers = 2;
ALLOCATOR_PROPERTIES dummy;
alloc->SetProperties(&prop, &dummy);
}
return QMemInputPin::NotifyAllocator(alloc, readonly);
}
private:
VideoRendererSoftFilter * const m_renderer;
};
VideoRendererSoftFilter::VideoRendererSoftFilter(VideoRendererSoft *renderer) :
QBaseFilter(CLSID_NULL), m_inputPin(new VideoRendererSoftPin(this)),
m_renderer(renderer), m_start(0), m_textureUploaded(false)
#ifndef QT_NO_OPENGL
, m_checkedPrograms(false), m_usingOpenGL(false)
#endif
{
m_renderEvent = ::CreateEvent(0, 0, 0, 0);
m_receiveCanWait = ::CreateEvent(0, 0, 0, 0);
//initialize the mixer settings with their neutral values
applyMixerSettings(0., 0., 0., 0.);
}
VideoRendererSoftFilter::~VideoRendererSoftFilter()
{
::CloseHandle(m_renderEvent);
::CloseHandle(m_receiveCanWait);
//this frees up resources
freeResources();
}
QSize VideoRendererSoftFilter::videoSize() const
{
QSize ret;
const AM_MEDIA_TYPE &mt = m_inputPin->connectedType();
if (mt.pbFormat && mt.cbFormat) {
if (mt.formattype == FORMAT_VideoInfo) {
const VIDEOINFOHEADER *header = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
const int h = qAbs(header->bmiHeader.biHeight),
w = qAbs(header->bmiHeader.biWidth);
ret = QSize(w, h);
} else if (mt.formattype == FORMAT_VideoInfo2) {
const VIDEOINFOHEADER2 *header = reinterpret_cast<VIDEOINFOHEADER2*>(mt.pbFormat);
const int h = qAbs(header->bmiHeader.biHeight),
w = qAbs(header->bmiHeader.biWidth);
ret = QSize(w, h);
}
}
return ret;
}
HRESULT VideoRendererSoftFilter::processSample(IMediaSample *sample)
{
#ifdef FPS_COUNTER
if (fpsTime.elapsed() > 1000) {
qDebug("FPS_COUNTER: processed=%d, displayed=%d (%d)", nbFramesProcessed, nbFramesDisplayed, fpsTime.elapsed());
nbFramesProcessed = 0;
nbFramesDisplayed = 0;
fpsTime.restart();
}
#endif
AM_MEDIA_TYPE *type = 0;
if (sample->GetMediaType(&type) == S_OK) {
//let's update the media type of the input pin
m_inputPin->setConnectedType(*type);
}
const AM_MEDIA_TYPE &mt = m_inputPin->connectedType();
if (mt.pbFormat == 0 || mt.cbFormat == 0) {
return VFW_E_INVALIDMEDIATYPE;
}
m_size = videoSize();
if (!m_size.isValid()) {
return VFW_E_INVALIDMEDIATYPE;
}
#ifdef FPS_COUNTER
nbFramesProcessed++;
#endif
REFERENCE_TIME start = 0, stop = 0;
HRESULT hr = sample->GetTime(&start, &stop);
ComPointer<IReferenceClock> clock;
GetSyncSource(clock.pparam());
const bool playing = SUCCEEDED(hr) && state() == State_Running && clock;
if (playing) {
REFERENCE_TIME current;
clock->GetTime(&current);
DWORD_PTR advise;
//let's synchronize here
clock->AdviseTime(m_start, start,
reinterpret_cast<HEVENT>(m_renderEvent), &advise);
HANDLE handles[] = {m_receiveCanWait, m_renderEvent};
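//wait until either the presentation time of this sample arrives
//(m_renderEvent, signaled by the reference clock via AdviseTime) or the
//flow is released because we are stopping, flushing or restarting
//(m_receiveCanWait)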
if (::WaitForMultipleObjects(2, handles, false, INFINITE) == WAIT_OBJECT_0) {
if (state() != State_Stopped && !m_inputPin->isFlushing()) {
::ResetEvent(m_receiveCanWait);
}
}
}
//let's lock the sample so it can be used in the GUI thread
{
QMutexLocker locker(&m_mutex);
sample->AddRef();
m_sampleBuffer = ComPointer<IMediaSample>(sample);
}
//the image has been updated: ask the widget to repaint itself
//never call the target's members directly from this thread; post an event instead
QApplication::postEvent(m_renderer, new QEvent(QEvent::UpdateRequest));
if (!playing) {
//no need to check the return value: with an INFINITE timeout the wait cannot time out
::WaitForSingleObject(m_receiveCanWait, INFINITE);
if (state() != State_Stopped && !m_inputPin->isFlushing()) {
::ResetEvent(m_receiveCanWait);
}
}
//everything should be ok
return S_OK;
}
#ifndef QT_NO_OPENGL
bool VideoRendererSoftFilter::checkGLPrograms()
{
if (!m_checkedPrograms) {
m_checkedPrograms = true;
glProgramStringARB = (_glProgramStringARB) wglGetProcAddress("glProgramStringARB");
glBindProgramARB = (_glBindProgramARB) wglGetProcAddress("glBindProgramARB");
glDeleteProgramsARB = (_glDeleteProgramsARB) wglGetProcAddress("glDeleteProgramsARB");
glGenProgramsARB = (_glGenProgramsARB) wglGetProcAddress("glGenProgramsARB");
glProgramLocalParameter4fARB = (_glProgramLocalParameter4fARB) wglGetProcAddress("glProgramLocalParameter4fARB");
glActiveTexture = (_glActiveTexture) wglGetProcAddress("glActiveTexture");
//we only check once whether all the required extension functions are available
if (glProgramStringARB && glBindProgramARB && glDeleteProgramsARB &&
glGenProgramsARB && glActiveTexture && glProgramLocalParameter4fARB) {
glGenProgramsARB(ProgramCount, m_program);
const char *code[] = {yv12ToRgb, yuy2ToRgb};
bool error = false;
for(int i = 0; i < ProgramCount && !error; ++i) {
glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program[i]);
const GLbyte *gl_src = reinterpret_cast<const GLbyte *>(code[i]);
glProgramStringARB(GL_FRAGMENT_PROGRAM_ARB, GL_PROGRAM_FORMAT_ASCII_ARB,
strlen(code[i]), gl_src);
if (glGetError() != GL_NO_ERROR) {
error = true;
}
}
if (error) {
glDeleteProgramsARB(ProgramCount, m_program);
} else {
//compilation succeeded: use the OpenGL path if the connected
//subtype is one we have a fragment program for (YV12 or YUY2)
m_usingOpenGL = m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12
|| m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2;
//those "textures" will be used as byte streams
//to pass Y, U and V data to the graphics card
glGenTextures(3, m_texture);
}
}
}
return m_usingOpenGL;
}
void VideoRendererSoftFilter::updateTexture()
{
if (!m_sampleBuffer) {
return; //the texture is already up to date or there is no data yet
}
uchar *data = 0;
m_sampleBuffer->GetPointer(&data);
if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) {
int w[3] = { m_size.width(), m_size.width()/2, m_size.width()/2 };
int h[3] = { m_size.height(), m_size.height()/2, m_size.height()/2 };
int offs[3] = { 0, m_size.width()*m_size.height(), m_size.width()*m_size.height()*5/4 };
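//YV12 is planar: a full-resolution Y plane followed by quarter-resolution
//V and U planes, hence the offsets 0, w*h and w*h*5/4; each plane is
//uploaded as its own single-channel (luminance) texture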
for (int i = 0; i < 3; ++i) {
glBindTexture(GL_TEXTURE_2D, m_texture[i]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w[i], h[i], 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, data + offs[i]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
}
} else { //m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2
//we upload 1 texture
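//YUY2 is packed: every 4 bytes (Y0 U Y1 V) describe two horizontal pixels,
//so the buffer maps onto an RGBA texture of half the width; the fragment
//program is then expected to unpack the two luma samples from each texel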
glBindTexture(GL_TEXTURE_2D, m_texture[0]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_size.width() / 2, m_size.height(), 0,
GL_RGBA, GL_UNSIGNED_BYTE, data);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
}
m_sampleBuffer = ComPointer<IMediaSample>();
m_textureUploaded = true;
}
#endif
void VideoRendererSoftFilter::repaintCurrentFrame(QPainter &painter, const QRect &r)
{
QMutexLocker locker(&m_mutex);
#ifdef FPS_COUNTER
nbFramesDisplayed++;
#endif
#ifndef QT_NO_OPENGL
if (painter.paintEngine() && painter.paintEngine()->type() == QPaintEngine::OpenGL && checkGLPrograms()) {
//for now we only support YUV (both YV12 and YUY2)
updateTexture();
if (!m_textureUploaded) {
//no frame data yet: we simply fill the whole video rect
//with the brush the caller has already set
painter.drawRect(r);
return;
}
//draw the texture, passing the mixer settings to the fragment program
const Program prog = (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) ? YV12toRGB : YUY2toRGB;
glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program[prog]);
//loading the parameters
glProgramLocalParameter4fARB(GL_FRAGMENT_PROGRAM_ARB, 0, m_brightness / 256., m_contrast, qCos(m_hue), qSin(m_hue));
glProgramLocalParameter4fARB(GL_FRAGMENT_PROGRAM_ARB, 1, m_saturation, painter.opacity() /*alpha */, 0. /*dummy*/, 0. /*dummy*/);
glEnable(GL_FRAGMENT_PROGRAM_ARB);
const float v_array[] = { r.left(), r.top(), r.right()+1, r.top(), r.right()+1, r.bottom()+1, r.left(), r.bottom()+1 };
float tx_array[12] = {0., 0., 0., 1.,
0., 0., 1., 1.,
0., 0., 1., 0.};
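//the third texture coordinate is unused for YV12; for YUY2 the s coordinate
//is switched to texel units (0..w) and the third component carries iw = 1/w,
//presumably letting the fragment program locate and unpack the packed
//luma/chroma samples within each texel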
if (prog == YUY2toRGB) {
const float w = m_size.width() / 2,
iw = 1. / w;
tx_array[3] = w;
tx_array[6] = w;
for (int i = 0; i < 4; ++i) {
tx_array[3*i + 2] = iw;
}
}
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_texture[0]);
if (prog == YV12toRGB) {
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, m_texture[2]);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, m_texture[1]);
glActiveTexture(GL_TEXTURE0);
}
glVertexPointer(2, GL_FLOAT, 0, v_array);
glTexCoordPointer(3, GL_FLOAT, 0, tx_array);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glDrawArrays(GL_QUADS, 0, 4);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
glDisable(GL_FRAGMENT_PROGRAM_ARB);
return;
} else
#endif
if (m_sampleBuffer) {
//we need to get the sample data
uchar *data = 0;
m_sampleBuffer->GetPointer(&data);
//let's update the current image
if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) {
convertYV12toRGB(data, m_size, m_currentImage,
m_brightness, m_contrast, m_hue, m_saturation);
} else if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2) {
convertYUY2toRGB(data, m_size, m_currentImage,
m_brightness, m_contrast, m_hue, m_saturation);
} else if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_RGB32) {
normalizeRGB(data, m_size, m_currentImage);
}
m_sampleBuffer = ComPointer<IMediaSample>();
}
if (m_currentImage.isNull()) {
//no frame to show: we simply fill the whole video rect
//with the brush the caller has already set
painter.drawRect(r);
} else {
painter.drawImage(0, 0, m_currentImage);
}
}
void VideoRendererSoftFilter::normalizeRGB(const uchar *data, const QSize &s, QImage &destImage)
{
const int w = s.width(),
h = s.height();
if (destImage.size() != s) {
destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
}
if (destImage.isNull()) {
return; //the system can't allocate the memory for the image drawing
}
const QRgb *rgb = reinterpret_cast<const QRgb*>(data);
//this sets the alpha channel to 0xff and flips the image vertically
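//DirectShow RGB32 samples are bottom-up DIBs (positive biHeight): the first
//row in memory is the bottom line of the picture, so copying the rows into
//destImage from the last scan line upwards restores the normal orientation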
for (int y = h - 1; y >= 0; --y) {
QRgb *dest = reinterpret_cast<QRgb*>(destImage.scanLine(y));
for(int i = w; i > 0; --i, ++rgb, ++dest) {
*dest = *rgb | (0xff << 24); //we force the alpha channel to 0xff
}
}
}
//converts data interpreted as YV12 into an RGB32 image (destImage)
void VideoRendererSoftFilter::convertYV12toRGB(const uchar *data, const QSize &s, QImage &destImage,
qreal brightness, qreal contrast, qreal hue, qreal saturation)
{
const int w = s.width(),
h = s.height();
//let's cache some computation
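//the integer coefficients come from the standard BT.601 YUV->RGB conversion
//(see the MSDN article referenced in convertYUY2toRGB):
//  R = clip((298 * (Y-16) + 409 * (V-128) + 128) >> 8)
//  G = clip((298 * (Y-16) - 100 * (U-128) - 208 * (V-128) + 128) >> 8)
//  B = clip((298 * (Y-16) + 516 * (U-128) + 128) >> 8)
//brightness and contrast are folded into the Yvalue lookup table, while hue
//and saturation are folded into the rotated and scaled chroma factors below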
const int cosHx256 = qRound(qCos(hue) * contrast * saturation * 256),
sinHx256 = qRound(qSin(hue) * contrast * saturation * 256);
int Yvalue[256];
for (int i = 0; i < 256; ++i) {
Yvalue[i] = qRound(((i - 16) * contrast + brightness) * 298 + 128);
}
if (destImage.size() != s) {
destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
}
if (destImage.isNull()) {
return; //the system can't allocate the memory for the image drawing
}
QRgb *dest = reinterpret_cast<QRgb*>(destImage.bits());
const uchar *dataY = data,
*dataV = data + (w*h),
*dataU = dataV + (w*h)/4;
uint *line1 = dest,
*line2 = dest + w;
for(int l = (h >> 1); l > 0; --l) {
//we treat 2 lines by 2 lines
for(int x = (w >> 1); x > 0; --x) {
const int u = *dataU++ - 128,
v = *dataV++ - 128;
const int d = (u * cosHx256 + v * sinHx256) >> 8,
e = (v * cosHx256 + u * sinHx256) >> 8;
const int compRed = 409 * e,
compGreen = -100 * d - 208 * e,
compBlue = 516 * d;
const int y21 = Yvalue[ dataY[w] ],
y11 = Yvalue[ *dataY++ ],
y22 = Yvalue[ dataY[w] ],
y12 = Yvalue[ *dataY++ ];
//1st line 1st pixel
*line1++ = CLIPPED_PIXEL(y11, compRed, compGreen, compBlue);
//1st line, 2nd pixel
*line1++ = CLIPPED_PIXEL(y12, compRed, compGreen, compBlue);
//2nd line 1st pixel
*line2++ = CLIPPED_PIXEL(y21, compRed, compGreen, compBlue);
//2nd line 2nd pixel
*line2++ = CLIPPED_PIXEL(y22, compRed, compGreen, compBlue);
} //for
//end of the line
dataY += w;
line1 = line2;
line2 += w;
} //for
}
//converts data interpreted as YUY2 into an RGB32 image (destImage)
void VideoRendererSoftFilter::convertYUY2toRGB(const uchar *data, const QSize &s, QImage &destImage,
qreal brightness, qreal contrast, qreal hue, qreal saturation)
{
const int w = s.width(),
h = s.height();
//let's cache some computation
int Yvalue[256];
for (int i = 0; i < 256; ++i) {
Yvalue[i] = qRound(((i - 16) * contrast + brightness) * 298 + 128);
}
const int cosHx256 = qRound(qCos(hue) * contrast * saturation * 256),
sinHx256 = qRound(qSin(hue) * contrast * saturation * 256);
if (destImage.size() != s) {
//this will only allocate memory when needed
destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
}
if (destImage.isNull()) {
return; //the system can't allocate the memory for the image drawing
}
QRgb *dest = reinterpret_cast<QRgb*>(destImage.bits());
//the number of iterations is width * height / 2 because we process 2 pixels per iteration
for (int c = w * h / 2; c > 0 ; --c) {
//the algorithm is based on
//http://msdn2.microsoft.com/en-us/library/ms867704.aspx#yuvformats_identifying_yuv_formats_in_directshow
//we process 2 pixels at a time, reading one 4-byte "YUYV" macropixel
const int y1 = Yvalue[*data++],
u = *data++ - 128,
y2 = Yvalue[*data++],
v = *data++ - 128;
const int d = (u * cosHx256 + v * sinHx256) >> 8,
e = (v * cosHx256 + u * sinHx256) >> 8;
const int compRed = 409 * e,
compGreen = -100 * d - 208 * e,
compBlue = 516 * d;
//first pixel
*dest++ = CLIPPED_PIXEL(y1, compRed, compGreen, compBlue);
//second pixel
*dest++ = CLIPPED_PIXEL(y2, compRed, compGreen, compBlue);
}
}
VideoRendererSoft::VideoRendererSoft(QWidget *target) :
m_renderer(new VideoRendererSoftFilter(this)), m_target(target)
{
m_filter = Filter(m_renderer);
}
VideoRendererSoft::~VideoRendererSoft()
{
}
bool VideoRendererSoft::isNative() const
{
return false;
}
void VideoRendererSoft::repaintCurrentFrame(QWidget *target, const QRect &rect)
{
QPainter painter(target);
QColor backColor = target->palette().color(target->backgroundRole());
painter.setBrush(backColor);
painter.setPen(Qt::NoPen);
if (!m_videoRect.contains(rect)) {
//we repaint the borders only when needed
const QVector<QRect> reg = (QRegion(rect) - m_videoRect).rects();
for (int i = 0; i < reg.count(); ++i) {
painter.drawRect(reg.at(i));
}
}
painter.setRenderHint(QPainter::SmoothPixmapTransform);
painter.setTransform(m_transform, true);
QSize vsize = videoSize();
m_renderer->repaintCurrentFrame(painter, QRect(0,0, vsize.width(), vsize.height()));
}
void VideoRendererSoft::notifyResize(const QSize &size,
Phonon::VideoWidget::AspectRatio aspectRatio, Phonon::VideoWidget::ScaleMode scaleMode)
{
const QSize vsize = videoSize();
internalNotifyResize(size, vsize, aspectRatio, scaleMode);
m_transform.reset();
if (vsize.isValid() && size.isValid()) {
m_transform.translate(m_dstX, m_dstY);
const qreal sx = qreal(m_dstWidth) / qreal(vsize.width()),
sy = qreal(m_dstHeight) / qreal(vsize.height());
m_transform.scale(sx, sy);
m_videoRect = m_transform.mapRect( QRect(0,0, vsize.width(), vsize.height()));
}
}
QSize VideoRendererSoft::videoSize() const
{
if (m_renderer->pins().first()->connected()) {
return m_renderer->videoSize();
} else {
return m_renderer->currentImage().size();
}
}
void VideoRendererSoft::applyMixerSettings(qreal brightness, qreal contrast, qreal hue, qreal saturation)
{
m_renderer->applyMixerSettings(brightness, contrast, hue, saturation);
}
QImage VideoRendererSoft::snapshot() const
{
return m_renderer->currentImage(); //not accurate (especially when using opengl...)
}
void VideoRendererSoft::setSnapshot(const QImage &image)
{
m_renderer->setCurrentImage(image);
}
bool VideoRendererSoft::event(QEvent *e)
{
if (e->type() == QEvent::UpdateRequest) {
m_target->update(m_videoRect);
return true;
}
return QObject::event(e);
}
}
}
QT_END_NAMESPACE
#endif //QT_NO_PHONON_VIDEO