winegstreamer: Get rid of the YUV-to-(A)RGB converters.

This is handled by the splitter filter now.

Signed-off-by: Zebediah Figura <z.figura12@gmail.com>
Signed-off-by: Alexandre Julliard <julliard@winehq.org>
Authored by Zebediah Figura on 2020-01-24 19:55:10 -06:00; committed by Alexandre Julliard
parent f5a1e2bd87
commit 841502993f
4 changed files with 0 additions and 273 deletions

@@ -23,5 +23,3 @@ DEFINE_GUID(CLSID_Gstreamer_AudioConvert, 0x334b2ec9, 0xf2b5, 0x40b9, 0x84, 0x32
DEFINE_GUID(CLSID_Gstreamer_Mp3, 0x728dcf55, 0x128f, 0x4dd1, 0xad, 0x22, 0xbe, 0xcf, 0xa6, 0x6c, 0xe7, 0xaa);
DEFINE_GUID(CLSID_Gstreamer_Splitter, 0xf9d8d64e, 0xa144, 0x47dc, 0x8e, 0xe0, 0xf5, 0x34, 0x98, 0x37, 0x2c, 0x29);
DEFINE_GUID(WINESUBTYPE_Gstreamer, 0xffffffff, 0x128f, 0x4dd1, 0xad, 0x22, 0xbe, 0xcf, 0xa6, 0x6c, 0xe7, 0xaa);
DEFINE_GUID(CLSID_Gstreamer_YUV2RGB, 0x2d5507df, 0x5ac9, 0x4bb9, 0x9c, 0x09, 0xb2, 0x80, 0xfc, 0x0b, 0xce, 0x01);
DEFINE_GUID(CLSID_Gstreamer_YUV2ARGB, 0x2d5507df, 0x5ac9, 0x4bb9, 0x9c, 0x09, 0xb2, 0x80, 0xfc, 0x0b, 0xce, 0x02);
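
(Not part of the diff above.) The two dropped CLSIDs differ only in the last byte of the GUID. As an illustrative sketch, assuming winegstreamer was still registered as the in-process server for these classes and COM is initialized, this is roughly how a client could have instantiated one of the removed converters by CLSID before this change; the helper name create_yuv2rgb_filter is hypothetical:

/* Illustrative only; link against ole32 (CoCreateInstance) and strmiids
 * (IID_IBaseFilter).  Including initguid.h first makes the DEFINE_GUID below
 * emit a definition in this translation unit. */
#include <initguid.h>
#include <dshow.h>

DEFINE_GUID(CLSID_Gstreamer_YUV2RGB, 0x2d5507df, 0x5ac9, 0x4bb9,
            0x9c, 0x09, 0xb2, 0x80, 0xfc, 0x0b, 0xce, 0x01);

static IBaseFilter *create_yuv2rgb_filter(void)
{
    IBaseFilter *filter = NULL;
    HRESULT hr = CoCreateInstance(&CLSID_Gstreamer_YUV2RGB, NULL,
            CLSCTX_INPROC_SERVER, &IID_IBaseFilter, (void **)&filter);
    return SUCCEEDED(hr) ? filter : NULL;
}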

@@ -39,8 +39,6 @@ IUnknown * CALLBACK avi_splitter_create(IUnknown *outer, HRESULT *phr) DECLSPEC_
IUnknown * CALLBACK mpeg_splitter_create(IUnknown *outer, HRESULT *phr) DECLSPEC_HIDDEN;
IUnknown * CALLBACK Gstreamer_AudioConvert_create(IUnknown *pUnkOuter, HRESULT *phr);
IUnknown * CALLBACK Gstreamer_Mp3_create(IUnknown *pUnkOuter, HRESULT *phr);
IUnknown * CALLBACK Gstreamer_YUV2RGB_create(IUnknown *pUnkOuter, HRESULT *phr);
IUnknown * CALLBACK Gstreamer_YUV2ARGB_create(IUnknown *pUnkOuter, HRESULT *phr);
IUnknown * CALLBACK Gstreamer_Splitter_create(IUnknown *pUnkOuter, HRESULT *phr);
IUnknown * CALLBACK wave_parser_create(IUnknown *outer, HRESULT *phr) DECLSPEC_HIDDEN;
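
(Not part of the diff above.) All of these factory entry points share the same aggregation-aware shape: they take an optional outer IUnknown, report status through *phr, and return a bare IUnknown. A minimal sketch of a hypothetical in-process caller, assuming COBJMACROS for the C interface macros:

/* Illustrative only: query the IUnknown returned by one of the factories
 * above for IBaseFilter before handing it to a filter graph. */
#define COBJMACROS
#include <dshow.h>

static IBaseFilter *filter_from_factory(IUnknown *(CALLBACK *create)(IUnknown *, HRESULT *))
{
    HRESULT hr;
    IUnknown *unk = create(NULL, &hr); /* NULL: no aggregating outer object */
    IBaseFilter *filter = NULL;

    if (SUCCEEDED(hr) && unk)
    {
        IUnknown_QueryInterface(unk, &IID_IBaseFilter, (void **)&filter);
        IUnknown_Release(unk);
    }
    return filter;
}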

@@ -627,223 +627,6 @@ IUnknown * CALLBACK Gstreamer_Mp3_create(IUnknown *punkouter, HRESULT *phr)
    return obj;
}

static HRESULT WINAPI Gstreamer_YUV_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);

    if (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Video) ||
        (!IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo) &&
         !IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo2)))
        return S_FALSE;
    if (memcmp(&amt->subtype.Data2, &MEDIATYPE_Video.Data2, sizeof(GUID) - sizeof(amt->subtype.Data1)))
        return S_FALSE;
    switch (amt->subtype.Data1) {
        case mmioFOURCC('I','4','2','0'):
        case mmioFOURCC('Y','V','1','2'):
        case mmioFOURCC('N','V','1','2'):
        case mmioFOURCC('N','V','2','1'):
        case mmioFOURCC('Y','U','Y','2'):
        case mmioFOURCC('Y','V','Y','U'):
            return S_OK;
        default:
            WARN("Unhandled fourcc %s\n", debugstr_an((char*)&amt->subtype.Data1, 4));
            return S_FALSE;
    }
}

static HRESULT yuv_to_rgb_connect_sink(TransformFilter *tf, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    mark_wine_thread();

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    }
    if (!avgtime)
        avgtime = 10000000 / 30;

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    capsin = gst_caps_new_simple("video/x-raw",
                                 "format", G_TYPE_STRING,
                                     gst_video_format_to_string(
                                         gst_video_format_from_fourcc(amt->subtype.Data1)),
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    capsout = gst_caps_new_simple("video/x-raw",
                                  "format", G_TYPE_STRING, "BGR",
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = width * height * 4;
    return hr;
}

static const TransformFilterFuncTable Gstreamer_YUV2RGB_vtbl = {
    .pfnDecideBufferSize = Gstreamer_transform_DecideBufferSize,
    .pfnStartStreaming = Gstreamer_transform_ProcessBegin,
    .pfnReceive = Gstreamer_transform_ProcessData,
    .pfnStopStreaming = Gstreamer_transform_ProcessEnd,
    .pfnCheckInputType = Gstreamer_YUV_QueryConnect,
    .transform_connect_sink = yuv_to_rgb_connect_sink,
    .pfnBreakConnect = Gstreamer_transform_Cleanup,
    .pfnEndOfStream = Gstreamer_transform_EndOfStream,
    .pfnBeginFlush = Gstreamer_transform_BeginFlush,
    .pfnEndFlush = Gstreamer_transform_EndFlush,
    .pfnNewSegment = Gstreamer_transform_NewSegment,
    .pfnNotify = Gstreamer_transform_QOS,
};

IUnknown * CALLBACK Gstreamer_YUV2RGB_create(IUnknown *punkouter, HRESULT *phr)
{
    IUnknown *obj = NULL;

    TRACE("%p %p\n", punkouter, phr);

    if (!init_gstreamer())
    {
        *phr = E_FAIL;
        return NULL;
    }

    *phr = Gstreamer_transform_create(punkouter, &CLSID_Gstreamer_YUV2RGB, "videoconvert", &Gstreamer_YUV2RGB_vtbl, (LPVOID*)&obj);

    TRACE("returning %p\n", obj);

    return obj;
}

static HRESULT yuv_to_argb_connect_sink(TransformFilter *tf, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    mark_wine_thread();

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 32;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 32;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    }
    if (!avgtime)
        avgtime = 10000000 / 30;

    outpmt->subtype = MEDIASUBTYPE_ARGB32;

    capsin = gst_caps_new_simple("video/x-raw",
                                 "format", G_TYPE_STRING,
                                     gst_video_format_to_string(
                                         gst_video_format_from_fourcc(amt->subtype.Data1)),
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    capsout = gst_caps_new_simple("video/x-raw",
                                  "format", G_TYPE_STRING, "BGRA",
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = width * height * 4;
    return hr;
}

static const TransformFilterFuncTable Gstreamer_YUV2ARGB_vtbl = {
    .pfnDecideBufferSize = Gstreamer_transform_DecideBufferSize,
    .pfnStartStreaming = Gstreamer_transform_ProcessBegin,
    .pfnReceive = Gstreamer_transform_ProcessData,
    .pfnStopStreaming = Gstreamer_transform_ProcessEnd,
    .pfnCheckInputType = Gstreamer_YUV_QueryConnect,
    .transform_connect_sink = yuv_to_argb_connect_sink,
    .pfnBreakConnect = Gstreamer_transform_Cleanup,
    .pfnEndOfStream = Gstreamer_transform_EndOfStream,
    .pfnBeginFlush = Gstreamer_transform_BeginFlush,
    .pfnEndFlush = Gstreamer_transform_EndFlush,
    .pfnNewSegment = Gstreamer_transform_NewSegment,
    .pfnNotify = Gstreamer_transform_QOS,
};

IUnknown * CALLBACK Gstreamer_YUV2ARGB_create(IUnknown *punkouter, HRESULT *phr)
{
    IUnknown *obj = NULL;

    TRACE("%p %p\n", punkouter, phr);

    if (!init_gstreamer())
    {
        *phr = E_FAIL;
        return NULL;
    }

    *phr = Gstreamer_transform_create(punkouter, &CLSID_Gstreamer_YUV2ARGB, "videoconvert", &Gstreamer_YUV2ARGB_vtbl, (LPVOID*)&obj);

    TRACE("returning %p\n", obj);

    return obj;
}

static HRESULT WINAPI Gstreamer_AudioConvert_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)iface;

@@ -38,10 +38,6 @@ WINE_DEFAULT_DEBUG_CHANNEL(gstreamer);
static const WCHAR wGstreamer_Splitter[] =
{'G','S','t','r','e','a','m','e','r',' ','s','p','l','i','t','t','e','r',' ','f','i','l','t','e','r',0};
static const WCHAR wGstreamer_YUV2RGB[] =
{'G','S','t','r','e','a','m','e','r',' ','Y','U','V',' ','t','o',' ','R','G','B',' ','f','i','l','t','e','r',0};
static const WCHAR wGstreamer_YUV2ARGB[] =
{'G','S','t','r','e','a','m','e','r',' ','Y','U','V',' ','t','o',' ','A','R','G','B',' ','f','i','l','t','e','r',0};
static const WCHAR wGstreamer_Mp3[] =
{'G','S','t','r','e','a','m','e','r',' ','M','p','3',' ','f','i','l','t','e','r',0};
static const WCHAR wGstreamer_AudioConvert[] =
@@ -100,40 +96,6 @@ static const AMOVIESETUP_FILTER amfSplitter =
    amfSplitPin
};

static const AMOVIESETUP_PIN amfYUVPin[] =
{   {   wNull,
        FALSE, FALSE, FALSE, FALSE,
        &GUID_NULL,
        NULL,
        1,
        amfMTvideo
    },
    {
        wNull,
        FALSE, TRUE, FALSE, FALSE,
        &GUID_NULL,
        NULL,
        1,
        amfMTvideo
    },
};

static const AMOVIESETUP_FILTER amfYUV2RGB =
{   &CLSID_Gstreamer_YUV2RGB,
    wGstreamer_YUV2RGB,
    MERIT_UNLIKELY,
    2,
    amfYUVPin
};

static const AMOVIESETUP_FILTER amfYUV2ARGB =
{   &CLSID_Gstreamer_YUV2ARGB,
    wGstreamer_YUV2ARGB,
    MERIT_UNLIKELY,
    2,
    amfYUVPin
};

AMOVIESETUP_PIN amfMp3Pin[] =
{   {   wNull,
        FALSE, FALSE, FALSE, FALSE,
@@ -326,20 +288,6 @@ FactoryTemplate const g_Templates[] = {
        NULL,
        &amfSplitter,
    },
    {
        wGstreamer_YUV2RGB,
        &CLSID_Gstreamer_YUV2RGB,
        Gstreamer_YUV2RGB_create,
        NULL,
        &amfYUV2RGB,
    },
    {
        wGstreamer_YUV2ARGB,
        &CLSID_Gstreamer_YUV2ARGB,
        Gstreamer_YUV2ARGB_create,
        NULL,
        &amfYUV2ARGB,
    },
    {
        wGstreamer_Mp3,
        &CLSID_Gstreamer_Mp3,
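
(Not part of the diff above.) Removing these registration entries does not require application changes: typical clients never referenced the converters directly, the graph builder merely offered them by merit (MERIT_UNLIKELY above), and per the commit message the splitter now handles the YUV-to-RGB conversion itself. A minimal sketch of the unchanged client path, assuming COBJMACROS and an already-created filter graph:

/* Illustrative only: clients keep letting the graph builder assemble the
 * chain; intermediate filters are pulled in from the registry only when the
 * pins cannot be connected directly. */
#define COBJMACROS
#include <dshow.h>

static HRESULT render_media_file(IGraphBuilder *graph, const WCHAR *path)
{
    return IGraphBuilder_RenderFile(graph, path, NULL);
}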