/*
 * GStreamer splitter + decoder, adapted from parser.c
 *
 * Copyright 2010 Maarten Lankhorst for CodeWeavers
 * Copyright 2010 Aric Stewart for CodeWeavers
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */

#include "config.h"
#include "gst_private.h"
#include "gst_guids.h"
#include "gst_cbs.h"

#include "vfwmsgs.h"
#include "amvideo.h"

#include "wine/unicode.h"
#include "wine/debug.h"

#include <assert.h>

#include "dvdmedia.h"
#include "mmreg.h"
#include "ks.h"
#include "initguid.h"
#include "wmcodecdsp.h"
#include "ksmedia.h"

WINE_DEFAULT_DEBUG_CHANNEL(gstreamer);

GST_DEBUG_CATEGORY_STATIC(wine);
#define GST_CAT_DEFAULT wine

static const GUID MEDIASUBTYPE_CVID = {mmioFOURCC('c','v','i','d'), 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
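
/* State shared between the DirectShow filter and the GStreamer callbacks.
 * Fields touched from both Win32 and GStreamer threads are protected by
 * "mutex" and signalled through the condition variables below. */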
struct wg_parser
{
    GstElement *container;
    GstBus *bus;
    GstPad *my_src, *their_sink;

    guint64 file_size, start_offset, next_offset, stop_offset;

    pthread_t push_thread;

    pthread_mutex_t mutex;

    pthread_cond_t init_cond;
    bool no_more_pads, has_duration, error;

    pthread_cond_t read_cond, read_done_cond;
    struct
    {
        GstBuffer *buffer;
        uint64_t offset;
        uint32_t size;
        bool done;
        GstFlowReturn ret;
    } read_request;

    bool flushing, sink_connected;
};

enum wg_parser_event_type
{
    WG_PARSER_EVENT_NONE = 0,
    WG_PARSER_EVENT_BUFFER,
    WG_PARSER_EVENT_EOS,
    WG_PARSER_EVENT_SEGMENT,
};

struct wg_parser_event
{
    enum wg_parser_event_type type;
    union
    {
        GstBuffer *buffer;
        struct
        {
            uint64_t position, stop;
            double rate;
        } segment;
    } u;
};

struct wg_parser_stream
{
    GstPad *their_src, *post_sink, *post_src, *my_sink;
    GstElement *flip;
    GstSegment *segment;
    GstCaps *caps;

    pthread_cond_t event_cond, event_empty_cond;
    struct wg_parser_event event;

    bool flushing, eos, enabled;
};

struct parser
{
    struct strmbase_filter filter;
    IAMStreamSelect IAMStreamSelect_iface;

    struct strmbase_sink sink;
    IAsyncReader *reader;

    struct parser_source **sources;
    unsigned int source_count;
    BOOL enum_sink_first;

    LONGLONG file_size;

    struct wg_parser *wg_parser;

    /* FIXME: It would be nice to avoid duplicating these with strmbase.
     * However, synchronization is tricky; we need access to be protected by a
     * separate lock. */
    bool streaming, sink_connected;

    uint64_t next_pull_offset;

    HANDLE read_thread;

    BOOL (*init_gst)(struct parser *filter);
    HRESULT (*source_query_accept)(struct parser_source *pin, const AM_MEDIA_TYPE *mt);
    HRESULT (*source_get_media_type)(struct parser_source *pin, unsigned int index, AM_MEDIA_TYPE *mt);
};

struct parser_source
{
    struct strmbase_source pin;
    IQualityControl IQualityControl_iface;

    struct wg_parser_stream *wg_stream;

    SourceSeeking seek;

    CRITICAL_SECTION flushing_cs;
    HANDLE thread;
};

static inline struct parser *impl_from_strmbase_filter(struct strmbase_filter *iface)
{
    return CONTAINING_RECORD(iface, struct parser, filter);
}

static const WCHAR wcsInputPinName[] = {'i','n','p','u','t',' ','p','i','n',0};
static const IMediaSeekingVtbl GST_Seeking_Vtbl;
static const IQualityControlVtbl GSTOutPin_QualityControl_Vtbl;

static struct parser_source *create_pin(struct parser *filter, const WCHAR *name);
static HRESULT GST_RemoveOutputPins(struct parser *This);
static HRESULT WINAPI GST_ChangeCurrent(IMediaSeeking *iface);
static HRESULT WINAPI GST_ChangeStop(IMediaSeeking *iface);
static HRESULT WINAPI GST_ChangeRate(IMediaSeeking *iface);
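
/* Build a DirectShow PCM/IEEE-float audio media type from GStreamer audio info. */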
static gboolean amt_from_gst_audio_info(const GstAudioInfo *info, AM_MEDIA_TYPE *amt)
{
    WAVEFORMATEXTENSIBLE *wfe;
    WAVEFORMATEX *wfx;
    gint32 depth, bpp;

    wfe = CoTaskMemAlloc(sizeof(*wfe));
    wfx = (WAVEFORMATEX*)wfe;
    amt->majortype = MEDIATYPE_Audio;
    amt->subtype = MEDIASUBTYPE_PCM;
    amt->formattype = FORMAT_WaveFormatEx;
    amt->pbFormat = (BYTE*)wfe;
    amt->cbFormat = sizeof(*wfe);
    amt->bFixedSizeSamples = TRUE;
    amt->bTemporalCompression = FALSE;
    amt->pUnk = NULL;

    wfx->wFormatTag = WAVE_FORMAT_EXTENSIBLE;

    wfx->nChannels = info->channels;
    wfx->nSamplesPerSec = info->rate;
    depth = GST_AUDIO_INFO_WIDTH(info);
    bpp = GST_AUDIO_INFO_DEPTH(info);

    if (!depth || depth > 32 || depth % 8)
        depth = bpp;
    else if (!bpp)
        bpp = depth;
    wfe->Samples.wValidBitsPerSample = depth;
    wfx->wBitsPerSample = bpp;
    wfx->cbSize = sizeof(*wfe)-sizeof(*wfx);
    switch (wfx->nChannels) {
        case 1: wfe->dwChannelMask = KSAUDIO_SPEAKER_MONO; break;
        case 2: wfe->dwChannelMask = KSAUDIO_SPEAKER_STEREO; break;
        case 4: wfe->dwChannelMask = KSAUDIO_SPEAKER_SURROUND; break;
        case 5: wfe->dwChannelMask = (KSAUDIO_SPEAKER_5POINT1 & ~SPEAKER_LOW_FREQUENCY); break;
        case 6: wfe->dwChannelMask = KSAUDIO_SPEAKER_5POINT1; break;
        case 8: wfe->dwChannelMask = KSAUDIO_SPEAKER_7POINT1; break;
        default:
            wfe->dwChannelMask = 0;
    }
    if (GST_AUDIO_INFO_IS_FLOAT(info))
    {
        amt->subtype = MEDIASUBTYPE_IEEE_FLOAT;
        wfe->SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
    } else {
        wfe->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
        if (wfx->nChannels <= 2 && bpp <= 16 && depth == bpp) {
            wfx->wFormatTag = WAVE_FORMAT_PCM;
            wfx->cbSize = 0;
            amt->cbFormat = sizeof(WAVEFORMATEX);
        }
    }
    amt->lSampleSize = wfx->nBlockAlign = wfx->nChannels * wfx->wBitsPerSample/8;
    wfx->nAvgBytesPerSec = wfx->nSamplesPerSec * wfx->nBlockAlign;
    return TRUE;
}
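
/* Build a DirectShow VIDEOINFO media type (RGB or FOURCC-based) from GStreamer video info. */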
static gboolean amt_from_gst_video_info(const GstVideoInfo *info, AM_MEDIA_TYPE *amt)
{
    VIDEOINFO *vih;
    BITMAPINFOHEADER *bih;
    gint32 width, height;

    width = GST_VIDEO_INFO_WIDTH(info);
    height = GST_VIDEO_INFO_HEIGHT(info);

    vih = CoTaskMemAlloc(sizeof(*vih));
    bih = &vih->bmiHeader;

    amt->formattype = FORMAT_VideoInfo;
    amt->pbFormat = (BYTE*)vih;
    amt->cbFormat = sizeof(VIDEOINFOHEADER);
    amt->bFixedSizeSamples = FALSE;
    amt->bTemporalCompression = TRUE;
    amt->lSampleSize = 1;
    amt->pUnk = NULL;
    ZeroMemory(vih, sizeof(*vih));
    amt->majortype = MEDIATYPE_Video;

    if (GST_VIDEO_INFO_IS_RGB(info))
    {
        bih->biCompression = BI_RGB;
        switch (GST_VIDEO_INFO_FORMAT(info))
        {
        case GST_VIDEO_FORMAT_BGRA:
            amt->subtype = MEDIASUBTYPE_ARGB32;
            bih->biBitCount = 32;
            break;
        case GST_VIDEO_FORMAT_BGRx:
            amt->subtype = MEDIASUBTYPE_RGB32;
            bih->biBitCount = 32;
            break;
        case GST_VIDEO_FORMAT_BGR:
            amt->subtype = MEDIASUBTYPE_RGB24;
            bih->biBitCount = 24;
            break;
        case GST_VIDEO_FORMAT_RGB16:
            amt->subtype = MEDIASUBTYPE_RGB565;
            amt->cbFormat = offsetof(VIDEOINFO, u.dwBitMasks[3]);
            vih->u.dwBitMasks[iRED] = 0xf800;
            vih->u.dwBitMasks[iGREEN] = 0x07e0;
            vih->u.dwBitMasks[iBLUE] = 0x001f;
            bih->biBitCount = 16;
            bih->biCompression = BI_BITFIELDS;
            break;
        case GST_VIDEO_FORMAT_RGB15:
            amt->subtype = MEDIASUBTYPE_RGB555;
            bih->biBitCount = 16;
            break;
        default:
            WARN("Cannot convert %s to a DirectShow type.\n", GST_VIDEO_INFO_NAME(info));
            CoTaskMemFree(vih);
            return FALSE;
        }
    } else {
        amt->subtype = MEDIATYPE_Video;
        if (!(amt->subtype.Data1 = gst_video_format_to_fourcc(GST_VIDEO_INFO_FORMAT(info))))
        {
            CoTaskMemFree(vih);
            return FALSE;
        }
        switch (amt->subtype.Data1) {
        case mmioFOURCC('I','4','2','0'):
        case mmioFOURCC('Y','V','1','2'):
        case mmioFOURCC('N','V','1','2'):
        case mmioFOURCC('N','V','2','1'):
            bih->biBitCount = 12; break;
        case mmioFOURCC('Y','U','Y','2'):
        case mmioFOURCC('Y','V','Y','U'):
        case mmioFOURCC('U','Y','V','Y'):
            bih->biBitCount = 16; break;
        }
        bih->biCompression = amt->subtype.Data1;
    }
    bih->biSizeImage = GST_VIDEO_INFO_SIZE(info);
    if ((vih->AvgTimePerFrame = (REFERENCE_TIME)MulDiv(10000000,
            GST_VIDEO_INFO_FPS_D(info), GST_VIDEO_INFO_FPS_N(info))) == -1)
        vih->AvgTimePerFrame = 0; /* zero division or integer overflow */
    bih->biSize = sizeof(*bih);
    bih->biWidth = width;
    bih->biHeight = height;
    bih->biPlanes = 1;
    return TRUE;
}
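
/* Build an MPEG-1 audio / MP3 media type from "audio/mpeg" caps. */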
static gboolean amt_from_gst_caps_audio_mpeg(const GstCaps *caps, AM_MEDIA_TYPE *mt)
{
    GstStructure *structure = gst_caps_get_structure(caps, 0);
    gint layer, channels, rate;

    mt->majortype = MEDIATYPE_Audio;
    mt->subtype = MEDIASUBTYPE_MPEG1AudioPayload;
    mt->bFixedSizeSamples = FALSE;
    mt->bTemporalCompression = FALSE;
    mt->lSampleSize = 0;
    mt->formattype = FORMAT_WaveFormatEx;
    mt->pUnk = NULL;

    if (!gst_structure_get_int(structure, "layer", &layer))
    {
        WARN("Missing 'layer' value.\n");
        return FALSE;
    }
    if (!gst_structure_get_int(structure, "channels", &channels))
    {
        WARN("Missing 'channels' value.\n");
        return FALSE;
    }
    if (!gst_structure_get_int(structure, "rate", &rate))
    {
        WARN("Missing 'rate' value.\n");
        return FALSE;
    }

    if (layer == 3)
    {
        MPEGLAYER3WAVEFORMAT *wfx = CoTaskMemAlloc(sizeof(*wfx));
        memset(wfx, 0, sizeof(*wfx));

        mt->subtype.Data1 = WAVE_FORMAT_MPEGLAYER3;
        mt->cbFormat = sizeof(*wfx);
        mt->pbFormat = (BYTE *)wfx;
        wfx->wfx.wFormatTag = WAVE_FORMAT_MPEGLAYER3;
        wfx->wfx.nChannels = channels;
        wfx->wfx.nSamplesPerSec = rate;
        /* FIXME: We can't get most of the MPEG data from the caps. We may have
         * to manually parse the header. */
        wfx->wfx.cbSize = sizeof(*wfx) - sizeof(WAVEFORMATEX);
        wfx->wID = MPEGLAYER3_ID_MPEG;
        wfx->fdwFlags = MPEGLAYER3_FLAG_PADDING_ON;
        wfx->nFramesPerBlock = 1;
        wfx->nCodecDelay = 1393;
    }
    else
    {
        MPEG1WAVEFORMAT *wfx = CoTaskMemAlloc(sizeof(*wfx));
        memset(wfx, 0, sizeof(*wfx));

        mt->subtype.Data1 = WAVE_FORMAT_MPEG;
        mt->cbFormat = sizeof(*wfx);
        mt->pbFormat = (BYTE *)wfx;
        wfx->wfx.wFormatTag = WAVE_FORMAT_MPEG;
        wfx->wfx.nChannels = channels;
        wfx->wfx.nSamplesPerSec = rate;
        wfx->wfx.cbSize = sizeof(*wfx) - sizeof(WAVEFORMATEX);
        wfx->fwHeadLayer = layer;
    }

    return TRUE;
}

static gboolean amt_from_gst_caps(const GstCaps *caps, AM_MEDIA_TYPE *mt)
{
    const char *type = gst_structure_get_name(gst_caps_get_structure(caps, 0));
    GstStructure *structure = gst_caps_get_structure(caps, 0);

    memset(mt, 0, sizeof(AM_MEDIA_TYPE));

    if (!strcmp(type, "audio/x-raw"))
    {
        GstAudioInfo info;

        if (!(gst_audio_info_from_caps(&info, caps)))
            return FALSE;
        return amt_from_gst_audio_info(&info, mt);
    }
    else if (!strcmp(type, "video/x-raw"))
    {
        GstVideoInfo info;

        if (!gst_video_info_from_caps(&info, caps))
            return FALSE;
        return amt_from_gst_video_info(&info, mt);
    }
    else if (!strcmp(type, "audio/mpeg"))
        return amt_from_gst_caps_audio_mpeg(caps, mt);
    else if (!strcmp(type, "video/x-cinepak"))
    {
        VIDEOINFOHEADER *vih;
        gint i;

        mt->majortype = MEDIATYPE_Video;
        mt->subtype = MEDIASUBTYPE_CVID;
        mt->bTemporalCompression = TRUE;
        mt->lSampleSize = 1;
        mt->formattype = FORMAT_VideoInfo;
        if (!(vih = CoTaskMemAlloc(sizeof(VIDEOINFOHEADER))))
            return FALSE;
        mt->cbFormat = sizeof(VIDEOINFOHEADER);
        mt->pbFormat = (BYTE *)vih;

        memset(vih, 0, sizeof(VIDEOINFOHEADER));
        vih->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
        if (gst_structure_get_int(structure, "width", &i))
            vih->bmiHeader.biWidth = i;
        if (gst_structure_get_int(structure, "height", &i))
            vih->bmiHeader.biHeight = i;
        vih->bmiHeader.biPlanes = 1;
        /* Both ffmpeg's encoder and a Cinepak file seen in the wild report
         * 24 bpp. ffmpeg sets biSizeImage as below; others may be smaller, but
         * as long as every sample fits into our allocator, we're fine. */
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = mmioFOURCC('c','v','i','d');
        vih->bmiHeader.biSizeImage = vih->bmiHeader.biWidth
                * vih->bmiHeader.biHeight * vih->bmiHeader.biBitCount / 8;
        return TRUE;
    }
    else
    {
        FIXME("Unhandled type %s.\n", debugstr_a(type));
        return FALSE;
    }
}
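
/* Translate a DirectShow video media type into GStreamer video caps. */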
static GstCaps *amt_to_gst_caps_video(const AM_MEDIA_TYPE *mt)
{
    static const struct
    {
        const GUID *subtype;
        GstVideoFormat format;
    }
    format_map[] =
    {
        {&MEDIASUBTYPE_ARGB32, GST_VIDEO_FORMAT_BGRA},
        {&MEDIASUBTYPE_RGB32, GST_VIDEO_FORMAT_BGRx},
        {&MEDIASUBTYPE_RGB24, GST_VIDEO_FORMAT_BGR},
        {&MEDIASUBTYPE_RGB565, GST_VIDEO_FORMAT_RGB16},
        {&MEDIASUBTYPE_RGB555, GST_VIDEO_FORMAT_RGB15},
    };

    const VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)mt->pbFormat;
    GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
    GstVideoInfo info;
    unsigned int i;
    GstCaps *caps;

    if (!IsEqualGUID(&mt->formattype, &FORMAT_VideoInfo)
            || mt->cbFormat < sizeof(VIDEOINFOHEADER) || !mt->pbFormat)
        return NULL;

    for (i = 0; i < ARRAY_SIZE(format_map); ++i)
    {
        if (IsEqualGUID(&mt->subtype, format_map[i].subtype))
        {
            format = format_map[i].format;
            break;
        }
    }

    if (format == GST_VIDEO_FORMAT_UNKNOWN)
        format = gst_video_format_from_fourcc(vih->bmiHeader.biCompression);

    if (format == GST_VIDEO_FORMAT_UNKNOWN)
    {
        FIXME("Unknown video format (subtype %s, compression %#x).\n",
                debugstr_guid(&mt->subtype), vih->bmiHeader.biCompression);
        return NULL;
    }

    gst_video_info_set_format(&info, format, vih->bmiHeader.biWidth, vih->bmiHeader.biHeight);
    if ((caps = gst_video_info_to_caps(&info)))
    {
        /* Clear some fields that shouldn't prevent us from connecting. */
        for (i = 0; i < gst_caps_get_size(caps); ++i)
        {
            gst_structure_remove_fields(gst_caps_get_structure(caps, i),
                    "framerate", "pixel-aspect-ratio", "colorimetry", "chroma-site", NULL);
        }
    }
    return caps;
}

static GstCaps *amt_to_gst_caps_audio(const AM_MEDIA_TYPE *mt)
{
    const WAVEFORMATEX *wfx = (WAVEFORMATEX *)mt->pbFormat;
    GstAudioFormat format = GST_AUDIO_FORMAT_UNKNOWN;
    GstAudioInfo info;

    if (!IsEqualGUID(&mt->formattype, &FORMAT_WaveFormatEx)
            || mt->cbFormat < sizeof(WAVEFORMATEX) || !mt->pbFormat)
        return NULL;

    if (IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_PCM))
        format = gst_audio_format_build_integer(wfx->wBitsPerSample != 8,
                G_LITTLE_ENDIAN, wfx->wBitsPerSample, wfx->wBitsPerSample);
    else if (IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_IEEE_FLOAT))
    {
        if (wfx->wBitsPerSample == 32)
            format = GST_AUDIO_FORMAT_F32LE;
        else if (wfx->wBitsPerSample == 64)
            format = GST_AUDIO_FORMAT_F64LE;
    }

    if (format == GST_AUDIO_FORMAT_UNKNOWN)
    {
        FIXME("Unknown audio format (subtype %s, depth %u).\n",
                debugstr_guid(&mt->subtype), wfx->wBitsPerSample);
        return NULL;
    }

    gst_audio_info_set_format(&info, format, wfx->nSamplesPerSec, wfx->nChannels, NULL);
    return gst_audio_info_to_caps(&info);
}

static GstCaps *amt_to_gst_caps(const AM_MEDIA_TYPE *mt)
{
    if (IsEqualGUID(&mt->majortype, &MEDIATYPE_Video))
        return amt_to_gst_caps_video(mt);
    else if (IsEqualGUID(&mt->majortype, &MEDIATYPE_Audio))
        return amt_to_gst_caps_audio(mt);

    FIXME("Unknown major type %s.\n", debugstr_guid(&mt->majortype));
    return NULL;
}
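
/* Handle caps and accept-caps queries that decodebin sends to our sink pads,
 * based on the media type the DirectShow output pin was connected with. */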
static gboolean query_sink(GstPad *pad, GstObject *parent, GstQuery *query)
{
    struct parser_source *pin = gst_pad_get_element_private(pad);
    struct wg_parser_stream *stream = pin->wg_stream;

    TRACE("pin %p, type \"%s\".\n", pin, gst_query_type_get_name(query->type));

    switch (query->type)
    {
        case GST_QUERY_CAPS:
        {
            GstCaps *caps, *filter, *temp;

            gst_query_parse_caps(query, &filter);

            if (stream->enabled)
                caps = amt_to_gst_caps(&pin->pin.pin.mt);
            else
                caps = gst_caps_new_any();
            if (!caps)
                return FALSE;

            if (filter)
            {
                temp = gst_caps_intersect(caps, filter);
                gst_caps_unref(caps);
                caps = temp;
            }

            gst_query_set_caps_result(query, caps);
            gst_caps_unref(caps);
            return TRUE;
        }
        case GST_QUERY_ACCEPT_CAPS:
        {
            gboolean ret = TRUE;
            AM_MEDIA_TYPE mt;
            GstCaps *caps;

            if (!stream->enabled)
            {
                gst_query_set_accept_caps_result(query, TRUE);
                return TRUE;
            }

            gst_query_parse_accept_caps(query, &caps);
            if (!amt_from_gst_caps(caps, &mt))
                return FALSE;

            if (!IsEqualGUID(&mt.majortype, &pin->pin.pin.mt.majortype)
                    || !IsEqualGUID(&mt.subtype, &pin->pin.pin.mt.subtype)
                    || !IsEqualGUID(&mt.formattype, &pin->pin.pin.mt.formattype))
                ret = FALSE;

            if (IsEqualGUID(&mt.majortype, &MEDIATYPE_Video))
            {
                const VIDEOINFOHEADER *req_vih = (VIDEOINFOHEADER *)mt.pbFormat;
                const VIDEOINFOHEADER *our_vih = (VIDEOINFOHEADER *)pin->pin.pin.mt.pbFormat;

                if (req_vih->bmiHeader.biWidth != our_vih->bmiHeader.biWidth
                        || req_vih->bmiHeader.biHeight != our_vih->bmiHeader.biHeight
                        || req_vih->bmiHeader.biBitCount != our_vih->bmiHeader.biBitCount
                        || req_vih->bmiHeader.biCompression != our_vih->bmiHeader.biCompression)
                    ret = FALSE;
            }
            else if (IsEqualGUID(&mt.majortype, &MEDIATYPE_Audio))
            {
                const WAVEFORMATEX *req_wfx = (WAVEFORMATEX *)mt.pbFormat;
                const WAVEFORMATEX *our_wfx = (WAVEFORMATEX *)pin->pin.pin.mt.pbFormat;

                if (req_wfx->nChannels != our_wfx->nChannels
                        || req_wfx->nSamplesPerSec != our_wfx->nSamplesPerSec
                        || req_wfx->wBitsPerSample != our_wfx->wBitsPerSample)
                    ret = FALSE;
            }

            FreeMediaType(&mt);

            if (!ret && WARN_ON(gstreamer))
            {
                gchar *str = gst_caps_to_string(caps);
                WARN("Rejecting caps \"%s\".\n", debugstr_a(str));
                g_free(str);
            }

            gst_query_set_accept_caps_result(query, ret);
            return TRUE;
        }
        default:
            return gst_pad_query_default (pad, parent, query);
    }
}
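
/* Handle a GST_EVENT_SEEK in bytes on our source pad: update the read offsets,
 * surrounding the change with flush-start/flush-stop when requested. */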
static gboolean gst_base_src_perform_seek(struct parser *This, GstEvent *event)
{
    struct wg_parser *parser = This->wg_parser;
    gboolean res = TRUE;
    gdouble rate;
    GstFormat seek_format;
    GstSeekFlags flags;
    GstSeekType cur_type, stop_type;
    gint64 cur, stop;
    gboolean flush;
    guint32 seqnum;
    GstEvent *tevent;
    BOOL thread = !!parser->push_thread;

    gst_event_parse_seek(event, &rate, &seek_format, &flags,
            &cur_type, &cur, &stop_type, &stop);

    if (seek_format != GST_FORMAT_BYTES)
    {
        GST_FIXME("Unhandled format \"%s\".", gst_format_get_name(seek_format));
        return FALSE;
    }

    flush = flags & GST_SEEK_FLAG_FLUSH;
    seqnum = gst_event_get_seqnum(event);

    /* send flush start */
    if (flush) {
        tevent = gst_event_new_flush_start();
        gst_event_set_seqnum(tevent, seqnum);
        gst_pad_push_event(parser->my_src, tevent);
        if (thread)
            gst_pad_set_active(parser->my_src, 1);
    }

    parser->next_offset = parser->start_offset = cur;

    /* and prepare to continue streaming */
    if (flush) {
        tevent = gst_event_new_flush_stop(TRUE);
        gst_event_set_seqnum(tevent, seqnum);
        gst_pad_push_event(parser->my_src, tevent);
        if (thread)
            gst_pad_set_active(parser->my_src, 1);
    }

    return res;
}

static gboolean event_src(GstPad *pad, GstObject *parent, GstEvent *event)
{
    struct parser *This = gst_pad_get_element_private(pad);
    gboolean ret = TRUE;

    GST_LOG("filter %p, type \"%s\".", This, GST_EVENT_TYPE_NAME(event));

    switch (event->type)
    {
        case GST_EVENT_SEEK:
            ret = gst_base_src_perform_seek(This, event);
            break;

        case GST_EVENT_FLUSH_START:
        case GST_EVENT_FLUSH_STOP:
        case GST_EVENT_QOS:
        case GST_EVENT_RECONFIGURE:
            break;

        default:
            GST_WARNING("Ignoring \"%s\" event.", GST_EVENT_TYPE_NAME(event));
            ret = FALSE;
            break;
    }
    gst_event_unref(event);
    return ret;
}
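
/* Queue an event for the DirectShow streaming thread, waiting until the
 * previous event for this stream has been consumed. */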
static GstFlowReturn queue_stream_event(struct parser_source *pin, const struct wg_parser_event *event)
{
    struct wg_parser *parser = impl_from_strmbase_filter(pin->pin.pin.filter)->wg_parser;
    struct wg_parser_stream *stream = pin->wg_stream;

    /* Unlike request_buffer_src() [q.v.], we need to watch for GStreamer
     * flushes here. The difference is that we can be blocked by the streaming
     * thread not running (or itself flushing on the DirectShow side).
     * request_buffer_src() can only be blocked by the upstream source, and that
     * is solved by flushing the upstream source. */

    pthread_mutex_lock(&parser->mutex);
    while (!stream->flushing && stream->event.type != WG_PARSER_EVENT_NONE)
        pthread_cond_wait(&stream->event_empty_cond, &parser->mutex);
    if (stream->flushing)
    {
        pthread_mutex_unlock(&parser->mutex);
        GST_DEBUG("Filter is flushing; discarding event.");
        return GST_FLOW_FLUSHING;
    }
    stream->event = *event;
    pthread_mutex_unlock(&parser->mutex);
    pthread_cond_signal(&stream->event_cond);
    GST_LOG("Event queued.");
    return GST_FLOW_OK;
}
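
/* Handle GStreamer events (segments, EOS, flushes, caps) arriving at the sink
 * pad that backs a DirectShow output pin. */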
static gboolean event_sink(GstPad *pad, GstObject *parent, GstEvent *event)
{
    struct parser_source *pin = gst_pad_get_element_private(pad);
    struct wg_parser_stream *stream = pin->wg_stream;
    struct parser *filter = impl_from_strmbase_filter(pin->pin.pin.filter);
    struct wg_parser *parser = filter->wg_parser;

    GST_LOG("pin %p, type \"%s\".", pin, GST_EVENT_TYPE_NAME(event));

    switch (event->type)
    {
        case GST_EVENT_SEGMENT:
            if (stream->enabled)
            {
                struct wg_parser_event stream_event;
                const GstSegment *segment;

                gst_event_parse_segment(event, &segment);

                if (segment->format != GST_FORMAT_TIME)
                {
                    GST_FIXME("Unhandled format \"%s\".", gst_format_get_name(segment->format));
                    break;
                }

                gst_segment_copy_into(segment, stream->segment);

                stream_event.type = WG_PARSER_EVENT_SEGMENT;
                stream_event.u.segment.position = segment->position / 100;
                stream_event.u.segment.stop = segment->stop / 100;
                stream_event.u.segment.rate = segment->rate * segment->applied_rate;
                queue_stream_event(pin, &stream_event);
            }
            break;

        case GST_EVENT_EOS:
            if (stream->enabled)
            {
                struct wg_parser_event stream_event;

                stream_event.type = WG_PARSER_EVENT_EOS;
                queue_stream_event(pin, &stream_event);
            }
            else
            {
                pthread_mutex_lock(&parser->mutex);
                stream->eos = true;
                pthread_mutex_unlock(&parser->mutex);
                pthread_cond_signal(&parser->init_cond);
            }
            break;

        case GST_EVENT_FLUSH_START:
            if (stream->enabled)
            {
                pthread_mutex_lock(&parser->mutex);

                stream->flushing = true;
                pthread_cond_signal(&stream->event_empty_cond);

                switch (stream->event.type)
                {
                    case WG_PARSER_EVENT_NONE:
                    case WG_PARSER_EVENT_EOS:
                    case WG_PARSER_EVENT_SEGMENT:
                        break;

                    case WG_PARSER_EVENT_BUFFER:
                        gst_buffer_unref(stream->event.u.buffer);
                        break;
                }
                stream->event.type = WG_PARSER_EVENT_NONE;

                pthread_mutex_unlock(&parser->mutex);
            }
            break;

        case GST_EVENT_FLUSH_STOP:
            gst_segment_init(stream->segment, GST_FORMAT_TIME);
            if (stream->enabled)
            {
                pthread_mutex_lock(&parser->mutex);
                stream->flushing = false;
                pthread_mutex_unlock(&parser->mutex);
            }
            break;

        case GST_EVENT_CAPS:
        {
            GstCaps *caps;

            gst_event_parse_caps(event, &caps);
            pthread_mutex_lock(&parser->mutex);
            gst_caps_replace(&stream->caps, caps);
            pthread_mutex_unlock(&parser->mutex);
            pthread_cond_signal(&parser->init_cond);
            break;
        }

        default:
            GST_WARNING("Ignoring \"%s\" event.", GST_EVENT_TYPE_NAME(event));
    }
    gst_event_unref(event);
    return TRUE;
}

static GstFlowReturn request_buffer_src(GstPad *pad, GstObject *parent, guint64 offset, guint size, GstBuffer **buffer);
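
/* Push-mode thread: repeatedly request chunks of the file through
 * request_buffer_src() and push them downstream, then send EOS. */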
static void *push_data(void *iface)
{
    struct parser *This = iface;
    struct wg_parser *parser = This->wg_parser;
    GstBuffer *buffer;
    LONGLONG maxlen;

    GST_DEBUG("Starting push thread.");

    if (!(buffer = gst_buffer_new_allocate(NULL, 16384, NULL)))
    {
        GST_ERROR("Failed to allocate memory.");
        return NULL;
    }

    maxlen = parser->stop_offset ? parser->stop_offset : parser->file_size;

    for (;;) {
        ULONG len;
        int ret;

        if (parser->next_offset >= maxlen)
            break;
        len = min(16384, maxlen - parser->next_offset);

        if ((ret = request_buffer_src(parser->my_src, NULL, parser->next_offset, len, &buffer)) < 0)
        {
            GST_ERROR("Failed to read data, ret %s.", gst_flow_get_name(ret));
            break;
        }

        parser->next_offset += len;

        buffer->duration = buffer->pts = -1;
        if ((ret = gst_pad_push(parser->my_src, buffer)) < 0)
        {
            GST_ERROR("Failed to push data, ret %s.", gst_flow_get_name(ret));
            break;
        }
    }

    gst_buffer_unref(buffer);

    gst_pad_push_event(parser->my_src, gst_event_new_eos());

    GST_DEBUG("Stopping push thread.");

    return NULL;
}
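
/* Chain function for our stream sink pads: hand the buffer over to the
 * DirectShow streaming thread. */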
static GstFlowReturn got_data_sink(GstPad *pad, GstObject *parent, GstBuffer *buffer)
{
    struct parser_source *pin = gst_pad_get_element_private(pad);
    struct wg_parser_stream *stream = pin->wg_stream;
    struct wg_parser_event stream_event;
    GstFlowReturn ret;

    GST_LOG("pin %p, buffer %p.", pin, buffer);

    if (!stream->enabled)
    {
        gst_buffer_unref(buffer);
        return GST_FLOW_OK;
    }

    stream_event.type = WG_PARSER_EVENT_BUFFER;
    stream_event.u.buffer = buffer;
    /* Transfer our reference to the buffer to the thread. */
    if ((ret = queue_stream_event(pin, &stream_event)) != GST_FLOW_OK)
        gst_buffer_unref(buffer);
    return ret;
}

/* Fill and send a single IMediaSample. */
static HRESULT send_sample(struct parser_source *pin, IMediaSample *sample,
        GstBuffer *buf, GstMapInfo *info, gsize offset, gsize size, DWORD bytes_per_second)
{
    struct wg_parser_stream *stream = pin->wg_stream;
    HRESULT hr;
    BYTE *ptr = NULL;

    hr = IMediaSample_SetActualDataLength(sample, size);
    if(FAILED(hr)){
        WARN("SetActualDataLength failed: %08x\n", hr);
        return hr;
    }

    IMediaSample_GetPointer(sample, &ptr);

    memcpy(ptr, &info->data[offset], size);

    if (GST_BUFFER_PTS_IS_VALID(buf)) {
        REFERENCE_TIME rtStart;
        GstClockTime ptsStart = buf->pts;
        if (offset > 0)
            ptsStart = buf->pts + gst_util_uint64_scale(offset, GST_SECOND, bytes_per_second);
        rtStart = gst_segment_to_running_time(stream->segment, GST_FORMAT_TIME, ptsStart);
        if (rtStart >= 0)
            rtStart /= 100;

        if (GST_BUFFER_DURATION_IS_VALID(buf)) {
            REFERENCE_TIME rtStop;
            REFERENCE_TIME tStart;
            REFERENCE_TIME tStop;
            GstClockTime ptsStop = buf->pts + buf->duration;
            if (offset + size < info->size)
                ptsStop = buf->pts + gst_util_uint64_scale(offset + size, GST_SECOND, bytes_per_second);
            tStart = ptsStart / 100;
            tStop = ptsStop / 100;
            rtStop = gst_segment_to_running_time(stream->segment, GST_FORMAT_TIME, ptsStop);
            if (rtStop >= 0)
                rtStop /= 100;
            TRACE("Current time on %p: %i to %i ms\n", pin, (int)(rtStart / 10000), (int)(rtStop / 10000));
            IMediaSample_SetTime(sample, &rtStart, rtStop >= 0 ? &rtStop : NULL);
            IMediaSample_SetMediaTime(sample, &tStart, &tStop);
        } else {
            IMediaSample_SetTime(sample, rtStart >= 0 ? &rtStart : NULL, NULL);
            IMediaSample_SetMediaTime(sample, NULL, NULL);
        }
    } else {
        IMediaSample_SetTime(sample, NULL, NULL);
        IMediaSample_SetMediaTime(sample, NULL, NULL);
    }

    IMediaSample_SetDiscontinuity(sample, !offset && GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT));
    IMediaSample_SetPreroll(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_LIVE));
    IMediaSample_SetSyncPoint(sample, !GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT));

    if (!pin->pin.pin.peer)
        hr = VFW_E_NOT_CONNECTED;
    else
        hr = IMemInputPin_Receive(pin->pin.pMemInputPin, sample);

    TRACE("sending sample returned: %08x\n", hr);

    return hr;
}

/* Send a single GStreamer buffer (splitting it into multiple IMediaSamples if
 * necessary). */
static void send_buffer(struct parser_source *pin, GstBuffer *buf)
{
    HRESULT hr;
    IMediaSample *sample;
    GstMapInfo info;

    gst_buffer_map(buf, &info, GST_MAP_READ);

    if (IsEqualGUID(&pin->pin.pin.mt.formattype, &FORMAT_WaveFormatEx)
            && (IsEqualGUID(&pin->pin.pin.mt.subtype, &MEDIASUBTYPE_PCM)
            || IsEqualGUID(&pin->pin.pin.mt.subtype, &MEDIASUBTYPE_IEEE_FLOAT)))
    {
        WAVEFORMATEX *format = (WAVEFORMATEX *)pin->pin.pin.mt.pbFormat;
        gsize offset = 0;
        while (offset < info.size)
        {
            gsize advance;

            hr = BaseOutputPinImpl_GetDeliveryBuffer(&pin->pin, &sample, NULL, NULL, 0);

            if (FAILED(hr))
            {
                if (hr != VFW_E_NOT_CONNECTED)
                    ERR("Could not get a delivery buffer (%x), returning GST_FLOW_FLUSHING\n", hr);
                break;
            }

            advance = min(IMediaSample_GetSize(sample), info.size - offset);

            hr = send_sample(pin, sample, buf, &info, offset, advance, format->nAvgBytesPerSec);

            IMediaSample_Release(sample);

            if (FAILED(hr))
                break;

            offset += advance;
        }
    }
    else
    {
        hr = BaseOutputPinImpl_GetDeliveryBuffer(&pin->pin, &sample, NULL, NULL, 0);

        if (FAILED(hr))
        {
            if (hr != VFW_E_NOT_CONNECTED)
                ERR("Could not get a delivery buffer (%x), returning GST_FLOW_FLUSHING\n", hr);
        }
        else
        {
            hr = send_sample(pin, sample, buf, &info, 0, info.size, 0);

            IMediaSample_Release(sample);
        }
    }

    gst_buffer_unmap(buf, &info);

    gst_buffer_unref(buf);
}
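
/* Dequeue the next event for a stream, blocking until one arrives or the
 * filter starts flushing. */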
static bool get_stream_event(struct parser_source *pin, struct wg_parser_event *event)
{
    struct parser *filter = impl_from_strmbase_filter(pin->pin.pin.filter);
    struct wg_parser_stream *stream = pin->wg_stream;
    struct wg_parser *parser = filter->wg_parser;

    pthread_mutex_lock(&parser->mutex);

    while (!parser->flushing && stream->event.type == WG_PARSER_EVENT_NONE)
        pthread_cond_wait(&stream->event_cond, &parser->mutex);

    if (parser->flushing)
    {
        pthread_mutex_unlock(&parser->mutex);
        TRACE("Filter is flushing.\n");
        return false;
    }

    *event = stream->event;
    stream->event.type = WG_PARSER_EVENT_NONE;

    pthread_mutex_unlock(&parser->mutex);
    pthread_cond_signal(&stream->event_empty_cond);

    return true;
}
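
/* Per-pin streaming thread: deliver buffers, EOS, and new-segment
 * notifications to the connected DirectShow pin. */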
static DWORD CALLBACK stream_thread(void *arg)
{
    struct parser_source *pin = arg;
    struct parser *filter = impl_from_strmbase_filter(pin->pin.pin.filter);

    TRACE("Starting streaming thread for pin %p.\n", pin);

    while (filter->streaming)
    {
        struct wg_parser_event event;

        EnterCriticalSection(&pin->flushing_cs);

        if (!get_stream_event(pin, &event))
        {
            LeaveCriticalSection(&pin->flushing_cs);
            continue;
        }

        TRACE("Got event of type %#x.\n", event.type);

        switch (event.type)
        {
            case WG_PARSER_EVENT_BUFFER:
                send_buffer(pin, event.u.buffer);
                break;

            case WG_PARSER_EVENT_EOS:
                IPin_EndOfStream(pin->pin.pin.peer);
                break;

            case WG_PARSER_EVENT_SEGMENT:
                IPin_NewSegment(pin->pin.pin.peer, event.u.segment.position,
                        event.u.segment.stop, event.u.segment.rate);
                break;

            case WG_PARSER_EVENT_NONE:
                assert(0);
        }

        LeaveCriticalSection(&pin->flushing_cs);
    }

    TRACE("Streaming stopped; exiting.\n");
    return 0;
}
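
/* getrange function for our source pad: hand the read request to the Win32
 * read thread and wait for it to complete. */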
static GstFlowReturn request_buffer_src(GstPad *pad, GstObject *parent, guint64 offset, guint size, GstBuffer **buffer)
{
    struct parser *filter = gst_pad_get_element_private(pad);
    struct wg_parser *parser = filter->wg_parser;
    GstBuffer *new_buffer = NULL;
    GstFlowReturn ret;

    GST_LOG("pad %p, offset %" G_GINT64_MODIFIER "u, length %u, buffer %p.", pad, offset, size, *buffer);

    if (!*buffer)
        *buffer = new_buffer = gst_buffer_new_and_alloc(size);

    pthread_mutex_lock(&parser->mutex);

    assert(!parser->read_request.buffer);
    parser->read_request.buffer = *buffer;
    parser->read_request.offset = offset;
    parser->read_request.size = size;
    parser->read_request.done = false;
    pthread_cond_signal(&parser->read_cond);

    /* Note that we don't unblock this wait on GST_EVENT_FLUSH_START. We expect
     * the upstream pin to flush if necessary. We should never be blocked on
     * read_thread() not running. */

    while (!parser->read_request.done)
        pthread_cond_wait(&parser->read_done_cond, &parser->mutex);

    ret = parser->read_request.ret;

    pthread_mutex_unlock(&parser->mutex);

    GST_LOG("Request returned %s.", gst_flow_get_name(ret));

    if (ret != GST_FLOW_OK && new_buffer)
        gst_buffer_unref(new_buffer);

    return ret;
}

static GstFlowReturn read_buffer(struct parser *This, guint64 ofs, guint len, GstBuffer *buffer)
{
    HRESULT hr;
    GstMapInfo info;

    TRACE("filter %p, offset %s, length %u, buffer %p.\n", This, wine_dbgstr_longlong(ofs), len, buffer);

    if (ofs == GST_BUFFER_OFFSET_NONE)
        ofs = This->next_pull_offset;
    if (ofs >= This->file_size)
    {
        WARN("Reading past eof: %s, %u\n", wine_dbgstr_longlong(ofs), len);
        return GST_FLOW_EOS;
    }
    if (len + ofs > This->file_size)
        len = This->file_size - ofs;
    This->next_pull_offset = ofs + len;

    gst_buffer_map(buffer, &info, GST_MAP_WRITE);
    hr = IAsyncReader_SyncRead(This->reader, ofs, len, info.data);
    gst_buffer_unmap(buffer, &info);
    if (FAILED(hr))
    {
        ERR("Failed to read data, hr %#x.\n", hr);
        return GST_FLOW_ERROR;
    }

    return GST_FLOW_OK;
}
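
/* Win32 thread that services read requests from the GStreamer side using the
 * upstream IAsyncReader. */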
static DWORD CALLBACK read_thread(void *arg)
{
    struct parser *filter = arg;
    struct wg_parser *parser = filter->wg_parser;

    TRACE("Starting read thread for filter %p.\n", filter);

    pthread_mutex_lock(&parser->mutex);

    while (filter->sink_connected)
    {
        while (parser->sink_connected && !parser->read_request.buffer)
            pthread_cond_wait(&parser->read_cond, &parser->mutex);

        if (!parser->sink_connected)
            break;

        parser->read_request.done = true;
        parser->read_request.ret = read_buffer(filter, parser->read_request.offset,
                parser->read_request.size, parser->read_request.buffer);
        parser->read_request.buffer = NULL;
        pthread_cond_signal(&parser->read_done_cond);
    }

    pthread_mutex_unlock(&parser->mutex);

    TRACE("Streaming stopped; exiting.\n");
    return 0;
}

static void removed_decoded_pad(GstElement *bin, GstPad *pad, gpointer user)
{
    struct parser *filter = user;
    unsigned int i;
    char *name;

    GST_LOG("filter %p, bin %p, pad %p.", filter, bin, pad);

    for (i = 0; i < filter->source_count; ++i)
    {
        struct wg_parser_stream *stream = filter->sources[i]->wg_stream;

        if (stream->their_src == pad)
        {
            if (stream->post_sink)
                gst_pad_unlink(stream->their_src, stream->post_sink);
            else
                gst_pad_unlink(stream->their_src, stream->my_sink);
            gst_object_unref(stream->their_src);
            stream->their_src = NULL;
            return;
        }
    }

    name = gst_pad_get_name(pad);
    GST_LOG("No pin matching pad \"%s\" found.", name);
    g_free(name);
}
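
/* Create a DirectShow output pin for a new decodebin source pad, inserting
 * post-processing elements (deinterlace/videoconvert/videoflip or
 * audioconvert) where needed. */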
static void init_new_decoded_pad(GstElement *bin, GstPad *pad, struct parser *This)
{
    static const WCHAR formatW[] = {'S','t','r','e','a','m',' ','%','0','2','u',0};
    struct wg_parser *parser = This->wg_parser;
    struct wg_parser_stream *stream;
    const char *typename;
    char *name;
    GstCaps *caps;
    GstStructure *arg;
    struct parser_source *pin;
    int ret;
    WCHAR nameW[128];

    TRACE("%p %p %p\n", This, bin, pad);

    sprintfW(nameW, formatW, This->source_count);

    name = gst_pad_get_name(pad);
    TRACE("Name: %s\n", name);
    g_free(name);

    caps = gst_pad_query_caps(pad, NULL);
    caps = gst_caps_make_writable(caps);
    arg = gst_caps_get_structure(caps, 0);
    typename = gst_structure_get_name(arg);

    if (!(pin = create_pin(This, nameW)))
    {
        ERR("Failed to allocate memory.\n");
        goto out;
    }
    stream = pin->wg_stream;

    if (!strcmp(typename, "video/x-raw"))
    {
        GstElement *deinterlace, *vconv, *flip, *vconv2;

        /* DirectShow can express interlaced video, but downstream filters can't
         * necessarily consume it. In particular, the video renderer can't. */
        if (!(deinterlace = gst_element_factory_make("deinterlace", NULL)))
        {
            ERR("Failed to create deinterlace, are %u-bit GStreamer \"good\" plugins installed?\n",
                    8 * (int)sizeof(void *));
            goto out;
        }

        /* decodebin considers many YUV formats to be "raw", but some quartz
         * filters can't handle those. Also, videoflip can't handle all "raw"
         * formats either. Add a videoconvert to swap color spaces. */
        if (!(vconv = gst_element_factory_make("videoconvert", NULL)))
        {
            ERR("Failed to create videoconvert, are %u-bit GStreamer \"base\" plugins installed?\n",
                    8 * (int)sizeof(void *));
            goto out;
        }

        /* GStreamer outputs RGB video top-down, but DirectShow expects bottom-up. */
        if (!(flip = gst_element_factory_make("videoflip", NULL)))
        {
            ERR("Failed to create videoflip, are %u-bit GStreamer \"good\" plugins installed?\n",
                    8 * (int)sizeof(void *));
            goto out;
        }

        /* videoflip does not support 15 and 16-bit RGB so add a second videoconvert
         * to do the final conversion. */
        if (!(vconv2 = gst_element_factory_make("videoconvert", NULL)))
        {
            ERR("Failed to create videoconvert, are %u-bit GStreamer \"base\" plugins installed?\n",
                    8 * (int)sizeof(void *));
            goto out;
        }

        /* The bin takes ownership of these elements. */
        gst_bin_add(GST_BIN(parser->container), deinterlace);
        gst_element_sync_state_with_parent(deinterlace);
        gst_bin_add(GST_BIN(parser->container), vconv);
        gst_element_sync_state_with_parent(vconv);
        gst_bin_add(GST_BIN(parser->container), flip);
        gst_element_sync_state_with_parent(flip);
        gst_bin_add(GST_BIN(parser->container), vconv2);
        gst_element_sync_state_with_parent(vconv2);

        gst_element_link(deinterlace, vconv);
        gst_element_link(vconv, flip);
        gst_element_link(flip, vconv2);

        stream->post_sink = gst_element_get_static_pad(deinterlace, "sink");
        stream->post_src = gst_element_get_static_pad(vconv2, "src");
        stream->flip = flip;
    }
    else if (!strcmp(typename, "audio/x-raw"))
    {
        GstElement *convert;

        /* Currently our dsound can't handle 64-bit formats or all
         * surround-sound configurations. Native dsound can't always handle
         * 64-bit formats either. Add an audioconvert to allow changing bit
         * depth and channel count. */
        if (!(convert = gst_element_factory_make("audioconvert", NULL)))
        {
            ERR("Failed to create audioconvert, are %u-bit GStreamer \"base\" plugins installed?\n",
                    8 * (int)sizeof(void *));
            goto out;
        }

        gst_bin_add(GST_BIN(parser->container), convert);
        gst_element_sync_state_with_parent(convert);

        stream->post_sink = gst_element_get_static_pad(convert, "sink");
        stream->post_src = gst_element_get_static_pad(convert, "src");
    }

    if (stream->post_sink)
    {
        if ((ret = gst_pad_link(pad, stream->post_sink)) < 0)
        {
            ERR("Failed to link decodebin source pad to post-processing elements, error %s.\n",
                    gst_pad_link_get_name(ret));
            gst_object_unref(stream->post_sink);
            stream->post_sink = NULL;
            goto out;
        }

        if ((ret = gst_pad_link(stream->post_src, stream->my_sink)) < 0)
        {
            ERR("Failed to link post-processing elements to our sink pad, error %s.\n",
                    gst_pad_link_get_name(ret));
            gst_object_unref(stream->post_src);
            stream->post_src = NULL;
            gst_object_unref(stream->post_sink);
            stream->post_sink = NULL;
            goto out;
        }
    }
    else if ((ret = gst_pad_link(pad, stream->my_sink)) < 0)
    {
        ERR("Failed to link decodebin source pad to our sink pad, error %s.\n",
                gst_pad_link_get_name(ret));
        goto out;
    }

    gst_pad_set_active(stream->my_sink, 1);
    gst_object_ref(stream->their_src = pad);
out:
    gst_caps_unref(caps);
}

static void existing_new_pad(GstElement *bin, GstPad *pad, gpointer user)
{
    struct parser *This = user;

    TRACE("%p %p %p\n", This, bin, pad);

    if (gst_pad_is_linked(pad))
        return;

    init_new_decoded_pad(bin, pad, This);
}
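
/* Answer duration, seeking, and scheduling queries that decodebin makes
 * against our source pad. */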
static gboolean query_function(GstPad *pad, GstObject *parent, GstQuery *query)
{
    struct parser *This = gst_pad_get_element_private(pad);
    struct wg_parser *parser = This->wg_parser;
    GstFormat format;

    GST_LOG("filter %p, type %s.", This, GST_QUERY_TYPE_NAME(query));

    switch (GST_QUERY_TYPE(query)) {
        case GST_QUERY_DURATION:
            gst_query_parse_duration(query, &format, NULL);
            if (format == GST_FORMAT_PERCENT)
            {
                gst_query_set_duration(query, GST_FORMAT_PERCENT, GST_FORMAT_PERCENT_MAX);
                return TRUE;
            }
            else if (format == GST_FORMAT_BYTES)
            {
                gst_query_set_duration(query, GST_FORMAT_BYTES, parser->file_size);
                return TRUE;
            }
            return FALSE;
        case GST_QUERY_SEEKING:
            gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
            if (format != GST_FORMAT_BYTES)
            {
                GST_WARNING("Cannot seek using format \"%s\".", gst_format_get_name(format));
                return FALSE;
            }
            gst_query_set_seeking(query, GST_FORMAT_BYTES, 1, 0, parser->file_size);
            return TRUE;
        case GST_QUERY_SCHEDULING:
            gst_query_set_scheduling(query, GST_SCHEDULING_FLAG_SEEKABLE, 1, -1, 0);
            gst_query_add_scheduling_mode(query, GST_PAD_MODE_PUSH);
            gst_query_add_scheduling_mode(query, GST_PAD_MODE_PULL);
            return TRUE;
        default:
            GST_WARNING("Unhandled query type %s.", GST_QUERY_TYPE_NAME(query));
            return FALSE;
    }
}

static gboolean activate_push(GstPad *pad, gboolean activate)
{
    struct parser *This = gst_pad_get_element_private(pad);
    struct wg_parser *parser = This->wg_parser;

    if (!activate) {
        if (parser->push_thread) {
            pthread_join(parser->push_thread, NULL);
            parser->push_thread = 0;
        }
    } else if (!parser->push_thread) {
        int ret;

        if ((ret = pthread_create(&parser->push_thread, NULL, push_data, This)))
        {
            GST_ERROR("Failed to create push thread: %s", strerror(errno));
            parser->push_thread = 0;
            return FALSE;
        }
    }
    return TRUE;
}

static gboolean activate_mode(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate)
{
    struct parser *filter = gst_pad_get_element_private(pad);

    GST_DEBUG("%s source pad for filter %p in %s mode.",
            activate ? "Activating" : "Deactivating", filter, gst_pad_mode_get_name(mode));

    switch (mode) {
        case GST_PAD_MODE_PULL:
            return TRUE;
        case GST_PAD_MODE_PUSH:
            return activate_push(pad, activate);
        default:
            return FALSE;
    }
    return FALSE;
}

static void no_more_pads(GstElement *decodebin, gpointer user)
{
    struct parser *filter = user;
    struct wg_parser *parser = filter->wg_parser;

    GST_DEBUG("filter %p.", filter);

    pthread_mutex_lock(&parser->mutex);
    parser->no_more_pads = true;
    pthread_mutex_unlock(&parser->mutex);
    pthread_cond_signal(&parser->init_cond);
}

static GstAutoplugSelectResult autoplug_blacklist(GstElement *bin, GstPad *pad, GstCaps *caps, GstElementFactory *fact, gpointer user)
{
    const char *name = gst_element_factory_get_longname(fact);

    GST_TRACE("Using \"%s\".", name);

    if (strstr(name, "Player protection"))
    {
        GST_WARNING("Blacklisted a/52 decoder because it only works in Totem.");
        return GST_AUTOPLUG_SELECT_SKIP;
    }
    if (!strcmp(name, "Fluendo Hardware Accelerated Video Decoder"))
    {
        GST_WARNING("Disabled video acceleration since it breaks in wine.");
        return GST_AUTOPLUG_SELECT_SKIP;
    }
    return GST_AUTOPLUG_SELECT_TRY;
}
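
/* Synchronous bus handler: report GStreamer errors and warnings, and signal
 * error/duration state changes to waiting threads. */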
static GstBusSyncReply watch_bus(GstBus *bus, GstMessage *msg, gpointer data)
{
    struct parser *filter = data;
    struct wg_parser *parser = filter->wg_parser;
    GError *err = NULL;
    gchar *dbg_info = NULL;

    GST_DEBUG("filter %p, message type %s.", filter, GST_MESSAGE_TYPE_NAME(msg));

    switch (msg->type)
    {
        case GST_MESSAGE_ERROR:
            gst_message_parse_error(msg, &err, &dbg_info);
            fprintf(stderr, "winegstreamer: error: %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
            fprintf(stderr, "winegstreamer: error: %s: %s\n", GST_OBJECT_NAME(msg->src), dbg_info);
            g_error_free(err);
            g_free(dbg_info);
            pthread_mutex_lock(&parser->mutex);
            parser->error = true;
            pthread_mutex_unlock(&parser->mutex);
            pthread_cond_signal(&parser->init_cond);
            break;

        case GST_MESSAGE_WARNING:
            gst_message_parse_warning(msg, &err, &dbg_info);
            fprintf(stderr, "winegstreamer: warning: %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
            fprintf(stderr, "winegstreamer: warning: %s: %s\n", GST_OBJECT_NAME(msg->src), dbg_info);
            g_error_free(err);
            g_free(dbg_info);
            break;

        case GST_MESSAGE_DURATION_CHANGED:
            pthread_mutex_lock(&parser->mutex);
            parser->has_duration = true;
            pthread_mutex_unlock(&parser->mutex);
            pthread_cond_signal(&parser->init_cond);
            break;

        default:
            break;
    }
    gst_message_unref(msg);
    return GST_BUS_DROP;
}

static LONGLONG query_duration(GstPad *pad)
{
    gint64 duration, byte_length;

    if (gst_pad_query_duration(pad, GST_FORMAT_TIME, &duration))
        return duration / 100;

    WARN("Failed to query time duration; trying to convert from byte length.\n");

    /* To accurately get a duration for the stream, we want to only consider the
     * length of that stream. Hence, query for the pad duration, instead of
     * using the file duration. */
    if (gst_pad_query_duration(pad, GST_FORMAT_BYTES, &byte_length)
            && gst_pad_query_convert(pad, GST_FORMAT_BYTES, byte_length, GST_FORMAT_TIME, &duration))
        return duration / 100;

    ERR("Failed to query duration.\n");
    return 0;
}
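
/* Set up the GStreamer pipeline once the sink pin is connected: create the
 * source pad, start the read thread, build the bin via init_gst(), and wait
 * for each stream's caps to arrive. */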
static HRESULT GST_Connect(struct parser *This, IPin *pConnectPin)
{
    struct wg_parser *parser = This->wg_parser;
    unsigned int i;
    LONGLONG avail;
    GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE(
        "quartz_src",
        GST_PAD_SRC,
        GST_PAD_ALWAYS,
        GST_STATIC_CAPS_ANY);

    IAsyncReader_Length(This->reader, &This->file_size, &avail);
    parser->file_size = This->file_size;

    This->sink_connected = true;
    parser->sink_connected = true;

    This->read_thread = CreateThread(NULL, 0, read_thread, This, 0, NULL);

    if (!parser->bus)
    {
        parser->bus = gst_bus_new();
        gst_bus_set_sync_handler(parser->bus, watch_bus, This, NULL);
    }

    parser->container = gst_bin_new(NULL);
    gst_element_set_bus(parser->container, parser->bus);

    parser->my_src = gst_pad_new_from_static_template(&src_template, "quartz-src");
    gst_pad_set_getrange_function(parser->my_src, request_buffer_src);
    gst_pad_set_query_function(parser->my_src, query_function);
    gst_pad_set_activatemode_function(parser->my_src, activate_mode);
    gst_pad_set_event_function(parser->my_src, event_src);
    gst_pad_set_element_private (parser->my_src, This);

    parser->start_offset = parser->next_offset = parser->stop_offset = 0;
    This->next_pull_offset = 0;

    if (!This->init_gst(This))
        return E_FAIL;

    pthread_mutex_lock(&parser->mutex);

    for (i = 0; i < This->source_count; ++i)
    {
        struct parser_source *pin = This->sources[i];
        struct wg_parser_stream *stream = pin->wg_stream;

        pin->seek.llDuration = pin->seek.llStop = query_duration(stream->their_src);
        pin->seek.llCurrent = 0;
        while (!stream->caps && !parser->error)
            pthread_cond_wait(&parser->init_cond, &parser->mutex);
        if (parser->error)
        {
            pthread_mutex_unlock(&parser->mutex);
            return E_FAIL;
        }
    }

    pthread_mutex_unlock(&parser->mutex);

    parser->next_offset = 0;
    This->next_pull_offset = 0;
    return S_OK;
}

static inline struct parser_source *impl_from_IMediaSeeking(IMediaSeeking *iface)
{
    return CONTAINING_RECORD(iface, struct parser_source, seek.IMediaSeeking_iface);
}

static struct strmbase_pin *parser_get_pin(struct strmbase_filter *base, unsigned int index)
{
    struct parser *filter = impl_from_strmbase_filter(base);

    if (filter->enum_sink_first)
    {
        if (!index)
            return &filter->sink.pin;
        else if (index <= filter->source_count)
            return &filter->sources[index - 1]->pin.pin;
    }
    else
    {
        if (index < filter->source_count)
            return &filter->sources[index]->pin.pin;
        else if (index == filter->source_count)
            return &filter->sink.pin;
    }
    return NULL;
}

static void wg_parser_destroy(struct wg_parser *parser)
{
    if (parser->bus)
    {
        gst_bus_set_sync_handler(parser->bus, NULL, NULL, NULL);
        gst_object_unref(parser->bus);
    }

    pthread_mutex_destroy(&parser->mutex);
    pthread_cond_destroy(&parser->init_cond);
    pthread_cond_destroy(&parser->read_cond);
    pthread_cond_destroy(&parser->read_done_cond);

    free(parser);
}

static void parser_destroy(struct strmbase_filter *iface)
{
    struct parser *filter = impl_from_strmbase_filter(iface);
    HRESULT hr;

    /* Don't need to clean up output pins, disconnecting input pin will do that */
    if (filter->sink.pin.peer)
    {
        hr = IPin_Disconnect(filter->sink.pin.peer);
        assert(hr == S_OK);
        hr = IPin_Disconnect(&filter->sink.pin.IPin_iface);
        assert(hr == S_OK);
    }

    if (filter->reader)
        IAsyncReader_Release(filter->reader);
    filter->reader = NULL;

    wg_parser_destroy(filter->wg_parser);

    strmbase_sink_cleanup(&filter->sink);
    strmbase_filter_cleanup(&filter->filter);
    heap_free(filter);
}
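
/* Start streaming: replay the retained seek position to GStreamer, commit the
 * allocators, and spawn one streaming thread per connected source pin. */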
static HRESULT parser_init_stream(struct strmbase_filter *iface)
{
    struct parser *filter = impl_from_strmbase_filter(iface);
    struct wg_parser *parser = filter->wg_parser;
    GstSeekType stop_type = GST_SEEK_TYPE_NONE;
    const SourceSeeking *seeking;
    unsigned int i;

    if (!parser->container)
        return S_OK;

    filter->streaming = true;
    pthread_mutex_lock(&parser->mutex);
    parser->flushing = false;
    pthread_mutex_unlock(&parser->mutex);

    /* DirectShow retains the old seek positions, but resets to them every time
     * it transitions from stopped -> paused. */

    parser->next_offset = parser->start_offset;

    seeking = &filter->sources[0]->seek;
    if (seeking->llStop && seeking->llStop != seeking->llDuration)
        stop_type = GST_SEEK_TYPE_SET;
    gst_pad_push_event(filter->sources[0]->wg_stream->my_sink, gst_event_new_seek(
            seeking->dRate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
            GST_SEEK_TYPE_SET, seeking->llCurrent * 100,
            stop_type, seeking->llStop * 100));

    for (i = 0; i < filter->source_count; ++i)
    {
        HRESULT hr;

        if (!filter->sources[i]->pin.pin.peer)
            continue;

        if (FAILED(hr = IMemAllocator_Commit(filter->sources[i]->pin.pAllocator)))
            ERR("Failed to commit allocator, hr %#x.\n", hr);

        filter->sources[i]->thread = CreateThread(NULL, 0, stream_thread, filter->sources[i], 0, NULL);
    }

    return S_OK;
}

static HRESULT parser_cleanup_stream(struct strmbase_filter *iface)
{
    struct parser *filter = impl_from_strmbase_filter(iface);
    struct wg_parser *parser = filter->wg_parser;
    unsigned int i;

    if (!parser->container)
        return S_OK;

    filter->streaming = false;
    pthread_mutex_lock(&parser->mutex);
    parser->flushing = true;
    pthread_mutex_unlock(&parser->mutex);

    for (i = 0; i < filter->source_count; ++i)
    {
        struct parser_source *pin = filter->sources[i];

        if (!pin->pin.pin.peer)
            continue;

        pthread_cond_signal(&pin->wg_stream->event_cond);
    }

    for (i = 0; i < filter->source_count; ++i)
    {
        struct parser_source *pin = filter->sources[i];

        if (!pin->pin.pin.peer)
            continue;

        IMemAllocator_Decommit(pin->pin.pAllocator);

        WaitForSingleObject(pin->thread, INFINITE);
        CloseHandle(pin->thread);
        pin->thread = NULL;
    }

    return S_OK;
}

static const struct strmbase_filter_ops filter_ops =
{
    .filter_get_pin = parser_get_pin,
    .filter_destroy = parser_destroy,
    .filter_init_stream = parser_init_stream,
    .filter_cleanup_stream = parser_cleanup_stream,
};

static inline struct parser *impl_from_strmbase_sink(struct strmbase_sink *iface)
{
    return CONTAINING_RECORD(iface, struct parser, sink);
}

static HRESULT sink_query_accept(struct strmbase_pin *iface, const AM_MEDIA_TYPE *mt)
{
    if (IsEqualGUID(&mt->majortype, &MEDIATYPE_Stream))
        return S_OK;
    return S_FALSE;
}

static HRESULT parser_sink_connect(struct strmbase_sink *iface, IPin *peer, const AM_MEDIA_TYPE *pmt)
{
    struct parser *filter = impl_from_strmbase_sink(iface);
    HRESULT hr = S_OK;

    mark_wine_thread();

    filter->reader = NULL;
    if (FAILED(hr = IPin_QueryInterface(peer, &IID_IAsyncReader, (void **)&filter->reader)))
        return hr;

    if (FAILED(hr = GST_Connect(filter, peer)))
        goto err;

    return S_OK;
err:
    GST_RemoveOutputPins(filter);
    IAsyncReader_Release(filter->reader);
    filter->reader = NULL;
    return hr;
}

static void parser_sink_disconnect(struct strmbase_sink *iface)
{
    struct parser *filter = impl_from_strmbase_sink(iface);

    mark_wine_thread();

    GST_RemoveOutputPins(filter);

    IAsyncReader_Release(filter->reader);
    filter->reader = NULL;
}

static const struct strmbase_sink_ops sink_ops =
{
    .base.pin_query_accept = sink_query_accept,
    .sink_connect = parser_sink_connect,
    .sink_disconnect = parser_sink_disconnect,
};
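
/* Build the decodebin-based pipeline for the generic splitter and wait until
 * all source pads have appeared (or an error is reported). */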
static BOOL decodebin_parser_init_gst(struct parser *filter)
|
|
{
|
|
GstElement *element = gst_element_factory_make("decodebin", NULL);
|
|
struct wg_parser *parser = filter->wg_parser;
|
|
int ret;
|
|
|
|
if (!element)
|
|
{
|
|
ERR("Failed to create decodebin; are %u-bit GStreamer \"base\" plugins installed?\n",
|
|
8 * (int)sizeof(void*));
|
|
return FALSE;
|
|
}
|
|
|
|
gst_bin_add(GST_BIN(parser->container), element);
|
|
|
|
g_signal_connect(element, "pad-added", G_CALLBACK(existing_new_pad_wrapper), filter);
|
|
g_signal_connect(element, "pad-removed", G_CALLBACK(removed_decoded_pad), filter);
|
|
g_signal_connect(element, "autoplug-select", G_CALLBACK(autoplug_blacklist), filter);
|
|
g_signal_connect(element, "no-more-pads", G_CALLBACK(no_more_pads), filter);
|
|
|
|
parser->their_sink = gst_element_get_static_pad(element, "sink");
|
|
|
|
pthread_mutex_lock(&parser->mutex);
|
|
parser->no_more_pads = parser->error = false;
|
|
pthread_mutex_unlock(&parser->mutex);
|
|
|
|
if ((ret = gst_pad_link(parser->my_src, parser->their_sink)) < 0)
|
|
{
|
|
ERR("Failed to link pads, error %d.\n", ret);
|
|
return FALSE;
|
|
}
|
|
|
|
gst_element_set_state(parser->container, GST_STATE_PAUSED);
|
|
ret = gst_element_get_state(parser->container, NULL, NULL, -1);
|
|
if (ret == GST_STATE_CHANGE_FAILURE)
|
|
{
|
|
ERR("Failed to play stream.\n");
|
|
return FALSE;
|
|
}
|
|
|
|
pthread_mutex_lock(&parser->mutex);
|
|
while (!parser->no_more_pads && !parser->error)
|
|
pthread_cond_wait(&parser->init_cond, &parser->mutex);
|
|
if (parser->error)
|
|
{
|
|
pthread_mutex_unlock(&parser->mutex);
|
|
return FALSE;
|
|
}
|
|
pthread_mutex_unlock(&parser->mutex);
|
|
return TRUE;
|
|
}

static HRESULT decodebin_parser_source_query_accept(struct parser_source *pin, const AM_MEDIA_TYPE *mt)
{
    /* At least make sure we can convert it to GstCaps. */
    GstCaps *caps = amt_to_gst_caps(mt);

    if (!caps)
        return S_FALSE;
    gst_caps_unref(caps);
    return S_OK;
}

static HRESULT decodebin_parser_source_get_media_type(struct parser_source *pin,
        unsigned int index, AM_MEDIA_TYPE *mt)
{
    struct wg_parser_stream *stream = pin->wg_stream;
    const GstCaps *caps = stream->caps;
    const GstStructure *structure;
    const char *type;

    static const GstVideoFormat video_formats[] =
    {
        /* Try to prefer YUV formats over RGB ones. Most decoders output in the
         * YUV color space, and it's generally much less expensive for
         * videoconvert to do YUV -> YUV transformations. */
        GST_VIDEO_FORMAT_AYUV,
        GST_VIDEO_FORMAT_I420,
        GST_VIDEO_FORMAT_YV12,
        GST_VIDEO_FORMAT_YUY2,
        GST_VIDEO_FORMAT_UYVY,
        GST_VIDEO_FORMAT_YVYU,
        GST_VIDEO_FORMAT_NV12,
        GST_VIDEO_FORMAT_BGRA,
        GST_VIDEO_FORMAT_BGRx,
        GST_VIDEO_FORMAT_BGR,
        GST_VIDEO_FORMAT_RGB16,
        GST_VIDEO_FORMAT_RGB15,
    };

    assert(caps); /* We shouldn't be able to get here if caps haven't been set. */
    structure = gst_caps_get_structure(caps, 0);
    type = gst_structure_get_name(structure);

    memset(mt, 0, sizeof(AM_MEDIA_TYPE));

    if (amt_from_gst_caps(caps, mt))
    {
        if (!index--)
            return S_OK;
        FreeMediaType(mt);
    }

    if (!strcmp(type, "video/x-raw") && index < ARRAY_SIZE(video_formats))
    {
        gint width, height, fps_n, fps_d;
        GstVideoInfo info;

        gst_structure_get_int(structure, "width", &width);
        gst_structure_get_int(structure, "height", &height);
        gst_video_info_set_format(&info, video_formats[index], width, height);
        if (gst_structure_get_fraction(structure, "framerate", &fps_n, &fps_d) && fps_n)
        {
            info.fps_n = fps_n;
            info.fps_d = fps_d;
        }
        if (!amt_from_gst_video_info(&info, mt))
            return E_OUTOFMEMORY;
        return S_OK;
    }
    else if (!strcmp(type, "audio/x-raw") && !index)
    {
        GstAudioInfo info;
        gint rate;

        gst_structure_get_int(structure, "rate", &rate);
        gst_audio_info_set_format(&info, GST_AUDIO_FORMAT_S16LE, rate, 2, NULL);
        if (!amt_from_gst_audio_info(&info, mt))
            return E_OUTOFMEMORY;
        return S_OK;
    }

    return VFW_S_NO_MORE_ITEMS;
}

static BOOL parser_init_gstreamer(void)
{
    if (!init_gstreamer())
        return FALSE;
    GST_DEBUG_CATEGORY_INIT(wine, "WINE", GST_DEBUG_FG_RED, "Wine GStreamer support");
    return TRUE;
}

static struct wg_parser *wg_parser_create(void)
{
    struct wg_parser *parser;

    if (!(parser = calloc(1, sizeof(*parser))))
        return NULL;

    pthread_mutex_init(&parser->mutex, NULL);
    pthread_cond_init(&parser->init_cond, NULL);
    pthread_cond_init(&parser->read_cond, NULL);
    pthread_cond_init(&parser->read_done_cond, NULL);
    parser->flushing = true;

    TRACE("Created winegstreamer parser %p.\n", parser);
    return parser;
}

HRESULT decodebin_parser_create(IUnknown *outer, IUnknown **out)
{
    struct parser *object;

    if (!parser_init_gstreamer())
        return E_FAIL;

    mark_wine_thread();

    if (!(object = heap_alloc_zero(sizeof(*object))))
        return E_OUTOFMEMORY;

    if (!(object->wg_parser = wg_parser_create()))
    {
        heap_free(object);
        return E_OUTOFMEMORY;
    }

    strmbase_filter_init(&object->filter, outer, &CLSID_decodebin_parser, &filter_ops);
    strmbase_sink_init(&object->sink, &object->filter, wcsInputPinName, &sink_ops, NULL);

    object->init_gst = decodebin_parser_init_gst;
    object->source_query_accept = decodebin_parser_source_query_accept;
    object->source_get_media_type = decodebin_parser_source_get_media_type;

    TRACE("Created GStreamer demuxer %p.\n", object);
    *out = &object->filter.IUnknown_inner;
    return S_OK;
}

static struct parser *impl_from_IAMStreamSelect(IAMStreamSelect *iface)
{
    return CONTAINING_RECORD(iface, struct parser, IAMStreamSelect_iface);
}

static HRESULT WINAPI stream_select_QueryInterface(IAMStreamSelect *iface, REFIID iid, void **out)
{
    struct parser *filter = impl_from_IAMStreamSelect(iface);
    return IUnknown_QueryInterface(filter->filter.outer_unk, iid, out);
}

static ULONG WINAPI stream_select_AddRef(IAMStreamSelect *iface)
{
    struct parser *filter = impl_from_IAMStreamSelect(iface);
    return IUnknown_AddRef(filter->filter.outer_unk);
}

static ULONG WINAPI stream_select_Release(IAMStreamSelect *iface)
{
    struct parser *filter = impl_from_IAMStreamSelect(iface);
    return IUnknown_Release(filter->filter.outer_unk);
}

static HRESULT WINAPI stream_select_Count(IAMStreamSelect *iface, DWORD *count)
{
    FIXME("iface %p, count %p, stub!\n", iface, count);
    return E_NOTIMPL;
}

static HRESULT WINAPI stream_select_Info(IAMStreamSelect *iface, LONG index,
        AM_MEDIA_TYPE **mt, DWORD *flags, LCID *lcid, DWORD *group, WCHAR **name,
        IUnknown **object, IUnknown **unknown)
{
    FIXME("iface %p, index %d, mt %p, flags %p, lcid %p, group %p, name %p, object %p, unknown %p, stub!\n",
            iface, index, mt, flags, lcid, group, name, object, unknown);
    return E_NOTIMPL;
}

static HRESULT WINAPI stream_select_Enable(IAMStreamSelect *iface, LONG index, DWORD flags)
{
    FIXME("iface %p, index %d, flags %#x, stub!\n", iface, index, flags);
    return E_NOTIMPL;
}

static const IAMStreamSelectVtbl stream_select_vtbl =
{
    stream_select_QueryInterface,
    stream_select_AddRef,
    stream_select_Release,
    stream_select_Count,
    stream_select_Info,
    stream_select_Enable,
};

static HRESULT WINAPI GST_ChangeCurrent(IMediaSeeking *iface)
{
    struct parser_source *This = impl_from_IMediaSeeking(iface);
    TRACE("(%p)\n", This);
    return S_OK;
}

static HRESULT WINAPI GST_ChangeStop(IMediaSeeking *iface)
{
    struct parser_source *This = impl_from_IMediaSeeking(iface);
    TRACE("(%p)\n", This);
    return S_OK;
}
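
/* Changing the rate is done with a GStreamer seek event that carries only the
 * new rate; both positions are passed as GST_SEEK_TYPE_NONE and left alone. */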
static HRESULT WINAPI GST_ChangeRate(IMediaSeeking *iface)
{
    struct parser_source *This = impl_from_IMediaSeeking(iface);
    struct wg_parser_stream *stream = This->wg_stream;
    GstEvent *ev = gst_event_new_seek(This->seek.dRate, GST_FORMAT_TIME, 0,
            GST_SEEK_TYPE_NONE, -1, GST_SEEK_TYPE_NONE, -1);
    TRACE("(%p) New rate %g\n", This, This->seek.dRate);
    mark_wine_thread();
    gst_pad_push_event(stream->my_sink, ev);
    return S_OK;
}

static HRESULT WINAPI GST_Seeking_QueryInterface(IMediaSeeking *iface, REFIID riid, void **ppv)
{
    struct parser_source *This = impl_from_IMediaSeeking(iface);
    return IPin_QueryInterface(&This->pin.pin.IPin_iface, riid, ppv);
}

static ULONG WINAPI GST_Seeking_AddRef(IMediaSeeking *iface)
{
    struct parser_source *This = impl_from_IMediaSeeking(iface);
    return IPin_AddRef(&This->pin.pin.IPin_iface);
}

static ULONG WINAPI GST_Seeking_Release(IMediaSeeking *iface)
{
    struct parser_source *This = impl_from_IMediaSeeking(iface);
    return IPin_Release(&This->pin.pin.IPin_iface);
}

static HRESULT WINAPI GST_Seeking_SetPositions(IMediaSeeking *iface,
        LONGLONG *current, DWORD current_flags, LONGLONG *stop, DWORD stop_flags)
{
    GstSeekType current_type = GST_SEEK_TYPE_SET, stop_type = GST_SEEK_TYPE_SET;
    struct parser_source *pin = impl_from_IMediaSeeking(iface);
    struct wg_parser_stream *stream = pin->wg_stream;
    struct parser *filter = impl_from_strmbase_filter(pin->pin.pin.filter);
    struct wg_parser *parser = filter->wg_parser;
    GstSeekFlags flags = 0;
    HRESULT hr = S_OK;
    int i;

    TRACE("pin %p, current %s, current_flags %#x, stop %s, stop_flags %#x.\n",
            pin, current ? debugstr_time(*current) : "<null>", current_flags,
            stop ? debugstr_time(*stop) : "<null>", stop_flags);

    mark_wine_thread();

    if (pin->pin.pin.filter->state == State_Stopped)
    {
        SourceSeekingImpl_SetPositions(iface, current, current_flags, stop, stop_flags);
        return S_OK;
    }

    if (!(current_flags & AM_SEEKING_NoFlush))
    {
        pthread_mutex_lock(&parser->mutex);
        parser->flushing = true;
        pthread_mutex_unlock(&parser->mutex);

        for (i = 0; i < filter->source_count; ++i)
        {
            if (filter->sources[i]->pin.pin.peer)
            {
                pthread_cond_signal(&stream->event_cond);
                IPin_BeginFlush(filter->sources[i]->pin.pin.peer);
            }
        }

        if (filter->reader)
            IAsyncReader_BeginFlush(filter->reader);
    }

    /* Acquire the flushing locks. This blocks the streaming threads, and
     * ensures the seek is serialized between flushes. */
    for (i = 0; i < filter->source_count; ++i)
    {
        if (filter->sources[i]->pin.pin.peer)
            EnterCriticalSection(&pin->flushing_cs);
    }

    SourceSeekingImpl_SetPositions(iface, current, current_flags, stop, stop_flags);

    if (current_flags & AM_SEEKING_SeekToKeyFrame)
        flags |= GST_SEEK_FLAG_KEY_UNIT;
    if (current_flags & AM_SEEKING_Segment)
        flags |= GST_SEEK_FLAG_SEGMENT;
    if (!(current_flags & AM_SEEKING_NoFlush))
        flags |= GST_SEEK_FLAG_FLUSH;

    if ((current_flags & AM_SEEKING_PositioningBitsMask) == AM_SEEKING_NoPositioning)
        current_type = GST_SEEK_TYPE_NONE;
    if ((stop_flags & AM_SEEKING_PositioningBitsMask) == AM_SEEKING_NoPositioning)
        stop_type = GST_SEEK_TYPE_NONE;
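
    /* DirectShow positions are REFERENCE_TIME values in 100 ns units, while
     * GStreamer expects nanoseconds, hence the factor of 100 below. */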
    if (!gst_pad_push_event(stream->my_sink, gst_event_new_seek(pin->seek.dRate, GST_FORMAT_TIME, flags,
            current_type, pin->seek.llCurrent * 100, stop_type, pin->seek.llStop * 100)))
    {
        ERR("Failed to seek (current %s, stop %s).\n",
                debugstr_time(pin->seek.llCurrent), debugstr_time(pin->seek.llStop));
        hr = E_FAIL;
    }

    if (!(current_flags & AM_SEEKING_NoFlush))
    {
        pthread_mutex_lock(&parser->mutex);
        parser->flushing = false;
        pthread_mutex_unlock(&parser->mutex);

        for (i = 0; i < filter->source_count; ++i)
        {
            if (filter->sources[i]->pin.pin.peer)
                IPin_EndFlush(filter->sources[i]->pin.pin.peer);
        }

        if (filter->reader)
            IAsyncReader_EndFlush(filter->reader);
    }

    /* Release the flushing locks. */
    for (i = filter->source_count - 1; i >= 0; --i)
    {
        if (filter->sources[i]->pin.pin.peer)
            LeaveCriticalSection(&pin->flushing_cs);
    }

    return hr;
}

static const IMediaSeekingVtbl GST_Seeking_Vtbl =
{
    GST_Seeking_QueryInterface,
    GST_Seeking_AddRef,
    GST_Seeking_Release,
    SourceSeekingImpl_GetCapabilities,
    SourceSeekingImpl_CheckCapabilities,
    SourceSeekingImpl_IsFormatSupported,
    SourceSeekingImpl_QueryPreferredFormat,
    SourceSeekingImpl_GetTimeFormat,
    SourceSeekingImpl_IsUsingTimeFormat,
    SourceSeekingImpl_SetTimeFormat,
    SourceSeekingImpl_GetDuration,
    SourceSeekingImpl_GetStopPosition,
    SourceSeekingImpl_GetCurrentPosition,
    SourceSeekingImpl_ConvertTimeFormat,
    GST_Seeking_SetPositions,
    SourceSeekingImpl_GetPositions,
    SourceSeekingImpl_GetAvailable,
    SourceSeekingImpl_SetRate,
    SourceSeekingImpl_GetRate,
    SourceSeekingImpl_GetPreroll
};

static inline struct parser_source *impl_from_IQualityControl( IQualityControl *iface )
{
    return CONTAINING_RECORD(iface, struct parser_source, IQualityControl_iface);
}

static HRESULT WINAPI GST_QualityControl_QueryInterface(IQualityControl *iface, REFIID riid, void **ppv)
{
    struct parser_source *pin = impl_from_IQualityControl(iface);
    return IPin_QueryInterface(&pin->pin.pin.IPin_iface, riid, ppv);
}

static ULONG WINAPI GST_QualityControl_AddRef(IQualityControl *iface)
{
    struct parser_source *pin = impl_from_IQualityControl(iface);
    return IPin_AddRef(&pin->pin.pin.IPin_iface);
}

static ULONG WINAPI GST_QualityControl_Release(IQualityControl *iface)
{
    struct parser_source *pin = impl_from_IQualityControl(iface);
    return IPin_Release(&pin->pin.pin.IPin_iface);
}

static HRESULT WINAPI GST_QualityControl_Notify(IQualityControl *iface, IBaseFilter *sender, Quality q)
{
    struct parser_source *pin = impl_from_IQualityControl(iface);
    struct wg_parser_stream *stream = pin->wg_stream;
    GstQOSType type = GST_QOS_TYPE_OVERFLOW;
    GstClockTime timestamp;
    GstClockTimeDiff diff;
    GstEvent *event;

    TRACE("pin %p, sender %p, type %s, proportion %u, late %s, timestamp %s.\n",
            pin, sender, q.Type == Famine ? "Famine" : "Flood", q.Proportion,
            debugstr_time(q.Late), debugstr_time(q.TimeStamp));

    mark_wine_thread();

    /* GST_QOS_TYPE_OVERFLOW is also used for buffers that arrive on time, but
     * DirectShow filters might use Famine, so check that there actually is an
     * underrun. */
    if (q.Type == Famine && q.Proportion < 1000)
        type = GST_QOS_TYPE_UNDERFLOW;

    /* DirectShow filters sometimes pass negative timestamps (Audiosurf uses the
     * current time instead of the time of the last buffer). GstClockTime is
     * unsigned, so clamp it to 0. */
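    /* Both q.TimeStamp and q.Late are REFERENCE_TIME values in 100 ns units;
     * GStreamer clock times are in nanoseconds. */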
    timestamp = max(q.TimeStamp * 100, 0);

    /* The documentation specifies that timestamp + diff must be nonnegative. */
    diff = q.Late * 100;
    if (diff < 0 && timestamp < (GstClockTime)-diff)
        diff = -timestamp;

    /* DirectShow "Proportion" describes what fraction of buffers (in units of
     * 1/1000) the upstream filter should keep (i.e. dropping the rest). If
     * frames are late, the proportion will be less than 1000. For example, a
     * proportion of 500 means that the element should drop half of its frames,
     * essentially because frames are taking twice as long as they should to
     * arrive.
     *
     * GStreamer "proportion" is the inverse of this; it describes how much
     * faster the upstream element should produce frames. I.e. if frames are
     * taking twice as long as they should to arrive, we want the frames to be
     * decoded twice as fast, and so we pass 2.0 to GStreamer. */

    if (!q.Proportion)
    {
        WARN("Ignoring quality message with zero proportion.\n");
        return S_OK;
    }

    if (!(event = gst_event_new_qos(type, 1000.0 / q.Proportion, diff, timestamp)))
        ERR("Failed to create QOS event.\n");
    else
        gst_pad_push_event(stream->my_sink, event);

    return S_OK;
}

static HRESULT WINAPI GST_QualityControl_SetSink(IQualityControl *iface, IQualityControl *tonotify)
{
    struct parser_source *pin = impl_from_IQualityControl(iface);
    TRACE("(%p)->(%p)\n", pin, tonotify);
    /* Do nothing */
    return S_OK;
}

static const IQualityControlVtbl GSTOutPin_QualityControl_Vtbl = {
    GST_QualityControl_QueryInterface,
    GST_QualityControl_AddRef,
    GST_QualityControl_Release,
    GST_QualityControl_Notify,
    GST_QualityControl_SetSink
};

static inline struct parser_source *impl_source_from_IPin(IPin *iface)
{
    return CONTAINING_RECORD(iface, struct parser_source, pin.pin.IPin_iface);
}

static HRESULT source_query_interface(struct strmbase_pin *iface, REFIID iid, void **out)
{
    struct parser_source *pin = impl_source_from_IPin(&iface->IPin_iface);

    if (IsEqualGUID(iid, &IID_IMediaSeeking))
        *out = &pin->seek.IMediaSeeking_iface;
    else if (IsEqualGUID(iid, &IID_IQualityControl))
        *out = &pin->IQualityControl_iface;
    else
        return E_NOINTERFACE;

    IUnknown_AddRef((IUnknown *)*out);
    return S_OK;
}

static HRESULT source_query_accept(struct strmbase_pin *iface, const AM_MEDIA_TYPE *mt)
{
    struct parser_source *pin = impl_source_from_IPin(&iface->IPin_iface);
    struct parser *filter = impl_from_strmbase_filter(iface->filter);
    return filter->source_query_accept(pin, mt);
}

static HRESULT source_get_media_type(struct strmbase_pin *iface, unsigned int index, AM_MEDIA_TYPE *mt)
{
    struct parser_source *pin = impl_source_from_IPin(&iface->IPin_iface);
    struct parser *filter = impl_from_strmbase_filter(iface->filter);
    return filter->source_get_media_type(pin, index, mt);
}

static HRESULT WINAPI GSTOutPin_DecideBufferSize(struct strmbase_source *iface,
        IMemAllocator *allocator, ALLOCATOR_PROPERTIES *props)
{
    struct parser_source *pin = impl_source_from_IPin(&iface->pin.IPin_iface);
    struct wg_parser_stream *stream = pin->wg_stream;
    unsigned int buffer_size = 16384;
    ALLOCATOR_PROPERTIES ret_props;
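
    /* Size buffers for one decoded video frame (biSizeImage) or roughly one
     * second of audio (nAvgBytesPerSec); otherwise fall back to 16384 bytes. */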
    if (IsEqualGUID(&pin->pin.pin.mt.formattype, &FORMAT_VideoInfo))
    {
        VIDEOINFOHEADER *format = (VIDEOINFOHEADER *)pin->pin.pin.mt.pbFormat;
        buffer_size = format->bmiHeader.biSizeImage;

        gst_util_set_object_arg(G_OBJECT(stream->flip), "method",
                (format->bmiHeader.biCompression == BI_RGB
                || format->bmiHeader.biCompression == BI_BITFIELDS) ? "vertical-flip" : "none");
    }
    else if (IsEqualGUID(&pin->pin.pin.mt.formattype, &FORMAT_WaveFormatEx)
            && (IsEqualGUID(&pin->pin.pin.mt.subtype, &MEDIASUBTYPE_PCM)
            || IsEqualGUID(&pin->pin.pin.mt.subtype, &MEDIASUBTYPE_IEEE_FLOAT)))
    {
        WAVEFORMATEX *format = (WAVEFORMATEX *)pin->pin.pin.mt.pbFormat;
        buffer_size = format->nAvgBytesPerSec;
    }

    stream->enabled = true;

    gst_pad_push_event(stream->my_sink, gst_event_new_reconfigure());
    /* We do need to drop any buffers that might have been sent with the old
     * caps, but this will be handled in parser_init_stream(). */

    props->cBuffers = max(props->cBuffers, 1);
    props->cbBuffer = max(props->cbBuffer, buffer_size);
    props->cbAlign = max(props->cbAlign, 1);
    return IMemAllocator_SetProperties(allocator, props, &ret_props);
}

static void source_disconnect(struct strmbase_source *iface)
{
    struct parser_source *pin = impl_source_from_IPin(&iface->pin.IPin_iface);
    struct wg_parser_stream *stream = pin->wg_stream;

    stream->enabled = false;
}

static void free_source_pin(struct parser_source *pin)
{
    struct wg_parser_stream *stream = pin->wg_stream;

    if (pin->pin.pin.peer)
    {
        if (SUCCEEDED(IMemAllocator_Decommit(pin->pin.pAllocator)))
            IPin_Disconnect(pin->pin.pin.peer);
        IPin_Disconnect(&pin->pin.pin.IPin_iface);
    }

    if (stream->their_src)
    {
        if (stream->post_sink)
        {
            gst_pad_unlink(stream->their_src, stream->post_sink);
            gst_pad_unlink(stream->post_src, stream->my_sink);
            gst_object_unref(stream->post_src);
            gst_object_unref(stream->post_sink);
            stream->post_src = stream->post_sink = NULL;
        }
        else
            gst_pad_unlink(stream->their_src, stream->my_sink);
        gst_object_unref(stream->their_src);
    }
    gst_object_unref(stream->my_sink);
    gst_segment_free(stream->segment);

    pthread_cond_destroy(&stream->event_cond);
    pthread_cond_destroy(&stream->event_empty_cond);

    free(stream);

    pin->flushing_cs.DebugInfo->Spare[0] = 0;
    DeleteCriticalSection(&pin->flushing_cs);

    strmbase_seeking_cleanup(&pin->seek);
    strmbase_source_cleanup(&pin->pin);
    heap_free(pin);
}

static const struct strmbase_source_ops source_ops =
{
    .base.pin_query_interface = source_query_interface,
    .base.pin_query_accept = source_query_accept,
    .base.pin_get_media_type = source_get_media_type,
    .pfnAttemptConnection = BaseOutputPinImpl_AttemptConnection,
    .pfnDecideAllocator = BaseOutputPinImpl_DecideAllocator,
    .pfnDecideBufferSize = GSTOutPin_DecideBufferSize,
    .source_disconnect = source_disconnect,
};

static struct parser_source *create_pin(struct parser *filter, const WCHAR *name)
{
    struct parser_source *pin, **new_array;
    struct wg_parser_stream *stream;
    char pad_name[19];

    if (!(new_array = heap_realloc(filter->sources, (filter->source_count + 1) * sizeof(*new_array))))
        return NULL;
    filter->sources = new_array;

    if (!(pin = heap_alloc_zero(sizeof(*pin))))
        return NULL;

    if (!(stream = calloc(1, sizeof(*stream))))
    {
        heap_free(pin);
        return NULL;
    }
    pin->wg_stream = stream;

    strmbase_source_init(&pin->pin, &filter->filter, name, &source_ops);
    stream->segment = gst_segment_new();
    gst_segment_init(stream->segment, GST_FORMAT_TIME);
    pin->IQualityControl_iface.lpVtbl = &GSTOutPin_QualityControl_Vtbl;
    strmbase_seeking_init(&pin->seek, &GST_Seeking_Vtbl, GST_ChangeStop,
            GST_ChangeCurrent, GST_ChangeRate);
    pthread_cond_init(&stream->event_cond, NULL);
    pthread_cond_init(&stream->event_empty_cond, NULL);
    BaseFilterImpl_IncrementPinVersion(&filter->filter);
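
    /* Give the critical section a name so it can be identified in debug output. */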
    InitializeCriticalSection(&pin->flushing_cs);
    pin->flushing_cs.DebugInfo->Spare[0] = (DWORD_PTR)(__FILE__ ": pin.flushing_cs");

    sprintf(pad_name, "qz_sink_%u", filter->source_count);
    stream->my_sink = gst_pad_new(pad_name, GST_PAD_SINK);
    gst_pad_set_element_private(stream->my_sink, pin);
    gst_pad_set_chain_function(stream->my_sink, got_data_sink);
    gst_pad_set_event_function(stream->my_sink, event_sink);
    gst_pad_set_query_function(stream->my_sink, query_sink_wrapper);

    filter->sources[filter->source_count++] = pin;
    return pin;
}

static HRESULT GST_RemoveOutputPins(struct parser *This)
{
    struct wg_parser *parser = This->wg_parser;
    unsigned int i;

    TRACE("(%p)\n", This);
    mark_wine_thread();

    if (!parser->container)
        return S_OK;

    /* Unblock all of our streams. */
    pthread_mutex_lock(&parser->mutex);
    for (i = 0; i < This->source_count; ++i)
    {
        This->sources[i]->wg_stream->flushing = true;
        pthread_cond_signal(&This->sources[i]->wg_stream->event_empty_cond);
    }
    pthread_mutex_unlock(&parser->mutex);

    gst_element_set_state(parser->container, GST_STATE_NULL);
    gst_pad_unlink(parser->my_src, parser->their_sink);
    gst_object_unref(parser->my_src);
    gst_object_unref(parser->their_sink);
    parser->my_src = parser->their_sink = NULL;

    /* read_thread() needs to stay alive to service any read requests GStreamer
     * sends, so we can only shut it down after GStreamer stops. */
    This->sink_connected = false;
    pthread_mutex_lock(&parser->mutex);
    parser->sink_connected = false;
    pthread_mutex_unlock(&parser->mutex);
    pthread_cond_signal(&parser->read_cond);
    WaitForSingleObject(This->read_thread, INFINITE);
    CloseHandle(This->read_thread);

    for (i = 0; i < This->source_count; ++i)
        free_source_pin(This->sources[i]);

    This->source_count = 0;
    heap_free(This->sources);
    This->sources = NULL;
    gst_element_set_bus(parser->container, NULL);
    gst_object_unref(parser->container);
    parser->container = NULL;
    BaseFilterImpl_IncrementPinVersion(&This->filter);
    return S_OK;
}

void perform_cb_gstdemux(struct cb_data *cbdata)
{
    switch(cbdata->type)
    {
    case EXISTING_NEW_PAD:
        {
            struct pad_added_data *data = &cbdata->u.pad_added_data;
            existing_new_pad(data->element, data->pad, data->user);
            break;
        }
    case QUERY_SINK:
        {
            struct query_sink_data *data = &cbdata->u.query_sink_data;
            cbdata->u.query_sink_data.ret = query_sink(data->pad, data->parent,
                    data->query);
            break;
        }
    default:
        {
            assert(0);
        }
    }
}
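
/* Compare only the type GUIDs and the raw format block; other AM_MEDIA_TYPE
 * fields (sample size, temporal compression, etc.) are not considered. */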
static BOOL compare_media_types(const AM_MEDIA_TYPE *a, const AM_MEDIA_TYPE *b)
{
    return IsEqualGUID(&a->majortype, &b->majortype)
            && IsEqualGUID(&a->subtype, &b->subtype)
            && IsEqualGUID(&a->formattype, &b->formattype)
            && a->cbFormat == b->cbFormat
            && !memcmp(a->pbFormat, b->pbFormat, a->cbFormat);
}

static HRESULT wave_parser_sink_query_accept(struct strmbase_pin *iface, const AM_MEDIA_TYPE *mt)
{
    if (!IsEqualGUID(&mt->majortype, &MEDIATYPE_Stream))
        return S_FALSE;
    if (IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_WAVE))
        return S_OK;
    if (IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_AU) || IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_AIFF))
        FIXME("AU and AIFF files are not yet supported.\n");
    return S_FALSE;
}

static const struct strmbase_sink_ops wave_parser_sink_ops =
{
    .base.pin_query_accept = wave_parser_sink_query_accept,
    .sink_connect = parser_sink_connect,
    .sink_disconnect = parser_sink_disconnect,
};
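
/* The wavparse source pad is fetched and linked up front here, rather than
 * waiting for a pad-added callback as the decodebin path does. */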
static BOOL wave_parser_init_gst(struct parser *filter)
{
    static const WCHAR source_name[] = {'o','u','t','p','u','t',0};
    struct wg_parser *parser = filter->wg_parser;
    struct wg_parser_stream *stream;
    struct parser_source *pin;
    GstElement *element;
    int ret;

    if (!(element = gst_element_factory_make("wavparse", NULL)))
    {
        ERR("Failed to create wavparse; are %u-bit GStreamer \"good\" plugins installed?\n",
                8 * (int)sizeof(void*));
        return FALSE;
    }

    gst_bin_add(GST_BIN(parser->container), element);

    parser->their_sink = gst_element_get_static_pad(element, "sink");
    if ((ret = gst_pad_link(parser->my_src, parser->their_sink)) < 0)
    {
        ERR("Failed to link sink pads, error %d.\n", ret);
        return FALSE;
    }

    if (!(pin = create_pin(filter, source_name)))
        return FALSE;
    stream = pin->wg_stream;
    stream->their_src = gst_element_get_static_pad(element, "src");
    gst_object_ref(stream->their_src);
    if ((ret = gst_pad_link(stream->their_src, stream->my_sink)) < 0)
    {
        ERR("Failed to link source pads, error %d.\n", ret);
        return FALSE;
    }

    gst_pad_set_active(stream->my_sink, 1);
    gst_element_set_state(parser->container, GST_STATE_PAUSED);
    ret = gst_element_get_state(parser->container, NULL, NULL, -1);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        ERR("Failed to play stream.\n");
        return FALSE;
    }

    return TRUE;
}

static HRESULT wave_parser_source_query_accept(struct parser_source *pin, const AM_MEDIA_TYPE *mt)
{
    struct wg_parser_stream *stream = pin->wg_stream;
    AM_MEDIA_TYPE pad_mt;
    HRESULT hr;

    if (!amt_from_gst_caps(stream->caps, &pad_mt))
        return E_OUTOFMEMORY;
    hr = compare_media_types(mt, &pad_mt) ? S_OK : S_FALSE;
    FreeMediaType(&pad_mt);
    return hr;
}

static HRESULT wave_parser_source_get_media_type(struct parser_source *pin,
        unsigned int index, AM_MEDIA_TYPE *mt)
{
    struct wg_parser_stream *stream = pin->wg_stream;

    if (index > 0)
        return VFW_S_NO_MORE_ITEMS;
    if (!amt_from_gst_caps(stream->caps, mt))
        return E_OUTOFMEMORY;
    return S_OK;
}

HRESULT wave_parser_create(IUnknown *outer, IUnknown **out)
{
    static const WCHAR sink_name[] = {'i','n','p','u','t',' ','p','i','n',0};
    struct parser *object;

    if (!parser_init_gstreamer())
        return E_FAIL;

    mark_wine_thread();

    if (!(object = heap_alloc_zero(sizeof(*object))))
        return E_OUTOFMEMORY;

    if (!(object->wg_parser = wg_parser_create()))
    {
        heap_free(object);
        return E_OUTOFMEMORY;
    }

    strmbase_filter_init(&object->filter, outer, &CLSID_WAVEParser, &filter_ops);
    strmbase_sink_init(&object->sink, &object->filter, sink_name, &wave_parser_sink_ops, NULL);
    object->init_gst = wave_parser_init_gst;
    object->source_query_accept = wave_parser_source_query_accept;
    object->source_get_media_type = wave_parser_source_get_media_type;

    TRACE("Created WAVE parser %p.\n", object);
    *out = &object->filter.IUnknown_inner;
    return S_OK;
}

static HRESULT avi_splitter_sink_query_accept(struct strmbase_pin *iface, const AM_MEDIA_TYPE *mt)
{
    if (IsEqualGUID(&mt->majortype, &MEDIATYPE_Stream)
            && IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_Avi))
        return S_OK;
    return S_FALSE;
}

static const struct strmbase_sink_ops avi_splitter_sink_ops =
{
    .base.pin_query_accept = avi_splitter_sink_query_accept,
    .sink_connect = parser_sink_connect,
    .sink_disconnect = parser_sink_disconnect,
};
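
/* Like decodebin, avidemux adds its source pads dynamically, so wait for
 * no-more-pads (or an error) before returning. */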
static BOOL avi_splitter_init_gst(struct parser *filter)
{
    GstElement *element = gst_element_factory_make("avidemux", NULL);
    struct wg_parser *parser = filter->wg_parser;
    int ret;

    if (!element)
    {
        ERR("Failed to create avidemux; are %u-bit GStreamer \"good\" plugins installed?\n",
                8 * (int)sizeof(void*));
        return FALSE;
    }

    gst_bin_add(GST_BIN(parser->container), element);

    g_signal_connect(element, "pad-added", G_CALLBACK(existing_new_pad_wrapper), filter);
    g_signal_connect(element, "pad-removed", G_CALLBACK(removed_decoded_pad), filter);
    g_signal_connect(element, "no-more-pads", G_CALLBACK(no_more_pads), filter);

    parser->their_sink = gst_element_get_static_pad(element, "sink");

    pthread_mutex_lock(&parser->mutex);
    parser->no_more_pads = parser->error = false;
    pthread_mutex_unlock(&parser->mutex);

    if ((ret = gst_pad_link(parser->my_src, parser->their_sink)) < 0)
    {
        ERR("Failed to link pads, error %d.\n", ret);
        return FALSE;
    }

    gst_element_set_state(parser->container, GST_STATE_PAUSED);
    ret = gst_element_get_state(parser->container, NULL, NULL, -1);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        ERR("Failed to play stream.\n");
        return FALSE;
    }

    pthread_mutex_lock(&parser->mutex);
    while (!parser->no_more_pads && !parser->error)
        pthread_cond_wait(&parser->init_cond, &parser->mutex);
    if (parser->error)
    {
        pthread_mutex_unlock(&parser->mutex);
        return FALSE;
    }
    pthread_mutex_unlock(&parser->mutex);
    return TRUE;
}

static HRESULT avi_splitter_source_query_accept(struct parser_source *pin, const AM_MEDIA_TYPE *mt)
{
    struct wg_parser_stream *stream = pin->wg_stream;
    AM_MEDIA_TYPE pad_mt;
    HRESULT hr;

    if (!amt_from_gst_caps(stream->caps, &pad_mt))
        return E_OUTOFMEMORY;
    hr = compare_media_types(mt, &pad_mt) ? S_OK : S_FALSE;
    FreeMediaType(&pad_mt);
    return hr;
}

static HRESULT avi_splitter_source_get_media_type(struct parser_source *pin,
        unsigned int index, AM_MEDIA_TYPE *mt)
{
    struct wg_parser_stream *stream = pin->wg_stream;

    if (index > 0)
        return VFW_S_NO_MORE_ITEMS;
    if (!amt_from_gst_caps(stream->caps, mt))
        return E_OUTOFMEMORY;
    return S_OK;
}

HRESULT avi_splitter_create(IUnknown *outer, IUnknown **out)
{
    static const WCHAR sink_name[] = {'i','n','p','u','t',' ','p','i','n',0};
    struct parser *object;

    if (!parser_init_gstreamer())
        return E_FAIL;

    mark_wine_thread();

    if (!(object = heap_alloc_zero(sizeof(*object))))
        return E_OUTOFMEMORY;

    if (!(object->wg_parser = wg_parser_create()))
    {
        heap_free(object);
        return E_OUTOFMEMORY;
    }

    strmbase_filter_init(&object->filter, outer, &CLSID_AviSplitter, &filter_ops);
    strmbase_sink_init(&object->sink, &object->filter, sink_name, &avi_splitter_sink_ops, NULL);
    object->init_gst = avi_splitter_init_gst;
    object->source_query_accept = avi_splitter_source_query_accept;
    object->source_get_media_type = avi_splitter_source_get_media_type;

    TRACE("Created AVI splitter %p.\n", object);
    *out = &object->filter.IUnknown_inner;
    return S_OK;
}

static HRESULT mpeg_splitter_sink_query_accept(struct strmbase_pin *iface, const AM_MEDIA_TYPE *mt)
{
    if (!IsEqualGUID(&mt->majortype, &MEDIATYPE_Stream))
        return S_FALSE;
    if (IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_MPEG1Audio))
        return S_OK;
    if (IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_MPEG1Video)
            || IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_MPEG1System)
            || IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_MPEG1VideoCD))
        FIXME("Unsupported subtype %s.\n", wine_dbgstr_guid(&mt->subtype));
    return S_FALSE;
}

static const struct strmbase_sink_ops mpeg_splitter_sink_ops =
{
    .base.pin_query_accept = mpeg_splitter_sink_query_accept,
    .sink_connect = parser_sink_connect,
    .sink_disconnect = parser_sink_disconnect,
};
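
/* mpegaudioparse exposes a single audio pad, which is linked directly; block
 * until the duration is known (or until EOS or an error) before returning. */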
static BOOL mpeg_splitter_init_gst(struct parser *filter)
{
    static const WCHAR source_name[] = {'A','u','d','i','o',0};
    struct wg_parser *parser = filter->wg_parser;
    struct wg_parser_stream *stream;
    struct parser_source *pin;
    GstElement *element;
    int ret;

    if (!(element = gst_element_factory_make("mpegaudioparse", NULL)))
    {
        ERR("Failed to create mpegaudioparse; are %u-bit GStreamer \"good\" plugins installed?\n",
                8 * (int)sizeof(void*));
        return FALSE;
    }

    gst_bin_add(GST_BIN(parser->container), element);

    parser->their_sink = gst_element_get_static_pad(element, "sink");
    if ((ret = gst_pad_link(parser->my_src, parser->their_sink)) < 0)
    {
        ERR("Failed to link sink pads, error %d.\n", ret);
        return FALSE;
    }

    if (!(pin = create_pin(filter, source_name)))
        return FALSE;
    stream = pin->wg_stream;
    gst_object_ref(stream->their_src = gst_element_get_static_pad(element, "src"));
    if ((ret = gst_pad_link(stream->their_src, stream->my_sink)) < 0)
    {
        ERR("Failed to link source pads, error %d.\n", ret);
        return FALSE;
    }

    gst_pad_set_active(stream->my_sink, 1);
    gst_element_set_state(parser->container, GST_STATE_PAUSED);
    ret = gst_element_get_state(parser->container, NULL, NULL, -1);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        ERR("Failed to play stream.\n");
        return FALSE;
    }

    pthread_mutex_lock(&parser->mutex);
    while (!parser->has_duration && !parser->error && !stream->eos)
        pthread_cond_wait(&parser->init_cond, &parser->mutex);
    if (parser->error)
    {
        pthread_mutex_unlock(&parser->mutex);
        return FALSE;
    }
    pthread_mutex_unlock(&parser->mutex);
    return TRUE;
}

static HRESULT mpeg_splitter_source_query_accept(struct parser_source *pin, const AM_MEDIA_TYPE *mt)
{
    struct wg_parser_stream *stream = pin->wg_stream;
    AM_MEDIA_TYPE pad_mt;
    HRESULT hr;

    if (!amt_from_gst_caps(stream->caps, &pad_mt))
        return E_OUTOFMEMORY;
    hr = compare_media_types(mt, &pad_mt) ? S_OK : S_FALSE;
    FreeMediaType(&pad_mt);
    return hr;
}

static HRESULT mpeg_splitter_source_get_media_type(struct parser_source *pin,
        unsigned int index, AM_MEDIA_TYPE *mt)
{
    struct wg_parser_stream *stream = pin->wg_stream;

    if (index > 0)
        return VFW_S_NO_MORE_ITEMS;
    if (!amt_from_gst_caps(stream->caps, mt))
        return E_OUTOFMEMORY;
    return S_OK;
}

static HRESULT mpeg_splitter_query_interface(struct strmbase_filter *iface, REFIID iid, void **out)
{
    struct parser *filter = impl_from_strmbase_filter(iface);

    if (IsEqualGUID(iid, &IID_IAMStreamSelect))
    {
        *out = &filter->IAMStreamSelect_iface;
        IUnknown_AddRef((IUnknown *)*out);
        return S_OK;
    }

    return E_NOINTERFACE;
}

static const struct strmbase_filter_ops mpeg_splitter_ops =
{
    .filter_query_interface = mpeg_splitter_query_interface,
    .filter_get_pin = parser_get_pin,
    .filter_destroy = parser_destroy,
    .filter_init_stream = parser_init_stream,
    .filter_cleanup_stream = parser_cleanup_stream,
};

HRESULT mpeg_splitter_create(IUnknown *outer, IUnknown **out)
{
    static const WCHAR sink_name[] = {'I','n','p','u','t',0};
    struct parser *object;

    if (!parser_init_gstreamer())
        return E_FAIL;

    mark_wine_thread();

    if (!(object = heap_alloc_zero(sizeof(*object))))
        return E_OUTOFMEMORY;

    if (!(object->wg_parser = wg_parser_create()))
    {
        heap_free(object);
        return E_OUTOFMEMORY;
    }

    strmbase_filter_init(&object->filter, outer, &CLSID_MPEG1Splitter, &mpeg_splitter_ops);
    strmbase_sink_init(&object->sink, &object->filter, sink_name, &mpeg_splitter_sink_ops, NULL);
    object->IAMStreamSelect_iface.lpVtbl = &stream_select_vtbl;

    object->init_gst = mpeg_splitter_init_gst;
    object->source_query_accept = mpeg_splitter_source_query_accept;
    object->source_get_media_type = mpeg_splitter_source_get_media_type;
    object->enum_sink_first = TRUE;

    TRACE("Created MPEG-1 splitter %p.\n", object);
    *out = &object->filter.IUnknown_inner;
    return S_OK;
}