mf/sar: Implement sample processing.

Signed-off-by: Nikolay Sivov <nsivov@codeweavers.com>
Signed-off-by: Alexandre Julliard <julliard@winehq.org>
parent cbf156b780
commit 3d51816d1c

 dlls/mf/sar.c | 275
--- a/dlls/mf/sar.c
+++ b/dlls/mf/sar.c
@@ -28,6 +28,7 @@
 #include "wine/debug.h"
 #include "wine/heap.h"
+#include "wine/list.h"
 
 WINE_DEFAULT_DEBUG_CHANNEL(mfplat);
 
@@ -42,6 +43,32 @@ enum audio_renderer_flags
 {
     SAR_SHUT_DOWN = 0x1,
     SAR_PREROLLED = 0x2,
+    SAR_SAMPLE_REQUESTED = 0x4,
 };
 
+enum queued_object_type
+{
+    OBJECT_TYPE_SAMPLE,
+    OBJECT_TYPE_MARKER,
+};
+
+struct queued_object
+{
+    struct list entry;
+    enum queued_object_type type;
+    union
+    {
+        struct
+        {
+            IMFSample *sample;
+            unsigned int frame_offset;
+        } sample;
+        struct
+        {
+            MFSTREAMSINK_MARKER_TYPE type;
+            PROPVARIANT context;
+        } marker;
+    } u;
+};
+
 struct audio_renderer
@@ -56,6 +83,7 @@ struct audio_renderer
     IMFSimpleAudioVolume IMFSimpleAudioVolume_iface;
     IMFAudioStreamVolume IMFAudioStreamVolume_iface;
     IMFAudioPolicy IMFAudioPolicy_iface;
+    IMFAsyncCallback render_callback;
     LONG refcount;
     IMFMediaEventQueue *event_queue;
     IMFMediaEventQueue *stream_event_queue;
@@ -64,14 +92,34 @@ struct audio_renderer
     IMFMediaType *current_media_type;
     IMMDevice *device;
     IAudioClient *audio_client;
+    IAudioRenderClient *audio_render_client;
     IAudioStreamVolume *stream_volume;
     ISimpleAudioVolume *audio_volume;
     HANDLE buffer_ready_event;
+    MFWORKITEM_KEY buffer_ready_key;
+    unsigned int frame_size;
+    struct list queue;
     enum stream_state state;
     unsigned int flags;
     CRITICAL_SECTION cs;
 };
 
+static void release_pending_object(struct queued_object *object)
+{
+    list_remove(&object->entry);
+    switch (object->type)
+    {
+        case OBJECT_TYPE_SAMPLE:
+            if (object->u.sample.sample)
+                IMFSample_Release(object->u.sample.sample);
+            break;
+        case OBJECT_TYPE_MARKER:
+            PropVariantClear(&object->u.marker.context);
+            break;
+    }
+    heap_free(object);
+}
+
 static struct audio_renderer *impl_from_IMFMediaSink(IMFMediaSink *iface)
 {
     return CONTAINING_RECORD(iface, struct audio_renderer, IMFMediaSink_iface);
@@ -122,6 +170,11 @@ static struct audio_renderer *impl_from_IMFMediaTypeHandler(IMFMediaTypeHandler
     return CONTAINING_RECORD(iface, struct audio_renderer, IMFMediaTypeHandler_iface);
 }
 
+static struct audio_renderer *impl_from_render_callback_IMFAsyncCallback(IMFAsyncCallback *iface)
+{
+    return CONTAINING_RECORD(iface, struct audio_renderer, render_callback);
+}
+
 static HRESULT WINAPI audio_renderer_sink_QueryInterface(IMFMediaSink *iface, REFIID riid, void **obj)
 {
     struct audio_renderer *renderer = impl_from_IMFMediaSink(iface);
@@ -171,9 +224,18 @@ static ULONG WINAPI audio_renderer_sink_AddRef(IMFMediaSink *iface)
 
 static void audio_renderer_release_audio_client(struct audio_renderer *renderer)
 {
+    MFCancelWorkItem(renderer->buffer_ready_key);
+    renderer->buffer_ready_key = 0;
     if (renderer->audio_client)
+    {
+        IAudioClient_Stop(renderer->audio_client);
+        IAudioClient_Reset(renderer->audio_client);
         IAudioClient_Release(renderer->audio_client);
+    }
     renderer->audio_client = NULL;
+    if (renderer->audio_render_client)
+        IAudioRenderClient_Release(renderer->audio_render_client);
+    renderer->audio_render_client = NULL;
     if (renderer->stream_volume)
         IAudioStreamVolume_Release(renderer->stream_volume);
     renderer->stream_volume = NULL;
@@ -187,6 +249,7 @@ static ULONG WINAPI audio_renderer_sink_Release(IMFMediaSink *iface)
 {
     struct audio_renderer *renderer = impl_from_IMFMediaSink(iface);
     ULONG refcount = InterlockedDecrement(&renderer->refcount);
+    struct queued_object *obj, *obj2;
 
     TRACE("%p, refcount %u.\n", iface, refcount);
 
@@ -204,8 +267,12 @@ static ULONG WINAPI audio_renderer_sink_Release(IMFMediaSink *iface)
             IMFMediaType_Release(renderer->media_type);
         if (renderer->current_media_type)
             IMFMediaType_Release(renderer->current_media_type);
-        CloseHandle(renderer->buffer_ready_event);
         audio_renderer_release_audio_client(renderer);
+        CloseHandle(renderer->buffer_ready_event);
+        LIST_FOR_EACH_ENTRY_SAFE(obj, obj2, &renderer->queue, struct queued_object, entry)
+        {
+            release_pending_object(obj);
+        }
         DeleteCriticalSection(&renderer->cs);
         heap_free(renderer);
     }
@@ -388,6 +455,7 @@ static HRESULT WINAPI audio_renderer_sink_Shutdown(IMFMediaSink *iface)
     IMFMediaEventQueue_Shutdown(renderer->event_queue);
     IMFMediaEventQueue_Shutdown(renderer->stream_event_queue);
     audio_renderer_set_presentation_clock(renderer, NULL);
+    audio_renderer_release_audio_client(renderer);
     LeaveCriticalSection(&renderer->cs);
 
     return S_OK;
@@ -1248,19 +1316,82 @@ static HRESULT WINAPI audio_renderer_stream_GetMediaTypeHandler(IMFStreamSink *i
     return S_OK;
 }
 
+static HRESULT stream_queue_sample(struct audio_renderer *renderer, IMFSample *sample)
+{
+    struct queued_object *object;
+
+    if (!(object = heap_alloc_zero(sizeof(*object))))
+        return E_OUTOFMEMORY;
+
+    object->type = OBJECT_TYPE_SAMPLE;
+    object->u.sample.sample = sample;
+    IMFSample_AddRef(object->u.sample.sample);
+
+    list_add_tail(&renderer->queue, &object->entry);
+
+    return S_OK;
+}
+
 static HRESULT WINAPI audio_renderer_stream_ProcessSample(IMFStreamSink *iface, IMFSample *sample)
 {
-    FIXME("%p, %p.\n", iface, sample);
+    struct audio_renderer *renderer = impl_from_IMFStreamSink(iface);
+    HRESULT hr = S_OK;
 
-    return E_NOTIMPL;
+    TRACE("%p, %p.\n", iface, sample);
+
+    if (!sample)
+        return E_POINTER;
+
+    if (renderer->flags & SAR_SHUT_DOWN)
+        return MF_E_STREAMSINK_REMOVED;
+
+    EnterCriticalSection(&renderer->cs);
+    if (renderer->state == STREAM_STATE_RUNNING)
+        hr = stream_queue_sample(renderer, sample);
+    renderer->flags &= ~SAR_SAMPLE_REQUESTED;
+    LeaveCriticalSection(&renderer->cs);
+
+    return hr;
 }
 
+static HRESULT stream_place_marker(struct audio_renderer *renderer, MFSTREAMSINK_MARKER_TYPE marker_type,
+        const PROPVARIANT *context_value)
+{
+    struct queued_object *marker;
+    HRESULT hr = S_OK;
+
+    if (!(marker = heap_alloc_zero(sizeof(*marker))))
+        return E_OUTOFMEMORY;
+
+    marker->type = OBJECT_TYPE_MARKER;
+    marker->u.marker.type = marker_type;
+    PropVariantInit(&marker->u.marker.context);
+    if (context_value)
+        hr = PropVariantCopy(&marker->u.marker.context, context_value);
+    if (SUCCEEDED(hr))
+        list_add_tail(&renderer->queue, &marker->entry);
+    else
+        release_pending_object(marker);
+
+    return hr;
+}
+
 static HRESULT WINAPI audio_renderer_stream_PlaceMarker(IMFStreamSink *iface, MFSTREAMSINK_MARKER_TYPE marker_type,
         const PROPVARIANT *marker_value, const PROPVARIANT *context_value)
 {
-    FIXME("%p, %d, %p, %p.\n", iface, marker_type, marker_value, context_value);
+    struct audio_renderer *renderer = impl_from_IMFStreamSink(iface);
+    HRESULT hr;
 
-    return E_NOTIMPL;
+    TRACE("%p, %d, %p, %p.\n", iface, marker_type, marker_value, context_value);
+
+    if (renderer->flags & SAR_SHUT_DOWN)
+        return MF_E_STREAMSINK_REMOVED;
+
+    EnterCriticalSection(&renderer->cs);
+    hr = stream_place_marker(renderer, marker_type, context_value);
+    LeaveCriticalSection(&renderer->cs);
+
+    return hr;
 }
 
 static HRESULT WINAPI audio_renderer_stream_Flush(IMFStreamSink *iface)
@@ -1350,6 +1481,7 @@ static HRESULT WINAPI audio_renderer_stream_type_handler_GetMediaTypeByIndex(IMF
 
 static HRESULT audio_renderer_create_audio_client(struct audio_renderer *renderer)
 {
+    IMFAsyncResult *result;
     WAVEFORMATEX *wfx;
     HRESULT hr;
 
@@ -1372,6 +1504,8 @@ static HRESULT audio_renderer_create_audio_client(struct audio_renderer *rendere
         return hr;
     }
 
+    renderer->frame_size = wfx->wBitsPerSample * wfx->nChannels / 8;
+
     hr = IAudioClient_Initialize(renderer->audio_client, AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
             1000000, 0, wfx, NULL);
     CoTaskMemFree(wfx);
@@ -1394,12 +1528,26 @@ static HRESULT audio_renderer_create_audio_client(struct audio_renderer *rendere
         return hr;
     }
 
+    if (FAILED(hr = IAudioClient_GetService(renderer->audio_client, &IID_IAudioRenderClient,
+            (void **)&renderer->audio_render_client)))
+    {
+        WARN("Failed to get audio render client, hr %#x.\n", hr);
+        return hr;
+    }
+
     if (FAILED(hr = IAudioClient_SetEventHandle(renderer->audio_client, renderer->buffer_ready_event)))
     {
         WARN("Failed to set event handle, hr %#x.\n", hr);
         return hr;
     }
 
+    if (SUCCEEDED(hr = MFCreateAsyncResult(NULL, &renderer->render_callback, NULL, &result)))
+    {
+        if (FAILED(hr = MFPutWaitingWorkItem(renderer->buffer_ready_event, 0, result, &renderer->buffer_ready_key)))
+            WARN("Failed to submit wait item, hr %#x.\n", hr);
+        IMFAsyncResult_Release(result);
+    }
+
     return hr;
 }
 
@@ -1532,6 +1680,121 @@ static HRESULT audio_renderer_collect_supported_types(struct audio_renderer *ren
     return hr;
 }
 
+static HRESULT WINAPI audio_renderer_render_callback_QueryInterface(IMFAsyncCallback *iface, REFIID riid, void **obj)
+{
+    TRACE("%p, %s, %p.\n", iface, debugstr_guid(riid), obj);
+
+    if (IsEqualIID(riid, &IID_IMFAsyncCallback) ||
+            IsEqualIID(riid, &IID_IUnknown))
+    {
+        *obj = iface;
+        IMFAsyncCallback_AddRef(iface);
+        return S_OK;
+    }
+
+    WARN("Unsupported interface %s.\n", debugstr_guid(riid));
+    *obj = NULL;
+    return E_NOINTERFACE;
+}
+
+static ULONG WINAPI audio_renderer_render_callback_AddRef(IMFAsyncCallback *iface)
+{
+    struct audio_renderer *renderer = impl_from_render_callback_IMFAsyncCallback(iface);
+    return IMFMediaSink_AddRef(&renderer->IMFMediaSink_iface);
+}
+
+static ULONG WINAPI audio_renderer_render_callback_Release(IMFAsyncCallback *iface)
+{
+    struct audio_renderer *renderer = impl_from_render_callback_IMFAsyncCallback(iface);
+    return IMFMediaSink_Release(&renderer->IMFMediaSink_iface);
+}
+
+static HRESULT WINAPI audio_renderer_render_callback_GetParameters(IMFAsyncCallback *iface, DWORD *flags, DWORD *queue)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI audio_renderer_render_callback_Invoke(IMFAsyncCallback *iface, IMFAsyncResult *result)
+{
+    struct audio_renderer *renderer = impl_from_render_callback_IMFAsyncCallback(iface);
+    unsigned int src_frames, dst_frames, max_frames, src_len;
+    struct queued_object *obj, *obj2;
+    BOOL keep_sample = FALSE;
+    IMFMediaBuffer *buffer;
+    BYTE *dst, *src;
+    HRESULT hr;
+
+    EnterCriticalSection(&renderer->cs);
+
+    LIST_FOR_EACH_ENTRY_SAFE(obj, obj2, &renderer->queue, struct queued_object, entry)
+    {
+        if (obj->type == OBJECT_TYPE_MARKER)
+        {
+            IMFMediaEventQueue_QueueEventParamVar(renderer->stream_event_queue, MEStreamSinkMarker,
+                    &GUID_NULL, S_OK, &obj->u.marker.context);
+        }
+        else if (obj->type == OBJECT_TYPE_SAMPLE)
+        {
+            if (SUCCEEDED(IMFSample_ConvertToContiguousBuffer(obj->u.sample.sample, &buffer)))
+            {
+                if (SUCCEEDED(IMFMediaBuffer_Lock(buffer, &src, NULL, &src_len)))
+                {
+                    if ((src_frames = src_len / renderer->frame_size))
+                    {
+                        if (SUCCEEDED(IAudioClient_GetBufferSize(renderer->audio_client, &max_frames)))
+                        {
+                            src_frames -= obj->u.sample.frame_offset;
+                            dst_frames = min(src_frames, max_frames);
+
+                            if (SUCCEEDED(hr = IAudioRenderClient_GetBuffer(renderer->audio_render_client, dst_frames, &dst)))
+                            {
+                                memcpy(dst, src + obj->u.sample.frame_offset * renderer->frame_size,
+                                        dst_frames * renderer->frame_size);
+
+                                IAudioRenderClient_ReleaseBuffer(renderer->audio_render_client, dst_frames, 0);
+
+                                obj->u.sample.frame_offset += dst_frames;
+                            }
+
+                            keep_sample = FAILED(hr) || src_frames > max_frames;
+                        }
+                    }
+                    IMFMediaBuffer_Unlock(buffer);
+                }
+                IMFMediaBuffer_Release(buffer);
+            }
+        }
+
+        if (keep_sample)
+            break;
+
+        list_remove(&obj->entry);
+        release_pending_object(obj);
+    }
+
+    if (list_empty(&renderer->queue) && !(renderer->flags & SAR_SAMPLE_REQUESTED))
+    {
+        IMFMediaEventQueue_QueueEventParamVar(renderer->stream_event_queue, MEStreamSinkRequestSample, &GUID_NULL, S_OK, NULL);
+        renderer->flags |= SAR_SAMPLE_REQUESTED;
+    }
+
+    if (FAILED(hr = MFPutWaitingWorkItem(renderer->buffer_ready_event, 0, result, &renderer->buffer_ready_key)))
+        WARN("Failed to submit wait item, hr %#x.\n", hr);
+
+    LeaveCriticalSection(&renderer->cs);
+
+    return S_OK;
+}
+
+static const IMFAsyncCallbackVtbl audio_renderer_render_callback_vtbl =
+{
+    audio_renderer_render_callback_QueryInterface,
+    audio_renderer_render_callback_AddRef,
+    audio_renderer_render_callback_Release,
+    audio_renderer_render_callback_GetParameters,
+    audio_renderer_render_callback_Invoke,
+};
+
 static HRESULT sar_create_object(IMFAttributes *attributes, void *user_context, IUnknown **obj)
 {
     struct audio_renderer *renderer;
@@ -1552,9 +1815,11 @@ static HRESULT sar_create_object(IMFAttributes *attributes, void *user_context,
     renderer->IMFSimpleAudioVolume_iface.lpVtbl = &audio_renderer_simple_volume_vtbl;
     renderer->IMFAudioStreamVolume_iface.lpVtbl = &audio_renderer_stream_volume_vtbl;
    renderer->IMFAudioPolicy_iface.lpVtbl = &audio_renderer_policy_vtbl;
+    renderer->render_callback.lpVtbl = &audio_renderer_render_callback_vtbl;
     renderer->refcount = 1;
     InitializeCriticalSection(&renderer->cs);
     renderer->buffer_ready_event = CreateEventW(NULL, FALSE, FALSE, NULL);
+    list_init(&renderer->queue);
 
     if (FAILED(hr = MFCreateEventQueue(&renderer->event_queue)))
         goto failed;
@@ -535,6 +535,7 @@ HRESULT WINAPI MFInitAttributesFromBlob(IMFAttributes *attributes, const UINT8 *
 HRESULT WINAPI MFInitMediaTypeFromWaveFormatEx(IMFMediaType *mediatype, const WAVEFORMATEX *format, UINT32 size);
 HRESULT WINAPI MFInvokeCallback(IMFAsyncResult *result);
 HRESULT WINAPI MFLockPlatform(void);
+HRESULT WINAPI MFPutWaitingWorkItem(HANDLE event, LONG priority, IMFAsyncResult *result, MFWORKITEM_KEY *key);
 HRESULT WINAPI MFPutWorkItem(DWORD queue, IMFAsyncCallback *callback, IUnknown *state);
 HRESULT WINAPI MFPutWorkItem2(DWORD queue, LONG priority, IMFAsyncCallback *callback, IUnknown *state);
 HRESULT WINAPI MFPutWorkItemEx(DWORD queue, IMFAsyncResult *result);
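
For context only, not part of the commit: a minimal sketch of the producer side that a stream sink such as the SAR expects. The queueing code added above accepts samples through IMFStreamSink::ProcessSample and raises MEStreamSinkRequestSample whenever its queue drains, so a caller normally pumps the sink's event generator and answers each request with a fresh sample. get_next_sample() is a hypothetical application callback, not a Wine or Media Foundation API.

/* Illustrative sketch: feed a stream sink that requests samples via its event queue. */
#include <mfidl.h>
#include <mferror.h>

static HRESULT get_next_sample(IMFSample **sample); /* hypothetical: supplies the next audio sample */

static HRESULT feed_stream_sink(IMFStreamSink *stream_sink)
{
    HRESULT hr = S_OK;

    while (SUCCEEDED(hr))
    {
        IMFMediaEvent *event;
        MediaEventType type;
        IMFSample *sample;

        /* Block until the sink raises its next event (request, marker completion, ...). */
        if (FAILED(hr = IMFStreamSink_GetEvent(stream_sink, 0, &event)))
            break;
        hr = IMFMediaEvent_GetType(event, &type);
        IMFMediaEvent_Release(event);
        if (FAILED(hr))
            break;

        if (type == MEStreamSinkRequestSample)
        {
            if (FAILED(hr = get_next_sample(&sample)))
                break;
            /* The sink queues the sample and drains it from its render callback. */
            hr = IMFStreamSink_ProcessSample(stream_sink, sample);
            IMFSample_Release(sample);
        }
    }

    return hr;
}

Markers placed with IMFStreamSink::PlaceMarker complete on the same event queue: the render callback above reports them back as MEStreamSinkMarker once all samples queued ahead of them have been consumed.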