Signed-off-by: Anton Baskanov baskanov@gmail.com
---
 dlls/amstream/audiostream.c    | 11 +++++++++--
 dlls/amstream/tests/amstream.c | 35 ++++++++++++++++++++++++++++++++++
 2 files changed, 44 insertions(+), 2 deletions(-)
diff --git a/dlls/amstream/audiostream.c b/dlls/amstream/audiostream.c
index 1f9aed7338..e5d5d86daf 100644
--- a/dlls/amstream/audiostream.c
+++ b/dlls/amstream/audiostream.c
@@ -184,6 +184,7 @@ struct audio_stream
     IMemAllocator *allocator;
     AM_MEDIA_TYPE mt;
     WAVEFORMATEX format;
+    FILTER_STATE state;
 };
 static inline struct audio_stream *impl_from_IAMMediaStream(IAMMediaStream *iface)
@@ -342,9 +343,15 @@ static HRESULT WINAPI audio_IAMMediaStream_SetState(IAMMediaStream *iface, FILTE
 {
     struct audio_stream *This = impl_from_IAMMediaStream(iface);
-    FIXME("(%p/%p)->(%u) stub!\n", This, iface, state);
+    TRACE("(%p/%p)->(%u)\n", This, iface, state);
-    return S_FALSE;
+    EnterCriticalSection(&This->cs);
+
+    This->state = state;
+
+    LeaveCriticalSection(&This->cs);
+
+    return S_OK;
 }
 static HRESULT WINAPI audio_IAMMediaStream_JoinAMMultiMediaStream(IAMMediaStream *iface,
diff --git a/dlls/amstream/tests/amstream.c b/dlls/amstream/tests/amstream.c
index 82a833ad6f..472c6558fc 100644
--- a/dlls/amstream/tests/amstream.c
+++ b/dlls/amstream/tests/amstream.c
@@ -2780,6 +2780,40 @@ static void test_audiostream_receive_connection(void)
     ok(!ref, "Got outstanding refcount %d.\n", ref);
 }
+static void test_audiostream_set_state(void)
+{
+    IAMMultiMediaStream *mmstream = create_ammultimediastream();
+    IAMMediaStream *am_stream;
+    IMediaStream *stream;
+    HRESULT hr;
+    ULONG ref;
+
+    hr = IAMMultiMediaStream_Initialize(mmstream, STREAMTYPE_READ, 0, NULL);
+    ok(hr == S_OK, "Got hr %#x.\n", hr);
+    hr = IAMMultiMediaStream_AddMediaStream(mmstream, NULL, &MSPID_PrimaryAudio, 0, &stream);
+    ok(hr == S_OK, "Got hr %#x.\n", hr);
+    hr = IMediaStream_QueryInterface(stream, &IID_IAMMediaStream, (void **)&am_stream);
+    ok(hr == S_OK, "Got hr %#x.\n", hr);
+
+    hr = IAMMediaStream_SetState(am_stream, 4);
+    ok(hr == S_OK, "Got hr %#x.\n", hr);
+
+    hr = IAMMediaStream_SetState(am_stream, State_Running);
+    ok(hr == S_OK, "Got hr %#x.\n", hr);
+
+    hr = IAMMediaStream_SetState(am_stream, State_Paused);
+    ok(hr == S_OK, "Got hr %#x.\n", hr);
+
+    hr = IAMMediaStream_SetState(am_stream, State_Stopped);
+    ok(hr == S_OK, "Got hr %#x.\n", hr);
+
+    ref = IAMMultiMediaStream_Release(mmstream);
+    ok(!ref, "Got outstanding refcount %d.\n", ref);
+    IAMMediaStream_Release(am_stream);
+    ref = IMediaStream_Release(stream);
+    ok(!ref, "Got outstanding refcount %d.\n", ref);
+}
+
 void test_mediastreamfilter_get_state(void)
 {
     IAMMultiMediaStream *mmstream = create_ammultimediastream();
@@ -2951,6 +2985,7 @@ START_TEST(amstream)
     test_audiostream_get_format();
     test_audiostream_set_format();
     test_audiostream_receive_connection();
+    test_audiostream_set_state();
     test_mediastreamfilter_get_state();
     test_mediastreamfilter_stop_pause_run();
Signed-off-by: Anton Baskanov baskanov@gmail.com
---
 dlls/amstream/audiostream.c | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/dlls/amstream/audiostream.c b/dlls/amstream/audiostream.c
index e5d5d86daf..26d615ebf4 100644
--- a/dlls/amstream/audiostream.c
+++ b/dlls/amstream/audiostream.c
@@ -30,7 +30,7 @@ static const WCHAR sink_id[] = L"I{A35FF56B-9FDA-11D0-8FDF-00C04FD9189D}";
 typedef struct {
     IAudioStreamSample IAudioStreamSample_iface;
     LONG ref;
-    IMediaStream *parent;
+    IAudioMediaStream *parent;
     IAudioData *audio_data;
 } IAudioStreamSampleImpl;
@@ -158,7 +158,7 @@ static HRESULT audiostreamsample_create(IAudioMediaStream *parent, IAudioData *a
     object->IAudioStreamSample_iface.lpVtbl = &AudioStreamSample_Vtbl;
     object->ref = 1;
-    object->parent = (IMediaStream*)parent;
+    object->parent = parent;
     object->audio_data = audio_data;
*audio_stream_sample = &object->IAudioStreamSample_iface;
Signed-off-by: Anton Baskanov baskanov@gmail.com
---
 dlls/amstream/audiostream.c    | 66 ++++++++++++++++++++++-
 dlls/amstream/tests/amstream.c | 99 ++++++++++++++++++++++++++++++++--
 2 files changed, 159 insertions(+), 6 deletions(-)
diff --git a/dlls/amstream/audiostream.c b/dlls/amstream/audiostream.c index 26d615ebf4..81cd2975c8 100644 --- a/dlls/amstream/audiostream.c +++ b/dlls/amstream/audiostream.c @@ -166,6 +166,17 @@ static HRESULT audiostreamsample_create(IAudioMediaStream *parent, IAudioData *a return S_OK; }
+enum queued_event_type +{ + QET_END_OF_STREAM +}; + +struct queued_event +{ + struct list entry; + enum queued_event_type type; +}; + struct audio_stream { IAMMediaStream IAMMediaStream_iface; @@ -185,8 +196,27 @@ struct audio_stream AM_MEDIA_TYPE mt; WAVEFORMATEX format; FILTER_STATE state; + BOOL eos; + struct list event_queue; };
+static void remove_queued_event(struct queued_event *event) +{ + list_remove(&event->entry); + HeapFree(GetProcessHeap(), 0, event); +} + +static void flush_event_queue(struct audio_stream *stream) +{ + while (!list_empty(&stream->event_queue)) + { + struct queued_event *event = + LIST_ENTRY(list_head(&stream->event_queue), struct queued_event, entry); + + remove_queued_event(event); + } +} + static inline struct audio_stream *impl_from_IAMMediaStream(IAMMediaStream *iface) { return CONTAINING_RECORD(iface, struct audio_stream, IAMMediaStream_iface); @@ -347,6 +377,11 @@ static HRESULT WINAPI audio_IAMMediaStream_SetState(IAMMediaStream *iface, FILTE
EnterCriticalSection(&This->cs);
+ if (State_Stopped == state) + flush_event_queue(This); + if (State_Stopped == This->state) + This->eos = FALSE; + This->state = state;
LeaveCriticalSection(&This->cs); @@ -919,8 +954,34 @@ static HRESULT WINAPI audio_sink_QueryInternalConnections(IPin *iface, IPin **pi
static HRESULT WINAPI audio_sink_EndOfStream(IPin *iface) { - FIXME("iface %p, stub!\n", iface); - return E_NOTIMPL; + struct audio_stream *stream = impl_from_IPin(iface); + struct queued_event *event; + + TRACE("(%p/%p)->()\n", iface, stream); + + EnterCriticalSection(&stream->cs); + + if (stream->eos) + { + LeaveCriticalSection(&stream->cs); + return E_FAIL; + } + + event = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, sizeof(*event)); + if (!event) + { + LeaveCriticalSection(&stream->cs); + return E_OUTOFMEMORY; + } + + event->type = QET_END_OF_STREAM; + list_add_tail(&stream->event_queue, &event->entry); + + stream->eos = TRUE; + + LeaveCriticalSection(&stream->cs); + + return S_OK; }
static HRESULT WINAPI audio_sink_BeginFlush(IPin *iface) @@ -1083,6 +1144,7 @@ HRESULT audio_stream_create(IMultiMediaStream *parent, const MSPID *purpose_id, object->parent = parent; object->purpose_id = *purpose_id; object->stream_type = stream_type; + list_init(&object->event_queue);
*media_stream = &object->IAMMediaStream_iface;
diff --git a/dlls/amstream/tests/amstream.c b/dlls/amstream/tests/amstream.c index 472c6558fc..2ac492bc3c 100644 --- a/dlls/amstream/tests/amstream.c +++ b/dlls/amstream/tests/amstream.c @@ -2381,10 +2381,21 @@ static HRESULT testsource_query_accept(struct strmbase_pin *iface, const AM_MEDI return S_OK; }
-static HRESULT WINAPI testsource_DecideAllocator(struct strmbase_source *iface, - IMemInputPin *peer, IMemAllocator **allocator) +static HRESULT WINAPI testsource_decide_buffer_size(struct strmbase_source *iface, + IMemAllocator *alloc, ALLOCATOR_PROPERTIES *requested) { - return S_OK; + ALLOCATOR_PROPERTIES actual; + + if (!requested->cbAlign) + requested->cbAlign = 1; + + if (requested->cbBuffer < 4096) + requested->cbBuffer = 4096; + + if (!requested->cBuffers) + requested->cBuffers = 2; + + return IMemAllocator_SetProperties(alloc, requested, &actual); }
static const struct strmbase_source_ops testsource_ops = @@ -2392,7 +2403,8 @@ static const struct strmbase_source_ops testsource_ops = .base.pin_query_accept = testsource_query_accept, .base.pin_get_media_type = strmbase_pin_get_media_type, .pfnAttemptConnection = BaseOutputPinImpl_AttemptConnection, - .pfnDecideAllocator = testsource_DecideAllocator, + .pfnDecideBufferSize = testsource_decide_buffer_size, + .pfnDecideAllocator = BaseOutputPinImpl_DecideAllocator, };
static void testfilter_init(struct testfilter *filter) @@ -2814,6 +2826,84 @@ static void test_audiostream_set_state(void) ok(!ref, "Got outstanding refcount %d.\n", ref); }
+void test_audiostream_end_of_stream(void) +{ + static const WAVEFORMATEX format = + { + .wFormatTag = WAVE_FORMAT_PCM, + .nChannels = 1, + .nSamplesPerSec = 11025, + .wBitsPerSample = 16, + .nBlockAlign = 2, + .nAvgBytesPerSec = 2 * 11025, + }; + + const AM_MEDIA_TYPE mt = + { + .majortype = MEDIATYPE_Audio, + .subtype = MEDIASUBTYPE_PCM, + .formattype = FORMAT_WaveFormatEx, + .cbFormat = sizeof(WAVEFORMATEX), + .pbFormat = (BYTE *)&format, + }; + + IAMMultiMediaStream *mmstream = create_ammultimediastream(); + struct testfilter source; + IGraphBuilder *graph; + IMediaStream *stream; + HRESULT hr; + ULONG ref; + IPin *pin; + + hr = IAMMultiMediaStream_Initialize(mmstream, STREAMTYPE_READ, 0, NULL); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_AddMediaStream(mmstream, NULL, &MSPID_PrimaryAudio, 0, &stream); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMediaStream_QueryInterface(stream, &IID_IPin, (void **)&pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_GetFilterGraph(mmstream, &graph); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ok(graph != NULL, "Expected non-null graph\n"); + testfilter_init(&source); + hr = IGraphBuilder_AddFilter(graph, &source.filter.IBaseFilter_iface, NULL); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IGraphBuilder_ConnectDirect(graph, &source.source.pin.IPin_iface, pin, &mt); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IPin_EndOfStream(pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IPin_EndOfStream(pin); + ok(hr == E_FAIL, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IPin_EndOfStream(pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IPin_EndOfStream(pin); + ok(hr == E_FAIL, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_STOP); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IPin_EndOfStream(pin); + ok(hr == E_FAIL, "Got hr %#x.\n", hr); + + IGraphBuilder_Disconnect(graph, pin); + IGraphBuilder_Disconnect(graph, &source.source.pin.IPin_iface); + + ref = IAMMultiMediaStream_Release(mmstream); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IGraphBuilder_Release(graph); + ok(!ref, "Got outstanding refcount %d.\n", ref); + IPin_Release(pin); + ref = IMediaStream_Release(stream); + ok(!ref, "Got outstanding refcount %d.\n", ref); +} + void test_mediastreamfilter_get_state(void) { IAMMultiMediaStream *mmstream = create_ammultimediastream(); @@ -2986,6 +3076,7 @@ START_TEST(amstream) test_audiostream_set_format(); test_audiostream_receive_connection(); test_audiostream_set_state(); + test_audiostream_end_of_stream();
test_mediastreamfilter_get_state(); test_mediastreamfilter_stop_pause_run();
On 4/18/20 9:34 AM, Anton Baskanov wrote:
> @@ -347,6 +377,11 @@ static HRESULT WINAPI audio_IAMMediaStream_SetState(IAMMediaStream *iface, FILTE
>      EnterCriticalSection(&This->cs);
>
> +    if (State_Stopped == state)
> +        flush_event_queue(This);
> +    if (State_Stopped == This->state)
> +        This->eos = FALSE;
This works, but it feels a little clearer to me to have:

    if (state == State_Stopped)
    {
        flush_event_queue(This);
        This->eos = FALSE;
    }
> +    event = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, sizeof(*event));
amstream is compiled with msvcrt, so this can be calloc().
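Concretely, that could look something like the sketch below; queued_event_create() is just an illustrative helper name, the rest follows your patch (calloc()/free() come from <stdlib.h>):

    /* CRT allocation instead of HeapAlloc(); calloc() zero-initializes, which
     * matches HEAP_ZERO_MEMORY, and the matching release becomes free(). */
    static struct queued_event *queued_event_create(enum queued_event_type type)
    {
        struct queued_event *event = calloc(1, sizeof(*event));

        if (!event)
            return NULL;
        event->type = type;
        return event;
    }

    static void remove_queued_event(struct queued_event *event)
    {
        list_remove(&event->entry);
        free(event);
    }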
On 4/20/20 5:12 AM, you wrote:
>> +    if (State_Stopped == state)
>> +        flush_event_queue(This);
>> +    if (State_Stopped == This->state)
>> +        This->eos = FALSE;
>
> This works, but it feels a little clearer to me to have:
>
>     if (state == State_Stopped)
>     {
>         flush_event_queue(This);
>         This->eos = FALSE;
>     }
This change results in test failures, since the native implementation clears the EOS state on stopped->paused transition.
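For illustration, a check of that behaviour could look like the fragment below, reusing the setup from test_audiostream_end_of_stream() plus an IAMMediaStream pointer queried from the stream; the expected values are inferred from the behaviour described above, not copied from a specific Windows run:

    /* Fragment only: assumes the mmstream/stream/pin setup from
     * test_audiostream_end_of_stream() and "IAMMediaStream *am_stream;"
     * obtained via IID_IAMMediaStream. */
    hr = IPin_EndOfStream(pin);
    ok(hr == S_OK, "Got hr %#x.\n", hr);
    hr = IPin_EndOfStream(pin);
    ok(hr == E_FAIL, "Got hr %#x.\n", hr);

    hr = IAMMediaStream_SetState(am_stream, State_Paused);
    ok(hr == S_OK, "Got hr %#x.\n", hr);

    /* Leaving State_Stopped clears the EOS state, so EndOfStream is accepted again. */
    hr = IPin_EndOfStream(pin);
    ok(hr == S_OK, "Got hr %#x.\n", hr);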
On 4/21/20 2:35 PM, Anton Baskanov wrote:
> This change results in test failures, since the native implementation clears the EOS state on stopped->paused transition.
Oh, I misread / assumed that EndOfStream() would fail if the filter was stopped (which, granted, at least some quartz filters do). Never mind that, then.
Signed-off-by: Anton Baskanov baskanov@gmail.com
---
 dlls/amstream/audiostream.c    |  50 +++++++++++-
 dlls/amstream/tests/amstream.c | 135 +++++++++++++++++++++++++++++++++
 2 files changed, 182 insertions(+), 3 deletions(-)
diff --git a/dlls/amstream/audiostream.c b/dlls/amstream/audiostream.c index 81cd2975c8..d4136a3a58 100644 --- a/dlls/amstream/audiostream.c +++ b/dlls/amstream/audiostream.c @@ -168,13 +168,18 @@ static HRESULT audiostreamsample_create(IAudioMediaStream *parent, IAudioData *a
enum queued_event_type { - QET_END_OF_STREAM + QET_END_OF_STREAM, + QET_SAMPLE };
struct queued_event { struct list entry; enum queued_event_type type; + IMediaSample *sample; + DWORD length; + BYTE *pointer; + DWORD position; };
struct audio_stream @@ -203,6 +208,8 @@ struct audio_stream static void remove_queued_event(struct queued_event *event) { list_remove(&event->entry); + if (event->sample) + IMediaSample_Release(event->sample); HeapFree(GetProcessHeap(), 0, event); }
@@ -1090,8 +1097,45 @@ static HRESULT WINAPI audio_meminput_GetAllocatorRequirements(IMemInputPin *ifac
static HRESULT WINAPI audio_meminput_Receive(IMemInputPin *iface, IMediaSample *sample) { - FIXME("iface %p, sample %p, stub!\n", iface, sample); - return E_NOTIMPL; + struct audio_stream *stream = impl_from_IMemInputPin(iface); + struct queued_event *event; + BYTE *pointer; + HRESULT hr; + + TRACE("(%p)->(%p)\n", stream, sample); + + EnterCriticalSection(&stream->cs); + + if (stream->state == State_Stopped) + { + LeaveCriticalSection(&stream->cs); + return VFW_E_WRONG_STATE; + } + + hr = IMediaSample_GetPointer(sample, &pointer); + if (FAILED(hr)) + { + LeaveCriticalSection(&stream->cs); + return hr; + } + + event = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, sizeof(*event)); + if (!event) + { + LeaveCriticalSection(&stream->cs); + return E_OUTOFMEMORY; + } + + event->type = QET_SAMPLE; + event->length = IMediaSample_GetActualDataLength(sample); + event->pointer = pointer; + event->sample = sample; + IMediaSample_AddRef(event->sample); + list_add_tail(&stream->event_queue, &event->entry); + + LeaveCriticalSection(&stream->cs); + + return S_OK; }
static HRESULT WINAPI audio_meminput_ReceiveMultiple(IMemInputPin *iface, diff --git a/dlls/amstream/tests/amstream.c b/dlls/amstream/tests/amstream.c index 2ac492bc3c..2582d3c69d 100644 --- a/dlls/amstream/tests/amstream.c +++ b/dlls/amstream/tests/amstream.c @@ -2904,6 +2904,140 @@ void test_audiostream_end_of_stream(void) ok(!ref, "Got outstanding refcount %d.\n", ref); }
+static void test_audiostream_receive(void) +{ + static const WAVEFORMATEX format = + { + .wFormatTag = WAVE_FORMAT_PCM, + .nChannels = 1, + .nSamplesPerSec = 11025, + .wBitsPerSample = 16, + .nBlockAlign = 2, + .nAvgBytesPerSec = 2 * 11025, + }; + + const AM_MEDIA_TYPE mt = + { + .majortype = MEDIATYPE_Audio, + .subtype = MEDIASUBTYPE_PCM, + .formattype = FORMAT_WaveFormatEx, + .cbFormat = sizeof(WAVEFORMATEX), + .pbFormat = (BYTE *)&format, + }; + + ALLOCATOR_PROPERTIES properties = + { + .cBuffers = 3, + .cbBuffer = 16, + .cbAlign = 1, + }; + + IAMMultiMediaStream *mmstream = create_ammultimediastream(); + ALLOCATOR_PROPERTIES actual; + struct testfilter source; + IMemAllocator *allocator; + IGraphBuilder *graph; + IMediaStream *stream; + IMediaSample *sample1; + IMediaSample *sample2; + IMediaSample *sample3; + HRESULT hr; + ULONG ref; + IPin *pin; + + hr = IAMMultiMediaStream_Initialize(mmstream, STREAMTYPE_READ, 0, NULL); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_AddMediaStream(mmstream, NULL, &MSPID_PrimaryAudio, 0, &stream); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMediaStream_QueryInterface(stream, &IID_IPin, (void **)&pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_GetFilterGraph(mmstream, &graph); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ok(graph != NULL, "Expected non-null graph\n"); + testfilter_init(&source); + hr = IGraphBuilder_AddFilter(graph, &source.filter.IBaseFilter_iface, NULL); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = CoCreateInstance(&CLSID_MemoryAllocator, NULL, CLSCTX_INPROC_SERVER, &IID_IMemAllocator, (void **)&allocator); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMemAllocator_SetProperties(allocator, &properties, &actual); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMemAllocator_Commit(allocator); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IGraphBuilder_ConnectDirect(graph, &source.source.pin.IPin_iface, pin, &mt); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IMemAllocator_GetBuffer(allocator, &sample1, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMemInputPin_Receive(source.source.pMemInputPin, sample1); + ok(hr == VFW_E_WRONG_STATE, "Got hr %#x.\n", hr); + ref = IMediaSample_Release(sample1); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IMemAllocator_GetBuffer(allocator, &sample1, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMemInputPin_Receive(source.source.pMemInputPin, sample1); + ok(hr == S_OK, "Got hr %#x.\n", hr); + IMediaSample_AddRef(sample1); + ref = IMediaSample_Release(sample1); + ok(ref == 2, "Got outstanding refcount %d.\n", ref); + + hr = IMemAllocator_GetBuffer(allocator, &sample2, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMemInputPin_Receive(source.source.pMemInputPin, sample2); + ok(hr == S_OK, "Got hr %#x.\n", hr); + IMediaSample_AddRef(sample2); + ref = IMediaSample_Release(sample2); + ok(ref == 2, "Got outstanding refcount %d.\n", ref); + + hr = IPin_EndOfStream(pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IMemAllocator_GetBuffer(allocator, &sample3, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMemInputPin_Receive(source.source.pMemInputPin, sample3); + ok(hr == S_OK, "Got hr %#x.\n", hr); + IMediaSample_AddRef(sample3); + ref = IMediaSample_Release(sample3); + ok(ref == 2, "Got outstanding refcount %d.\n", ref); + + hr = IAMMultiMediaStream_SetState(mmstream, 
STREAMSTATE_STOP); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + ref = IMediaSample_Release(sample1); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IMediaSample_Release(sample2); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IMediaSample_Release(sample3); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + hr = IMemAllocator_GetBuffer(allocator, &sample1, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMemInputPin_Receive(source.source.pMemInputPin, sample1); + ok(hr == VFW_E_WRONG_STATE, "Got hr %#x.\n", hr); + ref = IMediaSample_Release(sample1); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + IGraphBuilder_Disconnect(graph, pin); + IGraphBuilder_Disconnect(graph, &source.source.pin.IPin_iface); + + hr = IMemAllocator_Decommit(allocator); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + ref = IAMMultiMediaStream_Release(mmstream); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IGraphBuilder_Release(graph); + ok(!ref, "Got outstanding refcount %d.\n", ref); + IPin_Release(pin); + ref = IMediaStream_Release(stream); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IMemAllocator_Release(allocator); + ok(!ref, "Got outstanding refcount %d.\n", ref); +} + void test_mediastreamfilter_get_state(void) { IAMMultiMediaStream *mmstream = create_ammultimediastream(); @@ -3077,6 +3211,7 @@ START_TEST(amstream) test_audiostream_receive_connection(); test_audiostream_set_state(); test_audiostream_end_of_stream(); + test_audiostream_receive();
test_mediastreamfilter_get_state(); test_mediastreamfilter_stop_pause_run();
Signed-off-by: Anton Baskanov baskanov@gmail.com
---
 dlls/amstream/audiostream.c    | 150 +++++++++++++++-
 dlls/amstream/tests/amstream.c | 306 +++++++++++++++++++++++++++++++++
 2 files changed, 450 insertions(+), 6 deletions(-)
diff --git a/dlls/amstream/audiostream.c b/dlls/amstream/audiostream.c index d4136a3a58..5176a6cb36 100644 --- a/dlls/amstream/audiostream.c +++ b/dlls/amstream/audiostream.c @@ -32,6 +32,13 @@ typedef struct { LONG ref; IAudioMediaStream *parent; IAudioData *audio_data; + HANDLE update_event; + + struct list entry; + DWORD length; + BYTE *pointer; + DWORD position; + HRESULT update_hr; } IAudioStreamSampleImpl;
static inline IAudioStreamSampleImpl *impl_from_IAudioStreamSample(IAudioStreamSample *iface) @@ -78,7 +85,10 @@ static ULONG WINAPI IAudioStreamSampleImpl_Release(IAudioStreamSample *iface) TRACE("(%p)->(): new ref = %u\n", iface, ref);
if (!ref) + { + CloseHandle(This->update_event); HeapFree(GetProcessHeap(), 0, This); + }
return ref; } @@ -108,12 +118,7 @@ static HRESULT WINAPI IAudioStreamSampleImpl_SetSampleTimes(IAudioStreamSample * }
static HRESULT WINAPI IAudioStreamSampleImpl_Update(IAudioStreamSample *iface, DWORD flags, HANDLE event, - PAPCFUNC func_APC, DWORD APC_data) -{ - FIXME("(%p)->(%x,%p,%p,%u): stub\n", iface, flags, event, func_APC, APC_data); - - return E_NOTIMPL; -} + PAPCFUNC func_APC, DWORD APC_data);
static HRESULT WINAPI IAudioStreamSampleImpl_CompletionStatus(IAudioStreamSample *iface, DWORD flags, DWORD milliseconds) { @@ -160,6 +165,7 @@ static HRESULT audiostreamsample_create(IAudioMediaStream *parent, IAudioData *a object->ref = 1; object->parent = parent; object->audio_data = audio_data; + object->update_event = CreateEventW(NULL, FALSE, FALSE, NULL);
*audio_stream_sample = &object->IAudioStreamSample_iface;
@@ -203,6 +209,7 @@ struct audio_stream FILTER_STATE state; BOOL eos; struct list event_queue; + struct list update_queue; };
static void remove_queued_event(struct queued_event *event) @@ -213,6 +220,18 @@ static void remove_queued_event(struct queued_event *event) HeapFree(GetProcessHeap(), 0, event); }
+static void remove_queued_update(IAudioStreamSampleImpl *sample) +{ + HRESULT hr; + + hr = IAudioData_SetActual(sample->audio_data, sample->position); + if (FAILED(hr)) + sample->update_hr = hr; + + list_remove(&sample->entry); + SetEvent(sample->update_event); +} + static void flush_event_queue(struct audio_stream *stream) { while (!list_empty(&stream->event_queue)) @@ -224,6 +243,41 @@ static void flush_event_queue(struct audio_stream *stream) } }
+static void process_update(IAudioStreamSampleImpl *sample, struct queued_event *event) +{ + DWORD advance; + + if (event->type == QET_END_OF_STREAM) + { + sample->update_hr = sample->position ? S_OK : MS_S_ENDOFSTREAM; + return; + } + + advance = min(event->length - event->position, sample->length - sample->position); + CopyMemory(&sample->pointer[sample->position], &event->pointer[event->position], advance); + + event->position += advance; + sample->position += advance; + + sample->update_hr = (sample->position == sample->length) ? S_OK : MS_S_PENDING; +} + +static void process_updates(struct audio_stream *stream) +{ + while (!list_empty(&stream->update_queue) && !list_empty(&stream->event_queue)) + { + IAudioStreamSampleImpl *sample = LIST_ENTRY(list_head(&stream->update_queue), IAudioStreamSampleImpl, entry); + struct queued_event *event = LIST_ENTRY(list_head(&stream->event_queue), struct queued_event, entry); + + process_update(sample, event); + + if (MS_S_PENDING != sample->update_hr) + remove_queued_update(sample); + if ((event->type != QET_END_OF_STREAM) && (event->position == event->length)) + remove_queued_event(event); + } +} + static inline struct audio_stream *impl_from_IAMMediaStream(IAMMediaStream *iface) { return CONTAINING_RECORD(iface, struct audio_stream, IAMMediaStream_iface); @@ -449,6 +503,85 @@ static inline struct audio_stream *impl_from_IAudioMediaStream(IAudioMediaStream return CONTAINING_RECORD(iface, struct audio_stream, IAudioMediaStream_iface); }
+static HRESULT WINAPI IAudioStreamSampleImpl_Update(IAudioStreamSample *iface, DWORD flags, HANDLE event, + PAPCFUNC func_APC, DWORD APC_data) +{ + IAudioStreamSampleImpl *sample = impl_from_IAudioStreamSample(iface); + struct audio_stream *stream; + DWORD length; + BYTE *pointer; + HRESULT hr; + + TRACE("(%p)->(%x,%p,%p,%u)\n", iface, flags, event, func_APC, APC_data); + + stream = impl_from_IAudioMediaStream(sample->parent); + + hr = IAudioData_GetInfo(sample->audio_data, &length, &pointer, NULL); + if (FAILED(hr)) + return hr; + + if (event && func_APC) + return E_INVALIDARG; + + if (func_APC) + { + FIXME("APC support is not implemented!\n"); + return E_NOTIMPL; + } + + if (event) + { + FIXME("Event parameter support is not implemented!\n"); + return E_NOTIMPL; + } + + if (flags & ~SSUPDATE_ASYNC) + { + FIXME("Unsupported flags: %x\n", flags); + return E_NOTIMPL; + } + + EnterCriticalSection(&stream->cs); + + if (stream->state == State_Stopped) + { + LeaveCriticalSection(&stream->cs); + return MS_E_NOTRUNNING; + } + if (!stream->peer) + { + LeaveCriticalSection(&stream->cs); + return MS_S_ENDOFSTREAM; + } + if (MS_S_PENDING == sample->update_hr) + { + LeaveCriticalSection(&stream->cs); + return MS_E_BUSY; + } + + sample->length = length; + sample->pointer = pointer; + sample->position = 0; + sample->update_hr = MS_S_PENDING; + ResetEvent(sample->update_event); + list_add_tail(&stream->update_queue, &sample->entry); + + process_updates(stream); + + hr = sample->update_hr; + if (hr != MS_S_PENDING || (flags & SSUPDATE_ASYNC)) + { + LeaveCriticalSection(&stream->cs); + return hr; + } + + LeaveCriticalSection(&stream->cs); + + WaitForSingleObject(sample->update_event, INFINITE); + + return sample->update_hr; +} + /*** IUnknown methods ***/ static HRESULT WINAPI audio_IAudioMediaStream_QueryInterface(IAudioMediaStream *iface, REFIID riid, void **ret_iface) @@ -986,6 +1119,8 @@ static HRESULT WINAPI audio_sink_EndOfStream(IPin *iface)
stream->eos = TRUE;
+ process_updates(stream); + LeaveCriticalSection(&stream->cs);
return S_OK; @@ -1133,6 +1268,8 @@ static HRESULT WINAPI audio_meminput_Receive(IMemInputPin *iface, IMediaSample * IMediaSample_AddRef(event->sample); list_add_tail(&stream->event_queue, &event->entry);
+ process_updates(stream); + LeaveCriticalSection(&stream->cs);
return S_OK; @@ -1189,6 +1326,7 @@ HRESULT audio_stream_create(IMultiMediaStream *parent, const MSPID *purpose_id, object->purpose_id = *purpose_id; object->stream_type = stream_type; list_init(&object->event_queue); + list_init(&object->update_queue);
*media_stream = &object->IAMMediaStream_iface;
diff --git a/dlls/amstream/tests/amstream.c b/dlls/amstream/tests/amstream.c index 2582d3c69d..d5040747b8 100644 --- a/dlls/amstream/tests/amstream.c +++ b/dlls/amstream/tests/amstream.c @@ -2370,10 +2370,28 @@ static void testfilter_destroy(struct strmbase_filter *iface) strmbase_filter_cleanup(&filter->filter); }
+static HRESULT testfilter_init_stream(struct strmbase_filter *iface) +{ + struct testfilter *filter = impl_from_BaseFilter(iface); + + BaseOutputPinImpl_Active(&filter->source); + return S_OK; +} + +static HRESULT testfilter_cleanup_stream(struct strmbase_filter *iface) +{ + struct testfilter *filter = impl_from_BaseFilter(iface); + + BaseOutputPinImpl_Inactive(&filter->source); + return S_OK; +} + static const struct strmbase_filter_ops testfilter_ops = { .filter_get_pin = testfilter_get_pin, .filter_destroy = testfilter_destroy, + .filter_init_stream = testfilter_init_stream, + .filter_cleanup_stream = testfilter_cleanup_stream, };
static HRESULT testsource_query_accept(struct strmbase_pin *iface, const AM_MEDIA_TYPE *mt) @@ -3038,6 +3056,292 @@ static void test_audiostream_receive(void) ok(!ref, "Got outstanding refcount %d.\n", ref); }
+static void CALLBACK apc_func(ULONG_PTR param) +{ +} + +static IMediaSample *audiostream_allocate_sample(struct testfilter *source, const BYTE *input_data, DWORD input_length) +{ + IMediaSample *sample; + BYTE *sample_data; + HRESULT hr; + + hr = BaseOutputPinImpl_GetDeliveryBuffer(&source->source, &sample, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IMediaSample_GetPointer(sample, &sample_data); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IMediaSample_SetActualDataLength(sample, input_length); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + memcpy(sample_data, input_data, input_length); + + return sample; +} + +static IPin *audiostream_pin; +static IMemInputPin *audiostream_mem_input_pin; +static IMediaSample *audiostream_media_sample; + +static DWORD CALLBACK audiostream_end_of_stream(void *param) +{ + HRESULT hr; + + Sleep(100); + hr = IPin_EndOfStream(audiostream_pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + return 0; +} + +static DWORD CALLBACK audiostream_receive(void *param) +{ + HRESULT hr; + + Sleep(100); + hr = IMemInputPin_Receive(audiostream_mem_input_pin, audiostream_media_sample); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + return 0; +} + +static void test_audiostreamsample_update(void) +{ + static const WAVEFORMATEX format = + { + .wFormatTag = WAVE_FORMAT_PCM, + .nChannels = 1, + .nSamplesPerSec = 11025, + .wBitsPerSample = 16, + .nBlockAlign = 2, + .nAvgBytesPerSec = 2 * 11025, + }; + + const AM_MEDIA_TYPE mt = + { + .majortype = MEDIATYPE_Audio, + .subtype = MEDIASUBTYPE_PCM, + .formattype = FORMAT_WaveFormatEx, + .cbFormat = sizeof(WAVEFORMATEX), + .pbFormat = (BYTE *)&format, + }; + + static const BYTE test_data[] = { 0, 1, 2, 3, 4, 5, 6, 7 }; + IAMMultiMediaStream *mmstream = create_ammultimediastream(); + IAudioStreamSample *stream_sample; + IAudioMediaStream *audio_stream; + IMemInputPin *mem_input_pin; + IMediaSample *media_sample1; + IMediaSample *media_sample2; + struct testfilter source; + IAudioData *audio_data; + IGraphBuilder *graph; + IMediaStream *stream; + DWORD actual_length; + BYTE buffer[6]; + HANDLE thread; + HANDLE event; + HRESULT hr; + ULONG ref; + IPin *pin; + + hr = IAMMultiMediaStream_Initialize(mmstream, STREAMTYPE_READ, 0, NULL); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_AddMediaStream(mmstream, NULL, &MSPID_PrimaryAudio, 0, &stream); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMediaStream_QueryInterface(stream, &IID_IAudioMediaStream, (void **)&audio_stream); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMediaStream_QueryInterface(stream, &IID_IPin, (void **)&pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMediaStream_QueryInterface(stream, &IID_IMemInputPin, (void **)&mem_input_pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_GetFilterGraph(mmstream, &graph); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ok(graph != NULL, "Expected non-null graph\n"); + testfilter_init(&source); + hr = IGraphBuilder_AddFilter(graph, &source.filter.IBaseFilter_iface, NULL); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = CoCreateInstance(&CLSID_AMAudioData, NULL, CLSCTX_INPROC_SERVER, &IID_IAudioData, (void **)&audio_data); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAudioMediaStream_CreateSample(audio_stream, audio_data, 0, &stream_sample); + ok(hr == S_OK, "Got hr %#x.\n", hr); + event = CreateEventW(NULL, FALSE, FALSE, NULL); + ok(event != NULL, "Expected non-NULL event."); + + hr = IAudioStreamSample_Update(stream_sample, 0, event, apc_func, 0); + ok(hr == MS_E_NOTINIT, "Got hr 
%#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == MS_E_NOTINIT, "Got hr %#x.\n", hr); + + hr = IAudioData_SetBuffer(audio_data, sizeof(buffer), buffer, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample, 0, event, apc_func, 0); + ok(hr == E_INVALIDARG, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == MS_E_NOTRUNNING, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == MS_S_ENDOFSTREAM, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_STOP); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IGraphBuilder_ConnectDirect(graph, &source.source.pin.IPin_iface, pin, &mt); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + media_sample1 = audiostream_allocate_sample(&source, test_data, 8); + hr = IMemInputPin_Receive(mem_input_pin, media_sample1); + ok(hr == S_OK, "Got hr %#x.\n", hr); + IMediaSample_AddRef(media_sample1); + ref = IMediaSample_Release(media_sample1); + ok(ref == 2, "Got outstanding refcount %d.\n", ref); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioData_GetInfo(audio_data, NULL, NULL, &actual_length); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ok(actual_length == 6, "Got actual length %u.\n", actual_length); + + ok(memcmp(buffer, test_data, 6) == 0, "Sample data didn't match.\n"); + + IMediaSample_AddRef(media_sample1); + ref = IMediaSample_Release(media_sample1); + ok(ref == 2, "Got outstanding refcount %d.\n", ref); + + media_sample2 = audiostream_allocate_sample(&source, test_data, 8); + hr = IMemInputPin_Receive(mem_input_pin, media_sample2); + ok(hr == S_OK, "Got hr %#x.\n", hr); + IMediaSample_AddRef(media_sample2); + ref = IMediaSample_Release(media_sample2); + ok(ref == 2, "Got outstanding refcount %d.\n", ref); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioData_GetInfo(audio_data, NULL, NULL, &actual_length); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ok(actual_length == 6, "Got actual length %u.\n", actual_length); + + ok(memcmp(buffer, &test_data[6], 2) == 0, "Sample data didn't match.\n"); + ok(memcmp(&buffer[2], test_data, 4) == 0, "Sample data didn't match.\n"); + + ref = IMediaSample_Release(media_sample1); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + hr = IPin_EndOfStream(pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioData_GetInfo(audio_data, NULL, NULL, &actual_length); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ok(actual_length == 4, "Got actual length %u.\n", actual_length); + + ok(memcmp(buffer, &test_data[4], 4) == 0, "Sample data didn't match.\n"); + + ref = IMediaSample_Release(media_sample2); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == MS_S_ENDOFSTREAM, "Got hr %#x.\n", hr); + + media_sample1 = audiostream_allocate_sample(&source, test_data, 8); + hr = IMemInputPin_Receive(mem_input_pin, media_sample1); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ref = IMediaSample_Release(media_sample1); + 
ok(ref == 1, "Got outstanding refcount %d.\n", ref); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == MS_S_ENDOFSTREAM, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_STOP); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + media_sample1 = audiostream_allocate_sample(&source, test_data, 6); + + audiostream_mem_input_pin = mem_input_pin; + audiostream_media_sample = media_sample1; + thread = CreateThread(NULL, 0, audiostream_receive, NULL, 0, NULL); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioData_GetInfo(audio_data, NULL, NULL, &actual_length); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ok(actual_length == 6, "Got actual length %u.\n", actual_length); + + ok(memcmp(buffer, test_data, 6) == 0, "Sample data didn't match.\n"); + + WaitForSingleObject(thread, INFINITE); + CloseHandle(thread); + + ref = IMediaSample_Release(media_sample1); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + audiostream_pin = pin; + thread = CreateThread(NULL, 0, audiostream_end_of_stream, NULL, 0, NULL); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == MS_S_ENDOFSTREAM, "Got hr %#x.\n", hr); + + WaitForSingleObject(thread, INFINITE); + CloseHandle(thread); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_STOP); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample, SSUPDATE_ASYNC, NULL, NULL, 0); + ok(hr == MS_S_PENDING, "Got hr %#x.\n", hr); + + IAudioStreamSample_AddRef(stream_sample); + ref = IAudioStreamSample_Release(stream_sample); + ok(ref == 1, "Got outstanding refcount %d.\n", ref); + + hr = IAudioStreamSample_Update(stream_sample, SSUPDATE_ASYNC, NULL, NULL, 0); + ok(hr == MS_E_BUSY, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_STOP); + ok(hr == S_OK, "Got hr %#x.\n", hr); + IGraphBuilder_Disconnect(graph, pin); + IGraphBuilder_Disconnect(graph, &source.source.pin.IPin_iface); + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0); + ok(hr == MS_S_ENDOFSTREAM, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_STOP); + + CloseHandle(event); + ref = IAudioStreamSample_Release(stream_sample); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IAudioData_Release(audio_data); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IAMMultiMediaStream_Release(mmstream); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IGraphBuilder_Release(graph); + ok(!ref, "Got outstanding refcount %d.\n", ref); + IPin_Release(pin); + IMemInputPin_Release(mem_input_pin); + IAudioMediaStream_Release(audio_stream); + ref = IMediaStream_Release(stream); + ok(!ref, "Got outstanding refcount %d.\n", ref); +} + void test_mediastreamfilter_get_state(void) { IAMMultiMediaStream *mmstream = create_ammultimediastream(); @@ -3213,6 +3517,8 @@ START_TEST(amstream) test_audiostream_end_of_stream(); test_audiostream_receive();
+ test_audiostreamsample_update(); + test_mediastreamfilter_get_state(); test_mediastreamfilter_stop_pause_run();
Hello Anton, nice work on the patch! It mostly looks good to me; I have some comments and questions inlined.
On 4/18/20 9:34 AM, Anton Baskanov wrote:
+static void process_update(IAudioStreamSampleImpl *sample, struct queued_event *event) +{
- DWORD advance;
- if (event->type == QET_END_OF_STREAM)
- {
sample->update_hr = sample->position ? S_OK : MS_S_ENDOFSTREAM;
return;
- }
- advance = min(event->length - event->position, sample->length - sample->position);
- CopyMemory(&sample->pointer[sample->position], &event->pointer[event->position], advance);
memcpy()?
- event->position += advance;
- sample->position += advance;
- sample->update_hr = (sample->position == sample->length) ? S_OK : MS_S_PENDING;
+}
+static void process_updates(struct audio_stream *stream) +{
- while (!list_empty(&stream->update_queue) && !list_empty(&stream->event_queue))
- {
IAudioStreamSampleImpl *sample = LIST_ENTRY(list_head(&stream->update_queue), IAudioStreamSampleImpl, entry);
struct queued_event *event = LIST_ENTRY(list_head(&stream->event_queue), struct queued_event, entry);
process_update(sample, event);
if (MS_S_PENDING != sample->update_hr)
remove_queued_update(sample);
if ((event->type != QET_END_OF_STREAM) && (event->position == event->length))
remove_queued_event(event);
It kind of feels weird to me to queue EOS events the same way as samples, given they apply to all streams at once, can't be interleaved, can't be sent multiple times, and don't get removed from the queue until the stream is stopped (or flushed, presumably?).
Maybe it would be clearer not to queue EOS events at all, but instead to loop through all queued samples in audio_sink_EndOfStream(), completing them with MS_E_ENDOFSTREAM, and similarly to check for stream->eos in process_updates(). I imagine you could get rid of the queued_event_type enumeration then, unless there's another kind of event that I'm not anticipating?
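Roughly the shape I have in mind, purely for illustration (untested; it assumes a new eos flag on struct audio_stream, reuses the remove_queued_update() helper from this patch, and impl_from_IPin() stands in for whatever helper the sink pin already uses):

    static HRESULT WINAPI audio_sink_EndOfStream(IPin *iface)
    {
        struct audio_stream *stream = impl_from_IPin(iface);

        EnterCriticalSection(&stream->cs);

        stream->eos = TRUE;

        /* Any update still queued at this point has no more data coming, so
         * complete it the same way process_update() would on an EOS event. */
        while (!list_empty(&stream->update_queue))
        {
            IAudioStreamSampleImpl *sample = LIST_ENTRY(list_head(&stream->update_queue),
                    IAudioStreamSampleImpl, entry);
            sample->update_hr = sample->position ? S_OK : MS_S_ENDOFSTREAM;
            remove_queued_update(sample);
        }

        LeaveCriticalSection(&stream->cs);

        return S_OK;
    }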
- }
+}
static inline struct audio_stream *impl_from_IAMMediaStream(IAMMediaStream *iface) { return CONTAINING_RECORD(iface, struct audio_stream, IAMMediaStream_iface); @@ -449,6 +503,85 @@ static inline struct audio_stream *impl_from_IAudioMediaStream(IAudioMediaStream return CONTAINING_RECORD(iface, struct audio_stream, IAudioMediaStream_iface); }
+static HRESULT WINAPI IAudioStreamSampleImpl_Update(IAudioStreamSample *iface, DWORD flags, HANDLE event,
PAPCFUNC func_APC, DWORD APC_data)
+{
- IAudioStreamSampleImpl *sample = impl_from_IAudioStreamSample(iface);
- struct audio_stream *stream;
- DWORD length;
- BYTE *pointer;
- HRESULT hr;
- TRACE("(%p)->(%x,%p,%p,%u)\n", iface, flags, event, func_APC, APC_data);
- stream = impl_from_IAudioMediaStream(sample->parent);
Perhaps easier would be to take patch 2/6 a step further and store sample->parent as a pointer to struct audio_stream. Then you wouldn't need to move this function out of place.
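I.e. something like this, illustrative only:

    typedef struct {
        IAudioStreamSample IAudioStreamSample_iface;
        LONG ref;
        struct audio_stream *parent;    /* instead of IAudioMediaStream *parent */
        IAudioData *audio_data;
        /* ... */
    } IAudioStreamSampleImpl;

and then Update() can simply do

    struct audio_stream *stream = sample->parent;

without a forward declaration and without moving the function.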
- hr = IAudioData_GetInfo(sample->audio_data, &length, &pointer, NULL);
- if (FAILED(hr))
return hr;
- if (event && func_APC)
return E_INVALIDARG;
- if (func_APC)
- {
FIXME("APC support is not implemented!\n");
return E_NOTIMPL;
- }
- if (event)
- {
FIXME("Event parameter support is not implemented!\n");
return E_NOTIMPL;
- }
- if (flags & ~SSUPDATE_ASYNC)
- {
FIXME("Unsupported flags: %x\n", flags);
return E_NOTIMPL;
- }
- EnterCriticalSection(&stream->cs);
- if (stream->state == State_Stopped)
- {
LeaveCriticalSection(&stream->cs);
return MS_E_NOTRUNNING;
- }
Do you have tests for how Update() behaves while paused? If it's as simple as "identical to stopped or running", it'd be nice to add a quick test below.
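Something along these lines, say, dropped into test_audiostreamsample_update() with an IAMMediaStream *am_stream local added — the expected hr is only a placeholder, I haven't checked what native actually returns while paused:

    hr = IMediaStream_QueryInterface(stream, &IID_IAMMediaStream, (void **)&am_stream);
    ok(hr == S_OK, "Got hr %#x.\n", hr);

    hr = IAMMediaStream_SetState(am_stream, State_Paused);
    ok(hr == S_OK, "Got hr %#x.\n", hr);

    hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0);
    /* Placeholder: does paused behave like stopped here, or like running? */
    ok(hr == MS_E_NOTRUNNING, "Got hr %#x.\n", hr);

    hr = IAMMediaStream_SetState(am_stream, State_Running);
    ok(hr == S_OK, "Got hr %#x.\n", hr);

    IAMMediaStream_Release(am_stream);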
- if (!stream->peer)
- {
LeaveCriticalSection(&stream->cs);
return MS_S_ENDOFSTREAM;
- }
- if (MS_S_PENDING == sample->update_hr)
- {
LeaveCriticalSection(&stream->cs);
return MS_E_BUSY;
- }
- sample->length = length;
- sample->pointer = pointer;
- sample->position = 0;
- sample->update_hr = MS_S_PENDING;
- ResetEvent(sample->update_event);
- list_add_tail(&stream->update_queue, &sample->entry);
- process_updates(stream);
- hr = sample->update_hr;
- if (hr != MS_S_PENDING || (flags & SSUPDATE_ASYNC))
- {
LeaveCriticalSection(&stream->cs);
return hr;
- }
- LeaveCriticalSection(&stream->cs);
- WaitForSingleObject(sample->update_event, INFINITE);
- return sample->update_hr;
+}
Hello Zeb, thanks for the comments, I've replied inline.
On 4/20/20 5:07 AM, you wrote:
Hello Anton, nice work on the patch! It mostly looks good to me; I have some comments and questions inlined.
On 4/18/20 9:34 AM, Anton Baskanov wrote:
+static void process_update(IAudioStreamSampleImpl *sample, struct queued_event *event) +{
- DWORD advance;
- if (event->type == QET_END_OF_STREAM)
- {
sample->update_hr = sample->position ? S_OK : MS_S_ENDOFSTREAM;
return;
- }
- advance = min(event->length - event->position, sample->length -
sample->position); + CopyMemory(&sample->pointer[sample->position], &event->pointer[event->position], advance);
memcpy()?
Done.
- event->position += advance;
- sample->position += advance;
- sample->update_hr = (sample->position == sample->length) ? S_OK :
MS_S_PENDING; +}
+static void process_updates(struct audio_stream *stream) +{
- while (!list_empty(&stream->update_queue) &&
!list_empty(&stream->event_queue)) + {
IAudioStreamSampleImpl *sample =
LIST_ENTRY(list_head(&stream->update_queue), IAudioStreamSampleImpl, entry); + struct queued_event *event = LIST_ENTRY(list_head(&stream->event_queue), struct queued_event, entry);
process_update(sample, event);
if (MS_S_PENDING != sample->update_hr)
remove_queued_update(sample);
if ((event->type != QET_END_OF_STREAM) && (event->position ==
event->length)) + remove_queued_event(event);
It kind of feels weird to me to queue EOS events the same way as samples, given they apply to all streams at once, can't be interleaved, can't be sent multiple times, and don't get removed from the queue until the stream is stopped (or flushed, presumably?).
Maybe it would be clearer not to queue EOS events at all, but instead to loop through all queued samples in audio_sink_EndOfStream(), completing them with MS_E_ENDOFSTREAM, and similarly to check for stream->eos in process_updates(). I imagine you could get rid of the queued_event_type enumeration then, unless there's another kind of event that I'm not anticipating?
Queuing EOS events is actually the correct way of doing things; otherwise, the final samples would get lost. Consider the following scenario:

1. Receive()
2. EndOfStream()
3. Update() -> S_OK
4. Update() -> MS_S_ENDOFSTREAM

It is expected that Update() retrieves all available data before returning MS_S_ENDOFSTREAM. The tests confirm this behavior.
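In test form this is roughly (a condensed variation of what the tests in this patch do, with the sample-release bookkeeping omitted):

    media_sample1 = audiostream_allocate_sample(&source, test_data, 6);
    hr = IMemInputPin_Receive(mem_input_pin, media_sample1);
    ok(hr == S_OK, "Got hr %#x.\n", hr);

    hr = IPin_EndOfStream(pin);
    ok(hr == S_OK, "Got hr %#x.\n", hr);

    /* The data queued before EndOfStream() is still delivered... */
    hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0);
    ok(hr == S_OK, "Got hr %#x.\n", hr);

    /* ...and only the following Update() reports end of stream. */
    hr = IAudioStreamSample_Update(stream_sample, 0, NULL, NULL, 0);
    ok(hr == MS_S_ENDOFSTREAM, "Got hr %#x.\n", hr);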
- }
+}
static inline struct audio_stream *impl_from_IAMMediaStream(IAMMediaStream *iface) {
return CONTAINING_RECORD(iface, struct audio_stream, IAMMediaStream_iface);
@@ -449,6 +503,85 @@ static inline struct audio_stream *impl_from_IAudioMediaStream(IAudioMediaStream return CONTAINING_RECORD(iface, struct audio_stream, IAudioMediaStream_iface); }
+static HRESULT WINAPI IAudioStreamSampleImpl_Update(IAudioStreamSample *iface, DWORD flags, HANDLE event, + PAPCFUNC func_APC, DWORD APC_data) +{
- IAudioStreamSampleImpl *sample = impl_from_IAudioStreamSample(iface);
- struct audio_stream *stream;
- DWORD length;
- BYTE *pointer;
- HRESULT hr;
- TRACE("(%p)->(%x,%p,%p,%u)\n", iface, flags, event, func_APC,
APC_data); +
- stream = impl_from_IAudioMediaStream(sample->parent);
Perhaps easier would be to take patch 2/6 a step further and store sample->parent as a pointer to struct audio_stream. Then you wouldn't need to move this function out of place.
Done.
- hr = IAudioData_GetInfo(sample->audio_data, &length, &pointer, NULL);
- if (FAILED(hr))
return hr;
- if (event && func_APC)
return E_INVALIDARG;
- if (func_APC)
- {
FIXME("APC support is not implemented!\n");
return E_NOTIMPL;
- }
- if (event)
- {
FIXME("Event parameter support is not implemented!\n");
return E_NOTIMPL;
- }
- if (flags & ~SSUPDATE_ASYNC)
- {
FIXME("Unsupported flags: %x\n", flags);
return E_NOTIMPL;
- }
- EnterCriticalSection(&stream->cs);
- if (stream->state == State_Stopped)
- {
LeaveCriticalSection(&stream->cs);
return MS_E_NOTRUNNING;
- }
Do you have tests for how Update() behaves while paused? If it's as simple as "identical to stopped or running", it'd be nice to add a quick test below.
- if (!stream->peer)
- {
LeaveCriticalSection(&stream->cs);
return MS_S_ENDOFSTREAM;
- }
- if (MS_S_PENDING == sample->update_hr)
- {
LeaveCriticalSection(&stream->cs);
return MS_E_BUSY;
- }
- sample->length = length;
- sample->pointer = pointer;
- sample->position = 0;
- sample->update_hr = MS_S_PENDING;
- ResetEvent(sample->update_event);
- list_add_tail(&stream->update_queue, &sample->entry);
- process_updates(stream);
- hr = sample->update_hr;
- if (hr != MS_S_PENDING || (flags & SSUPDATE_ASYNC))
- {
LeaveCriticalSection(&stream->cs);
return hr;
- }
- LeaveCriticalSection(&stream->cs);
- WaitForSingleObject(sample->update_event, INFINITE);
- return sample->update_hr;
+}
On 4/21/20 2:35 PM, Anton Baskanov wrote:
Hello Zeb, thanks for the comments, I've replied inline.
On 4/20/20 5:07 AM, you wrote:
Hello Anton, nice work on the patch! It mostly looks good to me; I have some comments and questions inlined.
On 4/18/20 9:34 AM, Anton Baskanov wrote:
+static void process_update(IAudioStreamSampleImpl *sample, struct queued_event *event) +{
- DWORD advance;
- if (event->type == QET_END_OF_STREAM)
- {
sample->update_hr = sample->position ? S_OK : MS_S_ENDOFSTREAM;
return;
- }
- advance = min(event->length - event->position, sample->length -
sample->position); + CopyMemory(&sample->pointer[sample->position], &event->pointer[event->position], advance);
memcpy()?
Done.
- event->position += advance;
- sample->position += advance;
- sample->update_hr = (sample->position == sample->length) ? S_OK :
MS_S_PENDING; +}
+static void process_updates(struct audio_stream *stream) +{
- while (!list_empty(&stream->update_queue) &&
!list_empty(&stream->event_queue)) + {
IAudioStreamSampleImpl *sample =
LIST_ENTRY(list_head(&stream->update_queue), IAudioStreamSampleImpl, entry); + struct queued_event *event = LIST_ENTRY(list_head(&stream->event_queue), struct queued_event, entry);
process_update(sample, event);
if (MS_S_PENDING != sample->update_hr)
remove_queued_update(sample);
if ((event->type != QET_END_OF_STREAM) && (event->position ==
event->length)) + remove_queued_event(event);
It kind of feels weird to me to queue EOS events the same way as samples, given they apply to all streams at once, can't be interleaved, can't be sent multiple times, and don't get removed from the queue until the stream is stopped (or flushed, presumably?).
Maybe it would be clearer not to queue EOS events at all, but instead to loop through all queued samples in audio_sink_EndOfStream(), completing them with MS_E_ENDOFSTREAM, and similarly to check for stream->eos in process_updates(). I imagine you could get rid of the queued_event_type enumeration then, unless there's another kind of event that I'm not anticipating?
Queuing EOS events is actually the correct way of doing things; otherwise, the final samples would get lost. Consider the following scenario:

1. Receive()
2. EndOfStream()
3. Update() -> S_OK
4. Update() -> MS_S_ENDOFSTREAM

It is expected that Update() retrieves all available data before returning MS_S_ENDOFSTREAM. The tests confirm this behavior.
Sure, I'd expect that. As far as I can see, though, it would still be simpler this way; you'd just need to make sure you consume all queued samples *before* checking stream->eos in process_updates().
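Sketch of what I mean, illustrative only (it keeps this patch's names and assumes the queue then holds only received data, plus an eos flag on the stream):

    static void process_updates(struct audio_stream *stream)
    {
        /* Hand out queued data first... */
        while (!list_empty(&stream->update_queue) && !list_empty(&stream->event_queue))
        {
            IAudioStreamSampleImpl *sample = LIST_ENTRY(list_head(&stream->update_queue),
                    IAudioStreamSampleImpl, entry);
            struct queued_event *event = LIST_ENTRY(list_head(&stream->event_queue),
                    struct queued_event, entry);

            process_update(sample, event);

            if (sample->update_hr != MS_S_PENDING)
                remove_queued_update(sample);
            if (event->position == event->length)
                remove_queued_event(event);
        }

        /* ...and only once the data is exhausted let EOS complete what is left. */
        if (stream->eos)
        {
            while (!list_empty(&stream->update_queue))
            {
                IAudioStreamSampleImpl *sample = LIST_ENTRY(list_head(&stream->update_queue),
                        IAudioStreamSampleImpl, entry);
                sample->update_hr = sample->position ? S_OK : MS_S_ENDOFSTREAM;
                remove_queued_update(sample);
            }
        }
    }

process_update() would then only keep the data-copying path, and the queued_event_type enumeration goes away.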
- }
+}
static inline struct audio_stream *impl_from_IAMMediaStream(IAMMediaStream *iface) {
return CONTAINING_RECORD(iface, struct audio_stream, IAMMediaStream_iface);
@@ -449,6 +503,85 @@ static inline struct audio_stream *impl_from_IAudioMediaStream(IAudioMediaStream return CONTAINING_RECORD(iface, struct audio_stream, IAudioMediaStream_iface); }
+static HRESULT WINAPI IAudioStreamSampleImpl_Update(IAudioStreamSample *iface, DWORD flags, HANDLE event, + PAPCFUNC func_APC, DWORD APC_data) +{
- IAudioStreamSampleImpl *sample = impl_from_IAudioStreamSample(iface);
- struct audio_stream *stream;
- DWORD length;
- BYTE *pointer;
- HRESULT hr;
- TRACE("(%p)->(%x,%p,%p,%u)\n", iface, flags, event, func_APC,
APC_data); +
- stream = impl_from_IAudioMediaStream(sample->parent);
Perhaps easier would be to take patch 2/6 a step further and store sample->parent as a pointer to struct audio_stream. Then you wouldn't need to move this function out of place.
Done.
- hr = IAudioData_GetInfo(sample->audio_data, &length, &pointer, NULL);
- if (FAILED(hr))
return hr;
- if (event && func_APC)
return E_INVALIDARG;
- if (func_APC)
- {
FIXME("APC support is not implemented!\n");
return E_NOTIMPL;
- }
- if (event)
- {
FIXME("Event parameter support is not implemented!\n");
return E_NOTIMPL;
- }
- if (flags & ~SSUPDATE_ASYNC)
- {
FIXME("Unsupported flags: %x\n", flags);
return E_NOTIMPL;
- }
- EnterCriticalSection(&stream->cs);
- if (stream->state == State_Stopped)
- {
LeaveCriticalSection(&stream->cs);
return MS_E_NOTRUNNING;
- }
Do you have tests for how Update() behaves while paused? If it's as simple as "identical to stopped or running", it'd be nice to add a quick test below.
- if (!stream->peer)
- {
LeaveCriticalSection(&stream->cs);
return MS_S_ENDOFSTREAM;
- }
- if (MS_S_PENDING == sample->update_hr)
- {
LeaveCriticalSection(&stream->cs);
return MS_E_BUSY;
- }
- sample->length = length;
- sample->pointer = pointer;
- sample->position = 0;
- sample->update_hr = MS_S_PENDING;
- ResetEvent(sample->update_event);
- list_add_tail(&stream->update_queue, &sample->entry);
- process_updates(stream);
- hr = sample->update_hr;
- if (hr != MS_S_PENDING || (flags & SSUPDATE_ASYNC))
- {
LeaveCriticalSection(&stream->cs);
return hr;
- }
- LeaveCriticalSection(&stream->cs);
- WaitForSingleObject(sample->update_event, INFINITE);
- return sample->update_hr;
+}
Thanks for the suggestion. I've reworked the event queue; it now only stores the received samples.
On 22/4/20 4:36, you wrote:
On 4/21/20 2:35 PM, Anton Baskanov wrote:
Hello Zeb, thanks for the comments, I've replied inline.
On 4/20/20 5:07 AM, you wrote:
Hello Anton, nice work on the patch! It mostly looks good to me; I have some comments and questions inlined.
On 4/18/20 9:34 AM, Anton Baskanov wrote:
+static void process_update(IAudioStreamSampleImpl *sample, struct queued_event *event) +{
- DWORD advance;
- if (event->type == QET_END_OF_STREAM)
- {
sample->update_hr = sample->position ? S_OK : MS_S_ENDOFSTREAM;
return;
- }
- advance = min(event->length - event->position, sample->length -
sample->position); + CopyMemory(&sample->pointer[sample->position], &event->pointer[event->position], advance);
memcpy()?
Done.
- event->position += advance;
- sample->position += advance;
- sample->update_hr = (sample->position == sample->length) ? S_OK :
MS_S_PENDING; +}
+static void process_updates(struct audio_stream *stream) +{
- while (!list_empty(&stream->update_queue) &&
!list_empty(&stream->event_queue)) + {
IAudioStreamSampleImpl *sample =
LIST_ENTRY(list_head(&stream->update_queue), IAudioStreamSampleImpl, entry); + struct queued_event *event = LIST_ENTRY(list_head(&stream->event_queue), struct queued_event, entry);
process_update(sample, event);
if (MS_S_PENDING != sample->update_hr)
remove_queued_update(sample);
if ((event->type != QET_END_OF_STREAM) && (event->position ==
event->length)) + remove_queued_event(event);
It kind of feels weird to me to queue EOS events the same way as samples, given they apply to all streams at once, can't be interleaved, can't be sent multiple times, and don't get removed from the queue until the stream is stopped (or flushed, presumably?).
Maybe it would be clearer not to queue EOS events at all, but instead to loop through all queued samples in audio_sink_EndOfStream(), completing them with MS_E_ENDOFSTREAM, and similarly to check for stream->eos in process_updates(). I imagine you could get rid of the queued_event_type enumeration then, unless there's another kind of event that I'm not anticipating?
Queuing EOS events is actually the correct way of doing things; otherwise, the final samples would get lost. Consider the following scenario:

1. Receive()
2. EndOfStream()
3. Update() -> S_OK
4. Update() -> MS_S_ENDOFSTREAM

It is expected that Update() retrieves all available data before returning MS_S_ENDOFSTREAM. The tests confirm this behavior.
Sure, I'd expect that. As far as I can see, though, it would still be simpler this way; you'd just need to make sure you consume all queued samples *before* checking stream->eos in process_updates().
- }
+}
static inline struct audio_stream *impl_from_IAMMediaStream(IAMMediaStream *iface) {
return CONTAINING_RECORD(iface, struct audio_stream, IAMMediaStream_iface);
@@ -449,6 +503,85 @@ static inline struct audio_stream *impl_from_IAudioMediaStream(IAudioMediaStream
return CONTAINING_RECORD(iface, struct audio_stream, IAudioMediaStream_iface);
}
+static HRESULT WINAPI IAudioStreamSampleImpl_Update(IAudioStreamSample *iface, DWORD flags, HANDLE event, + PAPCFUNC func_APC, DWORD APC_data) +{
- IAudioStreamSampleImpl *sample =
impl_from_IAudioStreamSample(iface);
- struct audio_stream *stream;
- DWORD length;
- BYTE *pointer;
- HRESULT hr;
- TRACE("(%p)->(%x,%p,%p,%u)\n", iface, flags, event, func_APC,
APC_data); +
- stream = impl_from_IAudioMediaStream(sample->parent);
Perhaps easier would be to take patch 2/6 a step further and store sample->parent as a pointer to struct audio_stream. Then you wouldn't need to move this function out of place.
Done.
- hr = IAudioData_GetInfo(sample->audio_data, &length, &pointer,
NULL);
- if (FAILED(hr))
return hr;
- if (event && func_APC)
return E_INVALIDARG;
- if (func_APC)
- {
FIXME("APC support is not implemented!\n");
return E_NOTIMPL;
- }
- if (event)
- {
FIXME("Event parameter support is not implemented!\n");
return E_NOTIMPL;
- }
- if (flags & ~SSUPDATE_ASYNC)
- {
FIXME("Unsupported flags: %x\n", flags);
return E_NOTIMPL;
- }
- EnterCriticalSection(&stream->cs);
- if (stream->state == State_Stopped)
- {
LeaveCriticalSection(&stream->cs);
return MS_E_NOTRUNNING;
- }
Do you have tests for how Update() behaves while paused? If it's as simple as "identical to stopped or running", it'd be nice to add a quick test below.
- if (!stream->peer)
- {
LeaveCriticalSection(&stream->cs);
return MS_S_ENDOFSTREAM;
- }
- if (MS_S_PENDING == sample->update_hr)
- {
LeaveCriticalSection(&stream->cs);
return MS_E_BUSY;
- }
- sample->length = length;
- sample->pointer = pointer;
- sample->position = 0;
- sample->update_hr = MS_S_PENDING;
- ResetEvent(sample->update_event);
- list_add_tail(&stream->update_queue, &sample->entry);
- process_updates(stream);
- hr = sample->update_hr;
- if (hr != MS_S_PENDING || (flags & SSUPDATE_ASYNC))
- {
LeaveCriticalSection(&stream->cs);
return hr;
- }
- LeaveCriticalSection(&stream->cs);
- WaitForSingleObject(sample->update_event, INFINITE);
- return sample->update_hr;
+}
Signed-off-by: Anton Baskanov baskanov@gmail.com --- dlls/amstream/audiostream.c | 33 +++++-- dlls/amstream/tests/amstream.c | 170 +++++++++++++++++++++++++++++++++ 2 files changed, 196 insertions(+), 7 deletions(-)
diff --git a/dlls/amstream/audiostream.c b/dlls/amstream/audiostream.c index 5176a6cb36..2bd3b00b90 100644 --- a/dlls/amstream/audiostream.c +++ b/dlls/amstream/audiostream.c @@ -119,13 +119,7 @@ static HRESULT WINAPI IAudioStreamSampleImpl_SetSampleTimes(IAudioStreamSample *
static HRESULT WINAPI IAudioStreamSampleImpl_Update(IAudioStreamSample *iface, DWORD flags, HANDLE event, PAPCFUNC func_APC, DWORD APC_data); - -static HRESULT WINAPI IAudioStreamSampleImpl_CompletionStatus(IAudioStreamSample *iface, DWORD flags, DWORD milliseconds) -{ - FIXME("(%p)->(%x,%u): stub\n", iface, flags, milliseconds); - - return E_NOTIMPL; -} +static HRESULT WINAPI IAudioStreamSampleImpl_CompletionStatus(IAudioStreamSample *iface, DWORD flags, DWORD milliseconds);
/*** IAudioStreamSample methods ***/ static HRESULT WINAPI IAudioStreamSampleImpl_GetAudioData(IAudioStreamSample *iface, IAudioData **audio_data) @@ -582,6 +576,31 @@ static HRESULT WINAPI IAudioStreamSampleImpl_Update(IAudioStreamSample *iface, D return sample->update_hr; }
+static HRESULT WINAPI IAudioStreamSampleImpl_CompletionStatus(IAudioStreamSample *iface, DWORD flags, DWORD milliseconds) +{ + IAudioStreamSampleImpl *sample = impl_from_IAudioStreamSample(iface); + struct audio_stream *stream; + HRESULT hr; + + TRACE("(%p)->(%x,%u)\n", iface, flags, milliseconds); + + stream = impl_from_IAudioMediaStream(sample->parent); + + if (flags) + { + FIXME("Unsupported flags: %x\n", flags); + return E_NOTIMPL; + } + + EnterCriticalSection(&stream->cs); + + hr = sample->update_hr; + + LeaveCriticalSection(&stream->cs); + + return hr; +} + /*** IUnknown methods ***/ static HRESULT WINAPI audio_IAudioMediaStream_QueryInterface(IAudioMediaStream *iface, REFIID riid, void **ret_iface) diff --git a/dlls/amstream/tests/amstream.c b/dlls/amstream/tests/amstream.c index d5040747b8..fe56b22943 100644 --- a/dlls/amstream/tests/amstream.c +++ b/dlls/amstream/tests/amstream.c @@ -3342,6 +3342,175 @@ static void test_audiostreamsample_update(void) ok(!ref, "Got outstanding refcount %d.\n", ref); }
+void test_audiostreamsample_completion_status(void) +{ + static const WAVEFORMATEX format = + { + .wFormatTag = WAVE_FORMAT_PCM, + .nChannels = 1, + .nSamplesPerSec = 11025, + .wBitsPerSample = 16, + .nBlockAlign = 2, + .nAvgBytesPerSec = 2 * 11025, + }; + + const AM_MEDIA_TYPE mt = + { + .majortype = MEDIATYPE_Audio, + .subtype = MEDIASUBTYPE_PCM, + .formattype = FORMAT_WaveFormatEx, + .cbFormat = sizeof(WAVEFORMATEX), + .pbFormat = (BYTE *)&format, + }; + + static const BYTE test_data[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 }; + IAMMultiMediaStream *mmstream = create_ammultimediastream(); + IAudioStreamSample *stream_sample1; + IAudioStreamSample *stream_sample2; + IAudioMediaStream *audio_stream; + IMediaSample *media_sample; + struct testfilter source; + IAudioData *audio_data1; + IAudioData *audio_data2; + IGraphBuilder *graph; + IMediaStream *stream; + HANDLE event; + HRESULT hr; + ULONG ref; + IPin *pin; + + event = CreateEventW(NULL, FALSE, FALSE, NULL); + ok(event != NULL, "Expected non-NULL event."); + + hr = IAMMultiMediaStream_Initialize(mmstream, STREAMTYPE_READ, 0, NULL); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_AddMediaStream(mmstream, NULL, &MSPID_PrimaryAudio, 0, &stream); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMediaStream_QueryInterface(stream, &IID_IAudioMediaStream, (void **)&audio_stream); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IMediaStream_QueryInterface(stream, &IID_IPin, (void **)&pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_GetFilterGraph(mmstream, &graph); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ok(graph != NULL, "Expected non-null graph\n"); + testfilter_init(&source); + hr = IGraphBuilder_AddFilter(graph, &source.filter.IBaseFilter_iface, NULL); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = CoCreateInstance(&CLSID_AMAudioData, NULL, CLSCTX_INPROC_SERVER, &IID_IAudioData, (void **)&audio_data1); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = CoCreateInstance(&CLSID_AMAudioData, NULL, CLSCTX_INPROC_SERVER, &IID_IAudioData, (void **)&audio_data2); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAudioData_SetBuffer(audio_data1, 6, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAudioData_SetBuffer(audio_data2, 6, NULL, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAudioMediaStream_CreateSample(audio_stream, audio_data1, 0, &stream_sample1); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAudioMediaStream_CreateSample(audio_stream, audio_data2, 0, &stream_sample2); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IGraphBuilder_ConnectDirect(graph, &source.source.pin.IPin_iface, pin, &mt); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_CompletionStatus(stream_sample1, 0, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample1, SSUPDATE_ASYNC, NULL, NULL, 0); + ok(hr == MS_S_PENDING, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_CompletionStatus(stream_sample1, 0, 0); + ok(hr == MS_S_PENDING, "Got hr %#x.\n", hr); + + media_sample = audiostream_allocate_sample(&source, test_data, 6); + hr = IMemInputPin_Receive(source.source.pMemInputPin, media_sample); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ref = IMediaSample_Release(media_sample); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + hr = IAudioStreamSample_CompletionStatus(stream_sample1, 0, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = 
IAudioStreamSample_Update(stream_sample1, SSUPDATE_ASYNC, NULL, NULL, 0); + ok(hr == MS_S_PENDING, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample2, SSUPDATE_ASYNC, NULL, NULL, 0); + ok(hr == MS_S_PENDING, "Got hr %#x.\n", hr); + + media_sample = audiostream_allocate_sample(&source, test_data, 12); + hr = IMemInputPin_Receive(source.source.pMemInputPin, media_sample); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ref = IMediaSample_Release(media_sample); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + hr = IAudioStreamSample_CompletionStatus(stream_sample1, 0, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_CompletionStatus(stream_sample2, 0, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample1, SSUPDATE_ASYNC, NULL, NULL, 0); + ok(hr == MS_S_PENDING, "Got hr %#x.\n", hr); + + hr = IPin_EndOfStream(pin); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_CompletionStatus(stream_sample1, 0, 0); + ok(hr == MS_S_ENDOFSTREAM, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_STOP); + ok(hr == S_OK, "Got hr %#x.\n", hr); + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_Update(stream_sample1, SSUPDATE_ASYNC, NULL, NULL, 0); + ok(hr == MS_S_PENDING, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_STOP); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAudioStreamSample_CompletionStatus(stream_sample1, 0, 0); + ok(hr == MS_S_PENDING, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_RUN); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + media_sample = audiostream_allocate_sample(&source, test_data, 6); + hr = IMemInputPin_Receive(source.source.pMemInputPin, media_sample); + ok(hr == S_OK, "Got hr %#x.\n", hr); + ref = IMediaSample_Release(media_sample); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + hr = IAudioStreamSample_CompletionStatus(stream_sample1, 0, 0); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + hr = IAMMultiMediaStream_SetState(mmstream, STREAMSTATE_STOP); + ok(hr == S_OK, "Got hr %#x.\n", hr); + + IGraphBuilder_Disconnect(graph, pin); + IGraphBuilder_Disconnect(graph, &source.source.pin.IPin_iface); + + ref = IAudioStreamSample_Release(stream_sample1); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IAudioStreamSample_Release(stream_sample2); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IAudioData_Release(audio_data1); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IAudioData_Release(audio_data2); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IAMMultiMediaStream_Release(mmstream); + ok(!ref, "Got outstanding refcount %d.\n", ref); + ref = IGraphBuilder_Release(graph); + ok(!ref, "Got outstanding refcount %d.\n", ref); + IPin_Release(pin); + IAudioMediaStream_Release(audio_stream); + ref = IMediaStream_Release(stream); + ok(!ref, "Got outstanding refcount %d.\n", ref); + + CloseHandle(event); +} + void test_mediastreamfilter_get_state(void) { IAMMultiMediaStream *mmstream = create_ammultimediastream(); @@ -3518,6 +3687,7 @@ START_TEST(amstream) test_audiostream_receive();
test_audiostreamsample_update(); + test_audiostreamsample_completion_status();
test_mediastreamfilter_get_state(); test_mediastreamfilter_stop_pause_run();