github.com/mpc-hc/sanear.git
author    Alex Marsev <alex.marsev@gmail.com>    2016-01-24 08:59:28 +0300
committer Alex Marsev <alex.marsev@gmail.com>    2016-01-27 09:11:52 +0300
commit    4976a8c89b60f22c5dd4dce4a9eb8201606095f6 (patch)
tree      23d7571848149ad700bfff724c5a9c62fa3b75ef
parent    817ef3517ede551872dd4748f05c7c3958c933f4 (diff)
Add event mode audio device support
And enable it for exclusive and realtime paths.
-rw-r--r--  sanear.vcxproj              |   2
-rw-r--r--  sanear.vcxproj.filters      |   6
-rw-r--r--  src/AudioDevice.h           |   1
-rw-r--r--  src/AudioDeviceEvent.cpp    | 317
-rw-r--r--  src/AudioDeviceEvent.h      |  57
-rw-r--r--  src/AudioDeviceManager.cpp  |  59
-rw-r--r--  src/AudioDevicePush.cpp     |   5
-rw-r--r--  src/AudioRenderer.cpp       |   4
8 files changed, 442 insertions(+), 9 deletions(-)
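
This commit adds an event-driven WASAPI playback path (AudioDeviceEvent) alongside the existing push-mode path (AudioDevicePush) and selects it whenever the stream is exclusive or realtime. For orientation, the sketch below shows the generic event-driven render loop the new class is modeled on; it is a simplified illustration with invented names (RenderLoopSketch), not code from this commit, and error handling is omitted.

    // Canonical event-driven WASAPI loop: the device signals an event each time
    // it wants more data, and the feeder thread refills the buffer in response.
    // Assumes 'client' was initialized with AUDCLNT_STREAMFLAGS_EVENTCALLBACK.
    #include <windows.h>
    #include <audioclient.h>
    #include <atomic>

    void RenderLoopSketch(IAudioClient* client, IAudioRenderClient* render,
                          UINT32 frameSize, const std::atomic<bool>& stop)
    {
        HANDLE wake = CreateEventW(nullptr, FALSE, FALSE, nullptr);
        client->SetEventHandle(wake);

        UINT32 bufferFrames = 0;
        client->GetBufferSize(&bufferFrames);

        client->Start();
        while (!stop)
        {
            WaitForSingleObject(wake, INFINITE);    // wait for the device's "feed me" signal

            UINT32 padding = 0;
            client->GetCurrentPadding(&padding);    // shared mode; exclusive mode rewrites the whole buffer
            UINT32 writable = bufferFrames - padding;

            BYTE* data = nullptr;
            if (SUCCEEDED(render->GetBuffer(writable, &data)))
            {
                ZeroMemory(data, writable * frameSize);  // real code copies decoded audio here
                render->ReleaseBuffer(writable, 0);
            }
        }
        client->Stop();
        CloseHandle(wake);
    }

In the diff below, AudioDeviceEvent::EventFeed() plays the role of this loop, with extra bookkeeping for queued starts, end-of-stream silence, and error propagation.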
diff --git a/sanear.vcxproj b/sanear.vcxproj
index c4b6d1b..78f69f1 100644
--- a/sanear.vcxproj
+++ b/sanear.vcxproj
@@ -99,6 +99,7 @@
</ItemDefinitionGroup>
<ItemGroup>
<ClInclude Include="IGuidedReclock.h" />
+ <ClInclude Include="src\AudioDeviceEvent.h" />
<ClInclude Include="src\AudioDevicePush.h" />
<ClInclude Include="src\AudioDevice.h" />
<ClInclude Include="src\AudioDeviceManager.h" />
@@ -128,6 +129,7 @@
<ClInclude Include="src\DspRate.h" />
</ItemGroup>
<ItemGroup>
+ <ClCompile Include="src\AudioDeviceEvent.cpp" />
<ClCompile Include="src\AudioDevicePush.cpp" />
<ClCompile Include="src\AudioDeviceManager.cpp" />
<ClCompile Include="src\DspBalance.cpp" />
diff --git a/sanear.vcxproj.filters b/sanear.vcxproj.filters
index 2c4a1de..47e99ea 100644
--- a/sanear.vcxproj.filters
+++ b/sanear.vcxproj.filters
@@ -65,6 +65,9 @@
<ClCompile Include="src\AudioDevicePush.cpp">
<Filter>Device</Filter>
</ClCompile>
+ <ClCompile Include="src\AudioDeviceEvent.cpp">
+ <Filter>Device</Filter>
+ </ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="src\DspMatrix.h">
@@ -145,6 +148,9 @@
<ClInclude Include="src\AudioDevice.h">
<Filter>Device</Filter>
</ClInclude>
+ <ClInclude Include="src\AudioDeviceEvent.h">
+ <Filter>Device</Filter>
+ </ClInclude>
</ItemGroup>
<ItemGroup>
<Filter Include="DirectShow">
diff --git a/src/AudioDevice.h b/src/AudioDevice.h
index f0a565b..80b0956 100644
--- a/src/AudioDevice.h
+++ b/src/AudioDevice.h
@@ -24,6 +24,7 @@ namespace SaneAudioRenderer
REFERENCE_TIME latency;
bool exclusive;
bool bitstream;
+ bool eventMode;
bool realtime;
bool ignoredSystemChannelMixer;
diff --git a/src/AudioDeviceEvent.cpp b/src/AudioDeviceEvent.cpp
new file mode 100644
index 0000000..d5475f3
--- /dev/null
+++ b/src/AudioDeviceEvent.cpp
@@ -0,0 +1,317 @@
+#include "pch.h"
+#include "AudioDeviceEvent.h"
+
+namespace SaneAudioRenderer
+{
+ namespace
+ {
+ template <class T>
+ bool IsLastInstance(T& smartPointer)
+ {
+ bool ret = (smartPointer.GetInterfacePtr()->AddRef() == 2);
+ smartPointer.GetInterfacePtr()->Release();
+ return ret;
+ }
+ }
+
+ AudioDeviceEvent::AudioDeviceEvent(std::shared_ptr<AudioDeviceBackend> backend)
+ {
+ assert(backend);
+ assert(backend->eventMode);
+ m_backend = backend;
+
+ if (static_cast<HANDLE>(m_wake) == NULL)
+ throw E_OUTOFMEMORY;
+
+ ThrowIfFailed(backend->audioClient->SetEventHandle(m_wake));
+
+ m_thread = std::thread(std::bind(&AudioDeviceEvent::EventFeed, this));
+ }
+
+ AudioDeviceEvent::~AudioDeviceEvent()
+ {
+ m_exit = true;
+ m_wake.Set();
+
+ if (m_thread.joinable())
+ m_thread.join();
+
+ auto areLastInstances = [this]
+ {
+ if (!m_backend.unique())
+ return false;
+
+ if (m_backend->audioClock && !IsLastInstance(m_backend->audioClock))
+ return false;
+
+ m_backend->audioClock = nullptr;
+
+ if (m_backend->audioRenderClient && !IsLastInstance(m_backend->audioRenderClient))
+ return false;
+
+ m_backend->audioRenderClient = nullptr;
+
+ if (m_backend->audioClient && !IsLastInstance(m_backend->audioClient))
+ return false;
+
+ return true;
+ };
+ assert(areLastInstances());
+
+ m_backend = nullptr;
+ }
+
+ void AudioDeviceEvent::Push(DspChunk& chunk, CAMEvent* pFilledEvent)
+ {
+ assert(!m_endOfStream);
+
+ if (m_error)
+ throw E_FAIL;
+
+ PushChunkToBuffer(chunk);
+
+ if (pFilledEvent && !chunk.IsEmpty())
+ pFilledEvent->Set();
+ }
+
+ REFERENCE_TIME AudioDeviceEvent::Finish(CAMEvent* pFilledEvent)
+ {
+ if (m_error)
+ throw E_FAIL;
+
+ if (!m_endOfStream)
+ {
+ DebugOut("AudioDeviceEvent finish");
+ m_endOfStream = true;
+ m_endOfStreamPos = GetEnd();
+ }
+
+ if (pFilledEvent)
+ pFilledEvent->Set();
+
+ return m_endOfStreamPos - GetPosition();
+ }
+
+ int64_t AudioDeviceEvent::GetPosition()
+ {
+ UINT64 deviceClockFrequency, deviceClockPosition;
+ ThrowIfFailed(m_backend->audioClock->GetFrequency(&deviceClockFrequency));
+ ThrowIfFailed(m_backend->audioClock->GetPosition(&deviceClockPosition, nullptr));
+
+ return llMulDiv(deviceClockPosition, OneSecond, deviceClockFrequency, 0);
+ }
+
+ int64_t AudioDeviceEvent::GetEnd()
+ {
+ return llMulDiv(m_receivedFrames, OneSecond, m_backend->waveFormat->nSamplesPerSec, 0);
+ }
+
+ int64_t AudioDeviceEvent::GetSilence()
+ {
+ return llMulDiv(m_silenceFrames, OneSecond, m_backend->waveFormat->nSamplesPerSec, 0);
+ }
+
+ void AudioDeviceEvent::Start()
+ {
+ bool delegateStart = false;
+
+ {
+ CAutoLock threadLock(&m_threadMutex);
+
+ if (m_sentFrames == 0)
+ {
+ m_queuedStart = true;
+ delegateStart = true;
+ }
+ }
+
+ if (delegateStart)
+ {
+ DebugOut("AudioDeviceEvent queue start");
+ m_wake.Set();
+ }
+ else
+ {
+ DebugOut("AudioDeviceEvent start");
+ m_backend->audioClient->Start();
+ }
+ }
+
+ void AudioDeviceEvent::Stop()
+ {
+ DebugOut("AudioDeviceEvent stop");
+
+ {
+ CAutoLock threadLock(&m_threadMutex);
+ m_queuedStart = false;
+ }
+
+ m_backend->audioClient->Stop();
+ }
+
+ void AudioDeviceEvent::Reset()
+ {
+ DebugOut("AudioDeviceEvent reset");
+
+ {
+ CAutoLock threadLock(&m_threadMutex);
+
+ m_backend->audioClient->Reset();
+
+ m_endOfStream = false;
+ m_endOfStreamPos = 0;
+
+ m_receivedFrames = 0;
+ m_sentFrames = 0;
+ m_silenceFrames = 0;
+
+ {
+ CAutoLock bufferLock(&m_bufferMutex);
+ m_bufferFrames = 0;
+ m_buffer.clear();
+ }
+ }
+ }
+
+ void AudioDeviceEvent::EventFeed()
+ {
+ SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_TIME_CRITICAL);
+
+ while (!m_exit)
+ {
+ {
+ CAutoLock threadLock(&m_threadMutex);
+
+ if (!m_error)
+ {
+ try
+ {
+ PushBufferToDevice();
+
+ if (m_queuedStart)
+ {
+ DebugOut("AudioDeviceEvent start");
+ m_backend->audioClient->Start();
+ m_queuedStart = false;
+ }
+ }
+ catch (HRESULT)
+ {
+ m_error = true;
+ }
+ }
+ }
+
+ m_wake.Wait();
+ }
+ }
+
+ void AudioDeviceEvent::PushBufferToDevice()
+ {
+ UINT32 deviceFrames;
+ ThrowIfFailed(m_backend->audioClient->GetBufferSize(&deviceFrames));
+
+ if (!m_backend->exclusive)
+ {
+ UINT32 bufferPadding;
+ ThrowIfFailed(m_backend->audioClient->GetCurrentPadding(&bufferPadding));
+ deviceFrames -= bufferPadding;
+ }
+
+ if (deviceFrames == 0)
+ return;
+
+ CAutoLock bufferLock(&m_bufferMutex);
+
+ if (deviceFrames > m_bufferFrames && !m_endOfStream && !m_backend->realtime)
+ return;
+
+ BYTE* deviceBuffer;
+ ThrowIfFailed(m_backend->audioRenderClient->GetBuffer(deviceFrames, &deviceBuffer));
+
+ const size_t frameSize = m_backend->waveFormat->wBitsPerSample / 8 * m_backend->waveFormat->nChannels;
+
+ for (UINT32 doneFrames = 0;;)
+ {
+ if (m_buffer.empty())
+ {
+ assert(m_endOfStream || m_backend->realtime);
+ UINT32 doFrames = deviceFrames - doneFrames;
+
+ if (doneFrames == 0)
+ {
+ ThrowIfFailed(m_backend->audioRenderClient->ReleaseBuffer(deviceFrames, AUDCLNT_BUFFERFLAGS_SILENT));
+ }
+ else
+ {
+ ZeroMemory(deviceBuffer + doneFrames * frameSize, doFrames * frameSize);
+ ThrowIfFailed(m_backend->audioRenderClient->ReleaseBuffer(deviceFrames, 0));
+ }
+
+ DebugOut("AudioDeviceEvent silence", doFrames * 1000. / m_backend->waveFormat->nSamplesPerSec, "ms");
+
+ m_silenceFrames += doFrames;
+
+ break;
+ }
+ else
+ {
+ DspChunk& chunk = m_buffer.front();
+ UINT32 doFrames = std::min(deviceFrames - doneFrames, (UINT32)chunk.GetFrameCount());
+ assert(chunk.GetFrameSize() == frameSize);
+ memcpy(deviceBuffer + doneFrames * frameSize, chunk.GetData(), doFrames * frameSize);
+
+ doneFrames += doFrames;
+ m_bufferFrames -= doFrames;
+
+ if (deviceFrames == doneFrames)
+ {
+ ThrowIfFailed(m_backend->audioRenderClient->ReleaseBuffer(deviceFrames, 0));
+
+ chunk.ShrinkHead(chunk.GetFrameCount() - doFrames);
+ if (chunk.IsEmpty())
+ m_buffer.pop_front();
+
+ break;
+ }
+
+ m_buffer.pop_front();
+ }
+ }
+
+ m_sentFrames += deviceFrames;
+ }
+
+ void AudioDeviceEvent::PushChunkToBuffer(DspChunk& chunk)
+ {
+ if (chunk.IsEmpty())
+ return;
+
+ try
+ {
+ // Don't deny the allocator its right to reuse
+ // IMediaSample while the chunk is hanging in the buffer.
+ chunk.FreeMediaSample();
+
+ size_t targetFrames = (size_t)llMulDiv(m_backend->bufferDuration,
+ m_backend->waveFormat->nSamplesPerSec, 1000, 0);
+
+ CAutoLock bufferLock(&m_bufferMutex);
+
+ if (m_bufferFrames > targetFrames)
+ return;
+
+ size_t chunkFrames = chunk.GetFrameCount();
+
+ m_bufferFrames += chunkFrames;
+ m_buffer.emplace_back(std::move(chunk));
+
+ m_receivedFrames += chunkFrames;
+ }
+ catch (std::bad_alloc&)
+ {
+ m_error = true;
+ throw E_OUTOFMEMORY;
+ }
+ }
+}
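
PushBufferToDevice() above computes the writable frame count differently per mode: an exclusive event-driven stream rewrites the entire endpoint buffer on every event, while a shared stream may only fill the part the engine has not yet consumed, as reported by GetCurrentPadding(). A hypothetical standalone helper expressing the same calculation (illustrative only, not part of sanear):

    #include <audioclient.h>

    // Frames that may be written into the endpoint buffer right now.
    UINT32 WritableFramesSketch(IAudioClient* client, bool exclusive)
    {
        UINT32 bufferFrames = 0;
        if (FAILED(client->GetBufferSize(&bufferFrames)))
            return 0;

        if (exclusive)
            return bufferFrames;           // whole buffer per event

        UINT32 padding = 0;
        if (FAILED(client->GetCurrentPadding(&padding)))
            return 0;

        return bufferFrames - padding;     // only the free part of the shared buffer
    }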
diff --git a/src/AudioDeviceEvent.h b/src/AudioDeviceEvent.h
new file mode 100644
index 0000000..e87422e
--- /dev/null
+++ b/src/AudioDeviceEvent.h
@@ -0,0 +1,57 @@
+#pragma once
+
+#include "AudioDevice.h"
+#include "DspChunk.h"
+#include "DspFormat.h"
+
+namespace SaneAudioRenderer
+{
+ class AudioDeviceEvent final
+ : public AudioDevice
+ {
+ public:
+
+ AudioDeviceEvent(std::shared_ptr<AudioDeviceBackend> backend);
+ AudioDeviceEvent(const AudioDeviceEvent&) = delete;
+ AudioDeviceEvent& operator=(const AudioDeviceEvent&) = delete;
+ ~AudioDeviceEvent();
+
+ void Push(DspChunk& chunk, CAMEvent* pFilledEvent) override;
+ REFERENCE_TIME Finish(CAMEvent* pFilledEvent) override;
+
+ int64_t GetPosition() override;
+ int64_t GetEnd() override;
+ int64_t GetSilence() override;
+
+ void Start() override;
+ void Stop() override;
+ void Reset() override;
+
+ private:
+
+ void EventFeed();
+
+ void PushBufferToDevice();
+ void PushChunkToBuffer(DspChunk& chunk);
+
+ std::atomic<bool> m_endOfStream = false;
+ int64_t m_endOfStreamPos = 0;
+
+ std::thread m_thread;
+ CCritSec m_threadMutex;
+
+ CAMEvent m_wake;
+ std::atomic<bool> m_exit = false;
+ std::atomic<bool> m_error = false;
+
+ uint64_t m_sentFrames = 0;
+ std::atomic<uint64_t> m_receivedFrames = 0;
+ std::atomic<uint64_t> m_silenceFrames = 0;
+
+ CCritSec m_bufferMutex;
+ std::deque<DspChunk> m_buffer;
+ size_t m_bufferFrames = 0;
+
+ bool m_queuedStart = false;
+ };
+}
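
The header declares the same AudioDevice interface that AudioRenderer already drives for the push path, so the renderer does not care which implementation it gets. A hypothetical caller-side sketch of that lifecycle (FeedSketch is invented for illustration; the real driver is AudioRenderer):

    #include "AudioDevice.h"
    #include "DspChunk.h"

    namespace SaneAudioRenderer
    {
        void FeedSketch(AudioDevice& device, DspChunk& chunk, CAMEvent* pFilledEvent)
        {
            device.Push(chunk, pFilledEvent);    // queue decoded audio
            device.Start();                      // start (or, in event mode, queue) playback

            // At end of stream, Finish() reports how much audio is still buffered.
            REFERENCE_TIME remaining = device.Finish(pFilledEvent);
            // ... wait roughly 'remaining' before stopping ...

            device.Stop();
            device.Reset();                      // e.g. on a seek: drop buffered data
        }
    }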
diff --git a/src/AudioDeviceManager.cpp b/src/AudioDeviceManager.cpp
index c605bfc..f8f3d48 100644
--- a/src/AudioDeviceManager.cpp
+++ b/src/AudioDeviceManager.cpp
@@ -1,6 +1,7 @@
#include "pch.h"
#include "AudioDeviceManager.h"
+#include "AudioDeviceEvent.h"
#include "AudioDevicePush.h"
#include "DspMatrix.h"
@@ -335,11 +336,56 @@ namespace SaneAudioRenderer
}
}
- ThrowIfFailed(backend->audioClient->Initialize(
- backend->exclusive ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
- AUDCLNT_STREAMFLAGS_NOPERSIST,
- OneMillisecond * backend->bufferDuration,
- 0, &(*backend->waveFormat), nullptr));
+ backend->eventMode = realtime || backend->exclusive;
+
+ {
+ AUDCLNT_SHAREMODE mode = backend->exclusive ? AUDCLNT_SHAREMODE_EXCLUSIVE :
+ AUDCLNT_SHAREMODE_SHARED;
+
+ DWORD flags = AUDCLNT_STREAMFLAGS_NOPERSIST;
+ if (backend->eventMode)
+ flags |= AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
+
+ REFERENCE_TIME defaultPeriod;
+ REFERENCE_TIME minimumPeriod;
+ ThrowIfFailed(backend->audioClient->GetDevicePeriod(&defaultPeriod, &minimumPeriod));
+
+ REFERENCE_TIME bufferDuration = OneMillisecond * backend->bufferDuration;
+ if (backend->eventMode)
+ bufferDuration = realtime ? minimumPeriod : defaultPeriod;
+
+ REFERENCE_TIME periodicy = 0;
+ if (backend->exclusive && backend->eventMode)
+ periodicy = bufferDuration;
+
+ // Initialize our audio client.
+ HRESULT result = backend->audioClient->Initialize(mode, flags, bufferDuration,
+ periodicy, &(*backend->waveFormat), nullptr);
+
+ // The requested periodicity may not have met the alignment requirements of the audio device.
+ if (result == AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED &&
+ backend->exclusive && backend->eventMode)
+ {
+ // Ask the audio driver for closest supported periodicity.
+ UINT32 bufferFrames;
+ ThrowIfFailed(backend->audioClient->GetBufferSize(&bufferFrames));
+
+ // Recreate our audio client (MSDN suggests it).
+ backend->audioClient = nullptr;
+ CreateAudioClient(pEnumerator, *backend);
+ if (!backend->audioClient)
+ return E_FAIL;
+
+ bufferDuration = llMulDiv(bufferFrames, OneSecond, backend->waveFormat->nSamplesPerSec, 0);
+ periodicy = bufferDuration;
+
+ // Initialize our audio client again with the right periodicity.
+ result = backend->audioClient->Initialize(mode, flags, bufferDuration,
+ periodicy, &(*backend->waveFormat), nullptr);
+ }
+
+ ThrowIfFailed(result);
+ }
ThrowIfFailed(backend->audioClient->GetService(IID_PPV_ARGS(&backend->audioRenderClient)));
@@ -514,6 +560,9 @@ namespace SaneAudioRenderer
try
{
+ if (backend->eventMode)
+ return std::unique_ptr<AudioDevice>(new AudioDeviceEvent(backend));
+
return std::unique_ptr<AudioDevice>(new AudioDevicePush(backend));
}
catch (std::bad_alloc&)
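
The AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED branch above follows the documented recovery recipe for exclusive event-driven streams: read the device-aligned frame count with GetBufferSize(), convert it back to a 100-ns period, recreate the audio client, and call Initialize() again with that period used as both buffer duration and periodicity. The conversion, done with llMulDiv in the diff, looks like this in illustrative form:

    #include <audioclient.h>

    // Device-aligned frame count -> REFERENCE_TIME period (100-ns units).
    REFERENCE_TIME AlignedPeriodSketch(UINT32 alignedFrames, DWORD samplesPerSec)
    {
        // One second is 10,000,000 REFERENCE_TIME units of 100 ns each.
        return (REFERENCE_TIME)(10000.0 * 1000 * alignedFrames / samplesPerSec + 0.5);
    }

For example, a 480-frame aligned buffer at 48 kHz gives 100,000 units, i.e. a 10 ms period.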
diff --git a/src/AudioDevicePush.cpp b/src/AudioDevicePush.cpp
index e9e2a4b..8b75032 100644
--- a/src/AudioDevicePush.cpp
+++ b/src/AudioDevicePush.cpp
@@ -18,6 +18,7 @@ namespace SaneAudioRenderer
: m_woken(TRUE/*manual reset*/)
{
assert(backend);
+ assert(!backend->eventMode);
m_backend = backend;
if (static_cast<HANDLE>(m_wake) == NULL ||
@@ -347,6 +348,10 @@ namespace SaneAudioRenderer
try
{
+ // Don't deny the allocator its right to reuse
+ // IMediaSample while the chunk is hanging in the buffer.
+ chunk.FreeMediaSample();
+
CAutoLock lock(&m_bufferMutex);
if (m_bufferFrameCount > m_backend->waveFormat->nSamplesPerSec / 4) // 250ms
diff --git a/src/AudioRenderer.cpp b/src/AudioRenderer.cpp
index f6db707..281feb0 100644
--- a/src/AudioRenderer.cpp
+++ b/src/AudioRenderer.cpp
@@ -115,10 +115,6 @@ namespace SaneAudioRenderer
m_guidedReclockActive = true;
}
}
-
- // Don't deny the allocator its right to reuse IMediaSample while the chunk is hanging in the buffer.
- if (m_device && m_device->IsRealtime())
- chunk.FreeMediaSample();
}
catch (HRESULT)
{