This source file includes the following definitions:
- m_startedRendering
- initialize
- uninitialize
- startRendering
- offlineRender
- notifyCompleteDispatch
- notifyComplete
#include "config.h"
#if ENABLE(WEB_AUDIO)
#include "modules/webaudio/OfflineAudioDestinationNode.h"
#include <algorithm>
#include "platform/audio/AudioBus.h"
#include "platform/audio/HRTFDatabaseLoader.h"
#include "modules/webaudio/AudioContext.h"
#include "platform/Task.h"
#include "public/platform/Platform.h"
#include "wtf/MainThread.h"
using namespace std;
namespace WebCore {
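
// Offline rendering pulls the audio graph in fixed-size quanta of 128
// sample-frames, the standard Web Audio render quantum.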
const size_t renderQuantumSize = 128;
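
// The render bus is sized to a single render quantum; the destination's
// sample rate and channel count are taken from the target AudioBuffer.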
OfflineAudioDestinationNode::OfflineAudioDestinationNode(AudioContext* context, AudioBuffer* renderTarget)
    : AudioDestinationNode(context, renderTarget->sampleRate())
    , m_renderTarget(renderTarget)
    , m_startedRendering(false)
{
    m_renderBus = AudioBus::create(renderTarget->numberOfChannels(), renderQuantumSize);
}
OfflineAudioDestinationNode::~OfflineAudioDestinationNode()
{
    uninitialize();
}
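
// initialize()/uninitialize() bracket the node's lifetime; uninitialize()
// also drops the background render thread if one was created.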
void OfflineAudioDestinationNode::initialize()
{
    if (isInitialized())
        return;

    AudioNode::initialize();
}
void OfflineAudioDestinationNode::uninitialize()
{
    if (!isInitialized())
        return;

    if (m_renderThread)
        m_renderThread.clear();

    AudioNode::uninitialize();
}
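
// Called on the main thread, presumably when the owning OfflineAudioContext
// begins rendering. The node ref()s itself so it stays alive for the duration
// of the render, then hands the actual work to a dedicated
// "Offline Audio Renderer" thread; the matching deref() happens in
// notifyCompleteDispatch() once rendering has finished.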
void OfflineAudioDestinationNode::startRendering()
{
    ASSERT(isMainThread());
    ASSERT(m_renderTarget.get());
    if (!m_renderTarget.get())
        return;

    if (!m_startedRendering) {
        m_startedRendering = true;
        ref();
        m_renderThread = adoptPtr(blink::Platform::current()->createThread("Offline Audio Renderer"));
        m_renderThread->postTask(new Task(WTF::bind(&OfflineAudioDestinationNode::offlineRender, this)));
    }
}
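
// Runs on the render thread. After some sanity checks, the graph is pulled
// one render quantum at a time into m_renderBus and the frames are copied
// into the render target buffer; once the whole buffer has been filled,
// completion is posted back to the main thread.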
void OfflineAudioDestinationNode::offlineRender()
{
    ASSERT(!isMainThread());
    ASSERT(m_renderBus.get());
    if (!m_renderBus.get())
        return;

    bool isAudioContextInitialized = context()->isInitialized();
    ASSERT(isAudioContextInitialized);
    if (!isAudioContextInitialized)
        return;

    bool channelsMatch = m_renderBus->numberOfChannels() == m_renderTarget->numberOfChannels();
    ASSERT(channelsMatch);
    if (!channelsMatch)
        return;

    bool isRenderBusAllocated = m_renderBus->length() >= renderQuantumSize;
    ASSERT(isRenderBusAllocated);
    if (!isRenderBusAllocated)
        return;

    size_t framesToProcess = m_renderTarget->length();
    unsigned numberOfChannels = m_renderTarget->numberOfChannels();

    unsigned n = 0;
    while (framesToProcess > 0) {
        render(0, m_renderBus.get(), renderQuantumSize);
        size_t framesAvailableToCopy = min(framesToProcess, renderQuantumSize);

        for (unsigned channelIndex = 0; channelIndex < numberOfChannels; ++channelIndex) {
            const float* source = m_renderBus->channel(channelIndex)->data();
            float* destination = m_renderTarget->getChannelData(channelIndex)->data();
            memcpy(destination + n, source, sizeof(float) * framesAvailableToCopy);
        }

        n += framesAvailableToCopy;
        framesToProcess -= framesAvailableToCopy;
    }

    callOnMainThread(notifyCompleteDispatch, this);
}
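
// Trampoline for callOnMainThread(): runs on the main thread and balances
// the ref() taken in startRendering().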
void OfflineAudioDestinationNode::notifyCompleteDispatch(void* userData)
{
    OfflineAudioDestinationNode* destinationNode = static_cast<OfflineAudioDestinationNode*>(userData);
    ASSERT(destinationNode);
    if (!destinationNode)
        return;

    destinationNode->notifyComplete();
    destinationNode->deref();
}
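
// Fires the context's completion event, notifying script that offline
// rendering is done.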
void OfflineAudioDestinationNode::notifyComplete()
{
    context()->fireCompletionEvent();
}
} // namespace WebCore

#endif // ENABLE(WEB_AUDIO)