diff --git a/Source/WebCore/webaudio/AudioDestinationNode.h b/Source/WebCore/webaudio/AudioDestinationNode.h
index 4c21bb854..5913205f8 100644
--- a/Source/WebCore/webaudio/AudioDestinationNode.h
+++ b/Source/WebCore/webaudio/AudioDestinationNode.h
@@ -25,11 +25,9 @@
 #ifndef AudioDestinationNode_h
 #define AudioDestinationNode_h
 
-#include "AudioDestination.h"
+#include "AudioBuffer.h"
 #include "AudioNode.h"
 #include "AudioSourceProvider.h"
-#include <wtf/OwnPtr.h>
-#include <wtf/PassRefPtr.h>
 
 namespace WebCore {
 
@@ -38,32 +36,25 @@ class AudioContext;
 
 class AudioDestinationNode : public AudioNode, public AudioSourceProvider {
 public:
-    static PassRefPtr<AudioDestinationNode> create(AudioContext* context)
-    {
-        return adoptRef(new AudioDestinationNode(context));
-    }
-
+    AudioDestinationNode(AudioContext*, double sampleRate);
     virtual ~AudioDestinationNode();
 
     // AudioNode
     virtual void process(size_t) { }; // we're pulled by hardware so this is never called
     virtual void reset() { m_currentTime = 0.0; };
-    virtual void initialize();
-    virtual void uninitialize();
 
     // The audio hardware calls here periodically to get its input stream.
     virtual void provideInput(AudioBus*, size_t numberOfFrames);
 
     double currentTime() { return m_currentTime; }
 
-    double sampleRate() const { return m_destination->sampleRate(); }
+    virtual double sampleRate() const = 0;
 
-    unsigned numberOfChannels() const { return 2; } // FIXME: update when multi-channel (more than stereo) is supported
-
-private:
-    AudioDestinationNode(AudioContext*);
+    virtual unsigned numberOfChannels() const { return 2; } // FIXME: update when multi-channel (more than stereo) is supported
 
-    OwnPtr<AudioDestination> m_destination;
+    virtual void startRendering() = 0;
+
+protected:
 
     double m_currentTime;
 };
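In short, this change turns AudioDestinationNode from a concrete node that owned an AudioDestination into an abstract base class: the constructor now takes the sample rate explicitly, sampleRate() and startRendering() become pure virtual, numberOfChannels() becomes virtual, and m_currentTime becomes a protected member for subclasses to drive. The sketch below is illustrative only and is not part of the patch; the class name MyDestinationNode and its m_sampleRate member are hypothetical, and it only assumes what the diffed header above declares. It shows what a concrete destination subclass now has to provide.

// Hypothetical example (not from the patch): a minimal concrete destination
// written against the refactored AudioDestinationNode interface shown above.
#include "AudioDestinationNode.h"

namespace WebCore {

class MyDestinationNode : public AudioDestinationNode {
public:
    MyDestinationNode(AudioContext* context, double sampleRate)
        : AudioDestinationNode(context, sampleRate)
        , m_sampleRate(sampleRate)
    {
    }

    // The base class no longer owns an AudioDestination, so each subclass
    // reports its own sample rate.
    virtual double sampleRate() const { return m_sampleRate; }

    // Start the platform audio backend; once it is running, the backend
    // periodically pulls rendered audio through provideInput(), which the
    // base class exposes via its AudioSourceProvider interface.
    virtual void startRendering()
    {
        // Platform-specific start-up would go here.
    }

private:
    double m_sampleRate;
};

} // namespace WebCore

This split is also why process() stays a no-op in the base class: the destination node is pulled by the hardware (or an offline renderer) through provideInput(AudioBus*, numberOfFrames) rather than being scheduled like other AudioNodes, as the in-source comments above note.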