1/*
2 *  Copyright (C) 2011, 2012 Igalia S.L
3 *
4 *  This library is free software; you can redistribute it and/or
5 *  modify it under the terms of the GNU Lesser General Public
6 *  License as published by the Free Software Foundation; either
7 *  version 2 of the License, or (at your option) any later version.
8 *
9 *  This library is distributed in the hope that it will be useful,
10 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
11 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12 *  Lesser General Public License for more details.
13 *
14 *  You should have received a copy of the GNU Lesser General Public
15 *  License along with this library; if not, write to the Free Software
16 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
17 */
18
19#include "config.h"
20
21#if ENABLE(WEB_AUDIO)
22
23#include "AudioDestinationGStreamer.h"
24
25#include "AudioChannel.h"
26#include "AudioSourceProvider.h"
27#include <wtf/gobject/GOwnPtr.h>
28#include "GRefPtrGStreamer.h"
29#include "GStreamerVersioning.h"
30#include "Logging.h"
31#include "WebKitWebAudioSourceGStreamer.h"
32#include <gst/gst.h>
33#include <gst/pbutils/pbutils.h>
34
35namespace WebCore {
36
// Size of the AudioBus for playback. The webkitwebaudiosrc element
// needs to handle this number of frames per cycle as well.
// NOTE(review): 128 appears to match the Web Audio rendering quantum — confirm
// against the webkitwebaudiosrc implementation before changing.
const unsigned framesToPull = 128;
40
41gboolean messageCallback(GstBus*, GstMessage* message, AudioDestinationGStreamer* destination)
42{
43    return destination->handleMessage(message);
44}
45
46PassOwnPtr<AudioDestination> AudioDestination::create(AudioIOCallback& callback, const String&, unsigned numberOfInputChannels, unsigned numberOfOutputChannels, float sampleRate)
47{
48    // FIXME: make use of inputDeviceId as appropriate.
49
50    // FIXME: Add support for local/live audio input.
51    if (numberOfInputChannels)
52        LOG(Media, "AudioDestination::create(%u, %u, %f) - unhandled input channels", numberOfInputChannels, numberOfOutputChannels, sampleRate);
53
54    // FIXME: Add support for multi-channel (> stereo) output.
55    if (numberOfOutputChannels != 2)
56        LOG(Media, "AudioDestination::create(%u, %u, %f) - unhandled output channels", numberOfInputChannels, numberOfOutputChannels, sampleRate);
57
58    return adoptPtr(new AudioDestinationGStreamer(callback, sampleRate));
59}
60
61float AudioDestination::hardwareSampleRate()
62{
63    return 44100;
64}
65
// Maximum output channel count supported by the audio hardware.
// Returns 0, i.e. no multi-channel support is advertised by this backend.
unsigned long AudioDestination::maxChannelCount()
{
    // FIXME: query the default audio hardware device to return the actual number
    // of channels of the device. Also see corresponding FIXME in create().
    return 0;
}
72
#ifndef GST_API_VERSION_1
// GStreamer 0.10 only: wavparse exposes its source pad dynamically, so the
// tail of the pipeline is attached from this "pad-added" signal handler
// (connected in the constructor). On 1.0 the pad is static and the
// constructor finishes the pipeline directly.
static void onGStreamerWavparsePadAddedCallback(GstElement*, GstPad* pad, AudioDestinationGStreamer* destination)
{
    destination->finishBuildingPipelineAfterWavParserPadReady(pad);
}
#endif
79
// Builds the playback pipeline head: webkitwebaudiosrc ! wavparse. The tail
// (audioconvert ! autoaudiosink) is attached by
// finishBuildingPipelineAfterWavParserPadReady() — immediately on GStreamer
// 1.0, or from the wavparse "pad-added" callback on 0.10.
AudioDestinationGStreamer::AudioDestinationGStreamer(AudioIOCallback& callback, float sampleRate)
    : m_callback(callback)
    , m_renderBus(AudioBus::create(2, framesToPull, false))
    , m_sampleRate(sampleRate)
    , m_isPlaying(false)
{
    m_pipeline = gst_pipeline_new("play");
    GRefPtr<GstBus> bus = webkitGstPipelineGetBus(GST_PIPELINE(m_pipeline));
    ASSERT(bus);
    // Route bus messages (errors/warnings) to handleMessage() via the
    // file-local messageCallback trampoline; disconnected in the destructor.
    gst_bus_add_signal_watch(bus.get());
    g_signal_connect(bus.get(), "message", G_CALLBACK(messageCallback), this);

    // The source element is configured with the render bus, the callback acting
    // as provider, and the per-cycle frame count (see framesToPull above).
    GstElement* webkitAudioSrc = reinterpret_cast<GstElement*>(g_object_new(WEBKIT_TYPE_WEB_AUDIO_SRC,
                                                                            "rate", sampleRate,
                                                                            "bus", m_renderBus.get(),
                                                                            "provider", &m_callback,
                                                                            "frames", framesToPull, NULL));

    GstElement* wavParser = gst_element_factory_make("wavparse", 0);

    // If wavparse is missing (plugins-good not installed) the destination is
    // unusable; start()/stop() check this flag and bail out.
    m_wavParserAvailable = wavParser;
    ASSERT_WITH_MESSAGE(m_wavParserAvailable, "Failed to create GStreamer wavparse element");
    if (!m_wavParserAvailable)
        return;

#ifndef GST_API_VERSION_1
    // 0.10: wavparse's source pad appears dynamically; finish the pipeline
    // once "pad-added" fires.
    g_signal_connect(wavParser, "pad-added", G_CALLBACK(onGStreamerWavparsePadAddedCallback), this);
#endif
    gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, wavParser, NULL);
    gst_element_link_pads_full(webkitAudioSrc, "src", wavParser, "sink", GST_PAD_LINK_CHECK_NOTHING);

#ifdef GST_API_VERSION_1
    // 1.0: the source pad is static, so the pipeline tail can be built now.
    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(wavParser, "src"));
    finishBuildingPipelineAfterWavParserPadReady(srcPad.get());
#endif
}
116
AudioDestinationGStreamer::~AudioDestinationGStreamer()
{
    // Disconnect the bus watch first so messageCallback cannot fire into a
    // half-destructed object while the pipeline is being torn down.
    GRefPtr<GstBus> bus = webkitGstPipelineGetBus(GST_PIPELINE(m_pipeline));
    ASSERT(bus);
    g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(messageCallback), this);
    gst_bus_remove_signal_watch(bus.get());

    // Drop to NULL before unref so all elements release their resources.
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    gst_object_unref(m_pipeline);
}
127
128void AudioDestinationGStreamer::finishBuildingPipelineAfterWavParserPadReady(GstPad* pad)
129{
130    ASSERT(m_wavParserAvailable);
131
132    GRefPtr<GstElement> audioSink = gst_element_factory_make("autoaudiosink", 0);
133    m_audioSinkAvailable = audioSink;
134
135    if (!audioSink) {
136        LOG_ERROR("Failed to create GStreamer autoaudiosink element");
137        return;
138    }
139
140    // Autoaudiosink does the real sink detection in the GST_STATE_NULL->READY transition
141    // so it's best to roll it to READY as soon as possible to ensure the underlying platform
142    // audiosink was loaded correctly.
143    GstStateChangeReturn stateChangeReturn = gst_element_set_state(audioSink.get(), GST_STATE_READY);
144    if (stateChangeReturn == GST_STATE_CHANGE_FAILURE) {
145        LOG_ERROR("Failed to change autoaudiosink element state");
146        gst_element_set_state(audioSink.get(), GST_STATE_NULL);
147        m_audioSinkAvailable = false;
148        return;
149    }
150
151    GstElement* audioConvert = gst_element_factory_make("audioconvert", 0);
152    gst_bin_add_many(GST_BIN(m_pipeline), audioConvert, audioSink.get(), NULL);
153
154    // Link wavparse's src pad to audioconvert sink pad.
155    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(audioConvert, "sink"));
156    gst_pad_link_full(pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);
157
158    // Link audioconvert to audiosink and roll states.
159    gst_element_link_pads_full(audioConvert, "src", audioSink.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);
160    gst_element_sync_state_with_parent(audioConvert);
161    gst_element_sync_state_with_parent(audioSink.leakRef());
162}
163
164gboolean AudioDestinationGStreamer::handleMessage(GstMessage* message)
165{
166    GOwnPtr<GError> error;
167    GOwnPtr<gchar> debug;
168
169    switch (GST_MESSAGE_TYPE(message)) {
170    case GST_MESSAGE_WARNING:
171        gst_message_parse_warning(message, &error.outPtr(), &debug.outPtr());
172        g_warning("Warning: %d, %s. Debug output: %s", error->code,  error->message, debug.get());
173        break;
174    case GST_MESSAGE_ERROR:
175        gst_message_parse_error(message, &error.outPtr(), &debug.outPtr());
176        g_warning("Error: %d, %s. Debug output: %s", error->code,  error->message, debug.get());
177        gst_element_set_state(m_pipeline, GST_STATE_NULL);
178        m_isPlaying = false;
179        break;
180    default:
181        break;
182    }
183    return TRUE;
184}
185
186void AudioDestinationGStreamer::start()
187{
188    ASSERT(m_wavParserAvailable);
189    if (!m_wavParserAvailable)
190        return;
191
192    gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
193    m_isPlaying = true;
194}
195
196void AudioDestinationGStreamer::stop()
197{
198    ASSERT(m_wavParserAvailable && m_audioSinkAvailable);
199    if (!m_wavParserAvailable || !m_audioSinkAvailable)
200        return;
201
202    gst_element_set_state(m_pipeline, GST_STATE_PAUSED);
203    m_isPlaying = false;
204}
205
206} // namespace WebCore
207
208#endif // ENABLE(WEB_AUDIO)
209