1/*
2 *  Copyright (C) 2011, 2012 Igalia S.L
3 *
4 *  This library is free software; you can redistribute it and/or
5 *  modify it under the terms of the GNU Lesser General Public
6 *  License as published by the Free Software Foundation; either
7 *  version 2 of the License, or (at your option) any later version.
8 *
9 *  This library is distributed in the hope that it will be useful,
10 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
11 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12 *  Lesser General Public License for more details.
13 *
14 *  You should have received a copy of the GNU Lesser General Public
15 *  License along with this library; if not, write to the Free Software
16 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
17 */
18
19#include "config.h"
20
21#include "WebKitWebAudioSourceGStreamer.h"
22
23#if ENABLE(WEB_AUDIO) && USE(GSTREAMER)
24
25#include "AudioBus.h"
26#include "AudioIOCallback.h"
27#include <wtf/gobject/GOwnPtr.h>
28#include "GRefPtrGStreamer.h"
29#include "GStreamerVersioning.h"
30#ifdef GST_API_VERSION_1
31#include <gst/audio/audio.h>
32#else
33#include <gst/audio/multichannel.h>
34#endif
35#include <gst/pbutils/pbutils.h>
36
37// GStaticRecMutex is deprecated in Glib, but required in GStreamer 0.10
38#if (COMPILER(GCC) && GCC_VERSION_AT_LEAST(4, 6, 0) && !defined(GST_API_VERSION_1))
39#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
40#endif
41
42using namespace WebCore;
43
typedef struct _WebKitWebAudioSrcClass   WebKitWebAudioSrcClass;
typedef struct _WebKitWebAudioSourcePrivate WebKitWebAudioSourcePrivate;

// GstBin subclass that exposes audio rendered by WebCore's WebAudio engine as
// a single always-present src pad carrying WAV-encoded data.
struct _WebKitWebAudioSrc {
    GstBin parent;

    WebKitWebAudioSourcePrivate* priv;
};

struct _WebKitWebAudioSrcClass {
    GstBinClass parentClass;
};
56
57#define WEBKIT_WEB_AUDIO_SRC_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_WEBAUDIO_SRC, WebKitWebAudioSourcePrivate))
58struct _WebKitWebAudioSourcePrivate {
59    gfloat sampleRate;
60    AudioBus* bus;
61    AudioIOCallback* provider;
62    guint framesToPull;
63
64    GRefPtr<GstElement> interleave;
65    GRefPtr<GstElement> wavEncoder;
66
67    GRefPtr<GstTask> task;
68#ifdef GST_API_VERSION_1
69    GRecMutex mutex;
70#else
71    GStaticRecMutex mutex;
72#endif
73
74    GSList* pads; // List of queue sink pads. One queue for each planar audio channel.
75    GstPad* sourcePad; // src pad of the element, interleaved wav data is pushed to it.
76};
77
// GObject property IDs; all four are construct-only (see class_init).
enum {
    PROP_RATE = 1,
    PROP_BUS,
    PROP_PROVIDER,
    PROP_FRAMES
};
84
// Template for the element's only visible pad: always present, produces the
// WAV stream generated by the internal wavenc element.
static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src",
                                                                  GST_PAD_SRC,
                                                                  GST_PAD_ALWAYS,
                                                                  GST_STATIC_CAPS("audio/x-wav"));

GST_DEBUG_CATEGORY_STATIC(webkit_web_audio_src_debug);
#define GST_CAT_DEFAULT webkit_web_audio_src_debug

static void webKitWebAudioSrcConstructed(GObject*);
static void webKitWebAudioSrcFinalize(GObject*);
static void webKitWebAudioSrcSetProperty(GObject*, guint propertyId, const GValue*, GParamSpec*);
static void webKitWebAudioSrcGetProperty(GObject*, guint propertyId, GValue*, GParamSpec*);
static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement*, GstStateChange);
static void webKitWebAudioSrcLoop(WebKitWebAudioSrc*);
99
100static GstCaps* getGStreamerMonoAudioCaps(float sampleRate)
101{
102#ifdef GST_API_VERSION_1
103    return gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(sampleRate),
104        "channels", G_TYPE_INT, 1,
105        "format", G_TYPE_STRING, gst_audio_format_to_string(GST_AUDIO_FORMAT_F32),
106        "layout", G_TYPE_STRING, "non-interleaved", NULL);
107#else
108    return gst_caps_new_simple("audio/x-raw-float", "rate", G_TYPE_INT, static_cast<int>(sampleRate),
109        "channels", G_TYPE_INT, 1,
110        "endianness", G_TYPE_INT, G_BYTE_ORDER,
111        "width", G_TYPE_INT, 32, NULL);
112#endif
113}
114
115static GstAudioChannelPosition webKitWebAudioGStreamerChannelPosition(int channelIndex)
116{
117    GstAudioChannelPosition position = GST_AUDIO_CHANNEL_POSITION_NONE;
118
119    switch (channelIndex) {
120    case AudioBus::ChannelLeft:
121        position = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
122        break;
123    case AudioBus::ChannelRight:
124        position = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
125        break;
126    case AudioBus::ChannelCenter:
127        position = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
128        break;
129    case AudioBus::ChannelLFE:
130#ifdef GST_API_VERSION_1
131        position = GST_AUDIO_CHANNEL_POSITION_LFE1;
132#else
133        position = GST_AUDIO_CHANNEL_POSITION_LFE;
134#endif
135        break;
136    case AudioBus::ChannelSurroundLeft:
137        position = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;
138        break;
139    case AudioBus::ChannelSurroundRight:
140        position = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;
141        break;
142    default:
143        break;
144    };
145
146    return position;
147}
148
#define webkit_web_audio_src_parent_class parent_class
// Registers the GObject type and initializes the element's debug category as
// part of type registration.
G_DEFINE_TYPE_WITH_CODE(WebKitWebAudioSrc, webkit_web_audio_src, GST_TYPE_BIN, GST_DEBUG_CATEGORY_INIT(webkit_web_audio_src_debug, \
                            "webkitwebaudiosrc", \
                            0, \
                            "webaudiosrc element"));
154
155static void webkit_web_audio_src_class_init(WebKitWebAudioSrcClass* webKitWebAudioSrcClass)
156{
157    GObjectClass* objectClass = G_OBJECT_CLASS(webKitWebAudioSrcClass);
158    GstElementClass* elementClass = GST_ELEMENT_CLASS(webKitWebAudioSrcClass);
159
160    gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&srcTemplate));
161    setGstElementClassMetadata(elementClass, "WebKit WebAudio source element", "Source", "Handles WebAudio data from WebCore", "Philippe Normand <pnormand@igalia.com>");
162
163    objectClass->constructed = webKitWebAudioSrcConstructed;
164    objectClass->finalize = webKitWebAudioSrcFinalize;
165    elementClass->change_state = webKitWebAudioSrcChangeState;
166
167    objectClass->set_property = webKitWebAudioSrcSetProperty;
168    objectClass->get_property = webKitWebAudioSrcGetProperty;
169
170    GParamFlags flags = static_cast<GParamFlags>(G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE);
171    g_object_class_install_property(objectClass,
172                                    PROP_RATE,
173                                    g_param_spec_float("rate", "rate",
174                                                       "Sample rate", G_MINDOUBLE, G_MAXDOUBLE,
175                                                       44100.0, flags));
176
177    g_object_class_install_property(objectClass,
178                                    PROP_BUS,
179                                    g_param_spec_pointer("bus", "bus",
180                                                         "Bus", flags));
181
182    g_object_class_install_property(objectClass,
183                                    PROP_PROVIDER,
184                                    g_param_spec_pointer("provider", "provider",
185                                                         "Provider", flags));
186
187    g_object_class_install_property(objectClass,
188                                    PROP_FRAMES,
189                                    g_param_spec_uint("frames", "frames",
190                                                      "Number of audio frames to pull at each iteration",
191                                                      0, G_MAXUINT8, 128, flags));
192
193    g_type_class_add_private(webKitWebAudioSrcClass, sizeof(WebKitWebAudioSourcePrivate));
194}
195
static void webkit_web_audio_src_init(WebKitWebAudioSrc* src)
{
    WebKitWebAudioSourcePrivate* priv = G_TYPE_INSTANCE_GET_PRIVATE(src, WEBKIT_TYPE_WEB_AUDIO_SRC, WebKitWebAudioSourcePrivate);
    src->priv = priv;
    // The private struct holds C++ members (GRefPtr), so run their constructors
    // in the GObject-allocated storage with placement new. Matched by the
    // explicit destructor call in webKitWebAudioSrcFinalize().
    new (priv) WebKitWebAudioSourcePrivate();

    // Ghost pad with no target yet; the target (wavenc's src pad) is set in
    // webKitWebAudioSrcConstructed() once the properties are available.
    priv->sourcePad = webkitGstGhostPadFromStaticTemplate(&srcTemplate, "src", 0);
    gst_element_add_pad(GST_ELEMENT(src), priv->sourcePad);

    priv->provider = 0;
    priv->bus = 0;

    // The task repeatedly invokes webKitWebAudioSrcLoop(); it is started and
    // joined from webKitWebAudioSrcChangeState(). The recursive mutex serves
    // as the task lock (API differs between GStreamer 0.10 and 1.0).
#ifdef GST_API_VERSION_1
    g_rec_mutex_init(&priv->mutex);
    priv->task = gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, 0);
#else
    g_static_rec_mutex_init(&priv->mutex);
    priv->task = gst_task_create(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src);
#endif

    gst_task_set_lock(priv->task.get(), &priv->mutex);
}
218
// Builds the internal pipeline once the construct-only properties are set:
// one "queue ! capsfilter ! audioconvert" branch per bus channel, all feeding
// an interleave element whose output is WAV-encoded and ghosted to sourcePad.
static void webKitWebAudioSrcConstructed(GObject* object)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    ASSERT(priv->bus);
    ASSERT(priv->provider);
    ASSERT(priv->sampleRate);

    priv->interleave = gst_element_factory_make("interleave", 0);
    priv->wavEncoder = gst_element_factory_make("wavenc", 0);

    // Missing elements are only logged here; webKitWebAudioSrcChangeState()
    // turns them into a missing-plugin error on NULL->READY.
    if (!priv->interleave) {
        GST_ERROR_OBJECT(src, "Failed to create interleave");
        return;
    }

    if (!priv->wavEncoder) {
        GST_ERROR_OBJECT(src, "Failed to create wavenc");
        return;
    }

    gst_bin_add_many(GST_BIN(src), priv->interleave.get(), priv->wavEncoder.get(), NULL);
    gst_element_link_pads_full(priv->interleave.get(), "src", priv->wavEncoder.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);

    // For each channel of the bus create a new upstream branch for interleave, like:
    // queue ! capsfilter ! audioconvert. which is plugged to a new interleave request sinkpad.
    for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) {
        GstElement* queue = gst_element_factory_make("queue", 0);
        GstElement* capsfilter = gst_element_factory_make("capsfilter", 0);
        GstElement* audioconvert = gst_element_factory_make("audioconvert", 0);

        GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));

#ifdef GST_API_VERSION_1
        // In 1.0 the channel position is carried in the caps' channel-mask,
        // so stamp the position into the mono caps before filtering.
        GstAudioInfo info;
        gst_audio_info_from_caps(&info, monoCaps.get());
        GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(channelIndex);
        GRefPtr<GstCaps> caps = adoptGRef(gst_audio_info_to_caps(&info));
        g_object_set(capsfilter, "caps", caps.get(), NULL);
#else
        g_object_set(capsfilter, "caps", monoCaps.get(), NULL);
#endif

        // Configure the queue for minimal latency.
        g_object_set(queue, "max-size-buffers", static_cast<guint>(1), NULL);

        // Keep a reference to each queue's sink pad; webKitWebAudioSrcLoop()
        // pushes one channel buffer into each. Released in finalize().
        GstPad* pad = gst_element_get_static_pad(queue, "sink");
        priv->pads = g_slist_prepend(priv->pads, pad);

        gst_bin_add_many(GST_BIN(src), queue, capsfilter, audioconvert, NULL);
        gst_element_link_pads_full(queue, "src", capsfilter, "sink", GST_PAD_LINK_CHECK_NOTHING);
        gst_element_link_pads_full(capsfilter, "src", audioconvert, "sink", GST_PAD_LINK_CHECK_NOTHING);
        gst_element_link_pads_full(audioconvert, "src", priv->interleave.get(), 0, GST_PAD_LINK_CHECK_NOTHING);

    }
    // Prepending reversed the order; restore channel-index order.
    priv->pads = g_slist_reverse(priv->pads);

    // wavenc's src pad is the only visible pad of our element.
    GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->wavEncoder.get(), "src"));
    gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get());
}
281
282static void webKitWebAudioSrcFinalize(GObject* object)
283{
284    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
285    WebKitWebAudioSourcePrivate* priv = src->priv;
286
287#ifdef GST_API_VERSION_1
288    g_rec_mutex_clear(&priv->mutex);
289#else
290    g_static_rec_mutex_free(&priv->mutex);
291#endif
292
293    g_slist_free_full(priv->pads, reinterpret_cast<GDestroyNotify>(gst_object_unref));
294
295    priv->~WebKitWebAudioSourcePrivate();
296    GST_CALL_PARENT(G_OBJECT_CLASS, finalize, ((GObject* )(src)));
297}
298
299static void webKitWebAudioSrcSetProperty(GObject* object, guint propertyId, const GValue* value, GParamSpec* pspec)
300{
301    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
302    WebKitWebAudioSourcePrivate* priv = src->priv;
303
304    switch (propertyId) {
305    case PROP_RATE:
306        priv->sampleRate = g_value_get_float(value);
307        break;
308    case PROP_BUS:
309        priv->bus = static_cast<AudioBus*>(g_value_get_pointer(value));
310        break;
311    case PROP_PROVIDER:
312        priv->provider = static_cast<AudioIOCallback*>(g_value_get_pointer(value));
313        break;
314    case PROP_FRAMES:
315        priv->framesToPull = g_value_get_uint(value);
316        break;
317    default:
318        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
319        break;
320    }
321}
322
323static void webKitWebAudioSrcGetProperty(GObject* object, guint propertyId, GValue* value, GParamSpec* pspec)
324{
325    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
326    WebKitWebAudioSourcePrivate* priv = src->priv;
327
328    switch (propertyId) {
329    case PROP_RATE:
330        g_value_set_float(value, priv->sampleRate);
331        break;
332    case PROP_BUS:
333        g_value_set_pointer(value, priv->bus);
334        break;
335    case PROP_PROVIDER:
336        g_value_set_pointer(value, priv->provider);
337        break;
338    case PROP_FRAMES:
339        g_value_set_uint(value, priv->framesToPull);
340        break;
341    default:
342        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
343        break;
344    }
345}
346
// Task body: allocates one GstBuffer per channel, points the AudioBus channel
// memory at those buffers, asks the provider to render into them, then pushes
// each buffer into its channel's queue sink pad.
static void webKitWebAudioSrcLoop(WebKitWebAudioSrc* src)
{
    WebKitWebAudioSourcePrivate* priv = src->priv;

    ASSERT(priv->bus);
    ASSERT(priv->provider);
    if (!priv->provider || !priv->bus)
        return;

    GSList* channelBufferList = 0;
    unsigned bufferSize = priv->framesToPull * sizeof(float);
    // Iterate in reverse and prepend so channelBufferList ends up in
    // channel-index order, matching priv->pads.
    for (int i = g_slist_length(priv->pads) - 1; i >= 0; i--) {
        GstBuffer* channelBuffer = gst_buffer_new_and_alloc(bufferSize);
        ASSERT(channelBuffer);
        channelBufferList = g_slist_prepend(channelBufferList, channelBuffer);
#ifdef GST_API_VERSION_1
        // NOTE(review): the buffer is unmapped before render() writes through
        // the pointer handed to setChannelMemory(), and the map flag is
        // GST_MAP_READ although the memory is written. This relies on plain
        // sysmem buffers where the mapped pointer stays valid — confirm.
        GstMapInfo info;
        gst_buffer_map(channelBuffer, &info, GST_MAP_READ);
        priv->bus->setChannelMemory(i, reinterpret_cast<float*>(info.data), priv->framesToPull);
        gst_buffer_unmap(channelBuffer, &info);
#else
        priv->bus->setChannelMemory(i, reinterpret_cast<float*>(GST_BUFFER_DATA(channelBuffer)), priv->framesToPull);
#endif
    }

    // FIXME: Add support for local/live audio input.
    priv->provider->render(0, priv->bus, priv->framesToPull);

    // Push each rendered channel buffer into the matching queue sink pad;
    // gst_pad_chain takes ownership of the buffer.
    GSList* padsIt;
    GSList* buffersIt;
    for (padsIt = priv->pads, buffersIt = channelBufferList; padsIt && buffersIt; padsIt = g_slist_next(padsIt), buffersIt = g_slist_next(buffersIt)) {
        GstPad* pad = static_cast<GstPad*>(padsIt->data);
        GstBuffer* channelBuffer = static_cast<GstBuffer*>(buffersIt->data);

        // In 0.10 the channel position travels on the buffer caps rather than
        // in a channel-mask, so set it just before pushing.
#ifndef GST_API_VERSION_1
        GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));
        GstStructure* structure = gst_caps_get_structure(monoCaps.get(), 0);
        GstAudioChannelPosition channelPosition = webKitWebAudioGStreamerChannelPosition(g_slist_index(channelBufferList, channelBuffer));
        gst_audio_set_channel_positions(structure, &channelPosition);
        gst_buffer_set_caps(channelBuffer, monoCaps.get());
#endif

        GstFlowReturn ret = gst_pad_chain(pad, channelBuffer);
        if (ret != GST_FLOW_OK)
            GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s", GST_DEBUG_PAD_NAME(pad)));
    }

    // Only the list cells are freed here; buffer ownership moved to the pads.
    g_slist_free(channelBufferList);
}
396
397static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement* element, GstStateChange transition)
398{
399    GstStateChangeReturn returnValue = GST_STATE_CHANGE_SUCCESS;
400    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(element);
401
402    switch (transition) {
403    case GST_STATE_CHANGE_NULL_TO_READY:
404        if (!src->priv->interleave) {
405            gst_element_post_message(element, gst_missing_element_message_new(element, "interleave"));
406            GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no interleave"));
407            return GST_STATE_CHANGE_FAILURE;
408        }
409        if (!src->priv->wavEncoder) {
410            gst_element_post_message(element, gst_missing_element_message_new(element, "wavenc"));
411            GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no wavenc"));
412            return GST_STATE_CHANGE_FAILURE;
413        }
414        break;
415    default:
416        break;
417    }
418
419    returnValue = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
420    if (UNLIKELY(returnValue == GST_STATE_CHANGE_FAILURE)) {
421        GST_DEBUG_OBJECT(src, "State change failed");
422        return returnValue;
423    }
424
425    switch (transition) {
426    case GST_STATE_CHANGE_READY_TO_PAUSED:
427        GST_DEBUG_OBJECT(src, "READY->PAUSED");
428        if (!gst_task_start(src->priv->task.get()))
429            returnValue = GST_STATE_CHANGE_FAILURE;
430        break;
431    case GST_STATE_CHANGE_PAUSED_TO_READY:
432        GST_DEBUG_OBJECT(src, "PAUSED->READY");
433        if (!gst_task_join(src->priv->task.get()))
434            returnValue = GST_STATE_CHANGE_FAILURE;
435        break;
436    default:
437        break;
438    }
439
440    return returnValue;
441}
442
443#endif // ENABLE(WEB_AUDIO) && USE(GSTREAMER)
444