author     Sjoerd Simons <sjoerd@luon.net>                             2012-08-27 01:23:06 +0800
committer  Guillaume Desmottes <guillaume.desmottes@collabora.co.uk>   2012-09-03 16:15:13 +0800
commit     d641d0d903b9847864acae4a47aa948131aa50b0 (patch)
tree       e089e935a5e5ad3efd1d175420fda9aa314706ba /src
parent     0af7f92b16f1d1b3527463906936fa0ea602ad57 (diff)
Port to gstreamer 1.0
Diffstat (limited to 'src')
-rw-r--r--   src/Makefile.am            2
-rw-r--r--   src/empathy-audio-sink.c  19
-rw-r--r--   src/empathy-audio-src.c   39
-rw-r--r--   src/empathy-call-window.c 32
-rw-r--r--   src/empathy-video-src.c   34
5 files changed, 108 insertions, 18 deletions
diff --git a/src/Makefile.am b/src/Makefile.am
index ed3d8d0db..c2cb9f7a0 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -120,7 +120,7 @@ empathy_call_SOURCES = \
empathy-mic-monitor.c \
empathy-mic-monitor.h
-empathy_call_CFLAGS = $(EMPATHY_CALL_CFLAGS)
+empathy_call_CFLAGS = $(EMPATHY_CALL_CFLAGS) -DGST_USE_UNSTABLE_API
empathy_call_LDFLAGS = $(EMPATHY_CALL_LIBS)
empathy_handwritten_source = \
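
Note on the new flag: around the 0.11/1.0 transition some GStreamer headers still marked their API as unstable and emitted a preprocessor warning unless GST_USE_UNSTABLE_API was defined, which is why the define is added to the call binary's CFLAGS. A minimal compile-only sketch of the same guard done in source rather than in CFLAGS; the plain gst/gst.h include is only illustrative:

/* Defining the macro before any GStreamer include silences the
 * "unstable API" #warning that guarded headers emit; passing
 * -DGST_USE_UNSTABLE_API in CFLAGS, as this Makefile.am hunk does,
 * has the same effect for every translation unit. */
#define GST_USE_UNSTABLE_API
#include <gst/gst.h>

int
main (void)
{
  return 0;
}
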
diff --git a/src/empathy-audio-sink.c b/src/empathy-audio-sink.c
index ba5cab77d..94fb95c4d 100644
--- a/src/empathy-audio-sink.c
+++ b/src/empathy-audio-sink.c
@@ -51,8 +51,12 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE(
"sink%d",
GST_PAD_SINK,
GST_PAD_REQUEST,
+#ifdef HAVE_GST1
+ GST_STATIC_CAPS ( "audio/x-raw" )
+#else
GST_STATIC_CAPS ( GST_AUDIO_INT_PAD_TEMPLATE_CAPS " ; "
GST_AUDIO_FLOAT_PAD_TEMPLATE_CAPS)
+#endif
);
enum {
@@ -80,9 +84,16 @@ empathy_audio_sink_init (EmpathyGstAudioSink *self)
g_mutex_init (&self->priv->volume_mutex);
}
+#ifdef HAVE_GST1
+static GstPad * empathy_audio_sink_request_new_pad (GstElement *self,
+ GstPadTemplate *templ,
+ const gchar* name,
+ const GstCaps *caps);
+#else
static GstPad * empathy_audio_sink_request_new_pad (GstElement *self,
GstPadTemplate *templ,
const gchar* name);
+#endif
static void empathy_audio_sink_release_pad (GstElement *self,
GstPad *pad);
@@ -293,10 +304,18 @@ empathy_audio_sink_volume_idle_setup (gpointer user_data)
return FALSE;
}
+#ifdef HAVE_GST1
+static GstPad *
+empathy_audio_sink_request_new_pad (GstElement *element,
+ GstPadTemplate *templ,
+ const gchar* name,
+ const GstCaps *caps)
+#else
static GstPad *
empathy_audio_sink_request_new_pad (GstElement *element,
GstPadTemplate *templ,
const gchar* name)
+#endif
{
EmpathyGstAudioSink *self = EMPATHY_GST_AUDIO_SINK (element);
GstElement *bin, *resample, *audioconvert0, *audioconvert1;
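
The audio sink hunks track two 1.0 API differences: raw integer and float audio merge into the single "audio/x-raw" media type, so the two 0.10 template-caps macros disappear, and GstElementClass::request_new_pad() gains a fourth const GstCaps *caps argument. A standalone sketch of a 1.0-style request-pad template; the "sink_%u" naming is illustrative only (the diff keeps "sink%d", which 1.0 still accepts), and this is not Empathy's element:

#include <gst/gst.h>

/* In 1.0, integer and float raw audio share one media type, so a plain
 * "audio/x-raw" template replaces GST_AUDIO_INT/FLOAT_PAD_TEMPLATE_CAPS. */
static GstStaticPadTemplate sink_template =
  GST_STATIC_PAD_TEMPLATE ("sink_%u",
      GST_PAD_SINK,
      GST_PAD_REQUEST,
      GST_STATIC_CAPS ("audio/x-raw"));

int
main (int argc, char **argv)
{
  GstCaps *caps;
  gchar *str;

  gst_init (&argc, &argv);

  /* Print the template caps to show what downstream will negotiate on. */
  caps = gst_static_pad_template_get_caps (&sink_template);
  str = gst_caps_to_string (caps);
  g_print ("template caps: %s\n", str);

  g_free (str);
  gst_caps_unref (caps);
  return 0;
}
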
diff --git a/src/empathy-audio-src.c b/src/empathy-audio-src.c
index deff297d0..c47b8dea7 100644
--- a/src/empathy-audio-src.c
+++ b/src/empathy-audio-src.c
@@ -263,8 +263,6 @@ empathy_audio_src_init (EmpathyGstAudioSrc *obj)
{
EmpathyGstAudioSrcPrivate *priv = EMPATHY_GST_AUDIO_SRC_GET_PRIVATE (obj);
GstPad *ghost, *src;
- GstElement *capsfilter;
- GstCaps *caps;
obj->priv = priv;
g_mutex_init (&priv->lock);
@@ -309,21 +307,28 @@ empathy_audio_src_init (EmpathyGstAudioSrc *obj)
gst_bin_add (GST_BIN (obj), priv->src);
- /* Explicitly state what format we want from pulsesrc. This pushes resampling
- * and format conversion as early as possible, lowering the amount of data
- * transferred and thus improving performance. When moving to GStreamer
- * 0.11/1.0, this should change so that we actually request what the encoder
- * wants downstream. */
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "channels", G_TYPE_INT, 1,
- "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- "rate", G_TYPE_INT, 32000,
- NULL);
- capsfilter = gst_element_factory_make ("capsfilter", NULL);
- g_object_set (G_OBJECT (capsfilter), "caps", caps, NULL);
- gst_bin_add (GST_BIN (obj), capsfilter);
- gst_element_link (priv->src, capsfilter);
+#ifndef HAVE_GST1
+ {
+ GstElement *capsfilter;
+ GstCaps *caps;
+
+ /* Explicitly state what format we want from pulsesrc. This pushes resampling
+ * and format conversion as early as possible, lowering the amount of data
+ * transferred and thus improving performance. When moving to GStreamer
+ * 0.11/1.0, this should change so that we actually request what the encoder
+ * wants downstream. */
+ caps = gst_caps_new_simple ("audio/x-raw-int",
+ "channels", G_TYPE_INT, 1,
+ "width", G_TYPE_INT, 16,
+ "depth", G_TYPE_INT, 16,
+ "rate", G_TYPE_INT, 32000,
+ NULL);
+ capsfilter = gst_element_factory_make ("capsfilter", NULL);
+ g_object_set (G_OBJECT (capsfilter), "caps", caps, NULL);
+ gst_bin_add (GST_BIN (obj), capsfilter);
+ gst_element_link (priv->src, capsfilter);
+ }
+#endif
priv->volume_element = gst_element_factory_make ("volume", NULL);
gst_bin_add (GST_BIN (obj), priv->volume_element);
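
For reference, the audio/x-raw-int caps dropped above have a direct 1.0 spelling; this hedged sketch shows roughly what the equivalent capsfilter caps would look like, although the port simply omits the filter on GStreamer 1.0 and lets downstream negotiation decide:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *caps;
  gchar *str;

  gst_init (&argc, &argv);

  /* Sketch only: the 0.10 "audio/x-raw-int, width=16, depth=16" fields
   * collapse into a single "format" string (S16LE) in GStreamer 1.0. */
  caps = gst_caps_new_simple ("audio/x-raw",
      "format", G_TYPE_STRING, "S16LE",
      "channels", G_TYPE_INT, 1,
      "rate", G_TYPE_INT, 32000,
      NULL);
  str = gst_caps_to_string (caps);
  g_print ("%s\n", str);

  g_free (str);
  gst_caps_unref (caps);
  return 0;
}
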
diff --git a/src/empathy-call-window.c b/src/empathy-call-window.c
index fca76c3f7..749b4c6f3 100644
--- a/src/empathy-call-window.c
+++ b/src/empathy-call-window.c
@@ -3255,6 +3255,27 @@ empathy_call_window_check_video_cb (gpointer data)
}
/* Called from the streaming thread */
+#ifdef HAVE_GST1
+static GstPadProbeReturn
+empathy_call_window_video_probe_cb (GstPad *pad,
+ GstPadProbeInfo *info,
+ gpointer user_data)
+{
+ EmpathyCallWindow *self = user_data;
+
+ if (G_UNLIKELY (!self->priv->got_video))
+ {
+ /* show the remote video */
+ g_idle_add_full (G_PRIORITY_DEFAULT_IDLE,
+ empathy_call_window_show_video_output_cb,
+ g_object_ref (self), g_object_unref);
+
+ self->priv->got_video = TRUE;
+ }
+
+ return GST_PAD_PROBE_OK;
+}
+#else
static gboolean
empathy_call_window_video_probe_cb (GstPad *pad,
GstMiniObject *mini_obj,
@@ -3276,6 +3297,7 @@ empathy_call_window_video_probe_cb (GstPad *pad,
return TRUE;
}
+#endif
/* Called from the streaming thread */
static gboolean
@@ -3302,8 +3324,14 @@ empathy_call_window_src_added_cb (EmpathyCallHandler *handler,
g_idle_add (empathy_call_window_show_video_output_cb, self);
pad = empathy_call_window_get_video_sink_pad (self);
+#ifdef HAVE_GST1
+ gst_pad_add_probe (src,
+ GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BUFFER_LIST,
+ empathy_call_window_video_probe_cb, self, NULL);
+#else
gst_pad_add_data_probe (src,
G_CALLBACK (empathy_call_window_video_probe_cb), self);
+#endif
if (priv->got_video_src > 0)
g_source_remove (priv->got_video_src);
priv->got_video_src = g_timeout_add_seconds (1,
@@ -3482,7 +3510,11 @@ empathy_call_window_content_added_cb (EmpathyCallHandler *handler,
case FS_MEDIA_TYPE_VIDEO:
if (priv->video_tee != NULL)
{
+#ifdef HAVE_GST1
+ pad = gst_element_get_request_pad (priv->video_tee, "src_%u");
+#else
pad = gst_element_get_request_pad (priv->video_tee, "src%d");
+#endif
if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sink)))
{
g_warning ("Could not link video source input pipeline");
diff --git a/src/empathy-video-src.c b/src/empathy-video-src.c
index 8c9c75015..2fc483567 100644
--- a/src/empathy-video-src.c
+++ b/src/empathy-video-src.c
@@ -23,7 +23,11 @@
#include <stdio.h>
#include <stdlib.h>
+#ifdef HAVE_GST1
+#include <gst/video/colorbalance.h>
+#else
#include <gst/interfaces/colorbalance.h>
+#endif
#define DEBUG_FLAG EMPATHY_DEBUG_VOIP
#include <libempathy/empathy-debug.h>
@@ -112,11 +116,24 @@ error:
return NULL;
}
+#ifdef HAVE_GST1
+static GstPadProbeReturn
+empathy_video_src_drop_eos (GstPad *pad,
+ GstPadProbeInfo *info,
+ gpointer user_data)
+{
+ if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) == GST_EVENT_EOS)
+ return GST_PAD_PROBE_DROP;
+
+ return GST_PAD_PROBE_OK;
+}
+#else
static gboolean
empathy_video_src_drop_eos (GstPad *pad, GstEvent *event, gpointer user_data)
{
return GST_EVENT_TYPE (event) != GST_EVENT_EOS;
}
+#endif
static void
empathy_video_src_init (EmpathyGstVideoSrc *obj)
@@ -128,7 +145,11 @@ empathy_video_src_init (EmpathyGstVideoSrc *obj)
gchar *str;
/* allocate caps here, so we can update it by optional elements */
+#ifdef HAVE_GST1
+ caps = gst_caps_new_simple ("video/x-raw",
+#else
caps = gst_caps_new_simple ("video/x-raw-yuv",
+#endif
"width", G_TYPE_INT, 320,
"height", G_TYPE_INT, 240,
NULL);
@@ -144,7 +165,14 @@ empathy_video_src_init (EmpathyGstVideoSrc *obj)
/* Drop EOS events, so that our sinks don't get confused when we restart the
* source (triggering an EOS) */
src = gst_element_get_static_pad (element, "src");
+
+#ifdef HAVE_GST1
+ gst_pad_add_probe (src, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
+ empathy_video_src_drop_eos, NULL, NULL);
+#else
gst_pad_add_event_probe (src, G_CALLBACK (empathy_video_src_drop_eos), NULL);
+#endif
+
gst_object_unref (src);
/* videorate with the required properties optional as it needs a currently
@@ -178,9 +206,15 @@ empathy_video_src_init (EmpathyGstVideoSrc *obj)
DEBUG ("Current video src caps are : %s", str);
g_free (str);
+#ifdef HAVE_GST1
+ if ((element = empathy_gst_add_to_bin (GST_BIN (obj),
+ element, "videoconvert")) == NULL)
+ g_error ("Failed to add \"videoconvert\" (gst-plugins-base missing?)");
+#else
if ((element = empathy_gst_add_to_bin (GST_BIN (obj),
element, "ffmpegcolorspace")) == NULL)
g_error ("Failed to add \"ffmpegcolorspace\" (gst-plugins-base missing?)");
+#endif
if ((element = empathy_gst_add_to_bin (GST_BIN (obj),
element, "videoscale")) == NULL)