diff --git a/configure.ac b/configure.ac index c79fbeef..e58133e7 100644 --- a/configure.ac +++ b/configure.ac @@ -82,7 +82,8 @@ if test "x$enable_debug" = xyes; then fi AM_CONDITIONAL(OWR_DEBUG, test x$enable_debug = xyes) -dnl build debug or not +dnl build tests or not +have_json_glib=no AC_MSG_CHECKING([whether to build tests or not]) AC_ARG_ENABLE(tests, AC_HELP_STRING( @@ -95,12 +96,13 @@ AC_HELP_STRING( esac],[enable_tests=yes]) AC_MSG_RESULT([$enable_tests]) if test "x$enable_tests" = xyes; then - PKG_CHECK_MODULES(JSON_GLIB, [json-glib-1.0]) + PKG_CHECK_MODULES(JSON_GLIB, [json-glib-1.0], [have_json_glib=yes], [have_json_glib=no]) PKG_CHECK_MODULES(LIBSOUP, [libsoup-2.4]) AC_DEFINE(OWR_TESTS, 1, [Define if building tests]) fi AM_CONDITIONAL(OWR_TESTS, test x$enable_tests = xyes) +AM_CONDITIONAL(HAVE_JSON_GLIB, test x$have_json_glib = xyes) dnl build static or not AC_MSG_CHECKING([whether to build static owr or not]) diff --git a/local/owr_device_list.c b/local/owr_device_list.c index 7991cf0b..2c15e2f6 100644 --- a/local/owr_device_list.c +++ b/local/owr_device_list.c @@ -131,7 +131,7 @@ static gboolean enumerate_audio_source_devices(GClosure *callback) source = _owr_local_media_source_new_cached(-1, "Default audio input", OWR_MEDIA_TYPE_AUDIO, OWR_SOURCE_TYPE_CAPTURE, - NULL); + NULL, OWR_MEDIA_SOURCE_SUPPORTS_NONE); sources = g_list_prepend(sources, source); _owr_utils_call_closure_with_list(callback, sources); g_list_free_full(sources, g_object_unref); @@ -177,7 +177,8 @@ static gboolean enumerate_source_devices(OwrMediaType type, GClosure *callback) source = _owr_local_media_source_new_cached(-1, name, type, OWR_SOURCE_TYPE_CAPTURE, - device); + device, + OWR_MEDIA_SOURCE_SUPPORTS_NONE); g_free(name); sources = g_list_prepend(sources, source); } @@ -486,11 +487,13 @@ static gboolean enumerate_video_source_devices(GClosure *callback) if (facing == CameraInfo.CAMERA_FACING_FRONT) { source = _owr_local_media_source_new_cached(i, "Front facing Camera", - 
OWR_MEDIA_TYPE_VIDEO, OWR_SOURCE_TYPE_CAPTURE, NULL); + OWR_MEDIA_TYPE_VIDEO, OWR_SOURCE_TYPE_CAPTURE, NULL, + OWR_MEDIA_SOURCE_SUPPORTS_NONE); sources = g_list_prepend(sources, source); } else if (facing == CameraInfo.CAMERA_FACING_BACK) { source = _owr_local_media_source_new_cached(i, "Back facing Camera", - OWR_MEDIA_TYPE_VIDEO, OWR_SOURCE_TYPE_CAPTURE, NULL); + OWR_MEDIA_TYPE_VIDEO, OWR_SOURCE_TYPE_CAPTURE, NULL, + OWR_MEDIA_SOURCE_SUPPORTS_NONE); sources = g_list_append(sources, source); } diff --git a/local/owr_local.c b/local/owr_local.c index d0581d56..2decef4d 100644 --- a/local/owr_local.c +++ b/local/owr_local.c @@ -65,14 +65,17 @@ static GList *get_test_sources(OwrMediaType types) OwrMediaType media_type; GList *result_list = NULL; GList *elem; + gboolean useh264 = g_getenv("OWR_USE_TEST_SOURCES") != NULL && g_ascii_strcasecmp(g_getenv("OWR_USE_TEST_SOURCES"), "H264") == 0; if (g_once_init_enter(&cached_sources)) { GList *sources = NULL; - source = _owr_local_media_source_new_cached(-1, "Audio test source", OWR_MEDIA_TYPE_AUDIO, OWR_SOURCE_TYPE_TEST, NULL); + source = _owr_local_media_source_new_cached(-1, "Audio test source", OWR_MEDIA_TYPE_AUDIO, OWR_SOURCE_TYPE_TEST, NULL, OWR_MEDIA_SOURCE_SUPPORTS_NONE); sources = g_list_append(sources, OWR_MEDIA_SOURCE(source)); - source = _owr_local_media_source_new_cached(-1, "Video test source", OWR_MEDIA_TYPE_VIDEO, OWR_SOURCE_TYPE_TEST, NULL); + source = _owr_local_media_source_new_cached(-1, "Video test source", OWR_MEDIA_TYPE_VIDEO, OWR_SOURCE_TYPE_TEST, NULL, OWR_MEDIA_SOURCE_SUPPORTS_NONE); + if (useh264) + _owr_media_source_set_codec(OWR_MEDIA_SOURCE(source), OWR_CODEC_TYPE_H264); sources = g_list_append(sources, OWR_MEDIA_SOURCE(source)); g_once_init_leave(&cached_sources, sources); diff --git a/local/owr_local_media_source.c b/local/owr_local_media_source.c index abbdbd7f..418142b0 100644 --- a/local/owr_local_media_source.c +++ b/local/owr_local_media_source.c @@ -571,7 +571,9 @@ static void on_caps(GstElement *source, GParamSpec *pspec, 
OwrMediaSource *media if (GST_IS_CAPS(caps)) { GST_INFO_OBJECT(source, "%s - configured with caps: %" GST_PTR_FORMAT, media_source_name, caps); + gst_caps_unref(caps); } + g_free(media_source_name); } static void @@ -773,8 +775,14 @@ static GstElement *owr_local_media_source_request_source(OwrMediaSource *media_s } break; case OWR_SOURCE_TYPE_TEST: { - GstElement *src, *time; + GstElement *src, *time, *h264enc = NULL; GstPad *srcpad; + gboolean useh264 = g_getenv("OWR_USE_TEST_SOURCES") != NULL && g_ascii_strcasecmp(g_getenv("OWR_USE_TEST_SOURCES"), "H264") == 0; + + if (useh264) + printf("video-source encoding: video/x-h264\n"); + else + printf("video-source encoding: video/x-raw\n"); source = gst_bin_new("video-source"); @@ -787,9 +795,24 @@ static GstElement *owr_local_media_source_request_source(OwrMediaSource *media_s g_object_set(time, "font-desc", "Sans 60", NULL); gst_bin_add(GST_BIN(source), time); gst_element_link(src, time); - srcpad = gst_element_get_static_pad(time, "src"); - } else - srcpad = gst_element_get_static_pad(src, "src"); + if (!useh264) + srcpad = gst_element_get_static_pad(time, "src"); + } else if (!useh264) + srcpad = gst_element_get_static_pad(src, "src"); + + if (useh264) { + h264enc = gst_element_factory_make("openh264enc", "openh264enc"); + if (!h264enc) { + GST_ERROR_OBJECT(source, "Failed to create openh264enc element!"); + printf("Failed to create openh264enc element!\n"); + } + gst_bin_add(GST_BIN(source), h264enc); + if (time) + gst_element_link(time, h264enc); + else + gst_element_link(src, h264enc); + srcpad = gst_element_get_static_pad(h264enc, "src"); + } gst_element_add_pad(source, gst_ghost_pad_new("src", srcpad)); gst_object_unref(srcpad); @@ -924,7 +947,8 @@ static GstElement *owr_local_media_source_request_source(OwrMediaSource *media_s } static OwrLocalMediaSource *_owr_local_media_source_new(gint device_index, const gchar *name, - OwrMediaType media_type, OwrSourceType source_type, GstDevice *device) + OwrMediaType media_type, OwrSourceType source_type, 
GstDevice *device, + OwrMediaSourceSupportedInterfaces interfaces) { OwrLocalMediaSource *source; @@ -936,12 +960,14 @@ static OwrLocalMediaSource *_owr_local_media_source_new(gint device_index, const source->priv->device = device; _owr_media_source_set_type(OWR_MEDIA_SOURCE(source), source_type); + _owr_media_source_set_supported_interfaces(OWR_MEDIA_SOURCE(source), interfaces); return source; } OwrLocalMediaSource *_owr_local_media_source_new_cached(gint device_index, const gchar *name, - OwrMediaType media_type, OwrSourceType source_type, GstDevice *device) + OwrMediaType media_type, OwrSourceType source_type, GstDevice *device, + OwrMediaSourceSupportedInterfaces interfaces) { static OwrLocalMediaSource *test_sources[2] = { NULL, }; static GHashTable *sources[2] = { NULL, }; @@ -962,7 +988,7 @@ OwrLocalMediaSource *_owr_local_media_source_new_cached(gint device_index, const if (source_type == OWR_SOURCE_TYPE_TEST) { if (!test_sources[i]) - test_sources[i] = _owr_local_media_source_new(device_index, name, media_type, source_type, device); + test_sources[i] = _owr_local_media_source_new(device_index, name, media_type, source_type, device, interfaces); ret = test_sources[i]; @@ -982,7 +1008,7 @@ OwrLocalMediaSource *_owr_local_media_source_new_cached(gint device_index, const } if (!ret) { - ret = _owr_local_media_source_new(device_index, name, media_type, source_type, device); + ret = _owr_local_media_source_new(device_index, name, media_type, source_type, device, interfaces); g_hash_table_insert(sources[i], GINT_TO_POINTER(device_index), ret); } diff --git a/local/owr_local_media_source_private.h b/local/owr_local_media_source_private.h index 81ccc1ab..13e3ee84 100644 --- a/local/owr_local_media_source_private.h +++ b/local/owr_local_media_source_private.h @@ -33,6 +33,7 @@ #define __OWR_LOCAL_MEDIA_SOURCE_PRIVATE_H__ #include "owr_local_media_source.h" +#include "owr_media_source_private.h" #include "owr_types.h" @@ -44,7 +45,8 @@ G_BEGIN_DECLS 
OwrLocalMediaSource *_owr_local_media_source_new_cached(gint device_index, - const gchar *name, OwrMediaType media_type, OwrSourceType source_type, GstDevice *device); + const gchar *name, OwrMediaType media_type, OwrSourceType source_type, GstDevice *device, + OwrMediaSourceSupportedInterfaces interfaces); void _owr_local_media_source_set_capture_device_index(OwrLocalMediaSource *source, guint index); G_END_DECLS diff --git a/local/owr_media_renderer.c b/local/owr_media_renderer.c index 5d688e8d..e62ee001 100644 --- a/local/owr_media_renderer.c +++ b/local/owr_media_renderer.c @@ -58,6 +58,7 @@ enum { PROP_0, PROP_MEDIA_TYPE, PROP_DISABLED, + PROP_SOURCE, N_PROPERTIES }; @@ -130,6 +131,10 @@ static void owr_media_renderer_class_init(OwrMediaRendererClass *klass) "Whether this renderer is disabled or not", DEFAULT_DISABLED, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + obj_properties[PROP_SOURCE] = g_param_spec_object("source", "Source", + "Current Media Source being rendered", OWR_TYPE_MEDIA_SOURCE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + gobject_class->set_property = owr_media_renderer_set_property; gobject_class->get_property = owr_media_renderer_get_property; @@ -263,6 +268,10 @@ static void owr_media_renderer_set_property(GObject *object, guint property_id, priv->disabled = g_value_get_boolean(value); break; + case PROP_SOURCE: + priv->source = g_value_get_object(value); + break; + default: G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); break; @@ -286,6 +295,10 @@ static void owr_media_renderer_get_property(GObject *object, guint property_id, g_value_set_boolean(value, priv->disabled); break; + case PROP_SOURCE: + g_value_set_object(value, priv->source); + break; + default: G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); break; @@ -305,6 +318,7 @@ static void on_caps(GstElement *sink, GParamSpec *pspec, OwrMediaRenderer *media media_renderer->priv->media_type == OWR_MEDIA_TYPE_AUDIO ? 
"Audio" : media_renderer->priv->media_type == OWR_MEDIA_TYPE_VIDEO ? "Video" : "Unknown", caps); + gst_caps_unref(caps); } } @@ -388,6 +402,7 @@ static gboolean set_source(GHashTable *args) } priv->source = g_object_ref(source); + g_object_notify_by_pspec(G_OBJECT(renderer), obj_properties[PROP_SOURCE]); maybe_start_renderer(renderer); @@ -469,6 +484,11 @@ void _owr_media_renderer_set_sink(OwrMediaRenderer *renderer, gpointer sink_ptr) g_mutex_unlock(&priv->media_renderer_lock); } +OwrMediaSource* _owr_media_renderer_get_source(OwrMediaRenderer *renderer) +{ + return renderer->priv->source; +} + gchar * owr_media_renderer_get_dot_data(OwrMediaRenderer *renderer) { g_return_val_if_fail(OWR_IS_MEDIA_RENDERER(renderer), NULL); diff --git a/local/owr_media_renderer.h b/local/owr_media_renderer.h index bb29a214..a67878a2 100644 --- a/local/owr_media_renderer.h +++ b/local/owr_media_renderer.h @@ -63,6 +63,7 @@ struct _OwrMediaRendererClass { /*< private >*/ void *(*get_caps)(OwrMediaRenderer *renderer); void *(*get_sink)(OwrMediaRenderer *renderer); + }; GType owr_media_renderer_get_type(void) G_GNUC_CONST; diff --git a/local/owr_media_renderer_private.h b/local/owr_media_renderer_private.h index dc99e22a..045491c7 100644 --- a/local/owr_media_renderer_private.h +++ b/local/owr_media_renderer_private.h @@ -39,6 +39,8 @@ G_BEGIN_DECLS void _owr_media_renderer_set_sink(OwrMediaRenderer *renderer, gpointer sink); GstPipeline * _owr_media_renderer_get_pipeline(OwrMediaRenderer *renderer); +OwrMediaSource* _owr_media_renderer_get_source(OwrMediaRenderer *renderer); +void _owr_media_renderer_reconfigure_element(OwrMediaRenderer *renderer); G_END_DECLS diff --git a/local/owr_video_renderer.c b/local/owr_video_renderer.c index 3e4582f4..a6f0fcc0 100644 --- a/local/owr_video_renderer.c +++ b/local/owr_video_renderer.c @@ -35,12 +35,14 @@ #include "owr_video_renderer.h" #include "owr_media_renderer_private.h" +#include "owr_media_source_private.h" #include "owr_private.h" 
#include "owr_utils.h" #include "owr_video_renderer_private.h" #include "owr_window_registry.h" #include "owr_window_registry_private.h" +#include #include #include @@ -86,9 +88,12 @@ static void owr_video_renderer_get_property(GObject *object, guint property_id, GValue *value, GParamSpec *pspec); static void owr_video_renderer_constructed(GObject *object); -static GstElement *owr_video_renderer_get_element(OwrMediaRenderer *renderer, guintptr window_handle); +static GstElement *owr_video_renderer_get_element(OwrMediaRenderer *renderer); +static void owr_video_renderer_reconfigure_element(OwrMediaRenderer *renderer); +static GstElement *owr_video_renderer_get_element_with_window_handle(OwrMediaRenderer *renderer, guintptr window_handle); static GstCaps *owr_video_renderer_get_caps(OwrMediaRenderer *renderer); static GstElement *owr_video_renderer_get_sink(OwrMediaRenderer *renderer); +static void _owr_video_renderer_notify_source_changed(OwrMediaRenderer *renderer, GParamSpec *pspec, gpointer user_data); struct _OwrVideoRendererPrivate { guint width; @@ -99,6 +104,7 @@ struct _OwrVideoRendererPrivate { gchar *tag; GMutex closure_mutex; GClosure *request_context; + GstElement *renderer_bin; }; static void owr_video_renderer_finalize(GObject *object) @@ -119,6 +125,11 @@ static void owr_video_renderer_finalize(GObject *object) priv->request_context = NULL; } + if (priv->renderer_bin) { + gst_object_unref(priv->renderer_bin); + priv->renderer_bin = NULL; + } + G_OBJECT_CLASS(owr_video_renderer_parent_class)->finalize(object); } @@ -162,6 +173,7 @@ static void owr_video_renderer_class_init(OwrVideoRendererClass *klass) media_renderer_class->get_caps = (void *(*)(OwrMediaRenderer *))owr_video_renderer_get_caps; media_renderer_class->get_sink = (void *(*)(OwrMediaRenderer *))owr_video_renderer_get_sink; + g_object_class_install_properties(gobject_class, N_PROPERTIES, obj_properties); } @@ -178,6 +190,9 @@ static void owr_video_renderer_init(OwrVideoRenderer *renderer) 
priv->mirror = DEFAULT_MIRROR; g_mutex_init(&priv->closure_mutex); priv->request_context = NULL; + priv->renderer_bin = NULL; + + g_signal_connect(renderer, "notify::source", G_CALLBACK(_owr_video_renderer_notify_source_changed), NULL); } static void owr_video_renderer_set_property(GObject *object, guint property_id, @@ -269,31 +284,67 @@ OwrVideoRenderer *owr_video_renderer_new(const gchar *tag) if (!gst_element_link(a, b)) \ GST_ERROR("Failed to link " #a " -> " #b); -static void renderer_disabled(OwrMediaRenderer *renderer, GParamSpec *pspec, GstElement *balance) +static void renderer_disabled(OwrMediaRenderer *renderer, G_GNUC_UNUSED GParamSpec *pspec, GstElement *balance) { + // FIXME: We need to be able to disable rendering without a + // balance element. This is highly inneficient. gboolean disabled = FALSE; + GstColorBalance* color_balance = NULL; g_return_if_fail(OWR_IS_MEDIA_RENDERER(renderer)); - g_return_if_fail(G_IS_PARAM_SPEC(pspec) || !pspec); - g_return_if_fail(GST_IS_ELEMENT(balance)); + + if (GST_IS_COLOR_BALANCE(balance)) { + color_balance = GST_COLOR_BALANCE(gst_object_ref(balance)); + } else { + OwrMediaSource* media_source = _owr_media_renderer_get_source(renderer); + GstElement* src_bin = _owr_media_source_get_source_bin(media_source); + balance = gst_bin_get_by_interface(GST_BIN(src_bin), GST_TYPE_COLOR_BALANCE); + gst_object_unref(src_bin); + g_return_if_fail(GST_IS_COLOR_BALANCE(balance)); + color_balance = GST_COLOR_BALANCE(balance); + } g_object_get(renderer, "disabled", &disabled, NULL); - g_object_set(balance, "saturation", (gdouble)!disabled, "brightness", (gdouble)-disabled, NULL); + + const GList* controls = gst_color_balance_list_channels(color_balance); + gint index = 0; + const GList* item; + for (item = controls; item != NULL; item = item->next, ++index) { + GstColorBalanceChannel* channel = item->data; + if (g_strcmp0(channel->label, "SATURATION") == 0 || g_strcmp0(channel->label, "BRIGHTNESS") == 0) { + gint new_value = 
disabled ? channel->min_value : ((channel->min_value + channel->max_value) / 2); + gst_color_balance_set_value(color_balance, channel, new_value); + } + } + + gst_object_unref(color_balance); } static void update_flip_method(OwrMediaRenderer *renderer, GParamSpec *pspec, GstElement *flip) { - guint rotation = 0; - gboolean mirror = FALSE; - gint flip_method; + g_assert(OWR_IS_MEDIA_RENDERER(renderer)); - g_return_if_fail(OWR_IS_MEDIA_RENDERER(renderer)); - g_return_if_fail(G_IS_PARAM_SPEC(pspec) || !pspec); - g_return_if_fail(GST_IS_ELEMENT(flip)); + if (!flip) { + OwrMediaSource* source = _owr_media_renderer_get_source(renderer); + + if (_owr_media_source_supports_interfaces(source, OWR_MEDIA_SOURCE_SUPPORTS_VIDEO_ORIENTATION)) { + GstElement* bin = _owr_media_source_get_source_bin(source); - g_object_get(renderer, "rotation", &rotation, "mirror", &mirror, NULL); - flip_method = _owr_rotation_and_mirror_to_video_flip_method(rotation, mirror); - g_object_set(flip, "method", flip_method, NULL); + flip = gst_bin_get_by_name(GST_BIN(bin), "video-source"); + g_assert(flip); + + pspec = g_object_class_find_property(G_OBJECT_GET_CLASS(flip), "orientation"); + + // For simplicity, and considering that we already assume + // with the parameter that the object is alive, we can + // safely unref here. 
+ gst_object_unref(flip); + gst_object_unref(bin); + } + } + + if (flip) + _owr_update_flip_method(G_OBJECT(renderer), pspec, flip); } static void disable_last_sample_on_sink(const GValue *item, gpointer data) @@ -309,40 +360,89 @@ static void disable_last_sample_on_sink(const GValue *item, gpointer data) g_object_set(element, "enable-last-sample", FALSE, NULL); } -static GstElement *owr_video_renderer_get_element(OwrMediaRenderer *renderer, guintptr window_handle) +static GstElement *owr_video_renderer_get_element(OwrMediaRenderer *renderer) { OwrVideoRenderer *video_renderer; OwrVideoRendererPrivate *priv; - GstElement *renderer_bin; - GstElement *upload, *convert, *balance, *flip, *sink; - GstPad *ghostpad, *sinkpad; gchar *bin_name; - GValue value = G_VALUE_INIT; - g_assert(renderer); + g_assert(OWR_IS_VIDEO_RENDERER(renderer)); video_renderer = OWR_VIDEO_RENDERER(renderer); priv = video_renderer->priv; bin_name = g_strdup_printf("video-renderer-bin-%u", g_atomic_int_add(&unique_bin_id, 1)); - renderer_bin = gst_bin_new(bin_name); + priv->renderer_bin = gst_bin_new(bin_name); g_free(bin_name); + return GST_ELEMENT(gst_object_ref(priv->renderer_bin)); +} + +static void owr_video_renderer_reconfigure_element(OwrMediaRenderer *renderer) +{ + OwrVideoRenderer *video_renderer; + OwrVideoRendererPrivate *priv; + GstElement *parser = NULL; + GstElement *decoder = NULL; + GstElement *balance = NULL; + GstElement *upload, *sink, *flip = NULL; + GstPad *ghostpad, *sinkpad; + GValue value = G_VALUE_INIT; + OwrMediaSource *source; + OwrCodecType codec_type; + gboolean link_ok = TRUE; + GstElement *first = NULL; + + g_assert(OWR_IS_VIDEO_RENDERER(renderer)); + video_renderer = OWR_VIDEO_RENDERER(renderer); + priv = video_renderer->priv; + + source = _owr_media_renderer_get_source(renderer); + codec_type = _owr_media_source_get_codec(source); + + if (!_owr_codec_type_is_raw(codec_type)) { + parser = _owr_create_parser(codec_type); + decoder = _owr_create_decoder(codec_type); 
+ if (parser) + gst_bin_add(GST_BIN(priv->renderer_bin), parser); + if (decoder) + gst_bin_add(GST_BIN(priv->renderer_bin), decoder); + } + upload = gst_element_factory_make("glupload", "video-renderer-upload"); - convert = gst_element_factory_make("glcolorconvert", "video-renderer-convert"); + gst_bin_add(GST_BIN(priv->renderer_bin), upload); - balance = gst_element_factory_make("glcolorbalance", "video-renderer-balance"); - g_signal_connect_object(renderer, "notify::disabled", G_CALLBACK(renderer_disabled), - balance, 0); - renderer_disabled(renderer, NULL, balance); + if (!_owr_media_source_supports_interfaces(source, OWR_MEDIA_SOURCE_SUPPORTS_COLOR_BALANCE)) { + GstElement *convert = NULL; - flip = gst_element_factory_make("glvideoflip", "video-renderer-flip"); - if (!flip) { - g_warning("The glvideoflip GStreamer element isn't available. Video mirroring and rotation functionalities are thus disabled."); - } else { - g_signal_connect_object(renderer, "notify::rotation", G_CALLBACK(update_flip_method), flip, 0); - g_signal_connect_object(renderer, "notify::mirror", G_CALLBACK(update_flip_method), flip, 0); - update_flip_method(renderer, NULL, flip); + balance = gst_element_factory_make("glcolorbalance", "video-renderer-balance"); + + if (G_LIKELY(balance)) { + convert = gst_element_factory_make("glcolorconvert", "video-renderer-convert"); + + if (G_LIKELY(convert)) { + renderer_disabled(renderer, NULL, balance); + gst_bin_add_many(GST_BIN(priv->renderer_bin), convert, balance, NULL); + } else + g_object_unref(balance); + } + + if (!convert || !balance) + g_warning("cannot create convert or balance elements to disable rendering"); + } + g_signal_connect_object(renderer, "notify::disabled", G_CALLBACK(renderer_disabled), balance, 0); + + if (!_owr_media_source_supports_interfaces(source, OWR_MEDIA_SOURCE_SUPPORTS_VIDEO_ORIENTATION)) { + flip = gst_element_factory_make("glvideoflip", "video-renderer-flip"); + if (G_LIKELY(flip)) { + 
_owr_update_flip_method(G_OBJECT(renderer), NULL, flip); + gst_bin_add(GST_BIN(priv->renderer_bin), flip); + } else + g_warning("the glvideoflip element isn't available, video rotation support is now disabled"); } + g_signal_connect_object(renderer, "notify::rotation", G_CALLBACK(update_flip_method), flip, 0); + g_signal_connect_object(renderer, "notify::mirror", G_CALLBACK(update_flip_method), flip, 0); + + sink = OWR_MEDIA_RENDERER_GET_CLASS(renderer)->get_sink(renderer); g_assert(sink); @@ -352,7 +452,31 @@ static GstElement *owr_video_renderer_get_element(OwrMediaRenderer *renderer, gu disable_last_sample_on_sink(&value, NULL); g_value_unset(&value); - if (priv->tag) { + gst_bin_add(GST_BIN(priv->renderer_bin), sink); + + _owr_bin_link_and_sync_elements(GST_BIN(priv->renderer_bin), &link_ok, NULL, &first, NULL); + g_warn_if_fail(link_ok); + sinkpad = gst_element_get_static_pad(first, "sink"); + + g_assert(sinkpad); + ghostpad = gst_ghost_pad_new("sink", sinkpad); + gst_pad_set_active(ghostpad, TRUE); + gst_element_add_pad(priv->renderer_bin, ghostpad); + gst_object_unref(sinkpad); +} + +static GstElement *owr_video_renderer_get_element_with_window_handle(OwrMediaRenderer *renderer, guintptr window_handle) +{ + GstElement *renderer_bin, *sink; + + g_assert(OWR_IS_VIDEO_RENDERER(renderer)); + + renderer_bin = owr_video_renderer_get_element(renderer); + owr_video_renderer_reconfigure_element(renderer); + + sink = OWR_MEDIA_RENDERER_GET_CLASS(renderer)->get_sink(renderer); + g_assert(sink); + if (OWR_VIDEO_RENDERER(renderer)->priv->tag) { GstElement *sink_element = GST_IS_BIN(sink) ? 
gst_bin_get_by_interface(GST_BIN(sink), GST_TYPE_VIDEO_OVERLAY) : sink; if (GST_IS_ELEMENT(sink_element) && GST_IS_VIDEO_OVERLAY(sink)) @@ -363,26 +487,6 @@ static GstElement *owr_video_renderer_get_element(OwrMediaRenderer *renderer, gu g_object_unref(sink_element); } - gst_bin_add_many(GST_BIN(renderer_bin), upload, convert, balance, sink, NULL); - - LINK_ELEMENTS(upload, convert); - LINK_ELEMENTS(convert, balance); - - if (flip) { - gst_bin_add(GST_BIN(renderer_bin), flip); - LINK_ELEMENTS(balance, flip); - LINK_ELEMENTS(flip, sink); - } else { - LINK_ELEMENTS(balance, sink); - } - - sinkpad = gst_element_get_static_pad(upload, "sink"); - g_assert(sinkpad); - ghostpad = gst_ghost_pad_new("sink", sinkpad); - gst_pad_set_active(ghostpad, TRUE); - gst_element_add_pad(renderer_bin, ghostpad); - gst_object_unref(sinkpad); - return renderer_bin; } @@ -462,7 +566,7 @@ static void owr_video_renderer_constructed(GObject *object) /* If we have no tag, just directly create the sink */ if (!priv->tag) - _owr_media_renderer_set_sink(OWR_MEDIA_RENDERER(video_renderer), owr_video_renderer_get_element(OWR_MEDIA_RENDERER(video_renderer), 0)); + _owr_media_renderer_set_sink(OWR_MEDIA_RENDERER(video_renderer), owr_video_renderer_get_element(OWR_MEDIA_RENDERER(video_renderer))); pipeline = _owr_media_renderer_get_pipeline(OWR_MEDIA_RENDERER(video_renderer)); g_assert(pipeline); @@ -479,6 +583,7 @@ static GstCaps *owr_video_renderer_get_caps(OwrMediaRenderer *renderer) GstCaps *caps; guint width = 0, height = 0; gdouble max_framerate = 0.0; + OwrMediaSource *source; g_object_get(OWR_VIDEO_RENDERER(renderer), "width", &width, @@ -486,7 +591,9 @@ static GstCaps *owr_video_renderer_get_caps(OwrMediaRenderer *renderer) "max-framerate", &max_framerate, NULL); - caps = gst_caps_new_empty_simple("video/x-raw"); + source = _owr_media_renderer_get_source(renderer); + caps = gst_caps_new_empty_simple(_owr_codec_type_to_caps_mime(_owr_media_source_get_media_type(source), + 
_owr_media_source_get_codec(source))); gst_caps_set_features(caps, 0, gst_caps_features_new_any()); if (width > 0) gst_caps_set_simple(caps, "width", G_TYPE_INT, width, NULL); @@ -508,6 +615,13 @@ static GstElement *owr_video_renderer_get_sink(OwrMediaRenderer *renderer) return gst_element_factory_make(VIDEO_SINK, "video-renderer-sink"); } +static void _owr_video_renderer_notify_source_changed(OwrMediaRenderer *renderer, GParamSpec *pspec, gpointer user_data) +{ + OWR_UNUSED(pspec); + OWR_UNUSED(user_data); + owr_video_renderer_reconfigure_element(renderer); +} + void _owr_video_renderer_notify_tag_changed(OwrVideoRenderer *video_renderer, const gchar *tag, gboolean have_handle, guintptr new_handle) { OwrVideoRendererPrivate *priv; @@ -521,6 +635,6 @@ void _owr_video_renderer_notify_tag_changed(OwrVideoRenderer *video_renderer, co _owr_media_renderer_set_sink(OWR_MEDIA_RENDERER(video_renderer), NULL); if (have_handle) { _owr_media_renderer_set_sink(OWR_MEDIA_RENDERER(video_renderer), - owr_video_renderer_get_element(OWR_MEDIA_RENDERER(video_renderer), new_handle)); + owr_video_renderer_get_element_with_window_handle(OWR_MEDIA_RENDERER(video_renderer), new_handle)); } } diff --git a/owr/owr.c b/owr/owr.c index 406e78c5..73ec0f9f 100644 --- a/owr/owr.c +++ b/owr/owr.c @@ -198,6 +198,8 @@ static void gst_log_android_handler(GstDebugCategory *category, */ void owr_init(GMainContext *main_context) { + static GOnce g_once = G_ONCE_INIT; + g_return_if_fail(!owr_initialized); #ifdef __ANDROID__ @@ -320,6 +322,8 @@ void owr_init(GMainContext *main_context) owr_main_context = g_main_context_ref_thread_default(); else g_main_context_ref(owr_main_context); + + g_once(&g_once, _owr_detect_codecs, NULL); } static gboolean owr_running_callback(GAsyncQueue *msg_queue) diff --git a/owr/owr_media_source.c b/owr/owr_media_source.c index d7a59d95..8efb3dc1 100644 --- a/owr/owr_media_source.c +++ b/owr/owr_media_source.c @@ -121,6 +121,8 @@ struct _OwrMediaSourcePrivate { GstElement 
*source_bin; /* Tee element from which we can tap the source for multiple consumers */ GstElement *source_tee; + + OwrMediaSourceSupportedInterfaces supported_interfaces; }; static void owr_media_source_set_property(GObject *object, guint property_id, @@ -197,6 +199,7 @@ static void owr_media_source_init(OwrMediaSource *source) priv->source_bin = NULL; priv->source_tee = NULL; + priv->supported_interfaces = OWR_MEDIA_SOURCE_SUPPORTS_NONE; g_mutex_init(&source->lock); } @@ -263,7 +266,7 @@ static GstElement *owr_media_source_request_source_default(OwrMediaSource *media { OwrMediaType media_type; GstElement *source_pipeline, *tee; - GstElement *source_bin, *source = NULL, *queue_pre, *queue_post; + GstElement *source_bin, *source = NULL, *queue_pre = NULL, *queue_post = NULL; GstElement *capsfilter; GstElement *sink, *sink_queue, *sink_bin; GstPad *bin_pad = NULL, *srcpad, *sinkpad; @@ -283,9 +286,6 @@ static GstElement *owr_media_source_request_source_default(OwrMediaSource *media source_bin = gst_bin_new(bin_name); g_free(bin_name); - CREATE_ELEMENT_WITH_ID(queue_pre, "queue", "source-queue", source_id); - CREATE_ELEMENT_WITH_ID(capsfilter, "capsfilter", "source-output-capsfilter", source_id); - CREATE_ELEMENT_WITH_ID(queue_post, "queue", "source-output-queue", source_id); CREATE_ELEMENT_WITH_ID(sink_queue, "queue", "sink-queue", source_id); @@ -295,8 +295,12 @@ static GstElement *owr_media_source_request_source_default(OwrMediaSource *media { GstElement *audioresample, *audioconvert; + CREATE_ELEMENT_WITH_ID(capsfilter, "capsfilter", "source-output-capsfilter", source_id); g_object_set(capsfilter, "caps", caps, NULL); + CREATE_ELEMENT_WITH_ID(queue_pre, "queue", "source-queue", source_id); + CREATE_ELEMENT_WITH_ID(queue_post, "queue", "source-output-queue", source_id); + CREATE_ELEMENT_WITH_ID(audioresample, "audioresample", "source-audio-resample", source_id); CREATE_ELEMENT_WITH_ID(audioconvert, "audioconvert", "source-audio-convert", source_id); @@ -311,65 
+315,71 @@ static GstElement *owr_media_source_request_source_default(OwrMediaSource *media } case OWR_MEDIA_TYPE_VIDEO: { - GstElement *videorate = NULL, *videoscale = NULL, *videoconvert; - GstStructure *s; - GstCapsFeatures *features; - - s = gst_caps_get_structure(caps, 0); - if (gst_structure_has_field(s, "framerate")) { - gint fps_n = 0, fps_d = 0; + if (_owr_codec_type_is_raw(_owr_media_source_get_codec(media_source))) { + GstElement *videorate = NULL, *videoscale = NULL, *videoconvert; + GstStructure *s; + GstCapsFeatures *features; - gst_structure_get_fraction(s, "framerate", &fps_n, &fps_d); - g_assert(fps_d); + CREATE_ELEMENT_WITH_ID(queue_pre, "queue", "source-queue", source_id); + CREATE_ELEMENT_WITH_ID(queue_post, "queue", "source-output-queue", source_id); - CREATE_ELEMENT_WITH_ID(videorate, "videorate", "source-video-rate", source_id); - g_object_set(videorate, "drop-only", TRUE, "max-rate", fps_n / fps_d, NULL); + s = gst_caps_get_structure(caps, 0); + if (gst_structure_has_field(s, "framerate")) { + gint fps_n = 0, fps_d = 0; - gst_structure_remove_field(s, "framerate"); - gst_bin_add(GST_BIN(source_bin), videorate); - } - - g_object_set(capsfilter, "caps", caps, NULL); - features = gst_caps_get_features(caps, 0); - if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) { - GstElement *glupload; + gst_structure_get_fraction(s, "framerate", &fps_n, &fps_d); + g_assert(fps_d); - CREATE_ELEMENT_WITH_ID(glupload, "glupload", "source-glupload", source_id); - CREATE_ELEMENT_WITH_ID(videoscale, "gleffects_identity", "source-glcolorscale", source_id); - CREATE_ELEMENT_WITH_ID(videoconvert, "glcolorconvert", "source-glcolorconvert", source_id); + CREATE_ELEMENT_WITH_ID(videorate, "videorate", "source-video-rate", source_id); + g_object_set(videorate, "drop-only", TRUE, "max-rate", fps_n / fps_d, NULL); - gst_bin_add_many(GST_BIN(source_bin), - queue_pre, glupload, videoconvert, videoscale, queue_post, NULL); - - if (videorate) { - 
LINK_ELEMENTS(queue_pre, videorate); - LINK_ELEMENTS(videorate, glupload); - } else { - LINK_ELEMENTS(queue_pre, glupload); + gst_structure_remove_field(s, "framerate"); + gst_bin_add(GST_BIN(source_bin), videorate); } - LINK_ELEMENTS(glupload, videoconvert); - LINK_ELEMENTS(videoconvert, videoscale); - LINK_ELEMENTS(videoscale, queue_post); - } else { - GstElement *gldownload; - - CREATE_ELEMENT_WITH_ID(gldownload, "gldownload", "source-gldownload", source_id); - CREATE_ELEMENT_WITH_ID(videoscale, "videoscale", "source-video-scale", source_id); - CREATE_ELEMENT_WITH_ID(videoconvert, VIDEO_CONVERT, "source-video-convert", source_id); - gst_bin_add_many(GST_BIN(source_bin), - queue_pre, gldownload, videoscale, videoconvert, capsfilter, queue_post, NULL); - if (videorate) { - LINK_ELEMENTS(queue_pre, videorate); - LINK_ELEMENTS(videorate, gldownload); + + features = gst_caps_get_features(caps, 0); + if (gst_caps_features_contains(features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) { + GstElement *glupload; + + CREATE_ELEMENT_WITH_ID(glupload, "glupload", "source-glupload", source_id); + CREATE_ELEMENT_WITH_ID(videoscale, "gleffects_identity", "source-glcolorscale", source_id); + CREATE_ELEMENT_WITH_ID(videoconvert, "glcolorconvert", "source-glcolorconvert", source_id); + + gst_bin_add_many(GST_BIN(source_bin), + queue_pre, glupload, videoconvert, videoscale, queue_post, NULL); + + if (videorate) { + LINK_ELEMENTS(queue_pre, videorate); + LINK_ELEMENTS(videorate, glupload); + } else { + LINK_ELEMENTS(queue_pre, glupload); + } + LINK_ELEMENTS(glupload, videoconvert); + LINK_ELEMENTS(videoconvert, videoscale); + LINK_ELEMENTS(videoscale, queue_post); } else { - LINK_ELEMENTS(queue_pre, gldownload); + GstElement *gldownload; + + CREATE_ELEMENT_WITH_ID(capsfilter, "capsfilter", "source-output-capsfilter", source_id); + g_object_set(capsfilter, "caps", caps, NULL); + + CREATE_ELEMENT_WITH_ID(gldownload, "gldownload", "source-gldownload", source_id); + 
CREATE_ELEMENT_WITH_ID(videoscale, "videoscale", "source-video-scale", source_id); + CREATE_ELEMENT_WITH_ID(videoconvert, VIDEO_CONVERT, "source-video-convert", source_id); + gst_bin_add_many(GST_BIN(source_bin), + queue_pre, gldownload, videoscale, videoconvert, capsfilter, queue_post, NULL); + if (videorate) { + LINK_ELEMENTS(queue_pre, videorate); + LINK_ELEMENTS(videorate, gldownload); + } else { + LINK_ELEMENTS(queue_pre, gldownload); + } + LINK_ELEMENTS(gldownload, videoscale); + LINK_ELEMENTS(videoscale, videoconvert); + LINK_ELEMENTS(videoconvert, capsfilter); + LINK_ELEMENTS(capsfilter, queue_post); } - LINK_ELEMENTS(gldownload, videoscale); - LINK_ELEMENTS(videoscale, videoconvert); - LINK_ELEMENTS(videoconvert, capsfilter); - LINK_ELEMENTS(capsfilter, queue_post); } - break; } case OWR_MEDIA_TYPE_UNKNOWN: @@ -408,7 +418,12 @@ static GstElement *owr_media_source_request_source_default(OwrMediaSource *media LINK_ELEMENTS(tee, sink_bin); /* Start up our new bin and link it all */ - srcpad = gst_element_get_static_pad(queue_post, "src"); + gst_bin_add(GST_BIN(source_bin), source); + if (queue_post) { + srcpad = gst_element_get_static_pad(queue_post, "src"); + } else { + srcpad = gst_element_get_static_pad(source, "src"); + } g_assert(srcpad); bin_pad = gst_ghost_pad_new("src", srcpad); @@ -416,8 +431,8 @@ static GstElement *owr_media_source_request_source_default(OwrMediaSource *media gst_pad_set_active(bin_pad, TRUE); gst_element_add_pad(source_bin, bin_pad); - gst_bin_add(GST_BIN(source_bin), source); - LINK_ELEMENTS(source, queue_pre); + if (queue_pre) + LINK_ELEMENTS(source, queue_pre); done: @@ -620,6 +635,12 @@ void _owr_media_source_set_type(OwrMediaSource *media_source, OwrSourceType type g_atomic_int_set(&media_source->priv->type, type); } +OwrSourceType _owr_media_source_get_media_type(OwrMediaSource *media_source) +{ + g_return_val_if_fail(OWR_IS_MEDIA_SOURCE(media_source), OWR_MEDIA_TYPE_UNKNOWN); + return media_source->priv->media_type; +} + 
OwrCodecType _owr_media_source_get_codec(OwrMediaSource *media_source) { g_return_val_if_fail(OWR_IS_MEDIA_SOURCE(media_source), OWR_CODEC_TYPE_NONE); @@ -646,3 +667,15 @@ gchar * owr_media_source_get_dot_data(OwrMediaSource *source) return g_strdup(""); #endif } + +void _owr_media_source_set_supported_interfaces(OwrMediaSource *source, OwrMediaSourceSupportedInterfaces interfaces) +{ + g_return_if_fail(OWR_IS_MEDIA_SOURCE(source)); + source->priv->supported_interfaces = interfaces; +} + +gboolean _owr_media_source_supports_interfaces(OwrMediaSource *source, OwrMediaSourceSupportedInterfaces interfaces) +{ + g_return_val_if_fail(OWR_IS_MEDIA_SOURCE(source), FALSE); + return source->priv->supported_interfaces & interfaces; +} diff --git a/owr/owr_media_source_private.h b/owr/owr_media_source_private.h index ca9bb4ae..8aeab425 100644 --- a/owr/owr_media_source_private.h +++ b/owr/owr_media_source_private.h @@ -43,6 +43,12 @@ G_BEGIN_DECLS +typedef enum { + OWR_MEDIA_SOURCE_SUPPORTS_NONE = 0, + OWR_MEDIA_SOURCE_SUPPORTS_VIDEO_ORIENTATION = (1 << 0), + OWR_MEDIA_SOURCE_SUPPORTS_COLOR_BALANCE = (1 << 1), +} OwrMediaSourceSupportedInterfaces; + GstElement *_owr_media_source_get_source_bin(OwrMediaSource *media_source); void _owr_media_source_set_source_bin(OwrMediaSource *media_source, GstElement *bin); @@ -52,11 +58,19 @@ void _owr_media_source_set_source_tee(OwrMediaSource *media_source, GstElement * GstElement *_owr_media_source_request_source(OwrMediaSource *media_source, GstCaps *caps); void _owr_media_source_release_source(OwrMediaSource *media_source, GstElement *source); +/* FIXME: At some point we should rename this function to + * set_media_type because get_type could eventually conflict with + * GObject required function if this function becomes public. 
*/ void _owr_media_source_set_type(OwrMediaSource *source, OwrSourceType type); +OwrSourceType _owr_media_source_get_media_type(OwrMediaSource *source); void _owr_media_source_set_codec(OwrMediaSource *source, OwrCodecType codec_type); OwrCodecType _owr_media_source_get_codec(OwrMediaSource *source); +void _owr_media_source_set_supported_interfaces(OwrMediaSource *source, OwrMediaSourceSupportedInterfaces interfaces); + +gboolean _owr_media_source_supports_interfaces(OwrMediaSource *source, OwrMediaSourceSupportedInterfaces interfaces); + G_END_DECLS #endif /* __GTK_DOC_IGNORE__ */ diff --git a/owr/owr_utils.c b/owr/owr_utils.c index c84eedf1..e0228e8b 100644 --- a/owr/owr_utils.c +++ b/owr/owr_utils.c @@ -36,6 +36,25 @@ #include "owr_types.h" +/* To be extended once more codecs are supported */ +static GList *h264_decoders = NULL; +static GList *h264_encoders = NULL; +static GList *vp8_decoders = NULL; +static GList *vp8_encoders = NULL; +static GList *vp9_decoders = NULL; +static GList *vp9_encoders = NULL; + +static const gchar *OwrCodecTypeEncoderElementName[] = { NULL, "mulawenc", "alawenc", "opusenc", "openh264enc", "vp8enc", "vp9enc" }; +static const gchar *OwrCodecTypeDecoderElementName[] = { NULL, "mulawdec", "alawdec", "opusdec", "openh264dec", "vp8dec", "vp9dec" }; + +static const gchar *OwrCodecTypeParserElementName[] = { NULL, NULL, NULL, NULL, "h264parse", NULL, NULL }; + +guint _owr_get_unique_uint_id() +{ + static guint id = 0; + return g_atomic_int_add(&id, 1); +} + OwrCodecType _owr_caps_to_codec_type(GstCaps *caps) { GstStructure *structure; @@ -61,6 +80,227 @@ OwrCodecType _owr_caps_to_codec_type(GstCaps *caps) return OWR_CODEC_TYPE_NONE; } +const gchar* _owr_codec_type_to_caps_mime(OwrMediaType media_type, OwrCodecType codec_type) +{ + switch (codec_type) + { + case OWR_CODEC_TYPE_NONE: + switch (media_type) + { + case OWR_MEDIA_TYPE_AUDIO: + return "audio/x-raw"; + break; + case OWR_MEDIA_TYPE_VIDEO: + return "video/x-raw"; + break; + default: 
+ g_return_val_if_reached("audio/x-raw"); + } + break; + case OWR_CODEC_TYPE_PCMU: + return "audio/x-mulaw"; + break; + case OWR_CODEC_TYPE_PCMA: + return "audio/x-alaw"; + break; + case OWR_CODEC_TYPE_OPUS: + return "audio/x-opus"; + break; + case OWR_CODEC_TYPE_H264: + return "video/x-h264"; + break; + case OWR_CODEC_TYPE_VP8: + return "video/x-vp8"; + break; + case OWR_CODEC_TYPE_VP9: + return "video/x-vp9"; + break; + default: + break; + } + g_return_val_if_reached("audio/x-raw"); +} + +gpointer _owr_detect_codecs(gpointer data) +{ + GList *decoder_factories; + GList *encoder_factories; + GstCaps *caps; + + OWR_UNUSED(data); + + decoder_factories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | + GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, + GST_RANK_MARGINAL); + encoder_factories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_ENCODER | + GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, + GST_RANK_MARGINAL); + + caps = gst_caps_new_empty_simple("video/x-h264"); + h264_decoders = gst_element_factory_list_filter(decoder_factories, caps, GST_PAD_SINK, FALSE); + h264_encoders = gst_element_factory_list_filter(encoder_factories, caps, GST_PAD_SRC, FALSE); + gst_caps_unref(caps); + + caps = gst_caps_new_empty_simple("video/x-vp8"); + vp8_decoders = gst_element_factory_list_filter(decoder_factories, caps, GST_PAD_SINK, FALSE); + vp8_encoders = gst_element_factory_list_filter(encoder_factories, caps, GST_PAD_SRC, FALSE); + gst_caps_unref(caps); + + caps = gst_caps_new_empty_simple("video/x-vp9"); + vp9_decoders = gst_element_factory_list_filter(decoder_factories, caps, GST_PAD_SINK, FALSE); + vp9_encoders = gst_element_factory_list_filter(encoder_factories, caps, GST_PAD_SRC, FALSE); + gst_caps_unref(caps); + + gst_plugin_feature_list_free(decoder_factories); + gst_plugin_feature_list_free(encoder_factories); + + h264_decoders = g_list_sort(h264_decoders, gst_plugin_feature_rank_compare_func); + h264_encoders = g_list_sort(h264_encoders, 
gst_plugin_feature_rank_compare_func); + vp8_decoders = g_list_sort(vp8_decoders, gst_plugin_feature_rank_compare_func); + vp8_encoders = g_list_sort(vp8_encoders, gst_plugin_feature_rank_compare_func); + vp9_decoders = g_list_sort(vp9_decoders, gst_plugin_feature_rank_compare_func); + vp9_encoders = g_list_sort(vp9_encoders, gst_plugin_feature_rank_compare_func); + + return NULL; +} + +const GList *_owr_get_detected_h264_encoders() +{ + return h264_encoders; +} + +const GList *_owr_get_detected_vp8_encoders() +{ + return vp8_encoders; +} + +const GList *_owr_get_detected_vp9_encoders() +{ + return vp9_encoders; +} + +GstElement *_owr_try_codecs(const GList *codecs, const gchar *name_prefix) +{ + GList *l; + gchar *element_name; + + for (l = (GList*) codecs; l; l = l->next) { + GstElementFactory *f = l->data; + GstElement *e; + + element_name = g_strdup_printf("%s_%s_%u", name_prefix, + gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(f)), + _owr_get_unique_uint_id()); + + e = gst_element_factory_create(f, element_name); + g_free(element_name); + + if (!e) + continue; + + /* Try setting to READY. 
If this fails the codec does not work, for + * example because the hardware codec is currently busy + */ + if (gst_element_set_state(e, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) { + gst_element_set_state(e, GST_STATE_NULL); + gst_object_unref(e); + continue; + } + + return e; + } + + return NULL; +} + +GstElement * _owr_create_decoder(OwrCodecType codec_type) +{ + GstElement * decoder = NULL; + gchar *element_name = NULL; + + switch (codec_type) { + case OWR_CODEC_TYPE_H264: + decoder = _owr_try_codecs(h264_decoders, "decoder"); + g_return_val_if_fail(decoder, NULL); + break; + case OWR_CODEC_TYPE_VP8: + decoder = _owr_try_codecs(vp8_decoders, "decoder"); + g_return_val_if_fail(decoder, NULL); + break; + case OWR_CODEC_TYPE_VP9: + decoder = _owr_try_codecs(vp9_decoders, "decoder"); + g_return_val_if_fail(decoder, NULL); + break; + default: + element_name = g_strdup_printf("decoder_%s_%u", OwrCodecTypeDecoderElementName[codec_type], _owr_get_unique_uint_id()); + decoder = gst_element_factory_make(OwrCodecTypeDecoderElementName[codec_type], element_name); + g_free(element_name); + g_return_val_if_fail(decoder, NULL); + break; + } + + return decoder; +} + +GstElement * _owr_create_parser(OwrCodecType codec_type) +{ + GstElement * parser = NULL; + gchar *element_name = NULL; + + if (!OwrCodecTypeParserElementName[codec_type]) + return NULL; + + element_name = g_strdup_printf("parser_%s_%u", OwrCodecTypeParserElementName[codec_type], _owr_get_unique_uint_id()); + parser = gst_element_factory_make(OwrCodecTypeParserElementName[codec_type], element_name); + g_free(element_name); + + switch (codec_type) { + case OWR_CODEC_TYPE_H264: + g_object_set(parser, "disable-passthrough", TRUE, NULL); + break; + default: + break; + } + return parser; +} + +const gchar* _owr_get_encoder_name(OwrCodecType codec_type) +{ + return OwrCodecTypeEncoderElementName[codec_type]; +} + +void _owr_bin_link_and_sync_elements(GstBin *bin, gboolean *out_link_ok, gboolean *out_sync_ok, 
GstElement **out_first, GstElement **out_last) +{ + GList *bin_elements, *current; + gboolean link_ok = TRUE, sync_ok = TRUE; + + g_assert(bin); + + bin_elements = g_list_last(bin->children); + g_assert(bin_elements); + for (current = bin_elements; current && current->prev && link_ok && sync_ok; current = g_list_previous(current)) { + if (out_link_ok) + link_ok &= gst_element_link(current->data, current->prev->data); + if (out_sync_ok) + sync_ok &= gst_element_sync_state_with_parent(current->data); + } + if (out_sync_ok && link_ok && sync_ok && current && !current->prev) + sync_ok &= gst_element_sync_state_with_parent(current->data); + + if (out_link_ok) + *out_link_ok = link_ok; + + if (out_sync_ok) + *out_sync_ok = sync_ok; + + if (link_ok && sync_ok) { + if (out_first) + *out_first = bin_elements->data; + if (out_last) + *out_last = current->data; + } +} + typedef struct { GClosure *callback; GList *list; @@ -222,6 +462,19 @@ int _owr_rotation_and_mirror_to_video_flip_method(guint rotation, gboolean mirro } } +void _owr_update_flip_method(GObject *source, GParamSpec *pspec, GstElement *flip) +{ + guint rotation = 0; + gboolean mirror = FALSE; + gint flip_method; + + g_assert(GST_IS_ELEMENT(flip)); + + g_object_get(source, "rotation", &rotation, "mirror", &mirror, NULL); + flip_method = _owr_rotation_and_mirror_to_video_flip_method(rotation, mirror); + g_object_set(flip, pspec ? 
pspec->name : "method", flip_method, NULL); +} + static void value_slice_free(gpointer value) { g_value_unset(value); diff --git a/owr/owr_utils.h b/owr/owr_utils.h index 85e2754e..ae1d9109 100644 --- a/owr/owr_utils.h +++ b/owr/owr_utils.h @@ -39,8 +39,21 @@ G_BEGIN_DECLS #define OWR_UNUSED(x) (void)x +#define _owr_codec_type_is_raw(codec_type) (codec_type == OWR_CODEC_TYPE_NONE) + void *_owr_require_symbols(void); +guint _owr_get_unique_uint_id(); OwrCodecType _owr_caps_to_codec_type(GstCaps *caps); +const gchar* _owr_codec_type_to_caps_mime(OwrMediaType media_type, OwrCodecType codec_type); +gpointer _owr_detect_codecs(gpointer data); +const GList *_owr_get_detected_h264_encoders(); +const GList *_owr_get_detected_vp8_encoders(); +const GList *_owr_get_detected_vp9_encoders(); +GstElement *_owr_try_codecs(const GList *codecs, const gchar *name_prefix); +GstElement *_owr_create_decoder(OwrCodecType codec_type); +GstElement *_owr_create_parser(OwrCodecType codec_type); +const gchar* _owr_get_encoder_name(OwrCodecType codec_type); +void _owr_bin_link_and_sync_elements(GstBin *bin, gboolean *out_link_ok, gboolean *out_sync_ok, GstElement **out_first, GstElement **out_last); void _owr_utils_call_closure_with_list(GClosure *callback, GList *list); GClosure *_owr_utils_list_closure_merger_new(GClosure *final_callback, GCopyFunc list_item_copy, @@ -59,6 +72,7 @@ gboolean _owr_gst_caps_foreach(const GstCaps *caps, OwrGstCapsForeachFunc func, void _owr_deep_notify(GObject *object, GstObject *orig, GParamSpec *pspec, gpointer user_data); +void _owr_update_flip_method(GObject *renderer, GParamSpec *pspec, GstElement *flip); int _owr_rotation_and_mirror_to_video_flip_method(guint rotation, gboolean mirror); GHashTable *_owr_value_table_new(); diff --git a/tests/Makefile.am b/tests/Makefile.am index 28ac557f..cd2cc590 100644 --- a/tests/Makefile.am +++ b/tests/Makefile.am @@ -20,10 +20,9 @@ bin_PROGRAMS = \ test-send-receive \ test-data-channel \ test-init \ - test-bus \ 
test-uri \ - test-client \ - test-crypto-utils + test-crypto-utils \ + test-bus if OWR_GST AM_CPPFLAGS += \ @@ -45,6 +44,27 @@ test_gst_io_LDADD = \ $(top_builddir)/gst/libopenwebrtc_gst.la endif +if HAVE_JSON_GLIB +bin_PROGRAMS += \ + test-client + +test_client_SOURCES = test_client.c + +test_client_CFLAGS = \ + $(AM_CFLAGS) \ + $(JSON_GLIB_CFLAGS) \ + $(LIBSOUP_CFLAGS) \ + -I$(top_srcdir)/local \ + -I$(top_srcdir)/transport \ + -I$(top_srcdir)/owr + +test_client_LDADD = \ + $(JSON_GLIB_LIBS) \ + $(LIBSOUP_LIBS) \ + $(GLIB_LIBS) \ + $(top_builddir)/owr/libopenwebrtc.la +endif + list_devices_SOURCES = list_devices.c list_devices_CFLAGS = \ @@ -113,22 +133,6 @@ test_bus_LDADD = \ $(GLIB_LIBS) \ $(top_builddir)/owr/libopenwebrtc.la -test_client_SOURCES = test_client.c - -test_client_CFLAGS = \ - $(AM_CFLAGS) \ - $(JSON_GLIB_CFLAGS) \ - $(LIBSOUP_CFLAGS) \ - -I$(top_srcdir)/local \ - -I$(top_srcdir)/transport \ - -I$(top_srcdir)/owr - -test_client_LDADD = \ - $(JSON_GLIB_LIBS) \ - $(LIBSOUP_LIBS) \ - $(GLIB_LIBS) \ - $(top_builddir)/owr/libopenwebrtc.la - test_uri_SOURCES = test_uri.c test_utils.c test_uri_CFLAGS = \ diff --git a/tests/test_self_view.c b/tests/test_self_view.c index b83b7747..8d53f4b7 100644 --- a/tests/test_self_view.c +++ b/tests/test_self_view.c @@ -49,7 +49,12 @@ static OwrMediaRenderer *audio_renderer = NULL, *video_renderer = NULL; gboolean dump_pipeline(gpointer user_data) { - g_print("Dumping pipelines\n"); + if (!g_getenv("OWR_DEBUG_DUMP_DOT_DIR")) { + g_print("Not dumping pipelines because OWR_DEBUG_DUMP_DOT_DIR environment variable is empty.\n"); + return FALSE; + } + + g_print("Dumping pipelines..."); if (audio_source) write_dot_file("test_self_view-audio_source", owr_media_source_get_dot_data(audio_source), FALSE); @@ -61,6 +66,8 @@ gboolean dump_pipeline(gpointer user_data) if (video_renderer) write_dot_file("test_self_view-video_renderer", owr_media_renderer_get_dot_data(video_renderer), FALSE); + g_print(" done.\n"); + return FALSE; } 
diff --git a/tests/test_send_receive.c b/tests/test_send_receive.c index e14bd2cd..f4bb44ea 100644 --- a/tests/test_send_receive.c +++ b/tests/test_send_receive.c @@ -236,7 +236,7 @@ static void got_sources(GList *sources, gpointer user_data) owr_bus_add_message_origin(bus, OWR_MESSAGE_ORIGIN(source)); - payload = owr_video_payload_new(OWR_CODEC_TYPE_VP8, 103, 90000, TRUE, FALSE); + payload = owr_video_payload_new(OWR_CODEC_TYPE_H264, 103, 90000, TRUE, FALSE); g_object_set(payload, "width", 640, "height", 480, "framerate", 30.0, NULL); g_object_set(payload, "rtx-payload-type", 123, NULL); if (adaptation) diff --git a/transport/owr_payload.c b/transport/owr_payload.c index 1b2f1436..a43ce3bb 100644 --- a/transport/owr_payload.c +++ b/transport/owr_payload.c @@ -92,8 +92,6 @@ enum { static GParamSpec *obj_properties[N_PROPERTIES] = {NULL, }; -static guint get_unique_id(); - static void owr_payload_set_property(GObject *object, guint property_id, const GValue *value, GParamSpec *pspec) { @@ -183,58 +181,6 @@ static void owr_payload_get_property(GObject *object, guint property_id, GValue } } -/* To be extended once more codecs are supported */ -static GList *h264_decoders = NULL; -static GList *h264_encoders = NULL; -static GList *vp8_decoders = NULL; -static GList *vp8_encoders = NULL; -static GList *vp9_decoders = NULL; -static GList *vp9_encoders = NULL; - -static gpointer owr_payload_detect_codecs(gpointer data) -{ - GList *decoder_factories; - GList *encoder_factories; - GstCaps *caps; - - OWR_UNUSED(data); - - decoder_factories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | - GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, - GST_RANK_MARGINAL); - encoder_factories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_ENCODER | - GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, - GST_RANK_MARGINAL); - - caps = gst_caps_new_empty_simple("video/x-h264"); - h264_decoders = gst_element_factory_list_filter(decoder_factories, caps, GST_PAD_SINK, 
FALSE); - h264_encoders = gst_element_factory_list_filter(encoder_factories, caps, GST_PAD_SRC, FALSE); - gst_caps_unref(caps); - - caps = gst_caps_new_empty_simple("video/x-vp8"); - vp8_decoders = gst_element_factory_list_filter(decoder_factories, caps, GST_PAD_SINK, FALSE); - vp8_encoders = gst_element_factory_list_filter(encoder_factories, caps, GST_PAD_SRC, FALSE); - gst_caps_unref(caps); - - caps = gst_caps_new_empty_simple("video/x-vp9"); - vp9_decoders = gst_element_factory_list_filter(decoder_factories, caps, GST_PAD_SINK, FALSE); - vp9_encoders = gst_element_factory_list_filter(encoder_factories, caps, GST_PAD_SRC, FALSE); - gst_caps_unref(caps); - - gst_plugin_feature_list_free(decoder_factories); - gst_plugin_feature_list_free(encoder_factories); - - h264_decoders = g_list_sort(h264_decoders, gst_plugin_feature_rank_compare_func); - h264_encoders = g_list_sort(h264_encoders, gst_plugin_feature_rank_compare_func); - vp8_decoders = g_list_sort(vp8_decoders, gst_plugin_feature_rank_compare_func); - vp8_encoders = g_list_sort(vp8_encoders, gst_plugin_feature_rank_compare_func); - vp9_decoders = g_list_sort(vp9_decoders, gst_plugin_feature_rank_compare_func); - vp9_encoders = g_list_sort(vp9_encoders, gst_plugin_feature_rank_compare_func); - - return NULL; -} - - static void owr_payload_class_init(OwrPayloadClass *klass) { GObjectClass *gobject_class = G_OBJECT_CLASS(klass); @@ -293,10 +239,6 @@ static void owr_payload_class_init(OwrPayloadClass *klass) static void owr_payload_init(OwrPayload *payload) { - static GOnce g_once = G_ONCE_INIT; - - g_once(&g_once, owr_payload_detect_codecs, NULL); - payload->priv = OWR_PAYLOAD_GET_PRIVATE(payload); payload->priv->mtu = DEFAULT_MTU; payload->priv->bitrate = DEFAULT_BITRATE; @@ -309,11 +251,8 @@ static void owr_payload_init(OwrPayload *payload) /* Private methods */ -static const gchar *OwrCodecTypeEncoderElementName[] = {"none", "mulawenc", "alawenc", "opusenc", "openh264enc", "vp8enc", "vp9enc"}; -static const 
gchar *OwrCodecTypeDecoderElementName[] = {"none", "mulawdec", "alawdec", "opusdec", "openh264dec", "vp8dec", "vp9dec"}; -static const gchar *OwrCodecTypeParserElementName[] = {"none", "none", "none", "none", "h264parse", "none", "none"}; -static const gchar *OwrCodecTypePayElementName[] = {"none", "rtppcmupay", "rtppcmapay", "rtpopuspay", "rtph264pay", "rtpvp8pay", "rtpvp9pay"}; -static const gchar *OwrCodecTypeDepayElementName[] = {"none", "rtppcmudepay", "rtppcmadepay", "rtpopusdepay", "rtph264depay", "rtpvp8depay", "rtpvp9depay"}; +static const gchar *OwrCodecTypePayElementName[] = { NULL, "rtppcmupay", "rtppcmapay", "rtpopuspay", "rtph264pay", "rtpvp8pay", "rtpvp9pay" }; +static const gchar *OwrCodecTypeDepayElementName[] = { NULL, "rtppcmudepay", "rtppcmadepay", "rtpopusdepay", "rtph264depay", "rtpvp8depay", "rtpvp9depay" }; static guint evaluate_bitrate_from_payload(OwrPayload *payload) { @@ -336,40 +275,6 @@ static guint evaluate_bitrate_from_payload(OwrPayload *payload) return bitrate; } -static GstElement * try_codecs(GList *codecs, const gchar *name_prefix) -{ - GList *l; - gchar *element_name; - - for (l = codecs; l; l = l->next) { - GstElementFactory *f = l->data; - GstElement *e; - - element_name = g_strdup_printf("%s_%s_%u", name_prefix, - gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(f)), - get_unique_id()); - - e = gst_element_factory_create(f, element_name); - g_free(element_name); - - if (!e) - continue; - - /* Try setting to READY. 
If this fails the codec does not work, for - * example because the hardware codec is currently busy - */ - if (gst_element_set_state(e, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) { - gst_element_set_state(e, GST_STATE_NULL); - gst_object_unref(e); - continue; - } - - return e; - } - - return NULL; -} - static gboolean binding_transform_to_kbps(GBinding *binding, const GValue *from_value, GValue *to_value, gpointer user_data) { guint bitrate; @@ -395,7 +300,7 @@ GstElement * _owr_payload_create_encoder(OwrPayload *payload) switch (payload->priv->codec_type) { case OWR_CODEC_TYPE_H264: - encoder = try_codecs(h264_encoders, "encoder"); + encoder = _owr_try_codecs(_owr_get_detected_h264_encoders(), "encoder"); g_return_val_if_fail(encoder, NULL); factory = gst_element_get_factory(encoder); @@ -424,7 +329,7 @@ GstElement * _owr_payload_create_encoder(OwrPayload *payload) #endif "max-keyframe-interval", G_MAXINT, NULL); - } else { + } else if (strcmp(factory_name, "omxh264enc")) { /* Assume bits/s instead of kbit/s */ g_object_bind_property(payload, "bitrate", encoder, "bitrate", G_BINDING_SYNC_CREATE); } @@ -432,7 +337,7 @@ GstElement * _owr_payload_create_encoder(OwrPayload *payload) break; case OWR_CODEC_TYPE_VP8: - encoder = try_codecs(vp8_encoders, "encoder"); + encoder = _owr_try_codecs(_owr_get_detected_vp8_encoders(), "encoder"); g_return_val_if_fail(encoder, NULL); #if (defined(__APPLE__) && TARGET_OS_IPHONE && !TARGET_IPHONE_SIMULATOR) || defined(__ANDROID__) @@ -460,7 +365,7 @@ GstElement * _owr_payload_create_encoder(OwrPayload *payload) g_object_set(payload, "bitrate", evaluate_bitrate_from_payload(payload), NULL); break; case OWR_CODEC_TYPE_VP9: - encoder = try_codecs(vp9_encoders, "encoder"); + encoder = _owr_try_codecs(_owr_get_detected_vp9_encoders(), "encoder"); g_return_val_if_fail(encoder, NULL); /* values are inspired by webrtc.org values in vp9_impl.cc */ g_object_set(encoder, @@ -484,8 +389,8 @@ GstElement * 
_owr_payload_create_encoder(OwrPayload *payload) g_object_set(payload, "bitrate", evaluate_bitrate_from_payload(payload), NULL); break; default: - element_name = g_strdup_printf("encoder_%s_%u", OwrCodecTypeEncoderElementName[payload->priv->codec_type], get_unique_id()); - encoder = gst_element_factory_make(OwrCodecTypeEncoderElementName[payload->priv->codec_type], element_name); + element_name = g_strdup_printf("encoder_%s_%u", _owr_get_encoder_name(payload->priv->codec_type), _owr_get_unique_uint_id()); + encoder = gst_element_factory_make(_owr_get_encoder_name(payload->priv->codec_type), element_name); g_free(element_name); g_return_val_if_fail(encoder, NULL); break; @@ -494,61 +399,6 @@ GstElement * _owr_payload_create_encoder(OwrPayload *payload) return encoder; } -GstElement * _owr_payload_create_decoder(OwrPayload *payload) -{ - GstElement * decoder = NULL; - gchar *element_name = NULL; - - g_return_val_if_fail(payload, NULL); - - switch (payload->priv->codec_type) { - case OWR_CODEC_TYPE_H264: - decoder = try_codecs(h264_decoders, "decoder"); - g_return_val_if_fail(decoder, NULL); - break; - case OWR_CODEC_TYPE_VP8: - decoder = try_codecs(vp8_decoders, "decoder"); - g_return_val_if_fail(decoder, NULL); - break; - case OWR_CODEC_TYPE_VP9: - decoder = try_codecs(vp9_decoders, "decoder"); - g_return_val_if_fail(decoder, NULL); - break; - default: - element_name = g_strdup_printf("decoder_%s_%u", OwrCodecTypeDecoderElementName[payload->priv->codec_type], get_unique_id()); - decoder = gst_element_factory_make(OwrCodecTypeDecoderElementName[payload->priv->codec_type], element_name); - g_free(element_name); - g_return_val_if_fail(decoder, NULL); - break; - } - - return decoder; -} - -GstElement * _owr_payload_create_parser(OwrPayload *payload) -{ - GstElement * parser = NULL; - gchar *element_name = NULL; - - g_return_val_if_fail(payload, NULL); - - if (!g_strcmp0(OwrCodecTypeParserElementName[payload->priv->codec_type], "none")) - return NULL; - - element_name = 
g_strdup_printf("parser_%s_%u", OwrCodecTypeParserElementName[payload->priv->codec_type], get_unique_id()); - parser = gst_element_factory_make(OwrCodecTypeParserElementName[payload->priv->codec_type], element_name); - g_free(element_name); - - switch (payload->priv->codec_type) { - case OWR_CODEC_TYPE_H264: - g_object_set(parser, "disable-passthrough", TRUE, NULL); - break; - default: - break; - } - return parser; -} - GstElement * _owr_payload_create_payload_packetizer(OwrPayload *payload) { GstElement * pay = NULL; @@ -557,7 +407,7 @@ GstElement * _owr_payload_create_payload_packetizer(OwrPayload *payload) g_return_val_if_fail(payload, NULL); - element_name = g_strdup_printf("pay_%s_%u", OwrCodecTypePayElementName[payload->priv->codec_type], get_unique_id()); + element_name = g_strdup_printf("pay_%s_%u", OwrCodecTypePayElementName[payload->priv->codec_type], _owr_get_unique_uint_id()); pay = gst_element_factory_make(OwrCodecTypePayElementName[payload->priv->codec_type], element_name); g_free(element_name); @@ -593,7 +443,7 @@ GstElement * _owr_payload_create_payload_depacketizer(OwrPayload *payload) g_return_val_if_fail(payload, NULL); - element_name = g_strdup_printf("depay_%s_%u", OwrCodecTypeDepayElementName[payload->priv->codec_type], get_unique_id()); + element_name = g_strdup_printf("depay_%s_%u", OwrCodecTypeDepayElementName[payload->priv->codec_type], _owr_get_unique_uint_id()); depay = gst_element_factory_make(OwrCodecTypeDepayElementName[payload->priv->codec_type], element_name); g_free(element_name); @@ -609,6 +459,14 @@ OwrMediaType _owr_payload_get_media_type(OwrPayload *payload) return media_type; } +OwrCodecType _owr_payload_get_codec_type(OwrPayload *payload) +{ + OwrCodecType codec_type = OWR_CODEC_TYPE_NONE; + + g_object_get(payload, "codec-type", &codec_type, NULL); + return codec_type; +} + GstCaps * _owr_payload_create_rtp_caps(OwrPayload *payload) { @@ -769,12 +627,40 @@ GstCaps * _owr_payload_create_encoded_caps(OwrPayload *payload) return 
caps; } +gboolean owr_payload_supported(OwrCodecType codec_type) +{ + gboolean supported = FALSE; + GstElement* encoder = NULL; + GstElement* decoder = _owr_create_decoder(codec_type); + + switch (codec_type) { + case OWR_CODEC_TYPE_H264: + encoder = _owr_try_codecs(_owr_get_detected_h264_encoders(), NULL); + break; + case OWR_CODEC_TYPE_VP8: + encoder = _owr_try_codecs(_owr_get_detected_vp8_encoders(), NULL); + break; + case OWR_CODEC_TYPE_VP9: + encoder = _owr_try_codecs(_owr_get_detected_vp9_encoders(), NULL); + break; + default: + encoder = gst_element_factory_make(_owr_get_encoder_name(codec_type), NULL); + } + + supported = encoder && decoder; + + if (encoder) { + gst_element_set_state(encoder, GST_STATE_NULL); + gst_object_unref(encoder); + } + + if (decoder) { + gst_element_set_state(decoder, GST_STATE_NULL); + gst_object_unref(decoder); + } + + return supported; +} /* local functions */ - -static guint get_unique_id() -{ - static guint id = 0; - return g_atomic_int_add(&id, 1); -} diff --git a/transport/owr_payload.h b/transport/owr_payload.h index 6de7d524..66d64090 100644 --- a/transport/owr_payload.h +++ b/transport/owr_payload.h @@ -65,6 +65,8 @@ struct _OwrPayloadClass { GType owr_payload_get_type(void) G_GNUC_CONST; +gboolean owr_payload_supported(OwrCodecType codec_type); + G_END_DECLS #endif /* __OWR_PAYLOAD_H__ */ diff --git a/transport/owr_payload_private.h b/transport/owr_payload_private.h index 8f9ec579..b1deb25e 100644 --- a/transport/owr_payload_private.h +++ b/transport/owr_payload_private.h @@ -38,11 +38,10 @@ G_BEGIN_DECLS /*< private >*/ GstElement * _owr_payload_create_encoder(OwrPayload *payload); -GstElement * _owr_payload_create_decoder(OwrPayload *payload); -GstElement * _owr_payload_create_parser(OwrPayload *payload); GstElement * _owr_payload_create_payload_packetizer(OwrPayload *payload); GstElement * _owr_payload_create_payload_depacketizer(OwrPayload *payload); OwrMediaType _owr_payload_get_media_type(OwrPayload *payload); 
+OwrCodecType _owr_payload_get_codec_type(OwrPayload *payload); GstCaps * _owr_payload_create_rtp_caps(OwrPayload *payload); GstCaps * _owr_payload_create_raw_caps(OwrPayload *payload); GstCaps * _owr_payload_create_encoded_caps(OwrPayload *payload); diff --git a/transport/owr_remote_media_source.c b/transport/owr_remote_media_source.c index 8ed45b20..e913a1c1 100644 --- a/transport/owr_remote_media_source.c +++ b/transport/owr_remote_media_source.c @@ -79,7 +79,9 @@ static void on_caps(GstElement *source, GParamSpec *pspec, OwrMediaSource *media if (GST_IS_CAPS(caps)) { GST_INFO_OBJECT(source, "%s - configured with caps: %" GST_PTR_FORMAT, media_source_name, caps); + gst_caps_unref(caps); } + g_free(media_source_name); } #define LINK_ELEMENTS(a, b) \ diff --git a/transport/owr_transport_agent.c b/transport/owr_transport_agent.c index ee1743be..6324cab0 100644 --- a/transport/owr_transport_agent.c +++ b/transport/owr_transport_agent.c @@ -64,6 +64,7 @@ #include #include #include +#include #include #include #include @@ -823,7 +824,7 @@ static gboolean link_source_to_transport_bin(GstPad *srcpad, GstElement *pipelin if (media_type == OWR_MEDIA_TYPE_VIDEO) g_snprintf(name, OWR_OBJECT_NAME_LENGTH_MAX, "video_sink_%u_%u", codec_type, stream_id); else if (media_type == OWR_MEDIA_TYPE_AUDIO) - g_snprintf(name, OWR_OBJECT_NAME_LENGTH_MAX, "audio_raw_sink_%u", stream_id); + g_snprintf(name, OWR_OBJECT_NAME_LENGTH_MAX, "audio_sink_%u_%u", codec_type, stream_id); sinkpad = gst_element_get_static_pad(transport_bin, name); ret = gst_pad_link(srcpad, sinkpad) == GST_PAD_LINK_OK; @@ -861,10 +862,7 @@ static void handle_new_send_source(OwrTransportAgent *transport_agent, return; } - /* FIXME - communicate what codec types are supported by the source - * and if one is reusable, use it, else raw? 
g_object_get(send_payload, "codec-type", &codec_type, NULL); - */ caps = _owr_payload_create_raw_caps(send_payload); src = _owr_media_source_request_source(send_source, caps); @@ -963,6 +961,8 @@ static void remove_existing_send_source_and_payload(OwrTransportAgent *transport OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN; GHashTable *event_data; GValue *value; + OwrPayload *send_payload; + OwrCodecType codec_type = OWR_CODEC_TYPE_NONE; g_assert(media_source); @@ -970,6 +970,12 @@ static void remove_existing_send_source_and_payload(OwrTransportAgent *transport value = _owr_value_table_add(event_data, "start_time", G_TYPE_INT64); g_value_set_int64(value, g_get_monotonic_time()); + send_payload = _owr_media_session_get_send_payload(media_session); + if (send_payload) { + g_object_get(send_payload, "codec-type", &codec_type, NULL); + g_object_unref(send_payload); + } + /* Setting a new, different source but have one already */ stream_id = _owr_session_get_stream_id(OWR_SESSION(media_session)); @@ -979,9 +985,9 @@ static void remove_existing_send_source_and_payload(OwrTransportAgent *transport g_object_get(media_source, "media-type", &media_type, NULL); g_warn_if_fail(media_type != OWR_MEDIA_TYPE_UNKNOWN); if (media_type == OWR_MEDIA_TYPE_VIDEO) - pad_name = g_strdup_printf("video_sink_%u_%u", OWR_CODEC_TYPE_NONE, stream_id); + pad_name = g_strdup_printf("video_sink_%u_%u", codec_type, stream_id); else - pad_name = g_strdup_printf("audio_raw_sink_%u", stream_id); + pad_name = g_strdup_printf("audio_sink_%u_%u", codec_type, stream_id); sinkpad = gst_element_get_static_pad(transport_agent->priv->transport_bin, pad_name); g_assert(sinkpad); g_free(pad_name); @@ -1672,8 +1678,9 @@ static void prepare_transport_bin_send_elements(OwrTransportAgent *transport_age } else { GSList *sessions = get_sessions_from_stream_id(transport_agent, stream_id); guint previous_session_id = 0; + GSList *walk; // Look for the session for which the global encoder was created. 
- for (GSList *walk = sessions; walk && previous_session_id == 0; walk = g_slist_next(walk)) { + for (walk = sessions; walk && previous_session_id == 0; walk = g_slist_next(walk)) { OwrMediaSession *current = OWR_MEDIA_SESSION(walk->data); guint current_session_id; @@ -1995,6 +2002,7 @@ static gboolean emit_new_candidate(GHashTable *args) gchar *ufrag = NULL, *password = NULL; gboolean got_credentials; GSList *sessions; + GSList *walk; transport_agent = OWR_TRANSPORT_AGENT(g_hash_table_lookup(args, "transport_agent")); g_return_val_if_fail(OWR_IS_TRANSPORT_AGENT(transport_agent), FALSE); @@ -2004,7 +2012,7 @@ static gboolean emit_new_candidate(GHashTable *args) g_return_val_if_fail(nice_candidate, FALSE); sessions = get_sessions_from_stream_id(transport_agent, nice_candidate->stream_id); - for (GSList *walk = sessions; walk; walk = g_slist_next(walk)) { + for (walk = sessions; walk; walk = g_slist_next(walk)) { OwrSession *session = OWR_SESSION(walk->data); if (!nice_candidate->username || !nice_candidate->password) { got_credentials = nice_agent_get_local_credentials(priv->nice_agent, @@ -2064,12 +2072,13 @@ static gboolean emit_candidate_gathering_done(GHashTable *args) guint stream_id; int i; GSList *sessions; + GSList *walk; transport_agent = g_hash_table_lookup(args, "transport-agent"); stream_id = GPOINTER_TO_UINT(g_hash_table_lookup(args, "stream-id")); sessions = get_sessions_from_stream_id(transport_agent, stream_id); - for (GSList *walk = sessions; walk; walk = g_slist_next(walk)) { + for (walk = sessions; walk; walk = g_slist_next(walk)) { session = OWR_SESSION(walk->data); g_signal_emit_by_name(session, "on-candidate-gathering-done", NULL); @@ -2124,6 +2133,7 @@ static gboolean emit_ice_state_changed(GHashTable *args) OwrComponentType component_type; OwrIceState state; GSList *sessions; + GSList *walk; transport_agent = g_hash_table_lookup(args, "transport-agent"); stream_id = GPOINTER_TO_UINT(g_hash_table_lookup(args, "stream-id")); @@ -2131,7 +2141,7 
@@ static gboolean emit_ice_state_changed(GHashTable *args) state = GPOINTER_TO_UINT(g_hash_table_lookup(args, "ice-state")); sessions = get_sessions_from_stream_id(transport_agent, stream_id); - for (GSList *walk = sessions; walk; walk = g_slist_next(walk)) { + for (walk = sessions; walk; walk = g_slist_next(walk)) { OwrSession *session = OWR_SESSION(walk->data); _owr_session_emit_ice_state_changed(session, stream_id, component_type, state); @@ -2166,6 +2176,7 @@ static void on_new_selected_pair(NiceAgent *nice_agent, OwrTransportAgent *transport_agent) { GSList *sessions; + GSList *walk; OWR_UNUSED(nice_agent); OWR_UNUSED(lcandidate); @@ -2174,7 +2185,7 @@ static void on_new_selected_pair(NiceAgent *nice_agent, g_return_if_fail(OWR_IS_TRANSPORT_AGENT(transport_agent)); sessions = get_sessions_from_stream_id(transport_agent, stream_id); - for (GSList *walk = sessions; walk; walk = g_slist_next(walk)) { + for (walk = sessions; walk; walk = g_slist_next(walk)) { OwrSession *session = OWR_SESSION(walk->data); guint session_id = get_session_id(transport_agent, session); PendingSessionInfo *pending_session_info; @@ -2407,21 +2418,6 @@ static OwrPayload * get_payload(OwrTransportAgent *transport_agent, guint pt, Ow return NULL; } -static void update_flip_method(OwrPayload *payload, GParamSpec *pspec, GstElement *flip) -{ - guint rotation = 0; - gboolean mirror = FALSE; - gint flip_method; - - g_return_if_fail(OWR_IS_VIDEO_PAYLOAD(payload)); - g_return_if_fail(G_IS_PARAM_SPEC(pspec) || !pspec); - g_return_if_fail(GST_IS_ELEMENT(flip)); - - g_object_get(payload, "rotation", &rotation, "mirror", &mirror, NULL); - flip_method = _owr_rotation_and_mirror_to_video_flip_method(rotation, mirror); - g_object_set(flip, "method", flip_method, NULL); -} - /* pad is transfer full */ static void add_pads_to_bin_and_transport_bin(GstPad *pad, GstElement *bin, GstElement *transport_bin, const gchar *pad_name) @@ -2441,8 +2437,10 @@ static void on_caps(GstElement *sink, GParamSpec 
*pspec, OwrSession *session) g_object_get(sink, "caps", &caps, NULL); - if (GST_IS_CAPS(caps)) + if (GST_IS_CAPS(caps)) { GST_CAT_INFO_OBJECT(_owrsession_debug, session, "Sending media configured with caps: %" GST_PTR_FORMAT, caps); + gst_caps_unref(caps); + } } static void handle_new_send_payload(OwrTransportAgent *transport_agent, OwrMediaSession *media_session, OwrPayload * payload) @@ -2457,10 +2455,12 @@ static void handle_new_send_payload(OwrTransportAgent *transport_agent, OwrMedia gboolean link_ok = TRUE, sync_ok = TRUE; GstPad *sink_pad = NULL, *rtp_sink_pad = NULL, *rtp_capsfilter_src_pad = NULL, *ghost_src_pad = NULL, *encoder_sink_pad; + OwrCodecType codec_type = OWR_CODEC_TYPE_NONE; OwrMediaType media_type; - GstPadLinkReturn link_res; guint send_ssrc = 0; gchar *cname = NULL; + OwrMediaSource *media_source = NULL; + GstElement *first = NULL; g_return_if_fail(transport_agent); g_return_if_fail(media_session); @@ -2487,7 +2487,7 @@ static void handle_new_send_payload(OwrTransportAgent *transport_agent, OwrMedia link_rtpbin_to_send_output_bin(transport_agent, session_id, stream_id, TRUE, TRUE); - g_object_get(payload, "media-type", &media_type, NULL); + g_object_get(payload, "media-type", &media_type, "codec-type", &codec_type, NULL); name = g_strdup_printf("send-rtp-capsfilter-%u", stream_id); rtp_capsfilter = gst_element_factory_make("capsfilter", name); @@ -2514,119 +2514,95 @@ static void handle_new_send_payload(OwrTransportAgent *transport_agent, OwrMedia g_object_set(rtp_capsfilter, "caps", rtp_caps, NULL); gst_caps_unref(rtp_caps); - gst_bin_add(GST_BIN(send_input_bin), rtp_capsfilter); - - rtp_capsfilter_src_pad = gst_element_get_static_pad(rtp_capsfilter, "src"); - name = g_strdup_printf("src_%u", stream_id); - ghost_src_pad = ghost_pad_and_add_to_bin(rtp_capsfilter_src_pad, send_input_bin, name); - gst_object_unref(rtp_capsfilter_src_pad); - g_free(name); - link_res = gst_pad_link(ghost_src_pad, rtp_sink_pad); - g_warn_if_fail(link_res == 
GST_PAD_LINK_OK); - gst_object_unref(rtp_sink_pad); + media_source = _owr_media_session_get_send_source(media_session); - sync_ok &= gst_element_sync_state_with_parent(rtp_capsfilter); - g_warn_if_fail(sync_ok); - - if (media_type == OWR_MEDIA_TYPE_VIDEO) { - GstElement *gldownload, *flip, *queue = NULL, *encoder_capsfilter; - - name = g_strdup_printf("send-input-video-gldownload-%u", stream_id); - gldownload = gst_element_factory_make("gldownload", name); - g_free(name); - - name = g_strdup_printf("send-input-video-flip-%u", stream_id); - flip = gst_element_factory_make("videoflip", name); - g_assert(flip); - g_free(name); - g_return_if_fail(OWR_IS_VIDEO_PAYLOAD(payload)); - g_signal_connect_object(payload, "notify::rotation", G_CALLBACK(update_flip_method), flip, 0); - g_signal_connect_object(payload, "notify::mirror", G_CALLBACK(update_flip_method), flip, 0); - update_flip_method(payload, NULL, flip); - - name = g_strdup_printf("send-input-video-queue-%u", stream_id); - queue = gst_element_factory_make("queue", name); - g_free(name); - g_object_set(queue, "max-size-buffers", 3, "max-size-bytes", 0, - "max-size-time", G_GUINT64_CONSTANT(0), NULL); + if (media_type == OWR_MEDIA_TYPE_VIDEO && OWR_IS_VIDEO_PAYLOAD(payload)) { + GstElement *gldownload; + GstElement *flip = NULL, *queue = NULL, *encoder_capsfilter = NULL; + if (_owr_codec_type_is_raw(_owr_payload_get_codec_type(payload))) { + name = g_strdup_printf("send-input-video-gldownload-%u", stream_id); + gldownload = gst_element_factory_make("gldownload", name); + g_free(name); + gst_bin_add(GST_BIN(send_input_bin), gldownload); + } + if (!_owr_media_source_supports_interfaces(media_source, OWR_MEDIA_SOURCE_SUPPORTS_VIDEO_ORIENTATION)) { + name = g_strdup_printf("send-input-video-flip-%u", stream_id); + flip = gst_element_factory_make("videoflip", name); + g_assert(flip); + g_free(name); + g_signal_connect_object(payload, "notify::rotation", G_CALLBACK(_owr_update_flip_method), flip, 0); + 
g_signal_connect_object(payload, "notify::mirror", G_CALLBACK(_owr_update_flip_method), flip, 0); + _owr_update_flip_method(G_OBJECT(payload), NULL, flip); + + name = g_strdup_printf("send-input-video-queue-%u", stream_id); + queue = gst_element_factory_make("queue", name); + g_free(name); + g_object_set(queue, "max-size-buffers", 3, "max-size-bytes", 0, + "max-size-time", G_GUINT64_CONSTANT(0), NULL); + + encoder = _owr_payload_create_encoder(payload); + parser = _owr_create_parser(_owr_payload_get_codec_type(payload)); + + g_warn_if_fail(encoder); + + encoder_sink_pad = gst_element_get_static_pad(encoder, "sink"); + g_signal_connect(encoder_sink_pad, "notify::caps", G_CALLBACK(on_caps), OWR_SESSION(media_session)); + gst_object_unref(encoder_sink_pad); + + name = g_strdup_printf("send-input-video-encoder-capsfilter-%u", stream_id); + encoder_capsfilter = gst_element_factory_make("capsfilter", name); + g_free(name); + caps = _owr_payload_create_encoded_caps(payload); + g_object_set(encoder_capsfilter, "caps", caps, NULL); + gst_caps_unref(caps); + + gst_bin_add_many(GST_BIN(send_input_bin), flip, queue, encoder, NULL); + + if (parser) + gst_bin_add(GST_BIN(send_input_bin), parser); + gst_bin_add(GST_BIN(send_input_bin), encoder_capsfilter); + } + } else { /* Audio */ encoder = _owr_payload_create_encoder(payload); - parser = _owr_payload_create_parser(payload); - payloader = _owr_payload_create_payload_packetizer(payload); - g_warn_if_fail(payloader && encoder); + parser = _owr_create_parser(_owr_payload_get_codec_type(payload)); encoder_sink_pad = gst_element_get_static_pad(encoder, "sink"); g_signal_connect(encoder_sink_pad, "notify::caps", G_CALLBACK(on_caps), OWR_SESSION(media_session)); gst_object_unref(encoder_sink_pad); - name = g_strdup_printf("send-input-video-encoder-capsfilter-%u", stream_id); - encoder_capsfilter = gst_element_factory_make("capsfilter", name); - g_free(name); - caps = _owr_payload_create_encoded_caps(payload); - 
g_object_set(encoder_capsfilter, "caps", caps, NULL); - gst_caps_unref(caps); - - gst_bin_add_many(GST_BIN(send_input_bin), gldownload, flip, queue, encoder, encoder_capsfilter, payloader, NULL); - if (parser) { + gst_bin_add(GST_BIN(send_input_bin), encoder); + if (parser) gst_bin_add(GST_BIN(send_input_bin), parser); - link_ok &= gst_element_link_many(gldownload, flip, queue, encoder, parser, encoder_capsfilter, payloader, NULL); - } else - link_ok &= gst_element_link_many(gldownload, flip, queue, encoder, encoder_capsfilter, payloader, NULL); - - link_ok &= gst_element_link_many(payloader, rtp_capsfilter, NULL); + } - g_warn_if_fail(link_ok); + payloader = _owr_payload_create_payload_packetizer(payload); + g_assert(payloader); + gst_bin_add_many(GST_BIN(send_input_bin), payloader, rtp_capsfilter, NULL); - sync_ok &= gst_element_sync_state_with_parent(rtp_capsfilter); - sync_ok &= gst_element_sync_state_with_parent(payloader); - if (parser) - sync_ok &= gst_element_sync_state_with_parent(parser); - sync_ok &= gst_element_sync_state_with_parent(encoder_capsfilter); - sync_ok &= gst_element_sync_state_with_parent(encoder); - sync_ok &= gst_element_sync_state_with_parent(queue); - sync_ok &= gst_element_sync_state_with_parent(flip); - sync_ok &= gst_element_sync_state_with_parent(gldownload); - - name = g_strdup_printf("video_sink_%u_%u", OWR_CODEC_TYPE_NONE, stream_id); - sink_pad = gst_element_get_static_pad(gldownload, "sink"); - add_pads_to_bin_and_transport_bin(sink_pad, send_input_bin, - transport_agent->priv->transport_bin, name); - gst_object_unref(sink_pad); - g_free(name); - } else { /* Audio */ - encoder = _owr_payload_create_encoder(payload); - parser = _owr_payload_create_parser(payload); - payloader = _owr_payload_create_payload_packetizer(payload); + _owr_bin_link_and_sync_elements(GST_BIN(send_input_bin), &link_ok, &sync_ok, &first, NULL); + g_warn_if_fail(link_ok && sync_ok); - encoder_sink_pad = gst_element_get_static_pad(encoder, "sink"); - 
g_signal_connect(encoder_sink_pad, "notify::caps", G_CALLBACK(on_caps), OWR_SESSION(media_session)); - gst_object_unref(encoder_sink_pad); + name = g_strdup_printf("%s_sink_%u_%u", media_type == OWR_MEDIA_TYPE_VIDEO ? "video" : "audio", + codec_type, stream_id); + sink_pad = gst_element_get_static_pad(first, "sink"); + add_pads_to_bin_and_transport_bin(sink_pad, send_input_bin, + transport_agent->priv->transport_bin, name); + gst_object_unref(sink_pad); + g_free(name); - gst_bin_add_many(GST_BIN(send_input_bin), encoder, payloader, NULL); - if (parser) { - gst_bin_add(GST_BIN(send_input_bin), parser); - link_ok &= gst_element_link_many(encoder, parser, payloader, NULL); - } else - link_ok &= gst_element_link_many(encoder, payloader, NULL); + rtp_capsfilter_src_pad = gst_element_get_static_pad(rtp_capsfilter, "src"); + name = g_strdup_printf("src_%u", stream_id); + ghost_src_pad = ghost_pad_and_add_to_bin(rtp_capsfilter_src_pad, send_input_bin, name); + gst_object_unref(rtp_capsfilter_src_pad); + g_free(name); + g_warn_if_fail(gst_pad_link(ghost_src_pad, rtp_sink_pad) == GST_PAD_LINK_OK); + gst_object_unref(rtp_sink_pad); - link_ok &= gst_element_link_many(payloader, rtp_capsfilter, NULL); - g_warn_if_fail(link_ok); + GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(send_input_bin), GST_DEBUG_GRAPH_SHOW_ALL, "send-input-bin"); - sync_ok &= gst_element_sync_state_with_parent(rtp_capsfilter); - sync_ok &= gst_element_sync_state_with_parent(payloader); - if (parser) - sync_ok &= gst_element_sync_state_with_parent(parser); - sync_ok &= gst_element_sync_state_with_parent(encoder); - g_warn_if_fail(sync_ok); - - name = g_strdup_printf("audio_raw_sink_%u", stream_id); - sink_pad = gst_element_get_static_pad(encoder, "sink"); - add_pads_to_bin_and_transport_bin(sink_pad, send_input_bin, - transport_agent->priv->transport_bin, name); - gst_object_unref(sink_pad); - g_free(name); - } + g_object_unref(media_source); } static void on_new_remote_candidate(OwrTransportAgent *transport_agent, 
gboolean forced, OwrSession *session) @@ -2769,7 +2745,7 @@ static void on_transport_bin_pad_added(GstElement *transport_bin, GstPad *new_pa sscanf(new_pad_name, "video_src_%u_%u_%u", &codec_type, &session_id, &stream_id); } - if (media_type != OWR_MEDIA_TYPE_UNKNOWN && codec_type == OWR_CODEC_TYPE_NONE) + if (media_type != OWR_MEDIA_TYPE_UNKNOWN) signal_incoming_source(media_type, transport_agent, session_id, stream_id, codec_type); g_free(new_pad_name); @@ -2852,18 +2828,29 @@ static GstPadProbeReturn check_for_keyframe(GstPad *pad, GstPadProbeInfo *info, return GST_PAD_PROBE_OK; } +static gboolean force_key_unit_event(gpointer data) +{ + GstElement *videorepair = GST_ELEMENT_CAST(data); + GstPad *sink_pad = gst_element_get_static_pad(videorepair, "sink"); + gst_pad_push_event(sink_pad, gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0)); + gst_object_unref(sink_pad); + gst_object_unref(videorepair); + return G_SOURCE_REMOVE; +} + static void setup_video_receive_elements(GstPad *new_pad, guint32 session_id, guint32 stream_id, OwrPayload *payload, OwrTransportAgent *transport_agent) { - GstPad *depay_sink_pad = NULL, *ghost_pad = NULL; + GstPad *sink_pad = NULL, *ghost_pad = NULL; gboolean sync_ok = TRUE; GstElement *receive_output_bin; - GstElement *rtpdepay, *videorepair1, *parser, *decoder; + GstElement *rtpdepay, *videorepair1, *parser; GstPadLinkReturn link_res; gboolean link_ok = TRUE; OwrCodecType codec_type; gchar name[100]; GstPad *pad; SessionData *session_data; + GstElement *first = NULL, *last = NULL; g_snprintf(name, OWR_OBJECT_NAME_LENGTH_MAX, "receive-output-bin-%u-%u", session_id, stream_id); receive_output_bin = gst_bin_new(name); @@ -2875,8 +2862,16 @@ static void setup_video_receive_elements(GstPad *new_pad, guint32 session_id, gu } rtpdepay = _owr_payload_create_payload_depacketizer(payload); + gst_bin_add(GST_BIN(receive_output_bin), rtpdepay); + + codec_type = _owr_payload_get_codec_type(payload); + parser = 
_owr_create_parser(codec_type); + if (parser) + gst_bin_add(GST_BIN(receive_output_bin), parser); + g_snprintf(name, OWR_OBJECT_NAME_LENGTH_MAX, "videorepair1_%u", stream_id); videorepair1 = gst_element_factory_make("videorepair", name); + gst_bin_add(GST_BIN(receive_output_bin), videorepair1); pad = gst_element_get_static_pad(videorepair1, "src"); session_data = g_slice_new(SessionData); @@ -2887,34 +2882,23 @@ static void setup_video_receive_elements(GstPad *new_pad, guint32 session_id, gu gst_object_unref(pad); pad = NULL; - g_object_get(payload, "codec-type", &codec_type, NULL); - parser = _owr_payload_create_parser(payload); - decoder = _owr_payload_create_decoder(payload); + // The OMX video decoder and vp8dec element don't seem to handle very well the incoming + // streams from Chrome, not taking into account the first intra frame + // received. So for now, force a PLI request towards the sender after 250ms. + g_timeout_add(250, force_key_unit_event, gst_object_ref(videorepair1)); - gst_bin_add_many(GST_BIN(receive_output_bin), rtpdepay, - videorepair1, decoder, /*decoded_tee,*/ NULL); - depay_sink_pad = gst_element_get_static_pad(rtpdepay, "sink"); - if (parser) { - gst_bin_add(GST_BIN(receive_output_bin), parser); - link_ok &= gst_element_link_many(rtpdepay, parser, videorepair1, decoder, NULL); - } else - link_ok &= gst_element_link_many(rtpdepay, videorepair1, decoder, NULL); + _owr_bin_link_and_sync_elements(GST_BIN(receive_output_bin), &link_ok, &sync_ok, &first, &last); + g_warn_if_fail(link_ok && sync_ok); - ghost_pad = ghost_pad_and_add_to_bin(depay_sink_pad, receive_output_bin, "sink"); + sink_pad = gst_element_get_static_pad(first, "sink"); + ghost_pad = ghost_pad_and_add_to_bin(sink_pad, receive_output_bin, "sink"); link_res = gst_pad_link(new_pad, ghost_pad); - gst_object_unref(depay_sink_pad); + gst_object_unref(sink_pad); ghost_pad = NULL; - g_warn_if_fail(link_ok && (link_res == GST_PAD_LINK_OK)); - - sync_ok &= 
gst_element_sync_state_with_parent(decoder); - if (parser) - sync_ok &= gst_element_sync_state_with_parent(parser); - sync_ok &= gst_element_sync_state_with_parent(videorepair1); - sync_ok &= gst_element_sync_state_with_parent(rtpdepay); - g_warn_if_fail(sync_ok); + g_warn_if_fail(link_res == GST_PAD_LINK_OK); - pad = gst_element_get_static_pad(decoder, "src"); - g_snprintf(name, OWR_OBJECT_NAME_LENGTH_MAX, "video_src_%u_%u_%u", OWR_CODEC_TYPE_NONE, + pad = gst_element_get_static_pad(last, "src"); + g_snprintf(name, OWR_OBJECT_NAME_LENGTH_MAX, "video_src_%u_%u_%u", codec_type, session_id, stream_id); add_pads_to_bin_and_transport_bin(pad, receive_output_bin, transport_agent->priv->transport_bin, name); gst_object_unref(pad); @@ -2948,8 +2932,8 @@ static void setup_audio_receive_elements(GstPad *new_pad, guint32 session_id, gu rtpdepay = _owr_payload_create_payload_depacketizer(payload); - parser = _owr_payload_create_parser(payload); - decoder = _owr_payload_create_decoder(payload); + parser = _owr_create_parser(_owr_payload_get_codec_type(payload)); + decoder = _owr_create_decoder(_owr_payload_get_codec_type(payload)); gst_bin_add_many(GST_BIN(receive_output_bin), rtp_capsfilter, rtpdepay, decoder, NULL); @@ -3111,8 +3095,9 @@ static GstElement * on_rtpbin_request_aux_receiver(G_GNUC_UNUSED GstElement *rtp g_object_unref(media_session); if ((transport_agent->priv->bundle_policy == OWR_BUNDLE_POLICY_TYPE_MAX_BUNDLE) && !pt_map) { + GSList* walk; GST_DEBUG("no valid pt_map found, looking for one in the staged sessions"); - for (GSList* walk = transport_agent->priv->unstarted_sessions; walk; walk = g_slist_next(walk)) { + for (walk = transport_agent->priv->unstarted_sessions; walk; walk = g_slist_next(walk)) { OwrSession* session = OWR_SESSION(walk->data); if (!OWR_IS_MEDIA_SESSION(session)) continue; @@ -4513,9 +4498,12 @@ static gboolean dump_bin(gpointer data) void owr_transport_agent_start(OwrTransportAgent *agent) { + GSList *walk; + if 
(!agent->priv->unstarted_sessions) return; - for (GSList *walk = agent->priv->unstarted_sessions; walk; walk = g_slist_next(walk)) { + + for (walk = agent->priv->unstarted_sessions; walk; walk = g_slist_next(walk)) { GHashTable *args; args = _owr_create_schedule_table(OWR_MESSAGE_ORIGIN(agent)); g_hash_table_insert(args, "transport_agent", agent); @@ -4653,7 +4641,8 @@ static GstPadProbeReturn probe_rtp_info(GstPad *srcpad, GstPadProbeInfo *info, S rx_payload = _owr_media_session_get_receive_payload(media_session, pt); } else { GSList *sessions = get_sessions_from_stream_id(transport_agent, stream_id); - for (GSList *walk = sessions; walk; walk = g_slist_next(walk)) { + GSList *walk; + for (walk = sessions; walk; walk = g_slist_next(walk)) { media_session = OWR_MEDIA_SESSION(walk->data); rx_payload = _owr_media_session_get_receive_payload(media_session, pt); if (rx_payload)